aboutsummaryrefslogtreecommitdiff
path: root/venv/lib/python3.8/site-packages/_plotly_utils
diff options
context:
space:
mode:
authorsotech117 <michael_foiani@brown.edu>2025-07-31 17:27:24 -0400
committersotech117 <michael_foiani@brown.edu>2025-07-31 17:27:24 -0400
commit5bf22fc7e3c392c8bd44315ca2d06d7dca7d084e (patch)
tree8dacb0f195df1c0788d36dd0064f6bbaa3143ede /venv/lib/python3.8/site-packages/_plotly_utils
parentb832d364da8c2efe09e3f75828caf73c50d01ce3 (diff)
add code for analysis of data
Diffstat (limited to 'venv/lib/python3.8/site-packages/_plotly_utils')
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/__init__.py0
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/basevalidators.py2711
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/colors/__init__.py883
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/colors/_swatches.py161
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/colors/carto.py419
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/colors/cmocean.py296
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/colors/colorbrewer.py494
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/colors/cyclical.py157
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/colors/diverging.py75
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/colors/plotlyjs.py180
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/colors/qualitative.py184
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/colors/sequential.py257
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/data_utils.py75
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/exceptions.py97
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/files.py37
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/importers.py50
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/optional_imports.py36
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/png.py2350
-rw-r--r--venv/lib/python3.8/site-packages/_plotly_utils/utils.py557
19 files changed, 9019 insertions, 0 deletions
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/__init__.py b/venv/lib/python3.8/site-packages/_plotly_utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/__init__.py
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/basevalidators.py b/venv/lib/python3.8/site-packages/_plotly_utils/basevalidators.py
new file mode 100644
index 0000000..0d7e387
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/basevalidators.py
@@ -0,0 +1,2711 @@
+import base64
+import numbers
+import textwrap
+import uuid
+from importlib import import_module
+import copy
+import io
+import re
+import sys
+import narwhals.stable.v1 as nw
+
+from _plotly_utils.optional_imports import get_module
+
+
+# back-port of fullmatch from Py3.4+
def fullmatch(regex, string, flags=0):
    """Emulate python-3.4 re.fullmatch()."""
    # Accept either a compiled pattern or a raw pattern string
    regex_string = regex.pattern if hasattr(regex, "pattern") else regex
    # Anchor with \Z so the whole string must match, like re.fullmatch
    return re.match("(?:" + regex_string + r")\Z", string, flags=flags)
+
+
+# Utility functions
+# -----------------
def to_scalar_or_list(v):
    """
    Convert a value to a native Python scalar, or a (possibly nested)
    list of native scalars.

    Handles the case where 'v' is a non-native scalar-like type,
    such as numpy.float32. Without this case, the object might be
    considered numpy-convertable and therefore promoted to a
    0-dimensional array, but we instead want it converted to a
    Python native scalar type ('float' in the example above).
    We explicitly check if it has the 'item' method, which conventionally
    converts these types to native scalars.
    """
    np = get_module("numpy", should_load=False)
    pd = get_module("pandas", should_load=False)
    if np and np.isscalar(v) and hasattr(v, "item"):
        return v.item()
    if isinstance(v, (list, tuple)):
        return [to_scalar_or_list(e) for e in v]
    elif np and isinstance(v, np.ndarray):
        if v.ndim == 0:
            # 0-d arrays unwrap to a plain scalar
            return v.item()
        return [to_scalar_or_list(e) for e in v]
    elif pd and isinstance(v, (pd.Series, pd.Index)):
        return [to_scalar_or_list(e) for e in v]
    elif is_numpy_convertable(v):
        # Bug fix: numpy was only looked up with should_load=False above, so
        # `np` may still be None here even though numpy is installed (just not
        # yet imported). Load it for real before using np.array.
        np = get_module("numpy")
        if np is None:
            # numpy genuinely unavailable; return the value unchanged
            return v
        return to_scalar_or_list(np.array(v))
    else:
        return v
+
+
def copy_to_readonly_numpy_array(v, kind=None, force_numeric=False):
    """
    Convert an array-like value into a read-only numpy array

    Parameters
    ----------
    v : array like
        Array like value (list, tuple, numpy array, pandas series, etc.)
    kind : str or tuple of str
        If specified, the numpy dtype kind (or kinds) that the array should
        have, or be converted to if possible.
        If not specified then let numpy infer the datatype
    force_numeric : bool
        If true, raise an exception if the resulting numpy array does not
        have a numeric dtype (i.e. dtype.kind not in ['u', 'i', 'f'])

    Returns
    -------
    np.ndarray
        Numpy array with the 'WRITEABLE' flag set to False

    Raises
    ------
    ValueError
        If force_numeric is True and the result is not numeric.
    """
    np = get_module("numpy")

    # numpy is a hard requirement for this conversion
    assert np is not None

    # ### Process kind ###
    # Normalize `kind` to a (possibly empty) tuple of dtype-kind characters
    if not kind:
        kind = ()
    elif isinstance(kind, str):
        kind = (kind,)

    first_kind = kind[0] if kind else None

    # u: unsigned int, i: signed int, f: float
    numeric_kinds = {"u", "i", "f"}
    # Default numpy dtype used when coercing to a requested kind
    kind_default_dtypes = {
        "u": "uint32",
        "i": "int32",
        "f": "float64",
        "O": "object",
    }

    # With `pass_through=True`, the original object will be returned if unable to convert
    # to a Narwhals DataFrame or Series.
    v = nw.from_native(v, allow_series=True, pass_through=True)

    if isinstance(v, nw.Series):
        if v.dtype == nw.Datetime and v.dtype.time_zone is not None:
            # Remove time zone so that local time is displayed
            v = v.dt.replace_time_zone(None).to_numpy()
        else:
            v = v.to_numpy()
    elif isinstance(v, nw.DataFrame):
        # Strip time zones column-by-column before converting the whole
        # frame to a single numpy array
        schema = v.schema
        overrides = {}
        for key, val in schema.items():
            if val == nw.Datetime and val.time_zone is not None:
                # Remove time zone so that local time is displayed
                overrides[key] = nw.col(key).dt.replace_time_zone(None)
        if overrides:
            v = v.with_columns(**overrides)
        v = v.to_numpy()

    if not isinstance(v, np.ndarray):
        # v has its own logic on how to convert itself into a numpy array
        if is_numpy_convertable(v):
            return copy_to_readonly_numpy_array(
                np.array(v), kind=kind, force_numeric=force_numeric
            )
        else:
            # v is not homogenous array
            v_list = [to_scalar_or_list(e) for e in v]

            # Lookup dtype for requested kind, if any
            dtype = kind_default_dtypes.get(first_kind, None)

            # construct new array from list
            new_v = np.array(v_list, order="C", dtype=dtype)
    elif v.dtype.kind in numeric_kinds:
        # v is a homogenous numeric array
        if kind and v.dtype.kind not in kind:
            # Kind(s) were specified and this array doesn't match
            # Convert to the default dtype for the first kind
            dtype = kind_default_dtypes.get(first_kind, None)
            new_v = np.ascontiguousarray(v.astype(dtype))
        else:
            # Either no kind was requested or requested kind is satisfied
            new_v = np.ascontiguousarray(v.copy())
    else:
        # v is a non-numeric homogenous array
        new_v = v.copy()

    # Handle force numeric param
    # --------------------------
    if force_numeric and new_v.dtype.kind not in numeric_kinds:
        raise ValueError(
            "Input value is not numeric and force_numeric parameter set to True"
        )

    if "U" not in kind:
        # Force non-numeric arrays to have object type
        # --------------------------------------------
        # Here we make sure that non-numeric arrays have the object
        # datatype. This works around cases like np.array([1, 2, '3']) where
        # numpy converts the integers to strings and returns array of dtype
        # '<U21'
        # NOTE(review): this rebuilds from `v` (post-narwhals-unwrap), not
        # `new_v` — presumably intentional to preserve the original elements;
        # confirm before changing.
        if new_v.dtype.kind not in ["u", "i", "f", "O", "M"]:
            new_v = np.array(v, dtype="object")

    # Set new array to be read-only
    # -----------------------------
    new_v.flags["WRITEABLE"] = False

    return new_v
+
+
def is_numpy_convertable(v):
    """
    Return whether a value is meaningfully convertable to a numpy array
    via 'numpy.array'
    """
    # Objects exposing either numpy array protocol can be fed to np.array
    return any(hasattr(v, proto) for proto in ("__array__", "__array_interface__"))
+
+
def is_homogeneous_array(v):
    """
    Return whether a value is considered to be a homogeneous array
    """
    np = get_module("numpy", should_load=False)
    pd = get_module("pandas", should_load=False)

    # Known homogeneous container types (only checked if the library
    # is already imported)
    if np is not None and isinstance(v, np.ndarray):
        return True
    if pd is not None and isinstance(v, (pd.Series, pd.Index)):
        return True
    if isinstance(v, nw.Series):
        return True

    # Anything numpy can convert counts too, unless it converts to a
    # 0-d array (i.e. it is essentially a scalar)
    if is_numpy_convertable(v):
        np = get_module("numpy", should_load=True)
        if np is not None:
            return np.array(v).shape != ()
    return False
+
+
def is_simple_array(v):
    """
    Return whether a value is considered to be a simple array
    (a plain python list or tuple).
    """
    return isinstance(v, list) or isinstance(v, tuple)
+
+
def is_array(v):
    """
    Return whether a value is considered to be an array
    (either a simple list/tuple or a homogeneous array).
    """
    if is_simple_array(v):
        return True
    return is_homogeneous_array(v)
+
+
def type_str(v):
    """
    Return a type string of the form module.name for the input value v
    """
    # Accept either an instance or a type object
    cls = v if isinstance(v, type) else type(v)
    return "'{module}.{name}'".format(module=cls.__module__, name=cls.__name__)
+
+
def is_typed_array_spec(v):
    """
    Return whether a value is considered to be a typed array spec for plotly.js
    (a dict carrying both 'bdata' and 'dtype' keys).
    """
    if not isinstance(v, dict):
        return False
    return "bdata" in v and "dtype" in v
+
+
def is_none_or_typed_array_spec(v):
    """Return True when v is None or a plotly.js typed array spec."""
    if v is None:
        return True
    return is_typed_array_spec(v)
+
+
+# Validators
+# ----------
class BaseValidator(object):
    """
    Base class for all validator classes
    """

    def __init__(self, plotly_name, parent_name, role=None, **_):
        """
        Construct a validator instance

        Parameters
        ----------
        plotly_name : str
            Name of the property being validated
        parent_name : str
            Names of all of the ancestors of this property joined on '.'
            characters. e.g.
            plotly_name == 'range' and parent_name == 'layout.xaxis'
        role : str
            The role string for the property as specified in
            plot-schema.json
        """
        self.parent_name = parent_name
        self.plotly_name = plotly_name
        self.role = role
        # Subclasses that accept array values override this to True
        self.array_ok = False

    def description(self):
        """
        Returns a string that describes the values that are acceptable
        to the validator

        Should start with:
            The '{plotly_name}' property is a...

        For consistency, string should have leading 4-space indent
        """
        raise NotImplementedError()

    def raise_invalid_val(self, v, inds=None):
        """
        Helper method to raise an informative exception when an invalid
        value is passed to the validate_coerce method.

        Parameters
        ----------
        v :
            Value that was input to validate_coerce and could not be coerced
        inds: list of int or None (default)
            Indexes to display after property name. e.g. if self.plotly_name
            is 'prop' and inds=[2, 1] then the name in the validation error
            message will be 'prop[2][1]`

        Raises
        -------
        ValueError
        """
        name = self.plotly_name
        if inds:
            # Append one "[i]" suffix per index, innermost last
            for i in inds:
                name += "[" + str(i) + "]"

        raise ValueError(
            """
    Invalid value of type {typ} received for the '{name}' property of {pname}
        Received value: {v}

{valid_clr_desc}""".format(
                name=name,
                pname=self.parent_name,
                typ=type_str(v),
                v=repr(v),
                valid_clr_desc=self.description(),
            )
        )

    def raise_invalid_elements(self, invalid_els):
        # Raise only when there is at least one invalid element; at most
        # the first 10 offenders are shown in the message
        if invalid_els:
            raise ValueError(
                """
    Invalid element(s) received for the '{name}' property of {pname}
        Invalid elements include: {invalid}

{valid_clr_desc}""".format(
                    name=self.plotly_name,
                    pname=self.parent_name,
                    invalid=invalid_els[:10],
                    valid_clr_desc=self.description(),
                )
            )

    def validate_coerce(self, v):
        """
        Validate whether an input value is compatible with this property,
        and coerce the value to be compatible if possible.

        Parameters
        ----------
        v
            The input value to be validated

        Raises
        ------
        ValueError
            if `v` cannot be coerced into a compatible form

        Returns
        -------
        The input `v` in a form that's compatible with this property
        """
        raise NotImplementedError()

    def present(self, v):
        """
        Convert output value of a previous call to `validate_coerce` into a
        form suitable to be returned to the user upon property
        access.

        Note: The value returned by present must be either immutable or an
        instance of BasePlotlyType, otherwise the value could be mutated by
        the user and we wouldn't get notified about the change.

        Parameters
        ----------
        v
            A value that was the output of a previous call the
            `validate_coerce` method on the same object

        Returns
        -------

        """
        if is_homogeneous_array(v):
            # Note: numpy array was already coerced into read-only form so
            # we don't need to copy it here.
            return v
        elif is_simple_array(v):
            # Tuples are immutable, so hand back a tuple copy of lists
            return tuple(v)
        else:
            return v
+
+
class DataArrayValidator(BaseValidator):
    """
    "data_array": {
        "description": "An {array} of data. The value MUST be an
                        {array}, or we ignore it.",
        "requiredOpts": [],
        "otherOpts": [
            "dflt"
        ]
    },
    """

    def __init__(self, plotly_name, parent_name, **kwargs):
        super(DataArrayValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
        # Data arrays always accept array-valued input
        self.array_ok = True

    def description(self):
        return """\
    The '{plotly_name}' property is an array that may be specified as a tuple,
    list, numpy array, or pandas Series""".format(plotly_name=self.plotly_name)

    def validate_coerce(self, v):
        # None and plotly.js typed-array specs pass through untouched
        if is_none_or_typed_array_spec(v):
            return v
        # Homogeneous arrays are coerced to read-only numpy arrays
        if is_homogeneous_array(v):
            return copy_to_readonly_numpy_array(v)
        # Plain lists/tuples are normalized to native python scalars
        if is_simple_array(v):
            return to_scalar_or_list(v)
        # Anything else is invalid
        self.raise_invalid_val(v)
+
+
class EnumeratedValidator(BaseValidator):
    """
    "enumerated": {
        "description": "Enumerated value type. The available values are
                        listed in `values`.",
        "requiredOpts": [
            "values"
        ],
        "otherOpts": [
            "dflt",
            "coerceNumber",
            "arrayOk"
        ]
    },
    """

    def __init__(
        self,
        plotly_name,
        parent_name,
        values,
        array_ok=False,
        coerce_number=False,
        **kwargs,
    ):
        super(EnumeratedValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

        # Save params
        # -----------
        self.values = values
        self.array_ok = array_ok
        # coerce_number is rarely used and not implemented
        self.coerce_number = coerce_number
        self.kwargs = kwargs

        # Handle regular expressions
        # --------------------------
        # Compiled regexs
        self.val_regexs = []

        # regex replacements that run before the matching regex
        # So far, this is only used to cast 'x1' -> 'x' for anchor-style
        # enumeration properties
        self.regex_replacements = []

        # Loop over enumeration values
        # ----------------------------
        # Look for regular expressions
        for v in self.values:
            if v and isinstance(v, str) and v[0] == "/" and v[-1] == "/" and len(v) > 1:
                # String is a regex with leading and trailing '/' character
                regex_str = v[1:-1]
                self.val_regexs.append(re.compile(regex_str))
                self.regex_replacements.append(
                    EnumeratedValidator.build_regex_replacement(regex_str)
                )
            else:
                # Plain enumeration value: no regex, no replacement entry
                self.val_regexs.append(None)
                self.regex_replacements.append(None)

    def __deepcopy__(self, memodict={}):
        """
        A custom deepcopy method is needed here because compiled regex
        objects don't support deepcopy

        NOTE(review): the mutable default `memodict={}` is never mutated
        here, so it is harmless, but it is the classic mutable-default
        anti-pattern and worth cleaning up.
        """
        cls = self.__class__
        return cls(self.plotly_name, self.parent_name, values=self.values)

    @staticmethod
    def build_regex_replacement(regex_str):
        """
        Return a (pattern, replacement) pair for anchor-style regexes,
        or None if regex_str is not an anchor-style regex.
        """
        # Example: regex_str == r"^y([2-9]|[1-9][0-9]+)?$"
        #
        # When we see a regular expression like the one above, we want to
        # build regular expression replacement params that will remove a
        # suffix of 1 from the input string ('y1' -> 'y' in this example)
        #
        # Why?: Regular expressions like this one are used in enumeration
        # properties that refer to subplotids (e.g. layout.annotation.xref)
        # The regular expressions forbid suffixes of 1, like 'x1'. But we
        # want to accept 'x1' and coerce it into 'x'
        #
        # To be cautious, we only perform this conversion for enumerated
        # values that match the anchor-style regex
        match = re.match(
            r"\^(\w)\(\[2\-9\]\|\[1\-9\]\[0\-9\]\+\)\?\( domain\)\?\$", regex_str
        )

        if match:
            anchor_char = match.group(1)
            # e.g. ('^x1$', 'x') — strip the "1" suffix from the anchor
            return "^" + anchor_char + "1$", anchor_char
        else:
            return None

    def perform_replacemenet(self, v):
        """
        Return v with any applicable regex replacements applied

        NOTE(review): method name is misspelled ("replacemenet") but is
        kept as-is since other code in this class/module calls it by
        this name.
        """
        if isinstance(v, str):
            for repl_args in self.regex_replacements:
                if repl_args:
                    v = re.sub(repl_args[0], repl_args[1], v)

        return v

    def description(self):
        """Build the human-readable description of accepted values."""
        # Separate regular values from regular expressions
        enum_vals = []
        enum_regexs = []
        for v, regex in zip(self.values, self.val_regexs):
            if regex is not None:
                enum_regexs.append(regex.pattern)
            else:
                enum_vals.append(v)
        desc = """\
    The '{name}' property is an enumeration that may be specified as:""".format(
            name=self.plotly_name
        )

        if enum_vals:
            enum_vals_str = "\n".join(
                textwrap.wrap(
                    repr(enum_vals),
                    initial_indent=" " * 12,
                    subsequent_indent=" " * 12,
                    break_on_hyphens=False,
                )
            )

            desc = (
                desc
                + """
      - One of the following enumeration values:
{enum_vals_str}""".format(enum_vals_str=enum_vals_str)
            )

        if enum_regexs:
            enum_regexs_str = "\n".join(
                textwrap.wrap(
                    repr(enum_regexs),
                    initial_indent=" " * 12,
                    subsequent_indent=" " * 12,
                    break_on_hyphens=False,
                )
            )

            desc = (
                desc
                + """
      - A string that matches one of the following regular expressions:
{enum_regexs_str}""".format(enum_regexs_str=enum_regexs_str)
            )

        if self.array_ok:
            desc = (
                desc
                + """
      - A tuple, list, or one-dimensional numpy array of the above"""
            )

        return desc

    def in_values(self, e):
        """
        Return whether a value matches one of the enumeration options
        """
        is_str = isinstance(e, str)
        for v, regex in zip(self.values, self.val_regexs):
            if is_str and regex:
                # Regex options require a full-string match
                in_values = fullmatch(regex, e) is not None
                # in_values = regex.fullmatch(e) is not None
            else:
                in_values = e == v

            if in_values:
                return True

        return False

    def validate_coerce(self, v):
        """Validate v (scalar, or array when array_ok) against the enumeration."""
        if is_none_or_typed_array_spec(v):
            pass
        elif self.array_ok and is_array(v):
            # Apply 'x1' -> 'x' style replacements element-wise first
            v_replaced = [self.perform_replacemenet(v_el) for v_el in v]

            invalid_els = [e for e in v_replaced if (not self.in_values(e))]
            if invalid_els:
                self.raise_invalid_elements(invalid_els[:10])

            if is_homogeneous_array(v):
                v = copy_to_readonly_numpy_array(v)
            else:
                v = to_scalar_or_list(v)
        else:
            v = self.perform_replacemenet(v)
            if not self.in_values(v):
                self.raise_invalid_val(v)
        return v
+
+
class BooleanValidator(BaseValidator):
    """
    "boolean": {
        "description": "A boolean (true/false) value.",
        "requiredOpts": [],
        "otherOpts": [
            "dflt"
        ]
    },
    """

    def __init__(self, plotly_name, parent_name, **kwargs):
        super(BooleanValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

    def description(self):
        return """\
    The '{plotly_name}' property must be specified as a bool
    (either True, or False)""".format(plotly_name=self.plotly_name)

    def validate_coerce(self, v):
        # None and plotly.js typed-array specs are accepted unchanged
        if is_none_or_typed_array_spec(v):
            return v
        # Only genuine bools are accepted; no truthiness coercion
        if isinstance(v, bool):
            return v
        self.raise_invalid_val(v)
+
+
class SrcValidator(BaseValidator):
    """
    Validator for '...src' properties, which accept either a plain string
    or a plotly.grid_objs.Column object (coerced to its id string).
    """

    def __init__(self, plotly_name, parent_name, **kwargs):
        super(SrcValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
        # Optional dependency: None when chart_studio is not installed
        self.chart_studio = get_module("chart_studio")

    def description(self):
        return """\
    The '{plotly_name}' property must be specified as a string or
    as a plotly.grid_objs.Column object""".format(plotly_name=self.plotly_name)

    def validate_coerce(self, v):
        # None, typed-array specs, and plain strings pass through as-is
        if is_none_or_typed_array_spec(v) or isinstance(v, str):
            return v
        cs = self.chart_studio
        if cs and isinstance(v, cs.grid_objs.Column):
            # Grid columns are referenced by their id string
            return v.id
        self.raise_invalid_val(v)
+
+
class NumberValidator(BaseValidator):
    """
    "number": {
        "description": "A number or a numeric value (e.g. a number
                        inside a string). When applicable, values
                        greater (less) than `max` (`min`) are coerced to
                        the `dflt`.",
        "requiredOpts": [],
        "otherOpts": [
            "dflt",
            "min",
            "max",
            "arrayOk"
        ]
    },
    """

    def __init__(
        self, plotly_name, parent_name, min=None, max=None, array_ok=False, **kwargs
    ):
        super(NumberValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

        # Handle min
        if min is None and max is not None:
            # Max was specified, so make min -inf
            self.min_val = float("-inf")
        else:
            self.min_val = min

        # Handle max
        if max is None and min is not None:
            # Min was specified, so make max inf
            self.max_val = float("inf")
        else:
            self.max_val = max

        # Record whether any range check is needed at all
        if min is not None or max is not None:
            self.has_min_max = True
        else:
            self.has_min_max = False

        self.array_ok = array_ok

    def description(self):
        """Build the human-readable description of accepted values."""
        desc = """\
    The '{plotly_name}' property is a number and may be specified as:""".format(
            plotly_name=self.plotly_name
        )

        if not self.has_min_max:
            desc = (
                desc
                + """
      - An int or float"""
            )

        else:
            desc = (
                desc
                + """
      - An int or float in the interval [{min_val}, {max_val}]""".format(
                    min_val=self.min_val, max_val=self.max_val
                )
            )

        if self.array_ok:
            desc = (
                desc
                + """
      - A tuple, list, or one-dimensional numpy array of the above"""
            )

        return desc

    def validate_coerce(self, v):
        """Validate v (scalar, or array when array_ok) as numeric, in range."""
        if is_none_or_typed_array_spec(v):
            pass
        elif self.array_ok and is_homogeneous_array(v):
            np = get_module("numpy")
            try:
                v_array = copy_to_readonly_numpy_array(v, force_numeric=True)
            except (ValueError, TypeError, OverflowError):
                self.raise_invalid_val(v)

            # Check min/max
            if self.has_min_max:
                v_valid = np.logical_and(
                    self.min_val <= v_array, v_array <= self.max_val
                )

                if not np.all(v_valid):
                    # Grab up to the first 10 invalid values
                    v_invalid = np.logical_not(v_valid)
                    some_invalid_els = np.array(v, dtype="object")[v_invalid][
                        :10
                    ].tolist()

                    self.raise_invalid_elements(some_invalid_els)

            v = v_array  # Always numeric numpy array
        elif self.array_ok and is_simple_array(v):
            # Check numeric
            # NOTE(review): bool is a subclass of numbers.Number in Python,
            # so True/False pass this check — presumably acceptable; confirm.
            invalid_els = [e for e in v if not isinstance(e, numbers.Number)]

            if invalid_els:
                self.raise_invalid_elements(invalid_els[:10])

            # Check min/max
            if self.has_min_max:
                invalid_els = [e for e in v if not (self.min_val <= e <= self.max_val)]

                if invalid_els:
                    self.raise_invalid_elements(invalid_els[:10])

            v = to_scalar_or_list(v)
        else:
            # Check numeric
            if not isinstance(v, numbers.Number):
                self.raise_invalid_val(v)

            # Check min/max
            if self.has_min_max:
                if not (self.min_val <= v <= self.max_val):
                    self.raise_invalid_val(v)
        return v
+
+
class IntegerValidator(BaseValidator):
    """
    "integer": {
        "description": "An integer or an integer inside a string. When
                        applicable, values greater (less) than `max`
                        (`min`) are coerced to the `dflt`.",
        "requiredOpts": [],
        "otherOpts": [
            "dflt",
            "min",
            "max",
            "extras",
            "arrayOk"
        ]
    },
    """

    def __init__(
        self,
        plotly_name,
        parent_name,
        min=None,
        max=None,
        extras=None,
        array_ok=False,
        **kwargs,
    ):
        super(IntegerValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

        # Handle min
        if min is None and max is not None:
            # Max was specified, so make min the smallest native int
            self.min_val = -sys.maxsize - 1
        else:
            self.min_val = min

        # Handle max
        if max is None and min is not None:
            # Min was specified, so make max the largest native int
            self.max_val = sys.maxsize
        else:
            self.max_val = max

        # Record whether any range check is needed at all
        if min is not None or max is not None:
            self.has_min_max = True
        else:
            self.has_min_max = False

        # extras: non-integer sentinel values that are also accepted
        self.extras = extras if extras is not None else []
        self.array_ok = array_ok

    def description(self):
        """Build the human-readable description of accepted values."""
        desc = """\
    The '{plotly_name}' property is a integer and may be specified as:""".format(
            plotly_name=self.plotly_name
        )

        if not self.has_min_max:
            desc = (
                desc
                + """
      - An int (or float that will be cast to an int)"""
            )
        else:
            desc = desc + (
                """
      - An int (or float that will be cast to an int)
        in the interval [{min_val}, {max_val}]""".format(
                    min_val=self.min_val, max_val=self.max_val
                )
            )

        # Extras
        if self.extras:
            desc = desc + (
                """
        OR exactly one of {extras} (e.g. '{eg_extra}')"""
            ).format(extras=self.extras, eg_extra=self.extras[-1])

        if self.array_ok:
            desc = (
                desc
                + """
      - A tuple, list, or one-dimensional numpy array of the above"""
            )

        return desc

    def validate_coerce(self, v):
        """Validate v (scalar, or array when array_ok) as integer(s), in range."""
        if is_none_or_typed_array_spec(v):
            pass
        elif v in self.extras:
            # NOTE(review): this membership test runs before the array
            # branches, so a numpy array input would hit `in` here —
            # presumably extras is only combined with scalar-ish inputs;
            # confirm.
            return v
        elif self.array_ok and is_homogeneous_array(v):
            np = get_module("numpy")
            v_array = copy_to_readonly_numpy_array(
                v, kind=("i", "u"), force_numeric=True
            )

            if v_array.dtype.kind not in ["i", "u"]:
                self.raise_invalid_val(v)

            # Check min/max
            if self.has_min_max:
                v_valid = np.logical_and(
                    self.min_val <= v_array, v_array <= self.max_val
                )

                if not np.all(v_valid):
                    # Grab up to the first 10 invalid values
                    v_invalid = np.logical_not(v_valid)
                    some_invalid_els = np.array(v, dtype="object")[v_invalid][
                        :10
                    ].tolist()
                    self.raise_invalid_elements(some_invalid_els)

            v = v_array
        elif self.array_ok and is_simple_array(v):
            # Check integer type
            invalid_els = [
                e for e in v if not isinstance(e, int) and e not in self.extras
            ]

            if invalid_els:
                self.raise_invalid_elements(invalid_els[:10])

            # Check min/max
            if self.has_min_max:
                invalid_els = [
                    e
                    for e in v
                    if not (isinstance(e, int) and self.min_val <= e <= self.max_val)
                    and e not in self.extras
                ]

                if invalid_els:
                    self.raise_invalid_elements(invalid_els[:10])

            v = to_scalar_or_list(v)
        else:
            # Check int
            if not isinstance(v, int):
                # don't let int() cast strings to ints
                self.raise_invalid_val(v)

            # Check min/max
            if self.has_min_max:
                if not (self.min_val <= v <= self.max_val):
                    self.raise_invalid_val(v)

        return v
+
+
class StringValidator(BaseValidator):
    """
    "string": {
        "description": "A string value. Numbers are converted to strings
                        except for attributes with `strict` set to true.",
        "requiredOpts": [],
        "otherOpts": [
            "dflt",
            "noBlank",
            "strict",
            "arrayOk",
            "values"
        ]
    },
    """

    def __init__(
        self,
        plotly_name,
        parent_name,
        no_blank=False,
        strict=False,
        array_ok=False,
        values=None,
        **kwargs,
    ):
        super(StringValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
        # no_blank: reject empty strings
        self.no_blank = no_blank
        # strict: reject non-str values instead of coercing them with str()
        self.strict = strict
        self.array_ok = array_ok
        # values: optional whitelist of permitted strings
        self.values = values

    @staticmethod
    def to_str_or_unicode_or_none(v):
        """
        Convert a value to a string if it's not None, a string,
        or a unicode (on Python 2).
        """
        if v is None or isinstance(v, str):
            return v
        else:
            return str(v)

    def description(self):
        """Build the human-readable description of accepted values."""
        desc = """\
    The '{plotly_name}' property is a string and must be specified as:""".format(
            plotly_name=self.plotly_name
        )

        if self.no_blank:
            desc = (
                desc
                + """
      - A non-empty string"""
            )
        elif self.values:
            valid_str = "\n".join(
                textwrap.wrap(
                    repr(self.values),
                    initial_indent=" " * 12,
                    subsequent_indent=" " * 12,
                    break_on_hyphens=False,
                )
            )

            desc = (
                desc
                + """
      - One of the following strings:
{valid_str}""".format(valid_str=valid_str)
            )
        else:
            desc = (
                desc
                + """
      - A string"""
            )

        if not self.strict:
            desc = (
                desc
                + """
      - A number that will be converted to a string"""
            )

        if self.array_ok:
            desc = (
                desc
                + """
      - A tuple, list, or one-dimensional numpy array of the above"""
            )

        return desc

    def validate_coerce(self, v):
        """
        Validate v as a string (or array of strings when array_ok),
        applying no_blank / strict / values constraints.
        """
        if is_none_or_typed_array_spec(v):
            pass
        elif self.array_ok and is_array(v):
            # If strict, make sure all elements are strings.
            if self.strict:
                invalid_els = [e for e in v if not isinstance(e, str)]
                if invalid_els:
                    self.raise_invalid_elements(invalid_els)

            if is_homogeneous_array(v):
                np = get_module("numpy")

                # If not strict, let numpy cast elements to strings
                v = copy_to_readonly_numpy_array(v, kind="U")

                # Check no_blank
                if self.no_blank:
                    invalid_els = v[v == ""][:10].tolist()
                    if invalid_els:
                        self.raise_invalid_elements(invalid_els)

                # Check values
                if self.values:
                    invalid_inds = np.logical_not(np.isin(v, self.values))
                    invalid_els = v[invalid_inds][:10].tolist()
                    if invalid_els:
                        self.raise_invalid_elements(invalid_els)
            elif is_simple_array(v):
                if not self.strict:
                    v = [StringValidator.to_str_or_unicode_or_none(e) for e in v]

                # Check no_blank
                if self.no_blank:
                    invalid_els = [e for e in v if e == ""]
                    if invalid_els:
                        self.raise_invalid_elements(invalid_els)

                # Check values
                # Bug fix: this previously tested `v not in self.values`
                # (the whole list) instead of each element `e`, so invalid
                # elements in plain list input were never detected.
                if self.values:
                    invalid_els = [e for e in v if e not in self.values]
                    if invalid_els:
                        self.raise_invalid_elements(invalid_els)

                v = to_scalar_or_list(v)

        else:
            if self.strict:
                if not isinstance(v, str):
                    self.raise_invalid_val(v)
            else:
                if isinstance(v, str):
                    pass
                elif isinstance(v, (int, float)):
                    # Convert value to a string
                    v = str(v)
                else:
                    self.raise_invalid_val(v)

            if self.no_blank and len(v) == 0:
                self.raise_invalid_val(v)

            if self.values and v not in self.values:
                self.raise_invalid_val(v)

        return v
+
+
+class ColorValidator(BaseValidator):
+ """
+ "color": {
+ "description": "A string describing color. Supported formats:
+ - hex (e.g. '#d3d3d3')
+ - rgb (e.g. 'rgb(255, 0, 0)')
+ - rgba (e.g. 'rgb(255, 0, 0, 0.5)')
+ - hsl (e.g. 'hsl(0, 100%, 50%)')
+ - hsv (e.g. 'hsv(0, 100%, 100%)')
+ - named colors(full list:
+ http://www.w3.org/TR/css3-color/#svg-color)",
+ "requiredOpts": [],
+ "otherOpts": [
+ "dflt",
+ "arrayOk"
+ ]
+ },
+ """
+
+ re_hex = re.compile(r"#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})")
+ re_rgb_etc = re.compile(r"(rgb|hsl|hsv)a?\([\d.]+%?(,[\d.]+%?){2,3}\)")
    # Regex matching DDK (Dash Design Kit) CSS-variable color references,
    # e.g. "var(--accent)".  The pattern itself is unanchored; anchoring is
    # supplied by the fullmatch() helper at the use site in
    # perform_validate_coerce.
    re_ddk = re.compile(r"var\(\-\-.*\)")

    # All CSS named colors accepted as valid color strings, lowercase.
    # Candidate values are lowercased and space-stripped before membership
    # is tested against this list (see perform_validate_coerce).
    named_colors = [
        "aliceblue",
        "antiquewhite",
        "aqua",
        "aquamarine",
        "azure",
        "beige",
        "bisque",
        "black",
        "blanchedalmond",
        "blue",
        "blueviolet",
        "brown",
        "burlywood",
        "cadetblue",
        "chartreuse",
        "chocolate",
        "coral",
        "cornflowerblue",
        "cornsilk",
        "crimson",
        "cyan",
        "darkblue",
        "darkcyan",
        "darkgoldenrod",
        "darkgray",
        "darkgrey",
        "darkgreen",
        "darkkhaki",
        "darkmagenta",
        "darkolivegreen",
        "darkorange",
        "darkorchid",
        "darkred",
        "darksalmon",
        "darkseagreen",
        "darkslateblue",
        "darkslategray",
        "darkslategrey",
        "darkturquoise",
        "darkviolet",
        "deeppink",
        "deepskyblue",
        "dimgray",
        "dimgrey",
        "dodgerblue",
        "firebrick",
        "floralwhite",
        "forestgreen",
        "fuchsia",
        "gainsboro",
        "ghostwhite",
        "gold",
        "goldenrod",
        "gray",
        "grey",
        "green",
        "greenyellow",
        "honeydew",
        "hotpink",
        "indianred",
        "indigo",
        "ivory",
        "khaki",
        "lavender",
        "lavenderblush",
        "lawngreen",
        "lemonchiffon",
        "lightblue",
        "lightcoral",
        "lightcyan",
        "lightgoldenrodyellow",
        "lightgray",
        "lightgrey",
        "lightgreen",
        "lightpink",
        "lightsalmon",
        "lightseagreen",
        "lightskyblue",
        "lightslategray",
        "lightslategrey",
        "lightsteelblue",
        "lightyellow",
        "lime",
        "limegreen",
        "linen",
        "magenta",
        "maroon",
        "mediumaquamarine",
        "mediumblue",
        "mediumorchid",
        "mediumpurple",
        "mediumseagreen",
        "mediumslateblue",
        "mediumspringgreen",
        "mediumturquoise",
        "mediumvioletred",
        "midnightblue",
        "mintcream",
        "mistyrose",
        "moccasin",
        "navajowhite",
        "navy",
        "oldlace",
        "olive",
        "olivedrab",
        "orange",
        "orangered",
        "orchid",
        "palegoldenrod",
        "palegreen",
        "paleturquoise",
        "palevioletred",
        "papayawhip",
        "peachpuff",
        "peru",
        "pink",
        "plum",
        "powderblue",
        "purple",
        "red",
        "rosybrown",
        "royalblue",
        "rebeccapurple",
        "saddlebrown",
        "salmon",
        "sandybrown",
        "seagreen",
        "seashell",
        "sienna",
        "silver",
        "skyblue",
        "slateblue",
        "slategray",
        "slategrey",
        "snow",
        "springgreen",
        "steelblue",
        "tan",
        "teal",
        "thistle",
        "tomato",
        "turquoise",
        "violet",
        "wheat",
        "white",
        "whitesmoke",
        "yellow",
        "yellowgreen",
    ]
+
+ def __init__(
+ self, plotly_name, parent_name, array_ok=False, colorscale_path=None, **kwargs
+ ):
+ super(ColorValidator, self).__init__(
+ plotly_name=plotly_name, parent_name=parent_name, **kwargs
+ )
+
+ self.array_ok = array_ok
+
+ # colorscale_path is the path to the colorscale associated with this
+ # color property, or None if no such colorscale exists. Only colors
+ # with an associated colorscale may take on numeric values
+ self.colorscale_path = colorscale_path
+
+ def numbers_allowed(self):
+ return self.colorscale_path is not None
+
    def description(self):
        """Return a human-readable description of the accepted color forms,
        extended with numeric/array clauses when those are permitted."""
        valid_color_description = """\
    The '{plotly_name}' property is a color and may be specified as:
      - A hex string (e.g. '#ff0000')
      - An rgb/rgba string (e.g. 'rgb(255,0,0)')
      - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
      - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
      - A named CSS color: see https://plotly.com/python/css-colors/ for a list""".format(
            plotly_name=self.plotly_name
        )

        # Numbers are only mentioned when a colorscale exists to map them.
        if self.colorscale_path:
            valid_color_description = (
                valid_color_description
                + """
      - A number that will be interpreted as a color
        according to {colorscale_path}""".format(colorscale_path=self.colorscale_path)
            )

        if self.array_ok:
            valid_color_description = (
                valid_color_description
                + """
      - A list or array of any of the above"""
            )

        return valid_color_description
+
    def validate_coerce(self, v, should_raise=True):
        """
        Validate/coerce a scalar or array color value.

        Parameters
        ----------
        v
            Candidate color, or (when array_ok) an array of candidates.
        should_raise : bool
            When False, invalid elements are returned as None instead of
            raising; used for the recursive per-element calls below so that
            the top-level call can report all invalid elements at once.
        """
        if is_none_or_typed_array_spec(v):
            pass
        elif self.array_ok and is_homogeneous_array(v):
            v = copy_to_readonly_numpy_array(v)
            if self.numbers_allowed() and v.dtype.kind in ["u", "i", "f"]:
                # Numbers are allowed and we have an array of numbers.
                # All good
                pass
            else:
                # Validate each element individually (without raising), then
                # collect the elements that failed.
                validated_v = [self.validate_coerce(e, should_raise=False) for e in v]

                invalid_els = self.find_invalid_els(v, validated_v)

                if invalid_els and should_raise:
                    self.raise_invalid_elements(invalid_els)

                # ### Check that elements have valid colors types ###
                # Mixed numbers/strings (or tolerated invalids) need an
                # object array; pure strings fit a unicode array.
                elif self.numbers_allowed() or invalid_els:
                    v = copy_to_readonly_numpy_array(validated_v, kind="O")
                else:
                    v = copy_to_readonly_numpy_array(validated_v, kind="U")
        elif self.array_ok and is_simple_array(v):
            # Plain (non-numpy) sequence: validate element-wise and keep it
            # as a Python list.
            validated_v = [self.validate_coerce(e, should_raise=False) for e in v]

            invalid_els = self.find_invalid_els(v, validated_v)

            if invalid_els and should_raise:
                self.raise_invalid_elements(invalid_els)
            else:
                v = validated_v
        else:
            # Validate scalar color
            validated_v = self.vc_scalar(v)
            if validated_v is None and should_raise:
                self.raise_invalid_val(v)

            v = validated_v

        return v
+
+ def find_invalid_els(self, orig, validated, invalid_els=None):
+ """
+ Helper method to find invalid elements in orig array.
+ Elements are invalid if their corresponding element in
+ the validated array is None.
+
+ This method handles deeply nested list structures
+ """
+ if invalid_els is None:
+ invalid_els = []
+
+ for orig_el, validated_el in zip(orig, validated):
+ if is_array(orig_el):
+ self.find_invalid_els(orig_el, validated_el, invalid_els)
+ else:
+ if validated_el is None:
+ invalid_els.append(orig_el)
+
+ return invalid_els
+
+ def vc_scalar(self, v):
+ """Helper to validate/coerce a scalar color"""
+ return ColorValidator.perform_validate_coerce(
+ v, allow_number=self.numbers_allowed()
+ )
+
+ @staticmethod
+ def perform_validate_coerce(v, allow_number=None):
+ """
+ Validate, coerce, and return a single color value. If input cannot be
+ coerced to a valid color then return None.
+
+ Parameters
+ ----------
+ v : number or str
+ Candidate color value
+
+ allow_number : bool
+ True if numbers are allowed as colors
+
+ Returns
+ -------
+ number or str or None
+ """
+
+ if isinstance(v, numbers.Number) and allow_number:
+ # If allow_numbers then any number is ok
+ return v
+ elif not isinstance(v, str):
+ # If not allow_numbers then value must be a string
+ return None
+ else:
+ # Remove spaces so regexes don't need to bother with them.
+ v_normalized = v.replace(" ", "").lower()
+
+ # if ColorValidator.re_hex.fullmatch(v_normalized):
+ if fullmatch(ColorValidator.re_hex, v_normalized):
+ # valid hex color (e.g. #f34ab3)
+ return v
+ elif fullmatch(ColorValidator.re_rgb_etc, v_normalized):
+ # elif ColorValidator.re_rgb_etc.fullmatch(v_normalized):
+ # Valid rgb(a), hsl(a), hsv(a) color
+ # (e.g. rgba(10, 234, 200, 50%)
+ return v
+ elif fullmatch(ColorValidator.re_ddk, v_normalized):
+ # Valid var(--*) DDK theme variable, inspired by CSS syntax
+ # (e.g. var(--accent) )
+ # DDK will crawl & eval var(-- colors for Graph theming
+ return v
+ elif v_normalized in ColorValidator.named_colors:
+ # Valid named color (e.g. 'coral')
+ return v
+ else:
+ # Not a valid color
+ return None
+
+
class ColorlistValidator(BaseValidator):
    """
    Validator for the "colorlist" valType:

    "colorlist": {
        "description": "A list of colors. Must be an {array} containing
                        valid colors.",
        "requiredOpts": [],
        "otherOpts": [
            "dflt"
        ]
    }
    """

    def __init__(self, plotly_name, parent_name, **kwargs):
        super(ColorlistValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

    def description(self):
        """Describe the accepted colorlist forms."""
        return """\
    The '{plotly_name}' property is a colorlist that may be specified
    as a tuple, list, one-dimensional numpy array, or pandas Series of valid
    color strings""".format(plotly_name=self.plotly_name)

    def validate_coerce(self, v):
        """Coerce ``v`` to a list of valid color strings (numbers are not
        accepted as colors here); raise on any invalid element."""
        if is_none_or_typed_array_spec(v):
            return v
        if not is_array(v):
            self.raise_invalid_val(v)

        coerced = [
            ColorValidator.perform_validate_coerce(el, allow_number=False) for el in v
        ]

        # An element failed validation iff its coerced form is None.
        bad_els = [el for el, ok in zip(v, coerced) if ok is None]
        if bad_els:
            self.raise_invalid_elements(bad_els)

        return to_scalar_or_list(v)
+
+
class ColorscaleValidator(BaseValidator):
    """
    "colorscale": {
        "description": "A Plotly colorscale either picked by a name:
                        (any of Greys, YlGnBu, Greens, YlOrRd, Bluered,
                        RdBu, Reds, Blues, Picnic, Rainbow, Portland,
                        Jet, Hot, Blackbody, Earth, Electric, Viridis)
                        customized as an {array} of 2-element {arrays}
                        where the first element is the normalized color
                        level value (starting at *0* and ending at *1*),
                        and the second item is a valid color string.",
        "requiredOpts": [],
        "otherOpts": [
            "dflt"
        ]
    },
    """

    def __init__(self, plotly_name, parent_name, **kwargs):
        super(ColorscaleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

        # named colorscales initialized on first use (see the
        # named_colorscales property below)
        self._named_colorscales = None

    @property
    def named_colorscales(self):
        """Lazily-built dict mapping lowercased colorscale name -> color
        list, gathered from plotly.colors.{sequential,diverging,cyclical}."""
        if self._named_colorscales is None:
            import inspect
            import itertools
            from plotly import colors

            colorscale_members = itertools.chain(
                inspect.getmembers(colors.sequential),
                inspect.getmembers(colors.diverging),
                inspect.getmembers(colors.cyclical),
            )

            # Keep only (str name, list) member pairs; skip private members
            # and the pre-reversed "_r" variants, since reversal is handled
            # dynamically in validate_coerce.
            self._named_colorscales = {
                c[0].lower(): c[1]
                for c in colorscale_members
                if isinstance(c, tuple)
                and len(c) == 2
                and isinstance(c[0], str)
                and isinstance(c[1], list)
                and not c[0].endswith("_r")
                and not c[0].startswith("_")
            }

        return self._named_colorscales

    def description(self):
        """Describe the accepted colorscale forms, including the wrapped
        list of available named colorscales."""
        colorscales_str = "\n".join(
            textwrap.wrap(
                repr(sorted(list(self.named_colorscales))),
                initial_indent=" " * 12,
                subsequent_indent=" " * 13,
                break_on_hyphens=False,
                width=80,
            )
        )

        desc = """\
    The '{plotly_name}' property is a colorscale and may be
    specified as:
      - A list of colors that will be spaced evenly to create the colorscale.
        Many predefined colorscale lists are included in the sequential, diverging,
        and cyclical modules in the plotly.colors package.
      - A list of 2-element lists where the first element is the
        normalized color level value (starting at 0 and ending at 1),
        and the second item is a valid color string.
        (e.g. [[0, 'green'], [0.5, 'red'], [1.0, 'rgb(0, 0, 255)']])
      - One of the following named colorscales:
{colorscales_str}.
        Appending '_r' to a named colorscale reverses it.
""".format(plotly_name=self.plotly_name, colorscales_str=colorscales_str)

        return desc

    def validate_coerce(self, v):
        """Coerce ``v`` (None, named colorscale string, color sequence, or
        list of [level, color] pairs) to list-of-lists form, or raise."""
        v_valid = False

        if v is None:
            v_valid = True
        elif isinstance(v, str):
            v_lower = v.lower()
            if v_lower in self.named_colorscales:
                # Convert to color list
                v = self.named_colorscales[v_lower]
                v_valid = True
            elif v_lower.endswith("_r") and v_lower[:-2] in self.named_colorscales:
                # "_r" suffix: reversed copy of the base named colorscale
                v = self.named_colorscales[v_lower[:-2]][::-1]
                v_valid = True
            #
            if v_valid:
                # Convert to list of lists colorscale, spacing the colors
                # evenly over [0, 1]
                d = len(v) - 1
                v = [[(1.0 * i) / (1.0 * d), x] for i, x in enumerate(v)]

        elif is_array(v) and len(v) > 0:
            # If first element is a string, treat as colorsequence
            if isinstance(v[0], str):
                invalid_els = [
                    e for e in v if ColorValidator.perform_validate_coerce(e) is None
                ]

                if len(invalid_els) == 0:
                    v_valid = True

                    # Convert to list of lists colorscale
                    d = len(v) - 1
                    v = [[(1.0 * i) / (1.0 * d), x] for i, x in enumerate(v)]
            else:
                # Otherwise expect [level, color] pairs with level in [0, 1]
                invalid_els = [
                    e
                    for e in v
                    if (
                        not is_array(e)
                        or len(e) != 2
                        or not isinstance(e[0], numbers.Number)
                        or not (0 <= e[0] <= 1)
                        or not isinstance(e[1], str)
                        or ColorValidator.perform_validate_coerce(e[1]) is None
                    )
                ]

                if len(invalid_els) == 0:
                    v_valid = True

                    # Convert to list of lists
                    v = [
                        [e[0], ColorValidator.perform_validate_coerce(e[1])] for e in v
                    ]

        if not v_valid:
            self.raise_invalid_val(v)

        return v

    def present(self, v):
        # Return-type must be immutable: nested lists become nested tuples.
        if v is None:
            return None
        elif isinstance(v, str):
            return v
        else:
            return tuple([tuple(e) for e in v])
+
+
class AngleValidator(BaseValidator):
    """
    "angle": {
        "description": "A number (in degree) between -180 and 180.",
        "requiredOpts": [],
        "otherOpts": [
            "dflt",
            "arrayOk"
        ]
    },
    """

    def __init__(self, plotly_name, parent_name, array_ok=False, **kwargs):
        super(AngleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
        # Whether arrays of angles are accepted in addition to scalars
        self.array_ok = array_ok

    def description(self):
        """Describe the accepted angle forms."""
        desc = """\
    The '{plotly_name}' property is a angle (in degrees) that may be
    specified as a number between -180 and 180{array_ok}.
    Numeric values outside this range are converted to the equivalent value
    (e.g. 270 is converted to -90).
        """.format(
            plotly_name=self.plotly_name,
            array_ok=(
                ", or a list, numpy array or other iterable thereof"
                if self.array_ok
                else ""
            ),
        )

        return desc

    def validate_coerce(self, v):
        """Coerce ``v`` to angle(s) normalized onto [-180, 180), or raise."""
        if is_none_or_typed_array_spec(v):
            pass
        elif self.array_ok and is_homogeneous_array(v):
            try:
                v_array = copy_to_readonly_numpy_array(v, force_numeric=True)
            except (ValueError, TypeError, OverflowError):
                # raise_invalid_val always raises, so v_array is only read
                # on the success path below.
                self.raise_invalid_val(v)
            v = v_array  # Always numeric numpy array
            # Normalize v onto the interval [-180, 180)
            v = (v + 180) % 360 - 180
        elif self.array_ok and is_simple_array(v):
            # Check numeric
            invalid_els = [e for e in v if not isinstance(e, numbers.Number)]

            if invalid_els:
                # Report at most the first 10 offending elements
                self.raise_invalid_elements(invalid_els[:10])

            v = [(x + 180) % 360 - 180 for x in to_scalar_or_list(v)]
        elif not isinstance(v, numbers.Number):
            self.raise_invalid_val(v)
        else:
            # Normalize v onto the interval [-180, 180)
            v = (v + 180) % 360 - 180

        return v
+
+
class SubplotidValidator(BaseValidator):
    """
    "subplotid": {
        "description": "An id string of a subplot type (given by dflt),
                        optionally followed by an integer >1. e.g. if
                        dflt='geo', we can have 'geo', 'geo2', 'geo3',
                        ...",
        "requiredOpts": [
            "dflt"
        ],
        "otherOpts": [
            "regex"
        ]
    }
    """

    def __init__(self, plotly_name, parent_name, dflt=None, regex=None, **kwargs):
        """
        Parameters
        ----------
        dflt : str or None
            Base subplot id (e.g. 'geo').  At least one of dflt/regex must
            be supplied.
        regex : str or None
            Schema regex of the form '/^y([2-9]|[1-9][0-9]+)?$/'; when dflt
            is absent, the base id is extracted from its first word group.
        """
        if dflt is None and regex is None:
            # Fixed typo in the error message: 'deflt' -> 'dflt'
            raise ValueError("One or both of regex and dflt must be specified")

        super(SubplotidValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

        if dflt is not None:
            self.base = dflt
        else:
            # e.g. regex == '/^y([2-9]|[1-9][0-9]+)?$/'
            self.base = re.match(r"/\^(\w+)", regex).group(1)

        # Accept the base id followed by an optional integer suffix
        self.regex = self.base + r"(\d*)"

    def description(self):
        """Describe the accepted subplot id forms."""
        desc = """\
    The '{plotly_name}' property is an identifier of a particular
    subplot, of type '{base}', that may be specified as the string '{base}'
    optionally followed by an integer >= 1
    (e.g. '{base}', '{base}1', '{base}2', '{base}3', etc.)
        """.format(plotly_name=self.plotly_name, base=self.base)
        return desc

    def validate_coerce(self, v):
        """Validate a subplot id string; normalizes '<base>1' to '<base>'."""
        if v is None:
            pass
        elif not isinstance(v, str):
            self.raise_invalid_val(v)
        else:
            match = fullmatch(self.regex, v)
            if not match:
                is_valid = False
            else:
                digit_str = match.group(1)
                if len(digit_str) > 0 and int(digit_str) == 0:
                    # Suffix 0 is not a valid subplot number
                    is_valid = False
                elif len(digit_str) > 0 and int(digit_str) == 1:
                    # Remove 1 suffix (e.g. x1 -> x)
                    v = self.base
                    is_valid = True
                else:
                    is_valid = True

            if not is_valid:
                self.raise_invalid_val(v)
        return v
+
+
class FlaglistValidator(BaseValidator):
    """
    "flaglist": {
        "description": "A string representing a combination of flags
                        (order does not matter here). Combine any of the
                        available `flags` with *+*.
                        (e.g. ('lines+markers')). Values in `extras`
                        cannot be combined.",
        "requiredOpts": [
            "flags"
        ],
        "otherOpts": [
            "dflt",
            "extras",
            "arrayOk"
        ]
    },
    """

    def __init__(
        self, plotly_name, parent_name, flags, extras=None, array_ok=False, **kwargs
    ):
        super(FlaglistValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
        # List of combinable flag names
        self.flags = flags
        # Standalone values that may appear alone but not combined with flags
        self.extras = extras if extras is not None else []
        # Whether arrays of flaglists are accepted
        self.array_ok = array_ok

    def description(self):
        """Describe the accepted flaglist forms."""
        desc = (
            """\
    The '{plotly_name}' property is a flaglist and may be specified
    as a string containing:"""
        ).format(plotly_name=self.plotly_name)

        # Flags
        desc = desc + (
            """
      - Any combination of {flags} joined with '+' characters
        (e.g. '{eg_flag}')"""
        ).format(flags=self.flags, eg_flag="+".join(self.flags[:2]))

        # Extras
        if self.extras:
            desc = desc + (
                """
        OR exactly one of {extras} (e.g. '{eg_extra}')"""
            ).format(extras=self.extras, eg_extra=self.extras[-1])

        if self.array_ok:
            desc = (
                desc
                + """
      - A list or array of the above"""
            )

        return desc

    def vc_scalar(self, v):
        """Validate/coerce one flaglist value; returns the normalized
        '+'-joined string, an extra, or None when invalid.

        NOTE: extras membership is deliberately tested before the string
        type check, so non-string extras values are also accepted.
        """
        if isinstance(v, str):
            v = v.strip()

        if v in self.extras:
            return v

        if not isinstance(v, str):
            return None

        # To be generous we accept flags separated on plus ('+'),
        # or comma (',') and we accept whitespace around the flags
        split_vals = [e.strip() for e in re.split("[,+]", v)]

        # Are all flags valid names?
        if all(f in self.flags for f in split_vals):
            return "+".join(split_vals)
        else:
            return None

    def validate_coerce(self, v):
        """Coerce a scalar flaglist, or (when array_ok) an array of them."""
        if is_none_or_typed_array_spec(v):
            pass
        elif self.array_ok and is_array(v):
            # Coerce individual strings
            validated_v = [self.vc_scalar(e) for e in v]

            invalid_els = [
                el for el, validated_el in zip(v, validated_v) if validated_el is None
            ]
            if invalid_els:
                self.raise_invalid_elements(invalid_els)

            if is_homogeneous_array(v):
                v = copy_to_readonly_numpy_array(validated_v, kind="U")
            else:
                v = to_scalar_or_list(v)
        else:
            validated_v = self.vc_scalar(v)
            if validated_v is None:
                self.raise_invalid_val(v)

            v = validated_v

        return v
+
+
class AnyValidator(BaseValidator):
    """
    Validator for the "any" valType:

    "any": {
        "description": "Any type.",
        "requiredOpts": [],
        "otherOpts": [
            "dflt",
            "values",
            "arrayOk"
        ]
    },
    """

    def __init__(self, plotly_name, parent_name, values=None, array_ok=False, **kwargs):
        super(AnyValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
        self.array_ok = array_ok
        self.values = values

    def description(self):
        """Describe this property (anything is accepted)."""
        desc = """\
    The '{plotly_name}' property accepts values of any type
        """.format(plotly_name=self.plotly_name)
        return desc

    def validate_coerce(self, v):
        """Accept any value; only normalize array inputs."""
        if is_none_or_typed_array_spec(v):
            return v
        if self.array_ok and is_homogeneous_array(v):
            # Object-dtype array preserves heterogeneous element types.
            return copy_to_readonly_numpy_array(v, kind="O")
        if self.array_ok and is_simple_array(v):
            return to_scalar_or_list(v)
        return v
+
+
class InfoArrayValidator(BaseValidator):
    """
    "info_array": {
        "description": "An {array} of plot information.",
        "requiredOpts": [
            "items"
        ],
        "otherOpts": [
            "dflt",
            "freeLength",
            "dimensions"
        ]
    }
    """

    def __init__(
        self,
        plotly_name,
        parent_name,
        items,
        free_length=None,
        dimensions=None,
        **kwargs,
    ):
        """
        Parameters
        ----------
        items : dict or list of dict
            Element validator spec(s); a list gives one validator per
            position, a single dict applies to every element.
        free_length : bool or None
            Whether the array may be shorter/longer than ``items``.
        dimensions : int or str or None
            1, 2, or '1-2' (both accepted); defaults to 1.
        """
        super(InfoArrayValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

        self.items = items
        self.dimensions = dimensions if dimensions else 1
        self.free_length = free_length

        # Instantiate validators for each info array element
        self.item_validators = []
        info_array_items = self.items if isinstance(self.items, list) else [self.items]

        for i, item in enumerate(info_array_items):
            element_name = "{name}[{i}]".format(name=plotly_name, i=i)
            item_validator = InfoArrayValidator.build_validator(
                item, element_name, parent_name
            )
            self.item_validators.append(item_validator)

    def description(self):
        # Cases
        #  1) self.items is array, self.dimensions is 1
        #       a) free_length=True
        #       b) free_length=False
        #  2) self.items is array, self.dimensions is 2
        #     (requires free_length=True)
        #  3) self.items is scalar (requires free_length=True)
        #       a) dimensions=1
        #       b) dimensions=2
        #
        # dimensions can be set to '1-2' to indicate the both are accepted
        #
        desc = """\
    The '{plotly_name}' property is an info array that may be specified as:\
""".format(plotly_name=self.plotly_name)

        if isinstance(self.items, list):
            # ### Case 1 ###
            if self.dimensions in (1, "1-2"):
                upto = " up to" if self.free_length and self.dimensions == 1 else ""
                desc += """

    * a list or tuple of{upto} {N} elements where:\
""".format(upto=upto, N=len(self.item_validators))

                for i, item_validator in enumerate(self.item_validators):
                    el_desc = item_validator.description().strip()
                    desc = (
                        desc
                        + """
({i}) {el_desc}""".format(i=i, el_desc=el_desc)
                    )

            # ### Case 2 ###
            if self.dimensions in ("1-2", 2):
                assert self.free_length

                desc += """

    * a 2D list where:"""
                for i, item_validator in enumerate(self.item_validators):
                    # Temporarily rename the validator so its description
                    # shows the 2D indexing form; restored below.
                    orig_name = item_validator.plotly_name
                    item_validator.plotly_name = "{name}[i][{i}]".format(
                        name=self.plotly_name, i=i
                    )

                    el_desc = item_validator.description().strip()
                    desc = (
                        desc
                        + """
({i}) {el_desc}""".format(i=i, el_desc=el_desc)
                    )
                    item_validator.plotly_name = orig_name
        else:
            # ### Case 3 ###
            assert self.free_length
            item_validator = self.item_validators[0]
            orig_name = item_validator.plotly_name

            if self.dimensions in (1, "1-2"):
                item_validator.plotly_name = "{name}[i]".format(name=self.plotly_name)

                el_desc = item_validator.description().strip()

                desc += """
    * a list of elements where:
      {el_desc}
""".format(el_desc=el_desc)

            if self.dimensions in ("1-2", 2):
                item_validator.plotly_name = "{name}[i][j]".format(
                    name=self.plotly_name
                )

                el_desc = item_validator.description().strip()
                desc += """
    * a 2D list where:
      {el_desc}
""".format(el_desc=el_desc)

            item_validator.plotly_name = orig_name

        return desc

    @staticmethod
    def build_validator(validator_info, plotly_name, parent_name):
        """Instantiate the validator class named by ``valType`` (e.g.
        'enumerated' -> EnumeratedValidator), forwarding remaining options."""
        datatype = validator_info["valType"]  # type: str
        validator_classname = datatype.title().replace("_", "") + "Validator"
        # Resolve the class by name from this module's namespace
        validator_class = eval(validator_classname)

        kwargs = {
            k: validator_info[k]
            for k in validator_info
            if k not in ["valType", "description", "role"]
        }

        return validator_class(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

    def validate_element_with_indexed_name(self, val, validator, inds):
        """
        Helper to add indexes to a validator's name, call validate_coerce on
        a value, then restore the original validator name.

        This makes sure that if a validation error message is raised, the
        property name the user sees includes the index(es) of the offending
        element.

        Parameters
        ----------
        val:
            A value to be validated
        validator
            A validator
        inds
            List of one or more non-negative integers that represent the
            nested index of the value being validated
        Returns
        -------
        val
            validated value

        Raises
        ------
        ValueError
            if val fails validation
        """
        orig_name = validator.plotly_name
        new_name = self.plotly_name
        for i in inds:
            new_name += "[" + str(i) + "]"
        validator.plotly_name = new_name
        try:
            val = validator.validate_coerce(val)
        finally:
            # Restore the validator's name even when validation raises
            validator.plotly_name = orig_name

        return val

    def validate_coerce(self, v):
        """Validate/coerce a 1D or 2D info array according to dimensions,
        free_length, and the per-element item validators."""
        if is_none_or_typed_array_spec(v):
            return None
        elif not is_array(v):
            self.raise_invalid_val(v)

        # Save off original v value to use in error reporting
        orig_v = v

        # Convert everything into nested lists
        # This way we don't need to worry about nested numpy arrays
        v = to_scalar_or_list(v)

        is_v_2d = v and is_array(v[0])

        if is_v_2d and self.dimensions in ("1-2", 2):
            if is_array(self.items):
                # e.g. 2D list as parcoords.dimensions.constraintrange
                # check that all items are there for each nested element
                for i, row in enumerate(v):
                    # Check row length
                    if not is_array(row) or len(row) != len(self.items):
                        self.raise_invalid_val(orig_v[i], [i])

                    for j, validator in enumerate(self.item_validators):
                        row[j] = self.validate_element_with_indexed_name(
                            v[i][j], validator, [i, j]
                        )
            else:
                # e.g. 2D list as layout.grid.subplots
                # check that all elements match individual validator
                validator = self.item_validators[0]
                for i, row in enumerate(v):
                    if not is_array(row):
                        self.raise_invalid_val(orig_v[i], [i])

                    for j, el in enumerate(row):
                        row[j] = self.validate_element_with_indexed_name(
                            el, validator, [i, j]
                        )
        elif v and self.dimensions == 2:
            # e.g. 1D list passed as layout.grid.subplots
            self.raise_invalid_val(orig_v[0], [0])
        elif not is_array(self.items):
            # e.g. 1D list passed as layout.grid.xaxes
            validator = self.item_validators[0]
            for i, el in enumerate(v):
                v[i] = self.validate_element_with_indexed_name(el, validator, [i])

        elif not self.free_length and len(v) != len(self.item_validators):
            # e.g. 3 element list as layout.xaxis.range
            self.raise_invalid_val(orig_v)
        elif self.free_length and len(v) > len(self.item_validators):
            # e.g. 4 element list as layout.updatemenu.button.args
            self.raise_invalid_val(orig_v)
        else:
            # We have a 1D array of the correct length
            for i, (el, validator) in enumerate(zip(v, self.item_validators)):
                # Validate coerce elements
                v[i] = validator.validate_coerce(el)

        return v

    def present(self, v):
        """Return an immutable (tuple-based) view of the validated value."""
        if v is None:
            return None
        else:
            # NOTE: operator precedence makes this
            # (dimensions == 2) or (dimensions == '1-2' and v and is_array(v[0]))
            if (
                self.dimensions == 2
                or self.dimensions == "1-2"
                and v
                and is_array(v[0])
            ):
                # 2D case
                v = copy.deepcopy(v)
                for row in v:
                    for i, (el, validator) in enumerate(zip(row, self.item_validators)):
                        row[i] = validator.present(el)

                return tuple(tuple(row) for row in v)
            else:
                # 1D case
                v = copy.copy(v)
                # Call present on each of the item validators
                for i, (el, validator) in enumerate(zip(v, self.item_validators)):
                    # Validate coerce elements
                    v[i] = validator.present(el)

                # Return tuple form of
                return tuple(v)
+
+
class LiteralValidator(BaseValidator):
    """
    Validator for readonly literal values
    """

    def __init__(self, plotly_name, parent_name, val, **kwargs):
        super(LiteralValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
        # The single permitted value for this read-only property
        self.val = val

    def validate_coerce(self, v):
        """Return ``v`` unchanged when it equals the literal; otherwise
        raise, since the property is read-only."""
        if v == self.val:
            return v
        raise ValueError(
            """\
    The '{plotly_name}' property of {parent_name} is read-only""".format(
                plotly_name=self.plotly_name, parent_name=self.parent_name
            )
        )
+
+
class DashValidator(EnumeratedValidator):
    """
    Special case validator for handling dash properties that may be specified
    as lists of dash lengths. These are not currently specified in the
    schema.

    "dash": {
        "valType": "string",
        "values": [
            "solid",
            "dot",
            "dash",
            "longdash",
            "dashdot",
            "longdashdot"
        ],
        "dflt": "solid",
        "role": "style",
        "editType": "style",
        "description": "Sets the dash style of lines. Set to a dash type
        string (*solid*, *dot*, *dash*, *longdash*, *dashdot*, or
        *longdashdot*) or a dash length list in px (eg *5px,10px,2px,2px*)."
    },
    """

    def __init__(self, plotly_name, parent_name, values, **kwargs):
        # Add regex to handle dash length lists
        # (numbers optionally suffixed px/% and separated by commas and/or
        # whitespace). Delegating it as an extra enumerated "value" lets
        # EnumeratedValidator do the matching.
        dash_list_regex = r"/^\d+(\.\d+)?(px|%)?((,|\s)\s*\d+(\.\d+)?(px|%)?)*$/"

        values = values + [dash_list_regex]

        # Call EnumeratedValidator superclass
        super(DashValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, values=values, **kwargs
        )

    def description(self):
        """Describe the accepted dash styles and dash length lists."""
        # Separate regular values from regular expressions
        enum_vals = []
        enum_regexs = []
        for v, regex in zip(self.values, self.val_regexs):
            if regex is not None:
                enum_regexs.append(regex.pattern)
            else:
                enum_vals.append(v)
        desc = """\
    The '{name}' property is an enumeration that may be specified as:""".format(
            name=self.plotly_name
        )

        if enum_vals:
            enum_vals_str = "\n".join(
                textwrap.wrap(
                    repr(enum_vals),
                    initial_indent=" " * 12,
                    subsequent_indent=" " * 12,
                    break_on_hyphens=False,
                    width=80,
                )
            )

            desc = (
                desc
                + """
      - One of the following dash styles:
{enum_vals_str}""".format(enum_vals_str=enum_vals_str)
            )

        desc = (
            desc
            + """
      - A string containing a dash length list in pixels or percentages
            (e.g. '5px 10px 2px 2px', '5, 10, 2, 2', '10% 20% 40%', etc.)
"""
        )
        return desc
+
+
class ImageUriValidator(BaseValidator):
    # Cached reference to the PIL package, or None when Pillow is not
    # installed (PIL support is optional).
    _PIL = None

    try:
        _PIL = import_module("PIL")
    except ImportError:
        pass

    def __init__(self, plotly_name, parent_name, **kwargs):
        super(ImageUriValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

    def description(self):
        """Describe the accepted image URI forms."""
        desc = """\
    The '{plotly_name}' property is an image URI that may be specified as:
      - A remote image URI string
        (e.g. 'http://www.somewhere.com/image.png')
      - A data URI image string
        (e.g. 'data:image/png;base64,iVBORw0KGgoAAAANSU')
      - A PIL.Image.Image object which will be immediately converted
        to a data URI image string
        See http://pillow.readthedocs.io/en/latest/reference/Image.html
        """.format(plotly_name=self.plotly_name)
        return desc

    def validate_coerce(self, v):
        """Pass through None and strings; convert PIL images to data URIs.

        Strings are not validated further here — future possibilities
        include detecting filesystem paths or checking URL/data-URI syntax.
        """
        if v is None or isinstance(v, str):
            return v
        if self._PIL and isinstance(v, self._PIL.Image.Image):
            # Convert PIL image to png data uri string
            return self.pil_image_to_uri(v)
        self.raise_invalid_val(v)

    @staticmethod
    def pil_image_to_uri(v):
        """Encode a PIL image as a 'data:image/png;base64,...' URI string."""
        buffer = io.BytesIO()
        v.save(buffer, format="PNG")
        encoded = base64.b64encode(buffer.getvalue()).decode("ascii")
        return "data:image/png;base64,{base64_encoded_result_str}".format(
            base64_encoded_result_str=encoded
        )
+
+
class CompoundValidator(BaseValidator):
    """Validator for properties whose value is a single graph-object
    (compound) instance, e.g. layout.xaxis."""

    def __init__(self, plotly_name, parent_name, data_class_str, data_docs, **kwargs):
        super(CompoundValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

        # Save element class string; the class object itself is imported
        # lazily (see the data_class property) to avoid import cycles.
        self.data_class_str = data_class_str
        self._data_class = None
        self.data_docs = data_docs
        self.module_str = CompoundValidator.compute_graph_obj_module_str(
            self.data_class_str, parent_name
        )

    @staticmethod
    def compute_graph_obj_module_str(data_class_str, parent_name):
        """Map (class name, parent path) to the plotly.graph_objs submodule
        that hosts the class.  Frame and template paths are special-cased."""
        if parent_name == "frame" and data_class_str in ["Data", "Layout"]:
            # Special case. There are no graph_objs.frame.Data or
            # graph_objs.frame.Layout classes. These are remapped to
            # graph_objs.Data and graph_objs.Layout

            parent_parts = parent_name.split(".")
            module_str = ".".join(["plotly.graph_objs"] + parent_parts[1:])
        elif parent_name == "layout.template" and data_class_str == "Layout":
            # Remap template's layout to regular layout
            module_str = "plotly.graph_objs"
        elif "layout.template.data" in parent_name:
            # Remap template's traces to regular traces
            parent_name = parent_name.replace("layout.template.data.", "")
            if parent_name:
                module_str = "plotly.graph_objs." + parent_name
            else:
                module_str = "plotly.graph_objs"
        elif parent_name:
            module_str = "plotly.graph_objs." + parent_name
        else:
            module_str = "plotly.graph_objs"

        return module_str

    @property
    def data_class(self):
        # Import the graph-object class on first access and cache it.
        if self._data_class is None:
            module = import_module(self.module_str)
            self._data_class = getattr(module, self.data_class_str)

        return self._data_class

    def description(self):
        """Describe the accepted compound-object forms."""
        desc = (
            """\
    The '{plotly_name}' property is an instance of {class_str}
    that may be specified as:
      - An instance of :class:`{module_str}.{class_str}`
      - A dict of string/value properties that will be passed
        to the {class_str} constructor"""
        ).format(
            plotly_name=self.plotly_name,
            class_str=self.data_class_str,
            module_str=self.module_str,
        )

        return desc

    def validate_coerce(self, v, skip_invalid=False, _validate=True):
        """Coerce None/dict/instance to a fresh graph-object instance; with
        skip_invalid, invalid input yields an empty instance instead of
        raising."""
        if v is None:
            v = self.data_class()

        elif isinstance(v, dict):
            v = self.data_class(v, skip_invalid=skip_invalid, _validate=_validate)

        elif isinstance(v, self.data_class):
            # Copy object
            v = self.data_class(v)
        else:
            if skip_invalid:
                v = self.data_class()
            else:
                self.raise_invalid_val(v)

        v._plotly_name = self.plotly_name
        return v

    def present(self, v):
        # Return compound object as-is
        return v
+
+
class TitleValidator(CompoundValidator):
    """
    This is a special validator to allow compound title properties
    (e.g. layout.title, layout.xaxis.title, etc.) to be set as strings
    or numbers. These strings are mapped to the 'text' property of the
    compound validator.
    """

    def __init__(self, *args, **kwargs):
        super(TitleValidator, self).__init__(*args, **kwargs)

    def validate_coerce(self, v, skip_invalid=False, _validate=True):
        """
        Coerce a bare string/number title to {'text': v}, then delegate to
        CompoundValidator.validate_coerce.

        Accepts and forwards ``_validate`` so this override stays
        signature-compatible with the parent's validate_coerce (the
        original override silently dropped it).
        """
        if isinstance(v, (str, int, float)):
            v = {"text": v}
        return super(TitleValidator, self).validate_coerce(
            v, skip_invalid=skip_invalid, _validate=_validate
        )
+
+
class CompoundArrayValidator(BaseValidator):
    """Validator for properties whose value is a tuple of graph-object
    (compound) instances, e.g. layout.annotations."""

    def __init__(self, plotly_name, parent_name, data_class_str, data_docs, **kwargs):
        super(CompoundArrayValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

        # Save element class string; the class object is imported lazily
        # (see data_class) to avoid import cycles.
        self.data_class_str = data_class_str
        self._data_class = None

        self.data_docs = data_docs
        self.module_str = CompoundValidator.compute_graph_obj_module_str(
            self.data_class_str, parent_name
        )

    def description(self):
        """Describe the accepted compound-array forms."""
        desc = (
            """\
    The '{plotly_name}' property is a tuple of instances of
    {class_str} that may be specified as:
      - A list or tuple of instances of {module_str}.{class_str}
      - A list or tuple of dicts of string/value properties that
        will be passed to the {class_str} constructor"""
        ).format(
            plotly_name=self.plotly_name,
            class_str=self.data_class_str,
            module_str=self.module_str,
        )

        return desc

    @property
    def data_class(self):
        # Import the graph-object class on first access and cache it.
        if self._data_class is None:
            module = import_module(self.module_str)
            self._data_class = getattr(module, self.data_class_str)

        return self._data_class

    def validate_coerce(self, v, skip_invalid=False):
        """Coerce None or a list/tuple of dicts/instances to a list of fresh
        graph-object instances; invalid elements either raise or (with
        skip_invalid) become empty instances."""
        if v is None:
            v = []

        elif isinstance(v, (list, tuple)):
            res = []
            invalid_els = []
            for v_el in v:
                if isinstance(v_el, self.data_class):
                    res.append(self.data_class(v_el))
                elif isinstance(v_el, dict):
                    res.append(self.data_class(v_el, skip_invalid=skip_invalid))
                else:
                    if skip_invalid:
                        res.append(self.data_class())
                    else:
                        # Keep a placeholder so positions line up; all
                        # invalid elements are reported together below.
                        res.append(None)
                        invalid_els.append(v_el)

            if invalid_els:
                self.raise_invalid_elements(invalid_els)

            v = to_scalar_or_list(res)
        else:
            if skip_invalid:
                v = []
            else:
                self.raise_invalid_val(v)

        return v

    def present(self, v):
        # Return compound object as tuple
        return tuple(v)
+
+
class BaseDataValidator(BaseValidator):
    """
    Validator for a property holding a tuple of trace instances
    (e.g. the figure ``data`` property).
    """

    def __init__(
        self, class_strs_map, plotly_name, parent_name, set_uid=False, **kwargs
    ):
        """
        :param class_strs_map: dict mapping trace type names (e.g. 'scatter')
            to trace class names looked up on plotly.graph_objs
        :param set_uid: if True, validate_coerce assigns a fresh uuid4 string
            to each coerced trace's ``uid``
        """
        super(BaseDataValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )

        self.class_strs_map = class_strs_map
        # Cache of trace-name -> trace class, filled lazily by get_trace_class
        self._class_map = {}
        self.set_uid = set_uid

    def description(self):
        """Return a user-facing description of the accepted forms."""
        trace_types = str(list(self.class_strs_map.keys()))

        # Wrap the (potentially long) list of trace type names so the
        # rendered description stays within ~79 columns.
        trace_types_wrapped = "\n".join(
            textwrap.wrap(
                trace_types,
                initial_indent="            One of: ",
                subsequent_indent=" " * 21,
                width=79 - 12,
            )
        )

        desc = (
            """\
        The '{plotly_name}' property is a tuple of trace instances
        that may be specified as:
          - A list or tuple of trace instances
            (e.g. [Scatter(...), Bar(...)])
          - A single trace instance
            (e.g. Scatter(...), Bar(...), etc.)
          - A list or tuple of dicts of string/value properties where:
            - The 'type' property specifies the trace type
{trace_types}

            - All remaining properties are passed to the constructor of
              the specified trace type

            (e.g. [{{'type': 'scatter', ...}}, {{'type': 'bar, ...}}])"""
        ).format(plotly_name=self.plotly_name, trace_types=trace_types_wrapped)

        return desc

    def get_trace_class(self, trace_name):
        """Return (and cache) the trace class for ``trace_name``."""
        # Import trace classes
        if trace_name not in self._class_map:
            trace_module = import_module("plotly.graph_objs")
            trace_class_name = self.class_strs_map[trace_name]
            self._class_map[trace_name] = getattr(trace_module, trace_class_name)

        return self._class_map[trace_name]

    def validate_coerce(self, v, skip_invalid=False, _validate=True):
        """
        Coerce ``v`` (None, a single trace/dict, or a list/tuple of them)
        into a list of trace instances, raising on invalid elements unless
        ``skip_invalid`` is set.
        """
        from plotly.basedatatypes import BaseTraceType

        # Import Histogram2dcontour, this is the deprecated name of the
        # Histogram2dContour trace.
        from plotly.graph_objs import Histogram2dcontour

        if v is None:
            v = []
        else:
            if not isinstance(v, (list, tuple)):
                # Single trace/dict: normalize to a one-element list
                v = [v]

            res = []
            invalid_els = []
            for v_el in v:
                if isinstance(v_el, BaseTraceType):
                    # Convert existing trace instances to their property
                    # dicts so they are rebuilt through the constructor below
                    if isinstance(v_el, Histogram2dcontour):
                        v_el = dict(type="histogram2dcontour", **v_el._props)
                    else:
                        v_el = v_el._props

                if isinstance(v_el, dict):
                    # Remember whether the caller supplied 'type' so it can
                    # be restored after the pop below (avoids mutating the
                    # caller's dict permanently)
                    type_in_v_el = "type" in v_el
                    trace_type = v_el.pop("type", "scatter")

                    if trace_type not in self.class_strs_map:
                        if skip_invalid:
                            # Treat as scatter trace
                            trace = self.get_trace_class("scatter")(
                                skip_invalid=skip_invalid, _validate=_validate, **v_el
                            )
                            res.append(trace)
                        else:
                            res.append(None)
                            invalid_els.append(v_el)
                    else:
                        trace = self.get_trace_class(trace_type)(
                            skip_invalid=skip_invalid, _validate=_validate, **v_el
                        )
                        res.append(trace)

                        if type_in_v_el:
                            # Restore type in v_el
                            v_el["type"] = trace_type
                else:
                    if skip_invalid:
                        # Add empty scatter trace
                        trace = self.get_trace_class("scatter")()
                        res.append(trace)
                    else:
                        res.append(None)
                        invalid_els.append(v_el)

            if invalid_els:
                self.raise_invalid_elements(invalid_els)

            v = to_scalar_or_list(res)

            # Set new UIDs
            if self.set_uid:
                for trace in v:
                    trace.uid = str(uuid.uuid4())

        return v
+
+
class BaseTemplateValidator(CompoundValidator):
    """
    Validator for the figure template property.  Accepts everything a
    CompoundValidator accepts, plus registered template names and
    '+'-joined combinations of registered template names.
    """

    def __init__(self, plotly_name, parent_name, data_class_str, data_docs, **kwargs):
        super(BaseTemplateValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str=data_class_str,
            data_docs=data_docs,
            **kwargs,
        )

    def description(self):
        desc = super(BaseTemplateValidator, self).description()
        desc += """
      - The name of a registered template where current registered templates
        are stored in the plotly.io.templates configuration object. The names
        of all registered templates can be retrieved with:

        >>> import plotly.io as pio
        >>> list(pio.templates)  # doctest: +ELLIPSIS
        ['ggplot2', 'seaborn', 'simple_white', 'plotly', 'plotly_white', ...]

      - A string containing multiple registered template names, joined on '+'
        characters (e.g. 'template1+template2'). In this case the resulting
        template is computed by merging together the collection of registered
        templates"""
        return desc

    def validate_coerce(self, v, skip_invalid=False):
        import plotly.io as pio

        try:
            # A registered-template identifier (could be any hashable object)
            if v in pio.templates:
                return copy.deepcopy(pio.templates[v])
            # A string may also name several registered templates joined
            # on '+'; the result is the merge of all of them.
            elif isinstance(v, str):
                parts = v.split("+")
                if all(part in pio.templates for part in parts):
                    return pio.templates.merge_templates(*parts)
        except TypeError:
            # v is un-hashable; fall through to the compound coercion below
            pass

        # Replace empty template with {'data': {'scatter': [{}]}} so that we
        # can tell the difference between an un-initialized template and a
        # template explicitly set to empty.
        if v == {} or (isinstance(v, self.data_class) and v.to_plotly_json() == {}):
            return self.data_class(data_scatter=[{}])

        return super(BaseTemplateValidator, self).validate_coerce(
            v, skip_invalid=skip_invalid
        )
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/colors/__init__.py b/venv/lib/python3.8/site-packages/_plotly_utils/colors/__init__.py
new file mode 100644
index 0000000..78abe77
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/colors/__init__.py
@@ -0,0 +1,883 @@
+"""
+colors
+=====
+
+Functions that manipulate colors and arrays of colors.
+
+-----
+There are three basic types of color types: rgb, hex and tuple:
+
+rgb - An rgb color is a string of the form 'rgb(a,b,c)' where a, b and c are
+integers between 0 and 255 inclusive.
+
+hex - A hex color is a string of the form '#xxxxxx' where each x is a
+character that belongs to the set [0,1,2,3,4,5,6,7,8,9,a,b,c,d,e,f]. This is
+just the set of characters used in the hexadecimal numeric system.
+
+tuple - A tuple color is a 3-tuple of the form (a,b,c) where a, b and c are
+floats between 0 and 1 inclusive.
+
+-----
+Colormaps and Colorscales:
+A colormap or a colorscale is a correspondence between values - Pythonic
+objects such as strings and floats - to colors.
+
+There are typically two main types of colormaps that exist: numerical and
+categorical colormaps.
+
+Numerical:
+----------
+Numerical colormaps are used when the coloring column being used takes a
+spectrum of values or numbers.
+
+A classic example from the Plotly library:
+```
+rainbow_colorscale = [
+ [0, 'rgb(150,0,90)'], [0.125, 'rgb(0,0,200)'],
+ [0.25, 'rgb(0,25,255)'], [0.375, 'rgb(0,152,255)'],
+ [0.5, 'rgb(44,255,150)'], [0.625, 'rgb(151,255,0)'],
+ [0.75, 'rgb(255,234,0)'], [0.875, 'rgb(255,111,0)'],
+ [1, 'rgb(255,0,0)']
+]
+```
+
+Notice that this colorscale is a list of lists with each inner list containing
+a number and a color. These left hand numbers in the nested lists go from 0 to
+1, and they are like pointers that tell you when a number is mapped to a specific
+color.
+
+If you have a column of numbers `col_num` that you want to plot, and you know
+
+```
+min(col_num) = 0
+max(col_num) = 100
+```
+
+then if you pull out the number `12.5` in the list and want to figure out what
+color the corresponding chart element (bar, scatter plot, etc) is going to be,
+you'll figure out that proportionally 12.5 to 100 is the same as 0.125 to 1.
+So, the point will be mapped to 'rgb(0,0,200)'.
+
+All other colors between the pinned values in a colorscale are linearly
+interpolated.
+
+Categorical:
+------------
+Alternatively, a categorical colormap is used to assign a specific value in a
+color column to a specific color every time it appears in the dataset.
+
+A column of strings in a pandas.DataFrame that is chosen to serve as the
+color index would naturally use a categorical colormap. However, you can
+choose to use a categorical colormap with a column of numbers.
+
+Be careful! If you have a lot of unique numbers in your color column you will
+end up with a colormap that is massive and may slow down graphing performance.
+"""
+
+import decimal
+from numbers import Number
+
+from _plotly_utils import exceptions
+
+
+# Built-in qualitative color sequences and sequential,
+# diverging and cyclical color scales.
+#
+# Initially ported over from plotly_express
+from . import ( # noqa: F401
+ qualitative,
+ sequential,
+ diverging,
+ cyclical,
+ cmocean,
+ colorbrewer,
+ carto,
+ plotlyjs,
+)
+
# Default qualitative color sequence, used e.g. by validate_colors() and
# convert_colors_to_same_type() when no explicit colors are supplied.
DEFAULT_PLOTLY_COLORS = [
    "rgb(31, 119, 180)",
    "rgb(255, 127, 14)",
    "rgb(44, 160, 44)",
    "rgb(214, 39, 40)",
    "rgb(148, 103, 189)",
    "rgb(140, 86, 75)",
    "rgb(227, 119, 194)",
    "rgb(127, 127, 127)",
    "rgb(188, 189, 34)",
    "rgb(23, 190, 207)",
]
+
# Built-in named colorscales.  Each value is a list of [scale, color] pairs
# with scale values running from 0 to 1; colors between pinned values are
# linearly interpolated (see the module docstring above).
PLOTLY_SCALES = {
    "Greys": [[0, "rgb(0,0,0)"], [1, "rgb(255,255,255)"]],
    "YlGnBu": [
        [0, "rgb(8,29,88)"],
        [0.125, "rgb(37,52,148)"],
        [0.25, "rgb(34,94,168)"],
        [0.375, "rgb(29,145,192)"],
        [0.5, "rgb(65,182,196)"],
        [0.625, "rgb(127,205,187)"],
        [0.75, "rgb(199,233,180)"],
        [0.875, "rgb(237,248,217)"],
        [1, "rgb(255,255,217)"],
    ],
    "Greens": [
        [0, "rgb(0,68,27)"],
        [0.125, "rgb(0,109,44)"],
        [0.25, "rgb(35,139,69)"],
        [0.375, "rgb(65,171,93)"],
        [0.5, "rgb(116,196,118)"],
        [0.625, "rgb(161,217,155)"],
        [0.75, "rgb(199,233,192)"],
        [0.875, "rgb(229,245,224)"],
        [1, "rgb(247,252,245)"],
    ],
    "YlOrRd": [
        [0, "rgb(128,0,38)"],
        [0.125, "rgb(189,0,38)"],
        [0.25, "rgb(227,26,28)"],
        [0.375, "rgb(252,78,42)"],
        [0.5, "rgb(253,141,60)"],
        [0.625, "rgb(254,178,76)"],
        [0.75, "rgb(254,217,118)"],
        [0.875, "rgb(255,237,160)"],
        [1, "rgb(255,255,204)"],
    ],
    "Bluered": [[0, "rgb(0,0,255)"], [1, "rgb(255,0,0)"]],
    # modified RdBu based on
    # www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf
    "RdBu": [
        [0, "rgb(5,10,172)"],
        [0.35, "rgb(106,137,247)"],
        [0.5, "rgb(190,190,190)"],
        [0.6, "rgb(220,170,132)"],
        [0.7, "rgb(230,145,90)"],
        [1, "rgb(178,10,28)"],
    ],
    # Scale for non-negative numeric values
    "Reds": [
        [0, "rgb(220,220,220)"],
        [0.2, "rgb(245,195,157)"],
        [0.4, "rgb(245,160,105)"],
        [1, "rgb(178,10,28)"],
    ],
    # Scale for non-positive numeric values
    "Blues": [
        [0, "rgb(5,10,172)"],
        [0.35, "rgb(40,60,190)"],
        [0.5, "rgb(70,100,245)"],
        [0.6, "rgb(90,120,245)"],
        [0.7, "rgb(106,137,247)"],
        [1, "rgb(220,220,220)"],
    ],
    "Picnic": [
        [0, "rgb(0,0,255)"],
        [0.1, "rgb(51,153,255)"],
        [0.2, "rgb(102,204,255)"],
        [0.3, "rgb(153,204,255)"],
        [0.4, "rgb(204,204,255)"],
        [0.5, "rgb(255,255,255)"],
        [0.6, "rgb(255,204,255)"],
        [0.7, "rgb(255,153,255)"],
        [0.8, "rgb(255,102,204)"],
        [0.9, "rgb(255,102,102)"],
        [1, "rgb(255,0,0)"],
    ],
    "Rainbow": [
        [0, "rgb(150,0,90)"],
        [0.125, "rgb(0,0,200)"],
        [0.25, "rgb(0,25,255)"],
        [0.375, "rgb(0,152,255)"],
        [0.5, "rgb(44,255,150)"],
        [0.625, "rgb(151,255,0)"],
        [0.75, "rgb(255,234,0)"],
        [0.875, "rgb(255,111,0)"],
        [1, "rgb(255,0,0)"],
    ],
    "Portland": [
        [0, "rgb(12,51,131)"],
        [0.25, "rgb(10,136,186)"],
        [0.5, "rgb(242,211,56)"],
        [0.75, "rgb(242,143,56)"],
        [1, "rgb(217,30,30)"],
    ],
    "Jet": [
        [0, "rgb(0,0,131)"],
        [0.125, "rgb(0,60,170)"],
        [0.375, "rgb(5,255,255)"],
        [0.625, "rgb(255,255,0)"],
        [0.875, "rgb(250,0,0)"],
        [1, "rgb(128,0,0)"],
    ],
    "Hot": [
        [0, "rgb(0,0,0)"],
        [0.3, "rgb(230,0,0)"],
        [0.6, "rgb(255,210,0)"],
        [1, "rgb(255,255,255)"],
    ],
    "Blackbody": [
        [0, "rgb(0,0,0)"],
        [0.2, "rgb(230,0,0)"],
        [0.4, "rgb(230,210,0)"],
        [0.7, "rgb(255,255,255)"],
        [1, "rgb(160,200,255)"],
    ],
    "Earth": [
        [0, "rgb(0,0,130)"],
        [0.1, "rgb(0,180,180)"],
        [0.2, "rgb(40,210,40)"],
        [0.4, "rgb(230,230,50)"],
        [0.6, "rgb(120,70,20)"],
        [1, "rgb(255,255,255)"],
    ],
    "Electric": [
        [0, "rgb(0,0,0)"],
        [0.15, "rgb(30,0,100)"],
        [0.4, "rgb(120,0,100)"],
        [0.6, "rgb(160,90,0)"],
        [0.8, "rgb(230,200,0)"],
        [1, "rgb(255,250,220)"],
    ],
    "Viridis": [
        [0, "#440154"],
        [0.06274509803921569, "#48186a"],
        [0.12549019607843137, "#472d7b"],
        [0.18823529411764706, "#424086"],
        [0.25098039215686274, "#3b528b"],
        [0.3137254901960784, "#33638d"],
        [0.3764705882352941, "#2c728e"],
        [0.4392156862745098, "#26828e"],
        [0.5019607843137255, "#21918c"],
        [0.5647058823529412, "#1fa088"],
        [0.6274509803921569, "#28ae80"],
        [0.6901960784313725, "#3fbc73"],
        [0.7529411764705882, "#5ec962"],
        [0.8156862745098039, "#84d44b"],
        [0.8784313725490196, "#addc30"],
        [0.9411764705882353, "#d8e219"],
        [1, "#fde725"],
    ],
    "Cividis": [
        [0.000000, "rgb(0,32,76)"],
        [0.058824, "rgb(0,42,102)"],
        [0.117647, "rgb(0,52,110)"],
        [0.176471, "rgb(39,63,108)"],
        [0.235294, "rgb(60,74,107)"],
        [0.294118, "rgb(76,85,107)"],
        [0.352941, "rgb(91,95,109)"],
        [0.411765, "rgb(104,106,112)"],
        [0.470588, "rgb(117,117,117)"],
        [0.529412, "rgb(131,129,120)"],
        [0.588235, "rgb(146,140,120)"],
        [0.647059, "rgb(161,152,118)"],
        [0.705882, "rgb(176,165,114)"],
        [0.764706, "rgb(192,177,109)"],
        [0.823529, "rgb(209,191,102)"],
        [0.882353, "rgb(225,204,92)"],
        [0.941176, "rgb(243,219,79)"],
        [1.000000, "rgb(255,233,69)"],
    ],
}
+
+
def color_parser(colors, function):
    """
    Apply *function* to a single color or element-wise to an iterable of colors.

    A string, or a tuple whose first element is a number, is treated as a
    single color and passed directly to *function*.  Any other iterable is
    mapped element-wise: a tuple input yields a tuple, anything else a list.
    All elements of an iterable must share one color type (rgb string, hex
    string or tuple).
    """
    if isinstance(colors, str):
        return function(colors)

    if isinstance(colors, tuple) and isinstance(colors[0], Number):
        return function(colors)

    if hasattr(colors, "__iter__"):
        mapped = [function(item) for item in colors]
        # Preserve the container flavor of the input
        return tuple(mapped) if isinstance(colors, tuple) else mapped
+
+
def validate_colors(colors, colortype="tuple"):
    """
    Validates color(s) and returns a list of color(s) of a specified type

    :param (str|tuple|list|None) colors: a Plotly scale name, an rgb or hex
        color string, a color tuple, or a list/tuple of colors.  ``None``
        selects ``DEFAULT_PLOTLY_COLORS``.
    :param (str) colortype: 'tuple' (default) returns normalized float
        tuples; 'rgb' returns 'rgb(r, g, b)' strings
    :raises: (PlotlyError) if a string color is unrecognized or a channel
        value is out of range
    """
    # NOTE: `Number` comes from the module-level `from numbers import Number`;
    # the previous function-local re-import was redundant and has been removed.
    if colors is None:
        colors = DEFAULT_PLOTLY_COLORS

    if isinstance(colors, str):
        if colors in PLOTLY_SCALES:
            colors_list = colorscale_to_colors(PLOTLY_SCALES[colors])
            # TODO: fix _gantt.py/_scatter.py so that they can accept the
            # actual colorscale and not just a list of the first and last
            # color in the plotly colorscale. In resolving this issue we
            # will be removing the immediate line below
            colors = [colors_list[0]] + [colors_list[-1]]
        elif "rgb" in colors or "#" in colors:
            colors = [colors]
        else:
            raise exceptions.PlotlyError(
                "If your colors variable is a string, it must be a "
                "Plotly scale, an rgb color or a hex color."
            )

    elif isinstance(colors, tuple):
        # A single color tuple vs a tuple of colors
        if isinstance(colors[0], Number):
            colors = [colors]
        else:
            colors = list(colors)

    # convert color elements in list to tuple color
    for j, each_color in enumerate(colors):
        if "rgb" in each_color:
            each_color = color_parser(each_color, unlabel_rgb)
            for value in each_color:
                if value > 255.0:
                    raise exceptions.PlotlyError(
                        "Whoops! The elements in your rgb colors "
                        "tuples cannot exceed 255.0."
                    )
            each_color = color_parser(each_color, unconvert_from_RGB_255)
            colors[j] = each_color

        if "#" in each_color:
            each_color = color_parser(each_color, hex_to_rgb)
            each_color = color_parser(each_color, unconvert_from_RGB_255)

            colors[j] = each_color

        if isinstance(each_color, tuple):
            for value in each_color:
                if value > 1.0:
                    raise exceptions.PlotlyError(
                        "Whoops! The elements in your colors tuples cannot exceed 1.0."
                    )
            colors[j] = each_color

    if colortype == "rgb" and not isinstance(colors, str):
        for j, each_color in enumerate(colors):
            rgb_color = color_parser(each_color, convert_to_RGB_255)
            colors[j] = color_parser(rgb_color, label_rgb)

    return colors
+
+
def validate_colors_dict(colors, colortype="tuple"):
    """
    Validate a dictionary of colors in place: each value is normalized to a
    tuple color, then optionally converted to an rgb string when
    ``colortype == 'rgb'``.  Returns the (mutated) dictionary.
    """
    # validate each color element in the dictionary
    for key in colors:
        value = colors[key]

        if "rgb" in value:
            value = color_parser(value, unlabel_rgb)
            for channel in value:
                if channel > 255.0:
                    raise exceptions.PlotlyError(
                        "Whoops! The elements in your rgb colors "
                        "tuples cannot exceed 255.0."
                    )
            value = color_parser(value, unconvert_from_RGB_255)
            colors[key] = value

        if "#" in value:
            value = color_parser(value, hex_to_rgb)
            colors[key] = color_parser(value, unconvert_from_RGB_255)

        if isinstance(colors[key], tuple):
            for channel in colors[key]:
                if channel > 1.0:
                    raise exceptions.PlotlyError(
                        "Whoops! The elements in your colors tuples cannot exceed 1.0."
                    )

    if colortype == "rgb":
        for key in colors:
            as_255 = color_parser(colors[key], convert_to_RGB_255)
            colors[key] = color_parser(as_255, label_rgb)

    return colors
+
+
def convert_colors_to_same_type(
    colors,
    colortype="rgb",
    scale=None,
    return_default_colors=False,
    num_of_defualt_colors=2,
):
    """
    Converts color(s) to the specified color type

    Takes a single color or an iterable of colors, as well as a list of scale
    values, and outputs a 2-pair of the list of color(s) converted all to an
    rgb or tuple color type, as well as the scale as the second element. If
    colors is a Plotly Scale name, then 'scale' will be forced to the scale
    from the respective colorscale and the colors in that colorscale will also
    be converted to the selected colortype. If colors is None, then there is an
    option to return a portion of the DEFAULT_PLOTLY_COLORS

    :param (str|tuple|list) colors: either a plotly scale name, an rgb or hex
        color, a color tuple or a list/tuple of colors
    :param (list) scale: see docs for validate_scale_values()
    :param (bool) return_default_colors: with colors=None, return the first
        `num_of_defualt_colors` entries of DEFAULT_PLOTLY_COLORS
        (note: the misspelled parameter name is kept for backward
        compatibility with existing callers)

    :rtype (tuple) (colors_list, scale) if scale is None in the function call,
        then scale will remain None in the returned tuple
    """
    colors_list = []

    if colors is None and return_default_colors is True:
        colors_list = DEFAULT_PLOTLY_COLORS[0:num_of_defualt_colors]

    if isinstance(colors, str):
        if colors in PLOTLY_SCALES:
            # Named scale: use its colors and (unless overridden) its scale
            colors_list = colorscale_to_colors(PLOTLY_SCALES[colors])
            if scale is None:
                scale = colorscale_to_scale(PLOTLY_SCALES[colors])

        elif "rgb" in colors or "#" in colors:
            colors_list = [colors]

    elif isinstance(colors, tuple):
        # A single color tuple vs a tuple of colors
        if isinstance(colors[0], Number):
            colors_list = [colors]
        else:
            colors_list = list(colors)

    elif isinstance(colors, list):
        colors_list = colors

    # validate scale
    if scale is not None:
        validate_scale_values(scale)

        if len(colors_list) != len(scale):
            raise exceptions.PlotlyError(
                "Make sure that the length of your scale matches the length "
                "of your list of colors which is {}.".format(len(colors_list))
            )

    # convert all colors to rgb
    for j, each_color in enumerate(colors_list):
        if "#" in each_color:
            each_color = color_parser(each_color, hex_to_rgb)
            each_color = color_parser(each_color, label_rgb)
            colors_list[j] = each_color

        elif isinstance(each_color, tuple):
            each_color = color_parser(each_color, convert_to_RGB_255)
            each_color = color_parser(each_color, label_rgb)
            colors_list[j] = each_color

    if colortype == "rgb":
        return (colors_list, scale)
    elif colortype == "tuple":
        # Undo the rgb-string normalization done above
        for j, each_color in enumerate(colors_list):
            each_color = color_parser(each_color, unlabel_rgb)
            each_color = color_parser(each_color, unconvert_from_RGB_255)
            colors_list[j] = each_color
        return (colors_list, scale)
    else:
        raise exceptions.PlotlyError(
            "You must select either rgb or tuple for your colortype variable."
        )
+
+
def convert_dict_colors_to_same_type(colors_dict, colortype="rgb"):
    """
    Convert every color in a dictionary to the specified color type.

    :param (dict) colors_dict: a dictionary whose values are single colors
    :param (str) colortype: 'rgb' or 'tuple'
    """
    # First normalize hex and tuple colors to rgb strings
    for key in colors_dict:
        value = colors_dict[key]
        if "#" in value:
            colors_dict[key] = color_parser(
                color_parser(value, hex_to_rgb), label_rgb
            )
        elif isinstance(value, tuple):
            colors_dict[key] = color_parser(
                color_parser(value, convert_to_RGB_255), label_rgb
            )

    if colortype == "rgb":
        return colors_dict
    if colortype == "tuple":
        # Undo the rgb-string normalization done above
        for key in colors_dict:
            colors_dict[key] = color_parser(
                color_parser(colors_dict[key], unlabel_rgb), unconvert_from_RGB_255
            )
        return colors_dict
    raise exceptions.PlotlyError(
        "You must select either rgb or tuple for your colortype variable."
    )
+
+
def validate_scale_values(scale):
    """
    Validates scale values from a colorscale

    :param (list) scale: a strictly increasing list of floats that begins
        with 0 and ends with 1. Its usage derives from a colorscale, which
        is a list of two-lists of the form [value, color]; `scale` is the
        ordered extraction of those values.
    :raises: (PlotlyError) if scale is too short, has wrong endpoints, or
        is not strictly increasing
    """
    if len(scale) < 2:
        raise exceptions.PlotlyError(
            "You must input a list of scale values that has at least two values."
        )

    if scale[0] != 0 or scale[-1] != 1:
        raise exceptions.PlotlyError(
            "The first and last number in your scale must be 0.0 and 1.0 respectively."
        )

    # Every adjacent pair must be strictly increasing
    for left, right in zip(scale, scale[1:]):
        if left >= right:
            raise exceptions.PlotlyError(
                "'scale' must be a list that contains a strictly increasing "
                "sequence of numbers."
            )
+
+
def validate_colorscale(colorscale):
    """Validate the structure, scale values and colors of colorscale."""
    if not isinstance(colorscale, list):
        # TODO Write tests for these exceptions
        raise exceptions.PlotlyError("A valid colorscale must be a list.")
    for inner in colorscale:
        if not isinstance(inner, list):
            raise exceptions.PlotlyError("A valid colorscale must be a list of lists.")

    # Check the scale values and the colors independently
    validate_scale_values(colorscale_to_scale(colorscale))
    validate_colors(colorscale_to_colors(colorscale))
+
+
def make_colorscale(colors, scale=None):
    """
    Makes a colorscale from a list of colors and a scale

    Takes a list of colors and scales and constructs a colorscale based
    on the colors in sequential order. If 'scale' is left empty, a
    linearly-interpolated colorscale will be generated. If 'scale' is a
    specified list, it must be the same length as colors and must contain
    all floats. For documentation regarding the form of the output, see
    https://plot.ly/python/reference/#mesh3d-colorscale

    :param (list) colors: a list of single colors (at least two)
    :param (list|None) scale: optional scale values; must start at 0, end
        at 1 and be strictly increasing
    :raises: (PlotlyError) if fewer than two colors are given, or if the
        scale is invalid or does not match the number of colors
    """
    # validate minimum colors length of 2
    if len(colors) < 2:
        raise exceptions.PlotlyError(
            "You must input a list of colors that has at least two colors."
        )

    if scale is None:
        # Evenly space the colors over [0, 1]
        scale_incr = 1.0 / (len(colors) - 1)
        return [[i * scale_incr, color] for i, color in enumerate(colors)]

    if len(colors) != len(scale):
        raise exceptions.PlotlyError(
            "The length of colors and scale must be the same."
        )

    validate_scale_values(scale)

    # (Removed a dead `colorscale = []` initialization: both branches build
    # and return their own result.)
    return [list(tup) for tup in zip(scale, colors)]
+
+
def find_intermediate_color(lowcolor, highcolor, intermed, colortype="tuple"):
    """
    Returns the color at a given distance between two colors

    Takes two color tuples (each element between 0 and 1) and a value
    0 < intermed < 1, and returns the color that lies intermed-percent of
    the way from lowcolor to highcolor.  With colortype='rgb' the inputs
    are 'rgb(a, b, c)' strings and the result is returned in that form.
    """
    if colortype == "rgb":
        # convert to tuple color, eg. (1, 0.45, 0.7)
        lowcolor = unlabel_rgb(lowcolor)
        highcolor = unlabel_rgb(highcolor)

    # Linearly interpolate each of the three channels
    channels = []
    for i in range(3):
        delta = float(highcolor[i] - lowcolor[i])
        channels.append(lowcolor[i] + intermed * delta)
    blended = tuple(channels)

    if colortype == "rgb":
        # back to an rgb string, e.g. rgb(30, 20, 10)
        return label_rgb(blended)

    return blended
+
+
def unconvert_from_RGB_255(colors):
    """
    Normalize a 0-255 color tuple to the 0-1 range.

    Takes a color tuple whose elements lie between 0 and 255 and returns
    the same tuple with every element divided by 255.
    """
    red, green, blue = colors[0], colors[1], colors[2]
    return (red / 255.0, green / 255.0, blue / 255.0)
+
+
def convert_to_RGB_255(colors):
    """
    Multiplies each element of a triplet by 255 and rounds to an integer.

    Rounding uses banker's rounding (ROUND_HALF_EVEN): an exact x.5 value
    rounds to the nearest even integer, matching Python 3's round() and
    avoiding rounding bias.

    :param colors: a 3-element color tuple with channels in [0, 1]
    :rtype (tuple): the three rounded integer R, G and B values
    """

    def _round_channel(component):
        # Go through Decimal so the half-even rule is applied to the exact
        # decimal representation of the scaled value
        quantized = decimal.Decimal(str(component * 255.0)).quantize(
            decimal.Decimal("1"), rounding=decimal.ROUND_HALF_EVEN
        )
        return int(quantized)

    scaled = [_round_channel(component) for component in colors]
    return (scaled[0], scaled[1], scaled[2])
+
+
def n_colors(lowcolor, highcolor, n_colors, colortype="tuple"):
    """
    Splits a low and high color into a list of n_colors colors in it

    Returns n_colors colors forming the linear interpolation in RGB space
    from lowcolor to highcolor (both endpoints included).  With
    colortype='rgb' the inputs and outputs are 'rgb(a, b, c)' strings.
    """
    if colortype == "rgb":
        # convert to tuple
        lowcolor = unlabel_rgb(lowcolor)
        highcolor = unlabel_rgb(highcolor)

    # Per-channel step size between consecutive colors
    steps = [
        float(highcolor[i] - lowcolor[i]) / (n_colors - 1) for i in range(3)
    ]

    def _clamp(channel):
        # Keep interpolated channels inside the valid 0-255 range
        return max(0.0, min(255.0, channel))

    list_of_colors = []
    for step_index in range(n_colors):
        color = tuple(
            _clamp(lowcolor[i] + step_index * steps[i]) for i in range(3)
        )
        list_of_colors.append(color)

    if colortype == "rgb":
        # back to an rgb string
        list_of_colors = color_parser(list_of_colors, label_rgb)

    return list_of_colors
+
+
def label_rgb(colors):
    """
    Takes tuple (a, b, c) and returns an rgb color 'rgb(a, b, c)'
    """
    red, green, blue = colors[0], colors[1], colors[2]
    return f"rgb({red}, {green}, {blue})"
+
+
def unlabel_rgb(colors):
    """
    Takes an rgb color 'rgb(a, b, c)' and returns the tuple (a, b, c).

    Every character that is part of a number (digits, '.') or a separating
    comma is kept; everything else ('r', 'g', 'b', parentheses, spaces) is
    discarded.  The remaining comma-separated segments are parsed as floats.
    """
    kept = []
    for char in colors:
        if char in (",", "."):
            kept.append(char)
        else:
            # Keep only characters float() accepts on their own (digits)
            try:
                float(char)
            except ValueError:
                continue
            kept.append(char)

    # Append a trailing comma so the final number forms a complete segment,
    # then split and drop the resulting empty tail
    segments = ("".join(kept) + ",").split(",")[:-1]
    numbers = [float(segment) for segment in segments]
    return (numbers[0], numbers[1], numbers[2])
+
+
def hex_to_rgb(value):
    """
    Calculates rgb values from a hex color code.

    Supports any hex string whose length is a multiple of three (e.g.
    '#abc' as well as '#aabbcc'); each third is parsed as one channel.

    :param (string) value: Hex color string
    :rtype (tuple) (r_value, g_value, b_value): tuple of rgb values
    """
    stripped = value.lstrip("#")
    span = len(stripped) // 3
    channels = []
    for start in range(0, len(stripped), span):
        channels.append(int(stripped[start : start + span], 16))
    return tuple(channels)
+
+
def colorscale_to_colors(colorscale):
    """
    Extracts the colors (second entry of each pair) from a colorscale.
    """
    return [pair[1] for pair in colorscale]
+
+
def colorscale_to_scale(colorscale):
    """
    Extracts the interpolation scale values (first entry of each pair)
    from a colorscale.
    """
    return [pair[0] for pair in colorscale]
+
+
def convert_colorscale_to_rgb(colorscale):
    """
    Converts the colors in a colorscale to rgb colors, in place.

    The input is a list of [value, color] pairs whose colors are tuples
    with each coordinate between 0 and 1; the colors are replaced by
    'rgb(r, g, b)' strings and the (mutated) colorscale is returned.
    """
    # First pass: tuple colors -> integer 0-255 triples
    for entry in colorscale:
        entry[1] = convert_to_RGB_255(entry[1])

    # Second pass: 0-255 triples -> 'rgb(r, g, b)' strings
    for entry in colorscale:
        entry[1] = label_rgb(entry[1])
    return colorscale
+
+
def named_colorscales():
    """
    Returns lowercased names of built-in continuous colorscales.
    """
    from _plotly_utils.basevalidators import ColorscaleValidator

    # Iterating the named_colorscales mapping yields its keys (the names)
    return list(ColorscaleValidator("", "").named_colorscales)
+
+
def get_colorscale(name):
    """
    Returns the colorscale for a given name. See `named_colorscales` for the
    built-in colorscales.

    :param (str) name: colorscale name (case-insensitive); append '_r' to
        request the reversed scale
    :rtype (list): a colorscale as a list of [scale_value, color] pairs
    :raises: (PlotlyError) if name is not a string or not a built-in scale
    """
    from _plotly_utils.basevalidators import ColorscaleValidator

    if not isinstance(name, str):
        # (Fixed grammar of the error message: "have to be" -> "has to be")
        raise exceptions.PlotlyError("Name argument has to be a string.")

    name = name.lower()
    # A trailing '_r' selects the reversed version of the scale
    if name[-2:] == "_r":
        should_reverse = True
        name = name[:-2]
    else:
        should_reverse = False

    # Build the validator once and reuse it for both lookup and retrieval
    # (previously it was constructed twice)
    named = ColorscaleValidator("", "").named_colorscales
    if name not in named:
        raise exceptions.PlotlyError(f"Colorscale {name} is not a built-in scale.")
    colorscale = named[name]

    if should_reverse:
        colorscale = colorscale[::-1]
    return make_colorscale(colorscale)
+
+
def sample_colorscale(colorscale, samplepoints, low=0.0, high=1.0, colortype="rgb"):
    """
    Samples a colorscale at specific points.

    Interpolates between colors in a colorscale to find the specific colors
    corresponding to the specified sample values. The colorscale can be specified
    as a list of `[scale, color]` pairs, as a list of colors, or as a named
    plotly colorscale. The samplepoints can be specified as an iterable of specific
    points in the range [0.0, 1.0], or as an integer number of points which will
    be spaced equally between the low value (default 0.0) and the high value
    (default 1.0). The output is a list of colors, formatted according to the
    specified colortype.
    """
    from bisect import bisect_left

    try:
        validate_colorscale(colorscale)
    except exceptions.PlotlyError:
        # Not already a valid colorscale: treat a string as a named scale
        # and any other input as a plain list of colors
        if isinstance(colorscale, str):
            colorscale = get_colorscale(colorscale)
        else:
            colorscale = make_colorscale(colorscale)

    scale = colorscale_to_scale(colorscale)
    validate_scale_values(scale)
    colors = colorscale_to_colors(colorscale)
    # Work internally with normalized float tuples
    colors = validate_colors(colors, colortype="tuple")

    if isinstance(samplepoints, int):
        # Spread `samplepoints` values evenly between low and high
        samplepoints = [
            low + idx / (samplepoints - 1) * (high - low) for idx in range(samplepoints)
        ]
    elif isinstance(samplepoints, float):
        samplepoints = [samplepoints]

    sampled_colors = []
    # NOTE(review): `low`/`high` are reused here as bracket indices into
    # `scale`, shadowing the parameters (already consumed above). For
    # point == 0.0, bisect_left yields high == 0 / low == -1, which wraps to
    # the ends of the scale yet still evaluates to colors[0] — confirm this
    # boundary behavior is intended.
    for point in samplepoints:
        high = bisect_left(scale, point)
        low = high - 1
        interpolant = (point - scale[low]) / (scale[high] - scale[low])
        sampled_color = find_intermediate_color(colors[low], colors[high], interpolant)
        sampled_colors.append(sampled_color)
    return validate_colors(sampled_colors, colortype=colortype)
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/colors/_swatches.py b/venv/lib/python3.8/site-packages/_plotly_utils/colors/_swatches.py
new file mode 100644
index 0000000..9522562
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/colors/_swatches.py
@@ -0,0 +1,161 @@
+def _swatches(module_names, module_contents, template=None):
+ """
+ Parameters
+ ----------
+ template : str or dict or plotly.graph_objects.layout.Template instance
+ The figure template name or definition.
+
+ Returns
+ -------
+ fig : graph_objects.Figure containing the displayed image
+ A `Figure` object. This figure demonstrates the color scales and
+ sequences in this module, as stacked bar charts.
+ """
+ import plotly.graph_objs as go
+ from plotly.express._core import apply_default_cascade
+
+ args = dict(template=template)
+ apply_default_cascade(args)
+
+ sequences = [
+ (k, v)
+ for k, v in module_contents.items()
+ if not (k.startswith("_") or k.startswith("swatches") or k.endswith("_r"))
+ ]
+
+ return go.Figure(
+ data=[
+ go.Bar(
+ orientation="h",
+ y=[name] * len(colors),
+ x=[1] * len(colors),
+ customdata=list(range(len(colors))),
+ marker=dict(color=colors),
+ hovertemplate="%{y}[%{customdata}] = %{marker.color}<extra></extra>",
+ )
+ for name, colors in reversed(sequences)
+ ],
+ layout=dict(
+ title="plotly.colors." + module_names.split(".")[-1],
+ barmode="stack",
+ barnorm="fraction",
+ bargap=0.5,
+ showlegend=False,
+ xaxis=dict(range=[-0.02, 1.02], showticklabels=False, showgrid=False),
+ height=max(600, 40 * len(sequences)),
+ template=args["template"],
+ margin=dict(b=10),
+ ),
+ )
+
+
+def _swatches_continuous(module_names, module_contents, template=None):
+ """
+ Parameters
+ ----------
+ template : str or dict or plotly.graph_objects.layout.Template instance
+ The figure template name or definition.
+
+ Returns
+ -------
+ fig : graph_objects.Figure containing the displayed image
+ A `Figure` object. This figure demonstrates the color scales and
+ sequences in this module, as stacked bar charts.
+ """
+ import plotly.graph_objs as go
+ from plotly.express._core import apply_default_cascade
+
+ args = dict(template=template)
+ apply_default_cascade(args)
+
+ sequences = [
+ (k, v)
+ for k, v in module_contents.items()
+ if not (k.startswith("_") or k.startswith("swatches") or k.endswith("_r"))
+ ]
+
+ n = 100
+
+ return go.Figure(
+ data=[
+ go.Bar(
+ orientation="h",
+ y=[name] * n,
+ x=[1] * n,
+ customdata=[(x + 1) / n for x in range(n)],
+ marker=dict(color=list(range(n)), colorscale=name, line_width=0),
+ hovertemplate="%{customdata}",
+ name=name,
+ )
+ for name, colors in reversed(sequences)
+ ],
+ layout=dict(
+ title="plotly.colors." + module_names.split(".")[-1],
+ barmode="stack",
+ barnorm="fraction",
+ bargap=0.3,
+ showlegend=False,
+ xaxis=dict(range=[-0.02, 1.02], showticklabels=False, showgrid=False),
+ height=max(600, 40 * len(sequences)),
+ width=500,
+ template=args["template"],
+ margin=dict(b=10),
+ ),
+ )
+
+
+def _swatches_cyclical(module_names, module_contents, template=None):
+ """
+ Parameters
+ ----------
+ template : str or dict or plotly.graph_objects.layout.Template instance
+ The figure template name or definition.
+
+ Returns
+ -------
+ fig : graph_objects.Figure containing the displayed image
+ A `Figure` object. This figure demonstrates the color scales and
+ sequences in this module, as polar bar charts.
+ """
+ import plotly.graph_objects as go
+ from plotly.subplots import make_subplots
+ from plotly.express._core import apply_default_cascade
+
+ args = dict(template=template)
+ apply_default_cascade(args)
+
+ rows = 2
+ cols = 4
+ scales = [
+ (k, v)
+ for k, v in module_contents.items()
+ if not (k.startswith("_") or k.startswith("swatches") or k.endswith("_r"))
+ ]
+ names = [name for name, colors in scales]
+ fig = make_subplots(
+ rows=rows,
+ cols=cols,
+ subplot_titles=names,
+ specs=[[{"type": "polar"}] * cols] * rows,
+ )
+
+ for i, (name, scale) in enumerate(scales):
+ fig.add_trace(
+ go.Barpolar(
+ r=[1] * int(360 / 5),
+ theta=list(range(0, 360, 5)),
+ marker_color=list(range(0, 360, 5)),
+ marker_cmin=0,
+ marker_cmax=360,
+ marker_colorscale=name,
+ name=name,
+ ),
+ row=int(i / cols) + 1,
+ col=i % cols + 1,
+ )
+ fig.update_traces(width=5.2, marker_line_width=0, base=0.5, showlegend=False)
+ fig.update_polars(angularaxis_visible=False, radialaxis_visible=False)
+ fig.update_layout(
+ title="plotly.colors." + module_names.split(".")[-1], template=args["template"]
+ )
+ return fig
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/colors/carto.py b/venv/lib/python3.8/site-packages/_plotly_utils/colors/carto.py
new file mode 100644
index 0000000..f5c503f
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/colors/carto.py
@@ -0,0 +1,419 @@
+"""
+Color sequences and scales from CARTO's CartoColors
+
+Learn more at https://github.com/CartoDB/CartoColor
+
+CARTOColors are made available under a Creative Commons Attribution license: https://creativecommons.org/licenses/by/3.0/us/
+"""
+
+from ._swatches import _swatches
+
+
+def swatches(template=None):
+ return _swatches(__name__, globals(), template)
+
+
+swatches.__doc__ = _swatches.__doc__
+
+Burg = [
+ "rgb(255, 198, 196)",
+ "rgb(244, 163, 168)",
+ "rgb(227, 129, 145)",
+ "rgb(204, 96, 125)",
+ "rgb(173, 70, 108)",
+ "rgb(139, 48, 88)",
+ "rgb(103, 32, 68)",
+]
+
+Burgyl = [
+ "rgb(251, 230, 197)",
+ "rgb(245, 186, 152)",
+ "rgb(238, 138, 130)",
+ "rgb(220, 113, 118)",
+ "rgb(200, 88, 108)",
+ "rgb(156, 63, 93)",
+ "rgb(112, 40, 74)",
+]
+
+Redor = [
+ "rgb(246, 210, 169)",
+ "rgb(245, 183, 142)",
+ "rgb(241, 156, 124)",
+ "rgb(234, 129, 113)",
+ "rgb(221, 104, 108)",
+ "rgb(202, 82, 104)",
+ "rgb(177, 63, 100)",
+]
+
+Oryel = [
+ "rgb(236, 218, 154)",
+ "rgb(239, 196, 126)",
+ "rgb(243, 173, 106)",
+ "rgb(247, 148, 93)",
+ "rgb(249, 123, 87)",
+ "rgb(246, 99, 86)",
+ "rgb(238, 77, 90)",
+]
+
+Peach = [
+ "rgb(253, 224, 197)",
+ "rgb(250, 203, 166)",
+ "rgb(248, 181, 139)",
+ "rgb(245, 158, 114)",
+ "rgb(242, 133, 93)",
+ "rgb(239, 106, 76)",
+ "rgb(235, 74, 64)",
+]
+
+Pinkyl = [
+ "rgb(254, 246, 181)",
+ "rgb(255, 221, 154)",
+ "rgb(255, 194, 133)",
+ "rgb(255, 166, 121)",
+ "rgb(250, 138, 118)",
+ "rgb(241, 109, 122)",
+ "rgb(225, 83, 131)",
+]
+
+Mint = [
+ "rgb(228, 241, 225)",
+ "rgb(180, 217, 204)",
+ "rgb(137, 192, 182)",
+ "rgb(99, 166, 160)",
+ "rgb(68, 140, 138)",
+ "rgb(40, 114, 116)",
+ "rgb(13, 88, 95)",
+]
+
+Blugrn = [
+ "rgb(196, 230, 195)",
+ "rgb(150, 210, 164)",
+ "rgb(109, 188, 144)",
+ "rgb(77, 162, 132)",
+ "rgb(54, 135, 122)",
+ "rgb(38, 107, 110)",
+ "rgb(29, 79, 96)",
+]
+
+Darkmint = [
+ "rgb(210, 251, 212)",
+ "rgb(165, 219, 194)",
+ "rgb(123, 188, 176)",
+ "rgb(85, 156, 158)",
+ "rgb(58, 124, 137)",
+ "rgb(35, 93, 114)",
+ "rgb(18, 63, 90)",
+]
+
+Emrld = [
+ "rgb(211, 242, 163)",
+ "rgb(151, 225, 150)",
+ "rgb(108, 192, 139)",
+ "rgb(76, 155, 130)",
+ "rgb(33, 122, 121)",
+ "rgb(16, 89, 101)",
+ "rgb(7, 64, 80)",
+]
+
+Aggrnyl = [
+ "rgb(36, 86, 104)",
+ "rgb(15, 114, 121)",
+ "rgb(13, 143, 129)",
+ "rgb(57, 171, 126)",
+ "rgb(110, 197, 116)",
+ "rgb(169, 220, 103)",
+ "rgb(237, 239, 93)",
+]
+
+Bluyl = [
+ "rgb(247, 254, 174)",
+ "rgb(183, 230, 165)",
+ "rgb(124, 203, 162)",
+ "rgb(70, 174, 160)",
+ "rgb(8, 144, 153)",
+ "rgb(0, 113, 139)",
+ "rgb(4, 82, 117)",
+]
+
+Teal = [
+ "rgb(209, 238, 234)",
+ "rgb(168, 219, 217)",
+ "rgb(133, 196, 201)",
+ "rgb(104, 171, 184)",
+ "rgb(79, 144, 166)",
+ "rgb(59, 115, 143)",
+ "rgb(42, 86, 116)",
+]
+
+Tealgrn = [
+ "rgb(176, 242, 188)",
+ "rgb(137, 232, 172)",
+ "rgb(103, 219, 165)",
+ "rgb(76, 200, 163)",
+ "rgb(56, 178, 163)",
+ "rgb(44, 152, 160)",
+ "rgb(37, 125, 152)",
+]
+
+Purp = [
+ "rgb(243, 224, 247)",
+ "rgb(228, 199, 241)",
+ "rgb(209, 175, 232)",
+ "rgb(185, 152, 221)",
+ "rgb(159, 130, 206)",
+ "rgb(130, 109, 186)",
+ "rgb(99, 88, 159)",
+]
+
+Purpor = [
+ "rgb(249, 221, 218)",
+ "rgb(242, 185, 196)",
+ "rgb(229, 151, 185)",
+ "rgb(206, 120, 179)",
+ "rgb(173, 95, 173)",
+ "rgb(131, 75, 160)",
+ "rgb(87, 59, 136)",
+]
+
+Sunset = [
+ "rgb(243, 231, 155)",
+ "rgb(250, 196, 132)",
+ "rgb(248, 160, 126)",
+ "rgb(235, 127, 134)",
+ "rgb(206, 102, 147)",
+ "rgb(160, 89, 160)",
+ "rgb(92, 83, 165)",
+]
+
+Magenta = [
+ "rgb(243, 203, 211)",
+ "rgb(234, 169, 189)",
+ "rgb(221, 136, 172)",
+ "rgb(202, 105, 157)",
+ "rgb(177, 77, 142)",
+ "rgb(145, 53, 125)",
+ "rgb(108, 33, 103)",
+]
+
+Sunsetdark = [
+ "rgb(252, 222, 156)",
+ "rgb(250, 164, 118)",
+ "rgb(240, 116, 110)",
+ "rgb(227, 79, 111)",
+ "rgb(220, 57, 119)",
+ "rgb(185, 37, 122)",
+ "rgb(124, 29, 111)",
+]
+
+Agsunset = [
+ "rgb(75, 41, 145)",
+ "rgb(135, 44, 162)",
+ "rgb(192, 54, 157)",
+ "rgb(234, 79, 136)",
+ "rgb(250, 120, 118)",
+ "rgb(246, 169, 122)",
+ "rgb(237, 217, 163)",
+]
+
+Brwnyl = [
+ "rgb(237, 229, 207)",
+ "rgb(224, 194, 162)",
+ "rgb(211, 156, 131)",
+ "rgb(193, 118, 111)",
+ "rgb(166, 84, 97)",
+ "rgb(129, 55, 83)",
+ "rgb(84, 31, 63)",
+]
+
+# Diverging schemes
+
+Armyrose = [
+ "rgb(121, 130, 52)",
+ "rgb(163, 173, 98)",
+ "rgb(208, 211, 162)",
+ "rgb(253, 251, 228)",
+ "rgb(240, 198, 195)",
+ "rgb(223, 145, 163)",
+ "rgb(212, 103, 128)",
+]
+
+Fall = [
+ "rgb(61, 89, 65)",
+ "rgb(119, 136, 104)",
+ "rgb(181, 185, 145)",
+ "rgb(246, 237, 189)",
+ "rgb(237, 187, 138)",
+ "rgb(222, 138, 90)",
+ "rgb(202, 86, 44)",
+]
+
+Geyser = [
+ "rgb(0, 128, 128)",
+ "rgb(112, 164, 148)",
+ "rgb(180, 200, 168)",
+ "rgb(246, 237, 189)",
+ "rgb(237, 187, 138)",
+ "rgb(222, 138, 90)",
+ "rgb(202, 86, 44)",
+]
+
+Temps = [
+ "rgb(0, 147, 146)",
+ "rgb(57, 177, 133)",
+ "rgb(156, 203, 134)",
+ "rgb(233, 226, 156)",
+ "rgb(238, 180, 121)",
+ "rgb(232, 132, 113)",
+ "rgb(207, 89, 126)",
+]
+
+Tealrose = [
+ "rgb(0, 147, 146)",
+ "rgb(114, 170, 161)",
+ "rgb(177, 199, 179)",
+ "rgb(241, 234, 200)",
+ "rgb(229, 185, 173)",
+ "rgb(217, 137, 148)",
+ "rgb(208, 88, 126)",
+]
+
+Tropic = [
+ "rgb(0, 155, 158)",
+ "rgb(66, 183, 185)",
+ "rgb(167, 211, 212)",
+ "rgb(241, 241, 241)",
+ "rgb(228, 193, 217)",
+ "rgb(214, 145, 193)",
+ "rgb(199, 93, 171)",
+]
+
+Earth = [
+ "rgb(161, 105, 40)",
+ "rgb(189, 146, 90)",
+ "rgb(214, 189, 141)",
+ "rgb(237, 234, 194)",
+ "rgb(181, 200, 184)",
+ "rgb(121, 167, 172)",
+ "rgb(40, 135, 161)",
+]
+
+# Qualitative palettes
+
+Antique = [
+ "rgb(133, 92, 117)",
+ "rgb(217, 175, 107)",
+ "rgb(175, 100, 88)",
+ "rgb(115, 111, 76)",
+ "rgb(82, 106, 131)",
+ "rgb(98, 83, 119)",
+ "rgb(104, 133, 92)",
+ "rgb(156, 156, 94)",
+ "rgb(160, 97, 119)",
+ "rgb(140, 120, 93)",
+ "rgb(124, 124, 124)",
+]
+
+Bold = [
+ "rgb(127, 60, 141)",
+ "rgb(17, 165, 121)",
+ "rgb(57, 105, 172)",
+ "rgb(242, 183, 1)",
+ "rgb(231, 63, 116)",
+ "rgb(128, 186, 90)",
+ "rgb(230, 131, 16)",
+ "rgb(0, 134, 149)",
+ "rgb(207, 28, 144)",
+ "rgb(249, 123, 114)",
+ "rgb(165, 170, 153)",
+]
+
+Pastel = [
+ "rgb(102, 197, 204)",
+ "rgb(246, 207, 113)",
+ "rgb(248, 156, 116)",
+ "rgb(220, 176, 242)",
+ "rgb(135, 197, 95)",
+ "rgb(158, 185, 243)",
+ "rgb(254, 136, 177)",
+ "rgb(201, 219, 116)",
+ "rgb(139, 224, 164)",
+ "rgb(180, 151, 231)",
+ "rgb(179, 179, 179)",
+]
+
+Prism = [
+ "rgb(95, 70, 144)",
+ "rgb(29, 105, 150)",
+ "rgb(56, 166, 165)",
+ "rgb(15, 133, 84)",
+ "rgb(115, 175, 72)",
+ "rgb(237, 173, 8)",
+ "rgb(225, 124, 5)",
+ "rgb(204, 80, 62)",
+ "rgb(148, 52, 110)",
+ "rgb(111, 64, 112)",
+ "rgb(102, 102, 102)",
+]
+
+Safe = [
+ "rgb(136, 204, 238)",
+ "rgb(204, 102, 119)",
+ "rgb(221, 204, 119)",
+ "rgb(17, 119, 51)",
+ "rgb(51, 34, 136)",
+ "rgb(170, 68, 153)",
+ "rgb(68, 170, 153)",
+ "rgb(153, 153, 51)",
+ "rgb(136, 34, 85)",
+ "rgb(102, 17, 0)",
+ "rgb(136, 136, 136)",
+]
+
+Vivid = [
+ "rgb(229, 134, 6)",
+ "rgb(93, 105, 177)",
+ "rgb(82, 188, 163)",
+ "rgb(153, 201, 69)",
+ "rgb(204, 97, 176)",
+ "rgb(36, 121, 108)",
+ "rgb(218, 165, 27)",
+ "rgb(47, 138, 196)",
+ "rgb(118, 78, 159)",
+ "rgb(237, 100, 90)",
+ "rgb(165, 170, 153)",
+]
+
+Aggrnyl_r = Aggrnyl[::-1]
+Agsunset_r = Agsunset[::-1]
+Antique_r = Antique[::-1]
+Armyrose_r = Armyrose[::-1]
+Blugrn_r = Blugrn[::-1]
+Bluyl_r = Bluyl[::-1]
+Bold_r = Bold[::-1]
+Brwnyl_r = Brwnyl[::-1]
+Burg_r = Burg[::-1]
+Burgyl_r = Burgyl[::-1]
+Darkmint_r = Darkmint[::-1]
+Earth_r = Earth[::-1]
+Emrld_r = Emrld[::-1]
+Fall_r = Fall[::-1]
+Geyser_r = Geyser[::-1]
+Magenta_r = Magenta[::-1]
+Mint_r = Mint[::-1]
+Oryel_r = Oryel[::-1]
+Pastel_r = Pastel[::-1]
+Peach_r = Peach[::-1]
+Pinkyl_r = Pinkyl[::-1]
+Prism_r = Prism[::-1]
+Purp_r = Purp[::-1]
+Purpor_r = Purpor[::-1]
+Redor_r = Redor[::-1]
+Safe_r = Safe[::-1]
+Sunset_r = Sunset[::-1]
+Sunsetdark_r = Sunsetdark[::-1]
+Teal_r = Teal[::-1]
+Tealgrn_r = Tealgrn[::-1]
+Tealrose_r = Tealrose[::-1]
+Temps_r = Temps[::-1]
+Tropic_r = Tropic[::-1]
+Vivid_r = Vivid[::-1]
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/colors/cmocean.py b/venv/lib/python3.8/site-packages/_plotly_utils/colors/cmocean.py
new file mode 100644
index 0000000..18944b5
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/colors/cmocean.py
@@ -0,0 +1,296 @@
+"""
+Color scales from the cmocean project
+
+Learn more at https://matplotlib.org/cmocean/
+
+cmocean is made available under an MIT license: https://github.com/matplotlib/cmocean/blob/master/LICENSE.txt
+"""
+
+from ._swatches import _swatches, _swatches_continuous
+
+
+def swatches(template=None):
+ return _swatches(__name__, globals(), template)
+
+
+swatches.__doc__ = _swatches.__doc__
+
+
+def swatches_continuous(template=None):
+ return _swatches_continuous(__name__, globals(), template)
+
+
+swatches_continuous.__doc__ = _swatches_continuous.__doc__
+
+
+turbid = [
+ "rgb(232, 245, 171)",
+ "rgb(220, 219, 137)",
+ "rgb(209, 193, 107)",
+ "rgb(199, 168, 83)",
+ "rgb(186, 143, 66)",
+ "rgb(170, 121, 60)",
+ "rgb(151, 103, 58)",
+ "rgb(129, 87, 56)",
+ "rgb(104, 72, 53)",
+ "rgb(80, 59, 46)",
+ "rgb(57, 45, 37)",
+ "rgb(34, 30, 27)",
+]
+thermal = [
+ "rgb(3, 35, 51)",
+ "rgb(13, 48, 100)",
+ "rgb(53, 50, 155)",
+ "rgb(93, 62, 153)",
+ "rgb(126, 77, 143)",
+ "rgb(158, 89, 135)",
+ "rgb(193, 100, 121)",
+ "rgb(225, 113, 97)",
+ "rgb(246, 139, 69)",
+ "rgb(251, 173, 60)",
+ "rgb(246, 211, 70)",
+ "rgb(231, 250, 90)",
+]
+haline = [
+ "rgb(41, 24, 107)",
+ "rgb(42, 35, 160)",
+ "rgb(15, 71, 153)",
+ "rgb(18, 95, 142)",
+ "rgb(38, 116, 137)",
+ "rgb(53, 136, 136)",
+ "rgb(65, 157, 133)",
+ "rgb(81, 178, 124)",
+ "rgb(111, 198, 107)",
+ "rgb(160, 214, 91)",
+ "rgb(212, 225, 112)",
+ "rgb(253, 238, 153)",
+]
+solar = [
+ "rgb(51, 19, 23)",
+ "rgb(79, 28, 33)",
+ "rgb(108, 36, 36)",
+ "rgb(135, 47, 32)",
+ "rgb(157, 66, 25)",
+ "rgb(174, 88, 20)",
+ "rgb(188, 111, 19)",
+ "rgb(199, 137, 22)",
+ "rgb(209, 164, 32)",
+ "rgb(217, 192, 44)",
+ "rgb(222, 222, 59)",
+ "rgb(224, 253, 74)",
+]
+ice = [
+ "rgb(3, 5, 18)",
+ "rgb(25, 25, 51)",
+ "rgb(44, 42, 87)",
+ "rgb(58, 60, 125)",
+ "rgb(62, 83, 160)",
+ "rgb(62, 109, 178)",
+ "rgb(72, 134, 187)",
+ "rgb(89, 159, 196)",
+ "rgb(114, 184, 205)",
+ "rgb(149, 207, 216)",
+ "rgb(192, 229, 232)",
+ "rgb(234, 252, 253)",
+]
+gray = [
+ "rgb(0, 0, 0)",
+ "rgb(16, 16, 16)",
+ "rgb(38, 38, 38)",
+ "rgb(59, 59, 59)",
+ "rgb(81, 80, 80)",
+ "rgb(102, 101, 101)",
+ "rgb(124, 123, 122)",
+ "rgb(146, 146, 145)",
+ "rgb(171, 171, 170)",
+ "rgb(197, 197, 195)",
+ "rgb(224, 224, 223)",
+ "rgb(254, 254, 253)",
+]
+oxy = [
+ "rgb(63, 5, 5)",
+ "rgb(101, 6, 13)",
+ "rgb(138, 17, 9)",
+ "rgb(96, 95, 95)",
+ "rgb(119, 118, 118)",
+ "rgb(142, 141, 141)",
+ "rgb(166, 166, 165)",
+ "rgb(193, 192, 191)",
+ "rgb(222, 222, 220)",
+ "rgb(239, 248, 90)",
+ "rgb(230, 210, 41)",
+ "rgb(220, 174, 25)",
+]
+deep = [
+ "rgb(253, 253, 204)",
+ "rgb(206, 236, 179)",
+ "rgb(156, 219, 165)",
+ "rgb(111, 201, 163)",
+ "rgb(86, 177, 163)",
+ "rgb(76, 153, 160)",
+ "rgb(68, 130, 155)",
+ "rgb(62, 108, 150)",
+ "rgb(62, 82, 143)",
+ "rgb(64, 60, 115)",
+ "rgb(54, 43, 77)",
+ "rgb(39, 26, 44)",
+]
+dense = [
+ "rgb(230, 240, 240)",
+ "rgb(191, 221, 229)",
+ "rgb(156, 201, 226)",
+ "rgb(129, 180, 227)",
+ "rgb(115, 154, 228)",
+ "rgb(117, 127, 221)",
+ "rgb(120, 100, 202)",
+ "rgb(119, 74, 175)",
+ "rgb(113, 50, 141)",
+ "rgb(100, 31, 104)",
+ "rgb(80, 20, 66)",
+ "rgb(54, 14, 36)",
+]
+algae = [
+ "rgb(214, 249, 207)",
+ "rgb(186, 228, 174)",
+ "rgb(156, 209, 143)",
+ "rgb(124, 191, 115)",
+ "rgb(85, 174, 91)",
+ "rgb(37, 157, 81)",
+ "rgb(7, 138, 78)",
+ "rgb(13, 117, 71)",
+ "rgb(23, 95, 61)",
+ "rgb(25, 75, 49)",
+ "rgb(23, 55, 35)",
+ "rgb(17, 36, 20)",
+]
+matter = [
+ "rgb(253, 237, 176)",
+ "rgb(250, 205, 145)",
+ "rgb(246, 173, 119)",
+ "rgb(240, 142, 98)",
+ "rgb(231, 109, 84)",
+ "rgb(216, 80, 83)",
+ "rgb(195, 56, 90)",
+ "rgb(168, 40, 96)",
+ "rgb(138, 29, 99)",
+ "rgb(107, 24, 93)",
+ "rgb(76, 21, 80)",
+ "rgb(47, 15, 61)",
+]
+speed = [
+ "rgb(254, 252, 205)",
+ "rgb(239, 225, 156)",
+ "rgb(221, 201, 106)",
+ "rgb(194, 182, 59)",
+ "rgb(157, 167, 21)",
+ "rgb(116, 153, 5)",
+ "rgb(75, 138, 20)",
+ "rgb(35, 121, 36)",
+ "rgb(11, 100, 44)",
+ "rgb(18, 78, 43)",
+ "rgb(25, 56, 34)",
+ "rgb(23, 35, 18)",
+]
+amp = [
+ "rgb(241, 236, 236)",
+ "rgb(230, 209, 203)",
+ "rgb(221, 182, 170)",
+ "rgb(213, 156, 137)",
+ "rgb(205, 129, 103)",
+ "rgb(196, 102, 73)",
+ "rgb(186, 74, 47)",
+ "rgb(172, 44, 36)",
+ "rgb(149, 19, 39)",
+ "rgb(120, 14, 40)",
+ "rgb(89, 13, 31)",
+ "rgb(60, 9, 17)",
+]
+tempo = [
+ "rgb(254, 245, 244)",
+ "rgb(222, 224, 210)",
+ "rgb(189, 206, 181)",
+ "rgb(153, 189, 156)",
+ "rgb(110, 173, 138)",
+ "rgb(65, 157, 129)",
+ "rgb(25, 137, 125)",
+ "rgb(18, 116, 117)",
+ "rgb(25, 94, 106)",
+ "rgb(28, 72, 93)",
+ "rgb(25, 51, 80)",
+ "rgb(20, 29, 67)",
+]
+phase = [
+ "rgb(167, 119, 12)",
+ "rgb(197, 96, 51)",
+ "rgb(217, 67, 96)",
+ "rgb(221, 38, 163)",
+ "rgb(196, 59, 224)",
+ "rgb(153, 97, 244)",
+ "rgb(95, 127, 228)",
+ "rgb(40, 144, 183)",
+ "rgb(15, 151, 136)",
+ "rgb(39, 153, 79)",
+ "rgb(119, 141, 17)",
+ "rgb(167, 119, 12)",
+]
+balance = [
+ "rgb(23, 28, 66)",
+ "rgb(41, 58, 143)",
+ "rgb(11, 102, 189)",
+ "rgb(69, 144, 185)",
+ "rgb(142, 181, 194)",
+ "rgb(210, 216, 219)",
+ "rgb(230, 210, 204)",
+ "rgb(213, 157, 137)",
+ "rgb(196, 101, 72)",
+ "rgb(172, 43, 36)",
+ "rgb(120, 14, 40)",
+ "rgb(60, 9, 17)",
+]
+delta = [
+ "rgb(16, 31, 63)",
+ "rgb(38, 62, 144)",
+ "rgb(30, 110, 161)",
+ "rgb(60, 154, 171)",
+ "rgb(140, 193, 186)",
+ "rgb(217, 229, 218)",
+ "rgb(239, 226, 156)",
+ "rgb(195, 182, 59)",
+ "rgb(115, 152, 5)",
+ "rgb(34, 120, 36)",
+ "rgb(18, 78, 43)",
+ "rgb(23, 35, 18)",
+]
+curl = [
+ "rgb(20, 29, 67)",
+ "rgb(28, 72, 93)",
+ "rgb(18, 115, 117)",
+ "rgb(63, 156, 129)",
+ "rgb(153, 189, 156)",
+ "rgb(223, 225, 211)",
+ "rgb(241, 218, 206)",
+ "rgb(224, 160, 137)",
+ "rgb(203, 101, 99)",
+ "rgb(164, 54, 96)",
+ "rgb(111, 23, 91)",
+ "rgb(51, 13, 53)",
+]
+
+algae_r = algae[::-1]
+amp_r = amp[::-1]
+balance_r = balance[::-1]
+curl_r = curl[::-1]
+deep_r = deep[::-1]
+delta_r = delta[::-1]
+dense_r = dense[::-1]
+gray_r = gray[::-1]
+haline_r = haline[::-1]
+ice_r = ice[::-1]
+matter_r = matter[::-1]
+oxy_r = oxy[::-1]
+phase_r = phase[::-1]
+solar_r = solar[::-1]
+speed_r = speed[::-1]
+tempo_r = tempo[::-1]
+thermal_r = thermal[::-1]
+turbid_r = turbid[::-1]
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/colors/colorbrewer.py b/venv/lib/python3.8/site-packages/_plotly_utils/colors/colorbrewer.py
new file mode 100644
index 0000000..cbb286e
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/colors/colorbrewer.py
@@ -0,0 +1,494 @@
+"""
+Color scales and sequences from the colorbrewer 2 project
+
+Learn more at http://colorbrewer2.org
+
+colorbrewer is made available under an Apache license: http://colorbrewer2.org/export/LICENSE.txt
+"""
+
+from ._swatches import _swatches
+
+
+def swatches(template=None):
+ return _swatches(__name__, globals(), template)
+
+
+swatches.__doc__ = _swatches.__doc__
+
+BrBG = [
+ "rgb(84,48,5)",
+ "rgb(140,81,10)",
+ "rgb(191,129,45)",
+ "rgb(223,194,125)",
+ "rgb(246,232,195)",
+ "rgb(245,245,245)",
+ "rgb(199,234,229)",
+ "rgb(128,205,193)",
+ "rgb(53,151,143)",
+ "rgb(1,102,94)",
+ "rgb(0,60,48)",
+]
+
+PRGn = [
+ "rgb(64,0,75)",
+ "rgb(118,42,131)",
+ "rgb(153,112,171)",
+ "rgb(194,165,207)",
+ "rgb(231,212,232)",
+ "rgb(247,247,247)",
+ "rgb(217,240,211)",
+ "rgb(166,219,160)",
+ "rgb(90,174,97)",
+ "rgb(27,120,55)",
+ "rgb(0,68,27)",
+]
+
+PiYG = [
+ "rgb(142,1,82)",
+ "rgb(197,27,125)",
+ "rgb(222,119,174)",
+ "rgb(241,182,218)",
+ "rgb(253,224,239)",
+ "rgb(247,247,247)",
+ "rgb(230,245,208)",
+ "rgb(184,225,134)",
+ "rgb(127,188,65)",
+ "rgb(77,146,33)",
+ "rgb(39,100,25)",
+]
+
+PuOr = [
+ "rgb(127,59,8)",
+ "rgb(179,88,6)",
+ "rgb(224,130,20)",
+ "rgb(253,184,99)",
+ "rgb(254,224,182)",
+ "rgb(247,247,247)",
+ "rgb(216,218,235)",
+ "rgb(178,171,210)",
+ "rgb(128,115,172)",
+ "rgb(84,39,136)",
+ "rgb(45,0,75)",
+]
+
+RdBu = [
+ "rgb(103,0,31)",
+ "rgb(178,24,43)",
+ "rgb(214,96,77)",
+ "rgb(244,165,130)",
+ "rgb(253,219,199)",
+ "rgb(247,247,247)",
+ "rgb(209,229,240)",
+ "rgb(146,197,222)",
+ "rgb(67,147,195)",
+ "rgb(33,102,172)",
+ "rgb(5,48,97)",
+]
+
+RdGy = [
+ "rgb(103,0,31)",
+ "rgb(178,24,43)",
+ "rgb(214,96,77)",
+ "rgb(244,165,130)",
+ "rgb(253,219,199)",
+ "rgb(255,255,255)",
+ "rgb(224,224,224)",
+ "rgb(186,186,186)",
+ "rgb(135,135,135)",
+ "rgb(77,77,77)",
+ "rgb(26,26,26)",
+]
+
+RdYlBu = [
+ "rgb(165,0,38)",
+ "rgb(215,48,39)",
+ "rgb(244,109,67)",
+ "rgb(253,174,97)",
+ "rgb(254,224,144)",
+ "rgb(255,255,191)",
+ "rgb(224,243,248)",
+ "rgb(171,217,233)",
+ "rgb(116,173,209)",
+ "rgb(69,117,180)",
+ "rgb(49,54,149)",
+]
+
+RdYlGn = [
+ "rgb(165,0,38)",
+ "rgb(215,48,39)",
+ "rgb(244,109,67)",
+ "rgb(253,174,97)",
+ "rgb(254,224,139)",
+ "rgb(255,255,191)",
+ "rgb(217,239,139)",
+ "rgb(166,217,106)",
+ "rgb(102,189,99)",
+ "rgb(26,152,80)",
+ "rgb(0,104,55)",
+]
+
+Spectral = [
+ "rgb(158,1,66)",
+ "rgb(213,62,79)",
+ "rgb(244,109,67)",
+ "rgb(253,174,97)",
+ "rgb(254,224,139)",
+ "rgb(255,255,191)",
+ "rgb(230,245,152)",
+ "rgb(171,221,164)",
+ "rgb(102,194,165)",
+ "rgb(50,136,189)",
+ "rgb(94,79,162)",
+]
+
+Set1 = [
+ "rgb(228,26,28)",
+ "rgb(55,126,184)",
+ "rgb(77,175,74)",
+ "rgb(152,78,163)",
+ "rgb(255,127,0)",
+ "rgb(255,255,51)",
+ "rgb(166,86,40)",
+ "rgb(247,129,191)",
+ "rgb(153,153,153)",
+]
+
+
+Pastel1 = [
+ "rgb(251,180,174)",
+ "rgb(179,205,227)",
+ "rgb(204,235,197)",
+ "rgb(222,203,228)",
+ "rgb(254,217,166)",
+ "rgb(255,255,204)",
+ "rgb(229,216,189)",
+ "rgb(253,218,236)",
+ "rgb(242,242,242)",
+]
+Dark2 = [
+ "rgb(27,158,119)",
+ "rgb(217,95,2)",
+ "rgb(117,112,179)",
+ "rgb(231,41,138)",
+ "rgb(102,166,30)",
+ "rgb(230,171,2)",
+ "rgb(166,118,29)",
+ "rgb(102,102,102)",
+]
+Set2 = [
+ "rgb(102,194,165)",
+ "rgb(252,141,98)",
+ "rgb(141,160,203)",
+ "rgb(231,138,195)",
+ "rgb(166,216,84)",
+ "rgb(255,217,47)",
+ "rgb(229,196,148)",
+ "rgb(179,179,179)",
+]
+
+
+Pastel2 = [
+ "rgb(179,226,205)",
+ "rgb(253,205,172)",
+ "rgb(203,213,232)",
+ "rgb(244,202,228)",
+ "rgb(230,245,201)",
+ "rgb(255,242,174)",
+ "rgb(241,226,204)",
+ "rgb(204,204,204)",
+]
+
+Set3 = [
+ "rgb(141,211,199)",
+ "rgb(255,255,179)",
+ "rgb(190,186,218)",
+ "rgb(251,128,114)",
+ "rgb(128,177,211)",
+ "rgb(253,180,98)",
+ "rgb(179,222,105)",
+ "rgb(252,205,229)",
+ "rgb(217,217,217)",
+ "rgb(188,128,189)",
+ "rgb(204,235,197)",
+ "rgb(255,237,111)",
+]
+
+Accent = [
+ "rgb(127,201,127)",
+ "rgb(190,174,212)",
+ "rgb(253,192,134)",
+ "rgb(255,255,153)",
+ "rgb(56,108,176)",
+ "rgb(240,2,127)",
+ "rgb(191,91,23)",
+ "rgb(102,102,102)",
+]
+
+
+Paired = [
+ "rgb(166,206,227)",
+ "rgb(31,120,180)",
+ "rgb(178,223,138)",
+ "rgb(51,160,44)",
+ "rgb(251,154,153)",
+ "rgb(227,26,28)",
+ "rgb(253,191,111)",
+ "rgb(255,127,0)",
+ "rgb(202,178,214)",
+ "rgb(106,61,154)",
+ "rgb(255,255,153)",
+ "rgb(177,89,40)",
+]
+
+
+Blues = [
+ "rgb(247,251,255)",
+ "rgb(222,235,247)",
+ "rgb(198,219,239)",
+ "rgb(158,202,225)",
+ "rgb(107,174,214)",
+ "rgb(66,146,198)",
+ "rgb(33,113,181)",
+ "rgb(8,81,156)",
+ "rgb(8,48,107)",
+]
+
+BuGn = [
+ "rgb(247,252,253)",
+ "rgb(229,245,249)",
+ "rgb(204,236,230)",
+ "rgb(153,216,201)",
+ "rgb(102,194,164)",
+ "rgb(65,174,118)",
+ "rgb(35,139,69)",
+ "rgb(0,109,44)",
+ "rgb(0,68,27)",
+]
+
+BuPu = [
+ "rgb(247,252,253)",
+ "rgb(224,236,244)",
+ "rgb(191,211,230)",
+ "rgb(158,188,218)",
+ "rgb(140,150,198)",
+ "rgb(140,107,177)",
+ "rgb(136,65,157)",
+ "rgb(129,15,124)",
+ "rgb(77,0,75)",
+]
+
+GnBu = [
+ "rgb(247,252,240)",
+ "rgb(224,243,219)",
+ "rgb(204,235,197)",
+ "rgb(168,221,181)",
+ "rgb(123,204,196)",
+ "rgb(78,179,211)",
+ "rgb(43,140,190)",
+ "rgb(8,104,172)",
+ "rgb(8,64,129)",
+]
+
+Greens = [
+ "rgb(247,252,245)",
+ "rgb(229,245,224)",
+ "rgb(199,233,192)",
+ "rgb(161,217,155)",
+ "rgb(116,196,118)",
+ "rgb(65,171,93)",
+ "rgb(35,139,69)",
+ "rgb(0,109,44)",
+ "rgb(0,68,27)",
+]
+
+Greys = [
+ "rgb(255,255,255)",
+ "rgb(240,240,240)",
+ "rgb(217,217,217)",
+ "rgb(189,189,189)",
+ "rgb(150,150,150)",
+ "rgb(115,115,115)",
+ "rgb(82,82,82)",
+ "rgb(37,37,37)",
+ "rgb(0,0,0)",
+]
+
+OrRd = [
+ "rgb(255,247,236)",
+ "rgb(254,232,200)",
+ "rgb(253,212,158)",
+ "rgb(253,187,132)",
+ "rgb(252,141,89)",
+ "rgb(239,101,72)",
+ "rgb(215,48,31)",
+ "rgb(179,0,0)",
+ "rgb(127,0,0)",
+]
+
+Oranges = [
+ "rgb(255,245,235)",
+ "rgb(254,230,206)",
+ "rgb(253,208,162)",
+ "rgb(253,174,107)",
+ "rgb(253,141,60)",
+ "rgb(241,105,19)",
+ "rgb(217,72,1)",
+ "rgb(166,54,3)",
+ "rgb(127,39,4)",
+]
+
+PuBu = [
+ "rgb(255,247,251)",
+ "rgb(236,231,242)",
+ "rgb(208,209,230)",
+ "rgb(166,189,219)",
+ "rgb(116,169,207)",
+ "rgb(54,144,192)",
+ "rgb(5,112,176)",
+ "rgb(4,90,141)",
+ "rgb(2,56,88)",
+]
+
+PuBuGn = [
+ "rgb(255,247,251)",
+ "rgb(236,226,240)",
+ "rgb(208,209,230)",
+ "rgb(166,189,219)",
+ "rgb(103,169,207)",
+ "rgb(54,144,192)",
+ "rgb(2,129,138)",
+ "rgb(1,108,89)",
+ "rgb(1,70,54)",
+]
+
+PuRd = [
+ "rgb(247,244,249)",
+ "rgb(231,225,239)",
+ "rgb(212,185,218)",
+ "rgb(201,148,199)",
+ "rgb(223,101,176)",
+ "rgb(231,41,138)",
+ "rgb(206,18,86)",
+ "rgb(152,0,67)",
+ "rgb(103,0,31)",
+]
+
+Purples = [
+ "rgb(252,251,253)",
+ "rgb(239,237,245)",
+ "rgb(218,218,235)",
+ "rgb(188,189,220)",
+ "rgb(158,154,200)",
+ "rgb(128,125,186)",
+ "rgb(106,81,163)",
+ "rgb(84,39,143)",
+ "rgb(63,0,125)",
+]
+
+RdPu = [
+ "rgb(255,247,243)",
+ "rgb(253,224,221)",
+ "rgb(252,197,192)",
+ "rgb(250,159,181)",
+ "rgb(247,104,161)",
+ "rgb(221,52,151)",
+ "rgb(174,1,126)",
+ "rgb(122,1,119)",
+ "rgb(73,0,106)",
+]
+
+Reds = [
+ "rgb(255,245,240)",
+ "rgb(254,224,210)",
+ "rgb(252,187,161)",
+ "rgb(252,146,114)",
+ "rgb(251,106,74)",
+ "rgb(239,59,44)",
+ "rgb(203,24,29)",
+ "rgb(165,15,21)",
+ "rgb(103,0,13)",
+]
+
+YlGn = [
+ "rgb(255,255,229)",
+ "rgb(247,252,185)",
+ "rgb(217,240,163)",
+ "rgb(173,221,142)",
+ "rgb(120,198,121)",
+ "rgb(65,171,93)",
+ "rgb(35,132,67)",
+ "rgb(0,104,55)",
+ "rgb(0,69,41)",
+]
+
+YlGnBu = [
+ "rgb(255,255,217)",
+ "rgb(237,248,177)",
+ "rgb(199,233,180)",
+ "rgb(127,205,187)",
+ "rgb(65,182,196)",
+ "rgb(29,145,192)",
+ "rgb(34,94,168)",
+ "rgb(37,52,148)",
+ "rgb(8,29,88)",
+]
+
+YlOrBr = [
+ "rgb(255,255,229)",
+ "rgb(255,247,188)",
+ "rgb(254,227,145)",
+ "rgb(254,196,79)",
+ "rgb(254,153,41)",
+ "rgb(236,112,20)",
+ "rgb(204,76,2)",
+ "rgb(153,52,4)",
+ "rgb(102,37,6)",
+]
+
+YlOrRd = [
+ "rgb(255,255,204)",
+ "rgb(255,237,160)",
+ "rgb(254,217,118)",
+ "rgb(254,178,76)",
+ "rgb(253,141,60)",
+ "rgb(252,78,42)",
+ "rgb(227,26,28)",
+ "rgb(189,0,38)",
+ "rgb(128,0,38)",
+]
+
+Accent_r = Accent[::-1]
+Blues_r = Blues[::-1]
+BrBG_r = BrBG[::-1]
+BuGn_r = BuGn[::-1]
+BuPu_r = BuPu[::-1]
+Dark2_r = Dark2[::-1]
+GnBu_r = GnBu[::-1]
+Greens_r = Greens[::-1]
+Greys_r = Greys[::-1]
+OrRd_r = OrRd[::-1]
+Oranges_r = Oranges[::-1]
+PRGn_r = PRGn[::-1]
+Paired_r = Paired[::-1]
+Pastel1_r = Pastel1[::-1]
+Pastel2_r = Pastel2[::-1]
+PiYG_r = PiYG[::-1]
+PuBu_r = PuBu[::-1]
+PuBuGn_r = PuBuGn[::-1]
+PuOr_r = PuOr[::-1]
+PuRd_r = PuRd[::-1]
+Purples_r = Purples[::-1]
+RdBu_r = RdBu[::-1]
+RdGy_r = RdGy[::-1]
+RdPu_r = RdPu[::-1]
+RdYlBu_r = RdYlBu[::-1]
+RdYlGn_r = RdYlGn[::-1]
+Reds_r = Reds[::-1]
+Set1_r = Set1[::-1]
+Set2_r = Set2[::-1]
+Set3_r = Set3[::-1]
+Spectral_r = Spectral[::-1]
+YlGn_r = YlGn[::-1]
+YlGnBu_r = YlGnBu[::-1]
+YlOrBr_r = YlOrBr[::-1]
+YlOrRd_r = YlOrRd[::-1]
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/colors/cyclical.py b/venv/lib/python3.8/site-packages/_plotly_utils/colors/cyclical.py
new file mode 100644
index 0000000..bcb7d07
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/colors/cyclical.py
@@ -0,0 +1,157 @@
+"""
+Cyclical color scales are appropriate for continuous data that has a natural cyclical \
+structure, such as temporal data (hour of day, day of week, day of year, seasons) or
+complex numbers or other phase data.
+"""
+
+from ._swatches import _swatches, _swatches_continuous, _swatches_cyclical
+
+
+def swatches(template=None):
+ return _swatches(__name__, globals(), template)
+
+
+swatches.__doc__ = _swatches.__doc__
+
+
+def swatches_continuous(template=None):
+ return _swatches_continuous(__name__, globals(), template)
+
+
+swatches_continuous.__doc__ = _swatches_continuous.__doc__
+
+
+def swatches_cyclical(template=None):
+ return _swatches_cyclical(__name__, globals(), template)
+
+
+swatches_cyclical.__doc__ = _swatches_cyclical.__doc__
+
+
+Twilight = [
+ "#e2d9e2",
+ "#9ebbc9",
+ "#6785be",
+ "#5e43a5",
+ "#421257",
+ "#471340",
+ "#8e2c50",
+ "#ba6657",
+ "#ceac94",
+ "#e2d9e2",
+]
+IceFire = [
+ "#000000",
+ "#001f4d",
+ "#003786",
+ "#0e58a8",
+ "#217eb8",
+ "#30a4ca",
+ "#54c8df",
+ "#9be4ef",
+ "#e1e9d1",
+ "#f3d573",
+ "#e7b000",
+ "#da8200",
+ "#c65400",
+ "#ac2301",
+ "#820000",
+ "#4c0000",
+ "#000000",
+]
+Edge = [
+ "#313131",
+ "#3d019d",
+ "#3810dc",
+ "#2d47f9",
+ "#2593ff",
+ "#2adef6",
+ "#60fdfa",
+ "#aefdff",
+ "#f3f3f1",
+ "#fffda9",
+ "#fafd5b",
+ "#f7da29",
+ "#ff8e25",
+ "#f8432d",
+ "#d90d39",
+ "#97023d",
+ "#313131",
+]
+Phase = [
+ "rgb(167, 119, 12)",
+ "rgb(197, 96, 51)",
+ "rgb(217, 67, 96)",
+ "rgb(221, 38, 163)",
+ "rgb(196, 59, 224)",
+ "rgb(153, 97, 244)",
+ "rgb(95, 127, 228)",
+ "rgb(40, 144, 183)",
+ "rgb(15, 151, 136)",
+ "rgb(39, 153, 79)",
+ "rgb(119, 141, 17)",
+ "rgb(167, 119, 12)",
+]
+HSV = [
+ "#ff0000",
+ "#ffa700",
+ "#afff00",
+ "#08ff00",
+ "#00ff9f",
+ "#00b7ff",
+ "#0010ff",
+ "#9700ff",
+ "#ff00bf",
+ "#ff0000",
+]
+mrybm = [
+ "#f884f7",
+ "#f968c4",
+ "#ea4388",
+ "#cf244b",
+ "#b51a15",
+ "#bd4304",
+ "#cc6904",
+ "#d58f04",
+ "#cfaa27",
+ "#a19f62",
+ "#588a93",
+ "#2269c4",
+ "#3e3ef0",
+ "#6b4ef9",
+ "#956bfa",
+ "#cd7dfe",
+ "#f884f7",
+]
+mygbm = [
+ "#ef55f1",
+ "#fb84ce",
+ "#fbafa1",
+ "#fcd471",
+ "#f0ed35",
+ "#c6e516",
+ "#96d310",
+ "#61c10b",
+ "#31ac28",
+ "#439064",
+ "#3d719a",
+ "#284ec8",
+ "#2e21ea",
+ "#6324f5",
+ "#9139fa",
+ "#c543fa",
+ "#ef55f1",
+]
+
+Edge_r = Edge[::-1]
+HSV_r = HSV[::-1]
+IceFire_r = IceFire[::-1]
+Phase_r = Phase[::-1]
+Twilight_r = Twilight[::-1]
+mrybm_r = mrybm[::-1]
+mygbm_r = mygbm[::-1]
+
+__all__ = [
+ "swatches",
+ "swatches_cyclical",
+]
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/colors/diverging.py b/venv/lib/python3.8/site-packages/_plotly_utils/colors/diverging.py
new file mode 100644
index 0000000..53170df
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/colors/diverging.py
@@ -0,0 +1,75 @@
+"""
+Diverging color scales are appropriate for continuous data that has a natural midpoint \
+or otherwise informative special value, such as 0 altitude, or the boiling point
+of a liquid. The color scales in this module are \
+mostly meant to be passed in as the `color_continuous_scale` argument to various \
+functions, and to be used with the `color_continuous_midpoint` argument.
+"""
+
+from .colorbrewer import ( # noqa: F401
+ BrBG,
+ PRGn,
+ PiYG,
+ PuOr,
+ RdBu,
+ RdGy,
+ RdYlBu,
+ RdYlGn,
+ Spectral,
+ BrBG_r,
+ PRGn_r,
+ PiYG_r,
+ PuOr_r,
+ RdBu_r,
+ RdGy_r,
+ RdYlBu_r,
+ RdYlGn_r,
+ Spectral_r,
+)
+from .cmocean import ( # noqa: F401
+ balance,
+ delta,
+ curl,
+ oxy,
+ balance_r,
+ delta_r,
+ curl_r,
+ oxy_r,
+)
+from .carto import ( # noqa: F401
+ Armyrose,
+ Fall,
+ Geyser,
+ Temps,
+ Tealrose,
+ Tropic,
+ Earth,
+ Armyrose_r,
+ Fall_r,
+ Geyser_r,
+ Temps_r,
+ Tealrose_r,
+ Tropic_r,
+ Earth_r,
+)
+
+from .plotlyjs import Picnic, Portland, Picnic_r, Portland_r # noqa: F401
+
+from ._swatches import _swatches, _swatches_continuous
+
+
+def swatches(template=None):
+ return _swatches(__name__, globals(), template)
+
+
+swatches.__doc__ = _swatches.__doc__
+
+
+def swatches_continuous(template=None):
+ return _swatches_continuous(__name__, globals(), template)
+
+
+swatches_continuous.__doc__ = _swatches_continuous.__doc__
+
+
+__all__ = ["swatches"]
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/colors/plotlyjs.py b/venv/lib/python3.8/site-packages/_plotly_utils/colors/plotlyjs.py
new file mode 100644
index 0000000..a49ae02
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/colors/plotlyjs.py
@@ -0,0 +1,180 @@
+# Copied from
+# https://github.com/plotly/plotly.js/blob/master/src/components/colorscale/scales.js
+
+# NOTE: these differ slightly from plotly.colors.PLOTLY_SCALES from Plotly.js because
+# those ones don't have perfectly evenly spaced steps ...
+# not sure when this skew was introduced, possibly as early as Plotly.py v4.0
+
+Blackbody = [
+ "rgb(0,0,0)",
+ "rgb(230,0,0)",
+ "rgb(230,210,0)",
+ "rgb(255,255,255)",
+ "rgb(160,200,255)",
+]
+Bluered = ["rgb(0,0,255)", "rgb(255,0,0)"]
+Blues = [
+ "rgb(5,10,172)",
+ "rgb(40,60,190)",
+ "rgb(70,100,245)",
+ "rgb(90,120,245)",
+ "rgb(106,137,247)",
+ "rgb(220,220,220)",
+]
+Cividis = [
+ "rgb(0,32,76)",
+ "rgb(0,42,102)",
+ "rgb(0,52,110)",
+ "rgb(39,63,108)",
+ "rgb(60,74,107)",
+ "rgb(76,85,107)",
+ "rgb(91,95,109)",
+ "rgb(104,106,112)",
+ "rgb(117,117,117)",
+ "rgb(131,129,120)",
+ "rgb(146,140,120)",
+ "rgb(161,152,118)",
+ "rgb(176,165,114)",
+ "rgb(192,177,109)",
+ "rgb(209,191,102)",
+ "rgb(225,204,92)",
+ "rgb(243,219,79)",
+ "rgb(255,233,69)",
+]
+Earth = [
+ "rgb(0,0,130)",
+ "rgb(0,180,180)",
+ "rgb(40,210,40)",
+ "rgb(230,230,50)",
+ "rgb(120,70,20)",
+ "rgb(255,255,255)",
+]
+Electric = [
+ "rgb(0,0,0)",
+ "rgb(30,0,100)",
+ "rgb(120,0,100)",
+ "rgb(160,90,0)",
+ "rgb(230,200,0)",
+ "rgb(255,250,220)",
+]
+Greens = [
+ "rgb(0,68,27)",
+ "rgb(0,109,44)",
+ "rgb(35,139,69)",
+ "rgb(65,171,93)",
+ "rgb(116,196,118)",
+ "rgb(161,217,155)",
+ "rgb(199,233,192)",
+ "rgb(229,245,224)",
+ "rgb(247,252,245)",
+]
+Greys = ["rgb(0,0,0)", "rgb(255,255,255)"]
+Hot = ["rgb(0,0,0)", "rgb(230,0,0)", "rgb(255,210,0)", "rgb(255,255,255)"]
+Jet = [
+ "rgb(0,0,131)",
+ "rgb(0,60,170)",
+ "rgb(5,255,255)",
+ "rgb(255,255,0)",
+ "rgb(250,0,0)",
+ "rgb(128,0,0)",
+]
+Picnic = [
+ "rgb(0,0,255)",
+ "rgb(51,153,255)",
+ "rgb(102,204,255)",
+ "rgb(153,204,255)",
+ "rgb(204,204,255)",
+ "rgb(255,255,255)",
+ "rgb(255,204,255)",
+ "rgb(255,153,255)",
+ "rgb(255,102,204)",
+ "rgb(255,102,102)",
+ "rgb(255,0,0)",
+]
+Portland = [
+ "rgb(12,51,131)",
+ "rgb(10,136,186)",
+ "rgb(242,211,56)",
+ "rgb(242,143,56)",
+ "rgb(217,30,30)",
+]
+Rainbow = [
+ "rgb(150,0,90)",
+ "rgb(0,0,200)",
+ "rgb(0,25,255)",
+ "rgb(0,152,255)",
+ "rgb(44,255,150)",
+ "rgb(151,255,0)",
+ "rgb(255,234,0)",
+ "rgb(255,111,0)",
+ "rgb(255,0,0)",
+]
+RdBu = [
+ "rgb(5,10,172)",
+ "rgb(106,137,247)",
+ "rgb(190,190,190)",
+ "rgb(220,170,132)",
+ "rgb(230,145,90)",
+ "rgb(178,10,28)",
+]
+Reds = ["rgb(220,220,220)", "rgb(245,195,157)", "rgb(245,160,105)", "rgb(178,10,28)"]
+Viridis = [
+ "#440154",
+ "#48186a",
+ "#472d7b",
+ "#424086",
+ "#3b528b",
+ "#33638d",
+ "#2c728e",
+ "#26828e",
+ "#21918c",
+ "#1fa088",
+ "#28ae80",
+ "#3fbc73",
+ "#5ec962",
+ "#84d44b",
+ "#addc30",
+ "#d8e219",
+ "#fde725",
+]
+YlGnBu = [
+ "rgb(8,29,88)",
+ "rgb(37,52,148)",
+ "rgb(34,94,168)",
+ "rgb(29,145,192)",
+ "rgb(65,182,196)",
+ "rgb(127,205,187)",
+ "rgb(199,233,180)",
+ "rgb(237,248,217)",
+ "rgb(255,255,217)",
+]
+YlOrRd = [
+ "rgb(128,0,38)",
+ "rgb(189,0,38)",
+ "rgb(227,26,28)",
+ "rgb(252,78,42)",
+ "rgb(253,141,60)",
+ "rgb(254,178,76)",
+ "rgb(254,217,118)",
+ "rgb(255,237,160)",
+ "rgb(255,255,204)",
+]
+
+Blackbody_r = Blackbody[::-1]
+Bluered_r = Bluered[::-1]
+Blues_r = Blues[::-1]
+Cividis_r = Cividis[::-1]
+Earth_r = Earth[::-1]
+Electric_r = Electric[::-1]
+Greens_r = Greens[::-1]
+Greys_r = Greys[::-1]
+Hot_r = Hot[::-1]
+Jet_r = Jet[::-1]
+Picnic_r = Picnic[::-1]
+Portland_r = Portland[::-1]
+Rainbow_r = Rainbow[::-1]
+RdBu_r = RdBu[::-1]
+Reds_r = Reds[::-1]
+Viridis_r = Viridis[::-1]
+YlGnBu_r = YlGnBu[::-1]
+YlOrRd_r = YlOrRd[::-1]
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/colors/qualitative.py b/venv/lib/python3.8/site-packages/_plotly_utils/colors/qualitative.py
new file mode 100644
index 0000000..c26a557
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/colors/qualitative.py
@@ -0,0 +1,184 @@
+"""
+Qualitative color sequences are appropriate for data that has no natural ordering, such \
+as categories, colors, names, countries etc. The color sequences in this module are \
+mostly meant to be passed in as the `color_discrete_sequence` argument to various functions.
+"""
+
+from ._swatches import _swatches
+
+
+def swatches(template=None):
+ return _swatches(__name__, globals(), template)
+
+
+swatches.__doc__ = _swatches.__doc__
+
+Plotly = [
+ "#636EFA",
+ "#EF553B",
+ "#00CC96",
+ "#AB63FA",
+ "#FFA15A",
+ "#19D3F3",
+ "#FF6692",
+ "#B6E880",
+ "#FF97FF",
+ "#FECB52",
+]
+
+D3 = [
+ "#1F77B4",
+ "#FF7F0E",
+ "#2CA02C",
+ "#D62728",
+ "#9467BD",
+ "#8C564B",
+ "#E377C2",
+ "#7F7F7F",
+ "#BCBD22",
+ "#17BECF",
+]
+G10 = [
+ "#3366CC",
+ "#DC3912",
+ "#FF9900",
+ "#109618",
+ "#990099",
+ "#0099C6",
+ "#DD4477",
+ "#66AA00",
+ "#B82E2E",
+ "#316395",
+]
+T10 = [
+ "#4C78A8",
+ "#F58518",
+ "#E45756",
+ "#72B7B2",
+ "#54A24B",
+ "#EECA3B",
+ "#B279A2",
+ "#FF9DA6",
+ "#9D755D",
+ "#BAB0AC",
+]
+Alphabet = [
+ "#AA0DFE",
+ "#3283FE",
+ "#85660D",
+ "#782AB6",
+ "#565656",
+ "#1C8356",
+ "#16FF32",
+ "#F7E1A0",
+ "#E2E2E2",
+ "#1CBE4F",
+ "#C4451C",
+ "#DEA0FD",
+ "#FE00FA",
+ "#325A9B",
+ "#FEAF16",
+ "#F8A19F",
+ "#90AD1C",
+ "#F6222E",
+ "#1CFFCE",
+ "#2ED9FF",
+ "#B10DA1",
+ "#C075A6",
+ "#FC1CBF",
+ "#B00068",
+ "#FBE426",
+ "#FA0087",
+]
+Dark24 = [
+ "#2E91E5",
+ "#E15F99",
+ "#1CA71C",
+ "#FB0D0D",
+ "#DA16FF",
+ "#222A2A",
+ "#B68100",
+ "#750D86",
+ "#EB663B",
+ "#511CFB",
+ "#00A08B",
+ "#FB00D1",
+ "#FC0080",
+ "#B2828D",
+ "#6C7C32",
+ "#778AAE",
+ "#862A16",
+ "#A777F1",
+ "#620042",
+ "#1616A7",
+ "#DA60CA",
+ "#6C4516",
+ "#0D2A63",
+ "#AF0038",
+]
+Light24 = [
+ "#FD3216",
+ "#00FE35",
+ "#6A76FC",
+ "#FED4C4",
+ "#FE00CE",
+ "#0DF9FF",
+ "#F6F926",
+ "#FF9616",
+ "#479B55",
+ "#EEA6FB",
+ "#DC587D",
+ "#D626FF",
+ "#6E899C",
+ "#00B5F7",
+ "#B68E00",
+ "#C9FBE5",
+ "#FF0092",
+ "#22FFA7",
+ "#E3EE9E",
+ "#86CE00",
+ "#BC7196",
+ "#7E7DCD",
+ "#FC6955",
+ "#E48F72",
+]
+
+Alphabet_r = Alphabet[::-1]
+D3_r = D3[::-1]
+Dark24_r = Dark24[::-1]
+G10_r = G10[::-1]
+Light24_r = Light24[::-1]
+Plotly_r = Plotly[::-1]
+T10_r = T10[::-1]
+
+from .colorbrewer import ( # noqa: E402 F401
+ Set1,
+ Pastel1,
+ Dark2,
+ Set2,
+ Pastel2,
+ Set3,
+ Set1_r,
+ Pastel1_r,
+ Dark2_r,
+ Set2_r,
+ Pastel2_r,
+ Set3_r,
+)
+from .carto import ( # noqa: E402 F401
+ Antique,
+ Bold,
+ Pastel,
+ Prism,
+ Safe,
+ Vivid,
+ Antique_r,
+ Bold_r,
+ Pastel_r,
+ Prism_r,
+ Safe_r,
+ Vivid_r,
+)
+
+
+__all__ = ["swatches"]
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/colors/sequential.py b/venv/lib/python3.8/site-packages/_plotly_utils/colors/sequential.py
new file mode 100644
index 0000000..0e9ccf6
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/colors/sequential.py
@@ -0,0 +1,257 @@
+"""
+Sequential color scales are appropriate for most continuous data, but in some cases it \
+can be helpful to use a `plotly.colors.diverging` or \
+`plotly.colors.cyclical` scale instead. The color scales in this module are \
+mostly meant to be passed in as the `color_continuous_scale` argument to various functions.
+"""
+
+from ._swatches import _swatches, _swatches_continuous
+
+
+def swatches(template=None):
+ return _swatches(__name__, globals(), template)
+
+
+swatches.__doc__ = _swatches.__doc__
+
+
+def swatches_continuous(template=None):
+ return _swatches_continuous(__name__, globals(), template)
+
+
+swatches_continuous.__doc__ = _swatches_continuous.__doc__
+
+Plotly3 = [
+ "#0508b8",
+ "#1910d8",
+ "#3c19f0",
+ "#6b1cfb",
+ "#981cfd",
+ "#bf1cfd",
+ "#dd2bfd",
+ "#f246fe",
+ "#fc67fd",
+ "#fe88fc",
+ "#fea5fd",
+ "#febefe",
+ "#fec3fe",
+]
+
+Viridis = [
+ "#440154",
+ "#482878",
+ "#3e4989",
+ "#31688e",
+ "#26828e",
+ "#1f9e89",
+ "#35b779",
+ "#6ece58",
+ "#b5de2b",
+ "#fde725",
+]
+Cividis = [
+ "#00224e",
+ "#123570",
+ "#3b496c",
+ "#575d6d",
+ "#707173",
+ "#8a8678",
+ "#a59c74",
+ "#c3b369",
+ "#e1cc55",
+ "#fee838",
+]
+
+Inferno = [
+ "#000004",
+ "#1b0c41",
+ "#4a0c6b",
+ "#781c6d",
+ "#a52c60",
+ "#cf4446",
+ "#ed6925",
+ "#fb9b06",
+ "#f7d13d",
+ "#fcffa4",
+]
+Magma = [
+ "#000004",
+ "#180f3d",
+ "#440f76",
+ "#721f81",
+ "#9e2f7f",
+ "#cd4071",
+ "#f1605d",
+ "#fd9668",
+ "#feca8d",
+ "#fcfdbf",
+]
+Plasma = [
+ "#0d0887",
+ "#46039f",
+ "#7201a8",
+ "#9c179e",
+ "#bd3786",
+ "#d8576b",
+ "#ed7953",
+ "#fb9f3a",
+ "#fdca26",
+ "#f0f921",
+]
+Turbo = [
+ "#30123b",
+ "#4145ab",
+ "#4675ed",
+ "#39a2fc",
+ "#1bcfd4",
+ "#24eca6",
+ "#61fc6c",
+ "#a4fc3b",
+ "#d1e834",
+ "#f3c63a",
+ "#fe9b2d",
+ "#f36315",
+ "#d93806",
+ "#b11901",
+ "#7a0402",
+]
+
+Cividis_r = Cividis[::-1]
+Inferno_r = Inferno[::-1]
+Magma_r = Magma[::-1]
+Plasma_r = Plasma[::-1]
+Plotly3_r = Plotly3[::-1]
+Turbo_r = Turbo[::-1]
+Viridis_r = Viridis[::-1]
+
+from .plotlyjs import ( # noqa: E402 F401
+ Blackbody,
+ Bluered,
+ Electric,
+ Hot,
+ Jet,
+ Rainbow,
+ Blackbody_r,
+ Bluered_r,
+ Electric_r,
+ Hot_r,
+ Jet_r,
+ Rainbow_r,
+)
+
+from .colorbrewer import ( # noqa: E402 F401
+ Blues,
+ BuGn,
+ BuPu,
+ GnBu,
+ Greens,
+ Greys,
+ OrRd,
+ Oranges,
+ PuBu,
+ PuBuGn,
+ PuRd,
+ Purples,
+ RdBu,
+ RdPu,
+ Reds,
+ YlGn,
+ YlGnBu,
+ YlOrBr,
+ YlOrRd,
+ Blues_r,
+ BuGn_r,
+ BuPu_r,
+ GnBu_r,
+ Greens_r,
+ Greys_r,
+ OrRd_r,
+ Oranges_r,
+ PuBu_r,
+ PuBuGn_r,
+ PuRd_r,
+ Purples_r,
+ RdBu_r,
+ RdPu_r,
+ Reds_r,
+ YlGn_r,
+ YlGnBu_r,
+ YlOrBr_r,
+ YlOrRd_r,
+)
+
+from .cmocean import ( # noqa: E402 F401
+ turbid,
+ thermal,
+ haline,
+ solar,
+ ice,
+ gray,
+ deep,
+ dense,
+ algae,
+ matter,
+ speed,
+ amp,
+ tempo,
+ turbid_r,
+ thermal_r,
+ haline_r,
+ solar_r,
+ ice_r,
+ gray_r,
+ deep_r,
+ dense_r,
+ algae_r,
+ matter_r,
+ speed_r,
+ amp_r,
+ tempo_r,
+)
+
+from .carto import ( # noqa: E402 F401
+ Burg,
+ Burgyl,
+ Redor,
+ Oryel,
+ Peach,
+ Pinkyl,
+ Mint,
+ Blugrn,
+ Darkmint,
+ Emrld,
+ Aggrnyl,
+ Bluyl,
+ Teal,
+ Tealgrn,
+ Purp,
+ Purpor,
+ Sunset,
+ Magenta,
+ Sunsetdark,
+ Agsunset,
+ Brwnyl,
+ Burg_r,
+ Burgyl_r,
+ Redor_r,
+ Oryel_r,
+ Peach_r,
+ Pinkyl_r,
+ Mint_r,
+ Blugrn_r,
+ Darkmint_r,
+ Emrld_r,
+ Aggrnyl_r,
+ Bluyl_r,
+ Teal_r,
+ Tealgrn_r,
+ Purp_r,
+ Purpor_r,
+ Sunset_r,
+ Magenta_r,
+ Sunsetdark_r,
+ Agsunset_r,
+ Brwnyl_r,
+)
+
+__all__ = ["swatches"]
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/data_utils.py b/venv/lib/python3.8/site-packages/_plotly_utils/data_utils.py
new file mode 100644
index 0000000..5fb05b0
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/data_utils.py
@@ -0,0 +1,75 @@
+from io import BytesIO
+import base64
+from .png import Writer, from_array
+
+try:
+ from PIL import Image
+
+ pil_imported = True
+except ImportError:
+ pil_imported = False
+
+
+def image_array_to_data_uri(img, backend="pil", compression=4, ext="png"):
+ """Converts a numpy array of uint8 into a base64 png or jpg string.
+
+ Parameters
+ ----------
+ img: ndarray of uint8
+ array image
+ backend: str
+ 'auto', 'pil' or 'pypng'. If 'auto', Pillow is used if installed,
+ otherwise pypng.
+ compression: int, between 0 and 9
+ compression level to be passed to the backend
+ ext: str, 'png' or 'jpg'
+ compression format used to generate b64 string
+ """
+ # PIL and pypng error messages are quite obscure so we catch invalid compression values
+ if compression < 0 or compression > 9:
+ raise ValueError("compression level must be between 0 and 9.")
+ alpha = False
+ if img.ndim == 2:
+ mode = "L"
+ elif img.ndim == 3 and img.shape[-1] == 3:
+ mode = "RGB"
+ elif img.ndim == 3 and img.shape[-1] == 4:
+ mode = "RGBA"
+ alpha = True
+ else:
+ raise ValueError("Invalid image shape")
+ if backend == "auto":
+ backend = "pil" if pil_imported else "pypng"
+ if ext != "png" and backend != "pil":
+ raise ValueError("jpg binary strings are only available with PIL backend")
+
+ if backend == "pypng":
+ ndim = img.ndim
+ sh = img.shape
+ if ndim == 3:
+ img = img.reshape((sh[0], sh[1] * sh[2]))
+ w = Writer(
+ sh[1], sh[0], greyscale=(ndim == 2), alpha=alpha, compression=compression
+ )
+ img_png = from_array(img, mode=mode)
+ prefix = "data:image/png;base64,"
+ with BytesIO() as stream:
+ w.write(stream, img_png.rows)
+ base64_string = prefix + base64.b64encode(stream.getvalue()).decode("utf-8")
+ else: # pil
+ if not pil_imported:
+ raise ImportError(
+ "pillow needs to be installed to use `backend='pil'. Please"
+ "install pillow or use `backend='pypng'."
+ )
+ pil_img = Image.fromarray(img)
+ if ext == "jpg" or ext == "jpeg":
+ prefix = "data:image/jpeg;base64,"
+ ext = "jpeg"
+ else:
+ prefix = "data:image/png;base64,"
+ ext = "png"
+ with BytesIO() as stream:
+ pil_img.save(stream, format=ext, compress_level=compression)
+ base64_string = prefix + base64.b64encode(stream.getvalue()).decode("utf-8")
+ return base64_string
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/exceptions.py b/venv/lib/python3.8/site-packages/_plotly_utils/exceptions.py
new file mode 100644
index 0000000..836ef59
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/exceptions.py
@@ -0,0 +1,97 @@
+class PlotlyError(Exception):
+ pass
+
+
+class PlotlyEmptyDataError(PlotlyError):
+ pass
+
+
+class PlotlyGraphObjectError(PlotlyError):
+ def __init__(self, message="", path=(), notes=()):
+ """
+ General graph object error for validation failures.
+
+ :param (str|unicode) message: The error message.
+ :param (iterable) path: A path pointing to the error.
+ :param notes: Add additional notes, but keep default exception message.
+
+ """
+ self.message = message
+ self.plain_message = message # for backwards compat
+ self.path = list(path)
+ self.notes = notes
+ super(PlotlyGraphObjectError, self).__init__(message)
+
+ def __str__(self):
+ """This is called by Python to present the error message."""
+ format_dict = {
+ "message": self.message,
+ "path": "[" + "][".join(repr(k) for k in self.path) + "]",
+ "notes": "\n".join(self.notes),
+ }
+ return "{message}\n\nPath To Error: {path}\n\n{notes}".format(**format_dict)
+
+
+class PlotlyDictKeyError(PlotlyGraphObjectError):
+ def __init__(self, obj, path, notes=()):
+ """See PlotlyGraphObjectError.__init__ for param docs."""
+ format_dict = {"attribute": path[-1], "object_name": obj._name}
+ message = "'{attribute}' is not allowed in '{object_name}'".format(
+ **format_dict
+ )
+ notes = [obj.help(return_help=True)] + list(notes)
+ super(PlotlyDictKeyError, self).__init__(
+ message=message, path=path, notes=notes
+ )
+
+
+class PlotlyDictValueError(PlotlyGraphObjectError):
+ def __init__(self, obj, path, notes=()):
+ """See PlotlyGraphObjectError.__init__ for param docs."""
+ format_dict = {"attribute": path[-1], "object_name": obj._name}
+ message = "'{attribute}' has invalid value inside '{object_name}'".format(
+ **format_dict
+ )
+ notes = [obj.help(path[-1], return_help=True)] + list(notes)
+ super(PlotlyDictValueError, self).__init__(
+ message=message, notes=notes, path=path
+ )
+
+
+class PlotlyListEntryError(PlotlyGraphObjectError):
+ def __init__(self, obj, path, notes=()):
+ """See PlotlyGraphObjectError.__init__ for param docs."""
+ format_dict = {"index": path[-1], "object_name": obj._name}
+ message = "Invalid entry found in '{object_name}' at index, '{index}'".format(
+ **format_dict
+ )
+ notes = [obj.help(return_help=True)] + list(notes)
+ super(PlotlyListEntryError, self).__init__(
+ message=message, path=path, notes=notes
+ )
+
+
+class PlotlyDataTypeError(PlotlyGraphObjectError):
+ def __init__(self, obj, path, notes=()):
+ """See PlotlyGraphObjectError.__init__ for param docs."""
+ format_dict = {"index": path[-1], "object_name": obj._name}
+ message = "Invalid entry found in '{object_name}' at index, '{index}'".format(
+ **format_dict
+ )
+ note = "It's invalid because it doesn't contain a valid 'type' value."
+ notes = [note] + list(notes)
+ super(PlotlyDataTypeError, self).__init__(
+ message=message, path=path, notes=notes
+ )
+
+
+class PlotlyKeyError(KeyError):
+ """
+ KeyErrors are not printed as beautifully as other errors (this is so that
+ {}[''] prints "KeyError: ''" and not "KeyError:"). So here we use
+ LookupError's __str__ to make a PlotlyKeyError object which will print nicer
+ error messages for KeyErrors.
+ """
+
+ def __str__(self):
+ return LookupError.__str__(self)
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/files.py b/venv/lib/python3.8/site-packages/_plotly_utils/files.py
new file mode 100644
index 0000000..68d11bd
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/files.py
@@ -0,0 +1,37 @@
+import os
+
+PLOTLY_DIR = os.environ.get(
+ "PLOTLY_DIR", os.path.join(os.path.expanduser("~"), ".plotly")
+)
+TEST_FILE = os.path.join(PLOTLY_DIR, ".permission_test")
+
+
+def _permissions():
+ try:
+ if not os.path.exists(PLOTLY_DIR):
+ try:
+ os.mkdir(PLOTLY_DIR)
+ except Exception:
+ # in case of race
+ if not os.path.isdir(PLOTLY_DIR):
+ raise
+ with open(TEST_FILE, "w") as f:
+ f.write("testing\n")
+ try:
+ os.remove(TEST_FILE)
+ except Exception:
+ pass
+ return True
+ except Exception: # Do not trap KeyboardInterrupt.
+ return False
+
+
+_file_permissions = None
+
+
+def ensure_writable_plotly_dir():
+ # Cache permissions status
+ global _file_permissions
+ if _file_permissions is None:
+ _file_permissions = _permissions()
+ return _file_permissions
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/importers.py b/venv/lib/python3.8/site-packages/_plotly_utils/importers.py
new file mode 100644
index 0000000..20c77c1
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/importers.py
@@ -0,0 +1,50 @@
+import importlib
+
+
+def relative_import(parent_name, rel_modules=(), rel_classes=()):
+ """
+ Helper function to import submodules lazily in Python 3.7+
+
+ Parameters
+ ----------
+ rel_modules: list of str
+ list of submodules to import, of the form .submodule
+ rel_classes: list of str
+ list of submodule classes/variables to import, of the form ._submodule.Foo
+
+ Returns
+ -------
+ tuple
+ Tuple that should be assigned to __all__, __getattr__ in the caller
+ """
+ module_names = {rel_module.split(".")[-1]: rel_module for rel_module in rel_modules}
+ class_names = {rel_path.split(".")[-1]: rel_path for rel_path in rel_classes}
+
+ def __getattr__(import_name):
+ # In Python 3.7+, lazy import submodules
+
+ # Check for submodule
+ if import_name in module_names:
+ rel_import = module_names[import_name]
+ return importlib.import_module(rel_import, parent_name)
+
+ # Check for submodule class
+ if import_name in class_names:
+ rel_path_parts = class_names[import_name].split(".")
+ rel_module = ".".join(rel_path_parts[:-1])
+ class_name = import_name
+ class_module = importlib.import_module(rel_module, parent_name)
+ return getattr(class_module, class_name)
+
+ raise AttributeError(
+ "module {__name__!r} has no attribute {name!r}".format(
+ name=import_name, __name__=parent_name
+ )
+ )
+
+ __all__ = list(module_names) + list(class_names)
+
+ def __dir__():
+ return __all__
+
+ return __all__, __getattr__, __dir__
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/optional_imports.py b/venv/lib/python3.8/site-packages/_plotly_utils/optional_imports.py
new file mode 100644
index 0000000..a31e087
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/optional_imports.py
@@ -0,0 +1,36 @@
+"""
+Stand-alone module to provide information about whether optional deps exist.
+
+"""
+
+from importlib import import_module
+import logging
+import sys
+
+logger = logging.getLogger(__name__)
+_not_importable = set()
+
+
+def get_module(name, should_load=True):
+ """
+ Return module or None. Absolute import is required.
+
+ :param (str) name: Dot-separated module path. E.g., 'scipy.stats'.
+ :raise: (ImportError) Only when exc_msg is defined.
+ :return: (module|None) If import succeeds, the module will be returned.
+
+ """
+ if not should_load:
+ return sys.modules.get(name, None)
+
+ if name not in _not_importable:
+ try:
+ return import_module(name)
+ except ImportError:
+ _not_importable.add(name)
+ except Exception:
+ _not_importable.add(name)
+ msg = f"Error importing optional module {name}"
+ logger.exception(msg)
+
+ return None
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/png.py b/venv/lib/python3.8/site-packages/_plotly_utils/png.py
new file mode 100644
index 0000000..3f9e585
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/png.py
@@ -0,0 +1,2350 @@
+#!/usr/bin/env python
+
+# Vendored code from pypng https://github.com/drj11/pypng
+# png.py - PNG encoder/decoder in pure Python
+#
+# Copyright (C) 2006 Johann C. Rocholl <johann@browsershots.org>
+# Portions Copyright (C) 2009 David Jones <drj@pobox.com>
+# And probably portions Copyright (C) 2006 Nicko van Someren <nicko@nicko.org>
+#
+# Original concept by Johann C. Rocholl.
+#
+# LICENCE (MIT)
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation files
+# (the "Software"), to deal in the Software without restriction,
+# including without limitation the rights to use, copy, modify, merge,
+# publish, distribute, sublicense, and/or sell copies of the Software,
+# and to permit persons to whom the Software is furnished to do so,
+# subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""
+The ``png`` module can read and write PNG files.
+
+Installation and Overview
+-------------------------
+
+``pip install pypng``
+
+For help, type ``import png; help(png)`` in your python interpreter.
+
+A good place to start is the :class:`Reader` and :class:`Writer` classes.
+
+Coverage of PNG formats is fairly complete;
+all allowable bit depths (1/2/4/8/16/24/32/48/64 bits per pixel) and
+colour combinations are supported:
+
+- greyscale (1/2/4/8/16 bit);
+- RGB, RGBA, LA (greyscale with alpha) with 8/16 bits per channel;
+- colour mapped images (1/2/4/8 bit).
+
+Interlaced images,
+which support a progressive display when downloading,
+are supported for both reading and writing.
+
+A number of optional chunks can be specified (when writing)
+and understood (when reading): ``tRNS``, ``bKGD``, ``gAMA``.
+
+The ``sBIT`` chunk can be used to specify precision for
+non-native bit depths.
+
+Requires Python 3.5 or higher.
+Installation is trivial,
+but see the ``README.txt`` file (with the source distribution) for details.
+
+Full use of all features will need some reading of the PNG specification
+http://www.w3.org/TR/2003/REC-PNG-20031110/.
+
+The package also comes with command line utilities.
+
+- ``pripamtopng`` converts
+ `Netpbm <http://netpbm.sourceforge.net/>`_ PAM/PNM files to PNG;
+- ``pripngtopam`` converts PNG to file PAM/PNM.
+
+There are a few more for simple PNG manipulations.
+
+Spelling and Terminology
+------------------------
+
+Generally British English spelling is used in the documentation.
+So that's "greyscale" and "colour".
+This not only matches the author's native language,
+it's also used by the PNG specification.
+
+Colour Models
+-------------
+
+The major colour models supported by PNG (and hence by PyPNG) are:
+
+- greyscale;
+- greyscale--alpha;
+- RGB;
+- RGB--alpha.
+
+Also referred to using the abbreviations: L, LA, RGB, RGBA.
+Each letter codes a single channel:
+*L* is for Luminance or Luma or Lightness (greyscale images);
+*A* stands for Alpha, the opacity channel
+(used for transparency effects, but higher values are more opaque,
+so it makes sense to call it opacity);
+*R*, *G*, *B* stand for Red, Green, Blue (colour image).
+
+Lists, arrays, sequences, and so on
+-----------------------------------
+
+When getting pixel data out of this module (reading) and
+presenting data to this module (writing) there are
+a number of ways the data could be represented as a Python value.
+
+The preferred format is a sequence of *rows*,
+which each row being a sequence of *values*.
+In this format, the values are in pixel order,
+with all the values from all the pixels in a row
+being concatenated into a single sequence for that row.
+
+Consider an image that is 3 pixels wide by 2 pixels high, and each pixel
+has RGB components:
+
+Sequence of rows::
+
+ list([R,G,B, R,G,B, R,G,B],
+ [R,G,B, R,G,B, R,G,B])
+
+Each row appears as its own list,
+but the pixels are flattened so that three values for one pixel
+simply follow the three values for the previous pixel.
+
+This is the preferred format because
+it provides a good compromise between space and convenience.
+PyPNG regards itself as at liberty to replace any sequence type with
+any sufficiently compatible other sequence type;
+in practice each row is an array (``bytearray`` or ``array.array``).
+
+To allow streaming the outer list is sometimes
+an iterator rather than an explicit list.
+
+An alternative format is a single array holding all the values.
+
+Array of values::
+
+ [R,G,B, R,G,B, R,G,B,
+ R,G,B, R,G,B, R,G,B]
+
+The entire image is one single giant sequence of colour values.
+Generally an array will be used (to save space), not a list.
+
+The top row comes first,
+and within each row the pixels are ordered from left-to-right.
+Within a pixel the values appear in the order R-G-B-A
+(or L-A for greyscale--alpha).
+
+There is another format, which should only be used with caution.
+It is mentioned because it is used internally,
+is close to what lies inside a PNG file itself,
+and has some support from the public API.
+This format is called *packed*.
+When packed, each row is a sequence of bytes (integers from 0 to 255),
+just as it is before PNG scanline filtering is applied.
+When the bit depth is 8 this is the same as a sequence of rows;
+when the bit depth is less than 8 (1, 2 and 4),
+several pixels are packed into each byte;
+when the bit depth is 16 each pixel value is decomposed into 2 bytes
+(and `packed` is a misnomer).
+This format is used by the :meth:`Writer.write_packed` method.
+It isn't usually a convenient format,
+but may be just right if the source data for
+the PNG image comes from something that uses a similar format
+(for example, 1-bit BMPs, or another PNG file).
+"""
+
+__version__ = "0.0.20"
+
+import collections
+import io # For io.BytesIO
+import itertools
+import math
+
+# http://www.python.org/doc/2.4.4/lib/module-operator.html
+import operator
+import re
+import struct
+import sys
+
+# http://www.python.org/doc/2.4.4/lib/module-warnings.html
+import warnings
+import zlib
+
+from array import array
+
+
+__all__ = ["Image", "Reader", "Writer", "write_chunks", "from_array"]
+
+
+# The PNG signature.
+# http://www.w3.org/TR/PNG/#5PNG-file-signature
+signature = struct.pack("8B", 137, 80, 78, 71, 13, 10, 26, 10)
+
+# The xstart, ystart, xstep, ystep for the Adam7 interlace passes.
+adam7 = (
+ (0, 0, 8, 8),
+ (4, 0, 8, 8),
+ (0, 4, 4, 8),
+ (2, 0, 4, 4),
+ (0, 2, 2, 4),
+ (1, 0, 2, 2),
+ (0, 1, 1, 2),
+)
+
+
+def adam7_generate(width, height):
+ """
+ Generate the coordinates for the reduced scanlines
+ of an Adam7 interlaced image
+ of size `width` by `height` pixels.
+
+ Yields a generator for each pass,
+ and each pass generator yields a series of (x, y, xstep) triples,
+ each one identifying a reduced scanline consisting of
+ pixels starting at (x, y) and taking every xstep pixel to the right.
+ """
+
+ for xstart, ystart, xstep, ystep in adam7:
+ if xstart >= width:
+ continue
+ yield ((xstart, y, xstep) for y in range(ystart, height, ystep))
+
+
+# Models the 'pHYs' chunk (used by the Reader)
+Resolution = collections.namedtuple("_Resolution", "x y unit_is_meter")
+
+
+def group(s, n):
+ return list(zip(*[iter(s)] * n))
+
+
+def isarray(x):
+ return isinstance(x, array)
+
+
+def check_palette(palette):
+ """
+ Check a palette argument (to the :class:`Writer` class) for validity.
+ Returns the palette as a list if okay;
+ raises an exception otherwise.
+ """
+
+ # None is the default and is allowed.
+ if palette is None:
+ return None
+
+ p = list(palette)
+ if not (0 < len(p) <= 256):
+ raise ProtocolError(
+ "a palette must have between 1 and 256 entries,"
+ " see https://www.w3.org/TR/PNG/#11PLTE"
+ )
+ seen_triple = False
+ for i, t in enumerate(p):
+ if len(t) not in (3, 4):
+ raise ProtocolError("palette entry %d: entries must be 3- or 4-tuples." % i)
+ if len(t) == 3:
+ seen_triple = True
+ if seen_triple and len(t) == 4:
+ raise ProtocolError(
+ "palette entry %d: all 4-tuples must precede all 3-tuples" % i
+ )
+ for x in t:
+ if int(x) != x or not (0 <= x <= 255):
+ raise ProtocolError(
+ "palette entry %d: values must be integer: 0 <= x <= 255" % i
+ )
+ return p
+
+
+def check_sizes(size, width, height):
+ """
+ Check that these arguments, if supplied, are consistent.
+ Return a (width, height) pair.
+ """
+
+ if not size:
+ return width, height
+
+ if len(size) != 2:
+ raise ProtocolError("size argument should be a pair (width, height)")
+ if width is not None and width != size[0]:
+ raise ProtocolError(
+ "size[0] (%r) and width (%r) should match when both are used."
+ % (size[0], width)
+ )
+ if height is not None and height != size[1]:
+ raise ProtocolError(
+ "size[1] (%r) and height (%r) should match when both are used."
+ % (size[1], height)
+ )
+ return size
+
+
+def check_color(c, greyscale, which):
+ """
+ Checks that a colour argument for transparent or background options
+ is the right form.
+ Returns the colour
+ (which, if it's a bare integer, is "corrected" to a 1-tuple).
+ """
+
+ if c is None:
+ return c
+ if greyscale:
+ try:
+ len(c)
+ except TypeError:
+ c = (c,)
+ if len(c) != 1:
+ raise ProtocolError("%s for greyscale must be 1-tuple" % which)
+ if not is_natural(c[0]):
+ raise ProtocolError("%s colour for greyscale must be integer" % which)
+ else:
+ if not (
+ len(c) == 3 and is_natural(c[0]) and is_natural(c[1]) and is_natural(c[2])
+ ):
+ raise ProtocolError("%s colour must be a triple of integers" % which)
+ return c
+
+
+class Error(Exception):
+ def __str__(self):
+ return self.__class__.__name__ + ": " + " ".join(self.args)
+
+
+class FormatError(Error):
+ """
+ Problem with input file format.
+ In other words, PNG file does not conform to
+ the specification in some way and is invalid.
+ """
+
+
+class ProtocolError(Error):
+ """
+ Problem with the way the programming interface has been used,
+ or the data presented to it.
+ """
+
+
+class ChunkError(FormatError):
+ pass
+
+
+class Default:
+ """The default for the greyscale paramter."""
+    """The default for the greyscale parameter."""
+
class Writer:
    """
    PNG encoder in pure Python.
    """

    def __init__(
        self,
        width=None,
        height=None,
        size=None,
        greyscale=Default,
        alpha=False,
        bitdepth=8,
        palette=None,
        transparent=None,
        background=None,
        gamma=None,
        compression=None,
        interlace=False,
        planes=None,
        colormap=None,
        maxval=None,
        chunk_limit=2**20,
        x_pixels_per_unit=None,
        y_pixels_per_unit=None,
        unit_is_meter=False,
    ):
        """
        Create a PNG encoder object.

        Arguments:

        width, height
          Image size in pixels, as two separate arguments.
        size
          Image size (w,h) in pixels, as single argument.
        greyscale
          Pixels are greyscale, not RGB.
        alpha
          Input data has alpha channel (RGBA or LA).
        bitdepth
          Bit depth: from 1 to 16 (for each channel).
        palette
          Create a palette for a colour mapped image (colour type 3).
        transparent
          Specify a transparent colour (create a ``tRNS`` chunk).
        background
          Specify a default background colour (create a ``bKGD`` chunk).
        gamma
          Specify a gamma value (create a ``gAMA`` chunk).
        compression
          zlib compression level: 0 (none) to 9 (more compressed);
          default: -1 or None.
        interlace
          Create an interlaced image.
        chunk_limit
          Write multiple ``IDAT`` chunks to save memory.
        x_pixels_per_unit
          Number of pixels a unit along the x axis (write a
          `pHYs` chunk).
        y_pixels_per_unit
          Number of pixels a unit along the y axis (write a
          `pHYs` chunk). Along with `x_pixel_unit`, this gives
          the pixel size ratio.
        unit_is_meter
          `True` to indicate that the unit (for the `pHYs`
          chunk) is metre.

        The image size (in pixels) can be specified either by using the
        `width` and `height` arguments, or with the single `size`
        argument.
        If `size` is used it should be a pair (*width*, *height*).

        The `greyscale` argument indicates whether input pixels
        are greyscale (when true), or colour (when false).
        The default is true unless `palette=` is used.

        The `alpha` argument (a boolean) specifies
        whether input pixels have an alpha channel (or not).

        `bitdepth` specifies the bit depth of the source pixel values.
        Each channel may have a different bit depth.
        Each source pixel must have values that are
        an integer between 0 and ``2**bitdepth-1``, where
        `bitdepth` is the bit depth for the corresponding channel.
        For example, 8-bit images have values between 0 and 255.
        PNG only stores images with bit depths of
        1,2,4,8, or 16 (the same for all channels).
        When `bitdepth` is not one of these values or where
        channels have different bit depths,
        the next highest valid bit depth is selected,
        and an ``sBIT`` (significant bits) chunk is generated
        that specifies the original precision of the source image.
        In this case the supplied pixel values will be rescaled to
        fit the range of the selected bit depth.

        The PNG file format supports many bit depth / colour model
        combinations, but not all.
        The details are somewhat arcane
        (refer to the PNG specification for full details).
        Briefly:
        Bit depths < 8 (1,2,4) are only allowed with greyscale and
        colour mapped images;
        colour mapped images cannot have bit depth 16.

        For colour mapped images
        (in other words, when the `palette` argument is specified)
        the `bitdepth` argument must match one of
        the valid PNG bit depths: 1, 2, 4, or 8.
        (It is valid to have a PNG image with a palette and
        an ``sBIT`` chunk, but the meaning is slightly different;
        it would be awkward to use the `bitdepth` argument for this.)

        The `palette` option, when specified,
        causes a colour mapped image to be created:
        the PNG colour type is set to 3;
        `greyscale` must not be true; `alpha` must not be true;
        `transparent` must not be set.
        The bit depth must be 1,2,4, or 8.
        When a colour mapped image is created,
        the pixel values are palette indexes and
        the `bitdepth` argument specifies the size of these indexes
        (not the size of the colour values in the palette).

        The palette argument value should be a sequence of 3- or
        4-tuples.
        3-tuples specify RGB palette entries;
        4-tuples specify RGBA palette entries.
        All the 4-tuples (if present) must come before all the 3-tuples.
        A ``PLTE`` chunk is created;
        if there are 4-tuples then a ``tRNS`` chunk is created as well.
        The ``PLTE`` chunk will contain all the RGB triples in the same
        sequence;
        the ``tRNS`` chunk will contain the alpha channel for
        all the 4-tuples, in the same sequence.
        Palette entries are always 8-bit.

        If specified, the `transparent` and `background` parameters must be
        a tuple with one element for each channel in the image.
        Either a 3-tuple of integer (RGB) values for a colour image, or
        a 1-tuple of a single integer for a greyscale image.

        If specified, the `gamma` parameter must be a positive number
        (generally, a `float`).
        A ``gAMA`` chunk will be created.
        Note that this will not change the values of the pixels as
        they appear in the PNG file,
        they are assumed to have already
        been converted appropriately for the gamma specified.

        The `compression` argument specifies the compression level to
        be used by the ``zlib`` module.
        Values from 1 to 9 (highest) specify compression.
        0 means no compression.
        -1 and ``None`` both mean that the ``zlib`` module uses
        the default level of compression (which is generally acceptable).

        If `interlace` is true then an interlaced image is created
        (using PNG's so far only interlace method, *Adam7*).
        This does not affect how the pixels should be passed in,
        rather it changes how they are arranged into the PNG file.
        On slow connexions interlaced images can be
        partially decoded by the browser to give
        a rough view of the image that is
        successively refined as more image data appears.

        .. note ::

          Enabling the `interlace` option requires the entire image
          to be processed in working memory.

        `chunk_limit` is used to limit the amount of memory used whilst
        compressing the image.
        In order to avoid using large amounts of memory,
        multiple ``IDAT`` chunks may be created.
        """

        # At the moment the `planes` argument is ignored;
        # its purpose is to act as a dummy so that
        # ``Writer(x, y, **info)`` works, where `info` is a dictionary
        # returned by Reader.read and friends.
        # Ditto for `colormap`; `maxval` is likewise accepted and
        # ignored (it is never referenced below).

        width, height = check_sizes(size, width, height)
        del size

        if not is_natural(width) or not is_natural(height):
            raise ProtocolError("width and height must be integers")
        if width <= 0 or height <= 0:
            raise ProtocolError("width and height must be greater than zero")
        # http://www.w3.org/TR/PNG/#7Integers-and-byte-order
        if width > 2**31 - 1 or height > 2**31 - 1:
            raise ProtocolError("width and height cannot exceed 2**31-1")

        if alpha and transparent is not None:
            raise ProtocolError("transparent colour not allowed with alpha channel")

        # bitdepth is either single integer, or tuple of integers.
        # Convert to tuple.
        try:
            len(bitdepth)
        except TypeError:
            bitdepth = (bitdepth,)
        for b in bitdepth:
            valid = is_natural(b) and 1 <= b <= 16
            if not valid:
                raise ProtocolError(
                    "each bitdepth %r must be a positive integer <= 16" % (bitdepth,)
                )

        # Calculate channels, and
        # expand bitdepth to be one element per channel.
        palette = check_palette(palette)
        alpha = bool(alpha)
        colormap = bool(palette)
        # `greyscale` defaults to true unless a palette was supplied.
        if greyscale is Default and palette:
            greyscale = False
        greyscale = bool(greyscale)
        if colormap:
            color_planes = 1
            planes = 1
        else:
            color_planes = (3, 1)[greyscale]
            planes = color_planes + alpha
        # A single bitdepth is replicated to every channel.
        if len(bitdepth) == 1:
            bitdepth *= planes

        bitdepth, self.rescale = check_bitdepth_rescale(
            palette, bitdepth, transparent, alpha, greyscale
        )

        # These are assertions, because above logic should have
        # corrected or raised all problematic cases.
        if bitdepth < 8:
            assert greyscale or palette
            assert not alpha
        if bitdepth > 8:
            assert not palette

        transparent = check_color(transparent, greyscale, "transparent")
        background = check_color(background, greyscale, "background")

        # It's important that the true boolean values
        # (greyscale, alpha, colormap, interlace) are converted
        # to bool because Iverson's convention is relied upon later on.
        self.width = width
        self.height = height
        self.transparent = transparent
        self.background = background
        self.gamma = gamma
        self.greyscale = greyscale
        self.alpha = alpha
        self.colormap = colormap
        self.bitdepth = int(bitdepth)
        self.compression = compression
        self.chunk_limit = chunk_limit
        self.interlace = bool(interlace)
        self.palette = palette
        self.x_pixels_per_unit = x_pixels_per_unit
        self.y_pixels_per_unit = y_pixels_per_unit
        self.unit_is_meter = bool(unit_is_meter)

        # PNG colour type: bit 2 = alpha, bit 1 = colour, bit 0 = palette.
        # http://www.w3.org/TR/PNG/#6Colour-values
        self.color_type = 4 * self.alpha + 2 * (not greyscale) + 1 * self.colormap
        assert self.color_type in (0, 2, 3, 4, 6)

        self.color_planes = color_planes
        self.planes = planes
        # :todo: fix for bitdepth < 8
        # NOTE: psize is the pixel size in bytes and may be a float.
        self.psize = (self.bitdepth / 8) * self.planes

    def write(self, outfile, rows):
        """
        Write a PNG image to the output file.
        `rows` should be an iterable that yields each row
        (each row is a sequence of values).
        The rows should be the rows of the original image,
        so there should be ``self.height`` rows of
        ``self.width * self.planes`` values.
        If `interlace` is specified (when creating the instance),
        then an interlaced PNG file will be written.
        Supply the rows in the normal image order;
        the interlacing is carried out internally.

        .. note ::

          Interlacing requires the entire image to be in working memory.
        """

        # Values per row
        vpr = self.width * self.planes

        def check_rows(rows):
            """
            Yield each row in rows,
            but check each row first (for correct width).
            """
            for i, row in enumerate(rows):
                try:
                    wrong_length = len(row) != vpr
                except TypeError:
                    # When using an itertools.ichain object or
                    # other generator not supporting __len__,
                    # we set this to False to skip the check.
                    wrong_length = False
                if wrong_length:
                    # Note: row numbers start at 0.
                    raise ProtocolError(
                        "Expected %d values but got %d values, in row %d"
                        % (vpr, len(row), i)
                    )
                yield row

        if self.interlace:
            # Interlacing needs the whole image in memory:
            # flatten all rows into one array and delegate.
            fmt = "BH"[self.bitdepth > 8]
            a = array(fmt, itertools.chain(*check_rows(rows)))
            return self.write_array(outfile, a)

        nrows = self.write_passes(outfile, check_rows(rows))
        if nrows != self.height:
            raise ProtocolError(
                "rows supplied (%d) does not match height (%d)" % (nrows, self.height)
            )

    def write_passes(self, outfile, rows):
        """
        Write a PNG image to the output file.

        Most users are expected to find the :meth:`write` or
        :meth:`write_array` method more convenient.

        The rows should be given to this method in the order that
        they appear in the output file.
        For straightlaced images, this is the usual top to bottom ordering.
        For interlaced images the rows should have been interlaced before
        passing them to this function.

        `rows` should be an iterable that yields each row
        (each row being a sequence of values).
        """

        # Ensure rows are scaled (to 4-/8-/16-bit),
        # and packed into bytes.

        if self.rescale:
            rows = rescale_rows(rows, self.rescale)

        if self.bitdepth < 8:
            rows = pack_rows(rows, self.bitdepth)
        elif self.bitdepth == 16:
            rows = unpack_rows(rows)

        return self.write_packed(outfile, rows)

    def write_packed(self, outfile, rows):
        """
        Write PNG file to `outfile`.
        `rows` should be an iterator that yields each packed row;
        a packed row being a sequence of packed bytes.

        The rows have a filter byte prefixed and
        are then compressed into one or more IDAT chunks.
        They are not processed any further,
        so if bitdepth is other than 1, 2, 4, 8, 16,
        the pixel values should have been scaled
        before passing them to this method.

        This method does work for interlaced images but it is best avoided.
        For interlaced images, the rows should be
        presented in the order that they appear in the file.

        Returns the number of rows written.
        """

        self.write_preamble(outfile)

        # http://www.w3.org/TR/PNG/#11IDAT
        if self.compression is not None:
            compressor = zlib.compressobj(self.compression)
        else:
            compressor = zlib.compressobj()

        # data accumulates bytes to be compressed for the IDAT chunk;
        # it's compressed when sufficiently large.
        data = bytearray()

        for i, row in enumerate(rows):
            # Add "None" filter type.
            # Currently, it's essential that this filter type be used
            # for every scanline as
            # we do not mark the first row of a reduced pass image;
            # that means we could accidentally compute
            # the wrong filtered scanline if we used
            # "up", "average", or "paeth" on such a line.
            data.append(0)
            data.extend(row)
            if len(data) > self.chunk_limit:
                compressed = compressor.compress(data)
                if len(compressed):
                    write_chunk(outfile, b"IDAT", compressed)
                data = bytearray()

        compressed = compressor.compress(bytes(data))
        flushed = compressor.flush()
        if len(compressed) or len(flushed):
            write_chunk(outfile, b"IDAT", compressed + flushed)
        # http://www.w3.org/TR/PNG/#11IEND
        write_chunk(outfile, b"IEND")
        # NOTE(review): if `rows` yields no rows at all, `i` is never
        # bound and this line raises NameError; callers are expected to
        # supply at least one row — TODO confirm.
        return i + 1

    def write_preamble(self, outfile):
        # Write the PNG signature and all chunks that precede IDAT
        # (IHDR and the optional gAMA/sBIT/PLTE/tRNS/bKGD/pHYs chunks).

        # http://www.w3.org/TR/PNG/#5PNG-file-signature
        outfile.write(signature)

        # http://www.w3.org/TR/PNG/#11IHDR
        write_chunk(
            outfile,
            b"IHDR",
            struct.pack(
                "!2I5B",
                self.width,
                self.height,
                self.bitdepth,
                self.color_type,
                0,
                0,
                self.interlace,
            ),
        )

        # See :chunk:order
        # http://www.w3.org/TR/PNG/#11gAMA
        if self.gamma is not None:
            write_chunk(
                outfile, b"gAMA", struct.pack("!L", int(round(self.gamma * 1e5)))
            )

        # See :chunk:order
        # http://www.w3.org/TR/PNG/#11sBIT
        if self.rescale:
            write_chunk(
                outfile,
                b"sBIT",
                struct.pack("%dB" % self.planes, *[s[0] for s in self.rescale]),
            )

        # :chunk:order: Without a palette (PLTE chunk),
        # ordering is relatively relaxed.
        # With one, gAMA chunk must precede PLTE chunk
        # which must precede tRNS and bKGD.
        # See http://www.w3.org/TR/PNG/#5ChunkOrdering
        if self.palette:
            p, t = make_palette_chunks(self.palette)
            write_chunk(outfile, b"PLTE", p)
            if t:
                # tRNS chunk is optional;
                # Only needed if palette entries have alpha.
                write_chunk(outfile, b"tRNS", t)

        # http://www.w3.org/TR/PNG/#11tRNS
        if self.transparent is not None:
            if self.greyscale:
                fmt = "!1H"
            else:
                fmt = "!3H"
            write_chunk(outfile, b"tRNS", struct.pack(fmt, *self.transparent))

        # http://www.w3.org/TR/PNG/#11bKGD
        if self.background is not None:
            if self.greyscale:
                fmt = "!1H"
            else:
                fmt = "!3H"
            write_chunk(outfile, b"bKGD", struct.pack(fmt, *self.background))

        # http://www.w3.org/TR/PNG/#11pHYs
        if self.x_pixels_per_unit is not None and self.y_pixels_per_unit is not None:
            tup = (
                self.x_pixels_per_unit,
                self.y_pixels_per_unit,
                int(self.unit_is_meter),
            )
            write_chunk(outfile, b"pHYs", struct.pack("!LLB", *tup))

    def write_array(self, outfile, pixels):
        """
        Write an array that holds all the image values
        as a PNG file on the output file.
        See also :meth:`write` method.
        """

        if self.interlace:
            if not isarray(pixels):
                # Coerce to array type
                fmt = "BH"[self.bitdepth > 8]
                pixels = array(fmt, pixels)
            self.write_passes(outfile, self.array_scanlines_interlace(pixels))
        else:
            self.write_passes(outfile, self.array_scanlines(pixels))

    def array_scanlines(self, pixels):
        """
        Generates rows (each a sequence of values) from
        a single array of values.
        """

        # Values per row
        vpr = self.width * self.planes
        stop = 0
        for y in range(self.height):
            start = stop
            stop = start + vpr
            yield pixels[start:stop]

    def array_scanlines_interlace(self, pixels):
        """
        Generator for interlaced scanlines from an array.
        `pixels` is the full source image as a single array of values.
        The generator yields each scanline of the reduced passes in turn,
        each scanline being a sequence of values.
        """

        # http://www.w3.org/TR/PNG/#8InterlaceMethods
        # Array type.
        fmt = "BH"[self.bitdepth > 8]
        # Value per row
        vpr = self.width * self.planes

        # Each iteration generates a scanline starting at (x, y)
        # and consisting of every xstep pixels.
        for lines in adam7_generate(self.width, self.height):
            for x, y, xstep in lines:
                # Pixels per row (of reduced image)
                ppr = int(math.ceil((self.width - x) / float(xstep)))
                # Values per row (of reduced image)
                reduced_row_len = ppr * self.planes
                if xstep == 1:
                    # Easy case: line is a simple slice.
                    offset = y * vpr
                    yield pixels[offset : offset + vpr]
                    continue
                # We have to step by xstep,
                # which we can do one plane at a time
                # using the step in Python slices.
                row = array(fmt)
                # There's no easier way to set the length of an array
                row.extend(pixels[0:reduced_row_len])
                offset = y * vpr + x * self.planes
                end_offset = (y + 1) * vpr
                skip = self.planes * xstep
                for i in range(self.planes):
                    row[i :: self.planes] = pixels[offset + i : end_offset : skip]
                yield row
+
+
def write_chunk(outfile, tag, data=b""):
    """
    Write a single PNG chunk to `outfile`:
    4-byte length, 4-byte tag, data, then the CRC-32 checksum
    covering tag and data.
    """

    data = bytes(data)
    # The CRC covers the tag and the data, but not the length.
    # http://www.w3.org/TR/PNG/#5Chunk-layout
    checksum = zlib.crc32(data, zlib.crc32(tag)) & 0xFFFFFFFF
    outfile.write(struct.pack("!I", len(data)))
    outfile.write(tag)
    outfile.write(data)
    outfile.write(struct.pack("!I", checksum))
+
+
def write_chunks(out, chunks):
    """
    Create a complete PNG file by writing the PNG signature
    followed by every chunk in `chunks`.
    Each element of `chunks` is an argument tuple for `write_chunk`:
    a tag, optionally followed by data.
    """

    out.write(signature)
    for chunk_args in chunks:
        write_chunk(out, *chunk_args)
+
+
def rescale_rows(rows, rescale):
    """
    Yield a fresh copy of each row in `rows` with every sample
    rescaled.
    `rescale` is a list with one
    (source_bitdepth, target_bitdepth) pair per channel.
    """

    # Per-channel multiplier mapping the source range onto the target.
    factors = [
        float(2 ** target - 1) / float(2 ** source - 1) for source, target in rescale
    ]

    # All channels must share a single target bit depth.
    targets = set(target for _, target in rescale)
    assert len(targets) == 1
    (target_bitdepth,) = targets
    typecode = "BH"[target_bitdepth > 8]

    n_channels = len(rescale)

    for row in rows:
        scaled = array(typecode, iter(row))
        for channel, factor in enumerate(factors):
            scaled[channel::n_channels] = array(
                typecode,
                (int(round(factor * value)) for value in row[channel::n_channels]),
            )
        yield scaled
+
+
def pack_rows(rows, bitdepth):
    """
    Yield each row packed into a bytearray,
    with several sub-8-bit samples per byte.
    """

    assert bitdepth < 8
    assert 8 % bitdepth == 0

    # Samples per byte.
    spb = 8 // bitdepth

    def pack_byte(samples):
        """Fold one group of `spb` samples into a single byte value."""
        acc = 0
        for sample in samples:
            acc = (acc << bitdepth) + sample
        return acc

    for row in rows:
        raw = bytearray(row)
        # Pad with zero samples up to a whole number of spb-sized groups.
        raw.extend([0] * (-len(raw) % spb))
        # Each group holds the samples for one output byte.
        yield bytearray(pack_byte(block) for block in group(raw, spb))
+
+
def unpack_rows(rows):
    """
    Yield each row as a bytearray,
    expanding every 16-bit value into a big-endian byte pair.
    """
    for row in rows:
        yield bytearray(struct.pack("!%dH" % len(row), *row))
+
+
def make_palette_chunks(palette):
    """
    Build the data for a ``PLTE`` chunk and, when any palette entry
    carries an alpha value, for a ``tRNS`` chunk as well.
    Return the pair (*plte*, *trns*);
    *trns* is ``None`` when no ``tRNS`` chunk is needed.
    """

    plte = bytearray()
    trns = bytearray()

    for entry in palette:
        plte.extend(entry[0:3])
        if len(entry) > 3:
            trns.append(entry[3])
    return (plte, trns) if trns else (plte, None)
+
+
def check_bitdepth_rescale(palette, bitdepth, transparent, alpha, greyscale):
    """
    Validate the per-channel `bitdepth` tuple against the colour model
    and return a (bitdepth, rescale) pair:
    the PNG bit depth to use, and either ``None`` or a list of
    (source_bitdepth, target_bitdepth) pairs (one per channel) when
    the samples must be rescaled (an ``sBIT`` chunk case).
    """

    if palette:
        # Colour mapped: one bit depth, one of the PNG palette depths,
        # and no transparent/alpha/greyscale options.
        if len(bitdepth) != 1:
            raise ProtocolError("with palette, only a single bitdepth may be used")
        (bitdepth,) = bitdepth
        if bitdepth not in (1, 2, 4, 8):
            raise ProtocolError("with palette, bitdepth must be 1, 2, 4, or 8")
        if transparent is not None:
            raise ProtocolError("transparent and palette not compatible")
        if alpha:
            raise ProtocolError("alpha and palette not compatible")
        if greyscale:
            raise ProtocolError("greyscale and palette not compatible")
        return bitdepth, None

    # No palette; decide whether an sBIT rescale is needed.

    if greyscale and not alpha:
        # Single channel, L.
        (depth,) = bitdepth
        if depth in (1, 2, 4, 8, 16):
            return depth, None
        if depth > 8:
            target = 16
        elif depth == 3:
            target = 4
        else:
            assert depth in (5, 6, 7)
            target = 8
        return target, [(depth, target)]

    assert alpha or not greyscale

    distinct = tuple(set(bitdepth))
    if distinct in [(8,), (16,)]:
        # All channels already at a storable depth; no sBIT required.
        (depth,) = distinct
        return depth, None

    target = 16 if max(bitdepth) > 8 else 8
    return target, [(b, target) for b in bitdepth]
+
+
# Regex for decoding a PIL-style mode string:
# group 1 is the colour mode ("L", "LA", "RGB", "RGBA"),
# group 2 is the optional bit depth that follows the ";".
RegexModeDecode = re.compile("(LA?|RGBA?);?([0-9]*)", flags=re.IGNORECASE)
+
+
def from_array(a, mode=None, info={}):
    """
    Create a PNG :class:`Image` object from a 2-dimensional array.
    One application of this function is easy PIL-style saving:
    ``png.from_array(pixels, 'L').save('foo.png')``.

    Unless they are specified using the *info* parameter,
    the PNG's height and width are taken from the array size.
    The first axis is the height; the second axis is the
    ravelled width and channel index.
    The array is treated as a sequence of rows,
    each row being a sequence of values (``width*channels`` in number).
    So an RGB image that is 16 pixels high and 8 wide will
    occupy a 2-dimensional array that is 16x24
    (each row will be 8*3 = 24 sample values).

    *mode* is a string that specifies the image colour format in a
    PIL-style mode.  It can be:

    ``'L'``
      greyscale (1 channel)
    ``'LA'``
      greyscale with alpha (2 channel)
    ``'RGB'``
      colour image (3 channel)
    ``'RGBA'``
      colour image with alpha (4 channel)

    The mode string can also specify the bit depth
    (overriding how this function normally derives the bit depth,
    see below).
    Appending ``';16'`` to the mode will cause the PNG to be
    16 bits per channel;
    any decimal from 1 to 16 can be used to specify the bit depth.

    When a 2-dimensional array is used *mode* determines how many
    channels the image has, and so allows the width to be derived from
    the second array dimension.

    The array is expected to be a ``numpy`` array,
    but it can be any suitable Python sequence.
    For example, a list of lists can be used:
    ``png.from_array([[0, 255, 0], [255, 0, 255]], 'L')``.
    The exact rules are: ``len(a)`` gives the first dimension, height;
    ``len(a[0])`` gives the second dimension.
    It's slightly more complicated than that because
    an iterator of rows can be used, and it all still works.
    Using an iterator allows data to be streamed efficiently.

    The bit depth of the PNG is normally taken from
    the array element's datatype
    (but if *mode* specifies a bitdepth then that is used instead).
    The array element's datatype is determined in a way which
    is supposed to work both for ``numpy`` arrays and for Python
    ``array.array`` objects.
    A 1 byte datatype will give a bit depth of 8,
    a 2 byte datatype will give a bit depth of 16.
    If the datatype does not have an implicit size,
    like the above example where it is a plain Python list of lists,
    then a default of 8 is used.

    The *info* parameter is a dictionary that can
    be used to specify metadata (in the same style as
    the arguments to the :class:`png.Writer` class).
    For this function the keys that are useful are:

    height
      overrides the height derived from the array dimensions and
      allows *a* to be an iterable.
    width
      overrides the width derived from the array dimensions.
    bitdepth
      overrides the bit depth derived from the element datatype
      (but must match *mode* if that also specifies a bit depth).

    Generally anything specified in the *info* dictionary will
    override any implicit choices that this function would otherwise make,
    but must match any explicit ones.
    For example, if the *info* dictionary has a ``greyscale`` key then
    this must be true when mode is ``'L'`` or ``'LA'`` and
    false when mode is ``'RGB'`` or ``'RGBA'``.
    """

    # We abuse the *info* parameter by modifying it. Take a copy here.
    # (Also typechecks *info* to some extent).
    # NOTE: the mutable default `{}` is harmless here because it is
    # never modified -- all mutation happens on this copy.
    info = dict(info)

    # Syntax check mode string.
    match = RegexModeDecode.match(mode)
    if not match:
        raise Error("mode string should be 'RGB' or 'L;16' or similar.")

    # Split into the colour part ("L"/"LA"/"RGB"/"RGBA") and the
    # optional bit depth suffix.
    mode, bitdepth = match.groups()
    if bitdepth:
        bitdepth = int(bitdepth)

    # Colour format.
    if "greyscale" in info:
        if bool(info["greyscale"]) != ("L" in mode):
            raise ProtocolError("info['greyscale'] should match mode.")
    info["greyscale"] = "L" in mode

    alpha = "A" in mode
    if "alpha" in info:
        if bool(info["alpha"]) != alpha:
            raise ProtocolError("info['alpha'] should match mode.")
    info["alpha"] = alpha

    # Get bitdepth from *mode* if possible.
    if bitdepth:
        if info.get("bitdepth") and bitdepth != info["bitdepth"]:
            raise ProtocolError(
                "bitdepth (%d) should match bitdepth of info (%d)."
                % (bitdepth, info["bitdepth"])
            )
        info["bitdepth"] = bitdepth

    # Fill in and/or check entries in *info*.
    # Dimensions.
    width, height = check_sizes(info.get("size"), info.get("width"), info.get("height"))
    if width:
        info["width"] = width
    if height:
        info["height"] = height

    if "height" not in info:
        try:
            info["height"] = len(a)
        except TypeError:
            raise ProtocolError("len(a) does not work, supply info['height'] instead.")

    planes = len(mode)
    if "planes" in info:
        if info["planes"] != planes:
            raise Error("info['planes'] should match mode.")

    # In order to work out whether the array is 2D or 3D we need its
    # first row, which requires that we take a copy of its iterator.
    # We may also need the first row to derive width and bitdepth.
    a, t = itertools.tee(a)
    row = next(t)
    del t

    testelement = row
    if "width" not in info:
        width = len(row) // planes
        info["width"] = width

    if "bitdepth" not in info:
        try:
            dtype = testelement.dtype
            # goto the "else:" clause.  Sorry.
        except AttributeError:
            try:
                # Try a Python array.array.
                bitdepth = 8 * testelement.itemsize
            except AttributeError:
                # We can't determine it from the array element's datatype,
                # use a default of 8.
                bitdepth = 8
        else:
            # If we got here without exception,
            # we now assume that the array is a numpy array.
            if dtype.kind == "b":
                bitdepth = 1
            else:
                bitdepth = 8 * dtype.itemsize
        info["bitdepth"] = bitdepth

    # Everything the Writer needs must be present by now.
    for thing in ["width", "height", "bitdepth", "greyscale", "alpha"]:
        assert thing in info

    return Image(a, info)
+
+
# So that refugees from PIL feel more at home. Not documented.
fromarray = from_array
+
+
class Image:
    """
    A PNG image.
    Create one from an array of pixels with :meth:`png.from_array`;
    write it out with the :meth:`save` or :meth:`write` method.
    """

    def __init__(self, rows, info):
        """
        .. note ::

          The constructor is not public. Please do not call it.
        """

        self.rows = rows
        self.info = info

    def save(self, file):
        """Save the image to the named *file*.

        See `.write()` if you already have an open file object.

        Because the source data is streamed,
        this method can in general only be called once;
        after the first call the data cannot be streamed again.
        """

        writer = Writer(**self.info)
        with open(file, "wb") as out:
            writer.write(out, self.rows)

    def write(self, file):
        """Write the image to the open file object.

        See `.save()` if you have a filename.

        Because the source data is streamed,
        this method can in general only be called once;
        after the first call the data cannot be streamed again.
        """

        writer = Writer(**self.info)
        writer.write(file, self.rows)
+
+
+class Reader:
+ """
+ Pure Python PNG decoder in pure Python.
+ """
+
+ def __init__(self, _guess=None, filename=None, file=None, bytes=None):
+ """
+ The constructor expects exactly one keyword argument.
+ If you supply a positional argument instead,
+ it will guess the input type.
+ Choose from the following keyword arguments:
+
+ filename
+ Name of input file (a PNG file).
+ file
+ A file-like object (object with a read() method).
+ bytes
+ ``bytes`` or ``bytearray`` with PNG data.
+
+ """
+ keywords_supplied = (
+ (_guess is not None)
+ + (filename is not None)
+ + (file is not None)
+ + (bytes is not None)
+ )
+ if keywords_supplied != 1:
+ raise TypeError("Reader() takes exactly 1 argument")
+
+ # Will be the first 8 bytes, later on. See validate_signature.
+ self.signature = None
+ self.transparent = None
+ # A pair of (len,type) if a chunk has been read but its data and
+ # checksum have not (in other words the file position is just
+ # past the 4 bytes that specify the chunk type).
+ # See preamble method for how this is used.
+ self.atchunk = None
+
+ if _guess is not None:
+ if isarray(_guess):
+ bytes = _guess
+ elif isinstance(_guess, str):
+ filename = _guess
+ elif hasattr(_guess, "read"):
+ file = _guess
+
+ if bytes is not None:
+ self.file = io.BytesIO(bytes)
+ elif filename is not None:
+ self.file = open(filename, "rb")
+ elif file is not None:
+ self.file = file
+ else:
+ raise ProtocolError("expecting filename, file or bytes array")
+
    def chunk(self, lenient=False):
        """
        Read the next PNG chunk from the input file;
        returns a (*type*, *data*) tuple.
        *type* is the chunk's type as a byte string
        (all PNG chunk types are 4 bytes long).
        *data* is the chunk's data content, as a byte string.

        If the optional `lenient` argument evaluates to `True`,
        checksum failures will raise warnings rather than exceptions.
        """

        self.validate_signature()

        # http://www.w3.org/TR/PNG/#5Chunk-layout
        # Reuse a chunk header already read by a previous call to
        # _chunk_len_type (stashed in self.atchunk), if any.
        if not self.atchunk:
            self.atchunk = self._chunk_len_type()
            if not self.atchunk:
                raise ChunkError("No more chunks.")
        length, type = self.atchunk
        self.atchunk = None

        data = self.file.read(length)
        if len(data) != length:
            raise ChunkError(
                "Chunk %s too short for required %i octets." % (type, length)
            )
        checksum = self.file.read(4)
        if len(checksum) != 4:
            raise ChunkError("Chunk %s too short for checksum." % type)
        # The CRC covers the chunk type and data, but not the length.
        verify = zlib.crc32(type)
        verify = zlib.crc32(data, verify)
        verify = struct.pack("!I", verify)
        if checksum != verify:
            (a,) = struct.unpack("!I", checksum)
            (b,) = struct.unpack("!I", verify)
            message = "Checksum error in %s chunk: 0x%08X != 0x%08X." % (
                type.decode("ascii"),
                a,
                b,
            )
            if lenient:
                warnings.warn(message, RuntimeWarning)
            else:
                raise ChunkError(message)
        return type, data
+
+ def chunks(self):
+ """Return an iterator that will yield each chunk as a
+ (*chunktype*, *content*) pair.
+ """
+
+ while True:
+ t, v = self.chunk()
+ yield t, v
+ if t == b"IEND":
+ break
+
    def undo_filter(self, filter_type, scanline, previous):
        """
        Undo the filter for a scanline.
        `scanline` is a sequence of bytes that
        does not include the initial filter type byte.
        `previous` is decoded previous scanline
        (for straightlaced images this is the previous pixel row,
        but for interlaced images, it is
        the previous scanline in the reduced image,
        which in general is not the previous pixel row in the final image).
        When there is no previous scanline
        (the first row of a straightlaced image,
        or the first row in one of the passes in an interlaced image),
        then this argument should be ``None``.

        The scanline will have the effects of filtering removed;
        the result will be returned as a fresh sequence of bytes.

        Raises :class:`FormatError` for filter types outside 0..4.
        """

        # :todo: Would it be better to update scanline in place?
        # NOTE: `result` aliases `scanline`; the undo_filter_* helpers
        # write into it, so the caller's scanline is mutated in place.
        result = scanline

        if filter_type == 0:
            return result

        if filter_type not in (1, 2, 3, 4):
            raise FormatError(
                "Invalid PNG Filter Type. "
                "See http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters ."
            )

        # Filter unit. The stride from one pixel to the corresponding
        # byte from the previous pixel. Normally this is the pixel
        # size in bytes, but when this is smaller than 1, the previous
        # byte is used instead.
        fu = max(1, self.psize)

        # For the first line of a pass, synthesize a dummy previous
        # line. An alternative approach would be to observe that on the
        # first line 'up' is the same as 'null', 'paeth' is the same
        # as 'sub', with only 'average' requiring any special case.
        if not previous:
            previous = bytearray([0] * len(scanline))

        # Call appropriate filter algorithm. Note that 0 has already
        # been dealt with.
        fn = (
            None,
            undo_filter_sub,
            undo_filter_up,
            undo_filter_average,
            undo_filter_paeth,
        )[filter_type]
        fn(fu, scanline, previous, result)
        return result
+
    def _deinterlace(self, raw):
        """
        Read raw pixel data, undo filters, deinterlace, and flatten.
        Return a single array of values.

        `raw` is the fully decompressed IDAT byte stream of an
        Adam7-interlaced image: each pass's rows, each prefixed with
        a filter-type byte.
        """

        # Values per row (of the target image)
        vpr = self.width * self.planes

        # Values per image
        vpi = vpr * self.height
        # Interleaving writes to the output array randomly
        # (well, not quite), so the entire output array must be in memory.
        # Make a result array, and make it big enough.
        if self.bitdepth > 8:
            a = array("H", [0] * vpi)
        else:
            a = bytearray([0] * vpi)
        source_offset = 0

        for lines in adam7_generate(self.width, self.height):
            # The previous (reconstructed) scanline.
            # `None` at the beginning of a pass
            # to indicate that there is no previous line.
            recon = None
            for x, y, xstep in lines:
                # Pixels per row (reduced pass image)
                ppr = int(math.ceil((self.width - x) / float(xstep)))
                # Row size in bytes for this pass.
                row_size = int(math.ceil(self.psize * ppr))

                # First byte of each stored row is the filter type.
                filter_type = raw[source_offset]
                source_offset += 1
                scanline = raw[source_offset : source_offset + row_size]
                source_offset += row_size
                recon = self.undo_filter(filter_type, scanline, recon)
                # Convert so that there is one element per pixel value
                flat = self._bytes_to_values(recon, width=ppr)
                if xstep == 1:
                    # Pass covers every pixel of the row: copy wholesale.
                    assert x == 0
                    offset = y * vpr
                    a[offset : offset + vpr] = flat
                else:
                    # Scatter each plane into the strided positions of
                    # the full-size output row.
                    offset = y * vpr + x * self.planes
                    end_offset = (y + 1) * vpr
                    skip = self.planes * xstep
                    for i in range(self.planes):
                        a[offset + i : end_offset : skip] = flat[i :: self.planes]

        return a
+
    def _iter_bytes_to_values(self, byte_rows):
        """
        Iterator that yields each scanline;
        each scanline being a sequence of values.
        `byte_rows` should be an iterator that yields
        the bytes of each row in turn.

        Each yielded row is a fresh sequence produced by
        :meth:`_bytes_to_values`.
        """

        for row in byte_rows:
            yield self._bytes_to_values(row)
+
    def _bytes_to_values(self, bs, width=None):
        """Convert a packed row of bytes into a row of values.
        Result will be a freshly allocated object,
        not shared with the argument.

        For bit depth 8 this is a plain byte copy; for 16 each pair of
        bytes is one big-endian value; below 8 each byte holds several
        samples which are unpacked most-significant first.
        """

        if self.bitdepth == 8:
            return bytearray(bs)
        if self.bitdepth == 16:
            return array("H", struct.unpack("!%dH" % (len(bs) // 2), bs))

        assert self.bitdepth < 8
        if width is None:
            width = self.width
        # Samples per byte
        spb = 8 // self.bitdepth
        out = bytearray()
        mask = 2**self.bitdepth - 1
        # Shift amounts, highest first, so samples come out in
        # left-to-right pixel order.
        shifts = [self.bitdepth * i for i in reversed(list(range(spb)))]
        for o in bs:
            out.extend([mask & (o >> i) for i in shifts])
        # The final byte may have padding bits; truncate to `width`.
        return out[:width]
+
    def _iter_straight_packed(self, byte_blocks):
        """Iterator that undoes the effect of filtering;
        yields each row as a sequence of packed bytes.
        Assumes input is straightlaced.
        `byte_blocks` should be an iterable that yields the raw bytes
        in blocks of arbitrary size.

        Raises :class:`FormatError` if the decompressed data does not
        pack into an exact number of rows.
        """

        # length of row, in bytes
        rb = self.row_bytes
        a = bytearray()
        # The previous (reconstructed) scanline.
        # None indicates first line of image.
        recon = None
        for some_bytes in byte_blocks:
            a.extend(some_bytes)
            # Each stored row is one filter-type byte plus `rb` data bytes.
            while len(a) >= rb + 1:
                filter_type = a[0]
                scanline = a[1 : rb + 1]
                del a[: rb + 1]
                recon = self.undo_filter(filter_type, scanline, recon)
                yield recon
        if len(a) != 0:
            # :file:format We get here with a file format error:
            # when the available bytes (after decompressing) do not
            # pack into exact rows.
            raise FormatError("Wrong size for decompressed IDAT chunk.")
        assert len(a) == 0
+
    def validate_signature(self):
        """
        If signature (header) has not been read then read and
        validate it; otherwise do nothing.

        Reads the first 8 bytes of the file and compares them to the
        module-level ``signature`` constant; raises
        :class:`FormatError` on mismatch.
        """

        if self.signature:
            return
        self.signature = self.file.read(8)
        if self.signature != signature:
            raise FormatError("PNG file has invalid signature.")
+
    def preamble(self, lenient=False):
        """
        Extract the image metadata by reading
        the initial part of the PNG file up to
        the start of the ``IDAT`` chunk.
        All the chunks that precede the ``IDAT`` chunk are
        read and either processed for metadata or discarded.

        If the optional `lenient` argument evaluates to `True`,
        checksum failures will raise warnings rather than exceptions.
        """

        self.validate_signature()

        while True:
            # `atchunk` caches the (length, type) of the next chunk so
            # we can stop *before* consuming the first IDAT.
            if not self.atchunk:
                self.atchunk = self._chunk_len_type()
                if self.atchunk is None:
                    raise FormatError("This PNG file has no IDAT chunks.")
            if self.atchunk[1] == b"IDAT":
                return
            self.process_chunk(lenient=lenient)
+
    def _chunk_len_type(self):
        """
        Reads just enough of the input to
        determine the next chunk's length and type;
        return a (*length*, *type*) pair where *type* is a byte sequence.
        If there are no more chunks, ``None`` is returned.

        Raises :class:`FormatError` on a truncated header, an oversized
        length, or a type containing non-ASCII-letter bytes.
        """

        x = self.file.read(8)
        if not x:
            return None
        if len(x) != 8:
            raise FormatError("End of file whilst reading chunk length and type.")
        # 4-byte big-endian length followed by 4-byte chunk type.
        length, type = struct.unpack("!I4s", x)
        if length > 2**31 - 1:
            raise FormatError("Chunk %s is too large: %d." % (type, length))
        # Check that all bytes are in valid ASCII range.
        # https://www.w3.org/TR/2003/REC-PNG-20031110/#5Chunk-layout
        type_bytes = set(bytearray(type))
        if not (type_bytes <= set(range(65, 91)) | set(range(97, 123))):
            raise FormatError("Chunk %r has invalid Chunk Type." % list(type))
        return length, type
+
    def process_chunk(self, lenient=False):
        """
        Process the next chunk and its data.
        This only processes the following chunk types:
        ``IHDR``, ``PLTE``, ``bKGD``, ``tRNS``, ``gAMA``, ``sBIT``, ``pHYs``.
        All other chunk types are ignored.

        If the optional `lenient` argument evaluates to `True`,
        checksum failures will raise warnings rather than exceptions.
        """

        type, data = self.chunk(lenient=lenient)
        # Dispatch to a _process_XXXX method if one exists; chunks
        # without a handler are silently discarded.
        method = "_process_" + type.decode("ascii")
        m = getattr(self, method, None)
        if m:
            m(data)
+
    def _process_IHDR(self, data):
        """Parse the IHDR chunk and set up all derived image metadata."""
        # http://www.w3.org/TR/PNG/#11IHDR
        if len(data) != 13:
            raise FormatError("IHDR chunk has incorrect length.")
        (
            self.width,
            self.height,
            self.bitdepth,
            self.color_type,
            self.compression,
            self.filter,
            self.interlace,
        ) = struct.unpack("!2I5B", data)

        check_bitdepth_colortype(self.bitdepth, self.color_type)

        if self.compression != 0:
            raise FormatError("Unknown compression method %d" % self.compression)
        if self.filter != 0:
            raise FormatError(
                "Unknown filter method %d,"
                " see http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters ."
                % self.filter
            )
        if self.interlace not in (0, 1):
            raise FormatError(
                "Unknown interlace method %d, see "
                "http://www.w3.org/TR/2003/REC-PNG-20031110/#8InterlaceMethods"
                " ." % self.interlace
            )

        # Derived values
        # http://www.w3.org/TR/PNG/#6Colour-values
        colormap = bool(self.color_type & 1)
        greyscale = not (self.color_type & 2)
        alpha = bool(self.color_type & 4)
        color_planes = (3, 1)[greyscale or colormap]
        planes = color_planes + alpha

        self.colormap = colormap
        self.greyscale = greyscale
        self.alpha = alpha
        self.color_planes = color_planes
        self.planes = planes
        # Pixel size in bytes; can be fractional for bitdepth < 8.
        self.psize = float(self.bitdepth) / float(8) * planes
        if int(self.psize) == self.psize:
            self.psize = int(self.psize)
        self.row_bytes = int(math.ceil(self.width * self.psize))
        # Stores PLTE chunk if present, and is used to check
        # chunk ordering constraints.
        self.plte = None
        # Stores tRNS chunk if present, and is used to check chunk
        # ordering constraints.
        self.trns = None
        # Stores sBIT chunk if present.
        self.sbit = None
+
    def _process_PLTE(self, data):
        """Store the PLTE (palette) chunk, checking its length constraints."""
        # http://www.w3.org/TR/PNG/#11PLTE
        if self.plte:
            warnings.warn("Multiple PLTE chunks present.")
        self.plte = data
        # Palette entries are RGB triples.
        if len(data) % 3 != 0:
            raise FormatError("PLTE chunk's length should be a multiple of 3.")
        if len(data) > (2**self.bitdepth) * 3:
            raise FormatError("PLTE chunk is too long.")
        if len(data) == 0:
            raise FormatError("Empty PLTE is not allowed.")
+
    def _process_bKGD(self, data):
        """Store the background colour from the bKGD chunk.

        For palettized images the background is a single palette index;
        otherwise it is one 16-bit value per colour plane.
        """
        try:
            if self.colormap:
                if not self.plte:
                    warnings.warn("PLTE chunk is required before bKGD chunk.")
                self.background = struct.unpack("B", data)
            else:
                self.background = struct.unpack("!%dH" % self.color_planes, data)
        except struct.error:
            raise FormatError("bKGD chunk has incorrect length.")
+
    def _process_tRNS(self, data):
        """Store transparency data from the tRNS chunk.

        For palettized images the chunk holds per-entry alpha values;
        for non-alpha direct images it names a single fully transparent
        colour (stored as ``self.transparent``).
        """
        # http://www.w3.org/TR/PNG/#11tRNS
        self.trns = data
        if self.colormap:
            if not self.plte:
                warnings.warn("PLTE chunk is required before tRNS chunk.")
            else:
                if len(data) > len(self.plte) / 3:
                    # Was warning, but promoted to Error as it
                    # would otherwise cause pain later on.
                    raise FormatError("tRNS chunk is too long.")
        else:
            if self.alpha:
                raise FormatError(
                    "tRNS chunk is not valid with colour type %d." % self.color_type
                )
            try:
                self.transparent = struct.unpack("!%dH" % self.color_planes, data)
            except struct.error:
                raise FormatError("tRNS chunk has incorrect length.")
+
    def _process_gAMA(self, data):
        """Store the gamma value; the chunk encodes gamma * 100000."""
        try:
            self.gamma = struct.unpack("!L", data)[0] / 100000.0
        except struct.error:
            raise FormatError("gAMA chunk has incorrect length.")
+
    def _process_sBIT(self, data):
        """Store the sBIT (significant bits) chunk, validating its length.

        Palettized images always carry 3 entries; other colour types
        carry one entry per plane.
        """
        self.sbit = data
        if (
            self.colormap
            and len(data) != 3
            or not self.colormap
            and len(data) != self.planes
        ):
            raise FormatError("sBIT chunk has incorrect length.")
+
    def _process_pHYs(self, data):
        """Store the physical pixel dimensions from the pHYs chunk."""
        # http://www.w3.org/TR/PNG/#11pHYs
        self.phys = data
        # Two 4-byte unsigned pixel densities plus a 1-byte unit flag.
        fmt = "!LLB"
        if len(data) != struct.calcsize(fmt):
            raise FormatError("pHYs chunk has incorrect length.")
        self.x_pixels_per_unit, self.y_pixels_per_unit, unit = struct.unpack(fmt, data)
        self.unit_is_meter = bool(unit)
+
    def read(self, lenient=False):
        """
        Read the PNG file and decode it.
        Returns (`width`, `height`, `rows`, `info`).

        May use excessive memory.

        `rows` is a sequence of rows;
        each row is a sequence of values.

        If the optional `lenient` argument evaluates to True,
        checksum failures will raise warnings rather than exceptions.
        """

        def iteridat():
            """Iterator that yields all the ``IDAT`` chunks as strings."""
            while True:
                type, data = self.chunk(lenient=lenient)
                if type == b"IEND":
                    # http://www.w3.org/TR/PNG/#11IEND
                    break
                if type != b"IDAT":
                    continue
                # type == b'IDAT'
                # http://www.w3.org/TR/PNG/#11IDAT
                if self.colormap and not self.plte:
                    warnings.warn("PLTE chunk is required before IDAT chunk")
                yield data

        self.preamble(lenient=lenient)
        # `raw` is a lazy generator of decompressed byte blocks.
        raw = decompress(iteridat())

        if self.interlace:

            def rows_from_interlace():
                """Yield each row from an interlaced PNG."""
                # It's important that this iterator doesn't read
                # IDAT chunks until it yields the first row.
                bs = bytearray(itertools.chain(*raw))
                arraycode = "BH"[self.bitdepth > 8]
                # Like :meth:`group` but
                # producing an array.array object for each row.
                values = self._deinterlace(bs)
                vpr = self.width * self.planes
                for i in range(0, len(values), vpr):
                    row = array(arraycode, values[i : i + vpr])
                    yield row

            rows = rows_from_interlace()
        else:
            rows = self._iter_bytes_to_values(self._iter_straight_packed(raw))
        info = dict()
        for attr in "greyscale alpha planes bitdepth interlace".split():
            info[attr] = getattr(self, attr)
        info["size"] = (self.width, self.height)
        # Optional metadata: only present if the relevant chunk was seen.
        for attr in "gamma transparent background".split():
            a = getattr(self, attr, None)
            if a is not None:
                info[attr] = a
        if getattr(self, "x_pixels_per_unit", None):
            info["physical"] = Resolution(
                self.x_pixels_per_unit, self.y_pixels_per_unit, self.unit_is_meter
            )
        if self.plte:
            info["palette"] = self.palette()
        return self.width, self.height, rows, info
+
    def read_flat(self):
        """
        Read a PNG file and decode it into a single array of values.
        Returns (*width*, *height*, *values*, *info*).

        May use excessive memory.

        `values` is a single array.

        The :meth:`read` method is more stream-friendly than this,
        because it returns a sequence of rows.
        """

        x, y, pixel, info = self.read()
        # "B" for 8-bit-or-less samples, "H" for 16-bit.
        arraycode = "BH"[info["bitdepth"] > 8]
        pixel = array(arraycode, itertools.chain(*pixel))
        return x, y, pixel, info
+
    def palette(self, alpha="natural"):
        """
        Returns a palette that is a sequence of 3-tuples or 4-tuples,
        synthesizing it from the ``PLTE`` and ``tRNS`` chunks.
        These chunks should have already been processed (for example,
        by calling the :meth:`preamble` method).
        All the tuples are the same size:
        3-tuples if there is no ``tRNS`` chunk,
        4-tuples when there is a ``tRNS`` chunk.

        Assumes that the image is colour type
        3 and therefore a ``PLTE`` chunk is required.

        If the `alpha` argument is ``'force'`` then an alpha channel is
        always added, forcing the result to be a sequence of 4-tuples.
        """

        if not self.plte:
            raise FormatError("Required PLTE chunk is missing in colour type 3 image.")
        plte = group(array("B", self.plte), 3)
        if self.trns or alpha == "force":
            # Palette entries without an explicit tRNS alpha default
            # to fully opaque (255).
            trns = array("B", self.trns or [])
            trns.extend([255] * (len(plte) - len(trns)))
            plte = list(map(operator.add, plte, group(trns, 1)))
        return plte
+
    def asDirect(self):
        """
        Returns the image data as a direct representation of
        an ``x * y * planes`` array.
        This removes the need for callers to deal with
        palettes and transparency themselves.
        Images with a palette (colour type 3) are converted to RGB or RGBA;
        images with transparency (a ``tRNS`` chunk) are converted to
        LA or RGBA as appropriate.
        When returned in this format the pixel values represent
        the colour value directly without needing to refer
        to palettes or transparency information.

        Like the :meth:`read` method this method returns a 4-tuple:

        (*width*, *height*, *rows*, *info*)

        This method normally returns pixel values with
        the bit depth they have in the source image, but
        when the source PNG has an ``sBIT`` chunk it is inspected and
        can reduce the bit depth of the result pixels;
        pixel values will be reduced according to the bit depth
        specified in the ``sBIT`` chunk.
        PNG nerds should note a single result bit depth is
        used for all channels:
        the maximum of the ones specified in the ``sBIT`` chunk.
        An RGB565 image will be rescaled to 6-bit RGB666.

        The *info* dictionary that is returned reflects
        the `direct` format and not the original source image.
        For example, an RGB source image with a ``tRNS`` chunk
        to represent a transparent colour,
        will start with ``planes=3`` and ``alpha=False`` for the
        source image,
        but the *info* dictionary returned by this method
        will have ``planes=4`` and ``alpha=True`` because
        an alpha channel is synthesized and added.

        *rows* is a sequence of rows;
        each row being a sequence of values
        (like the :meth:`read` method).

        All the other aspects of the image data are not changed.
        """

        self.preamble()

        # Simple case, no conversion necessary.
        if not self.colormap and not self.trns and not self.sbit:
            return self.read()

        x, y, pixels, info = self.read()

        if self.colormap:
            # Palette expansion: each index becomes an RGB(A) tuple.
            info["colormap"] = False
            info["alpha"] = bool(self.trns)
            info["bitdepth"] = 8
            info["planes"] = 3 + bool(self.trns)
            plte = self.palette()

            def iterpal(pixels):
                for row in pixels:
                    row = [plte[x] for x in row]
                    yield array("B", itertools.chain(*row))

            pixels = iterpal(pixels)
        elif self.trns:
            # It would be nice if there was some reasonable way
            # of doing this without generating a whole load of
            # intermediate tuples. But tuples does seem like the
            # easiest way, with no other way clearly much simpler or
            # much faster. (Actually, the L to LA conversion could
            # perhaps go faster (all those 1-tuples!), but I still
            # wonder whether the code proliferation is worth it)
            it = self.transparent
            maxval = 2 ** info["bitdepth"] - 1
            planes = info["planes"]
            info["alpha"] = True
            info["planes"] += 1
            typecode = "BH"[info["bitdepth"] > 8]

            def itertrns(pixels):
                for row in pixels:
                    # For each row we group it into pixels, then form a
                    # characterisation vector that says whether each
                    # pixel is opaque or not. Then we convert
                    # True/False to 0/maxval (by multiplication),
                    # and add it as the extra channel.
                    row = group(row, planes)
                    opa = map(it.__ne__, row)
                    opa = map(maxval.__mul__, opa)
                    opa = list(zip(opa))  # convert to 1-tuples
                    yield array(typecode, itertools.chain(*map(operator.add, row, opa)))

            pixels = itertrns(pixels)
        targetbitdepth = None
        if self.sbit:
            sbit = struct.unpack("%dB" % len(self.sbit), self.sbit)
            # A single result bit depth is used for all channels:
            # the maximum specified in the sBIT chunk.
            targetbitdepth = max(sbit)
            if targetbitdepth > info["bitdepth"]:
                raise Error("sBIT chunk %r exceeds bitdepth %d" % (sbit, self.bitdepth))
            if min(sbit) <= 0:
                raise Error("sBIT chunk %r has a 0-entry" % sbit)
        if targetbitdepth:
            shift = info["bitdepth"] - targetbitdepth
            info["bitdepth"] = targetbitdepth

            def itershift(pixels):
                for row in pixels:
                    yield [p >> shift for p in row]

            pixels = itershift(pixels)
        return x, y, pixels, info
+
    def _as_rescale(self, get, targetbitdepth):
        """Helper used by :meth:`asRGB8` and :meth:`asRGBA8`.

        `get` is a bound method (e.g. :meth:`asRGB`) returning a
        (width, height, pixels, info) tuple; values are linearly
        rescaled from the source bit depth to `targetbitdepth`.
        """

        width, height, pixels, info = get()
        maxval = 2 ** info["bitdepth"] - 1
        targetmaxval = 2**targetbitdepth - 1
        factor = float(targetmaxval) / float(maxval)
        info["bitdepth"] = targetbitdepth

        def iterscale():
            for row in pixels:
                yield [int(round(x * factor)) for x in row]

        # No-op when the source already has the target bit depth.
        if maxval == targetmaxval:
            return width, height, pixels, info
        else:
            return width, height, iterscale(), info
+
    def asRGB8(self):
        """
        Return the image data as an RGB pixels with 8-bits per sample.
        This is like the :meth:`asRGB` method except that
        this method additionally rescales the values so that
        they are all between 0 and 255 (8-bit).
        In the case where the source image has a bit depth < 8
        the transformation preserves all the information;
        where the source image has bit depth > 8, then
        rescaling to 8-bit values loses precision.
        No dithering is performed.
        Like :meth:`asRGB`,
        an alpha channel in the source image will raise an exception.

        This function returns a 4-tuple:
        (*width*, *height*, *rows*, *info*).
        *width*, *height*, *info* are as per the :meth:`read` method.

        *rows* is the pixel data as a sequence of rows.
        """

        # Thin wrapper: asRGB supplies pixels, _as_rescale maps to 8-bit.
        return self._as_rescale(self.asRGB, 8)
+
    def asRGBA8(self):
        """
        Return the image data as RGBA pixels with 8-bits per sample.
        This method is similar to :meth:`asRGB8` and :meth:`asRGBA`:
        The result pixels have an alpha channel, *and*
        values are rescaled to the range 0 to 255.
        The alpha channel is synthesized if necessary
        (with a small speed penalty).
        """

        # Thin wrapper: asRGBA supplies pixels, _as_rescale maps to 8-bit.
        return self._as_rescale(self.asRGBA, 8)
+
    def asRGB(self):
        """
        Return image as RGB pixels.
        RGB colour images are passed through unchanged;
        greyscales are expanded into RGB triplets
        (there is a small speed overhead for doing this).

        An alpha channel in the source image will raise an exception.

        The return values are as for the :meth:`read` method except that
        the *info* reflect the returned pixels, not the source image.
        In particular,
        for this method ``info['greyscale']`` will be ``False``.
        """

        width, height, pixels, info = self.asDirect()
        if info["alpha"]:
            raise Error("will not convert image with alpha channel to RGB")
        if not info["greyscale"]:
            return width, height, pixels, info
        info["greyscale"] = False
        info["planes"] = 3

        # Choose a row container matching the sample width.
        if info["bitdepth"] > 8:

            def newarray():
                return array("H", [0])

        else:

            def newarray():
                return bytearray([0])

        def iterrgb():
            for row in pixels:
                # Replicate the grey value into all three channels.
                a = newarray() * 3 * width
                for i in range(3):
                    a[i::3] = row
                yield a

        return width, height, iterrgb(), info
+
    def asRGBA(self):
        """
        Return image as RGBA pixels.
        Greyscales are expanded into RGB triplets;
        an alpha channel is synthesized if necessary.
        The return values are as for the :meth:`read` method except that
        the *info* reflect the returned pixels, not the source image.
        In particular, for this method
        ``info['greyscale']`` will be ``False``, and
        ``info['alpha']`` will be ``True``.
        """

        width, height, pixels, info = self.asDirect()
        if info["alpha"] and not info["greyscale"]:
            return width, height, pixels, info
        typecode = "BH"[info["bitdepth"] > 8]
        maxval = 2 ** info["bitdepth"] - 1
        # Template row pre-filled with maxval so the alpha channel is
        # already fully opaque for the conversions that don't set it.
        maxbuffer = struct.pack("=" + typecode, maxval) * 4 * width

        if info["bitdepth"] > 8:

            def newarray():
                return array("H", maxbuffer)

        else:

            def newarray():
                return bytearray(maxbuffer)

        if info["alpha"] and info["greyscale"]:
            # LA to RGBA
            def convert():
                for row in pixels:
                    # Create a fresh target row, then copy L channel
                    # into first three target channels, and A channel
                    # into fourth channel.
                    a = newarray()
                    convert_la_to_rgba(row, a)
                    yield a

        elif info["greyscale"]:
            # L to RGBA
            def convert():
                for row in pixels:
                    a = newarray()
                    convert_l_to_rgba(row, a)
                    yield a

        else:
            assert not info["alpha"] and not info["greyscale"]
            # RGB to RGBA

            def convert():
                for row in pixels:
                    a = newarray()
                    convert_rgb_to_rgba(row, a)
                    yield a

        info["alpha"] = True
        info["greyscale"] = False
        info["planes"] = 4
        return width, height, convert(), info
+
+
def decompress(data_blocks):
    """
    Decompress a zlib stream supplied in pieces.

    `data_blocks` should be an iterable that yields the compressed
    data (from the ``IDAT`` chunks); this generator yields the
    decompressed data as bytearrays.
    """

    # One decompressor instance spans the whole stream: a PNG's IDAT
    # chunks together form a single zlib stream.
    engine = zlib.decompressobj()
    # Note: without a max_length argument this yields one block per
    # input chunk, so the output is only as incremental as the input.
    for block in data_blocks:
        yield bytearray(engine.decompress(block))
    # Emit whatever is still buffered inside the decompressor.
    yield bytearray(engine.flush())
+
+
def check_bitdepth_colortype(bitdepth, colortype):
    """
    Check that `bitdepth` and `colortype` are both valid,
    and specified in a valid combination.
    Returns (None) if valid, raise an Exception if not valid.

    See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 for the
    allowed combinations.
    """

    if bitdepth not in (1, 2, 4, 8, 16):
        raise FormatError("invalid bit depth %d" % bitdepth)
    if colortype not in (0, 2, 3, 4, 6):
        raise FormatError("invalid colour type %d" % colortype)
    # Check indexed (palettized) images have 8 or fewer bits
    # per pixel; check only indexed or greyscale images have
    # fewer than 8 bits per pixel.
    if colortype & 1 and bitdepth > 8:
        # BUGFIX: the format arguments were previously supplied as
        # (bitdepth, colortype), swapping the values in the message.
        raise FormatError(
            "Indexed images (colour type %d) cannot"
            " have bitdepth > 8 (bit depth %d)."
            " See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ."
            % (colortype, bitdepth)
        )
    if bitdepth < 8 and colortype not in (0, 3):
        raise FormatError(
            "Illegal combination of bit depth (%d)"
            " and colour type (%d)."
            " See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ."
            % (bitdepth, colortype)
        )
+
+
def is_natural(x):
    """Return True when `x` is a non-negative integer value."""
    try:
        # Values that are not integral (2.5) or not convertible at all
        # ("spam", None) are not natural numbers.
        if int(x) != x:
            return False
    except (TypeError, ValueError):
        return False
    return x >= 0
+
+
def undo_filter_sub(filter_unit, scanline, previous, result):
    """Undo sub filter: each byte is offset by the reconstructed byte
    one filter unit to its left."""

    # The first `filter_unit` bytes have no left neighbour; they are
    # already correct in `result` (which aliases `scanline`).
    for i in range(filter_unit, len(result)):
        left = result[i - filter_unit]
        result[i] = (scanline[i] + left) & 0xFF
+
+
def undo_filter_up(filter_unit, scanline, previous, result):
    """Undo up filter: each byte is offset by the byte directly above
    it in the previous scanline."""

    n = len(result)
    for i in range(n):
        above = previous[i]
        result[i] = (scanline[i] + above) & 0xFF
+
+
def undo_filter_average(filter_unit, scanline, previous, result):
    """Undo average filter: each byte is offset by the floor of the
    mean of the byte to its left and the byte above it."""

    for i in range(len(result)):
        if i < filter_unit:
            left = 0  # no byte to the left for the first filter unit
        else:
            left = result[i - filter_unit]
        above = previous[i]
        result[i] = (scanline[i] + ((left + above) >> 1)) & 0xFF
+
+
def undo_filter_paeth(filter_unit, scanline, previous, result):
    """Undo Paeth filter: each byte is offset by whichever of
    (left, above, above-left) best predicts it."""

    for i in range(len(result)):
        # Index of the byte one filter unit to the left.
        j = i - filter_unit
        if j < 0:
            left = above_left = 0
        else:
            left = result[j]
            above_left = previous[j]
        above = previous[i]
        # Paeth predictor: pick the neighbour closest to
        # left + above - above_left, preferring left, then above.
        p = left + above - above_left
        pa = abs(p - left)
        pb = abs(p - above)
        pc = abs(p - above_left)
        if pa <= pb and pa <= pc:
            predictor = left
        elif pb <= pc:
            predictor = above
        else:
            predictor = above_left
        result[i] = (scanline[i] + predictor) & 0xFF
+
+
def convert_la_to_rgba(row, result):
    """
    Convert a greyscale-with-alpha (LA) row to RGBA in place in
    `result`: the L sample is copied into R, G and B, and the A
    sample into the fourth channel.
    """
    greys = row[0::2]
    for channel in (0, 1, 2):
        result[channel::4] = greys
    result[3::4] = row[1::2]
+
+
def convert_l_to_rgba(row, result):
    """
    Convert a grayscale image to RGBA.
    The L sample is copied into the R, G and B channels of `result`;
    the alpha channel in `result` is assumed to be already
    correctly initialized and is left untouched.
    """
    for channel in (0, 1, 2):
        result[channel::4] = row
+
+
def convert_rgb_to_rgba(row, result):
    """
    Convert an RGB image to RGBA.
    Each colour channel is copied into the corresponding strided slot
    of `result`; the alpha channel in `result` is assumed to be
    already correctly initialized and is left untouched.
    """
    for channel in (0, 1, 2):
        result[channel::4] = row[channel::3]
+
+
+# Only reason to include this in this module is that
+# several utilities need it, and it is small.
def binary_stdin():
    """
    A sys.stdin that returns bytes.

    Returns the underlying binary buffer of ``sys.stdin``.
    """

    return sys.stdin.buffer
+
+
def binary_stdout():
    """
    A sys.stdout that accepts bytes.

    Returns the underlying binary buffer of ``sys.stdout``; on
    Windows it also switches the stdout file descriptor to binary
    mode so newline translation does not corrupt the output.
    """

    stdout = sys.stdout.buffer

    # On Windows the C runtime file orientation needs changing.
    if sys.platform == "win32":
        import msvcrt
        import os

        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)

    return stdout
+
+
def cli_open(path):
    """Open `path` for binary reading; the conventional ``-`` means
    standard input."""
    if path == "-":
        return binary_stdin()
    return open(path, "rb")
diff --git a/venv/lib/python3.8/site-packages/_plotly_utils/utils.py b/venv/lib/python3.8/site-packages/_plotly_utils/utils.py
new file mode 100644
index 0000000..07a2b9e
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/_plotly_utils/utils.py
@@ -0,0 +1,557 @@
+import base64
+import decimal
+import json as _json
+import sys
+import re
+from functools import reduce
+
+from _plotly_utils.optional_imports import get_module
+from _plotly_utils.basevalidators import (
+ ImageUriValidator,
+ copy_to_readonly_numpy_array,
+ is_homogeneous_array,
+)
+
+
# Value limits of the fixed-width integer dtypes; used by
# to_typed_array_spec to decide how far a 64-bit array can be downcast.
int8min = -128
int8max = 127
int16min = -32768
int16max = 32767
int32min = -2147483648
int32max = 2147483647

uint8max = 255
uint16max = 65535
uint32max = 4294967295

# Mapping from numpy dtype names to the short dtype codes used in the
# plotly.js typed array spec ({"dtype": ..., "bdata": ...}).
plotlyjsShortTypes = {
    "int8": "i1",
    "uint8": "u1",
    "int16": "i2",
    "uint16": "u2",
    "int32": "i4",
    "uint32": "u4",
    "float32": "f4",
    "float64": "f8",
}
+
+
def to_typed_array_spec(v):
    """
    Convert numpy array to plotly.js typed array spec
    ({"dtype": short_code, "bdata": base64_data[, "shape": dims]}).
    If not possible return the original value.
    """
    v = copy_to_readonly_numpy_array(v)

    # Skip b64 encoding if numpy is not installed,
    # or if v is not a numpy array, or if v is empty
    np = get_module("numpy", should_load=False)
    if not np or not isinstance(v, np.ndarray) or v.size == 0:
        return v

    dtype = str(v.dtype)

    # Convert default Big Ints until we could support them in plotly.js.
    # (vmax/vmin instead of max/min so the builtins are not shadowed.)
    if dtype == "int64":
        vmax = v.max()
        vmin = v.min()
        if vmax <= int8max and vmin >= int8min:
            v = v.astype("int8")
        elif vmax <= int16max and vmin >= int16min:
            v = v.astype("int16")
        elif vmax <= int32max and vmin >= int32min:
            v = v.astype("int32")
        else:
            # Values exceed 32 bits: leave the array untouched.
            return v

    elif dtype == "uint64":
        vmax = v.max()
        vmin = v.min()
        if vmax <= uint8max and vmin >= 0:
            v = v.astype("uint8")
        elif vmax <= uint16max and vmin >= 0:
            v = v.astype("uint16")
        elif vmax <= uint32max and vmin >= 0:
            v = v.astype("uint32")
        else:
            return v

    # Re-read the dtype: the downcasts above may have changed it.
    dtype = str(v.dtype)

    if dtype in plotlyjsShortTypes:
        arrObj = {
            "dtype": plotlyjsShortTypes[dtype],
            "bdata": base64.b64encode(v).decode("ascii"),
        }

        # Multi-dimensional arrays also carry their shape as a
        # comma-separated string, e.g. "2, 3" for a 2x3 array.
        if v.ndim > 1:
            arrObj["shape"] = str(v.shape)[1:-1]

        return arrObj

    return v
+
+
def is_skipped_key(key):
    """
    Return whether the key is skipped for conversion to the typed array spec
    """
    return key in ("geojson", "layer", "layers", "range")
+
+
def convert_to_base64(obj):
    """
    Recursively walk a figure-like structure of dicts/lists/tuples,
    replacing (in place) every homogeneous array value with its
    plotly.js typed array spec. Keys named by `is_skipped_key` are
    left untouched.
    """
    if isinstance(obj, dict):
        for key in obj:
            if is_skipped_key(key):
                continue
            value = obj[key]
            if is_homogeneous_array(value):
                obj[key] = to_typed_array_spec(value)
            else:
                convert_to_base64(value)
    elif isinstance(obj, (list, tuple)):
        for item in obj:
            convert_to_base64(item)
+
+
def cumsum(x):
    """
    Custom cumsum to avoid a numpy import.

    Returns a list of running totals of `x`; an empty input yields
    an empty list.

    Uses a single pass with an append per element. The previous
    implementation rebuilt the accumulator list on every step
    (``a + [a[-1] + x]`` inside a reduce), which was O(n^2).
    """
    totals = []
    for value in x:
        # First element starts the running total; later elements are
        # added to the last total.
        totals.append(totals[-1] + value if totals else value)
    return totals
+
+
class PlotlyJSONEncoder(_json.JSONEncoder):
    """
    Meant to be passed as the `cls` kwarg to json.dumps(obj, cls=..)

    See PlotlyJSONEncoder.default for more implementation information.

    Additionally, this encoder overrides nan functionality so that 'Inf',
    'NaN' and '-Inf' encode to 'null'. Which is stricter JSON than the Python
    version.

    """

    def coerce_to_strict(self, const):
        """
        This is used to ultimately *encode* into strict JSON, see `encode`

        Passed as ``parse_constant`` to ``json.loads``, so `const` is
        one of the literal strings "Infinity", "-Infinity" or "NaN";
        those map to None (i.e. JSON null).
        """
        # before python 2.7, 'true', 'false', 'null', were include here.
        if const in ("Infinity", "-Infinity", "NaN"):
            return None
        else:
            return const

    def encode(self, o):
        """
        Load and then dump the result using parse_constant kwarg

        Note that setting invalid separators will cause a failure at this step.

        """
        # this will raise errors in a normal-expected way
        encoded_o = super(PlotlyJSONEncoder, self).encode(o)
        # Brute force guessing whether NaN or Infinity values are in the string
        # We catch false positive cases (e.g. strings such as titles, labels etc.)
        # but this is ok since the intention is to skip the decoding / reencoding
        # step when it's completely safe

        # Fast path: no suspicious substrings, so the encoding is
        # already strict JSON.
        if not ("NaN" in encoded_o or "Infinity" in encoded_o):
            return encoded_o

        # now:
        # 1. `loads` to switch Infinity, -Infinity, NaN to None
        # 2. `dumps` again so you get 'null' instead of extended JSON
        try:
            new_o = _json.loads(encoded_o, parse_constant=self.coerce_to_strict)
        except ValueError:
            # invalid separators will fail here. raise a helpful exception
            raise ValueError(
                "Encoding into strict JSON failed. Did you set the separators "
                "valid JSON separators?"
            )
        else:
            # Re-dump with the same formatting options so the round
            # trip is transparent to the caller.
            return _json.dumps(
                new_o,
                sort_keys=self.sort_keys,
                indent=self.indent,
                separators=(self.item_separator, self.key_separator),
            )

    def default(self, obj):
        """
        Accept an object (of unknown type) and try to encode with priority:
        1. builtin: user-defined objects
        2. sage: sage math cloud
        3. pandas: dataframes/series
        4. numpy: ndarrays
        5. datetime: time/datetime objects

        Each method throws a NotEncoded exception if it fails.

        The default method will only get hit if the object is not a type that
        is naturally encoded by json:

            Normal objects:
                dict                object
                list, tuple         array
                str, unicode        string
                int, long, float    number
                True                true
                False               false
                None                null

            Extended objects:
                float('nan')        'NaN'
                float('infinity')   'Infinity'
                float('-infinity')  '-Infinity'

        Therefore, we only anticipate either unknown iterables or values here.

        """
        # TODO: The ordering if these methods is *very* important. Is this OK?
        encoding_methods = (
            self.encode_as_plotly,
            self.encode_as_sage,
            self.encode_as_numpy,
            self.encode_as_pandas,
            self.encode_as_datetime,
            self.encode_as_date,
            self.encode_as_list,  # because some values have `tolist` do last.
            self.encode_as_decimal,
            self.encode_as_pil,
        )
        # Try each encoder in turn; the first that does not raise
        # NotEncodable wins.
        for encoding_method in encoding_methods:
            try:
                return encoding_method(obj)
            except NotEncodable:
                pass
        # Nothing applied: defer to the base class, which raises TypeError.
        return _json.JSONEncoder.default(self, obj)

    @staticmethod
    def encode_as_plotly(obj):
        """Attempt to use a builtin `to_plotly_json` method."""
        try:
            return obj.to_plotly_json()
        except AttributeError:
            raise NotEncodable

    @staticmethod
    def encode_as_list(obj):
        """Attempt to use `tolist` method to convert to normal Python list."""
        if hasattr(obj, "tolist"):
            return obj.tolist()
        else:
            raise NotEncodable

    @staticmethod
    def encode_as_sage(obj):
        """Attempt to convert sage.all.RR to floats and sage.all.ZZ to ints"""
        sage_all = get_module("sage.all")
        if not sage_all:
            raise NotEncodable

        if obj in sage_all.RR:
            return float(obj)
        elif obj in sage_all.ZZ:
            return int(obj)
        else:
            raise NotEncodable

    @staticmethod
    def encode_as_pandas(obj):
        """Attempt to convert pandas.NaT / pandas.NA"""
        pandas = get_module("pandas", should_load=False)
        if not pandas:
            raise NotEncodable

        # Both pandas missing-value sentinels encode as null.
        if obj is pandas.NaT:
            return None

        # pandas.NA was introduced in pandas 1.0
        if hasattr(pandas, "NA") and obj is pandas.NA:
            return None

        raise NotEncodable

    @staticmethod
    def encode_as_numpy(obj):
        """Attempt to convert numpy.ma.core.masked"""
        numpy = get_module("numpy", should_load=False)
        if not numpy:
            raise NotEncodable

        if obj is numpy.ma.core.masked:
            return float("nan")
        elif isinstance(obj, numpy.ndarray) and obj.dtype.kind == "M":
            # datetime64 arrays: render as ISO strings.
            try:
                return numpy.datetime_as_string(obj).tolist()
            except TypeError:
                pass

        raise NotEncodable

    @staticmethod
    def encode_as_datetime(obj):
        """Convert datetime objects to iso-format strings"""
        try:
            return obj.isoformat()
        except AttributeError:
            raise NotEncodable

    @staticmethod
    def encode_as_date(obj):
        """Attempt to convert to utc-iso time string using date methods."""
        try:
            time_string = obj.isoformat()
        except AttributeError:
            raise NotEncodable
        else:
            return iso_to_plotly_time_string(time_string)

    @staticmethod
    def encode_as_decimal(obj):
        """Attempt to encode decimal by converting it to float"""
        if isinstance(obj, decimal.Decimal):
            return float(obj)
        else:
            raise NotEncodable

    @staticmethod
    def encode_as_pil(obj):
        """Attempt to convert PIL.Image.Image to base64 data uri"""
        image = get_module("PIL.Image")
        if image is not None and isinstance(obj, image.Image):
            return ImageUriValidator.pil_image_to_uri(obj)
        else:
            raise NotEncodable
+
+
class NotEncodable(Exception):
    """Signal raised by `encode_as_*` helpers meaning "try the next encoder"."""
+
+
def iso_to_plotly_time_string(iso_string):
    """Remove timezone info and replace 'T' delimeter with ' ' (ws).

    :param (str) iso_string: An ISO-8601 formatted date/datetime string.
    :returns: (str) The string with UTC offset suffixes and the 'T'
        delimiter removed (a trailing 'T00:00:00' is dropped entirely).
    :raises (Exception): If the string carries a non-UTC offset suffix.
    """
    # make sure we don't send timezone info to plotly
    # BUG FIX: the old check (`iso_string.split("-")[:3] == "00:00"`) compared
    # a *list* to a string, so it could never be True, and
    # `split("+")[0] == "00:00"` only matched strings *starting* with "00:00".
    # Detect a trailing UTC offset (e.g. "+05:00") and reject anything that
    # is not UTC ("+00:00" / "-00:00").
    offset = re.search(r"[+-]\d{2}:\d{2}$", iso_string)
    if offset and offset.group(0) not in ("+00:00", "-00:00"):
        raise Exception(
            "Plotly won't accept timestrings with timezone info.\n"
            "All timestrings are assumed to be in UTC."
        )

    iso_string = iso_string.replace("-00:00", "").replace("+00:00", "")

    if iso_string.endswith("T00:00:00"):
        # A midnight timestamp reduces to a plain date.
        return iso_string.replace("T00:00:00", "")
    else:
        return iso_string.replace("T", " ")
+
+
def template_doc(**names):
    """Decorator factory that fills `{placeholder}` fields in docstrings.

    :param names: Keyword substitutions applied via `str.format` to the
        decorated function's docstring.
    """

    def _decorator(func):
        # Python 3.2 mishandled braces in docstrings, so skip formatting there.
        if sys.version_info[:2] != (3, 2) and func.__doc__ is not None:
            func.__doc__ = func.__doc__.format(**names)
        return func

    return _decorator
+
+
+def _natural_sort_strings(vals, reverse=False):
+ def key(v):
+ v_parts = re.split(r"(\d+)", v)
+ for i in range(len(v_parts)):
+ try:
+ v_parts[i] = int(v_parts[i])
+ except ValueError:
+ # not an int
+ pass
+ return tuple(v_parts)
+
+ return sorted(vals, key=key, reverse=reverse)
+
+
def _get_int_type():
    """Return the tuple of types treated as integers (numpy-aware when loaded)."""
    np = get_module("numpy", should_load=False)
    return (int, np.integer) if np else (int,)
+
+
def split_multichar(ss, chars):
    """
    Split all the strings in ss at any of the characters in chars.

    Note: `chars` is consumed (emptied) by this call, matching the original
    recursive implementation's use of `chars.pop()`.

    Example:

    >>> ss = ["a.string[0].with_separators"]
    >>> chars = list(".[]_")
    >>> split_multichar(ss, chars)
    ['a', 'string', '0', '', 'with', 'separators']

    :param (list) ss: A list of strings.
    :param (list) chars: Is a list of chars (note: not a string).
    """
    while chars:
        sep = chars.pop()
        ss = [piece for s in ss for piece in s.split(sep)]
    return ss
+
+
def split_string_positions(ss):
    """
    Given a list of strings split using split_multichar, return a list of
    integers representing the indices of the first character of every string in
    the original string.

    Example: for ['a', 'string', '0', '', 'with', 'separators'] (split from
    'a.string[0].with_separators') the result is [0, 2, 9, 11, 12, 17].

    :param (list) ss: A list of strings.
    """
    positions = []
    consumed = 0  # characters of the original string seen so far
    for idx, s in enumerate(ss):
        # `idx` accounts for the one-character separators between pieces.
        positions.append(idx + consumed)
        consumed += len(s)
    return positions
+
+
def display_string_positions(p, i=None, offset=0, length=1, char="^", trim=True):
    """
    Return a string that is whitespace except at p[i] which is replaced with char.
    If i is None then all the indices of the string in p are replaced with char.

    Example:

    >>> display_string_positions([0, 2, 9, 11, 12, 17], 4)
    '            ^'
    >>> display_string_positions([0, 2, 9, 11, 12, 17])
    '^ ^      ^ ^^    ^'

    :param (list) p: A list of integers.
    :param (integer|None) i: Optional index of p to display.
    :param (integer) offset: Allows adding a number of spaces to the replacement.
    :param (integer) length: Allows adding a replacement that is the char
                             repeated length times.
    :param (str) char: allows customizing the replacement character.
    :param (boolean) trim: trims the remaining whitespace if True.
    """
    buf = [" "] * (max(p) + 1 + offset + length)
    last = 0  # index of the most recently written marker character
    targets = p if i is None else [p[i]]
    for start in targets:
        for k in range(length):
            last = start + offset + k
            buf[last] = char
    rendered = "".join(buf)
    return rendered[: last + 1] if trim else rendered
+
+
def chomp_empty_strings(strings, c, reverse=False):
    """
    Given a list of strings, some of which are the empty string "", replace the
    empty strings with c and combine them with the closest non-empty string on
    the left or "" if it is the first string.
    Examples:
    for c="_"
    ['hey', '', 'why', '', '', 'whoa', '', ''] -> ['hey_', 'why__', 'whoa__']
    ['', 'hi', '', "I'm", 'bob', '', ''] -> ['_', 'hi_', "I'm", 'bob__']
    ['hi', "i'm", 'a', 'good', 'string'] -> ['hi', "i'm", 'a', 'good', 'string']
    Some special cases are:
    [] -> []
    [''] -> ['']
    ['', ''] -> ['_']
    ['', '', '', ''] -> ['___']
    If reverse is true, empty strings are combined with closest non-empty string
    on the right or "" if it is the last string.
    """

    def _mirror(vals):
        # Reverse the list and each string, so the left-combining pass below
        # implements right-combining after a second mirror.
        return [s[::-1] for s in vals][::-1]

    if reverse:
        return _mirror(chomp_empty_strings(_mirror(strings), c))
    if not strings:
        return strings
    if all(len(s) == 0 for s in strings):
        # All-empty input collapses to a single run of separators.
        return [c * (len(strings) - 1)]

    # Fold left: an empty string appends `c` to the previous accumulated
    # element; a non-empty string starts a new element. The leading ""
    # seed absorbs empties at the front and is filtered out if untouched.
    merged = [""]
    for s in strings:
        if s:
            merged.append(s)
        else:
            merged[-1] += c
    return [m for m in merged if m]
+
+
+# taken from
+# https://en.wikibooks.org/wiki/Algorithm_Implementation/Strings/Levenshtein_distance#Python
def levenshtein(s1, s2):
    """Compute the Levenshtein edit distance between two strings.

    Classic two-row dynamic-programming formulation; see
    https://en.wikibooks.org/wiki/Algorithm_Implementation/Strings/Levenshtein_distance#Python
    """
    # Keep s2 the shorter string so the DP row stays small.
    if len(s1) < len(s2):
        s1, s2 = s2, s1
    if not s2:
        return len(s1)

    row = list(range(len(s2) + 1))
    for i, c1 in enumerate(s1, start=1):
        next_row = [i]
        for j, c2 in enumerate(s2, start=1):
            substitution = row[j - 1] + (c1 != c2)
            insertion = row[j] + 1
            deletion = next_row[j - 1] + 1
            next_row.append(min(substitution, insertion, deletion))
        row = next_row
    return row[-1]
+
+
def find_closest_string(string, strings):
    """Return the element of `strings` nearest to `string` by edit distance.

    Ties on distance are broken lexicographically so results are stable.
    """
    # min() with the same (distance, value) key returns the same element as
    # sorting and taking the head, without building the sorted list.
    return min(strings, key=lambda s: (levenshtein(s, string), s))