nwo
stringlengths
10
28
sha
stringlengths
40
40
path
stringlengths
11
97
identifier
stringlengths
1
64
parameters
stringlengths
2
2.24k
return_statement
stringlengths
0
2.17k
docstring
stringlengths
0
5.45k
docstring_summary
stringlengths
0
3.83k
func_begin
int64
1
13.4k
func_end
int64
2
13.4k
function
stringlengths
28
56.4k
url
stringlengths
106
209
project
int64
1
48
executed_lines
list
executed_lines_pc
float64
0
153
missing_lines
list
missing_lines_pc
float64
0
100
covered
bool
2 classes
filecoverage
float64
2.53
100
function_lines
int64
2
1.46k
mccabe
int64
1
253
coverage
float64
0
100
docstring_lines
int64
0
112
function_nodoc
stringlengths
9
56.4k
id
int64
0
29.8k
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/data.py
PlotData.__init__
( self, data: DataSource, variables: dict[str, VariableSpec], )
49
64
def __init__( self, data: DataSource, variables: dict[str, VariableSpec], ): frame, names, ids = self._assign_variables(data, variables) self.frame = frame self.names = names self.ids = ids self.frames = {} # TODO this is a hack, remove self.source_data = data self.source_vars = variables
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/data.py#L49-L64
26
[ 0, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 ]
75
[]
0
false
98.913043
16
1
100
0
def __init__( self, data: DataSource, variables: dict[str, VariableSpec], ): frame, names, ids = self._assign_variables(data, variables) self.frame = frame self.names = names self.ids = ids self.frames = {} # TODO this is a hack, remove self.source_data = data self.source_vars = variables
19,295
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/data.py
PlotData.__contains__
(self, key: str)
return key in self.frame
Boolean check on whether a variable is defined in this dataset.
Boolean check on whether a variable is defined in this dataset.
66
70
def __contains__(self, key: str) -> bool: """Boolean check on whether a variable is defined in this dataset.""" if self.frame is None: return any(key in df for df in self.frames.values()) return key in self.frame
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/data.py#L66-L70
26
[ 0, 1, 2, 4 ]
80
[ 3 ]
20
false
98.913043
5
2
80
1
def __contains__(self, key: str) -> bool: if self.frame is None: return any(key in df for df in self.frames.values()) return key in self.frame
19,296
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/data.py
PlotData.join
( self, data: DataSource, variables: dict[str, VariableSpec] | None, )
return new
Add, replace, or drop variables and return as a new dataset.
Add, replace, or drop variables and return as a new dataset.
72
117
def join( self, data: DataSource, variables: dict[str, VariableSpec] | None, ) -> PlotData: """Add, replace, or drop variables and return as a new dataset.""" # Inherit the original source of the upsteam data by default if data is None: data = self.source_data # TODO allow `data` to be a function (that is called on the source data?) if not variables: variables = self.source_vars # Passing var=None implies that we do not want that variable in this layer disinherit = [k for k, v in variables.items() if v is None] # Create a new dataset with just the info passed here new = PlotData(data, variables) # -- Update the inherited DataSource with this new information drop_cols = [k for k in self.frame if k in new.frame or k in disinherit] parts = [self.frame.drop(columns=drop_cols), new.frame] # Because we are combining distinct columns, this is perhaps more # naturally thought of as a "merge"/"join". But using concat because # some simple testing suggests that it is marginally faster. frame = pd.concat(parts, axis=1, sort=False, copy=False) names = {k: v for k, v in self.names.items() if k not in disinherit} names.update(new.names) ids = {k: v for k, v in self.ids.items() if k not in disinherit} ids.update(new.ids) new.frame = frame new.names = names new.ids = ids # Multiple chained operations should always inherit from the original object new.source_data = self.source_data new.source_vars = self.source_vars return new
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/data.py#L72-L117
26
[ 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45 ]
89.130435
[]
0
false
98.913043
46
6
100
1
def join( self, data: DataSource, variables: dict[str, VariableSpec] | None, ) -> PlotData: # Inherit the original source of the upsteam data by default if data is None: data = self.source_data # TODO allow `data` to be a function (that is called on the source data?) if not variables: variables = self.source_vars # Passing var=None implies that we do not want that variable in this layer disinherit = [k for k, v in variables.items() if v is None] # Create a new dataset with just the info passed here new = PlotData(data, variables) # -- Update the inherited DataSource with this new information drop_cols = [k for k in self.frame if k in new.frame or k in disinherit] parts = [self.frame.drop(columns=drop_cols), new.frame] # Because we are combining distinct columns, this is perhaps more # naturally thought of as a "merge"/"join". But using concat because # some simple testing suggests that it is marginally faster. frame = pd.concat(parts, axis=1, sort=False, copy=False) names = {k: v for k, v in self.names.items() if k not in disinherit} names.update(new.names) ids = {k: v for k, v in self.ids.items() if k not in disinherit} ids.update(new.ids) new.frame = frame new.names = names new.ids = ids # Multiple chained operations should always inherit from the original object new.source_data = self.source_data new.source_vars = self.source_vars return new
19,297
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/data.py
PlotData._assign_variables
( self, data: DataSource, variables: dict[str, VariableSpec], )
return frame, names, ids
Assign values for plot variables given long-form data and/or vector inputs. Parameters ---------- data Input data where variable names map to vector values. variables Keys are names of plot variables (x, y, ...) each value is one of: - name of a column (or index level, or dictionary entry) in `data` - vector in any format that can construct a :class:`pandas.DataFrame` Returns ------- frame Table mapping seaborn variables (x, y, color, ...) to data vectors. names Keys are defined seaborn variables; values are names inferred from the inputs (or None when no name can be determined). ids Like the `names` dict, but `None` values are replaced by the `id()` of the data object that defined the variable. Raises ------ ValueError When variables are strings that don't appear in `data`, or when they are non-indexed vector datatypes that have a different length from `data`.
Assign values for plot variables given long-form data and/or vector inputs.
119
260
def _assign_variables( self, data: DataSource, variables: dict[str, VariableSpec], ) -> tuple[DataFrame, dict[str, str | None], dict[str, str | int]]: """ Assign values for plot variables given long-form data and/or vector inputs. Parameters ---------- data Input data where variable names map to vector values. variables Keys are names of plot variables (x, y, ...) each value is one of: - name of a column (or index level, or dictionary entry) in `data` - vector in any format that can construct a :class:`pandas.DataFrame` Returns ------- frame Table mapping seaborn variables (x, y, color, ...) to data vectors. names Keys are defined seaborn variables; values are names inferred from the inputs (or None when no name can be determined). ids Like the `names` dict, but `None` values are replaced by the `id()` of the data object that defined the variable. Raises ------ ValueError When variables are strings that don't appear in `data`, or when they are non-indexed vector datatypes that have a different length from `data`. """ source_data: Mapping | DataFrame frame: DataFrame names: dict[str, str | None] ids: dict[str, str | int] plot_data = {} names = {} ids = {} given_data = data is not None if data is not None: source_data = data else: # Data is optional; all variables can be defined as vectors # But simplify downstream code by always having a usable source data object source_data = {} # TODO Generally interested in accepting a generic DataFrame interface # Track https://data-apis.org/ for development # Variables can also be extracted from the index of a DataFrame if isinstance(source_data, pd.DataFrame): index = source_data.index.to_frame().to_dict("series") else: index = {} for key, val in variables.items(): # Simply ignore variables with no specification if val is None: continue # Try to treat the argument as a key for the data collection. # But be flexible about what can be used as a key. # Usually it will be a string, but allow other hashables when # taking from the main data object. Allow only strings to reference # fields in the index, because otherwise there is too much ambiguity. # TODO this will be rendered unnecessary by the following pandas fix: # https://github.com/pandas-dev/pandas/pull/41283 try: hash(val) val_is_hashable = True except TypeError: val_is_hashable = False val_as_data_key = ( # See https://github.com/pandas-dev/pandas/pull/41283 # (isinstance(val, abc.Hashable) and val in source_data) (val_is_hashable and val in source_data) or (isinstance(val, str) and val in index) ) if val_as_data_key: val = cast(ColumnName, val) if val in source_data: plot_data[key] = source_data[val] elif val in index: plot_data[key] = index[val] names[key] = ids[key] = str(val) elif isinstance(val, str): # This looks like a column name but, lookup failed. err = f"Could not interpret value `{val}` for `{key}`. " if not given_data: err += "Value is a string, but `data` was not passed." else: err += "An entry with this name does not appear in `data`." 
raise ValueError(err) else: # Otherwise, assume the value somehow represents data # Ignore empty data structures if isinstance(val, Sized) and len(val) == 0: continue # If vector has no index, it must match length of data table if isinstance(data, pd.DataFrame) and not isinstance(val, pd.Series): if isinstance(val, Sized) and len(data) != len(val): val_cls = val.__class__.__name__ err = ( f"Length of {val_cls} vectors must match length of `data`" f" when both are used, but `data` has length {len(data)}" f" and the vector passed to `{key}` has length {len(val)}." ) raise ValueError(err) plot_data[key] = val # Try to infer the original name using pandas-like metadata if hasattr(val, "name"): names[key] = ids[key] = str(val.name) # type: ignore # mypy/1424 else: names[key] = None ids[key] = id(val) # Construct a tidy plot DataFrame. This will convert a number of # types automatically, aligning on index in case of pandas objects # TODO Note: this fails when variable specs *only* have scalars! frame = pd.DataFrame(plot_data) return frame, names, ids
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/data.py#L119-L260
26
[ 0, 40, 41, 42, 43, 44, 45, 46, 47, 50, 51, 52, 53, 54, 55, 56, 57, 58, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 105, 106, 107, 112, 113, 114, 115, 116, 117, 118, 119, 120, 125, 126, 127, 128, 129, 130, 131, 133, 134, 135, 136, 137, 138, 139, 140, 141 ]
61.267606
[]
0
false
98.913043
142
21
100
28
def _assign_variables( self, data: DataSource, variables: dict[str, VariableSpec], ) -> tuple[DataFrame, dict[str, str | None], dict[str, str | int]]: source_data: Mapping | DataFrame frame: DataFrame names: dict[str, str | None] ids: dict[str, str | int] plot_data = {} names = {} ids = {} given_data = data is not None if data is not None: source_data = data else: # Data is optional; all variables can be defined as vectors # But simplify downstream code by always having a usable source data object source_data = {} # TODO Generally interested in accepting a generic DataFrame interface # Track https://data-apis.org/ for development # Variables can also be extracted from the index of a DataFrame if isinstance(source_data, pd.DataFrame): index = source_data.index.to_frame().to_dict("series") else: index = {} for key, val in variables.items(): # Simply ignore variables with no specification if val is None: continue # Try to treat the argument as a key for the data collection. # But be flexible about what can be used as a key. # Usually it will be a string, but allow other hashables when # taking from the main data object. Allow only strings to reference # fields in the index, because otherwise there is too much ambiguity. # TODO this will be rendered unnecessary by the following pandas fix: # https://github.com/pandas-dev/pandas/pull/41283 try: hash(val) val_is_hashable = True except TypeError: val_is_hashable = False val_as_data_key = ( # See https://github.com/pandas-dev/pandas/pull/41283 # (isinstance(val, abc.Hashable) and val in source_data) (val_is_hashable and val in source_data) or (isinstance(val, str) and val in index) ) if val_as_data_key: val = cast(ColumnName, val) if val in source_data: plot_data[key] = source_data[val] elif val in index: plot_data[key] = index[val] names[key] = ids[key] = str(val) elif isinstance(val, str): # This looks like a column name but, lookup failed. err = f"Could not interpret value `{val}` for `{key}`. " if not given_data: err += "Value is a string, but `data` was not passed." else: err += "An entry with this name does not appear in `data`." raise ValueError(err) else: # Otherwise, assume the value somehow represents data # Ignore empty data structures if isinstance(val, Sized) and len(val) == 0: continue # If vector has no index, it must match length of data table if isinstance(data, pd.DataFrame) and not isinstance(val, pd.Series): if isinstance(val, Sized) and len(data) != len(val): val_cls = val.__class__.__name__ err = ( f"Length of {val_cls} vectors must match length of `data`" f" when both are used, but `data` has length {len(data)}" f" and the vector passed to `{key}` has length {len(val)}." ) raise ValueError(err) plot_data[key] = val # Try to infer the original name using pandas-like metadata if hasattr(val, "name"): names[key] = ids[key] = str(val.name) # type: ignore # mypy/1424 else: names[key] = None ids[key] = id(val) # Construct a tidy plot DataFrame. This will convert a number of # types automatically, aligning on index in case of pandas objects # TODO Note: this fails when variable specs *only* have scalars! frame = pd.DataFrame(plot_data) return frame, names, ids
19,298
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/exceptions.py
PlotSpecError._during
(cls, step: str, var: str = "")
return cls(" ".join(message))
Initialize the class to report the failure of a specific operation.
Initialize the class to report the failure of a specific operation.
22
32
def _during(cls, step: str, var: str = "") -> PlotSpecError: """ Initialize the class to report the failure of a specific operation. """ message = [] if var: message.append(f"{step} failed for the `{var}` variable.") else: message.append(f"{step} failed.") message.append("See the traceback above for more information.") return cls(" ".join(message))
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/exceptions.py#L22-L32
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 9, 10 ]
90.909091
[ 8 ]
9.090909
false
90
11
2
90.909091
1
def _during(cls, step: str, var: str = "") -> PlotSpecError: message = [] if var: message.append(f"{step} failed for the `{var}` variable.") else: message.append(f"{step} failed.") message.append("See the traceback above for more information.") return cls(" ".join(message))
19,299
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
_make_identity_transforms
()
return identity, identity
1,001
1,006
def _make_identity_transforms() -> TransFuncs: def identity(x): return x return identity, identity
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L1001-L1006
26
[ 0, 1, 2, 3, 4, 5 ]
100
[]
0
true
90.258449
6
2
100
0
def _make_identity_transforms() -> TransFuncs: def identity(x): return x return identity, identity
19,300
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
_make_logit_transforms
(base: float | None = None)
return logit, expit
1,009
1,021
def _make_logit_transforms(base: float | None = None) -> TransFuncs: log, exp = _make_log_transforms(base) def logit(x): with np.errstate(invalid="ignore", divide="ignore"): return log(x) - log(1 - x) def expit(x): with np.errstate(invalid="ignore", divide="ignore"): return exp(x) / (1 + exp(x)) return logit, expit
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L1009-L1021
26
[ 0, 1 ]
15.384615
[ 2, 4, 5, 6, 8, 9, 10, 12 ]
61.538462
false
90.258449
13
5
38.461538
0
def _make_logit_transforms(base: float | None = None) -> TransFuncs: log, exp = _make_log_transforms(base) def logit(x): with np.errstate(invalid="ignore", divide="ignore"): return log(x) - log(1 - x) def expit(x): with np.errstate(invalid="ignore", divide="ignore"): return exp(x) / (1 + exp(x)) return logit, expit
19,301
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
_make_log_transforms
(base: float | None = None)
return log, exp
1,024
1,046
def _make_log_transforms(base: float | None = None) -> TransFuncs: fs: TransFuncs if base is None: fs = np.log, np.exp elif base == 2: fs = np.log2, partial(np.power, 2) elif base == 10: fs = np.log10, partial(np.power, 10) else: def forward(x): return np.log(x) / np.log(base) fs = forward, partial(np.power, base) def log(x: ArrayLike) -> ArrayLike: with np.errstate(invalid="ignore", divide="ignore"): return fs[0](x) def exp(x: ArrayLike) -> ArrayLike: with np.errstate(invalid="ignore", divide="ignore"): return fs[1](x) return log, exp
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L1024-L1046
26
[ 0, 1, 3, 5, 7, 8, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22 ]
69.565217
[ 4, 6, 10, 11, 12 ]
21.73913
false
90.258449
23
9
78.26087
0
def _make_log_transforms(base: float | None = None) -> TransFuncs: fs: TransFuncs if base is None: fs = np.log, np.exp elif base == 2: fs = np.log2, partial(np.power, 2) elif base == 10: fs = np.log10, partial(np.power, 10) else: def forward(x): return np.log(x) / np.log(base) fs = forward, partial(np.power, base) def log(x: ArrayLike) -> ArrayLike: with np.errstate(invalid="ignore", divide="ignore"): return fs[0](x) def exp(x: ArrayLike) -> ArrayLike: with np.errstate(invalid="ignore", divide="ignore"): return fs[1](x) return log, exp
19,302
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
_make_symlog_transforms
(c: float = 1, base: float = 10)
return symlog, symexp
1,049
1,066
def _make_symlog_transforms(c: float = 1, base: float = 10) -> TransFuncs: # From https://iopscience.iop.org/article/10.1088/0957-0233/24/2/027001 # Note: currently not using base because we only get # one parameter from the string, and are using c (this is consistent with d3) log, exp = _make_log_transforms(base) def symlog(x): with np.errstate(invalid="ignore", divide="ignore"): return np.sign(x) * log(1 + np.abs(np.divide(x, c))) def symexp(x): with np.errstate(invalid="ignore", divide="ignore"): return np.sign(x) * c * (exp(np.abs(x)) - 1) return symlog, symexp
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L1049-L1066
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 13, 16, 17 ]
77.777778
[ 10, 11, 14, 15 ]
22.222222
false
90.258449
18
5
77.777778
0
def _make_symlog_transforms(c: float = 1, base: float = 10) -> TransFuncs: # From https://iopscience.iop.org/article/10.1088/0957-0233/24/2/027001 # Note: currently not using base because we only get # one parameter from the string, and are using c (this is consistent with d3) log, exp = _make_log_transforms(base) def symlog(x): with np.errstate(invalid="ignore", divide="ignore"): return np.sign(x) * log(1 + np.abs(np.divide(x, c))) def symexp(x): with np.errstate(invalid="ignore", divide="ignore"): return np.sign(x) * c * (exp(np.abs(x)) - 1) return symlog, symexp
19,303
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
_make_sqrt_transforms
()
return sqrt, square
1,069
1,077
def _make_sqrt_transforms() -> TransFuncs: def sqrt(x): return np.sign(x) * np.sqrt(np.abs(x)) def square(x): return np.sign(x) * np.square(x) return sqrt, square
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L1069-L1077
26
[ 0, 1 ]
22.222222
[ 2, 3, 5, 6, 8 ]
55.555556
false
90.258449
9
3
44.444444
0
def _make_sqrt_transforms() -> TransFuncs: def sqrt(x): return np.sign(x) * np.sqrt(np.abs(x)) def square(x): return np.sign(x) * np.square(x) return sqrt, square
19,304
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
_make_power_transforms
(exp: float)
return forward, inverse
1,080
1,088
def _make_power_transforms(exp: float) -> TransFuncs: def forward(x): return np.sign(x) * np.power(np.abs(x), exp) def inverse(x): return np.sign(x) * np.power(np.abs(x), 1 / exp) return forward, inverse
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L1080-L1088
26
[ 0, 1, 2, 3, 4, 5, 7, 8 ]
88.888889
[ 6 ]
11.111111
false
90.258449
9
3
88.888889
0
def _make_power_transforms(exp: float) -> TransFuncs: def forward(x): return np.sign(x) * np.power(np.abs(x), exp) def inverse(x): return np.sign(x) * np.power(np.abs(x), 1 / exp) return forward, inverse
19,305
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
_default_spacer
(x: Series)
return 1
1,091
1,092
def _default_spacer(x: Series) -> float: return 1
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L1091-L1092
26
[ 0, 1 ]
100
[]
0
true
90.258449
2
1
100
0
def _default_spacer(x: Series) -> float: return 1
19,306
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
Scale.__post_init__
(self)
66
70
def __post_init__(self): self._tick_params = None self._label_params = None self._legend = None
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L66-L70
26
[ 0, 1, 2, 3, 4 ]
100
[]
0
true
90.258449
5
1
100
0
def __post_init__(self): self._tick_params = None self._label_params = None self._legend = None
19,307
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
Scale._get_scale
(self, name: str, forward: Callable, inverse: Callable)
return InternalScale(name, (forward, inverse))
84
96
def _get_scale(self, name: str, forward: Callable, inverse: Callable): major_locator, minor_locator = self._get_locators(**self._tick_params) major_formatter = self._get_formatter(major_locator, **self._label_params) class InternalScale(mpl.scale.FuncScale): def set_default_locators_and_formatters(self, axis): axis.set_major_locator(major_locator) if minor_locator is not None: axis.set_minor_locator(minor_locator) axis.set_major_formatter(major_formatter) return InternalScale(name, (forward, inverse))
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L84-L96
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 ]
100
[]
0
true
90.258449
13
3
100
0
def _get_scale(self, name: str, forward: Callable, inverse: Callable): major_locator, minor_locator = self._get_locators(**self._tick_params) major_formatter = self._get_formatter(major_locator, **self._label_params) class InternalScale(mpl.scale.FuncScale): def set_default_locators_and_formatters(self, axis): axis.set_major_locator(major_locator) if minor_locator is not None: axis.set_minor_locator(minor_locator) axis.set_major_formatter(major_formatter) return InternalScale(name, (forward, inverse))
19,308
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
Scale._spacing
(self, x: Series)
return space
98
104
def _spacing(self, x: Series) -> float: space = self._spacer(x) if np.isnan(space): # This happens when there is no variance in the orient coordinate data # Not exactly clear what the right default is, but 1 seems reasonable? return 1 return space
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L98-L104
26
[ 0, 1, 2, 3, 4, 5, 6 ]
100
[]
0
true
90.258449
7
2
100
0
def _spacing(self, x: Series) -> float: space = self._spacer(x) if np.isnan(space): # This happens when there is no variance in the orient coordinate data # Not exactly clear what the right default is, but 1 seems reasonable? return 1 return space
19,309
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
Scale._finalize
(self, p: Plot, axis: Axis)
Perform scale-specific axis tweaks after adding artists.
Perform scale-specific axis tweaks after adding artists.
111
113
def _finalize(self, p: Plot, axis: Axis) -> None: """Perform scale-specific axis tweaks after adding artists.""" pass
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L111-L113
26
[ 0, 1, 2 ]
100
[]
0
true
90.258449
3
1
100
1
def _finalize(self, p: Plot, axis: Axis) -> None: pass
19,310
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
Scale.__call__
(self, data: Series)
115
134
def __call__(self, data: Series) -> ArrayLike: trans_data: Series | NDArray | list # TODO sometimes we need to handle scalars (e.g. for Line) # but what is the best way to do that? scalar_data = np.isscalar(data) if scalar_data: trans_data = np.array([data]) else: trans_data = data for func in self._pipeline: if func is not None: trans_data = func(trans_data) if scalar_data: return trans_data[0] else: return trans_data
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L115-L134
26
[ 0, 1, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 19 ]
75
[]
0
false
90.258449
20
5
100
0
def __call__(self, data: Series) -> ArrayLike: trans_data: Series | NDArray | list # TODO sometimes we need to handle scalars (e.g. for Line) # but what is the best way to do that? scalar_data = np.isscalar(data) if scalar_data: trans_data = np.array([data]) else: trans_data = data for func in self._pipeline: if func is not None: trans_data = func(trans_data) if scalar_data: return trans_data[0] else: return trans_data
19,311
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
Scale._identity
()
return Identity()
137
145
def _identity(): class Identity(Scale): _pipeline = [] _spacer = None _legend = None _matplotlib_scale = None return Identity()
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L137-L145
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8 ]
100
[]
0
true
90.258449
9
1
100
0
def _identity(): class Identity(Scale): _pipeline = [] _spacer = None _legend = None _matplotlib_scale = None return Identity()
19,312
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.__init__
(self, scale)
903
915
def __init__(self, scale): self.converter = None self.units = None self.scale = scale self.major = mpl.axis.Ticker() self.minor = mpl.axis.Ticker() # It appears that this needs to be initialized this way on matplotlib 3.1, # but not later versions. It is unclear whether there are any issues with it. self._data_interval = None, None scale.set_default_locators_and_formatters(self)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L903-L915
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 ]
100
[]
0
true
90.258449
13
1
100
0
def __init__(self, scale): self.converter = None self.units = None self.scale = scale self.major = mpl.axis.Ticker() self.minor = mpl.axis.Ticker() # It appears that this needs to be initialized this way on matplotlib 3.1, # but not later versions. It is unclear whether there are any issues with it. self._data_interval = None, None scale.set_default_locators_and_formatters(self)
19,313
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.set_view_interval
(self, vmin, vmax)
918
919
def set_view_interval(self, vmin, vmax): self._view_interval = vmin, vmax
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L918-L919
26
[ 0, 1 ]
100
[]
0
true
90.258449
2
1
100
0
def set_view_interval(self, vmin, vmax): self._view_interval = vmin, vmax
19,314
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.get_view_interval
(self)
return self._view_interval
921
922
def get_view_interval(self): return self._view_interval
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L921-L922
26
[ 0, 1 ]
100
[]
0
true
90.258449
2
1
100
0
def get_view_interval(self): return self._view_interval
19,315
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.set_data_interval
(self, vmin, vmax)
929
930
def set_data_interval(self, vmin, vmax): self._data_interval = vmin, vmax
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L929-L930
26
[ 0 ]
50
[ 1 ]
50
false
90.258449
2
1
50
0
def set_data_interval(self, vmin, vmax): self._data_interval = vmin, vmax
19,316
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.get_data_interval
(self)
return self._data_interval
932
933
def get_data_interval(self): return self._data_interval
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L932-L933
26
[ 0 ]
50
[ 1 ]
50
false
90.258449
2
1
50
0
def get_data_interval(self): return self._data_interval
19,317
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.get_tick_space
(self)
return 5
935
938
def get_tick_space(self): # TODO how to do this in a configurable / auto way? # Would be cool to have legend density adapt to figure size, etc. return 5
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L935-L938
26
[ 0, 1, 2, 3 ]
100
[]
0
true
90.258449
4
1
100
0
def get_tick_space(self): # TODO how to do this in a configurable / auto way? # Would be cool to have legend density adapt to figure size, etc. return 5
19,318
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.set_major_locator
(self, locator)
940
942
def set_major_locator(self, locator): self.major.locator = locator locator.set_axis(self)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L940-L942
26
[ 0, 1, 2 ]
100
[]
0
true
90.258449
3
1
100
0
def set_major_locator(self, locator): self.major.locator = locator locator.set_axis(self)
19,319
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.set_major_formatter
(self, formatter)
944
946
def set_major_formatter(self, formatter): self.major.formatter = formatter formatter.set_axis(self)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L944-L946
26
[ 0, 1, 2 ]
100
[]
0
true
90.258449
3
1
100
0
def set_major_formatter(self, formatter): self.major.formatter = formatter formatter.set_axis(self)
19,320
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.set_minor_locator
(self, locator)
948
950
def set_minor_locator(self, locator): self.minor.locator = locator locator.set_axis(self)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L948-L950
26
[ 0, 1, 2 ]
100
[]
0
true
90.258449
3
1
100
0
def set_minor_locator(self, locator): self.minor.locator = locator locator.set_axis(self)
19,321
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.set_minor_formatter
(self, formatter)
952
954
def set_minor_formatter(self, formatter): self.minor.formatter = formatter formatter.set_axis(self)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L952-L954
26
[ 0 ]
33.333333
[ 1, 2 ]
66.666667
false
90.258449
3
1
33.333333
0
def set_minor_formatter(self, formatter): self.minor.formatter = formatter formatter.set_axis(self)
19,322
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.set_units
(self, units)
956
957
def set_units(self, units): self.units = units
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L956-L957
26
[ 0, 1 ]
100
[]
0
true
90.258449
2
1
100
0
def set_units(self, units): self.units = units
19,323
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.update_units
(self, x)
Pass units to the internal converter, potentially updating its mapping.
Pass units to the internal converter, potentially updating its mapping.
959
972
def update_units(self, x): """Pass units to the internal converter, potentially updating its mapping.""" self.converter = mpl.units.registry.get_converter(x) if self.converter is not None: self.converter.default_units(x, self) info = self.converter.axisinfo(self.units, self) if info is None: return if info.majloc is not None: self.set_major_locator(info.majloc) if info.majfmt is not None: self.set_major_formatter(info.majfmt)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L959-L972
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13 ]
92.857143
[ 9 ]
7.142857
false
90.258449
14
5
92.857143
1
def update_units(self, x): self.converter = mpl.units.registry.get_converter(x) if self.converter is not None: self.converter.default_units(x, self) info = self.converter.axisinfo(self.units, self) if info is None: return if info.majloc is not None: self.set_major_locator(info.majloc) if info.majfmt is not None: self.set_major_formatter(info.majfmt)
19,324
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.convert_units
(self, x)
return self.converter.convert(x, self.units, self)
Return a numeric representation of the input data.
Return a numeric representation of the input data.
977
983
def convert_units(self, x): """Return a numeric representation of the input data.""" if np.issubdtype(np.asarray(x).dtype, np.number): return x elif self.converter is None: return x return self.converter.convert(x, self.units, self)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L977-L983
26
[ 0, 1, 2, 3, 4, 5, 6 ]
100
[]
0
true
90.258449
7
3
100
1
def convert_units(self, x): if np.issubdtype(np.asarray(x).dtype, np.number): return x elif self.converter is None: return x return self.converter.convert(x, self.units, self)
19,325
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.get_scale
(self)
return self.scale
985
991
def get_scale(self): # Note that matplotlib actually returns a string here! # (e.g., with a log scale, axis.get_scale() returns "log") # Currently we just hit it with minor ticks where it checks for # scale == "log". I'm not sure how you'd actually use log-scale # minor "ticks" in a legend context, so this is fine.... return self.scale
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L985-L991
26
[ 0, 1, 2, 3, 4, 5, 6 ]
100
[]
0
true
90.258449
7
1
100
0
def get_scale(self): # Note that matplotlib actually returns a string here! # (e.g., with a log scale, axis.get_scale() returns "log") # Currently we just hit it with minor ticks where it checks for # scale == "log". I'm not sure how you'd actually use log-scale # minor "ticks" in a legend context, so this is fine.... return self.scale
19,326
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/scales.py
PseudoAxis.get_majorticklocs
(self)
return self.major.locator()
993
994
def get_majorticklocs(self): return self.major.locator()
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/scales.py#L993-L994
26
[ 0, 1 ]
100
[]
0
true
90.258449
2
1
100
0
def get_majorticklocs(self): return self.major.locator()
19,327
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/rules.py
variable_type
( vector: Series, boolean_type: Literal["numeric", "categorical", "boolean"] = "numeric", strict_boolean: bool = False, )
return VarType("categorical")
Determine whether a vector contains numeric, categorical, or datetime data. This function differs from the pandas typing API in a few ways: - Python sequences or object-typed PyData objects are considered numeric if all of their entries are numeric. - String or mixed-type data are considered categorical even if not explicitly represented as a :class:`pandas.api.types.CategoricalDtype`. - There is some flexibility about how to treat binary / boolean data. Parameters ---------- vector : :func:`pandas.Series`, :func:`numpy.ndarray`, or Python sequence Input data to test. boolean_type : 'numeric', 'categorical', or 'boolean' Type to use for vectors containing only 0s and 1s (and NAs). strict_boolean : bool If True, only consider data to be boolean when the dtype is bool or Boolean. Returns ------- var_type : 'numeric', 'categorical', or 'datetime' Name identifying the type of data in the vector.
Determine whether a vector contains numeric, categorical, or datetime data.
38
134
def variable_type( vector: Series, boolean_type: Literal["numeric", "categorical", "boolean"] = "numeric", strict_boolean: bool = False, ) -> VarType: """ Determine whether a vector contains numeric, categorical, or datetime data. This function differs from the pandas typing API in a few ways: - Python sequences or object-typed PyData objects are considered numeric if all of their entries are numeric. - String or mixed-type data are considered categorical even if not explicitly represented as a :class:`pandas.api.types.CategoricalDtype`. - There is some flexibility about how to treat binary / boolean data. Parameters ---------- vector : :func:`pandas.Series`, :func:`numpy.ndarray`, or Python sequence Input data to test. boolean_type : 'numeric', 'categorical', or 'boolean' Type to use for vectors containing only 0s and 1s (and NAs). strict_boolean : bool If True, only consider data to be boolean when the dtype is bool or Boolean. Returns ------- var_type : 'numeric', 'categorical', or 'datetime' Name identifying the type of data in the vector. """ # If a categorical dtype is set, infer categorical if pd.api.types.is_categorical_dtype(vector): return VarType("categorical") # Special-case all-na data, which is always "numeric" if pd.isna(vector).all(): return VarType("numeric") # Special-case binary/boolean data, allow caller to determine # This triggers a numpy warning when vector has strings/objects # https://github.com/numpy/numpy/issues/6784 # Because we reduce with .all(), we are agnostic about whether the # comparison returns a scalar or vector, so we will ignore the warning. # It triggers a separate DeprecationWarning when the vector has datetimes: # https://github.com/numpy/numpy/issues/13548 # This is considered a bug by numpy and will likely go away. with warnings.catch_warnings(): warnings.simplefilter( action='ignore', category=(FutureWarning, DeprecationWarning) # type: ignore # mypy bug? ) if strict_boolean: if isinstance(vector.dtype, pd.core.dtypes.base.ExtensionDtype): boolean_dtypes = ["bool", "boolean"] else: boolean_dtypes = ["bool"] boolean_vector = vector.dtype in boolean_dtypes else: boolean_vector = bool(np.isin(vector, [0, 1, np.nan]).all()) if boolean_vector: return VarType(boolean_type) # Defer to positive pandas tests if pd.api.types.is_numeric_dtype(vector): return VarType("numeric") if pd.api.types.is_datetime64_dtype(vector): return VarType("datetime") # --- If we get to here, we need to check the entries # Check for a collection where everything is a number def all_numeric(x): for x_i in x: if not isinstance(x_i, Number): return False return True if all_numeric(vector): return VarType("numeric") # Check for a collection where everything is a datetime def all_datetime(x): for x_i in x: if not isinstance(x_i, (datetime, np.datetime64)): return False return True if all_datetime(vector): return VarType("datetime") # Otherwise, our final fallback is to consider things categorical return VarType("categorical")
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/rules.py#L38-L134
26
[ 0, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 52, 53, 56, 57, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96 ]
62.886598
[ 54 ]
1.030928
false
98.305085
97
17
98.969072
23
def variable_type( vector: Series, boolean_type: Literal["numeric", "categorical", "boolean"] = "numeric", strict_boolean: bool = False, ) -> VarType: # If a categorical dtype is set, infer categorical if pd.api.types.is_categorical_dtype(vector): return VarType("categorical") # Special-case all-na data, which is always "numeric" if pd.isna(vector).all(): return VarType("numeric") # Special-case binary/boolean data, allow caller to determine # This triggers a numpy warning when vector has strings/objects # https://github.com/numpy/numpy/issues/6784 # Because we reduce with .all(), we are agnostic about whether the # comparison returns a scalar or vector, so we will ignore the warning. # It triggers a separate DeprecationWarning when the vector has datetimes: # https://github.com/numpy/numpy/issues/13548 # This is considered a bug by numpy and will likely go away. with warnings.catch_warnings(): warnings.simplefilter( action='ignore', category=(FutureWarning, DeprecationWarning) # type: ignore # mypy bug? ) if strict_boolean: if isinstance(vector.dtype, pd.core.dtypes.base.ExtensionDtype): boolean_dtypes = ["bool", "boolean"] else: boolean_dtypes = ["bool"] boolean_vector = vector.dtype in boolean_dtypes else: boolean_vector = bool(np.isin(vector, [0, 1, np.nan]).all()) if boolean_vector: return VarType(boolean_type) # Defer to positive pandas tests if pd.api.types.is_numeric_dtype(vector): return VarType("numeric") if pd.api.types.is_datetime64_dtype(vector): return VarType("datetime") # --- If we get to here, we need to check the entries # Check for a collection where everything is a number def all_numeric(x): for x_i in x: if not isinstance(x_i, Number): return False return True if all_numeric(vector): return VarType("numeric") # Check for a collection where everything is a datetime def all_datetime(x): for x_i in x: if not isinstance(x_i, (datetime, np.datetime64)): return False return True if all_datetime(vector): return VarType("datetime") # Otherwise, our final fallback is to consider things categorical return VarType("categorical")
19,328
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/rules.py
categorical_order
(vector: Series, order: list | None = None)
return order
Return a list of unique data values using seaborn's ordering rules. Parameters ---------- vector : Series Vector of "categorical" values order : list Desired order of category levels to override the order determined from the `data` object. Returns ------- order : list Ordered list of category levels not including null values.
Return a list of unique data values using seaborn's ordering rules.
137
165
def categorical_order(vector: Series, order: list | None = None) -> list: """ Return a list of unique data values using seaborn's ordering rules. Parameters ---------- vector : Series Vector of "categorical" values order : list Desired order of category levels to override the order determined from the `data` object. Returns ------- order : list Ordered list of category levels not including null values. """ if order is not None: return order if vector.dtype.name == "category": order = list(vector.cat.categories) else: order = list(filter(pd.notnull, vector.unique())) if variable_type(pd.Series(order)) == "numeric": order.sort() return order
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/rules.py#L137-L165
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28 ]
100
[]
0
true
98.305085
29
4
100
14
def categorical_order(vector: Series, order: list | None = None) -> list: if order is not None: return order if vector.dtype.name == "category": order = list(vector.cat.categories) else: order = list(filter(pd.notnull, vector.unique())) if variable_type(pd.Series(order)) == "numeric": order.sort() return order
19,329
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/rules.py
VarType.__init__
(self, data)
29
31
def __init__(self, data): assert data in self.allowed, data super().__init__(data)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/rules.py#L29-L31
26
[ 0, 1, 2 ]
100
[]
0
true
98.305085
3
2
100
0
def __init__(self, data): assert data in self.allowed, data super().__init__(data)
19,330
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/rules.py
VarType.__eq__
(self, other)
return self.data == other
33
35
def __eq__(self, other): assert other in self.allowed, other return self.data == other
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/rules.py#L33-L35
26
[ 0, 1, 2 ]
100
[]
0
true
98.305085
3
2
100
0
def __eq__(self, other): assert other in self.allowed, other return self.data == other
19,331
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Property.__init__
(self, variable: str | None = None)
Initialize the property with the name of the corresponding plot variable.
Initialize the property with the name of the corresponding plot variable.
58
62
def __init__(self, variable: str | None = None): """Initialize the property with the name of the corresponding plot variable.""" if not variable: variable = self.__class__.__name__.lower() self.variable = variable
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L58-L62
26
[ 0, 1, 2, 3, 4 ]
100
[]
0
true
99.058824
5
2
100
1
def __init__(self, variable: str | None = None): if not variable: variable = self.__class__.__name__.lower() self.variable = variable
19,332
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Property.default_scale
(self, data: Series)
Given data, initialize appropriate scale class.
Given data, initialize appropriate scale class.
64
75
def default_scale(self, data: Series) -> Scale: """Given data, initialize appropriate scale class.""" var_type = variable_type(data, boolean_type="boolean", strict_boolean=True) if var_type == "numeric": return Continuous() elif var_type == "datetime": return Temporal() elif var_type == "boolean": return Boolean() else: return Nominal()
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L64-L75
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 ]
100
[]
0
true
99.058824
12
4
100
1
def default_scale(self, data: Series) -> Scale: var_type = variable_type(data, boolean_type="boolean", strict_boolean=True) if var_type == "numeric": return Continuous() elif var_type == "datetime": return Temporal() elif var_type == "boolean": return Boolean() else: return Nominal()
19,333
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Property.infer_scale
(self, arg: Any, data: Series)
Given data and a scaling argument, initialize appropriate scale class.
Given data and a scaling argument, initialize appropriate scale class.
77
95
def infer_scale(self, arg: Any, data: Series) -> Scale: """Given data and a scaling argument, initialize appropriate scale class.""" # TODO put these somewhere external for validation # TODO putting this here won't pick it up if subclasses define infer_scale # (e.g. color). How best to handle that? One option is to call super after # handling property-specific possibilities (e.g. for color check that the # arg is not a valid palette name) but that could get tricky. trans_args = ["log", "symlog", "logit", "pow", "sqrt"] if isinstance(arg, str): if any(arg.startswith(k) for k in trans_args): # TODO validate numeric type? That should happen centrally somewhere return Continuous(trans=arg) else: msg = f"Unknown magic arg for {self.variable} scale: '{arg}'." raise ValueError(msg) else: arg_type = type(arg).__name__ msg = f"Magic arg for {self.variable} scale must be str, not {arg_type}." raise TypeError(msg)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L77-L95
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18 ]
100
[]
0
true
99.058824
19
3
100
1
def infer_scale(self, arg: Any, data: Series) -> Scale: # TODO put these somewhere external for validation # TODO putting this here won't pick it up if subclasses define infer_scale # (e.g. color). How best to handle that? One option is to call super after # handling property-specific possibilities (e.g. for color check that the # arg is not a valid palette name) but that could get tricky. trans_args = ["log", "symlog", "logit", "pow", "sqrt"] if isinstance(arg, str): if any(arg.startswith(k) for k in trans_args): # TODO validate numeric type? That should happen centrally somewhere return Continuous(trans=arg) else: msg = f"Unknown magic arg for {self.variable} scale: '{arg}'." raise ValueError(msg) else: arg_type = type(arg).__name__ msg = f"Magic arg for {self.variable} scale must be str, not {arg_type}." raise TypeError(msg)
19,334
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Property.get_mapping
(self, scale: Scale, data: Series)
return identity
Return a function that maps from data domain to property range.
Return a function that maps from data domain to property range.
97
101
def get_mapping(self, scale: Scale, data: Series) -> Mapping: """Return a function that maps from data domain to property range.""" def identity(x): return x return identity
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L97-L101
26
[ 0, 1, 2, 3, 4 ]
100
[]
0
true
99.058824
5
2
100
1
def get_mapping(self, scale: Scale, data: Series) -> Mapping: def identity(x): return x return identity
19,335
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Property.standardize
(self, val: Any)
return val
Coerce flexible property value to standardized representation.
Coerce flexible property value to standardized representation.
103
105
def standardize(self, val: Any) -> Any: """Coerce flexible property value to standardized representation.""" return val
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L103-L105
26
[ 0, 1, 2 ]
100
[]
0
true
99.058824
3
1
100
1
def standardize(self, val: Any) -> Any: return val
19,336
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Property._check_dict_entries
(self, levels: list, values: dict)
Input check when values are provided as a dictionary.
Input check when values are provided as a dictionary.
107
113
def _check_dict_entries(self, levels: list, values: dict) -> None: """Input check when values are provided as a dictionary.""" missing = set(levels) - set(values) if missing: formatted = ", ".join(map(repr, sorted(missing, key=str))) err = f"No entry in {self.variable} dictionary for {formatted}" raise ValueError(err)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L107-L113
26
[ 0, 1, 2, 3, 4, 5, 6 ]
100
[]
0
true
99.058824
7
2
100
1
def _check_dict_entries(self, levels: list, values: dict) -> None: missing = set(levels) - set(values) if missing: formatted = ", ".join(map(repr, sorted(missing, key=str))) err = f"No entry in {self.variable} dictionary for {formatted}" raise ValueError(err)
19,337
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Property._check_list_length
(self, levels: list, values: list)
return values
Input check when values are provided as a list.
Input check when values are provided as a list.
115
137
def _check_list_length(self, levels: list, values: list) -> list: """Input check when values are provided as a list.""" message = "" if len(levels) > len(values): message = " ".join([ f"\nThe {self.variable} list has fewer values ({len(values)})", f"than needed ({len(levels)}) and will cycle, which may", "produce an uninterpretable plot." ]) values = [x for _, x in zip(levels, itertools.cycle(values))] elif len(values) > len(levels): message = " ".join([ f"The {self.variable} list has more values ({len(values)})", f"than needed ({len(levels)}), which may not be intended.", ]) values = values[:len(levels)] # TODO look into custom PlotSpecWarning with better formatting if message: warnings.warn(message, UserWarning) return values
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L115-L137
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22 ]
100
[]
0
true
99.058824
23
5
100
1
def _check_list_length(self, levels: list, values: list) -> list: message = "" if len(levels) > len(values): message = " ".join([ f"\nThe {self.variable} list has fewer values ({len(values)})", f"than needed ({len(levels)}) and will cycle, which may", "produce an uninterpretable plot." ]) values = [x for _, x in zip(levels, itertools.cycle(values))] elif len(values) > len(levels): message = " ".join([ f"The {self.variable} list has more values ({len(values)})", f"than needed ({len(levels)}), which may not be intended.", ]) values = values[:len(levels)] # TODO look into custom PlotSpecWarning with better formatting if message: warnings.warn(message, UserWarning) return values
19,338
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
IntervalProperty.default_range
(self)
return self._default_range
Min and max values used by default for semantic mapping.
Min and max values used by default for semantic mapping.
164
166
def default_range(self) -> tuple[float, float]: """Min and max values used by default for semantic mapping.""" return self._default_range
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L164-L166
26
[ 0, 1, 2 ]
100
[]
0
true
99.058824
3
1
100
1
def default_range(self) -> tuple[float, float]: return self._default_range
19,339
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
IntervalProperty._forward
(self, values: ArrayLike)
return values
Transform applied to native values before linear mapping into interval.
Transform applied to native values before linear mapping into interval.
168
170
def _forward(self, values: ArrayLike) -> ArrayLike: """Transform applied to native values before linear mapping into interval.""" return values
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L168-L170
26
[ 0, 1, 2 ]
100
[]
0
true
99.058824
3
1
100
1
def _forward(self, values: ArrayLike) -> ArrayLike: return values
19,340
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
IntervalProperty._inverse
(self, values: ArrayLike)
return values
Transform applied to results of mapping that returns to native values.
Transform applied to results of mapping that returns to native values.
172
174
def _inverse(self, values: ArrayLike) -> ArrayLike:
    """Transform applied to results of mapping that returns to native values."""
    return values
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L172-L174
26
[ 0, 1, 2 ]
100
[]
0
true
99.058824
3
1
100
1
def _inverse(self, values: ArrayLike) -> ArrayLike:
    return values
19,341
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
IntervalProperty.infer_scale
(self, arg: Any, data: Series)
Given data and a scaling argument, initialize appropriate scale class.
Given data and a scaling argument, initialize appropriate scale class.
176
193
def infer_scale(self, arg: Any, data: Series) -> Scale:
    """Given data and a scaling argument, initialize appropriate scale class."""
    # TODO infer continuous based on log/sqrt etc?

    var_type = variable_type(data, boolean_type="boolean", strict_boolean=True)

    if var_type == "boolean":
        return Boolean(arg)
    elif isinstance(arg, (list, dict)):
        return Nominal(arg)
    elif var_type == "categorical":
        return Nominal(arg)
    elif var_type == "datetime":
        return Temporal(arg)
    # TODO other variable types
    else:
        return Continuous(arg)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L176-L193
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 16, 17 ]
88.888889
[ 14 ]
5.555556
false
99.058824
18
5
94.444444
1
def infer_scale(self, arg: Any, data: Series) -> Scale:
    # TODO infer continuous based on log/sqrt etc?

    var_type = variable_type(data, boolean_type="boolean", strict_boolean=True)

    if var_type == "boolean":
        return Boolean(arg)
    elif isinstance(arg, (list, dict)):
        return Nominal(arg)
    elif var_type == "categorical":
        return Nominal(arg)
    elif var_type == "datetime":
        return Temporal(arg)
    # TODO other variable types
    else:
        return Continuous(arg)
19,342
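A minimal sketch of how infer_scale dispatches on data type and argument form, assuming the internal seaborn._core.properties module at this sha (not a stable public API):

import pandas as pd
from seaborn._core.properties import PointSize  # internal module; path may change

prop = PointSize()
# Numeric data with a (min, max) tuple -> Continuous
print(type(prop.infer_scale((2, 8), pd.Series([1.0, 2.0, 3.0]))).__name__)
# Categorical data with an explicit list -> Nominal
print(type(prop.infer_scale([2, 4, 6], pd.Series(list("abc")))).__name__)
# Boolean data -> Boolean
print(type(prop.infer_scale((2, 8), pd.Series([True, False]))).__name__)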
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
IntervalProperty.get_mapping
(self, scale: Scale, data: Series)
return mapping
Return a function that maps from data domain to property range.
Return a function that maps from data domain to property range.
195
221
def get_mapping(self, scale: Scale, data: Series) -> Mapping:
    """Return a function that maps from data domain to property range."""
    if isinstance(scale, Nominal):
        return self._get_nominal_mapping(scale, data)
    elif isinstance(scale, Boolean):
        return self._get_boolean_mapping(scale, data)

    if scale.values is None:
        vmin, vmax = self._forward(self.default_range)
    elif isinstance(scale.values, tuple) and len(scale.values) == 2:
        vmin, vmax = self._forward(scale.values)
    else:
        if isinstance(scale.values, tuple):
            actual = f"{len(scale.values)}-tuple"
        else:
            actual = str(type(scale.values))
        scale_class = scale.__class__.__name__
        err = " ".join([
            f"Values for {self.variable} variables with {scale_class} scale",
            f"must be 2-tuple; not {actual}.",
        ])
        raise TypeError(err)

    def mapping(x):
        return self._inverse(np.multiply(x, vmax - vmin) + vmin)

    return mapping
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L195-L221
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26 ]
100
[]
0
true
99.058824
27
8
100
1
def get_mapping(self, scale: Scale, data: Series) -> Mapping:
    if isinstance(scale, Nominal):
        return self._get_nominal_mapping(scale, data)
    elif isinstance(scale, Boolean):
        return self._get_boolean_mapping(scale, data)

    if scale.values is None:
        vmin, vmax = self._forward(self.default_range)
    elif isinstance(scale.values, tuple) and len(scale.values) == 2:
        vmin, vmax = self._forward(scale.values)
    else:
        if isinstance(scale.values, tuple):
            actual = f"{len(scale.values)}-tuple"
        else:
            actual = str(type(scale.values))
        scale_class = scale.__class__.__name__
        err = " ".join([
            f"Values for {self.variable} variables with {scale_class} scale",
            f"must be 2-tuple; not {actual}.",
        ])
        raise TypeError(err)

    def mapping(x):
        return self._inverse(np.multiply(x, vmax - vmin) + vmin)

    return mapping
19,343
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
IntervalProperty._get_nominal_mapping
(self, scale: Nominal, data: Series)
return mapping
Identify evenly-spaced values using interval or explicit mapping.
Identify evenly-spaced values using interval or explicit mapping.
223
235
def _get_nominal_mapping(self, scale: Nominal, data: Series) -> Mapping:
    """Identify evenly-spaced values using interval or explicit mapping."""
    levels = categorical_order(data, scale.order)
    values = self._get_values(scale, levels)

    def mapping(x):
        ixs = np.asarray(x, np.intp)
        out = np.full(len(x), np.nan)
        use = np.isfinite(x)
        out[use] = np.take(values, ixs[use])
        return out

    return mapping
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L223-L235
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 ]
100
[]
0
true
99.058824
13
2
100
1
def _get_nominal_mapping(self, scale: Nominal, data: Series) -> Mapping:
    levels = categorical_order(data, scale.order)
    values = self._get_values(scale, levels)

    def mapping(x):
        ixs = np.asarray(x, np.intp)
        out = np.full(len(x), np.nan)
        use = np.isfinite(x)
        out[use] = np.take(values, ixs[use])
        return out

    return mapping
19,344
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
IntervalProperty._get_boolean_mapping
(self, scale: Boolean, data: Series)
return mapping
Identify evenly-spaced values using interval or explicit mapping.
Identify evenly-spaced values using interval or explicit mapping.
237
247
def _get_boolean_mapping(self, scale: Boolean, data: Series) -> Mapping:
    """Identify evenly-spaced values using interval or explicit mapping."""
    values = self._get_values(scale, [True, False])

    def mapping(x):
        out = np.full(len(x), np.nan)
        use = np.isfinite(x)
        out[use] = np.where(x[use], *values)
        return out

    return mapping
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L237-L247
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ]
100
[]
0
true
99.058824
11
2
100
1
def _get_boolean_mapping(self, scale: Boolean, data: Series) -> Mapping:
    values = self._get_values(scale, [True, False])

    def mapping(x):
        out = np.full(len(x), np.nan)
        use = np.isfinite(x)
        out[use] = np.where(x[use], *values)
        return out

    return mapping
19,345
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
IntervalProperty._get_values
(self, scale: Scale, levels: list)
return values
Validate scale.values and identify a value for each level.
Validate scale.values and identify a value for each level.
249
272
def _get_values(self, scale: Scale, levels: list) -> list:
    """Validate scale.values and identify a value for each level."""
    if isinstance(scale.values, dict):
        self._check_dict_entries(levels, scale.values)
        values = [scale.values[x] for x in levels]
    elif isinstance(scale.values, list):
        values = self._check_list_length(levels, scale.values)
    else:
        if scale.values is None:
            vmin, vmax = self.default_range
        elif isinstance(scale.values, tuple):
            vmin, vmax = scale.values
        else:
            scale_class = scale.__class__.__name__
            err = " ".join([
                f"Values for {self.variable} variables with {scale_class} scale",
                f"must be a dict, list or tuple; not {type(scale.values)}",
            ])
            raise TypeError(err)

        vmin, vmax = self._forward([vmin, vmax])
        values = list(self._inverse(np.linspace(vmax, vmin, len(levels))))

    return values
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L249-L272
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23 ]
100
[]
0
true
99.058824
24
6
100
1
def _get_values(self, scale: Scale, levels: list) -> list:
    if isinstance(scale.values, dict):
        self._check_dict_entries(levels, scale.values)
        values = [scale.values[x] for x in levels]
    elif isinstance(scale.values, list):
        values = self._check_list_length(levels, scale.values)
    else:
        if scale.values is None:
            vmin, vmax = self.default_range
        elif isinstance(scale.values, tuple):
            vmin, vmax = scale.values
        else:
            scale_class = scale.__class__.__name__
            err = " ".join([
                f"Values for {self.variable} variables with {scale_class} scale",
                f"must be a dict, list or tuple; not {type(scale.values)}",
            ])
            raise TypeError(err)

        vmin, vmax = self._forward([vmin, vmax])
        values = list(self._inverse(np.linspace(vmax, vmin, len(levels))))

    return values
19,346
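The non-dict/list branch above interpolates evenly between the forward-transformed range endpoints; a quick numpy check of the np.linspace(vmax, vmin, len(levels)) call (note the first level gets vmax):

import numpy as np

levels = ["a", "b", "c"]
vmin, vmax = 2, 10
print(np.linspace(vmax, vmin, len(levels)))  # [10.  6.  2.]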
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
PointSize._forward
(self, values)
return np.square(values)
Square native values to implement linear scaling of point area.
Square native values to implement linear scaling of point area.
279
281
def _forward(self, values):
    """Square native values to implement linear scaling of point area."""
    return np.square(values)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L279-L281
26
[ 0, 1, 2 ]
100
[]
0
true
99.058824
3
1
100
1
def _forward(self, values):
    return np.square(values)
19,347
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
PointSize._inverse
(self, values)
return np.sqrt(values)
Invert areal values back to point diameter.
Invert areal values back to point diameter.
283
285
def _inverse(self, values):
    """Invert areal values back to point diameter."""
    return np.sqrt(values)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L283-L285
26
[ 0, 1, 2 ]
100
[]
0
true
99.058824
3
1
100
1
def _inverse(self, values):
    return np.sqrt(values)
19,348
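Why PointSize pairs np.square with np.sqrt: interpolation happens in area units so that a visual midpoint is an area midpoint, not a diameter midpoint. A small numpy check:

import numpy as np

diameters = np.array([2.0, 5.0, 8.0])
areas = np.square(diameters)  # _forward
print(np.sqrt(areas))         # _inverse recovers [2. 5. 8.]

# The area midpoint between diameters 2 and 8 is not diameter 5:
mid_area = (np.square(2.0) + np.square(8.0)) / 2
print(np.sqrt(mid_area))      # ~5.83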
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
LineWidth.default_range
(self)
return base * .5, base * 2
Min and max values used by default for semantic mapping.
Min and max values used by default for semantic mapping.
291
294
def default_range(self) -> tuple[float, float]:
    """Min and max values used by default for semantic mapping."""
    base = mpl.rcParams["lines.linewidth"]
    return base * .5, base * 2
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L291-L294
26
[ 0, 1, 2, 3 ]
100
[]
0
true
99.058824
4
1
100
1
def default_range(self) -> tuple[float, float]:
    base = mpl.rcParams["lines.linewidth"]
    return base * .5, base * 2
19,349
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
EdgeWidth.default_range
(self)
return base * .5, base * 2
Min and max values used by default for semantic mapping.
Min and max values used by default for semantic mapping.
300
303
def default_range(self) -> tuple[float, float]:
    """Min and max values used by default for semantic mapping."""
    base = mpl.rcParams["patch.linewidth"]
    return base * .5, base * 2
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L300-L303
26
[ 0, 1, 2, 3 ]
100
[]
0
true
99.058824
4
1
100
1
def default_range(self) -> tuple[float, float]:
    base = mpl.rcParams["patch.linewidth"]
    return base * .5, base * 2
19,350
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
FontSize.default_range
(self)
return base * .5, base * 2
Min and max values used by default for semantic mapping.
Min and max values used by default for semantic mapping.
328
331
def default_range(self) -> tuple[float, float]:
    """Min and max values used by default for semantic mapping."""
    base = mpl.rcParams["font.size"]
    return base * .5, base * 2
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L328-L331
26
[ 0, 1, 2, 3 ]
100
[]
0
true
99.058824
4
1
100
1
def default_range(self) -> tuple[float, float]:
    base = mpl.rcParams["font.size"]
    return base * .5, base * 2
19,351
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
ObjectProperty.default_scale
(self, data: Series)
return Boolean() if var_type == "boolean" else Nominal()
351
353
def default_scale(self, data: Series) -> Scale:
    var_type = variable_type(data, boolean_type="boolean", strict_boolean=True)
    return Boolean() if var_type == "boolean" else Nominal()
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L351-L353
26
[ 0, 1, 2 ]
100
[]
0
true
99.058824
3
1
100
0
def default_scale(self, data: Series) -> Scale:
    var_type = variable_type(data, boolean_type="boolean", strict_boolean=True)
    return Boolean() if var_type == "boolean" else Nominal()
19,352
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
ObjectProperty.infer_scale
(self, arg: Any, data: Series)
return Boolean(arg) if var_type == "boolean" else Nominal(arg)
355
357
def infer_scale(self, arg: Any, data: Series) -> Scale:
    var_type = variable_type(data, boolean_type="boolean", strict_boolean=True)
    return Boolean(arg) if var_type == "boolean" else Nominal(arg)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L355-L357
26
[ 0, 1, 2 ]
100
[]
0
true
99.058824
3
1
100
0
def infer_scale(self, arg: Any, data: Series) -> Scale:
    var_type = variable_type(data, boolean_type="boolean", strict_boolean=True)
    return Boolean(arg) if var_type == "boolean" else Nominal(arg)
19,353
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
ObjectProperty.get_mapping
(self, scale: Scale, data: Series)
return mapping
Define mapping as lookup into list of object values.
Define mapping as lookup into list of object values.
359
376
def get_mapping(self, scale: Scale, data: Series) -> Mapping:
    """Define mapping as lookup into list of object values."""
    boolean_scale = isinstance(scale, Boolean)
    order = getattr(scale, "order", [True, False] if boolean_scale else None)
    levels = categorical_order(data, order)
    values = self._get_values(scale, levels)

    if boolean_scale:
        values = values[::-1]

    def mapping(x):
        ixs = np.asarray(x, np.intp)
        return [
            values[ix] if np.isfinite(x_i) else self.null_value
            for x_i, ix in zip(x, ixs)
        ]

    return mapping
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L359-L376
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17 ]
100
[]
0
true
99.058824
18
4
100
1
def get_mapping(self, scale: Scale, data: Series) -> Mapping:
    boolean_scale = isinstance(scale, Boolean)
    order = getattr(scale, "order", [True, False] if boolean_scale else None)
    levels = categorical_order(data, order)
    values = self._get_values(scale, levels)

    if boolean_scale:
        values = values[::-1]

    def mapping(x):
        ixs = np.asarray(x, np.intp)
        return [
            values[ix] if np.isfinite(x_i) else self.null_value
            for x_i, ix in zip(x, ixs)
        ]

    return mapping
19,354
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
ObjectProperty._get_values
(self, scale: Scale, levels: list)
return values
Validate scale.values and identify a value for each level.
Validate scale.values and identify a value for each level.
378
396
def _get_values(self, scale: Scale, levels: list) -> list:
    """Validate scale.values and identify a value for each level."""
    n = len(levels)
    if isinstance(scale.values, dict):
        self._check_dict_entries(levels, scale.values)
        values = [scale.values[x] for x in levels]
    elif isinstance(scale.values, list):
        values = self._check_list_length(levels, scale.values)
    elif scale.values is None:
        values = self._default_values(n)
    else:
        msg = " ".join([
            f"Scale values for a {self.variable} variable must be provided",
            f"in a dict or list; not {type(scale.values)}."
        ])
        raise TypeError(msg)

    values = [self.standardize(x) for x in values]
    return values
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L378-L396
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18 ]
100
[]
0
true
99.058824
19
6
100
1
def _get_values(self, scale: Scale, levels: list) -> list:
    n = len(levels)
    if isinstance(scale.values, dict):
        self._check_dict_entries(levels, scale.values)
        values = [scale.values[x] for x in levels]
    elif isinstance(scale.values, list):
        values = self._check_list_length(levels, scale.values)
    elif scale.values is None:
        values = self._default_values(n)
    else:
        msg = " ".join([
            f"Scale values for a {self.variable} variable must be provided",
            f"in a dict or list; not {type(scale.values)}."
        ])
        raise TypeError(msg)

    values = [self.standardize(x) for x in values]
    return values
19,355
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Marker.standardize
(self, val: MarkerPattern)
return MarkerStyle(val)
408
409
def standardize(self, val: MarkerPattern) -> MarkerStyle:
    return MarkerStyle(val)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L408-L409
26
[ 0, 1 ]
100
[]
0
true
99.058824
2
1
100
0
def standardize(self, val: MarkerPattern) -> MarkerStyle:
    return MarkerStyle(val)
19,356
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Marker._default_values
(self, n: int)
return markers
Build an arbitrarily long list of unique marker styles.

Parameters
----------
n : int
    Number of unique marker specs to generate.

Returns
-------
markers : list of string or tuples
    Values for defining :class:`matplotlib.markers.MarkerStyle` objects.
    All markers will be filled.
Build an arbitrarily long list of unique marker styles.
411
440
def _default_values(self, n: int) -> list[MarkerStyle]:
    """Build an arbitrarily long list of unique marker styles.

    Parameters
    ----------
    n : int
        Number of unique marker specs to generate.

    Returns
    -------
    markers : list of string or tuples
        Values for defining :class:`matplotlib.markers.MarkerStyle` objects.
        All markers will be filled.

    """
    # Start with marker specs that are well distinguishable
    markers = [
        "o", "X", (4, 0, 45), "P", (4, 0, 0), (4, 1, 0), "^", (4, 1, 45), "v",
    ]

    # Now generate more from regular polygons of increasing order
    s = 5
    while len(markers) < n:
        a = 360 / (s + 1) / 2
        markers.extend([(s + 1, 1, a), (s + 1, 0, a), (s, 1, 0), (s, 0, 0)])
        s += 1

    markers = [MarkerStyle(m) for m in markers[:n]]

    return markers
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L411-L440
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29 ]
100
[]
0
true
99.058824
30
3
100
12
def _default_values(self, n: int) -> list[MarkerStyle]:
    # Start with marker specs that are well distinguishable
    markers = [
        "o", "X", (4, 0, 45), "P", (4, 0, 0), (4, 1, 0), "^", (4, 1, 45), "v",
    ]

    # Now generate more from regular polygons of increasing order
    s = 5
    while len(markers) < n:
        a = 360 / (s + 1) / 2
        markers.extend([(s + 1, 1, a), (s + 1, 0, a), (s, 1, 0), (s, 0, 0)])
        s += 1

    markers = [MarkerStyle(m) for m in markers[:n]]

    return markers
19,357
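A standalone sketch of the polygon fallback in Marker._default_values: after the nine seed specs, new markers come from regular polygons of increasing order (uses only matplotlib):

from matplotlib.markers import MarkerStyle

markers = ["o", "X", (4, 0, 45), "P", (4, 0, 0), (4, 1, 0), "^", (4, 1, 45), "v"]
s = 5
while len(markers) < 12:
    a = 360 / (s + 1) / 2  # rotation angle for the polygon spec
    markers.extend([(s + 1, 1, a), (s + 1, 0, a), (s, 1, 0), (s, 0, 0)])
    s += 1
styles = [MarkerStyle(m) for m in markers[:12]]
print(len(styles))  # 12 unique, all fillable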
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
LineStyle.standardize
(self, val: str | DashPattern)
return self._get_dash_pattern(val)
447
448
def standardize(self, val: str | DashPattern) -> DashPatternWithOffset:
    return self._get_dash_pattern(val)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L447-L448
26
[ 0, 1 ]
100
[]
0
true
99.058824
2
1
100
0
def standardize(self, val: str | DashPattern) -> DashPatternWithOffset:
    return self._get_dash_pattern(val)
19,358
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
LineStyle._default_values
(self, n: int)
return [self._get_dash_pattern(x) for x in dashes]
Build an arbitrarily long list of unique dash styles for lines.

Parameters
----------
n : int
    Number of unique dash specs to generate.

Returns
-------
dashes : list of strings or tuples
    Valid arguments for the ``dashes`` parameter on
    :class:`matplotlib.lines.Line2D`. The first spec is a solid
    line (``""``), the remainder are sequences of long and short
    dashes.
Build an arbitrarily long list of unique dash styles for lines.
450
491
def _default_values(self, n: int) -> list[DashPatternWithOffset]:
    """Build an arbitrarily long list of unique dash styles for lines.

    Parameters
    ----------
    n : int
        Number of unique dash specs to generate.

    Returns
    -------
    dashes : list of strings or tuples
        Valid arguments for the ``dashes`` parameter on
        :class:`matplotlib.lines.Line2D`. The first spec is a solid
        line (``""``), the remainder are sequences of long and short
        dashes.

    """
    # Start with dash specs that are well distinguishable
    dashes: list[str | DashPattern] = [
        "-", (4, 1.5), (1, 1), (3, 1.25, 1.5, 1.25), (5, 1, 1, 1),
    ]

    # Now programmatically build as many as we need
    p = 3
    while len(dashes) < n:
        # Take combinations of long and short dashes
        a = itertools.combinations_with_replacement([3, 1.25], p)
        b = itertools.combinations_with_replacement([4, 1], p)

        # Interleave the combinations, reversing one of the streams
        segment_list = itertools.chain(*zip(list(a)[1:-1][::-1], list(b)[1:-1]))

        # Now insert the gaps
        for segments in segment_list:
            gap = min(segments)
            spec = tuple(itertools.chain(*((seg, gap) for seg in segments)))
            dashes.append(spec)

        p += 1

    return [self._get_dash_pattern(x) for x in dashes]
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L450-L491
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41 ]
100
[]
0
true
99.058824
42
4
100
14
def _default_values(self, n: int) -> list[DashPatternWithOffset]:
    # Start with dash specs that are well distinguishable
    dashes: list[str | DashPattern] = [
        "-", (4, 1.5), (1, 1), (3, 1.25, 1.5, 1.25), (5, 1, 1, 1),
    ]

    # Now programmatically build as many as we need
    p = 3
    while len(dashes) < n:
        # Take combinations of long and short dashes
        a = itertools.combinations_with_replacement([3, 1.25], p)
        b = itertools.combinations_with_replacement([4, 1], p)

        # Interleave the combinations, reversing one of the streams
        segment_list = itertools.chain(*zip(list(a)[1:-1][::-1], list(b)[1:-1]))

        # Now insert the gaps
        for segments in segment_list:
            gap = min(segments)
            spec = tuple(itertools.chain(*((seg, gap) for seg in segments)))
            dashes.append(spec)

        p += 1

    return [self._get_dash_pattern(x) for x in dashes]
19,359
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
LineStyle._get_dash_pattern
(style: str | DashPattern)
return offset, dashes
Convert linestyle arguments to dash pattern with offset.
Convert linestyle arguments to dash pattern with offset.
494
537
def _get_dash_pattern(style: str | DashPattern) -> DashPatternWithOffset:
    """Convert linestyle arguments to dash pattern with offset."""
    # Copied and modified from Matplotlib 3.4
    # go from short hand -> full strings
    ls_mapper = {"-": "solid", "--": "dashed", "-.": "dashdot", ":": "dotted"}
    if isinstance(style, str):
        style = ls_mapper.get(style, style)
        # un-dashed styles
        if style in ["solid", "none", "None"]:
            offset = 0
            dashes = None
        # dashed styles
        elif style in ["dashed", "dashdot", "dotted"]:
            offset = 0
            dashes = tuple(mpl.rcParams[f"lines.{style}_pattern"])
        else:
            options = [*ls_mapper.values(), *ls_mapper.keys()]
            msg = f"Linestyle string must be one of {options}, not {repr(style)}."
            raise ValueError(msg)

    elif isinstance(style, tuple):
        if len(style) > 1 and isinstance(style[1], tuple):
            offset, dashes = style
        elif len(style) > 1 and style[1] is None:
            offset, dashes = style
        else:
            offset = 0
            dashes = style

    else:
        val_type = type(style).__name__
        msg = f"Linestyle must be str or tuple, not {val_type}."
        raise TypeError(msg)

    # Normalize offset to be positive and shorter than the dash cycle
    if dashes is not None:
        try:
            dsum = sum(dashes)
        except TypeError as err:
            msg = f"Invalid dash pattern: {dashes}"
            raise TypeError(msg) from err
        if dsum:
            offset %= dsum

    return offset, dashes
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L494-L537
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43 ]
100
[]
0
true
99.058824
44
12
100
1
def _get_dash_pattern(style: str | DashPattern) -> DashPatternWithOffset:
    # Copied and modified from Matplotlib 3.4
    # go from short hand -> full strings
    ls_mapper = {"-": "solid", "--": "dashed", "-.": "dashdot", ":": "dotted"}
    if isinstance(style, str):
        style = ls_mapper.get(style, style)
        # un-dashed styles
        if style in ["solid", "none", "None"]:
            offset = 0
            dashes = None
        # dashed styles
        elif style in ["dashed", "dashdot", "dotted"]:
            offset = 0
            dashes = tuple(mpl.rcParams[f"lines.{style}_pattern"])
        else:
            options = [*ls_mapper.values(), *ls_mapper.keys()]
            msg = f"Linestyle string must be one of {options}, not {repr(style)}."
            raise ValueError(msg)

    elif isinstance(style, tuple):
        if len(style) > 1 and isinstance(style[1], tuple):
            offset, dashes = style
        elif len(style) > 1 and style[1] is None:
            offset, dashes = style
        else:
            offset = 0
            dashes = style

    else:
        val_type = type(style).__name__
        msg = f"Linestyle must be str or tuple, not {val_type}."
        raise TypeError(msg)

    # Normalize offset to be positive and shorter than the dash cycle
    if dashes is not None:
        try:
            dsum = sum(dashes)
        except TypeError as err:
            msg = f"Invalid dash pattern: {dashes}"
            raise TypeError(msg) from err
        if dsum:
            offset %= dsum

    return offset, dashes
19,360
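The dashed-style branch above reads the pattern from matplotlib's rcParams; a quick check of that lookup (exact numbers depend on your rcParams; the value shown is the matplotlib default):

import matplotlib as mpl

ls_mapper = {"-": "solid", "--": "dashed", "-.": "dashdot", ":": "dotted"}
style = ls_mapper["--"]
print(tuple(mpl.rcParams[f"lines.{style}_pattern"]))  # e.g. (3.7, 1.6)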
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
HorizontalAlignment._default_values
(self, n: int)
return [next(vals) for _ in range(n)]
546
548
def _default_values(self, n: int) -> list:
    vals = itertools.cycle(["left", "right"])
    return [next(vals) for _ in range(n)]
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L546-L548
26
[ 0, 1, 2 ]
100
[]
0
true
99.058824
3
2
100
0
def _default_values(self, n: int) -> list:
    vals = itertools.cycle(["left", "right"])
    return [next(vals) for _ in range(n)]
19,361
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
VerticalAlignment._default_values
(self, n: int)
return [next(vals) for _ in range(n)]
553
555
def _default_values(self, n: int) -> list:
    vals = itertools.cycle(["top", "bottom"])
    return [next(vals) for _ in range(n)]
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L553-L555
26
[ 0, 1, 2 ]
100
[]
0
true
99.058824
3
2
100
0
def _default_values(self, n: int) -> list:
    vals = itertools.cycle(["top", "bottom"])
    return [next(vals) for _ in range(n)]
19,362
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Color.standardize
(self, val: ColorSpec)
568
574
def standardize(self, val: ColorSpec) -> RGBTuple | RGBATuple:
    # Return color with alpha channel only if the input spec has it
    # This is so that RGBA colors can override the Alpha property
    if to_rgba(val) != to_rgba(val, 1):
        return to_rgba(val)
    else:
        return to_rgb(val)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L568-L574
26
[ 0, 1, 2, 3, 4, 6 ]
85.714286
[]
0
false
99.058824
7
2
100
0
def standardize(self, val: ColorSpec) -> RGBTuple | RGBATuple:
    # Return color with alpha channel only if the input spec has it
    # This is so that RGBA colors can override the Alpha property
    if to_rgba(val) != to_rgba(val, 1):
        return to_rgba(val)
    else:
        return to_rgb(val)
19,363
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Color._standardize_color_sequence
(self, colors: ArrayLike)
Convert color sequence to RGB(A) array, preserving but not adding alpha.
Convert color sequence to RGB(A) array, preserving but not adding alpha.
576
589
def _standardize_color_sequence(self, colors: ArrayLike) -> ArrayLike:
    """Convert color sequence to RGB(A) array, preserving but not adding alpha."""
    def has_alpha(x):
        return to_rgba(x) != to_rgba(x, 1)

    if isinstance(colors, np.ndarray):
        needs_alpha = colors.shape[1] == 4
    else:
        needs_alpha = any(has_alpha(x) for x in colors)

    if needs_alpha:
        return to_rgba_array(colors)
    else:
        return to_rgba_array(colors)[:, :3]
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L576-L589
26
[ 0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13 ]
92.857143
[ 6 ]
7.142857
false
99.058824
14
4
92.857143
1
def _standardize_color_sequence(self, colors: ArrayLike) -> ArrayLike:
    def has_alpha(x):
        return to_rgba(x) != to_rgba(x, 1)

    if isinstance(colors, np.ndarray):
        needs_alpha = colors.shape[1] == 4
    else:
        needs_alpha = any(has_alpha(x) for x in colors)

    if needs_alpha:
        return to_rgba_array(colors)
    else:
        return to_rgba_array(colors)[:, :3]
19,364
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Color.infer_scale
(self, arg: Any, data: Series)
591
630
def infer_scale(self, arg: Any, data: Series) -> Scale:
    # TODO when inferring Continuous without data, verify type

    # TODO need to rethink the variable type system
    # (e.g. boolean, ordered categories as Ordinal, etc)..
    var_type = variable_type(data, boolean_type="boolean", strict_boolean=True)

    if var_type == "boolean":
        return Boolean(arg)

    if isinstance(arg, (dict, list)):
        return Nominal(arg)

    if isinstance(arg, tuple):
        if var_type == "categorical":
            # TODO It seems reasonable to allow a gradient mapping for nominal
            # scale but it also feels "technically" wrong. Should this infer
            # Ordinal with categorical data and, if so, verify orderedness?
            return Nominal(arg)
        return Continuous(arg)

    if callable(arg):
        return Continuous(arg)

    # TODO Do we accept str like "log", "pow", etc. for semantics?

    if not isinstance(arg, str):
        msg = " ".join([
            f"A single scale argument for {self.variable} variables must be",
            f"a string, dict, tuple, list, or callable, not {type(arg)}."
        ])
        raise TypeError(msg)

    if arg in QUAL_PALETTES:
        return Nominal(arg)
    elif var_type == "numeric":
        return Continuous(arg)
    # TODO implement scales for date variables and any others.
    else:
        return Nominal(arg)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L591-L630
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 31, 32, 33, 34, 35, 36, 37, 39 ]
90
[]
0
false
99.058824
40
9
100
0
def infer_scale(self, arg: Any, data: Series) -> Scale:
    # TODO when inferring Continuous without data, verify type

    # TODO need to rethink the variable type system
    # (e.g. boolean, ordered categories as Ordinal, etc)..
    var_type = variable_type(data, boolean_type="boolean", strict_boolean=True)

    if var_type == "boolean":
        return Boolean(arg)

    if isinstance(arg, (dict, list)):
        return Nominal(arg)

    if isinstance(arg, tuple):
        if var_type == "categorical":
            # TODO It seems reasonable to allow a gradient mapping for nominal
            # scale but it also feels "technically" wrong. Should this infer
            # Ordinal with categorical data and, if so, verify orderedness?
            return Nominal(arg)
        return Continuous(arg)

    if callable(arg):
        return Continuous(arg)

    # TODO Do we accept str like "log", "pow", etc. for semantics?

    if not isinstance(arg, str):
        msg = " ".join([
            f"A single scale argument for {self.variable} variables must be",
            f"a string, dict, tuple, list, or callable, not {type(arg)}."
        ])
        raise TypeError(msg)

    if arg in QUAL_PALETTES:
        return Nominal(arg)
    elif var_type == "numeric":
        return Continuous(arg)
    # TODO implement scales for date variables and any others.
    else:
        return Nominal(arg)
19,365
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Color.get_mapping
(self, scale: Scale, data: Series)
return _mapping
Return a function that maps from data domain to color values.
Return a function that maps from data domain to color values.
632
671
def get_mapping(self, scale: Scale, data: Series) -> Mapping:
    """Return a function that maps from data domain to color values."""
    # TODO what is best way to do this conditional?
    # Should it be class-based or should classes have behavioral attributes?
    if isinstance(scale, Nominal):
        return self._get_nominal_mapping(scale, data)
    elif isinstance(scale, Boolean):
        return self._get_boolean_mapping(scale, data)

    if scale.values is None:
        # TODO Rethink best default continuous color gradient
        mapping = color_palette("ch:", as_cmap=True)
    elif isinstance(scale.values, tuple):
        # TODO blend_palette will strip alpha, but we should support
        # interpolation on all four channels
        mapping = blend_palette(scale.values, as_cmap=True)
    elif isinstance(scale.values, str):
        # TODO for matplotlib colormaps this will clip extremes, which is
        # different from what using the named colormap directly would do
        # This may or may not be desireable.
        mapping = color_palette(scale.values, as_cmap=True)
    elif callable(scale.values):
        mapping = scale.values
    else:
        scale_class = scale.__class__.__name__
        msg = " ".join([
            f"Scale values for {self.variable} with a {scale_class} mapping",
            f"must be string, tuple, or callable; not {type(scale.values)}."
        ])
        raise TypeError(msg)

    def _mapping(x):
        # Remove alpha channel so it does not override alpha property downstream
        # TODO this will need to be more flexible to support RGBA tuples (see above)
        invalid = ~np.isfinite(x)
        out = mapping(x)[:, :3]
        out[invalid] = np.nan
        return out

    return _mapping
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L632-L671
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39 ]
100
[]
0
true
99.058824
40
8
100
1
def get_mapping(self, scale: Scale, data: Series) -> Mapping:
    # TODO what is best way to do this conditional?
    # Should it be class-based or should classes have behavioral attributes?
    if isinstance(scale, Nominal):
        return self._get_nominal_mapping(scale, data)
    elif isinstance(scale, Boolean):
        return self._get_boolean_mapping(scale, data)

    if scale.values is None:
        # TODO Rethink best default continuous color gradient
        mapping = color_palette("ch:", as_cmap=True)
    elif isinstance(scale.values, tuple):
        # TODO blend_palette will strip alpha, but we should support
        # interpolation on all four channels
        mapping = blend_palette(scale.values, as_cmap=True)
    elif isinstance(scale.values, str):
        # TODO for matplotlib colormaps this will clip extremes, which is
        # different from what using the named colormap directly would do
        # This may or may not be desireable.
        mapping = color_palette(scale.values, as_cmap=True)
    elif callable(scale.values):
        mapping = scale.values
    else:
        scale_class = scale.__class__.__name__
        msg = " ".join([
            f"Scale values for {self.variable} with a {scale_class} mapping",
            f"must be string, tuple, or callable; not {type(scale.values)}."
        ])
        raise TypeError(msg)

    def _mapping(x):
        # Remove alpha channel so it does not override alpha property downstream
        # TODO this will need to be more flexible to support RGBA tuples (see above)
        invalid = ~np.isfinite(x)
        out = mapping(x)[:, :3]
        out[invalid] = np.nan
        return out

    return _mapping
19,366
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Color._get_nominal_mapping
(self, scale: Nominal, data: Series)
return mapping
673
685
def _get_nominal_mapping(self, scale: Nominal, data: Series) -> Mapping:
    levels = categorical_order(data, scale.order)
    colors = self._get_values(scale, levels)

    def mapping(x):
        ixs = np.asarray(x, np.intp)
        use = np.isfinite(x)
        out = np.full((len(ixs), colors.shape[1]), np.nan)
        out[use] = np.take(colors, ixs[use], axis=0)
        return out

    return mapping
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L673-L685
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 ]
100
[]
0
true
99.058824
13
2
100
0
def _get_nominal_mapping(self, scale: Nominal, data: Series) -> Mapping:
    levels = categorical_order(data, scale.order)
    colors = self._get_values(scale, levels)

    def mapping(x):
        ixs = np.asarray(x, np.intp)
        use = np.isfinite(x)
        out = np.full((len(ixs), colors.shape[1]), np.nan)
        out[use] = np.take(colors, ixs[use], axis=0)
        return out

    return mapping
19,367
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Color._get_boolean_mapping
(self, scale: Boolean, data: Series)
return mapping
687
700
def _get_boolean_mapping(self, scale: Boolean, data: Series) -> Mapping:
    colors = self._get_values(scale, [True, False])

    def mapping(x):
        use = np.isfinite(x)
        x = np.asarray(x).astype(bool)
        out = np.full((len(x), colors.shape[1]), np.nan)
        out[x & use] = colors[0]
        out[~x & use] = colors[1]
        return out

    return mapping
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L687-L700
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13 ]
100
[]
0
true
99.058824
14
2
100
0
def _get_boolean_mapping(self, scale: Boolean, data: Series) -> Mapping:
    colors = self._get_values(scale, [True, False])

    def mapping(x):
        use = np.isfinite(x)
        x = np.asarray(x).astype(bool)
        out = np.full((len(x), colors.shape[1]), np.nan)
        out[x & use] = colors[0]
        out[~x & use] = colors[1]
        return out

    return mapping
19,368
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Color._get_values
(self, scale: Scale, levels: list)
return self._standardize_color_sequence(colors)
Validate scale.values and identify a value for each level.
Validate scale.values and identify a value for each level.
702
729
def _get_values(self, scale: Scale, levels: list) -> ArrayLike:
    """Validate scale.values and identify a value for each level."""
    n = len(levels)
    values = scale.values
    if isinstance(values, dict):
        self._check_dict_entries(levels, values)
        colors = [values[x] for x in levels]
    elif isinstance(values, list):
        colors = self._check_list_length(levels, values)
    elif isinstance(values, tuple):
        colors = blend_palette(values, n)
    elif isinstance(values, str):
        colors = color_palette(values, n)
    elif values is None:
        if n <= len(get_color_cycle()):
            # Use current (global) default palette
            colors = color_palette(n_colors=n)
        else:
            colors = color_palette("husl", n)
    else:
        scale_class = scale.__class__.__name__
        msg = " ".join([
            f"Scale values for {self.variable} with a {scale_class} mapping",
            f"must be string, list, tuple, or dict; not {type(scale.values)}."
        ])
        raise TypeError(msg)

    return self._standardize_color_sequence(colors)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L702-L729
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27 ]
100
[]
0
true
99.058824
28
8
100
1
def _get_values(self, scale: Scale, levels: list) -> ArrayLike:
    n = len(levels)
    values = scale.values
    if isinstance(values, dict):
        self._check_dict_entries(levels, values)
        colors = [values[x] for x in levels]
    elif isinstance(values, list):
        colors = self._check_list_length(levels, values)
    elif isinstance(values, tuple):
        colors = blend_palette(values, n)
    elif isinstance(values, str):
        colors = color_palette(values, n)
    elif values is None:
        if n <= len(get_color_cycle()):
            # Use current (global) default palette
            colors = color_palette(n_colors=n)
        else:
            colors = color_palette("husl", n)
    else:
        scale_class = scale.__class__.__name__
        msg = " ".join([
            f"Scale values for {self.variable} with a {scale_class} mapping",
            f"must be string, list, tuple, or dict; not {type(scale.values)}."
        ])
        raise TypeError(msg)

    return self._standardize_color_sequence(colors)
19,369
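The palette helpers used by Color._get_values are part of seaborn's public API; a small sketch of the string and tuple branches:

from seaborn import blend_palette, color_palette

print(color_palette("viridis", 3))            # named palette -> 3 RGB tuples
print(blend_palette(("white", "black"), 3))   # tuple -> interpolated gradient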
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Fill.default_scale
(self, data: Series)
return Boolean() if var_type == "boolean" else Nominal()
742
744
def default_scale(self, data: Series) -> Scale:
    var_type = variable_type(data, boolean_type="boolean", strict_boolean=True)
    return Boolean() if var_type == "boolean" else Nominal()
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L742-L744
26
[ 0, 1, 2 ]
100
[]
0
true
99.058824
3
1
100
0
def default_scale(self, data: Series) -> Scale:
    var_type = variable_type(data, boolean_type="boolean", strict_boolean=True)
    return Boolean() if var_type == "boolean" else Nominal()
19,370
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Fill.infer_scale
(self, arg: Any, data: Series)
return Boolean(arg) if var_type == "boolean" else Nominal(arg)
746
748
def infer_scale(self, arg: Any, data: Series) -> Scale:
    var_type = variable_type(data, boolean_type="boolean", strict_boolean=True)
    return Boolean(arg) if var_type == "boolean" else Nominal(arg)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L746-L748
26
[ 0, 1, 2 ]
100
[]
0
true
99.058824
3
1
100
0
def infer_scale(self, arg: Any, data: Series) -> Scale:
    var_type = variable_type(data, boolean_type="boolean", strict_boolean=True)
    return Boolean(arg) if var_type == "boolean" else Nominal(arg)
19,371
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Fill.standardize
(self, val: Any)
return bool(val)
750
751
def standardize(self, val: Any) -> bool:
    return bool(val)
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L750-L751
26
[ 0, 1 ]
100
[]
0
true
99.058824
2
1
100
0
def standardize(self, val: Any) -> bool:
    return bool(val)
19,372
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Fill._default_values
(self, n: int)
return [x for x, _ in zip(itertools.cycle([True, False]), range(n))]
Return a list of n values, alternating True and False.
Return a list of n values, alternating True and False.
753
762
def _default_values(self, n: int) -> list:
    """Return a list of n values, alternating True and False."""
    if n > 2:
        msg = " ".join([
            f"The variable assigned to {self.variable} has more than two levels,",
            f"so {self.variable} values will cycle and may be uninterpretable",
        ])
        # TODO fire in a "nice" way (see above)
        warnings.warn(msg, UserWarning)
    return [x for x, _ in zip(itertools.cycle([True, False]), range(n))]
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L753-L762
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 ]
100
[]
0
true
99.058824
10
3
100
1
def _default_values(self, n: int) -> list:
    if n > 2:
        msg = " ".join([
            f"The variable assigned to {self.variable} has more than two levels,",
            f"so {self.variable} values will cycle and may be uninterpretable",
        ])
        # TODO fire in a "nice" way (see above)
        warnings.warn(msg, UserWarning)
    return [x for x, _ in zip(itertools.cycle([True, False]), range(n))]
19,373
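A one-line check of the alternating default that Fill._default_values returns (n > 2 additionally fires the warning above):

import itertools

n = 5
print([x for x, _ in zip(itertools.cycle([True, False]), range(n))])
# [True, False, True, False, True]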
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Fill.get_mapping
(self, scale: Scale, data: Series)
return mapping
Return a function that maps each data value to True or False.
Return a function that maps each data value to True or False.
764
781
def get_mapping(self, scale: Scale, data: Series) -> Mapping:
    """Return a function that maps each data value to True or False."""
    boolean_scale = isinstance(scale, Boolean)
    order = getattr(scale, "order", [True, False] if boolean_scale else None)
    levels = categorical_order(data, order)
    values = self._get_values(scale, levels)

    if boolean_scale:
        values = values[::-1]

    def mapping(x):
        ixs = np.asarray(x, np.intp)
        return [
            values[ix] if np.isfinite(x_i) else False
            for x_i, ix in zip(x, ixs)
        ]

    return mapping
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L764-L781
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17 ]
100
[]
0
true
99.058824
18
4
100
1
def get_mapping(self, scale: Scale, data: Series) -> Mapping:
    boolean_scale = isinstance(scale, Boolean)
    order = getattr(scale, "order", [True, False] if boolean_scale else None)
    levels = categorical_order(data, order)
    values = self._get_values(scale, levels)

    if boolean_scale:
        values = values[::-1]

    def mapping(x):
        ixs = np.asarray(x, np.intp)
        return [
            values[ix] if np.isfinite(x_i) else False
            for x_i, ix in zip(x, ixs)
        ]

    return mapping
19,374
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/properties.py
Fill._get_values
(self, scale: Scale, levels: list)
return values
Validate scale.values and identify a value for each level.
Validate scale.values and identify a value for each level.
783
798
def _get_values(self, scale: Scale, levels: list) -> list:
    """Validate scale.values and identify a value for each level."""
    if isinstance(scale.values, list):
        values = [bool(x) for x in scale.values]
    elif isinstance(scale.values, dict):
        values = [bool(scale.values[x]) for x in levels]
    elif scale.values is None:
        values = self._default_values(len(levels))
    else:
        msg = " ".join([
            f"Scale values for {self.variable} must be passed in",
            f"a list or dict; not {type(scale.values)}."
        ])
        raise TypeError(msg)

    return values
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/properties.py#L783-L798
26
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 ]
100
[]
0
true
99.058824
16
6
100
1
def _get_values(self, scale: Scale, levels: list) -> list:
    if isinstance(scale.values, list):
        values = [bool(x) for x in scale.values]
    elif isinstance(scale.values, dict):
        values = [bool(scale.values[x]) for x in levels]
    elif scale.values is None:
        values = self._default_values(len(levels))
    else:
        msg = " ".join([
            f"Scale values for {self.variable} must be passed in",
            f"a list or dict; not {type(scale.values)}."
        ])
        raise TypeError(msg)

    return values
19,375
mwaskom/seaborn
a47b97e4b98c809db55cbd283de21acba89fe186
seaborn/_core/typing.py
Default.__repr__
(self)
return "<default>"
35
36
def __repr__(self):
    return "<default>"
https://github.com/mwaskom/seaborn/blob/a47b97e4b98c809db55cbd283de21acba89fe186/project26/seaborn/_core/typing.py#L35-L36
26
[ 0, 1 ]
100
[]
0
true
100
2
1
100
0
def __repr__(self):
    return "<default>"
19,376
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/funcmakers.py
make_func
(f, builtin=False, test=False)
10
26
def make_func(f, builtin=False, test=False):
    if callable(f):
        return f
    elif f is None:
        # pass None to builtin as predicate or mapping function for speed
        return None if builtin else \
            bool if test else lambda x: x
    elif isinstance(f, (basestring, _re_type)):
        return re_tester(f) if test else re_finder(f)
    elif isinstance(f, (int, slice)):
        return itemgetter(f)
    elif isinstance(f, Mapping):
        return f.__getitem__
    elif isinstance(f, Set):
        return f.__contains__
    else:
        raise TypeError("Can't make a func from %s" % f.__class__.__name__)
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/funcmakers.py#L10-L26
29
[ 0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14 ]
82.352941
[ 16 ]
5.882353
false
95
17
7
94.117647
0
def make_func(f, builtin=False, test=False):
    if callable(f):
        return f
    elif f is None:
        # pass None to builtin as predicate or mapping function for speed
        return None if builtin else \
            bool if test else lambda x: x
    elif isinstance(f, (basestring, _re_type)):
        return re_tester(f) if test else re_finder(f)
    elif isinstance(f, (int, slice)):
        return itemgetter(f)
    elif isinstance(f, Mapping):
        return f.__getitem__
    elif isinstance(f, Set):
        return f.__contains__
    else:
        raise TypeError("Can't make a func from %s" % f.__class__.__name__)
19,943
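Usage sketch for make_func's dispatch table; note the import path is funcy's internal funcmakers module, which may move between versions:

from funcy.funcmakers import make_func  # internal path; may change

print(make_func(len)([1, 2, 3]))      # 3: callables pass through
print(make_func(r'\d+')('abc 42'))    # '42': strings become regex finders
print(make_func(1)(['a', 'b', 'c']))  # 'b': ints become itemgetters
print(make_func({'a': 1})('a'))       # 1: mappings become lookups
print(make_func({'a', 'b'})('a'))     # True: sets become membership tests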
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/funcmakers.py
make_pred
(pred, builtin=False)
return make_func(pred, builtin=builtin, test=True)
28
29
def make_pred(pred, builtin=False):
    return make_func(pred, builtin=builtin, test=True)
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/funcmakers.py#L28-L29
29
[ 0, 1 ]
100
[]
0
true
95
2
1
100
0
def make_pred(pred, builtin=False):
    return make_func(pred, builtin=builtin, test=True)
19,944
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
tap
(x, label=None)
return x
Prints x and then returns it.
Prints x and then returns it.
26
32
def tap(x, label=None):
    """Prints x and then returns it."""
    if label:
        print('%s: %s' % (label, x))
    else:
        print(x)
    return x
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L26-L32
29
[ 0, 1, 2, 3, 4, 5, 6 ]
100
[]
0
true
82.781457
7
2
100
1
def tap(x, label=None):
    if label:
        print('%s: %s' % (label, x))
    else:
        print(x)
    return x
19,945
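tap is exported at funcy's top level; it is handy for peeking at a value in the middle of an expression:

from funcy import tap

result = sum(tap([1, 2, 3], label='input'))  # prints "input: [1, 2, 3]"
print(result)                                # 6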
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
log_calls
(call, print_func, errors=True, stack=True, repr_len=REPR_LEN)
Logs or prints all function calls, including arguments, results and raised exceptions.
Logs or prints all function calls, including arguments, results and raised exceptions.
36
49
def log_calls(call, print_func, errors=True, stack=True, repr_len=REPR_LEN):
    """Logs or prints all function calls,
       including arguments, results and raised exceptions."""
    signature = signature_repr(call, repr_len)
    try:
        print_func('Call %s' % signature)
        result = call()
        # NOTE: using full repr of result
        print_func('-> %s from %s' % (smart_repr(result, max_len=None), signature))
        return result
    except BaseException as e:
        if errors:
            print_func('-> ' + _format_error(signature, e, stack))
        raise
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L36-L49
29
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13 ]
100
[]
0
true
82.781457
14
3
100
2
def log_calls(call, print_func, errors=True, stack=True, repr_len=REPR_LEN):
    signature = signature_repr(call, repr_len)
    try:
        print_func('Call %s' % signature)
        result = call()
        # NOTE: using full repr of result
        print_func('-> %s from %s' % (smart_repr(result, max_len=None), signature))
        return result
    except BaseException as e:
        if errors:
            print_func('-> ' + _format_error(signature, e, stack))
        raise
19,946
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
print_calls
(errors=True, stack=True, repr_len=REPR_LEN)
51
55
def print_calls(errors=True, stack=True, repr_len=REPR_LEN):
    if callable(errors):
        return log_calls(print)(errors)
    else:
        return log_calls(print, errors, stack, repr_len)
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L51-L55
29
[ 0, 1, 2, 4 ]
80
[]
0
false
82.781457
5
2
100
0
def print_calls(errors=True, stack=True, repr_len=REPR_LEN):
    if callable(errors):
        return log_calls(print)(errors)
    else:
        return log_calls(print, errors, stack, repr_len)
19,947
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
log_enters
(call, print_func, repr_len=REPR_LEN)
return call()
Logs each entrance to a function.
Logs each entrance to a function.
60
63
def log_enters(call, print_func, repr_len=REPR_LEN):
    """Logs each entrance to a function."""
    print_func('Call %s' % signature_repr(call, repr_len))
    return call()
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L60-L63
29
[ 0, 1 ]
50
[ 2, 3 ]
50
false
82.781457
4
1
50
1
def log_enters(call, print_func, repr_len=REPR_LEN):
    print_func('Call %s' % signature_repr(call, repr_len))
    return call()
19,948
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
print_enters
(repr_len=REPR_LEN)
Prints on each entrance to a function.
Prints on each entrance to a function.
66
71
def print_enters(repr_len=REPR_LEN):
    """Prints on each entrance to a function."""
    if callable(repr_len):
        return log_enters(print)(repr_len)
    else:
        return log_enters(print, repr_len)
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L66-L71
29
[ 0, 1 ]
33.333333
[ 2, 3, 5 ]
50
false
82.781457
6
2
50
1
def print_enters(repr_len=REPR_LEN):
    if callable(repr_len):
        return log_enters(print)(repr_len)
    else:
        return log_enters(print, repr_len)
19,949
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
log_exits
(call, print_func, errors=True, stack=True, repr_len=REPR_LEN)
Logs exits from a function.
Logs exits from a function.
75
86
def log_exits(call, print_func, errors=True, stack=True, repr_len=REPR_LEN):
    """Logs exits from a function."""
    signature = signature_repr(call, repr_len)
    try:
        result = call()
        # NOTE: using full repr of result
        print_func('-> %s from %s' % (smart_repr(result, max_len=None), signature))
        return result
    except BaseException as e:
        if errors:
            print_func('-> ' + _format_error(signature, e, stack))
        raise
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L75-L86
29
[ 0, 1 ]
16.666667
[ 2, 3, 4, 6, 7, 8, 9, 10, 11 ]
75
false
82.781457
12
3
25
1
def log_exits(call, print_func, errors=True, stack=True, repr_len=REPR_LEN):
    signature = signature_repr(call, repr_len)
    try:
        result = call()
        # NOTE: using full repr of result
        print_func('-> %s from %s' % (smart_repr(result, max_len=None), signature))
        return result
    except BaseException as e:
        if errors:
            print_func('-> ' + _format_error(signature, e, stack))
        raise
19,950
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
print_exits
(errors=True, stack=True, repr_len=REPR_LEN)
Prints on exits from a function.
Prints on exits from a function.
88
93
def print_exits(errors=True, stack=True, repr_len=REPR_LEN):
    """Prints on exits from a function."""
    if callable(errors):
        return log_exits(print)(errors)
    else:
        return log_exits(print, errors, stack, repr_len)
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L88-L93
29
[ 0, 1 ]
33.333333
[ 2, 3, 5 ]
50
false
82.781457
6
2
50
1
def print_exits(errors=True, stack=True, repr_len=REPR_LEN):
    if callable(errors):
        return log_exits(print)(errors)
    else:
        return log_exits(print, errors, stack, repr_len)
19,951
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
format_time
(sec)
145
153
def format_time(sec):
    if sec < 1e-6:
        return '%8.2f ns' % (sec * 1e9)
    elif sec < 1e-3:
        return '%8.2f mks' % (sec * 1e6)
    elif sec < 1:
        return '%8.2f ms' % (sec * 1e3)
    else:
        return '%8.2f s' % sec
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L145-L153
29
[ 0, 1, 2, 3, 4, 5, 6 ]
77.777778
[ 8 ]
11.111111
false
82.781457
9
4
88.888889
0
def format_time(sec):
    if sec < 1e-6:
        return '%8.2f ns' % (sec * 1e9)
    elif sec < 1e-3:
        return '%8.2f mks' % (sec * 1e6)
    elif sec < 1:
        return '%8.2f ms' % (sec * 1e3)
    else:
        return '%8.2f s' % sec
19,952
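format_time picks the largest unit that keeps the number below 1; a quick check (funcy.debug is a module-level helper, not re-exported at the top level):

from funcy.debug import format_time

print(format_time(2.5e-7))  # '  250.00 ns'
print(format_time(2.5e-4))  # '  250.00 mks'
print(format_time(0.25))    # '  250.00 ms'
print(format_time(2.5))     # '    2.50 s'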
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
log_iter_durations
(seq, print_func, label=None, unit='auto')
Times processing of each item in seq.
Times processing of each item in seq.
186
197
def log_iter_durations(seq, print_func, label=None, unit='auto'):
    """Times processing of each item in seq."""
    if unit not in time_formatters:
        raise ValueError('Unknown time unit: %s. It should be ns, mks, ms, s or auto.' % unit)
    _format_time = time_formatters[unit]
    suffix = " of %s" % label if label else ""
    it = iter(seq)
    for i, item in enumerate(it):
        start = timer()
        yield item
        duration = _format_time(timer() - start)
        print_func("%s in iteration %d%s" % (duration, i, suffix))
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L186-L197
29
[ 0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 11 ]
91.666667
[ 3 ]
8.333333
false
82.781457
12
3
91.666667
1
def log_iter_durations(seq, print_func, label=None, unit='auto'):
    if unit not in time_formatters:
        raise ValueError('Unknown time unit: %s. It should be ns, mks, ms, s or auto.' % unit)
    _format_time = time_formatters[unit]
    suffix = " of %s" % label if label else ""
    it = iter(seq)
    for i, item in enumerate(it):
        start = timer()
        yield item
        duration = _format_time(timer() - start)
        print_func("%s in iteration %d%s" % (duration, i, suffix))
19,953
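Because the timer spans the yield, each reported duration includes the consumer's work on that item. A usage sketch via the public print_iter_durations wrapper:

import time
from funcy import print_iter_durations

for item in print_iter_durations([1, 2, 3], label='work'):
    time.sleep(0.01)  # prints roughly "   10.00 ms in iteration 0 of work", etc.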
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
print_iter_durations
(seq, label=None, unit='auto')
return log_iter_durations(seq, print, label, unit=unit)
Times processing of each item in seq.
Times processing of each item in seq.
199
201
def print_iter_durations(seq, label=None, unit='auto'):
    """Times processing of each item in seq."""
    return log_iter_durations(seq, print, label, unit=unit)
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L199-L201
29
[ 0, 1 ]
66.666667
[ 2 ]
33.333333
false
82.781457
3
1
66.666667
1
def print_iter_durations(seq, label=None, unit='auto'): return log_iter_durations(seq, print, label, unit=unit)
19,954
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
_format_error
(label, e, stack=True)
206
219
def _format_error(label, e, stack=True): if isinstance(e, Exception): if stack: e_message = traceback.format_exc() else: e_message = '%s: %s' % (e.__class__.__name__, e) else: e_message = e if label: template = '%s raised in %s' if stack else '%s raised in %s' return template % (e_message, label) else: return e_message
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L206-L219
29
[ 0, 1, 2, 5, 7, 8, 9, 10, 11, 13 ]
71.428571
[ 3 ]
7.142857
false
82.781457
14
4
92.857143
0
def _format_error(label, e, stack=True): if isinstance(e, Exception): if stack: e_message = traceback.format_exc() else: e_message = '%s: %s' % (e.__class__.__name__, e) else: e_message = e if label: template = '%s raised in %s' if stack else '%s raised in %s' return template % (e_message, label) else: return e_message
19,955
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
signature_repr
(call, repr_len=REPR_LEN)
return '%s(%s)' % (name, ', '.join(chain(args_repr, kwargs_repr)))
224
235
def signature_repr(call, repr_len=REPR_LEN): if isinstance(call._func, partial): if hasattr(call._func.func, '__name__'): name = '<%s partial>' % call._func.func.__name__ else: name = '<unknown partial>' else: name = getattr(call._func, '__name__', '<unknown>') args_repr = (smart_repr(arg, repr_len) for arg in call._args) kwargs_repr = ('%s=%s' % (key, smart_repr(value, repr_len)) for key, value in call._kwargs.items()) return '%s(%s)' % (name, ', '.join(chain(args_repr, kwargs_repr)))
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L224-L235
29
[ 0, 1, 7, 8, 9, 11 ]
50
[ 2, 3, 5 ]
25
false
82.781457
12
3
75
0
def signature_repr(call, repr_len=REPR_LEN): if isinstance(call._func, partial): if hasattr(call._func.func, '__name__'): name = '<%s partial>' % call._func.func.__name__ else: name = '<unknown partial>' else: name = getattr(call._func, '__name__', '<unknown>') args_repr = (smart_repr(arg, repr_len) for arg in call._args) kwargs_repr = ('%s=%s' % (key, smart_repr(value, repr_len)) for key, value in call._kwargs.items()) return '%s(%s)' % (name, ', '.join(chain(args_repr, kwargs_repr)))
19,956
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
smart_repr
(value, max_len=REPR_LEN)
return res
237
246
def smart_repr(value, max_len=REPR_LEN): if isinstance(value, basestring): res = repr(value) else: res = str(value) res = re.sub(r'\s+', ' ', res) if max_len and len(res) > max_len: res = res[:max_len-3] + '...' return res
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L237-L246
29
[ 0, 1, 2, 4, 5, 6, 7, 9 ]
80
[ 8 ]
10
false
82.781457
10
4
90
0
def smart_repr(value, max_len=REPR_LEN): if isinstance(value, basestring): res = repr(value) else: res = str(value) res = re.sub(r'\s+', ' ', res) if max_len and len(res) > max_len: res = res[:max_len-3] + '...' return res
19,957
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
LabeledContextDecorator.__init__
(self, print_func, label=None, repr_len=REPR_LEN)
100
103
def __init__(self, print_func, label=None, repr_len=REPR_LEN): self.print_func = print_func self.label = label self.repr_len = repr_len
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L100-L103
29
[ 0, 1, 2, 3 ]
100
[]
0
true
82.781457
4
1
100
0
def __init__(self, print_func, label=None, repr_len=REPR_LEN): self.print_func = print_func self.label = label self.repr_len = repr_len
19,958
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
LabeledContextDecorator.__call__
(self, label=None, **kwargs)
105
109
def __call__(self, label=None, **kwargs): if callable(label): return self.decorator(label) else: return self.__class__(self.print_func, label, **kwargs)
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L105-L109
29
[ 0, 1, 2, 4 ]
80
[]
0
false
82.781457
5
2
100
0
def __call__(self, label=None, **kwargs): if callable(label): return self.decorator(label) else: return self.__class__(self.print_func, label, **kwargs)
19,959
Suor/funcy
5f23815f224e595b8a54822bcb330cd31a799c78
funcy/debug.py
LabeledContextDecorator.decorator
(self, func)
return inner
111
120
def decorator(self, func): @wraps(func) def inner(*args, **kwargs): # Recreate self with a new label so that nested and recursive calls will work cm = self.__class__.__new__(self.__class__) cm.__dict__.update(self.__dict__) cm.label = signature_repr(Call(func, args, kwargs), self.repr_len) with cm: return func(*args, **kwargs) return inner
https://github.com/Suor/funcy/blob/5f23815f224e595b8a54822bcb330cd31a799c78/project29/funcy/debug.py#L111-L120
29
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 ]
100
[]
0
true
82.781457
10
3
100
0
def decorator(self, func): @wraps(func) def inner(*args, **kwargs): # Recreate self with a new label so that nested and recursive calls will work cm = self.__class__.__new__(self.__class__) cm.__dict__.update(self.__dict__) cm.label = signature_repr(Call(func, args, kwargs), self.repr_len) with cm: return func(*args, **kwargs) return inner
19,960