plotnine 0.15.0.dev2__py3-none-any.whl → 0.15.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (140)
  1. plotnine/__init__.py +2 -0
  2. plotnine/_mpl/layout_manager/_engine.py +1 -1
  3. plotnine/_mpl/layout_manager/_layout_items.py +128 -83
  4. plotnine/_mpl/layout_manager/_layout_tree.py +761 -310
  5. plotnine/_mpl/layout_manager/_spaces.py +320 -103
  6. plotnine/_mpl/patches.py +70 -34
  7. plotnine/_mpl/text.py +144 -63
  8. plotnine/_mpl/utils.py +1 -1
  9. plotnine/_utils/__init__.py +50 -107
  10. plotnine/_utils/context.py +78 -2
  11. plotnine/_utils/ipython.py +35 -51
  12. plotnine/_utils/quarto.py +21 -0
  13. plotnine/_utils/yippie.py +115 -0
  14. plotnine/composition/__init__.py +11 -0
  15. plotnine/composition/_beside.py +55 -0
  16. plotnine/composition/_compose.py +471 -0
  17. plotnine/composition/_plot_spacer.py +60 -0
  18. plotnine/composition/_stack.py +55 -0
  19. plotnine/coords/coord.py +3 -3
  20. plotnine/data/__init__.py +31 -0
  21. plotnine/data/anscombe-quartet.csv +45 -0
  22. plotnine/doctools.py +4 -4
  23. plotnine/facets/facet.py +4 -4
  24. plotnine/facets/strips.py +17 -28
  25. plotnine/geoms/annotate.py +13 -13
  26. plotnine/geoms/annotation_logticks.py +7 -8
  27. plotnine/geoms/annotation_stripes.py +6 -6
  28. plotnine/geoms/geom.py +60 -27
  29. plotnine/geoms/geom_abline.py +3 -2
  30. plotnine/geoms/geom_area.py +2 -2
  31. plotnine/geoms/geom_bar.py +11 -2
  32. plotnine/geoms/geom_bin_2d.py +6 -2
  33. plotnine/geoms/geom_blank.py +0 -3
  34. plotnine/geoms/geom_boxplot.py +8 -4
  35. plotnine/geoms/geom_col.py +8 -2
  36. plotnine/geoms/geom_count.py +6 -2
  37. plotnine/geoms/geom_crossbar.py +3 -3
  38. plotnine/geoms/geom_density_2d.py +6 -2
  39. plotnine/geoms/geom_dotplot.py +2 -2
  40. plotnine/geoms/geom_errorbar.py +2 -2
  41. plotnine/geoms/geom_errorbarh.py +2 -2
  42. plotnine/geoms/geom_histogram.py +1 -1
  43. plotnine/geoms/geom_hline.py +3 -2
  44. plotnine/geoms/geom_linerange.py +2 -2
  45. plotnine/geoms/geom_map.py +5 -5
  46. plotnine/geoms/geom_path.py +11 -12
  47. plotnine/geoms/geom_point.py +4 -5
  48. plotnine/geoms/geom_pointdensity.py +4 -0
  49. plotnine/geoms/geom_pointrange.py +3 -5
  50. plotnine/geoms/geom_polygon.py +2 -3
  51. plotnine/geoms/geom_qq.py +4 -0
  52. plotnine/geoms/geom_qq_line.py +4 -0
  53. plotnine/geoms/geom_quantile.py +4 -0
  54. plotnine/geoms/geom_raster.py +4 -5
  55. plotnine/geoms/geom_rect.py +3 -4
  56. plotnine/geoms/geom_ribbon.py +7 -7
  57. plotnine/geoms/geom_rug.py +1 -1
  58. plotnine/geoms/geom_segment.py +2 -2
  59. plotnine/geoms/geom_sina.py +3 -3
  60. plotnine/geoms/geom_smooth.py +7 -3
  61. plotnine/geoms/geom_step.py +2 -2
  62. plotnine/geoms/geom_text.py +2 -3
  63. plotnine/geoms/geom_violin.py +28 -8
  64. plotnine/geoms/geom_vline.py +3 -2
  65. plotnine/ggplot.py +64 -85
  66. plotnine/guides/guide.py +7 -10
  67. plotnine/guides/guide_colorbar.py +3 -3
  68. plotnine/guides/guide_legend.py +3 -3
  69. plotnine/guides/guides.py +6 -6
  70. plotnine/helpers.py +49 -0
  71. plotnine/iapi.py +28 -5
  72. plotnine/labels.py +3 -3
  73. plotnine/layer.py +36 -19
  74. plotnine/mapping/_atomic.py +178 -0
  75. plotnine/mapping/_env.py +13 -2
  76. plotnine/mapping/_eval_environment.py +85 -0
  77. plotnine/mapping/aes.py +91 -72
  78. plotnine/mapping/evaluation.py +7 -65
  79. plotnine/scales/__init__.py +2 -0
  80. plotnine/scales/limits.py +7 -7
  81. plotnine/scales/scale.py +3 -3
  82. plotnine/scales/scale_color.py +82 -18
  83. plotnine/scales/scale_continuous.py +6 -4
  84. plotnine/scales/scale_datetime.py +28 -14
  85. plotnine/scales/scale_discrete.py +1 -1
  86. plotnine/scales/scale_identity.py +21 -2
  87. plotnine/scales/scale_manual.py +8 -2
  88. plotnine/scales/scale_xy.py +2 -2
  89. plotnine/stats/binning.py +4 -1
  90. plotnine/stats/smoothers.py +23 -36
  91. plotnine/stats/stat.py +20 -32
  92. plotnine/stats/stat_bin.py +6 -5
  93. plotnine/stats/stat_bin_2d.py +11 -9
  94. plotnine/stats/stat_bindot.py +13 -16
  95. plotnine/stats/stat_boxplot.py +6 -6
  96. plotnine/stats/stat_count.py +6 -9
  97. plotnine/stats/stat_density.py +7 -10
  98. plotnine/stats/stat_density_2d.py +12 -8
  99. plotnine/stats/stat_ecdf.py +7 -6
  100. plotnine/stats/stat_ellipse.py +9 -6
  101. plotnine/stats/stat_function.py +10 -8
  102. plotnine/stats/stat_hull.py +6 -3
  103. plotnine/stats/stat_identity.py +5 -2
  104. plotnine/stats/stat_pointdensity.py +5 -7
  105. plotnine/stats/stat_qq.py +46 -20
  106. plotnine/stats/stat_qq_line.py +16 -11
  107. plotnine/stats/stat_quantile.py +15 -9
  108. plotnine/stats/stat_sina.py +45 -14
  109. plotnine/stats/stat_smooth.py +8 -10
  110. plotnine/stats/stat_sum.py +5 -2
  111. plotnine/stats/stat_summary.py +7 -10
  112. plotnine/stats/stat_summary_bin.py +11 -14
  113. plotnine/stats/stat_unique.py +5 -2
  114. plotnine/stats/stat_ydensity.py +8 -11
  115. plotnine/themes/elements/__init__.py +2 -1
  116. plotnine/themes/elements/element_line.py +17 -9
  117. plotnine/themes/elements/margin.py +64 -1
  118. plotnine/themes/theme.py +9 -1
  119. plotnine/themes/theme_538.py +0 -1
  120. plotnine/themes/theme_bw.py +0 -1
  121. plotnine/themes/theme_dark.py +0 -1
  122. plotnine/themes/theme_gray.py +6 -5
  123. plotnine/themes/theme_light.py +1 -1
  124. plotnine/themes/theme_matplotlib.py +5 -5
  125. plotnine/themes/theme_seaborn.py +7 -4
  126. plotnine/themes/theme_void.py +9 -8
  127. plotnine/themes/theme_xkcd.py +0 -1
  128. plotnine/themes/themeable.py +110 -32
  129. plotnine/typing.py +17 -6
  130. plotnine/watermark.py +3 -3
  131. {plotnine-0.15.0.dev2.dist-info → plotnine-0.15.1.dist-info}/METADATA +13 -6
  132. plotnine-0.15.1.dist-info/RECORD +221 -0
  133. {plotnine-0.15.0.dev2.dist-info → plotnine-0.15.1.dist-info}/WHEEL +1 -1
  134. plotnine/plot_composition/__init__.py +0 -10
  135. plotnine/plot_composition/_compose.py +0 -436
  136. plotnine/plot_composition/_spacer.py +0 -32
  137. plotnine-0.15.0.dev2.dist-info/RECORD +0 -214
  138. /plotnine/{plot_composition → composition}/_plotspec.py +0 -0
  139. {plotnine-0.15.0.dev2.dist-info → plotnine-0.15.1.dist-info}/licenses/LICENSE +0 -0
  140. {plotnine-0.15.0.dev2.dist-info → plotnine-0.15.1.dist-info}/top_level.txt +0 -0
@@ -25,7 +25,7 @@ class stat_ecdf(stat):
25
25
 
26
26
  See Also
27
27
  --------
28
- plotnine.geom_step
28
+ plotnine.geom_step : The default `geom` for this `stat`.
29
29
  """
30
30
 
31
31
  _aesthetics_doc = """
@@ -50,17 +50,18 @@ class stat_ecdf(stat):
50
50
  DEFAULT_AES = {"y": after_stat("ecdf")}
51
51
  CREATES = {"ecdf"}
52
52
 
53
- @classmethod
54
- def compute_group(cls, data, scales, **params):
53
+ def compute_group(self, data, scales):
55
54
  from statsmodels.distributions.empirical_distribution import ECDF
56
55
 
56
+ n, pad = self.params["n"], self.params["pad"]
57
+
57
58
  # If n is None, use raw values; otherwise interpolate
58
- if params["n"] is None:
59
+ if n is None:
59
60
  x = np.unique(data["x"])
60
61
  else:
61
- x = np.linspace(data["x"].min(), data["x"].max(), params["n"])
62
+ x = np.linspace(data["x"].min(), data["x"].max(), n)
62
63
 
63
- if params["pad"]:
64
+ if pad:
64
65
  x = np.hstack([-np.inf, x, np.inf])
65
66
 
66
67
  ecdf = ECDF(data["x"].to_numpy())(x)
@@ -37,6 +37,10 @@ class stat_ellipse(stat):
37
37
  The confidence level at which to draw the ellipse.
38
38
  segments : int, default=51
39
39
  Number of segments to be used in drawing the ellipse.
40
+
41
+ See Also
42
+ --------
43
+ plotnine.geom_path : The default `geom` for this `stat`.
40
44
  """
41
45
 
42
46
  REQUIRED_AES = {"x", "y"}
@@ -49,14 +53,13 @@ class stat_ellipse(stat):
49
53
  "segments": 51,
50
54
  }
51
55
 
52
- @classmethod
53
- def compute_group(cls, data, scales, **params):
56
+ def compute_group(self, data, scales):
54
57
  import scipy.stats as stats
55
58
  from scipy import linalg
56
59
 
57
- level = params["level"]
58
- segments = params["segments"]
59
- type_ = params["type"]
60
+ level = self.params["level"]
61
+ segments = self.params["segments"]
62
+ type_ = self.params["type"]
60
63
 
61
64
  dfn = 2
62
65
  dfd = len(data) - 1
@@ -203,7 +206,7 @@ def cov_trob(
203
206
  wt = wt[wt > 0]
204
207
  n, _ = x.shape
205
208
 
206
- wt = wt[:, np.newaxis]
209
+ wt = wt[:, np.newaxis] # pyright: ignore[reportCallIssue,reportArgumentType,reportOptionalSubscript]
207
210
 
208
211
  # loc
209
212
  use_loc = False
@@ -37,6 +37,10 @@ class stat_function(stat):
37
37
  then the `xlim` must be provided.
38
38
  args : Optional[tuple[Any] | dict[str, Any]], default=None
39
39
  Arguments to pass to `fun`.
40
+
41
+ See Also
42
+ --------
43
+ plotnine.geom_path : The default `geom` for this `stat`.
40
44
  """
41
45
 
42
46
  _aesthetics_doc = """
@@ -82,14 +86,12 @@ class stat_function(stat):
82
86
  "stat_function requires parameter 'fun' to be "
83
87
  "a function or any other callable object"
84
88
  )
85
- return self.params
86
-
87
- @classmethod
88
- def compute_group(cls, data, scales, **params):
89
- old_fun: Callable[..., FloatArrayLike] = params["fun"]
90
- n = params["n"]
91
- args = params["args"]
92
- xlim = params["xlim"]
89
+
90
+ def compute_group(self, data, scales):
91
+ old_fun: Callable[..., FloatArrayLike] = self.params["fun"]
92
+ n = self.params["n"]
93
+ args = self.params["args"]
94
+ xlim = self.params["xlim"]
93
95
  range_x = xlim or scales.x.dimension((0, 0))
94
96
 
95
97
  if isinstance(args, (list, tuple)):
@@ -26,6 +26,10 @@ class stat_hull(stat):
26
26
  Raised when Qhull encounters an error condition,
27
27
  such as geometrical degeneracy when options to resolve are
28
28
  not enabled.
29
+
30
+ See Also
31
+ --------
32
+ plotnine.geom_path : The default `geom` for this `stat`.
29
33
  """
30
34
 
31
35
  _aesthetics_doc = """
@@ -47,12 +51,11 @@ class stat_hull(stat):
47
51
  }
48
52
  CREATES = {"area"}
49
53
 
50
- @classmethod
51
- def compute_group(cls, data, scales, **params):
54
+ def compute_group(self, data, scales):
52
55
  from scipy.spatial import ConvexHull
53
56
 
54
57
  hull = ConvexHull(
55
- data[["x", "y"]], qhull_options=params["qhull_options"]
58
+ data[["x", "y"]], qhull_options=self.params["qhull_options"]
56
59
  )
57
60
  idx = np.hstack([hull.vertices, hull.vertices[0]])
58
61
 
@@ -12,10 +12,13 @@ class stat_identity(stat):
12
12
  Parameters
13
13
  ----------
14
14
  {common_parameters}
15
+
16
+ See Also
17
+ --------
18
+ plotnine.geom_point : The default `geom` for this `stat`.
15
19
  """
16
20
 
17
21
  DEFAULT_PARAMS = {"geom": "point", "position": "identity", "na_rm": False}
18
22
 
19
- @classmethod
20
- def compute_panel(cls, data, scales, **params):
23
+ def compute_panel(self, data, scales):
21
24
  return data
@@ -24,6 +24,7 @@ class stat_pointdensity(stat):
24
24
 
25
25
  See Also
26
26
  --------
27
+ plotnine.geom_density_2d : The default `geom` for this `stat`.
27
28
  statsmodels.nonparametric.kde.KDEMultivariate
28
29
  scipy.stats.gaussian_kde
29
30
  sklearn.neighbors.KernelDensity
@@ -51,7 +52,7 @@ class stat_pointdensity(stat):
51
52
  CREATES = {"density"}
52
53
 
53
54
  def setup_params(self, data):
54
- params = self.params.copy()
55
+ params = self.params
55
56
  if params["kde_params"] is None:
56
57
  params["kde_params"] = {}
57
58
 
@@ -63,12 +64,9 @@ class stat_pointdensity(stat):
63
64
  y_type = get_var_type(data["y"])
64
65
  kde_params["var_type"] = f"{x_type}{y_type}"
65
66
 
66
- return params
67
-
68
- @classmethod
69
- def compute_group(cls, data, scales, **params):
70
- package = params["package"]
71
- kde_params = params["kde_params"]
67
+ def compute_group(self, data, scales):
68
+ package = self.params["package"]
69
+ kde_params = self.params["kde_params"]
72
70
 
73
71
  var_data = np.array([data["x"].to_numpy(), data["y"].to_numpy()]).T
74
72
  density = kde(var_data, var_data, package, **kde_params)
plotnine/stats/stat_qq.py CHANGED
@@ -1,3 +1,7 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING
4
+
1
5
  import numpy as np
2
6
  import pandas as pd
3
7
 
@@ -6,6 +10,11 @@ from ..exceptions import PlotnineError
6
10
  from ..mapping.evaluation import after_stat
7
11
  from .stat import stat
8
12
 
13
+ if TYPE_CHECKING:
14
+ from typing import Any, Sequence
15
+
16
+ from plotnine.typing import FloatArray
17
+
9
18
 
10
19
  # Note: distribution should be a name from scipy.stat.distribution
11
20
  @document
@@ -38,6 +47,7 @@ class stat_qq(stat):
38
47
 
39
48
  See Also
40
49
  --------
50
+ plotnine.geom_qq : The default `geom` for this `stat`.
41
51
  scipy.stats.mstats.plotting_positions : Uses `alpha_beta`
42
52
  to calculate the quantiles.
43
53
  """
@@ -65,25 +75,41 @@ class stat_qq(stat):
65
75
  "alpha_beta": (3 / 8, 3 / 8),
66
76
  }
67
77
 
68
- @classmethod
69
- def compute_group(cls, data, scales, **params):
70
- from scipy.stats.mstats import plotting_positions
71
-
72
- from .distributions import get_continuous_distribution
73
-
78
+ def compute_group(self, data, scales):
74
79
  sample = data["sample"].sort_values().to_numpy()
75
- alpha, beta = params["alpha_beta"]
76
- quantiles = params["quantiles"]
77
-
78
- if quantiles is None:
79
- quantiles = plotting_positions(sample, alpha, beta)
80
- elif len(quantiles) != len(sample):
81
- raise PlotnineError(
82
- "The number of quantile values is not the same as "
83
- "the number of sample values."
84
- )
85
-
86
- quantiles = np.asarray(quantiles)
87
- cdist = get_continuous_distribution(params["distribution"])
88
- theoretical = cdist.ppf(quantiles, **params["dparams"])
80
+ theoretical = theoretical_qq(
81
+ sample,
82
+ self.params["distribution"],
83
+ alpha=self.params["alpha_beta"][0],
84
+ beta=self.params["alpha_beta"][1],
85
+ quantiles=self.params["quantiles"],
86
+ distribution_params=self.params["dparams"],
87
+ )
89
88
  return pd.DataFrame({"sample": sample, "theoretical": theoretical})
89
+
90
+
91
+ def theoretical_qq(
92
+ x: FloatArray,
93
+ distribution: str,
94
+ alpha: float,
95
+ beta: float,
96
+ quantiles: Sequence[float] | None,
97
+ distribution_params: dict[str, Any],
98
+ ) -> FloatArray:
99
+ """
100
+ Calculate theoretical qq distribution
101
+ """
102
+ from scipy.stats.mstats import plotting_positions
103
+
104
+ from .distributions import get_continuous_distribution
105
+
106
+ if quantiles is None:
107
+ quantiles = plotting_positions(x, alpha, beta)
108
+ elif len(quantiles) != len(x):
109
+ raise PlotnineError(
110
+ "The number of quantile values is not the same as "
111
+ "the number of sample values."
112
+ )
113
+
114
+ cdist = get_continuous_distribution(distribution)
115
+ return cdist.ppf(np.asarray(quantiles), **distribution_params)
@@ -4,7 +4,7 @@ import pandas as pd
4
4
  from ..doctools import document
5
5
  from ..exceptions import PlotnineError
6
6
  from .stat import stat
7
- from .stat_qq import stat_qq
7
+ from .stat_qq import theoretical_qq
8
8
 
9
9
 
10
10
  @document
@@ -41,6 +41,7 @@ class stat_qq_line(stat):
41
41
 
42
42
  See Also
43
43
  --------
44
+ plotnine.geom_qq_line : The default `geom` for this `stat`.
44
45
  scipy.stats.mstats.plotting_positions : Uses `alpha_beta`
45
46
  to calculate the quantiles.
46
47
  """
@@ -64,31 +65,35 @@ class stat_qq_line(stat):
64
65
  raise PlotnineError(
65
66
  "Cannot fit line quantiles. 'line_p' must be of length 2"
66
67
  )
67
- return self.params
68
68
 
69
- @classmethod
70
- def compute_group(cls, data, scales, **params):
69
+ def compute_group(self, data, scales):
71
70
  from scipy.stats.mstats import mquantiles
72
71
 
73
72
  from .distributions import get_continuous_distribution
74
73
 
75
- line_p = params["line_p"]
76
- dparams = params["dparams"]
74
+ line_p = self.params["line_p"]
75
+ dparams = self.params["dparams"]
77
76
 
78
77
  # Compute theoretical values
79
- qq_gdata = stat_qq.compute_group(data, scales, **params)
80
- sample = qq_gdata["sample"].to_numpy()
81
- theoretical = qq_gdata["theoretical"].to_numpy()
78
+ sample = data["sample"].sort_values().to_numpy()
79
+ theoretical = theoretical_qq(
80
+ sample,
81
+ self.params["distribution"],
82
+ alpha=self.params["alpha_beta"][0],
83
+ beta=self.params["alpha_beta"][1],
84
+ quantiles=self.params["quantiles"],
85
+ distribution_params=dparams,
86
+ )
82
87
 
83
88
  # Compute slope & intercept of the line through the quantiles
84
- cdist = get_continuous_distribution(params["distribution"])
89
+ cdist = get_continuous_distribution(self.params["distribution"])
85
90
  x_coords = cdist.ppf(line_p, **dparams)
86
91
  y_coords = mquantiles(sample, line_p)
87
92
  slope = (np.diff(y_coords) / np.diff(x_coords))[0]
88
93
  intercept = y_coords[0] - slope * x_coords[0]
89
94
 
90
95
  # Get x,y points that describe the line
91
- if params["fullrange"] and scales.x:
96
+ if self.params["fullrange"] and scales.x:
92
97
  x = scales.x.dimension()
93
98
  else:
94
99
  x = theoretical.min(), theoretical.max()
@@ -29,8 +29,8 @@ class stat_quantile(stat):
29
29
 
30
30
  See Also
31
31
  --------
32
+ plotnine.geom_quantile : The default `geom` for this `stat`.
32
33
  statsmodels.regression.quantile_regression.QuantReg
33
- plotnine.geom_quantile
34
34
  """
35
35
 
36
36
  _aesthetics_doc = """
@@ -59,30 +59,36 @@ class stat_quantile(stat):
59
59
  CREATES = {"quantile", "group"}
60
60
 
61
61
  def setup_params(self, data):
62
- params = self.params.copy()
62
+ params = self.params
63
63
  if params["formula"] is None:
64
64
  params["formula"] = "y ~ x"
65
65
  warn("Formula not specified, using '{}'", PlotnineWarning)
66
+ else:
67
+ params["eval_env"] = self.environment.to_patsy_env()
68
+
66
69
  try:
67
70
  iter(params["quantiles"])
68
71
  except TypeError:
69
72
  params["quantiles"] = (params["quantiles"],)
70
73
 
71
- return params
72
-
73
- @classmethod
74
- def compute_group(cls, data, scales, **params):
75
- res = [quant_pred(q, data, **params) for q in params["quantiles"]]
74
+ def compute_group(self, data, scales):
75
+ res = [
76
+ quant_pred(q, data, self.params) for q in self.params["quantiles"]
77
+ ]
76
78
  return pd.concat(res, axis=0, ignore_index=True)
77
79
 
78
80
 
79
- def quant_pred(q, data, **params):
81
+ def quant_pred(q, data, params):
80
82
  """
81
83
  Quantile predictions
82
84
  """
83
85
  import statsmodels.formula.api as smf
84
86
 
85
- mod = smf.quantreg(params["formula"], data)
87
+ mod = smf.quantreg(
88
+ params["formula"],
89
+ data,
90
+ eval_env=params.get("eval_env"),
91
+ )
86
92
  reg_res = mod.fit(q=q, **params["method_args"])
87
93
  out = pd.DataFrame(
88
94
  {
@@ -57,10 +57,19 @@ class stat_sina(stat):
57
57
  - `area` - Scale by the largest density/bin among the different sinas
58
58
  - `count` - areas are scaled proportionally to the number of points
59
59
  - `width` - Only scale according to the maxwidth parameter.
60
+ style :
61
+ Type of sina plot to draw. The options are
62
+ ```python
63
+ 'full' # Regular (2 sided)
64
+ 'left' # Left-sided half
65
+ 'right' # Right-sided half
66
+ 'left-right' # Alternate (left first) half by the group
67
+ 'right-left' # Alternate (right first) half by the group
68
+ ```
60
69
 
61
70
  See Also
62
71
  --------
63
- plotnine.geom_sina
72
+ plotnine.geom_sina : The default `geom` for this `stat`.
64
73
  """
65
74
 
66
75
  _aesthetics_doc = """
@@ -91,6 +100,7 @@ class stat_sina(stat):
91
100
  "bin_limit": 1,
92
101
  "random_state": None,
93
102
  "scale": "area",
103
+ "style": "full",
94
104
  }
95
105
  CREATES = {"scaled"}
96
106
 
@@ -106,7 +116,7 @@ class stat_sina(stat):
106
116
  return data
107
117
 
108
118
  def setup_params(self, data):
109
- params = self.params.copy()
119
+ params = self.params
110
120
  random_state = params["random_state"]
111
121
 
112
122
  if params["maxwidth"] is None:
@@ -127,10 +137,9 @@ class stat_sina(stat):
127
137
  params["clip"] = (-np.inf, np.inf)
128
138
  params["bounds"] = (-np.inf, np.inf)
129
139
  params["n"] = 512
130
- return params
131
140
 
132
- @classmethod
133
- def compute_panel(cls, data, scales, **params):
141
+ def compute_panel(self, data, scales):
142
+ params = self.params
134
143
  maxwidth = params["maxwidth"]
135
144
  random_state = params["random_state"]
136
145
  fuzz = 1e-8
@@ -144,7 +153,7 @@ class stat_sina(stat):
144
153
  else:
145
154
  params["bins"] = breaks_from_bins(y_dim_fuzzed, params["bins"])
146
155
 
147
- data = super(cls, stat_sina).compute_panel(data, scales, **params)
156
+ data = super().compute_panel(data, scales)
148
157
 
149
158
  if not len(data):
150
159
  return data
@@ -188,11 +197,10 @@ class stat_sina(stat):
188
197
 
189
198
  return data
190
199
 
191
- @classmethod
192
- def compute_group(cls, data, scales, **params):
193
- maxwidth = params["maxwidth"]
194
- bins = params["bins"]
195
- bin_limit = params["bin_limit"]
200
+ def compute_group(self, data, scales):
201
+ maxwidth = self.params["maxwidth"]
202
+ bins = self.params["bins"]
203
+ bin_limit = self.params["bin_limit"]
196
204
  weight = None
197
205
  y = data["y"]
198
206
 
@@ -205,12 +213,12 @@ class stat_sina(stat):
205
213
  elif len(np.unique(y)) < 2:
206
214
  data["density"] = 1
207
215
  data["scaled"] = 1
208
- elif params["method"] == "density":
216
+ elif self.params["method"] == "density":
209
217
  from scipy.interpolate import interp1d
210
218
 
211
219
  # density kernel estimation
212
220
  range_y = y.min(), y.max()
213
- dens = compute_density(y, weight, range_y, **params)
221
+ dens = compute_density(y, weight, range_y, self.params)
214
222
  densf = interp1d(
215
223
  dens["x"],
216
224
  dens["density"],
@@ -243,8 +251,31 @@ class stat_sina(stat):
243
251
 
244
252
  return data
245
253
 
246
- def finish_layer(self, data, params):
254
+ def finish_layer(self, data):
247
255
  # Rescale x in case positions have been adjusted
256
+ style = self.params["style"]
257
+ x_mean = data["x"].to_numpy()
248
258
  x_mod = (data["xmax"] - data["xmin"]) / data["width"]
249
259
  data["x"] = data["x"] + data["x_diff"] * x_mod
260
+ x = data["x"].to_numpy()
261
+ even = data["group"].to_numpy() % 2 == 0
262
+
263
+ def mirror_x(bool_idx):
264
+ """
265
+ Mirror x locations along the mean value
266
+ """
267
+ data.loc[bool_idx, "x"] = (
268
+ 2 * x_mean[bool_idx] - data.loc[bool_idx, "x"]
269
+ )
270
+
271
+ match style:
272
+ case "left":
273
+ mirror_x(x_mean < x)
274
+ case "right":
275
+ mirror_x(x < x_mean)
276
+ case "left-right":
277
+ mirror_x(even & (x < x_mean) | ~even & (x_mean < x))
278
+ case "right-left":
279
+ mirror_x(even & (x_mean < x) | ~even & (x < x_mean))
280
+
250
281
  return data
@@ -37,7 +37,7 @@ class stat_smooth(stat):
37
37
  If a `callable` is passed, it must have the signature:
38
38
 
39
39
  ```python
40
- def my_smoother(data, xseq, **params):
40
+ def my_smoother(data, xseq, params):
41
41
  # * data - has the x and y values for the model
42
42
  # * xseq - x values to be predicted
43
43
  # * params - stat parameters
@@ -106,6 +106,7 @@ class stat_smooth(stat):
106
106
 
107
107
  See Also
108
108
  --------
109
+ plotnine.geom_smooth : The default `geom` for this `stat`.
109
110
  statsmodels.regression.linear_model.OLS
110
111
  statsmodels.regression.linear_model.WLS
111
112
  statsmodels.robust.robust_linear_model.RLM
@@ -163,7 +164,7 @@ class stat_smooth(stat):
163
164
  return data
164
165
 
165
166
  def setup_params(self, data):
166
- params = self.params.copy()
167
+ params = self.params
167
168
  # Use loess/lowess for small datasets
168
169
  # and glm for large
169
170
  if params["method"] == "auto":
@@ -202,12 +203,9 @@ class stat_smooth(stat):
202
203
  )
203
204
  params["environment"] = self.environment
204
205
 
205
- return params
206
-
207
- @classmethod
208
- def compute_group(cls, data, scales, **params):
206
+ def compute_group(self, data, scales):
209
207
  data = data.sort_values("x")
210
- n = params["n"]
208
+ n = self.params["n"]
211
209
 
212
210
  x_unique = data["x"].unique()
213
211
 
@@ -223,15 +221,15 @@ class stat_smooth(stat):
223
221
  return pd.DataFrame()
224
222
 
225
223
  if data["x"].dtype.kind == "i":
226
- if params["fullrange"]:
224
+ if self.params["fullrange"]:
227
225
  xseq = scales.x.dimension()
228
226
  else:
229
227
  xseq = np.sort(x_unique)
230
228
  else:
231
- if params["fullrange"]:
229
+ if self.params["fullrange"]:
232
230
  rangee = scales.x.dimension()
233
231
  else:
234
232
  rangee = [data["x"].min(), data["x"].max()]
235
233
  xseq = np.linspace(rangee[0], rangee[1], n)
236
234
 
237
- return predictdf(data, xseq, **params)
235
+ return predictdf(data, xseq, self.params)
@@ -17,6 +17,10 @@ class stat_sum(stat):
17
17
  Parameters
18
18
  ----------
19
19
  {common_parameters}
20
+
21
+ See Also
22
+ --------
23
+ plotnine.geom_point : The default `geom` for this `stat`.
20
24
  """
21
25
 
22
26
  _aesthetics_doc = """
@@ -35,8 +39,7 @@ class stat_sum(stat):
35
39
  DEFAULT_AES = {"size": after_stat("n"), "weight": 1}
36
40
  CREATES = {"n", "prop"}
37
41
 
38
- @classmethod
39
- def compute_panel(cls, data, scales, **params):
42
+ def compute_panel(self, data, scales):
40
43
  if "weight" not in data:
41
44
  data["weight"] = 1
42
45
 
@@ -247,7 +247,7 @@ class stat_summary(stat):
247
247
 
248
248
  See Also
249
249
  --------
250
- plotnine.geom_pointrange
250
+ plotnine.geom_pointrange : The default `geom` for this `stat`.
251
251
  """
252
252
 
253
253
  _aesthetics_doc = """
@@ -299,16 +299,13 @@ class stat_summary(stat):
299
299
 
300
300
  self.params["fun_args"]["random_state"] = random_state
301
301
 
302
- return self.params
303
-
304
- @classmethod
305
- def compute_panel(cls, data, scales, **params):
302
+ def compute_panel(self, data, scales):
306
303
  func = make_summary_fun(
307
- params["fun_data"],
308
- params["fun_y"],
309
- params["fun_ymin"],
310
- params["fun_ymax"],
311
- params["fun_args"],
304
+ self.params["fun_data"],
305
+ self.params["fun_y"],
306
+ self.params["fun_ymin"],
307
+ self.params["fun_ymax"],
308
+ self.params["fun_args"],
312
309
  )
313
310
 
314
311
  # break a dataframe into pieces, summarise each piece,
@@ -63,7 +63,7 @@ class stat_summary_bin(stat):
63
63
 
64
64
  See Also
65
65
  --------
66
- plotnine.geom_pointrange
66
+ plotnine.geom_pointrange : The default `geom` for this `stat`.
67
67
  """
68
68
 
69
69
  _aesthetics_doc = """
@@ -123,21 +123,18 @@ class stat_summary_bin(stat):
123
123
 
124
124
  self.params["fun_args"]["random_state"] = random_state
125
125
 
126
- return self.params
127
-
128
- @classmethod
129
- def compute_group(cls, data, scales, **params):
130
- bins = params["bins"]
131
- breaks = params["breaks"]
132
- binwidth = params["binwidth"]
133
- boundary = params["boundary"]
126
+ def compute_group(self, data, scales):
127
+ bins = self.params["bins"]
128
+ breaks = self.params["breaks"]
129
+ binwidth = self.params["binwidth"]
130
+ boundary = self.params["boundary"]
134
131
 
135
132
  func = make_summary_fun(
136
- params["fun_data"],
137
- params["fun_y"],
138
- params["fun_ymin"],
139
- params["fun_ymax"],
140
- params["fun_args"],
133
+ self.params["fun_data"],
134
+ self.params["fun_y"],
135
+ self.params["fun_ymin"],
136
+ self.params["fun_ymax"],
137
+ self.params["fun_args"],
141
138
  )
142
139
 
143
140
  breaks = fuzzybreaks(scales.x, breaks, boundary, binwidth, bins)