summary refs log tree commit diff stats
path: root/Lib
diff options
context:
space:
mode:
authorZack Kneupper <zachary.kneupper@gmail.com>2021-05-25 00:30:58 (GMT)
committerGitHub <noreply@github.com>2021-05-25 00:30:58 (GMT)
commit2f3a87856c7033227577b9ed0c77ed75311430b7 (patch)
treeb3c06f5f87a54c4e4464426c7ba6af8071e2d4f3 /Lib
parent8450e8a81f6d54f45e1fc5c13a03878c9978750d (diff)
downloadcpython-2f3a87856c7033227577b9ed0c77ed75311430b7.zip
cpython-2f3a87856c7033227577b9ed0c77ed75311430b7.tar.gz
cpython-2f3a87856c7033227577b9ed0c77ed75311430b7.tar.bz2
bpo-44151: linear_regression() minor API improvements (GH-26199)
Diffstat (limited to 'Lib')
-rw-r--r--Lib/statistics.py29
-rw-r--r--Lib/test/test_statistics.py2
2 files changed, 15 insertions, 16 deletions
diff --git a/Lib/statistics.py b/Lib/statistics.py
index bd3813c..c505a05 100644
--- a/Lib/statistics.py
+++ b/Lib/statistics.py
@@ -94,7 +94,7 @@ for two inputs:
>>> correlation(x, y) #doctest: +ELLIPSIS
0.31622776601...
>>> linear_regression(x, y) #doctest:
-LinearRegression(intercept=1.5, slope=0.1)
+LinearRegression(slope=0.1, intercept=1.5)
Exceptions
@@ -932,18 +932,18 @@ def correlation(x, y, /):
raise StatisticsError('at least one of the inputs is constant')
-LinearRegression = namedtuple('LinearRegression', ['intercept', 'slope'])
+LinearRegression = namedtuple('LinearRegression', ('slope', 'intercept'))
-def linear_regression(regressor, dependent_variable, /):
+def linear_regression(x, y, /):
"""Intercept and slope for simple linear regression
Return the intercept and slope of simple linear regression
parameters estimated using ordinary least squares. Simple linear
- regression describes relationship between *regressor* and
- *dependent variable* in terms of linear function:
+ regression describes relationship between *x* and
+ *y* in terms of linear function:
- dependent_variable = intercept + slope * regressor + noise
+ y = intercept + slope * x + noise
where *intercept* and *slope* are the regression parameters that are
estimated, and noise represents the variability of the data that was
@@ -953,19 +953,18 @@ def linear_regression(regressor, dependent_variable, /):
The parameters are returned as a named tuple.
- >>> regressor = [1, 2, 3, 4, 5]
+ >>> x = [1, 2, 3, 4, 5]
>>> noise = NormalDist().samples(5, seed=42)
- >>> dependent_variable = [2 + 3 * regressor[i] + noise[i] for i in range(5)]
- >>> linear_regression(regressor, dependent_variable) #doctest: +ELLIPSIS
- LinearRegression(intercept=1.75684970486..., slope=3.09078914170...)
+ >>> y = [2 + 3 * x[i] + noise[i] for i in range(5)]
+ >>> linear_regression(x, y) #doctest: +ELLIPSIS
+ LinearRegression(slope=3.09078914170..., intercept=1.75684970486...)
"""
- n = len(regressor)
- if len(dependent_variable) != n:
+ n = len(x)
+ if len(y) != n:
raise StatisticsError('linear regression requires that both inputs have same number of data points')
if n < 2:
raise StatisticsError('linear regression requires at least two data points')
- x, y = regressor, dependent_variable
xbar = fsum(x) / n
ybar = fsum(y) / n
sxy = fsum((xi - xbar) * (yi - ybar) for xi, yi in zip(x, y))
@@ -973,9 +972,9 @@ def linear_regression(regressor, dependent_variable, /):
try:
slope = sxy / s2x # equivalent to: covariance(x, y) / variance(x)
except ZeroDivisionError:
- raise StatisticsError('regressor is constant')
+ raise StatisticsError('x is constant')
intercept = ybar - slope * xbar
- return LinearRegression(intercept=intercept, slope=slope)
+ return LinearRegression(slope=slope, intercept=intercept)
## Normal Distribution #####################################################
diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py
index 3e6e17a..a7cb027 100644
--- a/Lib/test/test_statistics.py
+++ b/Lib/test/test_statistics.py
@@ -2501,7 +2501,7 @@ class TestLinearRegression(unittest.TestCase):
([1, 2, 3], [21, 22, 23], 20, 1),
([1, 2, 3], [5.1, 5.2, 5.3], 5, 0.1),
]:
- intercept, slope = statistics.linear_regression(x, y)
+ slope, intercept = statistics.linear_regression(x, y)
self.assertAlmostEqual(intercept, true_intercept)
self.assertAlmostEqual(slope, true_slope)