Skip to content
This repository has been archived by the owner on Jun 18, 2023. It is now read-only.

Commit

Permalink
Don't alias np until bugfix in numba #70
Browse files Browse the repository at this point in the history
  • Loading branch information
ceholden committed Dec 7, 2015
1 parent d9b4b80 commit ec0d06e
Showing 1 changed file with 12 additions and 10 deletions.
22 changes: 12 additions & 10 deletions yatsm/regression/robust_fit.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@
"""
import inspect

import numpy as np
# Don't alias to ``np`` until fix is implemented
# https://github.com/numba/numba/issues/1559
import numpy
import six

from yatsm.accel import try_jit
Expand All @@ -37,7 +39,7 @@ def bisquare(resid, c=4.685):
http://statsmodels.sourceforge.net/stable/generated/statsmodels.robust.norms.TukeyBiweight.html
"""
# Weight where abs(resid) < c; otherwise 0
return (np.abs(resid) < c) * (1 - (resid / c) ** 2) ** 2
return (numpy.abs(resid) < c) * (1 - (resid / c) ** 2) ** 2


@try_jit(nopython=True)
Expand All @@ -57,14 +59,14 @@ def mad(resid, c=0.6745):
http://en.wikipedia.org/wiki/Median_absolute_deviation
"""
# Return median absolute deviation adjusted sigma
return np.median(np.fabs(resid)) / c
return numpy.median(numpy.fabs(resid)) / c


# UTILITY FUNCTIONS
# np.any prevents nopython
@try_jit()
def _check_converge(x0, x, tol=1e-8):
    """ Return True when successive parameter estimates have converged

    Convergence is declared when every element of ``x0 - x`` is within
    ``tol`` in absolute magnitude.

    Args:
        x0 (np.ndarray): previous estimate
        x (np.ndarray): current estimate
        tol (float): convergence tolerance (default: 1e-8)

    Returns:
        bool: True if converged

    """
    # BUG FIX (parenthesization): the original expression was
    # ``numpy.fabs(x0 - x > tol)`` -- fabs of a *boolean* array -- which
    # ignores differences more negative than -tol. Compare the magnitude
    # of the difference against the tolerance instead.
    return not numpy.any(numpy.fabs(x0 - x) > tol)


# Broadcast on sw prevents nopython
Expand All @@ -83,14 +85,14 @@ def _weight_fit(X, y, w):
tuple: coefficients and residual vector
"""
sw = np.sqrt(w)
sw = numpy.sqrt(w)

Xw = X * sw[:, None]
yw = y * sw

beta, _, _, _ = np.linalg.lstsq(Xw, yw)
beta, _, _, _ = numpy.linalg.lstsq(Xw, yw)

resid = y - np.dot(X, beta)
resid = y - numpy.dot(X, beta)

return beta, resid

Expand Down Expand Up @@ -153,7 +155,7 @@ def fit(self, X, y):
chaining
"""
self.coef_, resid = _weight_fit(X, y, np.ones_like(y))
self.coef_, resid = _weight_fit(X, y, numpy.ones_like(y))
self.scale = self.scale_est(resid, c=self.scale_constant)

iteration = 1
Expand All @@ -179,13 +181,13 @@ def predict(self, X):
np.ndarray: 1D yhat prediction
"""
return np.dot(X, self.coef_) + self.intercept_
return numpy.dot(X, self.coef_) + self.intercept_

def __str__(self):
    """ Return a human-readable summary of the fitted model

    Shows the class name, the coefficient vector (4 significant
    digits), and the intercept.
    """
    template = ("%s:\n"
                " * Coefficients: %s\n"
                " * Intercept = %.5f\n")
    coef_str = numpy.array_str(self.coef_, precision=4)
    return template % (self.__class__.__name__, coef_str, self.intercept_)

Expand Down

0 comments on commit ec0d06e

Please sign in to comment.