# Changeset 726

Ignore:
Timestamp:
Jan 4, 2007, 3:38:56 PM (16 years ago)
Message:

fixes #165: added a test checking that Linear Regression is equivalent to Polynomial regression of degree one.

Location:
trunk
Files:
11 edited

Unmodified
Removed
• ## trunk/test/regression_test.cc

 r702 utility::vector y(4); y(0)=12;   y(1)=11;   y(2)=14;   y(3)=13; utility::vector w(4); w(0)=0.1;  w(1)=0.2;  w(2)=0.3;  w(3)=0.4; // Comparing linear and polynomial(1) regression::Linear linear; linear.fit(x,y); regression::Polynomial polynomial(1); polynomial.fit(x,y); if ( fabs(linear.beta()-polynomial.fit_parameters()(1))>0.0001 ){ *error << "error: beta and fit_parameters(1) not equal" << std::endl; *error << "       beta = " << linear.beta() << std::endl; *error << "       fit_parameters(1) = " << polynomial.fit_parameters()(1) << std::endl; ok = false; } if ( fabs(polynomial.fit_parameters()(0)-linear.alpha()+ linear.beta()*1985)>0.0001){ *error << "error: fit_parameters(0) = " << polynomial.fit_parameters()(0)<< std::endl; *error << "error: alpha-beta*m_x = " << linear.alpha()-linear.beta()*1985 << std::endl; ok = false; } if ( fabs(polynomial.chisq()-linear.chisq())>0.0001){ *error << "error: chisq not same in linear and polynomial(1)" << std::endl; ok = false; } if ( fabs(polynomial.predict(1.0)-linear.predict(1.0))>0.0001){ *error << "error: predict not same in linear and polynomial(1)" << std::endl; ok = false; } if ( fabs(polynomial.standard_error(1985)-linear.standard_error(1985)) >0.0001){ *error << "error: standard_error not same in linear and polynomial(1)" << "\n  polynomial: " << polynomial.standard_error(1.0) << "\n  linear: " << linear.standard_error(1.0) << "\n  alpha_var: " << linear.alpha_var() << "\n  beta_var: " << linear.beta_var() << "\n  covariance: " << polynomial.covariance()(0,0) << " " << polynomial.covariance()(0,1) << "\n" << " " << polynomial.covariance()(1,0) << " " << polynomial.covariance()(1,1) << std::endl; ok = false; } *error << "testing regression::LinearWeighted" << std::endl;
• ## trunk/yat/regression/Linear.cc

 r724 Linear::Linear(void) : OneDimensional(), alpha_(0), alpha_var_(0), beta_(0), beta_var_(0), chisq_(0), m_x_(0) chisq_(0) { } } double Linear::alpha_err(void) const double Linear::alpha_var(void) const { return sqrt(alpha_var_); return alpha_var_; } } double Linear::beta_err(void) const double Linear::beta_var(void) const { return sqrt(beta_var_); return beta_var_; } // calculating deviation between data and model chisq_ = ( (ap_.y_averager().sum_xx_centered() - ap_.sum_xy_centered()* ap_.sum_xy_centered()/ap_.x_averager().sum_xx_centered() ) / (x.size()-2) ); r2_= 1-chisq_/ap_.x_averager().variance(); alpha_var_ = chisq_ / x.size(); beta_var_ = chisq_ / ap_.x_averager().sum_xx_centered(); m_x_ = ap_.x_averager().mean(); chisq_ = (ap_.y_averager().sum_xx_centered() - ap_.sum_xy_centered()* ap_.sum_xy_centered()/ap_.x_averager().sum_xx_centered() ); r2_= 1-chisq_/ap_.x_averager().sum_xx_centered(); alpha_var_ = s2() / x.size(); beta_var_ = s2() / ap_.x_averager().sum_xx_centered(); } double Linear::predict(const double x) const { return alpha_ + beta_ * (x-m_x_); return alpha_ + beta_ * (x - ap_.x_averager().mean()); } } double Linear::s2(void) const { return chisq()/(ap_.n()-2); } double Linear::standard_error(const double x) const { return sqrt( alpha_var_+beta_var_*(x-m_x_)*(x-m_x_)); return sqrt( alpha_var_+beta_var_*(x-ap_.x_averager().mean())* (x-ap_.x_averager().mean()) ); }
• ## trunk/yat/regression/Linear.h

 r718 /** The standard deviation is estimated as \f$\sqrt{\frac{\chi^2}{n}} \f$ The standard deviation is estimated as \f$\sqrt{\frac{s^2}{n}} \f$ where \f$s^2 = \frac{\sum \epsilon^2}{n-2} \f$ @return standard deviation of parameter \f$\alpha \f$ */ double alpha_err(void) const; double alpha_var(void) const; /** /** The standard deviation is estimated as \f$\sqrt{\frac{\chi^2}{\sum (x_i-m_x)^2}} \f$ The standard deviation is estimated as \f$\frac{s^2}{\sum (x-m_x)^2} \f$ where \f$s^2 = \frac{\sum \epsilon^2}{n-2} \f$ @return standard deviation of parameter \f$\beta \f$ */ double beta_err(void) const; double beta_var(void) const; /** @brief Mean Squared Error Chisq is calculated as \f$\frac{\sum (y_i-\alpha-\beta(x_i-m_x))^2}{n-2} \f$ Chi-squared is calculated as \f$\sum (y_i-\alpha-\beta(x_i-m_x))^2 \f$ */ double chisq(void) const; Linear(const Linear&); double s2(void) const; double alpha_; double alpha_var_; double beta_var_; double chisq_; double m_x_; // average of x values double r2_; // coefficient of determination };
• ## trunk/yat/regression/Local.h

 r718 /// /// The output operator for the RegressionLocal class. /// The output operator for the Regression::Local class. /// std::ostream& operator<<(std::ostream&, const Local& );
• ## trunk/yat/regression/MultiDimensional.cc

 r718 if (work_) gsl_multifit_linear_free(work_); } const utility::matrix& MultiDimensional::covariance(void) const { return covariance_; }
• ## trunk/yat/regression/MultiDimensional.h

 r718 /// /// @brief covariance of parameters /// const utility::matrix& covariance(void) const; /// /// Function fitting parameters of the linear model by minimizing /// the quadratic deviation between model and data.
• ## trunk/yat/regression/Naive.cc

 r713 double Naive::chisq(void) const { return ap_.y_averager().sum_xx_centered()/(ap_.n()-1); return ap_.y_averager().sum_xx_centered(); }
• ## trunk/yat/regression/Naive.h

 r713 /** \f$\frac{1}{N-1} \sum (x_i-m)^2 \f$ @brief Mean Squared Error Chi-squared \f$\sum (x_i-m)^2 \f$ */ double chisq(void) const;
• ## trunk/yat/regression/OneDimensional.h

 r718 /** @brief Mean Squared Error @brief Chi-squared Mean Squared Error is defined as the \f$\frac {\sum{(\hat{y_i}-y_i)^2}}{df} \f$ where \f$df \f$ is the number of degrees of freedom, typically the number of data points minus the number of parameters in the model. Chi-squared is defined as the \f$\frac {\sum{(\hat{y_i}-y_i)^2}}{1} \f$ */ virtual double chisq(void) const=0;
• ## trunk/yat/regression/Polynomial.cc

 r713 const utility::matrix& Polynomial::covariance(void) const { return md_.covariance(); } void Polynomial::fit(const utility::vector& x, const utility::vector& y) {
• ## trunk/yat/regression/Polynomial.h

 r713 /// /// @brief covariance of parameters /// const utility::matrix& covariance(void) const; /// /// Fit the model by minimizing the mean squared deviation between /// model and data. /// /// @brief Variance of residuals /// @brief Sum of squared residuals /// double chisq(void) const;
Note: See TracChangeset for help on using the changeset viewer.