Changeset 730
- Timestamp: Jan 6, 2007, 12:02:21 PM (16 years ago)
- Location: trunk
- Files: 5 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/test/regression_test.cc
r729 r730 120 120 *error << " testing regression::LinearWeighted" << std::endl; 121 121 regression::LinearWeighted linear_w; 122 ok = equal(linear, linear_w, error) && ok; 122 123 linear_w.fit(x,y,w); 123 124 double y_predicted = linear_w.predict(1990); … … 134 135 regression::NaiveWeighted naive_w; 135 136 regression::Naive naive; 136 ok = ok && equal(naive, naive_w, error);137 ok = equal(naive, naive_w, error) && ok; 137 138 naive_w.fit(x,y,w); 138 139 … … 145 146 ok=false; 146 147 } 147 148 148 149 149 // testing regression::Local … … 183 183 } 184 184 185 *error << " testing regression::Linear" << std::endl;186 regression::Linear lin;187 188 185 *error << " testing regression::PolynomialWeighted" << std::endl; 189 186 regression::PolynomialWeighted pol_weighted(2); … … 225 222 *error << "Error: predict not equal" << std::endl; 226 223 } 227 if (r.prediction_error2(2000) != rw.prediction_error2(2000)){ 228 ok = false; 229 *error << "Error: prediction_error2 not equal non-weighted version." 230 << std::endl; 231 } 232 if (r.r2() != rw.r2()){ 224 if (fabs(r.prediction_error2(2000)-rw.prediction_error2(2000))>10e-7){ 225 ok = false; 226 *error << "Error: prediction_error2 not equal non-weighted version.\n" 227 << " weighted: " << rw.prediction_error2(2000) << "\n" 228 << " non-weighted: " << r.prediction_error2(2000) 229 << std::endl; 230 } 231 if (fabs(r.s2()-rw.s2())>10E-7){ 233 232 ok = false; 234 233 *error << "Error: r2 not equal non-weighted version." << std::endl; 235 } 236 if (r.s2() != rw.s2()){ 234 *error << "weighted r2 = " << rw.r2() << std::endl; 235 *error << "non-weighted r2 = " << r.r2() << std::endl; 236 } 237 if (fabs(r.s2()-rw.s2())>10E-7){ 237 238 ok = false; 238 239 *error << "Error: s2 not equal non-weighted version." 
<< std::endl; 239 } 240 if (r.standard_error2(2000) != rw.standard_error2(2000)){ 240 *error << "weighted s2 = " << rw.s2() << std::endl; 241 *error << "non-weighted s2 = " << r.s2() << std::endl; 242 } 243 if (fabs(r.standard_error2(2000)-rw.standard_error2(2000))>10e-7){ 241 244 ok = false; 242 245 *error << "Error: standard_error not equal non-weighted version." … … 279 282 *error << "Error: s2 not equal after rescaling.\n"; 280 283 *error << " s2 = " << s2 << " and after doubling weights.\n"; 281 *error << " s2 = " << wr.s2( ) << "\n";284 *error << " s2 = " << wr.s2(2) << "\n"; 282 285 } 283 286 if (wr.standard_error2(2000) != standard_error2){ -
trunk/yat/regression/Linear.cc
r729 r730 66 66 67 67 alpha_ = ap_.y_averager().mean(); 68 beta_ = ap_. covariance() / ap_.x_averager().variance();68 beta_ = ap_.sum_xy_centered() / ap_.x_averager().sum_xx_centered(); 69 69 70 70 // calculating deviation between data and model -
trunk/yat/regression/LinearWeighted.cc
r729 r730 26 26 #include "yat/utility/vector.h" 27 27 28 #include <cassert> 29 28 30 namespace theplu { 29 31 namespace yat { … … 32 34 LinearWeighted::LinearWeighted(void) 33 35 : OneDimensionalWeighted(), alpha_(0), alpha_var_(0), beta_(0), 34 beta_var_(0) , m_x_(0), s2_(0)36 beta_var_(0) 35 37 { 36 38 } … … 47 49 double LinearWeighted::alpha_var(void) const 48 50 { 49 return sqrt(alpha_var_);51 return alpha_var_; 50 52 } 51 53 … … 57 59 double LinearWeighted::beta_var(void) const 58 60 { 59 return sqrt(beta_var_);61 return beta_var_; 60 62 } 61 63 … … 64 66 const utility::vector& w) 65 67 { 68 assert(x.size()==y.size()); 69 assert(x.size()==w.size()); 70 66 71 // AveragerPairWeighted requires 2 weights but works only on the 67 72 // product wx*wy, so we can send in w and a dummie to get what we … … 70 75 ap_.add_values(x,y,utility::vector(x.size(),1),w); 71 76 72 // estimating the noise level. see attached document for motivation73 // of the expression.74 s2_= (syy()-sxy()*sxy()/sxx())/(w.sum()-2*(w*w)/w.sum()) ;75 76 77 alpha_ = m_y(); 77 78 beta_ = sxy()/sxx(); 78 alpha_var_ = ap_.y_averager().standard_error() * 79 ap_.y_averager().standard_error(); 80 beta_var_ = s2_/sxx(); 81 m_x_=m_x(); 79 80 chisq_=0; 81 for (size_t i=0; i<x.size(); ++i){ 82 double res = predict(x(i))-y(i); 83 chisq_ += w(i)*res*res; 84 } 85 86 alpha_var_ = s2()/ap_.y_averager().sum_w(); 87 beta_var_ = s2()/sxx(); 82 88 } 83 89 … … 92 98 } 93 99 94 double LinearWeighted::predict ion_error2(const double x, const double w) const95 { 96 return sqrt(alpha_var_ + beta_var_*(x-m_x_)*(x-m_x_)+s2(w));100 double LinearWeighted::predict(const double x) const 101 { 102 return alpha_ + beta_ * (x-m_x()); 97 103 } 98 104 105 99 106 double LinearWeighted::s2(double w) const 100 107 { 101 return s2_/w;108 return chisq_/(w*(ap_.y_averager().n()-2)); 102 109 } 103 110 104 111 double LinearWeighted::standard_error2(const double x) const 105 112 { 106 return sqrt(alpha_var_ + beta_var_*(x-m_x_)*(x-m_x_));113 
return alpha_var_ + beta_var_*(x-m_x())*(x-m_x()); 107 114 } 115 108 116 109 117 double LinearWeighted::sxx(void) const … … 112 120 } 113 121 122 114 123 double LinearWeighted::sxy(void) const 115 124 { 116 125 return ap_.sum_xy_centered(); 117 126 } 127 118 128 119 129 double LinearWeighted::syy(void) const -
trunk/yat/regression/LinearWeighted.h
r729 r730 27 27 #include "OneDimensionalWeighted.h" 28 28 29 #include <cmath>30 31 29 namespace theplu { 32 30 namespace yat { … … 55 53 virtual ~LinearWeighted(void); 56 54 57 /// 58 /// @return the parameter \f$ \alpha \f$ 59 /// 55 /** 56 \f$ alpha \f$ is estimated as \f$ \frac{\sum w_iy_i}{\sum w_i} \f$ 57 58 @return the parameter \f$ \alpha \f$ 59 */ 60 60 double alpha(void) const; 61 61 62 /// 63 /// @return standard deviation of parameter \f$ \alpha \f$ 64 /// 62 /** 63 Variance is estimated as \f$ \frac{s^2}{\sum w_i} \f$ 64 65 @see s2() 66 67 @return variance of parameter \f$ \alpha \f$ 68 */ 65 69 double alpha_var(void) const; 66 70 67 /// 68 /// @return the parameter \f$ \beta \f$ 69 /// 71 /** 72 \f$ beta \f$ is estimated as \f$ \frac{\sum 73 w_i(y_i-m_y)(x_i-m_x)}{\sum w_i(x_i-m_x)^2} \f$ 74 75 @return the parameter \f$ \beta \f$ 76 */ 70 77 double beta(void) const; 71 78 72 /// 73 /// @return standard deviation of parameter \f$ \beta \f$ 74 /// 79 /** 80 Variance is estimated as \f$ \frac{s^2}{\sum w_i(x_i-m_x)^2} \f$ 81 82 @see s2() 83 84 @return variance of parameter \f$ \beta \f$ 85 */ 75 86 double beta_var(void) const; 76 87 … … 91 102 /// \f$ y =\alpha + \beta (x - m) \f$ 92 103 /// 93 double predict(const double x) const { return alpha_ + beta_ * (x-m_x_); } 94 95 /// 96 /// estimated squared deviation from predicted value for a new 97 /// data point in @a x with weight @a w 98 /// 99 double prediction_error2(const double x, const double w=1) const; 104 double predict(const double x) const; 100 105 101 106 /** … … 126 131 double beta_; 127 132 double beta_var_; 128 double m_x_; // average of x values129 double r2_; // coefficient of determination130 double s2_;131 double mse_;132 133 }; 133 134 -
trunk/yat/regression/NaiveWeighted.cc
r729 r730 68 68 double NaiveWeighted::standard_error2(const double x) const 69 69 { 70 return chisq_/( (ap_.y_averager().n()-1)*ap_.sum_w());70 return chisq_/( (ap_.y_averager().n()-1)*ap_.y_averager().sum_w() ); 71 71 } 72 72
Note: See TracChangeset for help on using the changeset viewer.