Changeset 729


Ignore:
Timestamp:
Jan 5, 2007, 5:00:15 PM (16 years ago)
Author:
Peter
Message:

Fixes #159. Also removed some inlines in OneDimensionalWeighted by adding source file. Refs #81

Location:
trunk
Files:
1 added
18 edited

Legend:

Unmodified
Added
Removed
  • trunk/test/regression_test.cc

    r727 r729  
    4141using namespace theplu::yat;
    4242
     43bool equal(regression::OneDimensional&, regression::OneDimensionalWeighted&,
     44           std::ostream*);
     45
     46bool unity_weights(regression::OneDimensional&,
     47                   regression::OneDimensionalWeighted&,
     48                   const utility::vector&, const utility::vector&,
     49                   std::ostream*);
     50
     51bool rescale_weights(regression::OneDimensionalWeighted&,
     52                     const utility::vector&, const utility::vector&,
     53                     std::ostream*);
     54
     55bool zero_weights(regression::OneDimensionalWeighted&,
     56                  const utility::vector&, const utility::vector&,
     57                  std::ostream*);
     58
     59
    4360bool Local_test(regression::OneDimensionalWeighted&,
    4461                regression::Kernel&);
    45 
    4662
    4763int main(const int argc,const char* argv[])
     
    5571      std::cout << "regression_test -v : for printing extra information\n";
    5672  }
    57   *error << "testing regression" << std::endl;
     73  *error << "  testing regression" << std::endl;
    5874  bool ok = true;
    5975
     
    102118  }
    103119
    104   *error << "testing regression::LinearWeighted" << std::endl;
     120  *error << "  testing regression::LinearWeighted" << std::endl;
    105121  regression::LinearWeighted linear_w;
    106122  linear_w.fit(x,y,w);
    107123  double y_predicted = linear_w.predict(1990);
    108   double y_predicted_err = linear_w.prediction_error(1990);
     124  double y_predicted_err = linear_w.prediction_error2(1990);
    109125  if (fabs(y_predicted-12.8)>0.001){
    110126    *error << "error: cannot reproduce fit." << std::endl;
     
    115131
    116132  // testing regression::NaiveWeighted
    117   *error << "testing regression::NaiveWeighted" << std::endl;
     133  *error << "  testing regression::NaiveWeighted" << std::endl;
    118134  regression::NaiveWeighted naive_w;
     135  regression::Naive naive;
     136  ok = ok && equal(naive, naive_w, error);
    119137  naive_w.fit(x,y,w);
    120138
    121139  y_predicted=naive_w.predict(0.0);
    122   y_predicted_err=naive_w.prediction_error(0.0);
     140  y_predicted_err=naive_w.prediction_error2(0.0);
    123141  if (y_predicted!=(0.1*12+0.2*11+0.3*14+0.4*13)) {
    124142    *error << "regression_NaiveWeighted: cannot reproduce fit." << std::endl;
     
    128146  }
    129147
     148
    130149  // testing regression::Local
    131   *error << "testing regression::Local" << std::endl;
     150  *error << "  testing regression::Local" << std::endl;
    132151  regression::KernelBox kb;
    133152  regression::LinearWeighted rl;
     
    143162
    144163  // testing regression::Polynomial
    145   *error << "testing regression::Polynomial" << std::endl;
     164  *error << "  testing regression::Polynomial" << std::endl;
    146165  {
    147166    std::ifstream s("data/regression_gauss.data");
     
    164183  }
    165184
    166   *error << "testing regression::Linear" << std::endl;
     185  *error << "  testing regression::Linear" << std::endl;
    167186  regression::Linear lin;
    168187 
    169   *error << "testing regression::Naive" << std::endl;
    170   regression::Naive naive;
    171 
    172   *error << "testing regression::Polynomial" << std::endl;
    173   regression::Polynomial pol(2);
    174 
    175   *error << "testing regression::PolynomialWeighted" << std::endl;
     188  *error << "  testing regression::PolynomialWeighted" << std::endl;
    176189  regression::PolynomialWeighted pol_weighted(2);
    177190
     
    182195}
    183196
     197
     198bool equal(regression::OneDimensional& r,
     199           regression::OneDimensionalWeighted& wr,
     200           std::ostream* error)
     201{
     202  bool ok=true;
     203  utility::vector x(5); x(0)=1970; x(1)=1980; x(2)=1990; x(3)=2000; x(4)=2010;
     204  utility::vector y(5); y(0)=12;   y(1)=11;   y(2)=14;   y(3)=13;   y(4)=15;
     205
     206  ok = unity_weights(r, wr, x, y, error) && ok;
     207  ok = rescale_weights(wr, x, y, error) && ok;
     208  ok = zero_weights(wr, x, y, error) && ok;
     209  return ok;
     210}
     211
     212
     213bool unity_weights(regression::OneDimensional& r,
     214                   regression::OneDimensionalWeighted& rw,
     215                   const utility::vector& x, const utility::vector& y,
     216                   std::ostream* error)
     217{
     218  *error << "    testing unity weights equal to non-weighted version.\n";
     219  bool ok=true;
     220  utility::vector w(x.size(), 1.0);
     221  r.fit(x,y);
     222  rw.fit(x,y,w);
     223  if (r.predict(2000) != rw.predict(2000)){
     224    ok = false;
     225    *error << "Error: predict not equal" << std::endl;
     226  }
     227  if (r.prediction_error2(2000) != rw.prediction_error2(2000)){
     228    ok = false;
     229    *error << "Error: prediction_error2 not equal non-weighted version."
     230           << std::endl;
     231  }
     232  if (r.r2() != rw.r2()){
     233    ok = false;
     234    *error << "Error: r2 not equal non-weighted version." << std::endl;
     235  }
     236  if (r.s2() != rw.s2()){
     237    ok = false;
     238    *error << "Error: s2 not equal non-weighted version." << std::endl;
     239  }
     240  if (r.standard_error2(2000) != rw.standard_error2(2000)){
     241    ok = false;
     242    *error << "Error: standard_error not equal non-weighted version."
     243           << std::endl;
     244  }
     245  return ok;
     246}
     247
     248
     249bool rescale_weights(regression::OneDimensionalWeighted& wr,
     250                     const utility::vector& x, const utility::vector& y,
     251                     std::ostream* error)
     252{
     253  *error << "    testing rescaling weights.\n";
     254  bool ok = true;
     255  utility::vector w(5);  w(0)=1.0;  w(1)=1.0;  w(2)=0.5;  w(3)=0.2;  w(4)=0.2;
     256  wr.fit(x,y,w);
     257  double predict = wr.predict(2000);
     258  double prediction_error2 = wr.prediction_error2(2000);
     259  double r2 = wr.r2();
     260  double s2 = wr.s2();
     261  double standard_error2 = wr.standard_error2(2000);
     262
     263  w.scale(2);
     264  wr.fit(x,y,w);
     265  if (wr.predict(2000) != predict){
     266    ok = false;
     267    *error << "Error: predict not equal after rescaling.\n";
     268  }
     269  if (wr.prediction_error2(2000,2) != prediction_error2){
     270    ok = false;
     271    *error << "Error: prediction_error2 not equal after rescaling.\n";
     272  }
     273  if (wr.r2() != r2){
     274    ok = false;
     275    *error << "Error: r2 not equal after rescaling.\n";
     276  }
     277  if (wr.s2(2) != s2){
     278    ok = false;
     279    *error << "Error: s2 not equal after rescaling.\n";
     280    *error << "       s2 = " << s2 << " and after doubling weights.\n";
     281    *error << "       s2 = " << wr.s2() << "\n";
     282  }
     283  if (wr.standard_error2(2000) != standard_error2){
     284    ok = false;
     285    *error << "Error: standard_error2 not equal after rescaling.\n";
     286  }
     287  return ok;
     288}
     289
     290
     291bool zero_weights(regression::OneDimensionalWeighted& wr,
     292                  const utility::vector& x, const utility::vector& y,
     293                  std::ostream* error)
     294{
     295  *error << "    testing zero weights equal to missing value.\n";
     296  bool ok = true;
     297  utility::vector w(5);  w(0)=1.0;  w(1)=1.0;  w(2)=0.5;  w(3)=0.2;  w(4)=0;
     298  wr.fit(x,y,w);
     299  double predict = wr.predict(2000);
     300  double prediction_error2 = wr.prediction_error2(2000);
     301  double r2 = wr.r2();
     302  double s2 = wr.s2();
     303  double standard_error2 = wr.standard_error2(2000);
     304
     305  utility::vector x2(4);
     306  utility::vector y2(4);
     307  utility::vector w2(4);
     308  for (size_t i=0; i<4; ++i){
     309    x2(i) = x(i);
     310    y2(i) = y(i);
     311    w2(i) = w(i);
     312  }
     313
     314  wr.fit(x2,y2,w2);
     315  if (wr.predict(2000) != predict){
     316    ok = false;
     317    *error << "Error: predict not equal.\n";
     318  }
     319  if (wr.prediction_error2(2000) != prediction_error2){
     320    ok = false;
     321    *error << "Error: prediction_error2 not equal.\n";
     322  }
     323  if (wr.r2() != r2){
     324    ok = false;
     325    *error << "Error: r2 not equal.\n";
     326  }
     327  if (wr.s2() != s2){
     328    ok = false;
     329    *error << "Error: s2 not equal.\n";
     330  }
     331  if (wr.standard_error2(2000) != standard_error2){
     332    ok = false;
     333    *error << "Error: standard_error2 not equal.\n";
     334  }
     335  return ok;
     336}
    184337
    185338
  • trunk/yat/regression/Linear.cc

    r727 r729  
    3131
    3232  Linear::Linear(void)
    33     : OneDimensional(), alpha_(0), alpha_var_(0), beta_(0), beta_var_(0),
    34       chisq_(0)
     33    : OneDimensional(), alpha_(0), alpha_var_(0), beta_(0), beta_var_(0)
    3534  {
    3635  }
     
    6059  }
    6160
    62   double Linear::chisq(void) const
    63   {
    64     return chisq_;
    65   }
    66 
    6761  void Linear::fit(const utility::vector& x, const utility::vector& y)
    6862  {
     
    7771    chisq_ = (ap_.y_averager().sum_xx_centered() - ap_.sum_xy_centered()*
    7872              ap_.sum_xy_centered()/ap_.x_averager().sum_xx_centered() );
    79     r2_= 1-chisq_/ap_.x_averager().sum_xx_centered();
    8073    alpha_var_ = s2() / x.size();
    8174    beta_var_ = s2() / ap_.x_averager().sum_xx_centered();
     
    8578  {
    8679    return alpha_ + beta_ * (x - ap_.x_averager().mean());
    87   }
    88 
    89   double Linear::r2(void) const
    90   {
    91     return r2_;
    9280  }
    9381
  • trunk/yat/regression/Linear.h

    r728 r729  
    8888    double beta_var(void) const;
    8989
    90   /**
    91        Chi-squared is calculated as \f$ \sum
    92        (y_i-\alpha-\beta(x_i-m_x))^2 \f$
    93     */
    94     double chisq(void) const;
    95 
    9690    /**
    9791       Model is fitted by minimizing \f$ \sum{(y_i - \alpha - \beta
     
    138132    double beta_;
    139133    double beta_var_;
    140     double chisq_;
    141     double r2_; // coefficient of determination
    142134  };
    143135
  • trunk/yat/regression/LinearWeighted.cc

    r724 r729  
    4545  }
    4646
    47   double LinearWeighted::alpha_err(void) const
     47  double LinearWeighted::alpha_var(void) const
    4848  {
    4949    return sqrt(alpha_var_);
     
    5555  }
    5656
    57   double LinearWeighted::beta_err(void) const
     57  double LinearWeighted::beta_var(void) const
    5858  {
    5959    return sqrt(beta_var_);
     
    9292  }
    9393
    94   double LinearWeighted::mse(void) const
    95   {
    96     return mse_;
    97   }
    98 
    99   double LinearWeighted::prediction_error(const double x, const double w) const
     94  double LinearWeighted::prediction_error2(const double x, const double w) const
    10095  {
    10196    return sqrt(alpha_var_ + beta_var_*(x-m_x_)*(x-m_x_)+s2(w));
     
    107102  }
    108103
    109   double LinearWeighted::standard_error(const double x) const
     104  double LinearWeighted::standard_error2(const double x) const
    110105  {
    111106    return sqrt(alpha_var_ + beta_var_*(x-m_x_)*(x-m_x_) );
  • trunk/yat/regression/LinearWeighted.h

    r718 r729  
    6363    /// @return standard deviation of parameter \f$ \alpha \f$
    6464    ///
    65     double alpha_err(void) const;
     65    double alpha_var(void) const;
    6666
    6767    ///
     
    7373    /// @return standard deviation of parameter \f$ \beta \f$
    7474    ///
    75     double beta_err(void) const;
     75    double beta_var(void) const;
    7676   
    7777    /**
     
    8888   
    8989    ///
    90     /// @brief Mean Squared Error
    91     ///
    92     double mse(void) const;
    93 
    94     ///
    9590    ///  Function predicting value using the linear model:
    9691    /// \f$ y =\alpha + \beta (x - m) \f$
     
    9994
    10095    ///
    101     /// estimated deviation from predicted value for a new data point
    102     /// in @a x with weight @a w
     96    /// estimated squared deviation from predicted value for a new
     97    /// data point in @a x with weight @a w
    10398    ///
    104     double prediction_error(const double x, const double w=1) const;
     99    double prediction_error2(const double x, const double w=1) const;
    105100
    106101    /**
     
    113108       Var(\beta)*(x-m)} \f$.
    114109    */
    115     double standard_error(const double x) const;
     110    double standard_error2(const double x) const;
    116111
    117112  private:
  • trunk/yat/regression/Local.cc

    r718 r729  
    115115      assert(i<y_err_.size());
    116116      y_predicted_(i) = regressor_->predict(x(i*step_size));
    117       y_err_(i) = regressor_->standard_error(x(i*step_size));
     117      y_err_(i) = sqrt(regressor_->standard_error2(x(i*step_size)));
    118118    }
    119119  }
  • trunk/yat/regression/Makefile.am

    r682 r729  
    2626  LinearWeighted.cc Local.cc MultiDimensional.cc      \
    2727  MultiDimensionalWeighted.cc Naive.cc NaiveWeighted.cc   \
    28   OneDimensional.cc Polynomial.cc PolynomialWeighted.cc
     28  OneDimensional.cc OneDimensionalWeighted.cc Polynomial.cc \
     29  PolynomialWeighted.cc
    2930
    3031include_regressiondir = $(includedir)/yat/regression
  • trunk/yat/regression/Naive.cc

    r728 r729  
    4545
    4646
    47   double Naive::chisq(void) const
    48   {
    49     return ap_.y_averager().sum_xx_centered();
    50   }
    51  
    52 
    5347  void Naive::fit(const utility::vector& x, const utility::vector& y)
    5448  {
     
    5650    for (size_t i=0; i<y.size(); i++)
    5751      ap_.add(x(i),y(i));
    58    
     52    chisq_ = ap_.y_averager().sum_xx_centered();
    5953  }
    6054
  • trunk/yat/regression/Naive.h

    r728 r729  
    5757    virtual ~Naive(void);
    5858         
    59     /**
    60         Chi-squared \f$ \sum (x_i-m)^2 \f$
    61     */
    62     double chisq(void) const;
    63 
    6459    ///
    6560    /// This function computes the best-fit for the naive model \f$ y
     
    7772       \f$ \frac{\sum \epsilon_i^2}{N-1} \f$
    7873
    79        @return variance of residuals
     74       @return Conditional variance
    8075    */
    8176    double s2(void) const;
    8277
    8378    ///
    84     /// @return standard error
     79    /// \f$ \frac{s^2}{N} \f$
     80    ///
     81    /// @return squared standard error
    8582    ///
    8683    /// @see statistics::Averager
  • trunk/yat/regression/NaiveWeighted.cc

    r718 r729  
    5050    ap_.reset();
    5151    ap_.add_values(x,y,utility::vector(x.size(),1.0), w);
     52    chisq_ = ap_.y_averager().sum_xx_centered();
    5253  }
     54
    5355
    5456  double NaiveWeighted::predict(const double x) const
     
    5759  }
    5860
    59   double NaiveWeighted::mse(void) const
     61
     62  double NaiveWeighted::s2(double w) const
    6063  {
    61     return ap_.y_averager().std();
     64    return chisq_/(w*(ap_.y_averager().n()-1));
    6265  }
    6366
    64   double NaiveWeighted::standard_error(const double x) const
    65   {
    66     return ap_.y_averager().standard_error();
     67
     68  double NaiveWeighted::standard_error2(const double x) const
     69  {
     70    return chisq_/((ap_.y_averager().n()-1)*ap_.sum_w());
    6771  }
    6872
  • trunk/yat/regression/NaiveWeighted.h

    r718 r729  
    7373    double predict(const double x) const;
    7474
    75     ///
    76     /// @brief For a naive model mse is identical to standard deviation
    77     ///
    78     /// @see AveragerWeighted
    79     ///
    80     double mse(void) const;
     75    /**
     76       \f$ \frac{\sum w_i\epsilon_i^2}{ w \left(\frac{\left(\sum
     77       w_i\right)^2}{\sum w_i^2}-1\right)} \f$
    8178
    82     ///
    83     /// @return estimation of error of model value in @a x
    84     ///
    85     double standard_error(const double x) const;
     79       Rescaling all weights, both in fit and the passed @a w, results
     80       in the same returned value.
     81
     82       @return Conditional variance of Y with weight @a w.
     83    */
     84    double s2(const double w=1) const;
     85
     86    /**
     87       \f$ \frac{\sum w_i\epsilon_i^2}{ \left(\frac{\left(\sum
     88       w_i\right)^2}{\sum w_i^2}-1\right)\sum w_i} \f$
     89
     90       @return estimated squared error of model value in @a x
     91    */
     92    double standard_error2(const double x) const;
    8693
    8794  private:
  • trunk/yat/regression/OneDimensional.cc

    r727 r729  
    2929
    3030  OneDimensional::OneDimensional(void)
     31    : chisq_(0)
    3132  {
    3233  }
     
    3738
    3839
     40  double OneDimensional::chisq(void) const
     41  {
     42    return chisq_;
     43  }
     44
     45
    3946  double OneDimensional::prediction_error2(const double x) const
    4047  {
    41     return chisq()+standard_error2(x);
     48    return s2()+standard_error2(x);
    4249  }
    4350
     
    6370
    6471
    65   double OneDimensional::r_squared(void) const
     72  double OneDimensional::r2(void) const
    6673  {
    67     return chisq()/variance();
     74    return 1 - chisq()/ap_.y_averager().sum_xx_centered();
    6875  }
    6976
  • trunk/yat/regression/OneDimensional.h

    r728 r729  
    5858       @brief Chi-squared
    5959       
    60        Chi-squared is defined as the \f$ \frac
    61        {\sum{(\hat{y_i}-y_i)^2}}{1} \f$
     60       Chi-squared is defined as the \f$
     61       \sum{(\hat{y_i}-y_i)^2} \f$
    6262    */
    63     virtual double chisq(void) const=0;
     63    double chisq(void) const;
    6464   
    6565    /**
     
    7979       deviation a new data point will have from value the model
    8080       provides: \f$ E(Y|x - \hat{y}(x))^2 \f$ and is typically
    81        divided into two terms \f$ E(Y|x - E(Y|x))^2 \f$ and \f$
    82        E(E(Y|x) - \hat{y}(x))^2 \f$, which is the conditional variance
    83        given \f$ x \f$ and the squared standard error (see
    84        standard_error2()) of the model estimation in \f$ x \f$,
    85        respectively.
     81       divided into the conditional variance ( see s2() )
     82       given \f$ x \f$ and the squared standard error ( see
     83       standard_error2() ) of the model estimation in \f$ x \f$.
    8684       
    8785       @return expected squared prediction error for a new data point
     
    108106
    109107    /**
    110        r-squared is defined as \f$ \frac{Var(Y|x)}{Var(Y)} \f$ or the
     108       r2 is defined as \f$ 1 - \frac{Var(Y|x)}{Var(Y)} \f$ or the
    111109       fraction of the variance explained by the regression model.
     110
     111       @see s2()
    112112    */
    113     double r_squared(void) const;
     113    double r2(void) const;
    114114
    115115    /**
    116        @return variance of residuals
     116       \f$ E(Y|x - E(Y|x))^2 \f$
     117
     118       @return Conditional variance of Y
    117119    */
    118120    virtual double s2(void) const=0;
     
    136138    statistics::AveragerPair ap_;
    137139
     140    ///
     141    /// @see chisq()
     142    ///
     143    double chisq_;
    138144  };
    139145
  • trunk/yat/regression/OneDimensionalWeighted.h

    r702 r729  
    4747    /// Default Constructor.
    4848    ///
    49     inline OneDimensionalWeighted(void){}
     49    OneDimensionalWeighted(void);
    5050
    5151    ///
    5252    /// Destructor
    5353    ///
    54     virtual ~OneDimensionalWeighted(void) {};
     54    virtual ~OneDimensionalWeighted(void);
    5555         
    5656    /**
     
    6464                     const utility::vector& w)=0;
    6565
    66     /**
    67        @brief Mean Squared Error
    68 
    69        Mean Squared Error is defined as the weighted mean of the
    70        squared residiuals \f$ \frac{\sum w_i(y_i-\hat{y}_i)^2}{\sum
    71        w_i} \f$, which is minimized when fitting the regression model.
    72     */
    73     virtual double mse(void) const=0;
    74 
    7566    ///
    7667    /// @return expected value in @a x according to the fitted model
     
    7970
    8071    /**
    81        The prediction error is defined as the square root of the
    82        expected squared deviation a new data point will have from
    83        value the model provides. The expected squared deviation is
    84        defined as \f$ E((Y|x,w - \hat{y}(x))^2) \f$ which is equal to
    85        \f$ E((Y|x,w - E(Y|x))^2) + E((E(Y|x) - \hat{y}(x))^2) \f$,
    86        which is the conditional variance given \f$ x \f$ and the
    87        squared standard error (see standard_error()) of the model
    88        estimation in \f$ x \f$, respectively.
    89        
    90        The conditional variance is inversely proportional to the
    91        weight \f$ w \f$ and is calculated as \f$ Var(Y|x,w) =
    92        \frac{1}{w}\frac{\sum w_i(y_i-\hat{y}_i)^2\sum w_i^2}
    93        {\left(\sum w_i\right)^2} =\frac{\sum w_i^2}{w\sum w_i}mse\f$
     72       The prediction error is defined as expected squared deviation a
     73       new data point (with weight @a w) will be from the model
     74       value \f$ E((Y|x - \hat{y}(x))^2|w) \f$ and is typically
     75       divided into the conditional variance ( see s2() )
     76       given \f$ x \f$ and the squared standard error ( see
     77       standard_error2() ) of the model estimation in \f$ x \f$.
     78
     79       \f$ E((Y|x - E(Y|x))^2|w) + E((E(Y|x) - \hat{y}(x))^2) \f$
    9480
    9581       @return expected prediction error for a new data point in @a x
     82       with weight @a w.
    9683    */
    97     double prediction_error(const double x, const double w=1.0) const
    98     { return sqrt(mse()+pow(standard_error(x),2)); }
     84    double prediction_error2(const double x, const double w=1.0) const;
    9985
    10086    /**
    101        r-squared is defined as \f$ \frac{\sum
     87       r2 is defined as \f$ \frac{\sum
    10288       w_i(y_i-\hat{y}_i)^2}{\sum w_i(y_i-m_y)^2} \f$ or the fraction
    10389       of the variance explained by the regression model.
    10490    */
    105     inline double r_squared(void) const
    106     { return mse()/variance(); }
     91    double r2(void) const;
    10792
    10893    /**
    109        The standard error is defined as \f$ \sqrt{E((Y|x -
    110        \hat{y}(x))^2) }\f$
     94       \f$ s^2 \f$ is the estimation of variance of residuals or
     95       equivalently the conditional variance of Y.
     96
     97       @return Conditional variance of Y
     98    */
     99    virtual double s2(double w=1) const=0;
     100
     101    /**
     102       The standard error is defined as \f$ E((Y|x,w -
     103       \hat{y}(x))^2) \f$
    111104
    112105       @return error of model value in @a x
    113106    */
    114     virtual double standard_error(const double x) const=0;
     107    virtual double standard_error2(const double x) const=0;
    115108
    116109  protected:
     
    120113    statistics::AveragerPairWeighted ap_;
    121114
     115    /**
     116       @brief Chi-squared
     117       
     118       Chi-squared is defined as the \f$
     119       \sum{w_i(\hat{y_i}-y_i)^2} \f$
     120    */
     121    double chisq_;
     122
    122123  private:
    123     inline double variance(double w=1) const
    124     { return ap_.y_averager().variance(); }
    125 
    126 
    127124  };
    128125
  • trunk/yat/regression/Polynomial.cc

    r728 r729  
    4141
    4242
    43   double Polynomial::chisq(void) const
    44   {
    45     return md_.chisq();
    46   }
    47 
    48 
    4943  const utility::matrix& Polynomial::covariance(void) const
    5044  {
     
    6054        X(i,j)=X(i,j-1)*x(i);
    6155    md_.fit(X,y);
     56    chisq_ = md_.chisq();
    6257  }
    6358
  • trunk/yat/regression/Polynomial.h

    r728 r729  
    2828#include "MultiDimensional.h"
    2929#include "yat/utility/vector.h"
    30 
    31 #include <gsl/gsl_multifit.h>
    32 
    33 #include <cassert>
    3430
    3531namespace theplu {
     
    7672
    7773    ///
    78     /// @brief Sum of squared residuals
    79     ///
    80     double chisq(void) const;
    81 
    82     ///
    8374    /// @return value in @a x of model
    8475    ///
  • trunk/yat/regression/PolynomialWeighted.cc

    r718 r729  
    5454  }
    5555
    56   double PolynomialWeighted::mse(void) const
     56  double PolynomialWeighted::s2(const double) const
    5757  {
    5858    return mse_;
     
    7676  }
    7777
    78   double PolynomialWeighted::standard_error(const double x) const
     78  double PolynomialWeighted::standard_error2(const double x) const
    7979  {
    8080    utility::vector vec(power_+1,1);
  • trunk/yat/regression/PolynomialWeighted.h

    r718 r729  
    7070    /// @brief Mean Squared Error
    7171    ///
    72     double mse(void) const;
     72    double s2(const double) const;
    7373
    7474    ///
     
    8686    /// @return error of model value in @a x
    8787    ///
    88     double standard_error(const double x) const;
     88    double standard_error2(const double x) const;
    8989
    9090  private:
Note: See TracChangeset for help on using the changeset viewer.