source: trunk/test/regression_test.cc @ 726

Last change on this file since 726 was 726, checked in by Peter, 16 years ago

fixes #165: added a test checking that Linear regression is equivalent to Polynomial regression of degree one.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 6.3 KB
// $Id: regression_test.cc 726 2007-01-04 14:38:56Z peter $

/*
  Copyright (C) The authors contributing to this file.

  This file is part of the yat library, http://lev.thep.lu.se/trac/yat

  The yat library is free software; you can redistribute it and/or
  modify it under the terms of the GNU General Public License as
  published by the Free Software Foundation; either version 2 of the
  License, or (at your option) any later version.

  The yat library is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
  02111-1307, USA.
*/

#include "yat/regression/KernelBox.h"
#include "yat/regression/Linear.h"
#include "yat/regression/LinearWeighted.h"
#include "yat/regression/Local.h"
#include "yat/regression/Naive.h"
#include "yat/regression/NaiveWeighted.h"
#include "yat/regression/Polynomial.h"
#include "yat/regression/PolynomialWeighted.h"
#include "yat/utility/matrix.h"
#include "yat/utility/vector.h"

#include <cmath>

#include <fstream>
#include <iostream>


using namespace theplu::yat;

bool Local_test(regression::OneDimensionalWeighted&,
                regression::Kernel&);


int main(const int argc,const char* argv[])
{
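  // Diagnostic messages go to stderr when the test is run with -v;
  // otherwise they are silently discarded via /dev/null.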
  std::ostream* error;
  if (argc>1 && argv[1]==std::string("-v"))
    error = &std::cerr;
  else {
    error = new std::ofstream("/dev/null");
    if (argc>1)
      std::cout << "regression_test -v : for printing extra information\n";
  }
  *error << "testing regression" << std::endl;
  bool ok = true;

  // test data for Linear and Naive (Weighted and non-weighted)
  utility::vector x(4); x(0)=1970; x(1)=1980; x(2)=1990; x(3)=2000;
  utility::vector y(4); y(0)=12;   y(1)=11;   y(2)=14;   y(3)=13;
  utility::vector w(4); w(0)=0.1;  w(1)=0.2;  w(2)=0.3;  w(3)=0.4;

  // Comparing linear and polynomial(1)
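  // A linear fit y = alpha + beta*(x - m_x), with m_x the mean of x
  // (1985 for the data above), and a degree-one polynomial fit
  // y = c0 + c1*x describe the same line, so the checks below expect
  // beta == c1 and c0 == alpha - beta*m_x, as well as identical chisq,
  // predictions and standard errors.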
  regression::Linear linear;
  linear.fit(x,y);
  regression::Polynomial polynomial(1);
  polynomial.fit(x,y);
  if ( fabs(linear.beta()-polynomial.fit_parameters()(1))>0.0001 ){
    *error << "error: beta and fit_parameters(1) not equal" << std::endl;
    *error << "       beta = " << linear.beta() << std::endl;
    *error << "       fit_parameters(1) = "
           << polynomial.fit_parameters()(1) << std::endl;
    ok = false;
  }
  if ( fabs(polynomial.fit_parameters()(0)-linear.alpha()+
            linear.beta()*1985)>0.0001){
    *error << "error: fit_parameters(0) = "
           << polynomial.fit_parameters()(0)<< std::endl;
    *error << "error: alpha-beta*m_x = "
           << linear.alpha()-linear.beta()*1985 << std::endl;
    ok = false;
  }
  if ( fabs(polynomial.chisq()-linear.chisq())>0.0001){
    *error << "error: chisq not same in linear and polynomial(1)"
           << std::endl;
    ok = false;
  }
  if ( fabs(polynomial.predict(1.0)-linear.predict(1.0))>0.0001){
    *error << "error: predict not same in linear and polynomial(1)"
           << std::endl;
    ok = false;
  }
  if ( fabs(polynomial.standard_error(1985)-linear.standard_error(1985))
       >0.0001){
    *error << "error: standard_error not same in linear and polynomial(1)"
           << "\n  polynomial: " << polynomial.standard_error(1985)
           << "\n  linear: " << linear.standard_error(1985)
           << "\n  alpha_var: " << linear.alpha_var()
           << "\n  beta_var: " << linear.beta_var()
           << "\n  covariance: " << polynomial.covariance()(0,0)
           << " " << polynomial.covariance()(0,1) << "\n"
           << " " << polynomial.covariance()(1,0)
           << " " << polynomial.covariance()(1,1)
           << std::endl;
    ok = false;
  }

  *error << "testing regression::LinearWeighted" << std::endl;
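  // The weighted linear fit is checked against a precomputed prediction
  // at x = 1990; prediction_error() is called as well, but its return
  // value is not verified here.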
  regression::LinearWeighted linear_w;
  linear_w.fit(x,y,w);
  double y_predicted = linear_w.predict(1990);
  double y_predicted_err = linear_w.prediction_error(1990);
  if (fabs(y_predicted-12.8)>0.001){
    *error << "error: cannot reproduce fit." << std::endl;
    *error << "predicted value: " << y_predicted << " expected 12.8"
           << std::endl;
    ok=false;
  }

  // testing regression::NaiveWeighted
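  // The check below expects the weighted mean of y, sum(w_i*y_i)/sum(w_i);
  // the weights above sum to one, so the expected value is
  // 0.1*12 + 0.2*11 + 0.3*14 + 0.4*13 = 12.8.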
  *error << "testing regression::NaiveWeighted" << std::endl;
  regression::NaiveWeighted naive_w;
  naive_w.fit(x,y,w);

  y_predicted=naive_w.predict(0.0);
  y_predicted_err=naive_w.prediction_error(0.0);
  if (y_predicted!=(0.1*12+0.2*11+0.3*14+0.4*13)) {
    *error << "regression_NaiveWeighted: cannot reproduce fit." << std::endl;
    *error << "returned value: " << y_predicted << std::endl;
    *error << "expected: " << 0.1*12+0.2*11+0.3*14+0.4*13 << std::endl;
    ok=false;
  }

  // testing regression::Local
  *error << "testing regression::Local" << std::endl;
  regression::KernelBox kb;
  regression::LinearWeighted rl;
  if (!Local_test(rl,kb)) {
    *error << "regression_Local: Linear cannot reproduce fit." << std::endl;
    ok=false;
  }
  regression::NaiveWeighted rn;
  if (!Local_test(rn,kb)) {
    *error << "regression_Local: Naive cannot reproduce fit." << std::endl;
    ok=false;
  }

  // testing regression::Polynomial
  *error << "testing regression::Polynomial" << std::endl;
  {
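    // The data in regression_gauss.data are assumed to be Gaussian, whose
    // logarithm is quadratic in x, so a second-degree polynomial is fitted
    // to (x, ln y) and the coefficients are compared with stored reference
    // values.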
    std::ifstream s("data/regression_gauss.data");
    utility::matrix data(s);
    utility::vector x(data.rows());
    utility::vector ln_y(data.rows());
    for (size_t i=0; i<data.rows(); ++i) {
      x(i)=data(i,0);
      ln_y(i)=log(data(i,1));
    }

    regression::Polynomial polynomialfit(2);
    polynomialfit.fit(x,ln_y);
    utility::vector fit=polynomialfit.fit_parameters();
    if (fabs(fit[0]-1.012229646706 + fit[1]-0.012561322528 +
             fit[2]+1.159674470130)>1e-11) {  // Jari, fix number!
      *error << "regression_Polynomial: cannot reproduce fit." << std::endl;
      ok=false;
    }
  }

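  // The remaining classes are only instantiated here; no numerical checks
  // are performed on them.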
  *error << "testing regression::Linear" << std::endl;
  regression::Linear lin;

  *error << "testing regression::Naive" << std::endl;
  regression::Naive naive;

  *error << "testing regression::Polynomial" << std::endl;
  regression::Polynomial pol(2);

  *error << "testing regression::PolynomialWeighted" << std::endl;
  regression::PolynomialWeighted pol_weighted(2);

  if (error!=&std::cerr)
    delete error;

  return (ok ? 0 : -1);
}


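// Local_test wraps the given one-dimensional weighted regressor and kernel
// in a regression::Local object, feeds it 1000 points with constant y = 10,
// fits, and requires every predicted value to equal 10, i.e. the fit must
// reproduce constant data exactly.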
bool Local_test(regression::OneDimensionalWeighted& r,
                regression::Kernel& k)
{
  regression::Local rl(r,k);
  for (size_t i=0; i<1000; i++){
    rl.add(i, 10);
  }

  rl.fit(10, 100);

  utility::vector y = rl.y_predicted();
  for (size_t i=0; i<y.size(); i++)
    if (y(i)!=10.0){
      return false;
    }
  return true;
}