Changeset 1230
- Timestamp: Mar 14, 2008, 1:24:07 PM
- Files: 1 edited
Legend:
- Unmodified (no prefix)
- Added (+)
- Removed (-)
trunk/test/regression_test.cc
(diff from r1210 to r1230)

    02111-1307, USA.
 */
+
+#include "Suite.h"
 
 #include "yat/regression/KernelBox.h"
…
 using namespace theplu::yat;
 
-bool equal(regression::OneDimensional&, regression::OneDimensionalWeighted&,
-           std::ostream*);
-
-bool multidim(std::ostream* error);
-
-bool unity_weights(regression::OneDimensional&,
+void equal(regression::OneDimensional&, regression::OneDimensionalWeighted&,
+           test::Suite&);
+
+void multidim(test::Suite& suite);
+
+void unity_weights(regression::OneDimensional&,
                    regression::OneDimensionalWeighted&,
                    const utility::Vector&, const utility::Vector&,
-                   std::ostream*);
-
-bool rescale_weights(regression::OneDimensionalWeighted&,
+                   test::Suite&);
+
+void rescale_weights(regression::OneDimensionalWeighted&,
                      const utility::Vector&, const utility::Vector&,
-                     std::ostream*);
-
-bool zero_weights(regression::OneDimensionalWeighted&,
+                     test::Suite&);
+
+void zero_weights(regression::OneDimensionalWeighted&,
                   const utility::Vector&, const utility::Vector&,
-                  std::ostream*);
+                  test::Suite&);
 
 
…
                 regression::Kernel&);
 
-int main(const int argc,const char* argv[])
-{
-  std::ostream* error;
-  if (argc>1 && argv[1]==std::string("-v"))
-    error = &std::cerr;
-  else {
-    error = new std::ofstream("/dev/null");
-    if (argc>1)
-      std::cout << "regression_test -v : for printing extra information\n";
-  }
-  *error << " testing regression" << std::endl;
-  bool ok = true;
+int main(int argc, char* argv[])
+{
+  test::Suite suite(argc, argv);
+
+  suite.err() << " testing regression" << std::endl;
 
   // test data for Linear and Naive (Weighted and non-weighted)
…
   regression::Polynomial polynomial(1);
   polynomial.fit(x,y);
-  if ( std::abs(linear.beta()-polynomial.fit_parameters()(1))>0.0001){
-    *error << "error: beta and fit_parameters(1) not equal" << std::endl;
-    *error << " beta = " << linear.beta() << std::endl;
-    *error << " fit_parameters(1) = "
+  if ( test::equal(linear.beta(),polynomial.fit_parameters()(1)) ){
+    suite.err() << "error: beta and fit_parameters(1) not equal" << std::endl;
+    suite.err() << " beta = " << linear.beta() << std::endl;
+    suite.err() << " fit_parameters(1) = "
            << polynomial.fit_parameters()(1) << std::endl;
-    ok = false;
-  }
-  if ( std::abs(polynomial.fit_parameters()(0)-linear.alpha()+
-                linear.beta()*1985)>0.0001){
-    *error << "error: fit_parameters(0) = "
+    suite.ok(false);
+  }
+  if (test::equal(polynomial.fit_parameters()(0),
+                  linear.alpha()-linear.beta()*1985)){
+    suite.err() << "error: fit_parameters(0) = "
            << polynomial.fit_parameters()(0)<< std::endl;
-    *error << "error: alpha-beta*m_x = "
+    suite.err() << "error: alpha-beta*m_x = "
            << linear.alpha()-linear.beta()*1985 << std::endl;
-    ok = false;
+    suite.ok(false);
   }
   if ( std::abs(polynomial.chisq()-linear.chisq())>0.0001){
-    *error << "error: chisq not same in linear and polynomial(1)"
-           << std::endl;
-    ok = false;
+    suite.err() << "error: chisq not same in linear and polynomial(1)"
+                << std::endl;
+    suite.ok(false);
   }
   if ( std::abs(polynomial.predict(1.0)-linear.predict(1.0))>0.0001){
-    *error << "error: predict not same in linear and polynomial(1)"
-           << std::endl;
-    ok = false;
+    suite.err() << "error: predict not same in linear and polynomial(1)"
+                << std::endl;
+    suite.ok(false);
   }
   if ( std::abs(polynomial.standard_error2(1985)-linear.standard_error2(1985))
       >0.0001){
-    *error << "error: standard_error not same in linear and polynomial(1)"
+    suite.err() << "error: standard_error not same in linear and polynomial(1)"
            << "\n polynomial: " << polynomial.standard_error2(1985)
            << "\n linear: " << linear.standard_error2(1985)
            << std::endl;
-    ok = false;
+    suite.ok(false);
   }
 
-  *error << " testing regression::LinearWeighted" << std::endl;
+  suite.err() << " testing regression::LinearWeighted" << std::endl;
   regression::LinearWeighted linear_w;
-  ok = equal(linear, linear_w, error) && ok;
+  equal(linear, linear_w, suite);
   linear_w.fit(x,y,w);
   double y_predicted = linear_w.predict(1990);
   double y_predicted_err = linear_w.prediction_error2(1990);
   if (std::abs(y_predicted-12.8)>0.001){
-    *error << "error: cannot reproduce fit." << std::endl;
-    *error << "predicted value: " << y_predicted << " expected 12.8"
-           << std::endl;
-    ok=false;
+    suite.err() << "error: cannot reproduce fit." << std::endl;
+    suite.err() << "predicted value: " << y_predicted << " expected 12.8"
+                << std::endl;
+    suite.ok(false);
   }
 
   // Comparing LinearWeighted and PolynomialWeighted(1)
-  *error << " comparing LinearWeighted and PolynomialWeighted(1)"
+  suite.err() << " comparing LinearWeighted and PolynomialWeighted(1)"
          << std::endl;
   linear_w.fit(x,y,w);
…
   polynomial_w.fit(x,y,w);
   if ( std::abs(linear_w.beta()-polynomial_w.fit_parameters()(1))>10e-7 ){
-    *error << "error: beta and fit_parameters(1) not equal" << std::endl;
-    *error << " beta = " << linear_w.beta() << std::endl;
-    *error << " fit_parameters(1) = "
+    suite.err() << "error: beta and fit_parameters(1) not equal" << std::endl;
+    suite.err() << " beta = " << linear_w.beta() << std::endl;
+    suite.err() << " fit_parameters(1) = "
            << polynomial_w.fit_parameters()(1) << std::endl;
-    ok = false;
+    suite.ok(false);
   }
   if ( std::abs(polynomial_w.fit_parameters()(0)-linear_w.alpha()+
                 linear_w.beta()*1990)>0.0001){
-    *error << "error: fit_parameters(0) = "
+    suite.err() << "error: fit_parameters(0) = "
            << polynomial.fit_parameters()(0)<< std::endl;
-    *error << "error: alpha-beta*m_x = "
+    suite.err() << "error: alpha-beta*m_x = "
            << linear.alpha()-linear.beta()*1990 << std::endl;
-    ok = false;
+    suite.ok(false);
   }
   if ( std::abs(polynomial_w.s2()-linear_w.s2())>0.0001){
-    *error << "error: chisq not same in linear and polynomial(1)"
-           << std::endl;
-    ok = false;
+    suite.err() << "error: chisq not same in linear and polynomial(1)"
+                << std::endl;
+    suite.ok(false);
   }
   if ( std::abs(polynomial_w.predict(1.0)-linear_w.predict(1.0))>0.0001){
-    *error << "error: predict not same in linear and polynomial(1)"
-           << std::endl;
-    ok = false;
+    suite.err() << "error: predict not same in linear and polynomial(1)"
+                << std::endl;
+    suite.ok(false);
   }
   if ( std::abs(polynomial_w.standard_error2(1985)-linear_w.standard_error2(1985))
       >0.0001){
-    *error << "error: standard_error not same in linear and polynomial(1)"
+    suite.err() << "error: standard_error not same in linear and polynomial(1)"
            << "\n polynomial: " << polynomial.standard_error2(1985)
            << "\n linear: " << linear.standard_error2(1985)
            << std::endl;
-    ok = false;
+    suite.ok(false);
   }
 
   // testing regression::NaiveWeighted
-  *error << " testing regression::NaiveWeighted" << std::endl;
+  suite.err() << " testing regression::NaiveWeighted" << std::endl;
   regression::NaiveWeighted naive_w;
   regression::Naive naive;
-  ok = equal(naive, naive_w, error) && ok;
+  equal(naive, naive_w, suite);
   naive_w.fit(x,y,w);
 
   y_predicted=naive_w.predict(0.0);
   y_predicted_err=naive_w.prediction_error2(0.0);
-  if ( y_predicted!=(0.1*12+0.2*11+0.3*14+0.4*13)) {
-    *error << "regression_NaiveWeighted: cannot reproduce fit." << std::endl;
-    *error << "returned value: " << y_predicted << std::endl;
-    *error << "expected: " << 0.1*12+0.2*11+0.3*14+0.4*13 << std::endl;
-    ok=false;
+  if (!test::equal(y_predicted,0.1*12+0.2*11+0.3*14+0.4*13)) {
+    suite.err() << "regression_NaiveWeighted: cannot reproduce fit.\n";
+    suite.err() << "returned value: " << y_predicted << std::endl;
+    suite.err() << "expected: " << 0.1*12+0.2*11+0.3*14+0.4*13 << std::endl;
+    suite.ok(false);
   }
 
   // testing regression::Local
-  *error << " testing regression::Local" << std::endl;
+  suite.err() << " testing regression::Local" << std::endl;
   regression::KernelBox kb;
   regression::LinearWeighted rl;
   if (!Local_test(rl,kb)) {
-    *error << "regression_Local: Linear cannot reproduce fit." << std::endl;
-    ok=false;
+    suite.err() << "regression_Local: Linear cannot reproduce fit." << std::endl;
+    suite.ok(false);
   }
   regression::NaiveWeighted rn;
   if (!Local_test(rn,kb)) {
-    *error << "regression_Local: Naive cannot reproduce fit." << std::endl;
-    ok=false;
+    suite.err() << "regression_Local: Naive cannot reproduce fit." << std::endl;
+    suite.ok(false);
   }
 
   // testing regression::Polynomial
-  *error << " testing regression::Polynomial" << std::endl;
+  suite.err() << " testing regression::Polynomial" << std::endl;
   {
     std::ifstream s("data/regression_gauss.data");
…
     if (std::abs(fit(0)-1.012229646706 + fit(1)-0.012561322528 +
                  fit(2)+1.159674470130)>1e-11) {
-      *error << "regression_Polynomial: cannot reproduce fit." << std::endl;
-      ok=false;
+      suite.err() << "regression_Polynomial: cannot reproduce fit." << std::endl;
+      suite.ok(false);
     }
   }
 
-  *error << " testing regression::PolynomialWeighted" << std::endl;
+  suite.err() << " testing regression::PolynomialWeighted" << std::endl;
   regression::PolynomialWeighted pol_weighted(2);
   regression::Polynomial polynomial2(2);
-  ok = equal(polynomial2, pol_weighted, error) && ok;
-
-  ok = multidim(error) && ok;
-
-  if (!ok)
-    *error << "Test failed\n" << std::endl;
-
-  if (error!=&std::cerr)
-    delete error;
-
-  return (ok ? 0 : -1);
+  equal(polynomial2, pol_weighted, suite);
+
+  multidim(suite);
+
+  return suite.return_value();
 }
 
 
-bool equal(regression::OneDimensional& r,
+void equal(regression::OneDimensional& r,
            regression::OneDimensionalWeighted& wr,
-           std::ostream* error)
-{
-  bool ok=true;
+           test::Suite& suite)
+{
   utility::Vector x(5); x(0)=1970; x(1)=1980; x(2)=1990; x(3)=2000; x(4)=2010;
   utility::Vector y(5); y(0)=12; y(1)=11; y(2)=14; y(3)=13; y(4)=15;
 
-  ok = unity_weights(r, wr, x, y, error) && ok;
-  ok = rescale_weights(wr, x, y, error) && ok;
-  ok = zero_weights(wr, x, y, error) && ok;
-  return ok;
-}
-
-
-bool unity_weights(regression::OneDimensional& r,
+  unity_weights(r, wr, x, y, suite);
+  rescale_weights(wr, x, y, suite);
+  zero_weights(wr, x, y, suite);
+}
+
+
+void unity_weights(regression::OneDimensional& r,
                    regression::OneDimensionalWeighted& rw,
                    const utility::Vector& x, const utility::Vector& y,
-                   std::ostream* error)
-{
-  *error << " testing unity weights equal to non-weighted version.\n";
-  bool ok=true;
+                   test::Suite& suite)
+{
+  suite.err() << " testing unity weights equal to non-weighted version.\n";
   utility::Vector w(x.size(), 1.0);
   r.fit(x,y);
   rw.fit(x,y,w);
   if (std::abs(r.predict(2000) - rw.predict(2000))>1e-15) {
-    ok = false;
-    *error << "Error: predict not equal\n"
+    suite.ok(false);
+    suite.err() << "Error: predict not equal\n"
            << "   weighted: " << rw.predict(2000) << "\n"
            << "   non-weighted: " << r.predict(2000)
…
   }
   if (std::abs(r.s2()-rw.s2(1.0))>10E-7){
-    ok = false;
-    *error << "Error: s2 not equal non-weighted version." << std::endl;
-    *error << "weighted s2 = " << rw.s2(1.0) << std::endl;
-    *error << "non-weighted s2 = " << r.s2() << std::endl;
+    suite.ok(false);
+    suite.err() << "Error: s2 not equal non-weighted version." << std::endl;
+    suite.err() << "weighted s2 = " << rw.s2(1.0) << std::endl;
+    suite.err() << "non-weighted s2 = " << r.s2() << std::endl;
   }
   if (std::abs(r.standard_error2(2000)-rw.standard_error2(2000))>10e-7){
-    ok = false;
-    *error << "Error: standard_error not equal non-weighted version."
+    suite.ok(false);
+    suite.err() << "Error: standard_error not equal non-weighted version."
            << std::endl;
   }
   if (std::abs(r.r2()-rw.r2())>10E-7){
-    ok = false;
-    *error << "Error: r2 not equal non-weighted version." << std::endl;
-    *error << "weighted r2 = " << rw.r2() << std::endl;
-    *error << "non-weighted r2 = " << r.r2() << std::endl;
+    suite.ok(false);
+    suite.err() << "Error: r2 not equal non-weighted version." << std::endl;
+    suite.err() << "weighted r2 = " << rw.r2() << std::endl;
+    suite.err() << "non-weighted r2 = " << r.r2() << std::endl;
   }
   if (std::abs(r.prediction_error2(2000)-rw.prediction_error2(2000,1))>10e-7){
-    ok = false;
-    *error << "Error: prediction_error2 not equal non-weighted version.\n"
+    suite.ok(false);
+    suite.err() << "Error: prediction_error2 not equal non-weighted version.\n"
           << "   weighted: " << rw.prediction_error2(2000,1) << "\n"
           << "   non-weighted: " << r.prediction_error2(2000)
           << std::endl;
   }
-  return ok;
 }
 
 
-bool rescale_weights(regression::OneDimensionalWeighted& wr,
+void rescale_weights(regression::OneDimensionalWeighted& wr,
                      const utility::Vector& x, const utility::Vector& y,
-                     std::ostream* error)
-{
-  *error << " testing rescaling weights.\n";
-  bool ok = true;
+                     test::Suite& suite)
+{
+  suite.err() << " testing rescaling weights.\n";
   utility::Vector w(5); w(0)=1.0; w(1)=1.0; w(2)=0.5; w(3)=0.2; w(4)=0.2;
   wr.fit(x,y,w);
…
   wr.fit(x,y,w);
   if (std::abs(wr.predict(2000)-predict)>10e-11){
-    ok = false;
-    *error << "Error: predict not equal after rescaling.\n";
-    *error << "   predict = " << predict
+    suite.ok(false);
+    suite.err() << "Error: predict not equal after rescaling.\n";
+    suite.err() << "   predict = " << predict
           << " and after doubling weights.\n";
-    *error << "   predict = " << wr.predict(2000) << "\n";
+    suite.err() << "   predict = " << wr.predict(2000) << "\n";
   }
   if (std::abs(wr.s2(2)-s2)>10e-11){
-    ok = false;
-    *error << "Error: s2 not equal after rescaling.\n";
-    *error << "   s2 = " << s2 << " and after doubling weights.\n";
-    *error << "   s2 = " << wr.s2(2) << "\n";
-    *error << "difference " << s2-wr.s2(2.0) << std::endl;
+    suite.ok(false);
+    suite.err() << "Error: s2 not equal after rescaling.\n";
+    suite.err() << "   s2 = " << s2 << " and after doubling weights.\n";
+    suite.err() << "   s2 = " << wr.s2(2) << "\n";
+    suite.err() << "difference " << s2-wr.s2(2.0) << std::endl;
   }
   if (std::abs(wr.standard_error2(2000)-standard_error2)>10e-6){
-    ok = false;
-    *error << "Error: standard_error2 not equal after rescaling.\n";
-    *error << "   standard_error2 = " << standard_error2
+    suite.ok(false);
+    suite.err() << "Error: standard_error2 not equal after rescaling.\n";
+    suite.err() << "   standard_error2 = " << standard_error2
           << " and after doubling weights.\n";
-    *error << "   standard_error2 = "
+    suite.err() << "   standard_error2 = "
           << wr.standard_error2(2000) << "\n";
-    *error << "   difference: " << wr.standard_error2(2000)-standard_error2
+    suite.err() << "   difference: " << wr.standard_error2(2000)-standard_error2
           << std::endl;
   }
   if (std::abs(wr.r2()-r2)>10e-6){
-    ok = false;
-    *error << "Error: r2 not equal after rescaling.\n";
+    suite.ok(false);
+    suite.err() << "Error: r2 not equal after rescaling.\n";
   }
   if (std::abs(wr.prediction_error2(2000,2)-prediction_error2)>10e-6){
-    ok = false;
-    *error << "Error: prediction_error2 not equal after rescaling.\n";
-    *error << "   prediction_error2 = " << prediction_error2
+    suite.ok(false);
+    suite.err() << "Error: prediction_error2 not equal after rescaling.\n";
+    suite.err() << "   prediction_error2 = " << prediction_error2
           << " and after doubling weights.\n";
-    *error << "   prediction_error2 = "
+    suite.err() << "   prediction_error2 = "
           << wr.prediction_error2(2000,2) << "\n";
   }
-  return ok;
-}
-
-
-bool zero_weights(regression::OneDimensionalWeighted& wr,
+}
+
+
+void zero_weights(regression::OneDimensionalWeighted& wr,
                   const utility::Vector& x, const utility::Vector& y,
-                  std::ostream* error)
-{
-  *error << " testing zero weights equal to missing value.\n";
-  bool ok = true;
+                  test::Suite& suite)
+{
+  suite.err() << " testing zero weights equal to missing value.\n";
   utility::Vector w(5); w(0)=1.0; w(1)=1.0; w(2)=0.5; w(3)=0.2; w(4)=0;
   wr.fit(x,y,w);
…
   wr.fit(x2,y2,w2);
   if (std::abs(wr.predict(2000) - predict)>1e-10) {
-    ok = false;
-    *error << "Error: predict not equal.\n";
-    *error << "   weighted predict: " << wr.predict(2000) << "\n";
-    *error << "   unweighted predict: " << predict << "\n";
-    *error << "   difference: " << wr.predict(2000)-predict << "\n";
+    suite.ok(false);
+    suite.err() << "Error: predict not equal.\n";
+    suite.err() << "   weighted predict: " << wr.predict(2000) << "\n";
+    suite.err() << "   unweighted predict: " << predict << "\n";
+    suite.err() << "   difference: " << wr.predict(2000)-predict << "\n";
 
   }
   if (std::abs(wr.prediction_error2(2000) - prediction_error2)>1e-13) {
-    ok = false;
-    *error << "Error: prediction_error2 not equal.\n";
+    suite.ok(false);
+    suite.err() << "Error: prediction_error2 not equal.\n";
   }
   if (std::abs(wr.r2() - r2)>1e-15) {
-    ok = false;
-    *error << "Error: r2 not equal.\n";
-    *error << "   r2: " << r2 << "\n";
-    *error << "   r2: " << wr.r2() << "\n";
+    suite.ok(false);
+    suite.err() << "Error: r2 not equal.\n";
+    suite.err() << "   r2: " << r2 << "\n";
+    suite.err() << "   r2: " << wr.r2() << "\n";
   }
   if (std::abs(wr.s2() - s2)>1e-15) {
-    ok = false;
-    *error << "Error: s2 not equal.\n";
+    suite.ok(false);
+    suite.err() << "Error: s2 not equal.\n";
   }
   if (std::abs(wr.standard_error2(2000) - standard_error2)>1e-15) {
-    ok = false;
-    *error << "Error: standard_error2 not equal.\n";
-  }
-  return ok;
-}
-
-
-bool multidim(std::ostream* error)
-{
-  bool ok = true;
-  *error << " testing regression::MultiDimensionalWeighted" << std::endl;
+    suite.ok(false);
+    suite.err() << "Error: standard_error2 not equal.\n";
+  }
+}
+
+
+void multidim(test::Suite& suite)
+{
+  suite.err() << " testing regression::MultiDimensionalWeighted" << std::endl;
   utility::Vector x(5); x(0)=1970; x(1)=1980; x(2)=1990; x(3)=2000; x(4)=2010;
   utility::Vector y(5); y(0)=12; y(1)=11; y(2)=14; y(3)=13; y(4)=15;
…
   z(1)=2000;
   z(2)=2000*2000;
-  if ( md.predict(z) != mdw.predict(z)){
-    ok = false;
-    *error << "Error: predict not equal\n"
+  if (!test::equal(md.predict(z), mdw.predict(z))){
+    suite.ok(false);
+    suite.err() << "Error: predict not equal\n"
           << "   weighted: " << mdw.predict(z) << "\n"
           << "   non-weighted: " << md.predict(z)
…
 
   if (std::abs(md.standard_error2(z)-mdw.standard_error2(z))>10e-6){
-    ok = false;
-    *error << "Error: standard_error2 not equal\n"
+    suite.ok(false);
+    suite.err() << "Error: standard_error2 not equal\n"
           << "   weighted: " << mdw.standard_error2(z) << "\n"
           << "   non-weighted: " << md.standard_error2(z)
…
   }
   if (std::abs(md.prediction_error2(z)-mdw.prediction_error2(z,1.0))>10e-7){
-    ok = false;
-    *error << "Error: prediction_error2 not equal\n"
+    suite.ok(false);
+    suite.err() << "Error: prediction_error2 not equal\n"
           << "   weighted: " << mdw.prediction_error2(z,1.0) << "\n"
           << "   non-weighted: " << md.prediction_error2(z)
…
   mdw.fit(data,y,w);
   if (std::abs(mdw.predict(z)-predict)>10e-10){
-    ok = false;
-    *error << "Error: predict not equal after rescaling.\n";
-    *error << "   predict = " << predict << " and after doubling weights.\n";
-    *error << "   predict = " << mdw.predict(z) << "\n";
+    suite.ok(false);
+    suite.err() << "Error: predict not equal after rescaling.\n";
+    suite.err() << "   predict = " << predict << " and after doubling weights.\n";
+    suite.err() << "   predict = " << mdw.predict(z) << "\n";
   }
   if (std::abs(mdw.prediction_error2(z,2)-prediction_error2)>10e-7){
-    ok = false;
-    *error << "Error: prediction_error2 not equal after rescaling.\n";
-    *error << "   predict_error2 = " << prediction_error2
+    suite.ok(false);
+    suite.err() << "Error: prediction_error2 not equal after rescaling.\n";
+    suite.err() << "   predict_error2 = " << prediction_error2
           << " and after doubling weights.\n";
-    *error << "   predict_error2 = " << mdw.prediction_error2(z,2) << "\n";
+    suite.err() << "   predict_error2 = " << mdw.prediction_error2(z,2) << "\n";
   }
   if (std::abs(mdw.s2(2)-s2)>10e-10){
-    ok = false;
-    *error << "Error: s2 not equal after rescaling.\n";
-    *error << "   s2 = " << s2 << " and after doubling weights.\n";
-    *error << "   s2 = " << mdw.s2(2) << "\n";
+    suite.ok(false);
+    suite.err() << "Error: s2 not equal after rescaling.\n";
+    suite.err() << "   s2 = " << s2 << " and after doubling weights.\n";
+    suite.err() << "   s2 = " << mdw.s2(2) << "\n";
   }
   if (std::abs(mdw.standard_error2(z)-standard_error2)>10e-7){
-    ok = false;
-    *error << "Error: standard_error2 not equal after rescaling.\n";
-    *error << "   standard_error2 = " << standard_error2
+    suite.ok(false);
+    suite.err() << "Error: standard_error2 not equal after rescaling.\n";
+    suite.err() << "   standard_error2 = " << standard_error2
           << " and after doubling weights.\n";
-    *error << "   standard_error2 = " << mdw.standard_error2(z) << "\n";
-  }
-
-  return ok;
+    suite.err() << "   standard_error2 = " << mdw.standard_error2(z) << "\n";
+  }
 }
 
…
   utility::Vector y(rl.y_predicted());
   for (size_t i=0; i<y.size(); i++)
-    if ( y(i)!=10.0){
+    if (!test::equal(y(i),10.0)){
       return false;
     }
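
The refactoring in this changeset replaces the per-test bool ok flag and the hand-managed std::ostream* error (pointing at std::cerr or /dev/null) with a shared test::Suite helper declared in the newly included Suite.h. Suite.h itself is not part of this diff; the sketch below is only an assumed, minimal illustration of the interface the updated test relies on: construction from argc/argv, err() for diagnostics, ok() for recording failures, return_value() for the exit status, and a tolerance-based test::equal comparison.

// Minimal sketch (assumption, for illustration only): the real Suite.h in
// trunk/test/ may differ from this.
#include <cmath>
#include <fstream>
#include <iostream>
#include <string>

namespace theplu { namespace yat { namespace test {

  class Suite {
  public:
    // "-v" on the command line routes messages to std::cerr;
    // otherwise they are discarded.
    Suite(int argc, char* argv[])
      : ok_(true), dev_null_("/dev/null"),
        verbose_(argc>1 && argv[1]==std::string("-v")) {}

    // stream for diagnostic output
    std::ostream& err(void)
    { return verbose_ ? std::cerr : static_cast<std::ostream&>(dev_null_); }

    // record the outcome of a check; once false, the suite stays failed
    void ok(bool b) { ok_ = ok_ && b; }

    // exit status for main()
    int return_value(void) const { return ok_ ? 0 : 1; }

  private:
    bool ok_;
    std::ofstream dev_null_;
    bool verbose_;
  };

  // tolerance-based comparison replacing exact == / std::abs checks
  inline bool equal(double a, double b, double tol=1e-10)
  { return std::abs(a-b) <= tol*(1.0+std::abs(a)+std::abs(b)); }

}}} // namespace theplu::yat::test

With something along those lines, main() simply constructs a Suite from argc/argv, streams diagnostics through suite.err(), records failures with suite.ok(false), and returns suite.return_value(), which is the pattern the new revision of regression_test.cc follows throughout.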