Changeset 1210
- Timestamp:
- Mar 6, 2008, 5:00:44 PM (15 years ago)
- Location:
- trunk/test
- Files:
- 11 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/test/alignment_test.cc
r1121 r1210 69 69 double match(const double x, const double y, const double s) 70 70 { 71 return 2*gsl_cdf_gaussian_Q( fabs(x-y),sqrt(2)*s);71 return 2*gsl_cdf_gaussian_Q(std::abs(x-y),sqrt(2)*s); 72 72 } 73 73 -
trunk/test/averager_test.cc
r1120 r1210 107 107 108 108 109 if ( fabs(a.variance() - a.std()*a.std())>109 if ( std::abs(a.variance() - a.std()*a.std())> 110 110 std::numeric_limits<double>().round_error() ){ 111 111 ok=false; … … 189 189 for (int i=0; i<10; i++) 190 190 ap.add(static_cast<double>(i),i); 191 if ( fabs(ap.correlation()-1)>tol){191 if (std::abs(ap.correlation()-1)>tol){ 192 192 ok=false; 193 193 *error << "correlation: " << ap.correlation() << std::endl; … … 252 252 // std::cout << (a.n()==b.n()) << std::endl; 253 253 // std::cout << (a.mean()==b.mean()) << std::endl; 254 // std::cout << ( fabs(a.variance()-b.variance()<1e-15)) << std::endl;254 // std::cout << (std::abs(a.variance()-b.variance()<1e-15)) << std::endl; 255 255 return (a.n()==b.n() && a.mean()==b.mean() && 256 fabs(a.variance()-b.variance()<1e-15));256 std::abs(a.variance()-b.variance()<1e-15)); 257 257 } 258 258 … … 261 261 { 262 262 bool equal = true; 263 if ( fabs(a.mean()-b.mean())>tol){263 if ( std::abs(a.mean()-b.mean())>tol){ 264 264 equal=false; 265 265 *error << "mean:\t" << a.mean() << "\t" << b.mean() << std::endl; 266 266 } 267 if ( fabs(a.variance()-b.variance())>tol ) {267 if ( std::abs(a.variance()-b.variance())>tol ) { 268 268 equal=false; 269 269 *error << "error for variance:\t" << a.variance() << " " << b.variance() 270 270 << std::endl; 271 271 } 272 if ( fabs(a.standard_error()-b.standard_error())>tol ) {272 if ( std::abs(a.standard_error()-b.standard_error())>tol ) { 273 273 equal =false; 274 274 *error << "error for standard error:\t" << std::endl; … … 281 281 { 282 282 bool equal = true; 283 if ( fabs(a.mean()-b.mean())>tol){283 if ( std::abs(a.mean()-b.mean())>tol){ 284 284 equal=false; 285 285 *error << "mean:\t" << a.mean() << "\t" << b.mean() << std::endl; 286 286 } 287 if ( fabs(a.variance()-b.variance())>tol ) {287 if ( std::abs(a.variance()-b.variance())>tol ) { 288 288 equal=false; 289 289 *error << "error for variance:\t" << a.variance() << " " << b.variance() 290 290 << std::endl; 
291 291 } 292 if ( fabs(a.standard_error()-b.standard_error())>tol ) {292 if ( std::abs(a.standard_error()-b.standard_error())>tol ) { 293 293 equal =false; 294 294 *error << "error for standard error:\t" << std::endl; … … 301 301 { 302 302 bool ok = true; 303 if ( fabs(a.covariance()-b.covariance())>tol){303 if ( std::abs(a.covariance()-b.covariance())>tol){ 304 304 ok=false; 305 305 *error << "error covariance: " << a.covariance() << "\t" 306 306 << b.covariance() << std::endl; 307 307 } 308 if ( fabs(a.correlation()-b.correlation())>tol ) {308 if ( std::abs(a.correlation()-b.correlation())>tol ) { 309 309 ok=false; 310 310 *error << "error correlation" << std::endl; … … 317 317 { 318 318 bool ok = true; 319 if ( fabs(a.covariance()-b.covariance())>tol){319 if ( std::abs(a.covariance()-b.covariance())>tol){ 320 320 ok=false; 321 321 *error << "error covariance: " << a.covariance() << "\t" 322 322 << b.covariance() << std::endl; 323 323 } 324 if ( fabs(a.correlation()-b.correlation())>tol ) {324 if ( std::abs(a.correlation()-b.correlation())>tol ) { 325 325 ok=false; 326 326 *error << "error correlation" << std::endl; … … 336 336 { 337 337 bool ok = true; 338 if ( fabs(a.covariance()-b.covariance())>tol){338 if ( std::abs(a.covariance()-b.covariance())>tol){ 339 339 ok=false; 340 340 *error << "error covariance: " << a.covariance() << "\t" 341 341 << b.covariance() << std::endl; 342 342 } 343 if ( fabs(a.correlation()-b.correlation())>tol ) {343 if ( std::abs(a.correlation()-b.correlation())>tol ) { 344 344 ok=false; 345 345 *error << "error correlation" << std::endl; -
trunk/test/distance_test.cc
r1121 r1210 60 60 statistics::EuclideanDistance eucl_dist; 61 61 double dist=eucl_dist(a.begin(),a.end(),b.begin()); 62 if( fabs(dist-2.23607)>tolerance) {62 if(std::abs(dist-2.23607)>tolerance) { 63 63 *error << "Error in unweighted Euclidean distance " << std::endl; 64 64 ok=false; … … 67 67 statistics::PearsonDistance pear_dist; 68 68 dist=pear_dist(a.begin(),a.end(),b.begin()); 69 if( fabs(dist-1.5)>tolerance) {69 if(std::abs(dist-1.5)>tolerance) { 70 70 *error << "Error in unweighted Pearson distance " << std::endl; 71 71 ok=false; … … 86 86 dist=eucl_dist(aw.begin(),aw.end(),bw.begin()); 87 87 88 if( fabs(dist-sqrt(6))>tolerance) {88 if(std::abs(dist-sqrt(6))>tolerance) { 89 89 *error << "Error in weighted Euclidean distance " << std::endl; 90 90 ok=false; … … 93 93 dist=pear_dist(aw.begin(),aw.end(),bw.begin()); 94 94 95 if( fabs(dist-2)>tolerance) {95 if(std::abs(dist-2)>tolerance) { 96 96 *error << "Error in weighted Pearson distance " << std::endl; 97 97 ok=false; … … 106 106 107 107 dist=eucl_dist(sa.begin(),sa.end(),sb.begin()); 108 if( fabs(dist-2.23607)>tolerance) {108 if(std::abs(dist-2.23607)>tolerance) { 109 109 *error << "Error in distance for std::vector " << std::endl; 110 110 ok=false; … … 115 115 std::copy(sa.begin(),sa.end(),std::back_inserter<std::list<double> >(la)); 116 116 dist=eucl_dist(la.begin(),la.end(),sb.begin()); 117 if( fabs(dist-2.23607)>tolerance) {117 if(std::abs(dist-2.23607)>tolerance) { 118 118 *error << "Error in distance for std::list " << std::endl; 119 119 ok=false; -
trunk/test/feature_selection_test.cc
r1134 r1210 99 99 100 100 classifier::DataLookupWeighted1D row(dataviewweighted,best_feature,true); 101 double score_diff= fabs(snr.score(targets,row)-1.47804);101 double score_diff=std::abs(snr.score(targets,row)-1.47804); 102 102 if(score_diff>0.00001) { 103 103 *error << "\nERROR: Best score not what expected!\n" << std::endl; -
trunk/test/kernel_test.cc
r1134 r1210 152 152 for(u_int i=0;i<control.rows();i++) 153 153 for(u_int j=0;j<control.columns();j++) 154 if ( fabs(kernel(i,j)-control(i,j))>error_bound)154 if (std::abs(kernel(i,j)-control(i,j))>error_bound) 155 155 return false; 156 156 … … 191 191 for(u_int i=0;i<control.rows();i++) 192 192 for(u_int j=0;j<control.columns();j++) 193 if ( fabs(kernel(i,j)-control(i,j))>error_bound)193 if (std::abs(kernel(i,j)-control(i,j))>error_bound) 194 194 return false; 195 195 -
trunk/test/knn_test.cc
r1157 r1210 45 45 for (size_t i=0; i<a.rows(); i++){ 46 46 for (size_t j=0; j<a.columns(); j++){ 47 sl += fabs(a(i,j)-b(i,j));47 sl += std::abs(a(i,j)-b(i,j)); 48 48 } 49 49 } … … 179 179 if (!(std::isinf(prediction1(0,0)) && std::isinf(prediction1(0,1)) && 180 180 std::isinf(prediction1(1,2)) && 181 fabs(prediction1(1,3)-(1.0/3.67423461417))<slack_bound &&182 fabs(prediction1(1,0)-(1.0/2.82842712475+1.0/2.44948974278))<slack_bound)){181 std::abs(prediction1(1,3)-(1.0/3.67423461417))<slack_bound && 182 std::abs(prediction1(1,0)-(1.0/2.82842712475+1.0/2.44948974278))<slack_bound)){ 183 183 *error << "Difference to expected prediction too large\n"; 184 184 ok = false; … … 202 202 if (!(std::isnan(prediction1(0,0)) && std::isnan(prediction1(0,1)) && 203 203 std::isnan(prediction1(0,2)) && std::isnan(prediction1(0,3)) && 204 fabs(prediction1(1,0)-2.0)<slack_bound &&205 fabs(prediction1(1,1)-2.0)<slack_bound &&206 fabs(prediction1(1,2)-2.0)<slack_bound &&207 fabs(prediction1(1,3)-2.0)<slack_bound)) {204 std::abs(prediction1(1,0)-2.0)<slack_bound && 205 std::abs(prediction1(1,1)-2.0)<slack_bound && 206 std::abs(prediction1(1,2)-2.0)<slack_bound && 207 std::abs(prediction1(1,3)-2.0)<slack_bound)) { 208 208 *error << "Difference to expected prediction too large\n"; 209 209 ok = false; -
trunk/test/ncc_test.cc
r1158 r1210 49 49 for (size_t i=0; i<a.rows(); i++){ 50 50 for (size_t j=0; j<a.columns(); j++){ 51 sl += fabs(a(i,j)-b(i,j));51 sl += std::abs(a(i,j)-b(i,j)); 52 52 } 53 53 } … … 172 172 } 173 173 if(!(std::isnan(prediction1(0,0)) && 174 fabs(prediction1(1,0)-sqrt(3.0))<slack_bound &&175 fabs(prediction1(0,1)-sqrt(3.0))<slack_bound &&176 fabs(prediction1(1,1)-sqrt(15.0))<slack_bound &&177 fabs(prediction1(0,2)-sqrt(27.0))<slack_bound)) {174 std::abs(prediction1(1,0)-sqrt(3.0))<slack_bound && 175 std::abs(prediction1(0,1)-sqrt(3.0))<slack_bound && 176 std::abs(prediction1(1,1)-sqrt(15.0))<slack_bound && 177 std::abs(prediction1(0,2)-sqrt(27.0))<slack_bound)) { 178 178 ok=false; 179 179 *error << "Test failed: predictions incorrect" << std::endl; … … 190 190 std::isnan(prediction1(0,2)) && 191 191 std::isnan(prediction1(1,0)) && 192 fabs(prediction1(0,1))<slack_bound &&193 fabs(prediction1(1,2))<slack_bound &&194 fabs(prediction1(1,3))<slack_bound &&195 fabs(prediction1(0,3)-2.0)<slack_bound &&196 fabs(prediction1(1,1)-2.0)<slack_bound)) {192 std::abs(prediction1(0,1))<slack_bound && 193 std::abs(prediction1(1,2))<slack_bound && 194 std::abs(prediction1(1,3))<slack_bound && 195 std::abs(prediction1(0,3)-2.0)<slack_bound && 196 std::abs(prediction1(1,1)-2.0)<slack_bound)) { 197 197 ok=false; 198 198 *error << "Test failed: predictions incorrect" << std::endl; … … 214 214 std::isnan(prediction1(0,2)) && std::isnan(prediction1(0,3)) && 215 215 std::isnan(prediction1(1,0)) && 216 fabs(prediction1(1,1)-2.0)<slack_bound &&217 fabs(prediction1(1,2))<slack_bound &&218 fabs(prediction1(1,3))<slack_bound)) {216 std::abs(prediction1(1,1)-2.0)<slack_bound && 217 std::abs(prediction1(1,2))<slack_bound && 218 std::abs(prediction1(1,3))<slack_bound)) { 219 219 *error << "Difference to expected prediction too large\n"; 220 220 ok = false; -
trunk/test/nni_test.cc
r1121 r1210 75 75 for (unsigned int i=0; i<control.rows(); i++) 76 76 for (unsigned int j=0; j<control.columns(); j++) 77 if ( fabs(control(i,j))>error_bound) {77 if (std::abs(control(i,j))>error_bound) { 78 78 if (print) 79 79 std::cerr << "kNNI FAILED, error on row " << i << " and " 80 << "column " << j << " is " << fabs(control(i,j))80 << "column " << j << " is " << std::abs(control(i,j)) 81 81 << ". Expected less than " << error_bound << std::endl; 82 82 ok=false; // calculation result out of accepted error bounds … … 106 106 for (unsigned int i=0; i<control.rows(); i++) 107 107 for (unsigned int j=0; j<control.columns(); j++) 108 if ( fabs(control(i,j))>error_bound) {108 if (std::abs(control(i,j))>error_bound) { 109 109 if (print) 110 110 std::cerr << "WeNNI FAILED, error on row " << i << " and " 111 << "column " << j << " is " << fabs(control(i,j))111 << "column " << j << " is " << std::abs(control(i,j)) 112 112 << ". Expected less than " << error_bound << std::endl; 113 113 ok=false; // calculation result out of accepted error bounds … … 137 137 for (unsigned int i=0; i<control.rows(); i++) 138 138 for (unsigned int j=0; j<control.columns(); j++) 139 if ( fabs(control(i,j))>error_bound) {139 if (std::abs(control(i,j))>error_bound) { 140 140 if (print) 141 141 std::cerr << "WeNNI binary weight test FAILED.\nError on row " << i 142 << " and column " << j << " is " << fabs(control(i,j))142 << " and column " << j << " is " << std::abs(control(i,j)) 143 143 << ". Expected less than " << error_bound << std::endl; 144 144 ok=false; // calculation result out of accepted error bounds -
trunk/test/regression_test.cc
r1183 r1210 86 86 regression::Polynomial polynomial(1); 87 87 polynomial.fit(x,y); 88 if ( fabs(linear.beta()-polynomial.fit_parameters()(1))>0.0001 ){88 if ( std::abs(linear.beta()-polynomial.fit_parameters()(1))>0.0001 ){ 89 89 *error << "error: beta and fit_parameters(1) not equal" << std::endl; 90 90 *error << " beta = " << linear.beta() << std::endl; … … 93 93 ok = false; 94 94 } 95 if ( fabs(polynomial.fit_parameters()(0)-linear.alpha()+95 if ( std::abs(polynomial.fit_parameters()(0)-linear.alpha()+ 96 96 linear.beta()*1985)>0.0001){ 97 97 *error << "error: fit_parameters(0) = " … … 101 101 ok = false; 102 102 } 103 if ( fabs(polynomial.chisq()-linear.chisq())>0.0001){103 if ( std::abs(polynomial.chisq()-linear.chisq())>0.0001){ 104 104 *error << "error: chisq not same in linear and polynomial(1)" 105 105 << std::endl; 106 106 ok = false; 107 107 } 108 if ( fabs(polynomial.predict(1.0)-linear.predict(1.0))>0.0001){108 if ( std::abs(polynomial.predict(1.0)-linear.predict(1.0))>0.0001){ 109 109 *error << "error: predict not same in linear and polynomial(1)" 110 110 << std::endl; 111 111 ok = false; 112 112 } 113 if ( fabs(polynomial.standard_error2(1985)-linear.standard_error2(1985))113 if ( std::abs(polynomial.standard_error2(1985)-linear.standard_error2(1985)) 114 114 >0.0001){ 115 115 *error << "error: standard_error not same in linear and polynomial(1)" … … 126 126 double y_predicted = linear_w.predict(1990); 127 127 double y_predicted_err = linear_w.prediction_error2(1990); 128 if ( fabs(y_predicted-12.8)>0.001){128 if (std::abs(y_predicted-12.8)>0.001){ 129 129 *error << "error: cannot reproduce fit." 
<< std::endl; 130 130 *error << "predicted value: " << y_predicted << " expected 12.8" … … 139 139 regression::PolynomialWeighted polynomial_w(1); 140 140 polynomial_w.fit(x,y,w); 141 if ( fabs(linear_w.beta()-polynomial_w.fit_parameters()(1))>10e-7 ){141 if ( std::abs(linear_w.beta()-polynomial_w.fit_parameters()(1))>10e-7 ){ 142 142 *error << "error: beta and fit_parameters(1) not equal" << std::endl; 143 143 *error << " beta = " << linear_w.beta() << std::endl; … … 146 146 ok = false; 147 147 } 148 if ( fabs(polynomial_w.fit_parameters()(0)-linear_w.alpha()+148 if ( std::abs(polynomial_w.fit_parameters()(0)-linear_w.alpha()+ 149 149 linear_w.beta()*1990)>0.0001){ 150 150 *error << "error: fit_parameters(0) = " … … 154 154 ok = false; 155 155 } 156 if ( fabs(polynomial_w.s2()-linear_w.s2())>0.0001){156 if ( std::abs(polynomial_w.s2()-linear_w.s2())>0.0001){ 157 157 *error << "error: chisq not same in linear and polynomial(1)" 158 158 << std::endl; 159 159 ok = false; 160 160 } 161 if ( fabs(polynomial_w.predict(1.0)-linear_w.predict(1.0))>0.0001){161 if ( std::abs(polynomial_w.predict(1.0)-linear_w.predict(1.0))>0.0001){ 162 162 *error << "error: predict not same in linear and polynomial(1)" 163 163 << std::endl; 164 164 ok = false; 165 165 } 166 if ( fabs(polynomial_w.standard_error2(1985)-linear_w.standard_error2(1985))166 if ( std::abs(polynomial_w.standard_error2(1985)-linear_w.standard_error2(1985)) 167 167 >0.0001){ 168 168 *error << "error: standard_error not same in linear and polynomial(1)" … … 218 218 polynomialfit.fit(x,ln_y); 219 219 utility::Vector fit=polynomialfit.fit_parameters(); 220 if ( fabs(fit(0)-1.012229646706 + fit(1)-0.012561322528 +220 if (std::abs(fit(0)-1.012229646706 + fit(1)-0.012561322528 + 221 221 fit(2)+1.159674470130)>1e-11) { 222 222 *error << "regression_Polynomial: cannot reproduce fit." 
<< std::endl; … … 267 267 r.fit(x,y); 268 268 rw.fit(x,y,w); 269 if ( fabs(r.predict(2000) - rw.predict(2000))>1e-15) {269 if (std::abs(r.predict(2000) - rw.predict(2000))>1e-15) { 270 270 ok = false; 271 271 *error << "Error: predict not equal\n" … … 274 274 << std::endl; 275 275 } 276 if ( fabs(r.s2()-rw.s2(1.0))>10E-7){276 if (std::abs(r.s2()-rw.s2(1.0))>10E-7){ 277 277 ok = false; 278 278 *error << "Error: s2 not equal non-weighted version." << std::endl; … … 280 280 *error << "non-weighted s2 = " << r.s2() << std::endl; 281 281 } 282 if ( fabs(r.standard_error2(2000)-rw.standard_error2(2000))>10e-7){282 if (std::abs(r.standard_error2(2000)-rw.standard_error2(2000))>10e-7){ 283 283 ok = false; 284 284 *error << "Error: standard_error not equal non-weighted version." 285 285 << std::endl; 286 286 } 287 if ( fabs(r.r2()-rw.r2())>10E-7){287 if (std::abs(r.r2()-rw.r2())>10E-7){ 288 288 ok = false; 289 289 *error << "Error: r2 not equal non-weighted version." << std::endl; … … 291 291 *error << "non-weighted r2 = " << r.r2() << std::endl; 292 292 } 293 if ( fabs(r.prediction_error2(2000)-rw.prediction_error2(2000,1))>10e-7){293 if (std::abs(r.prediction_error2(2000)-rw.prediction_error2(2000,1))>10e-7){ 294 294 ok = false; 295 295 *error << "Error: prediction_error2 not equal non-weighted version.\n" … … 318 318 w*=2; 319 319 wr.fit(x,y,w); 320 if ( fabs(wr.predict(2000)-predict)>10e-11){320 if (std::abs(wr.predict(2000)-predict)>10e-11){ 321 321 ok = false; 322 322 *error << "Error: predict not equal after rescaling.\n"; … … 325 325 *error << " predict = " << wr.predict(2000) << "\n"; 326 326 } 327 if ( fabs(wr.s2(2)-s2)>10e-11){327 if (std::abs(wr.s2(2)-s2)>10e-11){ 328 328 ok = false; 329 329 *error << "Error: s2 not equal after rescaling.\n"; … … 332 332 *error << "difference " << s2-wr.s2(2.0) << std::endl; 333 333 } 334 if ( fabs(wr.standard_error2(2000)-standard_error2)>10e-6){334 if (std::abs(wr.standard_error2(2000)-standard_error2)>10e-6){ 335 335 ok = 
false; 336 336 *error << "Error: standard_error2 not equal after rescaling.\n"; … … 342 342 << std::endl; 343 343 } 344 if ( fabs(wr.r2()-r2)>10e-6){344 if (std::abs(wr.r2()-r2)>10e-6){ 345 345 ok = false; 346 346 *error << "Error: r2 not equal after rescaling.\n"; 347 347 } 348 if ( fabs(wr.prediction_error2(2000,2)-prediction_error2)>10e-6){348 if (std::abs(wr.prediction_error2(2000,2)-prediction_error2)>10e-6){ 349 349 ok = false; 350 350 *error << "Error: prediction_error2 not equal after rescaling.\n"; … … 382 382 383 383 wr.fit(x2,y2,w2); 384 if ( fabs(wr.predict(2000) - predict)>1e-10) {384 if (std::abs(wr.predict(2000) - predict)>1e-10) { 385 385 ok = false; 386 386 *error << "Error: predict not equal.\n"; … … 390 390 391 391 } 392 if ( fabs(wr.prediction_error2(2000) - prediction_error2)>1e-13) {392 if (std::abs(wr.prediction_error2(2000) - prediction_error2)>1e-13) { 393 393 ok = false; 394 394 *error << "Error: prediction_error2 not equal.\n"; 395 395 } 396 if ( fabs(wr.r2() - r2)>1e-15) {396 if (std::abs(wr.r2() - r2)>1e-15) { 397 397 ok = false; 398 398 *error << "Error: r2 not equal.\n"; … … 400 400 *error << " r2: " << wr.r2() << "\n"; 401 401 } 402 if ( fabs(wr.s2() - s2)>1e-15) {402 if (std::abs(wr.s2() - s2)>1e-15) { 403 403 ok = false; 404 404 *error << "Error: s2 not equal.\n"; 405 405 } 406 if ( fabs(wr.standard_error2(2000) - standard_error2)>1e-15) {406 if (std::abs(wr.standard_error2(2000) - standard_error2)>1e-15) { 407 407 ok = false; 408 408 *error << "Error: standard_error2 not equal.\n"; … … 441 441 } 442 442 443 if ( fabs(md.standard_error2(z)-mdw.standard_error2(z))>10e-6){443 if (std::abs(md.standard_error2(z)-mdw.standard_error2(z))>10e-6){ 444 444 ok = false; 445 445 *error << "Error: standard_error2 not equal\n" … … 448 448 << std::endl; 449 449 } 450 if ( fabs(md.prediction_error2(z)-mdw.prediction_error2(z,1.0))>10e-7){450 if (std::abs(md.prediction_error2(z)-mdw.prediction_error2(z,1.0))>10e-7){ 451 451 ok = false; 452 452 
*error << "Error: prediction_error2 not equal\n" … … 465 465 w*=2; 466 466 mdw.fit(data,y,w); 467 if ( fabs(mdw.predict(z)-predict)>10e-10){467 if (std::abs(mdw.predict(z)-predict)>10e-10){ 468 468 ok = false; 469 469 *error << "Error: predict not equal after rescaling.\n"; … … 471 471 *error << " predict = " << mdw.predict(z) << "\n"; 472 472 } 473 if ( fabs(mdw.prediction_error2(z,2)-prediction_error2)>10e-7){473 if (std::abs(mdw.prediction_error2(z,2)-prediction_error2)>10e-7){ 474 474 ok = false; 475 475 *error << "Error: prediction_error2 not equal after rescaling.\n"; … … 478 478 *error << " predict_error2 = " << mdw.prediction_error2(z,2) << "\n"; 479 479 } 480 if ( fabs(mdw.s2(2)-s2)>10e-10){480 if (std::abs(mdw.s2(2)-s2)>10e-10){ 481 481 ok = false; 482 482 *error << "Error: s2 not equal after rescaling.\n"; … … 484 484 *error << " s2 = " << mdw.s2(2) << "\n"; 485 485 } 486 if ( fabs(mdw.standard_error2(z)-standard_error2)>10e-7){486 if (std::abs(mdw.standard_error2(z)-standard_error2)>10e-7){ 487 487 ok = false; 488 488 *error << "Error: standard_error2 not equal after rescaling.\n"; -
trunk/test/statistics_test.cc
r1120 r1210 52 52 double tolerance=1e-10; 53 53 double skewness_gsl=statistics::skewness(gsl_vec); 54 if ( fabs(skewness_gsl)>tolerance)54 if (std::abs(skewness_gsl)>tolerance) 55 55 return -1; 56 56 double kurtosis_gsl=statistics::kurtosis(gsl_vec); 57 if ( fabs(kurtosis_gsl+1.5616363636363637113)>tolerance)57 if (std::abs(kurtosis_gsl+1.5616363636363637113)>tolerance) 58 58 return -1; 59 59 return 0; -
trunk/test/svm_test.cc
r1133 r1210 101 101 102 102 double tol=1e-6; 103 if ( fabs(classifier2.alpha()(1)-2)>tol ||104 fabs(classifier2.alpha()(2)-2)>tol){103 if (std::abs(classifier2.alpha()(1)-2)>tol || 104 std::abs(classifier2.alpha()(2)-2)>tol){ 105 105 *error << "wrong alpha" << std::endl; 106 106 *error << "alpha: " << classifier2.alpha() << std::endl;
Note: See TracChangeset for help on using the changeset viewer.