# Changeset 429

Ignore:
Timestamp:
Dec 8, 2005, 8:50:11 PM (17 years ago)
Message:

Separating weighted and non-weighted regression into different classes.

Location:
trunk
Files:
11 edited

Unmodified
Removed
• ## trunk/lib/statistics/Linear.cc

 r428 void Linear::fit(const gslapi::vector& x, const gslapi::vector& y) { statistics::AveragerPair ap; ap_.reset(); for (size_t i=0; i
• ## trunk/lib/statistics/Linear.h

 r428 /// the expected deviation from the line for a new data point. /// void predict(const double x, double& y, double& y_err) void predict(const double x, double& y, double& y_err); /// /// Function returning the coefficient of determination, /// i.e. fraction of variance explained by the linear model. /// @todo implement r2's calculation in fit function /// inline double r2(void) const { return r2_; }
• ## trunk/lib/statistics/Local.cc

 r412 #include #include #include #include //#include
• ## trunk/lib/statistics/Local.h

 r411 #include #include #include #include /// type of \a kernel. /// inline Local(OneDimensional& r, Kernel& k) : kernel_(&k), regressor_(&r) {} inline Local(OneDimensionalWeighted& r, Kernel& k) : kernel_(&k), regressor_(&r) {} /// std::vector > data_; Kernel* kernel_; OneDimensional* regressor_; OneDimensionalWeighted* regressor_; std::vector x_; std::vector y_;
• ## trunk/lib/statistics/Makefile.am

 r414 libstatistics_la_SOURCES = \ Averager.cc AveragerPair.cc AveragerWeighted.cc Fisher.cc FoldChange.cc \ Histogram.cc Kernel.cc KernelBox.cc KernelTriCube.cc Linear.cc Local.cc \ MultiDimensional.cc Naive.cc \ Histogram.cc Kernel.cc KernelBox.cc KernelTriCube.cc Linear.cc \ LinearWeighted.cc Local.cc \ MultiDimensional.cc Naive.cc NaiveWeighted.cc OneDimensional.cc\ Pearson.cc Polynomial.cc ROC.cc Score.cc tScore.cc utility.cc include_statistics_HEADERS = \ Averager.h AveragerPair.h AveragerWeighted.h Fisher.h FoldChange.h \ Histogram.h Kernel.h KernelBox.h KernelTriCube.h Linear.h Local.h \ MultiDimensional.h Naive.h \ OneDimensional.h Pearson.h Polynomial.h ROC.h Score.h tScore.h utility.h Histogram.h Kernel.h KernelBox.h KernelTriCube.h Linear.h LinearWeighted.h \ Local.h MultiDimensional.h Naive.h NaiveWeighted.h OneDimensional.h \ OneDimensionalWeighted.h Pearson.h Polynomial.h ROC.h Score.h tScore.h \ utility.h
• ## trunk/lib/statistics/Naive.cc

 r383 void Naive::fit(const gslapi::vector& x, const gslapi::vector& y) { Averager a; ap_.reset(); for (size_t i=0; i
• ## trunk/lib/statistics/Naive.h

 r389 #include #include #include #include /// /// This function computes the best-fit for the naive model \f$y /// = m \f$ from vectors \a x and \a y, by minimizing \f$\sum /// w_i(y_i-m)^2 \f$. The weight \f$w_i \f$ is proportional to /// the inverse of the variance for \f$y_i \f$ /// void fit(const gslapi::vector& x, const gslapi::vector& y, const gslapi::vector& w); /// /// Function predicting value using the naive model. \a y_err is /// the expected deviation from the line for a new data point. The /// weight for the new point can be specified. A smaller weight /// means larger error. The error has two components: the variance /// of point and error in estimation of m_. /// error has two components: the variance of point and error in /// estimation of the mean. /// void predict(const double x, double& y, double& y_err, const double w=1) ; void predict(const double x, double& y, double& y_err) ; /// /// @return prediction value and parameters /// std::ostream& print(std::ostream&) const; /// /// @return header for print()
• ## trunk/lib/statistics/OneDimensional.h

 r428 #ifndef _theplu_statistics_regression_onedimensioanl_ #define _theplu_statistics_regression_onedimensioanl_ #include #include /// Default Constructor. /// inline OneDimensional(void) : x_(0.0), y_(0.0), y_err_(0.0) {} inline OneDimensional(void) {} /// /// /// This function computes the best-fit given a model (see /// specific class for details) by minimizing \f$/// \sum{w_i(\hat{y_i}-y_i)^2} \f$, where \f$\hat{y} \f$ is the /// fitted value. The weight \f$w_i \f$ is should be proportional /// to the inverse of the variance for \f$y_i \f$ /// virtual void fit(const gslapi::vector& x, const gslapi::vector& y, const gslapi::vector& w)=0; /// /// function predicting in one point /// virtual void predict(const double x, double& y, double& y_err, const double w=1) =0; virtual void predict(const double x, double& y, double& y_err) const=0; /// /// @return prediction value and parameters /// /// virtual std::ostream& print(std::ostream&) const=0; /// @return stream of prediction values and parameters /// std::ostream& print(std::ostream&,const double min, const double max, const u_int n) const; /// protected: /// /// x for predicted point /// Averager for pair of x and y /// //double x_; /// /// y for predicted point /// //double y_; /// /// estimated error of predicted point (in y). /// //double y_err_; }; AveragerPair ap_; }; }}} // of namespaces regression, statisitcs and thep
• ## trunk/lib/statistics/Polynomial.h

 r389 gslapi::vector fit_parameters(void) { return md_.fit_parameters(); } inline void fit(const gslapi::vector& x, const gslapi::vector& y, const gslapi::vector& w) { assert(0); } /// /// @todo implement /// inline void predict(const double x, double& y, double& y_err) const { assert(0); } inline void predict(const double x, double& y, double& y_err, const double w=1) { assert(0); } inline std::ostream& print(std::ostream& s) const { assert(0); return s; } /// /// @todo implement /// inline std::ostream& print_header(std::ostream& s) const { assert(0); return s; } { return s; }
• ## trunk/lib/svm/Kernel.h

 r350 /// ///   @todo Constructor taking the \a data matrix, the KernelFunction and a ///   \a weight matrix as input. Each column in the data matrix ///   corresponds to one sample. Kernel(const gslapi::matrix& data, const KernelFunction&, const gslapi::matrix& weight); /// ///   Destructor ///
• ## trunk/test/regression_test.cc

 r399 #include #include #include #include #include #include #include using namespace theplu; bool Local_test(statistics::regression::OneDimensional&, bool Local_test(statistics::regression::OneDimensionalWeighted&, statistics::regression::Kernel&); bool ok = true; // test data for Linear and Naive // test data for Linear and Naive (Weighted and non-weighted) gslapi::vector x(4); x(0)=1970; x(1)=1980; x(2)=1990; x(3)=2000; gslapi::vector y(4); y(0)=12;   y(1)=11;   y(2)=14;   y(3)=13; gslapi::vector w(4); w(0)=0.1;  w(1)=0.2;  w(2)=0.3;  w(3)=0.4; // testing regression::Linear statistics::regression::Linear linear; linear.fit(x,y,w); // testing regression::LinearWeighted statistics::regression::LinearWeighted linear_w; linear_w.fit(x,y,w); double y_predicted=0; double y_predicted_err=0; linear.predict(1990,y_predicted,y_predicted_err); linear_w.predict(1990,y_predicted,y_predicted_err); if (y_predicted!=12.8){ *error << "regression_Linear: cannot reproduce fit." << std::endl; } // testing regression::Naive statistics::regression::Naive naive; naive.fit(x,y,w); // testing regression::NaiveWeighted statistics::regression::NaiveWeighted naive_w; naive_w.fit(x,y,w); y_predicted=0; y_predicted_err=0; naive.predict(0.0,y_predicted,y_predicted_err); naive_w.predict(0.0,y_predicted,y_predicted_err); if (y_predicted!=(0.1*12+0.2*11+0.3*14+0.4*13)) { *error << "regression_Naive: cannot reproduce fit." << std::endl; *error << "regression_NaiveWeighted: cannot reproduce fit." << std::endl; ok=false; } // testing regression::Local statistics::regression::KernelBox kb; statistics::regression::Linear rl; statistics::regression::LinearWeighted rl; if (!Local_test(rl,kb)) { *error << "regression_Local: Linear cannot reproduce fit." << std::endl; ok=false; } statistics::regression::Naive rn; statistics::regression::NaiveWeighted rn; if (!Local_test(rn,kb)) { *error << "regression_Local: Naive cannot reproduce fit." 
<< std::endl; bool Local_test(statistics::regression::OneDimensional& r, bool Local_test(statistics::regression::OneDimensionalWeighted& r, statistics::regression::Kernel& k) {
Note: See TracChangeset for help on using the changeset viewer.