Changeset 767
- Timestamp:
- Feb 22, 2007, 4:14:40 PM (17 years ago)
- Location:
- trunk/yat
- Files:
-
- 51 edited
Legend:
- Unmodified
- Added
- Removed
trunk/yat/classifier/CrossValidationSampler.h
r680 r767 34 34 35 35 /// 36 /// Class splitting a set into training set and validation set in a 36 /// @brief Class splitting a set into training set and validation 37 /// set in a 37 38 /// crossvalidation manner. This is done in a balanced way, meaning 38 39 /// the proportions between the classes in the trainingset is close -
trunk/yat/classifier/DataLookup1D.h
r757 r767 26 26 27 27 #include <iostream> 28 #include <vector> 28 29 29 30 namespace theplu { 30 31 namespace yat { 32 namespace utility { 33 class vector; 34 } 31 35 namespace classifier { 32 36 … … 34 38 35 39 /// 36 /// Class for general vector view40 /// @brief Class for general vector view. 37 41 /// 38 42 … … 63 67 DataLookup1D(const size_t size, const double value=0); 64 68 69 /** 70 @brief Create general view from utility::vector 71 72 Constructor creates a proper MatrixLookup that object can view 73 into. Object is owner of this underlying MatrixLookup. Object fulfills 74 \f$ x(i) = vec(index(i)) \f$ 75 */ 76 DataLookup1D(const utility::vector& vec, const std::vector<size_t>& index); 77 65 78 /// 66 /// 79 /// @brief Destructor deletes underlying DataLookup2D if object is owner 67 80 /// 68 81 virtual ~DataLookup1D(); -
trunk/yat/classifier/DataLookupWeighted1D.h
r747 r767 35 35 36 36 /// 37 /// Class for general weighted vector view.37 /// @brief Class for general weighted vector view. 38 38 /// 39 39 /// @see MatrixLookupWeighted -
trunk/yat/classifier/EnsembleBuilder.h
r736 r767 40 40 41 41 /// 42 /// Class for ensembles of supervised classifiers42 /// @brief Class for ensembles of supervised classifiers 43 43 /// 44 44 -
trunk/yat/classifier/GaussianKernelFunction.h
r747 r767 36 36 37 37 /// 38 /// Class for Gaussian kernel calculations.38 /// @brief Class for Gaussian kernel calculations. 39 39 /// 40 40 -
trunk/yat/classifier/IRRank.h
r680 r767 33 33 34 34 /// 35 /// Functor retrieving minus rank from a InputRanker to36 /// builda ConsensusInputRanker.35 /// @brief Functor retrieving minus rank from a InputRanker to build 36 /// a ConsensusInputRanker. 37 37 /// 38 38 class IRRank : public IRRetrieve -
trunk/yat/classifier/IRRetrieve.h
r680 r767 32 32 33 33 /// 34 /// Abstractclass for retrieving information from a InputRanker to34 /// @brief Interface class for retrieving information from a InputRanker to 35 35 /// build a ConsensusInputRanker. 36 36 /// -
trunk/yat/classifier/InputRanker.h
r720 r767 40 40 41 41 /// 42 /// Class for ranking rows in a matrix, using a Score and a42 /// @brief Class for ranking rows in a matrix, using a Score and a 43 43 /// target vector. 44 44 /// -
trunk/yat/classifier/Kernel.h
r749 r767 39 39 40 40 /// 41 /// @brief Abstract Base Class for Kernels.41 /// @brief Interface Class for Kernels. 42 42 /// 43 43 /// Class taking care of the \f$ NxN \f$ kernel matrix, where \f$ N \f$ -
trunk/yat/classifier/KernelFunction.h
r680 r767 32 32 33 33 /// 34 /// Virtual Class calculating kernel matrix.34 /// @brief Interface class calculating elements in Kernel. 35 35 /// 36 36 class KernelFunction -
trunk/yat/classifier/MatrixLookup.h
r757 r767 39 39 namespace classifier { 40 40 41 /// 42 /// @brief General view into utility::matrix 41 43 /// 42 44 /// MatrixLookups can be used to create lookups/views into matrices -
trunk/yat/classifier/MatrixLookupWeighted.h
r757 r767 39 39 namespace classifier { 40 40 41 /// 42 /// @brief Class viewing into data and weight matrix. 41 43 /// 42 44 /// A MatrixLookupWeighted is very similar to a MatrixLookup, but -
trunk/yat/classifier/NBC.cc
r722 r767 30 30 #include "yat/utility/matrix.h" 31 31 32 #include <cassert> 32 33 #include <vector> 33 34 … … 51 52 52 53 53 54 54 const DataLookup2D& NBC::data(void) const 55 { 55 56 return data_; 56 57 } 57 58 58 59 … … 83 84 const MatrixLookupWeighted& data = 84 85 dynamic_cast<const MatrixLookupWeighted&>(data_); 85 86 aver[target_(j)].add(data.data(i,j), data.weight(i,j)); 86 87 } 87 88 else … … 90 91 for (size_t j=0; target_.nof_classes(); ++j){ 91 92 centroids_(i,j) = aver[j].mean(); 92 sigma _(i,j) = aver[j].variance();93 sigma2_(i,j) = aver[j].variance(); 93 94 } 94 95 } … … 103 104 std::cerr << "NBC::predict not implemented\n"; 104 105 exit(1); 106 assert(data_.rows()==input.rows()); 107 108 std::log(sigma_(i,c)) + 109 110 prediction = utility::matrix(centroids_.columns(),input.columns()); 111 for (size_t c=0; c<centroid_.columns(); ++c) { 112 double sum_ln_sigma=0; 113 for (size_t i=0; i<sigma2_.rows(); ++i) 114 sum_ln_sigma += log(sigma2_(i,c)); 115 sum_ln_sigma /= 2; 116 117 for (size_t s=0; s<input.columns(); ++s) { 118 // -lnp = sum{ln(sigma_i)} + sum{(x_i-m_i)^2/(2sigma_i)} 119 prediction(c,s) = sum_ln_sigma; 120 for (size_t i=0; i<input.columns(); ++i) { 121 prediction(c,s) += std::pow(input(i,s)-mean_(i,c),2)/sigma2_(i,c); 122 } 123 } 124 } 125 // exponentiate and normalize 105 126 } 106 127 -
trunk/yat/classifier/NBC.h
r722 r767 38 38 class Target; 39 39 40 /// 41 /// @brief Naive Bayesian Classification. 42 /// 40 /** 41 @brief Naive Bayesian Classification. 42 43 Each class is modelled as a multinormal distribution with 44 features being independent: \f$ p(x|c) = \prod 45 \frac{1}{\sqrt{2\pi\sigma_i^2}} \exp \left( 46 \frac{(x_i-m_i)^2}{2\sigma_i^2)} \right)\f$ 47 */ 43 48 class NBC : public SupervisedClassifier 44 49 { … … 69 74 /// Train the classifier using the training data. 70 75 /// 76 /// For each class mean and variance are estimated for each 77 /// feature (see Averager and AveragerWeighted for details). 78 /// 71 79 /// @return true if training succedeed. 72 80 /// … … 78 86 /// to the corresponding class. 79 87 /// 80 void predict(const DataLookup2D& , utility::matrix&) const;88 void predict(const DataLookup2D& data, utility::matrix& res) const; 81 89 82 90 83 91 private: 92 double gaussian(double x, double m, double sigma) const; 93 84 94 utility::matrix centroids_; 85 95 utility::matrix sigma_; -
trunk/yat/classifier/NCC.h
r722 r767 50 50 51 51 /// 52 /// Class for Nearest Centroid Classification.52 /// @brief Class for Nearest Centroid Classification. 53 53 /// 54 54 -
trunk/yat/classifier/PolynomialKernelFunction.h
r747 r767 36 36 37 37 /// 38 /// Class for polynomial kernel calculations38 /// @brief Class for polynomial kernel calculations 39 39 /// 40 40 -
trunk/yat/classifier/Sampler.h
r720 r767 34 34 35 35 /// 36 /// Interface for dividing samples into training and validation. 36 /// @brief Interface class for dividing samples into training and 37 /// validation. 37 38 /// 38 39 -
trunk/yat/classifier/SubsetGenerator.h
r720 r767 38 38 39 39 /// 40 /// Class splitting a set into training set and validation set using 41 /// a Sampler method. 42 /// 40 /// @brief Class splitting a set into training set and validation set. 41 /// 43 42 class SubsetGenerator 44 43 { -
trunk/yat/classifier/SupervisedClassifier.h
r722 r767 40 40 41 41 /// 42 /// Interface class for supervised classifiers42 /// @brief Interface class for supervised classifiers 43 43 /// 44 44 -
trunk/yat/classifier/Target.h
r757 r767 38 38 39 39 /// 40 /// Class for targets40 /// @brief Class for containing sample labels. 41 41 /// 42 42 -
trunk/yat/random/random.h
r752 r767 433 433 434 434 /// 435 /// Class to generatenumbers from a histogram in a continuous manner.435 /// @brief Generates numbers from a histogram in a continuous manner. 436 436 /// 437 437 class ContinuousGeneral : public Continuous -
trunk/yat/regression/Kernel.h
r682 r767 30 30 31 31 /// 32 /// Abstract Base Class for calculating the weights in a more32 /// @brief Interface Class for calculating the weights in a more 33 33 /// general way than classical rectangular windows. 34 34 /// … … 45 45 46 46 /// 47 /// Function calculating the weight47 /// Operator calculating calculating kernel value. 48 48 /// 49 virtual double weight(const double) const=0;49 virtual double operator()(const double) const=0; 50 50 }; 51 51 -
trunk/yat/regression/KernelBox.cc
r682 r767 33 33 } 34 34 35 double KernelBox:: weight(const double u) const35 double KernelBox::operator()(const double u) const 36 36 { 37 37 if (u>1 || u<-1) -
trunk/yat/regression/KernelBox.h
r682 r767 32 32 33 33 /// 34 /// Class for KernelBox a.k.a. rectangular window.34 /// @brief Class for KernelBox a.k.a. rectangular window. 35 35 /// 36 36 class KernelBox : public Kernel … … 44 44 45 45 /// 46 /// Function calculating the weightas \f$ w(x)=1\f$ if \f$|x|\le 146 /// Function calculating kernel value as \f$ w(x)=1\f$ if \f$|x|\le 1 47 47 /// \f$, \f$ w(x)=0 \f$ otherwise. 48 48 /// 49 double weight(const double) const;49 double operator()(const double) const; 50 50 51 51 private: -
trunk/yat/regression/KernelTriCube.cc
r682 r767 36 36 } 37 37 38 double KernelTriCube:: weight(const double x) const38 double KernelTriCube::operator()(const double x) const 39 39 { 40 40 if (x>1 || x<-1) -
trunk/yat/regression/KernelTriCube.h
r682 r767 32 32 33 33 /// 34 /// Class for TriCubal kernel.34 /// @brief Class for TriCubal kernel. 35 35 /// 36 36 class KernelTriCube : public Kernel … … 44 44 45 45 /// 46 /// Function calculating the weightas \f$ w(x)=(1-|x|^3)^3\f$ if46 /// Operator calculating kernel value as \f$ w(x)=(1-|x|^3)^3\f$ if 47 47 /// \f$|x|\le 1 \f$, \f$ w(x)=0 \f$ otherwise. 48 48 /// 49 double weight(const double) const;49 double operator()(const double) const; 50 50 51 51 private: -
trunk/yat/regression/Local.cc
r759 r767 107 107 utility::vector w(max_index-min_index+1); 108 108 for (size_t j=0; j<w.size(); j++) 109 w(j) = kernel_->weight( (x_local(j)- x_mid)/width );109 w(j) = (*kernel_)( (x_local(j)- x_mid)/width ); 110 110 111 111 // fitting the regressor locally -
trunk/yat/regression/Local.h
r747 r767 37 37 38 38 /// 39 /// Class for Locally weighted regression.39 /// @brief Class for Locally weighted regression. 40 40 /// 41 41 /// Locally weighted regression is an algorithm for learning -
trunk/yat/regression/OneDimensional.h
r729 r767 37 37 38 38 /// 39 /// Abstract Base Class for One Dimensional fitting.39 /// @brief Interface Class for One Dimensional fitting. 40 40 /// 41 41 /// @see OneDimensionalWeighted. -
trunk/yat/regression/OneDimensionalWeighted.h
r729 r767 37 37 38 38 /// 39 /// Abstract Base Class for One Dimensional fitting in a weighted39 /// @brief Interface Class for One Dimensional fitting in a weighted 40 40 /// fashion. 41 41 /// -
trunk/yat/regression/PolynomialWeighted.h
r757 r767 34 34 35 35 /// 36 /// @ todo document36 /// @brief Polynomial Regression in weighted fashion. 37 37 /// 38 38 class PolynomialWeighted : public OneDimensionalWeighted -
trunk/yat/statistics/Averager.h
r718 r767 35 35 36 36 /// 37 /// Class to calculate simple (first and second moments) averages.37 /// @brief Class to calculate simple (first and second moments) averages. 38 38 /// 39 39 /// @see AveragerWeighted AveragerPair AveragerPairWeighted -
trunk/yat/statistics/AveragerPair.h
r757 r767 35 35 36 36 /// 37 /// Class for taking care of mean and covariance of two variables.37 /// @brief Class for taking care of mean and covariance of two variables. 38 38 /// 39 39 /// @see Averager AveragerWeighted AveragerPairWeighted -
trunk/yat/statistics/AveragerPairWeighted.h
r718 r767 37 37 namespace statistics{ 38 38 /// 39 /// Class for taking care of mean and covariance of two variables in39 /// @brief Class for taking care of mean and covariance of two variables in 40 40 /// a weighted manner. 41 41 /// -
trunk/yat/statistics/Distance.h
r703 r767 38 38 39 39 /// 40 /// Interface class for calculating distances between arrays.40 /// @brief Interface class for calculating distances between arrays. 41 41 /// 42 42 class Distance -
trunk/yat/statistics/FoldChange.h
r683 r767 35 35 36 36 /// 37 /// @brief FoldChange 38 /// 39 /// This score is simply given by the difference by the group means. 37 /// @brief Score given by the difference by the group means. 40 38 /// 41 39 class FoldChange : public Score -
trunk/yat/statistics/Histogram.h
r718 r767 35 35 36 36 /// 37 /// Histograms provide a convenient way of presenting the 38 /// distribution of a set of data. A histogram consists of a set of 37 /// @brief Histograms provide a convenient way of presenting the 38 /// distribution of a set of data. 39 /// 40 /// A histogram consists of a set of 39 41 /// bins which count the number of events falling into these 40 42 /// bins. Currently only one dimensional histograms with uniformly -
trunk/yat/statistics/Pearson.h
r703 r767 38 38 39 39 /// 40 /// Class for calculating Pearson correlation.40 /// @brief Class for calculating Pearson correlation. 41 41 /// 42 42 -
trunk/yat/statistics/PearsonDistance.h
r703 r767 40 40 namespace statistics{ 41 41 /// 42 /// Class for calculating distances using Pearson correlation.42 /// @brief Class for calculating distances using Pearson correlation. 43 43 /// d=1-C. 44 44 /// -
trunk/yat/statistics/ROC.h
r747 r767 41 41 42 42 /// 43 /// Class for ROC (Reciever Operating Characteristic).43 /// @brief Class for Reciever Operating Characteristic. 44 44 /// 45 45 /// As the area under an ROC curve is equivalent to Mann-Whitney U -
trunk/yat/statistics/Score.h
r757 r767 38 38 39 39 /// 40 /// Abstract Base Class defining the interface for thescore classes.40 /// @brief Interface Class for score classes. 41 41 /// 42 42 class Score -
trunk/yat/statistics/tScore.h
r703 r767 37 37 38 38 /// 39 /// Class for Fisher's t-test.39 /// @brief Class for Fisher's t-test. 40 40 /// 41 41 /// See <a href="http://en.wikipedia.org/wiki/Student's_t-test"> -
trunk/yat/utility/FileUtil.h
r711 r767 38 38 39 39 /// 40 /// FileUtil is a utility class for checking file/directory 41 /// existence and access permissions. FileUtil is a wrapper to 42 /// access(2) and stat(2). 40 /// 41 /// @brief Checking file/directory existence and access permissions. 42 /// 43 /// FileUtil is a wrapper to access(2) and stat(2). 43 44 /// 44 45 class FileUtil { -
trunk/yat/utility/NNI.h
r718 r767 38 38 39 39 /// 40 /// NNI is an abstract base class defining the interfacefor nearest40 /// @brief Interface class for nearest 41 41 /// neighbour imputation (NNI) algorithms. 42 42 /// -
trunk/yat/utility/PCA.h
r715 r767 33 33 34 34 /** 35 @brief Principal Component Analysis 36 35 37 Class performing PCA using SVD. This class assumes that 36 38 the columns corresponds to the dimenension of the problem. -
trunk/yat/utility/SVD.h
r751 r767 35 35 36 36 /** 37 Class encapsulating GSL methods for singular value decomposition, 38 SVD. 37 @brief Singular Value Decomposition 38 39 Class encapsulating GSL methods for singular value 40 decomposition, SVD. 39 41 40 42 A = U S V' = (MxN)(NxN)(NxN) = (MxN)\n -
trunk/yat/utility/WeNNI.h
r703 r767 36 36 namespace utility { 37 37 38 /// 39 /// @brief Weighted Nearest Neighbour Imputation 38 40 /// 39 41 /// WeNNI is a continuous weights generalization of the (binary -
trunk/yat/utility/kNNI.h
r687 r767 37 37 38 38 /// 39 /// @brief kNNimpute 40 /// 39 41 /// kNNI is the binary weight implementation of NNI. This follows 40 42 /// the work done by Troyanskaya et al. cited in the NNI document -
trunk/yat/utility/matrix.h
r762 r767 43 43 44 44 /// 45 /// This is the yat interface to GSL matrix. 'double' is the46 /// only type supported, maybe we should add a 'complex' type as47 /// well in the future.45 /// @brief Interface to GSL matrix. 46 /// 47 /// For the time being 'double' is the only type supported. 48 48 /// 49 49 /// \par[File streams] Reading and writing vectors to file streams -
trunk/yat/utility/stl_utility.h
r725 r767 58 58 59 59 /// 60 /// @brief Functor comparing pairs using second. 61 /// 60 62 /// STL provides operator< for the pair.first element, but none for 61 63 /// pair.second. This template provides this and can be used as the -
trunk/yat/utility/vector.h
r759 r767 44 44 45 45 /** 46 This is the yat interface to GSL vector. 'double' is the only47 type supported, maybe we should add a 'complex' type as well in 48 the future.46 @brief This is the yat interface to GSL vector. 47 48 For time being 'double' is the only type supported. 49 49 50 50 \par File streams:
Note: See TracChangeset for help on using the changeset viewer.