Changeset 592


Timestamp:
Aug 24, 2006, 1:18:28 PM (15 years ago)
Author:
Peter
Message:

added random_shuffle function in Target class

Location:
trunk/c++_tools/classifier
Files:
13 edited

  • trunk/c++_tools/classifier/ConsensusInputRanker.h

    r522 r592  
    1212  class CrossSplitter;
    1313
    14   ///
    15   /// Class for ranking rows in a data matrix versus a target vector.
    16   /// The rows are sorted with respect
    17   /// to median of their ranks.
    18   ///   
     14  ///
     15  /// @brief Robust algorithm to rank rows in a data matrix versus a
     16  /// target vector.
     17  ///
      18  /// The idea is to create several (different) ranked lists. The lists
      19  /// could differ because they are based upon different sub-sets of
      20  /// the data, or because they are generated using different
      21  /// criteria. Having
     22  /// \f$N\f$ lists means each row in the data matrix has \f$N\f$
     23  /// ranks (each corresponding to one list) and a consensus ranked
     24  /// list is created by sorting the data rows with respect to their
     25  /// median rank.
     26  ///
     27  /// For the time being there are two ways to build a
     28  /// ConsensusInputRanker. 1) Sending a CrossSplitter to the
     29  /// constructor will create one ranked list for each of the
     30  /// partitions defined in the CrossSplitter. 2) You can generate
      31  /// your ranked lists outside, using your favourite method, and
      32  /// add them into the ConsensusInputRanker object. This allows
     33  /// combining different scores and different sub-sets in a more
     34  /// general way.
     35  ///
    1936  class ConsensusInputRanker
    2037  {
     
    2441    /// @brief Default constructor
    2542    ///
      43    /// Truly does nothing but create a few empty member vectors.
     44    ///
    2645    ConsensusInputRanker(void);
    2746   
    2847    ///
    29     /// Constructor
    30     /// @todo doc
    31     ConsensusInputRanker(CrossSplitter&, statistics::Score&);
     48    /// For each sub-set in CrossSplitter @a sc an InputRanker object
      49    /// is created using the Score @a s. After creation the data rows
     50    /// are sorted with respect to the median rank (i.e. update() is
     51    /// called).
     52    ///
     53    ConsensusInputRanker(CrossSplitter& sc, statistics::Score& s);
    3254   
    3355    ///
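
To make the consensus idea in the new ConsensusInputRanker documentation concrete, here is a standalone sketch (not part of the changeset; all names are hypothetical) that ranks data rows by the median of their ranks over several ranked lists:

    #include <algorithm>
    #include <cstddef>
    #include <iostream>
    #include <utility>
    #include <vector>

    // median of a vector of ranks (a copy is taken so it can be sorted)
    double median(std::vector<double> v)
    {
      std::sort(v.begin(), v.end());
      std::size_t n = v.size();
      return n%2 ? v[n/2] : 0.5*(v[n/2-1]+v[n/2]);
    }

    int main()
    {
      // rank[list][row] is the rank of data row 'row' in ranked list 'list',
      // e.g. one list per CrossSplitter partition or per score used
      const std::size_t nof_rows = 4;
      double r0[] = {0,1,2,3}, r1[] = {1,0,3,2}, r2[] = {0,2,1,3};
      std::vector<std::vector<double> > rank;
      rank.push_back(std::vector<double>(r0, r0+nof_rows));
      rank.push_back(std::vector<double>(r1, r1+nof_rows));
      rank.push_back(std::vector<double>(r2, r2+nof_rows));

      // consensus list: rows sorted with respect to their median rank
      std::vector<std::pair<double,std::size_t> > consensus;
      for (std::size_t row=0; row<nof_rows; ++row) {
        std::vector<double> ranks_of_row;
        for (std::size_t list=0; list<rank.size(); ++list)
          ranks_of_row.push_back(rank[list][row]);
        consensus.push_back(std::make_pair(median(ranks_of_row), row));
      }
      std::sort(consensus.begin(), consensus.end());
      for (std::size_t i=0; i<consensus.size(); ++i)
        std::cout << "row " << consensus[i].second
                  << "  median rank " << consensus[i].first << "\n";
    }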
  • trunk/c++_tools/classifier/DataLookup2D.h

    r566 r592  
    1111
    1212  ///
    13   /// Interface class for classifier data
     13  /// @brief Interface class for classifier data.
    1414  ///
    15 
     15  /// This is the abstract base class defining a common interface for
      16  /// MatrixLookup and KernelLookup. The general idea of the Lookup
      17  /// classes is, rather than copying the sub-matrix or sub-kernel,
      18  /// to hold a pointer to the underlying matrix/kernel together with
      19  /// a vector of row indices and a vector of column indices. These
      20  /// indices then define what element to look into.
      21  ///
      22  /// This allows fast construction of sub-matrices/sub-kernels at
      23  /// almost no extra memory cost.
     24  ///
    1625  class DataLookup2D
    1726  {
     
    2635
    2736    ///
    28     /// Constructor taking the row index vector and column index vector
    29     /// as input.
     37    /// Constructor taking the @a row index vector and @a column index
     38    /// vector as input. If @a owner is set true, the object is
      39    /// considered owner of the underlying data (and the data is
     40    /// deleted at destruction).
    3041    ///
    31     DataLookup2D(const std::vector<size_t>&, const std::vector<size_t>&,
     42    DataLookup2D(const std::vector<size_t>& row,
     43                 const std::vector<size_t>& column,
    3244                 const bool owner = false);
    3345
    3446    ///
    35     /// Copy constructor.
      47    /// @brief Copy constructor. Indices and pointer to underlying
      48    /// data are copied, whereas owner is set to false.
    3649    ///
    3750    DataLookup2D(const DataLookup2D&);
    3851
    3952    ///
    40     ///
     53    /// Copy the index such that new(i,j) = old(row[i],col[j])
    4154    ///
    4255    DataLookup2D(const DataLookup2D&, const std::vector<size_t>& row,
    43               const std::vector<size_t>& col);
     56                const std::vector<size_t>& col);
    4457
    4558    ///
    46     ///
      59    /// If @a row is true, indices are copied so new(i,j)=old(index[i],j).
      60    /// Otherwise indices are copied so new(i,j)=old(i,index[j]).
    4761    ///
    4862    DataLookup2D(const DataLookup2D&, const std::vector<size_t>& index,
    49               const bool row);
     63                const bool row);
    5064
    5165
    5266    ///
    53     ///
     67    /// Indices are created all pointing to the zero-zero element
     68    /// created in daughter classes.
    5469    ///
    5570    DataLookup2D(const size_t, const size_t, const bool owner);
     
    5772
    5873    ///
    59     /// Destructor
     74    /// @brief Destructor
    6075    ///
    6176    virtual ~DataLookup2D() {};
     
    91106
    92107    ///
    93     /// @return
     108    /// @brief access operator
     109    ///
     110    /// @return value in that particular element
    94111    ///
    95112    virtual double operator()(const size_t row, const size_t column) const=0;
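
The index-vector idea described for DataLookup2D can be illustrated with a minimal standalone sketch (hypothetical names, not the library's classes): the view stores a pointer to the matrix plus row and column index vectors, so sub-matrices are cheap to create and cost almost no extra memory.

    #include <cstddef>
    #include <iostream>
    #include <vector>

    // Hypothetical stand-in for the underlying matrix type.
    typedef std::vector<std::vector<double> > Matrix;

    // Minimal lookup: element (i,j) of the view is element
    // (row_[i], column_[j]) of the underlying matrix.
    class Lookup2D {
    public:
      Lookup2D(const Matrix& m, const std::vector<std::size_t>& row,
               const std::vector<std::size_t>& column)
        : data_(&m), row_(row), column_(column) {}

      double operator()(std::size_t i, std::size_t j) const
      { return (*data_)[row_[i]][column_[j]]; }

      std::size_t rows(void) const { return row_.size(); }
      std::size_t columns(void) const { return column_.size(); }

    private:
      const Matrix* data_;              // not owned in this sketch
      std::vector<std::size_t> row_;    // which rows of data_ we look into
      std::vector<std::size_t> column_; // which columns of data_ we look into
    };

    int main()
    {
      Matrix m(3, std::vector<double>(3));
      for (std::size_t i=0; i<3; ++i)
        for (std::size_t j=0; j<3; ++j)
          m[i][j] = 10.0*i + j;

      // sub-matrix view of rows {0,2} and columns {1,2}; nothing is copied
      std::vector<std::size_t> row, col;
      row.push_back(0); row.push_back(2);
      col.push_back(1); col.push_back(2);
      Lookup2D view(m, row, col);
      std::cout << view(1,0) << "\n";   // prints 21 (= m[2][1])
    }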
  • trunk/c++_tools/classifier/Kernel.h

    r566 r592  
    1616
    1717  ///
    18   ///   @brief Base Class for Kernels.
     18  ///  @brief Abstract Base Class for Kernels.
    1919  ///
    20   ///   Class taking care of the \f$NxN\f$ kernel matrix, where
    21   ///   \f$N\f$ is number of samples. Each element in the Kernel
    22   ///   matrix is the scalar product of the corresponding pair of
    23   ///   samples. Type of Kernel is defined by a KernelFunction.
    24   ///   
    25   ///   @note If the KernelFunction is destroyed, the Kernel is no
    26   ///   longer defined.
     20  ///  Class taking care of the \f$NxN\f$ kernel matrix, where \f$N\f$
      21  ///  is number of samples. Each element in the Kernel is the
      22  ///  scalar product of the corresponding pair of samples. For the
      23  ///  time being there are two kinds of kernels: Kernel_SEV, which
      24  ///  is optimized to be fast, and Kernel_MEV, which is preferable
      25  ///  when dealing with many samples and memory might be a
      26  ///  bottleneck. Also there are the corresponding weighted versions
     27  ///  to deal with weights (including missing values). A
     28  ///  KernelFunction defines what kind of scalar product the Kernel
     29  ///  represents, e.g. a Polynomial Kernel of degree 1 means we are
     30  ///  dealing with the ordinary linear scalar product.
     31  ///
     32  /// @note If the KernelFunction is destroyed, the Kernel is no
     33  /// longer defined.
    2734  ///
    2835  class Kernel
     
    3239
    3340    ///
    34     ///   Constructor taking the data matrix and KernelFunction as
    35     ///   input.Each column in the data matrix corresponds to one
    36     ///   sample.
     41    /// Constructor taking the @a data matrix and KernelFunction as
     42    /// input. Each column in the data matrix corresponds to one
     43    /// sample and the Kernel matrix is built applying the
     44    /// KernelFunction on each pair of columns in the data matrix.
    3745    ///
    38     ///   @note Can not handle NaNs.
     46    /// @note Can not handle NaNs.
    3947    ///
    4048    Kernel(const MatrixLookup& data, const KernelFunction& kf);
    4149
    4250    ///
    43     /// @todo doc
     51    /// Here each data value is accompanied by a weight, and the
     52    /// Kernel matrix is built applying the weighted version of the
     53    /// KernelFunction on each pair of samples (i.e. columns in @a
     54    /// data matrix and @a weight matrix).
    4455    ///
    4556    Kernel(const MatrixLookup& data, const KernelFunction& kf,
     
    4758   
    4859    ///
    49     /// @todo doc
     60    /// @todo remove this function
    5061    ///
    5162    Kernel(const Kernel& kernel, const std::vector<size_t>& index);
     
    6475    ///
    6576    /// @return number columns in Kernel
    66     ///
     77    /// @todo remove this function
    6778    inline size_t columns(void) const { return size(); }
    6879
     80    ///
     81    /// @return const reference to the underlying data
     82    ///
    6983    inline const MatrixLookup& data(void) const { return *data_; }
    7084
    7185    ///
    7286    /// @return number of rows in Kernel
    73     ///
     87    /// @todo remove this function
    7488    inline size_t rows(void) const { return size(); }
    7589
    7690    ///
    77     /// @brief number of samples
     91    /// @brief number of samples 
    7892    ///
    7993    inline size_t size(void) const { return data_->columns(); }
    8094
    81    
     95    ///
     96    /// Calculates the scalar product (using the KernelFunction)
     97    /// between vector @a vec and the \f$i\f$th column in the data
     98    /// matrix.
     99    ///   
    82100    virtual double element(const DataLookup1D& vec, const size_t i) const=0;
     101
     102    ///
     103    /// Calculates the weighted scalar product (using the
     104    /// KernelFunction) between vector @a vec and the \f$i\f$th column
     105    /// in the data matrix. Using a weight vector with all elements
     106    /// equal to unity yields same result as the non-weighted version
     107    /// above.
     108    ///
    83109    virtual double element(const DataLookup1D& vec, const DataLookup1D& w,
    84110                           const size_t i) const=0;
    85111
     112    ///
     113    /// @todo remove this function
    86114    ///
    87115    /// Created Kernel is built from selected features in data. The
     
    101129    virtual bool weighted(void) const=0;
    102130
     131    ///
     132    /// If no weight matrix was used in constructing the Kernel a
     133    /// MatrixLookup with same dimensions as data MatrixLookup and all
      134    /// elements equal to unity will be returned. @note If Kernel is
      135    /// non-weighted the Kernel is owner of the weights and the weights
      136    /// are not well defined after the Kernel has gone out of scope or
      137    /// has been deleted.
     138    ///
     139    /// @return the weight matrix
     140    ///
    103141    inline const MatrixLookup& weights(void) const { return *weights_; }
    104142
  • trunk/c++_tools/classifier/KernelLookup.h

    r566 r592  
    1515
    1616  ///
    17   /// @brief View into sub Kernel
     17  /// @brief Lookup into Kernel
     18  ///
     19  /// This is the KernelLookup class to be used together with kernel
     20  /// methods such as Support Vector Machines (SVM). The class does
     21  /// not contain any data or values, but rather is a lookup into a
     22  /// Kernel object. Each row and each column corresponds to a row and
      23  /// a column in the Kernel, respectively. This design allows for
      24  /// fast creation of sub-kernels, which is a common operation in
      25  /// most training/validation procedures.
      26  ///
      27  /// A KernelLookup can be created directly from a Kernel or from
      28  /// another KernelLookup. In the latter case, the resulting
     29  /// KernelLookup is looking directly into the underlying Kernel to
     30  /// avoid multiple lookups.
     31  ///
     32  /// There is a possibility to set the KernelLookup as owner of the
     33  /// underlying Kernel. In that case the underlying Kernel will be
     34  /// destroyed in the destructor. Consequently, the underlying Kernel
     35  /// must have been dynamically allocated and no other KernelLookup
     36  /// can own the Kernel.
    1837  ///
    1938  class KernelLookup : public DataLookup2D
     
    2342   
    2443    ///
    25     /// @brief Constructor from a Kernel
    26     ///
    27     /// @parameter own if true @a kernel is deleted in destructor,
    28     /// i.e., it must be dynamically allocated.
      44    /// @brief Constructing a Lookup into a Kernel
     45    ///
     46    /// Constructs a KernelLookup corresponding to the Kernel @a
     47    /// kernel. By default @a owner is set to false, which means
     48    /// KernelLookup does not own the underlying Kernel. If
     49    /// KernelLookup owns the Kernel then the Kernel will be deleted
     50    /// in the destructor.
    2951    ///
    3052    /// @note If underlying Kernel goes out of scope or is deleted, the
    31     /// returned pointer becomes invalid and the result of further use is
     53    /// KernelLookup becomes invalid and the result of further use is
    3254    /// undefined.
    3355    ///
    34     KernelLookup(const Kernel& kernel, const bool own=false);
     56    KernelLookup(const Kernel& kernel, const bool owner=false);
    3557
    3658    ///
    37     /// Constructor creating a subKernel. The \f$i\f$th row in
    38     /// constructed lookup is identical to row number row[i] in
    39     /// matrix. The \f$i\f$th column in constructed lookup is
    40     /// identical to column number column[i] in matrix. If @a owner is
    41     /// set to true @a kernel is destroyed in destructor.
     59    /// @brief Constructing a Lookup into a subKernel
     60    ///
     61    /// Creating a Lookup into parts of the Kernel. In the created
     62    /// Lookup the element in the \f$i\f$th row in the \f$j\f$th
      63    /// column is identical to the element in row row[i] and column
      64    /// column[j] in the underlying @a kernel. If @a owner is set to
      65    /// true the underlying @a kernel is destroyed in the destructor.
    4266    ///
    4367    /// @note If @a kernel goes out of scope or is deleted, the
     
    5276   
    5377    ///
    54     /// Copy constructor
    55     ///
    56     KernelLookup(const KernelLookup&);
     78    /// @brief Copy constructor.
     79    ///
     80    /// A Lookup is created looking into the
     81    /// same underlying Kernel as @a kl is looking into. The newly
     82    /// created KernelLookup does not own the underlying Kernel.
     83    ///
     84    KernelLookup(const KernelLookup& kl);
    5785
    5886
    5987    ///
      88    /// @brief Constructing a sub-KernelLookup.
     89    ///
    6090    /// Constructor building a sub-KernelLookup from a KernelLookup
    61     /// defined by row index vector and column index vector. The
    62     /// resulting KernelLookup is independent of the old KernelLookup,
    63     /// but is undefined in case underlying Kernel is destroyed.
     91    /// defined by row index vector and column index vector. In the
     92    /// created Lookup the element in the \f$i\f$th row in the
     93    /// \f$j\f$th column is identical to the element in row row[i] and
      94    /// column column[j] in the copied @a kl. The resulting
     95    /// KernelLookup is independent of the old KernelLookup, but is
     96    /// undefined in case underlying Kernel is destroyed.
    6497    ///
    6598    /// @note For training usage row index shall always be equal to
    6699    /// column index.
    67100    ///
    68     KernelLookup(const KernelLookup& kernel, const std::vector<size_t>& row,
     101    KernelLookup(const KernelLookup& kl, const std::vector<size_t>& row,
    69102                 const std::vector<size_t>& column);
    70103   
     
    74107    /// equally many rows as @a kernel.
    75108    ///
    76     /// @note If underlying matrix goes out of scope or is deleted, the
     109    /// @note If underlying kernel goes out of scope or is deleted, the
    77110    /// KernelLookup becomes invalid and the result of further use is
    78111    /// undefined.
     
    90123
    91124    ///
    92     /// @return sub-Lookup of the DataLookup2D
     125    /// Creates a sub-Kernel identical to the one created using
     126    /// KernelLookup(*this, train, train).
     127    ///
     128    /// @return pointer to dynamically allocated sub-Lookup of the KernelLookup
    93129    ///
    94130    /// @Note Returns a dynamically allocated DataLookup2D, which has
     
    100136    ///
    101137    /// In returned kernel each row corresponds to a training sample
    102     /// and each column corresponds to a validation sample.
     138    /// and each column corresponds to a validation sample. The
     139    /// created sub-KernelLookup is equivalent to using
      140    /// KernelLookup(*this, train, validation).
    103141    ///
    104142    /// @return sub-Lookup of the DataLookup2D
     
    120158
    121159    ///
    122     /// Each column in returned MatrixLook corresponds to the column
     160    /// Each column in returned MatrixLookup corresponds to the column
    123161    /// in KernelLookup.
    124162    ///
     
    131169
    132170    ///
    133     /// @todo doc
     171    /// Function to calculate a new Kernel element using the
      172    /// underlying KernelFunction. The value is calculated between @a
     173    /// vec and the data vector of the \f$i\f$th sample, in other
     174    /// words, the sample corresponding to the \f$i\f$th row or
     175    /// \f$i\f$th column. In case KernelLookup is a sub-Kernel and not
     176    /// symmetric, the kernel value is calculated between @a vec and
     177    /// the data vector corresponding to \f$i\f$th row.
    134178    ///
    135179    inline double element(const DataLookup1D& vec, const size_t i) const
     
    137181
    138182    ///
    139     /// @todo doc
      183    /// Weighted version of the element function. Using weights @a w
      184    /// all equal to unity yields the same result as the unweighted
      185    /// version above.
    140186    ///
    141187    inline double element(const DataLookup1D& vec, const DataLookup1D& w,
     
    144190
    145191    ///
    146     /// @todo doc
     192    /// @todo remove
     193    /// @note This function will probably be removed
    147194    ///
    148195    /// @Note Returns a dynamically allocated KernelLookup, which has
     
    158205    ///
    159206    /// Each column in returned MatrixLook corresponds to the column
    160     /// in KernelLookup.
     207    /// in KernelLookup. If no weights were used in construction of
     208    /// Kernel, each element in returned MatrixLookup is set to unity.
    161209    ///
    162210    /// @Note Returns a dynamically allocated MatrixLookup2D, which has
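
The training/validation sub-kernel semantics documented above boil down to an index mapping: element (i,j) of the sub-kernel is element (row[i], column[j]) of the full kernel. The sketch below (hypothetical names) shows that mapping; for brevity it copies values, whereas KernelLookup only stores indices into the underlying Kernel.

    #include <cstddef>
    #include <iostream>
    #include <vector>

    typedef std::vector<std::vector<double> > Matrix;

    // Sub-kernel built from row and column index vectors into a full
    // kernel matrix (illustration only, not the library's API).
    Matrix sub_kernel(const Matrix& kernel,
                      const std::vector<std::size_t>& row,
                      const std::vector<std::size_t>& column)
    {
      Matrix k(row.size(), std::vector<double>(column.size()));
      for (std::size_t i=0; i<row.size(); ++i)
        for (std::size_t j=0; j<column.size(); ++j)
          k[i][j] = kernel[row[i]][column[j]];
      return k;
    }

    int main()
    {
      // toy 4x4 kernel
      Matrix kernel(4, std::vector<double>(4));
      for (std::size_t i=0; i<4; ++i)
        for (std::size_t j=0; j<4; ++j)
          kernel[i][j] = 1.0 + i + 10.0*j;

      std::vector<std::size_t> train, validation;
      train.push_back(0); train.push_back(2);
      validation.push_back(1); validation.push_back(3);

      // train x train kernel for training; for prediction each row is a
      // training sample and each column a validation sample
      Matrix train_kernel      = sub_kernel(kernel, train, train);
      Matrix validation_kernel = sub_kernel(kernel, train, validation);
      std::cout << train_kernel[1][0] << " "
                << validation_kernel[0][1] << "\n";   // 3 31
    }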
  • trunk/c++_tools/classifier/KernelWeighted_MEV.h

    r565 r592  
    1515
    1616  ///
    17   ///   @brief Memory Efficient Kernel
    18   ///   Class taking care of the \f$NxN\f$ kernel matrix, where
    19   ///   \f$N\f$ is number of samples. Type of Kernel is defined by a
    20   ///   KernelFunction. This Memory Efficient Version (MEV) does not
    21   ///   store the kernel matrix in memory, but calculates each element
    22   ///   when it is needed. When memory allows do always use Kernel_SEV
    23   ///   instead.
     17  /// @brief Memory Efficient Kernel Class taking care of the
     18  /// \f$NxN\f$ kernel matrix, where \f$N\f$ is number of
     19  /// samples. Type of Kernel is defined by a KernelFunction. This
     20  /// Memory Efficient Version (MEV) does not store the kernel matrix
      21  /// in memory, but calculates an element when it is needed. When
      22  /// memory allows, always use KernelWeighted_SEV instead.
    2423  ///   
    25   ///   @see also KernelWeighted_SEV
     24  /// @see Kernel_MEV KernelWeighted_SEV
    2625  ///
    2726  class KernelWeighted_MEV : public Kernel
     
    3130   
    3231    ///
    33     ///   Constructor taking the \a data matrix, the KernelFunction and a
    34     ///   \a weight matrix as input. Each column in the data matrix
    35     ///   corresponds to one sample.
     32    /// Constructor taking the \a data matrix, the KernelFunction and a
     33    /// \a weight matrix as input. Each column in the data matrix
     34    /// corresponds to one sample.
    3635    ///
    3736    /// @note if @a data, @a kf, or @a weights is destroyed the
     
    4342
    4443    ///
    45     /// @todo doc
     44    /// @todo remove
    4645    ///
    4746    KernelWeighted_MEV(const KernelWeighted_MEV& other,
     
    5554
    5655    ///
     56    /// Calculates the scalar product using the weighted
     57    /// KernelFunction between data vector @a vec and column \f$i\f$
     58    /// in data matrix. For @a vec a vector of unity weights is used.
     59    ///
    5760    /// @return kernel element between data @a vec and training sample @a i
    5861    ///
     
    6568
    6669    ///
    67     /// @todo doc
     70    /// Calculates the scalar product using the weighted
     71    /// KernelFunction between data vector @a vec and column \f$i\f$
      72    /// in data matrix. The weights @a w are used for @a vec.
     6873    ///
      74    /// @return kernel element between data @a vec and training sample @a i
     75    ///
    6976    inline double element(const DataLookup1D& vec, const DataLookup1D& w,
    7077                          const size_t i) const
     
    7582
    7683    ///
    77     /// @todo doc
     84    /// @todo remove
    7885    ///
    7986    const Kernel* selected(const std::vector<size_t>& index) const;
    8087
     88    ///
     89    /// @return true
     90    ///
    8191    inline bool weighted(void) const { return true; }
    8292
  • trunk/c++_tools/classifier/KernelWeighted_SEV.h

    r565 r592  
    1717
    1818  ///
    19   ///   @brief Speed Efficient Kernel
     19  ///   @brief Weighted Speed Efficient Kernel
     20  ///
    2021  ///   Class taking care of the \f$NxN\f$ kernel matrix, where
    2122  ///   \f$N\f$ is number of samples. Type of Kernel is defined by a
    22   ///   KernelFunction. This Speed Efficient Version (SEV) calculated
     23  ///   KernelFunction. This Speed Efficient Version (SEV) calculates
    2324  ///   the kernel matrix once and the kernel is stored in
    24   ///   memory. When \f$N\f$ is large and the kernel matrix cannot be
    25   ///   stored in memory, use Kernel_MEV instead.
      25  ///   memory. This Kernel expects a weight matrix associated with the
      26  ///   data matrix. Missing values should be associated with a zero
      27  ///   weight, which means they will be ignored during all
     28  ///   calculations. See KernelFunction for further details on
     29  ///   weighted calculations of the Kernel. When \f$N\f$ is large and
     30  ///   the kernel matrix cannot be stored in memory, use
     31  ///   KernelWeighted_MEV instead.
    2632  ///   
    27   ///   @see also Kernel_MEV
     33  ///   @see also KernelWeighted_MEV Kernel_SEV
    2834  ///
    2935  class KernelWeighted_SEV : public Kernel
     
    3339
    3440    ///
    35     ///  Constructor taking the data matrix and KernelFunction as
    36     ///  input. @note Can not handle NaNs. When dealing with missing values,
    37     ///  use constructor taking a weight matrix.
     41    ///  Constructor taking the data matrix, KernelFunction, and
     42    ///  weight matrix as input. Each element in the data matrix
      43    ///  should have a corresponding element in the weight matrix, and
      44    ///  consequently the dimensions of the two matrices should be
     45    ///  equal. The Kernel is built using the weighted version of the
     46    ///  KernelFunction.
    3847    ///
    3948    /// @note if @a data, @a kf, or @a weights is destroyed the
     
    4453
    4554    ///
    46     /// @todo doc
     55    /// @todo remove.
    4756    ///
    4857    KernelWeighted_SEV(const KernelWeighted_SEV& other,
     
    5867
    5968    ///
     69    /// Calculates the scalar product using the weighted
     70    /// KernelFunction between data vector @a vec and column \f$i\f$
     71    /// in data matrix. For @a vec a vector of unity weights is used.
     72    ///
    6073    /// @return kernel element between data @a vec and training sample @a i
    6174    ///
     
    6881
    6982    ///
    70     /// @todo doc
     83    /// Calculates the scalar product using the weighted
     84    /// KernelFunction between data vector @a vec and column \f$i\f$
      85    /// in data matrix. The weights @a w are used for @a vec.
    7186    ///
      87    /// @return kernel element between data @a vec and training sample @a i
     88    ///
    7289    inline double element(const DataLookup1D& vec, const DataLookup1D& w,
    7390                          const size_t i) const
     
    7895
    7996    ///
    80     /// @todo doc
     97    /// @todo remove
    8198    ///
    8299    const KernelWeighted_SEV* selected(const std::vector<size_t>& index) const;
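
The weighted kernels treat a missing value as a value with zero weight, so it simply drops out of the scalar product. The sketch below shows one such weighted scalar product as an illustration only; the exact weighted formula is defined by the KernelFunction in use.

    #include <cstddef>
    #include <iostream>
    #include <vector>

    // One possible weighted scalar product: terms with zero weight
    // (missing values) are ignored. Illustration only; not the
    // library's KernelFunction formula.
    double weighted_dot(const std::vector<double>& x,
                        const std::vector<double>& wx,
                        const std::vector<double>& y,
                        const std::vector<double>& wy)
    {
      double sum = 0.0;
      for (std::size_t i=0; i<x.size(); ++i)
        sum += wx[i]*wy[i]*x[i]*y[i];   // zero weight => term vanishes
      return sum;
    }

    int main()
    {
      double xv[] = {1, 2, 3}, yv[] = {4, 5, 6};
      double wxv[] = {1, 0, 1};   // second value of x is missing (weight 0)
      double wyv[] = {1, 1, 1};
      std::vector<double> x(xv, xv+3), y(yv, yv+3);
      std::vector<double> wx(wxv, wxv+3), wy(wyv, wyv+3);
      std::cout << weighted_dot(x, wx, y, wy) << "\n";  // 1*4 + 3*6 = 22
    }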
  • trunk/c++_tools/classifier/Kernel_MEV.h

    r565 r592  
    1414
    1515  ///
    16   ///   @brief Memory Efficient Kernel
    17   ///   Class taking care of the \f$NxN\f$ kernel matrix, where
    18   ///   \f$N\f$ is number of samples. Type of Kernel is defined by a
    19   ///   KernelFunction. This Memory Efficient Version (MEV) does not
    20   ///   store the kernel matrix in memory, but calculates each element
    21   ///   when it is needed. When memory allows do always use Kernel_SEV
    22   ///   instead.
     16  /// @brief Memory Efficient Kernel Class taking care of the
     17  ///   \f$NxN\f$ kernel matrix, where \f$N\f$ is number of
     18  ///   samples. Type of Kernel is defined by a KernelFunction. This
     19  ///   Memory Efficient Version (MEV) does not store the kernel
     20  ///   matrix in memory, but calculates an element when it is
      21  ///   needed. When memory allows, always use Kernel_SEV
      22  ///   instead. This Kernel does not support missing values in the
      23  ///   form of NaNs. To deal with missing values, use KernelWeighted_MEV.
    2324  ///   
    24   ///   @see also Kernel_SEV
     25  ///   @see also Kernel_SEV KernelWeighted_MEV
    2526  ///
    2627  class Kernel_MEV : public Kernel
     
    3031   
    3132    ///
    32     ///   Constructor taking the data matrix and KernelFunction as
    33     ///   input.Each column in the data matrix corresponds to one
    34     ///   sample. @note Can not handle NaNs.
     33    /// Constructor taking the data matrix and KernelFunction as
      34    /// input. Each column in the data matrix corresponds to one
     35    /// sample. @note Can not handle NaNs.
    3536    ///
    3637    inline Kernel_MEV(const MatrixLookup& data, const KernelFunction& kf)
     
    3839
    3940    ///
    40     /// @todo doc
     41    /// @todo remove
    4142    ///
    4243    Kernel_MEV(const Kernel_MEV& kernel, const std::vector<size_t>& index);
     
    4445
    4546    ///
    46     ///   Destructor
     47    /// Destructor
    4748    ///
    4849    inline virtual ~Kernel_MEV(void) {};
     
    5556   
    5657    ///
    57     /// @return kernel element between data @a ve and training sample @a i
     58    /// Calculates the scalar product using the KernelFunction between
     59    /// data vector @a vec and column \f$i\f$ in data matrix.
    5860    ///
     61    /// @return kernel element between data @a vec and training sample @a i
     62    ///
    5963    inline double element(const DataLookup1D& vec, const size_t i) const
    6064    { return kf_->operator()(vec, DataLookup1D(*data_,i,false)); }
    6165     
     66    ///
     67    /// Using the KernelFunction this function calculates the scalar
     68    /// product between vector @a vec and the column \f$ i\f$ in data
     69    /// matrix. The KernelFunction expects a weight vector for each of
      70    /// the two data vectors and, as this Kernel is non-weighted, each
      71    /// value in the data matrix is associated with a unity weight.
     72    ///
     73    /// @return weighted kernel element between data @a vec and
     74    /// training sample @a i
     75    ///
    6276    inline double element(const DataLookup1D& vec, const DataLookup1D& w,
    6377                          const size_t i) const
     
    6579                   w, DataLookup1D(w.size(),1.0));}
    6680
     81    /// @todo remove
    6782    const Kernel_MEV* selected(const std::vector<size_t>& index) const;
    6883
     84    ///
     85    /// @return false
     86    ///
    6987    inline bool weighted(void) const { return false; }
    7088
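
The SEV/MEV distinction running through these kernel classes is a classic memory/speed trade-off: compute the N x N matrix once and store it, or recompute each element on demand. A standalone sketch of the two strategies (hypothetical names, a linear kernel assumed):

    #include <cstddef>
    #include <iostream>
    #include <vector>

    typedef std::vector<std::vector<double> > Matrix;

    double linear_kernel(const Matrix& data, std::size_t a, std::size_t b)
    {
      double sum = 0.0;
      for (std::size_t row=0; row<data.size(); ++row)
        sum += data[row][a]*data[row][b];
      return sum;
    }

    // SEV style: pay the N x N memory cost once, then lookups are cheap.
    struct KernelStored {
      KernelStored(const Matrix& data) {
        std::size_t N = data[0].size();
        k_.assign(N, std::vector<double>(N));
        for (std::size_t i=0; i<N; ++i)
          for (std::size_t j=0; j<N; ++j)
            k_[i][j] = linear_kernel(data, i, j);
      }
      double operator()(std::size_t i, std::size_t j) const { return k_[i][j]; }
      Matrix k_;   // N x N values kept in memory
    };

    // MEV style: no N x N storage, each element recomputed when asked for.
    struct KernelOnDemand {
      KernelOnDemand(const Matrix& data) : data_(&data) {}
      double operator()(std::size_t i, std::size_t j) const
      { return linear_kernel(*data_, i, j); }
      const Matrix* data_;
    };

    int main()
    {
      Matrix data(2, std::vector<double>(3, 1.0));
      KernelStored sev(data);
      KernelOnDemand mev(data);
      std::cout << sev(0,1) << " " << mev(0,1) << "\n";   // same value: 2 2
    }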
  • trunk/c++_tools/classifier/Kernel_SEV.h

    r555 r592  
    1414  class KernelFunction;
    1515
    16   ///
     16  ///
    1717  ///   @brief Speed Efficient Kernel
    18   ///   Class taking care of the \f$NxN\f$ kernel matrix, where
    19   ///   \f$N\f$ is number of samples. Type of Kernel is defined by a
    20   ///   KernelFunction. This Speed Efficient Version (SEV) calculated
    21   ///   the kernel matrix once and the kernel is stored in
    22   ///   memory. When \f$N\f$ is large and the kernel matrix cannot be
    23   ///   stored in memory, use Kernel_MEV instead.
    24   ///   
    25   ///   @see also Kernel_MEV
    26   ///
     18  ///   Class taking care of the \f$NxN\f$ kernel matrix, where
     19  ///   \f$N\f$ is number of samples. Type of Kernel is defined by a
      20  ///   KernelFunction. This Speed Efficient Version (SEV) calculates
      21  ///   the kernel matrix once at construction and the kernel is stored in
     22  ///   memory. When \f$N\f$ is large and the kernel matrix cannot be
     23  ///   stored in memory, use Kernel_MEV instead.
     24  ///   
     25  ///   @see also Kernel_MEV KernelWeighted_SEV
     26  ///
    2727  class Kernel_SEV : public Kernel
    2828  {
     
    3131
    3232    ///
    33     ///   Constructor taking the data matrix and KernelFunction as
    34     ///   input. @note Can not handle NaNs. When dealing with missing values,
    35     ///   use constructor taking a weight matrix.
     33    /// Constructor taking the data matrix and KernelFunction as
     34    /// input. @note Can not handle NaNs. When dealing with missing values,
     35    /// use KernelWeighted_SEV instead.
     36    ///
    3637    Kernel_SEV(const MatrixLookup&, const KernelFunction&);
    3738   
    3839    ///
    39     /// @todo doc
     40    /// @todo remove
    4041    ///
    4142    Kernel_SEV(const Kernel_SEV& kernel, const std::vector<size_t>& index);
     
    4445    /// @return element at position (\a row, \a column) in the Kernel
    4546    /// matrix
    46     ///
     47    ///
    4748    inline double operator()(const size_t row,const size_t column) const
    4849    { return kernel_matrix_(row,column); }
    4950
     51    ///
     52    /// Calculates the scalar product using the KernelFunction between
     53    /// data vector @a vec and column \f$i\f$ in data matrix.
    5054    ///
    5155    /// @return kernel element between data @a vec and training sample @a i
     
    5458
    5559    ///
    56     /// @todo doc
     60    /// Using the KernelFunction this function calculates the scalar
     61    /// product between vector @a vec and the column \f$ i\f$ in data
     62    /// matrix. The KernelFunction expects a weight vector for each of
      63    /// the two data vectors and, as this Kernel is non-weighted, each
      64    /// value in the data matrix is associated with a unity weight.
     65    ///
     66    /// @return weighted kernel element between data @a vec and
     67    /// training sample @a i
    5768    ///
    5869    double element(const DataLookup1D& vec, const DataLookup1D& w,
     
    6071
    6172    ///
    62     /// @todo doc
     73    /// @todo remove this function
    6374    ///
    6475    const Kernel* selected(const std::vector<size_t>& index) const;
  • trunk/c++_tools/classifier/MatrixLookup.cc

    r556 r592  
    66#include <algorithm>
    77#endif
     8
     9#include <fstream>
    810
    911namespace theplu {
     
    103105
    104106
     107  MatrixLookup::MatrixLookup(std::istream& is, char sep)
     108    : DataLookup2D(true)
     109  {
     110    data_ = new gslapi::matrix(is,sep);
     111    for(size_t i=0;i<(*data_).rows();i++)
     112      row_index_.push_back(i);
     113    for(size_t i=0;i<(*data_).columns();i++)
     114      column_index_.push_back(i);
     115  }
     116
     117
    105118  MatrixLookup::~MatrixLookup(void)
    106119  {
     
    130143  {
    131144    if (this!=&other){
     145      if (owner_){
     146        delete data_;
     147        owner_=false;
     148      }
    132149      DataLookup2D::operator=(other);
    133150      data_ = other.data_;
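
The new MatrixLookup(std::istream&, char sep) constructor reads a matrix from a stream, delegating the parsing to gslapi::matrix. As an illustration only, here is a standalone sketch of that kind of separator-delimited parsing:

    #include <cstdlib>
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    // Parse a separator-delimited matrix from a stream; the real code
    // hands this job to gslapi::matrix.
    std::vector<std::vector<double> > read_matrix(std::istream& is, char sep)
    {
      std::vector<std::vector<double> > m;
      std::string line;
      while (std::getline(is, line)) {
        std::vector<double> row;
        std::istringstream ls(line);
        std::string field;
        while (std::getline(ls, field, sep))
          row.push_back(std::atof(field.c_str()));
        if (!row.empty())
          m.push_back(row);
      }
      return m;
    }

    int main()
    {
      std::istringstream is("1\t2\t3\n4\t5\t6\n");
      std::vector<std::vector<double> > m = read_matrix(is, '\t');
      std::cout << m.size() << " x " << m[0].size() << "\n";   // 2 x 3
    }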
  • trunk/c++_tools/classifier/SVM.h

    r571 r592  
    9292  };
    9393
     94  ///
     95  /// @brief Support Vector Machine
     96  ///
     97  ///
    9498  ///
    9599  /// Class for SVM using Keerthi's second modification of Platt's
  • trunk/c++_tools/classifier/SupervisedClassifier.cc

    r543 r592  
    1414
    1515  SupervisedClassifier::SupervisedClassifier(const Target& target,
    16                                             statistics::Score* score,
    17                                             const size_t nof_inputs)
     16              statistics::Score* score,
     17              const size_t nof_inputs)
    1818    : target_(target), score_(score), ranker_(0), nof_inputs_(nof_inputs),
    1919      trained_(false)
  • trunk/c++_tools/classifier/SupervisedClassifier.h

    r543 r592  
    44#define _theplu_classifier_supervisedclassifier_
    55
    6 #include <cctype>
     6#include <stddef.h>
    77
    88namespace theplu {
     
    3333  public:
    3434    ///
    35     /// Constructor. Taking a vector of target values.
     35    /// Constructor. Taking a Target object.
    3636    ///
    3737    SupervisedClassifier(const Target&);
  • trunk/c++_tools/classifier/Target.h

    r581 r592  
    9292
    9393    ///
     94    /// @brief randomize labels
     95    ///
      96    /// Randomizes the classes. The number of samples with a specific
      97    /// label is not modified, nor is the mapping from label to class.
     98    ///
     99    void random_shuffle(void);
     100
     101    ///
    94102    /// @return number of samples
    95103    ///
     
    124132    std::vector<char> binary_; // avoid using vector<bool>
    125133    std::vector<size_t> classes_; // class of sample i
    126     // map between class label and class index
     134    // map between class label and class index (inverse of labels_)
    127135    std::map<std::string,size_t> class_map_; 
    128136    std::vector<std::string> labels_; // label of class i
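
A standalone sketch of the behaviour documented for Target::random_shuffle: shuffling the per-sample class assignments randomizes which sample gets which class, while the count per class and the label-to-class mapping stay unchanged. This is an illustration, not the member's implementation.

    #include <algorithm>
    #include <cstddef>
    #include <iostream>
    #include <vector>

    int main()
    {
      // class of each sample, as in the classes_ vector described above
      std::size_t c[] = {0, 0, 1, 1, 1, 2};
      std::vector<std::size_t> classes(c, c+6);

      // Shuffling the assignments randomizes which sample carries which
      // class, but the number of samples in each class (and the
      // label <-> class mapping) is left untouched.
      std::random_shuffle(classes.begin(), classes.end());

      std::vector<std::size_t> count(3, 0);
      for (std::size_t i=0; i<classes.size(); ++i)
        ++count[classes[i]];
      std::cout << count[0] << " " << count[1] << " "
                << count[2] << "\n";   // always 2 3 1
    }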