Timestamp: Mar 5, 2008, 3:30:58 AM
Author: Peter
Message: working on #75

File: 1 edited

Legend:

Unmodified: prefixed with a space
Added: prefixed with +
Removed: prefixed with -

  • trunk/yat/classifier/SVM.h

r1175 → r1200

   class KernelLookup;
 
-  ///
-  /// @brief Support Vector Machine
-  ///
-  ///
-  ///
-  /// Class for SVM using Keerthi's second modification of Platt's
-  /// Sequential Minimal Optimization. The SVM uses all data given for
-  /// training. If validation or testing is wanted this should be
-  /// taken care of outside (in the kernel).
-  ///
+  /**
+     \brief Support Vector Machine
+  */
   class SVM
   {

…
 
     /**
-       Copy constructor.
+       \brief Copy constructor.
     */
     SVM(const SVM&);
 
     ///
-    /// Destructor
+    /// \brief Destructor
     ///
     virtual ~SVM();
 
-    ///
-    /// Same as copy constructor.
-    ///
+    /**
+       \brief Create an untrained copy of SVM.
+
+       \returns A dynamically allocated SVM, which has to be deleted
+       by the caller to avoid memory leaks.
+    */
     SVM* make_classifier(void) const;
 
     ///
-    /// @return \f$ \alpha \f$
+    /// @return alpha parameters
     ///
     const utility::Vector& alpha(void) const;

…
     /// large C means the training will be focused on getting samples
     /// correctly classified, with risk for overfitting and poor
-    /// generalisation. A too small C will result in a training in which
-    /// misclassifications are not penalized. C is weighted with
-    /// respect to the size, so \f$ n_+C_+ = n_-C_- \f$, meaning a
-    /// misclassificaion of the smaller group is penalized
+    /// generalisation. A too small C will result in a training, in
+    /// which misclassifications are not penalized. C is weighted with
+    /// respect to the size such that \f$ n_+C_+ = n_-C_- \f$, meaning
+    /// a misclassificaion of the smaller group is penalized
     /// harder. This balance is equivalent to the one occuring for
-    /// regression with regularisation, or ANN-training with a
+    /// %regression with regularisation, or ANN-training with a
     /// weight-decay term. Default is C set to infinity.
     ///

…
        + bias \f$, where \f$ t \f$ is the target.
 
-       @return output
+       @return output of training samples
     */
     const theplu::yat::utility::Vector& output(void) const;

…
        is calculated as the output times the margin, i.e., geometric
        distance from decision hyperplane: \f$ \frac{ \sum \alpha_j
-       t_j K_{ij} + bias}{w} \f$ The output has 2 rows. The first row
+       t_j K_{ij} + bias}{|w|} \f$ The output has 2 rows. The first row
        is for binary target true, and the second is for binary target
        false. The second row is superfluous as it is the first row

…
     void predict(const KernelLookup& input, utility::Matrix& predict) const;
 
+    /*
     ///
     /// @return output times margin (i.e. geometric distance from

…
     ///
     double predict(const DataLookupWeighted1D& input) const;
+    */
 
     ///

…
        decreased.
 
-       \throw if maximal number of epoch is reach.
+       Class for SVM using Keerthi's second modification of Platt's
+       Sequential Minimal Optimization. The SVM uses all data given for
+       training.
+
+       \throw std::runtime_error if maximal number of epoch is reach.
     */
     void train(const KernelLookup& kernel, const Target& target);
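
The rewritten comment on the penalty parameter C keeps the balance \( n_+C_+ = n_-C_- \) between the two classes. A short worked example with made-up class sizes shows what this weighting does:

\[
  n_+ = 90,\; n_- = 10:\qquad n_+C_+ = n_-C_- \;\Longrightarrow\; C_- = \frac{n_+}{n_-}\,C_+ = 9\,C_+ ,
\]

so a misclassified sample from the smaller group is penalized nine times as hard as one from the larger group, which is the "penalized harder" behaviour the comment describes.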
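The corrected prediction formula (the denominator changes from \( w \) to \( |w| \) in this revision) is easier to read as a single display. Writing \( f_i \) for the value that output() appears to return for training sample \( i \), and taking \( K_{ij} \) to be the kernel element for samples \( i \) and \( j \) (an assumption, not spelled out in the hunk above):

\[
  f_i = \sum_j \alpha_j t_j K_{ij} + \mathrm{bias},
  \qquad
  \mathrm{predict}_i = \frac{f_i}{\lVert w \rVert},
\]

with \( \alpha_j \) the multipliers returned by alpha() and \( t_j \) the binary targets, conventionally \( \pm 1 \). Dividing by \( \lVert w \rVert \) turns the output into a geometric distance from the decision hyperplane, which is why r1200 replaces \( w \) with \( |w| \).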
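Read together, the r1200 comments describe how the class is meant to be driven: train() consumes the whole kernel, alpha() and output() expose the result for the training samples, predict() fills a two-row matrix, make_classifier() hands back an untrained copy that the caller must delete, and train() may throw std::runtime_error. The sketch below strings these calls together. It is only an illustration under stated assumptions: the theplu::yat::classifier namespace and every header path except yat/classifier/SVM.h are guesses from the repository layout, and construction of the SVM, KernelLookup, Target and Matrix objects is left to the caller.

// Minimal usage sketch based only on the declarations visible in this
// changeset. Header paths marked below and the classifier namespace are
// assumptions about the repository layout, not something this diff shows.
#include "yat/classifier/SVM.h"
#include "yat/classifier/KernelLookup.h"  // assumed header path
#include "yat/classifier/Target.h"        // assumed header path
#include "yat/utility/Matrix.h"           // assumed header path
#include "yat/utility/Vector.h"           // assumed header path

#include <stdexcept>

using namespace theplu::yat;

// The SVM, KernelLookup, Target and result Matrix are created by the
// caller; how to construct them is outside the scope of this sketch.
void train_and_predict(classifier::SVM& svm,
                       const classifier::KernelLookup& kernel,
                       const classifier::Target& target,
                       utility::Matrix& result)
{
  try {
    // Uses all samples in the kernel; any validation or test split has
    // to be handled outside, in the kernel (per the class documentation).
    svm.train(kernel, target);
  }
  catch (const std::runtime_error&) {
    // r1200 documents this throw when the maximal number of epochs
    // is reached.
    return;
  }

  // Fitted multipliers and decision values for the training samples.
  const utility::Vector& alpha = svm.alpha();
  const utility::Vector& output = svm.output();
  (void)alpha;
  (void)output;

  // predict() fills a matrix with 2 rows: the first row is for binary
  // target true, the second for binary target false.
  svm.predict(kernel, result);

  // make_classifier() returns an untrained, dynamically allocated copy
  // that the caller owns and must delete (per the new \returns text).
  classifier::SVM* untrained = svm.make_classifier();
  delete untrained;
}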