Changeset 1167 for trunk/yat/classifier
- Timestamp: Feb 26, 2008, 9:02:28 PM
- Location: trunk/yat/classifier
- Files: 5 edited
trunk/yat/classifier/DataLookup2D.h
r1134 → r1167

     size_t rows(void) const;

-    ///
-    /// @return Data based on selected features.
-    ///
-    virtual const DataLookup2D* selected(const utility::Index&) const=0;
-
-    ///
-    /// @return sub-Lookup of the DataLookup2D
-    ///
-    /// @note Returns a dynamically allocated DataLookup2D, which has
-    /// to be deleted by the caller to avoid memory leaks.
-    ///
-    virtual const DataLookup2D*
-    training_data(const utility::Index&) const=0;
-
-    ///
-    /// @return sub-Lookup of the DataLookup2D
-    ///
-    /// @note Returns a dynamically allocated DataLookup2D, which has
-    /// to be deleted by the caller to avoid memory leaks.
-    ///
-    virtual const DataLookup2D*
-    validation_data(const utility::Index& train,
-                    const utility::Index& val) const=0;
-
     /**
        \return data
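With these pure virtual members gone, sub-lookups can no longer be requested through a DataLookup2D pointer; callers use the concrete lookup classes instead. A minimal sketch of the difference, assuming an existing MatrixLookup ml and a utility::Index train (hypothetical names, not part of the changeset):

    // Before r1167 (polymorphic, through the base class):
    //   const DataLookup2D* sub = lookup.training_data(train);

    // From r1167 on, call the concrete class; MatrixLookup::training_data()
    // still returns a dynamically allocated lookup that the caller must delete.
    const MatrixLookup* sub = ml.training_data(train);
    // ... use *sub ...
    delete sub;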
trunk/yat/classifier/KernelLookup.cc
r1165 → r1167

 #include "KernelLookup.h"
-#include "DataLookup2D.h"
 #include "MatrixLookup.h"
 #include "MatrixLookupWeighted.h"
…
+/*
 const KernelLookup*
 KernelLookup::training_data(const utility::Index& train) const
 {
   return new KernelLookup(*this,train,train);
 }
+*/
…
-const KernelLookup*
-KernelLookup::validation_data(const utility::Index& train,
-                              const utility::Index& validation) const
-{
-  return new KernelLookup(*this,train,validation);
-}
-
-
 bool KernelLookup::weighted(void) const
 {
trunk/yat/classifier/KernelLookup.h
r1165 → r1167

   /**
-     \brief Creates a sub-Kernel identical to the one created using
-     KernelLookup(*this, train, train).
-
-     \return pointer to dynamically allocated sub-Lookup of the
-     KernelLookup
-
-     \note Returns a dynamically allocated DataLookup2D, which has
-     to be deleted by the caller to avoid memory leaks.
-  */
-  const KernelLookup* training_data(const utility::Index& train) const;
-
-  /**
-     In returned kernel each row corresponds to a training sample
-     and each column corresponds to a validation sample. The created
-     sub-KernelLookup is equivalent to using KernelLooup(*this,
-     train, validation).
-
-     \return sub-Lookup of the DataLookup2D
-
-     \note Returns a dynamically allocated DataLookup2D, which has
-     to be deleted by the caller to avoid memory leaks.
-  */
-  const KernelLookup*
-  validation_data(const utility::Index& train,
-                  const utility::Index& validation) const;
-
-  /**
      \return true if underlying Kernel is weighted
   */
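The removed accessors were, per their own removed documentation, equivalent to the KernelLookup(kernel, rows, columns) constructor, which is what SubsetGenerator now calls directly. A hedged migration sketch, assuming an existing KernelLookup kernel and utility::Index objects train and validation (hypothetical names):

    // Before r1167:
    //   const KernelLookup* tr  = kernel.training_data(train);
    //   const KernelLookup* val = kernel.validation_data(train, validation);

    // From r1167: build the equivalent sub-views with the constructor instead.
    // They are still dynamically allocated, so the caller still deletes them.
    const KernelLookup* tr  = new KernelLookup(kernel, train, train);
    const KernelLookup* val = new KernelLookup(kernel, train, validation);
    // ... train on *tr, predict on *val ...
    delete tr;
    delete val;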
trunk/yat/classifier/SVindex.cc
r1121 → r1167

 #include "SVindex.h"
-#include "DataLookup2D.h"
 #include "yat/random/random.h"
 #include "yat/statistics/Averager.h"
trunk/yat/classifier/SubsetGenerator.h
r1165 → r1167

   const Target& validation_target(std::vector<Target>::size_type i) const;

-  ///
-  /// @return true if weighted
-  /// @todo remove this function
-  //bool weighted(void) const;
-
 private:
   void build(const MatrixLookup&);
…
     training_data_.reserve(sampler_.size());
     validation_data_.reserve(sampler_.size());
-    for (size_t i=0; i<sampler_.size(); ++i){
-      // Dynamically allocated. Must be deleted in destructor.
-      training_data_.push_back(data.training_data(sampler.training_index(i)));
-      validation_data_.push_back(data.validation_data(sampler.training_index(i),
-                                                      sampler.validation_index(i)));
-
-      training_target_.push_back(Target(target(),sampler.training_index(i)));
-      validation_target_.push_back(Target(target(),
-                                          sampler.validation_index(i)));
-      utility::yat_assert<std::runtime_error>(training_data_.size()==i+1);
-      utility::yat_assert<std::runtime_error>(training_target_.size()==i+1);
-      utility::yat_assert<std::runtime_error>(validation_data_.size()==i+1);
-      utility::yat_assert<std::runtime_error>(validation_target_.size()==i+1);
-    }
-
-    // No feature selection, hence features same for all partitions
-    // and can be stored in features_[0]
-    features_.push_back(utility::Index(data.rows()));
-
+    build(data);
     utility::yat_assert<std::runtime_error>(training_data_.size()==size());
     utility::yat_assert<std::runtime_error>(training_target_.size()==size());
…
   template<typename T>
   SubsetGenerator<T>::SubsetGenerator(const Sampler& sampler,
                                       const T& data,
                                       FeatureSelector& fs)
     : f_selector_(&fs), sampler_(sampler)
   {
…
       training_target_.push_back(Target(target(),training_index(k)));
       validation_target_.push_back(Target(target(),validation_index(k)));
-      // training data with no feature selection
-      const MatrixLookup* train_data_all_feat =
-        ml.training_data(training_index(k));
-      // use these data to create feature selection
-      utility::yat_assert<std::runtime_error>(train_data_all_feat);
-      f_selector_->update(*train_data_all_feat, training_target(k));
+      if (f_selector_){
+        // training data with no feature selection
+        const MatrixLookup* train_data_all_feat =
+          ml.training_data(training_index(k));
+        // use these data to create feature selection
+        utility::yat_assert<std::runtime_error>(train_data_all_feat);
+        f_selector_->update(*train_data_all_feat, training_target(k));
       // get features
-      features_.push_back(f_selector_->features());
-      utility::yat_assert<std::runtime_error>(train_data_all_feat);
-      delete train_data_all_feat;
+        features_.push_back(f_selector_->features());
+        utility::yat_assert<std::runtime_error>(train_data_all_feat);
+        delete train_data_all_feat;
+      }
+      else // no feature selection
+        features_.push_back(utility::Index(ml.rows()));
+

       // Dynamically allocated. Must be deleted in destructor.
…
       training_target_.push_back(Target(target(),training_index(k)));
       validation_target_.push_back(Target(target(),validation_index(k)));
-      // training data with no feature selection
-      const MatrixLookupWeighted* train_data_all_feat =
-        ml.training_data(training_index(k));
-      // use these data to create feature selection
-      f_selector_->update(*train_data_all_feat, training_target(k));
-      // get features
-      features_.push_back(f_selector_->features());
-      delete train_data_all_feat;
-
+      if (f_selector_){
+        // training data with no feature selection
+        const MatrixLookupWeighted* train_data_all_feat =
+          ml.training_data(training_index(k));
+        // use these data to create feature selection
+        f_selector_->update(*train_data_all_feat, training_target(k));
+        // get features
+        features_.push_back(f_selector_->features());
+        delete train_data_all_feat;
+      }
+      else // no feature selection
+        features_.push_back(utility::Index(ml.rows()));
+
+
       // Dynamically allocated. Must be deleted in destructor.
       training_data_.push_back(new MatrixLookupWeighted(ml, features_.back(),
…
       validation_target_.push_back(Target(target(),validation_index(k)));

-      if (kernel.weighted()){
-        utility::SmartPtr<const MatrixLookupWeighted> ml=kernel.data_weighted();
-        f_selector_->update(MatrixLookupWeighted(*ml,training_index(k),false),
-                            training_target(k));
+      if (f_selector_){
+        if (kernel.weighted()){
+          utility::SmartPtr<const MatrixLookupWeighted> ml=
+            kernel.data_weighted();
+          f_selector_->update(MatrixLookupWeighted(*ml,training_index(k),false),
+                              training_target(k));
+        }
+        else {
+          utility::SmartPtr<const MatrixLookup> ml=kernel.data();
+          f_selector_->update(MatrixLookup(*ml,training_index(k), false),
+                              training_target(k));
+        }
+        features_.push_back(f_selector_->features());
+        const KernelLookup* kl = kernel.selected(features_.back());
+        // Dynamically allocated. Must be deleted in destructor.
+        training_data_.push_back(new KernelLookup(kernel,training_index(k),
+                                                  training_index(k)));
+        validation_data_.push_back(new KernelLookup(kernel, training_index(k),
+                                                    validation_index(k)));
+        utility::yat_assert<std::runtime_error>(kl);
+        delete kl;
       }
-      else {
-        utility::SmartPtr<const MatrixLookup> ml=kernel.data();
-        f_selector_->update(MatrixLookup(*ml,training_index(k), false),
-                            training_target(k));
-      }
-      utility::Index dummie=f_selector_->features();
-      features_.push_back(dummie);
-      //features_.push_back(f_selector_->features());
-      const KernelLookup* kl = kernel.selected(features_.back());
+      else {// no feature selection
+        training_data_.push_back(new KernelLookup(kernel, training_index(k),
+                                                  training_index(k)));
+        validation_data_.push_back(new KernelLookup(kernel,
+                                                    training_index(k),
+                                                    validation_index(k)));
+      }

-      // Dynamically allocated. Must be deleted in destructor.
-      training_data_.push_back(kl->training_data(training_index(k)));
-      validation_data_.push_back(kl->validation_data(training_index(k),
-                                                     validation_index(k)));
-      utility::yat_assert<std::runtime_error>(kl);
-      delete kl;
     }
   }
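The common thread through the build() overloads is that f_selector_ is now checked before it is dereferenced, so a SubsetGenerator constructed without a FeatureSelector simply keeps every feature; this also lets the no-selection constructor reuse build(data) instead of duplicating the partition loop. A condensed sketch of that guard, using the member names from the diff and assuming a lookup ml whose rows() gives the number of features:

    if (f_selector_) {
      // feature selection: fit the selector on the training partition only
      const MatrixLookup* train_data = ml.training_data(training_index(k));
      f_selector_->update(*train_data, training_target(k));
      features_.push_back(f_selector_->features());
      delete train_data;
    }
    else {
      // no FeatureSelector supplied: use all rows (features) of the lookup
      features_.push_back(utility::Index(ml.rows()));
    }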