ENSDF++ 1.1
An easy, fast, and simple way to run queries against the ENSDF database, written in C++.
Dataset.cpp
#include "Dataset.h"

Dataset::~Dataset()
{
  // Delete every owned record, null the stale pointer, then clear each list.
  // Note: myGammaRecords is filled in flushSpecific() but is not deleted here.
  for(list<IdentificationRecord* >::iterator it = myIdentificationRecords.begin(); it!=myIdentificationRecords.end(); it++)
    {
      delete *it;
      *it = NULL;
    }
  myIdentificationRecords.clear();
  for(list<HistoryRecord* >::iterator it = myHistoryRecords.begin(); it!=myHistoryRecords.end(); it++)
    {
      delete *it;
      *it = NULL;
    }
  myHistoryRecords.clear();
  for(list<CrossReferenceRecord* >::iterator it = myCrossReferenceRecords.begin(); it!=myCrossReferenceRecords.end(); it++)
    {
      delete *it;
      *it = NULL;
    }
  myCrossReferenceRecords.clear();
  for(list<CommentRecord* >::iterator it = myCommentRecords.begin(); it!=myCommentRecords.end(); it++)
    {
      delete *it;
      *it = NULL;
    }
  myCommentRecords.clear();
  for(list<ParentRecord* >::iterator it = myParentRecords.begin(); it!=myParentRecords.end(); it++)
    {
      delete *it;
      *it = NULL;
    }
  myParentRecords.clear();
  for(list<NormalizationRecord* >::iterator it = myNormalizationRecords.begin(); it!=myNormalizationRecords.end(); it++)
    {
      delete *it;
      *it = NULL;
    }
  myNormalizationRecords.clear();
  for(list<ProductionNormalizationRecord* >::iterator it = myProductionNormalizationRecords.begin(); it!=myProductionNormalizationRecords.end(); it++)
    {
      delete *it;
      *it = NULL;
    }
  myProductionNormalizationRecords.clear();
  for(list<LevelRecord* >::iterator it = myLevelRecords.begin(); it!=myLevelRecords.end(); it++)
    {
      delete *it;
      *it = NULL;
    }
  myLevelRecords.clear();
  for(list<BetaMinusRecord* >::iterator it = myBetaMinusRecords.begin(); it!=myBetaMinusRecords.end(); it++)
    {
      delete *it;
      *it = NULL;
    }
  myBetaMinusRecords.clear();
  for(list<BetaPlusRecord* >::iterator it = myBetaPlusRecords.begin(); it!=myBetaPlusRecords.end(); it++)
    {
      delete *it;
      *it = NULL;
    }
  myBetaPlusRecords.clear();
  for(list<AlphaRecord* >::iterator it = myAlphaRecords.begin(); it!=myAlphaRecords.end(); it++)
    {
      delete *it;
      *it = NULL;
    }
  myAlphaRecords.clear();
  for(list<DelayedParticleRecord* >::iterator it = myDelayedParticleRecords.begin(); it!=myDelayedParticleRecords.end(); it++)
    {
      delete *it;
      *it = NULL;
    }
  myDelayedParticleRecords.clear();

  // These lists alias records deleted above, so they are only cleared.
  myBetaRecords.clear();
  myRecords.clear();
}

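// Design note: the destructor above implements manual ownership of raw
// pointers with delete-and-null loops. A sketch of an alternative (an
// assumption, not the original interface): storing std::unique_ptr lets
// clear() release the records without any loop.
//
//   #include <memory>
//   std::list<std::unique_ptr<LevelRecord> > levels; // hypothetical member
//   levels.clear();                                  // deletes every record
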
void Dataset::flushAllStacksExcept(int nbrOfRecords, ...)
{
  // Collect the record types to keep. Enums are promoted to int when passed
  // through varargs, hence va_arg(v1,int) followed by intToEnum().
  set<RecordType> toNotFlush;
  va_list v1;
  va_start(v1,nbrOfRecords);
  for( int i = 0; i<nbrOfRecords; ++i )
    {
      RecordType R1 = intToEnum(va_arg(v1,int));
      toNotFlush.insert(R1);
    }
  va_end(v1);

  // Flush every stacked record list whose type was not named above.
  for(map<RecordType, list<string> >::iterator it = myRecordStack.begin(); it!=myRecordStack.end(); it++)
    {
      if(toNotFlush.find(it->first)==toNotFlush.end())
        flushSpecific(it->first);
    }
}

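// Usage sketch (illustrative): the count comes first, followed by that many
// RecordType values, as in the calls made by the constructor below.
//
//   flushAllStacksExcept(0);                              // flush everything
//   flushAllStacksExcept(1, ParentRecord_);               // keep one stack
//   flushAllStacksExcept(2, ParentRecord_, LevelRecord_); // hypothetical two-type call
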
void Dataset::flushSpecific(RecordType toFlush)
{
  map<RecordType, list<string> >::iterator it = myRecordStack.find(toFlush);
  if(it==myRecordStack.end())
    throw DataFileException("Cannot flush the attempted list.");

  if(it->second.empty())
    return;

  // Resolve the most recently flushed context records; newly constructed
  // records keep pointers to these.
  ParentRecord * ParentReference = NULL;
  if(!myParentRecords.empty())
    ParentReference = myParentRecords.back();
  NormalizationRecord * NormalizationReference = NULL;
  if(!myNormalizationRecords.empty())
    NormalizationReference = myNormalizationRecords.back();
  QValueRecord * QValueReference = NULL;
  if(!myQValueRecords.empty())
    QValueReference = myQValueRecords.back();
  LevelRecord * LevelReference = NULL;
  if(!myLevelRecords.empty())
    LevelReference = myLevelRecords.back();

  if(toFlush==IdentificationRecord_)
    myIdentificationRecords.push_back(new IdentificationRecord(it->second));
  else if(toFlush==HistoryRecord_)
    myHistoryRecords.push_back(new HistoryRecord(it->second));
  else if(toFlush==CrossReferenceRecord_)
    myCrossReferenceRecords.push_back(new CrossReferenceRecord(it->second));
  else if(toFlush==CommentRecord_)
    myCommentRecords.push_back(new CommentRecord(it->second));
  else if(toFlush==ParentRecord_)
    myParentRecords.push_back(new ParentRecord(it->second));
  else if(toFlush==NormalizationRecord_)
    myNormalizationRecords.push_back(new NormalizationRecord(it->second, ParentReference));
  else if(toFlush==ProductionNormalizationRecord_)
    myProductionNormalizationRecords.push_back(new ProductionNormalizationRecord(it->second, NormalizationReference));
  else if(toFlush==LevelRecord_)
    myLevelRecords.push_back(new LevelRecord(it->second, QValueReference));
  else if(toFlush==BetaMinusRecord_)
    myBetaMinusRecords.push_back(new BetaMinusRecord(it->second, NormalizationReference, LevelReference, ParentReference, QValueReference));
  else if(toFlush==BetaPlusRecord_)
    myBetaPlusRecords.push_back(new BetaPlusRecord(it->second, NormalizationReference, LevelReference, ParentReference, QValueReference));
  else if(toFlush==AlphaRecord_)
    myAlphaRecords.push_back(new AlphaRecord(it->second, NormalizationReference, LevelReference, ParentReference, QValueReference));
  else if(toFlush==DelayedParticleRecord_)
    myDelayedParticleRecords.push_back(new DelayedParticleRecord(it->second, NormalizationReference, LevelReference, ParentReference, QValueReference));
  else if(toFlush==GammaRecord_)
    myGammaRecords.push_back(new GammaRecord(it->second, LevelReference, NormalizationReference, QValueReference));
  // Note: QValueRecord_ has no branch above, so stacked Q-value lines are
  // cleared without constructing a QValueRecord.
  it->second.clear();
}

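// Example (illustrative): the flush order determines the linkage. Flushing a
// level before its de-exciting gamma makes the new GammaRecord point at that
// level via LevelReference.
//
//   flushSpecific(LevelRecord_); // myLevelRecords.back() is now this level
//   flushSpecific(GammaRecord_); // the GammaRecord references that level
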
void Dataset::initRecordStack()
{
  // operator[] default-constructs an empty line stack for every record type,
  // so the flush routines can iterate all of them even before input arrives.
  // QValueRecord_ is not listed here; the constructor creates it on demand.
  myRecordStack[IdentificationRecord_];
  myRecordStack[HistoryRecord_];
  myRecordStack[CrossReferenceRecord_];
  myRecordStack[CommentRecord_];
  myRecordStack[ParentRecord_];
  myRecordStack[NormalizationRecord_];
  myRecordStack[ProductionNormalizationRecord_];
  myRecordStack[LevelRecord_];
  myRecordStack[BetaMinusRecord_];
  myRecordStack[BetaPlusRecord_];
  myRecordStack[AlphaRecord_];
  myRecordStack[DelayedParticleRecord_];
  myRecordStack[GammaRecord_];
}

Dataset::Dataset(list<string> textToInput)
{
  // Parse the dataset card by card. ENSDF cards are fixed-format: the
  // character at index 5 is the continuation flag (blank for a new record)
  // and the characters at indices 6-8 identify the record type.
  initRecordStack();
  for(list<string>::iterator it = textToInput.begin(); it!=textToInput.end(); it++)
    {
      string local = *it;
      if(local[6]==' ' && local[7]==' ' && local[8]==' ') //IdentificationRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[IdentificationRecord_].empty())
                throw DataFileException("No IdentificationRecord to continue: \n" + local);
              flushAllStacksExcept(1,IdentificationRecord_);
            }
          myRecordStack[IdentificationRecord_].push_back(local);
        }
      else if(local[6]==' ' && local[7]=='H' && local[8]==' ') //HistoryRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[HistoryRecord_].empty())
                throw DataFileException("No HistoryRecord to continue: \n" + local);
            }
          myRecordStack[HistoryRecord_].push_back(local);
        }
      else if(local[6]==' ' && local[7]=='Q' && local[8]==' ') //QValueRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[QValueRecord_].empty())
                throw DataFileException("No QValueRecord to continue: \n" + local);
              flushAllStacksExcept(1,QValueRecord_);
            }
          myRecordStack[QValueRecord_].push_back(local);
        }
      else if(local[6]==' ' && local[7]=='X') //CrossReferenceRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[CrossReferenceRecord_].empty())
                throw DataFileException("No CrossReferenceRecord to continue: \n" + local);
            }
          myRecordStack[CrossReferenceRecord_].push_back(local);
        }
      else if(local[6]=='C' || local[6]=='D' || local[6]=='T' || local[6]=='c' || local[6]=='d' || local[6]=='t') //CommentRecord
        {
          if(local[5]==' ')
            {
              flushSpecific(CommentRecord_);
            }
          else
            {
              if(myRecordStack[CommentRecord_].empty())
                throw DataFileException("No CommentRecord to continue: \n" + local);
            }
          myRecordStack[CommentRecord_].push_back(local);
        }
      else if(local[6]==' ' && local[7]=='P') //ParentRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[ParentRecord_].empty())
                throw DataFileException("No ParentRecord to continue: \n" + local);
              flushAllStacksExcept(1,ParentRecord_);
            }
          myRecordStack[ParentRecord_].push_back(local);
        }
      else if(local[6]==' ' && local[7]=='N') //NormalizationRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[NormalizationRecord_].empty())
                throw DataFileException("No NormalizationRecord to continue: \n" + local);
              flushAllStacksExcept(1,NormalizationRecord_);
            }
          myRecordStack[NormalizationRecord_].push_back(local);
        }
      else if(local[6]=='P' && local[7]=='N' && local[8]==' ') //ProductionNormalizationRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[ProductionNormalizationRecord_].empty())
                throw DataFileException("No ProductionNormalizationRecord to continue: \n" + local);
              flushAllStacksExcept(1,ProductionNormalizationRecord_);
            }
          myRecordStack[ProductionNormalizationRecord_].push_back(local);
        }
      else if(local[6]==' ' && local[7]=='L' && local[8]==' ') //LevelRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[LevelRecord_].empty())
                throw DataFileException("No LevelRecord to continue: \n" + local);
              flushAllStacksExcept(1,LevelRecord_);
            }
          myRecordStack[LevelRecord_].push_back(local);
        }
      else if(local[6]==' ' && local[7]=='B' && local[8]==' ') //BetaMinusRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[BetaMinusRecord_].empty())
                throw DataFileException("No BetaMinusRecord to continue: \n" + local);
              flushAllStacksExcept(1,BetaMinusRecord_);
            }
          myRecordStack[BetaMinusRecord_].push_back(local);
        }
      else if(local[6]==' ' && local[7]=='E' && local[8]==' ') //BetaPlusRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[BetaPlusRecord_].empty())
                throw DataFileException("No BetaPlusRecord to continue: \n" + local);
              flushAllStacksExcept(1,BetaPlusRecord_);
            }
          myRecordStack[BetaPlusRecord_].push_back(local);
        }
      else if(local[6]==' ' && local[7]=='A' && local[8]==' ') //AlphaRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[AlphaRecord_].empty())
                throw DataFileException("No AlphaRecord to continue: \n" + local);
              flushAllStacksExcept(1,AlphaRecord_);
            }
          myRecordStack[AlphaRecord_].push_back(local);
        }
      else if(local[6]==' ' && (local[7]=='D' || local[7]==' ') && (local[8]=='N' || local[8]=='P' || local[8]=='A')) //DelayedParticleRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[DelayedParticleRecord_].empty())
                throw DataFileException("No DelayedParticleRecord to continue: \n" + local);
              flushAllStacksExcept(1,DelayedParticleRecord_);
            }
          myRecordStack[DelayedParticleRecord_].push_back(local);
        }
      else if(local[6]==' ' && local[7]=='G' && local[8]==' ') //GammaRecord
        {
          if(local[5]==' ')
            {
              flushAllStacksExcept(0);
            }
          else
            {
              if(myRecordStack[GammaRecord_].empty())
                throw DataFileException("No GammaRecord to continue: \n" + local);
              flushAllStacksExcept(1,GammaRecord_);
            }
          myRecordStack[GammaRecord_].push_back(local);
        }
      else if(local[3]==' ' && local[4]==' ' && local[5]==' ' && local[6]==' ' && local[7]=='R' && local[8]==' ') //ReferenceRecord (stacked with the cross references)
        {
          myRecordStack[CrossReferenceRecord_].push_back(local);
          flushAllStacksExcept(0);
        }
      else //if this occurs, we either have a) a file error or b) an error in this software. Either way, throw an exception.
        {
          throw DataFileException("Invalid entry detected in data files: \n" + local);
        }
    }
  flushAllStacksExcept(0); //flush whatever is still stacked at end of input
  initRecords();
  initBetaRecords();
}

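// Usage sketch (illustrative; readLines() is a hypothetical helper that
// splits an ENSDF file into its fixed-width card lines):
//
//   list<string> cards = readLines("ensdf_dataset.txt");
//   Dataset ds(cards);
//   list<LevelRecord*> levels = ds.getLevelRecords(); // owned by ds
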
void Dataset::initBetaRecords()
{
  // Collect beta-minus and beta-plus records into one list; both record
  // types convert to BetaRecordWrapper*.
  for(list<BetaMinusRecord* >::iterator it = myBetaMinusRecords.begin(); it!=myBetaMinusRecords.end(); it++)
    {
      myBetaRecords.push_back(*it);
    }

  for(list<BetaPlusRecord* >::iterator it = myBetaPlusRecords.begin(); it!=myBetaPlusRecords.end(); it++)
    {
      myBetaRecords.push_back(*it);
    }
}

list<Record *> Dataset::getRecords() const
{
  return myRecords;
}

list<BetaRecordWrapper *> Dataset::getBetaRecords() const
{
  return myBetaRecords;
}

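// Example (illustrative): iterate every beta record, regardless of sign,
// through the combined wrapper list.
//
//   list<BetaRecordWrapper *> betas = ds.getBetaRecords();
//   for(list<BetaRecordWrapper *>::iterator b = betas.begin(); b!=betas.end(); ++b)
//     {
//       // ... use (*b) ...
//     }
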
void Dataset::initRecords()
{
  // Collect all owned records into the combined myRecords list.
  // Note: gamma and reference records are not added here.
  for(list<IdentificationRecord* >::iterator it = myIdentificationRecords.begin(); it!=myIdentificationRecords.end(); it++)
    {
      myRecords.push_back(*it);
    }
  for(list<HistoryRecord* >::iterator it = myHistoryRecords.begin(); it!=myHistoryRecords.end(); it++)
    {
      myRecords.push_back(*it);
    }
  for(list<CrossReferenceRecord* >::iterator it = myCrossReferenceRecords.begin(); it!=myCrossReferenceRecords.end(); it++)
    {
      myRecords.push_back(*it);
    }
  for(list<CommentRecord* >::iterator it = myCommentRecords.begin(); it!=myCommentRecords.end(); it++)
    {
      myRecords.push_back(*it);
    }
  for(list<ParentRecord* >::iterator it = myParentRecords.begin(); it!=myParentRecords.end(); it++)
    {
      myRecords.push_back(*it);
    }
  for(list<NormalizationRecord* >::iterator it = myNormalizationRecords.begin(); it!=myNormalizationRecords.end(); it++)
    {
      myRecords.push_back(*it);
    }
  for(list<ProductionNormalizationRecord* >::iterator it = myProductionNormalizationRecords.begin(); it!=myProductionNormalizationRecords.end(); it++)
    {
      myRecords.push_back(*it);
    }
  for(list<LevelRecord* >::iterator it = myLevelRecords.begin(); it!=myLevelRecords.end(); it++)
    {
      myRecords.push_back(*it);
    }
  for(list<BetaMinusRecord* >::iterator it = myBetaMinusRecords.begin(); it!=myBetaMinusRecords.end(); it++)
    {
      myRecords.push_back(*it);
    }
  for(list<BetaPlusRecord* >::iterator it = myBetaPlusRecords.begin(); it!=myBetaPlusRecords.end(); it++)
    {
      myRecords.push_back(*it);
    }
  for(list<AlphaRecord* >::iterator it = myAlphaRecords.begin(); it!=myAlphaRecords.end(); it++)
    {
      myRecords.push_back(*it);
    }
  for(list<DelayedParticleRecord* >::iterator it = myDelayedParticleRecords.begin(); it!=myDelayedParticleRecords.end(); it++)
    {
      myRecords.push_back(*it);
    }
}

list<IdentificationRecord* > Dataset::getIdentificationRecords() const
{
  return myIdentificationRecords;
}

list<HistoryRecord* > Dataset::getHistoryRecords() const
{
  return myHistoryRecords;
}

list<CrossReferenceRecord* > Dataset::getCrossReferenceRecords() const
{
  return myCrossReferenceRecords;
}

list<CommentRecord* > Dataset::getCommentRecords() const
{
  return myCommentRecords;
}

list<ParentRecord* > Dataset::getParentRecords() const
{
  return myParentRecords;
}

list<NormalizationRecord* > Dataset::getNormalizationRecords() const
{
  return myNormalizationRecords;
}

list<ProductionNormalizationRecord* > Dataset::getProductionNormalizationRecords() const
{
  return myProductionNormalizationRecords;
}

list<LevelRecord* > Dataset::getLevelRecords() const
{
  return myLevelRecords;
}

list<BetaMinusRecord* > Dataset::getBetaMinusRecords() const
{
  return myBetaMinusRecords;
}

list<BetaPlusRecord* > Dataset::getBetaPlusRecords() const
{
  return myBetaPlusRecords;
}

list<AlphaRecord* > Dataset::getAlphaRecords() const
{
  return myAlphaRecords;
}

list<DelayedParticleRecord* > Dataset::getDelayedParticleRecords() const
{
  return myDelayedParticleRecords;
}

list<GammaRecord* > Dataset::getGammaRecords() const
{
  return myGammaRecords;
}

list<ReferenceRecord* > Dataset::getReferenceRecords() const
{
  // Note: myReferenceRecords is never filled in this file; reference lines
  // are stacked with the cross references in the constructor.
  return myReferenceRecords;
}

Created by Rikard Lundmark