
csv_tools.py
00001 """
00002 This funtionality is typically invoked from classmethods on DybDbi Row classes, eg::
00003 
00004    from DybDbi import GCalibPmtSpec
00005    GCalibPmtSpec.check( "$DBWRITERROOT/share/DYB_MC_AD1.txt", afterPulse="AfterPulseProb", sigmaSpe="SigmaSpeHigh", prePulse="PrePulseProb", description="Describ" )
00006    GCalibPmtSpec.import( "$DBWRITERROOT/share/DYB_MC_AD1.txt", afterPulse="AfterPulseProb", sigmaSpe="SigmaSpeHigh", prePulse="PrePulseProb", description="Describ" )
00007    GCalibPmtSpec.export( "$HOME/DYB_MC_AD1.txt" )
00008 
00009 Implemented
00010 ~~~~~~~~~~~~
00011 
00012 #. mapping csv field name to DBI attribute name 
00013 #. writing in DB 
00014 
00015 Todo 
00016 ~~~~~
00017 
00018 #. many checks are missing 
00019 #. inputing context info (for import) including MC/Data starttime task ...
00020 #. command line client : OptionParserized 
00021 #. investigate logcomments ... could automate dybsvn traclink@rev to the source csv ?
00022 
00023 """
import os, sys, csv
from mapper import Mapper
from csvrw import CSV

from DybDbi import ContextRange, Site, SimFlag, TimeStamp, DetectorId


def csv_check( cls, path, **kwargs ):
    """
    Check the validity of the CSV file and the correspondence between CSV fields and DBI attributes::

         from DybDbi import GCalibPmtSpec
         GCalibPmtSpec.csv_check( "$DBWRITERROOT/share/DYB_%s_AD1.txt" % "SAB", afterPulse="AfterPulseProb", sigmaSpe="SigmaSpeHigh", prePulse="PrePulseProb", description="Describ" )

    Manual mapping is required if field names do not match DBI attribute names
    (primitive case insensitive auto mapping is applied to avoid the need for tedious full mapping).

    """
    csv_ = CSV( path, **kwargs )
    csv_.read()
    csv_.smry()
    if csv_.is_valid():
        print "csv is valid"
    else:
        print "csv is NOT valid"

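## The mapping behaviour described in the csv_check docstring can be pictured with a
## small standalone sketch.  This is NOT the real ``mapper.Mapper`` used below; it is
## only an illustration, under the assumption that explicit kwargs take precedence and
## that remaining attributes are matched to CSV fields case-insensitively.

def _sketch_auto_map( attributes, fieldnames, **kwargs ):
    """
    Illustrative sketch only: return a dict mapping DBI attribute name to CSV field
    name, combining explicit kwargs with primitive case-insensitive auto matching.
    """
    amap = dict(kwargs)                                  ## explicit mappings win
    lowered = dict((f.lower(), f) for f in fieldnames)   ## case-insensitive lookup of CSV fields
    for att in attributes:
        if att in amap:
            continue                                     ## already mapped by hand
        if att.lower() in lowered:
            amap[att] = lowered[att.lower()]             ## primitive auto mapping
    return amap

## e.g. _sketch_auto_map( ['status','afterPulse'], ['Status','AfterPulseProb'], afterPulse="AfterPulseProb" )
## -->  {'status': 'Status', 'afterPulse': 'AfterPulseProb'}

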
def csv_compare( cls, path, **kwargs ):
    """
    Compare entries in the CSV file with those found in the DB
    """
    csv_ = CSV( path, **kwargs )
    csv_.read()
    ## NB only the CSV side is read so far, the comparison against DB entries is not yet implemented


def csv_import( cls, path, **kwargs ):
    """
    Import the CSV file into the Database. A default writer context is used for now::

        ContextRange::ContextRange(const int siteMask, const int simMask, const TimeStamp& tstart, const TimeStamp& tend)

    Existing validity entries, for example::

        mysql> select * from CalibPmtSpecVld ;
        +-------+---------------------+---------------------+----------+---------+---------+------+-------------+---------------------+---------------------+
        | SEQNO | TIMESTART           | TIMEEND             | SITEMASK | SIMMASK | SUBSITE | TASK | AGGREGATENO | VERSIONDATE         | INSERTDATE          |
        +-------+---------------------+---------------------+----------+---------+---------+------+-------------+---------------------+---------------------+
        |    26 | 2011-01-22 08:15:17 | 2020-12-30 16:00:00 |      127 |       1 |       0 |    0 |          -1 | 2011-01-22 08:15:17 | 2011-02-25 08:10:15 |
        |    18 | 2010-06-21 07:49:24 | 2038-01-19 03:14:07 |       32 |       1 |       1 |    0 |          -1 | 2010-06-21 15:50:24 | 2010-07-19 12:49:29 |

    TODO: consider making this a classmethod on the writer rather than the Row class,
    or do not shrinkwrap at all and just leave it as an example.

    """
    src = CSV( path, **kwargs )
    src.read()    ## exceptions are raised when invalid

    mpr = Mapper( cls, src.fieldnames, **kwargs )   ## maps CSV field names to DBI attribute names

    wrt = cls.Wrt()
    cr = ContextRange( Site.kAll, SimFlag.kData|SimFlag.kMC, TimeStamp.GetBOT(), TimeStamp.GetEOT() )   ## wide-open default validity range
    wrt.ctx( contextrange=cr, dbno=0, versiondate=TimeStamp(0,0), subsite=0, task=7, logcomment="DybDbi demo write" )

    for r in map(mpr,src):            ## each r is a dict keyed by DBI attribute name
        instance = cls.Create( **r )
        wrt.Write( instance )

    print wrt.Close()                 ## the DB write happens on Close

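## The wide-open context above is only the demo default ; per the Todo list the context
## info (site, MC/Data, starttime, task) should eventually come from the caller.  A
## minimal sketch of a narrower writer context follows.  It is only an illustration :
## ``Site.kDayaBay`` and the calendar-style ``TimeStamp`` constructor are assumptions,
## not taken from this module.

def _sketch_narrow_context():
    """
    Illustrative sketch only: an MC-only validity context for a single site,
    from an assumed start time until the end of time.
    """
    tstart = TimeStamp( 2011, 1, 22, 8, 15, 17 )     ## assumed calendar constructor
    return ContextRange( Site.kDayaBay, SimFlag.kMC, tstart, TimeStamp.GetEOT() )

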
def csv_export( cls, path, **kwargs ):
    """
    Export the result of a default context DBI query as a CSV file

    :param path: path of output file
    :param fieldnames:  optionally specify the field order with a list of fieldnames

    .. note:: make the output more human readable with regular column widths

    """
    csv_ = CSV( path, **kwargs )
    ## NB the DBI query and the writing of rows are not yet implemented

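## The ``.. note::`` above asks for regular column widths.  A minimal sketch of such
## padded CSV output follows, assuming the rows have already been obtained (e.g. from
## a DBI read, not shown) as a list of dicts keyed by fieldname.

def _sketch_padded_csv( path, fieldnames, rows ):
    """
    Illustrative sketch only: write ``rows`` (a list of dicts) to ``path`` with each
    column right-padded to a regular width, keeping the file parseable by the stdlib
    csv module (imported at the top of this module) while easy to read by eye.
    """
    widths = dict((f, max([len(f)] + [len(str(r[f])) for r in rows])) for f in fieldnames)
    with open( path, "w" ) as fp:
        w = csv.writer( fp )
        w.writerow( [f.ljust(widths[f]) for f in fieldnames] )        ## padded header
        for r in rows:
            w.writerow( [str(r[f]).ljust(widths[f]) for f in fieldnames] )

## e.g. _sketch_padded_csv( "/tmp/out.csv", ["status","afterPulse"], [{"status":1,"afterPulse":0.05}] )
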
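## Per the Todo list, the ``__main__`` block below could eventually become an
## OptionParser based command line client.  A minimal sketch of what that might look
## like follows ; the option names and defaults are assumptions, not an existing interface.

def _sketch_optparse_main():
    """
    Illustrative sketch only: an OptionParser based client dispatching to
    csv_check or csv_import, e.g.::

        python csv_tools.py --action check $DBWRITERROOT/share/DYB_MC_AD1.txt
    """
    from optparse import OptionParser
    from DybDbi import GCalibPmtSpec
    op = OptionParser(usage="usage: %prog [options] path")
    op.add_option("-a", "--action", default="check", help="either check or import [default: %default]")
    opts, args = op.parse_args()
    if len(args) != 1:
        op.error("expecting a single csv path argument")
    actions = { "check" : csv_check, "import" : csv_import }
    kwa = dict( afterPulse="AfterPulseProb", sigmaSpe="SigmaSpeHigh", prePulse="PrePulseProb", description="Describ" )
    actions.get( opts.action, csv_check )( GCalibPmtSpec, args[0], **kwa )

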
if __name__ == '__main__':

    from DybDbi import GCalibPmtSpec
    #path = "$DBWRITERROOT/share/DYB_SAB_AD1.txt"
    path = "$DBWRITERROOT/share/DYB_MC_AD1.txt"
    #csv_check(GCalibPmtSpec, path , afterPulse="AfterPulseProb", sigmaSpe="SigmaSpeHigh", prePulse="PrePulseProb", description="Describ" )
    csv_import(GCalibPmtSpec, path , afterPulse="AfterPulseProb", sigmaSpe="SigmaSpeHigh", prePulse="PrePulseProb", description="Describ" )