class Mapper(dict):
    """
    Establishes the mapping between a set of fields (such as csv fields) and dbi attributes. Usage::

        ckf = ['status', '_srcline', 'afterPulse', 'sigmaSpe', 'pmtID', 'efficiency', 'darkRate', '_hasblank', 'prePulse', 'speLow', 'timeOffset', 'timeSpread', 'speHigh', 'description']
        mpr = Mapper( GCalibPmtSpec, ckf, afterPulse="AfterPulseProb", sigmaSpe="SigmaSpeHigh", prePulse="PrePulseProb", description="Describ" )
        print mpr

    If a mapping cannot be made, an exception is raised that reports the partial mapping constructed.

    The automapping performed is deliberately simple: only names that are case insensitively
    identical are auto mapped. Any other difference between a csv field name and a dbi attribute
    must be supplied manually via the keyword arguments.

    The string codetype from the spec is promoted into the corresponding python type, enabling
    conversion of the csv dict (composed entirely of strings) into a dbi dict with appropriately
    typed values.

    """
    def __init__(self, cls, csv_fields, **kwargs ):
        self.cls = cls

        # underscore-prefixed csv fields are bookkeeping fields, not payload
        self.csv_fields = filter(lambda _:not _.startswith("_"), csv_fields)
        self.dbi_attrs = map(str, cls.SpecKeys().aslist() )

        self.automap()
        self.update(kwargs)    # manually supplied csv field -> dbi attribute mappings take precedence

        # inverse mapping, dbi attribute -> csv field
        self.dbi2csv = dict([(kdbi,kcsv) for kcsv,kdbi in self.items() ])

        sk = cls.SpecKeys().aslist()
        sm = cls.SpecMap().asdod()

        if not self.is_valid():
            raise Exception("insufficient info to construct mapper, partial mapping: %r" % self)

        def cast( typ ):
            """Promote the spec codetype string into the corresponding python type."""
            if typ == "int":
                return int
            elif typ == "float" or typ == "double":
                return float
            else:
                return str

        # csv field -> python type, used to convert the all-string csv values
        self.csv2type = dict([(self.dbi2csv[k],cast(sm[k]['codetype'])) for k in sk ])

    def is_valid(self):
        return len(self.miss) == 0

    def smry(self):
        print repr(self)
        print self

    def automap(self):
        """
        Basic auto mapping, using case insensitive comparison and yielding a
        case sensitive mapping from csv fields to dbi attributes.

        The index of each csv fieldname in the dbi attribute list is found with
        case insensitive string comparison; fields without a match are mapped to None.
        """
        allcaps = lambda _:_.upper()
        for ck in self.csv_fields:
            try:
                idx = map(allcaps, self.dbi_attrs).index( allcaps(ck) )
            except ValueError:
                idx = -1
            if idx > -1:
                self[ck] = self.dbi_attrs[idx]
            else:
                self[ck] = None

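    # Illustration (hypothetical casings): with csv_fields ['pmtID', 'darkRate', 'description']
    # and dbi_attrs ['PmtId', 'DarkRate', 'Describ'], automap yields
    # {'pmtID': 'PmtId', 'darkRate': 'DarkRate', 'description': None}; entries left at None
    # appear in the `miss` property below and must instead be mapped via keyword arguments.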
    miss = property(lambda self:filter( lambda _:self[_] is None, self ))

    def convert_csv2dbi(self, dcsv):
        """
        Translate a dict keyed by csv fieldnames into a dict keyed by dbi attr names,
        with values converted to the appropriate types for insertion into the DBI Row cls instance.
        """
        return dict([(self[k], self.csv2type[k](dcsv[k]) ) for k in filter( lambda _:not _.startswith("_"), dcsv) ])

    def __call__(self, dcsv):
        return self.convert_csv2dbi(dcsv)
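    # Illustration (hypothetical values): with the mapping {'pmtID': 'PmtId'} and
    # csv2type['pmtID'] == int, calling mpr({'pmtID': '1', '_srcline': '12'}) returns
    # {'PmtId': 1}; underscore-prefixed csv fields are skipped.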

    def __str__(self):
        return "\n".join( [" csv fields\n %r " % self.csv_fields ,
                           " dbi atts\n %r " % self.dbi_attrs ,
                           " csv2dbi\n %r " % self ,
                           " missing\n %r " % self.miss ])

    def __repr__(self):
        return ",".join(["%s=%s " % (k,v) for k,v in self.items()])
00095


if __name__ == '__main__':
    pass
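
    # Minimal self-contained sketch of Mapper usage.  GCalibPmtSpec from the class
    # docstring is a DBI Row class that is not importable here, so a hypothetical
    # stand-in with the same SpecKeys()/SpecMap() interface is used; the attribute
    # names and codetypes below are illustrative only.
    class _Keys(object):
        def __init__(self, keys):
            self.keys = keys
        def aslist(self):
            return self.keys

    class _Map(object):
        def __init__(self, dod):
            self.dod = dod
        def asdod(self):
            return self.dod

    class FakeSpec(object):
        """Hypothetical stand-in for a DBI Row class such as GCalibPmtSpec."""
        @classmethod
        def SpecKeys(cls):
            return _Keys(['PmtId', 'Describ', 'Status', 'DarkRate'])
        @classmethod
        def SpecMap(cls):
            return _Map({'PmtId':    {'codetype': 'int'},
                         'Describ':  {'codetype': 'string'},
                         'Status':   {'codetype': 'int'},
                         'DarkRate': {'codetype': 'double'}})

    ckf = ['pmtID', 'description', 'status', 'darkRate', '_srcline']
    mpr = Mapper(FakeSpec, ckf, description="Describ")   # description -> Describ supplied manually
    print mpr

    dcsv = {'pmtID': '1', 'description': 'ring1', 'status': '0', 'darkRate': '6800.0', '_srcline': '12'}
    print mpr(dcsv)    # e.g. {'PmtId': 1, 'Describ': 'ring1', 'Status': 0, 'DarkRate': 6800.0}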