
adlidsensor.py
00001 """
00002 AD lid sensors scraping specialization 
00003 
00004 Discussion from Wei:
00005  
00006 #. we were discussing scrapping the average, its standard deviation, the minimum and the maximum within each hour. 
00007 #. It seems average once per hour is sufficient. (Note: reactor flux will be available sparser than 1/hour). 
00008 
00009    ==============   ==============================================
00010     reference
00011    ==============   ==============================================
00012      :doc:`6673`      discussion 
00013      :doc:`6996`      for the current status given by David W. 
00014      :doc:`6983`      summarizes the lid sensor data so far. 
00015    ==============   ==============================================
00016 
00017 
00018 """
import os, logging
import random
from datetime import datetime, timedelta
from pprint import pformat

#from dcs import ADLS
from base import Regime, Scraper, Faker, DCS, SourceContext

log = logging.getLogger(__name__)
#log.addHandler(logging.FileHandler("adlidsensor.log"))

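## Illustrative only: a minimal sketch of the once-per-hour aggregation
## (average, standard deviation, minimum, maximum) described in the module
## docstring. The function name and returned keys are hypothetical and are
## not used by the framework classes below.

def _hourly_aggregate_sketch(values):
    """
    :param values: non-empty list of samples collected within one hour
    :return: dict of the per-hour summary quantities discussed above
    """
    vals = [float(v) for v in values]          ## unify Decimal/float types
    n = len(vals)
    avg = sum(vals) / n
    var = sum((v - avg) ** 2 for v in vals) / n
    return dict(avg=avg, std=var ** 0.5, min=min(vals), max=max(vals), nsamples=n)
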
class AdLidSensor(Regime):
    """
    Regime frontend class with a simple prescribed interface:
    takes the cfg argument into this dict and no arguments in the call,
    allowing the frontend to be entirely generic.
    """
    srcs    = property( lambda self:AdLidSensorSource( DCS(self['source']) ))
    scraper = property( lambda self:AdLidSensorScraper(self.srcs, self.target, self ))
    faker   = property( lambda self:AdLidSensorFaker(self.srcs,self ))


class AdPhysLogical(dict):
    """
    Placeholder dict until the physical-to-logical mapping via DB lookup is worked out.
    """
    def __call__(self, physad ):
        assert physad in self
        return self[physad]
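    ## Illustrative usage (hypothetical values, mirroring the placeholder
    ## mapping constructed in AdLidSensorSource below):
    ##
    ##    apl = AdPhysLogical( AD1=("kDayaBay","kAD1") )
    ##    apl("AD1")          ## -> ("kDayaBay","kAD1")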


class AdLidSensorSource(list):
    """
    A list of SQLAlchemy dynamic classes.
    """
    def __init__(self, srcdb ):
        """Coordinates of source table/joins"""

        apl = AdPhysLogical(             ## placeholder for testing, accurate as of Nov 30, 2011
                  AD1=("kDayaBay","kAD1"),
                  AD2=("kDayaBay","kAD2"),
                  AD3=("kLingAo" ,"kAD1"),
                  AD4=("kFar"    ,"kAD1"),
                  AD5=("kFar"    ,"kAD2"),
                  AD6=("kFar"    ,"kAD3"),
                  AD7=("kSAB"    ,"kAD1"),
                  AD8=("kSAB"    ,"kAD2"),
             )

        for physad in ("AD1","AD2","AD3","AD4","AD5","AD6","AD7","AD8",):
            ksite, ksubsite = apl( physad )
            dtn = SourceContext(table="%s_LidSensor" % physad , ksite=ksite , ksubsite=ksubsite )
            self.append( srcdb.kls(dtn) )


class AdLidSensorScraper(Scraper):
    """
    Specialization of the generic scraper for the AD lid sensor tables.
    """
    #adls_matcher = ADLS()

    def changed(self, sv ):
        """
        Returns the changed decision to the base class.

        Caution: DB/SQLAlchemy is providing decimal.Decimal
        values... unify types to float before comparison to avoid surprises.
        """
        #pd = self._adlsdict( sv[0] )
        #ud = self._adlsdict( sv[-1] )
        #log.debug("prev %s " % pd )
        #log.debug("curr %s " % ud )
        log.debug("prev %s " % sv[0] )
        log.debug("curr %s " % sv[-1] )
        log.debug("source vector length %d" % len(sv))

        #if not hasattr(self, 'state'):          ## only on 1st call when no state
        #    kls = self.target.kls               ## the genDbi target class
        #    keys = kls.SpecKeys().aslist()
        #    state = dict(zip(keys,map(lambda _:0, keys)))    ## state dict with all values 0
        #    self.state = state

        ## work of associating source to target attributes
        ## (see the illustrative running-aggregate sketch below)
        #for k in self.state:
            #sk = ..some_fn..( k )       ## source key from target key
            ## do running aggregates  min/max/avg
            #self.state[k] += sv[-1][sk]

        return False    ## True if sufficient changes to warrant non-age based propagation

#        for adls in sorted(pd.keys()):
#            pv = float(pd[adls])    ## often decimal.Decimal types
#            uv = float(ud[adls])
#            df = abs(pv-uv)
#            if float(df) > float(self.threshold):
#                chg = True
#            else:
#                chg = False
#            log.debug("pv %r uv %r df %r threshold %s change %s  " % (pv,uv,df, self.threshold, chg))
#            if chg:
#                return True
#        return False
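
## Illustrative sketch only (kept commented out, like the pseudocode above):
## one way to fold each new source sample into per-key running aggregates,
## assuming a hypothetical source_key_for() mapping from target to source
## keys and a self.state dict of {'n','sum','min','max'} counters per key.
## None of these names are part of the scraper framework.
#
#    def _fold_sample(self, rec):
#        for k in self.state:
#            sk = source_key_for(k)                ## hypothetical mapping
#            v  = float(getattr(rec, sk))          ## unify Decimal -> float
#            st = self.state[k]
#            st['n']   += 1
#            st['sum'] += v
#            st['min']  = min(st['min'], v)
#            st['max']  = max(st['max'], v)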

    def propagate(self, sv ):
        """Yields one or more target dicts ready for writing to the target DB."""
        #dd = self._adlsdict(sv[-1])
        #log.debug("propagate %r " % dd)
        #yield self.state
        aggd = sv[-1].aggd
        assert aggd, "missing aggd - this should always be present"
        for k,v in sorted(aggd.items()):
            log.debug("%s %s" % (k,v))
        if aggd['NSamples']==0:
            ## no samples this interval: write a sentinel record with all values -1
            kls = self.target.kls               ## the genDbi target class
            keys = kls.SpecKeys().aslist()
            yield dict(zip(keys,map(lambda _:-1, keys)))    ## dict with all values -1
        else:
            yield aggd

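    ## Note (assumption, not confirmed by the framework code shown here): the
    ## aggd attribute on the latest source entry is expected to carry the
    ## genDbi spec keys (kls.SpecKeys().aslist()) for the hourly aggregates
    ## together with the 'NSamples' count checked above; e.g., hypothetically,
    ## {'NSamples': 60, 'Temp_GdLS': 22.7, ...}.
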
    def seed(self, sc):
        """
        Used for seeding the target DB when testing into empty tables.

        :param sc: source class; potentially different seeds will be needed
                   for each source that feeds into a single target
        """
        kls = self.target.kls               ## the genDbi target class
        keys = kls.SpecKeys().aslist()
        return dict(zip(keys,map(lambda _:0, keys)))    ## state dict with all values 0

    def _adlsdict(self, inst ):
        """Converts a source instance into a target dict."""
        dd = {}
        for k,v in inst.asdict.items():
            if k in 'id date_time'.split():
                continue
            ## NB: re-enabling this helper also requires the commented-out
            ## "from dcs import ADLS" import and the class-level adls_matcher
            adls = self.adls_matcher(k)
            if adls and adls != "5":      ## SKIPPING Temp5 AS NO LONGER IN SPEC
                dd['Temp%s'%adls] = v
        return dd


class AdLidSensorFaker(Faker):
    #adls_matcher = ADLS()
    def fake(self, inst, id , dt=None ):
        """
        Invoked from the base class, sets source instance attributes to form a fake.

        :param inst: source instance
        :param id: id to assign to the instance
        :param dt: datetime to assign; defaults to now
        """
        if dt is None:
            dt = datetime.now()
        for k,v in inst.asdict.items():
            if k == 'id':
                setattr( inst, k, id )
            elif k == 'date_time':
                setattr( inst, k, dt )
            else:
                # TODO switch on simulated data profiles
                #if self.profile == "modulo_ramp":
                #elif self.profile == "island_of_stability":
                #elif self.profile == "island_of_change":

                # TODO simulate actual fluid heights; for now, simulate counts
                if k.startswith('Ultrasonic'):
                    setattr( inst, k, random.gauss(2000, 5) )
                elif k.startswith('Capacitance'):
                    setattr( inst, k, random.gauss(200, 10) )
                elif k.startswith('Temp_'):
                    setattr( inst, k, random.gauss(22.7, 0.1) )
                elif k.startswith('Tilt'):
                    setattr( inst, k, random.gauss(0, 0.1) )
                else:
                    setattr( inst, k, 0 )

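    ## Illustrative use (hypothetical; the way source instances are constructed
    ## depends on the SQLAlchemy dynamic classes built by AdLidSensorSource):
    ##
    ##    inst = srcs[0]()
    ##    AdLidSensorFaker(srcs, cfg).fake(inst, id=1)
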
# source database variables
#  Ultrasonic_GdLS=0,
#  Ultrasonic_LS=0,
#  Capacitance_GdLS=0,
#  Capacitance_LS=0,
#  Capacitance_MO=0,
#  Tiltx_Sensor1=0,
#  Tilty_Sensor1=0,
#  Tiltx_Sensor2=0,
#  Tilty_Sensor2=0,
#  Tiltx_Sensor3=0,
#  Tilty_Sensor3=0,
#  Temp_GdLS=0,
#  Temp_LS=0,
#  Capacitance_Temp_GdLS=0,
#  Capacitance_Temp_LS=0,
#  Capacitance_Temp_MO=0,


if __name__ == '__main__':
    from base.parser import Parser
    cfg = Parser.config("adlidsensor_scraper")
    reg = cfg.regcls
    irg = reg(cfg)
    print irg.srcs
    assert len(irg.srcs) > 0, "no source classes"
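    ## A full scrape would be driven through the generic frontend, e.g. irg()
    ## per the "no args in call" interface described on AdLidSensor above
    ## (illustrative; the actual wiring depends on the scraper framework).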