DybPython::db::DB Class Reference


Public Member Functions

def __init__
def close
def execute_
def fetchone
def fetchcount
def fetchall
def __call__
def check_
def noop_
def ls_
def info_
def count_
def vsssta
def vsssta_
def vdupe
def vdupe_
def mysql
def describe
def desc
def fields
def read_desc
def outfile
def reldir
def relname
def tabfile
def predump
def rdumpcat_
def rloadcat_
def loadcsv
def forced_rloadcat_
def rcmpcat_
def dump_
def load_
def read_seqno
def check_seqno
def get_seqno
def vseqnos
def read_allseqno
def check_allseqno
def get_allseqno
def wipe_cache
def get_fabseqno
def docs
def has_table
def tab
def cli_

Public Attributes

 opts
 skip_pay_check
 allow_partial
 conn
 llconn
 dbc
 sect
 count

Static Public Attributes

tuple miscreants = ('CalibPmtSpec','CalibFeeSpec',)
tuple docs = classmethod(docs)

Properties

 is_lowlevel = property(lambda self:self.opts.get('lowlevel', False))
 tmpfold = property( _get_tmpfold , doc=_get_tmpfold.__doc__ )
 tmpdir = property( _get_tmpdir, doc=_get_tmpdir.__doc__ )
 showtables = property( _get_showtables, doc=_get_showtables.__doc__ )
 showpaytables = property( _get_showpaytables, doc=_get_showpaytables.__doc__ )
 tables = property( _get_tables, doc=_get_tables.__doc__ )
 paytables = property( _get_paytables, doc=_get_paytables.__doc__ )
 optables = property( _get_optables, doc=_get_optables.__doc__ )
 seqno = property( get_seqno )
 allseqno = property( get_allseqno , doc=get_allseqno.__doc__ )
 fabseqno = property( get_fabseqno, doc=get_fabseqno.__doc__ )

Private Member Functions

def _get_tmpfold
def _get_tmpdir
def _get_showtables
def _get_showpaytables
def _get_tables
def _get_paytables
def _get_optables
def _merge_localseqno
def _write_csvdirect
def _write_outfile

Private Attributes

 _tmpdir
 _showtables
 _showpaytables
 _seqno
 _allseqno
 _fabseqno

Detailed Description

Definition at line 232 of file db.py.
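
A minimal usage sketch (illustrative only, assuming a ``tmp_offline_db`` section is configured in the DBCONF file :file:`~/.my.cnf`); the class is normally driven from the :file:`db.py` command line, but it can also be used directly::

    from DybPython.db import DB

    db = DB("tmp_offline_db")          ## config section resolved via DBConf
    for rec in db("SHOW TABLES"):      ## __call__ returns DictCursor rows (dicts)
        print(rec)
    db.close()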


Constructor & Destructor Documentation

def DybPython::db::DB::__init__ (   self,
  sect = None,
  opts = {},
  kwa 
)
Initialize the config dict corresponding to a section of the config file.

:param sect: section in the config file  

Definition at line 233 of file db.py.

00233                                         {},  **kwa ):
00234         """
00235         Initialize config dict corresponding to section of config file 
00236 
00237         :param sect: section in config file  
00238 
00239         """
00240         self.opts = opts
00241         self.skip_pay_check = opts.get('SKIP_PAY_CHECK',False)
00242         self.allow_partial = opts.get('ALLOW_PARTIAL',False)
00243 
00244         dbc = DBConf(sect=sect, **kwa)
00245         pars = dbc.mysqldb_parameters(nodb=kwa.get('nodb',False))
00246         if pars.get('db',"").find("#") > -1:
00247             raise Exception("DB handling of ascii DBI catalog not yet implemented, see #864 ")
00248         try:  
00249             conn = MySQLdb.connect( **pars ) 
00250         except MySQLdb.Error, e: 
00251             raise Exception("Error %d: %s " % ( e.args[0], e.args[1] ) )
00252             
00253         self.conn = conn
00254         self.llconn = _mysql.connect( **pars )       
00255         self.dbc = dbc
00256         self.sect = dbc.sect    ## as DBConf promotes None to the DBCONF value from environment 
00257 
00258     

Member Function Documentation

def DybPython::db::DB::close (   self)

Definition at line 261 of file db.py.

00262                    :
00263         self.conn.close()

def DybPython::db::DB::execute_ (   self,
  cmd 
)

Definition at line 264 of file db.py.

00265                            :
00266         cursor = self.conn.cursor(MySQLdb.cursors.DictCursor)
00267         cursor.execute( cmd )
00268         return cursor

def DybPython::db::DB::fetchone (   self,
  cmd 
)

Definition at line 269 of file db.py.

00269                             : 
00270         cursor = self.execute_(cmd)
00271         row = cursor.fetchone()
00272         cursor.close()
00273         return row
00274 
def DybPython::db::DB::fetchcount (   self,
  cmd 
)

Definition at line 275 of file db.py.

00275                               : 
00276         row = self.fetchone(cmd)
00277         assert len(row) == 1
00278         return row.values()[0]
00279 
def DybPython::db::DB::fetchall (   self,
  cmd 
)

Definition at line 280 of file db.py.

00280                             : 
00281         cursor = self.execute_(cmd)
00282         rows = cursor.fetchall()
00283         self.count = cursor.rowcount
00284         cursor.close()
00285         return rows
00286 
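
A short sketch of the fetch helpers (illustrative queries; ``LOCALSEQNO`` is the DBI bookkeeping table used elsewhere in this module). ``fetchone`` returns a single row as a dict thanks to the ``DictCursor``, ``fetchcount`` expects a single-valued row such as a ``COUNT(*)``, and ``fetchall`` returns all rows and records the rowcount in ``self.count``::

    db = DB("tmp_offline_db")
    row  = db.fetchone("SELECT DATABASE(),CURRENT_USER()")     ## dict keyed by the selected expressions
    n    = db.fetchcount("SELECT COUNT(*) FROM LOCALSEQNO")    ## single scalar value
    rows = db.fetchall("SELECT * FROM LOCALSEQNO")             ## sequence of dicts
    print(db.count)                                            ## rowcount of the last fetchall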
def DybPython::db::DB::_get_tmpfold (   self) [private]
Path to the temporary folder, named after the DBCONF section.
The base directory can be controlled by the tmpbase (-b) option.

Definition at line 287 of file db.py.

00288                           :
00289         """
00290         Path to temporary folder, named after the DBCONF section.
00291         The base directory can be controlled by tmpbase (-b) option  
00292         """
        return os.path.join( self.opts.get('tmpbase','/tmp') , self.sect )
def DybPython::db::DB::_get_tmpdir (   self) [private]
Create new temporary directory for each instance, writable by ugo

Definition at line 295 of file db.py.

00296                          :
00297         """
00298         Create new temporary directory for each instance, writable by ugo
00299         """
00300         if not hasattr(self,'_tmpdir'): 
00301             if not os.path.exists(self.tmpfold):
00302                 os.makedirs(self.tmpfold)
00303                 os.chmod(self.tmpfold, 0777)     
00304             self._tmpdir = mkdtemp(dir=self.tmpfold) 
00305             os.chmod( self._tmpdir, 0777 )  
        return self._tmpdir
def DybPython::db::DB::__call__ (   self,
  cmd 
)

Definition at line 308 of file db.py.

00309                            :
00310         if self.opts.get('verbose',False):
00311             print cmd
00312         return self.fetchall(cmd)

def DybPython::db::DB::check_ (   self,
  args,
  kwa 
)
Check the connection to the DB by issuing a SELECT of info functions such as DATABASE() and CURRENT_USER()

Definition at line 313 of file db.py.

00314                                   :
00315         """
00316         check connection to DB by issuing a SELECT of info functions such as DATABASE() and CURRENT_USER() command
00317         """
00318         rec = self.fetchone("SELECT DATABASE(),CURRENT_USER(),VERSION(),CONNECTION_ID() ")
00319         return rec
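
The returned record is a dict keyed by the selected expressions; an illustrative sketch (the exact keys follow the column names MySQL reports for the expressions, and the values depend on the configured section)::

    db = DB("tmp_offline_db")
    rec = db.check_()
    for k in ('DATABASE()', 'CURRENT_USER()', 'VERSION()', 'CONNECTION_ID()'):
        print("%s : %s" % (k, rec[k]))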

def DybPython::db::DB::noop_ (   self,
  args,
  kwa 
)
Do-nothing command, allowing one to simply instantiate the DB object and provide it for 
interactive prodding, e.g.:: 

    ~/v/db/bin/ipython -- ~/DybPython/db.py tmp_offline_db noop   

    In [1]: db("show tables")     ## high level 

    In [2]: db.llconn.query("select * from CalibPmtSpecVld")    ## lowlevel _mysql    
    In [3]: r = db.conn.store_result()

This also demonstrates standalone :file:`db.py` usage, assuming svn checkout::

    svn co http://dayabay.ihep.ac.cn/svn/dybsvn/dybgaudi/trunk/DybPython/python/DybPython

Definition at line 320 of file db.py.

00321                                  :
00322         """
00323         Do nothing command, allowing to just instanciate the DB object and provide it for 
00324         interactive prodding, eg:: 
00325 
00326             ~/v/db/bin/ipython -- ~/DybPython/db.py tmp_offline_db noop   
00327 
00328             In [1]: db("show tables")     ## high level 
00329 
00330             In [2]: db.llconn.query("select * from CalibPmtSpecVld")    ## lowlevel _mysql    
00331             In [3]: r = db.conn.store_result()
00332 
00333         This also demonstrates standalone :file:`db.py` usage, assuming svn checkout::
00334 
00335             svn co http://dayabay.ihep.ac.cn/svn/dybsvn/dybgaudi/trunk/DybPython/python/DybPython
00336 
00337         """
00338         pass
00339 
00340 

def DybPython::db::DB::ls_ (   self,
  args,
  kwa 
)
Listing of tables in various sets
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Usage::

   ./db.py tmp_offline_db ls

Annotation '-' indicates tables not in the table selection;
typically only the below types of tables should appear with the '-' annotation.

#. non-SOP tables such as scraped tables 
#. temporary working tables not intended for offline_db

If a table appears with annotation '-' that is not one of the 
above cases then either ``db.py`` **tselect** 
needs to be updated to accommodate a new table (ask Liang to do this)
OR you need to update your version of ``db.py``. The first few lines
of ``db.py --help`` list the revision in use.

See :dybsvn:`ticket:1269` for an issue with adding the new table McsPos that 
this command would have helped to diagnose rapidly.

Definition at line 341 of file db.py.

00342                                 :
00343         """
00344         Listing of tables in various sets
00345         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
00346 
00347         Usage::
00348 
00349            ./db.py tmp_offline_db ls
00350 
00351         Annotation '-' indicates tables not in the table selection,
00352         typically only the below types of tables should appear with '-' annotation.
00353 
00354         #. non-SOP tables such as scraped tables 
00355         #. temporary working tables not intended for offline_db
00356 
00357         If a table appears with annotation '-' that is not one of the 
00358         above cases then either ``db.py`` **tselect** 
00359         needs to be updated to accomodate a new table (ask Liang to do this)
00360         OR you need to update your version of ``db.py``. The first few lines
00361         of ``db.py --help`` lists the revision in use.
00362 
00363         See :dybsvn:`ticket:1269` for issue with adding new table McsPos that 
00364         this command would have helped to diagnose rapidly.
00365         """
00366 
00367         kvs = (
00368                 ('showtables',    "all tables in DB, the result of SHOW TABLES"),
00369                 ('showpaytables', "all DBI payload tables, tables ending Vld with Vld removed, from SHOW TABLES LIKE '%Vld' "),
00370                 ('tables',        "DBI paired up table selection + LOCALSEQNO, controlled via tselect default option"),
00371                 ('paytables',     "derived from `tables`, selected DBI payload tables"),
00372                 ('optables',      "tables to be operated upon, in default decoupled case same as `tables` for offline_db or `showtables` otherwise  "),
00373               )
00374 
00375         tt = self.tables
00376         def present_(t):
00377             anno = " " if t in tt else "-" 
00378             return "    %s %s" % (anno,t) 
00379 
00380         print self.ls_.__doc__
00381         for k,v in kvs:
00382             vv = getattr(self, k )
00383             print "\n **%s** : %s \n" % ( k, v )
00384             print "\n".join(map(present_,vv)) 
 
def DybPython::db::DB::_get_showtables (   self,
  nocache = False 
) [private]
List names of all tables in the DB as reported by SHOW TABLES. 
NB the result is cached, so it will become stale after deletions or creations 
unless the `nocache=True` option is used.

Definition at line 385 of file db.py.

00386                                                :
00387         """
00388         list names of all tables in DB as reported by SHOW TABLES, 
00389         NB the result is cached so will become stale after deletions or creations 
00390         unless `nocache=True` option is used
00391         """ 
00392         if not hasattr(self, '_showtables') or nocache == True:
00393             self._showtables = [rec.values()[0] for rec in self("SHOW TABLES")]
        return self._showtables 
def DybPython::db::DB::_get_showpaytables (   self,
  nocache = False 
) [private]
List names of all DBI payload tables in the DB as reported by SHOW TABLES LIKE '%Vld',
with the 'Vld' chopped off.
 
NB the result is cached, so it will become stale after deletions or creations 
unless the `nocache=True` option is used.

Definition at line 396 of file db.py.

00397                                                   :
00398         """
00399         list names of all DBI payload tables in DB as reported by SHOW TABLES LIKE '%Vld'
00400         with the 'Vld' chopped off
00401  
00402         NB the result is cached so will become stale after deletions or creations 
00403         unless `nocache=True` option is used
00404         """ 
00405         if not hasattr(self, '_showpaytables') or nocache == True:
00406             self._showpaytables = [rec.values()[0][:-3] for rec in self("SHOW TABLES LIKE '%Vld'")]
        return self._showpaytables 
def DybPython::db::DB::_get_tables (   self) [private]
List of selected table names to operate on, plus the mandatory LOCALSEQNO.
Poorly named; it should be ``table_selection``.

Definition at line 411 of file db.py.

00412                            :
00413         """
00414         list of selected table names to operate on plus the mandatory LOCALSEQNO
00415         Poorly named should be ``table_selection``
00416         """
00417 
00418         tabs = self.opts['tselect'].split(",")
00419         if self.opts['NOPAIRING']:
00420             log.warn("option NOPAIRING causes tables selection to be used verbatim, with no DBI pairing up")
00421         else: 
00422             tabs = dbi_pairs(tabs) 
00423 
00424         if self.opts['NOLOCALSEQNO']:
00425             log.warn("option NOLOCALSEQNO inhibits operations on LOCALSEQNO table")
00426         else:
00427             tabs += ["LOCALSEQNO"]
        return tabs 
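
An illustrative sketch of the selection logic (the exact ordering returned by ``dbi_pairs`` is an assumption; the point is that each payload table gains its ``Vld`` partner and ``LOCALSEQNO`` is appended unless inhibited)::

    from DybPython.db import dbi_pairs     ## lives alongside DB in this module's namespace (assumption)

    tabs = "CableMap,HardwareID".split(",")    ## the tselect option value
    tabs = dbi_pairs(tabs)                     ## ['CableMap', 'CableMapVld', 'HardwareID', 'HardwareIDVld']  (assumed ordering)
    tabs += ["LOCALSEQNO"]                     ## skipped when the NOLOCALSEQNO option is set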
def DybPython::db::DB::_get_paytables (   self) [private]
list of selected DBI payload tables

Definition at line 430 of file db.py.

00431                             :
00432         """
00433         list of selected DBI payload tables
00434         """
00435         tabs = self.tables
        return filter(lambda t:t not in ('LOCALSEQNO','GLOCALSEQNO',) and t[-3:] != 'Vld', tabs ) 
def DybPython::db::DB::_get_optables (   self) [private]
List of tables that commands such as **rdumpcat** perform operations on, outcome depends on:

#. table selection from the `-t/--tselect` option 
#. decoupled option setting  
#. DBCONF section name, where name **offline_db** is regarded as special

The default value of the table selection option constitutes the current 
standard set of DBI tables that should be reflected in the dybaux catalog.

When following the SOP in the now-default "decoupled" mode the **offline_db** rdumpcat needs 
to abide by the table selection in force, whereas when dumping from **tmp_offline_db** onto a
dybaux checkout all tables present in the DB need to be dumped, rather than just the default table selection.

This special casing avoids the need for the ``-t`` selection when rdumpcating **tmp_offline_db**.

Definition at line 439 of file db.py.

00440                            :
00441         """
00442         List of tables that commands such as **rdumpcat** perform operations on, outcome depends on:
00443 
00444         #. table selection from the `-t/--tselect` option 
00445         #. decoupled option setting  
00446         #. DBCONF section name, where name **offline_db** is regarded as special
00447 
00448         The default value of the table selection option constitutes the current 
00449         standard set of DBI tables that should be reflected in the dybaux catalog.
00450 
00451         When following the SOP in the now default "decoupled" mode the **offline_db** rdumpcat needs 
00452         to abide by the table selection in force, whereas when dumping from **tmp_offline_db** onto a
00453         dybaux checkout need to dump all of the subset. Rather than the default table selection.
00454 
00455         This special casing avoids the need for the ``-t`` selection when rdumpcating **tmp_offline_db**
00456 
00457         """
00458         if self.opts['decoupled']:
00459             if self.sect == 'offline_db':   ## NASTY SPECIAL CASING  
00460                 return self.tables          ## tselected tables, dbi paired with LOCALSEQNO added  
00461             else:
00462                 return self.showtables      ## all tables as listed by "SHOW TABLES" 
00463         else: 
00464             return self.tables

def DybPython::db::DB::info_ (   self,
  args,
  kwa 
)

Definition at line 468 of file db.py.

00469                                  :
00470         log.info("info t %s %s %s " % (self.sect, repr(args), repr(kwa)))
00471         for att in ('showtables','tables','optables'):
00472             tabs = getattr(self, att )
00473             log.info( " ******** %s ( %s ) ************* " % ( att,len(tabs))  )
00474 

def DybPython::db::DB::count_ (   self,
  args,
  kwa 
)
List table counts of all tables in database, usage example::

    db.py offline_db count

*offline_db*  is the  :file:`~/.my.cnf` section name specifying host/database/user/password

Definition at line 475 of file db.py.

00476                                   :
00477         """
00478         List table counts of all tables in database, usage example::
00479                 
00480             db.py offline_db count
00481 
00482         *offline_db*  is  :file:`~/.my.cnf` section name specifying host/database/user/password
00483 
00484         """
00485         log.debug("count %s %s %s " % (self.sect, repr(args), repr(kwa)))
00486         counts = dict(TOTAL=0)
00487         for tab in self.showtables:
00488             cnt = self.fetchone("SELECT COUNT(*) FROM  %s" % tab )
00489             n = float(cnt.values()[0])
00490             counts[tab] = n
00491             counts['TOTAL'] += n
00492 
00493         log.info( counts )
00494         for tab in self.showtables + ['TOTAL']:
00495             perc = 100.*counts[tab]/counts['TOTAL']  
00496             log.info( "%-30s : %-10s : %10s " % ( tab, counts[tab] , "%.3f" % perc ) )
00497 
00498 

def DybPython::db::DB::vsssta (   self,
  tab 
)
Look at VERSIONDATE/TIMESTART/... within SSSTA groups

Definition at line 499 of file db.py.

00500                          :
00501         """
00502         Look at VERSIONDATE/TIMESTART/... within SSSTA groups
00503         """
00504         sssta = 'concat(SITEMASK,":",SIMMASK,":",SUBSITE,":",TASK,":",AGGREGATENO)'  
00505         having = " having ( count(distinct(VERSIONDATE)) < count(*) )"
00506         q = "select min(SEQNO) as miSEQNO,max(SEQNO) as mxSEQNO,count(distinct(VERSIONDATE)) as nVERSIONDATE,count(distinct(TIMESTART)) as nTIMESTART,count(*) as n,%(sssta)s as sssta  from %(tab)sVld group by %(sssta)s %(having)s "
00507         for i,e in enumerate(self(q % locals())):
00508             e.update(tab=tab)
00509             log.info(e)

def DybPython::db::DB::vsssta_ (   self,
  args,
  kwa 
)

Definition at line 510 of file db.py.

00511                                    :
00512         for tab in self.showpaytables:
00513             self.vsssta( tab )
00514 

def DybPython::db::DB::vdupe (   self,
  tab 
)
Currently over-reports, as results need to be balkanized by context.


Definition at line 515 of file db.py.

00516                         :
00517         """
00518         Currently is overreporting as needs to be balkanized by context 
00519 
00520 
00521         """
00522         q = "SELECT SEQNO,VERSIONDATE,COUNT(VERSIONDATE) AS dupe  FROM %(tab)sVld GROUP BY VERSIONDATE HAVING ( COUNT(VERSIONDATE) > 1 ) " 
00523         n = 0 
00524 
00525         log.info("vdupe_ tab %(tab)s " % locals() )
00526         for i,e in enumerate(self(q % locals())):
00527             n += 1
00528             vdate = e['VERSIONDATE'].strftime("%Y-%m-%d %H:%M:%S")
00529             vq = "select * from %(tab)sVld where VERSIONDATE = '%(vdate)s' ;" % locals() 
00530             log.warn("vdupe_ %s seqno %s q %s  " % (e['dupe'], e['SEQNO'], vq ) )
00531         return n

def DybPython::db::DB::vdupe_ (   self,
  args,
  kwa 
)
Report the first Vlds which feature duplicated VERSIONDATEs::

mysql> SELECT SEQNO,VERSIONDATE,COUNT(VERSIONDATE) AS dupe  FROM DemoVld GROUP BY VERSIONDATE HAVING ( COUNT(VERSIONDATE) > 1 ) ;
+-------+---------------------+------+
| SEQNO | VERSIONDATE         | dupe |
+-------+---------------------+------+
|    71 | 2011-08-04 05:55:47 |    2 | 
|    72 | 2011-08-04 05:56:47 |    3 | 
+-------+---------------------+------+
2 rows in set (0.00 sec)

mysql> select * from DemoVld ;
+-------+---------------------+---------------------+----------+---------+---------+------+-------------+---------------------+---------------------+
| SEQNO | TIMESTART           | TIMEEND             | SITEMASK | SIMMASK | SUBSITE | TASK | AGGREGATENO | VERSIONDATE         | INSERTDATE          |
+-------+---------------------+---------------------+----------+---------+---------+------+-------------+---------------------+---------------------+
|    70 | 2011-08-04 05:54:47 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:54:47 | 2011-08-11 10:12:32 | 
|    71 | 2011-08-04 06:15:46 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:55:47 | 2011-08-11 10:12:32 | 
|    72 | 2011-08-04 07:02:51 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:56:47 | 2011-08-11 10:12:32 | 
|    73 | 2011-08-04 05:54:47 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:55:47 | 2011-08-11 10:12:32 | 
|    74 | 2011-08-04 06:15:46 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:56:47 | 2011-08-11 10:12:32 | 
|    75 | 2011-08-04 05:54:47 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:56:47 | 2011-08-11 10:12:32 | 
|    76 | 2011-08-04 06:15:46 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:57:47 | 2011-08-11 10:12:32 | 
+-------+---------------------+---------------------+----------+---------+---------+------+-------------+---------------------+---------------------+
7 rows in set (0.00 sec)

Definition at line 532 of file db.py.

00533                                    :
00534         """
00535         Report the first Vlds which feature duplicated VERSIONDATEs::
00536 
00537                 mysql> SELECT SEQNO,VERSIONDATE,COUNT(VERSIONDATE) AS dupe  FROM DemoVld GROUP BY VERSIONDATE HAVING ( COUNT(VERSIONDATE) > 1 ) ;
00538                 +-------+---------------------+------+
00539                 | SEQNO | VERSIONDATE         | dupe |
00540                 +-------+---------------------+------+
00541                 |    71 | 2011-08-04 05:55:47 |    2 | 
00542                 |    72 | 2011-08-04 05:56:47 |    3 | 
00543                 +-------+---------------------+------+
00544                 2 rows in set (0.00 sec)
00545 
00546                 mysql> select * from DemoVld ;
00547                 +-------+---------------------+---------------------+----------+---------+---------+------+-------------+---------------------+---------------------+
00548                 | SEQNO | TIMESTART           | TIMEEND             | SITEMASK | SIMMASK | SUBSITE | TASK | AGGREGATENO | VERSIONDATE         | INSERTDATE          |
00549                 +-------+---------------------+---------------------+----------+---------+---------+------+-------------+---------------------+---------------------+
00550                 |    70 | 2011-08-04 05:54:47 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:54:47 | 2011-08-11 10:12:32 | 
00551                 |    71 | 2011-08-04 06:15:46 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:55:47 | 2011-08-11 10:12:32 | 
00552                 |    72 | 2011-08-04 07:02:51 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:56:47 | 2011-08-11 10:12:32 | 
00553                 |    73 | 2011-08-04 05:54:47 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:55:47 | 2011-08-11 10:12:32 | 
00554                 |    74 | 2011-08-04 06:15:46 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:56:47 | 2011-08-11 10:12:32 | 
00555                 |    75 | 2011-08-04 05:54:47 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:56:47 | 2011-08-11 10:12:32 | 
00556                 |    76 | 2011-08-04 06:15:46 | 2038-01-19 03:14:07 |      127 |       1 |       0 |    0 |          -1 | 2011-08-04 05:57:47 | 2011-08-11 10:12:32 | 
00557                 +-------+---------------------+---------------------+----------+---------+---------+------+-------------+---------------------+---------------------+
00558                 7 rows in set (0.00 sec)
00559 
00560         """
00561         for tab in self.showtables:
00562             if tab[-3:] != 'Vld':continue
00563             self.vdupe( tab[:-3] )
00564 

def DybPython::db::DB::mysql (   self,
  args,
  kwa 
)

Definition at line 565 of file db.py.

00566                                   :
00567         """
00568         """
00569         cmd = MySQLCommand( self.dbc )
00570         return cmd( *args, **kwa )  

def DybPython::db::DB::describe (   self,
  tab 
)

Definition at line 571 of file db.py.

00572                             :
00573         """
00574         """
00575         dsc = MySQLDescribe( self.dbc )
00576         return dsc(table=tab)   

def DybPython::db::DB::desc (   self,
  tab 
)
Header line with the table definition used in .csv files; shifts the primary key definition to the end.

Definition at line 577 of file db.py.

00578                         :
00579         """
00580         Header line with table definition in .csv files shift the pk definition to the end    
00581         """
00582         pks = []
00583         def _desc( f ):
00584             if f['Key'] == "PRI":
00585                 pks.append(f['Field'])
00586             return "%(Field)s %(Type)s" % f
00587         cols = ",".join( [ _desc(f) for f in self("describe %s" % tab) ] )
00588         if pks:
00589             cols += ",PRIMARY KEY (" + ",".join( pks ) + ")"
00590         return cols + "\n"
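
An illustrative example of the header line produced for a DBI validity table (the column types shown are assumptions; what matters is that the PRIMARY KEY clause is shifted to the end)::

    SEQNO int(11),TIMESTART datetime,TIMEEND datetime,SITEMASK tinyint(4),SIMMASK tinyint(4),SUBSITE tinyint(4),TASK int(11),AGGREGATENO int(11),VERSIONDATE datetime,INSERTDATE datetime,PRIMARY KEY (SEQNO)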

def DybPython::db::DB::fields (   self,
  tab 
)
:param tab:
:return: list of field names

Definition at line 591 of file db.py.

00592                          :
00593         """
00594         :param tab:
00595         :return: list of field names
00596         """
00597         return map(lambda _:_['Field'], self("describe `%s`" % tab ))  

def DybPython::db::DB::read_desc (   self,
  tabfile 
)
Read first line of csv file containing the description

Definition at line 598 of file db.py.

00599                                  :
00600         """
00601         Read first line of csv file containing the description
00602         """
00603         tf = open(tabfile, "r")
00604         hdr = tf.readline().strip()    
00605         tf.close()
00606         return hdr
00607 

def DybPython::db::DB::outfile (   self,
  tab 
)
Path of raw outfile as dumped by  SELECT ... INTO OUTFILE    

Definition at line 608 of file db.py.

00609                           :
00610         """Path of raw outfile as dumped by  SELECT ... INTO OUTFILE    """
00611         return os.path.join( self.tmpdir , "%s.csv" % tab )

def DybPython::db::DB::reldir (   self,
  tab 
)

Definition at line 612 of file db.py.

00613                           :
00614         return tab[-3:].upper() == 'VLD' and tab[:-3] or tab

def DybPython::db::DB::relname (   self,
  tab 
)

Definition at line 615 of file db.py.

00616                           :
00617         return os.path.join( self.reldir(tab) , "%s.csv" % tab )
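
A sketch of how the path helpers relate for a DBI pair (illustrative paths; ``catfold`` is whatever catalog folder is passed in). ``reldir`` strips a trailing ``Vld`` so both csv files of a pair land in the same catalog subfolder::

    import os
    from DybPython.db import DB

    db = DB("tmp_offline_db")
    catfold = os.path.expanduser("~/dybaux/catalog/tmp_offline_db")

    db.reldir("CalibPmtSpecVld")             ## 'CalibPmtSpec'
    db.relname("CalibPmtSpecVld")            ## 'CalibPmtSpec/CalibPmtSpecVld.csv'
    db.tabfile("CalibPmtSpecVld", catfold)   ## <catfold>/CalibPmtSpec/CalibPmtSpecVld.csv  (creates the folder if needed)
    db.outfile("CalibPmtSpecVld")            ## <tmpdir>/CalibPmtSpecVld.csv  raw SELECT ... INTO OUTFILE target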

def DybPython::db::DB::tabfile (   self,
  tab,
  catfold 
)
path of table obtained from     

Definition at line 618 of file db.py.

00619                                     :
00620         """ path of table obtained from     """
00621         dir = os.path.join( catfold , self.reldir(tab) )
00622         if not os.path.isdir(dir):
00623             os.makedirs(dir)
00624         return os.path.join( catfold, self.relname(tab) )
00625 

def DybPython::db::DB::predump (   self)
Checks performed before: **dump**, **dumpcat**, **rdumpcat** 

Definition at line 626 of file db.py.

00627                      :
00628         """
00629         Checks performed before : **dump**, **dumpcat**, **rdumpcat** 
00630         """ 
00631         if self.opts['decoupled']: 
00632             pass
00633         else: 
00634             show = set(self.showtables)
00635             tsel = set(self.tables)
00636             missing = list(tsel.difference(show))
00637             log.info("predump show %s " % show )
00638             log.info("predump tsel %s " % tsel )
00639             assert len(missing) == 0, "selected tables are missing from DB : %r \n use -t option to restrict the table selection" % missing
00640 

def DybPython::db::DB::rdumpcat_ (   self,
  args,
  kwa 
)
Dumps DBI tables and merges LOCALSEQNO from ``tmp_offline_db`` into a pre-existing ascii catalog. 
Usage:: 

    db.py -d   tmp_offline_db rdumpcat ~/dybaux/catalog/tmp_offline_db       ## -d/--decoupled is now the default
    db.py      tmp_offline_db rdumpcat ~/dybaux/catalog/tmp_offline_db      

    svn status ~/dybaux/catalog/tmp_offline_db                               ## see whats changed 

Features of the default ``-d/--decoupled`` option:

#. requires dumping into a pre-existing catalog 
#. subset of tables present in the DB are dumped 
#. partial LOCALSEQNO.csv is merged into the pre-existing catalog LOCALSEQNO.csv
#. performs safe writes; if the merge fails, detritus files with names ending ``.csv._safe`` and ``.csv._merged`` will be left in the working copy  

With the alternate ``-D/--nodecoupled`` option you must ensure that the table selection is appropriate to the content of the DB::

    db.py -D -t CableMap,HardwareID   offline_db rdumpcat ~/offline_db

To obtain the dybaux SVN catalog::

    mkdir ~/dybaux 
    cd ~/dybaux ; 
    svn co http://dayabay.ihep.ac.cn/svn/dybaux/catalog

The ascii catalog is structured ::

    ~/dybaux/catalog/tmp_offline_db
               tmp_offline_db.cat
               CalibFeeSpec/
                   CalibFeeSpec.csv
                   CalibFeeSpecVld.csv
               CalibPmtSpec/
                   CalibPmtSpec.csv
                   CalibPmtSpecVld.csv
               ...
               LOCALSEQNO/
                   LOCALSEQNO.csv

The .csv files comprise a single header line with the table definition
and remainder containing the row data. 


ADVANCED USAGE OF ASCII CATALOGS IN CASCADES

The resulting catalog can be used in a DBI cascade by setting DBCONF to::
     
     tmp_offline_db_ascii:offline_db

Assuming a section::

     [tmp_offline_db_ascii]
     host = localhost
     user = whatever
     password = whatever
     db = tmp_offline_db#/path/to/catname/catname.cat
       
NB from :dybsvn:`r9869` /path/to/catname/catname.cat can also be a remote URL such as ::

     http://dayabay:youknowit\@dayabay.ihep.ac.cn/svn/dybaux/trunk/db/cat/zhe/trial/trial.cat
     http://dayabay:youknowit\@dayabay.ihep.ac.cn/svn/dybaux/!svn/bc/8000/trunk/db/cat/zhe/trial/trial.cat

When stuffing basic authentication credentials into 
the URL it is necessary to backslash-escape the "@" to avoid confusing DBI (TUrl).
Note the use of "!svn/bc/NNNN", which requests apache mod_dav_svn  
to provide a specific revision of the catalog rather than the default latest.  

ADVANTAGES OF CATALOG FORMAT OVER MYSQLDUMP SERIALIZATIONS

* effectively native DBI format that can be used in ascii cascades 
  allowing previewing of future database after updates are made
* very simple/easily parsable .csv that can be read by multiple tools
* very simple diffs (DBI updates should be contiguous additional lines), unlike mysqldump; this means efficient storage in SVN 
* no variants/options that change the format (unlike mysqldump) 
* no changes between versions of mysql      
* much faster to load than mysqldumps 

IMPLEMENTATION NOTES 

#. `mysql` does not support remote  `SELECT ... INTO OUTFILE` even with `OUTFILE=/dev/stdout`
#. `mysqldump -Tpath/to/dumpdir` has the same limitation

To work around these limitations a `csvdirect` approach is taken where low level mysql-python is
used to perform a ``select *`` on selected tables and the strings obtained are written directly to 
the csv files of the catalog.  Low-level mysql-python is used to avoid pointless conversion of 
strings from the underlying mysql C-api into python types and then back into strings.

Definition at line 641 of file db.py.

00642                                       :
00643         """
00644         Dumps DBI tables and merges LOCALSEQNO from ``tmp_offline_db`` into a pre-existing ascii catalog. 
00645         Usage:: 
00646 
00647             db.py -d   tmp_offline_db rdumpcat ~/dybaux/catalog/tmp_offline_db       ## -d/--decoupled is now the default
00648             db.py      tmp_offline_db rdumpcat ~/dybaux/catalog/tmp_offline_db      
00649 
00650             svn status ~/dybaux/catalog/tmp_offline_db                               ## see whats changed 
00651 
00652         Features of the default ``-d/--decoupled`` option:
00653 
00654         #. requires dumping into a pre-existing catalog 
00655         #. subset of tables present in the DB are dumped 
00656         #. partial LOCALSEQNO.csv is merged into the pre-existing catalog LOCALSEQNO.csv
00657         #. performs safe writes, if the merge fails detritus files with names ending ``.csv._safe`` and ``.csv._merged`` will be left in the working copy  
00658 
00659         With alternate ``-D/--nodecoupled`` option must ensure that the table selection is appropriate to the content of the DB::
00660 
00661             db.py -D -t CableMap,HardwareID   offline_db rdumpcat ~/offline_db
00662 
00663         To obtain the dybaux SVN catalog::
00664 
00665             mkdir ~/dybaux 
00666             cd ~/dybaux ; 
00667             svn co http://dayabay.ihep.ac.cn/svn/dybaux/catalog
00668 
00669         The ascii catalog is structured ::
00670 
00671                     ~/dybaux/catalog/tmp_offline_db
00672                                tmp_offline_db.cat
00673                                CalibFeeSpec/
00674                                    CalibFeeSpec.csv
00675                                    CalibFeeSpecVld.csv
00676                                CalibPmtSpec/
00677                                    CalibPmtSpec.csv
00678                                    CalibPmtSpecVld.csv
00679                                ...
00680                                LOCALSEQNO/
00681                                    LOCALSEQNO.csv
00682 
00683         The .csv files comprise a single header line with the table definition
00684         and remainder containing the row data. 
00685 
00686 
00687         ADVANCED USAGE OF ASCII CATALOGS IN CASCADES
00688 
00689         The resulting catalog can be used in a DBI cascade by setting DBCONF to::
00690              
00691              tmp_offline_db_ascii:offline_db
00692 
00693         Assuming a section::
00694 
00695              [tmp_offline_db_ascii]
00696              host = localhost
00697              user = whatever
00698              password = whatever
00699              db = tmp_offline_db#/path/to/catname/catname.cat
00700                
00701         NB from :dybsvn:`r9869` /path/to/catname/catname.cat can also be a remote URL such as ::
00702 
00703              http://dayabay:youknowit\@dayabay.ihep.ac.cn/svn/dybaux/trunk/db/cat/zhe/trial/trial.cat
00704              http://dayabay:youknowit\@dayabay.ihep.ac.cn/svn/dybaux/!svn/bc/8000/trunk/db/cat/zhe/trial/trial.cat
00705         
00706         When stuffing basic authentication credentials into 
00707         the URL it is necessary to backslash escape the "@" to avoid confusing DBI(TUrl)
00708         Note the use of "!svn/bc/NNNN" that requests apache mod_dav_svn  
00709         to provide a specific revision of the catalog. rather than the default latest.  
00710 
00711         ADVANTAGES OF CATALOG FORMAT OVER MYSQLDUMP SERIALIZATIONS
00712 
00713         * effectively native DBI format that can be used in ascii cascades 
00714           allowing previewing of future database after updates are made
00715         * very simple/easily parsable .csv that can be read by multiple tools
00716         * very simple diffs (DBI updates should be contiguous additional lines), unlike mysqldump, this means efficient storage in SVN 
00717         * no-variants/options that change the format (unlike mysqldump) 
00718         * no changes between versions of mysql      
00719         * much faster to load than mysqldumps 
00720 
00721         IMPLEMENTATION NOTES 
00722 
00723         #. `mysql` does not support remote  `SELECT ... INTO OUTFILE` even with `OUTFILE=/dev/stdout`
00724         #. `mysqldump -Tpath/to/dumpdir` has the same limitation
00725 
00726         To workaround these limitations a `csvdirect` approach is taken where low level mysql-python is
00727         used to perform a ``select *`` on selected tables and the strings obtained are written directly to 
00728         the csv files of the catalog.  Low-level mysql-python is used to avoid pointless conversion of 
00729         strings from the underlying mysql C-api into python types and then back into strings.
00730 
00731         """
00732         log.debug("rdumpcat %s %s %s " % ( self.sect, repr(args), repr(kwa)))
00733         assert len(args) > 0, "argument specifying the path of the catalog folder to be created is required " 
00734 
00735         self.predump()
00736 
00737         catfold = args[0]
00738         catname = os.path.basename(catfold)
00739         catfile = os.path.join(catfold, "%s.cat" % catname) 
00740         
00741         if os.path.exists(catfold):
00742             assert os.path.isdir(catfold),"argument must specify directory, not a file %s " % catfold
00743             if self.opts['decoupled']:
00744                 log.info("performing decoupled rdumpcat into existing directory %s  " % catfold )
00745             else:
00746                 log.info("CAUTION : performing non-decoupled rdumpcat into existing directory ")
00747         else:
00748             assert not self.opts['decoupled'], "decoupled rdumpcat must be done into a preexisting catalog " 
00749 
00750 
00751         cat = ['name']
00752         omrg = {}    ## results of decoupled LOCALSEQNO merge 
00753 
00754         for tab in sorted(self.optables):
00755             select = Select(tab=tab)
00756             select.db = self 
00757 
00758             tabfile = self.tabfile(tab, catfold)
00759             preexists = os.path.exists(tabfile)
00760             safe_write = preexists and self.opts.get('safe')
00761             log.debug("rdumpcat_ writing %s pre-exists %s safe_write %s  " % (tabfile,preexists,safe_write) )
00762 
00763             ## when safe writing, write first to "<tablename>.csv._safe" 
00764             ## and do some sanity checks before renaming to "<tablename>.csv" 
00765             if safe_write: 
00766                 prior   = Stat(tabfile)  
00767                 safefile = tabfile + "._safe"
00768             else:
00769                 safefile = tabfile 
00770 
00771             tf = open(safefile,"w")
00772             tf.write( self.desc(tab) )
00773 
00774             if not self.opts.get('local'):
00775                 self._write_csvdirect( select , tf )   ## result of select is returned to python and thence formatted directly into csv, works remotely 
00776             else:
00777                 outfile = self.outfile(tab)            ## temporary written by mysql
00778                 select['outfile'] = outfile            ## presence of this key changes the form of query 
00779                 self._write_outfile( select )          ## mysql does the writing into outfile , local only
00780                 tf.write( open(outfile,"r").read() )   ## shove the mysql created file on the end of the description 
00781             tf.close()
00782 
00783             if safe_write:
00784                 if tab == 'LOCALSEQNO' and self.opts['decoupled']:
00785                     resfile, workings, omrg = self._merge_localseqno( prior, safefile ) 
00786                 else:
00787                     resfile, workings  = Stat(safefile), []
00788 
00789                 ## safety checks before overwriting csv
00790                 if resfile.size < prior.size:
00791                     log.fatal("size reduction prior %s %s whereas new %s %s " % ( prior , prior.size, resfile, resfile.size ))
00792                     log.fatal("dumpcat is only allowed to add, size reductions are prohibited unless using OVERRIDE")
00793                     for path in workings:
00794                         log.info("workings %s " % path )
00795                     if self.opts.get('OVERRIDE',False):
00796                         log.warn("proceeding by virtue of OVERRIDE option")
00797                     else:
00798                         sys.exit(1)
00799                     pass
00800                 log.debug("renaming %s into %s " % ( resfile, prior ))
00801                 for path in workings:
00802                     log.debug("remove workings %s " % path )
00803                     assert len(path) > 10
00804                     os.remove( path )
00805                 os.rename( resfile, prior )
00806             else:
00807                 pass    ## non-safe simply writes directly to target
00808 
00809             cat.append( self.relname(tab) )
00810             pass
00811 
00812         if self.opts['decoupled']:
00813             log.info("completed decoupled merge into catalog, catfile %s " % ( catfile ) )
00814             for k in sorted(omrg):
00815                 log.info( "%-15s %s " % ( k, omrg[k] ))
00816         else:
00817             open( catfile , "w" ).write( "\n".join(cat) + "\n" ) 
00818             log.info("completed writing catalog, catfile %s " % catfile )
00819 

def DybPython::db::DB::_merge_localseqno (   self,
  prior,
  safefile 
) [private]
Decoupled handling of LOCALSEQNO requires merging the small LOCALSEQNO into the preexisting bigger one.

Merge checks:

#. changed LASTUSEDSEQNO restricted to declared subset of tables
#. changes increment LASTUSEDSEQNO

Definition at line 820 of file db.py.

00821                                                   :
00822         """
00823         decoupled handling of LOCALSEQNO requires merging of small LOCALSEQNO into preexisting bigger one 
00824 
00825         Merge checks:
00826 
00827         #. changed LASTUSEDSEQNO restricted to declared subset of tables
00828         #. changes increment LASTUSEDSEQNO
00829 
00830         """ 
00831         ori = AsciiCSV( None, prior )()               ## .csv            full old one
00832         upd = AsciiCSV( None, safefile )()            ## .csv._safe      slim decoupled subset 
00833         mrg = ori.merged( upd , postpend="._merged" ) ## .csv._merged    merge slim decoupled into full old one
00834         mrg.write()
00835 
00836         log.debug( "ori %r " % ori.lastusedseqno ) 
00837         log.debug( "upd %r " % upd.lastusedseqno ) 
00838         log.debug( "mrg %r " % mrg.lastusedseqno ) 
00839 
00840         ## compared the merged with the original
00841         om = DD( ori.lastusedseqno , mrg.lastusedseqno , increments=True )
00842         log.debug( "om (ori cf mrg) %s " % om )  
00843         assert len(om['removed']) == 0, om
00844         ts = self.tables
00845         assert list(om['changed']) == om['increments'].keys()
00846         for t, i in om['increments'].items():
00847             assert t in ts, "changed table %s  must be in selection %s " % ( t, ts )     
00848             if i < 0:
00849                 msg = "LASTUSEDSEQNO must increase %s %s %s unless using OVERRIDE " % ( i, t , om )
00850                 log.fatal(msg) 
00851                 if not self.opts.get('OVERRIDE',False):
00852                     raise Exception(msg)   
00853                 else:
00854                     log.warn("proceed due to OVERRIDE option")
00855 
00856 
00857         log.warn("writing merged %r into %s " % ( mrg.lastusedseqno, mrg.path ))
00858         workings = (upd.path,)
00859         resfile = Stat( mrg.path )
00860         return resfile, workings , om
00861 
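
A minimal sketch of the bookkeeping constraint enforced here (not the ``AsciiCSV``/``DD`` implementation; ``selection`` is a hypothetical stand-in for ``self.tables``): every table changed by the decoupled update must be within the selection and its LASTUSEDSEQNO may only increase, unless OVERRIDE is used::

    ori = {'CableMap': 100, 'HardwareID': 50, 'CalibPmtSpec': 29}   ## LASTUSEDSEQNO from pre-existing catalog LOCALSEQNO.csv
    upd = {'CableMap': 102, 'HardwareID': 53}                       ## slim decoupled LOCALSEQNO.csv being merged in
    mrg = dict(ori, **upd)                                          ## merged result, written to LOCALSEQNO.csv._merged

    selection = ['CableMap', 'CableMapVld', 'HardwareID', 'HardwareIDVld', 'LOCALSEQNO']
    for t, lus in upd.items():
        assert t in selection, "changed table must be in the selection"
        assert lus >= ori.get(t, 0), "LASTUSEDSEQNO must increase (unless using OVERRIDE)"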

def DybPython::db::DB::_write_csvdirect (   self,
  select,
  tf 
) [private]
Adopt a low level approach to avoid unnecessary conversions into 
python types then back to string, and the associated difficulties of 
then getting precisely the same output as SELECT * INTO OUTFILE.

Note that use of `store_result` rather than `use_result` means 
that all rows are in memory at once.

NB for consistency the CSV output by this command MUST MATCH that 
produced by _write_outfile

`_write_csvdirect` is used by **rdumpcat** , this mimics 
the output from `_write_outfile` (used by **dumpcat**) with 
the big advantage that it works remotely, with no strong permission 
requirements

TODO:

#. when there is a pre-existing LOCALSEQNO, redirect LOCALSEQNO to a temporary file 
   and do a merge... easiest to instantiate them as AsciiCSV and then merge at that level 

Definition at line 862 of file db.py.

00863                                             :
00864         """
00865         Adopt low level approach to avoid unnecessary conversions into 
00866         python types then back to string and the associated difficulties of 
00867         then getting precisely the same as SELECT * INTO OUTFILE 
00868 
00869         Note that use of `store_result` rather than `use_result` means 
00870         that all rows are in memory at once.
00871 
00872         NB for consistency the CSV ouput by this command MUST MATCH that 
00873         by _write_outfile
00874 
00875         `_write_csvdirect` is used by **rdumpcat** , this mimics 
00876         the output from `_write_outfile` (used by **dumpcat**) with 
00877         the big advantage that it works remotely, with no strong permission 
00878         requirements
00879 
00880         TODO:
00881 
00882         #. when there is a pre-existing LOCALSEQNO redirect LOCALSEQNO to a temporay file 
00883            and do a merge...  easiest to instanciate them as AsciiCSV and then merge at that level 
00884 
00885         """
00886         q = str(select)
00887         log.debug("_write_csvdirect %s " % q) 
00888 
00889         llconn = self.llconn   
00890         llconn.query( q )
00891 
00892         lessmemory = self.opts.get('LESSMEMORY', False)
00893         if lessmemory:
00894             log.info("using `--LESSMEMORY` option : less memory expensive but more network expensive 'use_result'  ")
00895             result = llconn.use_result()
00896         else:
00897             log.info("using more memory expensive but less network expensive 'store_result' ")
00898             result = llconn.store_result()
00899 
00900         csvf = CSVFormat( result.describe() )   
00901         for row in result.fetch_row(maxrows=0, how=0):   ## all rows as tuples
00902             tf.write( str(csvf) % tuple(row) +"\n" )
00903 

def DybPython::db::DB::_write_outfile (   self,
  select 
) [private]
Use of "INTO OUTFILE" forces client and server to be on the same machine

Definition at line 904 of file db.py.

00905                                      :
00906         """
00907         Use of "INTO OUTFILE" forces client and server to be on the same machine
00908         """
00909         q = str(select)
00910         log.debug("_write_outfile %s " % q) 
00911         self(q)
00912 
00913 

def DybPython::db::DB::rloadcat_ (   self,
  args,
  kwa 
)
Loads an ascii catalog into a possibly remote database.  
This is used by DB managers in the final step of the update SOP to 
propagate ``dybaux`` updates into ``offline_db``.
 
Usage::

     ./db.py tmp_offline_db rloadcat ~/dybaux/catalog/tmp_offline_db

Steps taken by **rloadcat**:

#. compares tables and `SEQNO` present in the ascii catalog with those in the DB and reports differences found.
   The comparison looks both at the ``LOCALSEQNO`` tables that DBI uses to hold the `LASTUSEDSEQNO` for each table
   and also by looking directly at all `SEQNO` present in the validity tables. 
   The **rcmpcat** command does only these comparisons. 

#. if updates are found the user is asked for consent to continue with updating

#. for the rows (`SEQNO`) that are added by the update the catalog validity tables ``INSERTDATE`` timestamps are *fastforwarded* inplace to the current UTC time 

#. catalog tables are imported into the DB with the `mysqlimport` tool. 
   For payload and validity tables the `mysqlimport` option ``--ignore`` is used meaning that only new rows (as determined 
   by their primary keys) are imported, other rows are ignored. 
   For the ``LOCALSEQNO`` table the option ``--replace`` is used in order to replace the  ``(TABLENAME,LASTUSEDSEQNO)``   
   entry. 
 
:return: dictionary keyed by payload table names with values containing lists of SEQNO values
:rtype: dict

You might be tempted to use **rloadcat** as a faster alternative to **load**; however this is
not advised due to the extra things that **rloadcat** does, such as update comparisons,
fastforwarding and potentially merging in (when the decoupled option is used).

In comparison the **load** command blasts what comes before it; the same can be done using
**forced_rloadcat** with the ``--DROP`` option::

    ./db.py --DROP tmp_offline_db forced_rloadcat ~/dybaux/catalog/tmp_offline_db    

After which you can check operation via an **rdumpcat** back onto the working copy, before doing 
any updates::

    ./db.py tmp_offline_db rdumpcat ~/dybaux/catalog/tmp_offline_db 
    svn st ~/dybaux/catalog/tmp_offline_db    ## should show no changes 


Reading full catalog into memory is expensive.

#. can I omit the payload tables from the read ?


Definition at line 914 of file db.py.

00915                                      :
00916         """
00917         Loads an ascii catalog into a possibly remote database.  
00918         This is used by DB managers in the final step of the update SOP to 
00919         propagate ``dybaux`` updates into ``offline_db``.
00920  
00921         Usage::
00922 
00923              ./db.py tmp_offline_db rloadcat ~/dybaux/catalog/tmp_offline_db
00924 
00925         Steps taken by **rloadcat**:
00926 
00927         #. compares tables and `SEQNO` present in the ascii catalog with those in the DB and reports diffences found.
00928            The comparison looks both at the ``LOCALSEQNO`` tables that DBI uses to hold the `LASTUSEDSEQNO` for each table
00929            and also by looking directly at all `SEQNO` present in the validity tables. 
00930            The **rcmpcat** command does only these comparisons. 
00931 
00932         #. if updates are found the user is asked for consent to continue with updating
00933 
00934         #. for the rows (`SEQNO`) that are added by the update the catalog validity tables ``INSERTDATE`` timestamps are *fastforwarded* inplace to the current UTC time 
00935 
00936         #. catalog tables are imported into the DB with the `mysqlimport` tool. 
00937            For payload and validity tables the `mysqlimport` option ``--ignore`` is used meaning that only new rows (as determined 
00938            by their primary keys) are imported, other rows are ignored. 
00939            For the ``LOCALSEQNO`` table the option ``--replace`` is used in order to replace the  ``(TABLENAME,LASTUSEDSEQNO)``   
00940            entry. 
00941  
00942         :return: dictionary keyed by payload table names with values containing lists of SEQNO values
00943         :rtype: dict
00944 
00945         You might be tempted to use **rloadcat** as a faster alternative to **load** however this is
00946         not advised due to the extra things that **rloadcat** does such as update comparisons and
00947         fastforwarding and potentially merging in (when the decouped option is used).
00948 
00949         In comparison the **load** command blasts what comes before it, this can be done using
00950         **forced_rloadcat** with the ``--DROP`` option::
00951 
00952             ./db.py --DROP tmp_offline_db forced_rloadcat ~/dybaux/catalog/tmp_offline_db    
00953 
00954         After which you can check operation via an **rdumpcat** back onto the working copy, before doing 
00955         any updates::
00956 
00957             ./db.py tmp_offline_db rdumpcat ~/dybaux/catalog/tmp_offline_db 
00958             svn st ~/dybaux/catalog/tmp_offline_db    ## should show no changes 
00959 
00960 
00961         Reading full catalog into memory is expensive.
00962 
00963         #. can I omit the payload tables from the read ?
00964 
00965 
00966         """
00967         log.debug("rloadcat %s %s %s " % ( self.sect, repr(args), repr(kwa)))
00968         assert len(args) > 0,  "argument specifying the path of an existing catalog directory is required %s " % args[0]
00969 
00970         tselect = self.paytables
00971         log.debug("rloadcat_ tselect %r " % tselect )
00972 
00973         cat = AsciiCat(args[0], skip_pay_check=self.skip_pay_check, allow_partial=self.allow_partial )
00974         updates = cat.updates( self , tselect=tselect , fastforward=False )
00975 
00976         if len(updates) == 0:
00977             log.warn("no updates (new tables or new SEQNO) are detected, nothing to do ")
00978             return {}
00979 
00980         for tn,seq in updates.items():
00981             log.info(" %-20s has %d new SEQNO : %r " % ( tn, len(seq), seq ) ) 
00982 
00983         uptabs = updates.keys()  ## payload names only
00984         log.info("changed tables %r " % uptabs ) 
00985         
00986         
00987         for tn in uptabs:
00988             if tn not in self.tables:
00989                 raise Exception("updated table %s is not in selected table list %r " % (tn,self.tables))
00990 
00991         if self.opts['noconfirm']:
00992             log.info("proceed without confirmation due to noconfirm option  " )
00993         else:
00994             var = raw_input("Enter YES to proceed with rloadcat for  : %r " % updates.keys() )
00995             if var == "YES":
00996                 log.info("user consents to update tables %r " % updates.keys()  )
00997             else:
00998                 log.info("user declined to update " )
00999                 return
01000          
01001         updates_ = cat.updates( self , tselect=tselect, fastforward=True )    ## run again with fastforward enabled
01002         assert updates == updates_
01003 
01004         alltabs = sorted( map(lambda _:"%sVld"%_, uptabs) + uptabs + ["LOCALSEQNO"] )     ## double up for pairs and machinery table LOCALSEQNO
01005         showtables = self._get_showtables(nocache=True)
01006 
01007         for tab in alltabs:
01008             if tab not in showtables:
01009                 raise Exception("table %(tab)s does not exist " % locals() )                       
01010 
01011         for tn in sorted(uptabs) + ["LOCALSEQNO"]:
01012             self.loadcsv( cat, tn )
01013 
01014         ## force DB access following a load in order to notice changes 
01015         self.wipe_cache()    
01016         return updates

def DybPython::db::DB::loadcsv (   self,
  cat,
  tn 
)
:param cat: AsciiCat instance 
:param tn: string payload table name or LOCALSEQNO

Definition at line 1017 of file db.py.

01018                                 :
01019         """
01020         :param cat: AsciiCat instance 
01021         :param tn: string payload table name or LOCALSEQNO
01022 
01023         """
01024         tabs = ( tn, ) if tn == "LOCALSEQNO" else ( tn,tn+'Vld' )
01025         csvs = map(lambda _:cat[_], tabs )
01026         paths = map(lambda _:_.abspath, csvs )
01027         replace_ignore = "REPLACE" if tn == "LOCALSEQNO" else "IGNORE"    ## ignore is effective default for remote imports anyhow
01028         log.info("loadcsv_  %(tn)s loading paths %(paths)s into tabs %(tabs)s replace_ignore %(replace_ignore)s  " % locals() )
01029             
01030         if self.opts.get('DROP'):
01031             log.warn("dropping and recreating table ")
01032             assert self.sect == 'tmp_offline_db' and os.getlogin() == 'blyth'
01033             for tab,csv in zip(tabs,csvs):
01034                 ctx = dict(tab=tab,hdr=csv.hdr)
01035                 self("DROP TABLE IF EXISTS %(tab)s " % ctx )
01036                 self("CREATE TABLE %(tab)s ( %(hdr)s  )" % ctx)       
01037         else:
01038             pass
01039 
01040         if not self.opts.get('local'):
01041             impr = MySQLImport(self.dbc)
01042             xopts = "" if self.opts['nolock'] else "--lock-tables"
01043             out = impr(csvpaths=paths, verbose=True, replace_ignore="--"+replace_ignore.lower() , xopts=xopts ) 
01044             log.info(out)
01045         else:
01046             for path,tab in zip(paths,tabs):
01047                 ctx = dict(replace_ignore=replace_ignore,path=path, tab=tab)
01048                 self("LOAD DATA LOCAL INFILE '%(path)s' %(replace_ignore)s INTO TABLE %(tab)s FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' IGNORE 1 LINES " % ctx)

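A standalone restatement of the pairing rule used in loadcsv above, showing which CSVs are loaded together and how ``REPLACE``/``IGNORE`` is chosen::

    def csv_plan(tn):
        """Payload tables load together with their Vld partner using IGNORE,
        the LOCALSEQNO machinery table loads alone using REPLACE."""
        tabs = ( tn, ) if tn == "LOCALSEQNO" else ( tn, tn + 'Vld' )
        replace_ignore = "REPLACE" if tn == "LOCALSEQNO" else "IGNORE"
        return tabs, replace_ignore

    print csv_plan("CableMap")      # (('CableMap', 'CableMapVld'), 'IGNORE')
    print csv_plan("LOCALSEQNO")    # (('LOCALSEQNO',), 'REPLACE')
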
def DybPython::db::DB::forced_rloadcat_ (   self,
  args,
  kwa 
)
Forcible loading of a catalog ... FOR TESTING ONLY

Definition at line 1049 of file db.py.

01050                                              :
01051         """
01052         Forcible loading of a catalog ... FOR TESTING ONLY
01053         """ 
01054         assert self.sect == 'tmp_offline_db' 
01055         restrict = self.opts['RESTRICT'].split(",")
01056         log.info("forced_rloadcat_ restrict %s " % restrict )
01057         cat = AsciiCat(args[0])
01058         for tn in map(lambda _:_[:-3],filter(lambda _:_[-3:] == 'Vld', cat )) + ["LOCALSEQNO"]:
01059             if len(restrict) == 0 or tn in restrict:
01060                 self.loadcsv( cat , tn )
01061             else:
01062                 log.warn("forced_rloadcat_ RESTRICT option excludes tn %s " % tn )
01063         return cat 

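A standalone sketch of the RESTRICT filtering above, with the catalog keys made up for illustration::

    cat_keys = ['CableMap', 'CableMapVld', 'HardwareID', 'HardwareIDVld', 'LOCALSEQNO']
    restrict = "CableMap".split(",")                           # value of the RESTRICT option

    payloads = [k[:-3] for k in cat_keys if k[-3:] == 'Vld'] + ["LOCALSEQNO"]
    selected = [tn for tn in payloads if len(restrict) == 0 or tn in restrict]
    print selected                                             # ['CableMap']
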
def DybPython::db::DB::rcmpcat_ (   self,
  args,
  kwa 
)
Just dumps a comparison between the target DB and an ascii catalog, allowing the actions
that an **rloadcat** would perform to be previewed.

Compares DBI vitals such as LASTUSEDSEQNO between a DBI database and a DBI ascii catalog, usage::

    ./db.py tmp_offline_db rcmpcat ~/dybaux/catalog/tmp_offline_db

Definition at line 1064 of file db.py.

01065                                      :
01066         """
01067         Just dumps a comparison between the target DB and an ascii catalog, allowing the actions
01068         that an **rloadcat** would perform to be previewed.
01069 
01070         Compares DBI vitals such as LASTUSEDSEQNO between a DBI database and a DBI ascii catalog, usage::
01071 
01072             ./db.py tmp_offline_db rcmpcat ~/dybaux/catalog/tmp_offline_db
01073 
01074         """ 
01075         log.debug("rcmpcat  %s %s %s " % ( self.sect, repr(args), repr(kwa)))
01076         assert len(args) > 0, "argument specifying the path of the catalog folder to be compared with is required " 
01077 
01078         tselect = self.paytables
01079         log.debug("rcmpcat_ tselect %r " % tselect )
01080 
01081         cat = AsciiCat(args[0], skip_pay_check=self.skip_pay_check )
01082         updates = cat.updates( self , tselect=tselect , fastforward=False )
01083 
01084         if len(updates) == 0:
01085             log.warn("no updates (new tables or new SEQNO) are detected  ")
01086 
01087         uptabs = updates.keys()
01088         uptabs = sorted( map(lambda _:"%sVld"%_, uptabs) + uptabs + ["LOCALSEQNO"] )     ## double up for pairs and machinery table LOCALSEQNO
01089         for tn in uptabs:
01090             if tn not in self.tables:
01091                 raise Exception("updated table %s is not in selected table list %r " % (tn,self.tables))
01092 
01093         for tn,seq in updates.items():
01094             log.info(" %-20s has %d new SEQNO : %r " % ( tn, len(seq), seq ) ) 
01095 
01096         return updates 
01097 
01098 

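A minimal sketch of the preview-then-load pattern from Python, assuming a configured ``tmp_offline_db`` section and an illustrative catalog path::

    from DybPython import DB

    db = DB("tmp_offline_db")
    updates = db.rcmpcat_("/path/to/catalog/tmp_offline_db")    # hypothetical path
    if updates:
        print "tables with new SEQNO : %r " % sorted(updates.keys())
    else:
        print "nothing to load"
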
def DybPython::db::DB::dump_ (   self,
  args,
  kwa 
)
Dumps tables from any accessible database into a mysqldump file. Usage::

     db.py                        offline_db dump /tmp/offline_db.sql  ## without -t a default list of tables is dumped
     db.py -t CableMap,HardwareID offline_db dump /tmp/offline_db.sql
     tail -25  /tmp/offline_db.sql                                     ## checking tail, look for the LASTUSEDSEQNO entries
 
Use the ``-t/--tselect`` option with a comma delimited list of payload tables to select.
Corresponding validity tables and the `LOCALSEQNO` table are included automatically.

The now default ``-d/--decoupled`` option means that the  ``LOCALSEQNO`` table is 
dumped separately and only contains entries corresponding to the selected tables. 
The decoupled dump can be loaded into ``tmp_offline_db`` without any special options, 
as the table selection is reflected within the dump::

     db.py tmp_offline_db load  /tmp/offline_db.sql

Partial dumping is implemented using::

     mysqldump ... --where="TABLENAME IN ('*','CableMap','HardwareID')" LOCALSEQNO 

Definition at line 1099 of file db.py.

01099                                  : 
01100         """
01101         Dumps tables from any accessible database into a mysqldump file. Usage::
01102 
01103              db.py                        offline_db dump /tmp/offline_db.sql  ## without -t a default list of tables is dumped
01104              db.py -t CableMap,HardwareID offline_db dump /tmp/offline_db.sql
01105              tail -25  /tmp/offline_db.sql                                     ## checking tail, look for the LASTUSEDSEQNO entries
01106          
01107         Use the ``-t/--tselect`` option with a comma delimited list of payload tables to select.
01108         Corresponding validity tables and the `LOCALSEQNO` table are included automatically.
01109 
01110         The now default ``-d/--decoupled`` option means that the  ``LOCALSEQNO`` table is 
01111         dumped separately and only contains entries corresponding to the selected tables. 
01112         The decoupled dump can be loaded into ``tmp_offline_db`` without any special options, 
01113         as the table selection is reflected within the dump::
01114 
01115              db.py tmp_offline_db load  /tmp/offline_db.sql
01116 
01117         Partial dumping is implemented using::
01118 
01119              mysqldump ... --where="TABLENAME IN ('*','CableMap','HardwareID')" LOCALSEQNO 
01120 
01121         """
01122         self.predump()
01123 
01124         msg = r"""
01125 performing mysqldump 
01126    DO NOT INTERRUPT FOR A VALID DUMP ... MAY TAKE ~30s OR MORE DEPENDING ON NETWORK 
01127   
01128 """
01129         assert len(args) == 1, "dump_ : ERROR an argument specifying the path of the dumpfile is required"
01130 
01131         if self.opts['decoupled']:
01132             dmpr = MySQLDumpDBI(self.dbc)
01133             log.warn( "using decoupled dump : only entries corresponding to the table selection are included in the LOCALSEQNO table  ")
01134         else:
01135             dmpr = MySQLDump(self.dbc)
01136         log.info( msg )
01137         if self.opts['all']: 
01138             tables = []
01139         else:
01140             tables = self.tables
01141         log.info("dumping tables %r to %s " % (tables, args[0]) )
01142         ret = dmpr( tables , args[0] )    
01143         log.info( ret )
01144 
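A standalone sketch of how the ``--where`` clause quoted above could be composed for a table selection (helper name hypothetical)::

    def localseqno_where(tselect):
        names = ["'*'"] + [ "'%s'" % t for t in tselect ]
        return '--where="TABLENAME IN (%s)"' % ",".join(names)

    print localseqno_where(["CableMap", "HardwareID"])
    # --where="TABLENAME IN ('*','CableMap','HardwareID')"
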
def DybPython::db::DB::load_ (   self,
  args,
  kwa 
)
Loads tables from a mysqldump file into a target DB. The target DB is configured by the
parameters in, for example, the `tmp_offline_db` section of the config file.
For safety the name of the configured target database must begin with `tmp_`.
 
.. note:: 

     CAUTION IF THE TARGET DATABASE EXISTS ALREADY IT WILL BE DROPPED AND RECREATED BY THIS COMMAND

Usage example:: 

     db.py tmp_offline_db load /tmp/offline_db.sql

Definition at line 1145 of file db.py.

01145                                  : 
01146         """     
01147         Loads tables from a mysqldump file into a target DB. The target DB is configured by the 
01148         parameters in, for example, the `tmp_offline_db` section of the config file.
01149         For safety the name of the configured target database must begin with `tmp_`.
01150  
01151         .. note:: 
01152 
01153              CAUTION IF THE TARGET DATABASE EXISTS ALREADY IT WILL BE DROPPED AND RECREATED BY THIS COMMAND
01154 
01155         Usage example:: 
01156 
01157              db.py tmp_offline_db load /tmp/offline_db.sql
01158 
01159         """
01160         dbn = self.dbc['database']
01161         assert dbn.startswith('tmp_'), "load_ ERROR : configured database name must start with tmp_ : %s " % dbn  
01162         
01163         path = args[0]
01164         assert os.path.exists(path) , "load_ ERROR : need an existing path to a mysqldump file : %s " % path
01165 
01166         if self.opts['APPENDLOAD']:
01167             log.warn("APPENDLOAD option prevents database dropping before load, resulting in commingled tables : NOT SOP KOSHER ")  
01168         else:
01169             self("DROP DATABASE IF EXISTS %(database)s" % self.dbc )
01170             self("CREATE DATABASE %(database)s" % self.dbc )
01171 
01172         lodr = MySQLLoad(self.dbc)
01173         ret = lodr(path)
01174         log.info(ret)
01175 
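A standalone restatement of the safety checks applied above before the destructive drop/recreate/load sequence::

    import os

    def assert_loadable(database, path):
        assert database.startswith('tmp_'), "configured database name must start with tmp_ : %s " % database
        assert os.path.exists(path), "need an existing path to a mysqldump file : %s " % path

    assert_loadable("tmp_offline_db", "/tmp/offline_db.sql")    # raises unless the dump file exists
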
def DybPython::db::DB::read_seqno (   self,
  tab = "LOCALSEQNO" 
)
Read LASTUSEDSEQNO entries from table ``LOCALSEQNO``

Definition at line 1176 of file db.py.

01177                                           :
01178         """
01179         Read LASTUSEDSEQNO entries from table ``LOCALSEQNO``
01180         """
01181         self._seqno = {}
01182         for d in self("select * from %s " % tab ):
01183             n,t = int(d['LASTUSEDSEQNO']), d['TABLENAME']
01184             if t == "*":continue
01185             self._seqno[t] = n

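A standalone sketch of the reduction above, with rows shaped like the dicts returned by ``select * from LOCALSEQNO`` (LASTUSEDSEQNO values taken from the ipython example further down, the wildcard row value is illustrative)::

    rows = [ {'TABLENAME':'*',          'LASTUSEDSEQNO':0},
             {'TABLENAME':'CableMap',   'LASTUSEDSEQNO':213},
             {'TABLENAME':'HardwareID', 'LASTUSEDSEQNO':172}, ]

    seqno = {}
    for d in rows:
        n, t = int(d['LASTUSEDSEQNO']), d['TABLENAME']
        if t == "*": continue                 # the wildcard bookkeeping row is skipped
        seqno[t] = n
    print seqno                               # {'CableMap': 213, 'HardwareID': 172}
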
def DybPython::db::DB::check_seqno (   self)
Compares the LASTUSEDSEQNO entries read into ``self._seqno`` with the
``max(SEQNO)`` results of selects on the DB payload and validity tables.

Definition at line 1186 of file db.py.

01187                          :
01188         """
01189         Compares the LASTUSEDSEQNO entries read into ``self._seqno`` with the
01190         ``max(SEQNO)`` results of selects on the DB payload and validity tables.
01191         """ 
01192         for t,n in self._seqno.items():
01193             v = int(self("select max(SEQNO) as n from %s " % "%sVld" % t )[0]["n"])
01194             p = int(self("select max(SEQNO) as n from %s " % t )[0]["n"])
01195             sseq = set([v,p,n])   #1392
01196             nseq = len(sseq)
01197             if nseq == 1:
01198                 log.debug("check_seqno succeeds for table %s n/v/p: %s %s %s " % ( t,n,v,p) )
01199                 pass
01200             elif nseq == 2:
01201                 dseq = max(sseq) - min(sseq)
01202                 if dseq == 1:
01203                     log.info("check_seqno permitting single SEQNO offset, presumably update in progress t:%s n:%s v:%s p:%s " % ( t,n,v,p ))    
01204                 else:        
01205                     assert 0, ( "seqno mismatch dseq %s for %s " % (dseq,t) , n,v,p )
01206             else:
01207                 log.fatal("check_seqno nseq %s v/p/n all different this should not happen %s : %s %s %s " % (nseq, t, v,p,n ))
01208                 assert 0, ( nseq, t, v, p, n )
01209                   
      
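The tolerance applied above can be restated standalone: identical n/v/p values pass, a spread of exactly one SEQNO is permitted as an update presumed in progress, anything else is treated as a mismatch::

    def seqno_consistent(n, v, p):
        sseq = set([v, p, n])
        if len(sseq) == 1:
            return True                                   # LOCALSEQNO, Vld and payload agree
        return len(sseq) == 2 and max(sseq) - min(sseq) == 1

    print seqno_consistent(213, 213, 213)    # True
    print seqno_consistent(213, 214, 214)    # True : single offset permitted
    print seqno_consistent(213, 215, 215)    # False : mismatch
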
def DybPython::db::DB::get_seqno (   self)
SEQNO accessor, reading and checking is done on first access to ``self.seqno`` with ::
       
    db = DB()
    print db.seqno   ## checks DB 
    print db.seqno   ## uses cached
    del db._seqno    ## force a re-read and check
    print db.seqno 

Definition at line 1210 of file db.py.

01211                        :
01212         """
01213         SEQNO accessor, reading and checking is done on first access to ``self.seqno`` with ::
01214        
01215             db = DB()
01216             print db.seqno   ## checks DB 
01217             print db.seqno   ## uses cached
01218             del db._seqno    ## force a re-read and check
01219             print db.seqno 
01220 
01221         """
01222         if hasattr(self, '_seqno' ):
01223             return self._seqno
01224         self.read_seqno()
01225         self.check_seqno()
        return self._seqno
def DybPython::db::DB::vseqnos (   self,
  tn 
)

Definition at line 1231 of file db.py.

01232                           :
        return map(lambda _:int(_["n"]), self("select SEQNO as n from %sVld" % tn ))
def DybPython::db::DB::read_allseqno (   self)

Definition at line 1233 of file db.py.

01234                            :
01235         self._allseqno = {}
01236         for tn,lus in self.seqno.items():
            self._allseqno[tn] = self.vseqnos(tn)
def DybPython::db::DB::check_allseqno (   self)

Definition at line 1237 of file db.py.

01238                             :
01239         """
01240         """
01241         assert sorted(self._allseqno.keys()) == sorted(self._seqno.keys()), "seqno keys mismatch "
01242         fabseqno = self.fabseqno
01243         for tn in self._allseqno.keys():
01244             if fabseqno[tn] != self._seqno[tn]:
01245                 msg = "check_allseqno mismatch for %s %s %s " % ( tn, fabseqno[tn], self._seqno[tn]) 
01246                 if tn in self.miscreants:
01247                     log.warn( msg + " (KNOWN MISCREANT) " )
01248                 else:
01249                     log.fatal(msg)
01250                     assert 0
01251             else:
01252                 pass
01253         if len(self.miscreants) == 0:
01254             assert fabseqno == self._seqno , ("fabseqno mismatch ", fabseqno, self._seqno )
01255         else:
01256             log.debug( "fabseqno from allseqno %r seqno from LOCALSEQNO table  %r " % (fabseqno, self._seqno ))
        pass 
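A standalone sketch of the miscreant exemption above, with counts taken from the ipython example under ``get_fabseqno``: mismatches for tables on the ``miscreants`` list are only warned about, any other mismatch is fatal::

    miscreants = ('CalibPmtSpec', 'CalibFeeSpec')
    fabseqno = {'CableMap': 213, 'CalibPmtSpec': 8}
    seqno    = {'CableMap': 213, 'CalibPmtSpec': 29}

    for tn in sorted(fabseqno):
        if fabseqno[tn] != seqno[tn]:
            status = "KNOWN MISCREANT" if tn in miscreants else "FATAL"
            print "%s : mismatch for %s %s %s " % ( status, tn, fabseqno[tn], seqno[tn] )
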
def DybPython::db::DB::get_allseqno (   self)
Provides a table name keyed dict containing lists of all SEQNO in each Vld table  
The tables included correspond to the read DBI tables (namely those in LOCALSEQNO)  

Definition at line 1257 of file db.py.

01258                           :
01259         """
01260         Provides a table name keyed dict containing lists of all SEQNO in each Vld table  
01261         The tables included correspond to the read DBI tables (namely those in LOCALSEQNO)  
01262         """ 
01263         if hasattr(self, '_allseqno' ):
01264             return self._allseqno
01265         self.read_allseqno()
01266         self.check_allseqno()
        return self._allseqno
def DybPython::db::DB::wipe_cache (   self)
Wipe the cache, forcing DB access to retrieve the info afresh.
This is needed when wishing to check status after a DB load from the
same process that performed the load.

Definition at line 1270 of file db.py.

01271                         :
01272         """
01273         Wipe the cache, forcing DB access to retrieve the info afresh.
01274         This is needed when wishing to check status after a DB load from the 
01275         same process that performed the load.
01276         """
01277         log.debug("wipe_cache")
01278         if hasattr(self, '_seqno' ):
01279             del self._seqno
01280         if hasattr(self, '_allseqno' ):
01281             del self._allseqno
01282 

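A minimal usage sketch: the ``seqno``/``allseqno`` properties are cached on first access, so wipe the cache after any operation that changes the DB to force a re-read::

    from DybPython import DB

    db = DB("tmp_offline_db")
    before = dict(db.seqno)                   # cached after first access
    # ... perform a load that adds tables or SEQNO here ...
    db.wipe_cache()                           # discard _seqno/_allseqno
    after = dict(db.seqno)                    # re-read and re-checked from the DB
    print sorted(set(after) - set(before))    # tables newly present in LOCALSEQNO
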
def DybPython::db::DB::get_fabseqno (   self)
Summarizes ``db.allseqno`` by fabricating a dict keyed by table name containing
the number of Vld SEQNO (from the length of the values in ``db.allseqno``).

This dict can be compared with ``db.seqno``, which is obtained from 
the LASTUSEDSEQNO entries in the ``LOCALSEQNO`` table.
Assuming kosher DBI handling of tables this fabricated dict ``db.fabseqno`` should 
match ``db.seqno``, meaning that SEQNO start from 1 and have no gaps.

       .. code-block:: ipython          

In [1]: from DybPython import DB

In [2]: db = DB("tmp_fake_offline_db")

In [3]: db.seqno   ## queries the LOCALSEQNO table in DB
Out[3]: 
{'CableMap': 213,
 'CalibFeeSpec': 113,
 'CalibPmtSpec': 29,
 'FeeCableMap': 3,
 'HardwareID': 172}

In [4]: db.fabseqno    ## a summarization of db.allseqno
Out[4]: 
{'CableMap': 213,
 'CalibFeeSpec': 111,
 'CalibPmtSpec': 8,
 'FeeCableMap': 3,
 'HardwareID': 172}

In [5]: db.miscreants   ## assertions avoided by miscreant status
Out[5]: ('CalibPmtSpec', 'CalibFeeSpec')


Definition at line 1285 of file db.py.

01286                           :
01287         """
01288         Summarizes ``db.allseqno`` by fabricating a dict keyed by table name containing 
01289         the number of Vld SEQNO (from the length of the values in ``db.allseqno``).
01290 
01291         This dict can be compared with ``db.seqno``, which is obtained from 
01292         the LASTUSEDSEQNO entries in the ``LOCALSEQNO`` table.
01293         Assuming kosher DBI handling of tables this fabricated dict ``db.fabseqno`` should 
01294         match ``db.seqno``, meaning that SEQNO start from 1 and have no gaps.
01295 
01296        .. code-block:: ipython          
01297 
01298                 In [1]: from DybPython import DB
01299 
01300                 In [2]: db = DB("tmp_fake_offline_db")
01301 
01302                 In [3]: db.seqno   ## queries the LOCALSEQNO table in DB
01303                 Out[3]: 
01304                 {'CableMap': 213,
01305                  'CalibFeeSpec': 113,
01306                  'CalibPmtSpec': 29,
01307                  'FeeCableMap': 3,
01308                  'HardwareID': 172}
01309 
01310                 In [4]: db.fabseqno    ## a summarization of db.allseqno
01311                 Out[4]: 
01312                 {'CableMap': 213,
01313                  'CalibFeeSpec': 111,
01314                  'CalibPmtSpec': 8,
01315                  'FeeCableMap': 3,
01316                  'HardwareID': 172}
01317 
01318                 In [5]: db.miscreants   ## assertions avoided by miscreant status
01319                 Out[5]: ('CalibPmtSpec', 'CalibFeeSpec')
01320 
01321 
01322         """
01323         if hasattr(self, '_fabseqno' ):
01324             return self._fabseqno
01325         self._fabseqno =  dict(map(lambda(k,v):(k,len(v)),self.allseqno.items()))
        return self._fabseqno
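A standalone sketch of the comparison implied above: when SEQNO start from 1 with no gaps the two dicts agree, so any difference exposes gaps or non-kosher handling (the SEQNO lists here are made up, the counts mirror the ipython output)::

    allseqno = { 'FeeCableMap'  : [1, 2, 3],
                 'CalibPmtSpec' : [1, 2, 5, 7, 9, 11, 26, 29], }
    seqno    = { 'FeeCableMap'  : 3,
                 'CalibPmtSpec' : 29, }

    fabseqno = dict(map(lambda(k,v):(k,len(v)), allseqno.items()))
    for tn in sorted(seqno):
        if fabseqno[tn] != seqno[tn]:
            print "%s : %d SEQNO present but LASTUSEDSEQNO is %d " % ( tn, fabseqno[tn], seqno[tn] )
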
def DybPython::db::DB::docs (   cls)
collect the docstrings on command methods 
identified by naming convention of ending with _ (and not starting with _) 

Definition at line 1328 of file db.py.

01329                    :
01330         """
01331         collect the docstrings on command methods 
01332         identified by naming convention of ending with _ (and not starting with _) 
01333         """
01334         mdoc = lambda m:getattr(m,'__doc__',None)
01335         mdocs  = [ dict(meth=k[:-1],doc=mdoc(v)) for k,v in [(k,v) for k,v in inspect.getmembers(cls) if k[-1]=='_' and k[0] != '_' and mdoc(v)]]
        return "\n".join([ """ %(meth)s : %(doc)s """ % d for d in mdocs ])   
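A standalone sketch of the naming convention used above: command methods end with a single underscore, do not start with one, and their docstrings become the help text::

    import inspect

    class Demo(object):
        def dump_(self):  "dump tables to a mysqldump file"
        def load_(self):  "load a mysqldump file into a tmp_ database"
        def _hidden(self): "not a command"

    mdoc = lambda m:getattr(m, '__doc__', None)
    cmds = [ (k[:-1], mdoc(v)) for k,v in inspect.getmembers(Demo)
             if k[-1] == '_' and k[0] != '_' and mdoc(v) ]
    print cmds   # [('dump', 'dump tables to a mysqldump file'), ('load', 'load a mysqldump file into a tmp_ database')]
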
def DybPython::db::DB::has_table (   self,
  tn 
)
:param tn: table name
:return exists: True if the table exists in the DB

Definition at line 1338 of file db.py.

01339                              :
01340         """
01341         :param tn: table name
01342         :return exists: True if the table exists in the DB
01343         """
01344         return len(self("show tables like '%s'" % tn )) == 1 

def DybPython::db::DB::tab (   self,
  name 
)
:param name: DBI payload table name 

Definition at line 1345 of file db.py.

01346                          :
01347         """
01348         :param name: DBI payload table name 
01349         """ 
01350         from tab import Tab
01351         return Tab( name, self) 
01352 

def DybPython::db::DB::cli_ (   self,
  args,
  kwa 
)
Emit to stdout the shell commandline for connecting to a mysql DB via the 
client, without actually doing so.
The section names depend on the content of :file:`~/.my.cnf`

Usage::

     eval $(db.py tmp_offline_db cli)

Bash function examples to define in :file:`~/.bash_profile` using this command::

     idb(){ local cnf=$1 ; shift ; eval $(db.py $cnf cli) $* ; }   
     offline_db(){             idb $FUNCNAME $* ; }        
     tmp_offline_db(){         idb $FUNCNAME $* ; }
     tmp_etw_offline_db(){     idb $FUNCNAME $* ; }
     tmp_jpochoa_offline_db(){ idb $FUNCNAME $* ; }
     ihep_dcs(){               idb $FUNCNAME $* ; }
    
Invoke the shortcut with the fast start extra argument for the client::

     ihep_dcs -A   


Note that a lower level, *almost* equivalent command to this sub-command
for standalone usage without `db.py` is provided by `my.py`, which
can probably run with the older system python alone.
Install into your PATH with::

   svn export http://dayabay.ihep.ac.cn/svn/dybsvn/dybgaudi/trunk/DybPython/scripts/my.py

Definition at line 1353 of file db.py.

01354                                  :
01355         """
01356         Emit to stdout the shell commandline for connecting to a mysql DB via the 
01357         client, without actually doing so.
01358         The section names depend on the content of :file:`~/.my.cnf`
01359 
01360         Usage::
01361 
01362              eval $(db.py tmp_offline_db cli)
01363 
01364         Bash function examples to define in :file:`~/.bash_profile` using this command::
01365 
01366              idb(){ local cnf=$1 ; shift ; eval $(db.py $cnf cli) $* ; }   
01367              offline_db(){             idb $FUNCNAME $* ; }        
01368              tmp_offline_db(){         idb $FUNCNAME $* ; }
01369              tmp_etw_offline_db(){     idb $FUNCNAME $* ; }
01370              tmp_jpochoa_offline_db(){ idb $FUNCNAME $* ; }
01371              ihep_dcs(){               idb $FUNCNAME $* ; }
01372     
01373         Invoke the shortcut with the fast start extra argument for the client::
01374 
01375              ihep_dcs -A   
01376 
01377 
01378         Note that a lower level, *almost* equivalent command to this sub-command 
01379         for standalone usage without `db.py` is provided by `my.py`, which 
01380         can probably run with the older system python alone. 
01381         Install into your PATH with::
01382 
01383            svn export http://dayabay.ihep.ac.cn/svn/dybsvn/dybgaudi/trunk/DybPython/scripts/my.py
01384 
01385         """
01386         cmd = MySQLCmd(self.dbc)()
01387         if not kwa.get('silent'):
01388             print cmd
01389         return cmd
01390 
01391 


Member Data Documentation

tuple DybPython::db::DB::miscreants = ('CalibPmtSpec','CalibFeeSpec',) [static]

Definition at line 1283 of file db.py.

tuple DybPython::db::DB::docs = classmethod(docs) [static]

Definition at line 1336 of file db.py.



Property Documentation

DybPython::db::DB::is_lowlevel = property(lambda self:self.opts.get('lowlevel', False)) [static]

Definition at line 259 of file db.py.

DybPython::db::DB::tmpfold = property( _get_tmpfold , doc=_get_tmpfold.__doc__ ) [static]

Definition at line 293 of file db.py.

DybPython::db::DB::tmpdir = property( _get_tmpdir, doc=_get_tmpdir.__doc__ ) [static]

Definition at line 306 of file db.py.

DybPython::db::DB::showtables = property( _get_showtables, doc=_get_showtables.__doc__ ) [static]

Definition at line 394 of file db.py.

DybPython::db::DB::showpaytables = property( _get_showpaytables, doc=_get_showpaytables.__doc__ ) [static]

Definition at line 407 of file db.py.

DybPython::db::DB::tables = property( _get_tables, doc=_get_tables.__doc__ ) [static]

Definition at line 428 of file db.py.

DybPython::db::DB::paytables = property( _get_paytables, doc=_get_paytables.__doc__ ) [static]

Definition at line 436 of file db.py.

DybPython::db::DB::optables = property( _get_optables, doc=_get_optables.__doc__ ) [static]

Definition at line 465 of file db.py.

DybPython::db::DB::seqno = property( get_seqno ) [static]

Definition at line 1226 of file db.py.

DybPython::db::DB::allseqno = property( get_allseqno , doc=get_allseqno.__doc__ ) [static]

Definition at line 1267 of file db.py.

DybPython::db::DB::fabseqno = property( get_fabseqno, doc=get_fabseqno.__doc__ ) [static]

Definition at line 1326 of file db.py.


The documentation for this class was generated from the following file: db.py

Generated on Fri May 16 2014 09:55:40 for DybPython by doxygen 1.7.4