Public Member Functions | |
def | __init__ |
def | close |
def | execute_ |
def | fetchone |
def | fetchcount |
def | fetchall |
def | __call__ |
def | check_ |
def | noop_ |
def | count_ |
def | desc |
def | read_desc |
def | outfile |
def | reldir |
def | relname |
def | tabfile |
def | dumpcat_ |
def | loadcat_ |
def | rloadcat_ |
def | rdumpcat_ |
def | dump_ |
def | load_ |
def | docs |
Public Attributes | |
opts | |
conn | |
llconn | |
dbc | |
sect | |
count | |
Static Public Attributes | |
tuple | is_lowlevel = property(lambda self:self.opts.get('lowlevel', False)) |
tuple | tmpfold = property( _get_tmpfold , doc=_get_tmpfold.__doc__ ) |
tuple | tmpdir = property( _get_tmpdir, doc=_get_tmpdir.__doc__ ) |
tuple | showtables = property( _get_showtables, doc=_get_showtables.__doc__ ) |
tuple | tables = property( _get_tables, doc=_get_tables.__doc__ ) |
tuple | docs = classmethod(docs) |
Private Member Functions | |
def | _get_tmpfold |
def | _get_tmpdir |
def | _get_showtables |
def | _get_tables |
def | _write_csvdirect |
def | _write_outfile |
Definition at line 233 of file db.py.
def DybPython::db::DB::__init__ | ( | self, | ||
sect = None , |
||||
opts = {} , |
||||
kwa | ||||
) |
Definition at line 234 of file db.py.
00234 {}, **kwa ): 00235 """ 00236 Initialize config dict corresponding to section of config file 00237 00238 :param sect: section in config file 00239 00240 """ 00241 self.opts = opts 00242 dbc = DBConf(sect=sect, **kwa) 00243 pars = dbc.mysqldb_parameters(nodb=kwa.get('nodb',False)) 00244 00245 try: 00246 conn = MySQLdb.connect( **pars ) 00247 except MySQLdb.Error, e: 00248 raise Exception("Error %d: %s " % ( e.args[0], e.args[1] ) ) 00249
def DybPython::db::DB::close | ( | self | ) |
def DybPython::db::DB::execute_ | ( | self, | ||
cmd | ||||
) |
def DybPython::db::DB::fetchone | ( | self, | ||
cmd | ||||
) |
def DybPython::db::DB::fetchall | ( | self, | ||
cmd | ||||
) |
def DybPython::db::DB::_get_tmpfold | ( | self | ) | [private] |
def DybPython::db::DB::_get_tmpdir | ( | self | ) | [private] |
Create new temporary directory for each instance, writable by ugo
Definition at line 286 of file db.py.
00291 : 00292 """ 00293 Create new temporary directory for each instance, writable by ugo 00294 """ 00295 if not hasattr(self,'_tmpdir'): 00296 if not os.path.exists(self.tmpfold): os.makedirs(self.tmpfold)
def DybPython::db::DB::__call__ | ( | self, | ||
cmd | ||||
) |
def DybPython::db::DB::check_ | ( | self, | ||
args, | ||||
kwa | ||||
) |
check connection to DB by issuing a SELECT of info functions such as DATABASE() and CURRENT_USER() command
Definition at line 304 of file db.py.
00304 : 00305 if self.opts.get('verbose',False): 00306 print cmd 00307 return self.fetchall(cmd) 00308 00309 def check_(self, *args, **kwa): 00310 """ check connection to DB by issuing a SELECT of info functions such as DATABASE() and CURRENT_USER() command
def DybPython::db::DB::noop_ | ( | self, | ||
args, | ||||
kwa | ||||
) |
Do nothing command, allowing to just instantiate the DB object and provide it for interactive prodding, eg:: ~/v/db/bin/ipython -- ~/DybPython/db.py tmp_offline_db noop In [1]: db("show tables") ## high level In [2]: db.llconn.query("select * from CalibPmtSpecVld") ## lowlevel _mysql In [3]: r = db.conn.store_result() This also demonstrates standalone :file:`db.py` usage, assuming svn checkout:: svn co http://dayabay.ihep.ac.cn/svn/dybsvn/dybgaudi/trunk/DybPython/python/DybPython
Definition at line 311 of file db.py.
00316 : 00317 """ 00318 Do nothing command, allowing to just instanciate the DB object and provide it for 00319 interactive prodding, eg:: 00320 00321 ~/v/db/bin/ipython -- ~/DybPython/db.py tmp_offline_db noop 00322 00323 In [1]: db("show tables") ## high level 00324 00325 In [2]: db.llconn.query("select * from CalibPmtSpecVld") ## lowlevel _mysql 00326 In [3]: r = db.conn.store_result() 00327 00328 This also demonstrates standalone :file:`db.py` usage, assuming svn checkout:: 00329 svn co http://dayabay.ihep.ac.cn/svn/dybsvn/dybgaudi/trunk/DybPython/python/DybPython
def DybPython::db::DB::_get_showtables | ( | self, | ||
nocache = False | ||||
) | [private] |
list names of all tables in DB as reported by SHOW TABLES, NB the result is cached so will become stale after deletions or creations unless `nocache=True` option is used
Definition at line 330 of file db.py.
00330 ://dayabay.ihep.ac.cn/svn/dybsvn/dybgaudi/trunk/DybPython/python/DybPython 00331 00332 """ 00333 pass 00334 00335 def _get_showtables( self , nocache=False ): 00336 """ 00337 list names of all tables in DB as reported by SHOW TABLES, 00338 NB the result is cached so will become stale after deletions or creations
def DybPython::db::DB::_get_tables | ( | self | ) | [private] |
list of selected table names to operate on
Definition at line 341 of file db.py.
00341 : 00342 self._showtables = [rec.values()[0] for rec in self("SHOW TABLES")] 00343 return self._showtables 00344 showtables = property( _get_showtables, doc=_get_showtables.__doc__ ) 00345 def _get_tables( self ):
def DybPython::db::DB::count_ | ( | self, | ||
args, | ||||
kwa | ||||
) |
List table counts of all tables in database, usage example:: db.py offline_db count *offline_db* is :file:`~/.my.cnf` section name specifying host/database/user/password
Definition at line 348 of file db.py.
00353 : 00354 """ 00355 List table counts of all tables in database, usage example:: 00356 00357 db.py offline_db count 00358 00359 *offline_db* is :file:`~/.my.cnf` section name specifying host/database/user/password 00360 00361 """ 00362 print "count %s %s %s " % (self.sect, repr(args), repr(kwa)) 00363 counts = dict(TOTAL=0) 00364 for tab in self.showtables: 00365 cnt = self.fetchone("SELECT COUNT(*) FROM %s" % tab ) 00366 n = float(cnt.values()[0]) 00367 counts[tab] = n 00368 counts['TOTAL'] += n 00369 print counts
def DybPython::db::DB::desc | ( | self, | ||
tab | ||||
) |
Header line with table definition in .csv files shift the pk definition to the end
Definition at line 370 of file db.py.
00371 : 00372 perc = 100.*counts[tab]/counts['TOTAL'] 00373 print "%-30s : %-10s : %10s " % ( tab, counts[tab] , "%.3f" % perc ) 00374 00375 def desc(self, tab ): 00376 """ 00377 Header line with table definition in .csv files shift the pk definition to the end 00378 """ 00379 pks = [] 00380 def _desc( f ): 00381 if f['Key'] == "PRI": 00382 pks.append(f['Field']) 00383 return "%(Field)s %(Type)s" % f cols = ",".join( [ _desc(f) for f in self("describe %s" % tab) ] )
def DybPython::db::DB::read_desc | ( | self, | ||
tabfile | ||||
) |
Read first line of csv file containing the description
Definition at line 384 of file db.py.
00385 : 00386 cols += ",PRIMARY KEY (" + ",".join( pks ) + ")" 00387 return cols + "\n" 00388 00389 def read_desc(self, tabfile ): 00390 """ 00391 Read first line of csv file containing the description 00392 """ 00393 tf = open(tabfile, "r") hdr = tf.readline().strip()
def DybPython::db::DB::outfile | ( | self, | ||
tab | ||||
) |
def DybPython::db::DB::reldir | ( | self, | ||
tab | ||||
) |
def DybPython::db::DB::relname | ( | self, | ||
tab | ||||
) |
def DybPython::db::DB::tabfile | ( | self, | ||
tab, | ||||
catfold | ||||
) |
def DybPython::db::DB::dumpcat_ | ( | self, | ||
args, | ||||
kwa | ||||
) |
Dumps tables from LOCAL database into DBI ascii catalog. This allows candidate DB updates to be shared/tested/previewed prior to doing :meth:`loadcat_` into the master DB. Usage example :: db.py local_offline_db dumpcat /path/to/catname "local_offline_db" :file:`~/.my.cnf` section name specifying local db /path/to/catname directory into which catalog will be dumped Tables dumped are controlled via options "--exclude","--all" The dumped ascii catalog directory is structured :: /path/to/<catname>/ <catname>.cat CalibFeeSpec/ CalibFeeSpec.csv CalibFeeSpecVld.csv CalibPmtSpec/ CalibPmtSpec.csv CalibPmtSpecVld.csv ... LOCALSEQNO/ LOCALSEQNO.csv The .csv files comprise a single header line with the table definition and remainder containing the row data. The resulting catalog can be used in a DBI cascade by setting DBCONF_URL to :: mysql://%(local_host)s/%(local_db)s#/path/to/catname/catname.cat;mysql://%(remote_host)s/%(remote_db)s NB from :dybsvn:`r9869` /path/to/catname/catname.cat can also be a remote URL such as :: http://dayabay:youknowit\@dayabay.ihep.ac.cn/svn/dybaux/trunk/db/cat/zhe/trial/trial.cat http://dayabay:youknowit\@dayabay.ihep.ac.cn/svn/dybaux/!svn/bc/8000/trunk/db/cat/zhe/trial/trial.cat When stuffing basic authentication credentials into the URL it is necessary to backslash escape the "@" to avoid confusing DBI(TUrl) Note the use of "!svn/bc/NNNN" that requests apache mod_dav_svn to provide a specific revision of the catalog. rather than the default latest. 
ADVANTAGES OF CATALOG FORMAT OVER MYSQLDUMP SERIALIZATIONS * effectively native DBI format that can be used in ascii cascades allowing previewing of future database after updates are made * very simple/easily parsable .csv that can be read by multiple tools * very simple diffs (DBI updates should be contiguous additional lines), unlike mysqldump, this means efficient storage in SVN * no-variants/options that change the format (unlike mysqldump) * no changes between versions of mysql ADVANTAGES OF MYSQLDUMP * can be made remotely
Definition at line 411 of file db.py.
00412 : 00413 os.makedirs(dir) 00414 return os.path.join( catfold, self.relname(tab) ) 00415 00416 def dumpcat_(self, *args, **kwa ): 00417 """ 00418 Dumps tables from LOCAL database into DBI ascii catalog. 00419 00420 This allows candidate DB updates to be shared/tested/previewed prior to doing :meth:`loadcat_` 00421 into the master DB. Usage example :: 00422 00423 db.py local_offline_db dumpcat /path/to/catname 00424 00425 "local_offline_db" 00426 :file:`~/.my.cnf` section name specifying local db 00427 /path/to/catname 00428 directory into which catalog will be dumped 00429 00430 Tables dumped are controlled via options "--exclude","--all" 00431 The dumped ascii catalog directory is structured :: 00432 00433 /path/to/<catname>/ 00434 <catname>.cat 00435 CalibFeeSpec/ 00436 CalibFeeSpec.csv 00437 CalibFeeSpecVld.csv 00438 CalibPmtSpec/ 00439 CalibPmtSpec.csv 00440 CalibPmtSpecVld.csv 00441 ... 00442 LOCALSEQNO/ 00443 LOCALSEQNO.csv 00444 00445 The .csv files comprise a single header line with the table definition 00446 and remainder containing the row data. 00447 00448 The resulting catalog can be used in a DBI cascade by setting DBCONF_URL to :: 00449 00450 mysql://%(local_host)s/%(local_db)s#/path/to/catname/catname.cat;mysql://%(remote_host)s/%(remote_db)s 00451 00452 NB from :dybsvn:`r9869` /path/to/catname/catname.cat can also be a remote URL such as :: 00453 00454 http://dayabay:youknowit\@dayabay.ihep.ac.cn/svn/dybaux/trunk/db/cat/zhe/trial/trial.cat 00455 http://dayabay:youknowit\@dayabay.ihep.ac.cn/svn/dybaux/!svn/bc/8000/trunk/db/cat/zhe/trial/trial.cat 00456 00457 When stuffing basic authentication credentials into 00458 the URL it is necessary to backslash escape the "@" to avoid confusing DBI(TUrl) 00459 00460 Note the use of "!svn/bc/NNNN" that requests apache mod_dav_svn 00461 to provide a specific revision of the catalog. rather than the default latest. 
00462 00463 00464 ADVANTAGES OF CATALOG FORMAT OVER MYSQLDUMP SERIALIZATIONS 00465 00466 * effectively native DBI format that can be used in ascii cascades 00467 allowing previewing of future database after updates are made 00468 * very simple/easily parsable .csv that can be read by multiple tools 00469 * very simple diffs (DBI updates should be contiguous additional lines), unlike mysqldump, this means efficient storage in SVN 00470 * no-variants/options that change the format (unlike mysqldump) 00471 * no changes between versions of mysql 00472 00473 ADVANTAGES OF MYSQLDUMP 00474 00475 * can be made remotely 00476 00477 00478 """ 00479 print "dumpcat %s %s %s " % ( self.sect, repr(args), repr(kwa)) 00480 assert len(args) > 0, "argument specifying the path of the catalog folder to be created is required " 00481 catfold = args[0] 00482 catname = os.path.basename(catfold) 00483 catfile = os.path.join(catfold, "%s.cat" % catname) 00484 if os.path.exists(catfold): 00485 assert os.path.isdir(catfold),"argument must specify directory, not a file %s " % catfold 00486 print "CAUTION : are dumping catalog into existing directory " 00487 00488 cat = ['name'] 00489 for tab in self.tables: 00490 outfile = self.outfile(tab) 00491 ctx = dict( tab=tab , outfile=outfile) 00492 00493 tabfile = self.tabfile(tab, catfold) 00494 tf = open(tabfile,"w") 00495 tf.write( self.desc(tab) ) 00496 if self.opts.get('csvdirect',None): 00497 self._write_csvdirect( ctx , tf ) 00498 else: 00499 self._write_outfile( ctx ) 00500 tf.write( open(outfile,"r").read() ) 00501 tf.close() cat.append( self.relname(tab) )
def DybPython::db::DB::_write_csvdirect | ( | self, | ||
ctx, | ||||
tf | ||||
) | [private] |
Adopt low level approach to avoid unnecessary conversions into python types then back to string and the associated difficulties of then getting precisely the same as SELECT * INTO OUTFILE Note that use of `store_result` rather than `use_result` means that all rows are in memory at once. NB for consistency the CSV output by this command MUST MATCH that by _write_outfile
Definition at line 502 of file db.py.
00507 : 00508 """ 00509 Adopt low level approach to avoid unnecessary conversions into 00510 python types then back to string and the associated difficulties of 00511 then getting precisely the same as SELECT * INTO OUTFILE 00512 00513 Note that use of `store_result` rather than `use_result` means 00514 that all rows are in memory at once. 00515 00516 NB for consistency the CSV ouput by this command MUST MATCH that 00517 by _write_outfile 00518 00519 """ 00520 llconn = self.llconn 00521 llconn.query( "SELECT * FROM %(tab)s " % ctx ) 00522 result = llconn.store_result() csvf = CSVFormat( result.describe() )
def DybPython::db::DB::_write_outfile | ( | self, | ||
ctx | ||||
) | [private] |
Use of "INTO OUTFILE" forces client and server to be on the same machine
Definition at line 523 of file db.py.
00524 : ## all rows as tuples 00525 tf.write( str(csvf) % tuple(row) +"\n" ) 00526 00527 00528 def _write_outfile(self, ctx ):
def DybPython::db::DB::loadcat_ | ( | self, | ||
args, | ||||
kwa | ||||
) |
Loads dumpcat ascii catalog into LOCAL database, appending to preexisting tables of the same name, and destroying preexisting tables when the "--replace" option is used. .. warning:: CAUTION : THIS ACTION MUST BE USED CAREFULLY : IT CAN DESTROY TABLES Prior to doing this destructive action you should ensure that * a recent mysqldump backup for target database is available and verified * you have validated the update using DBI cascades (see dumpcat for more on this) Usage example :: db.py local_offline_db loadcat /path/to/catname "local_offline_db" : ~/.my.cnf section name specifying local db /path/to/catname : catalog directory (containing catname.cat) Note the options used : --replace : tables are first dropped and then fully recreated from the ascii catalog. NB without the "--replace" option only additional rows not existing in the table will be added When loading a new table that is not already present in the database, the replace option is required in order to create the table description. When doing this ensure that --prefix="" Names of tables created are prefixed with this string must be set to empty string to effect the standardly named tables. The default prefix of Tmp allows loadcat operation to be tested without clobbering the standardly named tables.
Definition at line 529 of file db.py.
00534 : 00535 """ 00536 Loads dumpcat ascii catalog into LOCAL database, 00537 00538 appending to preexisting tables of the same name, and destroying 00539 preexisting tables when the "--replace" option is used. 00540 00541 .. warning:: CAUTION : THIS ACTION MUST BE USED CAREFULLY : IT CAN DESTROY TABLES 00542 00543 Prior to doing this destructive action you should ensure that 00544 00545 * a recent mysqldump backup for target database is available and verified 00546 * you have validated the update using DBI cascades (see dumpcat for more on this) 00547 00548 Usage example :: 00549 00550 db.py local_offline_db loadcat /path/to/catname 00551 00552 "local_offline_db" : ~/.my.cnf section name specifying local db 00553 /path/to/catname : catalog directory (containing catname.cat) 00554 00555 Note the options used : 00556 00557 --replace : 00558 tables are first dropped and then fully recreated from the ascii catalog. 00559 NB without the "--replace" option only additional rows not 00560 existing in the table will be added 00561 00562 When loading a new table that is not already present in the database, the 00563 replace option is required in order to create the table description. 00564 When doing this ensure that 00565 00566 --prefix="" 00567 Names of tables created are prefixed with this string 00568 must be set to empty string to effect the standardly named tables. 00569 The default prefix of Tmp allows loadcat operation to be tested 00570 without clobbering the standardly named tables. 
00571 00572 00573 """ 00574 print "loadcat %s %s %s " % ( self.sect, repr(args), repr(kwa)) 00575 assert len(args) > 0 and os.path.isdir(args[0]), "argument specifying the path of an existing catalog directory is required " 00576 catfold = args[0] 00577 catname = os.path.basename(catfold) 00578 catfile = os.path.join(catfold, "%s.cat" % catname) 00579 assert os.path.isfile(catfile), "catfile %s does not exist " % catfile 00580 cat = map( strip, open(catfile).readlines() ) 00581 assert cat[0] == "name" , "error catfile has unexpected 1st line %s " % cat 00582 00583 for ele in cat[1:]: 00584 assert ele[0:5] not in "file: http:".split(), "loadcat with absolute csv paths not yet supported %s " % ele 00585 tabfile = os.path.join( catfold, ele ) 00586 assert os.path.isfile( tabfile ), "loadcat error catalog entry %s does not resolve to a file %s " % ( ele, tabfile ) 00587 tabroot, tabext = os.path.splitext( tabfile ) 00588 assert tabext == ".csv" , "loadcat error : unexpectedc extentions for tabfile % " % tabext 00589 tabname = os.path.basename( tabroot ) 00590 tab = self.opts['prefix'] + tabname 00591 assert not " " in tab , "loadcat error : tab names cannot contain spaces %s " % tab 00592 ctx = dict(tab=tab, tabfile=tabfile ) 00593 if tab not in self.tables: 00594 raise Exception("table in catalog %(tab)s is not in selected table list " % ctx ) 00595 00596 print "loading tabfile %s into table %s " % ( tabfile , tab ) 00597 ctx['hdr'] = self.read_desc( tabfile ) 00598 if self.opts['replace']: 00599 self("DROP TABLE IF EXISTS %(tab)s" % ctx) 00600 self("CREATE TABLE %(tab)s ( %(hdr)s )" % ctx) 00601 pass 00602 if self.opts.get('mysqlimport',False): 00603 if tab not in self._get_showtables(nocache=True): 00604 if self.opts['tcreate']: 00605 self("CREATE TABLE %(tab)s ( %(hdr)s )" % ctx) 00606 else: 00607 raise Exception("table %(tab)s does not exist and tcreate option is disabled" % ctx) impr = MySQLImport(self.dbc)
def DybPython::db::DB::rloadcat_ | ( | self, | ||
args, | ||||
kwa | ||||
) |
Definition at line 608 of file db.py.
00610 : 00611 self("LOAD DATA LOCAL INFILE '%(tabfile)s' IGNORE INTO TABLE %(tab)s FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' IGNORE 1 LINES " % ctx) 00612 00613 def rloadcat_(self, *args, **kwa ): 00614 print "rloadcat %s %s %s " % ( self.sect, repr(args), repr(kwa)) self.opts['mysqlimport'] = True
def DybPython::db::DB::rdumpcat_ | ( | self, | ||
args, | ||||
kwa | ||||
) |
Dumps tables from REMOTE database into DBI ascii catalog:: ./db.py tmp_offline_db rdumpcat /path/to/catnamedfolder For example target a dybaux SVN checkout:: cd svn co http://dayabay.ihep.ac.cn/svn/dybaux/catalog/offline_db_today ./db.py tmp_offline_db rdumpcat ~/offline_db_today ## updates on top of the checkout ~/offline_db_today/offline_db_today.cat svn st ~/offline_db_today ## see what has changed in the DB Non-trivial as: #. `mysql` does not support remote `SELECT ... INTO OUTFILE` even with `OUTFILE=/dev/stdout` #. `mysqldump -Tpath/to/dumpdir` has the same limitation Leaving non-ideal approaches that work remotely: #. `mysql -BAN -e "select ... " | sed` convert into csv with sed #. use mysql-python to read the records into array of python objects then format that as .csv this way seems reliable but slow
Definition at line 615 of file db.py.
00620 : 00621 """ 00622 Dumps tables from REMOTE database into DBI ascii catalog:: 00623 00624 ./db.py tmp_offline_db rdumpcat /path/to/catnamedfolder 00625 00626 For example target an dybaux SVN checkout:: 00627 00628 cd 00629 svn co http://dayabay.ihep.ac.cn/svn/dybaux/catalog/offline_db_today 00630 00631 ./db.py tmp_offline_db rdumpcat ~/offline_db_today 00632 ## updates on top of the checkout ~/offline_db_today/offline_db_today.cat 00633 00634 svn st ~/offline_db_today 00635 ## see what has changed in the DB 00636 00637 Non-trivial as: 00638 00639 #. `mysql` does not support remote `SELECT ... INTO OUTFILE` even with `OUTFILE=/dev/stdout` 00640 #. `mysqldump -Tpath/to/dumpdir` has the same limitation 00641 00642 Leaving non-ideal approaches that work remotely: 00643 00644 #. `mysql -BAN -e "select ... " | sed` convert into csv with sed 00645 #. use mysql-python to read the records into array of python objects then format that as .csv 00646 this way seems reliable but slow 00647 00648 00649 """ 00650 print "rdumpcat %s %s %s " % ( self.sect, repr(args), repr(kwa)) self.opts['csvdirect'] = True
def DybPython::db::DB::dump_ | ( | self, | ||
args, | ||||
kwa | ||||
) |
Definition at line 651 of file db.py.
00656 : 00657 """ 00658 Dumps tables from any accessible database (either local or remote) into mysqldump file 00659 using the configuration parameters. Usage example:: 00660 00661 db.py offline_db dump /tmp/offline_db.sql 00662 00663 """ 00664 msg = r""" 00665 performing mysqldump 00666 DO NOT INTERRUPT FOR A VALID DUMP ... MAY TAKE ~30s OR MORE DEPENDING ON NETWORK 00667 00668 """ 00669 assert len(args) == 1, "dump_ : ERROR an argument specifying the path of the dumpfile is required"
def DybPython::db::DB::load_ | ( | self, | ||
args, | ||||
kwa | ||||
) |
Definition at line 670 of file db.py.
00675 : 00676 """ 00677 Loads tables from a mysqldump file into a target db, the target db is configured by the 00678 parameters in the for example `tmp_offline_db` section of the config file. 00679 For safety the name of the configured target database must begin with `tmp_` 00680 00681 .. note:: 00682 00683 CAUTION IF THE TARGET DATABASE EXISTS ALREADY IT WILL BE DROPPED AND RECREATED BY THIS COMMAND 00684 00685 Usage example:: 00686 00687 db.py tmp_offline_db load /tmp/offline_db.sql 00688 00689 Typical usage pattern to copy the current `offline_db` to a locally controlled `tmp_offline_db`:: 00690 00691 db.py offline_db dump /tmp/offline_db.sql 00692 ## remote dump of offline_db into a mysqldump file 00693 00694 db.py tmp_offline_db load /tmp/offline_db.sql 00695 ## load mysqldump file into local tmp_offline_db 00696 00697 ## modify tmp_offline_db with DBI/DybDbi using DBCONF=tmp_offline_db 00698 00699 db.py tmp_offline_db dumpcat /tmp/offline_db.sql 00700 ## dump the modified copy into a catalog for sharing with others via dybaux SVN 00701 00702 """ 00703 dbn = self.dbc['database'] 00704 assert dbn.startswith('tmp_'), "load_ ERROR : configured database name must start with tmp_ : %s " % dbn 00705 00706 path = args[0] 00707 assert os.path.exists(path) , "load_ ERROR : need an existing path to a mysqldump file : %s " % path 00708 00709 self("DROP DATABASE IF EXISTS %(database)s" % self.dbc ) 00710 self("CREATE DATABASE %(database)s" % self.dbc )
def DybPython::db::DB::docs | ( | cls | ) |
tuple DybPython::db::DB::is_lowlevel = property(lambda self:self.opts.get('lowlevel', False)) [static] |
tuple DybPython::db::DB::tmpfold = property( _get_tmpfold , doc=_get_tmpfold.__doc__ ) [static] |
tuple DybPython::db::DB::tmpdir = property( _get_tmpdir, doc=_get_tmpdir.__doc__ ) [static] |
tuple DybPython::db::DB::showtables = property( _get_showtables, doc=_get_showtables.__doc__ ) [static] |
tuple DybPython::db::DB::tables = property( _get_tables, doc=_get_tables.__doc__ ) [static] |
tuple DybPython::db::DB::docs = classmethod(docs) [static] |