瀏覽代碼

new class: DBConnector, KITCube

Chuan Miao 9 年之前
父節點
當前提交
119848da1b

+ 0 - 3
applications/service/controllers/config.py

@@ -1,3 +0,0 @@
-
-def index():
-    return locals()

+ 32 - 0
applications/service/controllers/sql.py

@@ -0,0 +1,32 @@
+def index():
+    # Dump the current contents of the local info cache (databases,
+    # modules, sensors) into the view via locals().
+    db = info(info.sql_db).select()
+    mod = info(info.sql_module).select()
+    sensors = info(info.sql_sensor).select()
+
+    # if False:
+    # WARNING(review): debug toggle left enabled — while True, every hit of
+    # this action wipes the cached metadata tables after selecting them.
+    # Flip to False (or remove) before deploying.
+    if True:
+        info.sql_db.truncate()
+        info.sql_module.truncate()
+        info.sql_sensor.truncate()
+
+    return locals()
+
+@request.restful()
+def data():
+    response.view = 'generic.'+request.extension
+    if request.env.http_origin:
+        response.headers['Access-Control-Allow-Origin'] = '*'
+    def GET(*args, **kargs):
+        if len(args) == 0: redirect('index')
+        
+        db_name = args[0]
+        # kmodules = info(info.sql_db.database==db_name)\
+        #                (info.sql_module.db==info.sql_db.id).\
+        #                select(info.sql_module.ALL)
+        # all_sensors = [ info(info.sql_module.id==m.id)\
+        #                 (info.sql_module.id==info.sql_sensor.module).\
+        #                 select(info.sql_sensor.ALL) \
+        #                 for m in kmodules ]
+        return dict(sensors=sensors)
+
+    return locals()

+ 46 - 44
applications/service/models/config.py

@@ -1,57 +1,59 @@
-from gluon.dal import Row
 
-adei_servers = [
-{
-    'server_key': 'katrin',
-    'db_host': 'katrin.kit.edu/adei-detector',
-    'db_server': 'detector',
-    'db_name': 'katrin'
-},
-{
-    'server_key': 'aircoils',
-    'db_host': 'katrin.kit.edu/adei-detector',
-    'db_server': 'aircoils',
-    'db_name': 'aircoils'
+config_adei_servers = [
+    {
+        'server_key': 'katrin',
+        'db_host': 'katrin.kit.edu/adei-detector',
+        'db_server': 'detector',
+        'db_name': 'katrin'
+    },
+    {
+        'server_key': 'aircoils',
+        'db_host': 'katrin.kit.edu/adei-detector',
+        'db_server': 'aircoils',
+        'db_name': 'aircoils'
 
-}
+    }
 ]
 
-sql_servers = [
-{
-    'server': 'orca',
-    'host': 'mysql://status:status@192.168.32.181',
-    'database': 'orca',
-    'tables': ['runs', 'machines', 'experiment'] ,
-    'field_ignore': [ 
-        'machines.password', 'experiment.ratesstr',
-    ],
-    'field_type_fix': {
-        'experiment.ratesstr': 'text',
-        'experiment.totalCountsstr': 'text',
-        'experiment.thresholdsstr': 'text',
-        'experiment.gainsstr': 'text'
-    },
-    'selector': {
-        'all': [{},{}],
-        'last': [{-1},{}]
+config_sql_database = [
+    {
+        'name': 'orca',
+        'host': 'mysql://status:status@192.168.32.181',
+        'database': 'orca',
+        'sensor_groups': ['runs', 'machines', 'experiment'] ,
+        'field_ignore': [
+            'machines.password', 'experiment.ratesstr',
+        ],
+        'field_type_fix': {
+            'experiment.ratesstr': 'text',
+            'experiment.totalCountsstr': 'text',
+            'experiment.thresholdsstr': 'text',
+            'experiment.gainsstr': 'text'
+        },
+        'selector': {
+            'all': [{},{}],
+            'last': [{-1},{}]
+        },
+        'disabled': True
     },
-    'disabled': True
-},
-{
-    'server': 'kitcube',
-    'host': 'mysql://cube:cube@127.0.0.1',
-    'database': 'HEADS',
-    # 'tables': 'Data_\d{3}_\w*|\w*list'
-    'tables': 'Data_\d{3}_\w+'
-}
+    {
+        'name': 'kitcube_HEADS',
+        'host': 'mysql://cube:cube@miaopdv.ipe.kit.edu',
+        'db': 'HEADS',
+        'modules': 'Data_(?P<mod_mark>\d{3})_\w+',
+        'axislist': 'Axislist',
+        'sensorlist': 'Sensorlist'
+        # 'tables': 'Data_\d{3}_\w+'
+    }
 ]
 
-
-for i, r in enumerate(sql_servers):
+# Normalise every database configuration: guarantee the optional keys
+# exist so downstream code can read them unconditionally.
+for i, r in enumerate(config_sql_database):
     if 'tables' not in r.keys():
         r['tables'] = []
     if 'field_ignore' not in r.keys():
         r['field_ignore'] = []
     if 'field_type_fix' not in r.keys():
         r['field_type_fix'] = {}
-    sql_servers[i].update(r)
+    if 'disabled' not in r.keys():
+        r['disabled'] = False
+    # NOTE(review): `r` IS config_sql_database[i], so updating the entry
+    # with itself is a no-op — the in-place mutations above already stuck.
+    config_sql_database[i].update(r)

+ 86 - 90
applications/service/models/db_sql.py

@@ -1,95 +1,91 @@
-from gluon.dal import MySQLAdapter 
+from gluon.custom_import import track_changes
+track_changes(True)
+
+from kitcube import KITCube
 import hashlib
 import re
 import time
 
-dal_field_type = dict(
-    id='id',
-    bit='boolean',
-    tinyint='boolean',
-    int='integer',
-    bigint='bigint',
-    double='double',
-    float='double',
-    mediumblob='blob',
-    blob='blob',
-    varchar='string',
-    text='text'
-)
-
-# For later convenieces,
-# copy server info into session cache
-# retrieve sql database fields, and save into session cache
-
-session.sql_servers = sql_servers
-session.sql_fields = []
-
-for c in sql_servers:
-    if 'disabled' in c.keys():
-        if c['disabled']:
-            continue
-
-    host       = c['host']
-    server     = c['server']
-    database   = c['database']
-    tables     = c['tables']
-    ignore_fields  = c['field_ignore']
-    field_type_fix = c['field_type_fix']
-
-    #_db = 'sql_' + hashlib.md5(server).hexdigest()
-    _db = 'sql_' + server
-    globals()[_db] = DAL( "%s/%s" % (host, database), migrate=False, pool_size=10, lazy_tables=True)
-
-    # match table names if a pattern is defined in configuration
-    if isinstance(tables, basestring):
-        pattern = re.compile(tables)
-        tables_in_db = globals()[_db].executesql('show tables')
-        tables = [tab[0] for tab in tables_in_db if pattern.match(tab[0])]
-
-    # extract column name and type from tables
-    for tab in tables:
-        t0 = time.time()
-        field_info = {}
-        dal_fields = []
+
+# Local sqlite database caching introspected metadata of the remote SQL
+# servers: one record per database (sql_db), per module table (sql_module)
+# and per sensor column (sql_sensor).
+info = DAL('sqlite://info.storage.sqlite', pool_size=10)
+
+info.define_table('sql_db', 
+                    Field('name', 'string'),
+                    Field('host', 'string'),
+                    Field('database', 'string'),
+                    Field('modules', 'list:integer'),
+                    Field('flag', 'boolean', default=False))
+info.define_table('sql_module', 
+                    Field('mark', 'integer'),
+                    Field('name', 'string'),
+                    Field('db', 'reference sql_db'))
+info.define_table('sql_sensor', 
+                    Field('name', 'string'),
+                    Field('type', 'string'),
+                    Field('unit', 'string'),
+                    Field('length', 'integer'),
+                    Field('rname', 'string'),
+                    Field('module_mark', 'integer'),
+                    Field('module', 'reference sql_module'))
+
+for database in config_sql_database:
+    if database['disabled']: continue
+
+    db_name = database['name']
+    db_host = database['host']
+    db      = database['db']
+    modules = database['modules'] 
+    adapter = KITCube(db_name, db_host, db, modules)    
+    
+    q = (info.sql_db.database==db) & (info.sql_db.host==db_host)
+    r = info(q).select().last()  
+    
+    if r is None:
+        modules, sensors = adapter.extract()
         
-        # columns from mysql table
-        cols = globals()[_db].executesql('show columns from %s' % tab)
-
-        # extract fields from database and construct DAL fields
-        pattern = re.compile('(?P<fld_type>\w+)\((?P<fld_length>\d*)\)')
-        for fld_name, fld_type, _, fld_key, _, _ in cols:
-            
-            if '.' in fld_name:
-                rname = '`' + fld_name + '`'
-                fld_name = fld_name.replace('.', '_')
-            else:
-                rname = None
-
-            field_long_name = '%s.%s' % (tab, fld_name)
-            if field_long_name in field_type_fix.keys():
-                fld_type = field_type_fix[field_long_name]
-
-            m = pattern.match(fld_type)
-            if m:
-                fld_type = m.group('fld_type')
-                fld_length = m.group('fld_length')
-            else:
-                fld_length = None
-
-            if fld_key == 'PRI':
-                fld_type = 'id'
-
-            if fld_type in ['mediumblob', 'blob']:
-                continue
-            if field_long_name in ignore_fields:
-                continue
-
-            field_info[fld_name] = dict(type=fld_type)
-            
-            dal_fields.append(Field(fld_name,
-                                    type=dal_field_type[fld_type],
-                                    length=fld_length,
-                                    rname=rname))
-
-        globals()[_db].define_table(tab, *dal_fields)
-        session.sql_fields.append(dict(server=server, table=tab, fields=field_info))
+        module_marks = [ m['mark'] for m in modules]
+        module_ids = {}
+
+        db_id = info.sql_db.insert(name=db_name, host=db_host, database=db, modules=module_marks)
+
+        for m in modules:
+            module_id = info.sql_module.insert(db=db_id, **m)
+            module_ids[m['mark']] = module_id
+
+        for s in sensors:
+            module_id = module_ids[ s['module_mark'] ]
+            info.sql_sensor.insert(module=module_id, **s)
+    else:
+        modules = info(info.sql_module).select()
+        sensors = info(info.sql_sensor).select()
+
+    adapter.define_tables(modules, sensors)
+
+
+
+
+
+    # if not r.flag:
+        # if r:
+        #     info.sql_dbs(info.sql_dbs.id==r.id).update()
+
+
+    # #_db = 'sql_' + hashlib.md5(database).hexdigest()
+    # _db = 'sql_' + database
+    # globals()[_db] = DAL( "%s/%s" % (host, database), migrate=False, pool_size=10, lazy_tables=True)
+    #
+    # # match table names if a pattern is defined in configuration
+    #
+    # # extract column name and type from tables
+    #
+    # dal_fields.append(Field(fld_name,
+    #                         type=dal_field_type[fld_type],
+    #                         length=fld_length,
+    #                         rname=rname))
+    #
+    #
+    #     globals()[_db].define_table(tab, *dal_fields)
+    #     session.sql_fields.append(dict(database=database, database=database, table=tab, fields=field_info))
+    #
+    # session.sql_dbs.append(_db)
+    # session.sql_tables.append(dict(database=database, dababase=database, tables=tables))

+ 0 - 46
applications/service/models/sql/sql.py.0

@@ -1,46 +0,0 @@
-from gluon.dal import MySQLAdapter 
-import hashlib
-import re
-
-field_type = dict(
-        id='id',
-        bit='boolean',
-        tinyint='boolean',
-        bigint='bigint',
-        double='double',
-        float='double',
-        varchar='string' )
-
-if 'sql_db' not in globals():
-    sql_db = {}
-
-for k, c in sql_servers.iteritems():
-    d = DAL( "%s/%s" % (c['host'], c['database']), migrate=False )
-    
-    fields = []
-    for tab in c['tables']:
-        # columns from mysql table
-        cols = d.executesql('show columns from %s' % tab)
-
-        for nam, typ, nul, key, dft, ext in cols:
-            # extract filed type (typ) and filed length (length)
-            m = re.match('(.*)\((.*)\)', typ)
-            if m:
-                typ = m.group(1)
-                length = int(m.group(2))
-            else:
-                length = None
-
-            # if key is 'PRI', then typ is 'id'
-            if key == 'PRI': 
-                typ = 'id'
-
-            # construct field
-            fields.append(Field(nam, type=field_type[typ], length=length))
-
-        # define table
-        d.define_table(tab, *fields)
-
-    sql_db[hashlib.md5(k).hexdigest()] = d
-
-    

+ 246 - 0
applications/service/modules/kitcube.py

@@ -0,0 +1,246 @@
+import re
+from gluon import DAL, Field
+from gluon.dal import Row, Rows
+
+dal_field_type = dict(
+    id='id',
+    bit='boolean',
+    tinyint='boolean',
+    int='integer',
+    bigint='bigint',
+    double='double',
+    float='double',
+    mediumblob='blob',
+    blob='blob',
+    varchar='string',
+    text='text'
+)
+
+def search_objects(objs, **kargs):
+    ''' search list of objects, matching supplied fields'''
+    # Keep only the dicts whose value equals the supplied value for every
+    # given keyword; returns a (possibly empty) list.
+    return filter(lambda s: all([ s[k] == v for k,v in kargs.iteritems() ]), objs)
+def search_objects2(axes, sensor_name, module_mark):
+    # NOTE(review): apparently unused specialised variant of search_objects;
+    # returns the unit of the first matching axis record, else None.
+    for ax in axes:
+        if (ax['sensor_name'] == sensor_name) and (ax['module_mark'] == module_mark):
+            return ax['unit']
+    return None
+
+
+class DBConnector(object):
+    def __init__(self, name, host, database):
+        self.axis_list = 'Axislist'
+        self.sensor_list = 'Sensorlist'
+        self.name = name
+        self.host = host
+        self.database = database
+        self.uri = "%s/%s" % (self.host, self.database)
+        self.connector = DAL( self.uri, migrate=False, pool_size=1, lazy_tables=True)
+    def extract(self):
+        modules = self.extract_modules()
+        sensors = self.extract_sensors(modules)
+        return modules, sensors
+    def connect(self, modules=None, sensors=None):
+        if modules is None or sensors is None:
+            modules, sensors = extract()
+        for mod in modules:
+            s = search_objects(sensors, module_mark=mod['mark'])
+            f = [ Field(k['name'], type=k['type'], length=k['length'], rname=k['rname']) for k in s ]
+            self.connector.define_table(mod['name'], *f)
+    def reconnect(self):
+        self.connector.reconnect()
+
+class KITCube(DBConnector):
+    def __init__(self, name, host, database, modules, ignore_fields=[], field_type_fix={}):
+        self.ignore_fields = ignore_fields
+        self.field_type_fix = field_type_fix
+        super(KITCube, self).__init__(name, host, database)
+
+        if isinstance(modules, basestring):
+            self.module_pattern = re.compile(modules)
+        else:
+            self.module_pattern = None
+
+    def extract_modules(self):
+        tables = self.connector.executesql('show tables')
+        if self.module_pattern is not None:
+            matches = [ (tname, self.module_pattern.match(tname)) for tname, in tables ]
+            modules = [ dict(name=tname, mark=int(m.group('mod_mark'))) for tname, m in matches if m ]
+        return modules
+
+    def extract_sensors(self, modules):
+        fld_type_pattern = re.compile('(?P<fld_type>\w+)\((?P<fld_length>\d*)\)')
+        sensor_units = self.extract_units()
+        sensors = []
+
+        for mod in modules:
+            cols = self.connector.executesql('show columns from ' + mod['name'])
+            for fld_name, fld_type, _, fld_key, _, _ in cols:
+                if '.' in fld_name:
+                    rname = fld_name
+                    fld_name = fld_name.replace('.', '_')
+                else:
+                    rname = None
+                fld_name_long = mod['name'] + '.' + fld_name
+
+                if fld_key == 'PRI':
+                    fld_type = 'id'
+                if fld_name_long in self.field_type_fix.keys():
+                    fld_type = self.field_type_fix[fld_name_long]
+
+                m = fld_type_pattern.match(fld_type)
+                if m:
+                    fld_type = m.group('fld_type')
+                    fld_length = m.group('fld_length')
+                else:
+                    fld_length = None
+
+                if fld_name_long in self.ignore_fields:
+                    continue                
+                if fld_type in ['mediumblob', 'blob']:
+                    continue
+
+                s = search_objects(sensor_units, module=mod['mark'], name=rname)
+                try:
+                    unit = s[0]['unit']
+                except:
+                    unit = None
+
+                sensors.append(dict(module_mark=mod['mark'], 
+                                    name=fld_name, type=fld_type, 
+                                    length=fld_length, rname=rname, 
+                                    unit=unit))
+        return sensors
+
+    def extract_units(self):
+        self.connector.define_table(self.axis_list,   
+                            Field('id'), Field('unit', 'string'))
+        self.connector.define_table(self.sensor_list, 
+                            Field('id'), Field('name', 'string'),
+                            Field('module', 'integer'),
+                            Field('axis', 'reference %s' % self.axis_list))
+        
+        sensortable = self.connector[self.sensor_list]
+        axistable = self.connector[self.axis_list]
+        
+        records = self.connector(sensortable.axis==axistable.id).select()
+        sensor_units = [ dict(name=r[self.sensor_list].name, 
+                              module=r[self.sensor_list].module, 
+                              unit=r[self.axis_list].unit) for r in records ]
+        return sensor_units
+
+    # def set_tables(self):
+    #     for mod in self.modules:
+    #         mark = mod['mark']
+    #         fields = []
+    #         for sns in self.sensors:
+    #             if sns['module_mark'] == mark:
+    #                 fields.append(
+    #                     Field(sns['name'], sns['type'], length=sns['length'], rname=sns['rname']))
+    #         self.connector.define_table(mod['name'], *fields)
+
+
+    # def extract_sensors(self, modules):
+    #     sensors = []
+    #     fld_type_pattern = re.compile('(?P<fld_type>\w+)\((?P<fld_length>\d*)\)')
+    #             sensors.append(dict(module_mark=mod['mark'], name=fld_name, type=fld_type, length=fld_length, rname=rname))
+    #     return sensors
+    # def extract_sensoraxis(self):
+    #     axes = self.extract_axes()
+    #     for mod in self.modules:
+    #         for sns in mod['sensors']:
+    #             unit = search_dicts(axes, sensor_name=(sns['rname'] or sns['name']), module_mark=mod['mark'])
+    #             sns.update({'unit': unit})
+    # def extract_axes(self):
+    #     q = (self._sql[self.sensor_list].axis == self._sql[self.axis_list].id)
+    #     res = self._sql(q).select(self._sql[self.sensor_list].name, 
+    #                               self._sql[self.sensor_list].module, 
+    #                               self._sql[self.axis_list].unit)
+    #     axes = [{ 'sensor_name': r[self.sensor_list].name,
+    #               'module_mark': r[self.sensor_list].module,
+    #               'unit': r[self.axis_list].unit
+    #             } for r in res]
+    #     return axes
+    # def get(self):
+    #     self.extract_modules()
+    #     self.extract_sensors()
+    #     self.extract_sensoraxis()
+    #     return self.modules
+    # def tables(self, _info):
+    #     pass
+
+def extract_db_info(config, _sql=None):
+    '''Introspect a configured MySQL database and return a dict mapping
+    table name -> list of {'name', 'type', 'axis'} column descriptors.
+
+    NOTE(review): legacy precursor of KITCube.extract(); appears unused and
+    contains a debug print statement.  Keys of `modules` are built from
+    tab[0] although `tab` is already a string at that point, so the keys
+    are single characters and modules[tab] below would raise KeyError —
+    confirm before reusing this function.'''
+    host     = config['host']
+    server   = config['server']
+    database = config['database']
+    sensor_groups  = config['sensor_groups']
+    axis_list      = config['axislist']
+    sensor_list    = config['sensorlist']
+    ignore_fields  = config['field_ignore']
+    field_type_fix = config['field_type_fix']
+
+    # Connect only when the caller did not supply a DAL handle.
+    if not _sql:
+        _sql = DAL( "%s/%s" % (host, database), migrate=False, pool_size=1)
+
+    _sql.define_table(axis_list, Field('id'), Field('unit', 'string'))
+    _sql.define_table(sensor_list, Field('id'),
+                                   Field('name', 'string'),
+                                   Field('axis', 'reference %s' % axis_list),
+                                   Field('module', 'integer'))
+
+    # Join sensors with their axes to resolve units later.
+    q  = _sql[sensor_list].axis == _sql[axis_list].id
+    r  = _sql(q).select(_sql[sensor_list].name, _sql[sensor_list].module, _sql[axis_list].unit)
+    sensor_axis = r
+
+    tables_in_db = _sql.executesql('show tables')
+
+    # A string sensor_groups is treated as a regex over table names.
+    if isinstance(sensor_groups, basestring):
+        pattern = re.compile(sensor_groups)
+        tables = [tab[0] for tab in tables_in_db if pattern.match(tab[0])]
+
+    fld_pattern = re.compile('(?P<fld_type>\w+)\((?P<fld_length>\d*)\)')
+    mod_pattern = re.compile('\w+_(?P<mod_number>\d{3})_\w+')
+
+    modules = { tab[0]:[ ] for tab in tables }
+    # NOTE(review): debug output left in (Python 2 print statement).
+    print tables, modules
+
+    for tab in tables:
+        m = mod_pattern.match(tab)
+        mod_number = int(m.group('mod_number'))
+
+        # columns from mysql table
+        cols =_sql.executesql('show columns from %s' % tab)
+
+        # extract fields from database and construct DAL fields
+        for fld_name, fld_type, _, fld_key, _, _ in cols:
+            if '.' in fld_name:
+                rname = '`' + fld_name + '`'
+                fld_name = fld_name.replace('.', '_')
+            else:
+                rname = None
+
+            field_long_name = '%s.%s' % (tab, fld_name)
+            if field_long_name in field_type_fix.keys():
+                fld_type = field_type_fix[field_long_name]
+
+            # Split e.g. 'varchar(64)' into base type and length.
+            m = fld_pattern.match(fld_type)
+            if m:
+                fld_type = m.group('fld_type')
+                fld_length = m.group('fld_length')
+            else:
+                fld_length = None
+
+            if fld_key == 'PRI':
+                fld_type = 'id'
+
+            # Blobs are never exposed; explicitly ignored fields skipped.
+            if fld_type in ['mediumblob', 'blob']:
+                continue
+            if field_long_name in ignore_fields:
+                continue
+
+            # Resolve the unit for this column from the sensor/axis join.
+            axis = sensor_axis.find(lambda r:
+                        r[sensor_list].name == fld_name and r[sensor_list].module == mod_number).first()
+            if axis:
+                axis = axis[axis_list].unit
+
+            modules[tab].append(dict(name=fld_name, type=fld_type, axis=axis))
+    return modules