Browse Source

clean files

Chuan Miao 9 years ago
parent
commit
d7a438baf9

+ 1 - 0
.gitignore

@@ -4,6 +4,7 @@ errors/
 cache/
 cron/
 private/
+old/
 
 progress.log
 ABOUT

+ 0 - 164
controllers/api.py

@@ -1,164 +0,0 @@
-import re
-
-def index():
-    return dict(adei=session.adei_sensors, sql=session.sql_fields)
-
-def adei_servers():
-    servers = config().select(config.adei_server.ALL)
-    return dict(servers=servers)
-
-def adei_groups():
-    server = request.get_vars.server
-    s = config(config.adei_server.server_key==server).select().first()
-    groups = s.adei_group.select()
-    return dict(server=server, groups=groups)
-
-def adei_sensors():
-    server = request.get_vars.server
-    group  = request.get_vars.group
-    s = config(( config.adei_server.id==config.adei_sensor.server_id ) &\
-               ( config.adei_group.id==config.adei_sensor.group_id ))
-    sensors =  s(( config.adei_server.server_key==server ) & \
-                 ( config.adei_group.db_group==group ) ).\
-                 select(config.adei_sensor.ALL)
-    return dict(server=server, group=group, sensors=sensors)
-
-def adei_data():
-    server = request.get_vars.server
-    group  = request.get_vars.group
-    sensor = request.get_vars.sensor
-    ar = session.adei_readers[server]
-    sensor_names, data = ar.query_data(group=group, sensor=sensor)
-    # pack results
-    sensor = re.findall(r'\d+', sensor)
-    return dict(server=server, group=group, field_mask=sensor, field=sensor_names, data=data)
-
-def sql_servers():
-    servers = [s['server'] for s in session.sql_servers]
-    return dict(servers=servers)
-def sql_tables():
-    server = request.get_vars.server
-    tables = []
-    for s in session.sql_servers:
-        if s['server'] == server:
-            tables = s['tables']
-    return dict(server=server, tables=tables)
-def sql_fields():
-    server = request.get_vars.server
-    table  = request.get_vars.table
-    fields = []
-    for f in session.sql_fields:
-        if f['server'] == server and f['table'] == table:
-            fields = f['fields']
-    return dict(server=server, table=table, fields=fields)
-def sql_data():
-    server = request.get_vars.server
-    table  = request.get_vars.table
-    _id    = request.get_vars.id or 'any'
-    field  = request.get_vars.field or 'all'
-    kargs  = request.vars
-    servers = sql_servers()
-    fields_in_table = sql_fields()['fields']
-    sql = globals()['sql_'+server]
-    if _id == 'any':
-        query = sql[table]
-    else:
-        query = (sql[table].id == _id)
-    if (len(kargs) > 0):
-        for k, v in kargs.iteritems():
-            try:
-                query = query & (sql[table][k] == v)
-            except:
-                query = (sql[table][k] == v)
-    if field == 'all':
-        val = sql(query).select(orderby=~sql[table].id)
-    else:
-        val = sql(query).select(sql[table][field], orderby=~sql[table].id)
-    return dict(server=server, table=table, data=val)
-
-@request.restful()
-def sqlinfo():
-    if request.extension == 'html':
-        request.extension = 'xml'
-    response.view = 'generic.'+request.extension
-    if request.env.http_origin:
-        response.headers['Access-Control-Allow-Origin'] = '*'
-    def GET(*args, **kargs):
-        if len(args) == 0:
-            return sql_servers()
-        elif len(args) == 1:
-            request.get_vars.server = args[0]
-            return sql_tables()
-        else:
-            request.get_vars.server = args[0]
-            request.get_vars.table = args[1]
-            return sql_fields()
-    def POST(*args, **kargs):
-        return dict()
-    return locals()
-
-@request.restful()
-def sql():
-    if request.extension == 'html':
-        request.extension = 'json'
-    response.view = 'generic.'+request.extension
-    if request.env.http_origin:
-        response.headers['Access-Control-Allow-Origin'] = '*'
-    def GET(*args, **kargs):
-        if len(args) == 0:
-            redirect('sqlinfo')
-        elif len(args) == 1:
-            pass
-        elif len(args) == 2:
-            request.get_vars.server = args[0]
-            request.get_vars.table = args[1]
-            return sql_data()
-        elif len(args) == 3:
-            request.get_vars.server = args[0]
-            request.get_vars.table = args[1]
-            request.get_vars.id = args[2]
-            request.vars = kargs
-            return sql_data()
-        else:
-            pass
-        return dict()
-    return locals()
-
-@request.restful()
-def adeiinfo():
-    if request.extension == 'html':
-        request.extension = 'xml'
-    response.view = 'generic.'+request.extension
-    if request.env.http_origin:
-        response.headers['Access-Control-Allow-Origin'] = '*'
-    def GET(*args, **kargs):
-        if len(args) == 0:
-            return adei_servers()
-        elif len(args) == 1:
-            request.get_vars.server = args[0]
-            return adei_groups()
-        elif len(args) == 2:
-            request.get_vars.server = args[0]
-            request.get_vars.group = args[1]
-            return adei_sensors()
-        else:
-            request.get_vars.server = args[0]
-            request.get_vars.group = args[1]
-            request.get_vars.sensor = args[2]
-            return adei_data()
-    def POST(*args, **kargs):
-        return dict()
-    return locals()
-
-def adei_test():
-    from gluon.contrib.hypermedia import Collection
-    rules ={
-        'adei_server': {
-            'GET':{'query':None, 'fields':['id', 'server_key', 'db_host']},
-            'POST':{'query':None, 'fields':['server_key']},
-            'PUT':{'query':None, 'fields':['server_key']},
-            'DELETE':{'query':None}
-        }
-    }
-    return Collection(config, compact=True).process(request, response, rules)
-

+ 30 - 0
controllers/bess.py

@@ -0,0 +1,30 @@
+import re
+
+def index():
+	# import sys
+	# size = sys.getsizeof(cache.ram.meta_storage.get('/service/bess'))
+	# return dict(cache=cache.ram, storage_size=size)
+	return locals()
+	
+
+@cache(request.env.path_info, time_expire=10, cache_model=cache.ram)
+@request.restful()
+def ulm():
+    if request.env.http_origin:
+        response.headers['Access-Control-Allow-Origin'] = '*'
+    regex = re.compile('(\w+).(\w+)')
+
+    def GET():
+    	adapter = ulm_adapter
+    	sensors = ulm_config.get('sensors')
+    	data = adapter.read_sensors(sensors)
+
+    	res = ''
+    	for s in sensors:
+    		m = regex.match(s)
+    		group, sensor = m.group(1), m.group(2)
+    		res = res + data.get(group).get(sensor,'0') + '\n'
+    	return res
+    	#return dict(data=data)
+    return locals()
+                                          

+ 0 - 59
controllers/default.py

@@ -1,62 +1,3 @@
-# -*- coding: utf-8 -*-
-# this file is released under public domain and you can use without limitations
-
-#########################################################################
-## This is a sample controller
-## - index is the default action of any application
-## - user is required for authentication and authorization
-## - download is for downloading files uploaded in the db (does streaming)
-## - api is an example of Hypermedia API support and access control
-#########################################################################
 
 def index():
-    """
-    example action using the internationalization operator T and flash
-    rendered by views/default/index.html or views/generic.html
-
-    if you need a simple wiki simply replace the two lines below with:
-    return auth.wiki()
-    """
-    response.flash = T("Alarms 0")
-    #row = db().select(db.auth_user.ALL).first().as_xml()
-    #return dict(message=T('Hello World'))
-    #return dict(form=row)
     return locals()
-
-def user():
-    """
-    exposes:
-    http://..../[app]/default/user/login
-    http://..../[app]/default/user/logout
-    http://..../[app]/default/user/register
-    http://..../[app]/default/user/profile
-    http://..../[app]/default/user/retrieve_password
-    http://..../[app]/default/user/change_password
-    http://..../[app]/default/user/manage_users (requires membership in
-    use @auth.requires_login()
-        @auth.requires_membership('group name')
-        @auth.requires_permission('read','table name',record_id)
-    to decorate functions that need access control
-    """
-    return dict(form=auth())
-
-
-@cache.action()
-def download():
-    """
-    allows downloading of uploaded files
-    http://..../[app]/default/download/[filename]
-    """
-    return response.download(request, db)
-
-
-def call():
-    """
-    exposes services. for example:
-    http://..../[app]/default/call/jsonrpc
-    decorate with @services.jsonrpc the functions to expose
-    supports xml, json, xmlrpc, jsonrpc, amfrpc, rss, csv
-    """
-    return service()
-
-

+ 0 - 74
controllers/default.py.bak

@@ -1,74 +0,0 @@
-# -*- coding: utf-8 -*-
-# this file is released under public domain and you can use without limitations
-
-#########################################################################
-## This is a sample controller
-## - index is the default action of any application
-## - user is required for authentication and authorization
-## - download is for downloading files uploaded in the db (does streaming)
-## - api is an example of Hypermedia API support and access control
-#########################################################################
-
-def index():
-    """
-    example action using the internationalization operator T and flash
-    rendered by views/default/index.html or views/generic.html
-
-    if you need a simple wiki simply replace the two lines below with:
-    return auth.wiki()
-    """
-    response.flash = T("Welcome to web2py!")
-    print globals()['message']
-    db1 = sql_dbs[0]
-    row = db1().select(db1.experiment1.ALL).first().as_xml()
-    #return dict(message=T('Hello World'))
-    return dict(form=row)
-
-def user():
-    """
-    exposes:
-    http://..../[app]/default/user/login
-    http://..../[app]/default/user/logout
-    http://..../[app]/default/user/register
-    http://..../[app]/default/user/profile
-    http://..../[app]/default/user/retrieve_password
-    http://..../[app]/default/user/change_password
-    http://..../[app]/default/user/manage_users (requires membership in
-    use @auth.requires_login()
-        @auth.requires_membership('group name')
-        @auth.requires_permission('read','table name',record_id)
-    to decorate functions that need access control
-    """
-    return dict(form=auth())
-
-
-@cache.action()
-def download():
-    """
-    allows downloading of uploaded files
-    http://..../[app]/default/download/[filename]
-    """
-    return response.download(request, db)
-
-
-def call():
-    """
-    exposes services. for example:
-    http://..../[app]/default/call/jsonrpc
-    decorate with @services.jsonrpc the functions to expose
-    supports xml, json, xmlrpc, jsonrpc, amfrpc, rss, csv
-    """
-    return service()
-
-
-@auth.requires_login() 
-def api():
-    """
-    this is example of API with access control
-    WEB2PY provides Hypermedia API (Collection+JSON) Experimental
-    """
-    from gluon.contrib.hypermedia import Collection
-    rules = {
-        '<tablename>': {'GET':{},'POST':{},'PUT':{},'DELETE':{}},
-        }
-    return Collection(db).process(request,response,rules)

+ 0 - 115
controllers/sql.py

@@ -1,115 +0,0 @@
-def index():
-    response.generic_patterns = ['*.html']
-    db = meta_dbs()
-    return locals()
-
-def reset():
-    if True:
-    # if False:
-        info.sql_db.truncate()
-        info.sql_module.truncate()
-        info.sql_sensor.truncate()
-    return locals()
-
-
-def error():
-    return dict(error=session.error)    
-
-def meta_dbs():
-    return info(info.sql_db).select()
-
-def meta_modules(db):
-    return info(info.sql_db.key==db)\
-               (info.sql_module.db_id==info.sql_db.id)\
-               .select(info.sql_module.ALL)
-
-def meta_sensors(db, module):
-    return info(info.sql_db.key==db)\
-               (info.sql_module.name==module)\
-               (info.sql_sensor.db_id==info.sql_db.id)\
-               (info.sql_sensor.module_id==info.sql_module.id)\
-               .select(info.sql_sensor.ALL)
-
-@request.restful()
-def api():
-    if request.extension is 'html':
-        request.extension = 'xml'
-    if request.env.http_origin:
-        response.headers['Access-Control-Allow-Origin'] = '*'
-    response.generic_patterns = ['*.html', '*.xml', '*.json']
-
-
-    def GET(*args, **kargs):        
-        try:
-            db_key = args[0]
-        except IndexError:
-            session.error = 'no database is specified'
-            request.extension = 'html'
-            redirect('error')
-
-        databases = meta_dbs()
-
-        if db_key not in [d.key for d in databases]:
-            session.error = 'database "%s" does not exits' % db_key
-            request.extension = 'html'
-            redirect('error')
-        else:
-            modules = [ m.name for m in meta_modules(db_key) ] 
-
-        logger.info(locals().get('modules'))
-
-        try:
-            module = args[1]
-        except IndexError:
-            error = 'Module is not specified'
-            # mods = {}
-            # for m in modules:
-            #     s = meta_sensors(db_key, m.name)
-            #     mods[m.name] = [ _s.name for _s in s ]
-            mods = { m: [ s.name for s in meta_sensors(db_key, m) ] for m in modules }
-            # mods = { m.name : meta_sensors(db_key, m.name) for m in modules } 
-            return dict(error=error, modules=mods)
-
-        try:
-            module = modules[int(module)]
-        except ValueError:
-            module = module
-        except KeyError:
-            session.error = 'Module index out of range: module index = %s' % module
-            session.modules = modules
-            request.extension = 'html'
-            redirect(URL('error'))
-
-        len = int(kargs.get('len', 1))
-
-        db = globals()[db_key]
-        db_connector = kitcube[db_key]
-        views = db_connector.views
-
-        if module in modules:
-            res = db_connector.query(module, len=len)
-        elif module in views.keys():
-            v = views[module]
-            res = db_connector.query_view(v, len=len)
-        else:
-            session.error = 'Module "%s" does not exits' % module
-            session.modules = modules
-            request.extension = 'html'
-            redirect(URL('error'))
-
-        unit = {}
-        if kargs.get('unit', None):
-            if module in modules:
-                unit[module] = info(info.sql_sensor.module_name==module).select(info.sql_sensor.name, info.sql_sensor.unit)
-            else:
-                for mod, s in views[module]:
-                    rows = info(info.sql_sensor.module_name==mod).select(info.sql_sensor.name, info.sql_sensor.unit)
-                    rows.exclude(lambda row: row.name not in s )    
-                    unit[mod] = rows
-
-        for m, s in unit.iteritems():
-            unit[m] = { _s['name']: _s['unit'] for _s in s }
-        
-        return dict(data=res, unit=unit)
-
-    return locals()

+ 0 - 101
models/.tmp/db.py

@@ -1,101 +0,0 @@
-# -*- coding: utf-8 -*-
-import hashlib
-
-db.define_table('legacy_mysql',
-    Field('id'),
-    Field('dbkey', 'string', 128, unique=True),
-    Field('hash_md5', 'string', 128),
-    Field('dbname', 'string'),
-    Field('host_uri', 'string'),
-    Field('username', 'string', 128),
-    Field('password', 'password'))
-
-db.define_table('legacy_mysql_tables',
-        Field('id', 'id'),
-        Field('db_id', 'reference legacy_mysql'),
-        Field('table_name', 'string'))
-
-db.define_table('legacy_mysql_fields',
-        Field('id'),
-        Field('db_id', 'reference legacy_mysql'),
-        Field('table_id', 'reference legacy_mysql_tables'),
-        Field('field_name', 'string'),
-        Field('field_type', 'string'))
-
-db.define_table('adei_server',
-        Field('id'),
-        Field('md5', 'string'),
-        Field('adei_url', 'string'),
-        Field('adei_host', 'string'),
-        Field('adei_name', 'string'),
-        Field('adei_group', 'string'))
-
-db.define_table('adei_sensors',
-        Field('id'),
-        Field('adei_server_id', 'reference adei_server'),
-        Field('sensor_name', 'string'),
-        Field('sensor_unit', 'string'),
-        Field('sensor_last_update', 'time'),
-        Field('sensor_last_value', 'double'))
-
-#db.legacy_mysql.hash_md5.represent = lambda value, row: \
-        #hashlib.md5(row.dbkey).hexdigest()
-
-mysql_db     = db.legacy_mysql
-mysql_tables = db.legacy_mysql_tables
-mysql_fields = db.legacy_mysql_fields
-mysql_info = db(( mysql_fields.table_id == mysql_tables.id) & ( mysql_fields.db_id == mysql_db.id ))
-
-for row in db(mysql_db).select():
-    db( mysql_db.id == row.id ).update(hash_md5 = hashlib.md5(row.dbkey).hexdigest())
-
-#print hashlib.md5(db.legacy_mysql.dbkey).hexdigest() 
-#db(mysql_db.id>0).update(hash_md5 = eval( hashlib.md5(mysql_db.dbkey).hexdigest() ))
-#db(mysql_db.id>0).update(hash_md5 = eval( '0' ))
-        
-
-if False:
-    print '\n test select\n', db(mysql_db).select()
-    print '\n test select\n', db(mysql_db).select(mysql_db.dbname)
-    print '\n test select\n', db(( mysql_fields.table_id == mysql_tables.id ) & 
-                                 ( mysql_fields.db_id == mysql_db.id )).select()
-    print '\n test select\n', db(( mysql_fields.table_id == mysql_tables.id ) & 
-                                 ( mysql_fields.db_id == mysql_db.id ))\
-                                 .select( mysql_tables.table_name, mysql_db.md5 )
-    print '\n test select\n', mysql_info.select()
-
-
-if 'MYSQL' not in globals():
-    MYSQL =  {}
-
-for host in db( mysql_db ).select():
-    if  host.hash_md5 not in MYSQL:
-        MYSQL[host.hash_md5] = DAL('mysql://%(username)s:%(password)s@%(host_uri)s/%(dbname)s' % host)
-
-    for table in db( mysql_tables.db_id == host.id ).select():
-
-        fields = db( mysql_fields.table_id == table.id ).select()
-        f = [ Field(x.field_name, x.field_type) for x in fields ]
-
-        MYSQL[host.hash_md5].define_table( table.table_name, *f, migrate=False )
-             
-
-
-
-
-    #fields = db(mysql_fields.table_id == row.mysql_tables.id).select()
-    #print fields
-
-#rows = mysql_info( mysql_fields.table_id ==  mysql_tables.id ).select( mysql_fields.field_name, mysql_fields.field_type, mysql_db.dbkey, mysql_tables.table_name, groupby=mysql_tables.table_name() )
-
-#print rows
-#for dbtable in db( mysql_tables ).select():
-    #db_id = dbtable.db_id
-    ##db_key = db(mysql_db.id == db_id).select().first()
-
-    ##for fld in db( mysql_fields.table_id == dbtable.id ).select( mysql_fields.ALL )
-    ##for fld in db( mysql_all )( mysql_fields.table_id == dbtable.id )).select(mysql_fields.field_name, mysql_fields.field_type):
-        ##fields.append(Field(frow.field_name, frow.field_type))
-
-    ##MYSQL[host.hash_md5].define_table(dbtable.table_name, fields[0], fields[1], fields[2], migrate=False)
-

+ 0 - 87
models/.tmp/db.py.0

@@ -1,87 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#########################################################################
-## This scaffolding model makes your app work on Google App Engine too
-## File is released under public domain and you can use without limitations
-#########################################################################
-
-## if SSL/HTTPS is properly configured and you want all HTTP requests to
-## be redirected to HTTPS, uncomment the line below:
-# request.requires_https()
-
-if not request.env.web2py_runtime_gae:
-    ## if NOT running on Google App Engine use SQLite or other DB
-    db = DAL('sqlite://storage.sqlite',pool_size=1,check_reserved=['all'])
-else:
-    ## connect to Google BigTable (optional 'google:datastore://namespace')
-    db = DAL('google:datastore+ndb')
-    ## store sessions and tickets there
-    session.connect(request, response, db=db)
-    ## or store session in Memcache, Redis, etc.
-    ## from gluon.contrib.memdb import MEMDB
-    ## from google.appengine.api.memcache import Client
-    ## session.connect(request, response, db = MEMDB(Client()))
-
-## by default give a view/generic.extension to all actions from localhost
-## none otherwise. a pattern can be 'controller/function.extension'
-response.generic_patterns = ['*'] if request.is_local else []
-
-## (optional) optimize handling of static files
-# response.optimize_css = 'concat,minify,inline'
-# response.optimize_js = 'concat,minify,inline'
-## (optional) static assets folder versioning
-# response.static_version = '0.0.0'
-#########################################################################
-## Here is sample code if you need for
-## - email capabilities
-## - authentication (registration, login, logout, ... )
-## - authorization (role based authorization)
-## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
-## - old style crud actions
-## (more options discussed in gluon/tools.py)
-#########################################################################
-
-from gluon.tools import Auth, Service, PluginManager
-
-auth = Auth(db)
-service = Service()
-plugins = PluginManager()
-
-## create all tables needed by auth if not custom tables
-auth.define_tables(username=False, signature=False)
-
-## configure email
-mail = auth.settings.mailer
-mail.settings.server = 'logging' if request.is_local else 'smtp.gmail.com:587'
-mail.settings.sender = 'you@gmail.com'
-mail.settings.login = 'username:password'
-
-## configure auth policy
-auth.settings.registration_requires_verification = False
-auth.settings.registration_requires_approval = False
-auth.settings.reset_password_requires_verification = True
-
-## if you need to use OpenID, Facebook, MySpace, Twitter, Linkedin, etc.
-## register with janrain.com, write your domain:api_key in private/janrain.key
-from gluon.contrib.login_methods.janrain_account import use_janrain
-use_janrain(auth, filename='private/janrain.key')
-
-#########################################################################
-## Define your tables below (or better in another model file) for example
-##
-## >>> db.define_table('mytable',Field('myfield','string'))
-##
-## Fields can be 'string','text','password','integer','double','boolean'
-##       'date','time','datetime','blob','upload', 'reference TABLENAME'
-## There is an implicit 'id integer autoincrement' field
-## Consult manual for more options, validators, etc.
-##
-## More API examples for controllers:
-##
-## >>> db.mytable.insert(myfield='value')
-## >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL)
-## >>> for row in rows: print row.id, row.myfield
-#########################################################################
-
-## after defining tables, uncomment below to enable auditing
-# auth.enable_record_versioning(db)

+ 0 - 97
models/.tmp/db_legacy.py.tmp

@@ -1,97 +0,0 @@
-# -*- coding: utf-8 -*-
-import re
-
-db_config = '''
-localhost    test_status:experiment1    ID,usec,sensor1
-#localhost    test_status:experiment2    ID,usec,sensor1
-'''
-
-default_user = 'status'
-default_pass = 'status'
-
-datatype = dict(
-        id='id',
-        bit='boolean',
-        tinyint='boolean',
-        bigint='bigint',
-        double='double',
-        float='double',
-        varchar='string'
-        )
-
-
-if not db_config.strip():
-    db_config = ''
-
-def auto_create_db(configs):
-    extracted_dbs = []
-
-    for hostname, table, fields in \
-            [ x.strip().split()
-                    for x in configs.strip().split('\n')
-                    if not x.strip().startswith('#') ]:
-
-        db_name, table_name = table.split(':')
-        fields = fields.split(',')
-        print hostname, db_name, table_name, fields
-
-        db_settings = {'user': default_user,
-                       'pass': default_pass,
-                       'host': hostname,
-                       'database': db_name,
-                       'table': table_name,
-                       'fields': fields}
-
-        # DAL initialize
-        db = DAL("mysql://%(user)s:%(pass)s@%(host)s/%(database)s" % db_settings, migrate=False)
-
-        # extract fields
-        matched_fields = []
-        for fld_name in fields:
-            fld = db.executesql( "SHOW COLUMNS FROM `%s` LIKE '%s'" 
-                                 % (table_name, fld_name), as_dict=True )
-            if not fld:
-                print "No matched field, should throw error and exit"
-            else:
-                f = fld[0]['Field']
-                k = fld[0]['Key']
-                m = re.match('(.*)\((.*)\)', fld[0]['Type'])
-                if m:
-                    t = m.group(1)
-                    n = int(m.group(2))
-                else:
-                    t = fld[0]['Type']
-                    n = None
-
-            if k == 'PRI':
-                t = 'id'
-
-            ff = Field(f, type=datatype[t], length=n)
-            #print ff.as_dict()
-            matched_fields.append(ff)
-
-        db.define_table(table_name, *matched_fields)
-        #print db.as_json()
-
-        extracted_dbs.append(db)
-
-    return extracted_dbs
-
-sql_dbs = auto_create_db(db_config)
-
-
-if __name__ == '__main__':
-    try:
-        import gluon.main
-        from gluon import DAL, Field
-    except ImportError:
-        import sys
-        import os
-        import re
-        m = re.search('.*/(?=applications)', os.path.dirname(os.path.realpath(__file__)))
-        os.chdir(m.group(0))
-        sys.path.append(m.group(0))
-        import gluon.main
-        from gluon import DAL, Field
-
-    auto_create_db(db_config)

+ 0 - 61
models/.tmp/db_mysql.py.tmp

@@ -1,61 +0,0 @@
-import re
-server_config = '''
-# name  host                                db              table
-fpd     katrin.kit.edu/adei                 fpd             katrin_rep
-orca    status:status@mysql://127.0.0.1     test_status     experiment1     
-'''
-
-
-###
-def x_mysql(host, dbname, dbtable, user='', passwd='', fields=''): 
-    from gluon.dal import MySQLAdapter
-    db_setting = dict(host=host, dbname=dbname, dbtable=dbtable, dbuser=user, dbpass=passwd)
-    print db_setting
-
-    # DAL initialize       
-    db0 = DAL("mysql://%(dbuser)s:%(dbpass)s@%(host)s/%(dbname)s" % db_setting, 
-              migrate=False,
-              lazy_tables=True) 
-    
-    # extract fileds from description
-    fields = db0.executesql('show columns from %(dbtable)s;' % db_setting)
-    db0._adapter.execute('SELECT * FROM %(dbtable)s LIMIT 1;' % db_setting)
-
-    #print db0.get_instances()
-    #print db0._adapter.types
-    #print db0._adapter.cursor.description
-    #print fields
-
-    # extract fields
-    #db0 =
-###
-
-config_adei = {}
-config_mysql = {}
-
-
-regex_adei = re.compile('[^/]*/(adei)')
-regex_mysql = re.compile('(?P<user>.+):(?P<pass>.+)@mysql://(?P<host>.*)')
-
-for dbkey, hostname, dbname, dbtable in [ x.strip().split() 
-        for x in server_config.strip().split('\n') if not x.strip().startswith('#') ]:
-
-    m = regex_mysql.match(hostname)
-    if m:
-        if dbkey not in config_mysql.keys():
-            h, u, p = ( m.group('host'), m.group('user'), m.group('pass') )
-            config_mysql[dbkey] = ( h, dbname, dbtable, u, p)
-        continue
-
-adei_servers = {}
-for s in config_adei:
-    print s
-
-mysql_servers = {}
-for k, settings in config_mysql.iteritems():
-    print k, settings
-    #from extractdb import extract_mysql as x_mysql
-    mysql_servers[k] = x_mysql(*settings) 
-
-#import pprint
-#print '\n'.join(repr(u) for u in globals() if not u.startswith('__'))

+ 0 - 70
models/.tmp/server.py.tmp

@@ -1,70 +0,0 @@
-import re
-from adeireader.adeireader import ADEIReader 
-
-###
-def x_mysql(host, dbname, dbtable, user='', passwd='', fields=''): 
-    from gluon.dal import MySQLAdapter
-    db_setting = dict(host=host, dbname=dbname, dbtable=dbtable, dbuser=user, dbpass=passwd)
-    print db_setting
-
-    # DAL initialize       
-    db0 = DAL("mysql://%(dbuser)s:%(dbpass)s@%(host)s/%(dbname)s" % db_setting, 
-              migrate=False,
-              lazy_tables=True) 
-    
-    # extract fileds from description
-    fields = db0.executesql('show columns from %(dbtable)s;' % db_setting)
-    db0._adapter.execute('SELECT * FROM %(dbtable)s LIMIT 1;' % db_setting)
-
-    #print db0.get_instances()
-    #print db0._adapter.types
-    #print db0._adapter.cursor.description
-    #print fields
-
-    # extract fields
-    #db0 =
-###
-
-server_config = '''
-# name/host/db/table
-fpd     katrin.kit.edu/adei                 fpd             katrin_rep
-orca    status:status@mysql://127.0.0.1     test_status     experiment1     
-'''
-
-config_adei = {}
-config_mysql = {}
-
-
-regex_adei = re.compile('[^/]*/(adei)')
-regex_mysql = re.compile('(?P<user>.+):(?P<pass>.+)@mysql://(?P<host>.*)')
-
-for dbkey, hostname, dbname, dbtable in [ x.strip().split() 
-        for x in server_config.strip().split('\n') if not x.strip().startswith('#') ]:
-
-    if regex_adei.match(hostname):
-        if dbkey not in config_adei.keys():
-            config_adei[dbkey] = (hostname, dbname, dbtable)
-        continue
-
-    m = regex_mysql.match(hostname)
-    if m:
-        if dbkey not in config_mysql.keys():
-            h, u, p = ( m.group('host'), m.group('user'), m.group('pass') )
-            config_mysql[dbkey] = ( h, dbname, dbtable, u, p)
-        continue
-
-    # error message
-    print 'nothing is matched'
-
-adei_servers = {}
-for s in config_adei:
-    print s
-
-mysql_servers = {}
-for k, settings in config_mysql.iteritems():
-    print k, settings
-    #from extractdb import extract_mysql as x_mysql
-    mysql_servers[k] = x_mysql(*settings) 
-
-#import pprint
-#print '\n'.join(repr(u) for u in globals() if not u.startswith('__'))

+ 5 - 0
models/bess/db.py

@@ -0,0 +1,5 @@
+from dbadapter import ADEIAdapter
+from bess import ulm
+
+ulm_config = ulm.config
+ulm_adapter = cache.ram('bessUlm', lambda: ADEIAdapter(ulm_config), 3600)

+ 0 - 87
models/db.py

@@ -1,87 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#########################################################################
-## This scaffolding model makes your app work on Google App Engine too
-## File is released under public domain and you can use without limitations
-#########################################################################
-
-## if SSL/HTTPS is properly configured and you want all HTTP requests to
-## be redirected to HTTPS, uncomment the line below:
-# request.requires_https()
-
-if not request.env.web2py_runtime_gae:
-    ## if NOT running on Google App Engine use SQLite or other DB
-    db = DAL('sqlite://storage.sqlite',pool_size=1,check_reserved=['all'])
-else:
-    ## connect to Google BigTable (optional 'google:datastore://namespace')
-    db = DAL('google:datastore+ndb')
-    ## store sessions and tickets there
-    session.connect(request, response, db=db)
-    ## or store session in Memcache, Redis, etc.
-    ## from gluon.contrib.memdb import MEMDB
-    ## from google.appengine.api.memcache import Client
-    ## session.connect(request, response, db = MEMDB(Client()))
-
-## by default give a view/generic.extension to all actions from localhost
-## none otherwise. a pattern can be 'controller/function.extension'
-response.generic_patterns = ['*'] if request.is_local else []
-
-## (optional) optimize handling of static files
-# response.optimize_css = 'concat,minify,inline'
-# response.optimize_js = 'concat,minify,inline'
-## (optional) static assets folder versioning
-# response.static_version = '0.0.0'
-#########################################################################
-## Here is sample code if you need for
-## - email capabilities
-## - authentication (registration, login, logout, ... )
-## - authorization (role based authorization)
-## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
-## - old style crud actions
-## (more options discussed in gluon/tools.py)
-#########################################################################
-
-from gluon.tools import Auth, Service, PluginManager
-
-auth = Auth(db)
-service = Service()
-plugins = PluginManager()
-
-## create all tables needed by auth if not custom tables
-auth.define_tables(username=False, signature=False)
-
-## configure email
-mail = auth.settings.mailer
-mail.settings.server = 'logging' if request.is_local else 'smtp.gmail.com:587'
-mail.settings.sender = 'you@gmail.com'
-mail.settings.login = 'username:password'
-
-## configure auth policy
-auth.settings.registration_requires_verification = False
-auth.settings.registration_requires_approval = False
-auth.settings.reset_password_requires_verification = True
-
-## if you need to use OpenID, Facebook, MySpace, Twitter, Linkedin, etc.
-## register with janrain.com, write your domain:api_key in private/janrain.key
-from gluon.contrib.login_methods.janrain_account import use_janrain
-use_janrain(auth, filename='private/janrain.key')
-
-#########################################################################
-## Define your tables below (or better in another model file) for example
-##
-## >>> db.define_table('mytable',Field('myfield','string'))
-##
-## Fields can be 'string','text','password','integer','double','boolean'
-##       'date','time','datetime','blob','upload', 'reference TABLENAME'
-## There is an implicit 'id integer autoincrement' field
-## Consult manual for more options, validators, etc.
-##
-## More API examples for controllers:
-##
-## >>> db.mytable.insert(myfield='value')
-## >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL)
-## >>> for row in rows: print row.id, row.myfield
-#########################################################################
-
-## after defining tables, uncomment below to enable auditing
-# auth.enable_record_versioning(db)

+ 0 - 10
models/db_sql.py

@@ -1,10 +0,0 @@
-from gluon.custom_import import track_changes
-track_changes(True)
-
-from sql_models import info; info._adapter.reconnect()
-from sql_models import kitcube
-
-for k in kitcube.keys():
-    kitcube[k].reconnect()
-    globals()[k] = kitcube[k].dal
-

+ 0 - 33
models/menu.py

@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-# this file is released under public domain and you can use without limitations
-
-#########################################################################
-## Customize your APP title, subtitle and menus here
-#########################################################################
-_a = request.application
-_c = request.controller
-_f = request.function
-
-response.logo = A(B('Katrin'),XML('&nbsp;&nbsp;'),
-                  _class="brand",_href="http://www.katrin.kit.edu/")
-response.title = request.application.replace('_',' ').title()
-response.subtitle = ''
-
-## read more at http://dev.w3.org/html5/markup/meta.name.html
-response.meta.author = 'Chuan Miao <chuan.miao@kit.edu>'
-response.meta.keywords = 'web2py, python, framework'
-response.meta.generator = 'Web2py Web Framework'
-
-## your http://google.com/analytics id
-response.google_analytics_id = None
-
-#########################################################################
-## this is the main application menu add/remove items as required
-#########################################################################
-
-response.menu = [
-    ('Display', _c == 'default' and _f == 'index', URL('default', 'index'), []),
-    ('Config', _c == 'config' and _f == 'index', URL('config', 'index'), [])
-]
-
-if "auth" in locals(): auth.wikimenu()

+ 0 - 139
modules/.ipynb_checkpoints/create_view-checkpoint.ipynb

@@ -1,139 +0,0 @@
-{
- "metadata": {
-  "name": ""
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "import re\n",
-      "import time\n",
-      "from itertools import groupby\n",
-      "from config import sql_databases\n",
-      "from kitcube import KITCube\n",
-      "from gluon import DAL\n",
-      "\n",
-      "database = sql_databases[1]\n",
-      "db_key  = database['key']\n",
-      "db_host = database['host']\n",
-      "db      = database['db']\n",
-      "modules = database['modules']\n",
-      "views   = database.get('views', None)\n",
-      "connector = KITCube(db_key, db_host, db, modules)\n",
-      "\n",
-      "connector.connect()\n",
-      "\n",
-      "def process_views(views):\n",
-      "    pattern = re.compile('(\\w+).(\\w+)')\n",
-      "    keyfunc = lambda x: x[0]\n",
-      "    \n",
-      "    grouped_views = {}\n",
-      "    for view_name, sensors in views.iteritems():\n",
-      "        slist = []\n",
-      "        for sensor in sensors:\n",
-      "            m = pattern.match(sensor)\n",
-      "            slist.append((m.group(1), m.group(2)))\n",
-      "        sgroups = []\n",
-      "        for table, sgroup in groupby(slist, keyfunc):\n",
-      "            sgroups.append((table, [s[1] for s in sgroup]))\n",
-      "        grouped_views[view_name] = sgroups\n",
-      "    return grouped_views\n",
-      "            \n",
-      "\n",
-      "def parse_views(connector, views):\n",
-      "    db = connector.dal\n",
-      "    selections = {}\n",
-      "    for k, v in views.iteritems():\n",
-      "        selections[k] = []\n",
-      "        for table, fields in v:\n",
-      "            s = [ db[table][_f] for _f in fields ]\n",
-      "            selections[k].append(s)\n",
-      "    return selections\n",
-      "    \n",
-      "\n",
-      "\n",
-      "# for k, v in views.iteritems():\n",
-      "#     query = "
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": [],
-     "prompt_number": 60
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "t0 = time.time()\n",
-      "\n",
-      "views = process_views(views)\n",
-      "s = parse_views(connector, views)\n",
-      "\n",
-      "ss = s['test1']\n",
-      "for sss in ss:\n",
-      "    print connector.dal().select(*sss, orderby='id DESC', limitby=(0,1)).as_list()\n",
-      "t1 = time.time()\n",
-      "t1-t0"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": [
-      {
-       "output_type": "stream",
-       "stream": "stdout",
-       "text": [
-        "[{'INK_G_INKW_AVG': 1.220399, 'SMT_M_SOIL_002_AVG': 7.08, 'OMB_N_N_002_AVG': 0.0}]\n",
-        "[{'SHF_F_SHF_002_INST': 32.58767}]\n"
-       ]
-      },
-      {
-       "metadata": {},
-       "output_type": "pyout",
-       "prompt_number": 61,
-       "text": [
-        "0.019381999969482422"
-       ]
-      }
-     ],
-     "prompt_number": 61
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "views"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": [
-      {
-       "metadata": {},
-       "output_type": "pyout",
-       "prompt_number": 62,
-       "text": [
-        "{'test1': [('Data_011_EBM1_DAR',\n",
-        "   ['INK_G_INKW_AVG', 'OMB_N_N_002_AVG', 'SMT_M_SOIL_002_AVG']),\n",
-        "  ('Data_011_EBM1_DAS1', ['SHF_F_SHF_002_INST'])]}"
-       ]
-      }
-     ],
-     "prompt_number": 62
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}

+ 0 - 28
modules/adei/adei.py.0

@@ -1,28 +0,0 @@
-
-from adeireader import ADEIReader
-import time
-
-
-adei_update = True
-
-if 'adei_servers' in session:
-    if session.adei_servers == adei_servers and \
-        len(session.adei_sensors) == len(session.adei_servers):
-        adei_update = False
-if adei_update:
-    session.adei_servers = adei_servers
-    session.adei_sensors = []
-    session.adei_readers = {}
-
-    for c in adei_servers:
-        ar = ADEIReader(c['host'], c['db_server'], c['db_name'])
-        groups = ar.query_group()
-        for ig, g in enumerate(groups):
-            groups[ig]['sensors'] = ar.query_sensor(g['db_group'])
-
-        session.adei_readers[c['server']] = ar
-        session.adei_sensors.append({
-                'server': c['server'],
-                'groups': groups
-        })
-

+ 0 - 83
modules/adei/config_adei.py

@@ -1,83 +0,0 @@
-from adeireader import ADEIReader
-import sqlite3
-
-config = DAL('sqlite://config.storage.sqlite',pool_size=1,check_reserved=['all'])
-
-config.define_table('adei_server',
-        Field('id', 'primary'),
-        Field('server_key', unique=True),
-        Field('db_host'),
-        Field('db_server'),
-        Field('db_name'))
-
-config.define_table('adei_group',
-        Field('id', 'primary'),
-        Field('group_key', unique=True),
-        Field('db_group'),
-        Field('group_name'),
-        Field('server_id', "reference adei_server"))
-
-config.define_table('adei_sensor',
-        Field('id', 'primary'),
-        Field('sensor_key', unique=True),
-        Field('sensor_mask', 'integer'),
-        Field('sensor_uid'),
-        Field('sensor_name'),
-        Field('group_id', 'reference adei_group'),
-        Field('server_id', 'reference adei_server'))
-
-def fetch_server_group_sensor(server_info):
-    skey = server_info['server_key']
-    sid = config.adei_server.insert(**server_info)
-    group_keys = [ s['group_key'] for s in config().select(config.adei_group.group_key) ]
-    sensor_keys = [ s['sensor_key'] for s in config().select(config.adei_sensor.sensor_key) ]
-    ar = session.adei_readers[skey]
-    for g in ar.query_group():
-        #print g
-        gk = skey + '__' + g['db_group']
-        db_group = g['db_group']
-        group_name = g['name']
-        try:
-            gid = config.adei_group.insert(
-                    group_key = gk,
-                    db_group = db_group,
-                    group_name = group_name,
-                    server_id = sid)
-        except:
-            #print 'record in adei_group already'
-            pass
-        for s in ar.query_sensor(db_group):
-            #print s
-            sensor_mask = s['value']
-            sensor_key = gk+'__'+sensor_mask
-            try:
-                sensor_uid = s['uid']
-            except:
-                sensor_uid = ''
-            try:
-                config.adei_sensor.insert(
-                        sensor_mask=sensor_mask,
-                        sensor_key=sensor_key,
-                        sensor_uid=sensor_uid,
-                        sensor_name=s['name'],
-                        server_id = sid,
-                        group_id = gid)
-            except:
-                #print 'record in adei_server already'
-                pass
-
-server_keys = [ s['server_key'] for s in config().select(config.adei_server.server_key) ]
-
-if 'adei_readers' not in session.keys():
-    session.adei_readers = {}
-
-for s in adei_servers:
-    if s['server_key'] not in session.adei_readers.keys():
-        ar = ADEIReader(s['db_host'], s['db_server'], s['db_name'])
-        session.adei_readers[s['server_key']] = ar
-    if s['server_key'] not in server_keys:
-        fetch_server_group_sensor(s)
-
-           
-        
-

+ 1 - 0
modules/bess/__init__.py

@@ -0,0 +1 @@
+__all__ = ['ulm']

+ 18 - 0
modules/bess/ulm.py

@@ -0,0 +1,18 @@
+sensors = [
+    'DCACBatt.p_ac_dcacbatt_act',
+    'GridPV.p_ac_pv_act',
+    'GridConsum.p_ac_cons_act',
+    'Grid.p_ac_grid_act',
+    'GridEVStation.p_ac_ev_station_act',
+    'Batt.soc_batt_act',
+    'Batt.u_batt_act'
+]
+
+config = dict( 
+    name      = 'ulm',
+    url       = 'http://ipebessadei.ipe.kit.edu/adei-battery',
+    server    = 'hiu',
+    database  = 'HIU_Speicher',
+    sensors   = sensors
+)
+

+ 0 - 67
modules/config.py

@@ -1,67 +0,0 @@
-
-config_adei_servers = [
-    {
-        'server_key': 'katrin',
-        'db_host': 'katrin.kit.edu/adei-detector',
-        'db_server': 'detector',
-        'db_name': 'katrin'
-    },
-    {
-        'server_key': 'aircoils',
-        'db_host': 'katrin.kit.edu/adei-detector',
-        'db_server': 'aircoils',
-        'db_name': 'aircoils'
-
-    }
-]
-
-sql_databases = [
-    {
-        'name': 'orca',
-        'host': 'mysql://status:status@192.168.32.181',
-        'database': 'orca',
-        'sensor_groups': ['runs', 'machines', 'experiment'] ,
-        'field_ignore': [
-            'machines.password', 'experiment.ratesstr',
-        ],
-        'field_type_fix': {
-            'experiment.ratesstr': 'text',
-            'experiment.totalCountsstr': 'text',
-            'experiment.thresholdsstr': 'text',
-            'experiment.gainsstr': 'text'
-        },
-        'selector': {
-            'all': [{},{}],
-            'last': [{-1},{}]
-        },
-        'disabled': True
-    },
-    {
-        'key': 'heads',
-        'host': 'mysql://cube:cube@miaopdv.ipe.kit.edu',
-        'db': 'HEADS',
-        'modules': 'Data_(?P<mod_mark>\d{3})_\w+',
-        'axislist': 'Axislist',
-        'sensorlist': 'Sensorlist',
-        'views': {
-            'test1': 
-                [ 'Data_011_EBM1_DAR.INK_G_INKW_AVG',    
-                  'Data_011_EBM1_DAR.OMB_N_N_002_AVG',
-                  'Data_011_EBM1_DAR.SMT_M_SOIL_002_AVG',
-                  'Data_011_EBM1_DAS1.SHF_F_SHF_002_INST',
-                  'Data_080_RPG_L1B.L1B_AZIMUTH_ANGLE' ]
-        }
-        # 'tables': 'Data_\d{3}_\w+'
-    }
-]
-
-for i, r in enumerate(sql_databases):
-    if 'tables' not in r.keys():
-        r['tables'] = []
-    if 'field_ignore' not in r.keys():
-        r['field_ignore'] = []
-    if 'field_type_fix' not in r.keys():
-        r['field_type_fix'] = {}
-    if 'disabled' not in r.keys():
-        r['disabled'] = False
-    sql_databases[i].update(r)

+ 2 - 0
modules/configs/__init__.py

@@ -0,0 +1,2 @@
+
+__all__ = ['bess_ulm']

+ 0 - 139
modules/create_view.ipynb

@@ -1,139 +0,0 @@
-{
- "metadata": {
-  "name": ""
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "import re\n",
-      "import time\n",
-      "from itertools import groupby\n",
-      "from config import sql_databases\n",
-      "from kitcube import KITCube\n",
-      "from gluon import DAL\n",
-      "\n",
-      "database = sql_databases[1]\n",
-      "db_key  = database['key']\n",
-      "db_host = database['host']\n",
-      "db      = database['db']\n",
-      "modules = database['modules']\n",
-      "views   = database.get('views', None)\n",
-      "connector = KITCube(db_key, db_host, db, modules)\n",
-      "\n",
-      "connector.connect()\n",
-      "\n",
-      "def process_views(views):\n",
-      "    pattern = re.compile('(\\w+).(\\w+)')\n",
-      "    keyfunc = lambda x: x[0]\n",
-      "    \n",
-      "    grouped_views = {}\n",
-      "    for view_name, sensors in views.iteritems():\n",
-      "        slist = []\n",
-      "        for sensor in sensors:\n",
-      "            m = pattern.match(sensor)\n",
-      "            slist.append((m.group(1), m.group(2)))\n",
-      "        sgroups = []\n",
-      "        for table, sgroup in groupby(slist, keyfunc):\n",
-      "            sgroups.append((table, [s[1] for s in sgroup]))\n",
-      "        grouped_views[view_name] = sgroups\n",
-      "    return grouped_views\n",
-      "            \n",
-      "\n",
-      "def parse_views(connector, views):\n",
-      "    db = connector.dal\n",
-      "    selections = {}\n",
-      "    for k, v in views.iteritems():\n",
-      "        selections[k] = []\n",
-      "        for table, fields in v:\n",
-      "            s = [ db[table][_f] for _f in fields ]\n",
-      "            selections[k].append(s)\n",
-      "    return selections\n",
-      "    \n",
-      "\n",
-      "\n",
-      "# for k, v in views.iteritems():\n",
-      "#     query = "
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": [],
-     "prompt_number": 60
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "t0 = time.time()\n",
-      "\n",
-      "views = process_views(views)\n",
-      "s = parse_views(connector, views)\n",
-      "\n",
-      "ss = s['test1']\n",
-      "for sss in ss:\n",
-      "    print connector.dal().select(*sss, orderby='id DESC', limitby=(0,1)).as_list()\n",
-      "t1 = time.time()\n",
-      "t1-t0"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": [
-      {
-       "output_type": "stream",
-       "stream": "stdout",
-       "text": [
-        "[{'INK_G_INKW_AVG': 1.220399, 'SMT_M_SOIL_002_AVG': 7.08, 'OMB_N_N_002_AVG': 0.0}]\n",
-        "[{'SHF_F_SHF_002_INST': 32.58767}]\n"
-       ]
-      },
-      {
-       "metadata": {},
-       "output_type": "pyout",
-       "prompt_number": 61,
-       "text": [
-        "0.019381999969482422"
-       ]
-      }
-     ],
-     "prompt_number": 61
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "views"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": [
-      {
-       "metadata": {},
-       "output_type": "pyout",
-       "prompt_number": 62,
-       "text": [
-        "{'test1': [('Data_011_EBM1_DAR',\n",
-        "   ['INK_G_INKW_AVG', 'OMB_N_N_002_AVG', 'SMT_M_SOIL_002_AVG']),\n",
-        "  ('Data_011_EBM1_DAS1', ['SHF_F_SHF_002_INST'])]}"
-       ]
-      }
-     ],
-     "prompt_number": 62
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}

+ 53 - 0
modules/dbadapter.py

@@ -0,0 +1,53 @@
+import re
+from collections import defaultdict as ddict
+from readers.adeireader import ADEIReader
+
+class DBAdapter(object):
+    
+    regex_sensor = re.compile('(\w+).(\w+)')
+    
+    def __init__(self, config, dbreader):                                                                           
+        self._url = config.get('url')                                           
+        self._server = config.get('server')                                     
+        self._dbname = config.get('database')
+        self._dbreader = dbreader
+        
+    def parse_sensors(self, sensors):
+        res = ddict(list)
+        for sensor in sensors:
+            m = self.regex_sensor.search(sensor)
+            if m:
+                groupname = m.group(1)
+                sensorname = m.group(2)
+                res[groupname].append(sensorname)
+        return res
+
+    def read_from_source(self, sensors):
+        pass
+        
+    def read_sensors(self, sensors):
+        sensors = self.parse_sensors(sensors)
+        return self.read_from_source(sensors)
+    
+    def get_sensor_list(self):
+        return None
+
+    @property
+    def sensors(self):
+        return self.get_sensor_list()                                             
+
+                                                                                
+class ADEIAdapter(DBAdapter):
+    
+    def __init__(self, config, dbreader=None):
+        super(ADEIAdapter, self).__init__(config, dbreader)
+        self._dbreader = ADEIReader(self._url, self._server, self._dbname)
+
+    def get_sensor_list(self):
+        return self._adeireader.sensors
+
+    def read_from_source(self, sensors):
+        data = {}
+        for g, v in sensors.iteritems():
+            data[g] =  { d[0]:d[1] for d in self._dbreader.getdata(g, *v) }
+        return data

+ 0 - 116
modules/helper.py

@@ -1,116 +0,0 @@
-def query_as_dict(expression, field_type=None):
-    """Seralizes gluon.dal.Query as dictionary.
-    
-    Converts a gluon.dal.Query or gluon.dal.Expression
-    into a dictionary structure that can be pickled and
-    stored in the session.
-    
-    Args:
-        expression: gluon.dal.Query or gluon.dal.Expression
-    
-    Returns:
-        Dictionary in the form {op:op, first:expression, second:expression}
-        op: the query operation (eg, AND, EQ, GT)
-        expression: either a dictionary (that expands a gluon Table,
-            Field, Expression or Query object) or a base object such as
-            a string or list.
-    
-    For example:
-        >>>query = (db.comment.id.belongs((1,2,3))) & (db.webpage.title == 'FAQ')
-        >>>print query_as_dict(query)
-        "{'second': {'second': 'FAQ', 'first': {'table': 'webpage', 'fieldname': 'title',
-        'tablename': 'webpage'}, 'op': 'EQ'}, 'first': {'second': (1, 2, 3), 'first':
-        {'table': 'comment', 'fieldname': 'id', 'tablename': 'comment'}, 'op': 'BELONGS'},
-        'op': 'AND'}"
-    """
-    from gluon.dal import Query, Expression, Table, Field
-    if isinstance(expression, Field):
-        tablename = expression._tablename
-        return dict(tablename=expression._tablename,
-                    table = str(expression._table),
-                    fieldname = expression.name)
-    elif isinstance(expression, (Expression, Query)):
-        if not expression.second is None:
-            return dict(op=expression.op.im_func.__name__, 
-                        first=query_as_dict(expression.first), 
-                        second=query_as_dict(expression.second))
-        elif not expression.first is None:
-            if not expression.op is None:
-                return dict(op=expression.op.im_func.__name__,
-                            first=query_as_dict(expression.first),
-                            second=None) # eg '(person.title IS NULL)'
-            else:
-                return expression.first
-        elif not isinstance(expression.op, str):
-            return expression.op()
-        else:
-            return '(%s)' % expression.op
-    elif field_type:
-        return str(represent(expression,field_type))
-    elif isinstance(expression,(list,tuple)):
-        return expression
-    elif isinstance(expression, Table):
-        return dict(tablename=expression._tablename,
-                    table = str(expression))
-    elif expression==None:
-        return None
-    else:
-        return str(expression)
-
-
-def query_from_dict(db, query, out='Query'):
-    """Builds gluon.dal.Query from dictionary structure.
-        
-    Args:
-        db: gluon.dal.db object
-        query: A dictionary in the form {op:op, first:expression, second:expression}
-            as returned by query_as_dict()
-        out: Set to 'Expression' for gluon.dal.Expression 
-            rather than Query.
-    
-    Returns:
-        gluon.dal.Query or gluon.dal.Expression object
-    """
-    from gluon.dal import Expression, Query
-    if out == 'Expression':
-        out_class = Expression
-    else:
-        out_class = Query
-    if type(query) == dict:
-        if 'op' in query.keys():
-            first = query_from_dict(db, query['first'], out=out)
-            second = query_from_dict(db, query['second'], out=out)
-            op = getattr(db._adapter, query['op'])
-            return out_class(db, op, first=first, second=second)
-        elif 'fieldname' in query.keys():
-            if query['tablename'] == query['table']:
-                return db[query['tablename']][query['fieldname']]
-            else: # a table.field with alias
-                return db[query['table']].with_alias(query['tablename'])[query['fieldname']]
-        elif 'tablename' in query.keys():
-            if query['tablename'] == query['table']:
-                return db[query['tablename']]
-            elif ' AS ' in query['table']: # a table with alias
-                t = query['table'].split(' ')[0]
-                return db[t].with_alias(query['tablename'])          
-        else:
-            raise ValueError
-    else:
-        return query
-
-def process_views(views):
-    import re
-    from itertools import groupby
-    pattern = re.compile('(\w+).(\w+)')
-    keyfunc = lambda x: x[0]
-    grouped_views = {}
-    for view_name, sensors in views.iteritems():
-        slist = []
-        for sensor in sensors:
-            m = pattern.match(sensor)
-            slist.append((m.group(1), m.group(2)))
-        sgroups = []
-        for table, sgroup in groupby(slist, keyfunc):
-            sgroups.append((table, [s[1] for s in sgroup]))
-        grouped_views[view_name] = sgroups
-    return grouped_views

+ 0 - 192
modules/kitcube.py

@@ -1,192 +0,0 @@
-import re
-from gluon import DAL, Field
-# from gluon.dal import Row, Rows
-
-dal_field_type = dict(
-    id='id',
-    bit='boolean',
-    tinyint='boolean',
-    int='integer',
-    bigint='bigint',
-    double='double',
-    float='double',
-    mediumblob='blob',
-    blob='blob',
-    varchar='string',
-    text='text'
-)
-
-def search_objects(objs, **kargs):
-    ''' search list of objects, matching supplied fields'''
-    return filter(lambda s: all([ s[k] == v for k,v in kargs.iteritems() ]), objs)
-def search_objects2(axes, sensor_name, module_mark):
-    for ax in axes:
-        if (ax['sensor_name'] == sensor_name) and (ax['module_mark'] == module_mark):
-            return ax['unit']
-    return None
-
-        
-class DBConnector(object):
-    def __init__(self, key, host, database):
-        self._modules = None
-        self._views = None
-        self._module_pattern = None
-        self.axis_list = 'Axislist'
-        self.sensor_list = 'Sensorlist'
-        self.key = key
-        self.host = host
-        self.database = database
-        self.uri = "%s/%s" % (self.host, self.database)
-        self.dal = DAL( self.uri, migrate=False, pool_size=10, lazy_tables=True)
-    def extract(self):
-        self._modules = self.extract_modules()
-        self._sensors = self.extract_sensors(self._modules)
-    def connect(self):
-        self.extract()
-        for mod in self._modules:
-            s = search_objects(self._sensors, module_name=mod['name'])
-            f = [ Field(k['name'], type=k['type'], length=k['length'], rname=k['rname']) for k in s ]
-            self.dal.define_table(mod['name'], *f)
-    def reconnect(self):
-        self.dal._adapter.reconnect()
-    def query(self, module, fields=None, len=1):
-        db = self.dal
-        if module not in self._modules:
-            rows = []
-        if fields is None:
-            rows = db(db[module].id>0).select(db[module].ALL, orderby='id DESC', limitby=(0,len))
-        else:
-            fields = [ db[module][f] for f in fields ]
-            rows = db(db[module].id>0).select(db[module].usec, *fields, orderby='id DESC', limitby=(0,len))
-        return {module: rows.as_list()[::-1]}
-    def query_view(self, view, len=1):
-        res = {}
-        for mod, cols in view:
-            res.update(self.query(mod, cols, len=len))
-        return res
-    @property
-    def module_pattern(self):
-        return self._module_pattern
-    @module_pattern.setter
-    def module_pattern(self, value):
-        self._module_pattern = re.compile(value)
-    @property
-    def modules(self):
-        return self._modules
-    @modules.setter
-    def modules(self, value):
-        self._modules = value
-    @property
-    def sensors(self):
-        return self._sensors
-    @property
-    def views(self):
-        return self._views
-    @views.setter
-    def views(self, value):
-        self._views = value
-    
-    
-    
-class KITCube(DBConnector):
-    def __init__(self, key, host, database, ignore_fields=[], field_type_fix={}):
-        self.ignore_fields = ignore_fields
-        self.field_type_fix = field_type_fix
-        super(KITCube, self).__init__(key, host, database)
-
-    def extract_modules(self):
-        ''' extract modules 
-            extrace from database when self._modules is not defined,
-            otherwise filter self._modules, 
-            
-            Returns: 
-                [{'name': <name>, 'mark': <module number>}, ... ]
-        '''
-        tables = [ tname for tname, in self.dal.executesql('show tables') ]
-        if self._modules is None:
-            matches = [ (tname, self.module_pattern.match(tname)) for tname in tables ]
-            modules = [ dict(name=tname, mark=int(m.group('mod_mark'))) for tname, m in matches if m ]
-        else:
-            modules = [ t for t in self._modules if t in tables ]
-        return modules
-
-    def extract_sensors(self, modules):
-        ''' extract sensors from database
-
-            Args: 
-                modules: list of moudles to be extracted
-            
-            Returns:
-                [ {
-                    'name': <sensor name>,       # dots in name is coverted to underscore
-                    'rname': <sensor rname>,     # original name from db, if it contains dots
-                    'type': <sensor type>,
-                    'length': <length>,
-                    'unit': <unit>,
-                    'module_mark': 'module_mark'
-                   },
-                   ...
-                   ...
-                ]
-        '''
-        fld_type_pattern = re.compile('(?P<fld_type>\w+)\((?P<fld_length>\d*)\)')
-        sensor_units = self.extract_units()
-        sensors = []
-
-        for mod in modules:
-            cols = self.dal.executesql('show columns from ' + mod['name'])
-            for fld_name, fld_type, _, fld_key, _, _ in cols:
-                if '.' in fld_name:
-                    rname = fld_name
-                    fld_name = fld_name.replace('.', '_')
-                else:
-                    rname = None
-                fld_name_long = mod['name'] + '.' + fld_name
-
-                if fld_key == 'PRI':
-                    fld_type = 'id'
-                if fld_name_long in self.field_type_fix.keys():
-                    fld_type = self.field_type_fix[fld_name_long]
-
-                m = fld_type_pattern.match(fld_type)
-                if m:
-                    fld_type = m.group('fld_type')
-                    fld_length = m.group('fld_length')
-                else:
-                    fld_length = None
-
-                if fld_name_long in self.ignore_fields:
-                    continue                
-                if fld_type in ['mediumblob', 'blob']:
-                    continue
-
-                s = search_objects(sensor_units, module=mod['mark'], name=rname or fld_name)
-                try:
-                    unit = s[0]['unit']
-                except:
-                    unit = None
-
-                if rname is not None:
-                    rname = '`' + rname + '`'
-
-                sensors.append(dict(name=fld_name, rname=rname,
-                                    type=fld_type, length=fld_length, 
-                                    unit=unit, module_name=mod['name']))
-        return sensors
-
-    def extract_units(self):
-        self.dal.define_table(self.axis_list,   
-                            Field('id'), Field('unit', 'string'))
-        self.dal.define_table(self.sensor_list, 
-                            Field('id'), Field('name', 'string'),
-                            Field('module', 'integer'),
-                            Field('axis', 'reference %s' % self.axis_list))
-        
-        sensortable = self.dal[self.sensor_list]
-        axistable = self.dal[self.axis_list]
-        
-        records = self.dal(sensortable.axis==axistable.id).select()
-        sensor_units = [ dict(name=r[self.sensor_list].name, 
-                              module=r[self.sensor_list].module, 
-                              unit=r[self.axis_list].unit) for r in records ]
-        return sensor_units

+ 1 - 0
modules/readers/__init__.py

@@ -0,0 +1 @@
+__all__ = [ 'adeireader', 'sqlreader' ]

+ 205 - 0
modules/readers/adeireader.py

@@ -0,0 +1,205 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import datetime
+import calendar
+import urllib2 as urllib
+import base64
+from helper import csvparser, xmlparser, print_exc
+import re
+from itertools import groupby
+
+# Module-wide debug switch: 1 enables the verbose prints in ADEIReader.query.
+DEBUG = 0
+# strptime pattern for ADEI timestamps, e.g. '01-Jan-16 12:00:00.0'.
+ADEI_Time_Format = '%d-%b-%y %H:%M:%S.%f'
+
+def adei_timestamp(adeitimestr):
+    """Convert an ADEI time string (ADEI_Time_Format) to a UTC epoch integer.
+
+    timegm treats the parsed time as UTC; timetuple() discards the
+    sub-second part parsed by %f.
+    """
+    timestamp = datetime.datetime.strptime(adeitimestr, ADEI_Time_Format)
+    timestamp = calendar.timegm(timestamp.timetuple())
+    return timestamp
+
+def parse_csv(url, username=None, password=None):
+    """Fetch *url* with HTTP Basic auth and return the reply as CSV columns
+    (see helper.csvparser: list of (field_name, value1, value2, ...) tuples).
+
+    NOTE(review): base64.encodestring appends a trailing newline, so the
+    Authorization header value contains a newline — confirm the server
+    tolerates this (base64.b64encode would avoid it).
+    """
+    request = urllib.Request(url)
+    base64string = base64.encodestring('%s:%s' % ( username, password ) )
+    request.add_header("Authorization", "Basic %s" % base64string)
+    fp = urllib.urlopen(request)
+    resp = csvparser(fp)
+    #stamps = map(adei_timestamp, resp[0][1:])
+    #data = [dict(zip(('name', 'values'), [s[0], s[1:]])) for s in resp[1:]]
+    return resp
+    #return stamps, data
+
+def parse_xml(url, username=None, password=None):
+    """Fetch *url* with HTTP Basic auth and return the reply parsed as XML:
+    a list of attribute dicts, one per child of the root element
+    (see helper.xmlparser).
+
+    NOTE(review): same encodestring trailing-newline issue as parse_csv.
+    """
+    request = urllib.Request(url)
+    base64string = base64.encodestring('%s:%s' % ( username, password ) )
+    request.add_header("Authorization", "Basic %s" % base64string)
+    fp = urllib.urlopen(request)
+    return xmlparser(fp)
+    
+
+class ADEIReader(object):
+    """Client for an ADEI web service.
+
+    Builds getdata/list URLs for one (server, db) pair and fetches sensor
+    group lists and CSV data.  Python 2 code (print statements,
+    dict.iteritems, urllib2 via the module-level helpers).
+    """
+    
+    def __init__(self, host, server, db):
+        # host is expected WITHOUT a trailing slash; the services path is
+        # appended here.
+        self.host = host + '/services/'
+        self.server = server
+        self.db = db
+        self._sensors = None  # lazy cache, filled by get_sensor_list()
+        print 'ADEIReader initialized'
+
+    def qurl(self, qtype, **kargs):
+        """Build a service URL for qtype in {'get', 'group', 'sensor'}.
+
+        Remaining keyword arguments are appended as query parameters;
+        db_server, db_name, window and resample are always filled in.
+        Values must already be strings — they are concatenated without
+        URL-encoding.
+        """
+        url = self.host
+
+        if qtype == 'get':
+            url = url + 'getdata.php?'
+        elif qtype == 'group':
+            url = url + 'list.php?target=groups'
+        elif qtype == 'sensor':
+            url = url + 'list.php?target=items'
+
+        kargs['db_server'] = self.server
+        kargs['db_name'] = self.db
+        # Defaults: full time window (-1), no resampling (0).
+        kargs['window'] = kargs.get('window') or '-1'
+        kargs['resample'] = kargs.get('resample') or '0'
+        # NOTE(review): every parameter is prefixed with '&', so the 'get'
+        # URL contains '?&...' — presumably the server accepts this; verify.
+        for k, v in kargs.iteritems():
+            url += '&' + k + '=' + v
+        return url
+
+    def get_sensor_list(self):
+        """Return {group_name: {sensor_name: sensor_value}} by querying the
+        group list and then the item list of every group."""
+        url = self.qurl('group')
+        groups = [ v.get('db_group') for v in parse_xml(url) ]
+        sensor_list = {}
+        for g in groups:
+            url = self.qurl('sensor', db_group=g)
+            sensor_list[g] = { v['name']:v['value'] for v in  parse_xml(url) }
+        return sensor_list
+
+    def getdata(self, group, *sensors):
+        """Fetch data for the named sensors of *group* from the ADEI server.
+
+        Sensor names are translated to ADEI mask ids via the cached sensor
+        list.  Returns the parsed CSV columns, or [] on ANY failure — the
+        bare except below swallows all errors, including lookup typos.
+        """
+        if not self._sensors:
+            self._sensors = self.get_sensor_list()
+        try:
+            masks = map(self._sensors.get(group).get, sensors)
+            data = self.querydata(group, *masks)
+            # NOTE(review): only the first timestamp value (data[0][1]) is
+            # converted; looks adequate only for single-row replies —
+            # confirm for multi-row data.
+            data[0] = ('timestamp', adei_timestamp(data[0][1]))
+        except:
+            data = []
+        return data
+
+    def querydata(self, group, *masks):
+        """Low-level fetch: join the mask ids and return the parsed CSV."""
+        masks = ','.join(map(str, masks))
+        url = self.qurl('get', db_group=group, db_mask=masks)
+        return parse_csv(url)
+
+    @property
+    def sensors(self):
+        # Lazily populated sensor-list cache (same cache getdata uses).
+        if not self._sensors:
+            print 'retrieve sensor list'
+            self._sensors = self.get_sensor_list()
+        return self._sensors
+
+###### Deprecated #######
+
+    def query(self, qtype='get', **kargs):
+        """Deprecated one-shot query: fetch, timestamp-convert, transpose
+        and optionally thin out the data.  Superseded by getdata()."""
+        # parse args
+        group = str(kargs.get('group', ''))
+        sensor = kargs.get('sensor', [])
+        window = kargs.get('window','-1')
+        resample = kargs.get('resample','0')
+        sensor_mask = ','.join(map(str, sensor))
+        url = self.qurl('get', db_group=group, db_mask=sensor_mask, window=window, resample=resample)
+        data = parse_csv(url)
+        # build sensor list
+        #sensorlist = [ group+'__'+s for s in sensorMaskList ]
+        #sensorlist.insert(0, 'timestamp')
+        # pack data: column names, then rows of values (first column is time)
+        sname = [d[0] for d in data]
+        svalue = [list( d[1:] ) for d in data]
+        print svalue, len(svalue)
+        svalue[0]  = map(adei_timestamp, svalue[0])
+        svalue = transpose_list(svalue)
+        # resample data: keep only rows at least `resample` seconds apart
+        # NOTE(review): resample is still a STRING here, so the numeric
+        # comparison below looks wrong (Py2 compares float >= str by type);
+        # verify — int(resample) is presumably intended.
+        if resample != '0':
+            res_value = [svalue[0]]
+            t0 = svalue[0][0]
+            for v in svalue:
+                if v[0] - t0  >= resample:
+                    res_value.append(v)
+                    t0 = v[0]
+            svalue = res_value
+        # debug info
+        if DEBUG == 1:
+            #import pprint
+            print '--------------------------------------------------'
+            print url
+            print 'data:', len(data), data[0]
+            print 'sensor names', sname, len(sname)
+            print 'sensor values', svalue, len(svalue[0])
+            print 'sensor', len(sensor)
+            print
+        return svalue
+
+     #def parse_sensors(self, *sensors):
+        #'''
+        #Args:
+            #sensors (list): a list of sensor identifiers
+
+        #Return:
+
+        #Description:
+            #sensor identifiers  can be one of the following two forms, 
+                #1) <group name>.<sensor name>,
+                #2) (<group name>, <sensor name 1>, [<sensor name 2>, [...]]).
+        #'''
+        #if self.sensor_list is None:
+            #self.sensor_list = self.get_sensor_list()
+
+        #parsed_sensors = []
+        #for item in sensors:
+            #try:
+                #m = re.match('(\w+).(\w+)', item)
+                #if m:
+                    #grp, sns = m.group(1), (m.group(2),)
+            #except TypeError:
+                #grp, sns= item[0], item[1:]
+            #parsed_sensors.append((grp, sns))
+
+        #res = {}
+        #for k, v in parsed_sensors:
+            #try: 
+                #res[k].extend(v)
+            #except: 
+                #res[k] = list(v)
+        #for k, v in res.iteritems():
+            #try:
+                #res[k] = sorted([ int(self.sensor_list.get(k).get(s)) for s in v ])
+            #except:
+                #pass
+        #return res
+
+# Mapping from reader-level argument names to ADEI query-string keys
+# (not referenced elsewhere in this module).
+ADEI_QUERY_STRING = {
+        'group': 'db_group',
+        'sensor': 'db_mask'
+}
+
+def f(x):
+    """Expand a numeric range spec such as '1,3-5' into ['1', '3', '4', '5'].
+
+    Parts parsed before the first malformed part are kept; the rest of the
+    spec is silently dropped (bare except).  Always returns strings.
+    """
+    x = str(x)
+    result = []
+    try:
+        for part in x.split(','):
+            if '-' in part:
+                a, b = part.split('-')
+                a, b = int(a), int(b)
+                result.extend(range(a, b + 1))
+            else:
+                a = int(part)
+                result.append(a)
+    except:
+        pass
+    return map(str, result )
+
+def transpose_list(l):
+    # Transpose rows -> columns; zip truncates to the shortest row.
+    return map(list, zip(*l))
+
+
+class ADEIError(Exception):
+    """Base exception for ADEI reader errors (not raised in this module)."""
+    pass
+  

+ 14 - 0
modules/readers/colorstring.py

@@ -0,0 +1,14 @@
+# ANSI escape sequences for coloured terminal output.
+HEADER = '\033[95m'
+OKBLUE = '\033[94m'
+OKGREEN = '\033[92m'
+C_ERR = '\033[91m'
+C_WRN = '\033[94m'
+C_END = '\033[0m'  # reset
+
+def ERROR(s):
+   # Wrap s in red escape codes (original 3-space indent preserved).
+   return C_ERR + s + C_END 
+
+def WARNING(s):
+    # Wrap s in warning-colour (blue, C_WRN) escape codes.
+    return C_WRN + s + C_END
+
+

+ 31 - 0
modules/readers/helper.py

@@ -0,0 +1,31 @@
+import xml.etree.ElementTree as ET
+import csv
+import sys
+import colorstring
+import traceback
+
+def xmlparser(handler):
+    """Parse XML from a file-like object; return the attribute dict of each
+    direct child of the root element, in document order."""
+    tree = ET.parse(handler)
+    result = []
+    for child in tree.getroot():
+        result.append(child.attrib)
+    return result 
+
+def csvparser(handler):
+    """Read CSV from a file-like object and return it column-wise: a list of
+    (field_name, value1, value2, ...) tuples.  The first row is taken as the
+    header; empty rows are skipped."""
+    rows = csv.reader(handler, skipinitialspace=True)                                                                      
+    fields = rows.next()
+    data = [row for row in rows if row]
+    return zip(fields, *data)
+
+def print_exc():
+    """Write a coloured summary of the current exception to stderr.
+
+    Must be called from inside an except block (uses sys.exc_info()).
+    """
+    exctype, value, tb  = sys.exc_info()
+    # limit=1: only the first (outermost) traceback entry is reported.
+    filename, linenumber, functionname, text =  traceback.extract_tb(tb, 1)[0]
+    sys.stderr.write("[Error] Unexpected exception\n")
+    sys.stderr.write("            type: %s\n" % exctype)
+    sys.stderr.write("         message: %s\n" % colorstring.ERROR(value.message))
+    sys.stderr.write("        function: %s\n" % colorstring.ERROR(functionname))
+    sys.stderr.write("            text: %s\n" % text)
+    sys.stderr.write("            file: %s: %s\n" % (filename, linenumber))
+
+
+

+ 0 - 77
modules/sql_models.py

@@ -1,77 +0,0 @@
-import re
-from itertools import groupby
-from gluon.dal import DAL, Field
-from config import sql_databases
-from kitcube import KITCube
-from helper import process_views
-
-
-kitcube = {}            
-for database in sql_databases:
-     if database['disabled']: continue
-     
-     db_key  = database['key']
-     db_host = database['host']
-     db      = database['db']
-     modules = database['modules']
-     views   = database.get('views', None)
-
-     connector = KITCube(db_key, db_host, db)
-     connector.views = process_views(views)
-     if isinstance(modules, basestring):
-          connector.module_pattern = modules
-     else:
-          connector.modules = modules
-
-     connector.connect()
-     kitcube[db_key] = connector
-
-### Save DB meta info into database
-info = DAL('sqlite://info.storage.sqlite', 
-           pool_size=10,
-           check_reserved=False)
-
-info.define_table('sql_db', 
-                    Field('key', 'string'),
-                    Field('host', 'string'),
-                    Field('db', 'string'),
-                    Field('modules', 'list:integer'),
-                    Field('flag', 'boolean', default=False))
-info.define_table('sql_module', 
-                    Field('mark', 'integer'),
-                    Field('name', 'string'),
-                    Field('db_id', 'reference sql_db'))
-info.define_table('sql_sensor', 
-                    Field('name', 'string'),
-                    Field('type', 'string'),
-                    Field('unit', 'string'),
-                    Field('length', 'integer'),
-                    Field('rname', 'string'),
-                    Field('module_name', 'string'),
-                    Field('db_id', 'reference sql_db'),
-                    Field('module_id', 'reference sql_module'))
-  
-for connector in kitcube.values():
-     q = (info.sql_db.db==db) & (info.sql_db.host==db_host)
-     r = info(q).select().last()  
-     if r is None:
-          modules = connector.modules
-          sensors = connector.sensors
-
-          module_marks = [ m['mark'] for m in modules]
-          module_ids = {}
-
-          db_id = info.sql_db.insert(key=db_key, host=db_host, 
-                                     db=db, modules=module_marks)
-          for m in modules:
-               module_id = info.sql_module.insert(db_id=db_id, **m)
-               module_ids[m['name']] = module_id
-          for s in sensors:
-               module_id = module_ids[ s['module_name'] ]
-               info.sql_sensor.insert(db_id=db_id, module_id=module_id, **s)
-     else:
-          if r.key != db_key:
-               r.update_record(key=db_key)
-
-
-

+ 0 - 32
views/default/index.html

@@ -1,32 +0,0 @@
-{{extend 'layout_bs3.html'}}
-
-<div class="container-fluid">
-  <div class="row">
-    <div class="col-md-6">
-      <div class="row left-cols">
-        <div class="col-md-6">
-          col 1
-        </div>
-        <div class="col-md-6">
-          col 2
-        </div>
-      </div>
-    </div>
-
-    <div class="col-md-6">
-      <div class="row left-cols">
-        <div class="col-md-6">
-          col 3
-        </div>
-        <div class="col-md-6">
-          col 5
-        </div>
-        <div class="col-md-12">
-          col 4
-        </div>
-      </div>
-    </div>
-  </div>
-</div>
-
-

+ 0 - 25
views/default/user.html

@@ -1,25 +0,0 @@
-{{extend 'layout.html'}}
-
-<h2>{{=T( request.args(0).replace('_',' ').capitalize() )}}</h2>
-<div id="web2py_user_form">
-{{
-if request.args(0)=='login' and not session.auth_2_factor_user:
-    if not 'register' in auth.settings.actions_disabled:
-        form.add_button(T('Register'),URL(args='register', vars={'_next': request.vars._next} if request.vars._next else None),_class='btn')
-    pass
-    if not 'request_reset_password' in auth.settings.actions_disabled:
-        form.add_button(T('Lost Password'),URL(args='request_reset_password'),_class='btn')
-    pass
-pass
-=form
-}}
-</div>
-<script language="javascript"><!--
-jQuery("#web2py_user_form input:visible:enabled:first").focus();
-{{if request.args(0)=='register':}}
-    web2py_validate_entropy(jQuery('#auth_user_password'),100);
-{{elif request.args(0) in ('change_password','reset_password'):}}
-    web2py_validate_entropy(jQuery('#no_table_new_password'),100);
-{{pass}}
-//--></script>
-