
better memory usage during preprocessing and enhanced test results

Felix Schultze, 9 years ago
commit 6339f33bfa

+ 14 - 13
Gruntfile.js

@@ -1,20 +1,26 @@
 module.exports = function(grunt) {
     basicPath = 'static/js/threeJsHelper/';
 
-    sshServer = 'ipepdvcompute3.ipe.kit.edu';
+    sshServers = {};
+    sshServers['compute3'] = 'ipepdvcompute3.ipe.kit.edu';
+    sshServers['anka'] = 'anka-visualize.anka.kit.edu';
     username = 'visualization';
 	tarName = '<%= pkg.name %>.tar';
 
-    deployCommand = function(env) {
-        var command = 'ssh  ' + username + '@' + sshServer
+    deployCommand = function(env, server) {
+        var command = 'ssh  ' + username + '@' + sshServers[server]
                 + ' "cd /usr/local/www/;'
                 + 'rm -rf <%= pkg.name %>;'
                 + 'tar -xf ' + tarName + ';'
                 + 'cp -r <%= pkg.name %>/* visualization-' + env + '/;"'
-				+ 'cp visualization-' + env + '/visualization/settings_env.py.' + env + ' visualization-' + env + '/visualization/settings_env.py;';
+				+ 'cp visualization-' + env + '/visualization/settings_env.py.' + env + '.' + server + ' visualization-' + env + '/visualization/settings_env.py;';
         return command;
     }
 
+    var env = grunt.option('env') || 'staging';
+    var server = grunt.option('server') || 'compute3';
+
+
     grunt.initConfig({
         pkg: grunt.file.readJSON('package.json'),
         concat: {
@@ -35,13 +41,10 @@ module.exports = function(grunt) {
                 command: 'rm ' + tarName
             },
             scp: {
-                command: 'scp ' + tarName + ' visualization@ipepdvcompute3.ipe.kit.edu:/usr/local/www/'
+                command: 'scp ' + tarName + ' visualization@' + sshServers[server] + ':/usr/local/www/'
             },
-            deployStaging: {
-                command: deployCommand('staging')
-            },
-            deployProd: {
-                command: deployCommand('prod')
+            deploy: {
+                command: deployCommand(env, server)
             },
             bumbVersion: {
                 command: 'npm version patch'
@@ -59,7 +62,6 @@ module.exports = function(grunt) {
         }
     });
 
-
     grunt.loadNpmTasks('grunt-contrib-concat');
     grunt.loadNpmTasks('grunt-contrib-watch');
     grunt.loadNpmTasks('grunt-shell');
@@ -67,6 +69,5 @@ module.exports = function(grunt) {
     grunt.registerTask('default', ['concat']);
     grunt.registerTask('dev-watch', ['concat:dist']);
     grunt.registerTask('update-version', ['shell:bumbVersion']);
-    grunt.registerTask('deploy-staging', ['concat', 'shell:pkg', 'shell:scp', 'shell:rmPkg', 'shell:deployStaging']);
-    grunt.registerTask('deploy-prod', ['concat', 'shell:pkg', 'shell:scp', 'shell:rmPkg', 'shell:deployProd']);
+    grunt.registerTask('deploy', ['concat', 'shell:pkg', 'shell:scp', 'shell:rmPkg', 'shell:deploy']);
 };

+ 26 - 1
README.md

@@ -19,7 +19,11 @@ python >= 2.7
 python-numpy
 python-numpy-devel
 mongodb
-memcached libmemcached-devel
+memcached
+libmemcached-devel
+libtiff-devel
+libjpeg8-devel
+
 
 pip-packages:
 =============
@@ -74,3 +78,24 @@ After that, please install 'grunt' via 'sudo npm install -g grunt-cli'
 You are ready to go developing JS for the application.
 
 Just use Grunt, e.g. 'grunt watch' (in a separate terminal) All 3D relevant files will be packaged together to one JS file.
+
+Deploying on servers
+=====================
+Currently there are two servers, each of which provides several stages.
+
+Available servers:
+- (1) ipepdvcompute3.ipe.kit.edu
+- (2) anka-visualize.anka.kit.edu
+
+Both servers provide a staging and a prod environment.
+
+To deploy the application on a server, use grunt with its 'deploy' target.
+The options --env and --server select the environment and the server; if omitted, they default to 'staging' and 'compute3'.
+Possible values for --env: 'staging', 'prod'
+Possible values for --server: 'compute3', 'anka', where 'compute3' refers to server (1) and 'anka' to server (2)
+
+For example, to deploy to ipepdvcompute3.ipe.kit.edu on the prod environment:
+grunt deploy --env=prod --server=compute3
+
+Please refer to Gruntfile.js for more information; a second example follows below.
+
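Likewise, assuming the anka host is provisioned like compute3, a prod deployment to server (2) would be:

grunt deploy --env=prod --server=anka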

+ 2 - 2
loadtests/facade.py

@@ -2,6 +2,6 @@ from .testmodels import *
 
 import pdb
 
-def addTestResult(testId, inputShape, subvolumeShape, pointAsTuple, durationInSec):
+def addTestResult(testId, inputShape, subvolumeShape, pointAsTuple, durationInSec, duration_copy_in_sec, dtype):
     test = TestRun.objects(id=testId).first()
-    test.addTestResult(inputShape, subvolumeShape, pointAsTuple, durationInSec)
+    test.addTestResult(inputShape, subvolumeShape, pointAsTuple, durationInSec, duration_copy_in_sec, dtype)

+ 37 - 2
loadtests/testmodels.py

@@ -11,7 +11,18 @@ class Result(EmbeddedDocument):
     originalSize = EmbeddedDocumentField(Dim)
     subvolumeSize = EmbeddedDocumentField(Dim)
     durationInSec = FloatField()
+    duration_copy_in_sec = FloatField()
     point = EmbeddedDocumentField(Dim)
+    itemsize = IntField()
+
+    @property
+    def get_copy_throughput(self):
+        # bytes copied = itemsize * x * y * z, reported in MB/s
+        number_of_bytes = self.itemsize
+        number_of_bytes *= self.subvolumeSize.x
+        number_of_bytes *= self.subvolumeSize.y
+        number_of_bytes *= self.subvolumeSize.z
+        return number_of_bytes / self.duration_copy_in_sec / 1024 / 1024
 
 
 class TestRun(Document):
@@ -22,8 +33,7 @@ class TestRun(Document):
 
     meta = { "db_alias": "test_results" }
 
-    def addTestResult(self, originalShape, subvolumeShape, subvolumePoint, durationInSec):
-        print('addTestResult')
+    def addTestResult(self, originalShape, subvolumeShape, subvolumePoint, durationInSec, duration_copy_in_sec, dtype):
         result = Result()
 
         inputSize = Dim()
@@ -39,6 +49,31 @@ class TestRun(Document):
         result.point = point
 
         result.durationInSec = durationInSec
+        result.duration_copy_in_sec = duration_copy_in_sec
+        result.itemsize = dtype.itemsize
 
         TestRun.objects(id=self.id).update_one(push__results=result)
 
+    @property
+    def get_average(self):
+        if len(self.results) == 0:
+            return 0
+
+        average = 0
+        for result in self.results:
+            average += result.durationInSec
+
+        return average / len(self.results)
+
+    @property
+    def get_copy_throughput_average(self):
+        if len(self.results) == 0:
+            return 0
+
+        average = 0
+        for result in self.results:
+            average += result.get_copy_throughput
+
+
+        return average / len(self.results)
+
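A quick sanity check of the MB/s unit in get_copy_throughput (hypothetical numbers, not taken from real test data):

    itemsize, x, y, z = 1, 128, 128, 128            # uint8 subvolume
    duration_copy_in_sec = 0.5
    print(itemsize * x * y * z / duration_copy_in_sec / 1024 / 1024)  # 4.0 MB/s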

+ 2 - 1
loadtests/urls.py

@@ -7,5 +7,6 @@ from . import views
 urlpatterns = patterns('',
     url(r'^$', views.index, name='home'),
     url(r'^init/(?P<volumeId>[\da-z]+)/(?P<testName>[\da-z+*_/%]+)/$', views.testsetup),
-    url(r'^delete/(?P<id>[\da-z]+)/$', views.deletetest, name='delete')
+    url(r'^delete/(?P<id>[\da-z]+)/$', views.deletetest, name='delete'),
+    url(r'^csv/(?P<id>[\da-z]+)/$', views.createcsv, name='createcsv')
 )

+ 17 - 0
loadtests/views.py

@@ -1,9 +1,12 @@
+import csv
+
 from django.template import RequestContext
 from django.http import HttpResponseRedirect, HttpResponse
 from django.shortcuts import render
 from django.core.urlresolvers import reverse
 from .testmodels import *
 
+
 from datetime import datetime
 import logging
 
@@ -33,6 +36,20 @@ def deletetest(request, id):
     test.delete()
     return HttpResponseRedirect(reverse('loadtests:home'))
 
+def createcsv(request, id):
+    test = TestRun.objects(id=id).first()
+
+    # Create the HttpResponse object with the appropriate CSV header.
+    response = HttpResponse(content_type='text/csv')
+    filename = '%s_%s.csv' % (test.name, test.volumeId)
+
+    response['Content-Disposition'] = 'attachment; filename="%s"' % (filename)
+
+    writer = csv.writer(response)
+    for result in test.results:
+        writer.writerow([result.durationInSec])
+
+    return response
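Given the route added in loadtests/urls.py, the export can be fetched directly. The host and URL prefix below are assumptions for a local development server:

    curl -OJ http://localhost:8000/loadtests/csv/<testId>/

curl's -O -J combination saves the download under the filename from the Content-Disposition header set above.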
 
 
 

+ 8 - 3
templates/loadtests/index.html

@@ -3,11 +3,16 @@
 {% block content %}
 {% for test in tests %}
     <div class="complete-width">
-        <span id="{{ test.id }}">{{test.name}} {{ test.id }} {{ test.created }}</span><a href="{% url 'loadtests:delete' test.id %}">delete</a><br />
+        <span id="{{ test.id }}">{{ test.name }} {{ test.id }} {{ test.created }} avg: {{ test.get_average }} avg_throughput: {{ test.get_copy_throughput_average }}</span>
+        <a href="{% url 'loadtests:delete' test.id %}">delete</a>
+        <a href="{% url 'loadtests:createcsv' test.id %}">download csv</a><br />
+
         <div id="results-{{ test.id }}" class="results hidden">
             {% for result in test.results %}
-            {{ result.durationInSec }}
-            point: {{ result.point.x }} {{ result.point.y }} {{ result.point.z }}<br/>
+                {{ result.durationInSec }} &nbsp;
+                {{ result.get_copy_throughput }} MB/s
+                subvol size: {{ result.subvolumeSize.x }}*{{ result.subvolumeSize.y }}*{{ result.subvolumeSize.z }}*{{ result.itemsize }}
+                point: {{ result.point.x }}*{{ result.point.y }}*{{ result.point.z }}<br/>
             {% endfor %}
         </div>
 

+ 1 - 1
templates/volumes/show.html

@@ -55,7 +55,7 @@
         var generateSlicesStatus = '{{ volume.generateSlicesStatus }}';
         if(generateSlicesStatus.indexOf('running') !== -1) {
             $('div.waiting').removeClass('hidden');
-            var timeout = 10000;
+            var timeout = 5000;
             var getNewStatus = function() {
                 $.ajax({
                     url: homeUrl + 'volumes/{{ volume.id }}/show-status/',

+ 20 - 0
visualization/settings_env.py.prod.anka

@@ -0,0 +1,20 @@
+import os
+'''
+    define where this application runs:
+    prod, staging, dev
+'''
+ENVIRONMENT = 'prod'
+
+
+''' configure more '''
+globalConfiguration = {}
+
+globalConfiguration['sequencer'] = '/mnt/visualize/sequencer/'
+globalConfiguration['static-url-prefix'] = ''
+globalConfiguration['db-name'] = 'volume_visualization'
+globalConfiguration['db-name-tests-results'] = 'volume_visualization_test_results'
+globalConfiguration['log-base-path'] = '/var/log/visualization/visualization-prod'
+globalConfiguration['fiji-path'] = '/usr/local/www/Fiji.app/ImageJ-linux64'
+globalConfiguration['default-import-path'] = '/mnt/visualize/import'
+globalConfiguration['mmap-folder'] = '/mnt/visualize/mmap/prod/'
+globalConfiguration['reduce-harddisk-access'] = False

+ 0 - 0
visualization/settings_env.py.prod → visualization/settings_env.py.prod.compute3


+ 0 - 0
visualization/settings_env.py.staging → visualization/settings_env.py.staging.compute3


+ 18 - 0
volumes/processing/service/Normalizer.py

@@ -0,0 +1,18 @@
+import numpy
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+def normalize_image(frames, volumeId):
+    logger.debug('volId: %s, float frames detected, normalize them' % volumeId)
+    collection_min = frames.min()
+    collection_max = frames.max()
+    logger.debug('volId: %s, imagesmin: %f, imagesmax: %f' % (volumeId, collection_min, collection_max))
+
+    # due to memory problems, normalize slice by slice, in place (each frame_slice is a view into frames)
+    for frame in frames:
+        for frame_slice in frame:
+            frame_slice -= collection_min
+            frame_slice /= (collection_max - collection_min)
+    logger.debug('volId: %s, after normalization imagesmin: %f, imagesmax: %f' % (volumeId, frames.min(), frames.max()))
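The per-slice loop exists because the previous whole-array expression allocated a full-size temporary, roughly doubling peak memory; in-place operations on one slice at a time avoid that. A minimal, self-contained check of normalize_image (hypothetical shape; the import path is assumed from the file location above):

    import numpy
    from volumes.processing.service import Normalizer

    # small float32 stand-in for a real 4D volume (t, z, y, x)
    frames = (numpy.random.rand(2, 4, 16, 16) * 100).astype(numpy.float32)
    Normalizer.normalize_image(frames, 'test-volume')
    assert frames.min() == 0.0 and frames.max() == 1.0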

+ 19 - 15
volumes/processing/service/SliceGenerator.py

@@ -15,7 +15,7 @@ from skimage import img_as_ubyte, img_as_int
 import numpy as np
 
 from imageprocessing import tiffPreparer, sliceMapCreator, binaryPreparer
-from . import MultiProcessUByte
+from . import MultiProcessUByte, Normalizer
 
 import logging
 
@@ -111,6 +111,16 @@ def sliceGeneratorPlain(data, volumeId):
     if len(frames.shape) is not 4:
         raise ValueError('read input file shall have dim of 4, please check the used fileparser')
 
+    if frames.dtype == np.float32 or frames.dtype == np.float16:
+        try:
+            Normalizer.normalize_image(frames, volumeId)
+        except Exception as e:
+            logger.error('volId: %s, error during normalization: %s' % (volumeId, str(e)))
+            Volume.objects(id=volume.id).update_one(set__generateSlicesStatus = 'generation failed, please read the logs')
+            return
+    else:
+        logger.debug('volId: %s, frames do NOT have to be normalized, dtype is %s' % (volumeId, str(frames.dtype)))
+
     logger.debug('volId: %s, cropping images to be square' % volumeId)
     oldshape = frames.shape
     pixelOffset, newSize = sliceMapCreator.calculateXYDimensions(frames[0])
@@ -121,17 +131,6 @@ def sliceGeneratorPlain(data, volumeId):
     del frames
     logger.debug('volId: %s, memory freed, frames deleted, keeping squared croppedFrames' % volumeId)
 
-    if croppedFrames.dtype == np.float32 or croppedFrames.dtype == np.float16:
-        logger.debug('volId: %s, float frames detected, normalize them' % volumeId)
-        collectionMin = croppedFrames.min()
-        collectionMax = croppedFrames.max()
-        logger.debug('volId: %s, imagesmin: %d, imagesmax: %d' %(volumeId, collectionMin, collectionMax))
-        croppedFrames = (croppedFrames - collectionMin) / (collectionMax - collectionMin)
-
-        logger.debug('volId: %s, after normalization imagesmin: %d, imagesmax: %d' %(volumeId, croppedFrames.min(), croppedFrames.max()))
-    else:
-        logger.debug('volId: %s, frames do NOT have to be normalized, dtype is %s' % (volumeId, str(croppedFrames.dtype)))
-
     if croppedFrames.dtype != np.int16 and croppedFrames.dtype != np.uint8:
         logger.debug('volId: %s, frames are not int16 and not uint8, convert them to int' % volumeId)
         croppedFrames = img_as_int(croppedFrames)
@@ -149,9 +148,14 @@ def sliceGeneratorPlain(data, volumeId):
 
         logger.debug('volId: %s, setting rawframes' % volumeId)
 
-        for frameNumber in range(0, len(croppedFrames)):
-            logger.debug('volId: %s, rawFrame: %d, sizeInMemory: %d' % (volumeId, frameNumber, sys.getsizeof(croppedFrames[frameNumber])))
-            rawFrame = volume.add_frame(croppedFrames[frameNumber], frameNumber)
+        try:
+            for frameNumber in range(0, len(croppedFrames)):
+                logger.debug('volId: %s, rawFrame: %d, sizeInMemory: %d' % (volumeId, frameNumber, sys.getsizeof(croppedFrames[frameNumber])))
+                rawFrame = volume.add_frame(croppedFrames[frameNumber], frameNumber)
+        except Exception as e:
+            logger.error('volId: %s, failed to add frame (%s); please provide integers or "" for "sliceFrom" and "sliceTo"' % (volumeId, str(e)))
+            Volume.objects(id=volume.id).update_one(set__generateSlicesStatus = 'generation failed, please read the logs')
+            return
     else:
         rawFrame = volume.rawFrames[0]
 

+ 11 - 6
volumes/processing/service/SubvolumeCreator.py

@@ -86,6 +86,7 @@ def subvolumeCreator(volumeId, frameNumber, numberOfLayers, x, y, z, t, minWidth
     logger.debug('layers per row: %d' % sqrtZ)
     layerCounter = 0
     print('processing z index in range(%d, %d)' %(z, z+numberOfLayers))
+    copy_start_time = time.time()
     for index in range(z, z + numberOfLayers):
         zlayer = imageAsArray[index]
 
@@ -95,6 +96,7 @@ def subvolumeCreator(volumeId, frameNumber, numberOfLayers, x, y, z, t, minWidth
         subvolume[yoffset : yoffset+minWidth, xoffset : xoffset+minWidth] = zlayer[y : y+minWidth, x : x+minWidth]
 
         layerCounter += 1
+    copy_end_time = time.time()
 
     subvolume = img_as_ubyte(subvolume)
     image = Image.fromarray(subvolume)
@@ -108,14 +110,17 @@ def subvolumeCreator(volumeId, frameNumber, numberOfLayers, x, y, z, t, minWidth
     #gridfshelper.savefile(spriteFilename, buff)
     bufferValue = buff.getvalue()
     print('length of subvolume texture %d' % len(bufferValue))
-    cache.set(spriteFilename, bufferValue, settings.SUBVOLUME_CACHING_TIME)
-    endtime = time.time()
-    datetimeEnd = datetime.now()
-    duration = endtime - starttime
 
     if testId is not None:
-        loadtests.addTestResult(testId, imageAsArray.shape, (numberOfLayers, minWidth, minWidth), (x, y, z), duration)
+        cache.set(spriteFilename, bufferValue, settings.SUBVOLUME_CACHING_TIME)
+
+    processing_endtime = time.time()
+    copy_duration = copy_end_time - copy_start_time
+    processing_duration = processing_endtime - starttime
+
+    if testId is not None:
+        loadtests.addTestResult(testId, imageAsArray.shape, (numberOfLayers, minWidth, minWidth), (x, y, z), processing_duration, copy_duration, imageAsArray.dtype)
     else:
         logger.debug('no testid given')
 
-    logger.debug(str(os.getpid()) + 'finished %s, duration: %s' %(datetimeEnd.strftime("%A, %d. %B %Y %I:%M%p"), str(duration)))
+    logger.debug(str(os.getpid()) + ' finished, duration: %s' % (str(processing_duration)))