Skip to content

Commit

Permalink
Merge pull request #7 from saroele/master
Browse files Browse the repository at this point in the history
pull of saroele
  • Loading branch information
rubenbaetens committed Sep 14, 2012
2 parents e6b8435 + 4029dbc commit 9a19a96
Show file tree
Hide file tree
Showing 11 changed files with 2,632 additions and 110 deletions.
29 changes: 28 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -18,4 +18,31 @@ buildlog.txt
/doc/Makefile
/doc/make.bat
/tests/SpeedInvestigations.xlsx
/doc/Untitled0.ipynb
/doc/Untitled0.ipynb
/tests/SSB_files/SSB_5_HeatingCurve_F24H.mat
/tests/SSB_files/SSB_6_HeatingCurve_Ref - Copy.mat
/tests/SSB_files/TME_SSB_5_34degdata.mat
/tests/SSB_files/TME_SSB_5_DayTime_5000W_R10_3.mat
/tests/SSB_files/TME_SSB_5_DayTime_5000W_R10_4.mat
/tests/SSB_files/TME_SSB_5_DayTime_R5_1.mat
/tests/SSB_files/TME_SSB_5_DayTime_R5_2.mat
/tests/SSB_files/TME_SSB_5_DayTime_R5_3.mat
/tests/SSB_files/TME_SSB_5_DayTime_R5_4.mat
/tests/SSB_files/TME_SSB_5_DayTime_R5_5.mat
/tests/SSB_files/TME_SSB_5_GridLoad_4.mat
/tests/SSB_files/TME_SSB_5_GridLoad_5.mat
/tests/SSB_files/TME_SSB_5_GridLoad_5000W_19.mat
/tests/SSB_files/TME_SSB_5_GridLoad_5000W_20.mat
/tests/SSB_files/TME_SSB_5_GridLoad_5000W_B3000_1.mat
/tests/SSB_files/TME_SSB_5_GridLoad_5000W_B3000_2.mat
/tests/SSB_files/TME_SSB_5_GridLoad_5000W_B3000_3.mat
/tests/SSB_files/TME_SSB_5_GridLoad_5000W_B3000_4.mat
/tests/SSB_files/TME_SSB_5_GridLoad_5000W_B3000_5.mat
/tests/SSB_files/TME_SSB_5_GridLoad_5000W_B3000_6.mat
/tests/SSB_files/TME_SSB_5_GridLoad_5000W_B3000_7.mat
/tests/SSB_files/TME_SSB_5_GridLoad_5000W_B3000_8.mat
/tests/SSB_files/TME_SSB_5_HeatingCurve_4000W_Ref.mat
/tests/SSB_files/TME_SSB_6_DayTime_4.mat
/tests/SSB_files/TME_SSB_6_DayTime_R5_4.mat
/tests/SSB_files/TME_SSB_6_GridLoad_3.mat
/tests/SSB_files/TME_SSB_6_GridLoad_4.mat
76 changes: 64 additions & 12 deletions awesim/pymosim.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,10 @@
from subprocess import Popen
import subprocess
import sys
from simman import Simulation, Simdex
from shutil import copyfile
import copy
import numpy as np


def set_solver(solver, dsin = '', copy_to = None):
"""
Expand Down Expand Up @@ -321,9 +322,23 @@ def analyse_log(log_file):
summary['successful'] = True
elif line.find('CPU time for integration') > -1 or \
line.find('CPU-time for integration') > -1:
summary['CPU_time'] = line.split(' ')[-2]
summary['cpu_time'] = line.split(' ')[-2]
elif line.find('Number of (successful) steps') > -1:
summary['steps_ok'] = line.split(' ')[-1]
summary['successful_steps'] = line.split(' ')[-1]
elif line.find('Number of (model) time events') > -1:
summary['time_events_model'] = line.split(' ')[-1]
elif line.find('Number of (U) time events') > -1:
summary['time_events_U'] = line.split(' ')[-1]
elif line.find('Number of state events') > -1:
summary['state_events'] = line.split(' ')[-1]
elif line.find('Number of step events') > -1:
summary['step_events'] = line.split(' ')[-1]
elif line.find('Minimum integration stepsize') > -1:
summary['step_size_min'] = line.split(' ')[-1]
elif line.find('Maximum integration stepsize') > -1:
summary['step_size_max'] = line.split(' ')[-1]
elif line.find('Maximum integration order') > -1:
summary['int_order_max'] = line.split(' ')[-1]
elif line.find('Number of rejected steps') > -1:
summary['steps_nok'] = line.split(' ')[-1]
elif line.find('Integration started at 0 using integration method:') > -1:
Expand Down Expand Up @@ -373,26 +388,63 @@ def run_ds(dymosim = '', dsin = '', result = ''):
return proc


def create_input_file(data, filename, discrete=False, compress=True):
    """
    Create an ASCII input file for the TimeTable models from the MSL.

    Parameters
    ==========
    * data: numpy array with time as first column.  All columns of this
      array will be written to the ascii file.
    * filename: filename (with extension).  If this file already exists, it
      will be overwritten.
    * discrete: if True, the data array will be modified to become a
      discrete (stepwise) profile.  After each row an additional row is
      written with the NEXT time stamp and the CURRENT values.  See the
      documentation of the Modelica.Timetable.mo model for more info.
    * compress: if True, all redundant rows will be removed from data:
      all-zero rows are dropped UNLESS they come directly after a row
      containing any value.  The first and last rows are always kept.
    """
    if compress:
        # A row is kept when it contains data, when its predecessor
        # contained data, or when it is the first or last row.
        row_contains_data = data[:, 1:].any(axis=1)
        row_contains_data_rolled = np.roll(row_contains_data, 1)
        row_contains_data[0] = True
        row_contains_data[-1] = True
        keep = np.logical_or(row_contains_data, row_contains_data_rolled)
        data = data[keep]

    n_rows, n_cols = data.shape

    if discrete:
        # Shifted copy of data: row i gets the time stamp of row i+1 but
        # keeps the values of row i.  Writing it interleaved with data
        # produces a stepwise profile with 2*n_rows-1 lines.
        data_shifted = data.copy()[:-1, :]
        data_shifted[:, 0] = data[1:, 0]
        shape_string = '(' + str(2 * n_rows - 1) + ',' + str(n_cols) + ')'
    else:
        shape_string = '(' + str(n_rows) + ',' + str(n_cols) + ')'

    # 'with' guarantees the file is closed even if a write fails.
    with open(filename, 'w') as f:
        f.write(u'#1\n')
        f.write(''.join([u'double data', shape_string,
                         u'# Profiles created by python script: ',
                         sys.argv[0], '\n']))
        for i in range(n_rows - 1):
            f.write('\t'.join([str(v) for v in data[i, :]]))
            f.write('\n')
            if discrete:
                f.write('\t'.join([str(v) for v in data_shifted[i, :]]))
                f.write('\n')
        # The last row is written separately: it has no shifted companion.
        f.write('\t'.join([str(v) for v in data[-1, :]]))
        f.write('\n')




Expand Down
2 changes: 1 addition & 1 deletion awesim/result.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,7 +289,7 @@ def create_time4plot(sid):

ax.plot(self.val[sid], 'o-', label=label)

leg = ax.legend()
leg = ax.legend(loc='best')
lines = ax.get_lines()
if plot_type == 'plot_date':
ax.set_xlabel('time')
Expand Down
139 changes: 58 additions & 81 deletions awesim/simdex.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ class Simdex:
import pdb
from .simulation import Simulation
from .result import Result
from .pymosim import analyse_log


class Simdex:
Expand Down Expand Up @@ -284,12 +285,15 @@ def scan(self, folder='', process=None, timecheck=True):
process = self.process

if folder == '' :
filenames = self.__get_files(os.getcwd(), '.mat')
elif os.path.exists(folder):
folder = os.getcwd()

try:
filenames = self.__get_files(folder, '.mat')
else:
raise IOError('folder does not exist')

except IOError:
raise IOError('folder %s does not exist' % (folder))

if len(filenames) == 0:
raise ValueError("No .mat files found in %s" % (folder))

# Convert to full path filenames to avoid confusion
full_path_filenames = []
Expand All @@ -312,6 +316,8 @@ def scan(self, folder='', process=None, timecheck=True):
# we find a first Dymola file
sim = Simulation(full_path_filenames[index])
simulation_file = True
except MemoryError:
print 'WARNING: %s could not be indexed because of a MemoryError.\nThe file is probably too big. It could help to try in a fresh python instance' % (full_path_filenames[index])
except:
print '%s is no Dymola file. It is not indexed' % \
(full_path_filenames[index])
def index_one_var(variables, varmap, var, index):
    """
    Locate var in the sorted list variables (searching only from position
    index onwards) and mark it with a 1 in the last column of varmap.
    If var is not present, it is inserted at its sorted position and a
    matching all-zero row is inserted in varmap before marking.

    Returns the (possibly extended) variables, varmap and the position to
    use as starting index for the next lookup.
    """
    try:
        if var == variables[index]:
            # var is the first candidate in variables: mark and advance.
            varmap[index, -1] = 1
            pos = index + 1
        else:
            # search for it in variables, but only in the part AFTER index
            pos = variables[index:].index(var) + index
            varmap[pos, -1] = 1
    except (ValueError, IndexError):
        # Not found (ValueError) or index past the end (IndexError):
        # insert var at the right position, keeping the list sorted.
        pos = bisect.bisect_left(variables, var, lo=index)
        variables.insert(pos, var)
        # make a new all-zero row in varmap and add '1' in the last column
        varmap = np.insert(varmap, pos, 0, axis=0)
        varmap[pos, -1] = 1
        pos += 1
    return variables, varmap, pos

# internal function to enhance readability
def index_one_par(parameters, parmap, parvalues, par, index, parvalue):
    """
    Locate par in the sorted list parameters (searching only from position
    index onwards), mark it with a 1 in the last column of parmap and store
    parvalue in the last column of parvalues.  If par is not present, it is
    inserted at its sorted position, with matching all-zero rows inserted
    in parmap and parvalues before marking.

    Returns the (possibly extended) parameters, parmap, parvalues and the
    position to use as starting index for the next lookup.
    """
    try:
        if par == parameters[index]:
            # par is the first candidate in parameters: mark and advance.
            parmap[index, -1] = 1
            parvalues[index, -1] = parvalue
            pos = index + 1
        else:
            # search for it in parameters, but only in the part AFTER index
            pos = parameters[index:].index(par) + index
            parmap[pos, -1] = 1
            parvalues[pos, -1] = parvalue
    except (ValueError, IndexError):
        # Not found (ValueError) or index past the end (IndexError):
        # insert par at the right position, keeping the list sorted.
        pos = bisect.bisect_left(parameters, par, lo=index)
        parameters.insert(pos, par)
        # new all-zero rows in parmap and parvalues, then mark/store
        parmap = np.insert(parmap, pos, 0, axis=0)
        parmap[pos, -1] = 1
        parvalues = np.insert(parvalues, pos, 0, axis=0)
        parvalues[pos, -1] = parvalue
        pos += 1
    return parameters, parmap, parvalues, pos


def add_meta(simulation, key):
"""Create a node for the simulation and add data to /Metadata"""

def analyse_log(log_file):
    """
    Parse a Dymola dslog.txt file and summarise the simulation run.

    Parameters
    ----------
    log_file: string with path to a dslog.txt file.

    Returns
    -------
    Dictionary with the results.  Keys (present when found in the log):
      - 'successful': True if integration terminated successfully
      - 'CPU_time', 'steps_ok', 'steps_nok': strings as parsed from the log
      - 'algorithm', 'timed_out', 'result file'
      - 'perc_wrong': 100 * steps_nok / steps_ok (0 when not available)
    """
    summary = {'successful': False}
    # 'with' guarantees the log file is closed even if parsing fails.
    with open(log_file, 'r') as lf:
        lines = lf.readlines()
    for line_number, line in enumerate(lines):
        if 'Integration terminated successfully at T =' in line:
            summary['successful'] = True
        elif ('CPU time for integration' in line or
              'CPU-time for integration' in line):
            # value is the second-to-last token ("... : 2.5 seconds")
            summary['CPU_time'] = line.split(' ')[-2]
        elif 'Number of (successful) steps' in line:
            summary['steps_ok'] = line.split(' ')[-1]
        elif 'Number of rejected steps' in line:
            summary['steps_nok'] = line.split(' ')[-1]
        elif 'Integration started at 0 using integration method:' in line:
            # the solver name is on the NEXT line for this log format
            summary['algorithm'] = lines[line_number + 1].strip('\n')
        elif 'Integration started at T = 0 using integration method' in line:
            summary['algorithm'] = line.split(' ')[-1].strip('\n')
        elif 'This simulation timed out and was killed' in line:
            summary['successful'] = False
            summary['timed_out'] = True
        elif 'Corresponding result file' in line:
            summary['result file'] = line.split(' ')[-1].strip('\n')
    # dict.has_key() was removed in Python 3; also guard against a log
    # reporting rejected steps without reporting successful steps.
    if 'steps_nok' in summary and 'steps_ok' in summary:
        summary['perc_wrong'] = 100. * float(summary['steps_nok']) / \
                                float(summary['steps_ok'])
    else:
        summary['perc_wrong'] = 0
    return summary


class Meta(tbl.IsDescription):
    # PyTables row description for the /Metadata table: one row per
    # simulation.  Field names match the keys produced by analyse_log
    # after the rename CPU_time->cpu_time and steps_ok->successful_steps
    # (the pre-rename duplicates are removed here).
    SID = tbl.StringCol(itemsize=16)       # simulation identifier
    path = tbl.StringCol(itemsize=160)     # full path of the result file
    log_analysed = tbl.BoolCol()           # True if a dslog file was parsed
    successful = tbl.BoolCol()             # integration terminated OK
    algorithm = tbl.StringCol(itemsize=16) # integration method name
    cpu_time = tbl.Float32Col()            # CPU time for integration [s]
    successful_steps = tbl.Int32Col()
    steps_nok = tbl.Int32Col()             # rejected steps
    timed_out = tbl.BoolCol()              # simulation was killed
    perc_wrong = tbl.Float32Col()          # 100*steps_nok/successful_steps
    time_events_model = tbl.Int32Col()
    time_events_U = tbl.Int32Col()
    state_events = tbl.Int32Col()
    step_events = tbl.Int32Col()
    step_size_min = tbl.Float32Col()
    step_size_max = tbl.Float32Col()
    int_order_max = tbl.Int32Col()



self.openh5()

Expand Down
Loading

0 comments on commit 9a19a96

Please sign in to comment.