diff --git a/msgpi/cross_section.py b/msgpi/cross_section.py
index 293e0e4..08e57ce 100644
--- a/msgpi/cross_section.py
+++ b/msgpi/cross_section.py
@@ -1,9 +1,4 @@
-import csv
-import os
-import sys
 import pprint
-import numpy as np
-import xml.etree.ElementTree as et
 
 
 class Layer(object):
@@ -51,12 +46,26 @@ def readIXGENLine(self, line, cs, fmt=0, tb=''):
         self.angle = float(line[-1])
 
 
+
+
+
+
+
+
+
 class Layup(object):
     def __init__(self, name=''):
         self.name = name
         self.layers = []
 
 
+
+
+
+
+
+
+
 class CrossSection(object):
     """
     Stores all information of a cross section.
@@ -115,19 +124,33 @@ def __init__(self, name):
         # self.etcm = None  #: Effective timoshenko compliance matrix (6x6)
 
         # Global results for recovery/dehomogenization
-        self.gdisplacements = np.zeros(3)  #: Global displacements [u1, u2, u3]
+        self.gdisplacements = [0, 0, 0]  #: Global displacements [u1, u2, u3]
         #: Global rotations [[c11, c12, c13], [c21, c22, c23], [c31, c32, c33]]
-        self.grotations = np.eye(3)
-        self.gforces = np.zeros(3)  #: Global forces (VABS) [F1, F2, F3]
-        self.gmoments = np.zeros(3)  #: Global moments (VABS) [M1, M2, M3]
+        self.grotations = [
+            [1, 0, 0],
+            [0, 1, 0],
+            [0, 0, 1]
+        ]
+        self.gforces = [0, 0, 0]  #: Global forces (VABS) [F1, F2, F3]
+        self.gmoments = [0, 0, 0]  #: Global moments (VABS) [M1, M2, M3]
         #: Global distributed forces (VABS) [[f1, f2, f3], [d1f1, d1f2, d1f3], [d2f1, d2f2, d2f3], [d3f1, d3f2, d3f3]]
-        self.gdforces = np.zeros((4, 3))
+        self.gdforces = [
+            [0, 0, 0],
+            [0, 0, 0],
+            [0, 0, 0],
+            [0, 0, 0]
+        ]
         #: Global distributed moments (VABS) [[m1, m2, m3], [d1m1, d1m2, d1m3], [d2m1, d2m2, d2m3], [d3m1, d3m2, d3m3]]
-        self.gdmoments = np.zeros((4, 3))
+        self.gdmoments = [
+            [0, 0, 0],
+            [0, 0, 0],
+            [0, 0, 0],
+            [0, 0, 0]
+        ]
         #: Indicate whether the generalized loads are stresses or strains (SwiftComp)
         self.gmeasure = 'stress'
         #: Global straints (SwiftComp) []
-        self.gloads = np.zeros(6)
+        self.gloads = [0, 0, 0, 0, 0, 0]
         #: Load for the horizontal axis for failure envelope (SwiftComp)
         self.feaxis1 = ''
diff --git a/msgpi/dakota_interface.py b/msgpi/dakota_interface.py
index 4feccb3..50c6c5c 100644
--- a/msgpi/dakota_interface.py
+++ b/msgpi/dakota_interface.py
@@ -8,6 +8,7 @@
 import msgpi.analysis as sga
 import dakota.interfacing as di
 
+
 def process(fn_json_args):
     # if logger is None:
     #     logger = mlog.initLogger(__name__)
diff --git a/msgpi/design_analysis.py b/msgpi/design_analysis.py
index 3ec037d..48b5357 100644
--- a/msgpi/design_analysis.py
+++ b/msgpi/design_analysis.py
@@ -1,9 +1,14 @@
+import abc
+import platform
+import msgpi.analysis as sga
+import msgpi.cross_section as mcs
 import msgpi.logger as mlog
+import dakota.interfacing as di
 
 
-class DesignAnalysis():
+class DesignAnalysis(metaclass=abc.ABCMeta):
 
-    def __init__(self, object, inputs={}, outputs={}, settings={}, prepros=[], postpros=[], logger=None):
+    def __init__(self, object=None, inputs={}, outputs={}, settings={}, prepros=[], postpros=[], analyses=[], logger=None):
 
         self.object = object
         self.settings = settings
@@ -27,11 +32,80 @@ def __init__(self, object, inputs={}, outputs={}, settings={}, prepros=[], postp
             ]
         """
         self.postpros = postpros
+        self.logger = logger
 
-        if logger is None:
-            self.logger = mlog.initLogger(__name__)
-        else:
-            self.logger = logger
+        self.analyses = analyses
+
+
+    def updateData(self, data):
+        """
+        data = {
+            'inputs': {},
+            'outputs': {},
+            'settings': {},
+            'preprocessors': [],
+            'postprocessors': []
+        }
+        """
+        try:
+            self.inputs.update(data['inputs'])
+        except KeyError:
+            pass
+
+        try:
+            self.outputs.update(data['outputs'])
+        except KeyError:
+            pass
+
+        try:
+            self.settings.update(data['settings'])
+        except KeyError:
+            pass
+
+        try:
+            self.prepros += data['preprocessors']
+        except KeyError:
+            pass
+
+        try:
+            self.postpros += data['postprocessors']
+        except KeyError:
+            pass
+
+        try:
+            self.analyses += data['analyses']
+        except KeyError:
+            pass
+
+
+    def initLogger(self):
+        try:
+            log_level_cmd = self.settings['log_level_cmd'].upper()
+        except KeyError:
+            log_level_cmd = 'INFO'
+            pass
+
+        try:
+            log_level_file = self.settings['log_level_file'].upper()
+        except KeyError:
+            log_level_file = 'INFO'
+            pass
+
+        try:
+            log_file_name = self.settings['log_file_name']
+        except KeyError:
+            log_file_name = 'log.txt'
+            pass
+
+        self.logger = mlog.initLogger(
+            __name__,
+            cout_level=log_level_cmd, fout_level=log_level_file, filename=log_file_name
+        )
+
+
+    @abc.abstractmethod
+    def analyze(self):
+        pass
 
 
     def preprocess(self):
@@ -68,3 +142,156 @@ def postprocess(self):
                 kwargs = {}
             func(self.object, self.inputs, self.outputs, self.settings, self.logger,
                  *args, **kwargs)
+
+
+    def run(self):
+        self.preprocess()
+        self.analyze()
+        self.postprocess()
+
+
+
+
+
+
+
+
+
+class DakotaDesignAnalysis(DesignAnalysis):
+    def __init__(self, data={}, fn_dakota_params='', fn_dakota_results='', logger=None):
+
+        DesignAnalysis.__init__(self, logger=logger)
+        self.updateData(data)
+
+        # self.dakota_params = None
+        # self.dakota_results = None
+
+        self.dakota_params, self.dakota_results = di.read_parameters_file(
+            fn_dakota_params, fn_dakota_results
+        )
+        self.settings['eval_num'] = self.dakota_params.eval_num
+        for param_name in self.dakota_params.descriptors:
+            self.inputs[param_name] = self.dakota_params[param_name]
+
+
+    def analyze(self):
+
+        for a in self.analyses:
+            try:
+                da = a['object']
+            except KeyError:
+                class_name = a['class']
+                da = eval(f'{class_name}DesignAnalysis')(logger=self.logger)
+
+            try:
+                da.updateData(self.settings[a['group']])
+            except KeyError:
+                pass
+
+            da.updateData(a)
+
+            da.run()
+
+            a['object'] = da
+
+        return
+
+
+
+
+
+
+
+
+
+class BladeDesignAnalysis(DesignAnalysis):
+    def __init__(self, blade=None, data={}, logger=None):
+        DesignAnalysis.__init__(self, object=blade, logger=logger)
+        self.updateData(data)
+
+    def generateDesign(self):
+        return
+
+    def analyze(self):
+        return
+
+
+
+
+
+
+
+
+
+class CrossSectionDesignAnalysis(DesignAnalysis):
+
+    def __init__(self, cs: mcs.CrossSection, inputs={}, outputs={}, prepros=[], postpros=[], config={}, logger=None):
+        DesignAnalysis.__init__(self, cs, inputs, outputs, config, prepros, postpros, logger=logger)
+        # self.cs = cs
+        # self.job_args = job_args
+
+        # if logger is None:
+        #     self.logger = mlog.initLogger(__name__)
+        # else:
+        #     self.logger = logger
+
+        # self.inputs = inputs
+        # self.outputs = {}
+
+
+
+
+    def analyze(self):
+        self.logger.info(f'running design analysis for {self.object.name}...')
+
+        analysis = self.settings['analysis']
+
+        if platform.system() == 'Windows':
+            ppcmd = self.settings['prevabs_cmd_win']
+        elif platform.system() == 'Linux':
+            ppcmd = self.settings['prevabs_cmd_linux']
+
+        solver = self.settings['solver']
+        integrated = False
+        if 'integrated' in self.settings.keys():
+            integrated = self.settings['integrated']
+        timeout = 30
+        if 'timeout' in self.settings.keys():
+            timeout = self.settings['timeout']
+        scrnout = False
+        if 'scrnout' in self.settings.keys():
+            scrnout = self.settings['scrnout']
+
+
+        # Pre-process data
+        # ----------------
+        self.preprocess()
+
+        # Substitute parameters
+        # ---------------------
+        if self.object.fn_design_xml == '':
+            self.object.fn_design_xml = self.object.name + '.xml'
+        di.dprepro(
+            template=self.object.fn_design_tmp, output=self.object.fn_design_xml,
+            include=self.inputs
+        )
+
+        # Solve
+        # -----
+        self.object.props = sga.solve(
+            self.object.fn_design_xml, analysis, ppcmd, solver, integrated,
+            timeout=timeout, scrnout=scrnout, logger=self.logger
+        )
+
+        # Extract beam properties
+        # -----------------------
+        self.logger.debug('extracting beam properties...')
+        for n in self.settings['beam_properties']:
+            self.outputs[n] = self.object.props.get(n)
+
+
+        # Post-process data
+        # -----------------
+        self.postprocess()
+
+
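
Usage note: the sketch below shows one way the refactored classes appear intended to fit together for a single cross-sectional analysis. The settings keys follow this patch; the file names, the input parameter, the solver and preVABS command values, and the CrossSection attribute fn_design_tmp are illustrative assumptions, not a confirmed API.

    import msgpi.cross_section as mcs
    import msgpi.design_analysis as mda

    # Cross section to be analyzed (CrossSection takes a name per this patch).
    cs = mcs.CrossSection('cs_demo')
    cs.fn_design_tmp = 'cs_demo.xml.tmp'  # assumed dprepro template attribute

    # Settings read by CrossSectionDesignAnalysis.analyze().
    settings = {
        'analysis': 'h',                    # assumed value (homogenization)
        'prevabs_cmd_win': 'prevabs.exe',   # assumed command names
        'prevabs_cmd_linux': 'prevabs',
        'solver': 'vabs',
        'timeout': 60,
        'scrnout': False,
        'beam_properties': ['EA', 'GJ', 'EI22', 'EI33'],  # names copied into outputs
    }

    csda = mda.CrossSectionDesignAnalysis(
        cs,
        inputs={'spar_thickness': 0.01},    # hypothetical dprepro parameter
        config=settings
    )
    csda.initLogger()   # builds the logger from settings, defaulting to INFO / log.txt
    csda.run()          # preprocess -> analyze -> postprocess
    print(csda.outputs)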