Source code for viiapackage.results.viia_pushover_curve

### ===================================================================================================================
###   Push-over curves for result handling
### ===================================================================================================================
# Copyright ©VIIA 2024

### ===================================================================================================================
###   1. Import modules
### ===================================================================================================================

# General imports
from __future__ import annotations
from typing import TYPE_CHECKING, Optional, List, Union, Iterable
from collections import namedtuple
from pathlib import Path

# References for functions and classes in the rhdhv_fem package
from rhdhv_fem.fem_config import Config
from rhdhv_fem.fem_math import fem_area, fem_greater, fem_smaller
from rhdhv_fem.fem_tools import fem_make_list_of_text, fem_read_file
from rhdhv_fem.mesh import MeshNode

# References for functions and classes in the viiaPackage
if TYPE_CHECKING:
    from viiapackage.viiaStatus import ViiaProject
from viiapackage.viiaGeneral import viia_find_closest_mesh_node
from viiapackage.analyses.nlpo_helper_functions import viia_center_of_mass_nlpo
from viiapackage.results.result_functions.viia_results_pushover import _viia_store_demand_curves

# Import module numpy
# This module is used for initialising and using methods of the PushoverCurve class
import numpy as np

# Import module matplotlib
import matplotlib
# Switch to a non-interactive backend to properly import matplotlib.pyplot
matplotlib.use(Config.MPL_NONINTERACTIVE(notify=False))
import matplotlib.pyplot as plt


### ===================================================================================================================
###   2. Class PushoverCurve
### ===================================================================================================================

class PushoverCurve:
    """ This is the class used for the result handling of push-over curves. It processes the raw data for a system
    of which the displacement and base shear are read from the .tb files. Methods are provided to manipulate the
    data based on engineering judgement, in line with the UPR. Examples are the normalisation from MDOF to SDOF,
    cutting off the push-over curve at a desired step and the bilinearisation of the push-over curve."""

    def __init__(
            self, project: ViiaProject, displacement: Union[List, np.array], base_shear: Union[List, np.array],
            eff_mass: Optional[float] = None, seismic_mass: Optional[float] = None, direction: str = 'Unknown',
            meshnode: Union[MeshNode, List[float], int, str] = None, level: str = None):
        """
        Input:
            - project (obj): VIIA project object containing collections of fem objects and project variables.
            - displacement (list of float or np array): List with the displacements of a single mesh-node in [m],
              steps should correspond to the base_shear input.
            - base_shear (list of float or np array): List with the base-shear in [N]. The steps should correspond
              to the displacement.
            - eff_mass (float): The effective mass of the system in [kg], as defined in protocol 4.9.1.2.
              Default value None.
            - seismic_mass (float): The seismic mass of the given system in [kg].
            - direction (str): Specify the direction taken for the push-over curve. This value is only used for
              providing the title when plotting graphs. Default set to 'Unknown'. Value can be 'X_pos', 'X_neg',
              'Y_pos' or 'Y_neg'.
            - meshnode (obj, list of float, int or str): Mesh-node of which the displacements are provided. By
              default the argument is set to None. When a list of coordinates is provided, a mesh-node is created
              for it.
            - level (str): Name of the level (storey) to which the push-over curve belongs. Default value None.
        """
        # Set the project
        self.project = project

        # Set the raw data for the push-over curve and convert to positive if required
        self.displacement = np.abs(np.array(displacement)).flatten()
        self.base_shear = np.abs(np.array(base_shear)).flatten()
        self.seismic_mass = seismic_mass
        self.eff_mass = eff_mass  # The effective mass defined in protocol 4.9.1.2
        self.level = level
        self.failure_mechanism = None
        self.diana_coord = None
        self.cut_off_step = None

        # Set general information of the push-over curve
        self.direction = direction
        if isinstance(meshnode, list):
            meshnode = project.create_meshnode(meshnode)
        self.meshnode = meshnode

    @property
    def cut_off_step(self):
        return self.__cut_off_step

    @cut_off_step.setter
    def cut_off_step(self, new_steps):
        if new_steps is not None:
            try:
                self.__cut_off_step = int(new_steps)
                self.displacement = self.displacement[:self.cut_off_step]
                self.base_shear = self.base_shear[:self.cut_off_step]
                self.project.write_log(f'The curve has been cut off at step {new_steps}.')
            except ValueError:
                self.__cut_off_step = None
        else:
            self.__cut_off_step = None

    @property
    def eff_mass(self):
        return self.__eff_mass

    @eff_mass.setter
    def eff_mass(self, effective_mass):
        """ The effective mass defined in protocol 4.9.1.2."""
        if effective_mass is not None and isinstance(effective_mass, float):
            self.__eff_mass = effective_mass
        else:
            self.__eff_mass = None

    def __repr__(self):
        if self.cut_off_step:
            if not self.meshnode:
                return f"RHDHV-PushoverCurve: {self.direction} direction, all nodes are selected without a " \
                       f"specification of mesh node."
            else:
                return f"RHDHV-PushoverCurve: {self.direction} direction for coordinate {self.meshnode} " \
                       f"and cut off step: {self.cut_off_step}."
        else:
            if not self.meshnode:
                return f"RHDHV-PushoverCurve: {self.direction} direction, all nodes are selected without a " \
                       f"specification of mesh node."
            else:
                return f"RHDHV-PushoverCurve: {self.direction} direction for coordinate {self.meshnode}."

    @property
    def graph_coordinates(self):
        """ Property of the 'PushoverCurve' class, designed to return the updated data [displacement, base shear]
        cut off at a specific step. Subsequent methods (e.g. bilinearisation) will use the updated array.
        The graph_coordinates is set up as a matrix, formatted:

            step_1       [[disp1, base shear1],
            step_2        [disp2, base shear2],
            ...
            step_cutoff   [dispn, base shearn]]

        Input:
            - No input required.

        Return:
            - Returns the updated data matrix.
        """
        if self.cut_off_step is None:
            # Cut off the redundant entries to make the arrays the same length and zip them into the matrix
            if len(self.displacement) != len(self.base_shear):
                idx = min(len(self.displacement), len(self.base_shear))
                graph_coordinates = np.stack((self.displacement[:idx], self.base_shear[:idx]), axis=1)
                self.project.write_log(
                    f"WARNING: The data has been cut to step: {idx}, because of the inconsistency.")
            else:
                graph_coordinates = np.stack((self.displacement, self.base_shear), axis=1)
        # Cut off until the step
        else:
            graph_coordinates = np.stack(
                (self.displacement[:self.cut_off_step], self.base_shear[:self.cut_off_step]), axis=1)
        return graph_coordinates
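
    # Illustrative usage sketch of the class above (not executed on import; all numbers are made up and an
    # initialised ViiaProject instance `project` is assumed). It shows how a curve could be constructed, truncated
    # and inspected:
    #
    #   curve = PushoverCurve(
    #       project=project, displacement=[0.0, 0.004, 0.009, 0.015], base_shear=[0.0, 4.0e4, 7.5e4, 9.0e4],
    #       direction='X_pos', seismic_mass=1.2e5)
    #   curve.cut_off_step = 3           # keeps only the first three steps of both arrays
    #   data = curve.graph_coordinates   # 2D array with displacement and base shear per step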

    def acceleration_spectrum(self, effective_mass: float = None):
        """ Method of the 'PushoverCurve' class to normalise the base shear to the acceleration spectrum. It is
        updated in accordance with the graph_coordinates array.

        Input:
            - effective_mass (float): Value for the effective mass in [kg] for which the acceleration spectrum is
              created. Only used when the effective mass of the curve itself has not been set.

        Return:
            - Acceleration spectrum.
        """
        # Use the effective mass stored on the curve when available, otherwise the provided value
        if self.eff_mass is not None:
            return self.graph_coordinates[:, 1] / (self.eff_mass * self.project.gravitational_acceleration)
        else:
            return self.graph_coordinates[:, 1] / (effective_mass * self.project.gravitational_acceleration)
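
    # For context (made-up numbers): a base shear of 250 kN on a system with an effective mass of 100e3 kg
    # corresponds to a spectral acceleration of
    #
    #   250.0e3 / (100.0e3 * 9.81)   # ≈ 0.25 g
    #
    # which is exactly the normalisation performed above.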

    @staticmethod
    def plot_curve(
            project: ViiaProject, curve_type: str = 'PO_curve', input_curve=None, bilinear_data=None,
            plot_all: bool = False, effective_mass=None, ADRS: bool = False, **kwargs):
        """ Method of the 'PushoverCurve' class, to plot the push-over curve in a single graph. The bilinear curve
        is optional.

        Input:
            - project (obj): VIIA project object containing collections of fem objects and project variables.
            - curve_type (str): Option of 'PO_curve', 'Bilinearize', 'CS' or 'Updated ADRS'.
            - input_curve (obj or list of obj): PushoverCurve instance, or list of instances, to be plotted.
            - bilinear_data (list or np.array): 1-dimensional bilinear data to be plotted.
            - plot_all (bool): Option to plot the bilinear curve together with the capacity curve in one graph.
            - effective_mass (float): Value for the effective mass in [kg], used for the acceleration spectrum.
            - ADRS (bool): Option to plot the elastic ADRS and 20% damped ADRS in the same graph.

        Return:
            - Plot of the raw data push-over curve.
        """
        # Create the graph (close existing previous ones)
        plt.close()
        plt.figure(figsize=(15, 5))

        # Apply VIIA graph style
        style_file = project.viia_settings.project_specific_package_location / 'viiaGraph.mplstyle'
        plt.style.use(style_file.as_posix())
        ax1 = plt.subplot2grid((1, 1), (0, 0))

        # Plot the 5% and 20% damping figure
        if ADRS:
            # Convert metres into millimetres
            x_el = project.project_specific['ADRS']['Elastic']['displacement'] * 1000
            y_el = project.project_specific['ADRS']['Elastic']['acceleration']
            x_in = project.project_specific['ADRS']['Inelastic 20% damped']['displacement'] * 1000
            y_in = project.project_specific['ADRS']['Inelastic 20% damped']['acceleration']
            ax1.plot(x_el, y_el, label='Elastic ADRS (Damping 5%)', linestyle='dashed', linewidth=1.7, color='purple')
            ax1.plot(
                x_in, y_in, label='Inelastic ADRS (Damping 20%)', linestyle='dashed', linewidth=1.7, color='salmon')

        # Plot push-over curves
        if curve_type == 'PO_curve':
            plot_info = {'title': 'Push Over curves', 'x_label': 'Displacement (mm)', 'y_label': 'Base shear (kN)',
                         'fig_name': 'push_over_curve'}
            if isinstance(input_curve, PushoverCurve):
                x = input_curve.graph_coordinates[:, 0]
                y = input_curve.graph_coordinates[:, 1] / 1000
                ax1.plot(x, y, label=f'Meshnode: {input_curve.meshnode} at level {input_curve.level}')
                plot_info['fig_name'] += f'{input_curve.meshnode}'
            elif isinstance(input_curve, list):
                for po_curve in input_curve:
                    x = po_curve.graph_coordinates[:, 0]
                    y = po_curve.graph_coordinates[:, 1] / 1000
                    ax1.plot(x, y, label=f'Meshnode: {po_curve.meshnode} at level {po_curve.level}')
            else:
                project.write_log(
                    'ERROR: The po_curves should be either a PushoverCurve or a list of PushoverCurve instances')
                return
        elif curve_type == 'Bilinearize':
            plot_info = {'title': 'Bilinearized-curve', 'x_label': 'Displacement (mm)',
                         'y_label': 'Acceleration (%g)', 'fig_name': 'bilinearized_curve'}
            x = input_curve.graph_coordinates[:, 0]
            # Plot both CS and bilinearised curve
            if plot_all:
                y = input_curve.acceleration_spectrum(effective_mass=effective_mass)
                y1 = bilinear_data
                # Make sure that the index is the minimum one from the bilinear curve array
                ax1.plot(x[:y1.shape[0]], y1, label='Bilinear_curve')
                ax1.plot(x, y, label='System_capacity')
            else:
                y = bilinear_data
                # Make sure the dimensions match
                ax1.plot(x[:y.shape[0]], y, label='Bilinear_curve')
        elif curve_type == 'CS':
            plot_info = {'title': 'Capacity Spectrum', 'x_label': 'Displacement (mm)',
                         'y_label': 'Acceleration (%g)', 'fig_name': 'capacity_spectrum'}
            x = input_curve.graph_coordinates[:, 0]
            y = input_curve.acceleration_spectrum()
            ax1.plot(x, y, label='capacity_spectrum')
        elif curve_type == 'Updated ADRS':
            plot_info = {'title': 'Capacity Spectrum', 'x_label': 'Displacement (mm)',
                         'y_label': 'Acceleration (%g)', 'fig_name': 'capacity_spectrum'}
            x = kwargs['x']
            y = kwargs['elastic_ADRS']
            y2 = kwargs['inelastic_ADRS']
            y3 = kwargs['bilinear']
            ax1.plot(x, y, label='elastic_ADRS')
            ax1.plot(x, y2, label='inelastic_ADRS')
            ax1.plot(x, y3, label='bilinear')
        else:
            project.write_log(f"ERROR: The curve type '{curve_type}' is not recognised, please check the docstring.")
            return

        # Plotting adjustment
        ax1.set_ylim([0, 1.3 * max(y)])
        ax1.set_xlim([0, 1.3 * max(x)])
        ax1.set_title(plot_info['title'])
        ax1.set_xlabel(plot_info['x_label'])
        ax1.set_ylabel(plot_info['y_label'])
        ax1.legend()

        # Save the graph to the same folder
        filename = project.workfolder_location / f"{plot_info['fig_name']}.png"
        plt.savefig(filename.as_posix())
        project.write_log(f"The figure {plot_info['fig_name']} has been saved to {project.workfolder_location}")

    @staticmethod
    def find_intersections_index(
            project: ViiaProject, x: Union[Iterable, float, int], y: Union[Iterable, float, int],
            warnings: bool = True):
        """ Method of PushoverCurve to find the intersections of two iterables x and y. An index is returned if
        there is a sign change of x - y between this index and index + 1.
        https://stackoverflow.com/questions/28766692/intersection-of-two-graphs-in-python-find-the-x-value

        >>> x = [1,2,3]
        >>> y = [2,0,6]
        >>> PushoverCurve.find_intersections_index(project,x,y)
        array([0, 1], dtype=int64)

        Input:
            - x: array or list.
            - y: array or list or scalar.
            - warnings (bool): If True, a warning is logged when no intersection is found.

        Output:
            - Returns a numpy array which contains all the intersection indices.
        """
        try:
            x, y = (np.array(x).flatten(), np.array(y).flatten())
            if np.isnan(x).any() or np.isnan(y).any():
                x_idx = np.argwhere(np.isnan(x))
                y_idx = np.argwhere(np.isnan(y))
                if x_idx.size != 0 and y_idx.size != 0:
                    min_nan_idx = min(x_idx[0], y_idx[0])
                else:
                    min_nan_idx = int(next(i for i in (x_idx, y_idx) if i.size != 0)[0])
                x, y = (x[:min_nan_idx], y[:min_nan_idx])
            idx = np.argwhere(np.diff(np.sign(x - y))).flatten()
            if idx.size == 0:
                if warnings:
                    project.write_log(
                        f'There is no intersection between curve {x[:5]} and curve {y[:5]}', True)
                return None
            return idx
        except Exception as e:
            project.write_log(
                f"WARNING: {e} cannot find the intersection between line {x[:5]} and value {y[:5]} please check "
                f"the type of {x[:5]} and {y[:5]}.")
            return None
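
    # The intersection search above relies on a plain NumPy idiom: wherever the difference of the two curves changes
    # sign between consecutive samples, the curves cross. A minimal standalone sketch of that idiom (illustrative,
    # with made-up values, not executed on import):
    #
    #   import numpy as np
    #   x = np.array([1.0, 2.0, 3.0])
    #   y = np.array([2.0, 0.0, 6.0])
    #   crossings = np.argwhere(np.diff(np.sign(x - y))).flatten()   # -> array([0, 1])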

    def find_cut_off_step(self, displacement_cut_off: float = None, base_shear_cut_off: float = None):
        """ Method of 'PushoverCurve' class to find the closest step given a displacement/base shear value or given
        both.

        Input:
            - displacement_cut_off (float): Magnitude of displacement used to find the corresponding step.
            - base_shear_cut_off (float): Magnitude of the base shear used to find the corresponding step.

        Return:
            - The nearest step for the input.
        """
        def _find_nearest_idx(x, value):
            if isinstance(value, float) or isinstance(value, int):
                idx = np.argmin(np.abs(np.array(x) - value))
            else:
                residual = np.sqrt((np.array(x)[:, 0] - value[0]) ** 2 + (np.array(x)[:, 1] - value[1]) ** 2)
                idx = np.argmin(residual)
            return idx + 1

        if displacement_cut_off and not base_shear_cut_off:
            return _find_nearest_idx(self.displacement, displacement_cut_off)
        elif not displacement_cut_off and base_shear_cut_off:
            return _find_nearest_idx(self.base_shear, base_shear_cut_off)
        elif displacement_cut_off and base_shear_cut_off:
            return _find_nearest_idx(self.graph_coordinates, [displacement_cut_off, base_shear_cut_off])

    def find_elastic_mode(self, return_idx: Optional[bool] = False):
        """ Method of 'PushoverCurve' class to find the displacement value corresponding to 60% of the maximum base
        shear in the elastic range.

        Input:
            - return_idx (bool): Select to return both the index and value of the displacement, by default set to
              False, only returning the value for the displacement.

        Output:
            - Displacement at 60% of the maximum base shear and, optionally, its index.
        """
        base_shear_60p = 0.6 * self.base_shear.max()
        try:
            # There must be at least 1 intersection
            disp_60pmax_idx = PushoverCurve.find_intersections_index(
                project=self.project, x=self.base_shear, y=base_shear_60p)[0]
        except (TypeError, IndexError):
            self.project.write_log(
                'Cannot find the intersection between maximum base shear and 60% of max base shear, please check '
                'the consistency of data')
            return
        if not return_idx:
            return self.displacement[disp_60pmax_idx]
        else:
            return disp_60pmax_idx, self.displacement[disp_60pmax_idx]

    def find_k_init(self):
        """ Method of 'PushoverCurve' class to find the initial stiffness of a system using 60% of the maximum base
        shear.

        Input:
            - No input required.

        Output:
            - Returns the value of the initial stiffness in [N/m].
        """
        idx, disp60 = self.find_elastic_mode(return_idx=True)
        base_shear60 = self.base_shear[idx]
        k_init = base_shear60 / disp60
        return k_init
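
    # Worked illustration of the initial-stiffness estimate above (made-up numbers): with a maximum base shear of
    # 100 kN, the elastic point is taken where the curve crosses 60 kN. If that happens at a displacement of 5 mm,
    # the initial stiffness follows directly as
    #
    #   k_init = 60.0e3 / 0.005   # = 1.2e7 N/m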

    @staticmethod
    def normalize_to_sdof(project, *args):
        """ Method of 'PushoverCurve' class to normalise the input systems given their seismic mass. This step
        converts the push-over curve raw data into capacity curve data by dividing by the effective mass. E.g. for
        a two storey building, this method takes into account the displacement and seismic mass of each floor,
        converting the MDOF system into an SDOF system to which the effective mass is assigned.

        Input:
            - Instances of the PushoverCurve class. The seismic mass for each system needs to be provided.

        Return:
            - Named tuple 'SystemPushOver' with the effective mass, effective height, transformation factor (gamma),
              the push-over curves, the normalised base shear and roof displacement, and the roof curve.
        """
        # Check the input push-over curves
        for curve_data in args:
            if not isinstance(curve_data, PushoverCurve):
                project.write_log('The input should be the PushoverCurve class')
                return
            if not hasattr(curve_data, 'seismic_mass'):
                project.write_log(f'The {curve_data} should have the seismic mass')
                return

        # Sort curves by their height
        lst_po_curve = sorted(list(args), key=lambda x: x.level[-1])

        # Construct the seismic mass matrix and the mode shape vector
        mass_matrix = np.zeros((len(args), len(args)))
        fi_matrix = np.zeros(len(args))

        # Normalise
        for idx, po_data in enumerate(args):
            mass_matrix[idx, idx] = po_data.seismic_mass
            fi_matrix[idx] = po_data.find_elastic_mode()

        # Matrix normalisation
        fi_max = max(fi_matrix)
        fi_matrix = fi_matrix / fi_max
        eff_mass = float(sum(np.matmul(mass_matrix, fi_matrix)))

        # Calculate gamma
        if len(args) > 2:
            gamma = eff_mass / sum(np.matmul(fi_matrix ** 2, mass_matrix))
        else:
            gamma = 1

        # Bookkeeping of the system data
        SystemPushOver = namedtuple(
            'SystemPushOver', ['eff_mass', 'eff_height', 'gamma', 'po_curves', 'v_star_base', 'u_star_roof', 'roof'])

        # Find the roof
        roof = sorted(args, key=lambda x: x.level[-1], reverse=True)[0]

        # Store the system data
        # Consistent with Protocol 4.9.1.2 (NPR G.4.2)
        # The v_star_base and u_star_roof are already divided by gamma
        po_system = SystemPushOver(
            gamma=gamma, po_curves=lst_po_curve, v_star_base=roof.base_shear / gamma,
            u_star_roof=roof.displacement / gamma, eff_mass=eff_mass, eff_height=roof.diana_coord[-1] / gamma,
            roof=roof)
        return po_system
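
    # Worked illustration of the MDOF-to-SDOF transformation above (made-up numbers, not executed on import). For a
    # three-storey system with storey masses of [60e3, 55e3, 40e3] kg and a mode shape already normalised by its
    # maximum, phi = [0.4, 0.8, 1.0], the quantities computed in normalize_to_sdof become:
    #
    #   import numpy as np
    #   m = np.diag([60.0e3, 55.0e3, 40.0e3])
    #   phi = np.array([0.4, 0.8, 1.0])
    #   eff_mass = float(sum(np.matmul(m, phi)))          # = 108000.0 kg
    #   gamma = eff_mass / sum(np.matmul(phi ** 2, m))    # = 108000 / (9600 + 35200 + 40000) ≈ 1.27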

    def bilinearise(
            self, drift_limit: Union[float, int], effective_mass: float = None, output_params: bool = False,
            plot: bool = True):
        """ Method of 'PushoverCurve' class to bilinearise a given system. The data of the bilinear curve is
        returned. The parameters for the curve can optionally be returned as well.

        Input:
            - drift_limit (float): The drift limit of the system in [m].
            - effective_mass (float): Value for the effective mass in [kg].
            - output_params (bool): Option to output all calculated parameters used for generating the bilinearised
              data of the PushoverCurve.
            - plot (bool): Option to plot the bilinearised curve. Default value True.

        Return:
            - The bilinearised data is returned. If requested a dictionary is returned with the following parameters
              (keys) and the calculated value for it: 'K_init', 'plateau', 'u_cap_sys', 'u_cap_bilin' and 'energy'.
        """
        if self.eff_mass is not None:
            effective_mass = self.eff_mass

        # Use the updated properties after cutting off
        disp = self.graph_coordinates[:, 0]
        k_init = self.find_k_init()
        spectr_a_80pmax = 0.8 * max(self.acceleration_spectrum(effective_mass))  # [%g]
        spectr_a_50pmax = 0.5 * max(self.acceleration_spectrum(effective_mass))

        # Only one intersection at elastic phase, take the last value of po data
        # Get the u_capacity_bilinear using 80% maximum base shear (related to plateau)
        # Find the intersection of 80% acceleration index
        intersections_idx_bi = PushoverCurve.find_intersections_index(
            project=self.project, x=self.acceleration_spectrum(effective_mass), y=spectr_a_80pmax)
        if intersections_idx_bi is None:
            self.project.write_log('Cannot find the intersection between 80% maximum acceleration '
                                   'and max acceleration, please check the data')
            return
        # No intersection after ascending part
        elif len(intersections_idx_bi) <= 1:
            disp_cap_bilin = disp[-1]
            # Check if the last value passes the drift limit
            if fem_smaller(disp_cap_bilin, drift_limit):
                disp_cap_bilin_idx = len(disp)
            else:
                self.project.write_log(
                    f"WARNING: the deformation capacity has been bounded to the drift limit {drift_limit}", True)
                disp_cap_bilin = drift_limit
                # Find the steps up to drift limit
                disp_cap_bilin_idx = PushoverCurve.find_intersections_index(self.project, disp_cap_bilin, disp)[0]
        # Two or more intersections, take the one right after the index of the maximum value
        else:
            # Avoid the oscillation of the curve if 80% of the maximum is found before reaching the maximum value
            if np.any(not fem_smaller(intersections_idx_bi, self.acceleration_spectrum(effective_mass).argmax())):
                # Take the first value after the peak
                disp_cap_bilin_idx = \
                    intersections_idx_bi[
                        intersections_idx_bi > self.acceleration_spectrum(effective_mass).argmax()][0]
                disp_cap_bilin = disp[disp_cap_bilin_idx]
            else:
                disp_cap_bilin = disp[-1]  # [m]
            # Check with drift limit
            if fem_smaller(disp_cap_bilin, drift_limit):
                if np.any(
                        not fem_smaller(intersections_idx_bi, self.acceleration_spectrum(effective_mass).argmax())):
                    disp_cap_bilin_idx = intersections_idx_bi[-1]  # [m]
                else:
                    disp_cap_bilin_idx = len(disp)
            # Take the drift limit
            else:
                self.project.write_log(
                    f"WARNING: the deformation capacity has been bounded to the drift limit {drift_limit}", True)
                disp_cap_bilin = drift_limit
                # Find the steps up to drift limit
                disp_cap_bilin_idx = PushoverCurve.find_intersections_index(self.project, disp_cap_bilin, disp)[0]

        # Only one intersection at elastic phase, take the last value of po data
        # Get the u_capacity_sys (final step) using 50% maximum base shear
        intersections_idx_sys = PushoverCurve.find_intersections_index(
            self.project, self.acceleration_spectrum(effective_mass), spectr_a_50pmax)
        if len(intersections_idx_sys) <= 1:
            disp_cap_sys = disp[-1]
            # Check if the last value passes the drift limit
            if fem_smaller(disp_cap_sys, drift_limit):
                disp_cap_sys_idx = len(disp)
            else:
                self.project.write_log(
                    f"WARNING: the deformation capacity has been bounded to the drift limit {drift_limit}", True)
                disp_cap_sys = drift_limit
                # Find the steps up to drift limit
                disp_cap_sys_idx = PushoverCurve.find_intersections_index(self.project, disp_cap_sys, disp)[0]
        # Two or more intersections, take the last one
        else:
            disp_cap_sys = disp[intersections_idx_sys[-1]]  # [m]
            if fem_smaller(disp_cap_sys, drift_limit):
                disp_cap_sys_idx = intersections_idx_sys[-1]
            else:
                self.project.write_log(
                    f"WARNING: the deformation capacity has been bounded to the drift limit {drift_limit}", True)
                disp_cap_sys = drift_limit
                # Find the steps up to drift limit
                disp_cap_sys_idx = PushoverCurve.find_intersections_index(self.project, disp_cap_sys, disp)[0]

        # Integration
        from scipy import integrate
        area = integrate.cumtrapz(
            self.acceleration_spectrum(effective_mass)[0:disp_cap_bilin_idx], disp[0:disp_cap_bilin_idx],
            initial=0)[-1]
        energy = area * (effective_mass * self.project.gravitational_acceleration)

        # Check the square root
        if fem_smaller((disp_cap_bilin * k_init) ** 2 - 2 * energy * k_init, 0):
            self.project.write_log('Error: Please check the K_init and the effective mass and the unit '
                                   'of acceleration or displacement.')
            return
        plateau = (disp_cap_bilin * k_init - np.sqrt((disp_cap_bilin * k_init) ** 2 - 2 * energy * k_init)) / \
                  (effective_mass * self.project.gravitational_acceleration)  # [N/kg] converted to [%g]

        # Check the plateau value
        if fem_smaller(plateau, 0.1) or fem_greater(plateau, 5):
            self.project.write_log(
                f"WARNING: the value of 'spectr_a_y' is expected to be in the range of 0.1-5 for {self}. "
                f"Please check input data.", True)

        # First segment of the curve
        bi_spectr_a_y = disp[:disp_cap_sys_idx] * k_init / \
            (effective_mass * self.project.gravitational_acceleration)

        # Replace the rest with the plateau, second segment
        intersection_idx_plateau = PushoverCurve.find_intersections_index(self.project, bi_spectr_a_y, plateau)[0]
        bi_spectr_a_y[intersection_idx_plateau:] = plateau

        # Yielding ductility
        u_y_sys = disp[intersection_idx_plateau]

        if plot:
            self.plot_curve(
                self.project, bilinear=True, input_curve=self, curve_type='Bilinearize',
                bilinear_data=bi_spectr_a_y, effective_mass=effective_mass, plot_all=True, ADRS=True)

        # Output bilinear curve parameters
        if not output_params:
            return bi_spectr_a_y
        else:
            return (bi_spectr_a_y,
                    {'K_init': k_init, 'plateau': plateau, 'u_cap_sys': disp_cap_sys, 'u_cap_bilin': disp_cap_bilin,
                     'u_y_sys': u_y_sys, 'energy': energy, 'area': area,
                     'bi_2Darray': np.stack((disp[:disp_cap_sys_idx], bi_spectr_a_y), axis=-1)})
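
    # The plateau value computed in bilinearise() follows from an equal-energy condition: the bilinear curve with
    # initial stiffness k_init and yield force F_y must enclose the same area (energy) as the capacity curve up to
    # u_cap_bilin. Writing that area as E = F_y * u_cap_bilin - F_y**2 / (2 * k_init) and solving the quadratic for
    # F_y gives the lower root F_y = k_init * u_cap_bilin - sqrt((k_init * u_cap_bilin)**2 - 2 * E * k_init), which
    # is then divided by (effective_mass * g) to express the plateau in [%g]. A quick numeric check with made-up
    # values (not executed on import):
    #
    #   import numpy as np
    #   k_init, u_cap, energy = 1.2e7, 0.05, 1.0e4     # [N/m], [m], [J]
    #   f_y = k_init * u_cap - np.sqrt((k_init * u_cap) ** 2 - 2 * energy * k_init)   # ≈ 2.54e5 N
    #   # check: f_y * u_cap - f_y ** 2 / (2 * k_init) reproduces the energy (up to rounding)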


def _direction(direction, project):
    # Select the required direction, the number defines the result item in the file, mapping the direction to a
    # number
    if direction.lower() == 'x':
        direction = 1
    elif direction.lower() == 'y':
        direction = 2
    elif direction.lower() == 'z':
        direction = 3
    else:
        project.write_log(
            f"ERROR: The direction argument for viia_read_tb_displacements_nlpo is not recognised: {direction}. "
            f"Only x, y or z is allowed as input.")
        direction = None
    return direction


### ===================================================================================================================
###   3. Functions for use of pushover curve
### ===================================================================================================================

def viia_read_tb_displacements_nlpo(
        project: ViiaProject, file: Union[Path, str], direction: str, mesh_nodes: List[MeshNode] = None):
    """
    Function to read the DIANA result file in tabulated format. The file should contain the displacements of the
    point of which the push-over curve is requested.

    Input:
        - project (obj): VIIA project object containing collections of fem objects and project variables.
        - file (str or Path): Path reference of the result file of DIANA (.tb-file). Alternative is file as string.
        - direction (str): Direction for the results to be collected. Select from x or y.
        - mesh_nodes (list of obj.ref.): List of MeshNode objects of which the results are returned. By default the
          argument is set to None, extracting all nodes in the file. When a node is requested that is not in the
          file, it will be omitted in the dictionary that is returned.

    Output:
        - Returns a dictionary with the mesh-nodes as key and a list of displacements as value (mm). The information
          on steps is not collected.
    """
    # Read the tb-file
    lines = fem_read_file(file)

    # Check if displacements are provided
    for line in lines:
        if 'Result' in line:
            if 'DISPLA TOTAL TRANSL' not in line:
                project.write_log(
                    f"ERROR: The result items should contain total translational displacements. "
                    f"See file {file.as_posix()}.", True)
                return None
            else:
                break

    # Select the required direction, the number defines the result item in the file
    direction = _direction(direction, project)
    if not direction:
        return

    # Extract the data from the file
    results = dict()
    for line in lines:
        line_list = [item for item in line.replace('\n', '').split(' ') if item != '']
        node_nr = None
        if len(line_list) == 4:
            try:
                node_nr = int(line_list[0])
            except ValueError:
                pass
        if node_nr:
            mesh_node = project.find_mesh_node(id=node_nr)
            if node_nr not in results:
                results[mesh_node] = [abs(float(line_list[direction])) * 1000]  # Convert to mm
            else:
                results[mesh_node].append(abs(float(line_list[direction])) * 1000)  # Convert to mm

    # Drop the first two load steps and insert a zero displacement as starting point of the curve
    for key in results:
        results[key] = results[key][2:]
    for key in results:
        results[key].insert(0, 0)

    # Select the output that is required
    if mesh_nodes:
        results_filtered = {}
        for mesh_node in mesh_nodes:
            if mesh_node in results:
                results_filtered[mesh_node] = results[mesh_node]
        return results_filtered
    else:
        return results

def read_one_output_item(output_item: str, lines: list, direction: int):
    """
    An output file may contain multiple output items. This function reads only the requested output_item and returns
    the summed forces per step.

    Input:
        - output_item (str): Name of the output item which needs to be read.
        - lines (list): A list containing all the lines from the base shear .tb file.
        - direction (int): An integer corresponding to the X (=1) or Y (=2) direction.

    Output:
        - A dictionary with key:value pairs of step:total force for the given output_item is returned.
    """
    output_types = []
    for line in lines:
        if 'Results' in line:
            if line.split(' ')[1] in output_types:
                pass
            else:
                output_types.append(line.split(' ')[1])

    # Storing all output items in a new list
    output_lines = []
    for line in lines:
        if line.strip() and 'Page' not in line:
            if 'Analysis type' in line:
                output_lines.append([])
            line = line.replace("\n", "")
            output_lines[-1].append(line)

    # Creating a new list to store 1 for the output of interest, 0 for all other outputs
    list_on = []
    for i in range(len(output_lines)):
        for items in output_lines[i]:
            if 'Result' in items:
                if output_item not in items:
                    list_on.append(0)
                else:
                    list_on.append(1)

    # Reading items from the lists corresponding to output_item (input) only
    results = dict()
    for i in range(len(output_lines)):
        if list_on[i] == 1:
            for item in output_lines[i]:
                line_list = fem_make_list_of_text(item)
                if 'Step nr.' in item:
                    step = int(line_list[-1])
                    results[step] = 0.0
                force = None
                # In some of the .tb files there are 5 columns instead of 4. For example, in 'STRESS TOTAL TRACTI'
                # there are 5 columns (Element Nr., Integration Point, STSx, STSy and STSz) as compared to 4 columns
                # in case of 'FORCE REACTI TRANSL' (Node number, FBx, FBy, FBz). To maintain generality, the 'if'
                # statement below also works when the line list has 5 columns.
                if len(line_list) == 4 or len(line_list) == 5:
                    try:
                        if len(line_list) == 5:
                            axis = direction
                            axis = axis + 1
                            force = float(line_list[axis])
                        elif len(line_list) == 4:
                            force = float(line_list[direction])
                    except ValueError:
                        pass
                if force and step:
                    results[step] = results[step] + force
    return results

def viia_read_tb_base_shear_nlpo(
        project: ViiaProject, file_dat: Union[Path, str], file_tb: Union[Path, str], direction: str = None,
        traction: Optional[bool] = False):
    """
    Function to read the DIANA result file in tabulated format. The file should contain the reaction forces or
    boundary interface tractions from which the base shear for the push-over curve is determined.

    Input:
        - project (obj): VIIA project object containing collections of fem objects and project variables.
        - file_dat (str or Path): Path reference of the model file of DIANA (.dat-file). Alternative is file as
          string.
        - file_tb (str or Path): Path reference of the result file of DIANA named OUTPUT_NLPO_BASE_SHEAR_TB.
          Alternative is file as string.
        - direction (str): Direction for the results to be collected. Select from x, y or z-direction.
        - traction (bool): Default False. Set to True only when the object has a flexbase and the output block
          contains boundary interface tractions.

    Output:
        - Returns a dictionary with the load step as key and the base shear (N) of that step as value.
    """
    # Read the dat-file
    dat_file = project.from_diana(file_dat)

    # Check if shallow foundation is present and if piles are present
    piles = False
    fos = False
    flex_fos = False
    only_pile_fdn_flex = False
    only_pile_fixed = False
    for key in dat_file.elementsets:
        if 'FUNDERINGSSTROKEN' in key:
            fos = True
        if 'PAAL' in key:
            piles = True
        if 'IF-FOS' in key:
            flex_fos = True
        if 'PAAL' in key:
            # Absence of 'IF-FOS' means that there are no boundary interfaces below the shallow foundation
            if 'IF-FOS' not in key:
                # Presence of 'FCRIT' in the key means that there are springs below the pile foundation and no
                # fixed support
                if 'FCRIT' in key:
                    flex_fos = True
                    only_pile_fdn_flex = True
        if 'PAAL' in key:
            if 'IF-FOS' not in key:
                if 'FCRIT' not in key:
                    only_pile_fixed = True
                    flex_fos = False

    # Read the tb-file
    lines = fem_read_file(file_tb)
    elements = {}
    base_shear_step_dict = dict()

    if traction:
        # If shallow foundation is present
        if fos:
            # If flexible base is present
            if flex_fos:
                # Check if tractions are provided
                for line in lines:
                    if 'Result' in line:
                        if 'STRESS TOTAL TRACTI' not in line:
                            project.write_log(
                                f"ERROR: The result items should contain tractions. "
                                f"See file {file_tb.as_posix()}.", True)
                            return None
                        else:
                            break
                for key in dat_file.elements:
                    polygon = []
                    for i in range(len(dat_file.elements[key]['nodes'])):
                        polygon.append(dat_file.nodes[dat_file.elements[key]['nodes'][i]])
                    if dat_file.elements[key]['type'] in ['Q24IF', 'T18IF']:
                        polygon = polygon[0:int(len(polygon) / 2)]
                    try:
                        area = fem_area(points_list=polygon)
                    except ZeroDivisionError:
                        area = 0.
                    dat_file.elements[key]['area'] = area

                force_bool = False
                current_step = 0
                current_element = None
                base_shear_x_fos = []
                base_shear_y_fos = []
                base_shear_z_fos = []
                force_fos = []
                element_sx = 0
                element_sy = 0
                element_nz = 0
                nr_int_points = 1
                for i, line in enumerate(lines):
                    line = fem_make_list_of_text(line)
                    if 'Step' in line:
                        if not force_bool:
                            force_bool = True
                        else:
                            force_fos.append(current_step)
                            base_shear_x_fos.append(base_shear_x_fos_loadstep)
                            base_shear_y_fos.append(base_shear_y_fos_loadstep)
                            base_shear_z_fos.append(base_shear_z_fos_loadstep)
                        base_shear_x_fos_loadstep = 0
                        base_shear_y_fos_loadstep = 0
                        base_shear_z_fos_loadstep = 0
                        current_step = int(line[2])
                    elif len(line) == 5 and force_bool:
                        try:
                            new_element = int(line[0])
                            if current_element:
                                base_shear_x_fos_loadstep += \
                                    float(element_sx) / nr_int_points * dat_file.elements[current_element]['area']
                                base_shear_y_fos_loadstep += \
                                    float(element_sy) / nr_int_points * dat_file.elements[current_element]['area']
                                base_shear_z_fos_loadstep += \
                                    float(element_nz) / nr_int_points * dat_file.elements[current_element]['area']
                            current_element = new_element
                            element_sx = float(line[2])
                            element_sy = float(line[3])
                            element_nz = float(line[4])
                            nr_int_points = 1
                        except ValueError:
                            pass
                    elif len(line) == 4 and force_bool:
                        try:
                            element_sx += float(line[1])
                            element_sy += float(line[2])
                            element_nz += float(line[3])
                            nr_int_points += 1
                        except ValueError:
                            pass
                    elif i == len(lines) - 1:
                        if current_step not in force_fos:
                            force_fos.append(current_step)
                            base_shear_x_fos.append(base_shear_x_fos_loadstep)
                            base_shear_y_fos.append(base_shear_y_fos_loadstep)
                            base_shear_z_fos.append(base_shear_z_fos_loadstep)

                base_shear_values = []
                base_shear_values.append(0)
                for i in range(2, len(base_shear_x_fos)):
                    if direction == 'X':
                        base_shear_values.append(abs(base_shear_x_fos[i]))
                    else:
                        base_shear_values.append(abs(base_shear_y_fos[i]))

                # Creating a dictionary with key: load step and value: abs(base_shear)
                for i in range(len(base_shear_values)):
                    if i == 0:
                        base_shear_step_dict[i] = base_shear_values[i]
                    else:
                        base_shear_step_dict[i + 2] = base_shear_values[i]
                return base_shear_step_dict
            else:
                # Check if forces are provided
                for line in lines:
                    if 'Result' in line:
                        if 'FORCE REACTI TRANSL' not in line:
                            project.write_log(
                                f"ERROR: The result items should contain translational reaction forces. "
                                f"See file {file_tb.as_posix()}.", True)
                            return None
                        else:
                            break

                force_bool = False
                current_step = 0
                base_shear_x_fos = []
                base_shear_y_fos = []
                base_shear_z_fos = []
                force_fos = []
                for i, line in enumerate(lines):
                    line = fem_make_list_of_text(line)
                    if 'Step' in line:
                        if not force_bool:
                            force_bool = True
                        else:
                            force_fos.append(current_step)
                            base_shear_x_fos.append(base_shear_x_fos_loadstep)
                            base_shear_y_fos.append(base_shear_y_fos_loadstep)
                            base_shear_z_fos.append(base_shear_z_fos_loadstep)
                        base_shear_x_fos_loadstep = 0
                        base_shear_y_fos_loadstep = 0
                        base_shear_z_fos_loadstep = 0
                        current_step = float(line[2])
                    if len(line) == 4 and force_bool:
                        try:
                            int(line[0])
                            base_shear_x_fos_loadstep += float(line[1])
                            base_shear_y_fos_loadstep += float(line[2])
                            base_shear_z_fos_loadstep += float(line[3])
                        except ValueError:
                            pass

                base_shear_values = []
                base_shear_values.append(0)
                for i in range(2, len(base_shear_x_fos)):
                    if direction == 'X':
                        base_shear_values.append(abs(base_shear_x_fos[i]))
                    else:
                        base_shear_values.append(abs(base_shear_y_fos[i]))

                # Creating a dictionary with key: load step and value: base_shear
                for i in range(len(base_shear_values)):
                    if i == 0:
                        base_shear_step_dict[i] = base_shear_values[i]
                    else:
                        base_shear_step_dict[i + 2] = base_shear_values[i]
                return base_shear_step_dict

    # Branch for the case when the user does not provide input for the 'traction' argument
    else:
        # For the case when the foundation has a flexbase and no piles
        if flex_fos:
            if not piles:
                # Select the required direction, the number defines the result item in the file
                direction = _direction(direction, project=project)
                if not direction:
                    return
                # Check if 'FORCE REACTI TRANSL' is present in the .tb file
                for line in lines:
                    if 'Result' in line:
                        if 'FORCE REACTI TRANSL' not in line:
                            project.write_log(
                                f"ERROR: The result items should contain translational reaction forces. "
                                f"See file {file_tb.as_posix()}.", True)
                            return None
                        else:
                            break
                # Extract the data from the file
                results = read_one_output_item(output_item='FORCE REACTI TRANSL', lines=lines, direction=direction)

                base_shear_values = []
                base_shear_values.append(0)
                for i in range(2, len(results)):
                    if direction == 'X':
                        base_shear_values.append(abs(results[i]))
                    else:
                        base_shear_values.append(abs(results[i]))

                # Creating a dictionary with key: load step and value: base_shear
                for i in range(len(base_shear_values)):
                    if i == 0:
                        base_shear_step_dict[i] = base_shear_values[i]
                    else:
                        base_shear_step_dict[i + 2] = base_shear_values[i]
                return base_shear_step_dict

            # For the case when the foundation is pile-only with a flexible boundary interface at the pile bottom
            elif only_pile_fdn_flex:
                direction = _direction(direction, project=project)
                if not direction:
                    return
                # Checking if 'NODFOR TOTAL TRANSL' is present in the base shear tb-file
                for line in lines:
                    if 'Result' in line:
                        if 'NODFOR TOTAL TRANSL' not in line:
                            project.write_log(
                                f"ERROR: The result items should contain NODFOR TOTAL TRANSL. "
                                f"See file {file_tb.as_posix()}.", True)
                            return None
                        else:
                            break
                # Extract the data from the file
                results_pile_only = dict()
                step = None
                for line in lines:
                    line_list = fem_make_list_of_text(line)
                    if 'Step nr.' in line:
                        step = int(line_list[-1])
                        results_pile_only[step] = 0.0
                    force = None
                    if len(line_list) == 4:
                        try:
                            force = float(line_list[direction])
                        except ValueError:
                            pass
                    if force:
                        results_pile_only[step] = results_pile_only[step] + force

                # Adding zero as the first value
                base_shear_values = [0]
                # Ignoring step 1 and step 2 as these are dead load steps, appending load step 3 and onwards to
                # the list 'base_shear_values'
                for i in range(len(results_pile_only)):
                    if i > 2:
                        base_shear_values.append(abs(results_pile_only[i]))

                # Creating a dictionary with key: load step and value: base_shear
                for i in range(len(base_shear_values)):
                    if i == 0:
                        base_shear_step_dict[i] = base_shear_values[i]
                    else:
                        base_shear_step_dict[i + 2] = base_shear_values[i]
                return base_shear_step_dict

        # For the case when the only supports are pile-fixed. A shallow foundation might be present in the object
        # but should not have been supported.
        elif only_pile_fixed:
            file = fem_read_file(file_tb)
            if not file:
                return None
            direction = _direction(direction, project=project)
            if not direction:
                return
            # Storing all the different outputs in 'lines'
            lines = []
            for line in file:
                if line.strip() and 'Page' not in line:
                    if 'Analysis type' in line:
                        lines.append([])
                    line = line.replace("\n", "")
                    lines[-1].append(line)
            del file

            results_pile_fixed_only = dict()
            # Creating a new list 'force_on'; the individual items in the list will take the value 0 if the results
            # in lines[i] do not correspond to 'STRESS TOTAL FORCE', else 1
            force_on = []
            for i in range(len(lines)):
                for item in lines[i]:
                    if 'Result' in item:
                        if 'STRESS TOTAL FORCE' not in item:
                            force_on.append(0)
                        else:
                            force_on.append(1)

            # Reading values from the 'lines' list and storing them in the dictionary 'results_pile_fixed_only'
            for i in range(len(lines)):
                if force_on[i] == 1:
                    for item in lines[i]:
                        line_list = fem_make_list_of_text(item)
                        if 'Step nr.' in item:
                            step = int(line_list[-1])
                            results_pile_fixed_only[step] = 0.0
                        force = None
                        if len(line_list) == 4 or len(line_list) == 5:
                            try:
                                if len(line_list) == 5:
                                    axis = direction
                                    axis = axis + 1
                                    force = float(line_list[axis])
                                elif len(line_list) == 4:
                                    force = float(line_list[direction])
                            except ValueError:
                                pass
                        if force and step:
                            results_pile_fixed_only[step] = results_pile_fixed_only[step] + force

            # Correcting the results - division by 2 is required because the output is given at both top and bottom
            # of the pile and is therefore double the actual shear force
            for i in range(1, len(results_pile_fixed_only)):
                results_pile_fixed_only[i] = abs(results_pile_fixed_only[i] / 2)

            # Adding zero as the first value
            base_shear_values = []
            base_shear_values.append(0)
            # Ignoring step 1 and step 2 as these are dead load steps, appending load step 3 and onwards to
            # the list 'base_shear_values'
            for i in range(len(results_pile_fixed_only)):
                if i > 2:
                    base_shear_values.append(results_pile_fixed_only[i])

            # Creating a dictionary with key: load step and value: base_shear
            for i in range(len(base_shear_values)):
                if i == 0:
                    base_shear_step_dict[i] = base_shear_values[i]
                else:
                    base_shear_step_dict[i + 2] = base_shear_values[i]
            return base_shear_step_dict

def viia_read_pushover_curve_data(
        project: ViiaProject, work_folder: Union[Path, str], direction: str, traction: Optional[bool] = True,
        mesh_nodes: List[MeshNode] = None):
    """
    Function to read the DIANA result files in tabulated format. The work folder should contain the files that are
    needed for the push-over analysis: displacement and base shear tabulated files, mesh file and json file.

    Input:
        - project (obj): VIIA project object containing collections of fem objects and project variables.
        - work_folder (str or Path): Path reference of the result folder.
        - direction (str): Direction for the results to be collected. Select from x, y or z-direction.
        - traction (bool): Optional argument. Set it to True if the base shear .tb file contains interface tractions
          instead of reaction forces.
        - mesh_nodes (list of obj.ref.): List of MeshNode objects of which the results are returned. By default the
          argument is set to None, extracting all nodes in the file. When a node is requested that is not in the
          file, it will be omitted in the dictionary that is returned.

    Output:
        - Displacement dict: with the mesh-node as key and a list of displacements (mm) as value.
        - Base shear dict: with the load step as key and the base shear (N) of that step as value.
        - po_curve list: list of push-over curve objects created with the displacement and base shear data.
    """
    # Set up the working folder and check all necessary files
    project.workfolder_location = work_folder
    try:
        disp_tb = next(x for x in project.workfolder_location.iterdir() if 'OUTPUT_NLPO_DISPLA_TB' in x.name)
        base_shear_tb = next(
            x for x in project.workfolder_location.iterdir() if 'OUTPUT_NLPO_BASE_SHEAR_TB' in x.name)
        dat_file = next(x for x in project.workfolder_location.iterdir() if x.suffix == '.dat')
        json_file = next(
            x for x in project.workfolder_location.iterdir()
            if x.suffix == '.json' and x.name.startswith(project.name))
    except StopIteration:
        project.write_log('ERROR: Please check if the displacement & base shear .tb files, .dat and json file of '
                          'the model are in the working directory')
        raise FileNotFoundError

    # Read the data into the project
    project.dat_file = project.from_diana(dat_file)
    project.read_dump(json_file)
    _viia_store_demand_curves(project=project)

    if not mesh_nodes:
        mesh_nodes = []
        mesh_dict = viia_center_of_mass_nlpo(project=project)
        for level, v in mesh_dict.items():
            eval_coord = [v['x'], v['y'], v['z']]
            eval_node = viia_find_closest_mesh_node(project=project, target_point=eval_coord)
            mesh_nodes.append(eval_node)

    disp_dict = viia_read_tb_displacements_nlpo(
        project=project, file=disp_tb, direction=direction, mesh_nodes=mesh_nodes)  # In mm
    base_shear_dict = viia_read_tb_base_shear_nlpo(
        project=project, file_tb=base_shear_tb, file_dat=dat_file, direction=direction, traction=traction)

    # Instantiate PushoverCurve class
    po_curves = list()
    for meshnode, disp in disp_dict.items():
        po_curve = PushoverCurve(
            displacement=disp, meshnode=meshnode, base_shear=list(base_shear_dict.values()), direction=direction,
            project=project)
        po_curves.append(po_curve)

    # Plot push-over raw data curves for a first impression
    PushoverCurve.plot_curve(project, input_curve=po_curves, ADRS=False)
    return disp_dict, base_shear_dict, po_curves

def viia_convert_to_sdof_po_system(project: ViiaProject, po_curves: list, mass_dict_manual: dict = None):
    """
    Function to convert the MDOF into an SDOF system for push-over analysis. The global behaviour of the structure
    is simplified to an equivalent single degree of freedom (SDOF) system.

    Input:
        - project (obj): VIIA project object containing collections of fem objects and project variables.
        - po_curves (list): List of push-over curve objects created with the displacement and base shear data.
        - mass_dict_manual (dict): Dictionary with the same key-value definition as viia_center_of_mass_nlpo().
          This gives the user the possibility to overwrite the mass dictionary.

    Output:
        - Returns the equivalent SDOF push-over system with its associated structural data, which includes:
          'eff_mass', 'eff_height', 'gamma', 'po_curves', 'v_star_base', 'u_star_roof' and 'roof'. The push-over
          curve is plotted.
    """
    # Read mass
    if not mass_dict_manual:
        mass_dict = viia_center_of_mass_nlpo(project=project)
    else:
        mass_dict = mass_dict_manual

    # Match the structural information
    for nr_floor, v in mass_dict.items():
        eval_coord = [v['x'], v['y'], v['z']]
        # This mesh-node should be consistent with the analysis block
        eval_node = viia_find_closest_mesh_node(project=project, target_point=eval_coord).id
        # Find the mesh-node and link the structural information to the PushoverCurve instance
        for po_curve in po_curves:
            if po_curve.meshnode == eval_node:
                # Assign structural information to the push-over data
                po_curve.seismic_mass = v['mass']
                po_curve.level = nr_floor
                po_curve.diana_coord = eval_coord
                break
        else:
            raise ValueError(f'The evaluation node {eval_node} for the displacement is not consistent between '
                             'the .tb file and the .dat file, please double check it')

    # Convert into SDOF
    po_system = PushoverCurve.normalize_to_sdof(project, *po_curves)

    # Plot push-over curve
    PushoverCurve.plot_curve(project, input_curve=po_curves, ADRS=False)
    return po_system

def viia_get_drift_limit(project: ViiaProject, po_system: namedtuple):
    """
    Function to get the inter-storey drift limit of a structure. Values taken from Table 15 of the NPR: Global limit
    state criteria for building systems and inelastic mechanisms.

    Input:
        - project (obj): VIIA project object containing collections of fem objects and project variables.
        - po_system (namedtuple): Output from viia_create_pushover_curves with all structural information contained.

    Output:
        - Returns the drift limit for the ductile and brittle failure mechanism.
    """
    # Get the inter-storey drift limit
    def _interstory_dri_lim(factor):
        # Make the height in mm, the maximum allowable drift limit for each storey
        inter = [factor * 1000 * i for i in relative_height]
        # Check if the relative displacement exceeds the threshold
        cut_off_step = [PushoverCurve.find_intersections_index(project=project, x=i, y=j, warnings=False)
                        for i, j in zip(inter, relative_disp)]
        # Find the minimal step at which the relative displacement exceeds the threshold for all storeys
        if not all(i is None for i in cut_off_step):
            inter_limit_idx = min(i for i in cut_off_step if i is not None)
            inter_limit = po_system.u_star_roof[inter_limit_idx]
        else:
            # If the relative displacement does not exceed the threshold, then take the u_sys_cap
            inter_limit = po_system.u_star_roof[-1]
        return inter_limit

    # Get relative displacement and height
    relative_disp = [po_system.po_curves[0].displacement]
    relative_height = [po_system.po_curves[0].diana_coord[-1]]
    # Looping from the second item to the end
    for idx, po_curve in enumerate(po_system.po_curves[1:], start=1):
        relative_disp.append(po_curve.displacement - po_system.po_curves[idx - 1].displacement)
        relative_height.append(po_curve.diana_coord[-1] - po_system.po_curves[idx - 1].diana_coord[-1])

    # Get the NC limit for the ductile and brittle failure mechanism
    inter_limit_dc, inter_limit_br = _interstory_dri_lim(0.015), _interstory_dri_lim(0.006)
    return {'dc': inter_limit_dc, 'br': inter_limit_br}
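

# Worked illustration of the drift threshold used above (made-up storey height, not executed on import): for a
# storey height of 2.8 m, the ductile drift factor of 0.015 gives a maximum allowable relative storey displacement
# of
#
#   0.015 * 1000 * 2.8   # = 42.0 mm
#
# and the brittle factor of 0.006 gives 16.8 mm accordingly.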

def viia_get_sys_ductility_factor(bi_params: dict, u_updated=None):
    """
    Function to get the system ductility factor of a structure. Later this is updated in the iterative process for
    the performance point.

    Input:
        - bi_params (dict): Output from PushoverCurve.bilinearise() with the bilinearised curve data contained.
        - u_updated (float): Optional updated displacement; when provided it is used instead of 'u_cap_sys'.

    Output:
        - Returns the system ductility factor.
    """
    if u_updated:
        mu_sys = u_updated / bi_params['u_y_sys']
    else:
        mu_sys = bi_params['u_cap_sys'] / bi_params['u_y_sys']
    return mu_sys

def viia_get_damping_ratio_correction_factor(mu_sys: float, failure: str = 'dc', beta: float = 0):
    """
    Function to get the damping ratio and reduction factor of a system.

    Input:
        - mu_sys (float): System ductility factor, output from viia_get_sys_ductility_factor.
        - failure (str): Failure mechanism, optional 'dc' (ductile) or 'br' (brittle).
        - beta (float): Additional damping ratio contribution added to the system damping. Default value 0.

    Output:
        - Returns the reduction factor and the system damping ratio.
    """
    # NPR G4.2.6 and G7
    # System equivalent viscous damping ratio and damping correction factor
    ksi_hys = {'dc': min(0.15, 0.42 * (1 - 0.9 / np.sqrt(mu_sys) - 0.1 * np.sqrt(mu_sys))), 'br': 0}
    ksi_0 = 0.05
    # System damping is limited to 40%
    ksi_sys = min(ksi_0 + ksi_hys[failure] + beta, 0.4)
    # Reduction factor
    yita_kesi = min(1.0, max(0.55, np.sqrt(7 / (2 + ksi_sys * 100))))
    params = {'reduction': yita_kesi, 'system_damping': ksi_sys}
    return params
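

# Worked illustration of the relations above (made-up ductility, not executed on import): for a ductile system with
# mu_sys = 3.0,
#
#   ksi_hys = min(0.15, 0.42 * (1 - 0.9 / 3.0 ** 0.5 - 0.1 * 3.0 ** 0.5))   # ≈ 0.129
#   ksi_sys = min(0.05 + 0.129 + 0, 0.4)                                    # ≈ 0.179
#   eta = min(1.0, max(0.55, (7 / (2 + 17.9)) ** 0.5))                      # ≈ 0.59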

def viia_update_inelastic_ADRS(project: ViiaProject, correction_factor: float):
    """
    Function to update the inelastic ADRS given a correction factor.

    Input:
        - project (obj): VIIA project object containing collections of fem objects and project variables.
        - correction_factor (float): Value in the range (0, 1), a scaling factor applied to the elastic ADRS, also
          called reduction factor.

    Output:
        - Returns the updated inelastic ADRS based on the given correction factor.
    """
    # Take the last spectrum
    spectrum = project.collections.response_spectra[-1]
    agS = spectrum.peak_ground_acceleration
    agd = agS * project.importance_factor
    t_b = spectrum.t_b
    t_c = spectrum.t_c
    t_d = spectrum.t_d
    rho = spectrum.plateau_factor

    def _get_S_a_in(t):
        # t_b: lower limit of the period of the constant spectral acceleration branch
        if not fem_greater(t, t_b):
            S_a_in = agd * (1 + t / t_b * (rho * correction_factor - 1))
        # t_c: upper limit of the period of the constant spectral acceleration branch
        elif fem_smaller(t_b, t) and (not fem_greater(t, t_c)):
            S_a_in = agd * rho * correction_factor
        # t_d: value defining the start of the constant displacement response spectrum
        elif fem_smaller(t_c, t) and (not fem_greater(t, t_d)):
            S_a_in = agd * rho * t_c / t * correction_factor
        else:
            S_a_in = agd * rho * t_c * t_d / t ** 2 * correction_factor
        S_d_in = S_a_in * (t / (2 * np.pi)) ** 2 * project.gravitational_acceleration
        return S_d_in, S_a_in

    # The range of the period is 0-2 s
    t = np.linspace(0, 2, 100)
    # Vectorise the 2D data, make it callable
    vec_get_S_a_in = np.vectorize(_get_S_a_in)
    # Convert from t to displacement and acceleration
    S_d_in, S_a_in = vec_get_S_a_in(t)
    # Units in mm and %g
    return np.stack((S_d_in * 1000, S_a_in), axis=1)
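

# Worked illustration of the plateau branch above (made-up spectrum values, not executed on import; the spectral
# acceleration is assumed to be expressed in [g], consistent with the %g axis used elsewhere in this module). With
# agd = 0.2, rho = 2.5 and a correction factor of 0.6, a point on the constant-acceleration branch at T = 0.5 s is
#
#   S_a_in = 0.2 * 2.5 * 0.6                                  # = 0.30 g
#   S_d_in = 0.30 * (0.5 / (2 * np.pi)) ** 2 * 9.81           # ≈ 0.0186 m, i.e. ≈ 18.6 mm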

def viia_reshape_ADRS(x: np.array, y: np.array):
    """
    Function to reshape the inelastic ADRS by adding an offset for the constant y-value range. This is needed for
    the further interpolation of the ADRS data, to ensure the 2D data can be appropriately interpolated and is
    differentiable. It is done by replacing the constant-value segment with an offset line from (x_const, y_const)
    to (x_const + 0.01, 0).

    Input:
        - x (np.array): x of ADRS value (displacement).
        - y (np.array): y of ADRS value (acceleration).

    Output:
        - Returns the updated inelastic ADRS (2-dim np.array) based on a given correction factor.
    """
    for idx, v in enumerate(x[:-1]):
        # Find the constant part which is perpendicular to the x-axis
        if fem_smaller(x[idx + 1] - v, 0.00005):
            step = idx
            break
    else:
        step = len(x) + 1
    x, y = (x[:step + 1], y[:step + 1])
    return np.append(x, x[step] + 0.01), np.append(y, 0)

def viia_interpolation_performance_point(
        x: np.array, y: np.array, x_new: np.array, is_adrs: bool = False, fill: bool = False):
    """
    Function to interpolate the array based on a given new grid. If the array is ADRS data, the values beyond the
    range are filled with zero. By doing so, the 2D array becomes a closed contour, which makes it easier to find
    the intersection.

    Input:
        - x (np.array): x of ADRS value (displacement).
        - y (np.array): y of ADRS value (acceleration).
        - x_new (np.array): New grid of x-values (displacement) to interpolate on.
        - is_adrs (bool): Boolean to indicate if the input data is ADRS data.
        - fill (bool): If True, the data will be extrapolated, else the data is filled with np.nan. In practice, the
          Teff curve uses extrapolation while the bilinear data should be filled with nan.

    Output:
        - Returns the 2D array based on the new given grid.
    """
    from scipy import interpolate

    # ADRS fill with 0
    if is_adrs:
        f = interpolate.interp1d(x, y, bounds_error=False, fill_value=0)
    else:
        if fill:
            # Teff fill with extrapolation of a straight line
            f = interpolate.interp1d(x, y, bounds_error=False, fill_value='extrapolate')
        else:
            # Bilinear curve fill with np.nan
            f = interpolate.interp1d(x, y, bounds_error=False, fill_value=np.nan)
    y_new = f(x_new)
    return np.stack((x_new, y_new), axis=-1)

def viia_initialise_performance_point(project: ViiaProject, bi_params: dict):
    """
    Function to initialise the algorithm to find the performance point. The user should check whether the output
    parameters are rational; for details, please refer to the NPR or the protocol.

    Input:
        - project (obj): VIIA project object containing collections of fem objects and project variables.
        - bi_params (dict): Output dictionary from the bilinearise function.

    Output:
        - Returns the dictionary containing the information of the first step for the performance point.
    """
    # Interpolate the elastic and inelastic ADRS
    elastic_adrs_x = project.project_specific['ADRS']['Elastic']['displacement'] * 1000
    elastic_adrs_y = project.project_specific['ADRS']['Elastic']['acceleration']
    elastic_adrs_x, elastic_adrs_y = viia_reshape_ADRS(elastic_adrs_x, elastic_adrs_y)
    inelastic_adrs_x = project.project_specific['ADRS']['Inelastic 20% damped']['displacement'] * 1000
    inelastic_adrs_y = project.project_specific['ADRS']['Inelastic 20% damped']['acceleration']
    inelastic_adrs_x, inelastic_adrs_y = viia_reshape_ADRS(inelastic_adrs_x, inelastic_adrs_y)

    # Original bilinear data
    cs_x = bi_params['bi_2Darray'][:, 0]
    cs_y = bi_params['bi_2Darray'][:, 1]

    # Find the maximum range from the bilinear curve or the elastic ADRS
    x_max = max(elastic_adrs_x[-1], cs_x[-1])

    # Interpolate everything with interval 0.01
    x_new = np.arange(cs_x[0], x_max, 0.01)

    # Initialisation
    elastic_adrs = viia_interpolation_performance_point(elastic_adrs_x, elastic_adrs_y, x_new, is_adrs=True)
    inelastic_adrs = viia_interpolation_performance_point(inelastic_adrs_x, inelastic_adrs_y, x_new, is_adrs=True)
    bilinear = viia_interpolation_performance_point(cs_x, cs_y, x_new)

    # Get mu_0, the initial system ductility
    T_eff = viia_interpolation_performance_point([0, cs_x[-1]], [0, cs_y[-1]], x_new, fill=True)

    # Intersection with the 20% damped ADRS
    u_idx = PushoverCurve.find_intersections_index(project, inelastic_adrs[:, 1], T_eff[:, 1])

    # Output cache, which the user is able to check
    init_cache = {}
    init_cache['x_new'] = x_new
    init_cache['inelastic_adrs'] = inelastic_adrs
    init_cache['elastic_adrs'] = elastic_adrs
    init_cache['bilinear'] = bilinear
    init_cache['u_idx'] = u_idx
    return init_cache

def viia_get_performance_point(project: ViiaProject, bi_params: dict, init_cache: dict, error=1):
    """
    Function to get the performance point. The iterative process should yield the performance point within 5000
    steps. If the performance point is not converging, check the numerical stability of the bilinear data and the
    previous output.

    Input:
        - project (obj): VIIA project object containing collections of fem objects and project variables.
        - bi_params (dict): Output from PushoverCurve.bilinearise() with the bilinearised curve data contained.
        - init_cache (dict): Output from viia_initialise_performance_point with the data of the first step.
        - error (float): Convergence tolerance on the change of the performance-point displacement between
          iterations, in the units of the ADRS displacement (mm). Default value 1.

    Output:
        - Returns a dictionary with the index of the performance point, its displacement and the system ductility.
          The performance point plot is saved to the work-folder.
    """
    # Read step 1 data
    x_new = init_cache['x_new']
    elastic_adrs = init_cache['elastic_adrs']
    bilinear = init_cache['bilinear']
    u_idx = init_cache['u_idx']
    iter = 0
    cache = {}
    skip_iter = False

    # Get rid of np.nan
    bi_y = bilinear[:, 1][~np.isnan(bilinear[:, 1])]

    # Elastic range
    elastic_intersection = PushoverCurve.find_intersections_index(
        project, elastic_adrs[:, 1], bilinear[:, 1], warnings=False)
    if elastic_intersection is not None:
        u_idx = elastic_intersection[0]
        mu_new = viia_get_sys_ductility_factor(bi_params, u_updated=float(x_new[u_idx]))
        project.write_log(f'The performance point is {float(x_new[u_idx])}, '
                          f'which is found in the elastic range.')
        skip_iter = True

    # Start iteration
    while iter < 5000:
        u_updated = float(x_new[u_idx])
        # Update the ADRS according to the new u
        mu_new = viia_get_sys_ductility_factor(bi_params, u_updated=u_updated)
        reduction = viia_get_damping_ratio_correction_factor(mu_new, failure='dc')['reduction']
        adrs_new = viia_update_inelastic_ADRS(project, reduction)

        # Reshape and interpolate
        adrs_new_x, adrs_new_y = viia_reshape_ADRS(adrs_new[:, 0], adrs_new[:, 1])
        adrs_updated = viia_interpolation_performance_point(adrs_new_x, adrs_new_y, x_new, is_adrs=True)

        # Effective period
        T_eff = viia_interpolation_performance_point(
            [0, float(bilinear[u_idx, 0])], [0, float(bilinear[u_idx, 1])], x_new, fill=True)
        if skip_iter:
            break

        # Inelastic range
        if not fem_smaller(bi_y.max(), adrs_updated[:, 1].max()):
            # If the plateau of the CS is greater than that of the inelastic ADRS, then the intersection is obtained
            # between Teff and the ADRS
            u_idx = PushoverCurve.find_intersections_index(project, adrs_updated[:, 1], T_eff[:, 1])
        else:
            # If the plateau of the CS is lower than that of the inelastic ADRS, then the intersection is obtained
            # between the CS and the ADRS
            u_idx = PushoverCurve.find_intersections_index(project, adrs_updated[:, 1], bilinear[:, 1])

        # Check the difference
        try:
            diff = np.abs(float(x_new[u_idx]) - u_updated)
        except Exception as e:
            project.write_log(
                "WARNING: The intersection between the bilinear curve/effective period and the updated ADRS cannot "
                "be found. It can be a numerical issue where the updated ADRS encompasses the bilinear curve with "
                "no intersections. Tweaking the tolerance (input param: error) could help. Please check the "
                f"bilinear data and the init_cache and the exception: {e}.")
            return None
        else:
            if fem_smaller(diff, error):
                project.write_log(f'Congrats! The performance point is found: {float(x_new[u_idx])}')
                break
        iter += 1
    else:
        # The while loop did not break
        raise RuntimeWarning(
            "ERROR: After 5000 iterations, the performance point is still not found, please check the numerical "
            "stability of the bi-linear curve data or contact the knowledge team.")

    # This is outside the while block, which means the point has been found
    cache['index'] = u_idx
    cache['s_int'] = float(x_new[u_idx])
    cache['sys_ductilty'] = mu_new

    # Plot the performance point
    plt.close()
    plt.figure(figsize=(15, 5))

    # Apply VIIA graph style
    style_file = project.viia_settings.project_specific_package_location / 'viiaGraph.mplstyle'
    plt.style.use(style_file.as_posix())
    ax = plt.subplot2grid((1, 1), (0, 0))
    ax.plot(elastic_adrs[:, 0], elastic_adrs[:, 1], '--', label='elastic_adrs', linewidth=1.7, color='purple')
    ax.plot(adrs_new[:, 0], adrs_new[:, 1], '--', label='inelastic_adrs', linewidth=1.7, color='orange')
    ax.plot(T_eff[:, 0], T_eff[:, 1], '--', label='Teff', linestyle='dashed', linewidth=1.7, color='green')
    ax.plot(bilinear[:, 0], bilinear[:, 1], label='bilinear_curve')
    ax.plot(bilinear[u_idx, 0], bilinear[u_idx, 1], 'ro', label='PP')
    ax.set_ylim([0, 1.3 * elastic_adrs[:, 1].max()])
    ax.set_xlabel('Displacement (mm)')
    ax.set_ylabel('Acceleration (%g)')
    ax.set_title('Performance point')
    ax.legend()

    # Save the graph to the same folder
    filename = project.workfolder_location / "performance_point.png"
    plt.savefig(filename.as_posix())
    project.write_log(f'The performance point figure has been saved to {project.workfolder_location}')
    return cache
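

# End-to-end usage sketch of the NLPO post-processing chain defined above (illustrative only, not executed on
# import; `project`, `work_folder` and the choice to use the roof curve are assumptions of this sketch, and the
# exact sequence used in VIIA analyses may differ):
#
#   disp_dict, base_shear_dict, po_curves = viia_read_pushover_curve_data(
#       project=project, work_folder=work_folder, direction='X', traction=True)
#   po_system = viia_convert_to_sdof_po_system(project=project, po_curves=po_curves)
#   roof_curve = po_system.roof
#   roof_curve.eff_mass = po_system.eff_mass
#   drift_limits = viia_get_drift_limit(project=project, po_system=po_system)
#   bi_data, bi_params = roof_curve.bilinearise(drift_limit=drift_limits['dc'], output_params=True)
#   init_cache = viia_initialise_performance_point(project=project, bi_params=bi_params)
#   performance_point = viia_get_performance_point(project=project, bi_params=bi_params, init_cache=init_cache)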


### ===================================================================================================================
###   4. End of script
### ===================================================================================================================