7.3. Example 3: Combining fragility-based damage consequences and loss functions.

Tests a complete loss-estimation workflow that combines damage-state-driven and loss-function-driven components. The code is based on PRJ-3411v5 hosted on DesignSafe.

[1]:
import tempfile

import numpy as np
import pandas as pd
import pytest

import pelicun
from pelicun import assessment, file_io
from pelicun.pelicun_warnings import PelicunWarning
[2]:
# Scratch directory for everything this example writes (log file, saved samples).
temp_dir = tempfile.mkdtemp()

# Number of realizations used for every generated sample in this example.
sample_size = 10000
[3]:
# Initialize a pelicun assessment.
# A fixed seed makes the 10000-realization sampling reproducible; the log is
# written into the scratch directory created above.
asmnt = assessment.Assessment(
    {'PrintLog': True, 'Seed': 415, 'LogFile': f'{temp_dir}/log_file.txt'}
)

# Report all damage states individually in the results.
asmnt.options.list_all_ds = True
# Economy-of-scale settings: aggregate quantities across floors and across
# damage states when determining consequence scaling.
# NOTE(review): semantics taken from the option names — confirm against the
# pelicun Options documentation.
asmnt.options.eco_scale['AcrossFloors'] = True
asmnt.options.eco_scale['AcrossDamageStates'] = True
pelicun 3.4.0 |

System Information:
         local time zone: UTC
         start time: 2025-01-30T22:27:55
         python: 3.10.16 (main, Dec 12 2024, 19:07:59) [GCC 13.2.0]
         numpy: 1.26.4
         pandas: 2.2.3

         --------------------------------------------------------------------------------
22:27:55 Assessment Started
[4]:
# Read the demand marginals without unit conversion and without reindexing,
# keeping the file's own index (one row per demand variable).
demand_data = file_io.load_data(
    'example_3/demand_data.csv',
    unit_conversion_factors=None,
    reindex=False,
)

# Build a square correlation matrix of ones (perfect correlation) over the
# demand variables, labeled by the demand index on both axes.
n_demands = len(demand_data)
perfect_correlation = pd.DataFrame(
    np.full((n_demands, n_demands), 1.0),
    index=demand_data.index,  # type: ignore
    columns=demand_data.index,  # type: ignore
)
[5]:
#
# Additional damage state-driven components
#

# Fragility data for the extra components, converted to internal units.
damage_db = pelicun.file_io.load_data(
    'example_3/additional_damage_db.csv',
    unit_conversion_factors=asmnt.unit_conversion_factors,
    reindex=False,
)

# Damage-state consequence data for the same components.
consequences = pelicun.file_io.load_data(
    'example_3/additional_consequences.csv',
    unit_conversion_factors=asmnt.unit_conversion_factors,
    reindex=False,
)
[6]:
#
# Additional loss function-driven components
#

# Loss-function definitions for the components whose consequences are driven
# directly by demand rather than by damage states.
loss_functions = pelicun.file_io.load_data(
    'example_3/additional_loss_functions.csv',
    unit_conversion_factors=asmnt.unit_conversion_factors,
    reindex=False,
)
[7]:
#
# Demands
#

# Load the demand model: marginal distributions plus the all-ones
# correlation matrix built earlier.
asmnt.demand.load_model(
    {'marginals': demand_data, 'correlation': perfect_correlation}
)

# Generate samples
asmnt.demand.generate_sample({'SampleSize': sample_size})


def add_more_edps() -> None:
    """Add SA_1.13 and residual drift to the demand sample.

    Extends the generated demand sample with two EDPs that the damage and
    loss models below require, then loads the extended sample back into
    the assessment's demand model.
    """
    # Add residual drift and Sa
    demand_sample = asmnt.demand.save_sample()

    # RIDs are all fixed for testing.
    # Build a RID block with the same shape/labels as the PID block, every
    # entry set to 0.0050, under a new top-level column key 'RID'.
    rid = pd.concat(
        [
            pd.DataFrame(
                np.full(demand_sample['PID'].shape, 0.0050),  # type: ignore
                index=demand_sample['PID'].index,  # type: ignore
                columns=demand_sample['PID'].columns,  # type: ignore
            )
        ],
        axis=1,
        keys=['RID'],
    )
    demand_sample_ext = pd.concat([demand_sample, rid], axis=1)  # type: ignore

    # Add a constant spectral acceleration column via MultiIndex enlargement:
    # (type='SA_1.13', loc=0, dir=1) set to 1.50 g in every realization.
    demand_sample_ext['SA_1.13', 0, 1] = 1.50

    # Add units to the data
    # NOTE(review): inserting into the transpose to create a 'Units' row, then
    # filling it via .loc below — presumably relies on pandas making the row
    # visible on the original frame; confirm against other pelicun examples.
    demand_sample_ext.T.insert(0, 'Units', '')

    # PFA and SA are in "g" in this example, while PID and RID are "rad"
    demand_sample_ext.loc['Units', ['PFA', 'SA_1.13']] = 'g'
    demand_sample_ext.loc['Units', ['PID', 'RID']] = 'rad'

    asmnt.demand.load_sample(demand_sample_ext)


add_more_edps()
         --------------------------------------------------------------------------------
22:27:55 Loading demand model...
         Unit conversion successful.
         Demand model successfully loaded.
         --------------------------------------------------------------------------------
22:27:55 Generating sample from demand variables...

         18 random variables created.

         Correlations between 18 random variables successfully defined.

         Successfully generated 10000 realizations.
         --------------------------------------------------------------------------------
         Preparing data ...
         Converting units...
         Unit conversion successful.
         --------------------------------------------------------------------------------
22:27:55 Loading demand data...
         Converting units...
         Unit conversion successful.
         Demand data successfully parsed.
         Demand units successfully parsed.
[8]:
#
# Asset
#

# Specify number of stories
asmnt.stories = 1

# Load component definitions
cmp_marginals = pd.read_csv('example_3/CMP_marginals.csv', index_col=0)
cmp_marginals['Blocks'] = cmp_marginals['Blocks']
asmnt.asset.load_cmp_model({'marginals': cmp_marginals})

# Generate sample
asmnt.asset.generate_cmp_sample(sample_size)
         --------------------------------------------------------------------------------
22:27:55 Loading component model...
         Unit conversion successful.
         Model parameters successfully parsed. 8 performance groups identified
         Converting model parameters to internal units...
         Model parameters successfully loaded.

         Component model marginal distributions:
                                          Theta_0  Blocks Units
         cmp                 loc dir uid
         D.30.31.013i        2   0   0        1.0       1    ea
         collapse            0   1   0        1.0       1    ea
         excessiveRID        1   1   0        1.0       1    ea
                                 2   0        1.0       1    ea
         irreparable         0   1   0        1.0       1    ea
         missing.component   0   1   0        1.0       1    ea
         testing.component   1   1   0        1.0       1    ea
         testing.component.2 1   1   0        2.0       4    ea
         --------------------------------------------------------------------------------
22:27:55 Generating sample from component quantity variables...

         8 random variables created.

         Successfully generated 10000 realizations.
[9]:
#
# Damage
#

cmp_set = set(asmnt.asset.list_unique_component_ids())

# Load the models into pelicun
asmnt.damage.load_model_parameters(
    [
        damage_db,  # type: ignore
        'PelicunDefault/damage_DB_FEMA_P58_2nd.csv',
    ],
    cmp_set,
)

# Prescribe the damage process
dmg_process = {
    '1_collapse': {'DS1': 'ALL_NA'},
    '2_excessiveRID': {'DS1': 'irreparable_DS1'},
}

# Calculate damages

asmnt.damage.calculate(dmg_process=dmg_process)

# Test load sample, save sample
asmnt.damage.save_sample(f'{temp_dir}/out.csv')
asmnt.damage.load_sample(f'{temp_dir}/out.csv')
         --------------------------------------------------------------------------------
         Loading damage model...
         Damage model parameters loaded successfully.
         Removing unused damage model parameters.
         Converting damage model parameter units.
         Checking damage model parameter availability for all components in the asset model.

         The damage model does not provide damage information for the following component(s) in the asset model: ['testing.component.2', 'testing.component', 'missing.component'].

         --------------------------------------------------------------------------------
22:27:56 Calculating damages...
         Number of Performance Groups in Asset Model: 8
         Number of Component Blocks: 5
         1 batches of Performance Groups prepared for damage assessment
22:27:56 Calculating damage states for PG batch 1 with 5 blocks
         Damage state calculation successful.
22:27:56 Applying damage processes.
         Damage processes successfully applied.
         Damage calculation completed.
         --------------------------------------------------------------------------------
22:27:56 Saving damage sample...
         Saving data to `/tmp/tmp_5tcd65n/out.csv`...
         Converting units...
         Unit conversion successful.
         Data successfully saved to file.
         Damage sample successfully saved.
         --------------------------------------------------------------------------------
22:27:56 Loading damage sample...
         Converting units...
         Unit conversion successful.
         Damage sample successfully loaded.
[11]:
# Display the mean of the damage-state sample across realizations.
asmnt.damage.ds_model.sample.mean()
[11]:
cmp           loc  dir  uid  ds
D.30.31.013i  2    0    0    0     0.3194
                             1     0.2282
                             2     0.2236
                             3     0.2288
collapse      0    1    0    0     0.5000
                             1     0.5000
excessiveRID  1    1    0    0     0.9902
                             1     0.0098
                   2    0    0     0.9902
                             1     0.0098
irreparable   0    1    0    0     0.9804
                             1     0.0196
dtype: float64
[12]:
#
# Losses
#

# Create the loss map
loss_map = pd.DataFrame(
    ['replacement', 'replacement'],
    columns=['Repair'],
    index=['collapse', 'irreparable'],
)

# Load the loss model
asmnt.loss.decision_variables = ('Cost', 'Time')
asmnt.loss.add_loss_map(loss_map, loss_map_policy='fill')
with pytest.warns(PelicunWarning):
    asmnt.loss.load_model_parameters(
        [
            consequences,  # type: ignore
            loss_functions,  # type: ignore
            'PelicunDefault/loss_repair_DB_FEMA_P58_2nd.csv',
        ]
    )

# Perform the calculation
asmnt.loss.calculate()

# Test load sample, save sample
with pytest.warns(PelicunWarning):
    asmnt.loss.save_sample(f'{temp_dir}/sample.csv')
    asmnt.loss.load_sample(f'{temp_dir}/sample.csv')

#
# Loss sample aggregation
#

# Get the aggregated losses
with pytest.warns(PelicunWarning):
    agg_df = asmnt.loss.aggregate_losses()
22:27:56 Loading loss map...
         Loss map is provided.
22:27:56 Loss map loaded successfully.
         --------------------------------------------------------------------------------
22:27:56 Loading loss parameters...
         Loss model parameters loaded successfully.
         Removing unused loss model parameters.
         Converting loss model parameter units.
         Checking loss model parameter availability for all components in the asset model.
         --------------------------------------------------------------------------------
22:27:56 Calculating losses...

         Aggregating damage quantities...
         Successfully aggregated damage quantities.

         Calculating the median repair consequences...
         Successfully determined median repair consequences.
22:27:56
         Considering deviations from the median values to obtain random DV sample...
         Preparing random variables for repair consequences...

         6 random variables created.

         Successfully generated 10000 realizations of deviation from the median consequences.
         Successfully obtained DV sample.

         Calculating the median repair consequences...
         Successfully determined median repair consequences.
22:27:56
         Considering deviations from the median values to obtain random DV sample...
         Preparing random variables for repair cost and time...

         2 random variables created.

         Successfully generated 10000 realizations of deviation from the median consequences.
         Successfully obtained DV sample.
22:27:56 Loss calculation successful.
         --------------------------------------------------------------------------------
22:27:56 Saving loss sample...
         Saving data to `/tmp/tmp_5tcd65n/sample.csv`...
         Converting units...
         Unit conversion successful.
         Data successfully saved to file.
         Loss sample successfully saved.
         --------------------------------------------------------------------------------
22:27:56 Loading loss sample...
         Converting units...
         Unit conversion successful.
         Loss sample successfully loaded.
         Preparing data ...
         Converting units...
         Unit conversion successful.