Recovery simulations #23

Merged
merged 6 commits on Oct 3, 2023

Changes from all commits
12 changes: 12 additions & 0 deletions uvdat/core/serializers.py
@@ -1,6 +1,7 @@
from rest_framework import serializers

from uvdat.core.models import Chart, City, Dataset, NetworkNode, SimulationResult
from uvdat.core.tasks.simulations import AVAILABLE_SIMULATIONS


class NetworkNodeSerializer(serializers.ModelSerializer):
@@ -45,6 +46,17 @@ class Meta:


class SimulationResultSerializer(serializers.ModelSerializer):
    name = serializers.SerializerMethodField('get_name')

    def get_name(self, obj):
        time = obj.modified.strftime('%Y-%m-%d %H:%M')
        simulation_type_matches = [t for t in AVAILABLE_SIMULATIONS if t['id'] == obj.simulation_id]
        if len(simulation_type_matches) == 0:
            return f'Result {time}'
        else:
            simulation_type = simulation_type_matches[0]
            return f"{simulation_type['name']} Result {time}"

    class Meta:
        model = SimulationResult
        fields = '__all__'
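
Note: the net effect of the new get_name field is a label like "Flood Scenario 1 Result 2023-10-03 14:30". A minimal sketch of that naming logic outside Django (the FakeResult stand-in and its values are hypothetical, not part of this PR):

from datetime import datetime

AVAILABLE_SIMULATIONS = [{'id': 1, 'name': 'Flood Scenario 1'}]  # abridged stand-in

class FakeResult:
    simulation_id = 1
    modified = datetime(2023, 10, 3, 14, 30)

def get_name(obj):
    time = obj.modified.strftime('%Y-%m-%d %H:%M')
    matches = [t for t in AVAILABLE_SIMULATIONS if t['id'] == obj.simulation_id]
    if len(matches) == 0:
        return f'Result {time}'
    return f"{matches[0]['name']} Result {time}"

print(get_name(FakeResult()))  # Flood Scenario 1 Result 2023-10-03 14:30
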
73 changes: 69 additions & 4 deletions uvdat/core/tasks/networks.py
@@ -9,6 +9,17 @@

from uvdat.core.models import NetworkNode

NODE_RECOVERY_MODES = [
    'random',
    'betweenness',
    'degree',
    'information',
    'eigenvector',
    'load',
    'closeness',
    'second order',
]


def save_network_nodes(dataset):
    with tempfile.TemporaryDirectory() as temp_dir:
@@ -114,14 +125,68 @@ def save_network_nodes(dataset):
        node_object.save()


-def network_gcc(edges: dict[str, list[int]], exclude_nodes: list[int]) -> list[int]:
-    # Convert input keys to integer
-    int_edges = {int(k): v for k, v in edges.items()}
def construct_edge_list(dataset):
    network_nodes = dataset.network_nodes.values_list('id', flat=True)
    edges = NetworkNode.adjacent_nodes.through.objects.filter(
        from_networknode_id__in=network_nodes, to_networknode_id__in=network_nodes
    ).values_list('from_networknode_id', 'to_networknode_id')

    # Construct adj list
    edge_list: dict[int, list[int]] = {}
    for start, end in edges:
        if start not in edge_list:
            edge_list[start] = []

        edge_list[start].append(end)

    # Ensure that the type of all keys is an integer
    assert all(isinstance(x, int) for x in edge_list.keys())

    # Sort all node id lists
    for start_node in edge_list.keys():
        edge_list[start_node].sort()

    return edge_list


def network_gcc(edges: dict[int, list[int]], exclude_nodes: list[int]) -> list[int]:
    # Create graph, remove nodes, get GCC
-    G = nx.from_dict_of_lists(int_edges)
+    G = nx.from_dict_of_lists(edges)
    G.remove_nodes_from(exclude_nodes)
    gcc = max(nx.connected_components(G), key=len)

    # Return GCC's list of nodes
    return list(gcc)


# Authored by Jack Watson
# Takes in a second argument, measure, which is a string specifying the centrality
# measure to calculate.
def sort_graph_centrality(G, measure):
    if measure == 'betweenness':
        cent = nx.betweenness_centrality(G)  # get betweenness centrality
    elif measure == 'degree':
        cent = nx.degree_centrality(G)
    elif measure == 'information':
        # Information centrality is another name for current-flow closeness
        cent = nx.current_flow_closeness_centrality(G)
    elif measure == 'eigenvector':
        cent = nx.eigenvector_centrality(G, max_iter=10000)
    elif measure == 'load':
        cent = nx.load_centrality(G)
    elif measure == 'closeness':
        cent = nx.closeness_centrality(G)
    elif measure == 'second order':
        cent = nx.second_order_centrality(G)
    cent_list = list(cent.items())  # list of (node, centrality) tuples
    cent_arr = numpy.array(cent_list)
    cent_idx = numpy.argsort(cent_arr, 0)  # column 1 holds indices sorted by centrality

    node_list = list(G.nodes())
    nodes_sorted = [node_list[i] for i in cent_idx[:, 1]]
    edge_list = list(G.edges())

    # Currently sorted from lowest to highest centrality; let's reverse that
    nodes_sorted.reverse()

    return nodes_sorted, edge_list
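
Note: together these helpers round-trip between the Django models and plain networkx graphs. A standalone sketch of how they compose, using a hand-written adjacency dict in place of what construct_edge_list builds from the database (toy node ids, illustrative only):

import networkx as nx

# Adjacency list shaped like construct_edge_list's output:
# integer node ids mapped to sorted lists of neighbor ids.
edge_list = {1: [2, 3], 2: [3], 4: [5]}

G = nx.from_dict_of_lists(edge_list)

# network_gcc(edge_list, [3]) reduces to:
G.remove_nodes_from([3])
print(sorted(max(nx.connected_components(G), key=len)))  # a two-node component, e.g. [1, 2]

# sort_graph_centrality(G, 'degree') returns nodes from highest to
# lowest centrality; for degree centrality that is equivalent to:
cent = nx.degree_centrality(G)
print(sorted(cent, key=cent.get, reverse=True))
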
135 changes: 111 additions & 24 deletions uvdat/core/tasks/simulations.py
@@ -1,5 +1,8 @@
import inspect
import json
import networkx as nx
from pathlib import Path
import random
import re
import tempfile

@@ -8,8 +11,14 @@
import large_image
import shapely

from rest_framework.serializers import ModelSerializer
from uvdat.core.models import City, Dataset, SimulationResult
-from uvdat.core.serializers import DatasetSerializer, SimulationResultSerializer
from uvdat.core.tasks.networks import (
    NODE_RECOVERY_MODES,
    construct_edge_list,
    sort_graph_centrality,
)
import uvdat.core.serializers as serializers


def get_network_node_elevations(network_nodes, elevation_dataset):
@@ -49,7 +58,7 @@ def flood_scenario_1(simulation_result_id, network_dataset, elevation_dataset, f
    except Dataset.DoesNotExist:
        result.error_message = 'Dataset not found.'
        result.save()
-        return None
+        return

    if (
        not network_dataset.network
@@ -58,9 +67,9 @@ def flood_scenario_1(simulation_result_id, network_dataset, elevation_dataset, f
    ):
        result.error_message = 'Invalid dataset selected.'
        result.save()
-        return None
+        return

-    disabled_nodes = []
+    node_failures = []
    network_nodes = network_dataset.network_nodes.all()
    flood_geodata = json.loads(flood_dataset.geodata_file.open().read().decode())
    flood_areas = [
@@ -69,12 +78,52 @@ def flood_scenario_1(simulation_result_id, network_dataset, elevation_dataset, f
    for network_node in network_nodes:
        node_point = shapely.geometry.Point(*network_node.location)
        if any(flood_area.contains(node_point) for flood_area in flood_areas):
-            disabled_nodes.append(network_node)
+            node_failures.append(network_node)

    node_elevations = get_network_node_elevations(network_nodes, elevation_dataset)
-    disabled_nodes.sort(key=lambda n: node_elevations[n.id])
+    node_failures.sort(key=lambda n: node_elevations[n.id])

-    result.output_data = [n.id for n in disabled_nodes]
+    result.output_data = {'node_failures': [n.id for n in node_failures]}
    result.save()


@shared_task
def recovery_scenario(simulation_result_id, node_failure_simulation_result, recovery_mode):
    result = SimulationResult.objects.get(id=simulation_result_id)
    try:
        node_failure_simulation_result = SimulationResult.objects.get(
            id=node_failure_simulation_result
        )
    except SimulationResult.DoesNotExist:
        result.error_message = 'Node failure simulation result not found.'
        result.save()
        return
    if recovery_mode not in NODE_RECOVERY_MODES:
        result.error_message = f'Invalid recovery mode {recovery_mode}.'
        result.save()
        return

    node_failures = node_failure_simulation_result.output_data['node_failures']
    node_recoveries = node_failures.copy()
    if recovery_mode == 'random':
        random.shuffle(node_recoveries)
    else:
        dataset_id = node_failure_simulation_result.input_args['network_dataset']
        try:
            dataset = Dataset.objects.get(id=dataset_id)
        except Dataset.DoesNotExist:
            result.error_message = 'Dataset not found.'
            result.save()
            return
        edge_list = construct_edge_list(dataset)
        G = nx.from_dict_of_lists(edge_list)
        nodes_sorted, edge_list = sort_graph_centrality(G, recovery_mode)
        node_recoveries.sort(key=lambda n: nodes_sorted.index(n))

    result.output_data = {
        'node_failures': node_failures,
        'node_recoveries': node_recoveries,
    }
    result.save()


@@ -87,7 +136,7 @@ def flood_scenario_1(simulation_result_id, network_dataset, elevation_dataset, f
        to determine which network nodes go out of service
        when the target flood occurs.
        ''',
-        'output_type': 'node_failure_animation',
+        'output_type': 'node_animation',
        'func': flood_scenario_1,
        'args': [
            {
@@ -106,7 +155,30 @@ def flood_scenario_1(simulation_result_id, network_dataset, elevation_dataset, f
                'options_query': {'category': 'flood'},
            },
        ],
-    }
+    },
    {
        'id': 2,
        'name': 'Recovery Scenario',
        'description': '''
            Provide the output of another simulation which returns a list of deactivated nodes,
            and select a recovery mode to determine the order in which
            nodes will come back online.
        ''',
        'output_type': 'node_animation',
        'func': recovery_scenario,
        'args': [
            {
                'name': 'node_failure_simulation_result',
                'type': SimulationResult,
                'options_query': {'simulation_id__in': [1]},
            },
            {
                'name': 'recovery_mode',
                'type': str,
                'options': NODE_RECOVERY_MODES,
            },
        ],
    },
]


@@ -115,21 +187,36 @@ def get_available_simulations(city_id: int):
    for available in AVAILABLE_SIMULATIONS:
        available = available.copy()
        available['description'] = re.sub(r'\n\s+', ' ', available['description'])
-        available['args'] = [
-            {
-                'name': a['name'],
-                'options': list(
-                    DatasetSerializer(d).data
-                    for d in a['type']
-                    .objects.filter(
-                        city__id=city_id,
-                        **a['options_query'],
-                    )
-                    .all()
-                ),
-            }
-            for a in available['args']
-        ]
+        args = []
+        for a in available['args']:
+            options = a.get('options')
+            if not options:
+                options_query = a.get('options_query')
+                options_type = a.get('type')
+                option_serializer_matches = [
+                    s
+                    for name, s in inspect.getmembers(serializers, inspect.isclass)
+                    if issubclass(s, ModelSerializer) and s.Meta.model == options_type
+                ]
+                if not options_query or not options_type or len(option_serializer_matches) == 0:
+                    options = []
+                else:
+                    option_serializer = option_serializer_matches[0]
+                    if hasattr(options_type, 'city'):
+                        options_query['city__id'] = city_id
+                    options = list(
+                        option_serializer(d).data
+                        for d in options_type.objects.filter(
+                            **options_query,
+                        ).all()
+                    )
+            args.append(
+                {
+                    'name': a['name'],
+                    'options': options,
+                }
+            )
+        available['args'] = args
        del available['func']
        sims.append(available)
    return sims
@@ -149,5 +236,5 @@ def run_simulation(simulation_id: int, city_id: int, **kwargs):

        simulation = simulation_matches[0]
        simulation['func'].delay(sim_result.id, **kwargs)
-        return SimulationResultSerializer(sim_result).data
+        return serializers.SimulationResultSerializer(sim_result).data
    return f"No simulation found with id {simulation_id}."
15 changes: 3 additions & 12 deletions uvdat/core/views.py
@@ -21,7 +21,7 @@
)
from uvdat.core.tasks.charts import add_gcc_chart_datum
from uvdat.core.tasks.conversion import convert_raw_data
-from uvdat.core.tasks.networks import network_gcc
+from uvdat.core.tasks.networks import network_gcc, construct_edge_list
from uvdat.core.tasks.simulations import get_available_simulations, run_simulation


@@ -135,22 +135,13 @@ def get_gcc(self, request, **kwargs):
        exclude_nodes = request.query_params.get('exclude_nodes')
        exclude_nodes = exclude_nodes.split(',')
        exclude_nodes = [int(n) for n in exclude_nodes if len(n)]
-        edge_list = {}
-        visited_nodes = []
-        excluded_node_names = []
-
+        excluded_node_names = []
        for node in dataset.network_nodes.all():
-            adjacencies = [
-                adj_node.id
-                for adj_node in node.adjacent_nodes.all()
-                if adj_node.id not in visited_nodes
-            ]
-            if len(adjacencies) > 0:
-                edge_list[node.id] = sorted(adjacencies)
-            visited_nodes.append(node.id)
            if node.id in exclude_nodes:
                excluded_node_names.append(node.name)

+        edge_list = construct_edge_list(dataset)
        gcc = network_gcc(edge_list, exclude_nodes)
        add_gcc_chart_datum(dataset, excluded_node_names, len(gcc))
        return Response(gcc, status=200)
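
Note: the endpoint contract is unchanged; only the edge-list construction moved into the shared helper. A hypothetical client call (the exact route depends on the project's router configuration, so the URL below is illustrative):

import requests

resp = requests.get(
    'http://localhost:8000/api/v1/datasets/1/gcc',  # illustrative path
    params={'exclude_nodes': '3,17,42'},  # comma-separated node ids, as parsed above
)
print(resp.json())  # list of node ids in the largest connected component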