Commit
Merge pull request #142 from ImperialCollegeLondon/un_double_directed
Reduce double-directed edges to improve network cleanup
barneydobson authored May 3, 2024
2 parents 57f28e8 + 5e287d8 commit 6e6c326
Showing 7 changed files with 295 additions and 167 deletions.
8 changes: 4 additions & 4 deletions dev-requirements.txt
@@ -63,9 +63,7 @@ colorama==0.4.6
 contourpy==1.2.0
     # via matplotlib
 coverage[toml]==7.3.2
-    # via
-    #   coverage
-    #   pytest-cov
+    # via pytest-cov
 cramjam==2.7.0
     # via fastparquet
 cycler==0.12.1
@@ -229,7 +227,9 @@ pyproj==3.6.1
     #   pysheds
     #   rioxarray
 pyproject-hooks==1.0.0
-    # via build
+    # via
+    #   build
+    #   pip-tools
 pysheds==0.3.5
     # via swmmanywhere (pyproject.toml)
 pyswmm==1.5.1
76 changes: 54 additions & 22 deletions swmmanywhere/geospatial_utilities.py
@@ -25,6 +25,7 @@
 from pysheds import grid as pgrid
 from rasterio import features
 from scipy.interpolate import RegularGridInterpolator
+from scipy.spatial import KDTree
 from shapely import geometry as sgeom
 from shapely import ops as sops
 from shapely.errors import GEOSException
@@ -614,8 +615,8 @@ def remove_(mp): return remove_zero_area_subareas(mp, removed_subareas)
     return polys_gdf
 
 def derive_rc(polys_gdf: gpd.GeoDataFrame,
-              G: nx.Graph,
-              building_footprints: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
+              building_footprints: gpd.GeoDataFrame,
+              streetcover: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
     """Derive the Runoff Coefficient (RC) of each subcatchment.
 
     The runoff coefficient is the ratio of impervious area to total area. The
@@ -626,10 +627,10 @@ def derive_rc(polys_gdf: gpd.GeoDataFrame,
     Args:
         polys_gdf (gpd.GeoDataFrame): A GeoDataFrame containing polygons that
             represent subcatchments with columns: 'geometry', 'area', and 'id'.
-        G (nx.Graph): The input graph, with node 'ids' that match polys_gdf and
-            edges with the 'id', 'width' and 'geometry' property.
         building_footprints (gpd.GeoDataFrame): A GeoDataFrame containing
             building footprints with a 'geometry' column.
+        streetcover (gpd.GeoDataFrame): A GeoDataFrame containing street cover
+            with a 'geometry' column.
 
     Returns:
         gpd.GeoDataFrame: A GeoDataFrame containing polygons with columns:
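[Editor's note: as a worked example of the ratio the docstring describes (illustrative figures, not from the commit): a 10,000 m² subcatchment with 2,500 m² of building footprint and 1,000 m² of street cover has 3,500 m² of impervious area, so RC = 3,500 / 10,000 = 0.35.]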
@@ -638,23 +639,7 @@ def derive_rc(polys_gdf: gpd.GeoDataFrame,
     polys_gdf = polys_gdf.copy()
 
     ## Format as swmm type catchments
-
-    # TODO think harder about lane widths (am I double counting here?)
-    lines = [
-        {
-            'geometry': x['geometry'].buffer(x['width'],
-                                             cap_style=2,
-                                             join_style=2),
-            'id': x['id']
-        }
-        for u, v, x in G.edges(data=True)
-    ]
-    lines_df = pd.DataFrame(lines)
-    lines_gdf = gpd.GeoDataFrame(lines_df,
-                                 geometry=lines_df.geometry,
-                                 crs = polys_gdf.crs)
-
-    result = gpd.overlay(lines_gdf[['geometry']],
+    result = gpd.overlay(streetcover[['geometry']],
                          building_footprints[['geometry']],
                          how='union')
     result = gpd.overlay(polys_gdf, result)
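[Editor's note: a minimal sketch of the new call pattern with toy GeoDataFrames. The layer contents and CRS are illustrative; only the two overlay steps mirror the diff.]

import geopandas as gpd
from shapely import geometry as sgeom

# One 100 m x 100 m subcatchment plus two impervious layers.
subs = gpd.GeoDataFrame({'id': [1], 'area': [10_000.0]},
                        geometry=[sgeom.box(0, 0, 100, 100)],
                        crs='EPSG:32630')
buildings = gpd.GeoDataFrame(geometry=[sgeom.box(10, 10, 40, 40)],
                             crs='EPSG:32630')   # 900 m2
streetcover = gpd.GeoDataFrame(geometry=[sgeom.box(0, 45, 100, 55)],
                               crs='EPSG:32630')  # 1000 m2

# Union the impervious layers into non-overlapping pieces, intersect with
# the subcatchments, then sum the impervious pieces per subcatchment id.
impervious = gpd.overlay(streetcover[['geometry']],
                         buildings[['geometry']],
                         how='union')
pieces = gpd.overlay(subs, impervious)
rc = pieces.geometry.area.groupby(pieces['id']).sum() \
    / subs.set_index('id')['area']
print(rc)  # (900 + 1000) / 10000 = 0.19 for this layout

Because the union overlay splits the layers into disjoint pieces, overlapping buildings and street cover are not double counted, which is what the removed TODO about lane widths was worrying over.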
@@ -786,4 +771,51 @@ def graph_to_geojson(graph: nx.Graph,
         }
 
     with fid.open('w') as output_file:
-        json.dump(geojson, output_file, indent=2)
+        json.dump(geojson, output_file, indent=2)
+
+def merge_points(coordinates: list[tuple[float, float]],
+                 threshold: float) -> dict:
+    """Merge points that are within a threshold distance.
+
+    Args:
+        coordinates (list): List of coordinates as tuples.
+        threshold (float): The threshold distance for merging points.
+
+    Returns:
+        dict: A dictionary mapping the original point index to the merged point
+            and new coordinate.
+    """
+    # Create a KDTree to pair together points within the threshold
+    tree = KDTree(coordinates)
+    pairs = tree.query_pairs(threshold)
+
+    # Merge pairs into families of points that are all nearby
+    families: list = []
+
+    for pair in pairs:
+        matched_families = [family for family in families
+                            if pair[0] in family or pair[1] in family]
+
+        if matched_families:
+            # Merge all matched families and add the current pair
+            new_family = set(pair)
+            for family in matched_families:
+                new_family.update(family)
+
+            # Remove the old families and add the newly formed one
+            for family in matched_families:
+                families.remove(family)
+            families.append(new_family)
+        else:
+            # No matching family found, so create a new one
+            families.append(set(pair))
+
+    # Create a mapping of the original point to the merged point
+    mapping = {}
+    for family in families:
+        average_point = np.mean([coordinates[i] for i in family], axis=0)
+        family_head = min(family)
+        for i in family:
+            mapping[i] = {'maps_to': family_head,
+                          'coordinate': tuple(average_point)}
+    return mapping
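[Editor's note: a quick illustration of what the new merge_points returns; the import path is assumed from this file's module and the coordinates are made up.]

from swmmanywhere.geospatial_utilities import merge_points  # assumed path

coords = [(0.0, 0.0), (0.5, 0.0), (0.9, 0.0), (10.0, 10.0)]
mapping = merge_points(coords, threshold=1.0)

# Points 0, 1 and 2 chain into a single family (0-1, 0-2 and 1-2 are all
# within 1.0), so each maps to index 0 at the family's mean coordinate.
# Point 3 pairs with nothing, so it does not appear in the mapping at all.
print(mapping[2])   # {'maps_to': 0, 'coordinate': (0.4666..., 0.0)}
print(3 in mapping)  # False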