Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add node-specific tolerances to intersection consolidation #1160

Merged
merged 18 commits into from
Apr 25, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ Read the v2 [migration guide](https://github.com/gboeing/osmnx/issues/1123)
- handle implicit maxspeed values in add_edge_speeds function (#1153)
- change add_node_elevations_google default batch_size to 512 to match Google's limit (#1115)
- allow analysis of MultiDiGraph directional edge bearings and orientation (#1139)
- allow passing node-specific tolerance values for intersection consolidation (#1160)
- fix bug in \_downloader.\_save_to_cache function usage (#1107)
- fix bug in handling requests ConnectionError when querying Overpass status endpoint (#1113)
- fix minor bugs throughout to address inconsistencies revealed by type enforcement (#1107 #1114)
Expand Down
47 changes: 35 additions & 12 deletions osmnx/simplification.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,9 @@
import geopandas as gpd
import networkx as nx
import numpy as np
import pandas as pd
from shapely import LineString
from shapely import MultiPolygon
from shapely import Point
from shapely import Polygon

from . import convert
from . import stats
Expand Down Expand Up @@ -446,7 +445,7 @@ def simplify_graph( # noqa: C901, PLR0912
def consolidate_intersections(
G: nx.MultiDiGraph,
*,
tolerance: float = 10,
tolerance: float | dict[int, float] = 10,
rebuild_graph: bool = True,
dead_ends: bool = False,
reconnect_edges: bool = True,
Expand All @@ -463,6 +462,10 @@ def consolidate_intersections(
Note `tolerance` represents a per-node buffering radius: for example, to
consolidate nodes within 10 meters of each other, use `tolerance=5`.

It's also possible to specify different tolerances for each node. This can
be done by passing a dictionary mapping node IDs to individual tolerance
values, like `tolerance={1: 5, 2: 10}`.

When `rebuild_graph` is False, it uses a purely geometric (and relatively
fast) algorithm to identify "geometrically close" nodes, merge them, and
return the merged intersections' centroids. When `rebuild_graph` is True,
Expand All @@ -487,7 +490,8 @@ def consolidate_intersections(
A projected graph.
tolerance
Nodes are buffered to this distance (in graph's geometry's units) and
subsequent overlaps are dissolved into a single node.
subsequent overlaps are dissolved into a single node. Can be a float
value or a dictionary mapping node IDs to individual tolerance values.
rebuild_graph
If True, consolidate the nodes topologically, rebuild the graph, and
return as MultiDiGraph. Otherwise, consolidate the nodes geometrically
Expand Down Expand Up @@ -547,7 +551,10 @@ def consolidate_intersections(
return _merge_nodes_geometric(G, tolerance).centroid


def _merge_nodes_geometric(G: nx.MultiDiGraph, tolerance: float) -> gpd.GeoSeries:
def _merge_nodes_geometric(
G: nx.MultiDiGraph,
tolerance: float | dict[int, float],
) -> gpd.GeoSeries:
"""
Geometrically merge nodes within some distance of each other.

Expand All @@ -558,23 +565,38 @@ def _merge_nodes_geometric(G: nx.MultiDiGraph, tolerance: float) -> gpd.GeoSerie
tolerance
Buffer nodes to this distance (in graph's geometry's units) then merge
overlapping polygons into a single polygon via unary union operation.
Can be a float value or a dictionary mapping node IDs to individual
tolerance values.

Returns
-------
merged
The merged overlapping polygons of the buffered nodes.
"""
# buffer nodes GeoSeries then get unary union to merge overlaps
merged = convert.graph_to_gdfs(G, edges=False)["geometry"].buffer(tolerance).unary_union
gdf_nodes = convert.graph_to_gdfs(G, edges=False)

if isinstance(tolerance, dict):
# Create a Series of tolerances, reindexed to match the nodes
tolerances = pd.Series(tolerance).reindex(gdf_nodes.index)
# Buffer nodes to the specified distance
buffered_geoms = gdf_nodes.geometry.buffer(tolerances)
# Replace the missing values with the original points
buffered_geoms = buffered_geoms.fillna(gdf_nodes["geometry"])
else:
# Buffer nodes to the specified distance
buffered_geoms = gdf_nodes.geometry.buffer(tolerance)

# Merge overlapping geometries into a single geometry
merged = buffered_geoms.unary_union

# if only a single node results, make it iterable to convert to GeoSeries
merged = MultiPolygon([merged]) if isinstance(merged, Polygon) else merged
return gpd.GeoSeries(merged.geoms, crs=G.graph["crs"])
# extract the member geometries if it's a multi-geometry
merged = merged.geoms if hasattr(merged, "geoms") else merged
return gpd.GeoSeries(merged, crs=G.graph["crs"])


def _consolidate_intersections_rebuild_graph( # noqa: C901,PLR0912,PLR0915
G: nx.MultiDiGraph,
tolerance: float,
tolerance: float | dict[int, float],
reconnect_edges: bool, # noqa: FBT001
node_attr_aggs: dict[str, Any] | None,
) -> nx.MultiDiGraph:
Expand All @@ -599,7 +621,8 @@ def _consolidate_intersections_rebuild_graph( # noqa: C901,PLR0912,PLR0915
A projected graph.
tolerance
Nodes are buffered to this distance (in graph's geometry's units) and
subsequent overlaps are dissolved into a single node.
subsequent overlaps are dissolved into a single node. Can be a float
value or a dictionary mapping node IDs to individual tolerance values.
reconnect_edges
If True, reconnect edges (and their geometries) to the consolidated
nodes in rebuilt graph, and update the edge length attributes. If
Expand Down
13 changes: 13 additions & 0 deletions tests/test_osmnx.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,12 +125,25 @@ def test_stats() -> None:
reconnect_edges=False,
)
G_clean = ox.consolidate_intersections(G_proj, tolerance=10, rebuild_graph=False)
G_clean = ox.consolidate_intersections(G_proj, tolerance=50000, rebuild_graph=True)

# try consolidating an empty graph
G = nx.MultiDiGraph(crs="epsg:4326")
G_clean = ox.consolidate_intersections(G, rebuild_graph=True)
G_clean = ox.consolidate_intersections(G, rebuild_graph=False)

# test passing dict of tolerances to consolidate_intersections
tols: dict[int, float]
# every node present
tols = {node: 5 for node in G_proj.nodes}
G_clean = ox.consolidate_intersections(G_proj, tolerance=tols, rebuild_graph=True)
# one node missing
tols.popitem()
G_clean = ox.consolidate_intersections(G_proj, tolerance=tols, rebuild_graph=True)
# one node 0
tols[next(iter(tols))] = 0
G_clean = ox.consolidate_intersections(G_proj, tolerance=tols, rebuild_graph=True)


def test_bearings() -> None:
"""Test bearings and orientation entropy."""
Expand Down