Issue opened by mosscoder, 1 year ago.
def generate_points(poly, num, seed=None):
    """Sample `num` points uniformly at random inside the polygon `poly`.

    Parameters
    ----------
    poly : polygon object -- assumed shapely-like, providing `.bounds` and
        supporting `Point.within(poly)` (TODO confirm against caller).
    num : int
        Number of interior points to generate.
    seed : int or None, optional
        If given, seeds numpy's global RNG so the sample is reproducible.
        (The caller `optimize_voronoi_complexity` passes a seed here; the
        original signature did not accept one, which raised a TypeError.)

    Returns
    -------
    list of (x, y) coordinate tuples, all strictly within `poly`.
    """
    if seed is not None:
        np.random.seed(seed)
    minx, miny, maxx, maxy = poly.bounds
    points = []
    # Rejection sampling: draw candidates uniformly from the bounding box
    # and keep only those that fall inside the polygon itself.
    while len(points) < num:
        candidate = Point(np.random.uniform(minx, maxx),
                          np.random.uniform(miny, maxy))
        if candidate.within(poly):
            points.append(candidate)
    return [p.coords[0] for p in points]
def calculate_voronoi_areas(poly, points):
    """Tessellate `points` with a Voronoi diagram, clip each cell to `poly`,
    and return (areas, polygons) for the valid clipped cells.

    Returns
    -------
    areas : list of float -- area of each valid clipped cell.
    valid_cells : list of polygons -- the valid clipped cells themselves.
    """
    # Voronoi tessellation of the seed points.
    vor = Voronoi(points)
    # Convert the unbounded outer regions into finite polygons
    # (project helper -- see its definition for the construction).
    regions, vertices = voronoi_finite_polygons_2d(vor)
    # Clip every finite cell against the original polygon.
    clipped = []
    for region in regions:
        cell = Polygon(vertices[region])
        clipped.append(poly.intersection(cell))
    # NOTE(review): `is_valid` checks geometric validity only, not
    # containment in `poly` -- the original comment overstated this.
    valid_cells = [cell for cell in clipped if cell.is_valid]
    areas = [cell.area for cell in valid_cells]
    return areas, valid_cells
def calculate_voronoi_perimeters(poly, points):
    """Tessellate `points` with a Voronoi diagram, clip each cell to `poly`,
    and return (perimeters, polygons) for the valid clipped cells.

    Returns
    -------
    perimeters : list of float -- boundary length of each valid clipped cell.
    valid_cells : list of polygons -- the valid clipped cells themselves.
    """
    # Voronoi tessellation of the seed points.
    vor = Voronoi(points)
    # Convert the unbounded outer regions into finite polygons
    # (project helper -- see its definition for the construction).
    regions, vertices = voronoi_finite_polygons_2d(vor)
    # Clip every finite cell against the original polygon.
    clipped = [poly.intersection(Polygon(vertices[region]))
               for region in regions]
    # NOTE(review): `is_valid` checks geometric validity only, not
    # containment in `poly` -- the original comment overstated this.
    valid_cells = [cell for cell in clipped if cell.is_valid]
    perimeters = [cell.length for cell in valid_cells]
    return perimeters, valid_cells
def optimize_voronoi_complexity(poly, num, max_iterations=1000, epsilon=0.001, learning_rate=0.1, seed=None):
    """Greedily minimize the mean complexity (perimeter/area) of a Voronoi
    partition of `poly` by nudging one random seed point per iteration.

    Parameters
    ----------
    poly : polygon object (shapely-like) to partition.
    num : int -- number of Voronoi seed points.
    max_iterations : int -- hard cap on optimization steps.
    epsilon : float -- stop once the mean complexity drops below this.
    learning_rate : float -- step size for candidate point moves.
    seed : int or None -- seeds numpy's global RNG for reproducibility.

    Returns
    -------
    (polygons, mns) -- the clipped Voronoi cells from the last iteration
    and the per-iteration mean-complexity history.
    """
    # Seed BEFORE generating the initial points so the whole run is
    # reproducible. (The original seeded after generation, and also passed
    # `seed` to generate_points, whose signature did not accept it.)
    np.random.seed(seed)
    points = generate_points(poly, num)
    mns = []        # mean complexity at each iteration
    polygons = []   # defined even when max_iterations <= 0
    for _ in range(max_iterations):
        # One tessellation per iteration: the perimeter call already returns
        # the valid clipped cells, so read areas off those same polygons
        # instead of recomputing the Voronoi diagram a second time.
        perimeters, polygons = calculate_voronoi_perimeters(poly, points)
        mn = np.mean([p / cell.area for p, cell in zip(perimeters, polygons)])
        mns.append(mn)
        # Converged: mean complexity fell below the threshold.
        if mn < epsilon:
            break
        # Pick one random point and probe four axis-aligned moves.
        idx = np.random.randint(0, len(points))
        cx, cy = points[idx]
        best_mn = mn
        best_move = None
        for dx, dy in ((-1, 0), (1, 0), (0, -1), (0, 1)):
            trial = points.copy()
            trial[idx] = (cx + dx * learning_rate, cy + dy * learning_rate)
            t_perims, t_polys = calculate_voronoi_perimeters(poly, trial)
            t_mn = np.mean([p / cell.area for p, cell in zip(t_perims, t_polys)])
            if t_mn < best_mn:
                best_mn = t_mn
                best_move = (dx, dy)
        # Commit the best improving move, if any direction helped.
        if best_move is not None:
            dx, dy = best_move
            points[idx] = (cx + dx * learning_rate, cy + dy * learning_rate)
    return polygons, mns
# Run the optimizer on the flight ROI, projected to metric coordinates
# (EPSG:26911, UTM zone 11N) so perimeter/area units are meaningful.
# NOTE(review): the second return value holds per-iteration MEAN
# complexities (perimeter/area), not standard deviations -- the original
# `std_devs` name was misleading (its own ylabel said "Mean complexity").
parts, mean_complexities = optimize_voronoi_complexity(
    flight_roi.to_crs(26911).geometry[0], 10,
    learning_rate=30, max_iterations=1000, seed=0)

# Plot the optimized Voronoi partition.
gpd.GeoSeries(parts).plot(edgecolor='black')

# Plot the optimization trace.
import matplotlib.pyplot as plt

plt.figure()
plt.plot(mean_complexities)
plt.xlabel('Iteration')
plt.ylabel('Mean complexity (perimeter/area)')
plt.title('Optimization of Voronoi Complexity')
plt.grid(True)
plt.show()
The above partitions resulted in processing times ranging from 3 to 6 hours. I think this outcome is adequate, but there is room for improvement. Specifically, I suggest a tuning parameter alpha which could be used to upweight the influence of polygon size on the optimization.
Currently I grid out the flight area into square cells, assigning each cell to a node. Cells on the periphery tend to have many fewer photos than the interior.
I propose computing Voronoi diagrams / Thiessen polygons as an alternative, using GCPs, or perhaps the photos themselves, as diagram inputs, with the goal of distributing photos more equally across nodes.