valetudo-map-parser 0.1.7__py3-none-any.whl → 0.1.9a1__py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two published versions.
Files changed (28)
  1. valetudo_map_parser/__init__.py +19 -12
  2. valetudo_map_parser/config/auto_crop.py +174 -116
  3. valetudo_map_parser/config/color_utils.py +105 -0
  4. valetudo_map_parser/config/colors.py +662 -13
  5. valetudo_map_parser/config/drawable.py +624 -279
  6. valetudo_map_parser/config/drawable_elements.py +292 -0
  7. valetudo_map_parser/config/enhanced_drawable.py +324 -0
  8. valetudo_map_parser/config/optimized_element_map.py +406 -0
  9. valetudo_map_parser/config/rand25_parser.py +42 -28
  10. valetudo_map_parser/config/room_outline.py +148 -0
  11. valetudo_map_parser/config/shared.py +29 -5
  12. valetudo_map_parser/config/types.py +102 -51
  13. valetudo_map_parser/config/utils.py +841 -0
  14. valetudo_map_parser/hypfer_draw.py +398 -132
  15. valetudo_map_parser/hypfer_handler.py +259 -241
  16. valetudo_map_parser/hypfer_rooms_handler.py +599 -0
  17. valetudo_map_parser/map_data.py +45 -64
  18. valetudo_map_parser/rand25_handler.py +429 -310
  19. valetudo_map_parser/reimg_draw.py +55 -74
  20. valetudo_map_parser/rooms_handler.py +470 -0
  21. valetudo_map_parser-0.1.9a1.dist-info/METADATA +93 -0
  22. valetudo_map_parser-0.1.9a1.dist-info/RECORD +27 -0
  23. {valetudo_map_parser-0.1.7.dist-info → valetudo_map_parser-0.1.9a1.dist-info}/WHEEL +1 -1
  24. valetudo_map_parser/images_utils.py +0 -398
  25. valetudo_map_parser-0.1.7.dist-info/METADATA +0 -23
  26. valetudo_map_parser-0.1.7.dist-info/RECORD +0 -20
  27. {valetudo_map_parser-0.1.7.dist-info → valetudo_map_parser-0.1.9a1.dist-info}/LICENSE +0 -0
  28. {valetudo_map_parser-0.1.7.dist-info → valetudo_map_parser-0.1.9a1.dist-info}/NOTICE.txt +0 -0
valetudo_map_parser/hypfer_rooms_handler.py (new file)
@@ -0,0 +1,599 @@
+"""
+Hypfer Rooms Handler Module.
+Handles room data extraction and processing for Valetudo Hypfer vacuum maps.
+Provides async methods for room outline extraction and properties management.
+Version: 0.1.9
+"""
+
+from __future__ import annotations
+
+from math import sqrt
+from typing import Any, Dict, Optional, List, Tuple
+
+import numpy as np
+
+from .config.drawable_elements import DrawableElement, DrawingConfig
+from .config.types import LOGGER, RoomsProperties, RoomStore
+
+
+class HypferRoomsHandler:
+    """
+    Handler for extracting and managing room data from Hypfer vacuum maps.
+
+    This class provides methods to:
+    - Extract room outlines using the Ramer-Douglas-Peucker algorithm
+    - Process room properties from JSON data
+    - Generate room masks and extract contours
+
+    All methods are async for better integration with the rest of the codebase.
+    """
+
+    def __init__(self, vacuum_id: str, drawing_config: Optional[DrawingConfig] = None):
+        """
+        Initialize the HypferRoomsHandler.
+
+        Args:
+            vacuum_id: Identifier for the vacuum
+            drawing_config: Configuration for which elements to draw (optional)
+        """
+        self.vacuum_id = vacuum_id
+        self.drawing_config = drawing_config
+        self.current_json_data = None  # Will store the current JSON data being processed
+
+    @staticmethod
+    def sublist(data: list, chunk_size: int) -> list:
+        return [data[i : i + chunk_size] for i in range(0, len(data), chunk_size)]
+
+    # Cache for RDP results
+    _rdp_cache = {}
+
+    @staticmethod
+    def perpendicular_distance(
+        point: tuple[int, int], line_start: tuple[int, int], line_end: tuple[int, int]
+    ) -> float:
+        """Calculate the perpendicular distance from a point to a line.
+        Optimized for performance.
+        """
+        # Fast path for point-to-point distance
+        if line_start == line_end:
+            dx = point[0] - line_start[0]
+            dy = point[1] - line_start[1]
+            return sqrt(dx*dx + dy*dy)
+
+        x, y = point
+        x1, y1 = line_start
+        x2, y2 = line_end
+
+        # Precompute differences for efficiency
+        dx = x2 - x1
+        dy = y2 - y1
+
+        # Calculate the line length squared (avoid sqrt until needed)
+        line_length_sq = dx*dx + dy*dy
+        if line_length_sq == 0:
+            return 0
+
+        # Calculate the distance from the point to the line
+        # Using the formula: |cross_product| / |line_vector|
+        # This is more efficient than the original formula
+        cross_product = abs(dy * x - dx * y + x2 * y1 - y2 * x1)
+        return cross_product / sqrt(line_length_sq)
+
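The cross-product formula above is easy to sanity-check by hand. A minimal sketch, not part of the packaged file, assuming the 0.1.9a1 wheel is installed so the module imports as listed in the RECORD:

from valetudo_map_parser.hypfer_rooms_handler import HypferRoomsHandler

# Distance from (3, 4) to the segment (0, 0)-(10, 0) along the x-axis:
# |0*3 - 10*4 + 10*0 - 0*0| / sqrt(100) = 40 / 10 = 4.0
d = HypferRoomsHandler.perpendicular_distance((3, 4), (0, 0), (10, 0))
assert abs(d - 4.0) < 1e-9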
+    async def rdp(
+        self, points: List[Tuple[int, int]], epsilon: float
+    ) -> List[Tuple[int, int]]:
+        """Ramer-Douglas-Peucker algorithm for simplifying a curve.
+        Optimized with caching and better performance.
+        """
+        # Create a hashable key for caching
+        # Convert points to a tuple for hashing
+        points_tuple = tuple(points)
+        cache_key = (points_tuple, epsilon)
+
+        # Check cache first
+        if cache_key in self._rdp_cache:
+            return self._rdp_cache[cache_key]
+
+        # Base case
+        if len(points) <= 2:
+            return points
+
+        # For very small point sets, process directly without recursion
+        if len(points) <= 5:
+            # Find the point with the maximum distance
+            dmax = 0
+            index = 0
+            for i in range(1, len(points) - 1):
+                d = self.perpendicular_distance(points[i], points[0], points[-1])
+                if d > dmax:
+                    index = i
+                    dmax = d
+
+            # If max distance is greater than epsilon, keep the point
+            if dmax > epsilon:
+                result = [points[0]] + [points[index]] + [points[-1]]
+            else:
+                result = [points[0], points[-1]]
+
+            # Cache and return
+            self._rdp_cache[cache_key] = result
+            return result
+
+        # For larger point sets, use numpy for faster distance calculation
+        if len(points) > 20:
+            # Convert to numpy arrays for vectorized operations
+            points_array = np.array(points)
+            start = points_array[0]
+            end = points_array[-1]
+
+            # Calculate perpendicular distances in one vectorized operation
+            line_vector = end - start
+            line_length = np.linalg.norm(line_vector)
+
+            if line_length == 0:
+                # If start and end are the same, use direct distance
+                distances = np.linalg.norm(points_array[1:-1] - start, axis=1)
+            else:
+                # Normalize line vector
+                line_vector = line_vector / line_length
+                # Calculate perpendicular distances using vector operations
+                vectors_to_points = points_array[1:-1] - start
+                # Project vectors onto line vector
+                projections = np.dot(vectors_to_points, line_vector)
+                # Calculate projected points on line
+                projected_points = start + np.outer(projections, line_vector)
+                # Calculate distances from points to their projections
+                distances = np.linalg.norm(points_array[1:-1] - projected_points, axis=1)
+
+            # Find the point with maximum distance
+            if len(distances) > 0:
+                max_idx = np.argmax(distances)
+                dmax = distances[max_idx]
+                index = max_idx + 1  # +1 because we skipped the first point
+            else:
+                dmax = 0
+                index = 0
+        else:
+            # For medium-sized point sets, use the original algorithm
+            dmax = 0
+            index = 0
+            for i in range(1, len(points) - 1):
+                d = self.perpendicular_distance(points[i], points[0], points[-1])
+                if d > dmax:
+                    index = i
+                    dmax = d
+
+        # If max distance is greater than epsilon, recursively simplify
+        if dmax > epsilon:
+            # Recursive call
+            first_segment = await self.rdp(points[: index + 1], epsilon)
+            second_segment = await self.rdp(points[index:], epsilon)
+
+            # Build the result list (avoiding duplicating the common point)
+            result = first_segment[:-1] + second_segment
+        else:
+            result = [points[0], points[-1]]
+
+        # Limit cache size
+        if len(self._rdp_cache) > 100:  # Keep only 100 most recent items
+            try:
+                self._rdp_cache.pop(next(iter(self._rdp_cache)))
+            except (StopIteration, KeyError):
+                pass
+
+        # Cache the result
+        self._rdp_cache[cache_key] = result
+        return result
+
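For a quick feel of how epsilon drives the simplification, a small sketch outside the packaged file; the vacuum id "vacuum_1" is an arbitrary placeholder:

import asyncio
from valetudo_map_parser.hypfer_rooms_handler import HypferRoomsHandler

handler = HypferRoomsHandler("vacuum_1")
outline = [(0, 0), (5, 1), (10, 0), (10, 10), (0, 10), (0, 0)]
simplified = asyncio.run(handler.rdp(outline, epsilon=2.0))
# Result: [(0, 0), (10, 0), (10, 10), (0, 10), (0, 0)]; the redundant
# (5, 1) vertex is removed and only the rectangle corners survive.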
+    # Cache for corner results
+    _corners_cache = {}
+
+    async def async_get_corners(
+        self, mask: np.ndarray, epsilon_factor: float = 0.05
+    ) -> List[Tuple[int, int]]:
+        """
+        Get the corners of a room shape as a list of (x, y) tuples.
+        Uses contour detection and Douglas-Peucker algorithm to simplify the contour.
+        Optimized with caching and faster calculations.
+
+        Args:
+            mask: Binary mask of the room (1 for room, 0 for background)
+            epsilon_factor: Controls the level of simplification (higher = fewer points)
+
+        Returns:
+            List of (x, y) tuples representing the corners of the room
+        """
+        # Create a hash of the mask and epsilon factor for caching
+        mask_hash = hash((mask.tobytes(), epsilon_factor))
+
+        # Check if we have a cached result
+        if mask_hash in self._corners_cache:
+            return self._corners_cache[mask_hash]
+
+        # Fast path for empty masks
+        if not np.any(mask):
+            return []
+
+        # Find contours in the mask - this uses our optimized method with caching
+        contour = await self.async_moore_neighbor_trace(mask)
+
+        if not contour:
+            # Fallback to bounding box if contour detection fails
+            y_indices, x_indices = np.where(mask > 0)
+            if len(x_indices) == 0 or len(y_indices) == 0:
+                return []
+
+            x_min, x_max = np.min(x_indices), np.max(x_indices)
+            y_min, y_max = np.min(y_indices), np.max(y_indices)
+
+            result = [
+                (x_min, y_min),  # Top-left
+                (x_max, y_min),  # Top-right
+                (x_max, y_max),  # Bottom-right
+                (x_min, y_max),  # Bottom-left
+                (x_min, y_min),  # Back to top-left to close the polygon
+            ]
+
+            # Cache the result
+            self._corners_cache[mask_hash] = result
+            return result
+
+        # For small contours (less than 10 points), skip simplification
+        if len(contour) <= 10:
+            # Ensure the contour is closed
+            if contour[0] != contour[-1]:
+                contour.append(contour[0])
+
+            # Cache and return
+            self._corners_cache[mask_hash] = contour
+            return contour
+
+        # For larger contours, calculate perimeter more efficiently using numpy
+        points = np.array(contour)
+        # Calculate differences between consecutive points
+        diffs = np.diff(points, axis=0)
+        # Calculate squared distances
+        squared_dists = np.sum(diffs**2, axis=1)
+        # Calculate perimeter as sum of distances
+        perimeter = np.sum(np.sqrt(squared_dists))
+
+        # Apply Douglas-Peucker algorithm to simplify the contour
+        epsilon = epsilon_factor * perimeter
+        simplified_contour = await self.rdp(contour, epsilon=epsilon)
+
+        # Ensure the contour has at least 3 points to form a polygon
+        if len(simplified_contour) < 3:
+            # Fallback to bounding box
+            y_indices, x_indices = np.where(mask > 0)
+            x_min, x_max = int(np.min(x_indices)), int(np.max(x_indices))
+            y_min, y_max = int(np.min(y_indices)), int(np.max(y_indices))
+
+            LOGGER.debug(
+                f"{self.vacuum_id}: Too few points in contour, using bounding box"
+            )
+            result = [
+                (x_min, y_min),  # Top-left
+                (x_max, y_min),  # Top-right
+                (x_max, y_max),  # Bottom-right
+                (x_min, y_max),  # Bottom-left
+                (x_min, y_min),  # Back to top-left to close the polygon
+            ]
+
+            # Cache the result
+            self._corners_cache[mask_hash] = result
+            return result
+
+        # Ensure the contour is closed
+        if simplified_contour[0] != simplified_contour[-1]:
+            simplified_contour.append(simplified_contour[0])
+
+        # Limit cache size
+        if len(self._corners_cache) > 50:  # Keep only 50 most recent items
+            try:
+                self._corners_cache.pop(next(iter(self._corners_cache)))
+            except (StopIteration, KeyError):
+                pass
+
+        # Cache the result
+        self._corners_cache[mask_hash] = simplified_contour
+        return simplified_contour
+
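To see the corner-extraction path end to end, a small sketch with a synthetic mask; the vacuum id is a placeholder, and rooms under 100 pixels take the bounding-box fast path, so scipy is not needed here:

import asyncio
import numpy as np
from valetudo_map_parser.hypfer_rooms_handler import HypferRoomsHandler

handler = HypferRoomsHandler("vacuum_1")
mask = np.zeros((20, 20), dtype=np.uint8)
mask[5:10, 5:15] = 1  # a 5 x 10 pixel "room"
corners = asyncio.run(handler.async_get_corners(mask))
# Fewer than 100 set pixels, so the tracer returns the closed bounding box:
# [(5, 5), (14, 5), (14, 9), (5, 9), (5, 5)]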
+    # Cache for labeled arrays to avoid redundant calculations
+    _label_cache = {}
+    _hull_cache = {}
+
+    @staticmethod
+    async def async_moore_neighbor_trace(mask: np.ndarray) -> List[Tuple[int, int]]:
+        """
+        Trace the contour of a binary mask using an optimized approach.
+        Uses caching and simplified algorithms for better performance.
+
+        Args:
+            mask: Binary mask of the room (1 for room, 0 for background)
+
+        Returns:
+            List of (x, y) tuples representing the contour
+        """
+        # Create a hash of the mask for caching
+        mask_hash = hash(mask.tobytes())
+
+        # Check if we have a cached result
+        if mask_hash in HypferRoomsHandler._hull_cache:
+            return HypferRoomsHandler._hull_cache[mask_hash]
+
+        # Fast path for empty masks
+        if not np.any(mask):
+            return []
+
+        # Find bounding box of non-zero elements (much faster than full labeling for simple cases)
+        y_indices, x_indices = np.where(mask > 0)
+        if len(x_indices) == 0 or len(y_indices) == 0:
+            return []
+
+        # For very small rooms (less than 100 pixels), just use bounding box
+        if len(x_indices) < 100:
+            x_min, x_max = np.min(x_indices), np.max(x_indices)
+            y_min, y_max = np.min(y_indices), np.max(y_indices)
+
+            # Create a simple rectangle
+            hull_vertices = [
+                (int(x_min), int(y_min)),  # Top-left
+                (int(x_max), int(y_min)),  # Top-right
+                (int(x_max), int(y_max)),  # Bottom-right
+                (int(x_min), int(y_max)),  # Bottom-left
+                (int(x_min), int(y_min)),  # Back to top-left to close the polygon
+            ]
+
+            # Cache and return the result
+            HypferRoomsHandler._hull_cache[mask_hash] = hull_vertices
+            return hull_vertices
+
+        # For larger rooms, use convex hull but with optimizations
+        try:
+            # Import here to avoid overhead for small rooms
+            from scipy import ndimage
+            from scipy.spatial import ConvexHull
+
+            # Use cached labeled array if available
+            if mask_hash in HypferRoomsHandler._label_cache:
+                labeled_array = HypferRoomsHandler._label_cache[mask_hash]
+            else:
+                # Find connected components - this is expensive
+                labeled_array, _ = ndimage.label(mask)
+                # Cache the result for future use
+                HypferRoomsHandler._label_cache[mask_hash] = labeled_array
+
+                # Limit cache size to prevent memory issues
+                if len(HypferRoomsHandler._label_cache) > 50:  # Keep only 50 most recent items
+                    # Remove oldest item (first key)
+                    try:
+                        HypferRoomsHandler._label_cache.pop(next(iter(HypferRoomsHandler._label_cache)))
+                    except (StopIteration, KeyError):
+                        # Handle edge case of empty cache
+                        pass
+
+            # Create a mask with all components
+            all_components_mask = (labeled_array > 0)
+
+            # Sample points instead of using all points for large masks
+            # This significantly reduces computation time for ConvexHull
+            if len(x_indices) > 1000:
+                # Sample every 10th point for very large rooms
+                step = 10
+            elif len(x_indices) > 500:
+                # Sample every 5th point for medium-sized rooms
+                step = 5
+            else:
+                # Use all points for smaller rooms
+                step = 1
+
+            # Sample points using the step size
+            sampled_y = y_indices[::step]
+            sampled_x = x_indices[::step]
+
+            # Create a list of points
+            points = np.column_stack((sampled_x, sampled_y))
+
+            # Compute the convex hull
+            hull = ConvexHull(points)
+
+            # Extract the vertices of the convex hull
+            hull_vertices = [(int(points[v, 0]), int(points[v, 1])) for v in hull.vertices]
+
+            # Ensure the hull is closed
+            if hull_vertices[0] != hull_vertices[-1]:
+                hull_vertices.append(hull_vertices[0])
+
+            # Cache and return the result
+            HypferRoomsHandler._hull_cache[mask_hash] = hull_vertices
+
+            # Limit hull cache size
+            if len(HypferRoomsHandler._hull_cache) > 50:
+                try:
+                    HypferRoomsHandler._hull_cache.pop(next(iter(HypferRoomsHandler._hull_cache)))
+                except (StopIteration, KeyError):
+                    pass
+
+            return hull_vertices
+
+        except Exception as e:
+            LOGGER.warning(f"Failed to compute convex hull: {e}. Falling back to bounding box.")
+
+            # Fallback to bounding box if convex hull fails
+            x_min, x_max = np.min(x_indices), np.max(x_indices)
+            y_min, y_max = np.min(y_indices), np.max(y_indices)
+
+            # Create a simple rectangle
+            hull_vertices = [
+                (int(x_min), int(y_min)),  # Top-left
+                (int(x_max), int(y_min)),  # Top-right
+                (int(x_max), int(y_max)),  # Bottom-right
+                (int(x_min), int(y_max)),  # Bottom-left
+                (int(x_min), int(y_min)),  # Back to top-left to close the polygon
+            ]
+
+            # Cache and return the result
+            HypferRoomsHandler._hull_cache[mask_hash] = hull_vertices
+            return hull_vertices
+
+
+
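For large rooms the method subsamples the pixel coordinates before handing them to scipy's ConvexHull. The same idea in isolation, as a sketch with an invented mask and assuming scipy is installed (the package imports it lazily):

import numpy as np
from scipy.spatial import ConvexHull

mask = np.zeros((200, 200), dtype=np.uint8)
mask[20:120, 30:180] = 1  # a large rectangular room
ys, xs = np.where(mask > 0)
step = 10 if len(xs) > 1000 else 5 if len(xs) > 500 else 1
pts = np.column_stack((xs[::step], ys[::step]))
hull = ConvexHull(pts)
outline = [(int(pts[v, 0]), int(pts[v, 1])) for v in hull.vertices]
outline.append(outline[0])  # close the polygon, as the method does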
+    async def async_extract_room_properties(
+        self, json_data: Dict[str, Any]
+    ) -> RoomsProperties:
+        """
+        Extract room properties from the JSON data.
+
+        Args:
+            json_data: JSON data from the vacuum
+
+        Returns:
+            Dictionary of room properties
+        """
+        room_properties = {}
+        pixel_size = json_data.get("pixelSize", 5)
+        height = json_data["size"]["y"]
+        width = json_data["size"]["x"]
+        vacuum_id = self.vacuum_id
+        room_id_counter = 0
+
+        # Store the JSON data for reference in other methods
+        self.current_json_data = json_data
+
+        for layer in json_data.get("layers", []):
+            if layer.get("__class") == "MapLayer" and layer.get("type") == "segment":
+                meta_data = layer.get("metaData", {})
+                segment_id = meta_data.get("segmentId")
+                name = meta_data.get("name", f"Room {segment_id}")
+
+                # Check if this room is disabled in the drawing configuration
+                # The room_id_counter is 0-based, but DrawableElement.ROOM_X is 1-based
+                current_room_id = room_id_counter + 1
+                room_id_counter = (
+                    room_id_counter + 1
+                ) % 16  # Cycle room_id back to 0 after 15
+
+                if 1 <= current_room_id <= 15 and self.drawing_config is not None:
+                    room_element = getattr(
+                        DrawableElement, f"ROOM_{current_room_id}", None
+                    )
+                    if room_element and not self.drawing_config.is_enabled(
+                        room_element
+                    ):
+                        LOGGER.debug(
+                            "%s: Room %d is disabled and will be skipped",
+                            self.vacuum_id,
+                            current_room_id,
+                        )
+                        continue
+
+                compressed_pixels = layer.get("compressedPixels", [])
+                pixels = self.sublist(compressed_pixels, 3)
+
+                # Create a binary mask for the room
+                if not pixels:
+                    LOGGER.warning(f"Skipping segment {segment_id}: no pixels found")
+                    continue
+
+                mask = np.zeros((height, width), dtype=np.uint8)
+                for x, y, length in pixels:
+                    if 0 <= y < height and 0 <= x < width and x + length <= width:
+                        mask[y, x : x + length] = 1
+
+                # Find the room outline using the improved get_corners function
+                # Adjust epsilon_factor to control the level of simplification (higher = fewer points)
+                outline = await self.async_get_corners(mask, epsilon_factor=0.05)
+
+                if not outline:
+                    LOGGER.warning(
+                        f"Skipping segment {segment_id}: failed to generate outline"
+                    )
+                    continue
+
+                # Calculate the center of the room
+                xs, ys = zip(*outline)
+                x_min, x_max = min(xs), max(xs)
+                y_min, y_max = min(ys), max(ys)
+
+                # Scale coordinates by pixel_size
+                scaled_outline = [(x * pixel_size, y * pixel_size) for x, y in outline]
+
+                room_id = str(segment_id)
+                room_properties[room_id] = {
+                    "number": segment_id,
+                    "outline": scaled_outline,  # Already includes the closing point
+                    "name": name,
+                    "x": ((x_min + x_max) * pixel_size) // 2,
+                    "y": ((y_min + y_max) * pixel_size) // 2,
+                }
+
+        RoomStore(vacuum_id, room_properties)
+        return room_properties
+
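The JSON fields read above (size, pixelSize, and MapLayer segments whose compressedPixels are flat [x, y, run_length, ...] triplets) can be exercised with a minimal hand-built payload. A sketch; the segment id, name, and pixel runs below are invented for illustration:

import asyncio
from valetudo_map_parser.hypfer_rooms_handler import HypferRoomsHandler

json_data = {
    "pixelSize": 5,
    "size": {"x": 100, "y": 100},
    "layers": [
        {
            "__class": "MapLayer",
            "type": "segment",
            "metaData": {"segmentId": 1, "name": "Kitchen"},
            # two horizontal runs of 20 pixels starting at (10, 10) and (10, 11)
            "compressedPixels": [10, 10, 20, 10, 11, 20],
        }
    ],
}

handler = HypferRoomsHandler("vacuum_1")
rooms = asyncio.run(handler.async_extract_room_properties(json_data))
# rooms["1"]["outline"] holds the closed outline scaled by pixelSize,
# and rooms["1"]["x"] / rooms["1"]["y"] give the room centre.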
+    async def get_room_at_position(
+        self, x: int, y: int, room_properties: Optional[RoomsProperties] = None
+    ) -> Optional[Dict[str, Any]]:
+        """
+        Get the room at a specific position.
+
+        Args:
+            x: X coordinate
+            y: Y coordinate
+            room_properties: Room properties dictionary (optional)
+
+        Returns:
+            Room data dictionary or None if no room at position
+        """
+        if room_properties is None:
+            room_store = RoomStore(self.vacuum_id)
+            room_properties = room_store.get_rooms()
+
+        if not room_properties:
+            return None
+
+        for room_id, room_data in room_properties.items():
+            outline = room_data.get("outline", [])
+            if not outline or len(outline) < 3:
+                continue
+
+            # Check if point is inside the polygon
+            if self.point_in_polygon(x, y, outline):
+                return {
+                    "id": room_id,
+                    "name": room_data.get("name", f"Room {room_id}"),
+                    "x": room_data.get("x", 0),
+                    "y": room_data.get("y", 0),
+                }
+
+        return None
+
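Continuing the hand-built json_data sketch above (same handler and rooms variables), the position lookup works in the scaled coordinate space of the stored outlines:

room = asyncio.run(handler.get_room_at_position(60, 52, rooms))
# -> {"id": "1", "name": "Kitchen", "x": 97, "y": 52} for that sample,
#    or None when the point lies outside every room outline.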
+    @staticmethod
+    def point_in_polygon(x: int, y: int, polygon: List[Tuple[int, int]]) -> bool:
+        """
+        Check if a point is inside a polygon using ray casting algorithm.
+
+        Args:
+            x: X coordinate of the point
+            y: Y coordinate of the point
+            polygon: List of (x, y) tuples forming the polygon
+
+        Returns:
+            True if the point is inside the polygon, False otherwise
+        """
+        n = len(polygon)
+        inside = False
+
+        p1x, p1y = polygon[0]
+        xinters = None  # Initialize with default value
+        for i in range(1, n + 1):
+            p2x, p2y = polygon[i % n]
+            if y > min(p1y, p2y):
+                if y <= max(p1y, p2y):
+                    if x <= max(p1x, p2x):
+                        if p1y != p2y:
+                            xinters = (y - p1y) * (p2x - p1x) / (p2y - p1y) + p1x
+                        if p1x == p2x or x <= xinters:
+                            inside = not inside
+            p1x, p1y = p2x, p2y
+
+        return inside
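The ray-casting helper is a plain staticmethod, so it can be sanity-checked in isolation; the closed square below is purely illustrative:

from valetudo_map_parser.hypfer_rooms_handler import HypferRoomsHandler

square = [(0, 0), (10, 0), (10, 10), (0, 10), (0, 0)]
assert HypferRoomsHandler.point_in_polygon(5, 5, square)
assert not HypferRoomsHandler.point_in_polygon(15, 5, square)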