Sensei13k committed
Commit 31a4102 · verified · 1 Parent(s): c68f2cc

Delete analog.py

Files changed (1)
  1. analog.py +0 -254
analog.py DELETED
@@ -1,254 +0,0 @@
-import sys
-import cv2
-import numpy as np
-from ultralytics import YOLO
-
-# -----------------------------
-# Part 1: Helper functions for cropping
-# -----------------------------
-
-def draw_obb(image, obb):
-    """Draw oriented bounding boxes on an image."""
-    boxes = obb.xyxyxyxy.cpu().numpy()
-    for box in boxes:
-        pts = box.reshape(4, 2).astype(np.int32)
-        cv2.polylines(image, [pts], isClosed=True, color=(0, 255, 0), thickness=2)
-    return image
-
-def order_points(pts):
-    """Order 4 points as top-left, top-right, bottom-right, bottom-left."""
-    rect = np.zeros((4, 2), dtype="float32")
-    s = pts.sum(axis=1)
-    rect[0] = pts[np.argmin(s)]
-    rect[2] = pts[np.argmax(s)]
-    diff = np.diff(pts, axis=1)
-    rect[1] = pts[np.argmin(diff)]
-    rect[3] = pts[np.argmax(diff)]
-    return rect
-
-def crop_region(image, obb):
-    """
-    Crop the meter region from the image using the OBB.
-    Uses a perspective transformation based on the minimal area rectangle.
-    """
-    boxes = obb.xyxyxyxy.cpu().numpy()
-    if len(boxes) == 0:
-        return None
-    # Use the first detected box for cropping.
-    box = boxes[0]
-    pts = box.reshape(4, 2).astype(np.float32)
-
-    # Get the minimal area rectangle for the points.
-    rect = cv2.minAreaRect(pts)
-    width = int(rect[1][0])
-    height = int(rect[1][1])
-    if width <= 0 or height <= 0:
-        return None
-
-    # Destination points for the warp (top-left, top-right, bottom-right, bottom-left)
-    dst_pts = np.array([
-        [0, 0],
-        [width - 1, 0],
-        [width - 1, height - 1],
-        [0, height - 1]], dtype=np.float32)
-
-    # Order the source points and compute the perspective transform.
-    ordered_pts = order_points(pts)
-    M = cv2.getPerspectiveTransform(ordered_pts, dst_pts)
-    cropped = cv2.warpPerspective(image, M, (width, height))
-    return cropped
-
-def detect_and_crop_region(analog_box_model, image_path):
-    """
-    Detect the meter region using analog_box.pt and return the cropped image.
-    """
-    model = YOLO(analog_box_model)
-    image = cv2.imread(image_path)
-    if image is None:
-        print("Error: Could not read image at", image_path)
-        sys.exit(1)
-
-    results = model(image)
-    for r in results:
-        if hasattr(r, "obb") and r.obb is not None:
-            cropped = crop_region(image, r.obb)
-            if cropped is not None:
-                return cropped
-    print("No meter detected.")
-    sys.exit(1)
-
-# -----------------------------
-# Part 2: Meter reading functions (provided calculation code)
-# -----------------------------
-
-def get_center_point(box):
-    """Calculate the center point of a bounding box (4 corners)."""
-    pts = box.reshape(4, 2)
-    center_x = np.mean(pts[:, 0])
-    center_y = np.mean(pts[:, 1])
-    return (center_x, center_y)
-
-def calculate_meter_reading(needle_corners, number_positions):
-    """
-    Given the needle corners and number positions, calculate the meter reading.
-    The numbers are standardized as [0, 5, 10, 15, 20, 25, 30].
-    """
-    number_values = [0, 5, 10, 15, 20, 25, 30]
-
-    # Sort number positions left-to-right by x-coordinate.
-    sorted_positions = sorted(number_positions, key=lambda x: x[1][0])
-    labeled_positions = []
-    for i, (_, position) in enumerate(sorted_positions):
-        if i < len(number_values):
-            labeled_positions.append((number_values[i], position))
-
-    # Compute needle tip as midpoint between corner 3 and corner 4.
-    needle_tip_x = (needle_corners[2][0] + needle_corners[3][0]) / 2
-    needle_tip_y = (needle_corners[2][1] + needle_corners[3][1]) / 2
-    needle_tip = np.array([needle_tip_x, needle_tip_y])
-
-    # Check if needle tip exactly matches a number position.
-    for value, position in labeled_positions:
-        distance = np.sqrt((needle_tip[0] - position[0])**2 + (needle_tip[1] - position[1])**2)
-        if distance < 15:  # threshold for "exact match"
-            return value, "exact_midpoint"
-
-    # If not an exact match, find the two numbers between which the needle lies.
-    left_value = None
-    right_value = None
-    left_position = None
-    right_position = None
-    for i in range(len(labeled_positions) - 1):
-        curr_value, curr_pos = labeled_positions[i]
-        next_value, next_pos = labeled_positions[i + 1]
-        if curr_pos[0] <= needle_tip[0] <= next_pos[0]:
-            left_value = curr_value
-            right_value = next_value
-            left_position = curr_pos
-            right_position = next_pos
-            break
-
-    # If not between any two, return the closest.
-    if left_value is None or right_value is None:
-        min_distance = float('inf')
-        closest_value = None
-        for value, position in labeled_positions:
-            distance = np.sqrt((needle_tip[0] - position[0])**2 + (needle_tip[1] - position[1])**2)
-            if distance < min_distance:
-                min_distance = distance
-                closest_value = value
-        return closest_value, "closest_midpoint"
-
-    # Interpolate based on x-distance.
-    total_x_distance = right_position[0] - left_position[0]
-    needle_x_distance = needle_tip[0] - left_position[0]
-    ratio = needle_x_distance / total_x_distance if total_x_distance > 0 else 0
-    value_range = right_value - left_value
-    interpolated_value = left_value + (ratio * value_range)
-    interpolated_value = round(interpolated_value, 1)
-
-    return interpolated_value, "interpolated_midpoint"
-
-def process_meter_reading(analog_reading_model, image):
-    """
-    Run detection on the provided (cropped) meter image using analog_reading_v2.pt,
-    compute the meter reading, and print the result.
-    """
-    model = YOLO(analog_reading_model)
-    results = model(image)
-
-    needle_corners = None
-    number_positions = []  # Each element is a tuple: (detected_label, center)
-
-    # Process each detection result.
-    for r in results:
-        if hasattr(r, "obb") and r.obb is not None:
-            image = draw_obb(image, r.obb)
-            boxes = r.obb.xyxyxyxy.cpu().numpy()
-            classes = r.obb.cls.cpu().numpy()
-
-            for box, class_id in zip(boxes, classes):
-                class_name = r.names[int(class_id)]
-                center = get_center_point(box)
-                cv2.circle(image, (int(center[0]), int(center[1])), 3, (0, 0, 255), -1)
-
-                if class_name.lower() == "needle":
-                    needle_corners = box.reshape(4, 2)
-                # Check if class is a digit (or the word "numbers") representing meter numbers.
-                elif class_name.isdigit() or class_name in ["0", "5", "10", "15", "20", "25", "30"] or class_name.lower() == "numbers":
-                    number_positions.append((0, center))
-
-    # Label the numbers (using standard ordering) on the image.
-    if number_positions:
-        number_values = [0, 5, 10, 15, 20, 25, 30]
-        sorted_positions = sorted(number_positions, key=lambda x: x[1][0])
-        for i, (_, position) in enumerate(sorted_positions):
-            if i < len(number_values):
-                label = str(number_values[i])
-                cv2.putText(image, label,
-                            (int(position[0]), int(position[1]) - 15),
-                            cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
-
-    # Compute and print the meter reading if needle and numbers are detected.
-    if needle_corners is not None and number_positions:
-        needle_tip_x = (needle_corners[2][0] + needle_corners[3][0]) / 2
-        needle_tip_y = (needle_corners[2][1] + needle_corners[3][1]) / 2
-        needle_tip = np.array([needle_tip_x, needle_tip_y])
-
-        reading, method = calculate_meter_reading(needle_corners, number_positions)
-        if reading is not None:
-            result_text = f"Meter reading: {reading} ({method})"
-            print(result_text)
-
-            # Visualize connection between the needle tip and the nearest number.
-            number_values = [0, 5, 10, 15, 20, 25, 30]
-            sorted_positions = sorted(number_positions, key=lambda x: x[1][0])
-            labeled_positions = []
-            for i, (_, position) in enumerate(sorted_positions):
-                if i < len(number_values):
-                    labeled_positions.append((number_values[i], position))
-
-            # Find adjacent numbers for interpolation visualization.
-            left_pos = None
-            right_pos = None
-            for i in range(len(labeled_positions) - 1):
-                curr_value, curr_pos = labeled_positions[i]
-                next_value, next_pos = labeled_positions[i + 1]
-                if curr_pos[0] <= needle_tip[0] <= next_pos[0]:
-                    left_pos = curr_pos
-                    right_pos = next_pos
-                    break
-
-            if "interpolated" in method and left_pos is not None and right_pos is not None:
-                cv2.line(image,
-                         (int(needle_tip[0]), int(needle_tip[1])),
-                         (int(left_pos[0]), int(left_pos[1])),
-                         (255, 0, 255), 1, cv2.LINE_AA)
-                cv2.line(image,
-                         (int(needle_tip[0]), int(needle_tip[1])),
-                         (int(right_pos[0]), int(right_pos[1])),
-                         (255, 0, 255), 1, cv2.LINE_AA)
-            else:
-                # Connect to closest number if not interpolated.
-                min_distance = float('inf')
-                closest_position = None
-                for _, position in labeled_positions:
-                    distance = np.sqrt((needle_tip[0] - position[0])**2 +
-                                       (needle_tip[1] - position[1])**2)
-                    if distance < min_distance:
-                        min_distance = distance
-                        closest_position = position
-                if closest_position is not None:
-                    cv2.line(image,
-                             (int(needle_tip[0]), int(needle_tip[1])),
-                             (int(closest_position[0]), int(closest_position[1])),
-                             (255, 0, 255), 2)
-        else:
-            print("Needle position is out of range")
-    else:
-        if needle_corners is None:
-            print("Needle not detected")
-        if not number_positions:
-            print("No numbers detected")
-
-    return image
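
For reference, the deleted script's two stages were meant to be chained: the "Part 1" model crops and rectifies the meter face, and the "Part 2" model reads the needle against the scale numbers. The file itself had no entry point, so the driver below is only a sketch of how it might be invoked; the main() wrapper, the CLI shape, and the output filename are assumptions, while the weight filenames analog_box.pt and analog_reading_v2.pt come from the docstrings.

# Hypothetical driver for the deleted analog.py; not part of the original commit.
import sys

import cv2

# Assumes analog.py (as shown in the diff above) is available on the path.
from analog import detect_and_crop_region, process_meter_reading


def main():
    if len(sys.argv) != 2:
        print("Usage: python run_analog.py <image_path>")
        sys.exit(1)

    image_path = sys.argv[1]

    # Stage 1: locate the meter face with the OBB detector and warp it upright.
    cropped = detect_and_crop_region("analog_box.pt", image_path)

    # Stage 2: detect the needle and scale numbers, print the reading,
    # and get back the annotated crop.
    annotated = process_meter_reading("analog_reading_v2.pt", cropped)

    cv2.imwrite("annotated_reading.jpg", annotated)


if __name__ == "__main__":
    main()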