screenshot.py

import cv2
import time
import datetime
import numpy as np
import io
from PIL import Image
from ppadb.client import Client as AdbClient

client = AdbClient(host="127.0.0.1", port=5037)
device = client.device("192.168.178.32:5555")
# templates
close_sub_fights = cv2.imread("templates/close_sub_fights.jpg")
close_fight = cv2.imread("templates/close_fight.jpg")
fight_button = cv2.imread("templates/i.jpg")
end_of_log = cv2.imread("templates/end_of_log.jpg")
end_of_log2 = cv2.imread("templates/end_of_log2.jpg")

# cursor positions
fight_scroll_top = "2200 626"
fight_scroll_bottom = "2200 1500"
titan_fight = "1400 860"
damage_taken = "450 850"
close_details = "2175 450"
close_titan_fight = "2650 570"
defense_log = "350 870"


def non_max_suppression(boxes, overlapThresh):
    if len(boxes) == 0:
        return []
    # Convert to float
    boxes = np.array(boxes, dtype="float")
    # Initialize the list of picked indexes
    pick = []
    # Grab the coordinates of the bounding boxes
    x1 = boxes[:, 0]
    y1 = boxes[:, 1]
    x2 = boxes[:, 2]
    y2 = boxes[:, 3]
    # Compute the area of the bounding boxes and sort by bottom-right y-coordinate
    area = (x2 - x1 + 1) * (y2 - y1 + 1)
    idxs = np.argsort(y2)
    # Keep looping while some indexes still remain in the indexes list
    while len(idxs) > 0:
        # Grab the last index in the indexes list and add the index value
        # to the list of picked indexes
        last = len(idxs) - 1
        i = idxs[last]
        pick.append(i)
        # Find the largest (x, y) coordinates for the start of the intersection
        # and the smallest (x, y) coordinates for the end of the intersection
        xx1 = np.maximum(x1[i], x1[idxs[:last]])
        yy1 = np.maximum(y1[i], y1[idxs[:last]])
        xx2 = np.minimum(x2[i], x2[idxs[:last]])
        yy2 = np.minimum(y2[i], y2[idxs[:last]])
        # Compute the width and height of the intersection
        w = np.maximum(0, xx2 - xx1 + 1)
        h = np.maximum(0, yy2 - yy1 + 1)
        # Compute the ratio of overlap
        overlap = (w * h) / area[idxs[:last]]
        # Delete all indexes from the index list that have overlap greater than the threshold
        idxs = np.delete(
            idxs, np.concatenate(([last], np.where(overlap > overlapThresh)[0]))
        )
    # Return only the bounding boxes that were picked
    return boxes[pick].astype("int")


def screen_has_changed(prev_screenshot, threshold=0.01):
    # Take a new screenshot
    current_screenshot = device.screencap()
    # Convert the raw screenshot bytes to NumPy arrays
    prev_img = np.frombuffer(prev_screenshot, dtype=np.uint8)
    current_img = np.frombuffer(current_screenshot, dtype=np.uint8)
    # Decode them into BGR images
    prev_img = cv2.imdecode(prev_img, cv2.IMREAD_COLOR)
    current_img = cv2.imdecode(current_img, cv2.IMREAD_COLOR)
    # Calculate the absolute per-pixel difference
    diff = cv2.absdiff(prev_img, current_img)
    non_zero_count = np.count_nonzero(diff)
    # print(f"diff: {non_zero_count} > {threshold * diff.size} = {non_zero_count > threshold * diff.size}")
    # Check if the difference exceeds the threshold
    return non_zero_count > threshold * diff.size


def save_screenshot():
    # Take a screenshot
    result = device.screencap()
    timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    image = Image.open(io.BytesIO(result))
    jpeg_filename = f"/mnt/t/nextcloud/InstantUpload/Herowars/{timestamp}.jpg"
    image = image.convert("RGB")  # Convert to RGB mode for JPEG
    with open(jpeg_filename, "wb") as fp:
        image.save(fp, format="JPEG", quality=85)  # Adjust quality as needed
    time.sleep(0.5)


def wait_for_screen_change():
    # Poll until the screen content differs from the initial screenshot
    prev_screenshot = device.screencap()
    while not screen_has_changed(prev_screenshot):
        time.sleep(0.1)  # Polling interval


def tap(location):
    device.shell(f"input tap {location}")
    time.sleep(1)


def swipe(start, end):
    device.shell(f"input swipe {start} {end} 1000")
    time.sleep(0.5)


def tap_button(template):
    # Tap the first on-screen match of the given template, if any
    button = find_templates(template)
    if len(button) == 0:
        return
    tap(f"{button[0][0]} {button[0][1]}")


def is_end_of_log():
    # The end of the guild war log is marked by one of two template images
    templates = find_templates(end_of_log)
    if len(templates) == 0:
        templates = find_templates(end_of_log2)
    result = len(templates) > 0
    if result:
        print("reached end of guild war log!")
    return result


def find_templates(template_image):
    screenshot = device.screencap()
    target_image = Image.open(io.BytesIO(screenshot))
    # Convert the image to a NumPy array and then to BGR format (which OpenCV uses)
    target_image = np.array(target_image)
    target_image = cv2.cvtColor(target_image, cv2.COLOR_RGB2BGR)
    # shape is (height, width, channels)
    h, w = template_image.shape[:2]
    # Template matching
    result = cv2.matchTemplate(target_image, template_image, cv2.TM_CCOEFF_NORMED)
    # Define a threshold
    threshold = 0.9  # Adjust this threshold based on your requirements
    # Find all locations where the match exceeds the threshold
    locations = np.where(result >= threshold)
    locations = list(zip(*locations[::-1]))
    # Create a list of rectangles (x1, y1, x2, y2)
    rectangles = [(*loc, loc[0] + w, loc[1] + h) for loc in locations]
    # Apply non-maximum suppression to remove overlaps
    rectangles = non_max_suppression(rectangles, 0.3)
    # Collect the center point of each remaining match
    coordinates = []
    for startX, startY, endX, endY in rectangles:
        # Calculate the center coordinates
        centerX = round(startX + (endX - startX) / 2)
        centerY = round(startY + (endY - startY) / 2)
        # Append the coordinate pair to the list
        coordinates.append((centerX, centerY))
    # Sort the coordinates by y value in ascending order
    return sorted(coordinates, key=lambda x: x[1])


def find_max_y_pair(coordinates):
    # Find the coordinate pair with the maximum y value
    result = max(coordinates, key=lambda x: x[1])
    return f"{result[0]} {result[1]}"


def take_fight_screenshots():
    save_screenshot()
    # # if you desperately need submits
    # tap(damage_taken)
    # save_screenshot()
    tap_button(close_fight)
    time.sleep(1)


def process_war_log():
    buttons = find_templates(fight_button)
    if len(buttons) == 0:
        # No fight buttons visible; swipe to scroll further through the log
        swipe(fight_scroll_bottom, fight_scroll_top)
        return
    # Process all found buttons
    for pair in buttons:
        tap(f"{pair[0]} {pair[1]}")
        sub_buttons = find_templates(fight_button)
        if len(sub_buttons) == 0:
            take_fight_screenshots()
        else:
            for pair2 in sub_buttons:
                tap(f"{pair2[0]} {pair2[1]}")
                take_fight_screenshots()
            tap_button(close_sub_fights)
    # Drag the lowest processed button up to the top of the list
    swipe(find_max_y_pair(buttons), fight_scroll_top)


# start
while not is_end_of_log():
    process_war_log()
# possible duplicates here, but necessary to be sure to get the last fights
process_war_log()