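"""Feeder assignment and genetic operators for surface-mount placement optimization.

BaseOpt bundles the configuration, board data and objective weights shared by the
concrete optimizers, FeederAssignOpt maps the feeders required by a head allocation
onto slots of the feeder base, and GenOpe collects generic genetic-algorithm
operators (selection, crossover, mutation).
"""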
from collections import defaultdict
import copy

import numpy as np

from data.type import OptResult
from opt.smm.path_plan import PathPlanOpt


class BaseOpt:
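    """Base class holding the state shared by the optimizers.

    Stores the configuration and part/step data, an OptResult container, a
    PathPlanOpt path planner, and the per-event weights (cycles, nozzle
    changes, pickups, placements, head movement) intended for building a
    single objective value.
    """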
    def __init__(self, config, part_data, step_data, feeder_data=None):
        self.part_data = part_data
        self.step_data = step_data
        self.feeder_data = feeder_data
        self.config = config

        self.result = OptResult()
        self.path_planner = PathPlanOpt(config, part_data, step_data)

        self.cycle_weight = 1
        self.nozzle_change_weight = 1
        self.pickup_weight = 1
        self.place_weight = 1
        self.move_weight = 1


class FeederAssignOpt:
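    """Assigns the feeders required by a head allocation to base slots.

    The feeders of each pickup cycle are merged into feeder groups, every
    group is placed on the slot lane close to its ideal pickup position, and
    the pickup slot of each head in each cycle is derived from the result.
    """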
    def __init__(self, config, part_data, step_data, feeder_data=None):
        self.part_data = part_data
        self.step_data = step_data
        self.feeder_data = feeder_data
        self.config = config

    def find_commonpart(self, head_group, feeder_group):
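        """Find the best alignment between a head group and a feeder group.

        Slides head_group over feeder_group and returns, for the offset with
        the largest number of matching (non -1) parts, a head-indexed list
        containing the matched part of each head and -1 elsewhere.
        """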
        feeder_group_len = len(feeder_group)

        max_length, max_common_part = -1, []
        for offset in range(-self.config.head_num + 1, feeder_group_len - 1):
            # offset: shift of head_group relative to feeder_group
            length, common_part = 0, []
            for hd_index in range(self.config.head_num):
                fd_index = hd_index + offset
                if fd_index < 0 or fd_index >= feeder_group_len:
                    common_part.append(-1)
                    continue

                if head_group[hd_index] == feeder_group[fd_index] and head_group[hd_index] != -1:
                    length += 1
                    common_part.append(head_group[hd_index])
                else:
                    common_part.append(-1)
            if length > max_length:
                max_length = length
                max_common_part = common_part

        return max_common_part

    def do(self, part_result, cycle_result):
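        """Assign feeder slots for the given head allocation.

        part_result holds, per cycle group, the part mounted by each head
        (-1 for an idle head); cycle_result holds the repeat count of each
        cycle group. Returns slot_result, the pickup slot of each head per
        cycle group (-1 for idle heads).
        """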
        slot_result, feeder_group = [], []
        feeder_limit = {idx: data.fdn for idx, data in self.part_data.iterrows()}

        for part_cycle in part_result:
            new_feeder_group = []
            for part in part_cycle:
                if part == -1 or feeder_limit[part] == 0 or new_feeder_group.count(part) >= feeder_limit[part]:
                    new_feeder_group.append(-1)
                else:
                    new_feeder_group.append(part)

            if len(new_feeder_group) == 0:
                continue

            while sum(i >= 0 for i in new_feeder_group) != 0:
                max_common_part, index = [], -1
                max_common_length = -1
                for feeder_index in range(len(feeder_group)):
                    common_part = self.find_commonpart(new_feeder_group, feeder_group[feeder_index])
                    # part index 0 is a valid part, hence ">= 0" when counting matches
                    if sum(i >= 0 for i in common_part) > max_common_length:
                        max_common_length = sum(i >= 0 for i in common_part)
                        max_common_part, index = common_part, feeder_index
                new_feeder_length = 0
                for feeder in new_feeder_group:
                    if feeder != -1 and feeder_limit[feeder] > 0:
                        new_feeder_length += 1

                if new_feeder_length > max_common_length:
                    # allocate a new feeder group
                    feeder_group.append([])
                    for feeder_index in range(len(new_feeder_group)):
                        feeder = new_feeder_group[feeder_index]
                        if feeder != -1 and feeder_limit[feeder] > 0:
                            feeder_group[-1].append(feeder)
                            new_feeder_group[feeder_index] = -1
                            feeder_limit[feeder] -= 1
                        else:
                            feeder_group[-1].append(-1)
                else:
                    # reuse an existing feeder group
                    for feeder_index, feeder_part in enumerate(max_common_part):
                        if feeder_part != -1:
                            new_feeder_group[feeder_index] = -1

        # strip the leading and trailing idle (-1) entries of each group
        for group in feeder_group:
            while len(group) > 0 and group[0] == -1:
                group.pop(0)

            while len(group) > 0 and group[-1] == -1:
                group.pop(-1)
        # determine the mounting position of the feeder groups
        part_pos = defaultdict(list)
        for _, data in self.step_data.iterrows():
            idx = self.part_data[self.part_data['part'].values == data.part].index.tolist()[0]
            part_pos[idx].append(data.x + self.config.stopper_pos.x)

        # heads used by each part (leftmost / rightmost head index)
        CT_Head = defaultdict(list)
        for part_cycle in part_result:
            for head, part in enumerate(part_cycle):
                if part == -1:
                    continue
                if part not in CT_Head:
                    CT_Head[part] = [head, head]
                CT_Head[part][0] = min(CT_Head[part][0], head)
                CT_Head[part][1] = max(CT_Head[part][1], head)
        # priority order in which feeder groups are assigned to slots
        feeder_assign_sequence = []
        for i in range(len(feeder_group)):
            for j in range(len(feeder_group)):
                if j in feeder_assign_sequence:
                    continue

                if len(feeder_assign_sequence) == i:
                    feeder_assign_sequence.append(j)
                else:
                    seq = feeder_assign_sequence[-1]
                    # compare the workload (cycles x feeder count) of group `seq` with that of group `j`
                    if cycle_result[seq] * len([k for k in feeder_group[seq] if k >= 0]) < cycle_result[j] * len(
                            [k for k in feeder_group[j] if k >= 0]):
                        feeder_assign_sequence.pop(-1)
                        feeder_assign_sequence.append(j)
        # TODO: mechanical limits are not considered yet
        feeder_group_slot = [-1] * len(feeder_group)
        feeder_lane_state = [0] * self.config.slot_num  # 0 - slot empty, 1 - slot occupied
        intv_ratio = self.config.head_intv // self.config.slot_intv
        for index in feeder_assign_sequence:
            group = feeder_group[index]
            best_slot = []
            for cp_index, part in enumerate(group):
                if part == -1:
                    continue
                best_slot.append(round((sum(part_pos[part]) / len(part_pos[part]) - self.config.slotf1_pos.x)
                                       / self.config.slot_intv) + 1 - cp_index * intv_ratio)
            best_slot = round(sum(best_slot) / len(best_slot))

            search_dir, step = 0, 0  # search_dir: 1 - search right, 0 - search left
            left_out_range, right_out_range = False, False
            while True:
                assign_slot = best_slot + step if search_dir else best_slot - step
                # out of range: reverse the search direction
                if assign_slot + (len(group) - 1) * intv_ratio >= self.config.slot_num / 2:
                    right_out_range = True
                    search_dir = 0
                    step += 1
                elif assign_slot < 0:
                    left_out_range = True
                    search_dir = 1
                    step += 1
                else:
                    if left_out_range or right_out_range:
                        step += 1  # one-sided search
                    else:
                        search_dir = 1 - search_dir  # two-sided search
                        if search_dir == 0:
                            step += 1

                    assign_available = True

                    # === assign the corresponding slots ===
                    for slot in range(assign_slot, assign_slot + intv_ratio * len(group), intv_ratio):
                        pick_part = group[(slot - assign_slot) // intv_ratio]
                        if feeder_lane_state[slot] == 1 and pick_part != -1:
                            assign_available = False
                            break

                        if pick_part != -1 and (slot - CT_Head[pick_part][0] * intv_ratio <= 0 or
                                                slot + (self.config.head_num - CT_Head[pick_part][1] - 1) *
                                                intv_ratio > self.config.slot_num // 2):
                            assign_available = False
                            break

                    if assign_available:
                        for idx, part in enumerate(group):
                            if part != -1:
                                feeder_lane_state[assign_slot + idx * intv_ratio] = 1
                        feeder_group_slot[index] = assign_slot
                        break

            if feeder_group_slot[index] == -1:
                raise Exception('feeder assign error!')
        # determine the pickup slot of each head in each cycle following the maximum-matching principle
        for part_cycle in part_result:
            slot_result.append([-1] * self.config.head_num)
            head_index = [head for head, component in enumerate(part_cycle) if component >= 0]
            while head_index:
                max_overlap_counter = 0
                overlap_feeder_group_index, overlap_feeder_group_offset = -1, -1
                for index, group in enumerate(feeder_group):
                    # offset: shift of head 1 relative to the first part of the feeder group
                    for offset in range(-self.config.head_num + 1, self.config.head_num + len(group)):
                        overlap_counter = 0
                        for head in head_index:
                            if 0 <= head + offset < len(group) and part_cycle[head] == group[head + offset]:
                                overlap_counter += 1

                        if overlap_counter > max_overlap_counter:
                            max_overlap_counter = overlap_counter
                            overlap_feeder_group_index, overlap_feeder_group_offset = index, offset

                group = feeder_group[overlap_feeder_group_index]
                head_index_cpy = copy.deepcopy(head_index)

                for idx, head in enumerate(head_index_cpy):
                    if 0 <= head + overlap_feeder_group_offset < len(group) and part_cycle[head] == \
                            group[head + overlap_feeder_group_offset]:
                        slot_result[-1][head] = feeder_group_slot[overlap_feeder_group_index] + intv_ratio * (
                                head + overlap_feeder_group_offset)
                        head_index.remove(head)

        return slot_result


class GenOpe:
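    """Collection of generic genetic-algorithm operators."""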

    @staticmethod
    def roulette_wheel_selection(pop_eval):
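        """Select an index with probability proportional to its evaluation value."""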
        random_val = np.random.random() * sum(pop_eval)
        for idx, val in enumerate(pop_eval):
            random_val -= val
            if random_val <= 0:
                return idx
        return len(pop_eval) - 1

    @staticmethod
    def get_top_kth(pop_val, k: int, reverse=True):
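        """Return the indices of the k largest (smallest if reverse=False) values.

        Example: get_top_kth([3.0, 1.0, 2.0], 2) returns [0, 2].
        """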
        res = []
        pop_val_cpy = copy.deepcopy(pop_val)
        pop_val_cpy.sort(reverse=reverse)

        for i in range(min(len(pop_val_cpy), k)):
            for j in range(len(pop_val)):
                if abs(pop_val_cpy[i] - pop_val[j]) < 1e-9 and j not in res:
                    res.append(j)
                    break
        return res

    @staticmethod
    def partially_mapped_crossover(parent1, parent2):
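        """Partially mapped crossover (PMX) of two equal-length permutations.

        A random slice is copied from one parent into each child and the
        remaining positions are filled through the mapping between the two
        parents, so that every child remains a valid permutation.
        """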
        size = len(parent1)
        start, end = sorted(np.random.randint(0, size, 2))

        def create_child(primary_parent, secondary_parent):
            child = [-1] * size
            child[start:end + 1] = copy.deepcopy(secondary_parent[start:end + 1])

            for i in range(size):
                if start <= i <= end:
                    continue

                cur_ptr, cur_elem = 0, primary_parent[i]
                while True:
                    child[i] = cur_elem
                    if child.count(cur_elem) == 1:
                        break
                    child[i] = -1

                    if cur_ptr == 0:
                        cur_ptr, cur_elem = 1, secondary_parent[i]
                    else:
                        index_ = child.index(cur_elem)
                        cur_elem = secondary_parent[index_]

            return child

        return create_child(parent1, parent2), create_child(parent2, parent1)

    @staticmethod
    def swap_mutation(parent):
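        """Swap two randomly chosen positions of the individual (modified in place)."""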
        range_ = np.random.randint(0, len(parent), 2)
        parent[range_[0]], parent[range_[1]] = parent[range_[1]], parent[range_[0]]
        return parent

    @staticmethod
    def directed_edge_recombine_crossover(individual1, individual2):
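        """Directed edge recombination crossover of two equal-length sequences.

        Builds left/right adjacency lists from both parents and repeatedly
        grows a path from a random unused element, preferring neighbors that
        still have the most remaining edges, until every element occurs once
        in the offspring.
        """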
        assert len(individual1) == len(individual2)
        left_edge_list, right_edge_list = defaultdict(list), defaultdict(list)

        for index in range(len(individual1) - 1):
            elem1, elem2 = individual1[index], individual1[index + 1]
            right_edge_list[elem1].append(elem2)
            left_edge_list[elem2].append(elem1)

        for index in range(len(individual2) - 1):
            elem1, elem2 = individual2[index], individual2[index + 1]
            right_edge_list[elem1].append(elem2)
            left_edge_list[elem2].append(elem1)

        offspring = []
        while len(offspring) != len(individual1):
            while True:
                center_element = np.random.choice(individual1)
                if center_element not in offspring:  # avoid picking the same element twice
                    break
            direction, candidate = 1, [center_element]
            parent = center_element
            for edge_list in left_edge_list.values():
                while parent in edge_list:
                    edge_list.remove(parent)

            for edge_list in right_edge_list.values():
                while parent in edge_list:
                    edge_list.remove(parent)

            while True:
                max_len, max_len_neighbor = -1, 0
                if direction == 1:
                    if len(right_edge_list[parent]) == 0:
                        direction, parent = -1, center_element
                        continue
                    for neighbor in right_edge_list[parent]:
                        if max_len < len(right_edge_list[neighbor]):
                            max_len_neighbor = neighbor
                            max_len = len(right_edge_list[neighbor])
                    candidate.append(max_len_neighbor)
                    parent = max_len_neighbor
                elif direction == -1:
                    if len(left_edge_list[parent]) == 0:
                        direction, parent = 0, center_element
                        continue
                    for neighbor in left_edge_list[parent]:
                        if max_len < len(left_edge_list[neighbor]):
                            max_len_neighbor = neighbor
                            max_len = len(left_edge_list[neighbor])
                    candidate.insert(0, max_len_neighbor)
                    parent = max_len_neighbor
                else:
                    break

                # remove the chosen element from the remaining adjacency lists
                for edge_list in left_edge_list.values():
                    while max_len_neighbor in edge_list:
                        edge_list.remove(max_len_neighbor)

                for edge_list in right_edge_list.values():
                    while max_len_neighbor in edge_list:
                        edge_list.remove(max_len_neighbor)

            offspring += candidate

        return offspring