Add eight-hour work rule; improve logic; optimize code
This commit is contained in:
parent 5950c1dab9
commit 1f9c0cbdc8
.idea/misc.xml (generated, 2 lines changed)
@@ -3,5 +3,5 @@
  <component name="JavaScriptSettings">
    <option name="languageLevel" value="ES6" />
  </component>
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7 (scheduling)" project-jdk-type="Python SDK" />
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7 (Scheduling)" project-jdk-type="Python SDK" />
</project>
runtime.py (171 lines changed)
@@ -2,17 +2,22 @@ from __future__ import annotations
import model
from datetime import datetime, timedelta
from typing import List, Dict, Optional
from copy import deepcopy
import math


class RuntimeProduct:

    def __init__(self, product, amount):
    def __init__(self, product: model.Product, amount: int, order: model.Order):
        self.ddl: datetime = datetime.today()
        self.start: datetime = datetime.today()
        self.delay = datetime.today()
        self.product: model.Product = product
        self.amount: int = amount
        self.order: model.Order = order
        self.father_product: Optional[model.Product] = None

    def set_father_product(self, father_product: model.Product):
        self.father_product = father_product

    def set_ddl_start(self, ddl: datetime, start: datetime):
        self.ddl = ddl
@@ -76,11 +81,12 @@ class RuntimeProductLines:
# Runtime process
class RuntimeProcess:

    def __init__(self, runtime_product: RuntimeProduct, process: model.Process):
    def __init__(self, runtime_product: RuntimeProduct, process: model.Process, order: model.Order):
        self.runtime_product = runtime_product
        self.process = process
        self.ddl = runtime_product.ddl
        self.delay = self.runtime_product.ddl - timedelta(minutes=process.pdt_time)
        self.order: model.Order = order


# Runtime resource requirement
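
RuntimeProcess.delay above is back-scheduled from the product deadline: the process has to begin at least pdt_time minutes before the ddl. A minimal standalone sketch of that backward calculation (the function and argument names here are illustrative, not the project's API):

from datetime import datetime, timedelta

def latest_start(ddl: datetime, production_minutes: int) -> datetime:
    # The process must finish by the deadline, so the latest possible
    # start is the deadline minus the production time.
    return ddl - timedelta(minutes=production_minutes)

# Example: a 90-minute process due at 2020-08-12 17:00 must start by 15:30.
print(latest_start(datetime(2020, 8, 12, 17, 0), 90))
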
@@ -92,13 +98,17 @@ class RuntimeResourceNeed:
            raise RuntimeError("the start time must before the end time")

        self.process: model.Process = runtime_process.process
        self.order: model.Order = runtime_process.order
        self.resource_attr: str = resource_attr
        self.workspace: str = workspace
        self.start: datetime = start
        self.end: datetime = end
        self.plan_alloc_resources_id: List[str] = []
        self.could_alloc = False
        self.ddl = runtime_process.ddl
        self.could_alloc: bool = False
        self.ddl: datetime = runtime_process.ddl
        self.father_product: model.Product = runtime_process.runtime_product.father_product
        self.product: model.Product = runtime_process.runtime_product.product
        self.duration: timedelta = self.end - self.start

    def add_plan_alloc_resource(self, runtime_resource_id: str):
        self.plan_alloc_resources_id.append(runtime_resource_id)
@@ -114,6 +124,7 @@ class RuntimeResource:
        self.resource_attrs = self.resource.attrs
        self.schedules: List[RuntimeResourceNeed] = []
        self.start_time: datetime = start_time
        self.durations_cache: Dict[int, int] = {}

    def add_schedule(self, schedule: RuntimeResourceNeed, do: bool) -> bool:

@@ -123,11 +134,56 @@ class RuntimeResource:
        if schedule.ddl < self.start_time:
            return False

        # Tally the work time in the 24 hours before this schedule starts
        work_time_statistical_start = schedule.start - timedelta(hours=24)
        work_time_statistical: int = 0

        cache_index: int = 0

        # Check for conflicts
        for resource_need in self.schedules:
            if resource_need.end > schedule.start > resource_need.start:

            if resource_need.end > schedule.end \
                    and resource_need.start > schedule.end:
                break

            # If the work accumulated in the 24 hours before this task, plus this plan, already exceeds 8 hours
            if work_time_statistical + schedule.duration.total_seconds() > 8 * 3600:
                return False

            # Optimization
            if schedule.end < resource_need.start:
            if resource_need.start > work_time_statistical_start \
                    and resource_need.end < schedule.start:
                # Record the work time
                work_time_statistical += resource_need.duration.total_seconds()

                continue

            if schedule.start < resource_need.end < schedule.end:
                pre_need = resource_need
                break

            if resource_need.start < schedule.end < resource_need.end:
                pre_need = resource_need
                break

            if schedule.start < resource_need.start < schedule.end:
                back_need = resource_need
                break

            if resource_need.start < schedule.start < resource_need.end:
                back_need = resource_need
                break

            if schedule.start == resource_need.start and schedule.end == resource_need.end:
                pre_need = resource_need
                back_need = resource_need
                break

            if resource_need.start < schedule.start \
                    and resource_need.end < schedule.start:
                cache_index += 1

        if pre_need is not None or back_need is not None:
            return False
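
The conflict loop above is where the new eight-hour rule lives: work already booked in the 24 hours before the candidate task is summed, and the task is rejected once adding it would exceed 8 hours. A standalone sketch of that sliding-window check, using plain (start, end) tuples instead of RuntimeResourceNeed objects:

from datetime import datetime, timedelta
from typing import List, Tuple

def fits_eight_hour_rule(existing: List[Tuple[datetime, datetime]],
                         start: datetime, end: datetime) -> bool:
    # Sum the work that falls entirely in the 24 hours before the new task.
    window_start = start - timedelta(hours=24)
    worked = sum((e - s).total_seconds()
                 for s, e in existing
                 if s >= window_start and e <= start)
    # Reject the task if it would push the 24-hour total past 8 hours.
    return worked + (end - start).total_seconds() <= 8 * 3600

day = datetime(2020, 8, 12)
booked = [(day.replace(hour=8), day.replace(hour=13))]   # 5 hours already booked
print(fits_eight_hour_rule(booked, day.replace(hour=14), day.replace(hour=16)))  # True  (7 h total)
print(fits_eight_hour_rule(booked, day.replace(hour=14), day.replace(hour=18)))  # False (9 h total)
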
@@ -135,6 +191,7 @@ class RuntimeResource:
        if do is True:
            self.schedules.append(schedule)
            self.schedules = sorted(self.schedules, key=lambda need: need.start)
            self.durations_cache[int(schedule.duration.total_seconds())] = cache_index
        return True

    def get_earliest_available_free_times(self, duration: timedelta) -> datetime:
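
durations_cache seems intended to remember, for each task duration, how far into the sorted schedule list the last placement of that duration got, so the next search for an equal duration can start its scan there instead of at index 0. A rough, illustrative sketch of that skip-index idea (helper names are hypothetical, not from the project):

from typing import Dict

# Map a duration in seconds to the index where the next scan may start.
skip_index: Dict[int, int] = {}

def scan_start(duration_seconds: int) -> int:
    # Unknown durations start scanning from the beginning of the schedule list.
    return skip_index.get(duration_seconds, 0)

def remember(duration_seconds: int, index: int) -> None:
    # After placing a task, remember how far the scan got for this duration.
    skip_index[duration_seconds] = index

remember(3600, 5)
print(scan_start(3600), scan_start(7200))  # 5 0
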
@@ -148,47 +205,88 @@ class RuntimeResource:
        target_schedule = self.schedules[0]
        if self.start_time < target_schedule.start:
            if target_schedule.start - self.start_time > duration:
                # If the combined work time stays under 8 hours
                if (target_schedule.duration + duration).total_seconds() < 8 * 3600:
                    return target_schedule.start - duration
                else:
                    # Push back 16 hours
                    return target_schedule.end + timedelta(hours=16)

        else:
            if (target_schedule.duration + duration).total_seconds() < 8 * 3600:
                return target_schedule.end
            else:
                # Push back 16 hours
                return target_schedule.end + timedelta(hours=16)

        cache_index: int = 0
        if int(duration.total_seconds()) in self.durations_cache.keys():
            cache_index = self.durations_cache[int(duration.total_seconds())]
            # print("CACHE HIT:", cache_index,
            #       "DURATION:", int(duration.total_seconds()))

        # Normal path
        for i in range(len(self.schedules) - 2):
        for i in range(cache_index, len(self.schedules) - 2):
            pre_schedule = self.schedules[i]
            back_schedule = self.schedules[i + 1]

            if self.start_time < pre_schedule.start and i == 0:
                if pre_schedule.start - self.start_time > duration:
                    return pre_schedule.start - duration
            if back_schedule.start - pre_schedule.end > duration:

            elif back_schedule.start - pre_schedule.end > duration:
                # Tally the work time in the 24 hours before this point
                work_time_statistical_start = pre_schedule.end - timedelta(hours=24)
                work_time_statistical: int = 0

                for schedule in self.schedules:

                    if schedule.start < work_time_statistical_start:
                        continue

                    if schedule.start > pre_schedule.end:
                        break

                    if schedule.start > work_time_statistical_start \
                            and schedule.end <= pre_schedule.end:
                        work_time_statistical += schedule.duration.total_seconds()

                if work_time_statistical + duration.total_seconds() < 8 * 3600:
                    return pre_schedule.end
                else:
                    continue

        # Tally the work time in the 24 hours before this point
        work_time_statistical_start = self.schedules[-1].end - timedelta(hours=24)
        work_time_statistical: int = 0

        for schedule in self.schedules:

            if schedule.start < work_time_statistical_start:
                continue

            if schedule.start > self.schedules[-1].end:
                break

            if schedule.start > work_time_statistical_start:
                work_time_statistical += schedule.duration.total_seconds()
        if work_time_statistical + duration.total_seconds() < 8 * 3600:
            return self.schedules[-1].end
        else:
            return self.schedules[-1].end + timedelta(hours=16)


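The rewritten get_earliest_available_free_times applies the same eight-hour budget when choosing a slot: a gap is only reused if the work already done in the preceding 24 hours plus the new task stays under 8 hours; otherwise the start is pushed 16 hours past the last task, which acts as a rest period. A simplified sketch of that end-of-queue decision (a single trailing schedule, illustrative names):

from datetime import datetime, timedelta

EIGHT_HOURS = 8 * 3600
REST = timedelta(hours=16)

def next_start_after(last_end: datetime, worked_last_24h_seconds: float,
                     duration: timedelta) -> datetime:
    # If the new task still fits into the 8-hour budget of the trailing
    # 24-hour window, it can start right after the last scheduled task.
    if worked_last_24h_seconds + duration.total_seconds() < EIGHT_HOURS:
        return last_end
    # Otherwise the resource gets a 16-hour break before the next task.
    return last_end + REST

last_end = datetime(2020, 8, 12, 16, 0)
print(next_start_after(last_end, 6 * 3600, timedelta(hours=1)))  # 2020-08-12 16:00
print(next_start_after(last_end, 8 * 3600, timedelta(hours=1)))  # 2020-08-13 08:00
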
class RuntimeResourcePool:

    def __init__(self, resources: List[model.Resource], start_time: datetime):
        self.pool: Dict[str, RuntimeResource] = {}
        self.pools: Dict[int, Dict[str, RuntimeResource]] = {}
        self.start_time: datetime = start_time

        for resource in resources:
            runtime_resource = RuntimeResource(resource, self.start_time)
            self.pool[runtime_resource.resource.rsc_id] = runtime_resource

    # Resource allocation operation
    def alloc_resource(self, resource_needs: List[RuntimeResourceNeed]):
        for resource_need in resource_needs:
            if resource_need.could_alloc is False:
                continue
            for plan_alloc_resource_id in resource_need.plan_alloc_resources_id:
                self.pool[plan_alloc_resource_id].add_schedule(resource_need, True)
            if hash(runtime_resource.workspace) not in self.pools.keys():
                self.pools[hash(runtime_resource.workspace)] = {}
            self.pools[hash(runtime_resource.workspace)][runtime_resource.resource.rsc_id] = runtime_resource

    def try_alloc_resource(self, resource_needs: List[RuntimeResourceNeed]) -> bool:

        temp_pool = deepcopy(list(self.pool.values()))

        # Needs that are already fulfilled
        fulfilled_needs = []

@@ -199,16 +297,15 @@
            if resource_need.could_alloc is True:
                continue

            # Look up resources in the same workshop
            temp_pool = list(self.pools[hash(resource_need.workspace)].values())

            # Exact search
            for runtime_resource in temp_pool:
                # Skip resources from a different workshop
                if runtime_resource.workspace != resource_need.workspace:
                    continue
                if runtime_resource.basic_attr == resource_need.resource_attr:
                    if runtime_resource.add_schedule(resource_need, False) is True:
                    if runtime_resource.add_schedule(resource_need, True) is True:
                        resource_need.add_plan_alloc_resource(runtime_resource.resource.rsc_id)
                        fulfilled_needs.append(resource_need)
                        runtime_resource.add_schedule(resource_need, True)
                        resource_need.could_alloc = True
                        break

@@ -220,10 +317,9 @@
                if runtime_resource.workspace != resource_need.workspace:
                    continue
                if resource_need.resource_attr in runtime_resource.resource_attrs:
                    if runtime_resource.add_schedule(resource_need, False) is True:
                    if runtime_resource.add_schedule(resource_need, True) is True:
                        resource_need.add_plan_alloc_resource(runtime_resource.resource.rsc_id)
                        fulfilled_needs.append(resource_need)
                        runtime_resource.add_schedule(resource_need, True)
                        resource_need.could_alloc = True
                        break

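try_alloc_resource above makes two passes over the workshop's candidates: an exact pass that only accepts a resource whose basic_attr equals the required attribute, then a relaxed pass that accepts any resource merely listing the attribute in its attrs. A compact sketch of that two-pass strategy (the Rsc class and its values are invented for illustration):

from typing import List, Optional

class Rsc:
    def __init__(self, rsc_id: str, basic_attr: str, attrs: List[str]):
        self.rsc_id = rsc_id
        self.basic_attr = basic_attr
        self.attrs = attrs

def pick_resource(pool: List[Rsc], needed_attr: str) -> Optional[Rsc]:
    # Pass 1: prefer a resource whose primary attribute matches exactly.
    for rsc in pool:
        if rsc.basic_attr == needed_attr:
            return rsc
    # Pass 2: fall back to any resource that merely supports the attribute.
    for rsc in pool:
        if needed_attr in rsc.attrs:
            return rsc
    return None

pool = [Rsc("lathe-1", "turning", ["turning", "drilling"]),
        Rsc("mill-1", "milling", ["milling", "drilling"])]
print(pick_resource(pool, "drilling").rsc_id)  # lathe-1, found by the relaxed pass
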
@@ -245,11 +341,11 @@

            duration = resource_need.end - resource_need.start

            # Look up resources in the same workshop
            temp_pool = list(self.pools[hash(resource_need.workspace)].values())

            # Exact search
            for runtime_resource in self.pool.values():
                # Skip resources from a different workshop
                if runtime_resource.workspace != resource_need.workspace:
                    continue
            for runtime_resource in temp_pool:
                if runtime_resource.basic_attr == resource_need.resource_attr:
                    temp_earliest_time = runtime_resource.get_earliest_available_free_times(duration)
                    # The time must not exceed the DDL
@@ -263,10 +359,8 @@
                    resource_need.start = earliest_time
            # Relaxed search
            else:
                for runtime_resource in self.pool.values():
                    # Skip resources from a different workshop
                    if runtime_resource.workspace != resource_need.workspace:
                        continue
                for runtime_resource in temp_pool:

                    if resource_need.resource_attr in runtime_resource.resource_attrs:
                        temp_earliest_time = runtime_resource.get_earliest_available_free_times(duration)
                        # The time must not exceed the DDL
@@ -283,4 +377,3 @@
                        if_found = True

        return if_found

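Across the search passes above, the pool now keeps a secondary index keyed by hash(workspace), so candidate resources are drawn only from the workshop a need belongs to instead of the whole plant. A small illustration of that grouping with made-up records (the tuples below stand in for model.Resource, they are not the project's data):

from typing import Dict

# Hypothetical stand-ins for model.Resource: (rsc_id, workspace) pairs.
resources = [("R1", "assembly"), ("R2", "assembly"), ("R3", "paint")]

pools: Dict[int, Dict[str, str]] = {}
for rsc_id, workspace in resources:
    # Bucket each resource under its workshop so later lookups touch fewer items.
    pools.setdefault(hash(workspace), {})[rsc_id] = workspace

print(len(pools[hash("assembly")]), len(pools[hash("paint")]))  # 2 1
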
scheduling.py (181 lines changed)
@@ -1,38 +1,44 @@
import runtime
import model
import csv
from typing import List, Dict
from typing import List, Dict, Tuple
from datetime import datetime, timedelta, date
import time
import math
import dataset_importer
import os
import utils


def orders_processor(orders: Dict[str, model.Order]) -> List[runtime.RuntimeProduct]:
def orders_processor(orders: Dict[str, model.Order]) -> Tuple[list, list, list]:
    orders_list = list(orders.values())
    sorted_orders_list = sorted(orders_list, key=lambda order: order.latest_end_time)
    products_lines = runtime.RuntimeProductLines()

    for sorted_order in sorted_orders_list:
        for item in sorted_order.products:
            runtime_product = runtime.RuntimeProduct(item["product"], item["amount"])
            runtime_product = runtime.RuntimeProduct(item["product"], item["amount"], sorted_order)
            runtime_product.set_ddl_start(sorted_order.latest_end_time, sorted_order.earliest_start_time)
            products_lines.add_runtime_product(runtime_product)

    runtime_product = products_lines.pop_runtime_product()

    products_list = [runtime_product]

    semi_products_list = []

    produce_tree = []

    produce_list = []

    while runtime_product is not None:
        search_semi_products(0, produce_tree, produce_list, runtime_product)
        search_semi_products(0, produce_tree, produce_list, runtime_product, semi_products_list)
        runtime_product = products_lines.pop_runtime_product()
        if runtime_product is not None:
            products_list.append(runtime_product)

    return produce_list
    return produce_list, products_list, semi_products_list


def search_semi_products(floor, produce_tree, produce_list, runtime_product):
def search_semi_products(floor, produce_tree, produce_list, runtime_product, semi_products_list):

    runtime_semi_products = []
    produce_tree.append({"runtime_product": runtime_product, "runtime_semi_products": runtime_semi_products})
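
orders_processor above sorts the orders by latest_end_time, an earliest-due-date ordering, before expanding each order's products, and it now returns the produce list together with the top-level products and the collected semi-products. A toy illustration of the due-date sort (a namedtuple stands in for model.Order):

from collections import namedtuple
from datetime import datetime

Order = namedtuple("Order", ["order_id", "latest_end_time"])

orders = {
    "A": Order("A", datetime(2020, 8, 20)),
    "B": Order("B", datetime(2020, 8, 15)),
    "C": Order("C", datetime(2020, 8, 18)),
}

# Earliest due date first, mirroring the sort used by orders_processor.
for order in sorted(orders.values(), key=lambda o: o.latest_end_time):
    print(order.order_id, order.latest_end_time.date())  # B, then C, then A
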
@@ -41,13 +47,25 @@ def search_semi_products(floor, produce_tree, produce_list, runtime_product):
    for i in range(runtime_product.amount):
        for item in runtime_product.product.semi_products:

            runtime_semi_product = runtime.RuntimeProduct(item["semi_product"], item["amount"])
            runtime_semi_product = runtime.RuntimeProduct(item["semi_product"],
                                                          item["amount"],
                                                          runtime_product.order)

            runtime_semi_product.set_father_product(runtime_product.product)

            # Record the semi-product
            semi_products_list.append(runtime_semi_product)

            runtime_semi_product.set_ddl_start(runtime_product.ddl, runtime_product.start)

            # print("C", runtime_semi_product.product.product_id, runtime_semi_product.ddl)

            for k in range(runtime_semi_product.amount):
                search_semi_products(floor+1, runtime_semi_products, produce_list, runtime_semi_product)
                search_semi_products(floor+1,
                                     runtime_semi_products,
                                     produce_list,
                                     runtime_semi_product,
                                     semi_products_list)

    print("L", floor, runtime_product.product.product_id, runtime_product.ddl)
    produce_list.append(runtime_product)
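
search_semi_products above expands the bill of materials depth-first: every semi-product inherits the parent's deadline and order, is recorded in semi_products_list, and is recursed into once per required unit, so components land in produce_list before the products that consume them. A minimal sketch of the same post-order expansion over a hypothetical plain-dict BOM:

from typing import Dict, List

# Hypothetical BOM: each product maps to its semi-products and amounts.
bom: Dict[str, List[Dict]] = {
    "bike": [{"semi_product": "wheel", "amount": 2},
             {"semi_product": "frame", "amount": 1}],
    "wheel": [{"semi_product": "spoke", "amount": 4}],
    "frame": [],
    "spoke": [],
}

def expand(product: str, produce_list: List[str]) -> None:
    # Recurse into every required unit of every semi-product first...
    for item in bom[product]:
        for _ in range(item["amount"]):
            expand(item["semi_product"], produce_list)
    # ...then append the product itself, so dependencies come first.
    produce_list.append(product)

order: List[str] = []
expand("bike", order)
print(order)  # spokes and wheels appear before the bike itself
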
@@ -65,7 +83,7 @@ def products_processor(runtime_products: List[runtime.RuntimeProduct]):
            process_number = math.ceil(float(runtime_product.amount) / float(process.max_quantity))
            for i in range(process_number):
                runtime_process: runtime.RuntimeProcess = \
                    runtime.RuntimeProcess(runtime_product, process)
                    runtime.RuntimeProcess(runtime_product, process, runtime_product.order)
                production_times += runtime_process.process.pdt_time
                processes_list.append(runtime_process)

@@ -81,7 +99,7 @@ def products_processor(runtime_products: List[runtime.RuntimeProduct]):
    for item in runtime_products_processes_list:
        for runtime_process in item["runtimeProcess"]:
            runtime_product: runtime.RuntimeProduct = item["runtimeProduct"]
            # print(runtime_product.product.product_id, runtime_product.delay, runtime_process.process.pcs_id)
            print(runtime_product.product.product_id, runtime_product.delay, runtime_process.process.pcs_id)

    return runtime_products_processes_list

@@ -89,15 +107,33 @@ def products_processor(runtime_products: List[runtime.RuntimeProduct]):
def resource_processor(runtime_products_processes_list: List[Dict[str, any]],
                       resource_pool: runtime.RuntimeResourcePool,
                       start_time: datetime):

    print("Resource Allocator Start Time", start_time)
    could_alloc = True
    index: int = 0

    runtime_resource_needs_all = []

    for item in runtime_products_processes_list:
        index += 1
        print("Processing", index, "OF", len(runtime_products_processes_list))

        target_runtime_product: runtime.RuntimeProduct = item["runtimeProduct"]

        print("Producing Product", target_runtime_product.product.product_id, "Amount", target_runtime_product.amount)

        runtime_resource_needs: List[runtime.RuntimeResourceNeed] = []
        for runtime_process in item["runtimeProcess"]:
            print("Runtime Process", runtime_process.process.pcs_id,
                  "FOR Runtime Product", runtime_process.runtime_product.product.product_id)

            runtime_process: runtime.RuntimeProcess = runtime_process
            for resource_item in runtime_process.process.res_needs:
                resource_attr = resource_item["rcs_attr"]
                amount = resource_item["amount"]

                print("Process Need Resource", resource_attr, " Amount", amount)

                for i in range(amount):
                    runtime_resource_need: runtime.RuntimeResourceNeed = runtime.RuntimeResourceNeed(
                        runtime_process,
@@ -108,18 +144,119 @@ def resource_processor(runtime_products_processes_list: List[Dict[str, any]],
                    runtime_resource_needs.append(runtime_resource_need)

        if resource_pool.try_alloc_resource(runtime_resource_needs):
            resource_pool.alloc_resource(runtime_resource_needs)
            pass
        else:
            while resource_pool.reset_earliest_free_start_time(runtime_resource_needs):
                resource_pool.try_alloc_resource(runtime_resource_needs)
                resource_pool.alloc_resource(runtime_resource_needs)
                # resource_pool.alloc_resource(runtime_resource_needs)

        for runtime_resource_need in runtime_resource_needs:
            if runtime_resource_need.could_alloc is False:
                could_alloc = False
                break

    return could_alloc
        runtime_resource_needs_all.append(runtime_resource_needs)

    return could_alloc, runtime_resource_needs_all, resource_pool.pools


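When a batch of needs cannot be placed at the requested times, resource_processor above keeps asking the pool to push the needs to each resource's earliest free slot and retries the allocation until something fits or nothing can be postponed further. Schematically the pattern reduces to a try/shift/retry loop; the callables below are stand-ins, not the project's exact control flow:

def schedule_batch(needs, try_alloc, shift_to_earliest_free_slot, commit) -> bool:
    # First attempt at the originally requested times.
    if try_alloc(needs):
        commit(needs)
        return True
    # Otherwise keep postponing the needs to the next free slots and retrying,
    # until allocation succeeds or nothing can be shifted any further.
    while shift_to_earliest_free_slot(needs):
        if try_alloc(needs):
            commit(needs)
            return True
    return False

# Tiny demo: pretend allocation succeeds after two postponements.
state = {"shifts": 0}

def fake_shift(_needs):
    state["shifts"] += 1
    return state["shifts"] <= 5          # give up after five postponements

print(schedule_batch([], lambda n: state["shifts"] >= 2, fake_shift, lambda n: None))  # True
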
def json_writer(filename, obj):

    file = open("./outputs/" + filename, 'w', encoding="utf8")

    file.write(utils.dumps(obj))

    file.close()


def json_generator(orders,
                   runtime_products: List[runtime.RuntimeProduct],
                   runtime_semi_products: List[runtime.RuntimeProduct],
                   runtime_products_processes_list: List[List[runtime.RuntimeResourceNeed]],
                   resource_pools: Dict[int, Dict[str, runtime.RuntimeResource]]):

    folder = os.path.exists("./outputs")

    if not folder:
        os.mkdir("./outputs")

    orders_json = []

    for order in orders.values():
        orders_json.append({
            "name": order.order_id,
            "startTime": order.earliest_start_time.isoformat(),
            "endTime": order.latest_end_time.isoformat()
        })

    json_writer("orders.json", orders_json)

    products_json = []

    for runtime_product in runtime_products:
        products_json.append({
            "name": runtime_product.product.product_id,
            "count": runtime_product.amount,
            "startTime": runtime_product.delay.isoformat(),
            "endTime": runtime_product.ddl.isoformat()
        })

    json_writer("products.json", products_json)

    semi_products_json = []

    for runtime_semi_product in runtime_semi_products:
        semi_products_json.append({
            "name": runtime_semi_product.product.product_id,
            "startTime": runtime_semi_product.delay.isoformat(),
            "endTime": runtime_semi_product.ddl.isoformat()
        })

    json_writer("semi_products.json", semi_products_json)

    processes_json = []

    # Output check
    for runtime_resource_needs in runtime_products_processes_list:
        for runtime_resource_need in runtime_resource_needs:
            processes_json.append({
                "name": runtime_resource_need.process.pcs_id,
                "startTime": runtime_resource_need.start.isoformat(),
                "endTime": runtime_resource_need.end.isoformat(),
                "allocResourceName": runtime_resource_need.plan_alloc_resources_id,
                "workspace": runtime_resource_need.workspace
            })

    json_writer("processes.json", processes_json)

    resources_json = []

    # Output check
    for pool in resource_pools.values():
        for runtime_resource in pool.values():
            times = []

            for schedule in runtime_resource.schedules:

                in_product = None
                if schedule.father_product is not None:
                    in_product = schedule.father_product.product_id

                times.append({
                    "startTime": schedule.start.isoformat(),
                    "endTime": schedule.end.isoformat(),
                    "inOrder": schedule.order.order_id,
                    "inProduct": in_product,
                    "inSemiProduct": schedule.product.product_id,
                    "inProcess": schedule.product.product_id
                })

            resources_json.append({
                "name": runtime_resource.resource.rsc_name,
                "times": times
            })

    json_writer("resources.json", resources_json)


if __name__ == "__main__":
@@ -127,7 +264,13 @@ if __name__ == "__main__":
    start_time: datetime = datetime.combine(date(2020, 8, 12), datetime.min.time())

    m_orders, m_products, m_processes, m_resources = dataset_importer.import_dataset()
    resource_pool: runtime.RuntimeResourcePool = runtime.RuntimeResourcePool(m_resources.values(), start_time)
    produce_list = orders_processor(m_orders)
    rt_rcs_list = products_processor(produce_list)
    print(resource_processor(rt_rcs_list, resource_pool, start_time))
    m_resource_pool: runtime.RuntimeResourcePool = runtime.RuntimeResourcePool(m_resources.values(), start_time)
    m_produce_list, m_products_list, m_semi_products_list = orders_processor(m_orders)
    rt_rcs_list = products_processor(m_produce_list)

    m_could_alloc, m_runtime_resource_needs_all, resource_pools = \
        resource_processor(rt_rcs_list, m_resource_pool, start_time)

    json_generator(m_orders, m_products_list, m_semi_products_list, m_runtime_resource_needs_all, resource_pools)

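json_generator above writes Gantt-friendly JSON files (orders.json, products.json, semi_products.json, processes.json, resources.json) under ./outputs via utils.dumps, each a list of objects with ISO-8601 start and end times. Roughly, a single resources.json entry would look like the invented sample below (the concrete values are illustrative only):

import json

resource_entry = {
    "name": "CNC-01",                        # runtime_resource.resource.rsc_name
    "times": [{
        "startTime": "2020-08-12T08:00:00",  # schedule.start.isoformat()
        "endTime": "2020-08-12T12:00:00",    # schedule.end.isoformat()
        "inOrder": "ORDER-001",
        "inProduct": "P-100",
        "inSemiProduct": "SP-200",
        "inProcess": "SP-200",               # mirrors the code above, which reuses product_id here
    }],
}

print(json.dumps(resource_entry, indent=2))
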