author     Haidong Ji  2019-02-21 21:32:22 -0600
committer  Haidong Ji  2019-02-21 21:32:22 -0600
commit     421f2e783af773712a5e7a799f7cbaca69ab7a21 (patch)
tree       1bcc356b41444fcc03bfd84e86d5a91fa3b38c49 /sources
parent     602995c859265493dd94996d1626e6de44d512e6 (diff)
Priority queue parallel job processing done!
Not too bad since I had already worked it out in Java. Good practice with the heapq module in Python! A lot of fun :)
Diffstat (limited to 'sources')
-rw-r--r--  sources/job_queue.py  47
1 file changed, 47 insertions, 0 deletions
diff --git a/sources/job_queue.py b/sources/job_queue.py
new file mode 100644
index 0000000..8928d4a
--- /dev/null
+++ b/sources/job_queue.py
@@ -0,0 +1,47 @@
+# python3
+
+from heapq import heappush, heappop
+from collections import namedtuple
+
+AssignedJob = namedtuple("AssignedJob", ["worker", "started_at"])
+
+
+def assign_jobs(n_workers, jobs):
+    # Naive algorithm: linearly scan all workers for the earliest-free one.
+    result = []
+    next_free_time = [0] * n_workers
+    for job in jobs:
+        next_worker = min(range(n_workers), key=lambda w: next_free_time[w])
+        result.append(AssignedJob(next_worker, next_free_time[next_worker]))
+        next_free_time[next_worker] += job
+
+    return result
+
+
+def assign_jobs_improved(n_workers, jobs):
+    result = []
+    worker_priority_q = []  # min-heap of (next_free_time, worker_index) pairs
+    for i in range(n_workers):
+        heappush(worker_priority_q, (0, i))
+
+    for job in jobs:
+        free_at, worker = heappop(worker_priority_q)
+        result.append(AssignedJob(worker, free_at))
+        heappush(worker_priority_q, (free_at + job, worker))
+
+    return result
+
+
+def main():
+    n_workers, n_jobs = map(int, input().split())
+    jobs = list(map(int, input().split()))
+    assert len(jobs) == n_jobs
+
+    assigned_jobs = assign_jobs_improved(n_workers, jobs)
+
+    for job in assigned_jobs:
+        print(job.worker, job.started_at)
+
+
+if __name__ == "__main__":
+    main()
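
For a quick sanity check of the heap-based scheduler, here is a minimal usage sketch. It assumes sources/job_queue.py is importable; the two-worker, five-job input is only an illustrative example, not part of the commit.

# Minimal sketch: exercise assign_jobs_improved directly
# (assumes sources/ is on the import path).
from job_queue import assign_jobs_improved

jobs = [1, 2, 3, 4, 5]  # processing times for five jobs
for assignment in assign_jobs_improved(2, jobs):  # two workers
    # Each line: the worker that took the job and the time it started.
    print(assignment.worker, assignment.started_at)

# Expected output:
# 0 0
# 1 0
# 0 1
# 1 2
# 0 4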