-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathqueue_worker.py
More file actions
executable file
·149 lines (122 loc) · 4 KB
/
queue_worker.py
File metadata and controls
executable file
·149 lines (122 loc) · 4 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
#!/usr/bin/env python3
import os
import pickle
import random
import time
from typing import TYPE_CHECKING, Any, List, Optional

import pika
from typing_extensions import Self

if TYPE_CHECKING:
    from pika.adapters.blocking_connection import BlockingChannel
    import pika.spec
COLMAP_QUEUE = "colmap_queue"
TENSORF_QUEUE = "tensorf_queue"
TOTAL_JOBS_PROCESSED = 0
class QueueWorker:
    """Blocking RabbitMQ worker that consumes demo jobs from the colmap and
    tensorf queues.

    NOTE(review): the module-global ``TOTAL_JOBS_PROCESSED`` counter makes the
    worker hard-exit the whole process after 10 jobs — demo behavior, kept
    as-is.
    """

    def __init__(
        self,
        host: str = "localhost",
        port: int = 5672,
        queues: Optional[List[str]] = None,
    ) -> None:
        """Connect to RabbitMQ and declare this worker's queues.

        Unless you really need to modify something, call this with the default
        arguments, i.e. ``QueueWorker()``.

        Args:
            host: RabbitMQ server hostname.
            port: RabbitMQ server port (default AMQP port 5672).
            queues: Queues to declare; defaults to
                ``[COLMAP_QUEUE, TENSORF_QUEUE]``.

        Raises:
            ValueError: If an empty queue list is passed explicitly.
        """
        # None sentinel instead of a mutable default list, which would be
        # shared across every call (classic mutable-default-argument bug).
        if queues is None:
            queues = [COLMAP_QUEUE, TENSORF_QUEUE]
        if not queues:
            raise ValueError("At least one queue is required")
        self.host = host
        self.port = port
        # Defensive copy so later caller-side mutation can't change our state.
        self.queues = list(queues)
        # Bug fix: the original dropped `port` here and always connected on
        # the library default, silently ignoring a caller-supplied port.
        self.connection: pika.BlockingConnection = pika.BlockingConnection(
            pika.ConnectionParameters(host=self.host, port=self.port)
        )
        self.channel: "BlockingChannel" = self.connection.channel()
        self.declare()

    def start(self):
        """Register the consumers and block forever processing messages."""
        self.init_consume()
        self.channel.start_consuming()

    def declare(self):
        """Declare all configured queues as durable; more can be added later."""
        for queue in self.queues:
            self.channel.queue_declare(queue=queue, durable=True)
            print(f" [*] Declared {queue}.")

    def init_consume(self):
        """Allow consuming messages from all queues."""
        self.channel.basic_consume(
            queue=COLMAP_QUEUE, on_message_callback=self.colmap_callback
        )
        self.channel.basic_consume(
            queue=TENSORF_QUEUE, on_message_callback=self.tensorf_callback
        )

    def _handle_job(
        self,
        label: str,
        ch: "BlockingChannel",
        method: "pika.spec.Basic.Deliver",
        body: bytes,
    ):
        """Shared demo handler: sleep, count, ack, exit after 10 total jobs.

        Consolidates the two formerly copy-pasted callbacks; `label` is the
        process name interpolated into the log lines ("colmap" / "tensorf").
        """
        global TOTAL_JOBS_PROCESSED
        # n = random.randint(1, 2)
        n = 1  # fixed demo duration
        print(f" [x] Sleeping for {n} seconds as {label} process {body}")
        time.sleep(n)
        TOTAL_JOBS_PROCESSED += 1
        print(f" [x] Done with {label} process {body}")
        ch.basic_ack(delivery_tag=method.delivery_tag)
        if TOTAL_JOBS_PROCESSED == 10:
            print(" [x] Done with all jobs")
            # os._exit skips cleanup deliberately: it is the only way to bail
            # out of the blocking consume loop in this demo.
            os._exit(0)

    def colmap_callback(
        self,
        ch: "BlockingChannel",
        method: "pika.spec.Basic.Deliver",
        properties: "pika.BasicProperties",
        body: bytes,  # demoing as uuid for now
    ):
        """pika delivery callback for COLMAP_QUEUE; delegates to _handle_job."""
        self._handle_job("colmap", ch, method, body)

    def tensorf_callback(
        self,
        ch: "BlockingChannel",
        method: "pika.spec.Basic.Deliver",
        properties: "pika.BasicProperties",
        body: bytes,  # demoing as uuid for now
    ):
        """pika delivery callback for TENSORF_QUEUE; delegates to _handle_job."""
        self._handle_job("tensorf", ch, method, body)

    @staticmethod
    def Serialize(data: Any) -> bytes:
        """Serialize *data* to bytes with pickle.

        Example::

            qs = QueueWorker()
            data = qs.Serialize("Hello World")
            QueueWorker.Serialize("Hello World")
        """
        return pickle.dumps(data)

    @staticmethod
    def Deserialize(data: bytes) -> Any:
        """Deserialize pickled bytes back into the original object.

        The result equals the serialized object but is a distinct instance
        (i.e. their pointers are different, cool right?).

        WARNING: pickle.loads executes arbitrary code during unpickling —
        never call this on data from an untrusted source.

        Example::

            qw = QueueWorker()
            data = qw.Serialize()
            qw2 = qw.Deserialize(data)
            QueueWorker.Deserialize(data)
        """
        return pickle.loads(data)

    def close(self):
        """Close this object's connection to the RabbitMQ server."""
        self.connection.close()
if __name__ == "__main__":
    # Spin up a worker with default settings and block on the consume loop.
    QueueWorker().start()