"""For API usage only. (For CLI usage, use the C{osh @FORK [ ... ]} syntax instead.)
"""
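
# CLI counterpart (sketch, following the C{osh @FORK [ ... ]} form above; the
# bracketed pipeline is a placeholder and 'fred' is a hypothetical cluster name):
#
#     osh @3 [ ... ]         three threads, labelled 0, 1, 2
#     osh @fred [ ... ]      one thread per host of cluster 'fred'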

import types

import osh.args
import osh.cluster
import osh.core
import osh.error
import osh.function
import osh.oshthread
import osh.spawn
import osh.util
import merge

LineOutputConsumer = osh.spawn.LineOutputConsumer
ObjectInputProvider = osh.spawn.ObjectInputProvider
ObjectOutputConsumer = osh.spawn.ObjectOutputConsumer
Spawn = osh.spawn.Spawn
Option = osh.args.Option

def fork(threadgen, command, merge_key = None):
    """Creates threads and executes C{command} on each. The number of threads is determined
    by C{threadgen}. If C{threadgen} is an integer, then that number of threads is created,
    and each thread is labelled with an integer from 0 through C{threadgen} - 1. If
    C{threadgen} is a sequence, then one thread is created for each element of the sequence,
    labelled with that element. If C{threadgen} is a function, then it is evaluated and is
    expected to yield an integer or a sequence, which is then handled as described above.
    If C{threadgen} is a cluster specification, then the command is executed on each
    specified host, and the thread label identifies the host (its type is
    C{osh.cluster.Host}). If C{merge_key} is specified, then the inputs of each thread are
    expected to be ordered by the C{merge_key}; the sequences from the threads are then
    merged into a single sequence using the C{merge_key}. A usage sketch follows this
    function.
    """
    import osh.apiparser
    op = _Fork()
    if isinstance(command, osh.core.Op):
        command = [command]
    pipeline = osh.apiparser._sequence_op(command)
    if merge_key:
        return op.process_args(threadgen, pipeline, merge_key)
    else:
        return op.process_args(threadgen, pipeline)
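
# A minimal usage sketch of fork() (the pipeline and names below are hypothetical;
# a real pipeline would be built with the osh API):
#
#     fork(3, some_pipeline)                    # threads labelled 0, 1, 2
#     fork(['a', 'b'], some_pipeline)           # one thread per label
#     fork('fred', some_pipeline, merge_key)    # one thread per host of cluster 'fred',
#                                               # merging per-host output by merge_key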

class _Fork(osh.core.Generator):

    _threads = None
    _pipeline = None
    _merge_key = None
    _function_store = None
    _cluster_required = None

    def __init__(self):
        osh.core.Generator.__init__(self, '', (2, 3))
        self._function_store = FunctionStore()
        self._cluster_required = False

    def setup(self):
        # Args (see fork() above): threadgen, pipeline, and an optional merge_key.
        args = self.args()
        threadgen = args.next()
        self._pipeline = args.next()
        self._merge_key = args.next()
        cluster, thread_ids = self.thread_ids(threadgen)
        self.setup_pipeline(cluster)
        self.setup_threads(thread_ids)
        self.setup_shared_state()

    def execute(self):
        for thread in self._threads:
            thread.pipeline.setup()
            thread.pipeline.set_receiver(self._receiver)
            thread.start()
        for thread in self._threads:
            # Poll with a short join timeout until the thread finishes, then
            # report any exception that terminated it.
            while thread.isAlive():
                thread.join(0.1)
            thread_termination = thread.terminating_exception
            if thread_termination:
                osh.error.exception_handler(thread_termination, self, None, thread)

    def set_cluster_required(self, required):
        # Used by commands (such as remote) that only make sense with a
        # cluster-valued threadgen; see the check in thread_ids below.
        self._cluster_required = required

    def thread_ids(self, threadgen, already_evaled = False):
        threadgen_type = type(threadgen)
        try:
            cluster = None
            thread_ids = None
            if threadgen_type in (list, tuple):
                thread_ids = threadgen
            elif isinstance(threadgen, int):
                thread_ids = range(threadgen)
            elif threadgen.isdigit():
                thread_ids = range(int(threadgen))
            elif threadgen_type is types.FunctionType:
                if already_evaled:
                    self.usage()
                else:
                    cluster, thread_ids = self.thread_ids(osh.function._Function(threadgen)(), True)
            else:
                # threadgen is a string: try it as a cluster name (optionally
                # followed by :pattern), then as an expression.
                cluster_name, pattern = (threadgen.split(':') + [None])[:2]
                cluster = osh.cluster.cluster_named(cluster_name, pattern)
                if cluster:
                    thread_ids = cluster.hosts
                else:
                    evaled_threadgen = osh.function._Function(threadgen)()
                    cluster, thread_ids = self.thread_ids(evaled_threadgen, True)
            if self._cluster_required and cluster is None:
                import remote
                self.usage(remote.__doc__)
            if thread_ids is None:
                self.usage()
            return cluster, thread_ids
        except:
            self.usage()

    def setup_pipeline(self, cluster):
        if cluster and not self._pipeline.run_local():
            # Replace the pipeline with one that ships it to each host for
            # remote execution.
            remote_op = _Remote()
            remote_op.process_args(self._pipeline)
            self._pipeline = osh.core.Pipeline()
            self._pipeline.append_op(remote_op)
        self._pipeline.append_op(_AttachThreadState())
        self._pipeline.append_op(merge.merge(self._merge_key))

    def setup_threads(self, thread_ids):
        pipeline_copier = _PipelineCopier(self)
        # Hide function objects while the per-thread pipeline copies are made
        # (presumably because functions don't clone cleanly), then restore them.
        self._function_store.hide_functions(self._pipeline)
        threads = []
        for thread_id in thread_ids:
            pipeline_copy = pipeline_copier.pipeline(thread_id)
            thread = osh.oshthread._OshThread(self, thread_id, pipeline_copy)
            threads.append(thread)
        self._function_store.restore_functions(self._pipeline)
        self._threads = threads

    def setup_shared_state(self):
        # Each op in the pipeline template may create command state that is
        # shared by the corresponding op in every per-thread pipeline copy
        # (e.g. so the per-thread copies can coordinate).
        pipeline_copy_iterators = [thread.pipeline.ops() for thread in self._threads]
        for pipeline_template_op in self._pipeline.ops():
            command_state = pipeline_template_op.create_command_state(self._threads)
            for pipeline_copy_iterator in pipeline_copy_iterators:
                pipeline_copy_op = pipeline_copy_iterator.next()
                pipeline_copy_op.set_command_state(command_state)

class _PipelineCopier(object):
    # Produces per-thread copies of the fork's pipeline template. Functions are
    # hidden before cloning (see setup_threads) and restored in each copy.

    _fork = None

    def __init__(self, fork):
        self._fork = fork

    def pipeline(self, thread_id):
        copy = osh.util.clone(self._fork._pipeline)
        self._fork._function_store.restore_functions(copy)
        return copy

class _AttachThreadState(osh.core.Op):
    # Prepends the thread's label (thread state) to each object passing through.

    _thread_state = None

    def setup(self):
        self._thread_state = (self.thread_state,)

    def receive(self, object):
        if type(object) is list:
            object = tuple(object)
        self.send(self._thread_state + object)

class FunctionStore(object):
    # Hides the function objects in a pipeline (replacing them by references)
    # so the pipeline can be cloned or pickled, and restores them afterwards.

    _functions = None

    def hide_functions(self, pipeline):
        pipeline.replace_function_by_reference(self)

    def restore_functions(self, pipeline):
        pipeline.restore_function(self)

_REMOTE_EXECUTABLE = 'remoteosh'

def _dump(stream, object):
    # Reconstructed body (assumption): pickle the object onto the stream feeding
    # the remote process's stdin.
    import cPickle
    cPickle.dump(object, stream)

def _consume_remote_stdout(consumer, threadid, object):
    # Exceptions raised remotely come back as PickleableExceptions; hand them to
    # the local exception handler. Everything else is normal output.
    if isinstance(object, osh.error.PickleableException):
        exception = object.recreate_exception()
        osh.error.exception_handler(exception, object.command_description(), object.input(), threadid)
    else:
        consumer.send(object)

def _consume_remote_stderr(consumer, threadid, line):
    # Suppress '[Errno 9] Bad file descriptor' noise (presumably from the remote
    # connection shutting down); pass everything else to the stderr handler.
    if '[Errno 9] Bad file descriptor' not in line:
        osh.error.stderr_handler(line, consumer, None, threadid)

class _Remote(osh.core.Op):
    # Executes the pipeline on a single remote host by running _REMOTE_EXECUTABLE
    # over ssh; the host is this op's thread state.

    _pipeline = None

    def setup(self):
        self._pipeline = self.args().next()

    def execute(self):
        host = self.thread_state
        # Feed verbosity, the pipeline and the thread state to the remote process's
        # stdin; stream its stdout objects and stderr lines back through the
        # handlers defined above.
        process = Spawn(
            self._remote_command(host.address, host.user, host.db_profile),
            ObjectInputProvider(lambda stream, object: _dump(stream, object),
                                [osh.core.verbosity, self._pipeline, self.thread_state]),
            ObjectOutputConsumer(lambda object: _consume_remote_stdout(self, host, object)),
            LineOutputConsumer(lambda line: _consume_remote_stderr(self, host, line)))
        process.run()
        if process.terminating_exception():
            e = process.terminating_exception()
            raise e

    def _remote_command(self, host, user, db_profile):
        # Produces e.g. 'ssh HOST -l USER remoteosh [DB_PROFILE]' (values illustrative).
        remote_command = [_REMOTE_EXECUTABLE]
        if db_profile:
            remote_command.append(db_profile)
        ssh_command = 'ssh %s -l %s %s' % (host, user, ' '.join(remote_command))
        return ssh_command