queue.acquire()
try:
+ if op.status == constants.OP_STATUS_CANCELED:
+ raise CancelJob()
assert op.status == constants.OP_STATUS_QUEUED
job.run_op_index = idx
op.status = constants.OP_STATUS_WAITLOCK
op.status = constants.OP_STATUS_ERROR
op.result = str(err)
op.end_timestamp = TimeStampNow()
- logging.info("Op %s/%s: Error in opcode %s", idx + 1, count,
- op_summary)
+ logging.info("Op %s/%s: Error in opcode %s: %s",
+ idx + 1, count, op_summary, err)
finally:
queue.UpdateJobUnlocked(job)
finally:
utils.RemoveFile(constants.JOB_QUEUE_DRAIN_FILE)
return True
- @utils.LockedMethod
@_RequireOpenQueue
- def SubmitJob(self, ops):
+ def _SubmitJobUnlocked(self, ops):
"""Create and store a new job.
This enters the job into our job queue and also puts it on the new
"""
if self._IsQueueMarkedDrain():
- raise errors.JobQueueDrainError()
+ raise errors.JobQueueDrainError("Job queue is drained, refusing job")
# Check job queue size
size = len(self._ListJobFiles())
return job.id
@utils.LockedMethod
@_RequireOpenQueue
def SubmitJob(self, ops):
  """Create and store a new job.

  Lock-holding public entry point; all real work is delegated to
  L{_SubmitJobUnlocked}.

  @type ops: list
  @param ops: the opcodes that will make up the new job
  @return: the identifier of the newly created job

  """
  new_job_id = self._SubmitJobUnlocked(ops)
  return new_job_id
+
+ @utils.LockedMethod
+ @_RequireOpenQueue
+ def SubmitManyJobs(self, jobs):
+ """Create and store multiple jobs.
+
+ @see: L{_SubmitJobUnlocked}
+
+ """
+ results = []
+ for ops in jobs:
+ try:
+ data = self._SubmitJobUnlocked(ops)
+ status = True
+ except errors.GenericError, err:
+ data = str(err)
+ status = False
+ results.append((status, data))
+
+ return results
+
+
@_RequireOpenQueue
def UpdateJobUnlocked(self, job):
"""Update a job's on disk storage.
"""
try:
for op in job.ops:
- op.status = constants.OP_STATUS_ERROR
+ op.status = constants.OP_STATUS_CANCELED
op.result = "Job canceled by request"
finally:
self.UpdateJobUnlocked(job)
logging.debug("Job %s not found", job_id)
return False
- return self._ArchiveJobUnlocked([job]) == 1
+ return self._ArchiveJobsUnlocked([job]) == 1
@utils.LockedMethod
@_RequireOpenQueue