Edit on GitHub

backend.workers.cancel_dataset

Delete and cancel a dataset

 1"""
 2Delete and cancel a dataset
 3"""
 4from backend.lib.worker import BasicWorker
 5from common.lib.exceptions import JobNotFoundException, DataSetException
 6from common.lib.dataset import DataSet
 7from common.lib.job import Job
 8
 9
class DatasetCanceller(BasicWorker):
	"""
	Cancel a dataset's creation and delete it

	Datasets can be deleted quite easily, but this becomes harder if one wants
	to delete them while they're being created. This worker, given a dataset's
	key, can take care of this.
	"""
	type = "cancel-dataset"  # job type this worker claims from the queue
	max_workers = 1

	def work(self):
		"""
		Cancel the job that is creating the dataset with the given key

		The dataset key is read from this job's ``remote_id``. If the dataset
		or the job creating it no longer exists, there is nothing to cancel
		and this job simply finishes. Otherwise the worker manager is asked
		to interrupt the creating job with the 'cancel' interrupt level;
		presumably the interrupted worker then cleans up (deletes) its own
		dataset — the deletion does not happen here.
		"""
		# look up the dataset to be cancelled; if it cannot be found it has
		# already been removed and there is nothing left to do
		try:
			dataset = DataSet(key=self.job.data["remote_id"], db=self.db)
			jobtype = dataset.data["type"]
		except DataSetException:
			# dataset already deleted, apparently
			self.job.finish()
			return

		# now find the job that's tasked with creating this dataset, if it
		# exists
		try:
			job = Job.get_by_remote_ID(remote_id=self.job.data["remote_id"], jobtype=jobtype, database=self.db)
		except JobNotFoundException:
			# no job... dataset already fully finished?
			self.job.finish()
			return

		# ask the manager to interrupt this job
		self.manager.request_interrupt(job, self.INTERRUPT_CANCEL)

		# done
		self.job.finish()
class DatasetCanceller(BasicWorker):
class DatasetCanceller(BasicWorker):
	"""
	Cancel a dataset's creation and delete it

	Datasets can be deleted quite easily, but this becomes harder if one wants
	to delete them while they're being created. This worker, given a dataset's
	key, can take care of this.
	"""
	type = "cancel-dataset"
	max_workers = 1

	def work(self):
		"""
		Interrupt the job that is building the dataset referenced by this job

		The dataset key arrives via this job's ``remote_id``. When either the
		dataset or its creating job cannot be found anymore, cancellation is
		moot and this job is finished immediately. Otherwise the worker
		manager is asked to interrupt the creating job at the 'cancel' level.
		"""
		remote_key = self.job.data["remote_id"]

		# resolve the dataset; an exception here means it is already gone,
		# so there is nothing to cancel
		try:
			target = DataSet(key=remote_key, db=self.db)
			creator_type = target.data["type"]
		except DataSetException:
			self.job.finish()
			return

		# locate the job responsible for creating this dataset
		try:
			creator_job = Job.get_by_remote_ID(remote_id=remote_key, jobtype=creator_type, database=self.db)
		except JobNotFoundException:
			# creating job no longer queued - dataset may have finished
			self.job.finish()
			return

		# hand the actual cancellation off to the worker manager, then wrap up
		self.manager.request_interrupt(creator_job, self.INTERRUPT_CANCEL)
		self.job.finish()

Cancel a dataset's creation and delete it

Datasets can be deleted quite easily, but this becomes harder if one wants to delete them while they're being created. This worker, given a dataset's key, can take care of this.

type = 'cancel-dataset'
max_workers = 1
def work(self):
	"""
	Interrupt the job that is building the dataset referenced by this job

	The dataset key arrives via this job's ``remote_id``. When either the
	dataset or its creating job cannot be found anymore, cancellation is
	moot and this job is finished immediately. Otherwise the worker
	manager is asked to interrupt the creating job at the 'cancel' level.
	"""
	remote_key = self.job.data["remote_id"]

	# resolve the dataset; an exception here means it is already gone,
	# so there is nothing to cancel
	try:
		target = DataSet(key=remote_key, db=self.db)
		creator_type = target.data["type"]
	except DataSetException:
		self.job.finish()
		return

	# locate the job responsible for creating this dataset
	try:
		creator_job = Job.get_by_remote_ID(remote_id=remote_key, jobtype=creator_type, database=self.db)
	except JobNotFoundException:
		# creating job no longer queued - dataset may have finished
		self.job.finish()
		return

	# hand the actual cancellation off to the worker manager, then wrap up
	self.manager.request_interrupt(creator_job, self.INTERRUPT_CANCEL)
	self.job.finish()

Ask the worker manager to interrupt (cancel) the job creating the dataset with the given key