allow pause job when logged out
Allie Crevier committed Aug 12, 2019
1 parent e9dbe57 commit aec3999
Showing 4 changed files with 45 additions and 32 deletions.
11 changes: 7 additions & 4 deletions securedrop_client/logic.py
@@ -260,10 +260,13 @@ def call_api(self,
new_api_thread.start()

def on_queue_paused(self) -> None:
self.gui.update_error_status(
_('The SecureDrop server cannot be reached.'),
duration=0,
retry=True)
if self.api is None:
self.gui.update_error_status(_('The SecureDrop server cannot be reached.'))
else:
self.gui.update_error_status(
_('The SecureDrop server cannot be reached.'),
duration=0,
retry=True)

def resume_queues(self) -> None:
self.api_job_queue.resume_queues()
49 changes: 21 additions & 28 deletions securedrop_client/queue.py
@@ -26,27 +26,27 @@ class RunnableQueue(QObject):
pause = pyqtSignal()

def __init__(self, api_client: API, session_maker: scoped_session) -> None:
'''
One of the challenges of using Python's PriorityQueue is that
objects (jobs) with equal priorities are not retrieved in FIFO
order, because PriorityQueue is implemented using heapq, which
does not have sort stability. To ensure sort stability, we add a
counter so that objects with equal priorities are retrieved in
FIFO order.
See also: https://bugs.python.org/issue17794
'''
super().__init__()
self.api_client = api_client
self.session_maker = session_maker
self.queue = PriorityQueue() # type: PriorityQueue[Tuple[int, ApiJob]]

# One of the challenges of using Python's PriorityQueue is that
# objects (jobs) with equal priorities are not retrieved in FIFO
# order, because PriorityQueue is implemented using heapq, which
# does not have sort stability. To ensure sort stability, we add a
# counter so that objects with equal priorities are retrieved in
# FIFO order.
# See also: https://bugs.python.org/issue17794
self.order_number = itertools.count()

def add_job(self, priority: int, job: ApiJob) -> None:
"""
'''
Increment the queue's internal counter/order_number, assign an
order_number to the job to track its position in the queue,
and submit the job with its priority to the queue.
"""

'''
current_order_number = next(self.order_number)
job.order_number = current_order_number
self.queue.put_nowait((priority, job))
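
The counter technique described above can be illustrated in isolation. The following is a minimal, hypothetical sketch rather than code from this repository: it uses the (priority, count, item) tuple layout suggested by the Python docs and bugs.python.org/issue17794, whereas the diff above stores the counter on the job itself as order_number. Because heapq has no sort stability, a monotonically increasing counter acts as a tie-breaker so that equal-priority entries come out in FIFO order.

import itertools
from queue import PriorityQueue

# Monotonically increasing tie-breaker for entries that share a priority.
counter = itertools.count()
q = PriorityQueue()

for name in ('first', 'second', 'third'):
    # All three entries share priority 1; the counter decides their order.
    q.put_nowait((1, next(counter), name))

# Entries come back in insertion (FIFO) order despite equal priorities.
assert [q.get_nowait()[2] for _ in range(3)] == ['first', 'second', 'third']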
@@ -152,35 +152,28 @@ def pause_queues(self) -> None:

def resume_queues(self) -> None:
logger.info("Resuming queues")
# Reconnect the queues to the processing loop and resume processing
self.main_thread.started.connect(self.main_queue.process)
self.download_file_thread.started.connect(self.download_file_queue.process)
self.start_queues()
self.main_queue.process()
self.download_file_queue.process()

def enqueue(self, job: ApiJob) -> None:
# Additional defense in depth to prevent jobs being added to the queue when not
# logged in.
priority = self.JOB_PRIORITIES[type(job)]

if isinstance(job, PauseQueueJob):
self.main_queue.add_job(priority, job)
self.download_file_queue.add_job(priority, job)
self.paused.emit()
return

# Prevent api jobs being added to the queue when not logged in.
if not self.main_queue.api_client or not self.download_file_queue.api_client:
logger.info('Not adding job, we are not logged in')
return

# First check the queues are started in case they died for some reason.
self.start_queues()

priority = self.JOB_PRIORITIES[type(job)]

if isinstance(job, PauseQueueJob):
logger.debug('Adding pause job to both queues')
self.main_queue.add_job(priority, job)
self.download_file_queue.add_job(priority, job)
# Disconnect queues from processing loop in case threads restart while paused
self.main_thread.started.disconnect()
self.download_file_thread.started.disconnect()
# Tell the gui that the queues are now paused
self.paused.emit()
elif isinstance(job, FileDownloadJob):
if isinstance(job, FileDownloadJob):
logger.debug('Adding job to download queue')
self.download_file_queue.add_job(priority, job)
else:
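The reordering in enqueue() is the core of this commit: the PauseQueueJob branch now runs before the login check, so a pause can be enqueued even when the client is logged out. Below is a minimal, hypothetical sketch of that control flow, using stand-in names rather than the project's classes.

class PauseJob:
    """Hypothetical stand-in for PauseQueueJob."""


def enqueue(job, api_client, jobs):
    # Pause requests are control messages, not API calls, so they are
    # accepted regardless of authentication state.
    if isinstance(job, PauseJob):
        jobs.append(job)
        return
    # Every other job requires a logged-in API client.
    if api_client is None:
        print('Not adding job, we are not logged in')
        return
    jobs.append(job)


jobs = []
enqueue(PauseJob(), api_client=None, jobs=jobs)  # accepted even when logged out
enqueue(object(), api_client=None, jobs=jobs)    # rejected: no API client
assert len(jobs) == 1
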
16 changes: 16 additions & 0 deletions tests/test_logic.py
@@ -1354,13 +1354,29 @@ def test_APICallRunner_api_call_timeout(mocker):


def test_Controller_on_queue_paused(homedir, config, mocker, session_maker):
'''
Check that a paused queue is communicated to the user via the error status bar, with a retry option.
'''
mock_gui = mocker.MagicMock()
co = Controller('http://localhost', mock_gui, session_maker, homedir)
co.api = 'mock'
co.on_queue_paused()
mock_gui.update_error_status.assert_called_once_with(
'The SecureDrop server cannot be reached.', duration=0, retry=True)


def test_Controller_on_queue_paused_when_logged_out(homedir, config, mocker, session_maker):
'''
Check that a paused queue is communicated to the user via the error status bar. There should
not be a retry option displayed to the user.
'''
mock_gui = mocker.MagicMock()
co = Controller('http://localhost', mock_gui, session_maker, homedir)
co.api = None
co.on_queue_paused()
mock_gui.update_error_status.assert_called_once_with('The SecureDrop server cannot be reached.')


def test_Controller_call_update_star_success(homedir, config, mocker, session_maker, session):
'''
Check that an UpdateStar is submitted to the queue when update_star is called.
1 change: 1 addition & 0 deletions tests/test_queue.py
@@ -309,6 +309,7 @@ def test_ApiJobQueue_enqueue_no_auth(mocker):
mock_start_queues = mocker.patch.object(job_queue, 'start_queues')

dummy_job = factory.dummy_job_factory(mocker, 'mock')()
job_queue.JOB_PRIORITIES = {type(dummy_job): 1}
job_queue.enqueue(dummy_job)

assert not mock_download_file_add_job.called
