Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 19 additions & 1 deletion integration_tests/batch/test_batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def test_batch_openapi(page):
expect(response).to_be_ok()


def test_batch_request(unique_id, register_test_user, test_domain):
def test_batch_request(unique_id, register_test_user, test_domain, docker_compose_exec):
"""A test via the Batch API should succeed."""
request_data = {"type": "web", "domains": [test_domain], "name": unique_id}

Expand Down Expand Up @@ -129,6 +129,24 @@ def test_batch_request(unique_id, register_test_user, test_domain):
response = requests.get(report_url, verify=False)
assert response.status_code == 200, "test results should be publicly accessible without authentication"

# delete result files (this can happen in production when cleanup is done by a cron job)
docker_compose_exec("app", "sh -c 'rm -v /app/batch_results/*'")

# try to get batch results again after files have been deleted (this should trigger a new generation task)
results_response = requests.get(INTERNETNL_API + "requests/" + test_id + "/results", auth=auth, verify=False)
# expect an error because the results need to be generated again
assert results_response.status_code == 400
assert "Report is being generated." in results_response.text

# wait for report generation and batch to be done
wait_for_request_status(INTERNETNL_API + "requests/" + test_id, "done", timeout=60, auth=auth)

# get batch results again after starting generation
results_response = requests.get(INTERNETNL_API + "requests/" + test_id + "/results", auth=auth, verify=False)
print(f"{results_response.text=}")
results_response.raise_for_status()
print("api results JSON:", results_response.text)


def test_batch_static_requires_no_auth():
"""Static files should be available without authentication for viewing batch results."""
Expand Down
3 changes: 3 additions & 0 deletions interface/batch/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -273,6 +273,9 @@ def on_failure(exc, task_id, args, kwargs, einfo):
results = gather_batch_results_technical(user, batch_request, site_url)
save_batch_results_to_file(user, batch_request, results, technical=True)

request_lock_id = redis_id.batch_results_request_lock.id.format(batch_request.request_id)
cache.delete(request_lock_id)


def gather_batch_results(user, batch_request, site_url):
"""
Expand Down
6 changes: 4 additions & 2 deletions interface/batch/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
register_request,
)
from internetnl import log
from .util import request_already_generating


@require_http_methods(["GET", "POST"])
Expand Down Expand Up @@ -70,9 +71,10 @@ def results(request, request_id, *args, technical=False, **kwargs):
return bad_client_request_response("The request is not yet `done`.")
else:
if not batch_request.has_report_file():
if request_already_generating(batch_request.request_id):
return bad_client_request_response("Report is already being generated.")
batch_async_generate_results.delay(user=user, batch_request=batch_request, site_url=get_site_url(request))
return bad_client_request_response("The request is not yet `done`.")

return bad_client_request_response("Report is being generated.")
else:
report_file = batch_request.get_report_file(technical)
try:
Expand Down
19 changes: 16 additions & 3 deletions interface/test/batch/test_batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from django.core.cache import cache
from checks.models import BatchRequest, BatchRequestType, BatchUser, BatchRequestStatus
from interface.batch.util import get_request, register_request
from interface.batch.views import results
from django.test.client import RequestFactory
import pytest
from interface.batch.util import batch_async_generate_results
Expand Down Expand Up @@ -68,15 +69,27 @@ def test_batch_request_result_generation(db, client, mocker):
batch_request.save()

# if batch request is done, a batch_async_generate_results task should be put on the queue to generate the results
get_request(request, batch_request, test_user)
response = get_request(request, batch_request, test_user)
assert response.status_code == 200
assert json.loads(response.content)["request"]["status"] == "generating"
assert batch_async_generate_results.delay.call_count == 1

# there should not be an additional task put on the queue when one is already present
get_request(request, batch_request, test_user)
response = get_request(request, batch_request, test_user)
assert response.status_code == 200
assert json.loads(response.content)["request"]["status"] == "generating"
assert batch_async_generate_results.delay.call_count == 1

# when accessing the results directly there should also not be an extra task added to the queue
assert (
results(request, batch_request.request_id, batch_user=test_user).status_code == 400
), "should return `bad_client_request_response`"
assert batch_async_generate_results.delay.call_count == 1

# if the cache expires a new batch_async_generate_results task can be added
lock_id = redis_id.batch_results_request_lock.id.format(batch_request.request_id)
cache.delete(lock_id)
get_request(request, batch_request, test_user)
response = get_request(request, batch_request, test_user)
assert response.status_code == 200
assert json.loads(response.content)["request"]["status"] == "generating"
assert batch_async_generate_results.delay.call_count == 2