Merge branch 'main' into pre-commit-ci-update-config
elisa-a-v authored Jan 9, 2025
2 parents 9a4c957 + 9f67ed6 commit 8c61cef
Showing 12 changed files with 605 additions and 208 deletions.
2 changes: 1 addition & 1 deletion cl/favorites/api_serializers.py
@@ -67,7 +67,7 @@ def validate(self, data):
)

# Check if the user is eligible to create a new prayer
- if not async_to_sync(prayer_eligible)(user):
+ if not async_to_sync(prayer_eligible)(user)[0]:
raise ValidationError(
f"You have reached the maximum number of prayers ({settings.ALLOWED_PRAYER_COUNT}) allowed in the last 24 hours."
)
9 changes: 1 addition & 8 deletions cl/favorites/templates/user_prayers.html
@@ -14,7 +14,7 @@
{% block content %}
<div class="col-xs-12">
<h1 class="text-center v-offset-below-3">{% if is_page_owner %}Your PACER Document Prayers{% else %}PACER Document Requests for: {{ requested_user }}{% endif %}</h1>
- {% if is_page_owner %}<h3 class="text-center"><b>{{ count|intcomma }}</b> prayers granted totaling <b>${{total_cost|floatformat:2 }}</b>.</h3>{% endif %}
+ {% if is_page_owner %}<h3 class="text-center"><b>{{ count|intcomma }}</b> prayers granted totaling <b>${{total_cost|floatformat:2 }}</b> (<b>{{ num_remaining }}</b> remaining today).</h3>{% endif %}
</div>

<div class="col-xs-12" id="prayer_summary" hx-swap-oob="true"
@@ -23,13 +23,6 @@ <h1 class="text-center v-offset-below-3">{% if is_page_owner %}Your PACER Docume
hx-trigger="prayersListChanged from:body" hx-swap="none"
{%endif %}
>
- <div class="well well-sm">
- {% if is_page_owner %}
- <p>
- {% if is_eligible %}You are eligible to make document requests.{% else %}You have reached your daily limit; wait 24 hours to make new requests.{% endif %}
- </p>
- {% endif %}
- </div>
</div>

<div class="col-xs-12" id="prayer_list" hx-swap-oob="true">
8 changes: 4 additions & 4 deletions cl/favorites/tests.py
@@ -699,7 +699,7 @@ async def test_prayer_eligible(self) -> None:
current_time = now()
with time_machine.travel(current_time, tick=False):
# No user prayers in the last 24 hours yet for this user.
- user_is_eligible = await prayer_eligible(self.user)
+ user_is_eligible, _ = await prayer_eligible(self.user)
self.assertTrue(user_is_eligible)

# Add prayers for this user.
@@ -709,7 +709,7 @@

user_prays = Prayer.objects.filter(user=self.user)
self.assertEqual(await user_prays.acount(), 1)
- user_is_eligible = await prayer_eligible(self.user)
+ user_is_eligible, _ = await prayer_eligible(self.user)
self.assertTrue(user_is_eligible)

await sync_to_async(PrayerFactory)(
@@ -719,7 +719,7 @@

# After two prayers (ALLOWED_PRAYER_COUNT) in the last 24 hours,
# the user is no longer eligible to create more prayers.
- user_is_eligible = await prayer_eligible(self.user)
+ user_is_eligible, _ = await prayer_eligible(self.user)
self.assertFalse(user_is_eligible)

with time_machine.travel(
@@ -730,7 +730,7 @@
user=self.user, recap_document=self.rd_3
)
self.assertEqual(await user_prays.acount(), 3)
- user_is_eligible = await prayer_eligible(self.user)
+ user_is_eligible, _ = await prayer_eligible(self.user)
self.assertTrue(user_is_eligible)

async def test_create_prayer(self) -> None:
8 changes: 5 additions & 3 deletions cl/favorites/utils.py
@@ -28,7 +28,7 @@
from cl.search.models import RECAPDocument


- async def prayer_eligible(user: User) -> bool:
+ async def prayer_eligible(user: User) -> tuple[bool, int]:
allowed_prayer_count = settings.ALLOWED_PRAYER_COUNT

now = timezone.now()
@@ -39,13 +39,15 @@ async def prayer_eligible(user: User) -> bool:
user=user, date_created__gte=last_24_hours
).acount()

- return prayer_count < allowed_prayer_count
+ return prayer_count < allowed_prayer_count, (
+ allowed_prayer_count - prayer_count
+ )


async def create_prayer(
user: User, recap_document: RECAPDocument
) -> Prayer | None:
- if await prayer_eligible(user) and not recap_document.is_available:
+ if (await prayer_eligible(user))[0] and not recap_document.is_available:
new_prayer, created = await Prayer.objects.aget_or_create(
user=user, recap_document=recap_document
)
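The change above alters the prayer_eligible contract from a bare bool to an (is_eligible, num_remaining) tuple, which is why callers throughout this diff either unpack the result or index it with [0]. A minimal, self-contained sketch of the new contract (simplified stand-ins, not code from this commit):

import asyncio

ALLOWED_PRAYER_COUNT = 2  # stand-in for settings.ALLOWED_PRAYER_COUNT


async def prayer_eligible(prayer_count: int) -> tuple[bool, int]:
    # Simplified: the real function counts the user's prayers from the
    # last 24 hours before comparing against the limit.
    return (
        prayer_count < ALLOWED_PRAYER_COUNT,
        ALLOWED_PRAYER_COUNT - prayer_count,
    )


async def main() -> None:
    # Callers that need both values unpack the tuple...
    is_eligible, num_remaining = await prayer_eligible(prayer_count=1)
    print(is_eligible, num_remaining)  # True 1
    # ...while callers that only need the boolean index with [0],
    # as in api_serializers.py and views.py above.
    if not (await prayer_eligible(prayer_count=2))[0]:
        print("limit reached")


asyncio.run(main())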
5 changes: 3 additions & 2 deletions cl/favorites/views.py
@@ -212,7 +212,7 @@ async def create_prayer_view(
user = request.user
is_htmx_request = request.META.get("HTTP_HX_REQUEST", False)
regular_size = bool(request.POST.get("regular_size"))
- if not await prayer_eligible(request.user):
+ if not (await prayer_eligible(request.user))[0]:
if is_htmx_request:
return TemplateResponse(
request,
@@ -291,7 +291,7 @@ async def user_prayers_view(

count, total_cost = await get_user_prayer_history(requested_user)

- is_eligible = await prayer_eligible(requested_user)
+ is_eligible, num_remaining = await prayer_eligible(requested_user)

context = {
"rd_with_prayers": rd_with_prayers,
@@ -300,6 +300,7 @@
"count": count,
"total_cost": total_cost,
"is_eligible": is_eligible,
"num_remaining": num_remaining,
"private": False,
}

34 changes: 12 additions & 22 deletions cl/recap/api_serializers.py
@@ -95,10 +95,10 @@ def validate(self, attrs):
UPLOAD_TYPE.CASE_QUERY_RESULT_PAGE,
]:
# These are district or bankruptcy court dockets. Is the court valid?
- court_ids = Court.federal_courts.district_or_bankruptcy_pacer_courts().values_list(
- "pk", flat=True
+ court_ids = (
+ Court.federal_courts.district_or_bankruptcy_pacer_courts()
)
- if attrs["court"].pk not in court_ids:
+ if not court_ids.filter(pk=attrs["court"].pk).exists():
raise ValidationError(
"%s is not a district or bankruptcy court ID. Did you "
"mean to use the upload_type for appellate dockets?"
@@ -108,11 +108,9 @@ def validate(self, attrs):
if attrs["upload_type"] == UPLOAD_TYPE.CLAIMS_REGISTER:
# Only allowed on bankruptcy courts
bankruptcy_court_ids = (
- Court.federal_courts.bankruptcy_pacer_courts().values_list(
- "pk", flat=True
- )
+ Court.federal_courts.bankruptcy_pacer_courts()
)
- if attrs["court"].pk not in bankruptcy_court_ids:
+ if not bankruptcy_court_ids.filter(pk=attrs["court"].pk).exists():
raise ValidationError(
"%s is not a bankruptcy court ID. Only bankruptcy cases "
"should have claims registry pages." % attrs["court"]
@@ -127,12 +125,8 @@ def validate(self, attrs):
UPLOAD_TYPE.APPELLATE_CASE_QUERY_RESULT_PAGE,
]:
# Appellate court dockets. Is the court valid?
- appellate_court_ids = (
- Court.federal_courts.appellate_pacer_courts().values_list(
- "pk", flat=True
- )
- )
- if attrs["court"].pk not in appellate_court_ids:
+ appellate_court_ids = Court.federal_courts.appellate_pacer_courts()
+ if not appellate_court_ids.filter(pk=attrs["court"].pk).exists():
raise ValidationError(
"%s is not an appellate court ID. Did you mean to use the "
"upload_type for district dockets?" % attrs["court"]
@@ -203,11 +197,8 @@ def validate(self, attrs):
mail = attrs["mail"]
receipt = attrs["receipt"]

- all_court_ids = Court.federal_courts.all_pacer_courts().values_list(
- "pk", flat=True
- )
-
- if court_id not in all_court_ids:
+ all_court_ids = Court.federal_courts.all_pacer_courts()
+ if not all_court_ids.filter(pk=court_id).exists():
raise ValidationError(
f"{attrs['court'].pk} is not a PACER court ID."
)
@@ -274,10 +265,9 @@ class Meta:

def validate(self, attrs):
# Is it a good court value?
- valid_court_ids = Court.federal_courts.district_or_bankruptcy_pacer_courts().values_list(
- "pk", flat=True
+ valid_court_ids = (
+ Court.federal_courts.district_or_bankruptcy_pacer_courts()
)
-
if (
attrs.get("court")
or attrs.get("docket")
@@ -293,7 +283,7 @@ def validate(self, attrs):
if attrs.get("court")
else attrs["docket"].court_id
)
- if court_id not in valid_court_ids:
+ if not valid_court_ids.filter(pk=court_id).exists():
raise ValidationError(f"Invalid court id: {court_id}")

# Docket validations
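All four validations in this file move from materializing every court ID with values_list("pk", flat=True) and testing membership in Python to asking the database directly. A sketch of the resulting pattern, assuming the Court model and the federal_courts manager methods that appear in this diff:

from cl.search.models import Court


def is_valid_pacer_court(court_id: str) -> bool:
    # The queryset stays lazy; no IDs are fetched into memory.
    court_ids = Court.federal_courts.all_pacer_courts()
    # .filter(...).exists() issues a single EXISTS query instead of
    # pulling the full ID list and scanning it with `in`.
    return court_ids.filter(pk=court_id).exists()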
1 change: 1 addition & 0 deletions cl/recap/factories.py
@@ -93,6 +93,7 @@ class RECAPEmailDocketEntryDataFactory(DictFactory):
pacer_doc_id = Faker("random_id_string")
pacer_magic_num = Faker("random_id_string")
pacer_seq_no = Faker("random_id_string")
+ short_description = Faker("text", max_nb_chars=15)


class RECAPEmailDocketDataFactory(DictFactory):
112 changes: 57 additions & 55 deletions cl/recap/mergers.py
@@ -822,6 +822,35 @@ async def get_or_make_docket_entry(
return de, de_created


+ async def keep_latest_rd_document(queryset: QuerySet) -> RECAPDocument:
+ """Retains the most recent item with a PDF if available; otherwise,
+ retains the most recent item overall.
+ :param queryset: RECAPDocument QuerySet to clean duplicates from.
+ :return: The matched RECAPDocument after cleaning.
+ """
+ rd_with_pdf_queryset = queryset.filter(is_available=True).exclude(
+ filepath_local=""
+ )
+ if await rd_with_pdf_queryset.aexists():
+ rd = await rd_with_pdf_queryset.alatest("date_created")
+ else:
+ rd = await queryset.alatest("date_created")
+ await queryset.exclude(pk=rd.pk).adelete()
+ return rd
+
+
+ async def clean_duplicate_documents(params: dict[str, Any]) -> RECAPDocument:
+ """Removes duplicate RECAPDocuments, keeping the most recent with a PDF
+ if available, or otherwise the most recent overall.
+ :param params: Query parameters to filter the RECAPDocuments.
+ :return: The matched RECAPDocument after cleaning.
+ """
+ duplicate_rd_queryset = RECAPDocument.objects.filter(**params)
+ return await keep_latest_rd_document(duplicate_rd_queryset)


async def add_docket_entries(
d: Docket,
docket_entries: list[dict[str, Any]],
@@ -934,35 +963,39 @@ async def add_docket_entries(
rd = await RECAPDocument.objects.aget(**get_params)
rds_updated.append(rd)
except RECAPDocument.DoesNotExist:
- try:
- params["pacer_doc_id"] = docket_entry["pacer_doc_id"]
- rd = await RECAPDocument.objects.acreate(
- document_number=docket_entry["document_number"] or "",
- is_available=False,
- **params,
- )
- except ValidationError:
- # Happens from race conditions.
- continue
- rds_created.append(rd)
+ rd = None
+ if de_created is False and not appelate_court_id_exists:
+ try:
+ # Check for documents with a bad pacer_doc_id
+ rd = await RECAPDocument.objects.aget(**params)
+ except RECAPDocument.DoesNotExist:
+ # Fallback to creating document
+ pass
+ except RECAPDocument.MultipleObjectsReturned:
+ rd = await clean_duplicate_documents(params)
+ if rd is None:
+ try:
+ params["pacer_doc_id"] = docket_entry["pacer_doc_id"]
+ rd = await RECAPDocument.objects.acreate(
+ document_number=docket_entry["document_number"] or "",
+ is_available=False,
+ **params,
+ )
+ rds_created.append(rd)
+ except ValidationError:
+ # Happens from race conditions.
+ continue
except RECAPDocument.MultipleObjectsReturned:
logger.info(
"Multiple recap documents found for document entry number'%s' "
"while processing '%s'" % (docket_entry["document_number"], d)
)
if params["document_type"] == RECAPDocument.ATTACHMENT:
continue
- duplicate_rd_queryset = RECAPDocument.objects.filter(**params)
- rd_with_pdf_queryset = duplicate_rd_queryset.filter(
- is_available=True
- ).exclude(filepath_local="")
- if await rd_with_pdf_queryset.aexists():
- rd = await rd_with_pdf_queryset.alatest("date_created")
- else:
- rd = await duplicate_rd_queryset.alatest("date_created")
- await duplicate_rd_queryset.exclude(pk=rd.pk).adelete()
+ rd = await clean_duplicate_documents(params)

- rd.pacer_doc_id = rd.pacer_doc_id or docket_entry["pacer_doc_id"]
+ if docket_entry["pacer_doc_id"]:
+ rd.pacer_doc_id = docket_entry["pacer_doc_id"]
description = docket_entry.get("short_description")
if rd.document_type == RECAPDocument.PACER_DOCUMENT and description:
rd.description = description
@@ -1604,14 +1637,7 @@ async def clean_duplicate_attachment_entries(
)
async for dupe in dupes.aiterator():
duplicate_rd_queryset = rds.filter(pacer_doc_id=dupe.pacer_doc_id)
- rd_with_pdf_queryset = duplicate_rd_queryset.filter(
- is_available=True
- ).exclude(filepath_local="")
- if await rd_with_pdf_queryset.aexists():
- keep_rd = await rd_with_pdf_queryset.alatest("date_created")
- else:
- keep_rd = await duplicate_rd_queryset.alatest("date_created")
- await duplicate_rd_queryset.exclude(pk=keep_rd.pk).adelete()
+ await keep_latest_rd_document(duplicate_rd_queryset)


async def merge_attachment_page_data(
@@ -1673,15 +1699,7 @@

except RECAPDocument.MultipleObjectsReturned as exc:
if pacer_case_id:
- duplicate_rd_queryset = RECAPDocument.objects.filter(**params)
- rd_with_pdf_queryset = duplicate_rd_queryset.filter(
- is_available=True
- ).exclude(filepath_local="")
- if await rd_with_pdf_queryset.aexists():
- keep_rd = await rd_with_pdf_queryset.alatest("date_created")
- else:
- keep_rd = await duplicate_rd_queryset.alatest("date_created")
- await duplicate_rd_queryset.exclude(pk=keep_rd.pk).adelete()
+ await clean_duplicate_documents(params)
main_rd = await RECAPDocument.objects.select_related(
"docket_entry", "docket_entry__docket"
).aget(**params)
@@ -1711,23 +1729,7 @@
break
except RECAPDocument.MultipleObjectsReturned as exc:
if pacer_case_id:
- duplicate_rd_queryset = RECAPDocument.objects.filter(
- **params
- )
- rd_with_pdf_queryset = duplicate_rd_queryset.filter(
- is_available=True
- ).exclude(filepath_local="")
- if await rd_with_pdf_queryset.aexists():
- keep_rd = await rd_with_pdf_queryset.alatest(
- "date_created"
- )
- else:
- keep_rd = await duplicate_rd_queryset.alatest(
- "date_created"
- )
- await duplicate_rd_queryset.exclude(
- pk=keep_rd.pk
- ).adelete()
+ await clean_duplicate_documents(params)
main_rd = await RECAPDocument.objects.select_related(
"docket_entry", "docket_entry__docket"
).aget(**params)
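The refactor above collapses four copies of the same duplicate-cleanup logic into keep_latest_rd_document and its clean_duplicate_documents wrapper: prefer the newest duplicate that actually has a PDF on disk, otherwise the newest duplicate overall, and delete the rest. A standalone sketch of that selection rule over plain objects (names here are illustrative stand-ins, not code from the repository):

from dataclasses import dataclass
from datetime import datetime


@dataclass
class Doc:  # hypothetical stand-in for RECAPDocument
    pk: int
    is_available: bool
    filepath_local: str
    date_created: datetime


def keep_latest(docs: list[Doc]) -> tuple[Doc, list[Doc]]:
    # Same rule as keep_latest_rd_document: a duplicate with a PDF
    # (is_available and a non-empty file path) beats one without,
    # and date_created decides in favor of the newest candidate.
    with_pdf = [d for d in docs if d.is_available and d.filepath_local]
    pool = with_pdf or docs
    kept = max(pool, key=lambda d: d.date_created)
    return kept, [d for d in docs if d.pk != kept.pk]


docs = [
    Doc(pk=1, is_available=False, filepath_local="", date_created=datetime(2025, 1, 8)),
    Doc(pk=2, is_available=True, filepath_local="pdfs/2.pdf", date_created=datetime(2025, 1, 7)),
]
kept, to_delete = keep_latest(docs)
print(kept.pk)  # 2: older, but it is the duplicate with a PDF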
