fix(recap): Fixed RecapUploadsTest merge conflicts
albertisfu committed Jan 7, 2025
1 parent a925ccc commit f68adfc
Showing 1 changed file with 96 additions and 96 deletions.
192 changes: 96 additions & 96 deletions cl/recap/tests.py
@@ -772,6 +772,102 @@ def test_processing_an_acms_attachment_page(self, mock_upload):
main_attachment[0].document_type, RECAPDocument.ATTACHMENT
)

def test_match_recap_document_with_wrong_pacer_doc_id(self, mock_upload):
"""Confirm that when an existing RECAPDocument has an invalid
pacer_doc_id, we can still match it after excluding the pacer_doc_id
from the lookup.
"""

de_data = DocketEntriesDataFactory(
docket_entries=[
RECAPEmailDocketEntryDataFactory(
pacer_doc_id="04505578690",
document_number=5,
)
],
)
de = DocketEntryWithParentsFactory(
docket__court=self.court, entry_number=5
)
rd = RECAPDocumentFactory(
docket_entry=de,
document_type=RECAPDocument.PACER_DOCUMENT,
pacer_doc_id="04505578691",
document_number="5",
description="",
)
# Add the docket entry with the updated pacer_doc_id
async_to_sync(add_docket_entries)(de.docket, de_data["docket_entries"])
recap_documents = RECAPDocument.objects.all()
self.assertEqual(
recap_documents.count(), 1, msg="Wrong number of RECAPDocuments"
)
rd.refresh_from_db()
self.assertEqual(
rd.description,
de_data["docket_entries"][0]["short_description"],
msg="The short description doesn't match.",
)
self.assertEqual(
rd.pacer_doc_id,
de_data["docket_entries"][0]["pacer_doc_id"],
msg="The pacer_doc_id doesn't match.",
)

def test_match_recap_document_with_wrong_pacer_doc_id_duplicated(
self, mock_upload
):
"""Confirm that when an existing RECAPDocument has an invalid
pacer_doc_id, we can still match it after excluding the pacer_doc_id
from the lookup, even if there is more than one PACER_DOCUMENT that
belongs to the docket entry.
"""

de_data = DocketEntriesDataFactory(
docket_entries=[
RECAPEmailDocketEntryDataFactory(
pacer_doc_id="04505578690",
document_number=5,
)
],
)
de = DocketEntryWithParentsFactory(
docket__court=self.court, entry_number=5
)
RECAPDocumentFactory(
document_type=RECAPDocument.PACER_DOCUMENT,
docket_entry=de,
pacer_doc_id="04505578691",
document_number="5",
description="",
)
rd_2 = RECAPDocumentFactory(
document_type=RECAPDocument.PACER_DOCUMENT,
docket_entry=de,
pacer_doc_id="04505578691",
document_number="6",
description="",
is_available=True,
)
# Add the docket entry with the updated pacer_doc_id, remove the
# duplicated RD, and keep the one that is available.
async_to_sync(add_docket_entries)(de.docket, de_data["docket_entries"])
recap_documents = RECAPDocument.objects.all()
self.assertEqual(
recap_documents.count(), 1, msg="Wrong number of RECAPDocuments"
)
rd_2.refresh_from_db()
self.assertEqual(
rd_2.description,
de_data["docket_entries"][0]["short_description"],
msg="The short description doesn't match.",
)
self.assertEqual(
rd_2.pacer_doc_id,
de_data["docket_entries"][0]["pacer_doc_id"],
msg="The pacer_doc_id doesn't match.",
)


class ReplicateRecapUploadsTest(TestCase):
"""Test RECAP uploads are properly replicated to subdockets."""
@@ -1304,102 +1400,6 @@ def test_processing_subdocket_case_pdf_attachment_upload(

transaction.set_rollback(True)

def test_match_recap_document_with_wrong_pacer_doc_id(self, mock_upload):
"""Confirm that when an existing RECAPDocument has an invalid
pacer_doc_id, we can still match it after excluding the pacer_doc_id
from the lookup.
"""

de_data = DocketEntriesDataFactory(
docket_entries=[
RECAPEmailDocketEntryDataFactory(
pacer_doc_id="04505578690",
document_number=5,
)
],
)
de = DocketEntryWithParentsFactory(
docket__court=self.court, entry_number=5
)
rd = RECAPDocumentFactory(
docket_entry=de,
document_type=RECAPDocument.PACER_DOCUMENT,
pacer_doc_id="04505578691",
document_number="5",
description="",
)
# Add the docket entry with the updated pacer_doc_id
async_to_sync(add_docket_entries)(de.docket, de_data["docket_entries"])
recap_documents = RECAPDocument.objects.all()
self.assertEqual(
recap_documents.count(), 1, msg="Wrong number of RECAPDocuments"
)
rd.refresh_from_db()
self.assertEqual(
rd.description,
de_data["docket_entries"][0]["short_description"],
msg="The short description doesn't match.",
)
self.assertEqual(
rd.pacer_doc_id,
de_data["docket_entries"][0]["pacer_doc_id"],
msg="The pacer_doc_id doesn't match.",
)

def test_match_recap_document_with_wrong_pacer_doc_id_duplicated(
self, mock_upload
):
"""Confirm that when an existing RECAPDocument has an invalid
pacer_doc_id, we can still match it after excluding the pacer_doc_id
from the lookup, even if there is more than one PACER_DOCUMENT that
belongs to the docket entry.
"""

de_data = DocketEntriesDataFactory(
docket_entries=[
RECAPEmailDocketEntryDataFactory(
pacer_doc_id="04505578690",
document_number=5,
)
],
)
de = DocketEntryWithParentsFactory(
docket__court=self.court, entry_number=5
)
RECAPDocumentFactory(
document_type=RECAPDocument.PACER_DOCUMENT,
docket_entry=de,
pacer_doc_id="04505578691",
document_number="5",
description="",
)
rd_2 = RECAPDocumentFactory(
document_type=RECAPDocument.PACER_DOCUMENT,
docket_entry=de,
pacer_doc_id="04505578691",
document_number="6",
description="",
is_available=True,
)
# Add the docket entry with the updated pacer_doc_id, remove the
# duplicated RD, and keep the one that is available.
async_to_sync(add_docket_entries)(de.docket, de_data["docket_entries"])
recap_documents = RECAPDocument.objects.all()
self.assertEqual(
recap_documents.count(), 1, msg="Wrong number of RECAPDocuments"
)
rd_2.refresh_from_db()
self.assertEqual(
rd_2.description,
de_data["docket_entries"][0]["short_description"],
msg="The short description doesn't match.",
)
self.assertEqual(
rd_2.pacer_doc_id,
de_data["docket_entries"][0]["pacer_doc_id"],
msg="The pacer_doc_id doesn't match.",
)


@mock.patch("cl.recap.tasks.DocketReport", new=fakes.FakeDocketReport)
@mock.patch(
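For context, the matching behavior these moved tests exercise works in two steps: the merge code first looks up an existing RECAPDocument by pacer_doc_id; when the stored value is wrong and nothing matches, it retries the lookup without pacer_doc_id, corrects the stored pacer_doc_id and short description from the incoming docket entry data, and, if the relaxed lookup returns more than one PACER_DOCUMENT for the entry, keeps the one marked available and drops the duplicate. The snippet below is a minimal, framework-free sketch of that fallback under those assumptions; the FakeRECAPDocument dataclass and match_recap_document helper are illustrative stand-ins, not CourtListener's actual models or merge code.

from dataclasses import dataclass

PACER_DOCUMENT = 1  # stand-in for RECAPDocument.PACER_DOCUMENT


@dataclass
class FakeRECAPDocument:
    # Illustrative stand-in for a RECAPDocument row, not the real Django model.
    document_type: int
    pacer_doc_id: str
    document_number: str
    description: str = ""
    is_available: bool = False


def match_recap_document(existing, new_pacer_doc_id, short_description):
    """Sketch of the fallback match: prefer an exact pacer_doc_id hit,
    otherwise relax the lookup, keep the available duplicate, and fix
    the stored metadata from the incoming docket entry data."""
    # 1. Try the strict lookup on pacer_doc_id.
    matches = [rd for rd in existing if rd.pacer_doc_id == new_pacer_doc_id]
    if not matches:
        # 2. Retry excluding pacer_doc_id: any PACER_DOCUMENT on the entry.
        matches = [rd for rd in existing if rd.document_type == PACER_DOCUMENT]
    if not matches:
        return None, existing
    # 3. If the relaxed lookup found duplicates, prefer the available one.
    keep = next((rd for rd in matches if rd.is_available), matches[0])
    survivors = [rd for rd in existing if rd is keep or rd not in matches]
    # 4. Correct the stored pacer_doc_id and description.
    keep.pacer_doc_id = new_pacer_doc_id
    keep.description = short_description
    return keep, survivors


Mirroring the duplicated-document test above (the short description value here is made up for illustration):

rd_1 = FakeRECAPDocument(PACER_DOCUMENT, "04505578691", "5")
rd_2 = FakeRECAPDocument(PACER_DOCUMENT, "04505578691", "6", is_available=True)
kept, remaining = match_recap_document([rd_1, rd_2], "04505578690", "Notice of Appeal")
assert kept is rd_2 and len(remaining) == 1 and kept.pacer_doc_id == "04505578690"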
