Skip to content

Commit

Permalink
Ajout job pour mettre en favoris les JDDs du pilote (#4438)
Browse files Browse the repository at this point in the history
  • Loading branch information
AntoineAugusti authored Feb 5, 2025
1 parent e016e65 commit 6bc3a4d
Show file tree
Hide file tree
Showing 4 changed files with 128 additions and 1 deletion.
2 changes: 1 addition & 1 deletion apps/transport/lib/db/dataset_follower.ex
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ defmodule DB.DatasetFollower do
typed_schema "dataset_followers" do
belongs_to(:dataset, DB.Dataset)
belongs_to(:contact, DB.Contact)
field(:source, Ecto.Enum, values: [:datagouv, :follow_button])
field(:source, Ecto.Enum, values: [:datagouv, :follow_button, :improved_data_pilot])
timestamps(type: :utc_datetime_usec)
end

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
defmodule Transport.Jobs.ImportDatasetFollowerReuserImprovedDataJob do
  @moduledoc """
  Part of the reuser improved data pilot: automatically adds the pilot's
  eligible datasets to the favorites of every contact belonging to a
  participating organization.

  Datasets a contact already follows are skipped, so the job is safe to
  re-run (it is scheduled daily).
  """
  use Oban.Worker, max_attempts: 3
  import Ecto.Query

  @impl Oban.Worker
  def perform(%Oban.Job{}) do
    eligible_ids = relevant_dataset_ids()

    Enum.each(relevant_contacts(), &follow_missing_datasets(&1, eligible_ids))
  end

  # Makes `contact` follow every eligible dataset not already in their
  # favorites, tagged with the `:improved_data_pilot` source.
  defp follow_missing_datasets(%DB.Contact{followed_datasets: already_followed} = contact, eligible_ids) do
    already_followed_ids = MapSet.new(already_followed, & &1.id)

    eligible_ids
    |> Enum.reject(&MapSet.member?(already_followed_ids, &1))
    |> Enum.each(fn dataset_id ->
      DB.DatasetFollower.follow!(contact, %DB.Dataset{id: dataset_id}, source: :improved_data_pilot)
    end)
  end

  @doc """
  Contacts who are members of a datagouv organization taking part in the
  pilot, with their followed datasets preloaded (only the dataset `id` is
  selected, since that is all `perform/1` needs).
  """
  def relevant_contacts do
    follows_preload = from(d in DB.Dataset, select: [:id])
    eligible_org_ids = Application.fetch_env!(:transport, :data_sharing_pilot_eligible_datagouv_organization_ids)

    DB.Contact.base_query()
    |> preload(followed_datasets: ^follows_preload)
    |> join(:inner, [contact: c], o in assoc(c, :organizations), as: :organization)
    |> where([organization: o], o.id in ^eligible_org_ids)
    |> select([contact: c], c)
    |> DB.Repo.all()
  end

  @doc """
  IDs of the public-transit datasets carrying the pilot's custom tag
  (read at runtime from the `:data_sharing_pilot_dataset_custom_tag` config).
  """
  def relevant_dataset_ids do
    DB.Dataset.base_query()
    |> where([dataset: d], d.type == "public-transit")
    |> DB.Dataset.filter_by_custom_tag(Application.fetch_env!(:transport, :data_sharing_pilot_dataset_custom_tag))
    |> select([dataset: d], d.id)
    |> DB.Repo.all()
  end
end
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
defmodule Transport.Test.Transport.Jobs.ImportDatasetFollowerReuserImprovedDataJobTest do
  use ExUnit.Case, async: true
  use Oban.Testing, repo: DB.Repo
  import DB.Factory
  import Ecto.Query
  import Transport.Jobs.ImportDatasetFollowerReuserImprovedDataJob
  alias Transport.Jobs.ImportDatasetFollowerReuserImprovedDataJob

  # Custom tag marking a dataset as part of the pilot — must match the
  # `:data_sharing_pilot_dataset_custom_tag` app config (asserted in its own test below).
  @dataset_custom_tag "repartage_donnees"
  # Datagouv organization IDs used as pilot participants in this test.
  # NOTE(review): presumably these appear in the
  # `:data_sharing_pilot_eligible_datagouv_organization_ids` config — not asserted here, confirm.
  @google_maps_org_id "63fdfe4f4cd1c437ac478323"
  @transit_org_id "5c9a6477634f4133c7a5fc01"

  setup do
    # Each test gets its own sandboxed DB connection.
    Ecto.Adapters.SQL.Sandbox.checkout(DB.Repo)
  end

  test "perform" do
    # Contact with no organization: must be ignored by the job.
    random_contact = insert_contact()
    google_maps_org = insert(:organization, id: @google_maps_org_id)

    # Contact in a participating org, with no favorites yet.
    google_maps_contact =
      insert_contact(%{
        datagouv_user_id: Ecto.UUID.generate(),
        organizations: [google_maps_org |> Map.from_struct()]
      })

    transit_org = insert(:organization, id: @transit_org_id)

    # Contact in another participating org.
    transit_contact =
      insert_contact(%{
        datagouv_user_id: Ecto.UUID.generate(),
        organizations: [transit_org |> Map.from_struct()]
      })

    # Only public-transit datasets carrying the pilot tag are eligible.
    eligible_dataset = insert(:dataset, custom_tags: [@dataset_custom_tag], type: "public-transit")
    other_eligible_dataset = insert(:dataset, custom_tags: [@dataset_custom_tag], type: "public-transit")
    random_dataset = insert(:dataset)

    # Pre-existing favorites for `transit_contact`: one eligible dataset and
    # one unrelated dataset, both added through the follow button.
    insert(:dataset_follower, contact: transit_contact, dataset: eligible_dataset, source: :follow_button)
    insert(:dataset_follower, contact: transit_contact, dataset: random_dataset, source: :follow_button)

    assert MapSet.new([eligible_dataset.id, other_eligible_dataset.id]) == relevant_dataset_ids() |> MapSet.new()

    assert MapSet.new([google_maps_contact.id, transit_contact.id]) ==
             relevant_contacts() |> Enum.map(& &1.id) |> MapSet.new()

    # Nothing has been imported by the pilot job yet.
    assert 0 ==
             DB.DatasetFollower.base_query()
             |> where([dataset_follower: df], df.source == :improved_data_pilot)
             |> DB.Repo.aggregate(:count)

    assert :ok == perform_job(ImportDatasetFollowerReuserImprovedDataJob, %{})

    # `random_contact` has no favorites
    # `google_maps_contact` had 0 and now follows eligible datasets
    # `transit_contact` followed a random dataset and an eligible one, the other eligible dataset has been added
    assert MapSet.new([]) == followed_dataset_ids(random_contact)
    assert MapSet.new([eligible_dataset.id, other_eligible_dataset.id]) == followed_dataset_ids(google_maps_contact)

    assert MapSet.new([eligible_dataset.id, other_eligible_dataset.id, random_dataset.id]) ==
             followed_dataset_ids(transit_contact)

    # 3 rows created by the job: 2 for `google_maps_contact`, 1 for `transit_contact`
    # (their existing `:follow_button` favorite of `eligible_dataset` was skipped).
    assert 3 ==
             DB.DatasetFollower.base_query()
             |> where([dataset_follower: df], df.source == :improved_data_pilot)
             |> DB.Repo.aggregate(:count)

    # Can run the job again without problems, existing favorites are ignored
    assert :ok == perform_job(ImportDatasetFollowerReuserImprovedDataJob, %{})
  end

  test "dataset custom tag has the right value" do
    # Guards against the module attribute drifting from the runtime config.
    assert @dataset_custom_tag == Application.fetch_env!(:transport, :data_sharing_pilot_dataset_custom_tag)
  end

  # Returns the set of dataset IDs currently followed by `contact`,
  # reloaded from the DB (regardless of follow source).
  defp followed_dataset_ids(%DB.Contact{} = contact) do
    contact
    |> DB.Repo.preload(:followed_datasets)
    |> Map.fetch!(:followed_datasets)
    |> Enum.map(& &1.id)
    |> MapSet.new()
  end
end
1 change: 1 addition & 0 deletions config/runtime.exs
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,7 @@ oban_prod_crontab = [
# The job will make sure that it's executed only on the first Monday of these months
{"15 8 * 3,6,11 1", Transport.Jobs.PeriodicReminderProducersNotificationJob},
{"15 5 * * *", Transport.Jobs.ImportDatasetFollowersJob},
{"5 5 * * *", Transport.Jobs.ImportDatasetFollowerReuserImprovedDataJob},
{"20 5 * * *", Transport.Jobs.ImportDatasetContactPointsJob},
# Should be ideally executed after `GBFSMultiValidationDispatcherJob` to use fresh metadata
{"30 8 * * *", Transport.Jobs.ImportGBFSFeedContactEmailJob},
Expand Down

0 comments on commit 6bc3a4d

Please sign in to comment.