Merge branch 'master' into update_contacts_job_404_410
AntoineAugusti authored Jan 8, 2024
2 parents d0c4d54 + e20d386 commit 33dc584
Showing 13 changed files with 227 additions and 201 deletions.
9 changes: 5 additions & 4 deletions apps/transport/lib/transport_web/views/dataset_view.ex
@@ -15,13 +15,14 @@ defmodule TransportWeb.DatasetView do
@gtfs_rt_validator_name Transport.Validators.GTFSRT.validator_name()

@doc """
Count the number of resources (official + community), excluding resources with a `documentation` type.
Count the number of resources, excluding:
- community resources
- resources with a `documentation` type.
"""
@spec count_resources(Dataset.t()) :: non_neg_integer
def count_resources(dataset) do
nb_resources = Enum.count(official_available_resources(dataset))
nb_community_resources = Enum.count(community_resources(dataset))
nb_resources + nb_community_resources - count_documentation_resources(dataset)
nb_official_resources = dataset |> official_available_resources() |> Enum.count()
nb_official_resources - count_documentation_resources(dataset)
end

@spec count_documentation_resources(Dataset.t()) :: non_neg_integer
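
For context, a minimal sketch of the counting behaviour this change aims for, mirroring the updated DatasetViewTest further down (the resources and their types are illustrative):

# Illustrative dataset: 2 documentation resources, 1 main resource,
# and 1 main resource flagged as a community resource
dataset = dataset |> DB.Repo.preload(:resources)

count_documentation_resources(dataset)
# => 2
count_resources(dataset)
# => 1 (community and documentation resources are no longer counted)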
@@ -0,0 +1,31 @@
defmodule DB.Repo.Migrations.DatasetAddNotNull do
use Ecto.Migration

@attributes [
:datagouv_id,
:custom_title,
:licence,
:logo,
:full_logo,
:slug,
:tags,
:datagouv_title,
:type,
:frequency,
:has_realtime,
:is_active,
:nb_reuses
]

def up do
Enum.each(@attributes, fn attribute ->
execute("ALTER TABLE dataset ALTER COLUMN #{attribute} SET NOT NULL")
end)
end

def down do
Enum.each(@attributes, fn attribute ->
execute("ALTER TABLE dataset ALTER COLUMN #{attribute} DROP NOT NULL")
end)
end
end
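
This new migration is applied and reverted with the standard Ecto Mix tasks (nothing specific to this commit); note that PostgreSQL rejects `ALTER COLUMN ... SET NOT NULL` if any existing row still has a NULL in that column, so the data must already satisfy the constraint:

mix ecto.migrate    # runs up/0 and adds the NOT NULL constraints
mix ecto.rollback   # runs down/0 and drops them again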
38 changes: 35 additions & 3 deletions apps/transport/test/support/factory.ex
@@ -59,7 +59,13 @@ defmodule DB.Factory do
# NOTE: need to figure out how to pass aom/region together with changeset checks here
aom: build(:aom),
tags: [],
type: "public-transit"
type: "public-transit",
logo: "https://example.com/#{Ecto.UUID.generate()}_small.png",
full_logo: "https://example.com/#{Ecto.UUID.generate()}.png",
frequency: "daily",
has_realtime: false,
is_active: true,
nb_reuses: Enum.random(0..10)
}
end

@@ -211,15 +217,17 @@
region_id: Keyword.get(opts, :region_id),
has_realtime: Keyword.get(opts, :has_realtime),
type: Keyword.get(opts, :type),
aom: Keyword.get(opts, :aom),
aom: aom = Keyword.get(opts, :aom),
custom_title: Keyword.get(opts, :custom_title)
]

dataset_opts =
case Keyword.get(opts, :aom) do
aom
|> case do
nil -> dataset_opts
aom -> dataset_opts |> Keyword.merge(aom: aom)
end
|> Enum.reject(fn {_, v} -> is_nil(v) end)

dataset = Keyword.get(opts, :dataset, insert(:dataset, dataset_opts))

@@ -319,4 +327,28 @@
|> Map.merge(args)
|> DB.Contact.insert!()
end

def datagouv_dataset_response(%{} = attributes \\ %{}) do
Map.merge(
%{
"id" => Ecto.UUID.generate(),
"title" => "dataset",
"created_at" => DateTime.utc_now() |> to_string(),
"last_update" => DateTime.utc_now() |> to_string(),
"slug" => "dataset-slug",
"license" => "lov2",
"frequency" => "daily",
"tags" => [],
"organization" => %{
"id" => Ecto.UUID.generate(),
"name" => "Org " <> Ecto.UUID.generate(),
"badges" => [],
"logo" => "https://example.com/img.jpg",
"logo_thumbnail" => "https://example.com/img.small.jpg",
"slug" => Ecto.UUID.generate()
}
},
attributes
)
end
end
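
A minimal usage sketch of the new datagouv_dataset_response/1 helper, in the spirit of the call sites updated below (the override values are illustrative):

# Full payload with a random datagouv id and organization
payload = DB.Factory.datagouv_dataset_response()

# Override selected keys, as the import and backoffice tests do
payload = DB.Factory.datagouv_dataset_response(%{"id" => "some-datagouv-id", "resources" => []})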
@@ -13,7 +13,11 @@ defmodule Transport.CommunityResourcesCleanerTest do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(DB.Repo)
end

defp insert_dataset_associated_with_ressources(resources, datagouv_id \\ nil) do
defp insert_dataset_associated_with_ressources(resources) do
insert_dataset_associated_with_ressources(resources, Ecto.UUID.generate())
end

defp insert_dataset_associated_with_ressources(resources, datagouv_id) do
:dataset
|> insert(%{datagouv_id: datagouv_id})
|> Repo.preload(:resources)
31 changes: 3 additions & 28 deletions apps/transport/test/transport/import_data_test.exs
@@ -39,44 +39,19 @@ defmodule Transport.ImportDataTest do
"id" => id || "resource1_id",
"type" => "main",
"filetype" => filetype || "remote",
"format" => "zip",
"last_modified" => DateTime.utc_now() |> DateTime.add(-1, :hour) |> DateTime.to_iso8601(),
"schema" => %{"name" => schema_name, "version" => schema_version}
}
]
end

def generate_dataset_payload(datagouv_id, resources \\ nil) do
resources = resources || generate_resources_payload()

%{
"title" => "dataset1",
"id" => datagouv_id,
"created_at" => DateTime.utc_now() |> to_string(),
"last_update" => DateTime.utc_now() |> to_string(),
"slug" => "dataset-slug",
"resources" => resources,
"organization" => %{
"id" => Ecto.UUID.generate(),
"name" => "Org " <> Ecto.UUID.generate(),
"badges" => [],
"logo" => "https://example.com/img.jpg",
"logo_thumbnail" => "https://example.com/img.small.jpg",
"slug" => Ecto.UUID.generate()
}
}
datagouv_dataset_response(%{"id" => datagouv_id, "resources" => resources || generate_resources_payload()})
end

def insert_national_dataset(datagouv_id) do
{:ok, changes} =
DB.Dataset.changeset(%{
"created_at" => DateTime.utc_now(),
"last_update" => DateTime.utc_now(),
"datagouv_id" => datagouv_id,
"slug" => "ma_limace",
"national_dataset" => "true"
})

DB.Repo.insert!(changes)
insert(:dataset, datagouv_id: datagouv_id, aom: nil, region_id: DB.Repo.get_by!(DB.Region, nom: "National").id)
end

def http_get_mock_200(datagouv_id, payload \\ nil) do
@@ -253,7 +253,7 @@ defmodule TransportWeb.API.DatasetControllerTest do

test "GET /api/datasets/:id *without* history, multi_validation and resource_metadata", %{conn: conn} do
dataset =
%DB.Dataset{
insert(:dataset,
custom_title: "title",
is_active: true,
type: "public-transit",
@@ -287,9 +287,7 @@
created_at: ~U[2021-12-23 13:30:40.000000Z],
last_update: DateTime.utc_now(),
aom: %DB.AOM{id: 4242, nom: "Angers Métropole", siren: "siren"}
}
|> DB.Repo.insert!()
|> DB.Repo.preload(:resources)
)

Transport.History.Fetcher.Mock
|> expect(:history_resources, fn _, options ->
@@ -212,7 +212,11 @@ defmodule TransportWeb.API.StatsControllerTest do
insert(:dataset, type: "public-transit", is_active: true, legal_owners_aom: [aom])
insert(:dataset, type: "public-transit", is_active: true, legal_owners_aom: [aom])

insert_resource_and_friends(Date.utc_today() |> Date.add(10), aom: aom, max_error: "Error")
insert_resource_and_friends(Date.utc_today() |> Date.add(10),
aom: aom,
max_error: "Error",
type: "low-emission-zones"
)

assert [
%{
@@ -137,11 +137,9 @@ defmodule TransportWeb.BackofficeControllerTest do
|> expect(:get!, fn "https://demo.data.gouv.fr/api/1/datasets/12/", [], _ ->
body =
%{
"created_at" => DateTime.utc_now(),
"last_update" => DateTime.utc_now(),
"id" => dataset_datagouv_id,
"slug" => "dataset-slug",
"type" => "public-transit",
"id" => dataset_datagouv_id,
"resources" => [
%{
"last_modified" => DateTime.utc_now() |> DateTime.to_iso8601(),
@@ -151,6 +149,7 @@
}
]
}
|> DB.Factory.datagouv_dataset_response()
|> Jason.encode!()

%HTTPoison.Response{body: body, status_code: 200}
@@ -55,8 +55,7 @@ defmodule TransportWeb.Backoffice.DatasetControllerTest do
assert "https://demo.data.gouv.fr/api/1/datasets/#{datagouv_id_2}/" == url

%HTTPoison.Response{
body:
~s({"id": "#{datagouv_id_2}", "resources": [], "slug": "#{slug_2}", "created_at": "2023-03-22 15:53:50+00:00", "last_update": "2023-03-22 15:53:50+00:00"}),
body: Jason.encode!(datagouv_dataset_response(%{"id" => datagouv_id_2, "slug" => slug_2, "resources" => []})),
status_code: 200
}
end)
@@ -151,8 +150,7 @@
Transport.HTTPoison.Mock
|> expect(:get!, fn "https://demo.data.gouv.fr/api/1/datasets/datagouv_id/", _, _ ->
%HTTPoison.Response{
body:
~s({"id": "datagouv_id", "resources": [], "created_at": "2023-03-22 15:53:50+00:00", "last_update": "2023-03-22 15:53:50+00:00"}),
body: Jason.encode!(datagouv_dataset_response(%{"resources" => []})),
status_code: 200
}
end)
@@ -2,59 +2,53 @@ defmodule TransportWeb.DatasetSearchControllerTest do
use TransportWeb.ConnCase, async: true
use TransportWeb.DatabaseCase, cleanup: [:datasets]
import DB.Factory
alias DB.{AOM, Dataset, Repo, Resource}
import DB.Factory

doctest TransportWeb.DatasetController

setup do
{:ok, _} =
%Dataset{
created_at: DateTime.utc_now(),
last_update: DateTime.utc_now(),
description: "Un jeu de données",
licence: "odc-odbl",
datagouv_title: "Horaires et arrêts du réseau IRIGO - format GTFS",
custom_title: "Horaires Angers",
type: "public-transit",
slug: "horaires-et-arrets-du-reseau-irigo-format-gtfs",
datagouv_id: "5b4cd3a0b59508054dd496cd",
frequency: "yearly",
tags: [],
resources: [
%Resource{
last_update: DateTime.utc_now() |> DateTime.add(-6, :hour),
last_import: DateTime.utc_now() |> DateTime.add(-1, :hour),
url: "https://link.to/angers.zip",
title: "angers.zip"
}
],
aom: %AOM{id: 4242, nom: "Angers Métropôle"}
}
|> Repo.insert()

{:ok, _} =
%Dataset{
created_at: DateTime.utc_now(),
last_update: DateTime.utc_now(),
description: "Un autre jeu de données",
licence: "lov2",
datagouv_title: "offre de transport du réseau de LAVAL Agglomération (GTFS)",
custom_title: "Horaires Laval",
slug: "offre-de-transport-du-reseau-de-laval-agglomeration-gtfs",
type: "public-transit",
datagouv_id: "5bc493d08b4c416c84a69500",
frequency: "yearly",
tags: [],
resources: [
%Resource{
last_update: DateTime.utc_now() |> DateTime.add(-6, :hour),
last_import: DateTime.utc_now() |> DateTime.add(-1, :hour),
url: "https://link.to/angers.zip"
}
]
}
|> Repo.insert()
insert(:dataset,
created_at: DateTime.utc_now(),
last_update: DateTime.utc_now(),
description: "Un jeu de données",
licence: "odc-odbl",
datagouv_title: "Horaires et arrêts du réseau IRIGO - format GTFS",
custom_title: "Horaires Angers",
type: "public-transit",
slug: "horaires-et-arrets-du-reseau-irigo-format-gtfs",
datagouv_id: "5b4cd3a0b59508054dd496cd",
frequency: "yearly",
tags: [],
resources: [
%DB.Resource{
last_update: DateTime.utc_now() |> DateTime.add(-6, :hour),
last_import: DateTime.utc_now() |> DateTime.add(-1, :hour),
url: "https://link.to/angers.zip",
title: "angers.zip"
}
],
aom: %DB.AOM{id: 4242, nom: "Angers Métropôle"}
)

insert(:dataset,
created_at: DateTime.utc_now(),
last_update: DateTime.utc_now(),
description: "Un autre jeu de données",
licence: "lov2",
datagouv_title: "offre de transport du réseau de LAVAL Agglomération (GTFS)",
custom_title: "Horaires Laval",
slug: "offre-de-transport-du-reseau-de-laval-agglomeration-gtfs",
type: "public-transit",
datagouv_id: "5bc493d08b4c416c84a69500",
frequency: "yearly",
tags: [],
resources: [
%DB.Resource{
last_update: DateTime.utc_now() |> DateTime.add(-6, :hour),
last_import: DateTime.utc_now() |> DateTime.add(-1, :hour),
url: "https://link.to/angers.zip"
}
]
)

:ok
end
@@ -120,13 +120,14 @@ defmodule TransportWeb.DatasetViewTest do
test "count_resources and count_documentation_resources" do
dataset = insert(:dataset)
insert(:resource, type: "documentation", url: "https://example.com/doc", dataset: dataset)
insert(:resource, type: "documentation", url: "https://example.com/more_doc", dataset: dataset)
insert(:resource, type: "main", url: "https://example.com/file", dataset: dataset)
insert(:resource, type: "main", url: "https://example.com/community", dataset: dataset, is_community_resource: true)

dataset = dataset |> DB.Repo.preload(:resources)

assert count_resources(dataset) == 2
assert count_documentation_resources(dataset) == 1
assert count_resources(dataset) == 1
assert count_documentation_resources(dataset) == 2
end

describe "licence_link" do