diff --git a/.circleci/config.yml b/.circleci/config.yml
index 86de651ea4..1c1ec6c2f1 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -12,7 +12,7 @@ default_docker: &default_docker
defaults: &defaults
parameters:
base_image:
- default: ghcr.io/etalab/transport-ops:elixir-1.16.2-erlang-25.3.2.10-ubuntu-focal-20240216-transport-tools-1.0.7
+ default: ghcr.io/etalab/transport-ops:elixir-1.17.3-erlang-27.1-ubuntu-focal-20240530-transport-tools-1.0.7
type: string
# useful to invalidate the build cache manually by bumping the version
build_cache_key:
diff --git a/.tool-versions b/.tool-versions
index 693ae08b43..9965d6612e 100644
--- a/.tool-versions
+++ b/.tool-versions
@@ -9,14 +9,14 @@
# - https://hexdocs.pm/elixir/compatibility-and-deprecations.html
# - https://github.com/elixir-lang/elixir/releases
# - `asdf list all elixir`
-elixir 1.16.2-otp-25
+elixir 1.17.3-otp-27
# See:
# - https://github.com/erlang/otp/releases
# - Blog, e.g. https://www.erlang.org/blog/my-otp-25-highlights/
# - https://github.com/erlang/otp/blob/master/otp_versions.table
# - `asdf list all erlang`
-erlang 25.3.2.10
+erlang 27.1
# Take an LTS version on https://nodejs.org/
-nodejs 20.11.1
+nodejs 20.17.0
diff --git a/Dockerfile b/Dockerfile
index e1ca670fdf..d6ac291e3a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM ghcr.io/etalab/transport-ops:elixir-1.16.2-erlang-25.3.2.10-ubuntu-focal-20240216-transport-tools-1.0.7
+FROM ghcr.io/etalab/transport-ops:elixir-1.17.3-erlang-27.1-ubuntu-focal-20240530-transport-tools-1.0.7
RUN mkdir phoenixapp
WORKDIR /phoenixapp
diff --git a/Dockerfile.dev b/Dockerfile.dev
index 670d386c86..69c58b064c 100644
--- a/Dockerfile.dev
+++ b/Dockerfile.dev
@@ -1,4 +1,4 @@
-FROM ghcr.io/etalab/transport-ops:elixir-1.16.2-erlang-25.3.2.10-ubuntu-focal-20240216-transport-tools-1.0.7
+FROM ghcr.io/etalab/transport-ops:elixir-1.17.3-erlang-27.1-ubuntu-focal-20240530-transport-tools-1.0.7
RUN apt-get install -y git inotify-tools postgresql-client>=11
diff --git a/apps/transport/lib/transport_web/controllers/backoffice/contact_controller.ex b/apps/transport/lib/transport_web/controllers/backoffice/contact_controller.ex
index 27a41e379d..90a5bbe050 100644
--- a/apps/transport/lib/transport_web/controllers/backoffice/contact_controller.ex
+++ b/apps/transport/lib/transport_web/controllers/backoffice/contact_controller.ex
@@ -101,8 +101,8 @@ defmodule TransportWeb.Backoffice.ContactController do
"last_login_at",
"creation_source",
"organization_names",
- columns_for_role(:producer),
- columns_for_role(:reuser)
+ columns_for_role(:producer) |> Enum.sort(),
+ columns_for_role(:reuser) |> Enum.sort()
]
|> List.flatten()
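The two role-specific column groups are now piped through `Enum.sort/1`, which (reading the change) pins the CSV export to a deterministic, alphabetical column order rather than whatever order `columns_for_role/1` happens to build. A minimal sketch of the effect, using column names taken from the test header further down; the list construction here is an assumption, not project code:

```elixir
producer_columns = [
  "is_producer",
  "producer_expiration",
  "producer_daily_new_comments",
  "producer_dataset_with_error",
  "producer_resource_unavailable"
]

# Enum.sort/1 yields the same alphabetical order on every run,
# matching the header asserted in contact_controller_test.exs.
Enum.sort(producer_columns)
#=> ["is_producer", "producer_daily_new_comments", "producer_dataset_with_error",
#    "producer_expiration", "producer_resource_unavailable"]
```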
diff --git a/apps/transport/lib/transport_web/live/on_demand_validation_select_live.ex b/apps/transport/lib/transport_web/live/on_demand_validation_select_live.ex
index e594e46bc4..2caf3e8c70 100644
--- a/apps/transport/lib/transport_web/live/on_demand_validation_select_live.ex
+++ b/apps/transport/lib/transport_web/live/on_demand_validation_select_live.ex
@@ -66,7 +66,7 @@ defmodule TransportWeb.Live.OnDemandValidationSelectLive do
defp form_fields(socket) do
changeset = socket_value(socket, :changeset)
- Map.merge(changeset.data(), changeset.changes())
+ Map.merge(changeset.data, changeset.changes)
end
defp socket_value(%Phoenix.LiveView.Socket{assigns: assigns}, key), do: Map.get(assigns, key)
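Dropping the parentheses turns `changeset.data()` / `changeset.changes()` into plain struct field access. The parenthesized form is treated as a call and, on recent Elixir releases, draws a deprecation warning (stated here from general Elixir knowledge, not from the PR itself); the same cleanup is applied to the `.assigns()` calls in dataset_view.ex below. A small standalone sketch, with a throwaway map standing in for the changeset struct:

```elixir
# Hypothetical value, only to show the two access forms.
changeset_like = %{data: %{}, changes: %{url: "https://example.com"}}

changeset_like.data       # plain field access — the form kept by this change
changeset_like.changes    #=> %{url: "https://example.com"}

# changeset_like.changes()  # same value, but the explicit parentheses are
#                           # deprecated for field access on recent Elixir versions
```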
diff --git a/apps/transport/lib/transport_web/views/dataset_view.ex b/apps/transport/lib/transport_web/views/dataset_view.ex
index 5919260ff0..ecf07a04db 100644
--- a/apps/transport/lib/transport_web/views/dataset_view.ex
+++ b/apps/transport/lib/transport_web/views/dataset_view.ex
@@ -72,7 +72,7 @@ defmodule TransportWeb.DatasetView do
"most_recent" => dgettext("page-shortlist", "Most recently added")
}[order_by]
- assigns = Plug.Conn.assign(conn, :msg, msg).assigns()
+ assigns = Plug.Conn.assign(conn, :msg, msg).assigns
case assigns do
%{order_by: ^order_by} -> ~H{<%= @msg %>}
@@ -81,7 +81,7 @@ defmodule TransportWeb.DatasetView do
end
def licence_link(%Plug.Conn{} = conn, %{licence: "all", count: count}) do
- assigns = Plug.Conn.assign(conn, :count, count).assigns()
+ assigns = Plug.Conn.assign(conn, :count, count).assigns
if Map.has_key?(conn.query_params, "licence") do
link("#{dgettext("page-shortlist", "All (feminine)")} (#{count})",
@@ -93,7 +93,7 @@ defmodule TransportWeb.DatasetView do
end
def licence_link(%Plug.Conn{} = conn, %{licence: licence, count: count}) when licence not in ["fr-lo", "lov2"] do
- assigns = Plug.Conn.merge_assigns(conn, count: count, name: name = licence(%Dataset{licence: licence})).assigns()
+ assigns = Plug.Conn.merge_assigns(conn, count: count, name: name = licence(%Dataset{licence: licence})).assigns
if Map.get(conn.query_params, "licence") == licence do
~H{<%= @name %> (<%= @count %>)}
@@ -113,7 +113,7 @@ defmodule TransportWeb.DatasetView do
params = conn.query_params
full_url = "#{url}?#{Query.encode(params)}"
- assigns = Plug.Conn.merge_assigns(conn, count: count, nom: nom).assigns()
+ assigns = Plug.Conn.merge_assigns(conn, count: count, nom: nom).assigns
case current_path(conn, %{}) do
^url -> ~H{<%= @nom %> (<%= @count %>)}
@@ -147,7 +147,7 @@ defmodule TransportWeb.DatasetView do
end
link_text = "#{msg} (#{count})"
- assigns = Plug.Conn.merge_assigns(conn, count: count, msg: msg).assigns()
+ assigns = Plug.Conn.merge_assigns(conn, count: count, msg: msg).assigns
active_filter_text = ~H{<%= @msg %> (<%= @count %>)}
case conn.params do
@@ -172,7 +172,7 @@ defmodule TransportWeb.DatasetView do
true -> current_url(conn, Map.put(conn.query_params, "filter", "has_realtime"))
end
- assigns = Plug.Conn.merge_assigns(conn, count: count, msg: msg).assigns()
+ assigns = Plug.Conn.merge_assigns(conn, count: count, msg: msg).assigns
case {only_rt, Map.get(conn.query_params, "filter")} do
{false, "has_realtime"} -> link("#{msg} (#{count})", to: full_url)
@@ -193,7 +193,7 @@ defmodule TransportWeb.DatasetView do
true -> current_url(conn, Map.put(conn.query_params, "loi-climat-resilience", true))
end
- assigns = Plug.Conn.merge_assigns(conn, count: count, msg: msg).assigns()
+ assigns = Plug.Conn.merge_assigns(conn, count: count, msg: msg).assigns
case {only, Map.get(conn.query_params, "loi-climat-resilience")} do
{false, "true"} -> link("#{msg} (#{count})", to: full_url)
diff --git a/apps/transport/test/build_test.exs b/apps/transport/test/build_test.exs
index 8c4c8c0a25..152165cac5 100644
--- a/apps/transport/test/build_test.exs
+++ b/apps/transport/test/build_test.exs
@@ -12,7 +12,7 @@ defmodule TransportWeb.BuildTest do
end
def asdf_erlang_version(return_full_version \\ false) do
- [[_, full_version, major_version]] = Regex.scan(~r/erlang ((\d+)\.\d+\.\d+.\d+)/, tool_versions())
+ [[_, full_version, major_version]] = Regex.scan(~r/^erlang ((\d+)\..*$)/m, tool_versions())
if return_full_version do
full_version
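The old pattern `~r/erlang ((\d+)\.\d+\.\d+.\d+)/` only matched four-component Erlang versions such as 25.3.2.10, so it would not find the shorter `erlang 27.1` line; the new anchored, multiline pattern accepts any number of components after the major version. A quick illustrative check (the sample content mirrors the .tool-versions file above):

```elixir
tool_versions = """
elixir 1.17.3-otp-27
erlang 27.1
nodejs 20.17.0
"""

Regex.scan(~r/^erlang ((\d+)\..*$)/m, tool_versions)
#=> [["erlang 27.1", "27.1", "27"]]
```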
diff --git a/apps/transport/test/db/db_dataset_test.exs b/apps/transport/test/db/db_dataset_test.exs
index aa4c667efe..2598704ba3 100644
--- a/apps/transport/test/db/db_dataset_test.exs
+++ b/apps/transport/test/db/db_dataset_test.exs
@@ -210,7 +210,7 @@ defmodule DB.DatasetDBTest do
})
end)
- assert logs =~ "error while importing dataset: %{legal_owner_company_siren:"
+ assert logs =~ ~r/error while importing dataset(.*)legal_owner_company_siren/
assert {:ok, %Ecto.Changeset{}} =
Dataset.changeset(%{
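Switching the assertion from a literal substring to `=~` with a regex keeps the test from depending on exactly how the error map is rendered between the message and the `legal_owner_company_siren` key. Sketch only; the log line below is a plausible shape based on the old assertion, not a captured value:

```elixir
log_line = ~s|error while importing dataset: %{legal_owner_company_siren: ["is invalid"]}|

log_line =~ ~r/error while importing dataset(.*)legal_owner_company_siren/
#=> true
```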
diff --git a/apps/transport/test/transport_web/controllers/backoffice/contact_controller_test.exs b/apps/transport/test/transport_web/controllers/backoffice/contact_controller_test.exs
index a65dd6c3ea..14097df445 100644
--- a/apps/transport/test/transport_web/controllers/backoffice/contact_controller_test.exs
+++ b/apps/transport/test/transport_web/controllers/backoffice/contact_controller_test.exs
@@ -329,8 +329,8 @@ defmodule TransportWeb.Backoffice.ContactControllerTest do
content = response(response, 200)
# Check CSV header
- assert content |> String.split("\r\n") |> hd() ==
- "id,first_name,last_name,mailing_list_title,email,phone_number,job_title,organization,inserted_at,updated_at,datagouv_user_id,last_login_at,creation_source,organization_names,is_producer,producer_daily_new_comments,producer_dataset_with_error,producer_expiration,producer_resource_unavailable,is_reuser,reuser_daily_new_comments,reuser_dataset_with_error,reuser_datasets_switching_climate_resilience_bill,reuser_expiration,reuser_new_dataset,reuser_resource_unavailable,reuser_resources_changed"
+ assert content |> String.split("\r\n") |> hd() |> String.split(",") ==
+ "id,first_name,last_name,mailing_list_title,email,phone_number,job_title,organization,inserted_at,updated_at,datagouv_user_id,last_login_at,creation_source,organization_names,is_producer,producer_daily_new_comments,producer_dataset_with_error,producer_expiration,producer_resource_unavailable,is_reuser,reuser_daily_new_comments,reuser_dataset_with_error,reuser_datasets_switching_climate_resilience_bill,reuser_expiration,reuser_new_dataset,reuser_resource_unavailable,reuser_resources_changed" |> String.split(",")
# Check CSV content
csv_content = [content] |> CSV.decode!(headers: true) |> Enum.to_list()