diff --git a/config/runtime.exs b/config/runtime.exs
index 1ad39e59da9..72533f509c2 100644
--- a/config/runtime.exs
+++ b/config/runtime.exs
@@ -338,4 +338,20 @@ if config_env() == :prod do
   config :oli, :datashop,
     cache_limit: String.to_integer(System.get_env("DATASHOP_CACHE_LIMIT", "200"))
+
+  config :oli, Oban,
+    repo: Oli.Repo,
+    plugins: [Oban.Plugins.Pruner],
+    queues: [
+      default: String.to_integer(System.get_env("OBAN_QUEUE_SIZE_DEFAULT", "10")),
+      snapshots: String.to_integer(System.get_env("OBAN_QUEUE_SIZE_SNAPSHOTS", "20")),
+      s3_uploader: String.to_integer(System.get_env("OBAN_QUEUE_SIZE_S3UPLOADER", "20")),
+      selections: String.to_integer(System.get_env("OBAN_QUEUE_SIZE_SELECTIONS", "20")),
+      updates: String.to_integer(System.get_env("OBAN_QUEUE_SIZE_UPDATES", "2")),
+      grades: String.to_integer(System.get_env("OBAN_QUEUE_SIZE_GRADES", "30")),
+      auto_submit: String.to_integer(System.get_env("OBAN_QUEUE_SIZE_AUTOSUBMIT", "3")),
+      analytics_export: String.to_integer(System.get_env("OBAN_QUEUE_SIZE_ANALYTICS", "1")),
+      datashop_export: String.to_integer(System.get_env("OBAN_QUEUE_SIZE_DATASHOP", "1")),
+      objectives: String.to_integer(System.get_env("OBAN_QUEUE_SIZE_OBJECTIVES", "3"))
+    ]
 end
diff --git a/lib/oli/delivery/experiments/log_worker.ex b/lib/oli/delivery/experiments/log_worker.ex
index f894e7264b1..41e904e17de 100644
--- a/lib/oli/delivery/experiments/log_worker.ex
+++ b/lib/oli/delivery/experiments/log_worker.ex
@@ -19,45 +19,38 @@ defmodule Oli.Delivery.Experiments.LogWorker do
   @impl Oban.Worker
   def perform(%Oban.Job{
-        args: %{"activity_attempt_guid" => attempt_guid, "section_slug" => section_slug}
+        args: %{"activity_attempt_guid" => attempt_guid}
       }) do
-    perform_now(attempt_guid, section_slug)
+    perform_now(attempt_guid)
   end

-  def perform_now(attempt_guid, section_slug) do
-    # Fetch the section, fail fast if experiments are not enabled, we are done
-    case Oli.Delivery.Sections.get_section_by(slug: section_slug) do
-      %Oli.Delivery.Sections.Section{has_experiments: true} ->
-        {score, out_of, enrollment_id} =
-          from(aa in ActivityAttempt,
-            join: ra in ResourceAttempt,
-            on: aa.resource_attempt_id == ra.id,
-            join: a in ResourceAccess,
-            on: ra.resource_access_id == a.id,
-            join: e in Enrollment,
-            on: a.section_id == e.section_id and a.user_id == e.user_id,
-            where: aa.attempt_guid == ^attempt_guid,
-            select: {aa.score, aa.out_of, e.id}
-          )
-          |> Repo.one()
+  def perform_now(attempt_guid) do
+    {score, out_of, enrollment_id} =
+      from(aa in ActivityAttempt,
+        join: ra in ResourceAttempt,
+        on: aa.resource_attempt_id == ra.id,
+        join: a in ResourceAccess,
+        on: ra.resource_access_id == a.id,
+        join: e in Enrollment,
+        on: a.section_id == e.section_id and a.user_id == e.user_id,
+        where: aa.attempt_guid == ^attempt_guid,
+        select: {aa.score, aa.out_of, e.id}
+      )
+      |> Repo.one()

-        correctness =
-          case score do
-            0.0 ->
-              0.0
+    correctness =
+      case score do
+        0.0 ->
+          0.0

-            s ->
-              case out_of do
-                0.0 -> 0.0
-                o -> s / o
-              end
+        s ->
+          case out_of do
+            0.0 -> 0.0
+            o -> s / o
           end
+      end

-        Oli.Delivery.Experiments.log(enrollment_id, correctness)
-
-      _ ->
-        {:ok, :nothing_to_do}
-    end
+    Oli.Delivery.Experiments.log(enrollment_id, correctness)
   end

   @doc """
@@ -67,9 +60,19 @@ defmodule Oli.Delivery.Experiments.LogWorker do
   def maybe_schedule(result, activity_attempt_guid, section_slug) do
     case Oli.Delivery.Experiments.experiments_enabled?() do
       true ->
-        %{activity_attempt_guid: activity_attempt_guid, section_slug: section_slug}
-        |> Oli.Delivery.Experiments.LogWorker.new()
-        |> Oban.insert()
+        case from(s in Oli.Delivery.Sections.Section,
+               where: s.slug == ^section_slug,
+               select: s.has_experiments
+             )
+             |> Repo.one() do
+          true ->
+            %{activity_attempt_guid: activity_attempt_guid}
+            |> Oli.Delivery.Experiments.LogWorker.new()
+            |> Oban.insert()
+
+          _ ->
+            true
+        end

       _ ->
         true
diff --git a/lib/oli/delivery/previous_next_index.ex b/lib/oli/delivery/previous_next_index.ex
index 2af11ae80d4..5b71c70e54c 100644
--- a/lib/oli/delivery/previous_next_index.ex
+++ b/lib/oli/delivery/previous_next_index.ex
@@ -85,4 +85,16 @@ defmodule Oli.Delivery.PreviousNextIndex do
       {:error, e} -> e
     end
   end
+
+  def rebuild(%Section{} = section, hierarchy) do
+    case Repo.transaction(fn _ ->
+           Hierarchy.build_navigation_link_map(hierarchy)
+           |> then(fn previous_next_index ->
+             Sections.update_section(section, %{previous_next_index: previous_next_index})
+           end)
+         end) do
+      {:ok, result} -> result
+      {:error, e} -> e
+    end
+  end
 end
diff --git a/lib/oli/delivery/sections.ex b/lib/oli/delivery/sections.ex
index ec9cf70c80f..614e3572bfb 100644
--- a/lib/oli/delivery/sections.ex
+++ b/lib/oli/delivery/sections.ex
@@ -4,6 +4,7 @@ defmodule Oli.Delivery.Sections do
   """

   import Ecto.Query, warn: false
+  alias Oli.Delivery.Sections.MinimalHierarchy
   alias Oli.Delivery.Sections.EnrollmentContextRole
   alias Oli.Repo
   alias Oli.Repo.{Paging, Sorting}
@@ -1753,7 +1754,7 @@ defmodule Oli.Delivery.Sections do
       # generate a new set of section resources based on the hierarchy
       {section_resources, _} = collapse_section_hierarchy(hierarchy, section_id)

-      rebuild_section_resources(section, section_resources, project_publications)
+      rebuild_section_resources(section, section_resources, project_publications, hierarchy)
     end)
     |> Multi.run(
       :maybe_update_exploration_pages,
@@ -1795,7 +1796,8 @@ defmodule Oli.Delivery.Sections do
   def rebuild_section_resources(
         %Section{id: section_id} = section,
         section_resources,
-        project_publications
+        project_publications,
+        hierarchy
       )
       when is_list(section_resources) do
     Repo.transaction(fn ->
@@ -1915,7 +1917,7 @@ defmodule Oli.Delivery.Sections do
       )

       # Rebuild section previous next index
-      PreviousNextIndex.rebuild(section)
+      PreviousNextIndex.rebuild(section, hierarchy)

       {:ok, _} = rebuild_contained_pages(section, section_resources)
       {:ok, _} = rebuild_contained_objectives(section)
@@ -2249,7 +2251,6 @@ defmodule Oli.Delivery.Sections do
     project_id = new_publication.project_id
     project = Oli.Repo.get(Oli.Authoring.Course.Project, project_id)
     current_publication = get_current_publication(section_id, project_id)
-    current_hierarchy = DeliveryResolver.full_hierarchy(section.slug)

     # fetch diff from cache if one is available. If not, compute one on the fly
     diff = Publishing.get_publication_diff(current_publication, new_publication)
@@ -2257,6 +2258,7 @@ defmodule Oli.Delivery.Sections do
     result =
       case diff do
         %PublicationDiff{classification: :minor} ->
+          current_hierarchy = MinimalHierarchy.full_hierarchy(section.slug)
           perform_update(:minor, section, project_id, new_publication, current_hierarchy)

         %PublicationDiff{classification: :major} ->
@@ -2277,15 +2279,18 @@ defmodule Oli.Delivery.Sections do
             if section.blueprint.apply_major_updates do
               perform_update(:major, section, project_id, current_publication, new_publication)
             else
+              current_hierarchy = MinimalHierarchy.full_hierarchy(section.slug)
               perform_update(:minor, section, project_id, new_publication, current_hierarchy)
             end

           # Case 3: The course section is a product based on this project
           section.base_project_id == project_id and section.type == :blueprint ->
+            current_hierarchy = MinimalHierarchy.full_hierarchy(section.slug)
            perform_update(:minor, section, project_id, new_publication, current_hierarchy)

           # Case 4: The course section is not based on this project (but it remixes some materials from project)
           true ->
+            current_hierarchy = MinimalHierarchy.full_hierarchy(section.slug)
             perform_update(:minor, section, project_id, new_publication, current_hierarchy)
         end
     end
@@ -2305,216 +2310,238 @@ defmodule Oli.Delivery.Sections do
   # for minor update, all we need to do is update the spp record and
   # rebuild the section curriculum based on the current hierarchy
   defp perform_update(:minor, section, project_id, new_publication, current_hierarchy) do
-    Repo.transaction(fn ->
-      # Update the section project publication to the new publication
-      update_section_project_publication(section, project_id, new_publication.id)
+    mark = Oli.Timing.mark()

-      project_publications = get_pinned_project_publications(section.id)
-      rebuild_section_curriculum(section, current_hierarchy, project_publications)
+    result =
+      Repo.transaction(fn ->
+        # Update the section project publication to the new publication
+        update_section_project_publication(section, project_id, new_publication.id)

-      {:ok}
-    end)
-  end
+        project_publications = get_pinned_project_publications(section.id)
+        rebuild_section_curriculum(section, current_hierarchy, project_publications)

-  # for major update, update the spp record and use the diff and the AIRRO approach
-  defp perform_update(:major, section, project_id, prev_publication, new_publication) do
-    Repo.transaction(fn ->
-      container = ResourceType.get_id_by_type("container")
-      prev_published_resources_map = published_resources_map(prev_publication.id)
-      new_published_resources_map = published_resources_map(new_publication.id)
-
-      # Update the section project publication to the new publication
-      update_section_project_publication(section, project_id, new_publication.id)
-
-      # Bulk create new placeholder section resource records for new published resources.
-      # The children of these records may need the id of other section resource records
-      # created here, so children will be set to nil initially and set in the next step.
-      #
-      # This is more efficient than DFS traversing the hierarchy and creating these records
-      # one at a time in order to ensure that child record ids are available for the parent
-      # children.
-      now = DateTime.utc_now() |> DateTime.truncate(:second)
-      placeholders = %{timestamp: now}
+        {:ok}
+      end)

-      section_resource_rows =
-        new_published_resources_map
-        |> Enum.filter(fn {resource_id, _pr} ->
-          !Map.has_key?(prev_published_resources_map, resource_id)
-        end)
-        |> Enum.map(fn {resource_id, pr} ->
-          %{
-            resource_id: resource_id,
-            project_id: project_id,
-            section_id: section.id,
-            # we set children to nil here so that we know it needs to be set in the next step
-            children: nil,
-            scoring_strategy_id: pr.revision.scoring_strategy_id,
-            slug: Oli.Utils.Slug.generate("section_resources", pr.revision.title),
-            inserted_at: {:placeholder, :timestamp},
-            updated_at: {:placeholder, :timestamp}
-          }
-        end)
+    Logger.info(
+      "perform_update.MINOR: section[#{section.slug}] #{Oli.Timing.elapsed(mark) / 1000 / 1000}ms"
+    )

-      Database.batch_insert_all(SectionResource, section_resource_rows,
-        placeholders: placeholders,
-        on_conflict:
-          {:replace_all_except,
-           [
-             :inserted_at,
-             :scoring_strategy_id,
-             :scheduling_type,
-             :manually_scheduled,
-             :start_date,
-             :end_date,
-             :collab_space_config,
-             :explanation_strategy,
-             :max_attempts,
-             :retake_mode,
-             :password,
-             :late_submit,
-             :late_start,
-             :time_limit,
-             :grace_period,
-             :review_submission,
-             :feedback_mode,
-             :feedback_scheduled_date
-           ]},
-        conflict_target: [:section_id, :resource_id]
-      )
+    result
+  end

-      # get all section resources including freshly minted ones
-      section_resources = get_section_resources(section.id)
+  # for major update, update the spp record and use the diff and the AIRRO approach
+  defp perform_update(:major, section, project_id, prev_publication, new_publication) do
+    mark = Oli.Timing.mark()

-      # build mappings from section_resource_id to resource_id and the inverse
-      {sr_id_to_resource_id, resource_id_to_sr_id} =
-        section_resources
-        |> Enum.reduce({%{}, %{}}, fn %SectionResource{id: id, resource_id: resource_id},
-                                      {sr_id_to_resource_id, resource_id_to_sr_id} ->
-          {Map.put(sr_id_to_resource_id, id, resource_id),
-           Map.put(resource_id_to_sr_id, resource_id, id)}
-        end)
+    result =
+      Repo.transaction(fn ->
+        container = ResourceType.get_id_by_type("container")
+
+        prev_published_resources_map =
+          MinimalHierarchy.published_resources_map(prev_publication.id)
+
+        new_published_resources_map = MinimalHierarchy.published_resources_map(new_publication.id)
+
+        # Update the section project publication to the new publication
+        update_section_project_publication(section, project_id, new_publication.id)
+
+        # Bulk create new placeholder section resource records for new published resources.
+        # The children of these records may need the id of other section resource records
+        # created here, so children will be set to nil initially and set in the next step.
+        #
+        # This is more efficient than DFS traversing the hierarchy and creating these records
+        # one at a time in order to ensure that child record ids are available for the parent
+        # children.
+        now = DateTime.utc_now() |> DateTime.truncate(:second)
+        placeholders = %{timestamp: now}
+
+        section_resource_rows =
+          new_published_resources_map
+          |> Enum.filter(fn {resource_id, _pr} ->
+            !Map.has_key?(prev_published_resources_map, resource_id)
+          end)
+          |> Enum.map(fn {resource_id, pr} ->
+            %{
+              resource_id: resource_id,
+              project_id: project_id,
+              section_id: section.id,
+              # we set children to nil here so that we know it needs to be set in the next step
+              children: nil,
+              scoring_strategy_id: pr.scoring_strategy_id,
+              slug: Oli.Utils.Slug.generate("section_resources", pr.title),
+              inserted_at: {:placeholder, :timestamp},
+              updated_at: {:placeholder, :timestamp}
+            }
+          end)

-      # For all container section resources in the course project whose children attribute differs
-      # from the new publication’s container children, execute the three way merge algorithm
-      merged_section_resources =
-        section_resources
-        |> Enum.map(fn section_resource ->
-          %SectionResource{
-            resource_id: resource_id,
-            children: current_children
-          } = section_resource
+        Database.batch_insert_all(SectionResource, section_resource_rows,
+          placeholders: placeholders,
+          on_conflict:
+            {:replace_all_except,
+             [
+               :inserted_at,
+               :scoring_strategy_id,
+               :scheduling_type,
+               :manually_scheduled,
+               :start_date,
+               :end_date,
+               :collab_space_config,
+               :explanation_strategy,
+               :max_attempts,
+               :retake_mode,
+               :password,
+               :late_submit,
+               :late_start,
+               :time_limit,
+               :grace_period,
+               :review_submission,
+               :feedback_mode,
+               :feedback_scheduled_date
+             ]},
+          conflict_target: [:section_id, :resource_id]
+        )

-          prev_published_resource = prev_published_resources_map[resource_id]
+        # get all section resources including freshly minted ones
+        section_resources = get_section_resources(section.id)

-          is_container? =
-            case prev_published_resource do
-              %{revision: %{resource_type_id: ^container}} ->
-                true
+        # build mappings from section_resource_id to resource_id and the inverse
+        {sr_id_to_resource_id, resource_id_to_sr_id} =
+          section_resources
+          |> Enum.reduce({%{}, %{}}, fn %SectionResource{id: id, resource_id: resource_id},
+                                        {sr_id_to_resource_id, resource_id_to_sr_id} ->
+            {Map.put(sr_id_to_resource_id, id, resource_id),
+             Map.put(resource_id_to_sr_id, resource_id, id)}
+          end)

-              _ ->
-                false
-            end
+        # For all container section resources in the course project whose children attribute differs
+        # from the new publication’s container children, execute the three way merge algorithm
+        merged_section_resources =
+          section_resources
+          |> Enum.map(fn section_resource ->
+            %SectionResource{
+              resource_id: resource_id,
+              children: current_children
+            } = section_resource
+
+            prev_published_resource = prev_published_resources_map[resource_id]
+
+            is_container? =
+              case prev_published_resource do
+                %{resource_type_id: ^container} ->
+                  true
+
+                _ ->
+                  false
+              end

-          if is_container? or is_nil(current_children) do
-            new_published_resource = new_published_resources_map[resource_id]
-            new_children = new_published_resource.revision.children
-
-            case current_children do
-              nil ->
-                # this section resource was just created so it can assume the newly published value
-                %SectionResource{
-                  section_resource
-                  | children: Enum.map(new_children, &resource_id_to_sr_id[&1])
-                }
-
-              current_children ->
-                # ensure we are comparing resource_ids to resource_ids (and not section_resource_ids)
-                # by translating the current section_resource children ids to resource_ids
-                current_children_resource_ids =
-                  Enum.map(current_children, &sr_id_to_resource_id[&1])
-
-                # check if the children resource_ids have diverged from the new value
-                if current_children_resource_ids != new_children do
-                  # There is a merge conflict between the current section resource and the new published resource.
-                  # Use the AIRRO three way merge algorithm to resolve
-                  base = prev_published_resource.revision.children
-                  source = new_published_resource.revision.children
-                  target = current_children_resource_ids
-
-                  case Oli.Publishing.Updating.Merge.merge(base, source, target) do
-                    {:ok, merged} ->
-                      %SectionResource{
-                        section_resource
-                        | children: Enum.map(merged, &resource_id_to_sr_id[&1])
-                      }
-
-                    {:no_change} ->
-                      section_resource
-                  end
-                else
-                  section_resource
-                end
-            end
-          else
-            section_resource
-          end
-        end)
+            if is_container? or is_nil(current_children) do
+              new_published_resource = new_published_resources_map[resource_id]
+              new_children = new_published_resource.children
+
+              case current_children do
+                nil ->
+                  # this section resource was just created so it can assume the newly published value
+                  %SectionResource{
+                    section_resource
+                    | children: Enum.map(new_children, &resource_id_to_sr_id[&1])
+                  }
+
+                current_children ->
+                  # ensure we are comparing resource_ids to resource_ids (and not section_resource_ids)
+                  # by translating the current section_resource children ids to resource_ids
+                  current_children_resource_ids =
+                    Enum.map(current_children, &sr_id_to_resource_id[&1])
+
+                  # check if the children resource_ids have diverged from the new value
+                  if current_children_resource_ids != new_children do
+                    # There is a merge conflict between the current section resource and the new published resource.
+                    # Use the AIRRO three way merge algorithm to resolve
+                    base = prev_published_resource.children
+                    source = new_published_resource.children
+                    target = current_children_resource_ids
+
+                    case Oli.Publishing.Updating.Merge.merge(base, source, target) do
+                      {:ok, merged} ->
+                        %SectionResource{
+                          section_resource
+                          | children: Enum.map(merged, &resource_id_to_sr_id[&1])
+                        }
+
+                      {:no_change} ->
+                        section_resource
+                    end
+                  else
+                    section_resource
+                  end
+              end
+            else
+              section_resource
+            end
+          end)

-      # Upsert all merged section resource records. Some of these records may have just been created
-      # and some may not have been changed, but that's okay we will just update them again
-      now = DateTime.utc_now() |> DateTime.truncate(:second)
-      placeholders = %{timestamp: now}
+        # Upsert all merged section resource records. Some of these records may have just been created
+        # and some may not have been changed, but that's okay we will just update them again
+        now = DateTime.utc_now() |> DateTime.truncate(:second)
+        placeholders = %{timestamp: now}
+
+        section_resource_rows =
+          merged_section_resources
+          |> Enum.map(fn section_resource ->
+            %{
+              SectionResource.to_map(section_resource)
+              | updated_at: {:placeholder, :timestamp}
+            }
+          end)

-      section_resource_rows =
-        merged_section_resources
-        |> Enum.map(fn section_resource ->
-          %{
-            SectionResource.to_map(section_resource)
-            | updated_at: {:placeholder, :timestamp}
-          }
-        end)
+        Database.batch_insert_all(SectionResource, section_resource_rows,
+          placeholders: placeholders,
+          on_conflict:
+            {:replace_all_except,
+             [
+               :inserted_at,
+               :scoring_strategy_id,
+               :scheduling_type,
+               :manually_scheduled,
+               :start_date,
+               :end_date,
+               :collab_space_config,
+               :explanation_strategy,
+               :max_attempts,
+               :retake_mode,
+               :password,
+               :late_submit,
+               :late_start,
+               :time_limit,
+               :grace_period,
+               :review_submission,
+               :feedback_mode,
+               :feedback_scheduled_date
+             ]},
+          conflict_target: [:section_id, :resource_id]
+        )

-      Database.batch_insert_all(SectionResource, section_resource_rows,
-        placeholders: placeholders,
-        on_conflict:
-          {:replace_all_except,
-           [
-             :inserted_at,
-             :scoring_strategy_id,
-             :scheduling_type,
-             :manually_scheduled,
-             :start_date,
-             :end_date,
-             :collab_space_config,
-             :explanation_strategy,
-             :max_attempts,
-             :retake_mode,
-             :password,
-             :late_submit,
-             :late_start,
-             :time_limit,
-             :grace_period,
-             :review_submission,
-             :feedback_mode,
-             :feedback_scheduled_date
-           ]},
-        conflict_target: [:section_id, :resource_id]
-      )
+        # Finally, we must fetch and renumber the final hierarchy in order to generate the proper numberings
+        {new_hierarchy, _numberings} =
+          MinimalHierarchy.full_hierarchy(section.slug)
+          |> Numbering.renumber_hierarchy()

-      # Finally, we must fetch and renumber the final hierarchy in order to generate the proper numberings
-      {new_hierarchy, _numberings} =
-        DeliveryResolver.full_hierarchy(section.slug)
-        |> Numbering.renumber_hierarchy()
+        # Rebuild the section curriculum using the new hierarchy, adding any new non-hierarchical
+        # resources and cleaning up any deleted ones
+        pinned_project_publications = get_pinned_project_publications(section.id)
+        rebuild_section_curriculum(section, new_hierarchy, pinned_project_publications)

-      # Rebuild the section curriculum using the new hierarchy, adding any new non-hierarchical
-      # resources and cleaning up any deleted ones
-      pinned_project_publications = get_pinned_project_publications(section.id)
-      rebuild_section_curriculum(section, new_hierarchy, pinned_project_publications)
-      Delivery.maybe_update_section_contains_explorations(section)
-      Delivery.maybe_update_section_contains_deliberate_practice(section)
+        Delivery.maybe_update_section_contains_explorations(section)
+        Delivery.maybe_update_section_contains_deliberate_practice(section)

-      {:ok}
-    end)
+        {:ok}
+      end)
+
+    Logger.info(
+      "perform_update.MAJOR: section[#{section.slug}] #{Oli.Timing.elapsed(mark) / 1000 / 1000}ms"
+    )
+
+    result
   end

   @doc """
@@ -2656,7 +2683,7 @@ defmodule Oli.Delivery.Sections do
          required_survey_resource_id: required_survey_resource_id
        ) do
     published_resources_by_resource_id =
-      published_resources_map(publication_ids, preload: [:revision, :publication])
+      MinimalHierarchy.published_resources_map(publication_ids)

     now = DateTime.utc_now() |> DateTime.truncate(:second)

@@ -2672,27 +2699,28 @@ defmodule Oli.Delivery.Sections do
     section_resource_rows =
       published_resources_by_resource_id
-      |> Enum.filter(fn {resource_id, %{revision: rev}} ->
-        !MapSet.member?(skip_set, resource_id) && !is_structural?(rev)
+      |> Enum.filter(fn {resource_id, %{resource_type_id: resource_type_id}} ->
+        !MapSet.member?(skip_set, resource_id) &&
+          !(resource_type_id == ResourceType.get_id_by_type("container"))
       end)
       |> generate_slugs_until_uniq()
-      |> Enum.map(fn {slug, %PublishedResource{revision: revision, publication: pub}} ->
+      |> Enum.map(fn {slug, item} ->
         %{
           slug: slug,
-          resource_id: revision.resource_id,
-          project_id: pub.project_id,
+          resource_id: item.resource_id,
+          project_id: item.project_id,
           section_id: section_id,
           inserted_at: now,
           updated_at: now,
-          collab_space_config: revision.collab_space_config,
+          collab_space_config: item.collab_space_config,
           max_attempts:
-            if is_nil(revision.max_attempts) do
+            if is_nil(item.max_attempts) do
               0
             else
-              revision.max_attempts
+              item.max_attempts
             end,
-          scoring_strategy_id: revision.scoring_strategy_id,
-          retake_mode: revision.retake_mode
+          scoring_strategy_id: item.scoring_strategy_id,
+          retake_mode: item.retake_mode
         }
       end)

@@ -2712,7 +2740,7 @@ defmodule Oli.Delivery.Sections do
     # generate initial slugs for new section resources
     published_resources_by_slug =
       Enum.reduce(published_resources, %{}, fn {_, pr}, acc ->
-        title = pr.revision.title
+        title = pr.title

         # if a previous published resource has the same revision then generate a new initial slug different from the default
         slug_attempt = if Map.has_key?(acc, Slug.slugify(title)), do: 1, else: 0

@@ -2772,12 +2800,10 @@ defmodule Oli.Delivery.Sections do
   # Generates a new set of slug candidates
   defp regenerate_slugs(prs_by_slug, attempt) do
-    Enum.reduce(prs_by_slug, %{}, fn {_slug,
-                                      %PublishedResource{revision: revision} = published_resource},
-                                     acc ->
-      new_slug = Slug.generate_nth(revision.title, attempt)
+    Enum.reduce(prs_by_slug, %{}, fn {_slug, %{title: title} = item}, acc ->
+      new_slug = Slug.generate_nth(title, attempt)

-      Map.put(acc, new_slug, published_resource)
+      Map.put(acc, new_slug, item)
     end)
   end
diff --git a/lib/oli/delivery/sections/minimal_hierarchy.ex b/lib/oli/delivery/sections/minimal_hierarchy.ex
new file mode 100644
index 00000000000..f52ba985e1b
--- /dev/null
+++ b/lib/oli/delivery/sections/minimal_hierarchy.ex
@@ -0,0 +1,156 @@
+defmodule Oli.Delivery.Sections.MinimalHierarchy do
+  import Ecto.Query, warn: false
+
+  alias Oli.Delivery.Sections
+  alias Oli.Delivery.Hierarchy.HierarchyNode
+  alias Oli.Resources.Numbering
+  alias Oli.Branding.CustomLabels
+  alias Oli.Repo
+
+  alias Oli.Publishing.{
+    PublishedResource
+  }
+
+  import Oli.Utils
+
+  require Logger
+
+  def published_resources_map(publication_ids) when is_list(publication_ids) do
+    PublishedResource
+    |> join(:left, [pr], r in Oli.Resources.Revision, on: pr.revision_id == r.id)
+    |> join(:left, [pr, _], p in Oli.Publishing.Publications.Publication,
+      on: pr.publication_id == p.id
+    )
+    |> where([pr, _r], pr.publication_id in ^publication_ids)
+    |> select([pr, r, p], %{
+      resource_id: pr.resource_id,
+      children: r.children,
+      revision_id: pr.revision_id,
+      resource_type_id: r.resource_type_id,
+      title: r.title,
+      scoring_strategy_id: r.scoring_strategy_id,
+      collab_space_config: r.collab_space_config,
+      max_attempts: r.max_attempts,
+      retake_mode: r.retake_mode,
+      project_id: p.project_id
+    })
+    |> Repo.all()
+    |> Enum.reduce(%{}, fn r, m -> Map.put(m, r.resource_id, r) end)
+  end
+
+  def published_resources_map(publication_id) do
+    project_id =
+      from(
+        p in Oli.Publishing.Publications.Publication,
+        where: p.id == ^publication_id,
+        select: p.project_id
+      )
+      |> Repo.one()
+
+    PublishedResource
+    |> join(:left, [pr], r in Oli.Resources.Revision, on: pr.revision_id == r.id)
+    |> where([pr, _r], pr.publication_id == ^publication_id)
+    |> select([pr, r], %{
+      resource_id: pr.resource_id,
+      children: r.children,
+      revision_id: pr.revision_id,
+      resource_type_id: r.resource_type_id,
+      title: r.title,
+      scoring_strategy_id: r.scoring_strategy_id,
+      collab_space_config: r.collab_space_config,
+      max_attempts: r.max_attempts,
+      retake_mode: r.retake_mode
+    })
+    |> Repo.all()
+    |> Enum.map(fn pr -> Map.put(pr, :project_id, project_id) end)
+    |> Enum.reduce(%{}, fn r, m -> Map.put(m, r.resource_id, r) end)
+  end
+
+  def full_hierarchy(section_slug) do
+    mark = Oli.Timing.mark()
+
+    {hierarchy_nodes, root_hierarchy_node} = hierarchy_nodes_by_sr_id(section_slug)
+    result = hierarchy_node_with_children(root_hierarchy_node, hierarchy_nodes)
+
+    Logger.info("MinimalHierarchy.full_hierarchy: #{Oli.Timing.elapsed(mark) / 1000 / 1000}ms")
+
+    result
+  end
+
+  defp hierarchy_node_with_children(
+         %HierarchyNode{children: children_ids} = node,
+         nodes_by_sr_id
+       ) do
+    Map.put(
+      node,
+      :children,
+      Enum.map(children_ids, fn sr_id ->
+        Map.get(nodes_by_sr_id, sr_id)
+        |> hierarchy_node_with_children(nodes_by_sr_id)
+      end)
+    )
+  end
+
+  # Returns a map of resource ids to hierarchy nodes and the root hierarchy node
+  defp hierarchy_nodes_by_sr_id(section_slug) do
+    page_id = Oli.Resources.ResourceType.get_id_by_type("page")
+    container_id = Oli.Resources.ResourceType.get_id_by_type("container")
+
+    section = Sections.get_section_by(slug: section_slug)
+
+    labels =
+      case section.customizations do
+        nil -> Map.from_struct(CustomLabels.default())
+        l -> Map.from_struct(l)
+      end
+
+    from(
+      [sr: sr, rev: rev, spp: spp] in Oli.Publishing.DeliveryResolver.section_resource_revisions(
+        section_slug
+      ),
+      join: p in Oli.Authoring.Course.Project,
+      on: p.id == spp.project_id,
+      where:
+        rev.resource_type_id == ^page_id or
+          rev.resource_type_id == ^container_id,
+      select:
+        {sr,
+         %{
+           id: rev.id,
+           resource_id: rev.resource_id,
+           resource_type_id: rev.resource_type_id,
+           slug: rev.slug,
+           title: rev.title,
+           graded: rev.graded
+         }, p.slug}
+    )
+    |> Repo.all()
+    |> Enum.reduce({%{}, nil}, fn {sr, rev, proj_slug}, {nodes, root} ->
+      is_root? = section.root_section_resource_id == sr.id
+
+      node = %HierarchyNode{
+        uuid: uuid(),
+        numbering: %Numbering{
+          index: sr.numbering_index,
+          level: sr.numbering_level,
+          labels: labels
+        },
+        children: sr.children,
+        resource_id: rev.resource_id,
+        project_id: sr.project_id,
+        project_slug: proj_slug,
+        revision: rev,
+        section_resource: sr
+      }
+
+      {
+        Map.put(
+          nodes,
+          sr.id,
+          node
+        ),
+        if(is_root?, do: node, else: root)
+      }
+    end)
+  end
+end
diff --git a/lib/oli_web/live/insights/insights.ex b/lib/oli_web/live/insights/insights.ex
index 71a0fef1ff9..e7c57434c07 100644
--- a/lib/oli_web/live/insights/insights.ex
+++ b/lib/oli_web/live/insights/insights.ex
@@ -34,6 +34,7 @@ defmodule OliWeb.Insights do
     {:ok,
      assign(socket,
        ctx: ctx,
+       is_admin?: Oli.Accounts.is_admin?(ctx.author),
       project: project,
       by_page_rows: nil,
       by_activity_rows: by_activity_rows,
@@ -89,15 +90,17 @@ defmodule OliWeb.Insights do
         Insights can help you improve your course by providing a statistical analysis of
         the skills covered by each question to find areas where students are struggling.
-
-
-
+      <%= if @is_admin? do %>
+
+
+      <% end %>