DEV: minor refactors to ExportUserArchive(Spec)

diff --git a/app/jobs/regular/export_user_archive.rb b/app/jobs/regular/export_user_archive.rb
index 858522e..8c5ed2f 100644
--- a/app/jobs/regular/export_user_archive.rb
+++ b/app/jobs/regular/export_user_archive.rb
@@ -144,6 +144,16 @@ module Jobs
 
     private
 
+    def piped_category_name(category_id)
+      return "-" unless category_id
+      category = Category.find(category_id)
+      categories = [category.name]
+      while category.parent_category_id && category = category.parent_category
+        categories << category.name
+      end
+      categories.reverse.join("|")
+    end
+
     def get_user_archive_fields(user_archive)
       user_archive_array = []
       topic_data = user_archive.topic
@@ -151,17 +161,7 @@ module Jobs
       topic_data = Topic.with_deleted.find_by(id: user_archive['topic_id']) if topic_data.nil?
       return user_archive_array if topic_data.nil?
 
-      all_categories = Category.all.to_h { |category| [category.id, category] }
-
-      categories = "-"
-      if topic_data.category_id && category = all_categories[topic_data.category_id]
-        categories = [category.name]
-        while category.parent_category_id && category = all_categories[category.parent_category_id]
-          categories << category.name
-        end
-        categories = categories.reverse.join("|")
-      end
-
+      categories = piped_category_name(topic_data.category_id)
       is_pm = topic_data.archetype == "private_message" ? I18n.t("csv_export.boolean_yes") : I18n.t("csv_export.boolean_no")
       url = "#{Discourse.base_url}/t/#{topic_data.slug}/#{topic_data.id}/#{user_archive['post_number']}"
 
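For illustration, a minimal sketch of what the extracted helper produces, assuming hypothetical nested categories named Science > Physics > Quantum (the method is private, so this spec-style snippet reaches it via send):

    # Hypothetical fixtures: a three-level category chain.
    science = Fabricate(:category_with_definition, name: "Science")
    physics = Fabricate(:category_with_definition, name: "Physics", parent_category_id: science.id)
    quantum = Fabricate(:category_with_definition, name: "Quantum", parent_category_id: physics.id)

    # The helper walks parent_category links from the leaf upward, then reverses:
    job.send(:piped_category_name, quantum.id) # => "Science|Physics|Quantum"

    # A topic with no category_id maps to a plain dash:
    job.send(:piped_category_name, nil) # => "-"
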
diff --git a/spec/jobs/export_user_archive_spec.rb b/spec/jobs/export_user_archive_spec.rb
index db37aaa..0dc84e3 100644
--- a/spec/jobs/export_user_archive_spec.rb
+++ b/spec/jobs/export_user_archive_spec.rb
@@ -4,8 +4,24 @@ require 'rails_helper'
 require 'csv'
 
 describe Jobs::ExportUserArchive do
+  let(:user) { Fabricate(:user, username: "john_doe") }
+  let(:extra) { {} }
+  let(:job) {
+    j = Jobs::ExportUserArchive.new
+    j.current_user = user
+    j.extra = extra
+    j
+  }
+  let(:component) { raise 'component not set' }
+
+  def make_component_csv
+    CSV.generate do |csv|
+      csv << job.get_header(component)
+      job.public_send(:"#{component}_export").each { |d| csv << d }
+    end
+  end
+
   context '#execute' do
-    let(:user) { Fabricate(:user, username: "john_doe") }
     let(:post) { Fabricate(:post, user: user) }
 
     before do
@@ -59,13 +75,7 @@ describe Jobs::ExportUserArchive do
 
   context 'user_archive posts' do
     let(:component) { 'user_archive' }
-    let(:user) { Fabricate(:user, username: "john_doe") }
     let(:user2) { Fabricate(:user) }
-    let(:job) {
-      j = Jobs::ExportUserArchive.new
-      j.current_user = user
-      j
-    }
     let(:category) { Fabricate(:category_with_definition) }
     let(:subcategory) { Fabricate(:category_with_definition, parent_category_id: category.id) }
     let(:subsubcategory) { Fabricate(:category_with_definition, parent_category_id: subcategory.id) }
@@ -119,12 +129,6 @@ describe Jobs::ExportUserArchive do
 
   context 'user_archive_profile' do
     let(:component) { 'user_archive_profile' }
-    let(:user) { Fabricate(:user, username: "john_doe") }
-    let(:job) {
-      j = Jobs::ExportUserArchive.new
-      j.current_user = user
-      j
-    }
 
     before do
       user.user_profile.website = 'https://doe.example.com/john'
@@ -133,10 +137,7 @@ describe Jobs::ExportUserArchive do
     end
 
     it 'properly includes the profile fields' do
-      csv_out = CSV.generate do |csv|
-        csv << job.get_header(component)
-        job.user_archive_profile_export.each { |d| csv << d }
-      end
+      csv_out = make_component_csv
 
       expect(csv_out).to match('doe.example.com')
       expect(csv_out).to match("Doe\n\nHere")
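
With the job setup and make_component_csv hoisted to the top-level describe, a new component context only needs to override component; the raising default let(:component) makes a forgotten override fail fast. A sketch, assuming a component whose export method follows the "#{component}_export" naming used above (the context name here is hypothetical):

    context 'another component (sketch)' do
      let(:component) { 'user_archive_profile' }

      it 'emits the header row followed by the exported rows' do
        csv_out = make_component_csv
        # The first parsed CSV row should equal the header for this component.
        expect(CSV.parse(csv_out).first).to eq(job.get_header(component))
      end
    end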

GitHub sha: 4aed8613

This commit appears in #10550, which was approved by eviltrout and merged by riking.