
pulibrary / pdc_describe / 6e0486a6-b0d3-473d-bdb0-ec772992e9da

10 Apr 2025 07:46PM UTC coverage: 95.367% (-0.03%) from 95.399%

Pull Request #2094: Move files to embargo bucket for approved embargoed works
CI: circleci
hectorcorrea: Fixed test
Co-authored-by: Robert-Anthony Lee-Faison <leefaisonr@users.noreply.github.com>

22 of 24 new or added lines in 3 files covered (91.67%).
28 existing lines in 3 files now uncovered.
3479 of 3648 relevant lines covered (95.37%).
398.81 hits per line.

Source File

/app/models/work.rb (95.59% covered)
1
# frozen_string_literal: true
2

3
# rubocop:disable Metrics/ClassLength
4
class Work < ApplicationRecord
2✔
5
  # Errors for cases where there is no valid Group
6
  class InvalidGroupError < ::ArgumentError; end
2✔
7

8
  has_many :work_activity, -> { order(updated_at: :desc) }, dependent: :destroy
498✔
9
  has_many :user_work, -> { order(updated_at: :desc) }, dependent: :destroy
20✔
10
  has_many :upload_snapshots, -> { order(updated_at: :desc) }, dependent: :destroy
634✔
11

12
  belongs_to :group, class_name: "Group"
2✔
13
  belongs_to :curator, class_name: "User", foreign_key: "curator_user_id", optional: true
2✔
14

15
  attribute :work_type, :string, default: "DATASET"
2✔
16
  attribute :profile, :string, default: "DATACITE"
2✔
17

18
  attr_accessor :user_entered_doi
2✔
19

20
  alias state_history user_work
2✔
21

22
  delegate :valid_to_submit, :valid_to_draft, :valid_to_approve, :valid_to_complete, to: :work_validator
2✔
23

24
  include AASM
2✔
25

26
  aasm column: :state do
2✔
27
    state :none, initial: true
2✔
28
    state :draft, :awaiting_approval, :approved, :withdrawn, :deletion_marker
2✔
29

30
    event :draft, after: :draft_doi do
2✔
31
      transitions from: :none, to: :draft, guard: :valid_to_draft
2✔
32
    end
33

34
    event :complete_submission do
2✔
35
      transitions from: :draft, to: :awaiting_approval, guard: :valid_to_complete
2✔
36
    end
37

38
    event :request_changes do
2✔
39
      transitions from: :awaiting_approval, to: :awaiting_approval, guard: :valid_to_submit
2✔
40
    end
41

42
    event :revert_to_draft do
2✔
43
      transitions from: :awaiting_approval, to: :draft, guard: :valid_to_draft
2✔
44
    end
45

46
    event :approve do
2✔
47
      transitions from: :awaiting_approval, to: :approved, guard: :valid_to_approve, after: :publish
2✔
48
    end
49

50
    event :withdraw do
2✔
51
      transitions from: [:draft, :awaiting_approval, :approved], to: :withdrawn
2✔
52
    end
53

54
    event :resubmit do
2✔
55
      transitions from: :withdrawn, to: :draft
2✔
56
    end
57

58
    event :remove do
2✔
59
      transitions from: :withdrawn, to: :deletion_marker
2✔
60
    end
61

62
    after_all_events :track_state_change
2✔
63
  end
64

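  # Assign the state directly as a string or symbol. Raises StandardError for values that
  # are not valid AASM states; the state is written without persisting the record.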
65
  def state=(new_state)
2✔
66
    new_state_sym = new_state.to_sym
1,592✔
67
    valid_states = self.class.aasm.states.map(&:name)
1,592✔
68
    raise(StandardError, "Invalid state '#{new_state}'") unless valid_states.include?(new_state_sym)
1,592✔
69
    aasm_write_state_without_persistence(new_state_sym)
1,590✔
70
  end
71

72
  ##
73
  # Is this work editable by a given user?
74
  # A work is editable when:
75
  # * it is being edited by the person who made it
76
  # * it is being edited by a group admin of the group where it resides
77
  # * it is being edited by a super admin
78
  # @param user [User]
79
  # @return [Boolean]
80
  def editable_by?(user)
2✔
81
    submitted_by?(user) || administered_by?(user)
732✔
82
  end
83

84
  def editable_in_current_state?(user)
2✔
85
    # anyone with edit privileges can edit a work while it is in draft
86
    return editable_by?(user) if draft?
482✔
87

88
    # Only administrators can edit a work in other states
89
    administered_by?(user)
110✔
90
  end
91

92
  def submitted_by?(user)
2✔
93
    created_by_user_id == user.id
732✔
94
  end
95

96
  def administered_by?(user)
2✔
97
    user.has_role?(:group_admin, group)
222✔
98
  end
99

100
  class << self
2✔
101
    def find_by_doi(doi)
2✔
102
      prefix = "10.34770/"
42✔
103
      doi = "#{prefix}#{doi}" unless doi.blank? || doi.start_with?(prefix)
42✔
104
      Work.find_by!("metadata @> ?", JSON.dump(doi:))
42✔
105
    end
106

107
    def find_by_ark(ark)
2✔
108
      prefix = "ark:/"
440✔
109
      ark = "#{prefix}#{ark}" unless ark.blank? || ark.start_with?(prefix)
440✔
110
      Work.find_by!("metadata @> ?", JSON.dump(ark:))
440✔
111
    end
112

113
    delegate :resource_type_general_values, to: PDCMetadata::Resource
2✔
114

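    # Approved works that are still under embargo (embargo date is today or later).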
115
    def list_embargoed
2✔
116
      Work.where("embargo_date >= current_date").where(state: "approved")
2✔
117
    end
118

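    # Approved works whose embargo date was yesterday, i.e. works just released from embargo.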
119
    def list_released_embargo
2✔
120
      Work.where("embargo_date = current_date-1").where(state: "approved")
2✔
121
    end
122
  end
123

124
  include Rails.application.routes.url_helpers
2✔
125

126
  before_save do |work|
2✔
127
    # Ensure that the metadata JSONB postgres field is persisted properly
128
    work.metadata = JSON.parse(work.resource.to_json)
2,452✔
129
  end
130

131
  after_save do |work|
2✔
132
    if work.approved?
2,450✔
133
      work.reload
280✔
134
    end
135
  end
136

137
  validate do |_work|
2✔
138
    work_validator.valid?
2,502✔
139
  end
140

141
  # Override the ActiveRecord#reload method
142
  # https://apidock.com/rails/ActiveRecord/Base/reload
143
  #
144
  # NOTE: Usually `after_save` is a better place to put this kind of code:
145
  #
146
  #   after_save do |work|
147
  #     work.resource = nil
148
  #   end
149
  #
150
  # but that does not work in this case because the block points to a different
151
  # memory object for `work` than the one we want to reload.
152
  def reload(options = nil)
2✔
153
    super
498✔
154
    # Force `resource` to be reloaded
155
    @resource = nil
498✔
156
    self
498✔
157
  end
158

159
  def title
2✔
160
    resource.main_title
888✔
161
  end
162

163
  def uploads_attributes
2✔
164
    return [] if approved? # once approved we no longer allow the updating of uploads via the application
170✔
165
    uploads.map do |upload|
158✔
166
      {
167
        id: upload.id,
88✔
168
        key: upload.key,
169
        filename: upload.filename.to_s,
170
        created_at: upload.created_at,
171
        url: upload.url
172
      }
173
    end
174
  end
175

176
  def form_attributes
2✔
177
    {
178
      uploads: uploads_attributes
170✔
179
    }
180
  end
181

182
  def draft_doi
2✔
183
    return if resource.doi.present?
66✔
184
    resource.doi = datacite_service.draft_doi
48✔
185
    save!
44✔
186
  end
187

188
  # Return the DOI formatted as a URL, so it can be used as a link on display pages
189
  # @return [String] A url formatted version of the DOI
190
  def doi_url
2✔
191
    return "https://doi.org/#{doi}" unless doi.starts_with?("https://doi.org")
2✔
192
    doi
×
193
  end
194

195
  def created_by_user
2✔
196
    User.find(created_by_user_id)
860✔
197
  rescue ActiveRecord::RecordNotFound
198
    nil
2✔
199
  end
200

201
  def resource=(resource)
2✔
202
    @resource = resource
1,962✔
203
    # Ensure that the metadata JSONB postgres field is persisted properly
204
    self.metadata = JSON.parse(resource.to_json)
1,962✔
205
  end
206

207
  def resource
2✔
208
    @resource ||= PDCMetadata::Resource.new_from_jsonb(metadata)
52,454✔
209
  end
210

211
  def url
2✔
212
    return unless persisted?
×
213

214
    @url ||= url_for(self)
×
215
  end
216

217
  def files_location_upload?
2✔
218
    files_location.blank? || files_location == "file_upload"
380✔
219
  end
220

221
  def files_location_cluster?
2✔
222
    files_location == "file_cluster"
4✔
223
  end
224

225
  def files_location_other?
2✔
226
    files_location == "file_other"
34✔
227
  end
228

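  # Assign the given user as curator for this work, or clear the curator when "no-one" is passed.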
229
  def change_curator(curator_user_id, current_user)
2✔
230
    if curator_user_id == "no-one"
12✔
231
      clear_curator(current_user)
2✔
232
    else
233
      update_curator(curator_user_id, current_user)
10✔
234
    end
235
  end
236

237
  def clear_curator(current_user)
2✔
238
    # Update the curator on the Work
239
    self.curator_user_id = nil
4✔
240
    save!
4✔
241

242
    # ...and log the activity
243
    WorkActivity.add_work_activity(id, "Unassigned existing curator", current_user.id, activity_type: WorkActivity::SYSTEM)
4✔
244
  end
245

246
  def update_curator(curator_user_id, current_user)
2✔
247
    # Update the curator on the Work
248
    self.curator_user_id = curator_user_id
12✔
249
    save!
12✔
250

251
    # ...and log the activity
252
    new_curator = User.find(curator_user_id)
10✔
253

254
    work_url = "[#{title}](#{Rails.application.routes.url_helpers.work_url(self)})"
10✔
255

256
    # Troubleshooting https://github.com/pulibrary/pdc_describe/issues/1783
257
    if work_url.include?("/describe/describe/")
10✔
258
      Rails.logger.error("URL #{work_url} included /describe/describe/ and was fixed. See https://github.com/pulibrary/pdc_describe/issues/1783")
×
259
      work_url = work_url.gsub("/describe/describe/", "/describe/")
×
260
    end
261

262
    message = if curator_user_id.to_i == current_user.id
10✔
263
                "Self-assigned @#{current_user.uid} as curator for work #{work_url}"
4✔
264
              else
265
                "Set curator to @#{new_curator.uid} for work #{work_url}"
6✔
266
              end
267
    WorkActivity.add_work_activity(id, message, current_user.id, activity_type: WorkActivity::SYSTEM)
10✔
268
  end
269

270
  def add_message(message, current_user_id)
2✔
271
    WorkActivity.add_work_activity(id, message, current_user_id, activity_type: WorkActivity::MESSAGE)
22✔
272
  end
273

274
  def add_provenance_note(date, note, current_user_id, change_label = "")
2✔
275
    WorkActivity.add_work_activity(id, { note:, change_label: }.to_json, current_user_id, activity_type: WorkActivity::PROVENANCE_NOTES, created_at: date)
58✔
276
  end
277

278
  def log_changes(resource_compare, current_user_id)
2✔
279
    return if resource_compare.identical?
142✔
280
    WorkActivity.add_work_activity(id, resource_compare.differences.to_json, current_user_id, activity_type: WorkActivity::CHANGES)
112✔
281
  end
282

283
  def log_file_changes(current_user_id)
2✔
284
    return if changes.count == 0
22✔
285
    WorkActivity.add_work_activity(id, changes.to_json, current_user_id, activity_type: WorkActivity::FILE_CHANGES)
22✔
286
  end
287

288
  def activities
2✔
289
    WorkActivity.activities_for_work(id, WorkActivity::MESSAGE_ACTIVITY_TYPES + WorkActivity::CHANGE_LOG_ACTIVITY_TYPES)
10✔
290
  end
291

292
  def new_notification_count_for_user(user_id)
2✔
293
    WorkActivityNotification.joins(:work_activity)
148✔
294
                            .where(user_id:, read_at: nil)
295
                            .where(work_activity: { work_id: id })
296
                            .count
297
  end
298

299
  # Marks as read the notifications for the given user_id in this work.
300
  # In practice, the user_id is the id of the current user and therefore this method marks the current user's
301
  # notifications as read.
302
  def mark_new_notifications_as_read(user_id)
2✔
303
    # Notice that we fetch and update the information in batches
304
    # so that we don't issue individual SQL SELECT + SQL UPDATE
305
    # for each notification.
306
    #
307
    # Rails batching information:
308
    #   https://guides.rubyonrails.org/active_record_querying.html
309
    #   https://api.rubyonrails.org/classes/ActiveRecord/Batches.html
310

311
    # Disable this validation since we want to force a SQL UPDATE.
312
    # rubocop:disable Rails/SkipsModelValidations
313
    now_utc = Time.now.utc
200✔
314
    WorkActivityNotification.joins(:work_activity).where("user_id=? and work_id=?", user_id, id).in_batches(of: 1000).update_all(read_at: now_utc)
200✔
315
    # rubocop:enable Rails/SkipsModelValidations
316
  end
317

318
  def current_transition
2✔
319
    aasm.current_event.to_s.humanize.delete("!")
32✔
320
  end
321

322
  # Retrieve the S3 file uploads associated with the Work
323
  # @return [Array<S3File>]
324
  def uploads
2✔
325
    return post_curation_uploads if approved?
430✔
326

327
    pre_curation_uploads
410✔
328
  end
329

330
  # Retrieve the S3 file uploads whose filename includes "README"
331
  # @return [Array<S3File>]
332
  def readme_uploads
2✔
333
    uploads.select { |s3_file| s3_file.filename.include?("README") }
×
334
  end
335

336
  # Retrieve the S3 file uploads which are research artifacts proper (not README or other files providing metadata/documentation)
337
  # @return [Array<S3File>]
338
  def artifact_uploads
2✔
339
    uploads.reject { |s3_file| s3_file.filename.include?("README") }
×
340
  end
341

342
  # Returns the list of files for the work with some basic information about each of them.
343
  # This method is much faster than `uploads` because it does not return the actual S3File
344
  # objects to the client; instead it returns just a few selected data elements.
345
  # rubocop:disable Metrics/MethodLength
346
  def file_list
2✔
347
    start = Time.zone.now
310✔
348
    s3_files = approved? ? post_curation_uploads : pre_curation_uploads
310✔
349
    files_info = s3_files.map do |s3_file|
310✔
350
      {
351
        "safe_id": s3_file.safe_id,
230✔
352
        "filename": s3_file.filename,
353
        "filename_display": s3_file.filename_display,
354
        "last_modified": s3_file.last_modified,
355
        "last_modified_display": s3_file.last_modified_display,
356
        "size": s3_file.size,
357
        "display_size": s3_file.display_size,
358
        "url": s3_file.url,
359
        "is_folder": s3_file.is_folder
360
      }
361
    end
362
    log_performance(start, "file_list called for #{id}")
310✔
363
    files_info
310✔
364
  end
365
  # rubocop:enable Metrics/MethodLength
366

367
  def total_file_size
2✔
368
    total_size = 0
32✔
369
    file_list.each do |file|
32✔
370
      total_size += file[:size]
×
371
    end
372
    total_size
32✔
373
  end
374

375
  # Calculates the total file size from a given list of files
376
  # This is so that we don't fetch the list twice from AWS since it can be expensive when
377
  # there are thousands of files on the work.
378
  def total_file_size_from_list(files)
2✔
379
    files.sum { |file| file[:size] }
508✔
380
  end
381

382
  # Fetches the data from S3 directly bypassing ActiveStorage
383
  def pre_curation_uploads
2✔
384
    s3_query_service.client_s3_files.sort_by(&:filename)
1,078✔
385
  end
386

387
  # Accesses post-curation S3 Bucket Objects
388
  def post_curation_s3_resources
2✔
389
    if approved?
100✔
390
      s3_resources
80✔
391
    else
392
      []
20✔
393
    end
394
  end
395

396
  # Returns the files in post-curation for the work
397
  def post_curation_uploads(force_post_curation: false)
2✔
398
    if force_post_curation
110✔
399
      # Always use the post-curation data regardless of the work's status
400
      post_curation_s3_query_service = S3QueryService.new(self, "postcuration")
10✔
401
      post_curation_s3_query_service.data_profile.fetch(:objects, [])
10✔
402
    else
403
      # Return the list of files, honoring the work's status
404
      post_curation_s3_resources
100✔
405
    end
406
  end
407

408
  def s3_files
2✔
409
    pre_curation_uploads
×
410
  end
411

412
  def s3_client
2✔
413
    s3_query_service.client
44✔
414
  end
415

416
  delegate :bucket_name, :prefix, to: :s3_query_service
2✔
417
  delegate :doi_attribute_url, :curator_or_current_uid, to: :datacite_service
2✔
418

419
  # Generates the S3 Object key
420
  # @return [String]
421
  def s3_object_key
2✔
422
    "#{doi}/#{id}"
136✔
423
  end
424

425
  # Transmit a HEAD request for the S3 Bucket directory for this Work
426
  # @param bucket_name the S3 bucket to check
427
  # @return [Boolean, nil] true if the directory object is found, nil if it is not
428
  def find_bucket_s3_dir(bucket_name:)
2✔
429
    # TODO: Directories do not really exist in S3
430
    #      if we really need this check then we need to do something else to check the bucket
431
    s3_client.head_object({
44✔
432
                            bucket: bucket_name,
433
                            key: s3_object_key
434
                          })
435
    true
2✔
436
  rescue Aws::S3::Errors::NotFound
437
    nil
42✔
438
  end
439

440
  # Generates the JSON serialized expression of the Work
441
  # @param args [Array<Hash>]
442
  # @option args [Boolean] :force_post_curation Force the request of AWS S3
443
  #   Resources, clearing the in-memory cache
444
  # @return [Hash]
445
  def as_json(*args)
2✔
446
    files = files_as_json(*args)
36✔
447

448
    # to_json returns a string of serialized JSON.
449
    # as_json returns the corresponding hash.
450
    {
451
      "resource" => resource.as_json,
36✔
452
      "files" => files,
453
      "group" => group.as_json.except("id"),
454
      "embargo_date" => embargo_date_as_json,
455
      "created_at" => format_date_for_solr(created_at),
456
      "updated_at" => format_date_for_solr(updated_at)
457
    }
458
  end
459

460
  # Format the date for Apache Solr
461
  # @param date [ActiveSupport::TimeWithZone]
462
  # @return [String]
463
  def format_date_for_solr(date)
2✔
464
    date.strftime("%Y-%m-%dT%H:%M:%SZ")
72✔
465
  end
466

467
  def pre_curation_uploads_count
2✔
468
    s3_query_service.file_count
4✔
469
  end
470

471
  delegate :ark, :doi, :resource_type, :resource_type=, :resource_type_general, :resource_type_general=,
2✔
472
           :to_xml, to: :resource
473

474
  # S3QueryService object associated with this Work
475
  # @return [S3QueryService]
476
  def s3_query_service
2✔
477
    mode = if approved?
2,022✔
478
             if embargoed?
176✔
479
               PULS3Client::EMBARGO
2✔
480
             else
481
               PULS3Client::POSTCURATION
174✔
482
             end
483
           else
484
             PULS3Client::PRECURATION
1,846✔
485
           end
486
    @s3_query_service ||= S3QueryService.new(self, mode)
2,022✔
487
  end
488

489
  def past_snapshots
2✔
UNCOV
490
    UploadSnapshot.where(work: self)
×
491
  end
492

493
  # Build or find persisted UploadSnapshot models for this Work
494
  # @param user_id [Integer] optional user to assign the snapshot to
495
  # @return [UploadSnapshot]
496
  def reload_snapshots(user_id: nil)
2✔
497
    work_changes = []
76✔
498
    s3_files = pre_curation_uploads
76✔
499
    s3_filenames = s3_files.map(&:filename)
76✔
500

501
    upload_snapshot = latest_snapshot
76✔
502

503
    upload_snapshot.snapshot_deletions(work_changes, s3_filenames)
76✔
504

505
    upload_snapshot.snapshot_modifications(work_changes, s3_files)
76✔
506

507
    # Create WorkActivity models with the set of changes
508
    unless work_changes.empty?
76✔
509
      new_snapshot = UploadSnapshot.new(work: self, url: s3_query_service.prefix)
66✔
510
      new_snapshot.store_files(s3_files)
66✔
511
      new_snapshot.save!
66✔
512
      WorkActivity.add_work_activity(id, work_changes.to_json, user_id, activity_type: WorkActivity::FILE_CHANGES)
66✔
513
    end
514
  end
515

516
  def self.presenter_class
2✔
517
    WorkPresenter
240✔
518
  end
519

520
  def presenter
2✔
521
    self.class.presenter_class.new(work: self)
240✔
522
  end
523

524
  def changes
2✔
525
    @changes ||= []
90✔
526
  end
527

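  # Record a single file change in memory; the accumulated changes are persisted by log_file_changes.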
528
  def track_change(action, filename)
2✔
529
    changes << { action:, filename: }
46✔
530
  end
531

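  # True when the resource includes a rights entry with the given identifier.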
532
  # rubocop:disable Naming/PredicateName
533
  def has_rights?(rights_id)
2✔
534
    resource.rights_many.index { |rights| rights.identifier == rights_id } != nil
2,920✔
535
  end
536
  # rubocop:enable Naming/PredicateName
537

538
  # This is the solr id / work show page in PDC Discovery
539
  def pdc_discovery_url
2✔
540
    "https://datacommons.princeton.edu/discovery/catalog/doi-#{doi.tr('/', '-').tr('.', '-')}"
354✔
541
  end
542

543
  # Determine whether or not the Work is under active embargo
544
  # @return [Boolean]
545
  def embargoed?
2✔
546
    return false if embargo_date.blank?
700✔
547

548
    current_date = Time.zone.now
30✔
549
    embargo_date >= current_date
30✔
550
  end
551

552
  # Returns the bucket name for the files in the work
553
  def files_bucket_name
2✔
554
    if approved?
328✔
555
      if embargoed?
36✔
556
        "embargo"
2✔
557
      else
558
        "post-curation"
34✔
559
      end
560
    else
561
      "pre-curation"
292✔
562
    end
563
  end
564

565
  protected
2✔
566

567
    def work_validator
2✔
568
      @work_validator ||= WorkValidator.new(self)
2,758✔
569
    end
570

571
    # This must be protected, NOT private for ActiveRecord to work properly with this attribute.
572
  #   Protected will still keep others from setting the metadata, but allows ActiveRecord the access it needs
573
    def metadata=(metadata)
2✔
574
      super
4,414✔
575
      @resource = PDCMetadata::Resource.new_from_jsonb(metadata)
4,414✔
576
    end
577

578
  private
2✔
579

580
    def publish(user)
2✔
581
      datacite_service.publish_doi(user)
56✔
582
      update_ark_information
56✔
583
      publish_precurated_files(user)
56✔
584
      save!
54✔
585
    end
586

587
    # Update EZID (our provider of ARKs) with the new information for this work.
588
    def update_ark_information
2✔
589
      # We only want to update the ark url under certain conditions.
590
      # Set this value in config/update_ark_url.yml
591
      if Rails.configuration.update_ark_url
56✔
592
        if ark.present?
24✔
593
          Ark.update(ark, datacite_service.doi_attribute_url)
6✔
594
        end
595
      end
596
    end
597

598
    def track_state_change(user, state = aasm.to_state)
2✔
599
      uw = UserWork.new(user_id: user.id, work_id: id, state:)
268✔
600
      uw.save!
268✔
601
      WorkActivity.add_work_activity(id, "marked as #{state.to_s.titleize}", user.id, activity_type: WorkActivity::SYSTEM)
268✔
602
      WorkStateTransitionNotification.new(self, user.id).send
268✔
603
    end
604

605
    # Request S3 Bucket Objects associated with this Work
606
    # @return [Array<S3File>]
607
    def s3_resources
2✔
608
      data_profile = s3_query_service.data_profile
80✔
609
      data_profile.fetch(:objects, [])
80✔
610
    end
611
    alias pre_curation_s3_resources s3_resources
2✔
612

613
    def s3_object_persisted?(s3_file)
2✔
UNCOV
614
      uploads_keys = uploads.map(&:key)
×
UNCOV
615
      uploads_keys.include?(s3_file.key)
×
616
    end
617

618
    def publish_precurated_files(user)
2✔
619
      # We need to explicitly check for the target bucket here (postcuration or embargo).
620
      target = if embargoed?
44✔
621
                 PULS3Client::EMBARGO
4✔
622
               else
623
                 PULS3Client::POSTCURATION
40✔
624
               end
625

626
      s3_target_query_service = S3QueryService.new(self, target)
44✔
627

628
      s3_dir = find_bucket_s3_dir(bucket_name: s3_target_query_service.bucket_name)
44✔
629
      raise(StandardError, "Attempting to publish a Work with an existing S3 Bucket directory for: #{s3_object_key}") unless s3_dir.nil?
44✔
630

631
      # Copy the pre-curation S3 Objects to the target S3 Bucket.
632
      s3_query_service.publish_files(user)
42✔
633
    end
634

635
    def latest_snapshot
2✔
636
      return upload_snapshots.first unless upload_snapshots.empty?
76✔
637

638
      UploadSnapshot.new(work: self, files: [])
40✔
639
    end
640

641
    def datacite_service
2✔
642
      @datacite_service ||= PULDatacite.new(self)
114✔
643
    end
644

645
    def files_as_json(*args)
2✔
646
      return [] if embargoed?
36✔
647

648
      force_post_curation = args.any? { |arg| arg[:force_post_curation] == true }
40✔
649

650
      # Pre-curation files are not accessible externally,
651
      # so we are not interested in listing them in JSON.
652
      post_curation_uploads(force_post_curation:).map do |upload|
30✔
653
        {
654
          "filename": upload.filename,
20✔
655
          "size": upload.size,
656
          "display_size": upload.display_size,
657
          "url": upload.globus_url
658
        }
659
      end
660
    end
661

662
    def embargo_date_as_json
2✔
663
      if embargo_date.present?
36✔
664
        embargo_datetime = embargo_date.to_datetime
10✔
665
        embargo_date_iso8601 = embargo_datetime.iso8601
10✔
666
        # Apache Solr timestamps require the following format:
667
        # 1972-05-20T17:33:18Z
668
        # https://solr.apache.org/guide/solr/latest/indexing-guide/date-formatting-math.html
669
        embargo_date_iso8601.gsub(/\+.+$/, "Z")
10✔
670
      end
671
    end
672

673
    def log_performance(start, message)
2✔
674
      elapsed = Time.zone.now - start
310✔
675
      if elapsed > 20
310✔
UNCOV
676
        Rails.logger.warn("PERFORMANCE: #{message}. Elapsed: #{elapsed} seconds")
×
677
      else
678
        Rails.logger.info("PERFORMANCE: #{message}. Elapsed: #{elapsed} seconds")
310✔
679
      end
680
    end
681
end
682
# rubocop:enable Metrics/ClassLength