2018-02-21 22:21:32 +00:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
|
|
|
require 'rubygems/package'
|
|
|
|
|
|
|
|
# Builds a user-downloadable archive of an account's data: an ActivityPub
# outbox, likes and bookmarks indexes, the actor document, and all media
# attachments, packed into a gzipped tarball and attached to the given
# Backup record.
class BackupService < BaseService
  include Payloadable

  # Bytes copied per read when streaming an attachment into the tar,
  # keeping memory usage bounded for large media files.
  CHUNK_SIZE = 1.megabyte

  attr_reader :account, :backup, :collection

  # Entry point. Serializes the backup owner's data, writes the archive
  # and marks the backup as processed.
  #
  # @param backup [Backup] record to attach the finished archive to
  def call(backup)
    @backup  = backup
    @account = backup.user.account

    build_json!
    build_archive!
  end

  private

  # Serializes all of the account's statuses into @collection as an
  # ActivityPub ordered collection of activities (the outbox).
  def build_json!
    @collection = serialize(collection_presenter, ActivityPub::CollectionSerializer)

    account.statuses.with_includes.reorder(nil).find_in_batches do |statuses|
      statuses.each do |status|
        item = serialize_payload(ActivityPub::ActivityPresenter.from_status(status), ActivityPub::ActivitySerializer, signer: @account)
        item.delete(:'@context')

        unless item[:type] == 'Announce' || item[:object][:attachment].blank?
          # Rewrite attachment URLs to archive-relative paths (strip the
          # leading "system/" storage prefix) so links work offline.
          item[:object][:attachment].each do |attachment|
            attachment[:url] = Addressable::URI.parse(attachment[:url]).path.gsub(/\A\/system\//, '')
          end
        end

        @collection[:orderedItems] << item
      end

      # Release the serialized batch before loading the next one
      GC.start
    end
  end

  # Writes media, outbox, likes, bookmarks and the actor document into a
  # gzipped tar, then attaches the result to the backup record. The
  # tempfile is always cleaned up, even when writing fails.
  def build_archive!
    tmp_file = Tempfile.new(%w(archive .tar.gz))

    File.open(tmp_file, 'wb') do |file|
      Zlib::GzipWriter.wrap(file) do |gz|
        Gem::Package::TarWriter.new(gz) do |tar|
          dump_media_attachments!(tar)
          dump_outbox!(tar)
          dump_likes!(tar)
          dump_bookmarks!(tar)
          dump_actor!(tar)
        end
      end
    end

    # Timestamp plus random hex so the download filename is unguessable
    archive_filename = ['archive', Time.now.utc.strftime('%Y%m%d%H%M%S'), SecureRandom.hex(16)].join('-') + '.tar.gz'

    @backup.dump      = ActionDispatch::Http::UploadedFile.new(tempfile: tmp_file, filename: archive_filename)
    @backup.processed = true
    @backup.save!
  ensure
    tmp_file.close
    tmp_file.unlink
  end

  # Copies every stored media attachment file of the account into the
  # tar under its storage path. Attachments without a file path are
  # skipped.
  def dump_media_attachments!(tar)
    MediaAttachment.attached.where(account: account).reorder(nil).find_in_batches do |media_attachments|
      media_attachments.each do |m|
        next unless m.file&.path

        download_to_tar(tar, m.file, m.file.path)
      end

      GC.start
    end
  end

  # Writes the outbox collection built by #build_json! as outbox.json.
  def dump_outbox!(tar)
    json = Oj.dump(collection)

    tar.add_file_simple('outbox.json', 0o444, json.bytesize) do |io|
      io.write(json)
    end
  end

  # Writes the actor document as actor.json, rewriting avatar/header and
  # collection references to archive-relative paths, and copies the
  # avatar and header images into the tar when they exist.
  def dump_actor!(tar)
    actor = serialize(account, ActivityPub::ActorSerializer)

    actor[:icon][:url]  = 'avatar' + File.extname(actor[:icon][:url]) if actor[:icon]
    actor[:image][:url] = 'header' + File.extname(actor[:image][:url]) if actor[:image]
    actor[:outbox]      = 'outbox.json'
    actor[:likes]       = 'likes.json'
    actor[:bookmarks]   = 'bookmarks.json'

    download_to_tar(tar, account.avatar, 'avatar' + File.extname(account.avatar.path)) if account.avatar.exists?
    download_to_tar(tar, account.header, 'header' + File.extname(account.header.path)) if account.header.exists?

    json = Oj.dump(actor)

    tar.add_file_simple('actor.json', 0o444, json.bytesize) do |io|
      io.write(json)
    end
  end

  # Writes the URIs of every status the account has favourited as an
  # ordered collection in likes.json.
  def dump_likes!(tar)
    dump_status_index!(tar, 'likes.json', Status.reorder(nil).joins(:favourites).includes(:account).merge(account.favourites))
  end

  # Writes the URIs of every status the account has bookmarked as an
  # ordered collection in bookmarks.json.
  def dump_bookmarks!(tar)
    dump_status_index!(tar, 'bookmarks.json', Status.reorder(nil).joins(:bookmarks).includes(:account).merge(account.bookmarks))
  end

  # Shared implementation for #dump_likes! and #dump_bookmarks!: dumps
  # the URIs of the statuses in +scope+ as an ordered collection stored
  # under +filename+ inside the tar.
  def dump_status_index!(tar, filename, scope)
    index = serialize(ActivityPub::CollectionPresenter.new(id: filename, type: :ordered, size: 0, items: []), ActivityPub::CollectionSerializer)

    scope.find_in_batches do |statuses|
      statuses.each do |status|
        index[:totalItems] += 1
        index[:orderedItems] << ActivityPub::TagManager.instance.uri_for(status)
      end

      GC.start
    end

    json = Oj.dump(index)

    tar.add_file_simple(filename, 0o444, json.bytesize) do |io|
      io.write(json)
    end
  end

  # Presenter for the (initially empty) outbox collection; the items are
  # appended in #build_json!.
  def collection_presenter
    ActivityPub::CollectionPresenter.new(
      id: 'outbox.json',
      type: :ordered,
      size: account.statuses_count,
      items: []
    )
  end

  # Serializes +object+ with the given ActiveModelSerializers serializer
  # through the ActivityPub adapter, returning a plain hash.
  def serialize(object, serializer)
    ActiveModelSerializers::SerializableResource.new(
      object,
      serializer: serializer,
      adapter: ActivityPub::Adapter
    ).as_json
  end

  # Streams a Paperclip attachment into the tar in CHUNK_SIZE pieces.
  # A missing local file or an S3 networking error is logged and skipped
  # so one broken attachment does not abort the whole backup.
  def download_to_tar(tar, attachment, filename)
    adapter = Paperclip.io_adapters.for(attachment)

    tar.add_file_simple(filename, 0o444, adapter.size) do |io|
      while (buffer = adapter.read(CHUNK_SIZE))
        io.write(buffer)
      end
    end
  rescue Errno::ENOENT, Seahorse::Client::NetworkingError => e
    # Bug fix: was "#(unknown)" — not interpolation syntax, so the log
    # line never named the file that failed.
    Rails.logger.warn "Could not backup file #{filename}: #{e}"
  end
end
|