author     pdp8 <pdp8@pdp8.info>  2023-09-11 21:09:26 +0200
committer  pdp8 <pdp8@pdp8.info>  2023-09-11 21:09:26 +0200
commit     d635057cb576c5570c5ceba5945cc5339b0f41ab (patch)
tree       3baf2432690f221f67dc318a3fd5aa6b271c9961 /helpers.rb
parent     da017e7cd9394cb759ee74440c5fd25860063905 (diff)
new create format, outbox refactoring
Diffstat (limited to 'helpers.rb')
-rw-r--r--  helpers.rb  228
1 file changed, 161 insertions(+), 67 deletions(-)
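
This commit replaces the old save_activity/save_object pair with a single create_activity helper that builds the activity envelope, writes both activity and object under OUTBOX, maintains per-hashtag collections, and hands delivery to the new send_activity. A rough, hypothetical call site is sketched below; create_activity, ACTOR, FOLLOWERS_URL and the tag shape are taken from the diff, while the Note content and the hashtag href are placeholders.

# Hypothetical usage sketch (not part of this commit): publish a public Note.
object = {
  'type' => 'Note',
  'attributedTo' => ACTOR,
  'content' => 'Hello fediverse',
  # tag shape matches what create_activity inspects; href is a placeholder URL
  'tag' => [{ 'type' => 'Hashtag', 'name' => '#ruby', 'href' => 'https://example.net/tags/ruby' }]
}
to = ['https://www.w3.org/ns/activitystreams#Public', FOLLOWERS_URL]
create_activity 'Create', object, to
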
diff --git a/helpers.rb b/helpers.rb
index 75221f8..26bcd9d 100644
--- a/helpers.rb
+++ b/helpers.rb
@@ -5,73 +5,166 @@ helpers do
File.open(path, 'w+') { |f| f.puts item.to_json }
end
- # add date and id, save
- def save_activity(activity, box)
+ def create_activity(type, object, to)
date = Time.now.utc.iso8601
- activity['published'] ||= date # if box == OUTBOX
- basename = "#{activity['published']}_#{mention(activity['actor'])}.json"
- activity_rel_path = File.join(activity['type'].downcase, basename)
- activity_path = File.join(box[:dir], activity_rel_path)
- if box == OUTBOX
- # return unless activity['to'].include? 'https://www.w3.org/ns/activitystreams#Public' # save only public messages
-
- activity['id'] = File.join(box[:url], activity_rel_path)
- activity['object']['published'] = date unless activity['object'].is_a? String
- # save object
- save_object activity['object'], box if %w[Create Announce Update].include? activity['type']
+ rel_path = File.join(type.downcase, "#{date}.json")
+ activity = {
+ '@context' => 'https://www.w3.org/ns/activitystreams',
+ 'id' => File.join(OUTBOX[:url], rel_path),
+ 'type' => type,
+ 'actor' => ACTOR,
+ 'published' => date,
+ 'to' => to,
+ 'object' => object
+ }
+ activity_path = File.join(OUTBOX[:dir], rel_path)
+ save_item activity, activity_path
+
+ unless activity['object'].is_a? String
+ object_rel_path = File.join('object', object['type'].downcase, "#{date}.json")
+ object = activity['object']
+ object['@context'] = 'https://www.w3.org/ns/activitystreams'
+ object['id'] = File.join(OUTBOX[:url], object_rel_path)
+ object['published'] = date
+ save_item activity['object'], File.join(OUTBOX[:dir], object_rel_path)
+ if object['tag']
+ object['tag'].each do |tag|
+ next unless tag['type'] == 'Hashtag'
+
+ tag_path = File.join(TAGS[:dir], tag['name'].sub('#', '')) + '.json'
+ tag_collection = if File.exist? tag_path
+ JSON.load_file(tag_path)
+ else
+ {
+ '@context' => 'https://www.w3.org/ns/activitystreams',
+ 'id' => tag['href'],
+ 'type' => 'OrderedCollection',
+ 'totalItems' => 0,
+ 'orderedItems' => []
+ }
+ end
+ tag_collection['orderedItems'] << object['id']
+ tag_collection['totalItems'] = tag_collection['orderedItems'].size
+ File.open(tag_path, 'w+') do |f|
+ f.puts tag_collection.to_json
+ end
+ end
+ end
end
- # save activity
- FileUtils.mkdir_p File.dirname(activity_path)
- File.open(activity_path, 'w+') { |f| f.puts activity.to_json }
- activity_path
+ send_activity activity, activity_path
end
- def save_object(object, box)
- object = fetch(object) if object.is_a? String and object.match(/^http/)
- return unless object and object['type'] != 'Person'
- return if box == INBOX and object['id'] and File.readlines(VISITED, chomp: true).include? object['id']
+ def send_activity(activity, activity_path)
+ to = activity['to'].is_a?(String) ? [activity['to']] : activity['to'] # normalize recipients to an array
+ inboxes = []
+ to.uniq.each do |url|
+ next if [ACTOR, 'https://www.w3.org/ns/activitystreams#Public'].include? url
- object['@context'] = 'https://www.w3.org/ns/activitystreams'
- if object['attributedTo']
- basename = "#{object['published']}_#{mention(object['attributedTo'])}.json"
- else
- basename = "#{object['published']}.json"
- jj object
- end
- object_rel_path = File.join 'object', object['type'].downcase, basename
- object['id'] ||= File.join box[:url], object_rel_path # if box == OUTBOX
- object_path = File.join box[:dir], object_rel_path
- FileUtils.mkdir_p File.dirname(object_path)
- File.open(object_path, 'w+') { |f| f.puts object.to_json }
- if box == OUTBOX and object['tag']
- object['tag'].each do |tag|
- next unless tag['type'] == 'Hashtag'
-
- tag_path = File.join(TAGS[:dir], tag['name'].sub('#', '')) + '.json'
- tag_collection = if File.exist? tag_path
- JSON.load_file(tag_path)
- else
- {
- '@context' => 'https://www.w3.org/ns/activitystreams',
- 'id' => tag['href'],
- 'type' => 'OrderedCollection',
- 'totalItems' => 0,
- 'orderedItems' => []
- }
- end
- tag_collection['orderedItems'] << object['id']
- tag_collection['totalItems'] = tag_collection['orderedItems'].size
- File.open(tag_path, 'w+') do |f|
- f.puts tag_collection.to_json
+ if url == FOLLOWERS_URL
+ JSON.load_file(FOLLOWERS)['orderedItems'].each do |follower|
+ inboxes << actor_inbox(follower)
end
+ next
end
- elsif box == INBOX
- File.open(File.join(INBOX[:dir], 'visited'), 'a+') { |f| f.puts object['id'] }
+ inboxes << actor_inbox(url)
+ end
+
+ sha256 = OpenSSL::Digest.new('SHA256')
+ digest = "SHA-256=#{sha256.base64digest(activity)}"
+ keypair = OpenSSL::PKey::RSA.new(File.read('private.pem'))
+
+ inboxes.compact.uniq.each do |inbox|
+ uri = URI(inbox)
+ httpdate = Time.now.utc.httpdate
+ string = "(request-target): post #{uri.request_uri}\nhost: #{uri.host}\ndate: #{httpdate}\ndigest: #{digest}\ncontent-type: #{CONTENT_TYPE}"
+ signature = Base64.strict_encode64(keypair.sign(OpenSSL::Digest.new('SHA256'), string))
+ signed_header = "keyId=\"#{ACTOR}#main-key\",algorithm=\"rsa-sha256\",headers=\"(request-target) host date digest content-type\",signature=\"#{signature}\""
+
+ # Net::HTTP fails with OpenSSL error
+ curl(
+ "-X POST -H 'Host: #{uri.host}' -H 'Date: #{httpdate}' -H 'Digest: #{digest}' -H 'Signature: #{signed_header}' --data-binary '@#{activity_path}'", inbox
+ )
+ end
+ end
+
+ def actor_inbox(url)
+ actor = fetch url
+ return unless actor
+
+ if actor['endpoints'] and actor['endpoints']['sharedInbox']
+ actor['endpoints']['sharedInbox']
+ elsif actor['inbox']
+ actor['inbox']
end
- object
end
- def update_collection(path, objects, delete = false)
+ # # add date and id, save
+ # def save_activity(activity, box)
+ # date = Time.now.utc.iso8601
+ # activity['published'] ||= date # if box == OUTBOX
+ # basename = "#{activity['published']}_#{mention(activity['actor'])}.json"
+ # activity_rel_path = File.join(activity['type'].downcase, basename)
+ # activity_path = File.join(box[:dir], activity_rel_path)
+ # if box == OUTBOX
+ # # return unless activity['to'].include? 'https://www.w3.org/ns/activitystreams#Public' # save only public messages
+ #
+ # activity['id'] = File.join(box[:url], activity_rel_path)
+ # activity['object']['published'] = date unless activity['object'].is_a? String
+ # # save object
+ # save_object activity['object'], box if %w[Create Announce Update].include? activity['type']
+ # end
+ # # save activity
+ # FileUtils.mkdir_p File.dirname(activity_path)
+ # File.open(activity_path, 'w+') { |f| f.puts activity.to_json }
+ # activity_path
+ # end
+ #
+ # def save_object(object, box)
+ # object = fetch(object) if object.is_a? String and object.match(/^http/)
+ # return unless object and object['type'] != 'Person'
+ # return if box == INBOX and object['id'] and File.readlines(VISITED, chomp: true).include? object['id']
+ #
+ # object['@context'] = 'https://www.w3.org/ns/activitystreams'
+ # if object['attributedTo']
+ # basename = "#{object['published']}_#{mention(object['attributedTo'])}.json"
+ # else
+ # basename = "#{object['published']}.json"
+ # jj object
+ # end
+ # object_rel_path = File.join 'object', object['type'].downcase, basename
+ # object['id'] ||= File.join box[:url], object_rel_path # if box == OUTBOX
+ # object_path = File.join box[:dir], object_rel_path
+ # FileUtils.mkdir_p File.dirname(object_path)
+ # File.open(object_path, 'w+') { |f| f.puts object.to_json }
+ # if box == OUTBOX and object['tag']
+ # object['tag'].each do |tag|
+ # next unless tag['type'] == 'Hashtag'
+ #
+ # tag_path = File.join(TAGS[:dir], tag['name'].sub('#', '')) + '.json'
+ # tag_collection = if File.exist? tag_path
+ # JSON.load_file(tag_path)
+ # else
+ # {
+ # '@context' => 'https://www.w3.org/ns/activitystreams',
+ # 'id' => tag['href'],
+ # 'type' => 'OrderedCollection',
+ # 'totalItems' => 0,
+ # 'orderedItems' => []
+ # }
+ # end
+ # tag_collection['orderedItems'] << object['id']
+ # tag_collection['totalItems'] = tag_collection['orderedItems'].size
+ # File.open(tag_path, 'w+') do |f|
+ # f.puts tag_collection.to_json
+ # end
+ # end
+ # elsif box == INBOX
+ # File.open(File.join(INBOX[:dir], 'visited'), 'a+') { |f| f.puts object['id'] }
+ # end
+ # object
+ # end
+
+ def update_collection(path, objects, action = 'add')
objects = [objects] unless objects.is_a? Array
File.open(path, 'r+') do |f|
f.flock(File::LOCK_EX)
@@ -79,9 +172,9 @@ helpers do
collection = JSON.parse(json)
objects.each do |object|
id = object['id'] || object
- if delete
+ if action == 'delete'
collection['orderedItems'].delete_if { |o| o['id'] == id or o == id }
- else
+ elsif action == 'add'
ids = collection['orderedItems'].collect { |i| i['id'] }
collection['orderedItems'] << object unless ids.include?(id) or collection['orderedItems'].include?(id)
end
@@ -175,16 +268,17 @@ helpers do
def find_file(id)
Dir[File.join('*', 'object', '*', '*.json')].find do |f|
+ # Dir[File.join('*box', '**', '*.json')].find do |f|
JSON.load_file(f)['id'] == id
end
end
- def find_id(id, return_filename = true)
- Dir[File.join('**', '*.json')].find do |f|
- content = JSON.load_file(f)
- if content['id'] == id
- return_filename ? f : content
- end
- end
- end
+ # def find_id(id, return_filename = true)
+ # Dir[File.join('**', '*.json')].find do |f|
+ # content = JSON.load_file(f)
+ # if content['id'] == id
+ # return_filename ? f : content
+ # end
+ # end
+ # end
end
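
For context, send_activity signs each delivery with an HTTP Signature over (request-target), host, date, digest and content-type; the Content-Type header itself is presumably supplied by the curl helper defined elsewhere, since it is part of the signed string. Below is a hedged sketch of how a receiving server could rebuild and verify that signing string; the verifier, its argument names and the lowercase header keys are assumptions, not code from this repository.

# Hypothetical verifier sketch: headers is a hash keyed by lowercased header
# names ('host', 'date', 'digest', 'content-type', 'signature'), request_path
# is the inbox path, public_key_pem is the sender's published public key.
require 'openssl'
require 'base64'

def signature_valid?(headers, request_path, public_key_pem)
  # parse keyId="...",algorithm="...",headers="...",signature="..." into a hash
  params = headers['signature'].split(',').to_h do |pair|
    k, v = pair.split('=', 2)
    [k, v.delete('"')]
  end
  # rebuild the signing string in the exact order the sender declared
  string = params['headers'].split(' ').map do |h|
    h == '(request-target)' ? "(request-target): post #{request_path}" : "#{h}: #{headers[h]}"
  end.join("\n")
  key = OpenSSL::PKey::RSA.new(public_key_pem)
  key.verify(OpenSSL::Digest.new('SHA256'), Base64.decode64(params['signature']), string)
end
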