 client.rb  |  37
 create.rb  | 117
 helpers.rb | 228
 server.rb  | 107
 4 files changed, 284 insertions(+), 205 deletions(-)
diff --git a/client.rb b/client.rb
index ca5a338..8af00c9 100644
--- a/client.rb
+++ b/client.rb
@@ -16,24 +16,10 @@ post '/delete' do
200
end
-post '/undo' do # TODO: generalize for announce
- protected!
- Dir[File.join('outbox', '*', '*.json')].each do |f|
- activity = JSON.load_file(f)
- next unless activity['id'] == params['id']
-
- object_file = find_file activity['object']['id']
- outbox 'Undo', params['id'], activity['to']
- FileUtils.rm(object_file)
- FileUtils.rm(f)
- end
- 200
-end
-
post '/follow' do
protected!
params['id'] = actor params['mention'] if params['mention']
- outbox 'Follow', params['id'], [params['id']]
+ create_activity 'Follow', params['id'], [params['id']]
200
end
@@ -48,8 +34,8 @@ post '/unfollow' do
"actor": 'https://social.pdp8.info/pdp8',
"object": params['id']
}
- outbox 'Undo', activity, [params['id']]
- update_collection FOLLOWING, params['id'], true
+ create_activity 'Undo', activity, [params['id']]
+ update_collection FOLLOWING, params['id'], 'delete'
200
end
@@ -60,13 +46,28 @@ post '/share' do # TODO
recipients = ['https://www.w3.org/ns/activitystreams#Public']
recipients += JSON.load_file(FOLLOWERS)['orderedItems']
recipients << object['attributedTo']
- outbox 'Announce', object, recipients
+ create_activity 'Announce', object, recipients
dest = src.sub('inbox/', 'outbox/')
FileUtils.mkdir_p File.dirname(dest)
FileUtils.mv src, dest
200
end
+# post '/undo' do # TODO: generalize for announce
+# protected!
+# activity_file = find_file(params['id'])
+# Dir[File.join('outbox', '*', '*.json')].each do |f|
+# activity = JSON.load_file(f)
+# next unless activity['id'] == params['id']
+#
+# object_file = find_file activity['object']['id']
+# create_activity 'Undo', params['id'], activity['to']
+# FileUtils.rm(object_file)
+# FileUtils.rm(f)
+# end
+# 200
+# end
+
post '/login' do
session['client'] = (OpenSSL::Digest::SHA256.base64digest(params['secret']) == File.read('.digest').chomp)
200
diff --git a/create.rb b/create.rb
index 12d9a27..9241d2b 100644
--- a/create.rb
+++ b/create.rb
@@ -1,4 +1,11 @@
-post '/create' do # TODO
+TO_REGEXP = /^to:\s+/i
+REPLY_REGEXP = /^inreplyto:\s+/i
+ATTACH_REGEXP = /^attach:\s+/i
+URL_REGEXP = %r{\Ahttps?://\S+\Z}
+MENTION_REGEXP = /\A@\w+@\S+\Z/
+HASHTAG_REGEXP = /\A#\w+\Z/
+
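+# The request body is plain text parsed line by line against the regexps
+# above. A hypothetical body (addresses, reply target and attachment URL are
+# illustrative) could look like:
+#
+#   to: public @friend@example.org
+#   inreplyto: https://example.org/notes/1
+#   attach: https://example.org/media/photo.jpg
+#
+#   Hello #fediverse, more at https://example.org
+#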
+post '/create' do
protected!
request.body.rewind # in case someone already read it
@@ -8,72 +15,61 @@ post '/create' do # TODO
tag = []
attachment = []
- url_regexp = %r{\Ahttps?://\S+\Z}
- mention_regexp = /\A@\w+@\S+\Z/
- hashtag_regexp = /\A#\w+\Z/
-
- lines = request.body.read.each_line.to_a
- lines.each.with_index do |line, i|
+ request.body.read.each_line do |line|
line.chomp!
- if i == 0
- to = line.split(/\s+/).collect do |word|
+ case line
+ when TO_REGEXP
+ line.sub(TO_REGEXP, '').split(/\s+/).each do |word|
case word
when 'public'
- ['https://www.w3.org/ns/activitystreams#Public', FOLLOWERS_URL]
- when mention_regexp
- actor word
- when url_regexp
- word
+ to += ['https://www.w3.org/ns/activitystreams#Public', FOLLOWERS_URL]
+ when MENTION_REGEXP
+ to << actor(word)
+ when URL_REGEXP
+ to << word
end
- end.flatten
- elsif i == 1 and line.match url_regexp
- inReplyTo = line
- elsif line == ''
- content << '<p>'
- elsif line.match(/\A==\Z/)
- attachment = lines[i + 1..-1].collect do |url|
- url.chomp!
- url, name = url.split(/\s+/, 2)
- doc = {
- 'type' => 'Document',
- 'mediaType' => media_type(url),
- 'url' => url
- }
- doc['name'] = name if name
- doc
end
- break
- else
- # create links
+ when REPLY_REGEXP
+ inReplyTo = line.sub(REPLY_REGEXP, '')
+ when ATTACH_REGEXP
+ url = line.sub(ATTACH_REGEXP, '')
+ attachment << {
+ 'type' => 'Document',
+ 'mediaType' => media_type(url),
+ 'url' => url
+ }
+ when ''
+ content << '<p>'
+ else # create links
# single quotes in html invalidate digest, reason unknown
- line.split(/\s+/).grep(url_regexp).each { |u| line.gsub!(u, "<a href=\"#{u}\">#{u}</a>") }
- line.split(/\s+/).grep(URI::MailTo::EMAIL_REGEXP).each { |m| line.gsub!(m, "<a href=\"mailto:#{m}\">#{m}</a>") }
- tags = line.split(/\s+/).grep(hashtag_regexp)
- tags.each do |name|
- tag_url = File.join(TAGS[:url], name.sub('#', ''))
- tag << {
- 'type' => 'Hashtag',
- 'href' => tag_url,
- 'name' => name
- }
- # single quotes in html invalidate digest, reason unknown
- line.gsub!(name, "<a href=\"#{tag_url}\">#{name}</a>")
- end
- mentions = line.split(/\s+/).grep(mention_regexp)
- mentions.each do |mention|
- actor = actor(mention)
- tag << {
- 'type' => 'Mention',
- 'href' => actor,
- 'name' => mention
- }
- # single quotes in html invalidate digest, reason unknown
- line.gsub!(mention, "<a href=\"#{actor}\">#{mention}</a>")
+ content << line.split(/\s+/).collect do |word|
+ case word
+ when URL_REGEXP
+ "<a href=\"#{word}\">#{word}</a>"
+ when URI::MailTo::EMAIL_REGEXP
+ "<a href=\"mailto:#{word}\">#{word}</a>"
+ when HASHTAG_REGEXP
+ tag_url = File.join('https://social.pdp8.info', 'tags', word.sub('#', ''))
+ tag << {
+ 'type' => 'Hashtag',
+ 'href' => tag_url,
+ 'name' => word
+ }
+ "<a href=\"#{tag_url}\">#{word}</a>"
+ when MENTION_REGEXP
+ actor = actor(word)
+ tag << {
+ 'type' => 'Mention',
+ 'href' => actor,
+ 'name' => word
+ }
+ "<a href=\"#{actor}\">#{word}</a>"
+ else
+ word
+ end
end
- content << '<br>' + line
end
end
- content.shift while content[0] == '<p>'
object = {
'to' => to,
@@ -84,10 +80,9 @@ post '/create' do # TODO
object['inReplyTo'] = inReplyTo unless inReplyTo.empty?
object['attachment'] = attachment unless attachment.empty?
object['tag'] = tag unless tag.empty?
-
- p 'outbox'
jj object
- # outbox 'Create', object, to
+
+ # create_activity 'Create', object, to
200
end
diff --git a/helpers.rb b/helpers.rb
index 75221f8..26bcd9d 100644
--- a/helpers.rb
+++ b/helpers.rb
@@ -5,73 +5,166 @@ helpers do
File.open(path, 'w+') { |f| f.puts item.to_json }
end
- # add date and id, save
- def save_activity(activity, box)
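+ # Build an activity of the given type around object, save it (and, for
+ # non-string objects, the object itself) under the outbox directory, then
+ # deliver it via send_activity.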
+ def create_activity(type, object, to)
date = Time.now.utc.iso8601
- activity['published'] ||= date # if box == OUTBOX
- basename = "#{activity['published']}_#{mention(activity['actor'])}.json"
- activity_rel_path = File.join(activity['type'].downcase, basename)
- activity_path = File.join(box[:dir], activity_rel_path)
- if box == OUTBOX
- # return unless activity['to'].include? 'https://www.w3.org/ns/activitystreams#Public' # save only public messages
-
- activity['id'] = File.join(box[:url], activity_rel_path)
- activity['object']['published'] = date unless activity['object'].is_a? String
- # save object
- save_object activity['object'], box if %w[Create Announce Update].include? activity['type']
+ rel_path = File.join(type.downcase, "#{date}.json")
+ activity = {
+ '@context' => 'https://www.w3.org/ns/activitystreams',
+ 'id' => File.join(OUTBOX[:url], rel_path),
+ 'type' => type,
+ 'actor' => ACTOR,
+ 'published' => date,
+ 'to' => to,
+ 'object' => object
+ }
+ activity_path = File.join(OUTBOX[:dir], rel_path)
+ save_item activity, activity_path
+
+ unless activity['object'].is_a? String
+ object_rel_path = File.join('object', object['type'].downcase, "#{date}.json")
+ object = activity['object']
+ object['@context'] = 'https://www.w3.org/ns/activitystreams'
+ object['id'] = File.join(OUTBOX[:url], object_rel_path)
+ object['published'] = date
+ save_item activity['object'], File.join(OUTBOX[:dir], object_rel_path)
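+ # Hashtags carried in the object are also appended to per-tag
+ # OrderedCollection files under TAGS[:dir].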
+ if object['tag']
+ object['tag'].each do |tag|
+ next unless tag['type'] == 'Hashtag'
+
+ tag_path = File.join(TAGS[:dir], tag['name'].sub('#', '')) + '.json'
+ tag_collection = if File.exist? tag_path
+ JSON.load_file(tag_path)
+ else
+ {
+ '@context' => 'https://www.w3.org/ns/activitystreams',
+ 'id' => tag['href'],
+ 'type' => 'OrderedCollection',
+ 'totalItems' => 0,
+ 'orderedItems' => []
+ }
+ end
+ tag_collection['orderedItems'] << object['id']
+ tag_collection['totalItems'] = tag_collection['orderedItems'].size
+ File.open(tag_path, 'w+') do |f|
+ f.puts tag_collection.to_json
+ end
+ end
+ end
end
- # save activity
- FileUtils.mkdir_p File.dirname(activity_path)
- File.open(activity_path, 'w+') { |f| f.puts activity.to_json }
- activity_path
+ send_activity activity, activity_path
end
- def save_object(object, box)
- object = fetch(object) if object.is_a? String and object.match(/^http/)
- return unless object and object['type'] != 'Person'
- return if box == INBOX and object['id'] and File.readlines(VISITED, chomp: true).include? object['id']
+ def send_activity(activity, activity_path)
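+ # Resolve every addressee to an inbox (expanding the followers collection,
+ # skipping the actor itself and the Public collection), then POST the
+ # signed activity file to each inbox via curl.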
+ to = activity['to'].is_a?(String) ? [activity['to']] : activity['to']
+ inboxes = []
+ to.uniq.each do |url|
+ next if [ACTOR, 'https://www.w3.org/ns/activitystreams#Public'].include? url
- object['@context'] = 'https://www.w3.org/ns/activitystreams'
- if object['attributedTo']
- basename = "#{object['published']}_#{mention(object['attributedTo'])}.json"
- else
- basename = "#{object['published']}.json"
- jj object
- end
- object_rel_path = File.join 'object', object['type'].downcase, basename
- object['id'] ||= File.join box[:url], object_rel_path # if box == OUTBOX
- object_path = File.join box[:dir], object_rel_path
- FileUtils.mkdir_p File.dirname(object_path)
- File.open(object_path, 'w+') { |f| f.puts object.to_json }
- if box == OUTBOX and object['tag']
- object['tag'].each do |tag|
- next unless tag['type'] == 'Hashtag'
-
- tag_path = File.join(TAGS[:dir], tag['name'].sub('#', '')) + '.json'
- tag_collection = if File.exist? tag_path
- JSON.load_file(tag_path)
- else
- {
- '@context' => 'https://www.w3.org/ns/activitystreams',
- 'id' => tag['href'],
- 'type' => 'OrderedCollection',
- 'totalItems' => 0,
- 'orderedItems' => []
- }
- end
- tag_collection['orderedItems'] << object['id']
- tag_collection['totalItems'] = tag_collection['orderedItems'].size
- File.open(tag_path, 'w+') do |f|
- f.puts tag_collection.to_json
+ if url == FOLLOWERS_URL
+ JSON.load_file(FOLLOWERS)['orderedItems'].each do |follower|
+ inboxes << actor_inbox(follower)
end
+ next
end
- elsif box == INBOX
- File.open(File.join(INBOX[:dir], 'visited'), 'a+') { |f| f.puts object['id'] }
+ inboxes << actor_inbox(url)
+ end
+
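+ # HTTP Signatures: the Digest header covers the request body (the saved
+ # activity file), and the signing string covers (request-target), host,
+ # date, digest and content-type, signed with the actor's RSA key.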
+ sha256 = OpenSSL::Digest.new('SHA256')
+ digest = "SHA-256=#{sha256.base64digest(File.read(activity_path))}"
+ keypair = OpenSSL::PKey::RSA.new(File.read('private.pem'))
+
+ inboxes.compact.uniq.each do |inbox|
+ uri = URI(inbox)
+ httpdate = Time.now.utc.httpdate
+ string = "(request-target): post #{uri.request_uri}\nhost: #{uri.host}\ndate: #{httpdate}\ndigest: #{digest}\ncontent-type: #{CONTENT_TYPE}"
+ signature = Base64.strict_encode64(keypair.sign(OpenSSL::Digest.new('SHA256'), string))
+ signed_header = "keyId=\"#{ACTOR}#main-key\",algorithm=\"rsa-sha256\",headers=\"(request-target) host date digest content-type\",signature=\"#{signature}\""
+
+ # Net::HTTP fails with OpenSSL error
+ curl(
+ "-X POST -H 'Host: #{uri.host}' -H 'Date: #{httpdate}' -H 'Digest: #{digest}' -H 'Signature: #{signed_header}' --data-binary '@#{activity_path}'", inbox
+ )
+ end
+ end
+
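+ # Fetch the remote actor document and return its sharedInbox endpoint when
+ # advertised, falling back to the personal inbox.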
+ def actor_inbox(url)
+ actor = fetch url
+ return unless actor
+
+ if actor['endpoints'] and actor['endpoints']['sharedInbox']
+ actor['endpoints']['sharedInbox']
+ elsif actor['inbox']
+ actor['inbox']
end
- object
end
- def update_collection(path, objects, delete = false)
+ # # add date and id, save
+ # def save_activity(activity, box)
+ # date = Time.now.utc.iso8601
+ # activity['published'] ||= date # if box == OUTBOX
+ # basename = "#{activity['published']}_#{mention(activity['actor'])}.json"
+ # activity_rel_path = File.join(activity['type'].downcase, basename)
+ # activity_path = File.join(box[:dir], activity_rel_path)
+ # if box == OUTBOX
+ # # return unless activity['to'].include? 'https://www.w3.org/ns/activitystreams#Public' # save only public messages
+ #
+ # activity['id'] = File.join(box[:url], activity_rel_path)
+ # activity['object']['published'] = date unless activity['object'].is_a? String
+ # # save object
+ # save_object activity['object'], box if %w[Create Announce Update].include? activity['type']
+ # end
+ # # save activity
+ # FileUtils.mkdir_p File.dirname(activity_path)
+ # File.open(activity_path, 'w+') { |f| f.puts activity.to_json }
+ # activity_path
+ # end
+ #
+ # def save_object(object, box)
+ # object = fetch(object) if object.is_a? String and object.match(/^http/)
+ # return unless object and object['type'] != 'Person'
+ # return if box == INBOX and object['id'] and File.readlines(VISITED, chomp: true).include? object['id']
+ #
+ # object['@context'] = 'https://www.w3.org/ns/activitystreams'
+ # if object['attributedTo']
+ # basename = "#{object['published']}_#{mention(object['attributedTo'])}.json"
+ # else
+ # basename = "#{object['published']}.json"
+ # jj object
+ # end
+ # object_rel_path = File.join 'object', object['type'].downcase, basename
+ # object['id'] ||= File.join box[:url], object_rel_path # if box == OUTBOX
+ # object_path = File.join box[:dir], object_rel_path
+ # FileUtils.mkdir_p File.dirname(object_path)
+ # File.open(object_path, 'w+') { |f| f.puts object.to_json }
+ # if box == OUTBOX and object['tag']
+ # object['tag'].each do |tag|
+ # next unless tag['type'] == 'Hashtag'
+ #
+ # tag_path = File.join(TAGS[:dir], tag['name'].sub('#', '')) + '.json'
+ # tag_collection = if File.exist? tag_path
+ # JSON.load_file(tag_path)
+ # else
+ # {
+ # '@context' => 'https://www.w3.org/ns/activitystreams',
+ # 'id' => tag['href'],
+ # 'type' => 'OrderedCollection',
+ # 'totalItems' => 0,
+ # 'orderedItems' => []
+ # }
+ # end
+ # tag_collection['orderedItems'] << object['id']
+ # tag_collection['totalItems'] = tag_collection['orderedItems'].size
+ # File.open(tag_path, 'w+') do |f|
+ # f.puts tag_collection.to_json
+ # end
+ # end
+ # elsif box == INBOX
+ # File.open(File.join(INBOX[:dir], 'visited'), 'a+') { |f| f.puts object['id'] }
+ # end
+ # object
+ # end
+
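+ # Add objects to or remove them from an OrderedCollection JSON file while
+ # holding an exclusive lock on it.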
+ def update_collection(path, objects, action = 'add')
objects = [objects] unless objects.is_a? Array
File.open(path, 'r+') do |f|
f.flock(File::LOCK_EX)
@@ -79,9 +172,9 @@ helpers do
collection = JSON.parse(json)
objects.each do |object|
id = object['id'] || object
- if delete
+ if action == 'delete'
collection['orderedItems'].delete_if { |o| o['id'] == id or o == id }
- else
+ elsif action == 'add'
ids = collection['orderedItems'].collect { |i| i['id'] }
collection['orderedItems'] << object unless ids.include?(id) or collection['orderedItems'].include?(id)
end
@@ -175,16 +268,17 @@ helpers do
def find_file(id)
Dir[File.join('*', 'object', '*', '*.json')].find do |f|
+ # Dir[File.join('*box', '**', '*.json')].find do |f|
JSON.load_file(f)['id'] == id
end
end
- def find_id(id, return_filename = true)
- Dir[File.join('**', '*.json')].find do |f|
- content = JSON.load_file(f)
- if content['id'] == id
- return_filename ? f : content
- end
- end
- end
+ # def find_id(id, return_filename = true)
+ # Dir[File.join('**', '*.json')].find do |f|
+ # content = JSON.load_file(f)
+ # if content['id'] == id
+ # return_filename ? f : content
+ # end
+ # end
+ # end
end
diff --git a/server.rb b/server.rb
index f44e8f0..2bcb102 100644
--- a/server.rb
+++ b/server.rb
@@ -5,7 +5,7 @@ post '/inbox' do
begin
@activity = JSON.parse @body
rescue StandardError => e
- p @body
+ p e, @body
halt 400
end
# deleted actors return 403 => verification error
@@ -80,7 +80,7 @@ helpers do
def follow
update_collection FOLLOWERS, @activity['actor']
- outbox 'Accept', @activity, [@activity['actor']]
+ create_activity 'Accept', @activity, [@activity['actor']]
end
def accept
@@ -96,7 +96,7 @@ helpers do
def undo
case @activity['object']['type']
when 'Follow'
- update_collection FOLLOWERS, @activity['object']['actor'], true
+ update_collection FOLLOWERS, @activity['object']['actor'], 'delete'
when 'Create', 'Announce'
file = find_file @activity['object']['object']
FileUtils.rm(file) if file
@@ -119,7 +119,7 @@ helpers do
end
def move
- outbox 'Follow', @activity['target'], [@activity['target']] if @activity['actor'] == @activity['object']
+ create_activity 'Follow', @activity['target'], [@activity['target']] if @activity['actor'] == @activity['object']
end
def handle_activity
@@ -187,59 +187,48 @@ helpers do
halt 403 unless key.verify(OpenSSL::Digest.new('SHA256'), signature, comparison)
end
- def actor_inbox(url)
- actor = fetch url
- return unless actor
-
- if actor['endpoints'] and actor['endpoints']['sharedInbox']
- actor['endpoints']['sharedInbox']
- elsif actor['inbox']
- actor['inbox']
- end
- end
-
- def outbox(type, object, to) # https://github.com/mastodon/mastodon/blob/main/app/lib/request.rb
- to = [to] if to.is_a?(String)
- inboxes = []
- to.uniq.each do |url|
- next if [ACTOR, 'https://www.w3.org/ns/activitystreams#Public'].include? url
-
- if url == FOLLOWERS_URL
- JSON.load_file(FOLLOWERS)['orderedItems'].each do |follower|
- inboxes << actor_inbox(follower)
- end
- next
- end
- inboxes << actor_inbox(url)
- end
-
- # add date and id, save
- activity_path = save_activity({
- '@context' => 'https://www.w3.org/ns/activitystreams',
- 'type' => type,
- 'actor' => ACTOR,
- 'object' => object,
- 'to' => to
- }, OUTBOX)
-
- # p activity_path
- body = File.read(activity_path)
- sha256 = OpenSSL::Digest.new('SHA256')
- digest = "SHA-256=#{sha256.base64digest(body)}"
- keypair = OpenSSL::PKey::RSA.new(File.read('private.pem'))
-
- inboxes.compact.uniq.each do |inbox|
- uri = URI(inbox)
- httpdate = Time.now.utc.httpdate
- string = "(request-target): post #{uri.request_uri}\nhost: #{uri.host}\ndate: #{httpdate}\ndigest: #{digest}\ncontent-type: #{CONTENT_TYPE}"
- signature = Base64.strict_encode64(keypair.sign(OpenSSL::Digest.new('SHA256'), string))
- signed_header = "keyId=\"#{ACTOR}#main-key\",algorithm=\"rsa-sha256\",headers=\"(request-target) host date digest content-type\",signature=\"#{signature}\""
-
- # Net::HTTP fails with OpenSSL error
- curl(
- "-X POST -H 'Host: #{uri.host}' -H 'Date: #{httpdate}' -H 'Digest: #{digest}' -H 'Signature: #{signed_header}' --data-binary '@#{activity_path}'", inbox
- )
- end
- activity_path
- end
+ # def outbox(type, object, to) # https://github.com/mastodon/mastodon/blob/main/app/lib/request.rb
+ # to = [to] if to.is_a?(String)
+ # inboxes = []
+ # to.uniq.each do |url|
+ # next if [ACTOR, 'https://www.w3.org/ns/activitystreams#Public'].include? url
+ #
+ # if url == FOLLOWERS_URL
+ # JSON.load_file(FOLLOWERS)['orderedItems'].each do |follower|
+ # inboxes << actor_inbox(follower)
+ # end
+ # next
+ # end
+ # inboxes << actor_inbox(url)
+ # end
+ #
+ # # add date and id, save
+ # activity_path = save_activity({
+ # '@context' => 'https://www.w3.org/ns/activitystreams',
+ # 'type' => type,
+ # 'actor' => ACTOR,
+ # 'object' => object,
+ # 'to' => to
+ # }, OUTBOX)
+ #
+ # # p activity_path
+ # body = File.read(activity_path)
+ # sha256 = OpenSSL::Digest.new('SHA256')
+ # digest = "SHA-256=#{sha256.base64digest(body)}"
+ # keypair = OpenSSL::PKey::RSA.new(File.read('private.pem'))
+ #
+ # inboxes.compact.uniq.each do |inbox|
+ # uri = URI(inbox)
+ # httpdate = Time.now.utc.httpdate
+ # string = "(request-target): post #{uri.request_uri}\nhost: #{uri.host}\ndate: #{httpdate}\ndigest: #{digest}\ncontent-type: #{CONTENT_TYPE}"
+ # signature = Base64.strict_encode64(keypair.sign(OpenSSL::Digest.new('SHA256'), string))
+ # signed_header = "keyId=\"#{ACTOR}#main-key\",algorithm=\"rsa-sha256\",headers=\"(request-target) host date digest content-type\",signature=\"#{signature}\""
+ #
+ # # Net::HTTP fails with OpenSSL error
+ # curl(
+ # "-X POST -H 'Host: #{uri.host}' -H 'Date: #{httpdate}' -H 'Digest: #{digest}' -H 'Signature: #{signed_header}' --data-binary '@#{activity_path}'", inbox
+ # )
+ # end
+ # activity_path
+ # end
end