import os.path
import sys
import feedparser
from mastodon import Mastodon
import json
import requests
import re
import sqlite3
from datetime import datetime, date, time, timedelta
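
# Expected arguments: twitter_account mastodon_login mastodon_passwd [mastodon_instance]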
if len(sys.argv) < 4:
    print("Usage: python3 tootbot.py twitter_account mastodon_login mastodon_passwd [mastodon_instance]")
    sys.exit(1)
# sqlite db to store processed tweets (and corresponding toots ids)
sql = sqlite3.connect('tootbot.db')
db = sql.cursor()
db.execute('''CREATE TABLE IF NOT EXISTS tweets (tweet text, toot text, twitter text, mastodon text, instance text)''')
if len(sys.argv) > 4:
    instance = sys.argv[4]
else:
    instance = 'amicale.net'

twitter = sys.argv[1]
mastodon = sys.argv[2]
passwd = sys.argv[3]
# Create application if it does not exist
if not os.path.isfile(instance+'.secret'):
    if Mastodon.create_app(
        'tootbot',
        api_base_url='https://'+instance,
        to_file=instance+'.secret'
    ):
        print('tootbot app created on instance '+instance)
    else:
        print('failed to create app on instance '+instance)
        sys.exit(1)
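
# Log in with the registered app; the access token is saved to <mastodon_login>.secret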
try:
    mastodon_api = Mastodon(
        client_id=instance+'.secret',
        api_base_url='https://'+instance
    )
    mastodon_api.log_in(
        username=mastodon,
        password=passwd,
        scopes=['read', 'write'],
        to_file=mastodon+".secret"
    )
except Exception:
    print("ERROR: First Login Failed!")
    sys.exit(1)
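
# Fetch the account's latest tweets as an RSS feed via twitrss.me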
d = feedparser.parse('http://twitrss.me/twitter_user_to_rss/?user='+twitter)

for t in reversed(d.entries):
    # check if this tweet has been processed
    db.execute('SELECT * FROM tweets WHERE tweet = ? AND twitter = ? AND mastodon = ? AND instance = ?', (t.id, twitter, mastodon, instance))
    last = db.fetchone()

    # process only unprocessed tweets less than 1 day old
    if last is None and (datetime.now() - datetime(t.published_parsed.tm_year, t.published_parsed.tm_mon, t.published_parsed.tm_mday, t.published_parsed.tm_hour, t.published_parsed.tm_min, t.published_parsed.tm_sec) < timedelta(days=1)):
        #h = BeautifulSoup(t.summary_detail.value, "html.parser")
        c = t.title

        toot_media = []
        # get the pictures...
        for p in re.finditer(r"https://pbs.twimg.com/[^ \xa0\"]*", t.summary):
            media = requests.get(p.group(0))
            media_posted = mastodon_api.media_post(media.content, mime_type=media.headers.get('content-type'))
            toot_media.append(media_posted['id'])
        # replace t.co link by original URL
        m = re.search(r"http[^ \xa0]*", c)
        if m is not None:
            l = m.group(0)
            r = requests.get(l, allow_redirects=False)
            if r.status_code in {301, 302}:
                c = c.replace(l, r.headers.get('Location'))

        # remove pic.twitter.com links
        m = re.search(r"pic.twitter.com[^ \xa0]*", c)
        if m is not None:
            l = m.group(0)
            c = c.replace(l, '')

        # remove ellipsis
        c = c.replace('\xa0…', '')
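
        # post the toot with any attached media, then remember the tweet/toot pair in the db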
        if toot_media is not None:
            toot = mastodon_api.status_post(c, in_reply_to_id=None, media_ids=toot_media, sensitive=False, visibility='public', spoiler_text=None)
            if "id" in toot:
                db.execute("INSERT INTO tweets VALUES ( ? , ? , ? , ? , ? )",
                           (t.id, toot["id"], twitter, mastodon, instance))
                sql.commit()
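
# Example invocation (hypothetical account and instance names):
#   python3 tootbot.py some_twitter_user login@example.com s3cret mastodon.social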