输出新闻内容(beta)

This commit is contained in:
qcminecraft 2021-02-06 11:58:25 +08:00
parent 83a0aeea34
commit b509684646
3 changed files with 16 additions and 4 deletions

View File

@@ -13,6 +13,9 @@ class Misskey:
         print("Creating new post to", self.baseurl, ":", content)
         req_url = self.baseurl + "/api/notes/create"
         body = {
+            "noExtractMentions": True,
+            "noExtractHashtags": True,
+            "noExtractEmojis": True,
             "visibility": visibility,
             "text": content,
             "localOnly": channel != "",

View File

@@ -6,7 +6,7 @@
     },
     "NHK": {
         "rss_source": "https://rsshub.app/nhk/news_web_easy",
-        "identity": "misskey.io",
+        "identity": "NHK_Easybot@x61.uk",
         "extra_content": ""
     },
     "SoliDot": {
"SoliDot": { "SoliDot": {
@@ -23,5 +23,10 @@
         "rss_source": "https://rsshub.app/nikkei/index",
         "identity": "NIKKEI@x61.uk",
         "extra_content": "#News"
+    },
+    "Asahi": {
+        "rss_source": "https://rsshub.app/asahichinese-j/whatsnew",
+        "identity": "AsahiBot@x61.uk",
+        "extra_content": "#News"
     }
 }

View File

@@ -39,7 +39,7 @@ def spider(rule_name, rss_url):
         return False
     result = xmltodict.parse(fetch.content)
     c.execute('INSERT INTO "main"."spider_log" ("rule_name", "rss_url", "result_json", "timestamp") '
-              'VALUES (?, ?, ?, ?)', (rule_name, rss_url, json.dumps(result), time.time()))
+              'VALUES (?, ?, ?, ?)', (rule_name, rss_url, "{}", time.time()))
     item_list = result['rss']['channel']['item']
     for i in item_list:
         unique = c.execute('SELECT * FROM "main"."result" WHERE "title" = ? LIMIT 0,1', (i['title'],)).fetchone()
@@ -49,7 +49,7 @@ def spider(rule_name, rss_url):
             print("Skip: ", title)
             continue
         print("Got: ", title)
-        desc = i['description'].replace("<blockquote>", "").replace("</blockquote>", "")
+        desc = i['description'].replace("<blockquote>", "<i>").replace("</blockquote>", "</i>")
         c.execute('INSERT INTO "main"."result" ("rule_name", "url", "title", "description", "timestamp")'
                   ' VALUES (?, ?, ?, ?, ?)', (rule_name, i['link'], title, desc, time.time()))
@@ -81,7 +81,11 @@ if __name__ == '__main__':
             if not(r is None):
                 res = c.execute('UPDATE "main"."result" SET "post_time" = ? WHERE rowid = ?', (time.time(), r[0]))
                 if not (res is None):
-                    content = r[3]+"\n<"+r[2]+">\n\n"+rules[key]['extra_content']
+                    reg = re.compile('<[^>]*>')
+                    desc = reg.sub('', r[4]).replace('\n\n', '\n').replace(' ', '').replace('\n\n\n', '')
+                    content = "**"+r[3]+"**\n\n"+desc+"\n<"+r[2]+">\n"+rules[key]['extra_content']
+                    if Misskey.debug:
+                        config[name]['visibility'] = "specified"
                     Misskey.post(self=Misskey,
                                  content=content,
                                  i=config[name]['token'], visibility=config[name]['visibility'])