Merging Instances Repo with This

ManeraKai 2022-02-16 21:45:32 +03:00
parent 7bd532609f
commit 7c80a3e3d7
2 changed files with 524 additions and 0 deletions

instances/data.json (new file, 329 lines added)

@@ -0,0 +1,329 @@
{
"invidious": {
"normal": [
"https://yewtu.be",
"https://invidious.snopyta.org",
"https://vid.puffyan.us",
"https://invidious.kavin.rocks",
"https://invidio.xamh.de",
"https://inv.riverside.rocks",
"https://invidious-us.kavin.rocks",
"https://inv.cthd.icu",
"https://yt.artemislena.eu",
"https://invidious.flokinet.to",
"https://youtube.076.ne.jp",
"https://invidious.privacy.gd",
"https://invidious.osi.kr",
"https://invidious.namazso.eu"
],
"onion": [
"http://c7hqkpkpemu6e7emz5b4vyz7idjgdvgaaa3dyimmeojqbgpea3xqjoid.onion",
"http://w6ijuptxiku4xpnnaetxvnkc5vqcdu7mgns2u77qefoixi63vbvnpnqd.onion",
"http://kbjggqkzv65ivcqj6bumvp337z6264huv5kpkwuv6gu5yjiskvan7fad.onion",
"http://grwp24hodrefzvjjuccrkw3mjq4tzhaaq32amf33dzpmuxe7ilepcmad.onion",
"http://hpniueoejy4opn7bc4ftgazyqjoeqwlvh2uiku2xqku6zpoa4bf5ruid.onion",
"http://osbivz6guyeahrwp2lnwyjk2xos342h4ocsxyqrlaopqjuhwn2djiiyd.onion",
"http://u2cvlit75owumwpy4dj2hsmvkq7nvrclkpht7xgyye2pyoxhpmclkrad.onion",
"http://2rorw2w54tr7jkasn53l5swbjnbvz3ubebhswscnc54yac6gmkxaeeqd.onion"
]
},
"nitter": {
"normal": [
"https://nitter.net",
"https://nitter.42l.fr",
"https://nitter.pussthecat.org",
"https://nitter.nixnet.services",
"https://nitter.fdn.fr",
"https://nitter.1d4.us",
"https://nitter.kavin.rocks",
"https://nitter.unixfox.eu",
"https://nitter.domain.glass",
"https://nitter.eu",
"https://nitter.namazso.eu",
"https://nitter.actionsack.com",
"https://birdsite.xanny.family",
"https://nitter.hu",
"https://twitr.gq",
"https://nitter.moomoo.me",
"https://nittereu.moomoo.me",
"https://bird.trom.tf",
"https://nitter.it",
"https://twitter.censors.us",
"https://nitter.grimneko.de",
"https://nitter.alefvanoon.xyz",
"https://n.hyperborea.cloud",
"https://nitter.ca",
"https://twitter.076.ne.jp",
"https://nitter.mstdn.social",
"https://nitter.fly.dev",
"https://notabird.site",
"https://nitter.weiler.rocks",
"https://nitter.silkky.cloud",
"https://nitter.sethforprivacy.com",
"https://nttr.stream",
"https://nitter.cutelab.space",
"https://nitter.nl",
"https://nitter.mint.lgbt",
"https://nitter.tokhmi.xyz",
"https://nitter.bus-hit.me",
"https://fuckthesacklers.network",
"https://nitter.govt.land",
"https://nitter.datatunnel.xyz",
"https://nitter.esmailelbob.xyz",
"https://tw.artemislena.eu",
"https://nitter.eu.org",
"https://de.nttr.stream"
],
"onion": [
"http://3nzoldnxplag42gqjs23xvghtzf6t6yzssrtytnntc6ppc7xxuoneoad.onion",
"http://nitter.l4qlywnpwqsluw65ts7md3khrivpirse744un3x7mlskqauz5pyuzgqd.onion",
"http://nitter7bryz3jv7e3uekphigvmoyoem4al3fynerxkj22dmoxoq553qd.onion",
"http://npf37k3mtzwxreiw52ccs5ay4e6qt2fkcs2ndieurdyn2cuzzsfyfvid.onion",
"http://nitter.v6vgyqpa7yefkorazmg5d5fimstmvm2vtbirt6676mt7qmllrcnwycqd.onion",
"http://i23nv6w3juvzlw32xzoxcqzktegd4i4fu3nmnc2ewv4ggiu4ledwklad.onion",
"http://26oq3gioiwcmfojub37nz5gzbkdiqp7fue5kvye7d4txv4ny6fb4wwid.onion",
"http://vfaomgh4jxphpbdfizkm5gbtjahmei234giqj4facbwhrfjtcldauqad.onion",
"http://iwgu3cv7ywf3gssed5iqtavmrlszgsxazkmwwnt4h2kdait75thdyrqd.onion",
"http://erpnncl5nhyji3c32dcfmztujtl3xaddqb457jsbkulq24zqq7ifdgad.onion",
"http://ckzuw5misyahmg7j5t5xwwuj3bwy62jfolxyux4brfflramzsvvd3syd.onion",
"http://jebqj47jgxleaiosfcxfibx2xdahjettuydlxbg64azd4khsxv6kawid.onion",
"http://nttr2iupbb6fazdpr2rgbooon2tzbbsvvkagkgkwohhodjzj43stxhad.onion",
"http://nitraeju2mipeziu2wtcrqsxg7h62v5y4eqgwi75uprynkj74gevvuqd.onion",
"http://nitter.lqs5fjmajyp7rvp4qvyubwofzi6d4imua7vs237rkc4m5qogitqwrgyd.onion"
]
},
"bibliogram": {
"normal": [
"https://bibliogram.art",
"https://bibliogram.snopyta.org",
"https://bibliogram.pussthecat.org",
"https://bibliogram.1d4.us",
"https://insta.trom.tf",
"https://bib.riverside.rocks",
"https://bibliogram.esmailelbob.xyz",
"https://bib.actionsack.com",
"https://biblio.alefvanoon.xyz"
]
},
"teddit": {
"normal": [
"https://teddit.net",
"https://teddit.ggc-project.de",
"https://teddit.kavin.rocks",
"https://teddit.zaggy.nl",
"https://teddit.namazso.eu",
"https://teddit.nautolan.racing",
"https://teddit.tinfoil-hat.net",
"https://teddit.domain.glass",
"https://snoo.ioens.is",
"https://teddit.httpjames.space",
"https://teddit.alefvanoon.xyz",
"https://incogsnoo.com",
"https://teddit.pussthecat.org",
"https://reddit.lol",
"https://teddit.sethforprivacy.com",
"https://teddit.totaldarkness.net",
"https://teddit.adminforge.de",
"https://teddit.bus-hit.me"
],
"onion": [
"http://teddit4w6cmzmj5kimhfcavs7yo5s7alszvsi2khqutqtlaanpcftfyd.onion",
"http://snoo.ioensistjs7wd746zluwixvojbbkxhr37lepdvwtdfeav673o64iflqd.onion",
"http://ibarajztopxnuhabfu7fg6gbudynxofbnmvis3ltj6lfx47b6fhrd5qd.onion",
"http://tedditfyn6idalzso5wam5qd3kdtxoljjhbrbbx34q2xkcisvshuytad.onion",
"http://dawtyi5e2cfyfmoht4izmczi42aa2zwh6wi34zwvc6rzf2acpxhrcrad.onion",
"http://qtpvyiaqhmwccxwzsqubd23xhmmrt75tdyw35kp43w4hvamsgl3x27ad.onion"
]
},
"wikiless": {
"normal": [
"https://wikiless.org",
"https://wikiless.alefvanoon.xyz",
"https://wikiless.sethforprivacy.com",
"https://wiki.604kph.xyz"
],
"onion": [
"http://dj2tbh2nqfxyfmvq33cjmhuw7nb6am7thzd3zsjvizeqf374fixbrxyd.onion"
]
},
"scribe": {
"normal": [
"https://scribe.rip",
"https://scribe.nixnet.services",
"https://scribe.citizen4.eu",
"https://scribe.bus-hit.me"
]
},
"simplyTranslate": {
"normal": [
"https://simplytranslate.org",
"https://st.alefvanoon.xyz",
"https://translate.josias.dev",
"https://translate.namazso.eu",
"https://translate.riverside.rocks",
"https://manerakai.asuscomm.com:447",
"https://translate.bus-hit.me",
"https://simplytranslate.pussthecat.org",
"https://translate.northboot.xyz"
],
"onion": [
"http://fyng2tsmzmvxmojzbbwmfnsn2lrcyftf4cw6rk5j2v2huliazud3fjid.onion",
"http://xxtbwyb5z5bdvy2f6l2yquu5qilgkjeewno4qfknvb3lkg3nmoklitid.onion"
]
},
"lingva": {
"normal": [
"https://lingva.ml",
"https://translate.alefvanoon.xyz",
"https://translate.igna.rocks",
"https://lingva.pussthecat.org",
"https://translate.datatunnel.xyz",
"https://lingva.esmailelbob.xyz"
]
},
"searx": {
"normal": [
"https://anon.sx",
"https://darmarit.org/searx",
"https://dynabyte.ca",
"https://engo.mint.lgbt",
"https://jsearch.pw",
"https://metasearch.nl",
"https://nibblehole.com",
"https://northboot.xyz",
"https://paulgo.io",
"https://procurx.pt",
"https://s.zhaocloud.net",
"https://search.antonkling.se",
"https://search.asynchronousexchange.com",
"https://search.biboumail.fr",
"https://search.bus-hit.me",
"https://search.disroot.org",
"https://search.ethibox.fr",
"https://search.jpope.org",
"https://search.mdosch.de",
"https://search.neet.works",
"https://search.ononoki.org",
"https://search.snopyta.org",
"https://search.st8.at",
"https://search.stinpriza.org",
"https://search.trom.tf",
"https://search.zdechov.net",
"https://search.zzls.xyz",
"https://searx-private-search.de",
"https://searx.bar",
"https://searx.be",
"https://searx.bissisoft.com",
"https://searx.divided-by-zero.eu",
"https://searx.dresden.network",
"https://searx.ebnar.xyz",
"https://searx.esmailelbob.xyz",
"https://searx.everdot.org",
"https://searx.fmac.xyz",
"https://searx.fossencdi.org",
"https://searx.gnous.eu",
"https://searx.gnu.style",
"https://searx.hardwired.link",
"https://searx.lavatech.top",
"https://searx.mastodontech.de",
"https://searx.mha.fi",
"https://searx.mxchange.org",
"https://searx.nakhan.net",
"https://searx.netzspielplatz.de",
"https://searx.nevrlands.de",
"https://searx.ninja",
"https://searx.nixnet.services",
"https://searx.openhoofd.nl",
"https://searx.operationtulip.com",
"https://searx.org",
"https://searx.prvcy.eu",
"https://searx.pwoss.org",
"https://searx.rasp.fr",
"https://searx.roughs.ru",
"https://searx.ru",
"https://searx.run",
"https://searx.sadblog.xyz",
"https://searx.semipvt.com",
"https://searx.slash-dev.de",
"https://searx.solusar.de",
"https://searx.sp-codes.de",
"https://searx.stuehieyr.com",
"https://searx.theanonymouse.xyz",
"https://searx.thegreenwebfoundation.org",
"https://searx.tiekoetter.com",
"https://searx.tk",
"https://searx.tux.land",
"https://searx.tuxcloud.net",
"https://searx.tyil.nl",
"https://searx.vitanetworks.link",
"https://searx.webheberg.info",
"https://searx.xkek.net",
"https://searx.xyz",
"https://searx.zackptg5.com",
"https://searx.zapashcanon.fr",
"https://searx.zecircle.xyz",
"https://serx.cf",
"https://spot.ecloud.global",
"https://suche.dasnetzundich.de",
"https://suche.uferwerk.org",
"https://swag.pw",
"https://sx.catgirl.cloud",
"https://sx.fedi.tech",
"https://timdor.noip.me/searx",
"https://trovu.komun.org",
"https://www.gruble.de",
"https://www.webrats.xyz",
"https://xeek.com",
"https://searx.roflcopter.fr"
],
"onion": [
"http://3afisqjw2rxm6z7mmstyt5rx75qfqrgxnkzftknbp2vhipr2nrmrjdyd.onion",
"http://searxbgetrkiwxhdwi6svpgh7eotopqyxhbqiokrwzg7dcte44t36kyd.onion",
"http://suche.xyzco456vwisukfg.onion",
"http://w5rl6wsd7mzj4bdkbuqvzidet5osdsm5jhg2f7nvfidakfq5exda5wid.onion",
"http://4n53nafyi77iplnbrpmxnp3x4exbswwxigujaxy3b37fvr7bvlopxeyd.onion",
"http://z34ambyi6makk6ta7ksog2sljly2ctt2sa3apekb7wkllk72sxecdtad.onion",
"http://search.4bkxscubgtxwvhpe.onion",
"http://juy4e6eicawzdrz7.onion",
"http://z5vawdol25vrmorm4yydmohsd4u6rdoj2sylvoi3e3nqvxkvpqul7bqd.onion",
"http://zbuc3bbzbfdqqo2x46repx2ddajbha6fpsjeeptjhhhhzji3zopxdqyd.onion",
"http://f4qfqajszpx5b7itzxt6mb7kj4ktpgbdq7lq6xaiqyqx6a7de3epptad.onion",
"http://searx.cwuzdtzlubq5uual.onion",
"http://rq2w52kyrif3xpfihkgjnhqm3a5aqhoikpv72z3drpjglfzc2wr5z4yd.onion",
"http://searx3aolosaf3urwnhpynlhuokqsgz47si4pzz5hvb7uuzyjncl2tid.onion",
"http://searx.bsbvtqi5oq2cqzn32zt4cr2f2z2rwots3dq7gmdcnlyqoxko2wx6reqd.onion"
],
"i2p": [
"http://ransack.i2p",
"http://mqamk4cfykdvhw5kjez2gnvse56gmnqxn7vkvvbuor4k4j2lbbnq.b32.i2p"
]
},
"whoogle": {
"normal": [
"https://gowogle.voring.me",
"https://s.alefvanoon.xyz",
"https://search.albony.xyz",
"https://search.garudalinux.org",
"https://search.sethforprivacy.com",
"https://whoogle.fossho.st",
"https://whooglesearch.net",
"https://www.whooglesearch.ml",
"https://whoogle.dcs0.hu",
"https://whoogle.esmailelbob.xyz"
]
},
"rimgo": {
"normal": [
"https://i.bcow.xyz",
"https://rimgo.bcow.xyz",
"https://rimgo.pussthecat.org",
"https://img.riverside.rocks",
"https://rimgo.totaldarkness.net",
"https://rimgo.bus-hit.me"
],
"onion": [
"http://l4d4owboqr6xcmd6lf64gbegel62kbudu3x3jnldz2mx6mhn3bsv3zyd.onion",
"http://jx3dpcwedpzu2mh54obk5gvl64i2ln7pt5mrzd75s4jnndkqwzaim7ad.onion"
]
}
}
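The file groups instances by frontend, then by network type ('normal', 'onion', and 'i2p' where available). A minimal sketch of how a consumer might read it, assuming the file sits at instances/data.json and choosing an instance at random purely for illustration:

import json
import random

with open('instances/data.json') as f:
    instances = json.load(f)

# Pick a random clearnet Invidious instance; any other top-level key
# ('nitter', 'teddit', ...) works the same way.
url = random.choice(instances['invidious']['normal'])
print(url)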

instances/get_instances.py (new file, 195 lines added)

@@ -0,0 +1,195 @@
# Note: Run this script from the root of the repo
import requests
import json
from urllib.parse import urlparse
from bs4 import BeautifulSoup

mightyList = {}


def get_host_name(link):
    # Helper that extracts the host name from a URL (not used below)
    url = urlparse(link)
    return url.netloc


# Invidious
r = requests.get('https://api.invidious.io/instances.json')
rJson = json.loads(r.text)
invidiousList = {}
invidiousList['normal'] = []
invidiousList['onion'] = []
for instance in rJson:
    if instance[1]['type'] == 'https':
        invidiousList['normal'].append(instance[1]['uri'])
    elif instance[1]['type'] == 'onion':
        invidiousList['onion'].append(instance[1]['uri'])
mightyList['invidious'] = invidiousList
print('fetched Invidious')

# Nitter
r = requests.get('https://github.com/zedeus/nitter/wiki/Instances')
soup = BeautifulSoup(r.text, 'html.parser')
markdownBody = soup.find(class_='markdown-body')
tables = markdownBody.find_all('table')
# Skip the two tables at indices 3 and 4 (not wanted in the output)
tables.pop(3)
tables.pop(3)
nitterList = {}
nitterList['normal'] = []
nitterList['onion'] = []
for table in tables:
    tbody = table.find('tbody')
    trs = tbody.find_all('tr')
    for tr in trs:
        td = tr.find('td')
        a = td.find('a')
        url = a.contents[0]
        if url.endswith('.onion'):
            url = 'http://' + url
            nitterList['onion'].append(url)
        else:
            url = 'https://' + url
            nitterList['normal'].append(url)
mightyList['nitter'] = nitterList
print('fetched Nitter')

# Bibliogram
r = requests.get('https://bibliogram.art/api/instances')
rJson = json.loads(r.text)
bibliogramList = {}
bibliogramList['normal'] = []
for item in rJson['data']:
    bibliogramList['normal'].append(item['address'])
mightyList['bibliogram'] = bibliogramList
print('fetched Bibliogram')

# Teddit
r = requests.get(
    'https://codeberg.org/teddit/teddit/raw/branch/main/instances.json')
rJson = json.loads(r.text)
tedditList = {}
tedditList['normal'] = []
tedditList['onion'] = []
for item in rJson:
    url = item['url']
    if url != '':
        tedditList['normal'].append(url)
    if 'onion' in item:
        onion = item['onion']
        if onion != '':
            tedditList['onion'].append(onion)
mightyList['teddit'] = tedditList
print('fetched Teddit')

# Wikiless
r = requests.get('https://wikiless.org/instances.json')
rJson = json.loads(r.text)
wikilessList = {}
wikilessList['normal'] = []
wikilessList['onion'] = []
for item in rJson:
    if item.endswith('.onion'):
        wikilessList['onion'].append('http://' + item)
    else:
        wikilessList['normal'].append('https://' + item)
mightyList['wikiless'] = wikilessList
print('fetched Wikiless')

# Scribe
r = requests.get(
    'https://git.sr.ht/~edwardloveall/scribe/blob/main/docs/instances.json')
rJson = json.loads(r.text)
scribeList = {}
scribeList['normal'] = []
for item in rJson:
    scribeList['normal'].append(item)
mightyList['scribe'] = scribeList
print('fetched Scribe')

# SimplyTranslate
r = requests.get('https://simple-web.org/instances/simplytranslate')
simplyTranslateList = {}
simplyTranslateList['normal'] = []
for item in r.text.strip().split('\n'):
    simplyTranslateList['normal'].append('https://' + item)
r = requests.get('https://simple-web.org/instances/simplytranslate_onion')
simplyTranslateList['onion'] = []
for item in r.text.strip().split('\n'):
    simplyTranslateList['onion'].append('http://' + item)
mightyList['simplyTranslate'] = simplyTranslateList
print('fetched SimplyTranslate')

# LingvaTranslate
r = requests.get(
    'https://raw.githubusercontent.com/TheDavidDelta/lingva-translate/main/instances.json')
rJson = json.loads(r.text)
lingvaList = {}
lingvaList['normal'] = []
for item in rJson:
    lingvaList['normal'].append(item)
mightyList['lingva'] = lingvaList
print('fetched LingvaTranslate')

# SearX
r = requests.get('https://searx.space/data/instances.json')
rJson = json.loads(r.text)
searxList = {}
searxList['normal'] = []
searxList['onion'] = []
searxList['i2p'] = []
for item in rJson['instances'].keys():
    # Drop the trailing slash from the instance URL
    item = item[:-1]
    if item.endswith('.onion'):
        searxList['onion'].append(item)
    elif item.endswith('.i2p'):
        searxList['i2p'].append(item)
    else:
        searxList['normal'].append(item)
mightyList['searx'] = searxList
print('fetched SearX')

# Whoogle
r = requests.get(
    'https://raw.githubusercontent.com/benbusby/whoogle-search/main/misc/instances.txt')
tmpList = r.text.strip().split('\n')
whoogleList = {}
whoogleList['normal'] = []
for item in tmpList:
    whoogleList['normal'].append(item)
mightyList['whoogle'] = whoogleList
print('fetched Whoogle')

# Rimgo
r = requests.get(
    'https://codeberg.org/video-prize-ranch/rimgo/raw/branch/main/instances.json')
rJson = json.loads(r.text)
rimgoList = {}
rimgoList['normal'] = []
rimgoList['onion'] = []
for item in rJson:
    if item.endswith('.onion'):
        rimgoList['onion'].append('http://' + item)
    else:
        rimgoList['normal'].append('https://' + item)
mightyList['rimgo'] = rimgoList
print('fetched Rimgo')

# Writing to file
json_object = json.dumps(mightyList, ensure_ascii=False, indent=2)
with open('instances/data.json', 'w') as outfile:
    outfile.write(json_object)
# print(json_object)
print('wrote instances/data.json')
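A minimal sanity-check sketch (an assumption about how one might verify the output, not part of the script above): re-read instances/data.json and print how many instances were collected for each frontend.

import json

with open('instances/data.json') as f:
    data = json.load(f)

# Count every URL across 'normal', 'onion', and 'i2p' lists per frontend
for frontend, networks in data.items():
    total = sum(len(urls) for urls in networks.values())
    print(f'{frontend}: {total} instances')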