Modified the settings conversion for the next release
This commit is contained in:
parent acf04b61c3
commit 26624ea716
@@ -2,14 +2,12 @@ window.browser = window.browser || window.chrome
import utils from "./utils.js"

let config, options, targets
let config, options

function init() {
return new Promise(async resolve => {
browser.storage.local.get(["options", "targets", "embedTabs"], r => {
browser.storage.local.get(["options"], r => {
options = r.options
targets = r.targets
embedTabs = r.embedTabs
fetch("/config.json")
.then(response => response.text())
.then(configData => {
@@ -38,25 +36,18 @@ function all(service, frontend, options, config) {
}

function regexArray(service, url, config, frontend) {
if (config.services[service].targets == "datajson") {
for (const instance of targets[service]) {
if (instance.startsWith(utils.protocolHost(url))) return true
}
} else {
let targetList = config.services[service].targets
if (frontend && config.services[service].frontends[frontend].excludeTargets)
for (const i in config.services[service].frontends[frontend].excludeTargets) {
targetList = targetList.splice(i, 1)
}
for (const targetString in targetList) {
const target = new RegExp(targetList[targetString])
if (target.test(url.href)) return true
let targetList = config.services[service].targets
if (frontend && config.services[service].frontends[frontend].excludeTargets)
for (const i in config.services[service].frontends[frontend].excludeTargets) {
targetList = targetList.splice(i, 1)
}
for (const targetString in targetList) {
const target = new RegExp(targetList[targetString])
if (target.test(url.href)) return true
}
return false
}

let embedTabs = {}
function redirect(url, type, initiator, forceRedirection, tabId) {
if (type != "main_frame" && type != "sub_frame" && type != "image") return
let randomInstance
@@ -79,17 +70,7 @@ function redirect(url, type, initiator, forceRedirection, tabId) {
}
if (instanceList.length === 0) return

if ((type == "sub_frame" || type == "image") && embedTabs[tabId] && embedTabs[tabId][frontend] !== undefined) {
randomInstance = embedTabs[tabId][frontend]
} else {
randomInstance = utils.getRandomInstance(instanceList)
}

if ((type == "sub_frame" || type == "image") && embedTabs[tabId] === undefined) {
embedTabs[tabId] = {}
embedTabs[tabId][frontend] = randomInstance
browser.storage.local.set(embedTabs)
}
randomInstance = utils.getRandomInstance(instanceList)

break
}
@@ -491,7 +472,7 @@ function reverse(url, urlString) {
await init()
let protocolHost
if (!urlString) protocolHost = utils.protocolHost(url)
else protocolHost = url.match(/https?:\/{2}(?:[^\s\/]+\.)+[a-zA-Z0-9]+/)[0]
else protocolHost = url.match(/^https?:\/{2}/)[0]
for (const service in config.services) {
if (!all(service, null, options, config).includes(protocolHost)) continue
@@ -530,9 +511,7 @@ function initDefaults() {
.then(configData => {
browser.storage.local.get(["options"], r => {
let options = r.options
let targets = {}
let config = JSON.parse(configData)
const localstorage = {}
for (const service in config.services) {
options[service] = {}
for (const defaultOption in config.services[service].options) {
@@ -545,7 +524,7 @@ function initDefaults() {
}
}
browser.storage.local.set(
{ options, targets, localstorage, embedTabs: {} },
{ options },
() => resolve()
)
})
@@ -553,62 +532,51 @@ function initDefaults() {
})
}

function backupOptions() {
return new Promise(resolve => {
browser.storage.local.get(
"options", r => {
const oldOptions = r.options
browser.storage.local.clear(() => {
browser.storage.local.set({ oldOptions },
() => resolve()
)
})

})
})
}

function upgradeOptions() {
return new Promise(resolve => {
fetch("/config.json")
.then(response => response.text())
.then(configData => {
browser.storage.local.get(null, r => {
browser.storage.local.get(["oldOptions", "options"], r => {
const oldOptions = r.oldOptions
let options = r.options
const config = JSON.parse(configData)
options.exceptions = r.exceptions
if (r.theme != "DEFAULT") options.theme = r.theme
options.popupServices = r.popupFrontends
let tmp = options.popupServices.indexOf("tikTok")
if (tmp > -1) {
options.popupServices.splice(tmp, 1)
options.popupServices.push("tiktok")
}
tmp = options.popupServices.indexOf("sendTarget")
if (tmp > -1) {
options.popupServices.splice(tmp, 1)
options.popupServices.push("sendFiles")
}
switch (r.onlyEmbeddedVideo) {
case "onlyNotEmbedded":
options.youtube.redirectType = "main_frame"
case "onlyEmbedded":
options.youtube.redirectType = "sub_frame"
case "both":
options.youtube.redirectType = "both"
}

options.exceptions = oldOptions.exceptions
options.theme = oldOptions.theme
options.popupServices = oldOptions.popupServices

for (const service in config.services) {
let oldService
switch (service) {
case "tiktok":
oldService = "tikTok"
break
case "sendFiles":
oldService = "sendTarget"
break
default:
oldService = service
}
options[service].enabled = !r["disable" + utils.camelCase(oldService)]
if (r[oldService + "Frontend"]) {
if (r[oldService + "Frontend"] == "yatte") options[service].frontend = "yattee"
else options[service].frontend = r[oldService + "Frontend"]
}
if (r[oldService + "RedirectType"]) options[service].redirectType = r[oldService + "RedirectType"]
for (const frontend in config.services[service].frontends) {
for (const network in config.networks) {
let protocol
if (network == "clearnet") protocol = "normal"
else protocol = network
}
options[service] = oldOptions[service]
options[service].remove("embedFrontend")

for (const frontend in network.services[service].frontends) {
options[frontend] = [
...oldOptions[frontend].clearnet.enabled,
...oldOptions[frontend].clearnet.custom
]
}
}
browser.storage.local.set({ options }, () => resolve())
browser.storage.local.set({ options }, () => {
browser.storage.local.remove("oldOptions", () => {
resolve()
})
})
})
})
})
@@ -616,51 +584,23 @@ function upgradeOptions() {
function processUpdate() {
return new Promise(resolve => {
fetch("/instances/data.json")
fetch("/config.json")
.then(response => response.text())
.then(data => {
fetch("/config.json")
.then(response => response.text())
.then(configData => {
browser.storage.local.get(["options", "targets"], async r => {
let redirects = JSON.parse(data)
let options = r.options
let targets = r.targets
let config = JSON.parse(configData)
for (const service in config.services) {
if (!options[service]) options[service] = {}
if (config.services[service].targets == "datajson") {
targets[service] = redirects[service]
delete redirects[service]
}
for (const defaultOption in config.services[service].options) {
if (options[service][defaultOption] === undefined) {
options[service][defaultOption] = config.services[service].options[defaultOption]
}
}
for (const frontend in config.services[service].frontends) {
if (config.services[service].frontends[frontend].instanceList) {
if (!options[frontend]) options[frontend] = {}
for (const network in config.networks) {
if (!options[frontend]) {
options[frontend] = []
if (network == "clearnet") {
for (const blacklist of await utils.getBlacklist()) {
for (const instance of blacklist) {
let i = options[frontend].clearnet.enabled.indexOf(instance)
if (i > -1) options[frontend].clearnet.enabled.splice(i, 1)
}
}
}
}
}
}
}
.then(configJson => {
let config = JSON.parse(configJson)
browser.storage.local.get(["options"], async r => {
let options = r.options
for (const service in config.services) {
if (!options[service]) options[service] = {}
for (const defaultOption in config.services[service].options) {
if (options[service][defaultOption] === undefined) {
options[service][defaultOption] = config.services[service].options[defaultOption]
}
browser.storage.local.set({ redirects, options, targets })
resolve()
})
})
}
}
browser.storage.local.set({ options })
resolve()
})
})
})
}
@@ -710,6 +650,7 @@ export default {
reverse,
initDefaults,
upgradeOptions,
backupOptions,
processUpdate,
modifyContentSecurityPolicy,
}
@@ -73,7 +73,7 @@ function switchInstance(test) {
function getBlacklist() {
return new Promise(resolve => {
const http = new XMLHttpRequest()
http.open("GET", "https://codeberg.org/LibRedirect/libredirect/raw/branch/master/src/instances/blacklist.json", true)
http.open("GET", "https://raw.githubusercontent.com/libredirect/instances/main/blacklist.json", true)
http.onreadystatechange = () => {
if (http.status === 200 && http.readyState == XMLHttpRequest.DONE) {
resolve(JSON.parse(http.responseText))
@@ -87,7 +87,7 @@ function getBlacklist() {
function getList() {
return new Promise(resolve => {
const http = new XMLHttpRequest()
http.open("GET", "https://codeberg.org/LibRedirect/libredirect/raw/branch/master/src/instances/data.json", true)
http.open("GET", "https://raw.githubusercontent.com/libredirect/instances/main/data.json", true)
http.onreadystatechange = () => {
if (http.status === 200 && http.readyState == XMLHttpRequest.DONE) {
resolve(JSON.parse(http.responseText))
@@ -1,6 +0,0 @@
{
"clearnet": ["https://beatbump.ml", "https://beatbump.esmailelbob.xyz"],
"tor": ["http://beatbump.lqs5fjmajyp7rvp4qvyubwofzi6d4imua7vs237rkc4m5qogitqwrgyd.onion"],
"i2p": [],
"loki": []
}
@@ -1,6 +0,0 @@
{
"clearnet": ["https://bibliogram.1d4.us", "https://bibliogram.froth.zone", "https://ig.tokhmi.xyz", "https://ig.beparanoid.de", "https://bibliogram.priv.pw"],
"tor": [],
"i2p": [],
"loki": []
}
@@ -1,6 +0,0 @@
{
"clearnet": ["https://tube.cadence.moe", "https://tube.boritsch.de"],
"tor": [],
"i2p": [],
"loki": []
}
@@ -1,6 +0,0 @@
{
"clearnet": ["https://facilmap.org"],
"tor": [],
"i2p": [],
"loki": []
}
@@ -1,517 +0,0 @@
#!/usr/bin/python3
# Note: Run this script from the root of the repo

import traceback
import logging
import requests
import json
from urllib.parse import urlparse
import re
from colorama import Fore, Style
import socket

mightyList = {}
config = {}

startRegex = r"https?:\/{2}(?:[^\s\/]+\.)*"
endRegex = "(?:\/[^\s\/]+)*\/?"
torRegex = startRegex + "onion" + endRegex
i2pRegex = startRegex + "i2p" + endRegex
lokiRegex = startRegex + "loki" + endRegex
authRegex = r"https?:\/{2}\S+:\S+@(?:[^\s\/]+\.)*[a-zA-Z0-9]+" + endRegex

# 2.0 because Libredirect is currently on version 2.x.x
headers = {'User-Agent': 'Libredirect-instance-fetcher/2.0'}

with open('./src/config.json', 'rt') as tmp:
config['networks'] = json.load(tmp)['networks']

def filterLastSlash(urlList):
tmp = {}
for frontend in urlList:
tmp[frontend] = {}
for network in urlList[frontend]:
tmp[frontend][network] = []
for url in urlList[frontend][network]:
if url.endswith('/'):
tmp[frontend][network].append(url[:-1])
print(Fore.YELLOW + "Fixed " + Style.RESET_ALL + url)
else:
tmp[frontend][network].append(url)
return tmp

def idnaEncode(urlList):
tmp = {}
for frontend in urlList:
tmp[frontend] = {}
for network in urlList[frontend]:
tmp[frontend][network] = []
for url in urlList[frontend][network]:
try:
encodedUrl = url.encode("idna").decode("utf8")
tmp[frontend][network].append(encodedUrl)
if (encodedUrl != url):
print(Fore.YELLOW + "Fixed " + Style.RESET_ALL + url)
except Exception:
tmp[frontend][network].append(url)
return tmp

def ip2bin(ip): return "".join(
map(
str,
[
"{0:08b}".format(int(x)) for x in ip.split(".")
]
)
)

def get_cloudflare_ips():
r = requests.get('https://www.cloudflare.com/ips-v4')
return r.text.split('\n')

cloudflare_ips = get_cloudflare_ips()
def is_cloudflare(url):
instance_ip = None
try:
instance_ip = socket.gethostbyname(urlparse(url).hostname)
if instance_ip is None:
return False
except Exception:
return False
instance_bin = ip2bin(instance_ip)

for cloudflare_ip_mask in cloudflare_ips:
cloudflare_ip = cloudflare_ip_mask.split('/')[0]
cloudflare_bin = ip2bin(cloudflare_ip)

mask = int(cloudflare_ip_mask.split('/')[1])
cloudflare_bin_masked = cloudflare_bin[:mask]
instance_bin_masked = instance_bin[:mask]

if cloudflare_bin_masked == instance_bin_masked:
print(url + ' is behind ' + Fore.RED +
'cloudflare' + Style.RESET_ALL)
return True
return False

def is_authenticate(url):
try:
if re.match(authRegex, url):
print(url + ' requires ' + Fore.RED +
'authentication' + Style.RESET_ALL)
return True
r = requests.get(url, timeout=5, headers=headers)
if 'www-authenticate' in r.headers:
print(url + ' requires ' + Fore.RED +
'authentication' + Style.RESET_ALL)
return True
except Exception:
return False
return False

def fetchCache(frontend, name):
try:
with open('./src/instances/data.json') as file:
mightyList[frontend] = json.load(file)[frontend]
print(Fore.YELLOW + 'Failed' + Style.RESET_ALL + ' to fetch ' + name)
except Exception:
print(Fore.RED + 'Failed' + Style.RESET_ALL + ' to get cached ' + name)

def fetchFromFile(frontend, name):
with open('./src/instances/' + frontend + '.json') as file:
mightyList[frontend] = json.load(file)
print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + name)

def fetchJsonList(frontend, name, url, urlItem, jsonObject):
try:
r = requests.get(url, headers=headers)
rJson = json.loads(r.text)
if jsonObject:
rJson = rJson['instances']
_list = {}
for network in config['networks']:
_list[network] = []
if type(urlItem) == dict:
for item in rJson:
for network in config['networks']:
if urlItem[network] is not None:
if urlItem[network] in item and item[urlItem[network]] is not None:
if item[urlItem[network]].strip() != '':
_list[network].append(item[urlItem[network]])
else:
for item in rJson:
tmpItem = item
if urlItem is not None:
tmpItem = item[urlItem]
if tmpItem.strip() == '':
continue
elif re.search(torRegex, tmpItem):
_list['tor'].append(tmpItem)
elif re.search(i2pRegex, tmpItem):
_list['i2p'].append(tmpItem)
elif re.search(lokiRegex, tmpItem):
_list['loki'].append(tmpItem)
else:
_list['clearnet'].append(tmpItem)

mightyList[frontend] = _list
print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + name)
except Exception:
fetchCache(frontend, name)
logging.error(traceback.format_exc())

def fetchRegexList(frontend, name, url, regex):
try:
r = requests.get(url, headers=headers)
_list = {}
for network in config['networks']:
_list[network] = []

tmp = re.findall(regex, r.text)

for item in tmp:
if item.strip() == "":
continue
elif re.search(torRegex, item):
_list['tor'].append(item)
elif re.search(i2pRegex, item):
_list['i2p'].append(item)
elif re.search(lokiRegex, item):
_list['loki'].append(item)
else:
_list['clearnet'].append(item)
mightyList[frontend] = _list
print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + name)
except Exception:
fetchCache(frontend, name)
logging.error(traceback.format_exc())
def fetchTextList(frontend, name, url, prepend):
try:
_list = {}
for network in config['networks']:
_list[network] = []

if type(url) == dict:
for network in config['networks']:
if url[network] is not None:
r = requests.get(url[network], headers=headers)
tmp = r.text.strip().split('\n')
for item in tmp:
item = prepend[network] + item
_list[network].append(item)
else:
r = requests.get(url, headers=headers)
tmp = r.text.strip().split('\n')

for item in tmp:
item = prepend + item
if re.search(torRegex, item):
_list['tor'].append(item)
elif re.search(i2pRegex, item):
_list['i2p'].append(item)
elif re.search(lokiRegex, item):
_list['loki'].append(item)
else:
_list['clearnet'].append(item)
mightyList[frontend] = _list
print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + name)
except Exception:
fetchCache(frontend, name)
logging.error(traceback.format_exc())

def invidious():
name = 'Invidious'
frontend = 'invidious'
url = 'https://api.invidious.io/instances.json'
try:
_list = {}
_list['clearnet'] = []
_list['tor'] = []
_list['i2p'] = []
_list['loki'] = []
r = requests.get(url, headers=headers)
rJson = json.loads(r.text)
for instance in rJson:
if instance[1]['type'] == 'https':
_list['clearnet'].append(instance[1]['uri'])
elif instance[1]['type'] == 'onion':
_list['tor'].append(instance[1]['uri'])
elif instance[1]['type'] == 'i2p':
_list['i2p'].append(instance[1]['uri'])
mightyList[frontend] = _list
print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + name)
except Exception:
fetchCache(frontend, name)
logging.error(traceback.format_exc())

def piped():
frontend = 'piped'
name = 'Piped'
try:
_list = {}
_list['clearnet'] = []
_list['tor'] = []
_list['i2p'] = []
_list['loki'] = []
r = requests.get(
'https://raw.githubusercontent.com/wiki/TeamPiped/Piped/Instances.md', headers=headers)

tmp = re.findall(
r'(?:[^\s\/]+\.)+[a-zA-Z]+ (?:\(Official\) )?\| (https:\/{2}(?:[^\s\/]+\.)+[a-zA-Z]+) \| ', r.text)
for item in tmp:
try:
url = requests.get(item, timeout=5, headers=headers).url
if url.strip("/") == item:
continue
else:
_list['clearnet'].append(url)
except Exception:
logging.error(traceback.format_exc())
continue
mightyList[frontend] = _list
print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + name)
except Exception:
fetchCache(frontend, name)
logging.error(traceback.format_exc())

def pipedMaterial():
fetchRegexList('pipedMaterial', 'Piped-Material', 'https://raw.githubusercontent.com/mmjee/Piped-Material/master/README.md',
r"\| (https?:\/{2}(?:\S+\.)+[a-zA-Z0-9]*) +\| Production")

def cloudtube():
fetchFromFile('cloudtube', 'Cloudtube')

def proxitok():
fetchRegexList('proxiTok', 'ProxiTok', 'https://raw.githubusercontent.com/wiki/pablouser1/ProxiTok/Public-instances.md',
r"\| \[.*\]\(([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)(?: \(Official\))? +\|(?:(?: [A-Z]*.*\|.*\|)|(?:$))")

def send():
fetchRegexList('send', 'Send', 'https://gitlab.com/timvisee/send-instances/-/raw/master/README.md',
r"- ([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z0-9]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)*\|*[A-Z]{0,}")

def nitter():
fetchRegexList('nitter', 'Nitter', 'https://raw.githubusercontent.com/wiki/zedeus/nitter/Instances.md',
r"(?:(?:\| )|(?:- ))\[(?:(?:\S+\.)+[a-zA-Z0-9]+)\/?\]\((https?:\/{2}(?:\S+\.)+[a-zA-Z0-9]+)\/?\)(?:(?: (?:\((?:\S+ ?\S*)\) )? *\| [^❌]{1,4} +\|(?:(?:\n)|(?: ❌)|(?: ✅)|(?: ❓)|(?: \[)))|(?:\n))")

def bibliogram():
fetchFromFile('bibliogram', 'Bibliogram')

def libreddit():
fetchJsonList('libreddit', 'Libreddit', 'https://github.com/libreddit/libreddit-instances/raw/master/instances.json',
{'clearnet': 'url', 'tor': 'onion', 'i2p': 'i2p', 'loki': None}, True)

def teddit():
fetchJsonList('teddit', 'Teddit', 'https://codeberg.org/teddit/teddit/raw/branch/main/instances.json',
{'clearnet': 'url', 'tor': 'onion', 'i2p': 'i2p', 'loki': None}, False)

def scribe():
fetchJsonList('scribe', 'Scribe',
'https://git.sr.ht/~edwardloveall/scribe/blob/main/docs/instances.json', None, False)

def quetre():
fetchRegexList('quetre', 'Quetre', 'https://raw.githubusercontent.com/zyachel/quetre/main/README.md',
r"\| \[.*\]\(([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z0-9]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)*\|*[A-Z]{0,}.*\|.*\|")

def libremdb():
fetchRegexList('libremdb', 'libremdb', 'https://raw.githubusercontent.com/zyachel/libremdb/main/README.md',
r"\| \[.*\]\(([-a-zA-Z0-9@:%_\+.~#?&//=]{2,}\.[a-z0-9]{2,}\b(?:\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?)\)*\|*[A-Z]{0,}.*\|.*\|")

def simpleertube():
fetchTextList('simpleertube', 'SimpleerTube', {'clearnet': 'https://simple-web.org/instances/simpleertube', 'tor': 'https://simple-web.org/instances/simpleertube_onion',
'i2p': 'https://simple-web.org/instances/simpleertube_i2p', 'loki': None}, {'clearnet': 'https://', 'tor': 'http://', 'i2p': 'http://', 'loki': 'http://'})

def simplytranslate():
fetchTextList('simplyTranslate', 'SimplyTranslate', {'clearnet': 'https://simple-web.org/instances/simplytranslate', 'tor': 'https://simple-web.org/instances/simplytranslate_onion',
'i2p': 'https://simple-web.org/instances/simplytranslate_i2p', 'loki': 'https://simple-web.org/instances/simplytranslate_loki'}, {'clearnet': 'https://', 'tor': 'http://', 'i2p': 'http://', 'loki': 'http://'})

def linvgatranslate():
fetchJsonList('lingva', 'LingvaTranslate',
'https://raw.githubusercontent.com/TheDavidDelta/lingva-translate/main/instances.json', None, False)
def searx_searxng():
r = requests.get(
'https://searx.space/data/instances.json', headers=headers)
rJson = json.loads(r.text)
searxList = {}
searxList['clearnet'] = []
searxList['tor'] = []
searxList['i2p'] = []
searxList['loki'] = []
searxngList = {}
searxngList['clearnet'] = []
searxngList['tor'] = []
searxngList['i2p'] = []
searxngList['loki'] = []
for item in rJson['instances']:
if re.search(torRegex, item[:-1]):
if (rJson['instances'][item].get('generator') == 'searxng'):
searxngList['tor'].append(item[:-1])
else:
searxList['tor'].append(item[:-1])
elif re.search(i2pRegex, item[:-1]):
if (rJson['instances'][item].get('generator') == 'searxng'):
searxngList['i2p'].append(item[:-1])
else:
searxList['i2p'].append(item[:-1])
else:
if (rJson['instances'][item].get('generator') == 'searxng'):
searxngList['clearnet'].append(item[:-1])
else:
searxList['clearnet'].append(item[:-1])

mightyList['searx'] = searxList
mightyList['searxng'] = searxngList
print(Fore.GREEN + 'Fetched ' + Style.RESET_ALL + 'SearX, SearXNG')

def whoogle():
fetchRegexList('whoogle', 'Whoogle', 'https://raw.githubusercontent.com/benbusby/whoogle-search/main/README.md',
r"\| \[https?:\/{2}(?:[^\s\/]+\.)*(?:[^\s\/]+\.)+[a-zA-Z0-9]+\]\((https?:\/{2}(?:[^\s\/]+\.)*(?:[^\s\/]+\.)+[a-zA-Z0-9]+)\/?\) \| ")

def librex():
fetchJsonList('librex', 'LibreX', 'https://raw.githubusercontent.com/hnhx/librex/main/instances.json',
{'clearnet': 'clearnet', 'tor': 'tor', 'i2p': 'i2p', 'loki': None}, True)

def rimgo():
fetchJsonList('rimgo', 'rimgo', 'https://codeberg.org/video-prize-ranch/rimgo/raw/branch/main/instances.json',
{'clearnet': 'url', 'tor': 'onion', 'i2p': 'i2p', 'loki': None}, False)

def librarian():
fetchJsonList('librarian', 'Librarian',
'https://codeberg.org/librarian/librarian/raw/branch/main/instances.json', 'url', True)

def beatbump():
fetchFromFile('beatbump', 'Beatbump')

def hyperpipe():
fetchJsonList('hyperpipe', 'Hyperpipe',
'https://codeberg.org/Hyperpipe/pages/raw/branch/main/api/frontend.json', 'url', False)

def facil():
fetchFromFile('facil', 'FacilMap')

def osm():
fetchFromFile('osm', 'OpenStreetMap')

def libreTranslate():
fetchRegexList('libreTranslate', 'LibreTranslate', 'https://raw.githubusercontent.com/LibreTranslate/LibreTranslate/main/README.md',
r"\[(?:[^\s\/]+\.)+[a-zA-Z0-9]+\]\((https?:\/{2}(?:[^\s\/]+\.)+[a-zA-Z0-9]+)\/?\)\|")

def breezeWiki():
fetchJsonList('breezeWiki', 'BreezeWiki',
'https://docs.breezewiki.com/files/instances.json', 'instance', False)

def privateBin():
fetchJsonList('privateBin', 'PrivateBin',
'https://privatebin.info/directory/api?top=100&https_redirect=true&min_rating=A&csp_header=true&min_uptime=100&attachments=true', 'url', False)

def isValid(url): # This code is contributed by avanitrachhadiya2155
try:
result = urlparse(url)
return all([result.scheme, result.netloc])
except Exception:
return False
invidious()
piped()
pipedMaterial()
cloudtube()
proxitok()
send()
nitter()
bibliogram()
libreddit()
teddit()
scribe()
quetre()
libremdb()
simplytranslate()
linvgatranslate()
libreTranslate()
searx_searxng()
whoogle()
librex()
rimgo()
librarian()
beatbump()
hyperpipe()
facil()
osm()
simpleertube()
breezeWiki()
privateBin()
mightyList = filterLastSlash(mightyList)
mightyList = idnaEncode(mightyList)

cloudflare = []
authenticate = []
for k1, v1 in mightyList.items():
if type(mightyList[k1]) is dict:
for k2, v2 in mightyList[k1].items():
for instance in mightyList[k1][k2]:
if (not isValid(instance)):
mightyList[k1][k2].remove(instance)
print("removed " + instance)
else:
if not instance.endswith('.onion') and not instance.endswith('.i2p') and not instance.endswith('.loki') and is_cloudflare(instance):
cloudflare.append(instance)
if not instance.endswith('.onion') and not instance.endswith('.i2p') and not instance.endswith('.loki') and is_authenticate(instance):
authenticate.append(instance)
blacklist = {
'cloudflare': cloudflare,
'authenticate': authenticate,
}

# Writing to file
json_object = json.dumps(mightyList, ensure_ascii=False, indent=2)
with open('./src/instances/data.json', 'w') as outfile:
outfile.write(json_object)
print(Fore.BLUE + 'wrote ' + Style.RESET_ALL + 'instances/data.json')

json_object = json.dumps(blacklist, ensure_ascii=False, indent=2)
with open('./src/instances/blacklist.json', 'w') as outfile:
outfile.write(json_object)
print(Fore.BLUE + 'wrote ' + Style.RESET_ALL + 'instances/blacklist.json')

# print(json_object)
@@ -1,8 +0,0 @@
{
"clearnet": [
"https://www.openstreetmap.org"
],
"tor": [],
"i2p": [],
"loki": []
}
@@ -9,6 +9,10 @@ window.browser = window.browser || window.chrome
browser.runtime.onInstalled.addListener(async details => {
if (details.previousVersion != browser.runtime.getManifest().version) {
// ^Used to prevent this running when debugging with auto-reload
browser.tabs.create({
url: browser.runtime.getURL("/pages/options/new_release.html")
});

switch (details.reason) {
case "install":
browser.storage.local.get("options", async r => {
@@ -20,10 +24,10 @@ browser.runtime.onInstalled.addListener(async details => {
break
case "update":
switch (details.previousVersion) {
case "2.2.0":
case "2.2.1":
case "2.3.4":
browser.storage.local.get("options", async r => {
if (!r.options) {
await servicesHelper.backupOptions()
await generalHelper.initDefaults()
await servicesHelper.initDefaults()
await servicesHelper.upgradeOptions()
@@ -39,13 +43,6 @@ browser.runtime.onInstalled.addListener(async details => {
let tabIdRedirects = {}

browser.storage.onChanged.addListener(() => {
browser.storage.local.get(["embedTabs"], r => {
embedTabs = r.embedTabs
})
})
let embedTabs = {}

// true == Always redirect, false == Never redirect, null/undefined == follow options for services
browser.webRequest.onBeforeRequest.addListener(
details => {
@@ -89,11 +86,6 @@ browser.tabs.onRemoved.addListener(tabId => {
delete tabIdRedirects[tabId]
console.log("Removed tab " + tabId + " from tabIdRedirects")
}
if (embedTabs[tab] != undefined) {
delete embedTabs[tabId]
browser.storage.local.set(embedTabs)
console.log("Removed tab " + tabId + " from embedTabs")
}
})

browser.commands.onCommand.addListener(command => {
@@ -65,8 +65,10 @@ function loadPage(path) {
divs[service][option].addEventListener("change", () => {
browser.storage.local.get("options", r => {
let options = r.options
if (typeof config.services[service].options[option] == "boolean") options[service][option] = divs[service][option].checked
else options[service][option] = divs[service][option].value
if (typeof config.services[service].options[option] == "boolean")
options[service][option] = divs[service][option].checked
else
options[service][option] = divs[service][option].value
browser.storage.local.set({ options })
changeFrontendsSettings(service)
})
@@ -167,38 +169,41 @@ async function processDefaultCustomInstances(frontend, document) {
function createList(frontend, networks, document, redirects, blacklist) {
for (const network in networks) {
if (redirects[frontend][network].length > 0) {
document.getElementById(frontend).getElementsByClassName(network)[0].getElementsByClassName("checklist")[0].innerHTML = [
`
<div class="some-block option-block">
<h4>${utils.camelCase(network)}</h4>
</div>
`,
...redirects[frontend][network]
.sort((a, b) =>
(blacklist.cloudflare.includes(a) && !blacklist.cloudflare.includes(b))
||
(blacklist.authenticate.includes(a) && !blacklist.authenticate.includes(b))
)
.map(x => {
const cloudflare = blacklist.cloudflare.includes(x) ? ' <span style="color:red;">cloudflare</span>' : ""
const authenticate = blacklist.authenticate.includes(x) ? ' <span style="color:orange;">authenticate</span>' : ""
if (redirects[frontend]) {
if (redirects[frontend][network].length > 0) {
document.getElementById(frontend).getElementsByClassName(network)[0].getElementsByClassName("checklist")[0].innerHTML = [
`
<div class="some-block option-block">
<h4>${utils.camelCase(network)}</h4>
</div>
`,
...redirects[frontend][network]
.sort((a, b) =>
(blacklist.cloudflare.includes(a) && !blacklist.cloudflare.includes(b))
)
.map(x => {
const cloudflare = blacklist.cloudflare.includes(x) ? ' <a target="_blank" href="https://libredirect.github.io/docs.html#instances"><span style="color:red;">cloudflare</span></a>' : ""

let warnings = [cloudflare, authenticate].join(" ")
return `
<div>
<x>
<a href="${x}" target="_blank">${x}</a>${warnings}
</x>
</div>`
}),
'<br>'
].join("\n<hr>\n")
let warnings = [cloudflare].join(" ")
return `
<div>
<x>
<a href="${x}" target="_blank">${x}</a>${warnings}
</x>
</div>`
}),
'<br>'
].join("\n<hr>\n")
}
} else {
document.getElementById(frontend).getElementsByClassName(network)[0].getElementsByClassName("checklist")[0].innerHTML =
`<div class="some-block option-block">No instances found...</div>`
break
}

}
}

const r = window.location.href.match(/#(.*)/)
if (r) loadPage(r[1])
else loadPage("general")
@@ -0,0 +1 @@
Many things happened in the past months. LibRedirect has gone through some hard decisions; removed Unify Settings, Test Latency,
@@ -57,19 +57,16 @@ importSettingsElement.addEventListener("change", () => {
reader.readAsText(file)
reader.onload = async () => {
const data = JSON.parse(reader.result)
if ("theme" in data && "disableImgur" in data && "imgurRedirects" in data) {
if (
"theme" in data
&& data.version == browser.runtime.getManifest().version
) {
browser.storage.local.clear(async () => {
await generalHelper.initDefaults()
await servicesHelper.initDefaults()
await servicesHelper.upgradeOptions()
location.reload()
})
} else if ("version" in data) {
let options = data
delete options.version
browser.storage.local.set({ options: data }, async () => {
await servicesHelper.processUpdate()
location.reload()
browser.storage.local.set({ options: data }, () => {
location.reload()
})
})
} else {
console.log("incompatible settings")
@@ -49,7 +49,10 @@ each val, service in services
svg(xmlns="https://www.w3.org/2000/svg" height="20px" viewBox="0 0 24 24" width="20px" fill="currentColor")
path(d="M19 13h-6v6h-2v-6H5v-2h6V5h2v6h6v2z")

div(class="checklist custom-checklist")
div(class="checklist custom-checklist")

each val, network in networks
div(class=network)
div(class="checklist")
div(class="checklist")
if (network == 'clearnet')
div(class="some-block option-block") Loading...