Improve router; add track-sharing support
This commit is contained in:
41
src/services/deep-set.js
Normal file
41
src/services/deep-set.js
Normal file
@@ -0,0 +1,41 @@
|
||||
export default class DeepSet {
|
||||
constructor() {
|
||||
this.map = new Map()
|
||||
this.set = new Set()
|
||||
}
|
||||
|
||||
_reduce(path) {
|
||||
return path.reduce((context, key) => {
|
||||
if (context.map.has(key)) {
|
||||
return context.map.get(key)
|
||||
} else {
|
||||
const newContext = new DeepSet()
|
||||
context.map.set(key, newContext)
|
||||
return newContext
|
||||
}
|
||||
}, this).set
|
||||
}
|
||||
|
||||
has(values) {
|
||||
const { keys, value } = destruct(values)
|
||||
return this._reduce(keys).has(value)
|
||||
}
|
||||
|
||||
add(values) {
|
||||
const { keys, value } = destruct(values)
|
||||
return this._reduce(keys).add(value)
|
||||
}
|
||||
}
|
||||
|
||||
const destruct = xs => {
|
||||
switch (xs.length) {
|
||||
case 0:
|
||||
return { keys: [], value: undefined }
|
||||
|
||||
case 1:
|
||||
return { keys: [], value: xs[0] }
|
||||
|
||||
default:
|
||||
return { keys: xs.slice(0, xs.length - 1), value: xs.slice(-1)[0] }
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
import Observable from 'core-js-pure/features/observable'
|
||||
import { observableToAsyncIterator, raceIterator } from '/services/misc.js'
|
||||
import getUrls from 'get-urls'
|
||||
import { observableToAsyncIterator, raceIterator, urlsToMedia } from '/services/misc.js'
|
||||
|
||||
const LINK_RE = /<(.+?)>; rel="(\w+)"/gi
|
||||
|
||||
@@ -13,7 +14,9 @@ function parseLinkHeader(link) {
|
||||
return links
|
||||
}
|
||||
|
||||
export const fetchStatus = (domain, id) => fetch(`https://${domain}/api/v1/statuses/${id}`).then(x => x.json())
|
||||
export const fetchStatus = (domain, id) => fetch(`https://${domain}/api/v1/statuses/${id}`)
|
||||
.then(response => response.json())
|
||||
.then(status => processStatus(domain, status))
|
||||
|
||||
// Observable<{ domain : string, hashtag : string, status : Status}>
|
||||
export const hashtagStreamingObservable = (domain, hashtag) => {
|
||||
@@ -43,6 +46,7 @@ export const hashtagStreamingObservable = (domain, hashtag) => {
|
||||
eventSource.removeEventListener('open', onOpen)
|
||||
eventSource.removeEventListener('update', onStatus)
|
||||
eventSource.removeEventListener('error', onError)
|
||||
eventSource.close()
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -92,14 +96,13 @@ export async function* hashtagsIterator (domain, hashtags) {
|
||||
}
|
||||
|
||||
const processStatus = (domain, status) => ({
|
||||
title: null,
|
||||
username: status.account.username,
|
||||
title: '',
|
||||
date: new Date(status.createdAt),
|
||||
content: status.content,
|
||||
referer: {
|
||||
username: status.account.username,
|
||||
url: status.url,
|
||||
credentials: { type: 'mastodon', domain, id: status.id }
|
||||
},
|
||||
media: null
|
||||
media: urlsToMedia(getUrls(status.content))
|
||||
})
|
||||
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import getUrls from 'get-urls'
|
||||
import { execPipe, asyncFilter, asyncMap, map, findOr } from 'iter-tools'
|
||||
|
||||
export const tap = f => x => {
|
||||
@@ -92,129 +91,82 @@ export async function* raceIterator(iterators) {
|
||||
}
|
||||
}
|
||||
|
||||
const mkMapSet = () => ({ set: new Set(), children: new Map() })
|
||||
|
||||
const pathSet = () => {
|
||||
const root = mkMapSet()
|
||||
|
||||
const has = (keys, value) => {
|
||||
let x = root
|
||||
|
||||
for (const key of keys) {
|
||||
if (x.children.has(key)) {
|
||||
x = x.children.get(key)
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return x.set.has(value)
|
||||
}
|
||||
|
||||
const add = (keys, value) => {
|
||||
let x = root
|
||||
|
||||
for (const key of keys) {
|
||||
if (!x.children.has(key)) {
|
||||
x.children.set(key, mkMapSet())
|
||||
}
|
||||
|
||||
x = x.children.get(key)
|
||||
}
|
||||
|
||||
x.set.add(value)
|
||||
}
|
||||
|
||||
return { root, has, add }
|
||||
}
|
||||
|
||||
export async function* tracksIterator(statusesIterator) {
|
||||
const known = pathSet()
|
||||
|
||||
export async function* tracksIterator(statusesIterator, cache) {
|
||||
yield* execPipe(
|
||||
statusesIterator,
|
||||
asyncFilter(knownByReferer(known)),
|
||||
asyncMap(processReferer),
|
||||
asyncFilter(knownByMedia(known)),
|
||||
asyncMap(processMedia)
|
||||
asyncFilter(track => track != null), // should not be necessary
|
||||
asyncFilter(notKnown(cache)),
|
||||
asyncMap(completeTrack)
|
||||
)
|
||||
}
|
||||
|
||||
const knownByReferer = known => track => {
|
||||
const notKnown = cache => track => {
|
||||
if (!track) {
|
||||
console.error(`No status, should not happen here`)
|
||||
return false
|
||||
} else {
|
||||
switch (track.referer.credentials.type) {
|
||||
default:
|
||||
throw new Error()
|
||||
|
||||
case 'mastodon':
|
||||
const path = [
|
||||
'referer',
|
||||
'mastodon',
|
||||
track.referer.credentials.domain
|
||||
]
|
||||
|
||||
const id = track.referer.credentials.id
|
||||
|
||||
if (known.has(path, id)) {
|
||||
console.log(`Drop already processed referer ${id}`)
|
||||
return false
|
||||
} else {
|
||||
known.add(path, id)
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const knownByMedia = known => track => {
|
||||
if (track !== null) {
|
||||
switch (track.media.credentials.type) {
|
||||
default:
|
||||
throw new Error()
|
||||
|
||||
case 'youtube':
|
||||
const path = [
|
||||
'media',
|
||||
'youtube'
|
||||
]
|
||||
|
||||
const id = track.media.credentials.id
|
||||
|
||||
if (known.has(path, id)) {
|
||||
console.log(`Drop already processed media ${id}`)
|
||||
return false
|
||||
} else {
|
||||
known.add(path, id)
|
||||
return true
|
||||
}
|
||||
}
|
||||
} else {
|
||||
console.error(`No track, should not happen here`)
|
||||
return false
|
||||
}
|
||||
|
||||
const isKnown = (values) => {
|
||||
if (cache.has(values)) {
|
||||
console.log(`Drop already processed ${values.join(':')}`)
|
||||
return true
|
||||
} else {
|
||||
cache.add(values)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
switch (track.referer.credentials.type) {
|
||||
default:
|
||||
throw new Error()
|
||||
|
||||
case 'mastodon':
|
||||
if (isKnown([
|
||||
'referer',
|
||||
'mastodon',
|
||||
track.referer.credentials.domain,
|
||||
track.referer.credentials.id
|
||||
])) {
|
||||
return false
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
if (track.media == null) {
|
||||
return false
|
||||
}
|
||||
|
||||
switch (track.media.credentials.type) {
|
||||
default:
|
||||
throw new Error()
|
||||
|
||||
case 'youtube':
|
||||
if (isKnown([
|
||||
'media',
|
||||
'youtube',
|
||||
track.media.credentials.id
|
||||
])) {
|
||||
return false
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
const processReferer = track => {
|
||||
const urls = getUrls(track.content)
|
||||
const completeTrack = async track => {
|
||||
const metadata = await fetchMetadata(track.media)
|
||||
return { ...track, title: metadata.title }
|
||||
}
|
||||
|
||||
const media = execPipe(
|
||||
export const urlsToMedia = urls => {
|
||||
return execPipe(
|
||||
urls,
|
||||
map(parseSource),
|
||||
findOr(null, x => x !== null)
|
||||
)
|
||||
|
||||
if (media) {
|
||||
return { ...track, media }
|
||||
} else {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
const processMedia = async track => {
|
||||
const metadata = await fetchMetadata(track.media)
|
||||
return { ...track, title: metadata.title }
|
||||
}
|
||||
|
||||
const parseSource = (url) => {
|
||||
|
||||
9
src/services/radio.js
Normal file
9
src/services/radio.js
Normal file
@@ -0,0 +1,9 @@
|
||||
import { asyncPrepend } from 'iter-tools'
|
||||
import { hashtagsIterator } from '/services/mastodon.js'
|
||||
import { tracksIterator } from '/services/misc.js'
|
||||
|
||||
export const radioIterator = (domain, hashtags, cache) =>
|
||||
tracksIterator(hashtagsIterator(domain, hashtags), cache)
|
||||
|
||||
export const radioShareIterator = (track, domain, hashtags, cache) =>
|
||||
tracksIterator(asyncPrepend(track, hashtagsIterator(domain, hashtags)), cache)
|
||||
Reference in New Issue
Block a user