enhance queue entries

wryk 2020-01-11 16:57:59 +01:00
parent e6a2a6d461
commit 3fb3773dfb
4 changed files with 89 additions and 60 deletions

View File

@@ -22,18 +22,18 @@
 			</div>
 			<div>
-				<div>{entry.data.metadata.title}</div>
+				<div>{entry.metadata.title}</div>
 				<div>
-					<small>{entry.url}</small>
-					<b>{entry.status.account.username} <small>{entry.status.account.acct}</small></b>
+					<b>{entry.status.account.username} <small style="color: dimgray">{entry.status.account.acct}</small></b>
+					{entry.data.url}
 				</div>
 			</div>
 		</div>
 	{/each}
 </div>
-<button on:click={() => entries.load(5)}>LOAD MOAR</button>
+<button on:click={() => entries.load(5)}>LOAD 5 MOAR</button>
 </section>

 <section class="controls">
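
The rewritten template reads the title from entry.metadata, the account from entry.status, and the link from entry.data, which lines up with the { status, data, metadata } track objects built in util.js further down. A rough sketch of that assumed shape, with invented placeholder values:

    // Illustrative only: approximate shape of a queue entry after this change.
    const entry = {
        status: {
            // original Mastodon status the link was found in
            account: { username: 'alice', acct: 'alice@example.social' },
            content: '<p>status text containing a link</p>'
        },
        data: {
            // YouTube link extracted by mkData()
            url: 'https://www.youtube.com/watch?v=VIDEO_ID',
            id: 'VIDEO_ID'
        },
        metadata: {
            // oEmbed document fetched from noembed by mkMetadata()
            title: 'Some track title'
        }
    }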
@@ -57,7 +57,7 @@
 	}

 	onMount(() => {
 		const unsubscribe = entries.subscribe(async (xs) => {
 			if (xs.length) {
 				const [firstEntry] = xs
 				currentEntry.set(firstEntry)
@@ -65,7 +65,7 @@
 			}
 		})

-		entries.load(7)
+		entries.load(1)
 	})
 </script>

View File

@@ -28,7 +28,6 @@
 	$: updateVolume($volume)

 	const updateViewerDurationCallback = () => {
-		console.log('update')
 		if (player) {
 			duration = player.getDuration()
 			currentTime = player.getCurrentTime()
@@ -97,6 +96,11 @@
 			console.log('unplayable', ...args)
 			entry.next()
 		})
+
+		player.on('error', (...args) => {
+			console.log('error', ...args)
+			entry.next()
+		})
 	})

 	onDestroy(() => {
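
The added handler treats the player's 'error' event the same way as 'unplayable': log it and skip to the next queue entry. A minimal standalone sketch of that wiring, assuming the yt-player package already used by this component and a hypothetical skipToNext callback in place of the store's entry.next():

    import YouTubePlayer from 'yt-player'

    // Hypothetical wiring sketch, not the component's actual code.
    const player = new YouTubePlayer('#player')

    // Skip ahead whenever playback cannot proceed, mirroring the handlers above.
    player.on('unplayable', () => skipToNext())
    player.on('error', () => skipToNext())

    function playEntry(entry) {
        player.load(entry.data.id) // YouTube video id extracted from the status
        player.play()
    }

    function skipToNext() {
        // stands in for entry.next() in the component's store
    }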

View File

@@ -72,7 +72,6 @@ function entryStore(entries) {
 				entries.load(1)
 			}

 			return entriesList[nextIndex]
 		})
 	}
@@ -81,19 +80,19 @@ function entryStore(entries) {
 }

 function entriesStore(domain, hashtags) {
-	const entriesSteam = util.statusesToEntries(util.statusesStreaming(domain, hashtags))
+	const tracksIterator = util.mkTracksIterator(domain, hashtags)

 	const store = writable([])
 	const { update, subscribe } = store

 	const load = async (number) => {
 		for (let i = 0; i < number; i++) {
-			const iteratorResult = await entriesSteam.next()
+			const iteratorResult = await tracksIterator.next()

 			if (iteratorResult.value) {
-				// console.log(iteratorResult.value)
 				update(entries => [...entries, iteratorResult.value])
 			} else {
+				// iterator don't have new entries for now
 				break
 			}
 		}
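
entriesStore pairs a writable list with an async load method, the usual custom Svelte store shape: expose subscribe so components can use the $ syntax, plus domain methods for pulling more items on demand. A minimal generic sketch of that pattern, with hypothetical names and an iterator-like fetchNext source:

    import { writable } from 'svelte/store'

    // Generic sketch of the custom store shape used here (hypothetical names).
    function listStore(fetchNext) {
        const { update, subscribe } = writable([])

        // Pull up to `count` items from an iterator-like source and append them.
        const load = async (count) => {
            for (let i = 0; i < count; i++) {
                const { value, done } = await fetchNext()
                if (done || value === undefined) break
                update(items => [...items, value])
            }
        }

        return { subscribe, load }
    }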

View File

@@ -1,36 +1,81 @@
 import getUrls from 'get-urls'
-import { pipe, asyncFilter, asyncMap, asyncTap, asyncTake } from 'iter-tools'
-import YouTubePlayer from 'yt-player'
+import { execPipe, asyncFilter, asyncMap } from 'iter-tools'

-export async function* statusesStreaming(domain, [hashtag]) {
-	const initialLink = `https://${domain}/api/v1/timelines/tag/${hashtag}?limit=40`
-
-	let { statuses, nextLink, previousLink } = await fetchTimeline(initialLink)
-
-	yield* statuses
-
-	while (nextLink) {
-		const a = await fetchTimeline(nextLink)
-
-		nextLink = a.nextLink
-		yield* a.statuses
+const millisecond = 1
+const second = 1000 * millisecond
+const minute = 60 * second
+
+export async function* mkStatusesIterator(initialLink) {
+	let buffer = []
+	let { previousLink, nextLink } = initialLink
+
+	console.log('fetch initial')
+	const initial = await fetchTimeline(initialLink)
+	let latestPreviousFetch = Date.now()
+
+	if (initial.statuses.length > 0) {
+		buffer = [...initial.statuses]
+		previousLink = initial.links.prev
+		nextLink = initial.links.next
+	}
+
+	yield buffer.shift()
+
+	while (true) {
+		const now = Date.now()
+
+		if (latestPreviousFetch + 5 * minute < now) {
+			console.log('fetch newer')
+			const previous = await fetchTimeline(previousLink)
+			console.log(`${previous.length} newers`)
+			buffer.unshift(...previous.statuses)
+			previousLink = previous.links.prev
+			latestPreviousFetch = now
+		}
+
+		if (buffer.length === 0) {
+			console.log('fetch older')
+			const next = await fetchTimeline(nextLink)
+			buffer.push(...next.statuses)
+			nextLink = next.links.next
+		}
+
+		yield buffer.shift()
 	}
 }

-export const statusesToEntries = pipe(
-	asyncMap(statusToEntry),
-	asyncFilter(entry => entry.type !== 'unsupported')
-)
+export async function* mkTracksIterator(domain, hashtags) {
+	const known = new Set()
+	const [hashtag] = hashtags
+
+	const statuses = mkStatusesIterator(`https://${domain}/api/v1/timelines/tag/${hashtag}?limit=40`)
+
+	const tracks = execPipe(
+		statuses,
+		asyncMap(status => ({ status, data: mkData(status) })),
+		asyncFilter(({ data }) => {
+			if (data) {
+				const found = known.has(data.id)
+				known.add(data.id)
+				return !found
+			}
+
+			return false
+		}),
+		asyncMap(async ({ status, data }) => ({ status, data, metadata: await mkMetadata(data) }))
+	)
+
+	yield* tracks
+}

 export async function fetchTimeline(url) {
+	const urlBuilder = new URL(url)
+	urlBuilder.searchParams.set('limit', 40)
+
 	const response = await fetch(url)
 	const statuses = await response.json()
-	const { next, previous } = parseLinkHeader(response.headers.get('link'))

-	return { statuses, nextLink: next, previousLink: previous }
+	const links = response.headers.has('link')
+		? parseLinkHeader(response.headers.get('link'))
+		: {}
+
+	return { statuses, links }
 }

 const LINK_RE = /<(.+?)>; rel="(\w+)"/gi
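
fetchTimeline now returns whatever pagination links the server advertises in the Link response header (Mastodon timelines use rel="next" and rel="prev"), falling back to an empty object when the header is absent. For illustration, a sketch of how a regex like LINK_RE splits such a header, using an invented example value:

    // Invented example of a Mastodon-style Link header value.
    const link =
        '<https://example.social/api/v1/timelines/tag/music?max_id=100>; rel="next", ' +
        '<https://example.social/api/v1/timelines/tag/music?min_id=200>; rel="prev"'

    const LINK_RE = /<(.+?)>; rel="(\w+)"/gi

    const links = {}
    for (const [, url, rel] of link.matchAll(LINK_RE)) {
        links[rel] = url
    }
    // links is now { next: '...max_id=100', prev: '...min_id=200' }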
@@ -45,44 +90,25 @@ function parseLinkHeader(link)
 	return links
 }

-async function statusToEntry(status) {
+function mkData(status)
+{
 	const urls = getUrls(status.content)

-	for await (const url of urls) {
-		const { type, data } = await urlToEntry(url)
-		if (type !== 'unsupported') {
-			return { status, url, type, data }
+	for (const urlAsString of urls) {
+		const url = new URL(urlAsString)
+
+		if (['youtube.com', 'music.youtube.com'].includes(url.hostname) && url.searchParams.has('v')) {
+			return { url: urlAsString, id: url.searchParams.get('v') }
+		} else if (url.hostname === 'youtu.be') {
+			return { url: urlAsString, id: url.pathname.substring(1) }
 		}
 	}

-	return { type: 'unsupported' }
+	return null
 }

-async function urlToEntry(urlAsString) {
-	const url = new URL(urlAsString)
-
-	if (['youtube.com', 'music.youtube.com'].includes(url.hostname) && url.searchParams.has('v')) {
-		return await mkYoutubeEntry(url.searchParams.get('v'))
-	} else if (url.hostname === 'youtu.be') {
-		return await mkYoutubeEntry(url.pathname.substring(1))
-	} else {
-		return { type: 'unsupported' }
-	}
-}
-
-async function mkYoutubeEntry(id) {
-	return {
-		type: 'youtube',
-		data: {
-			id,
-			metadata: await fetchYoutubeMetadata(id)
-		}
-	}
-}
-
-function fetchYoutubeMetadata(id) {
-	return fetch(`https://noembed.com/embed?url=https://www.youtube.com/watch?v=${id}`)
+async function mkMetadata(entry) {
+	return fetch(`https://noembed.com/embed?url=https://www.youtube.com/watch?v=${entry.id}`)
 		.then(response => response.json())
 }
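
Taken together, the rewritten module exposes a single open-ended async iterator of tracks per tag timeline; since mkStatusesIterator loops forever, consumers decide when to stop pulling (the store does this through load(n)). A hedged usage sketch, assuming the module is imported as util as in the store file, with an invented domain and hashtag:

    import * as util from './util' // path assumed for illustration

    // Pull and print the first few tracks from a tag timeline.
    async function preview(domain, hashtags, limit = 3) {
        const tracks = util.mkTracksIterator(domain, hashtags)

        for await (const track of tracks) {
            console.log(track.data.id, track.metadata.title, track.status.account.acct)
            if (--limit === 0) break // the iterator never ends on its own
        }
    }

    // preview('mastodon.example', ['np'])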