Initial Commit

This commit is contained in:
LowEel 2020-10-08 21:33:26 +02:00
parent 69ac5bcdb0
commit 923709f1d1
175 changed files with 187744 additions and 0 deletions

4
.gitignore vendored Normal file

@ -0,0 +1,4 @@
*.sh
zorg.conf
zorg
.vscode/*

26
0_gc.go Normal file

@ -0,0 +1,26 @@
package main
import (
"log"
"runtime"
"time"
)
func init() {
log.Println("Garbage Collector Thread Starting")
go memoryCleanerThread()
}
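// memoryCleanerThread forces a garbage collection run every Zint interval (the configured poll interval).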
func memoryCleanerThread() {
for {
time.Sleep(Zint)
log.Println("Time to clean memory...")
runtime.GC()
log.Println("Garbage Collection done.")
}
}

90
README.md Normal file

@ -0,0 +1,90 @@
# Zorg
Zorg is a small bot which can follow RSS feeds and post the new entries to your Mastodon-API-compatible pod.
So far I have tested it with Pleroma, and it works.
To install, just clone this repository and build it:
```
git clone https://git.keinpfusch.net/git/Loweel/zorg.git
cd zorg
go build
```
In order to use it, you need app credentials for your specific pod, which you obtain when you register your app.
In case your pod makes this too complicated, you can just use the following Go program:
```
package main
import (
"context"
"fmt"
"log"
"github.com/mattn/go-mastodon"
)
func main() {
app, err := mastodon.RegisterApp(context.Background(), &mastodon.AppConfig{
Server: "https://yourpodhere.put",
ClientName: "client-name",
Scopes: "read write follow",
Website: "where your application has a website",
})
if err != nil {
log.Fatal(err)
}
fmt.Printf("client-id : %s\n", app.ClientID)
fmt.Printf("client-secret: %s\n", app.ClientSecret)
}
```
Just change the values to match your pod, and it will print a new ClientID and ClientSecret.
Once you have both, you can fill in the zorg configuration file, which is JSON, and save it as zorg.conf:
```
{
"ZorgServer":"https://example-pleroma.net",
"ZorgClientID": "aqwfgqubvqerb348hü13vhnrqvqerg1ü3ohrgvqervq",
"ZorgClientSecret": "qergerinqieorjhgqrijhg+3higqirgqirjgqerjgqq",
"ZorgUname": "johndoe",
"ZorgPass" : "lalalalalalalala",
"ZorgInterval": 7200
}
```
Please note that ZorgUname and ZorgPass are the credentials of the user the RSS feeds will be published with.
ZorgInterval is the polling interval, in seconds, for all the feeds (7200 in the example above means one poll every two hours).
The list of RSS feeds must be in a file named feeds.conf, which is just a one-URL-per-line list of RSS feed URLs, like:
```
https://www.youtube.com/feeds/videos.xml?channel_id=UCDmCBKaKOtOrEqgsL4-3C8Q
https://www.youtube.com/feeds/videos.xml?channel_id=UCYcXk-yEg9LgyAPm7mpIs-g
https://www.youtube.com/feeds/videos.xml?channel_id=UCNvsIonJdJ5E4EXMa65VYpA
https://www.youtube.com/feeds/videos.xml?channel_id=UCeYbbVx7CQn6tbkkmpPldTg
https://www.youtube.com/feeds/videos.xml?channel_id=UCvKejuca1oOhFRXpZpSn3ew
https://blog.soykaf.com/post/index.xml
https://blog.golang.org/feed.atom
https://writings.stephenwolfram.com/feed/
https://www.youtube.com/feeds/videos.xml?channel_id=UCwUizOU8pPWXdXNniXypQEQ
https://www.youtube.com/feeds/videos.xml?channel_id=UCuCYsYBaq3j0gM4wWo82LkQ
```
Have fun.
TODO:
- Post more than one item per feed poll.

45
env.go Normal file

@ -0,0 +1,45 @@
package main
import (
"encoding/json"
"io/ioutil"
"log"
"os"
"time"
)
// ZorgConfig is the configuration of Zorg.
var ZorgConfig struct {
ZorgServer string `json:"ZorgServer"`
ZorgClientID string `json:"ZorgClientID"`
ZorgClientSecret string `json:"ZorgClientSecret"`
ZorgUname string `json:"ZorgUname"`
ZorgPass string `json:"ZorgPass"`
ZorgInterval int `json:"ZorgInterval"`
}
// Zint is the poll time
var Zint time.Duration
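// init loads zorg.conf at startup and derives the poll interval Zint from ZorgInterval (given in seconds).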
func init() {
//reading json file
file, err := ioutil.ReadFile("zorg.conf")
if err != nil {
log.Println("Cannot open config file", err.Error())
os.Exit(1)
}
err = json.Unmarshal(file, &ZorgConfig)
if err != nil {
log.Println("Cannot unmarshal json: ", err.Error())
os.Exit(1)
}
Zint = time.Duration(ZorgConfig.ZorgInterval) * time.Second
log.Println("Initialized ZORG")
}

93
feed.go Normal file

@ -0,0 +1,93 @@
package main
import (
"bufio"
"fmt"
"log"
"os"
"time"
"github.com/mmcdole/gofeed"
)
func init() {
go feedDaemon()
log.Println("Feed daemon started.")
}
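// forwardLastFeed fetches the feed at url and, if its newest item was published within the last poll interval, posts it to the configured Mastodon server.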
func forwardLastFeed(url string) {
fp := gofeed.NewParser()
feed, err := fp.ParseURL(url)
if err != nil {
log.Println(err)
return
}
var b *gofeed.Item
if len(feed.Items) > 0 {
b = feed.Items[0]
log.Printf("%d items from feed: %s\n", len(feed.Items), feed.Title)
} else {
return
}
if b.PublishedParsed != nil && time.Since(*b.PublishedParsed) < Zint {
postOnMastodon(b.Link, "["+feed.Title+"] "+b.Title)
log.Println("New content from: ", feed.Title, b.Title)
} else {
log.Println("No new content from: ", feed.Title)
}
}
// feedDaemon polls every feed listed in feeds.conf once at startup and then on every tick of the Zint interval.
func feedDaemon() {
ticker := time.NewTicker(Zint)
defer ticker.Stop()
scanFeeds(fileByLines("feeds.conf"))
log.Println("RSS poll done. Next in ", Zint.String())
for range ticker.C {
scanFeeds(fileByLines("feeds.conf"))
log.Println("RSS poll done. Next in ", Zint.String())
}
}
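// scanFeeds forwards the latest item of every feed URL in the given list.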
func scanFeeds(fiids []string) {
for _, a := range fiids {
forwardLastFeed(a)
}
}
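// fileByLines reads the given file and returns its lines as a slice of strings.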
func fileByLines(filename string) (blurls []string) {
file, err := os.Open(filename)
if err != nil {
fmt.Println(err.Error())
return
}
defer file.Close()
scanner := bufio.NewScanner(file)
for scanner.Scan() {
d := scanner.Text()
blurls = append(blurls, d)
}
if err := scanner.Err(); err != nil {
fmt.Println(err.Error())
}
return
}

10
feeds.conf Normal file

@ -0,0 +1,10 @@
https://www.youtube.com/feeds/videos.xml?channel_id=UCDmCBKaKOtOrEqgsL4-3C8Q
https://www.youtube.com/feeds/videos.xml?channel_id=UCYcXk-yEg9LgyAPm7mpIs-g
https://www.youtube.com/feeds/videos.xml?channel_id=UCNvsIonJdJ5E4EXMa65VYpA
https://www.youtube.com/feeds/videos.xml?channel_id=UCeYbbVx7CQn6tbkkmpPldTg
https://www.youtube.com/feeds/videos.xml?channel_id=UCvKejuca1oOhFRXpZpSn3ew
https://blog.soykaf.com/post/index.xml
https://blog.golang.org/feed.atom
https://writings.stephenwolfram.com/feed/
https://www.youtube.com/feeds/videos.xml?channel_id=UCwUizOU8pPWXdXNniXypQEQ
https://www.youtube.com/feeds/videos.xml?channel_id=UCuCYsYBaq3j0gM4wWo82LkQ

8
go.mod Normal file

@ -0,0 +1,8 @@
module zorg
go 1.13
require (
github.com/mattn/go-mastodon v0.0.4
github.com/mmcdole/gofeed v1.0.0
)

38
go.sum Normal file

@ -0,0 +1,38 @@
github.com/PuerkitoBio/goquery v1.5.0 h1:uGvmFXOA73IKluu/F84Xd1tt/z07GYm8X49XKHP7EJk=
github.com/PuerkitoBio/goquery v1.5.0/go.mod h1:qD2PgZ9lccMbQlc7eEOjaeRlFQON7xY8kdmcsrnKqMg=
github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
github.com/codegangsta/cli v1.20.0/go.mod h1:/qJNoX69yVSKu5o4jLyXAENLRyk1uhi7zkbQ3slBdOA=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-mastodon v0.0.4 h1:+F2RbXbHkiBfx6SXMJEvEwZ0i8pI9nMnZhKkvjxq9Rs=
github.com/mattn/go-mastodon v0.0.4/go.mod h1:ZBkemyyYYhNAN5JJ0H/ZSW8HfPCW45rHFHyWNwSfpTA=
github.com/mattn/go-tty v0.0.0-20190424173100-523744f04859/go.mod h1:XPvLUNfbS4fJH25nqRHfWLMa1ONC8Amw+mIA639KxkE=
github.com/mmcdole/gofeed v1.0.0 h1:PHqwr8fsEm8xarj9s53XeEAFYhRM3E9Ib7Ie766/LTE=
github.com/mmcdole/gofeed v1.0.0/go.mod h1:tkVcyzS3qVMlQrQxJoEH1hkTiuo9a8emDzkMi7TZBu0=
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf h1:sWGE2v+hO0Nd4yFU/S/mDBM5plIU8v/Qhfz41hkDIAI=
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf/go.mod h1:pasqhqstspkosTneA62Nc+2p9SOBBYAPbnmRRWPQ0V8=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 h1:nrZ3ySNYwJbSpD6ce9duiP+QkD3JuLCcWkdaehUS/3Y=
github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80/go.mod h1:iFyPdL66DjUD96XmzVL3ZntbzcflLnznH0fr99w5VqE=
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190509222800-a4d6f7feada5 h1:6M3SDHlHHDCx2PcQw3S4KsR170vGqDhJDOmpVd4Hjak=
golang.org/x/net v0.0.0-20190509222800-a4d6f7feada5/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190509141414-a5b02f93d862/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=

30
mast.go Normal file

@ -0,0 +1,30 @@
package main
import (
"context"
"log"
"github.com/mattn/go-mastodon"
)
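// postOnMastodon authenticates against the configured server and posts message as a new status, using title as the spoiler (content warning) text.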
func postOnMastodon(message, title string) {
c := mastodon.NewClient(&mastodon.Config{
Server: ZorgConfig.ZorgServer,
ClientID: ZorgConfig.ZorgClientID,
ClientSecret: ZorgConfig.ZorgClientSecret,
})
err := c.Authenticate(context.Background(), ZorgConfig.ZorgUname, ZorgConfig.ZorgPass)
if err != nil {
log.Println("Cannot authenticate: ", err)
return
}
log.Println("Authenticated to the server: ", ZorgConfig.ZorgServer)
if _, err := c.PostStatus(context.Background(), &mastodon.Toot{
Status: message,
SpoilerText: title,
}); err != nil {
log.Println("Cannot post status: ", err)
}
}

1
vendor/github.com/PuerkitoBio/goquery/.gitattributes generated vendored Normal file

@ -0,0 +1 @@
testdata/* linguist-vendored

16
vendor/github.com/PuerkitoBio/goquery/.gitignore generated vendored Normal file

@ -0,0 +1,16 @@
# editor temporary files
*.sublime-*
.DS_Store
*.swp
#*.*#
tags
# direnv config
.env*
# test binaries
*.test
# coverage and profile outputs
*.out

16
vendor/github.com/PuerkitoBio/goquery/.travis.yml generated vendored Normal file

@ -0,0 +1,16 @@
language: go
go:
- 1.1
- 1.2.x
- 1.3.x
- 1.4.x
- 1.5.x
- 1.6.x
- 1.7.x
- 1.8.x
- 1.9.x
- "1.10.x"
- 1.11.x
- tip

12
vendor/github.com/PuerkitoBio/goquery/LICENSE generated vendored Normal file

@ -0,0 +1,12 @@
Copyright (c) 2012-2016, Martin Angers & Contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

179
vendor/github.com/PuerkitoBio/goquery/README.md generated vendored Normal file

@ -0,0 +1,179 @@
# goquery - a little like that j-thing, only in Go
[![build status](https://secure.travis-ci.org/PuerkitoBio/goquery.svg?branch=master)](http://travis-ci.org/PuerkitoBio/goquery) [![GoDoc](https://godoc.org/github.com/PuerkitoBio/goquery?status.png)](http://godoc.org/github.com/PuerkitoBio/goquery) [![Sourcegraph Badge](https://sourcegraph.com/github.com/PuerkitoBio/goquery/-/badge.svg)](https://sourcegraph.com/github.com/PuerkitoBio/goquery?badge)
goquery brings a syntax and a set of features similar to [jQuery][] to the [Go language][go]. It is based on Go's [net/html package][html] and the CSS Selector library [cascadia][]. Since the net/html parser returns nodes, and not a full-featured DOM tree, jQuery's stateful manipulation functions (like height(), css(), detach()) have been left off.
Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML. See the [wiki][] for various options to do this.
Syntax-wise, it is as close as possible to jQuery, with the same function names when possible, and that warm and fuzzy chainable interface. jQuery being the ultra-popular library that it is, I felt that writing a similar HTML-manipulating library was better to follow its API than to start anew (in the same spirit as Go's `fmt` package), even though some of its methods are less than intuitive (looking at you, [index()][index]...).
## Table of Contents
* [Installation](#installation)
* [Changelog](#changelog)
* [API](#api)
* [Examples](#examples)
* [Related Projects](#related-projects)
* [Support](#support)
* [License](#license)
## Installation
Please note that because of the net/html dependency, goquery requires Go1.1+.
$ go get github.com/PuerkitoBio/goquery
(optional) To run unit tests:
$ cd $GOPATH/src/github.com/PuerkitoBio/goquery
$ go test
(optional) To run benchmarks (warning: it runs for a few minutes):
$ cd $GOPATH/src/github.com/PuerkitoBio/goquery
$ go test -bench=".*"
## Changelog
**Note that goquery's API is now stable, and will not break.**
* **2018-11-15 (v1.5.0)** : Go module support (thanks @Zaba505).
* **2018-06-07 (v1.4.1)** : Add `NewDocumentFromReader` examples.
* **2018-03-24 (v1.4.0)** : Deprecate `NewDocument(url)` and `NewDocumentFromResponse(response)`.
* **2018-01-28 (v1.3.0)** : Add `ToEnd` constant to `Slice` until the end of the selection (thanks to @davidjwilkins for raising the issue).
* **2018-01-11 (v1.2.0)** : Add `AddBack*` and deprecate `AndSelf` (thanks to @davidjwilkins).
* **2017-02-12 (v1.1.0)** : Add `SetHtml` and `SetText` (thanks to @glebtv).
* **2016-12-29 (v1.0.2)** : Optimize allocations for `Selection.Text` (thanks to @radovskyb).
* **2016-08-28 (v1.0.1)** : Optimize performance for large documents.
* **2016-07-27 (v1.0.0)** : Tag version 1.0.0.
* **2016-06-15** : Invalid selector strings internally compile to a `Matcher` implementation that never matches any node (instead of a panic). So for example, `doc.Find("~")` returns an empty `*Selection` object.
* **2016-02-02** : Add `NodeName` utility function similar to the DOM's `nodeName` property. It returns the tag name of the first element in a selection, and other relevant values of non-element nodes (see godoc for details). Add `OuterHtml` utility function similar to the DOM's `outerHTML` property (named `OuterHtml` in small caps for consistency with the existing `Html` method on the `Selection`).
* **2015-04-20** : Add `AttrOr` helper method to return the attribute's value or a default value if absent. Thanks to [piotrkowalczuk][piotr].
* **2015-02-04** : Add more manipulation functions - Prepend* - thanks again to [Andrew Stone][thatguystone].
* **2014-11-28** : Add more manipulation functions - ReplaceWith*, Wrap* and Unwrap - thanks again to [Andrew Stone][thatguystone].
* **2014-11-07** : Add manipulation functions (thanks to [Andrew Stone][thatguystone]) and `*Matcher` functions, that receive compiled cascadia selectors instead of selector strings, thus avoiding potential panics thrown by goquery via `cascadia.MustCompile` calls. This results in better performance (selectors can be compiled once and reused) and more idiomatic error handling (you can handle cascadia's compilation errors, instead of recovering from panics, which had been bugging me for a long time). Note that the actual type expected is a `Matcher` interface, that `cascadia.Selector` implements. Other matcher implementations could be used.
* **2014-11-06** : Change import paths of net/html to golang.org/x/net/html (see https://groups.google.com/forum/#!topic/golang-nuts/eD8dh3T9yyA). Make sure to update your code to use the new import path too when you call goquery with `html.Node`s.
* **v0.3.2** : Add `NewDocumentFromReader()` (thanks jweir) which allows creating a goquery document from an io.Reader.
* **v0.3.1** : Add `NewDocumentFromResponse()` (thanks assassingj) which allows creating a goquery document from an http response.
* **v0.3.0** : Add `EachWithBreak()` which allows to break out of an `Each()` loop by returning false. This function was added instead of changing the existing `Each()` to avoid breaking compatibility.
* **v0.2.1** : Make go-getable, now that [go.net/html is Go1.0-compatible][gonet] (thanks to @matrixik for pointing this out).
* **v0.2.0** : Add support for negative indices in Slice(). **BREAKING CHANGE** `Document.Root` is removed, `Document` is now a `Selection` itself (a selection of one, the root element, just like `Document.Root` was before). Add jQuery's Closest() method.
* **v0.1.1** : Add benchmarks to use as baseline for refactorings, refactor Next...() and Prev...() methods to use the new html package's linked list features (Next/PrevSibling, FirstChild). Good performance boost (40+% in some cases).
* **v0.1.0** : Initial release.
## API
goquery exposes two structs, `Document` and `Selection`, and the `Matcher` interface. Unlike jQuery, which is loaded as part of a DOM document, and thus acts on its containing document, goquery doesn't know which HTML document to act upon. So it needs to be told, and that's what the `Document` type is for. It holds the root document node as the initial Selection value to manipulate.
jQuery often has many variants for the same function (no argument, a selector string argument, a jQuery object argument, a DOM element argument, ...). Instead of exposing the same features in goquery as a single method with variadic empty interface arguments, statically-typed signatures are used following this naming convention:
* When the jQuery equivalent can be called with no argument, it has the same name as jQuery for the no argument signature (e.g.: `Prev()`), and the version with a selector string argument is called `XxxFiltered()` (e.g.: `PrevFiltered()`)
* When the jQuery equivalent **requires** one argument, the same name as jQuery is used for the selector string version (e.g.: `Is()`)
* The signatures accepting a jQuery object as argument are defined in goquery as `XxxSelection()` and take a `*Selection` object as argument (e.g.: `FilterSelection()`)
* The signatures accepting a DOM element as argument in jQuery are defined in goquery as `XxxNodes()` and take a variadic argument of type `*html.Node` (e.g.: `FilterNodes()`)
* The signatures accepting a function as argument in jQuery are defined in goquery as `XxxFunction()` and take a function as argument (e.g.: `FilterFunction()`)
* The goquery methods that can be called with a selector string have a corresponding version that take a `Matcher` interface and are defined as `XxxMatcher()` (e.g.: `IsMatcher()`)
Utility functions that are not in jQuery but are useful in Go are implemented as functions (that take a `*Selection` as parameter), to avoid a potential naming clash on the `*Selection`'s methods (reserved for jQuery-equivalent behaviour).
The complete [godoc reference documentation can be found here][doc].
Please note that Cascadia's selectors do not necessarily match all supported selectors of jQuery (Sizzle). See the [cascadia project][cascadia] for details. Invalid selector strings compile to a `Matcher` that fails to match any node. Behaviour of the various functions that take a selector string as argument follows from that fact, e.g. (where `~` is an invalid selector string):
* `Find("~")` returns an empty selection because the selector string doesn't match anything.
* `Add("~")` returns a new selection that holds the same nodes as the original selection, because it didn't add any node (selector string didn't match anything).
* `ParentsFiltered("~")` returns an empty selection because the selector string doesn't match anything.
* `ParentsUntil("~")` returns all parents of the selection because the selector string didn't match any element to stop before the top element.
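As a small complement to the example in the next section, here is a minimal, self-contained sketch of the selector-string vs. `Matcher` variants and of the invalid-selector behaviour described above (the inline HTML snippet is made up purely for illustration):
```Go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
	"github.com/andybalholm/cascadia"
)

func main() {
	// A throwaway document, used only to demonstrate the API variants.
	doc, err := goquery.NewDocumentFromReader(strings.NewReader("<ul><li>a</li><li>b</li></ul>"))
	if err != nil {
		log.Fatal(err)
	}
	// The selector-string and Matcher variants are equivalent.
	fmt.Println(doc.Find("li").Length())                              // 2
	fmt.Println(doc.FindMatcher(cascadia.MustCompile("li")).Length()) // 2
	// An invalid selector string compiles to a Matcher that matches nothing.
	fmt.Println(doc.Find("~").Length()) // 0
}
```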
## Examples
See some tips and tricks in the [wiki][].
Adapted from example_test.go:
```Go
package main
import (
"fmt"
"log"
"net/http"
"github.com/PuerkitoBio/goquery"
)
func ExampleScrape() {
// Request the HTML page.
res, err := http.Get("http://metalsucks.net")
if err != nil {
log.Fatal(err)
}
defer res.Body.Close()
if res.StatusCode != 200 {
log.Fatalf("status code error: %d %s", res.StatusCode, res.Status)
}
// Load the HTML document
doc, err := goquery.NewDocumentFromReader(res.Body)
if err != nil {
log.Fatal(err)
}
// Find the review items
doc.Find(".sidebar-reviews article .content-block").Each(func(i int, s *goquery.Selection) {
// For each item found, get the band and title
band := s.Find("a").Text()
title := s.Find("i").Text()
fmt.Printf("Review %d: %s - %s\n", i, band, title)
})
}
func main() {
ExampleScrape()
}
```
## Related Projects
- [Goq][goq], an HTML deserialization and scraping library based on goquery and struct tags.
- [andybalholm/cascadia][cascadia], the CSS selector library used by goquery.
- [suntong/cascadia][cascadiacli], a command-line interface to the cascadia CSS selector library, useful to test selectors.
- [asciimoo/colly](https://github.com/asciimoo/colly), a lightning fast and elegant Scraping Framework
- [gnulnx/goperf](https://github.com/gnulnx/goperf), a website performance test tool that also fetches static assets.
- [MontFerret/ferret](https://github.com/MontFerret/ferret), declarative web scraping.
## Support
There are a number of ways you can support the project:
* Use it, star it, build something with it, spread the word!
- If you do build something open-source or otherwise publicly-visible, let me know so I can add it to the [Related Projects](#related-projects) section!
* Raise issues to improve the project (note: doc typos and clarifications are issues too!)
- Please search existing issues before opening a new one - it may have already been addressed.
* Pull requests: please discuss new code in an issue first, unless the fix is really trivial.
- Make sure new code is tested.
- Be mindful of existing code - PRs that break existing code have a high probability of being declined, unless it fixes a serious issue.
If you desperately want to send money my way, I have a BuyMeACoffee.com page:
<a href="https://www.buymeacoffee.com/mna" target="_blank"><img src="https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;-webkit-box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;" ></a>
## License
The [BSD 3-Clause license][bsd], the same as the [Go language][golic]. Cascadia's license is [here][caslic].
[jquery]: http://jquery.com/
[go]: http://golang.org/
[cascadia]: https://github.com/andybalholm/cascadia
[cascadiacli]: https://github.com/suntong/cascadia
[bsd]: http://opensource.org/licenses/BSD-3-Clause
[golic]: http://golang.org/LICENSE
[caslic]: https://github.com/andybalholm/cascadia/blob/master/LICENSE
[doc]: http://godoc.org/github.com/PuerkitoBio/goquery
[index]: http://api.jquery.com/index/
[gonet]: https://github.com/golang/net/
[html]: http://godoc.org/golang.org/x/net/html
[wiki]: https://github.com/PuerkitoBio/goquery/wiki/Tips-and-tricks
[thatguystone]: https://github.com/thatguystone
[piotr]: https://github.com/piotrkowalczuk
[goq]: https://github.com/andrewstuart/goq

124
vendor/github.com/PuerkitoBio/goquery/array.go generated vendored Normal file

@ -0,0 +1,124 @@
package goquery
import (
"golang.org/x/net/html"
)
const (
maxUint = ^uint(0)
maxInt = int(maxUint >> 1)
// ToEnd is a special index value that can be used as end index in a call
// to Slice so that all elements are selected until the end of the Selection.
// It is equivalent to passing (*Selection).Length().
ToEnd = maxInt
)
// First reduces the set of matched elements to the first in the set.
// It returns a new Selection object, and an empty Selection object if
// the selection is empty.
func (s *Selection) First() *Selection {
return s.Eq(0)
}
// Last reduces the set of matched elements to the last in the set.
// It returns a new Selection object, and an empty Selection object if
// the selection is empty.
func (s *Selection) Last() *Selection {
return s.Eq(-1)
}
// Eq reduces the set of matched elements to the one at the specified index.
// If a negative index is given, it counts backwards starting at the end of the
// set. It returns a new Selection object, and an empty Selection object if the
// index is invalid.
func (s *Selection) Eq(index int) *Selection {
if index < 0 {
index += len(s.Nodes)
}
if index >= len(s.Nodes) || index < 0 {
return newEmptySelection(s.document)
}
return s.Slice(index, index+1)
}
// Slice reduces the set of matched elements to a subset specified by a range
// of indices. The start index is 0-based and indicates the index of the first
// element to select. The end index is 0-based and indicates the index at which
// the elements stop being selected (the end index is not selected).
//
// The indices may be negative, in which case they represent an offset from the
// end of the selection.
//
// The special value ToEnd may be specified as end index, in which case all elements
// until the end are selected. This works both for a positive and negative start
// index.
func (s *Selection) Slice(start, end int) *Selection {
if start < 0 {
start += len(s.Nodes)
}
if end == ToEnd {
end = len(s.Nodes)
} else if end < 0 {
end += len(s.Nodes)
}
return pushStack(s, s.Nodes[start:end])
}
// Get retrieves the underlying node at the specified index.
// Get without parameter is not implemented, since the node array is available
// on the Selection object.
func (s *Selection) Get(index int) *html.Node {
if index < 0 {
index += len(s.Nodes) // Negative index gets from the end
}
return s.Nodes[index]
}
// Index returns the position of the first element within the Selection object
// relative to its sibling elements.
func (s *Selection) Index() int {
if len(s.Nodes) > 0 {
return newSingleSelection(s.Nodes[0], s.document).PrevAll().Length()
}
return -1
}
// IndexSelector returns the position of the first element within the
// Selection object relative to the elements matched by the selector, or -1 if
// not found.
func (s *Selection) IndexSelector(selector string) int {
if len(s.Nodes) > 0 {
sel := s.document.Find(selector)
return indexInSlice(sel.Nodes, s.Nodes[0])
}
return -1
}
// IndexMatcher returns the position of the first element within the
// Selection object relative to the elements matched by the matcher, or -1 if
// not found.
func (s *Selection) IndexMatcher(m Matcher) int {
if len(s.Nodes) > 0 {
sel := s.document.FindMatcher(m)
return indexInSlice(sel.Nodes, s.Nodes[0])
}
return -1
}
// IndexOfNode returns the position of the specified node within the Selection
// object, or -1 if not found.
func (s *Selection) IndexOfNode(node *html.Node) int {
return indexInSlice(s.Nodes, node)
}
// IndexOfSelection returns the position of the first node in the specified
// Selection object within this Selection object, or -1 if not found.
func (s *Selection) IndexOfSelection(sel *Selection) int {
if sel != nil && len(sel.Nodes) > 0 {
return indexInSlice(s.Nodes, sel.Nodes[0])
}
return -1
}

123
vendor/github.com/PuerkitoBio/goquery/doc.go generated vendored Normal file

@ -0,0 +1,123 @@
// Copyright (c) 2012-2016, Martin Angers & Contributors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation and/or
// other materials provided with the distribution.
// * Neither the name of the author nor the names of its contributors may be used to
// endorse or promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
// AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
// WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/*
Package goquery implements features similar to jQuery, including the chainable
syntax, to manipulate and query an HTML document.
It brings a syntax and a set of features similar to jQuery to the Go language.
It is based on Go's net/html package and the CSS Selector library cascadia.
Since the net/html parser returns nodes, and not a full-featured DOM
tree, jQuery's stateful manipulation functions (like height(), css(), detach())
have been left off.
Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is
the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML.
See the repository's wiki for various options on how to do this.
Syntax-wise, it is as close as possible to jQuery, with the same method names when
possible, and that warm and fuzzy chainable interface. jQuery being the
ultra-popular library that it is, writing a similar HTML-manipulating
library was better to follow its API than to start anew (in the same spirit as
Go's fmt package), even though some of its methods are less than intuitive (looking
at you, index()...).
It is hosted on GitHub, along with additional documentation in the README.md
file: https://github.com/puerkitobio/goquery
Please note that because of the net/html dependency, goquery requires Go1.1+.
The various methods are split into files based on the category of behavior.
The three dots (...) indicate that various "overloads" are available.
* array.go : array-like positional manipulation of the selection.
- Eq()
- First()
- Get()
- Index...()
- Last()
- Slice()
* expand.go : methods that expand or augment the selection's set.
- Add...()
- AndSelf()
- Union(), which is an alias for AddSelection()
* filter.go : filtering methods, that reduce the selection's set.
- End()
- Filter...()
- Has...()
- Intersection(), which is an alias of FilterSelection()
- Not...()
* iteration.go : methods to loop over the selection's nodes.
- Each()
- EachWithBreak()
- Map()
* manipulation.go : methods for modifying the document
- After...()
- Append...()
- Before...()
- Clone()
- Empty()
- Prepend...()
- Remove...()
- ReplaceWith...()
- Unwrap()
- Wrap...()
- WrapAll...()
- WrapInner...()
* property.go : methods that inspect and get the node's properties values.
- Attr*(), RemoveAttr(), SetAttr()
- AddClass(), HasClass(), RemoveClass(), ToggleClass()
- Html()
- Length()
- Size(), which is an alias for Length()
- Text()
* query.go : methods that query, or reflect, a node's identity.
- Contains()
- Is...()
* traversal.go : methods to traverse the HTML document tree.
- Children...()
- Contents()
- Find...()
- Next...()
- Parent[s]...()
- Prev...()
- Siblings...()
* type.go : definition of the types exposed by goquery.
- Document
- Selection
- Matcher
* utilities.go : definition of helper functions (and not methods on a *Selection)
that are not part of jQuery, but are useful to goquery.
- NodeName
- OuterHtml
*/
package goquery

70
vendor/github.com/PuerkitoBio/goquery/expand.go generated vendored Normal file

@ -0,0 +1,70 @@
package goquery
import "golang.org/x/net/html"
// Add adds the selector string's matching nodes to those in the current
// selection and returns a new Selection object.
// The selector string is run in the context of the document of the current
// Selection object.
func (s *Selection) Add(selector string) *Selection {
return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, compileMatcher(selector))...)
}
// AddMatcher adds the matcher's matching nodes to those in the current
// selection and returns a new Selection object.
// The matcher is run in the context of the document of the current
// Selection object.
func (s *Selection) AddMatcher(m Matcher) *Selection {
return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, m)...)
}
// AddSelection adds the specified Selection object's nodes to those in the
// current selection and returns a new Selection object.
func (s *Selection) AddSelection(sel *Selection) *Selection {
if sel == nil {
return s.AddNodes()
}
return s.AddNodes(sel.Nodes...)
}
// Union is an alias for AddSelection.
func (s *Selection) Union(sel *Selection) *Selection {
return s.AddSelection(sel)
}
// AddNodes adds the specified nodes to those in the
// current selection and returns a new Selection object.
func (s *Selection) AddNodes(nodes ...*html.Node) *Selection {
return pushStack(s, appendWithoutDuplicates(s.Nodes, nodes, nil))
}
// AndSelf adds the previous set of elements on the stack to the current set.
// It returns a new Selection object containing the current Selection combined
// with the previous one.
// Deprecated: This function has been deprecated and is now an alias for AddBack().
func (s *Selection) AndSelf() *Selection {
return s.AddBack()
}
// AddBack adds the previous set of elements on the stack to the current set.
// It returns a new Selection object containing the current Selection combined
// with the previous one.
func (s *Selection) AddBack() *Selection {
return s.AddSelection(s.prevSel)
}
// AddBackFiltered reduces the previous set of elements on the stack to those that
// match the selector string, and adds them to the current set.
// It returns a new Selection object containing the current Selection combined
// with the filtered previous one
func (s *Selection) AddBackFiltered(selector string) *Selection {
return s.AddSelection(s.prevSel.Filter(selector))
}
// AddBackMatcher reduces the previous set of elements on the stack to those that match
// the matcher, and adds them to the current set.
// It returns a new Selection object containing the current Selection combined
// with the filtered previous one
func (s *Selection) AddBackMatcher(m Matcher) *Selection {
return s.AddSelection(s.prevSel.FilterMatcher(m))
}

163
vendor/github.com/PuerkitoBio/goquery/filter.go generated vendored Normal file

@ -0,0 +1,163 @@
package goquery
import "golang.org/x/net/html"
// Filter reduces the set of matched elements to those that match the selector string.
// It returns a new Selection object for this subset of matching elements.
func (s *Selection) Filter(selector string) *Selection {
return s.FilterMatcher(compileMatcher(selector))
}
// FilterMatcher reduces the set of matched elements to those that match
// the given matcher. It returns a new Selection object for this subset
// of matching elements.
func (s *Selection) FilterMatcher(m Matcher) *Selection {
return pushStack(s, winnow(s, m, true))
}
// Not removes elements from the Selection that match the selector string.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) Not(selector string) *Selection {
return s.NotMatcher(compileMatcher(selector))
}
// NotMatcher removes elements from the Selection that match the given matcher.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) NotMatcher(m Matcher) *Selection {
return pushStack(s, winnow(s, m, false))
}
// FilterFunction reduces the set of matched elements to those that pass the function's test.
// It returns a new Selection object for this subset of elements.
func (s *Selection) FilterFunction(f func(int, *Selection) bool) *Selection {
return pushStack(s, winnowFunction(s, f, true))
}
// NotFunction removes elements from the Selection that pass the function's test.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) NotFunction(f func(int, *Selection) bool) *Selection {
return pushStack(s, winnowFunction(s, f, false))
}
// FilterNodes reduces the set of matched elements to those that match the specified nodes.
// It returns a new Selection object for this subset of elements.
func (s *Selection) FilterNodes(nodes ...*html.Node) *Selection {
return pushStack(s, winnowNodes(s, nodes, true))
}
// NotNodes removes elements from the Selection that match the specified nodes.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) NotNodes(nodes ...*html.Node) *Selection {
return pushStack(s, winnowNodes(s, nodes, false))
}
// FilterSelection reduces the set of matched elements to those that match a
// node in the specified Selection object.
// It returns a new Selection object for this subset of elements.
func (s *Selection) FilterSelection(sel *Selection) *Selection {
if sel == nil {
return pushStack(s, winnowNodes(s, nil, true))
}
return pushStack(s, winnowNodes(s, sel.Nodes, true))
}
// NotSelection removes elements from the Selection that match a node in the specified
// Selection object. It returns a new Selection object with the matching elements removed.
func (s *Selection) NotSelection(sel *Selection) *Selection {
if sel == nil {
return pushStack(s, winnowNodes(s, nil, false))
}
return pushStack(s, winnowNodes(s, sel.Nodes, false))
}
// Intersection is an alias for FilterSelection.
func (s *Selection) Intersection(sel *Selection) *Selection {
return s.FilterSelection(sel)
}
// Has reduces the set of matched elements to those that have a descendant
// that matches the selector.
// It returns a new Selection object with the matching elements.
func (s *Selection) Has(selector string) *Selection {
return s.HasSelection(s.document.Find(selector))
}
// HasMatcher reduces the set of matched elements to those that have a descendant
// that matches the matcher.
// It returns a new Selection object with the matching elements.
func (s *Selection) HasMatcher(m Matcher) *Selection {
return s.HasSelection(s.document.FindMatcher(m))
}
// HasNodes reduces the set of matched elements to those that have a
// descendant that matches one of the nodes.
// It returns a new Selection object with the matching elements.
func (s *Selection) HasNodes(nodes ...*html.Node) *Selection {
return s.FilterFunction(func(_ int, sel *Selection) bool {
// Add all nodes that contain one of the specified nodes
for _, n := range nodes {
if sel.Contains(n) {
return true
}
}
return false
})
}
// HasSelection reduces the set of matched elements to those that have a
// descendant that matches one of the nodes of the specified Selection object.
// It returns a new Selection object with the matching elements.
func (s *Selection) HasSelection(sel *Selection) *Selection {
if sel == nil {
return s.HasNodes()
}
return s.HasNodes(sel.Nodes...)
}
// End ends the most recent filtering operation in the current chain and
// returns the set of matched elements to its previous state.
func (s *Selection) End() *Selection {
if s.prevSel != nil {
return s.prevSel
}
return newEmptySelection(s.document)
}
// Filter based on the matcher, and the indicator to keep (Filter) or
// to get rid of (Not) the matching elements.
func winnow(sel *Selection, m Matcher, keep bool) []*html.Node {
// Optimize if keep is requested
if keep {
return m.Filter(sel.Nodes)
}
// Use grep
return grep(sel, func(i int, s *Selection) bool {
return !m.Match(s.Get(0))
})
}
// Filter based on an array of nodes, and the indicator to keep (Filter) or
// to get rid of (Not) the matching elements.
func winnowNodes(sel *Selection, nodes []*html.Node, keep bool) []*html.Node {
if len(nodes)+len(sel.Nodes) < minNodesForSet {
return grep(sel, func(i int, s *Selection) bool {
return isInSlice(nodes, s.Get(0)) == keep
})
}
set := make(map[*html.Node]bool)
for _, n := range nodes {
set[n] = true
}
return grep(sel, func(i int, s *Selection) bool {
return set[s.Get(0)] == keep
})
}
// Filter based on a function test, and the indicator to keep (Filter) or
// to get rid of (Not) the matching elements.
func winnowFunction(sel *Selection, f func(int, *Selection) bool, keep bool) []*html.Node {
return grep(sel, func(i int, s *Selection) bool {
return f(i, s) == keep
})
}

6
vendor/github.com/PuerkitoBio/goquery/go.mod generated vendored Normal file

@ -0,0 +1,6 @@
module github.com/PuerkitoBio/goquery
require (
github.com/andybalholm/cascadia v1.0.0
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a
)

5
vendor/github.com/PuerkitoBio/goquery/go.sum generated vendored Normal file

@ -0,0 +1,5 @@
github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a h1:gOpx8G595UYyvj8UK4+OFyY4rx037g3fmfhe5SasG3U=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=

39
vendor/github.com/PuerkitoBio/goquery/iteration.go generated vendored Normal file

@ -0,0 +1,39 @@
package goquery
// Each iterates over a Selection object, executing a function for each
// matched element. It returns the current Selection object. The function
// f is called for each element in the selection with the index of the
// element in that selection starting at 0, and a *Selection that contains
// only that element.
func (s *Selection) Each(f func(int, *Selection)) *Selection {
for i, n := range s.Nodes {
f(i, newSingleSelection(n, s.document))
}
return s
}
// EachWithBreak iterates over a Selection object, executing a function for each
// matched element. It is identical to Each except that it is possible to break
// out of the loop by returning false in the callback function. It returns the
// current Selection object.
func (s *Selection) EachWithBreak(f func(int, *Selection) bool) *Selection {
for i, n := range s.Nodes {
if !f(i, newSingleSelection(n, s.document)) {
return s
}
}
return s
}
// Map passes each element in the current matched set through a function,
// producing a slice of string holding the returned values. The function
// f is called for each element in the selection with the index of the
// element in that selection starting at 0, and a *Selection that contains
// only that element.
func (s *Selection) Map(f func(int, *Selection) string) (result []string) {
for i, n := range s.Nodes {
result = append(result, f(i, newSingleSelection(n, s.document)))
}
return result
}

574
vendor/github.com/PuerkitoBio/goquery/manipulation.go generated vendored Normal file

@ -0,0 +1,574 @@
package goquery
import (
"strings"
"golang.org/x/net/html"
)
// After applies the selector from the root document and inserts the matched elements
// after the elements in the set of matched elements.
//
// If one of the matched elements in the selection is not currently in the
// document, it's impossible to insert nodes after it, so it will be ignored.
//
// This follows the same rules as Selection.Append.
func (s *Selection) After(selector string) *Selection {
return s.AfterMatcher(compileMatcher(selector))
}
// AfterMatcher applies the matcher from the root document and inserts the matched elements
// after the elements in the set of matched elements.
//
// If one of the matched elements in the selection is not currently in the
// document, it's impossible to insert nodes after it, so it will be ignored.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterMatcher(m Matcher) *Selection {
return s.AfterNodes(m.MatchAll(s.document.rootNode)...)
}
// AfterSelection inserts the elements in the selection after each element in the set of matched
// elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterSelection(sel *Selection) *Selection {
return s.AfterNodes(sel.Nodes...)
}
// AfterHtml parses the html and inserts it after the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterHtml(html string) *Selection {
return s.AfterNodes(parseHtml(html)...)
}
// AfterNodes inserts the nodes after each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterNodes(ns ...*html.Node) *Selection {
return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) {
if sn.Parent != nil {
sn.Parent.InsertBefore(n, sn.NextSibling)
}
})
}
// Append appends the elements specified by the selector to the end of each element
// in the set of matched elements, following those rules:
//
// 1) The selector is applied to the root document.
//
// 2) Elements that are part of the document will be moved to the new location.
//
// 3) If there are multiple locations to append to, cloned nodes will be
// appended to all target locations except the last one, which will be moved
// as noted in (2).
func (s *Selection) Append(selector string) *Selection {
return s.AppendMatcher(compileMatcher(selector))
}
// AppendMatcher appends the elements specified by the matcher to the end of each element
// in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AppendMatcher(m Matcher) *Selection {
return s.AppendNodes(m.MatchAll(s.document.rootNode)...)
}
// AppendSelection appends the elements in the selection to the end of each element
// in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AppendSelection(sel *Selection) *Selection {
return s.AppendNodes(sel.Nodes...)
}
// AppendHtml parses the html and appends it to the set of matched elements.
func (s *Selection) AppendHtml(html string) *Selection {
return s.AppendNodes(parseHtml(html)...)
}
// AppendNodes appends the specified nodes to each node in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AppendNodes(ns ...*html.Node) *Selection {
return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) {
sn.AppendChild(n)
})
}
// Before inserts the matched elements before each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) Before(selector string) *Selection {
return s.BeforeMatcher(compileMatcher(selector))
}
// BeforeMatcher inserts the matched elements before each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeMatcher(m Matcher) *Selection {
return s.BeforeNodes(m.MatchAll(s.document.rootNode)...)
}
// BeforeSelection inserts the elements in the selection before each element in the set of matched
// elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeSelection(sel *Selection) *Selection {
return s.BeforeNodes(sel.Nodes...)
}
// BeforeHtml parses the html and inserts it before the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeHtml(html string) *Selection {
return s.BeforeNodes(parseHtml(html)...)
}
// BeforeNodes inserts the nodes before each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeNodes(ns ...*html.Node) *Selection {
return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) {
if sn.Parent != nil {
sn.Parent.InsertBefore(n, sn)
}
})
}
// Clone creates a deep copy of the set of matched nodes. The new nodes will not be
// attached to the document.
func (s *Selection) Clone() *Selection {
ns := newEmptySelection(s.document)
ns.Nodes = cloneNodes(s.Nodes)
return ns
}
// Empty removes all children nodes from the set of matched elements.
// It returns the children nodes in a new Selection.
func (s *Selection) Empty() *Selection {
var nodes []*html.Node
for _, n := range s.Nodes {
for c := n.FirstChild; c != nil; c = n.FirstChild {
n.RemoveChild(c)
nodes = append(nodes, c)
}
}
return pushStack(s, nodes)
}
// Prepend prepends the elements specified by the selector to each element in
// the set of matched elements, following the same rules as Append.
func (s *Selection) Prepend(selector string) *Selection {
return s.PrependMatcher(compileMatcher(selector))
}
// PrependMatcher prepends the elements specified by the matcher to each
// element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) PrependMatcher(m Matcher) *Selection {
return s.PrependNodes(m.MatchAll(s.document.rootNode)...)
}
// PrependSelection prepends the elements in the selection to each element in
// the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) PrependSelection(sel *Selection) *Selection {
return s.PrependNodes(sel.Nodes...)
}
// PrependHtml parses the html and prepends it to the set of matched elements.
func (s *Selection) PrependHtml(html string) *Selection {
return s.PrependNodes(parseHtml(html)...)
}
// PrependNodes prepends the specified nodes to each node in the set of
// matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) PrependNodes(ns ...*html.Node) *Selection {
return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) {
// sn.FirstChild may be nil, in which case this functions like
// sn.AppendChild()
sn.InsertBefore(n, sn.FirstChild)
})
}
// Remove removes the set of matched elements from the document.
// It returns the same selection, now consisting of nodes not in the document.
func (s *Selection) Remove() *Selection {
for _, n := range s.Nodes {
if n.Parent != nil {
n.Parent.RemoveChild(n)
}
}
return s
}
// RemoveFiltered removes the set of matched elements by selector.
// It returns the Selection of removed nodes.
func (s *Selection) RemoveFiltered(selector string) *Selection {
return s.RemoveMatcher(compileMatcher(selector))
}
// RemoveMatcher removes the set of matched elements.
// It returns the Selection of removed nodes.
func (s *Selection) RemoveMatcher(m Matcher) *Selection {
return s.FilterMatcher(m).Remove()
}
// ReplaceWith replaces each element in the set of matched elements with the
// nodes matched by the given selector.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWith(selector string) *Selection {
return s.ReplaceWithMatcher(compileMatcher(selector))
}
// ReplaceWithMatcher replaces each element in the set of matched elements with
// the nodes matched by the given Matcher.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithMatcher(m Matcher) *Selection {
return s.ReplaceWithNodes(m.MatchAll(s.document.rootNode)...)
}
// ReplaceWithSelection replaces each element in the set of matched elements with
// the nodes from the given Selection.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithSelection(sel *Selection) *Selection {
return s.ReplaceWithNodes(sel.Nodes...)
}
// ReplaceWithHtml replaces each element in the set of matched elements with
// the parsed HTML.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithHtml(html string) *Selection {
return s.ReplaceWithNodes(parseHtml(html)...)
}
// ReplaceWithNodes replaces each element in the set of matched elements with
// the given nodes.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithNodes(ns ...*html.Node) *Selection {
s.AfterNodes(ns...)
return s.Remove()
}
// SetHtml sets the html content of each element in the selection to
// specified html string.
func (s *Selection) SetHtml(html string) *Selection {
return setHtmlNodes(s, parseHtml(html)...)
}
// SetText sets the content of each element in the selection to specified content.
// The provided text string is escaped.
func (s *Selection) SetText(text string) *Selection {
return s.SetHtml(html.EscapeString(text))
}
// Unwrap removes the parents of the set of matched elements, leaving the matched
// elements (and their siblings, if any) in their place.
// It returns the original selection.
func (s *Selection) Unwrap() *Selection {
s.Parent().Each(func(i int, ss *Selection) {
// For some reason, jquery allows unwrap to remove the <head> element, so
// allowing it here too. Same for <html>. Why it allows those elements to
// be unwrapped while not allowing body is a mystery to me.
if ss.Nodes[0].Data != "body" {
ss.ReplaceWithSelection(ss.Contents())
}
})
return s
}
// Wrap wraps each element in the set of matched elements inside the first
// element matched by the given selector. The matched child is cloned before
// being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) Wrap(selector string) *Selection {
return s.WrapMatcher(compileMatcher(selector))
}
// WrapMatcher wraps each element in the set of matched elements inside the
// first element matched by the given matcher. The matched child is cloned
// before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapMatcher(m Matcher) *Selection {
return s.wrapNodes(m.MatchAll(s.document.rootNode)...)
}
// WrapSelection wraps each element in the set of matched elements inside the
// first element in the given Selection. The element is cloned before being
// inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapSelection(sel *Selection) *Selection {
return s.wrapNodes(sel.Nodes...)
}
// WrapHtml wraps each element in the set of matched elements inside the inner-
// most child of the given HTML.
//
// It returns the original set of elements.
func (s *Selection) WrapHtml(html string) *Selection {
return s.wrapNodes(parseHtml(html)...)
}
// WrapNode wraps each element in the set of matched elements inside the inner-
// most child of the given node. The given node is copied before being inserted
// into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapNode(n *html.Node) *Selection {
return s.wrapNodes(n)
}
func (s *Selection) wrapNodes(ns ...*html.Node) *Selection {
s.Each(func(i int, ss *Selection) {
ss.wrapAllNodes(ns...)
})
return s
}
// WrapAll wraps a single HTML structure, matched by the given selector, around
// all elements in the set of matched elements. The matched child is cloned
// before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAll(selector string) *Selection {
return s.WrapAllMatcher(compileMatcher(selector))
}
// WrapAllMatcher wraps a single HTML structure, matched by the given Matcher,
// around all elements in the set of matched elements. The matched child is
// cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllMatcher(m Matcher) *Selection {
return s.wrapAllNodes(m.MatchAll(s.document.rootNode)...)
}
// WrapAllSelection wraps a single HTML structure, the first node of the given
// Selection, around all elements in the set of matched elements. The matched
// child is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllSelection(sel *Selection) *Selection {
return s.wrapAllNodes(sel.Nodes...)
}
// WrapAllHtml wraps the given HTML structure around all elements in the set of
// matched elements. The matched child is cloned before being inserted into the
// document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllHtml(html string) *Selection {
return s.wrapAllNodes(parseHtml(html)...)
}
func (s *Selection) wrapAllNodes(ns ...*html.Node) *Selection {
if len(ns) > 0 {
return s.WrapAllNode(ns[0])
}
return s
}
// WrapAllNode wraps the given node around the first element in the Selection,
// making all other nodes in the Selection children of the given node. The node
// is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllNode(n *html.Node) *Selection {
if s.Size() == 0 {
return s
}
wrap := cloneNode(n)
first := s.Nodes[0]
if first.Parent != nil {
first.Parent.InsertBefore(wrap, first)
first.Parent.RemoveChild(first)
}
for c := getFirstChildEl(wrap); c != nil; c = getFirstChildEl(wrap) {
wrap = c
}
newSingleSelection(wrap, s.document).AppendSelection(s)
return s
}
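// A rough sketch of WrapAllHtml, which moves all matched elements into a
// single wrapper structure (illustrative markup):
//
//	doc, _ := goquery.NewDocumentFromReader(strings.NewReader(`<b>x</b><i>skip</i><b>y</b>`))
//	doc.Find("b").WrapAllHtml(`<div class="group"></div>`)
//	// both <b> elements now share one <div class="group"> wrapper, inserted
//	// where the first <b> was; the <i> stays outside it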
// WrapInner wraps an HTML structure, matched by the given selector, around the
// content of each element in the set of matched elements. The matched child is
// cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInner(selector string) *Selection {
return s.WrapInnerMatcher(compileMatcher(selector))
}
// WrapInnerMatcher wraps an HTML structure, matched by the given matcher,
// around the content of each element in the set of matched elements. The matched
// child is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerMatcher(m Matcher) *Selection {
return s.wrapInnerNodes(m.MatchAll(s.document.rootNode)...)
}
// WrapInnerSelection wraps an HTML structure, the first element of the given
// Selection, around the content of each element in the set of matched
// elements. The matched child is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerSelection(sel *Selection) *Selection {
return s.wrapInnerNodes(sel.Nodes...)
}
// WrapInnerHtml wraps an HTML structure, parsed from the given HTML string,
// around the content of each element in the set of matched elements. The
// matched child is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerHtml(html string) *Selection {
return s.wrapInnerNodes(parseHtml(html)...)
}
// WrapInnerNode wraps the given node around the content of each element in the
// set of matched elements. The node is cloned before being inserted into the
// document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerNode(n *html.Node) *Selection {
return s.wrapInnerNodes(n)
}
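// A rough sketch of WrapInnerHtml, which wraps the contents of each matched
// element (illustrative markup):
//
//	doc, _ := goquery.NewDocumentFromReader(strings.NewReader(`<p>hello</p>`))
//	doc.Find("p").WrapInnerHtml(`<span class="note"></span>`)
//	// the <p> now contains <span class="note">hello</span>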
func (s *Selection) wrapInnerNodes(ns ...*html.Node) *Selection {
if len(ns) == 0 {
return s
}
s.Each(func(i int, s *Selection) {
contents := s.Contents()
if contents.Size() > 0 {
contents.wrapAllNodes(ns...)
} else {
s.AppendNodes(cloneNode(ns[0]))
}
})
return s
}
func parseHtml(h string) []*html.Node {
// Errors are only returned when the io.Reader returns any error besides
// EOF, but strings.Reader never will
nodes, err := html.ParseFragment(strings.NewReader(h), &html.Node{Type: html.ElementNode})
if err != nil {
panic("goquery: failed to parse HTML: " + err.Error())
}
return nodes
}
func setHtmlNodes(s *Selection, ns ...*html.Node) *Selection {
for _, n := range s.Nodes {
for c := n.FirstChild; c != nil; c = n.FirstChild {
n.RemoveChild(c)
}
for _, c := range ns {
n.AppendChild(cloneNode(c))
}
}
return s
}
// Get the first child that is an ElementNode
func getFirstChildEl(n *html.Node) *html.Node {
c := n.FirstChild
for c != nil && c.Type != html.ElementNode {
c = c.NextSibling
}
return c
}
// Deep copy a slice of nodes.
func cloneNodes(ns []*html.Node) []*html.Node {
cns := make([]*html.Node, 0, len(ns))
for _, n := range ns {
cns = append(cns, cloneNode(n))
}
return cns
}
// Deep copy a node. The new node has clones of all the original node's
// children but none of its parents or siblings.
func cloneNode(n *html.Node) *html.Node {
nn := &html.Node{
Type: n.Type,
DataAtom: n.DataAtom,
Data: n.Data,
Attr: make([]html.Attribute, len(n.Attr)),
}
copy(nn.Attr, n.Attr)
for c := n.FirstChild; c != nil; c = c.NextSibling {
nn.AppendChild(cloneNode(c))
}
return nn
}
func (s *Selection) manipulateNodes(ns []*html.Node, reverse bool,
f func(sn *html.Node, n *html.Node)) *Selection {
lasti := s.Size() - 1
// net/html doesn't provide document fragments for insertion, so to get
// things in the correct order with After() and Prepend(), the callback
// needs to be called on the reverse of the nodes.
if reverse {
for i, j := 0, len(ns)-1; i < j; i, j = i+1, j-1 {
ns[i], ns[j] = ns[j], ns[i]
}
}
for i, sn := range s.Nodes {
for _, n := range ns {
if i != lasti {
f(sn, cloneNode(n))
} else {
if n.Parent != nil {
n.Parent.RemoveChild(n)
}
f(sn, n)
}
}
}
return s
}

275
vendor/github.com/PuerkitoBio/goquery/property.go generated vendored Normal file
View File

@ -0,0 +1,275 @@
package goquery
import (
"bytes"
"regexp"
"strings"
"golang.org/x/net/html"
)
var rxClassTrim = regexp.MustCompile("[\t\r\n]")
// Attr gets the specified attribute's value for the first element in the
// Selection. To get the value for each element individually, use a looping
// construct such as the Each or Map methods.
func (s *Selection) Attr(attrName string) (val string, exists bool) {
if len(s.Nodes) == 0 {
return
}
return getAttributeValue(attrName, s.Nodes[0])
}
// AttrOr works like Attr but returns the default value if the attribute is not present.
func (s *Selection) AttrOr(attrName, defaultValue string) string {
if len(s.Nodes) == 0 {
return defaultValue
}
val, exists := getAttributeValue(attrName, s.Nodes[0])
if !exists {
return defaultValue
}
return val
}
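// A short sketch of Attr and AttrOr, assuming doc is a previously parsed
// *goquery.Document (the selector and attribute names are illustrative):
//
//	href, exists := doc.Find("a").Attr("href") // exists is false when the attribute is missing
//	lang := doc.Find("html").AttrOr("lang", "en")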
// RemoveAttr removes the named attribute from each element in the set of matched elements.
func (s *Selection) RemoveAttr(attrName string) *Selection {
for _, n := range s.Nodes {
removeAttr(n, attrName)
}
return s
}
// SetAttr sets the given attribute on each element in the set of matched elements.
func (s *Selection) SetAttr(attrName, val string) *Selection {
for _, n := range s.Nodes {
attr := getAttributePtr(attrName, n)
if attr == nil {
n.Attr = append(n.Attr, html.Attribute{Key: attrName, Val: val})
} else {
attr.Val = val
}
}
return s
}
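// A short sketch of SetAttr and RemoveAttr, assuming doc is a parsed
// *goquery.Document (selectors and attribute names are illustrative):
//
//	doc.Find("img").SetAttr("loading", "lazy")
//	doc.Find("a").RemoveAttr("target")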
// Text gets the combined text contents of each element in the set of matched
// elements, including their descendants.
func (s *Selection) Text() string {
var buf bytes.Buffer
// Slightly optimized vs calling Each: no single selection object created
var f func(*html.Node)
f = func(n *html.Node) {
if n.Type == html.TextNode {
// Keep newlines and spaces, like jQuery
buf.WriteString(n.Data)
}
if n.FirstChild != nil {
for c := n.FirstChild; c != nil; c = c.NextSibling {
f(c)
}
}
}
for _, n := range s.Nodes {
f(n)
}
return buf.String()
}
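// A short sketch of Text, which concatenates all descendant text nodes
// (illustrative markup):
//
//	doc, _ := goquery.NewDocumentFromReader(strings.NewReader(`<p>a <b>b</b> c</p>`))
//	fmt.Println(doc.Find("p").Text()) // prints "a b c"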
// Size is an alias for Length.
func (s *Selection) Size() int {
return s.Length()
}
// Length returns the number of elements in the Selection object.
func (s *Selection) Length() int {
return len(s.Nodes)
}
// Html gets the HTML contents of the first element in the set of matched
// elements. It includes text and comment nodes.
func (s *Selection) Html() (ret string, e error) {
// Since there is no .innerHtml, the HTML content must be re-created from
// the nodes using html.Render.
var buf bytes.Buffer
if len(s.Nodes) > 0 {
for c := s.Nodes[0].FirstChild; c != nil; c = c.NextSibling {
e = html.Render(&buf, c)
if e != nil {
return
}
}
ret = buf.String()
}
return
}
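// A short sketch of Html, which renders the inner HTML of the first matched
// element (illustrative markup):
//
//	doc, _ := goquery.NewDocumentFromReader(strings.NewReader(`<div><b>x</b></div>`))
//	inner, err := doc.Find("div").Html() // inner == "<b>x</b>", err == nil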
// AddClass adds the given class(es) to each element in the set of matched elements.
// Multiple class names can be specified, separated by a space or via multiple arguments.
func (s *Selection) AddClass(class ...string) *Selection {
classStr := strings.TrimSpace(strings.Join(class, " "))
if classStr == "" {
return s
}
tcls := getClassesSlice(classStr)
for _, n := range s.Nodes {
curClasses, attr := getClassesAndAttr(n, true)
for _, newClass := range tcls {
if !strings.Contains(curClasses, " "+newClass+" ") {
curClasses += newClass + " "
}
}
setClasses(n, attr, curClasses)
}
return s
}
// HasClass determines whether any of the matched elements are assigned the
// given class.
func (s *Selection) HasClass(class string) bool {
class = " " + class + " "
for _, n := range s.Nodes {
classes, _ := getClassesAndAttr(n, false)
if strings.Contains(classes, class) {
return true
}
}
return false
}
// RemoveClass removes the given class(es) from each element in the set of matched elements.
// Multiple class names can be specified, separated by a space or via multiple arguments.
// If no class name is provided, all classes are removed.
func (s *Selection) RemoveClass(class ...string) *Selection {
var rclasses []string
classStr := strings.TrimSpace(strings.Join(class, " "))
remove := classStr == ""
if !remove {
rclasses = getClassesSlice(classStr)
}
for _, n := range s.Nodes {
if remove {
removeAttr(n, "class")
} else {
classes, attr := getClassesAndAttr(n, true)
for _, rcl := range rclasses {
classes = strings.Replace(classes, " "+rcl+" ", " ", -1)
}
setClasses(n, attr, classes)
}
}
return s
}
// ToggleClass adds or removes the given class(es) for each element in the set of matched elements.
// Multiple class names can be specified, separated by a space or via multiple arguments.
func (s *Selection) ToggleClass(class ...string) *Selection {
classStr := strings.TrimSpace(strings.Join(class, " "))
if classStr == "" {
return s
}
tcls := getClassesSlice(classStr)
for _, n := range s.Nodes {
classes, attr := getClassesAndAttr(n, true)
for _, tcl := range tcls {
if strings.Contains(classes, " "+tcl+" ") {
classes = strings.Replace(classes, " "+tcl+" ", " ", -1)
} else {
classes += tcl + " "
}
}
setClasses(n, attr, classes)
}
return s
}
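// A short sketch of the class helpers, assuming doc is a parsed
// *goquery.Document (class names are illustrative):
//
//	items := doc.Find("li")
//	items.AddClass("item", "active")
//	if items.HasClass("active") {
//		items.RemoveClass("active")
//	}
//	items.ToggleClass("selected")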
func getAttributePtr(attrName string, n *html.Node) *html.Attribute {
if n == nil {
return nil
}
for i, a := range n.Attr {
if a.Key == attrName {
return &n.Attr[i]
}
}
return nil
}
// Private function to get the specified attribute's value from a node.
func getAttributeValue(attrName string, n *html.Node) (val string, exists bool) {
if a := getAttributePtr(attrName, n); a != nil {
val = a.Val
exists = true
}
return
}
// Get and normalize the "class" attribute from the node.
func getClassesAndAttr(n *html.Node, create bool) (classes string, attr *html.Attribute) {
// Applies only to element nodes
if n.Type == html.ElementNode {
attr = getAttributePtr("class", n)
if attr == nil && create {
n.Attr = append(n.Attr, html.Attribute{
Key: "class",
Val: "",
})
attr = &n.Attr[len(n.Attr)-1]
}
}
if attr == nil {
classes = " "
} else {
classes = rxClassTrim.ReplaceAllString(" "+attr.Val+" ", " ")
}
return
}
func getClassesSlice(classes string) []string {
return strings.Split(rxClassTrim.ReplaceAllString(" "+classes+" ", " "), " ")
}
func removeAttr(n *html.Node, attrName string) {
for i, a := range n.Attr {
if a.Key == attrName {
n.Attr[i], n.Attr[len(n.Attr)-1], n.Attr =
n.Attr[len(n.Attr)-1], html.Attribute{}, n.Attr[:len(n.Attr)-1]
return
}
}
}
func setClasses(n *html.Node, attr *html.Attribute, classes string) {
classes = strings.TrimSpace(classes)
if classes == "" {
removeAttr(n, "class")
return
}
attr.Val = classes
}

49
vendor/github.com/PuerkitoBio/goquery/query.go generated vendored Normal file
View File

@ -0,0 +1,49 @@
package goquery
import "golang.org/x/net/html"
// Is checks the current matched set of elements against a selector and
// returns true if at least one of these elements matches.
func (s *Selection) Is(selector string) bool {
return s.IsMatcher(compileMatcher(selector))
}
// IsMatcher checks the current matched set of elements against a matcher and
// returns true if at least one of these elements matches.
func (s *Selection) IsMatcher(m Matcher) bool {
if len(s.Nodes) > 0 {
if len(s.Nodes) == 1 {
return m.Match(s.Nodes[0])
}
return len(m.Filter(s.Nodes)) > 0
}
return false
}
// IsFunction checks the current matched set of elements against a predicate and
// returns true if at least one of these elements matches.
func (s *Selection) IsFunction(f func(int, *Selection) bool) bool {
return s.FilterFunction(f).Length() > 0
}
// IsSelection checks the current matched set of elements against a Selection object
// and returns true if at least one of these elements matches.
func (s *Selection) IsSelection(sel *Selection) bool {
return s.FilterSelection(sel).Length() > 0
}
// IsNodes checks the current matched set of elements against the specified nodes
// and returns true if at least one of these elements matches.
func (s *Selection) IsNodes(nodes ...*html.Node) bool {
return s.FilterNodes(nodes...).Length() > 0
}
// Contains returns true if the specified Node is within,
// at any depth, one of the nodes in the Selection object.
// It is NOT inclusive, to behave like jQuery's implementation, and
// unlike Javascript's .contains, so if the contained
// node is itself in the selection, it returns false.
func (s *Selection) Contains(n *html.Node) bool {
return sliceContains(s.Nodes, n)
}
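// A short sketch of the query helpers, assuming doc is a parsed
// *goquery.Document (selectors and the predicate are illustrative):
//
//	links := doc.Find("a")
//	isNav := links.Is(".nav")
//	hasSecure := links.IsFunction(func(i int, s *goquery.Selection) bool {
//		href, _ := s.Attr("href")
//		return strings.HasPrefix(href, "https://")
//	})
//	_, _ = isNav, hasSecure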

698
vendor/github.com/PuerkitoBio/goquery/traversal.go generated vendored Normal file
View File

@ -0,0 +1,698 @@
package goquery
import "golang.org/x/net/html"
type siblingType int
// Sibling type, used internally when iterating over children at the same
// level (siblings) to specify which nodes are requested.
const (
siblingPrevUntil siblingType = iota - 3
siblingPrevAll
siblingPrev
siblingAll
siblingNext
siblingNextAll
siblingNextUntil
siblingAllIncludingNonElements
)
// Find gets the descendants of each element in the current set of matched
// elements, filtered by a selector. It returns a new Selection object
// containing these matched elements.
func (s *Selection) Find(selector string) *Selection {
return pushStack(s, findWithMatcher(s.Nodes, compileMatcher(selector)))
}
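// A short sketch of Find, which selects descendants of the current matches
// (markup and selectors are illustrative):
//
//	doc, _ := goquery.NewDocumentFromReader(strings.NewReader(`<ul><li><a href="/x">x</a></li></ul>`))
//	doc.Find("ul li").Find("a").Each(func(i int, s *goquery.Selection) {
//		fmt.Println(s.AttrOr("href", ""))
//	})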
// FindMatcher gets the descendants of each element in the current set of matched
// elements, filtered by the matcher. It returns a new Selection object
// containing these matched elements.
func (s *Selection) FindMatcher(m Matcher) *Selection {
return pushStack(s, findWithMatcher(s.Nodes, m))
}
// FindSelection gets the descendants of each element in the current
// Selection, filtered by a Selection. It returns a new Selection object
// containing these matched elements.
func (s *Selection) FindSelection(sel *Selection) *Selection {
if sel == nil {
return pushStack(s, nil)
}
return s.FindNodes(sel.Nodes...)
}
// FindNodes gets the descendants of each element in the current
// Selection, filtered by some nodes. It returns a new Selection object
// containing these matched elements.
func (s *Selection) FindNodes(nodes ...*html.Node) *Selection {
return pushStack(s, mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
if sliceContains(s.Nodes, n) {
return []*html.Node{n}
}
return nil
}))
}
// Contents gets the children of each element in the Selection,
// including text and comment nodes. It returns a new Selection object
// containing these elements.
func (s *Selection) Contents() *Selection {
return pushStack(s, getChildrenNodes(s.Nodes, siblingAllIncludingNonElements))
}
// ContentsFiltered gets the children of each element in the Selection,
// filtered by the specified selector. It returns a new Selection
// object containing these elements. Since selectors only act on Element nodes,
// this function is an alias to ChildrenFiltered unless the selector is empty,
// in which case it is an alias to Contents.
func (s *Selection) ContentsFiltered(selector string) *Selection {
if selector != "" {
return s.ChildrenFiltered(selector)
}
return s.Contents()
}
// ContentsMatcher gets the children of each element in the Selection,
// filtered by the specified matcher. It returns a new Selection
// object containing these elements. Since matchers only act on Element nodes,
// this function is an alias to ChildrenMatcher.
func (s *Selection) ContentsMatcher(m Matcher) *Selection {
return s.ChildrenMatcher(m)
}
// Children gets the child elements of each element in the Selection.
// It returns a new Selection object containing these elements.
func (s *Selection) Children() *Selection {
return pushStack(s, getChildrenNodes(s.Nodes, siblingAll))
}
// ChildrenFiltered gets the child elements of each element in the Selection,
// filtered by the specified selector. It returns a new
// Selection object containing these elements.
func (s *Selection) ChildrenFiltered(selector string) *Selection {
return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), compileMatcher(selector))
}
// ChildrenMatcher gets the child elements of each element in the Selection,
// filtered by the specified matcher. It returns a new
// Selection object containing these elements.
func (s *Selection) ChildrenMatcher(m Matcher) *Selection {
return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), m)
}
// Parent gets the parent of each element in the Selection. It returns a
// new Selection object containing the matched elements.
func (s *Selection) Parent() *Selection {
return pushStack(s, getParentNodes(s.Nodes))
}
// ParentFiltered gets the parent of each element in the Selection filtered by a
// selector. It returns a new Selection object containing the matched elements.
func (s *Selection) ParentFiltered(selector string) *Selection {
return filterAndPush(s, getParentNodes(s.Nodes), compileMatcher(selector))
}
// ParentMatcher gets the parent of each element in the Selection filtered by a
// matcher. It returns a new Selection object containing the matched elements.
func (s *Selection) ParentMatcher(m Matcher) *Selection {
return filterAndPush(s, getParentNodes(s.Nodes), m)
}
// Closest gets the first element that matches the selector by testing the
// element itself and traversing up through its ancestors in the DOM tree.
func (s *Selection) Closest(selector string) *Selection {
cs := compileMatcher(selector)
return s.ClosestMatcher(cs)
}
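// A short sketch of Closest, assuming doc is a parsed *goquery.Document
// (selectors are illustrative):
//
//	section := doc.Find("a.more").Closest("section") // nearest enclosing <section> of each link
//	_ = section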
// ClosestMatcher gets the first element that matches the matcher by testing the
// element itself and traversing up through its ancestors in the DOM tree.
func (s *Selection) ClosestMatcher(m Matcher) *Selection {
return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node {
// For each node in the selection, test the node itself, then each parent
// until a match is found.
for ; n != nil; n = n.Parent {
if m.Match(n) {
return []*html.Node{n}
}
}
return nil
}))
}
// ClosestNodes gets the first element that matches one of the nodes by testing the
// element itself and traversing up through its ancestors in the DOM tree.
func (s *Selection) ClosestNodes(nodes ...*html.Node) *Selection {
set := make(map[*html.Node]bool)
for _, n := range nodes {
set[n] = true
}
return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node {
// For each node in the selection, test the node itself, then each parent
// until a match is found.
for ; n != nil; n = n.Parent {
if set[n] {
return []*html.Node{n}
}
}
return nil
}))
}
// ClosestSelection gets the first element that matches one of the nodes in the
// Selection by testing the element itself and traversing up through its ancestors
// in the DOM tree.
func (s *Selection) ClosestSelection(sel *Selection) *Selection {
if sel == nil {
return pushStack(s, nil)
}
return s.ClosestNodes(sel.Nodes...)
}
// Parents gets the ancestors of each element in the current Selection. It
// returns a new Selection object with the matched elements.
func (s *Selection) Parents() *Selection {
return pushStack(s, getParentsNodes(s.Nodes, nil, nil))
}
// ParentsFiltered gets the ancestors of each element in the current Selection,
// filtered by a selector. It returns a new Selection object with the matched elements.
func (s *Selection) ParentsFiltered(selector string) *Selection {
return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), compileMatcher(selector))
}
// ParentsMatcher gets the ancestors of each element in the current Selection,
// filtered by a matcher. It returns a new Selection object with the matched elements.
func (s *Selection) ParentsMatcher(m Matcher) *Selection {
return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), m)
}
// ParentsUntil gets the ancestors of each element in the Selection, up to but
// not including the element matched by the selector. It returns a new Selection
// object containing the matched elements.
func (s *Selection) ParentsUntil(selector string) *Selection {
return pushStack(s, getParentsNodes(s.Nodes, compileMatcher(selector), nil))
}
// ParentsUntilMatcher gets the ancestors of each element in the Selection, up to but
// not including the element matched by the matcher. It returns a new Selection
// object containing the matched elements.
func (s *Selection) ParentsUntilMatcher(m Matcher) *Selection {
return pushStack(s, getParentsNodes(s.Nodes, m, nil))
}
// ParentsUntilSelection gets the ancestors of each element in the Selection,
// up to but not including the elements in the specified Selection. It returns a
// new Selection object containing the matched elements.
func (s *Selection) ParentsUntilSelection(sel *Selection) *Selection {
if sel == nil {
return s.Parents()
}
return s.ParentsUntilNodes(sel.Nodes...)
}
// ParentsUntilNodes gets the ancestors of each element in the Selection,
// up to but not including the specified nodes. It returns a
// new Selection object containing the matched elements.
func (s *Selection) ParentsUntilNodes(nodes ...*html.Node) *Selection {
return pushStack(s, getParentsNodes(s.Nodes, nil, nodes))
}
// ParentsFilteredUntil is like ParentsUntil, with the option to filter the
// results based on a selector string. It returns a new Selection
// object containing the matched elements.
func (s *Selection) ParentsFilteredUntil(filterSelector, untilSelector string) *Selection {
return filterAndPush(s, getParentsNodes(s.Nodes, compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
}
// ParentsFilteredUntilMatcher is like ParentsUntilMatcher, with the option to filter the
// results based on a matcher. It returns a new Selection object containing the matched elements.
func (s *Selection) ParentsFilteredUntilMatcher(filter, until Matcher) *Selection {
return filterAndPush(s, getParentsNodes(s.Nodes, until, nil), filter)
}
// ParentsFilteredUntilSelection is like ParentsUntilSelection, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) ParentsFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
return s.ParentsMatcherUntilSelection(compileMatcher(filterSelector), sel)
}
// ParentsMatcherUntilSelection is like ParentsUntilSelection, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) ParentsMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
if sel == nil {
return s.ParentsMatcher(filter)
}
return s.ParentsMatcherUntilNodes(filter, sel.Nodes...)
}
// ParentsFilteredUntilNodes is like ParentsUntilNodes, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) ParentsFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), compileMatcher(filterSelector))
}
// ParentsMatcherUntilNodes is like ParentsUntilNodes, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) ParentsMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), filter)
}
// Siblings gets the siblings of each element in the Selection. It returns
// a new Selection object containing the matched elements.
func (s *Selection) Siblings() *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil))
}
// SiblingsFiltered gets the siblings of each element in the Selection
// filtered by a selector. It returns a new Selection object containing the
// matched elements.
func (s *Selection) SiblingsFiltered(selector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), compileMatcher(selector))
}
// SiblingsMatcher gets the siblings of each element in the Selection
// filtered by a matcher. It returns a new Selection object containing the
// matched elements.
func (s *Selection) SiblingsMatcher(m Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), m)
}
// Next gets the immediately following sibling of each element in the
// Selection. It returns a new Selection object containing the matched elements.
func (s *Selection) Next() *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil))
}
// NextFiltered gets the immediately following sibling of each element in the
// Selection filtered by a selector. It returns a new Selection object
// containing the matched elements.
func (s *Selection) NextFiltered(selector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), compileMatcher(selector))
}
// NextMatcher gets the immediately following sibling of each element in the
// Selection filtered by a matcher. It returns a new Selection object
// containing the matched elements.
func (s *Selection) NextMatcher(m Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), m)
}
// NextAll gets all the following siblings of each element in the
// Selection. It returns a new Selection object containing the matched elements.
func (s *Selection) NextAll() *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil))
}
// NextAllFiltered gets all the following siblings of each element in the
// Selection filtered by a selector. It returns a new Selection object
// containing the matched elements.
func (s *Selection) NextAllFiltered(selector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), compileMatcher(selector))
}
// NextAllMatcher gets all the following siblings of each element in the
// Selection filtered by a matcher. It returns a new Selection object
// containing the matched elements.
func (s *Selection) NextAllMatcher(m Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), m)
}
// Prev gets the immediately preceding sibling of each element in the
// Selection. It returns a new Selection object containing the matched elements.
func (s *Selection) Prev() *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil))
}
// PrevFiltered gets the immediately preceding sibling of each element in the
// Selection filtered by a selector. It returns a new Selection object
// containing the matched elements.
func (s *Selection) PrevFiltered(selector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), compileMatcher(selector))
}
// PrevMatcher gets the immediately preceding sibling of each element in the
// Selection filtered by a matcher. It returns a new Selection object
// containing the matched elements.
func (s *Selection) PrevMatcher(m Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), m)
}
// PrevAll gets all the preceding siblings of each element in the
// Selection. It returns a new Selection object containing the matched elements.
func (s *Selection) PrevAll() *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil))
}
// PrevAllFiltered gets all the preceding siblings of each element in the
// Selection filtered by a selector. It returns a new Selection object
// containing the matched elements.
func (s *Selection) PrevAllFiltered(selector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), compileMatcher(selector))
}
// PrevAllMatcher gets all the preceding siblings of each element in the
// Selection filtered by a matcher. It returns a new Selection object
// containing the matched elements.
func (s *Selection) PrevAllMatcher(m Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), m)
}
// NextUntil gets all following siblings of each element up to but not
// including the element matched by the selector. It returns a new Selection
// object containing the matched elements.
func (s *Selection) NextUntil(selector string) *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
compileMatcher(selector), nil))
}
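// A short sketch of NextUntil, assuming doc is a parsed *goquery.Document
// (selectors and class names are illustrative):
//
//	// all siblings after each <h2>, up to but not including the next <h2>
//	doc.Find("h2").NextUntil("h2").AddClass("in-section")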
// NextUntilMatcher gets all following siblings of each element up to but not
// including the element matched by the matcher. It returns a new Selection
// object containing the matched elements.
func (s *Selection) NextUntilMatcher(m Matcher) *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
m, nil))
}
// NextUntilSelection gets all following siblings of each element up to but not
// including the element matched by the Selection. It returns a new Selection
// object containing the matched elements.
func (s *Selection) NextUntilSelection(sel *Selection) *Selection {
if sel == nil {
return s.NextAll()
}
return s.NextUntilNodes(sel.Nodes...)
}
// NextUntilNodes gets all following siblings of each element up to but not
// including the element matched by the nodes. It returns a new Selection
// object containing the matched elements.
func (s *Selection) NextUntilNodes(nodes ...*html.Node) *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
nil, nodes))
}
// PrevUntil gets all preceding siblings of each element up to but not
// including the element matched by the selector. It returns a new Selection
// object containing the matched elements.
func (s *Selection) PrevUntil(selector string) *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
compileMatcher(selector), nil))
}
// PrevUntilMatcher gets all preceding siblings of each element up to but not
// including the element matched by the matcher. It returns a new Selection
// object containing the matched elements.
func (s *Selection) PrevUntilMatcher(m Matcher) *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
m, nil))
}
// PrevUntilSelection gets all preceding siblings of each element up to but not
// including the element matched by the Selection. It returns a new Selection
// object containing the matched elements.
func (s *Selection) PrevUntilSelection(sel *Selection) *Selection {
if sel == nil {
return s.PrevAll()
}
return s.PrevUntilNodes(sel.Nodes...)
}
// PrevUntilNodes gets all preceding siblings of each element up to but not
// including the element matched by the nodes. It returns a new Selection
// object containing the matched elements.
func (s *Selection) PrevUntilNodes(nodes ...*html.Node) *Selection {
return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
nil, nodes))
}
// NextFilteredUntil is like NextUntil, with the option to filter
// the results based on a selector string.
// It returns a new Selection object containing the matched elements.
func (s *Selection) NextFilteredUntil(filterSelector, untilSelector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
}
// NextFilteredUntilMatcher is like NextUntilMatcher, with the option to filter
// the results based on a matcher.
// It returns a new Selection object containing the matched elements.
func (s *Selection) NextFilteredUntilMatcher(filter, until Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
until, nil), filter)
}
// NextFilteredUntilSelection is like NextUntilSelection, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) NextFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
return s.NextMatcherUntilSelection(compileMatcher(filterSelector), sel)
}
// NextMatcherUntilSelection is like NextUntilSelection, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) NextMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
if sel == nil {
return s.NextMatcher(filter)
}
return s.NextMatcherUntilNodes(filter, sel.Nodes...)
}
// NextFilteredUntilNodes is like NextUntilNodes, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) NextFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
nil, nodes), compileMatcher(filterSelector))
}
// NextMatcherUntilNodes is like NextUntilNodes, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) NextMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
nil, nodes), filter)
}
// PrevFilteredUntil is like PrevUntil, with the option to filter
// the results based on a selector string.
// It returns a new Selection object containing the matched elements.
func (s *Selection) PrevFilteredUntil(filterSelector, untilSelector string) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
}
// PrevFilteredUntilMatcher is like PrevUntilMatcher, with the option to filter
// the results based on a matcher.
// It returns a new Selection object containing the matched elements.
func (s *Selection) PrevFilteredUntilMatcher(filter, until Matcher) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
until, nil), filter)
}
// PrevFilteredUntilSelection is like PrevUntilSelection, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) PrevFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
return s.PrevMatcherUntilSelection(compileMatcher(filterSelector), sel)
}
// PrevMatcherUntilSelection is like PrevUntilSelection, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) PrevMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
if sel == nil {
return s.PrevMatcher(filter)
}
return s.PrevMatcherUntilNodes(filter, sel.Nodes...)
}
// PrevFilteredUntilNodes is like PrevUntilNodes, with the
// option to filter the results based on a selector string. It returns a new
// Selection object containing the matched elements.
func (s *Selection) PrevFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
nil, nodes), compileMatcher(filterSelector))
}
// PrevMatcherUntilNodes is like PrevUntilNodes, with the
// option to filter the results based on a matcher. It returns a new
// Selection object containing the matched elements.
func (s *Selection) PrevMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
nil, nodes), filter)
}
// Filter and push filters the nodes based on a matcher, and pushes the results
// on the stack, with the srcSel as previous selection.
func filterAndPush(srcSel *Selection, nodes []*html.Node, m Matcher) *Selection {
// Create a temporary Selection with the specified nodes to filter using winnow
sel := &Selection{nodes, srcSel.document, nil}
// Filter based on matcher and push on stack
return pushStack(srcSel, winnow(sel, m, true))
}
// Internal implementation of Find that return raw nodes.
func findWithMatcher(nodes []*html.Node, m Matcher) []*html.Node {
// Map nodes to find the matches within the children of each node
return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) {
// Go down one level, because jQuery's Find selects only within descendants
for c := n.FirstChild; c != nil; c = c.NextSibling {
if c.Type == html.ElementNode {
result = append(result, m.MatchAll(c)...)
}
}
return
})
}
// Internal implementation to get all parent nodes, stopping at the specified
// node (or nil if no stop).
func getParentsNodes(nodes []*html.Node, stopm Matcher, stopNodes []*html.Node) []*html.Node {
return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) {
for p := n.Parent; p != nil; p = p.Parent {
sel := newSingleSelection(p, nil)
if stopm != nil {
if sel.IsMatcher(stopm) {
break
}
} else if len(stopNodes) > 0 {
if sel.IsNodes(stopNodes...) {
break
}
}
if p.Type == html.ElementNode {
result = append(result, p)
}
}
return
})
}
// Internal implementation of sibling nodes that return a raw slice of matches.
func getSiblingNodes(nodes []*html.Node, st siblingType, untilm Matcher, untilNodes []*html.Node) []*html.Node {
var f func(*html.Node) bool
// If the requested siblings are ...Until, create the test function to
// determine if the until condition is reached (returns true if it is)
if st == siblingNextUntil || st == siblingPrevUntil {
f = func(n *html.Node) bool {
if untilm != nil {
// Matcher-based condition
sel := newSingleSelection(n, nil)
return sel.IsMatcher(untilm)
} else if len(untilNodes) > 0 {
// Nodes-based condition
sel := newSingleSelection(n, nil)
return sel.IsNodes(untilNodes...)
}
return false
}
}
return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
return getChildrenWithSiblingType(n.Parent, st, n, f)
})
}
// Gets the children nodes of each node in the specified slice of nodes,
// based on the sibling type request.
func getChildrenNodes(nodes []*html.Node, st siblingType) []*html.Node {
return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
return getChildrenWithSiblingType(n, st, nil, nil)
})
}
// Gets the children of the specified parent, based on the requested sibling
// type, skipping a specified node if required.
func getChildrenWithSiblingType(parent *html.Node, st siblingType, skipNode *html.Node,
untilFunc func(*html.Node) bool) (result []*html.Node) {
// Create the iterator function
var iter = func(cur *html.Node) (ret *html.Node) {
// Based on the sibling type requested, iterate the right way
for {
switch st {
case siblingAll, siblingAllIncludingNonElements:
if cur == nil {
// First iteration, start with first child of parent
// Skip node if required
if ret = parent.FirstChild; ret == skipNode && skipNode != nil {
ret = skipNode.NextSibling
}
} else {
// Skip node if required
if ret = cur.NextSibling; ret == skipNode && skipNode != nil {
ret = skipNode.NextSibling
}
}
case siblingPrev, siblingPrevAll, siblingPrevUntil:
if cur == nil {
// Start with previous sibling of the skip node
ret = skipNode.PrevSibling
} else {
ret = cur.PrevSibling
}
case siblingNext, siblingNextAll, siblingNextUntil:
if cur == nil {
// Start with next sibling of the skip node
ret = skipNode.NextSibling
} else {
ret = cur.NextSibling
}
default:
panic("Invalid sibling type.")
}
if ret == nil || ret.Type == html.ElementNode || st == siblingAllIncludingNonElements {
return
}
// Not a valid node, try again from this one
cur = ret
}
}
for c := iter(nil); c != nil; c = iter(c) {
// If this is an ...Until case, test before append (returns true
// if the until condition is reached)
if st == siblingNextUntil || st == siblingPrevUntil {
if untilFunc(c) {
return
}
}
result = append(result, c)
if st == siblingNext || st == siblingPrev {
// Only one node was requested (immediate next or previous), so exit
return
}
}
return
}
// Internal implementation of parent nodes that return a raw slice of Nodes.
func getParentNodes(nodes []*html.Node) []*html.Node {
return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
if n.Parent != nil && n.Parent.Type == html.ElementNode {
return []*html.Node{n.Parent}
}
return nil
})
}
// Internal map function used by many traversing methods. Takes the source nodes
// to iterate on and the mapping function that returns an array of nodes.
// Returns an array of nodes mapped by calling the callback function once for
// each node in the source nodes.
func mapNodes(nodes []*html.Node, f func(int, *html.Node) []*html.Node) (result []*html.Node) {
set := make(map[*html.Node]bool)
for i, n := range nodes {
if vals := f(i, n); len(vals) > 0 {
result = appendWithoutDuplicates(result, vals, set)
}
}
return result
}

141
vendor/github.com/PuerkitoBio/goquery/type.go generated vendored Normal file
View File

@ -0,0 +1,141 @@
package goquery
import (
"errors"
"io"
"net/http"
"net/url"
"github.com/andybalholm/cascadia"
"golang.org/x/net/html"
)
// Document represents an HTML document to be manipulated. Unlike jQuery, which
// is loaded as part of a DOM document, and thus acts upon its containing
// document, GoQuery doesn't know which HTML document to act upon. So it needs
// to be told, and that's what the Document class is for. It holds the root
// document node to manipulate, and can make selections on this document.
type Document struct {
*Selection
Url *url.URL
rootNode *html.Node
}
// NewDocumentFromNode is a Document constructor that takes a root html Node
// as argument.
func NewDocumentFromNode(root *html.Node) *Document {
return newDocument(root, nil)
}
// NewDocument is a Document constructor that takes a string URL as argument.
// It loads the specified document, parses it, and stores the root Document
// node, ready to be manipulated.
//
// Deprecated: Use the net/http standard library package to make the request
// and validate the response before calling goquery.NewDocumentFromReader
// with the response's body.
func NewDocument(url string) (*Document, error) {
// Load the URL
res, e := http.Get(url)
if e != nil {
return nil, e
}
return NewDocumentFromResponse(res)
}
// NewDocumentFromReader returns a Document from an io.Reader.
// It returns an error as second value if the reader's data cannot be parsed
// as html. It does not check if the reader is also an io.Closer, the
// provided reader is never closed by this call. It is the responsibility
// of the caller to close it if required.
func NewDocumentFromReader(r io.Reader) (*Document, error) {
root, e := html.Parse(r)
if e != nil {
return nil, e
}
return newDocument(root, nil), nil
}
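// A minimal sketch of building a Document from an HTTP response, which is the
// pattern the deprecation notes around NewDocument point to (the URL is illustrative):
//
//	res, err := http.Get("https://example.com")
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer res.Body.Close()
//	doc, err := goquery.NewDocumentFromReader(res.Body)
//	if err != nil {
//		log.Fatal(err)
//	}
//	_ = doc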
// NewDocumentFromResponse is another Document constructor that takes an http response as argument.
// It loads the specified response's document, parses it, and stores the root Document
// node, ready to be manipulated. The response's body is closed on return.
//
// Deprecated: Use goquery.NewDocumentFromReader with the response's body.
func NewDocumentFromResponse(res *http.Response) (*Document, error) {
if res == nil {
return nil, errors.New("Response is nil")
}
defer res.Body.Close()
if res.Request == nil {
return nil, errors.New("Response.Request is nil")
}
// Parse the HTML into nodes
root, e := html.Parse(res.Body)
if e != nil {
return nil, e
}
// Create and fill the document
return newDocument(root, res.Request.URL), nil
}
// CloneDocument creates a deep-clone of a document.
func CloneDocument(doc *Document) *Document {
return newDocument(cloneNode(doc.rootNode), doc.Url)
}
// Private constructor, make sure all fields are correctly filled.
func newDocument(root *html.Node, url *url.URL) *Document {
// Create and fill the document
d := &Document{nil, url, root}
d.Selection = newSingleSelection(root, d)
return d
}
// Selection represents a collection of nodes matching some criteria. The
// initial Selection can be created by using Document.Find, and then
// manipulated using the jQuery-like chainable syntax and methods.
type Selection struct {
Nodes []*html.Node
document *Document
prevSel *Selection
}
// Helper constructor to create an empty selection
func newEmptySelection(doc *Document) *Selection {
return &Selection{nil, doc, nil}
}
// Helper constructor to create a selection of only one node
func newSingleSelection(node *html.Node, doc *Document) *Selection {
return &Selection{[]*html.Node{node}, doc, nil}
}
// Matcher is an interface that defines the methods to match
// HTML nodes against a compiled selector string. Cascadia's
// Selector implements this interface.
type Matcher interface {
Match(*html.Node) bool
MatchAll(*html.Node) []*html.Node
Filter([]*html.Node) []*html.Node
}
// compileMatcher compiles the selector string s and returns
// the corresponding Matcher. If s is an invalid selector string,
// it returns a Matcher that fails all matches.
func compileMatcher(s string) Matcher {
cs, err := cascadia.Compile(s)
if err != nil {
return invalidMatcher{}
}
return cs
}
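// A short sketch of using a precompiled Matcher directly, assuming doc is a
// parsed *goquery.Document (the selector is illustrative):
//
//	m, err := cascadia.Compile("div.content > p")
//	if err != nil {
//		log.Fatal(err)
//	}
//	doc.FindMatcher(m).Each(func(i int, s *goquery.Selection) {
//		fmt.Println(s.Text())
//	})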
// invalidMatcher is a Matcher that always fails to match.
type invalidMatcher struct{}
func (invalidMatcher) Match(n *html.Node) bool { return false }
func (invalidMatcher) MatchAll(n *html.Node) []*html.Node { return nil }
func (invalidMatcher) Filter(ns []*html.Node) []*html.Node { return nil }

161
vendor/github.com/PuerkitoBio/goquery/utilities.go generated vendored Normal file
View File

@ -0,0 +1,161 @@
package goquery
import (
"bytes"
"golang.org/x/net/html"
)
// used to determine if a set (map[*html.Node]bool) should be used
// instead of iterating over a slice. The set uses more memory and
// is slower than slice iteration for small N.
const minNodesForSet = 1000
var nodeNames = []string{
html.ErrorNode: "#error",
html.TextNode: "#text",
html.DocumentNode: "#document",
html.CommentNode: "#comment",
}
// NodeName returns the node name of the first element in the selection.
// It tries to behave in a similar way as the DOM's nodeName property
// (https://developer.mozilla.org/en-US/docs/Web/API/Node/nodeName).
//
// Go's net/html package defines the following node types, listed with
// the corresponding returned value from this function:
//
// ErrorNode : #error
// TextNode : #text
// DocumentNode : #document
// ElementNode : the element's tag name
// CommentNode : #comment
// DoctypeNode : the name of the document type
//
func NodeName(s *Selection) string {
if s.Length() == 0 {
return ""
}
switch n := s.Get(0); n.Type {
case html.ElementNode, html.DoctypeNode:
return n.Data
default:
if n.Type >= 0 && int(n.Type) < len(nodeNames) {
return nodeNames[n.Type]
}
return ""
}
}
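// A short sketch of NodeName (illustrative markup):
//
//	doc, _ := goquery.NewDocumentFromReader(strings.NewReader(`<p>hi<!-- c --></p>`))
//	fmt.Println(goquery.NodeName(doc.Find("p")))            // "p"
//	fmt.Println(goquery.NodeName(doc.Find("p").Contents())) // "#text" (first content node)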
// OuterHtml returns the outer HTML rendering of the first item in
// the selection - that is, the HTML including the first element's
// tag and attributes.
//
// Unlike InnerHtml, this is a function and not a method on the Selection,
// because this is not a jQuery method (in javascript-land, this is
// a property provided by the DOM).
func OuterHtml(s *Selection) (string, error) {
var buf bytes.Buffer
if s.Length() == 0 {
return "", nil
}
n := s.Get(0)
if err := html.Render(&buf, n); err != nil {
return "", err
}
return buf.String(), nil
}
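// A short sketch contrasting OuterHtml with the Html method (illustrative markup):
//
//	doc, _ := goquery.NewDocumentFromReader(strings.NewReader(`<div id="a"><b>x</b></div>`))
//	outer, _ := goquery.OuterHtml(doc.Find("#a")) // `<div id="a"><b>x</b></div>`
//	inner, _ := doc.Find("#a").Html()             // `<b>x</b>`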
// Loop through all container nodes to search for the target node.
func sliceContains(container []*html.Node, contained *html.Node) bool {
for _, n := range container {
if nodeContains(n, contained) {
return true
}
}
return false
}
// Checks if the contained node is within the container node.
func nodeContains(container *html.Node, contained *html.Node) bool {
// Check if the parent of the contained node is the container node, traversing
// upward until the top is reached, or the container is found.
for contained = contained.Parent; contained != nil; contained = contained.Parent {
if container == contained {
return true
}
}
return false
}
// Checks if the target node is in the slice of nodes.
func isInSlice(slice []*html.Node, node *html.Node) bool {
return indexInSlice(slice, node) > -1
}
// Returns the index of the target node in the slice, or -1.
func indexInSlice(slice []*html.Node, node *html.Node) int {
if node != nil {
for i, n := range slice {
if n == node {
return i
}
}
}
return -1
}
// Appends the new nodes to the target slice, making sure no duplicate is added.
// There is no check to the original state of the target slice, so it may still
// contain duplicates. The target slice is returned because append() may create
// a new underlying array. If targetSet is nil, a local set is created with the
// target if len(target) + len(nodes) is greater than minNodesForSet.
func appendWithoutDuplicates(target []*html.Node, nodes []*html.Node, targetSet map[*html.Node]bool) []*html.Node {
// if there are not that many nodes, don't use the map, faster to just use nested loops
// (unless a non-nil targetSet is passed, in which case the caller knows better).
if targetSet == nil && len(target)+len(nodes) < minNodesForSet {
for _, n := range nodes {
if !isInSlice(target, n) {
target = append(target, n)
}
}
return target
}
// if a targetSet is passed, then assume it is reliable, otherwise create one
// and initialize it with the current target contents.
if targetSet == nil {
targetSet = make(map[*html.Node]bool, len(target))
for _, n := range target {
targetSet[n] = true
}
}
for _, n := range nodes {
if !targetSet[n] {
target = append(target, n)
targetSet[n] = true
}
}
return target
}
// Loop through a selection, returning only those nodes that pass the predicate
// function.
func grep(sel *Selection, predicate func(i int, s *Selection) bool) (result []*html.Node) {
for i, n := range sel.Nodes {
if predicate(i, newSingleSelection(n, sel.document)) {
result = append(result, n)
}
}
return result
}
// Creates a new Selection object based on the specified nodes, and keeps the
// source Selection object on the stack (linked list).
func pushStack(fromSel *Selection, nodes []*html.Node) *Selection {
result := &Selection{nodes, fromSel.document, fromSel}
return result
}

14
vendor/github.com/andybalholm/cascadia/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,14 @@
language: go
go:
- 1.3
- 1.4
install:
- go get github.com/andybalholm/cascadia
script:
- go test -v
notifications:
email: false

24
vendor/github.com/andybalholm/cascadia/LICENSE generated vendored Normal file
View File

@ -0,0 +1,24 @@
Copyright (c) 2011 Andy Balholm. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

7
vendor/github.com/andybalholm/cascadia/README.md generated vendored Normal file
View File

@ -0,0 +1,7 @@
# cascadia
[![](https://travis-ci.org/andybalholm/cascadia.svg)](https://travis-ci.org/andybalholm/cascadia)
The Cascadia package implements CSS selectors for use with the parse trees produced by the html package.
To test CSS selectors without writing Go code, check out [cascadia](https://github.com/suntong/cascadia) the command line tool, a thin wrapper around this package.

3
vendor/github.com/andybalholm/cascadia/go.mod generated vendored Normal file
View File

@ -0,0 +1,3 @@
module "github.com/andybalholm/cascadia"
require "golang.org/x/net" v0.0.0-20180218175443-cbe0f9307d01

835
vendor/github.com/andybalholm/cascadia/parser.go generated vendored Normal file
View File

@ -0,0 +1,835 @@
// Package cascadia is an implementation of CSS selectors.
package cascadia
import (
"errors"
"fmt"
"regexp"
"strconv"
"strings"
"golang.org/x/net/html"
)
// a parser for CSS selectors
type parser struct {
s string // the source text
i int // the current position
}
// parseEscape parses a backslash escape.
func (p *parser) parseEscape() (result string, err error) {
if len(p.s) < p.i+2 || p.s[p.i] != '\\' {
return "", errors.New("invalid escape sequence")
}
start := p.i + 1
c := p.s[start]
switch {
case c == '\r' || c == '\n' || c == '\f':
return "", errors.New("escaped line ending outside string")
case hexDigit(c):
// unicode escape (hex)
var i int
for i = start; i < p.i+6 && i < len(p.s) && hexDigit(p.s[i]); i++ {
// empty
}
v, _ := strconv.ParseUint(p.s[start:i], 16, 21)
if len(p.s) > i {
switch p.s[i] {
case '\r':
i++
if len(p.s) > i && p.s[i] == '\n' {
i++
}
case ' ', '\t', '\n', '\f':
i++
}
}
p.i = i
return string(rune(v)), nil
}
// Return the literal character after the backslash.
result = p.s[start : start+1]
p.i += 2
return result, nil
}
func hexDigit(c byte) bool {
return '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F'
}
// nameStart returns whether c can be the first character of an identifier
// (not counting an initial hyphen, or an escape sequence).
func nameStart(c byte) bool {
return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c > 127
}
// nameChar returns whether c can be a character within an identifier
// (not counting an escape sequence).
func nameChar(c byte) bool {
return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c > 127 ||
c == '-' || '0' <= c && c <= '9'
}
// parseIdentifier parses an identifier.
func (p *parser) parseIdentifier() (result string, err error) {
startingDash := false
if len(p.s) > p.i && p.s[p.i] == '-' {
startingDash = true
p.i++
}
if len(p.s) <= p.i {
return "", errors.New("expected identifier, found EOF instead")
}
if c := p.s[p.i]; !(nameStart(c) || c == '\\') {
return "", fmt.Errorf("expected identifier, found %c instead", c)
}
result, err = p.parseName()
if startingDash && err == nil {
result = "-" + result
}
return
}
// parseName parses a name (which is like an identifier, but doesn't have
// extra restrictions on the first character).
func (p *parser) parseName() (result string, err error) {
i := p.i
loop:
for i < len(p.s) {
c := p.s[i]
switch {
case nameChar(c):
start := i
for i < len(p.s) && nameChar(p.s[i]) {
i++
}
result += p.s[start:i]
case c == '\\':
p.i = i
val, err := p.parseEscape()
if err != nil {
return "", err
}
i = p.i
result += val
default:
break loop
}
}
if result == "" {
return "", errors.New("expected name, found EOF instead")
}
p.i = i
return result, nil
}
// parseString parses a single- or double-quoted string.
func (p *parser) parseString() (result string, err error) {
i := p.i
if len(p.s) < i+2 {
return "", errors.New("expected string, found EOF instead")
}
quote := p.s[i]
i++
loop:
for i < len(p.s) {
switch p.s[i] {
case '\\':
if len(p.s) > i+1 {
switch c := p.s[i+1]; c {
case '\r':
if len(p.s) > i+2 && p.s[i+2] == '\n' {
i += 3
continue loop
}
fallthrough
case '\n', '\f':
i += 2
continue loop
}
}
p.i = i
val, err := p.parseEscape()
if err != nil {
return "", err
}
i = p.i
result += val
case quote:
break loop
case '\r', '\n', '\f':
return "", errors.New("unexpected end of line in string")
default:
start := i
for i < len(p.s) {
if c := p.s[i]; c == quote || c == '\\' || c == '\r' || c == '\n' || c == '\f' {
break
}
i++
}
result += p.s[start:i]
}
}
if i >= len(p.s) {
return "", errors.New("EOF in string")
}
// Consume the final quote.
i++
p.i = i
return result, nil
}
// parseRegex parses a regular expression; the end is defined by encountering an
// unmatched closing ')' or ']' which is not consumed
func (p *parser) parseRegex() (rx *regexp.Regexp, err error) {
i := p.i
if len(p.s) < i+2 {
return nil, errors.New("expected regular expression, found EOF instead")
}
// number of open parens or brackets;
// when it becomes negative, finished parsing regex
open := 0
loop:
for i < len(p.s) {
switch p.s[i] {
case '(', '[':
open++
case ')', ']':
open--
if open < 0 {
break loop
}
}
i++
}
if i >= len(p.s) {
return nil, errors.New("EOF in regular expression")
}
rx, err = regexp.Compile(p.s[p.i:i])
p.i = i
return rx, err
}
// skipWhitespace consumes whitespace characters and comments.
// It returns true if there was actually anything to skip.
func (p *parser) skipWhitespace() bool {
i := p.i
for i < len(p.s) {
switch p.s[i] {
case ' ', '\t', '\r', '\n', '\f':
i++
continue
case '/':
if strings.HasPrefix(p.s[i:], "/*") {
end := strings.Index(p.s[i+len("/*"):], "*/")
if end != -1 {
i += end + len("/**/")
continue
}
}
}
break
}
if i > p.i {
p.i = i
return true
}
return false
}
// consumeParenthesis consumes an opening parenthesis and any following
// whitespace. It returns true if there was actually a parenthesis to skip.
func (p *parser) consumeParenthesis() bool {
if p.i < len(p.s) && p.s[p.i] == '(' {
p.i++
p.skipWhitespace()
return true
}
return false
}
// consumeClosingParenthesis consumes a closing parenthesis and any preceding
// whitespace. It returns true if there was actually a parenthesis to skip.
func (p *parser) consumeClosingParenthesis() bool {
i := p.i
p.skipWhitespace()
if p.i < len(p.s) && p.s[p.i] == ')' {
p.i++
return true
}
p.i = i
return false
}
// parseTypeSelector parses a type selector (one that matches by tag name).
func (p *parser) parseTypeSelector() (result Selector, err error) {
tag, err := p.parseIdentifier()
if err != nil {
return nil, err
}
return typeSelector(tag), nil
}
// parseIDSelector parses a selector that matches by id attribute.
func (p *parser) parseIDSelector() (Selector, error) {
if p.i >= len(p.s) {
return nil, fmt.Errorf("expected id selector (#id), found EOF instead")
}
if p.s[p.i] != '#' {
return nil, fmt.Errorf("expected id selector (#id), found '%c' instead", p.s[p.i])
}
p.i++
id, err := p.parseName()
if err != nil {
return nil, err
}
return attributeEqualsSelector("id", id), nil
}
// parseClassSelector parses a selector that matches by class attribute.
func (p *parser) parseClassSelector() (Selector, error) {
if p.i >= len(p.s) {
return nil, fmt.Errorf("expected class selector (.class), found EOF instead")
}
if p.s[p.i] != '.' {
return nil, fmt.Errorf("expected class selector (.class), found '%c' instead", p.s[p.i])
}
p.i++
class, err := p.parseIdentifier()
if err != nil {
return nil, err
}
return attributeIncludesSelector("class", class), nil
}
// parseAttributeSelector parses a selector that matches by attribute value.
func (p *parser) parseAttributeSelector() (Selector, error) {
if p.i >= len(p.s) {
return nil, fmt.Errorf("expected attribute selector ([attribute]), found EOF instead")
}
if p.s[p.i] != '[' {
return nil, fmt.Errorf("expected attribute selector ([attribute]), found '%c' instead", p.s[p.i])
}
p.i++
p.skipWhitespace()
key, err := p.parseIdentifier()
if err != nil {
return nil, err
}
p.skipWhitespace()
if p.i >= len(p.s) {
return nil, errors.New("unexpected EOF in attribute selector")
}
if p.s[p.i] == ']' {
p.i++
return attributeExistsSelector(key), nil
}
if p.i+2 >= len(p.s) {
return nil, errors.New("unexpected EOF in attribute selector")
}
op := p.s[p.i : p.i+2]
if op[0] == '=' {
op = "="
} else if op[1] != '=' {
return nil, fmt.Errorf(`expected equality operator, found "%s" instead`, op)
}
p.i += len(op)
p.skipWhitespace()
if p.i >= len(p.s) {
return nil, errors.New("unexpected EOF in attribute selector")
}
var val string
var rx *regexp.Regexp
if op == "#=" {
rx, err = p.parseRegex()
} else {
switch p.s[p.i] {
case '\'', '"':
val, err = p.parseString()
default:
val, err = p.parseIdentifier()
}
}
if err != nil {
return nil, err
}
p.skipWhitespace()
if p.i >= len(p.s) {
return nil, errors.New("unexpected EOF in attribute selector")
}
if p.s[p.i] != ']' {
return nil, fmt.Errorf("expected ']', found '%c' instead", p.s[p.i])
}
p.i++
switch op {
case "=":
return attributeEqualsSelector(key, val), nil
case "!=":
return attributeNotEqualSelector(key, val), nil
case "~=":
return attributeIncludesSelector(key, val), nil
case "|=":
return attributeDashmatchSelector(key, val), nil
case "^=":
return attributePrefixSelector(key, val), nil
case "$=":
return attributeSuffixSelector(key, val), nil
case "*=":
return attributeSubstringSelector(key, val), nil
case "#=":
return attributeRegexSelector(key, rx), nil
}
return nil, fmt.Errorf("attribute operator %q is not supported", op)
}
var errExpectedParenthesis = errors.New("expected '(' but didn't find it")
var errExpectedClosingParenthesis = errors.New("expected ')' but didn't find it")
var errUnmatchedParenthesis = errors.New("unmatched '('")
// parsePseudoclassSelector parses a pseudoclass selector like :not(p).
func (p *parser) parsePseudoclassSelector() (Selector, error) {
if p.i >= len(p.s) {
return nil, fmt.Errorf("expected pseudoclass selector (:pseudoclass), found EOF instead")
}
if p.s[p.i] != ':' {
return nil, fmt.Errorf("expected attribute selector (:pseudoclass), found '%c' instead", p.s[p.i])
}
p.i++
name, err := p.parseIdentifier()
if err != nil {
return nil, err
}
name = toLowerASCII(name)
switch name {
case "not", "has", "haschild":
if !p.consumeParenthesis() {
return nil, errExpectedParenthesis
}
sel, parseErr := p.parseSelectorGroup()
if parseErr != nil {
return nil, parseErr
}
if !p.consumeClosingParenthesis() {
return nil, errExpectedClosingParenthesis
}
switch name {
case "not":
return negatedSelector(sel), nil
case "has":
return hasDescendantSelector(sel), nil
case "haschild":
return hasChildSelector(sel), nil
}
case "contains", "containsown":
if !p.consumeParenthesis() {
return nil, errExpectedParenthesis
}
if p.i == len(p.s) {
return nil, errUnmatchedParenthesis
}
var val string
switch p.s[p.i] {
case '\'', '"':
val, err = p.parseString()
default:
val, err = p.parseIdentifier()
}
if err != nil {
return nil, err
}
val = strings.ToLower(val)
p.skipWhitespace()
if p.i >= len(p.s) {
return nil, errors.New("unexpected EOF in pseudo selector")
}
if !p.consumeClosingParenthesis() {
return nil, errExpectedClosingParenthesis
}
switch name {
case "contains":
return textSubstrSelector(val), nil
case "containsown":
return ownTextSubstrSelector(val), nil
}
case "matches", "matchesown":
if !p.consumeParenthesis() {
return nil, errExpectedParenthesis
}
rx, err := p.parseRegex()
if err != nil {
return nil, err
}
if p.i >= len(p.s) {
return nil, errors.New("unexpected EOF in pseudo selector")
}
if !p.consumeClosingParenthesis() {
return nil, errExpectedClosingParenthesis
}
switch name {
case "matches":
return textRegexSelector(rx), nil
case "matchesown":
return ownTextRegexSelector(rx), nil
}
case "nth-child", "nth-last-child", "nth-of-type", "nth-last-of-type":
if !p.consumeParenthesis() {
return nil, errExpectedParenthesis
}
a, b, err := p.parseNth()
if err != nil {
return nil, err
}
if !p.consumeClosingParenthesis() {
return nil, errExpectedClosingParenthesis
}
if a == 0 {
switch name {
case "nth-child":
return simpleNthChildSelector(b, false), nil
case "nth-of-type":
return simpleNthChildSelector(b, true), nil
case "nth-last-child":
return simpleNthLastChildSelector(b, false), nil
case "nth-last-of-type":
return simpleNthLastChildSelector(b, true), nil
}
}
return nthChildSelector(a, b,
name == "nth-last-child" || name == "nth-last-of-type",
name == "nth-of-type" || name == "nth-last-of-type"),
nil
case "first-child":
return simpleNthChildSelector(1, false), nil
case "last-child":
return simpleNthLastChildSelector(1, false), nil
case "first-of-type":
return simpleNthChildSelector(1, true), nil
case "last-of-type":
return simpleNthLastChildSelector(1, true), nil
case "only-child":
return onlyChildSelector(false), nil
case "only-of-type":
return onlyChildSelector(true), nil
case "input":
return inputSelector, nil
case "empty":
return emptyElementSelector, nil
case "root":
return rootSelector, nil
}
return nil, fmt.Errorf("unknown pseudoclass :%s", name)
}
// parseInteger parses a decimal integer.
func (p *parser) parseInteger() (int, error) {
i := p.i
start := i
for i < len(p.s) && '0' <= p.s[i] && p.s[i] <= '9' {
i++
}
if i == start {
return 0, errors.New("expected integer, but didn't find it")
}
p.i = i
val, err := strconv.Atoi(p.s[start:i])
if err != nil {
return 0, err
}
return val, nil
}
// parseNth parses the argument for :nth-child (normally of the form an+b).
func (p *parser) parseNth() (a, b int, err error) {
// initial state
if p.i >= len(p.s) {
goto eof
}
switch p.s[p.i] {
case '-':
p.i++
goto negativeA
case '+':
p.i++
goto positiveA
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
goto positiveA
case 'n', 'N':
a = 1
p.i++
goto readN
case 'o', 'O', 'e', 'E':
id, nameErr := p.parseName()
if nameErr != nil {
return 0, 0, nameErr
}
id = toLowerASCII(id)
if id == "odd" {
return 2, 1, nil
}
if id == "even" {
return 2, 0, nil
}
return 0, 0, fmt.Errorf("expected 'odd' or 'even', but found '%s' instead", id)
default:
goto invalid
}
positiveA:
if p.i >= len(p.s) {
goto eof
}
switch p.s[p.i] {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
a, err = p.parseInteger()
if err != nil {
return 0, 0, err
}
goto readA
case 'n', 'N':
a = 1
p.i++
goto readN
default:
goto invalid
}
negativeA:
if p.i >= len(p.s) {
goto eof
}
switch p.s[p.i] {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
a, err = p.parseInteger()
if err != nil {
return 0, 0, err
}
a = -a
goto readA
case 'n', 'N':
a = -1
p.i++
goto readN
default:
goto invalid
}
readA:
if p.i >= len(p.s) {
goto eof
}
switch p.s[p.i] {
case 'n', 'N':
p.i++
goto readN
default:
// The number we read as a is actually b.
return 0, a, nil
}
readN:
p.skipWhitespace()
if p.i >= len(p.s) {
goto eof
}
switch p.s[p.i] {
case '+':
p.i++
p.skipWhitespace()
b, err = p.parseInteger()
if err != nil {
return 0, 0, err
}
return a, b, nil
case '-':
p.i++
p.skipWhitespace()
b, err = p.parseInteger()
if err != nil {
return 0, 0, err
}
return a, -b, nil
default:
return a, 0, nil
}
eof:
return 0, 0, errors.New("unexpected EOF while attempting to parse expression of form an+b")
invalid:
return 0, 0, errors.New("unexpected character while attempting to parse expression of form an+b")
}
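// Worked examples (illustrative only, not part of the upstream source) of the
// an+b forms accepted by parseNth:
//
//	"odd"  -> a=2, b=1  (matches positions 1, 3, 5, ...)
//	"even" -> a=2, b=0  (matches positions 2, 4, 6, ...)
//	"3n+1" -> a=3, b=1  (matches positions 1, 4, 7, ...)
//	"-n+2" -> a=-1, b=2 (matches positions 1 and 2)
//	"5"    -> a=0, b=5  (matches only position 5)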
// parseSimpleSelectorSequence parses a selector sequence that applies to
// a single element.
func (p *parser) parseSimpleSelectorSequence() (Selector, error) {
var result Selector
if p.i >= len(p.s) {
return nil, errors.New("expected selector, found EOF instead")
}
switch p.s[p.i] {
case '*':
// It's the universal selector. Just skip over it, since it doesn't affect the meaning.
p.i++
case '#', '.', '[', ':':
// There's no type selector. Wait to process the other till the main loop.
default:
r, err := p.parseTypeSelector()
if err != nil {
return nil, err
}
result = r
}
loop:
for p.i < len(p.s) {
var ns Selector
var err error
switch p.s[p.i] {
case '#':
ns, err = p.parseIDSelector()
case '.':
ns, err = p.parseClassSelector()
case '[':
ns, err = p.parseAttributeSelector()
case ':':
ns, err = p.parsePseudoclassSelector()
default:
break loop
}
if err != nil {
return nil, err
}
if result == nil {
result = ns
} else {
result = intersectionSelector(result, ns)
}
}
if result == nil {
result = func(n *html.Node) bool {
return n.Type == html.ElementNode
}
}
return result, nil
}
// parseSelector parses a selector that may include combinators.
func (p *parser) parseSelector() (result Selector, err error) {
p.skipWhitespace()
result, err = p.parseSimpleSelectorSequence()
if err != nil {
return
}
for {
var combinator byte
if p.skipWhitespace() {
combinator = ' '
}
if p.i >= len(p.s) {
return
}
switch p.s[p.i] {
case '+', '>', '~':
combinator = p.s[p.i]
p.i++
p.skipWhitespace()
case ',', ')':
// These characters can't begin a selector, but they can legally occur after one.
return
}
if combinator == 0 {
return
}
c, err := p.parseSimpleSelectorSequence()
if err != nil {
return nil, err
}
switch combinator {
case ' ':
result = descendantSelector(result, c)
case '>':
result = childSelector(result, c)
case '+':
result = siblingSelector(result, c, true)
case '~':
result = siblingSelector(result, c, false)
}
}
panic("unreachable")
}
// parseSelectorGroup parses a group of selectors, separated by commas.
func (p *parser) parseSelectorGroup() (result Selector, err error) {
result, err = p.parseSelector()
if err != nil {
return
}
for p.i < len(p.s) {
if p.s[p.i] != ',' {
return result, nil
}
p.i++
c, err := p.parseSelector()
if err != nil {
return nil, err
}
result = unionSelector(result, c)
}
return
}

622
vendor/github.com/andybalholm/cascadia/selector.go generated vendored Normal file
View File

@ -0,0 +1,622 @@
package cascadia
import (
"bytes"
"fmt"
"regexp"
"strings"
"golang.org/x/net/html"
)
// the Selector type, and functions for creating them
// A Selector is a function which tells whether a node matches or not.
type Selector func(*html.Node) bool
// hasChildMatch returns whether n has any child that matches a.
func hasChildMatch(n *html.Node, a Selector) bool {
for c := n.FirstChild; c != nil; c = c.NextSibling {
if a(c) {
return true
}
}
return false
}
// hasDescendantMatch performs a depth-first search of n's descendants,
// testing whether any of them match a. It returns true as soon as a match is
// found, or false if no match is found.
func hasDescendantMatch(n *html.Node, a Selector) bool {
for c := n.FirstChild; c != nil; c = c.NextSibling {
if a(c) || (c.Type == html.ElementNode && hasDescendantMatch(c, a)) {
return true
}
}
return false
}
// Compile parses a selector and returns, if successful, a Selector object
// that can be used to match against html.Node objects.
func Compile(sel string) (Selector, error) {
p := &parser{s: sel}
compiled, err := p.parseSelectorGroup()
if err != nil {
return nil, err
}
if p.i < len(sel) {
return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i)
}
return compiled, nil
}
// MustCompile is like Compile, but panics instead of returning an error.
func MustCompile(sel string) Selector {
compiled, err := Compile(sel)
if err != nil {
panic(err)
}
return compiled
}
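// Example usage (an illustrative sketch, not part of the upstream source),
// assuming a document parsed with golang.org/x/net/html:
//
//	doc, err := html.Parse(strings.NewReader(`<ul><li class="x">a</li><li>b</li></ul>`))
//	if err != nil {
//		log.Fatal(err)
//	}
//	sel := cascadia.MustCompile("li.x")
//	for _, n := range sel.MatchAll(doc) {
//		fmt.Println(n.Data) // prints "li" once, for the matching element
//	}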
// MatchAll returns a slice of the nodes that match the selector,
// from n and its children.
func (s Selector) MatchAll(n *html.Node) []*html.Node {
return s.matchAllInto(n, nil)
}
func (s Selector) matchAllInto(n *html.Node, storage []*html.Node) []*html.Node {
if s(n) {
storage = append(storage, n)
}
for child := n.FirstChild; child != nil; child = child.NextSibling {
storage = s.matchAllInto(child, storage)
}
return storage
}
// Match returns true if the node matches the selector.
func (s Selector) Match(n *html.Node) bool {
return s(n)
}
// MatchFirst returns the first node that matches s, from n and its children.
func (s Selector) MatchFirst(n *html.Node) *html.Node {
if s.Match(n) {
return n
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
m := s.MatchFirst(c)
if m != nil {
return m
}
}
return nil
}
// Filter returns the nodes in nodes that match the selector.
func (s Selector) Filter(nodes []*html.Node) (result []*html.Node) {
for _, n := range nodes {
if s(n) {
result = append(result, n)
}
}
return result
}
// typeSelector returns a Selector that matches elements with a given tag name.
func typeSelector(tag string) Selector {
tag = toLowerASCII(tag)
return func(n *html.Node) bool {
return n.Type == html.ElementNode && n.Data == tag
}
}
// toLowerASCII returns s with all ASCII capital letters lowercased.
func toLowerASCII(s string) string {
var b []byte
for i := 0; i < len(s); i++ {
if c := s[i]; 'A' <= c && c <= 'Z' {
if b == nil {
b = make([]byte, len(s))
copy(b, s)
}
b[i] = s[i] + ('a' - 'A')
}
}
if b == nil {
return s
}
return string(b)
}
// attributeSelector returns a Selector that matches elements
// where the attribute named key satisfies the function f.
func attributeSelector(key string, f func(string) bool) Selector {
key = toLowerASCII(key)
return func(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
for _, a := range n.Attr {
if a.Key == key && f(a.Val) {
return true
}
}
return false
}
}
// attributeExistsSelector returns a Selector that matches elements that have
// an attribute named key.
func attributeExistsSelector(key string) Selector {
return attributeSelector(key, func(string) bool { return true })
}
// attributeEqualsSelector returns a Selector that matches elements where
// the attribute named key has the value val.
func attributeEqualsSelector(key, val string) Selector {
return attributeSelector(key,
func(s string) bool {
return s == val
})
}
// attributeNotEqualSelector returns a Selector that matches elements where
// the attribute named key does not have the value val.
func attributeNotEqualSelector(key, val string) Selector {
key = toLowerASCII(key)
return func(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
for _, a := range n.Attr {
if a.Key == key && a.Val == val {
return false
}
}
return true
}
}
// attributeIncludesSelector returns a Selector that matches elements where
// the attribute named key is a whitespace-separated list that includes val.
func attributeIncludesSelector(key, val string) Selector {
return attributeSelector(key,
func(s string) bool {
for s != "" {
i := strings.IndexAny(s, " \t\r\n\f")
if i == -1 {
return s == val
}
if s[:i] == val {
return true
}
s = s[i+1:]
}
return false
})
}
// attributeDashmatchSelector returns a Selector that matches elements where
// the attribute named key equals val or starts with val plus a hyphen.
func attributeDashmatchSelector(key, val string) Selector {
return attributeSelector(key,
func(s string) bool {
if s == val {
return true
}
if len(s) <= len(val) {
return false
}
if s[:len(val)] == val && s[len(val)] == '-' {
return true
}
return false
})
}
// attributePrefixSelector returns a Selector that matches elements where
// the attribute named key starts with val.
func attributePrefixSelector(key, val string) Selector {
return attributeSelector(key,
func(s string) bool {
if strings.TrimSpace(s) == "" {
return false
}
return strings.HasPrefix(s, val)
})
}
// attributeSuffixSelector returns a Selector that matches elements where
// the attribute named key ends with val.
func attributeSuffixSelector(key, val string) Selector {
return attributeSelector(key,
func(s string) bool {
if strings.TrimSpace(s) == "" {
return false
}
return strings.HasSuffix(s, val)
})
}
// attributeSubstringSelector returns a Selector that matches nodes where
// the attribute named key contains val.
func attributeSubstringSelector(key, val string) Selector {
return attributeSelector(key,
func(s string) bool {
if strings.TrimSpace(s) == "" {
return false
}
return strings.Contains(s, val)
})
}
// attributeRegexSelector returns a Selector that matches nodes where
// the attribute named key matches the regular expression rx
func attributeRegexSelector(key string, rx *regexp.Regexp) Selector {
return attributeSelector(key,
func(s string) bool {
return rx.MatchString(s)
})
}
// intersectionSelector returns a selector that matches nodes that match
// both a and b.
func intersectionSelector(a, b Selector) Selector {
return func(n *html.Node) bool {
return a(n) && b(n)
}
}
// unionSelector returns a selector that matches elements that match
// either a or b.
func unionSelector(a, b Selector) Selector {
return func(n *html.Node) bool {
return a(n) || b(n)
}
}
// negatedSelector returns a selector that matches elements that do not match a.
func negatedSelector(a Selector) Selector {
return func(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
return !a(n)
}
}
// writeNodeText writes the text contained in n and its descendants to b.
func writeNodeText(n *html.Node, b *bytes.Buffer) {
switch n.Type {
case html.TextNode:
b.WriteString(n.Data)
case html.ElementNode:
for c := n.FirstChild; c != nil; c = c.NextSibling {
writeNodeText(c, b)
}
}
}
// nodeText returns the text contained in n and its descendants.
func nodeText(n *html.Node) string {
var b bytes.Buffer
writeNodeText(n, &b)
return b.String()
}
// nodeOwnText returns the contents of the text nodes that are direct
// children of n.
func nodeOwnText(n *html.Node) string {
var b bytes.Buffer
for c := n.FirstChild; c != nil; c = c.NextSibling {
if c.Type == html.TextNode {
b.WriteString(c.Data)
}
}
return b.String()
}
// textSubstrSelector returns a selector that matches nodes that
// contain the given text.
func textSubstrSelector(val string) Selector {
return func(n *html.Node) bool {
text := strings.ToLower(nodeText(n))
return strings.Contains(text, val)
}
}
// ownTextSubstrSelector returns a selector that matches nodes that
// directly contain the given text
func ownTextSubstrSelector(val string) Selector {
return func(n *html.Node) bool {
text := strings.ToLower(nodeOwnText(n))
return strings.Contains(text, val)
}
}
// textRegexSelector returns a selector that matches nodes whose text matches
// the specified regular expression
func textRegexSelector(rx *regexp.Regexp) Selector {
return func(n *html.Node) bool {
return rx.MatchString(nodeText(n))
}
}
// ownTextRegexSelector returns a selector that matches nodes whose text
// directly matches the specified regular expression
func ownTextRegexSelector(rx *regexp.Regexp) Selector {
return func(n *html.Node) bool {
return rx.MatchString(nodeOwnText(n))
}
}
// hasChildSelector returns a selector that matches elements
// with a child that matches a.
func hasChildSelector(a Selector) Selector {
return func(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
return hasChildMatch(n, a)
}
}
// hasDescendantSelector returns a selector that matches elements
// with any descendant that matches a.
func hasDescendantSelector(a Selector) Selector {
return func(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
return hasDescendantMatch(n, a)
}
}
// nthChildSelector returns a selector that implements :nth-child(an+b).
// If last is true, implements :nth-last-child instead.
// If ofType is true, implements :nth-of-type instead.
func nthChildSelector(a, b int, last, ofType bool) Selector {
return func(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
parent := n.Parent
if parent == nil {
return false
}
if parent.Type == html.DocumentNode {
return false
}
i := -1
count := 0
for c := parent.FirstChild; c != nil; c = c.NextSibling {
if (c.Type != html.ElementNode) || (ofType && c.Data != n.Data) {
continue
}
count++
if c == n {
i = count
if !last {
break
}
}
}
if i == -1 {
// This shouldn't happen, since n should always be one of its parent's children.
return false
}
if last {
i = count - i + 1
}
i -= b
if a == 0 {
return i == 0
}
return i%a == 0 && i/a >= 0
}
}
// simpleNthChildSelector returns a selector that implements :nth-child(b).
// If ofType is true, implements :nth-of-type instead.
func simpleNthChildSelector(b int, ofType bool) Selector {
return func(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
parent := n.Parent
if parent == nil {
return false
}
if parent.Type == html.DocumentNode {
return false
}
count := 0
for c := parent.FirstChild; c != nil; c = c.NextSibling {
if c.Type != html.ElementNode || (ofType && c.Data != n.Data) {
continue
}
count++
if c == n {
return count == b
}
if count >= b {
return false
}
}
return false
}
}
// simpleNthLastChildSelector returns a selector that implements
// :nth-last-child(b). If ofType is true, implements :nth-last-of-type
// instead.
func simpleNthLastChildSelector(b int, ofType bool) Selector {
return func(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
parent := n.Parent
if parent == nil {
return false
}
if parent.Type == html.DocumentNode {
return false
}
count := 0
for c := parent.LastChild; c != nil; c = c.PrevSibling {
if c.Type != html.ElementNode || (ofType && c.Data != n.Data) {
continue
}
count++
if c == n {
return count == b
}
if count >= b {
return false
}
}
return false
}
}
// onlyChildSelector returns a selector that implements :only-child.
// If ofType is true, it implements :only-of-type instead.
func onlyChildSelector(ofType bool) Selector {
return func(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
parent := n.Parent
if parent == nil {
return false
}
if parent.Type == html.DocumentNode {
return false
}
count := 0
for c := parent.FirstChild; c != nil; c = c.NextSibling {
if (c.Type != html.ElementNode) || (ofType && c.Data != n.Data) {
continue
}
count++
if count > 1 {
return false
}
}
return count == 1
}
}
// inputSelector is a Selector that matches input, select, textarea and button elements.
func inputSelector(n *html.Node) bool {
return n.Type == html.ElementNode && (n.Data == "input" || n.Data == "select" || n.Data == "textarea" || n.Data == "button")
}
// emptyElementSelector is a Selector that matches empty elements.
func emptyElementSelector(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
switch c.Type {
case html.ElementNode, html.TextNode:
return false
}
}
return true
}
// descendantSelector returns a Selector that matches an element if
// it matches d and has an ancestor that matches a.
func descendantSelector(a, d Selector) Selector {
return func(n *html.Node) bool {
if !d(n) {
return false
}
for p := n.Parent; p != nil; p = p.Parent {
if a(p) {
return true
}
}
return false
}
}
// childSelector returns a Selector that matches an element if
// it matches d and its parent matches a.
func childSelector(a, d Selector) Selector {
return func(n *html.Node) bool {
return d(n) && n.Parent != nil && a(n.Parent)
}
}
// siblingSelector returns a Selector that matches an element
// if it matches s2 and is preceded by an element that matches s1.
// If adjacent is true, the sibling must be immediately before the element.
func siblingSelector(s1, s2 Selector, adjacent bool) Selector {
return func(n *html.Node) bool {
if !s2(n) {
return false
}
if adjacent {
for n = n.PrevSibling; n != nil; n = n.PrevSibling {
if n.Type == html.TextNode || n.Type == html.CommentNode {
continue
}
return s1(n)
}
return false
}
// Walk backwards looking for element that matches s1
for c := n.PrevSibling; c != nil; c = c.PrevSibling {
if s1(c) {
return true
}
}
return false
}
}
// rootSelector implements :root
func rootSelector(n *html.Node) bool {
if n.Type != html.ElementNode {
return false
}
if n.Parent == nil {
return false
}
return n.Parent.Type == html.DocumentNode
}

25
vendor/github.com/gorilla/websocket/.gitignore generated vendored Normal file
View File

@ -0,0 +1,25 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe
.idea/
*.iml

19
vendor/github.com/gorilla/websocket/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,19 @@
language: go
sudo: false
matrix:
include:
- go: 1.7.x
- go: 1.8.x
- go: 1.9.x
- go: 1.10.x
- go: 1.11.x
- go: tip
allow_failures:
- go: tip
script:
- go get -t -v ./...
- diff -u <(echo -n) <(gofmt -d .)
- go vet $(go list ./... | grep -v /vendor/)
- go test -v -race ./...

9
vendor/github.com/gorilla/websocket/AUTHORS generated vendored Normal file
View File

@ -0,0 +1,9 @@
# This is the official list of Gorilla WebSocket authors for copyright
# purposes.
#
# Please keep the list sorted.
Gary Burd <gary@beagledreams.com>
Google LLC (https://opensource.google.com/)
Joachim Bauch <mail@joachim-bauch.de>

22
vendor/github.com/gorilla/websocket/LICENSE generated vendored Normal file
View File

@ -0,0 +1,22 @@
Copyright (c) 2013 The Gorilla WebSocket Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

64
vendor/github.com/gorilla/websocket/README.md generated vendored Normal file
View File

@ -0,0 +1,64 @@
# Gorilla WebSocket
Gorilla WebSocket is a [Go](http://golang.org/) implementation of the
[WebSocket](http://www.rfc-editor.org/rfc/rfc6455.txt) protocol.
[![Build Status](https://travis-ci.org/gorilla/websocket.svg?branch=master)](https://travis-ci.org/gorilla/websocket)
[![GoDoc](https://godoc.org/github.com/gorilla/websocket?status.svg)](https://godoc.org/github.com/gorilla/websocket)
### Documentation
* [API Reference](http://godoc.org/github.com/gorilla/websocket)
* [Chat example](https://github.com/gorilla/websocket/tree/master/examples/chat)
* [Command example](https://github.com/gorilla/websocket/tree/master/examples/command)
* [Client and server example](https://github.com/gorilla/websocket/tree/master/examples/echo)
* [File watch example](https://github.com/gorilla/websocket/tree/master/examples/filewatch)
### Status
The Gorilla WebSocket package provides a complete and tested implementation of
the [WebSocket](http://www.rfc-editor.org/rfc/rfc6455.txt) protocol. The
package API is stable.
### Installation
go get github.com/gorilla/websocket
### Protocol Compliance
The Gorilla WebSocket package passes the server tests in the [Autobahn Test
Suite](http://autobahn.ws/testsuite) using the application in the [examples/autobahn
subdirectory](https://github.com/gorilla/websocket/tree/master/examples/autobahn).
### Gorilla WebSocket compared with other packages
<table>
<tr>
<th></th>
<th><a href="http://godoc.org/github.com/gorilla/websocket">github.com/gorilla</a></th>
<th><a href="http://godoc.org/golang.org/x/net/websocket">golang.org/x/net</a></th>
</tr>
<tr><td colspan="3"><a href="http://tools.ietf.org/html/rfc6455">RFC 6455</a> Features</td></tr>
<tr><td>Passes <a href="http://autobahn.ws/testsuite/">Autobahn Test Suite</a></td><td><a href="https://github.com/gorilla/websocket/tree/master/examples/autobahn">Yes</a></td><td>No</td></tr>
<tr><td>Receive <a href="https://tools.ietf.org/html/rfc6455#section-5.4">fragmented</a> message</td><td>Yes</td><td><a href="https://code.google.com/p/go/issues/detail?id=7632">No</a>, see note 1</td></tr>
<tr><td>Send <a href="https://tools.ietf.org/html/rfc6455#section-5.5.1">close</a> message</td><td><a href="http://godoc.org/github.com/gorilla/websocket#hdr-Control_Messages">Yes</a></td><td><a href="https://code.google.com/p/go/issues/detail?id=4588">No</a></td></tr>
<tr><td>Send <a href="https://tools.ietf.org/html/rfc6455#section-5.5.2">pings</a> and receive <a href="https://tools.ietf.org/html/rfc6455#section-5.5.3">pongs</a></td><td><a href="http://godoc.org/github.com/gorilla/websocket#hdr-Control_Messages">Yes</a></td><td>No</td></tr>
<tr><td>Get the <a href="https://tools.ietf.org/html/rfc6455#section-5.6">type</a> of a received data message</td><td>Yes</td><td>Yes, see note 2</td></tr>
<tr><td colspan="3">Other Features</tr></td>
<tr><td><a href="https://tools.ietf.org/html/rfc7692">Compression Extensions</a></td><td>Experimental</td><td>No</td></tr>
<tr><td>Read message using io.Reader</td><td><a href="http://godoc.org/github.com/gorilla/websocket#Conn.NextReader">Yes</a></td><td>No, see note 3</td></tr>
<tr><td>Write message using io.WriteCloser</td><td><a href="http://godoc.org/github.com/gorilla/websocket#Conn.NextWriter">Yes</a></td><td>No, see note 3</td></tr>
</table>
Notes:
1. Large messages are fragmented in [Chrome's new WebSocket implementation](http://www.ietf.org/mail-archive/web/hybi/current/msg10503.html).
2. The application can get the type of a received data message by implementing
a [Codec marshal](http://godoc.org/golang.org/x/net/websocket#Codec.Marshal)
function.
3. The go.net io.Reader and io.Writer operate across WebSocket frame boundaries.
Read returns when the input buffer is full or a frame boundary is
encountered. Each call to Write sends a single frame message. The Gorilla
io.Reader and io.WriteCloser operate on a single WebSocket message.
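### Minimal client example
The sketch below is illustrative and not part of the upstream README; the URL
`wss://echo.example.org/ws` is a placeholder for a real WebSocket endpoint. It
dials with the package's `DefaultDialer`, writes one text message and reads the
reply:
```go
package main

import (
	"log"

	"github.com/gorilla/websocket"
)

func main() {
	// Connect using the default dialer (45 second handshake timeout).
	conn, _, err := websocket.DefaultDialer.Dial("wss://echo.example.org/ws", nil)
	if err != nil {
		log.Fatal("dial:", err)
	}
	defer conn.Close()

	// Send a text message.
	if err := conn.WriteMessage(websocket.TextMessage, []byte("hello")); err != nil {
		log.Fatal("write:", err)
	}

	// Read the next message from the peer.
	_, msg, err := conn.ReadMessage()
	if err != nil {
		log.Fatal("read:", err)
	}
	log.Printf("received: %s", msg)
}
```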

395
vendor/github.com/gorilla/websocket/client.go generated vendored Normal file
View File

@ -0,0 +1,395 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"bytes"
"context"
"crypto/tls"
"errors"
"io"
"io/ioutil"
"net"
"net/http"
"net/http/httptrace"
"net/url"
"strings"
"time"
)
// ErrBadHandshake is returned when the server response to opening handshake is
// invalid.
var ErrBadHandshake = errors.New("websocket: bad handshake")
var errInvalidCompression = errors.New("websocket: invalid compression negotiation")
// NewClient creates a new client connection using the given net connection.
// The URL u specifies the host and request URI. Use requestHeader to specify
// the origin (Origin), subprotocols (Sec-WebSocket-Protocol) and cookies
// (Cookie). Use the response.Header to get the selected subprotocol
// (Sec-WebSocket-Protocol) and cookies (Set-Cookie).
//
// If the WebSocket handshake fails, ErrBadHandshake is returned along with a
// non-nil *http.Response so that callers can handle redirects, authentication,
// etc.
//
// Deprecated: Use Dialer instead.
func NewClient(netConn net.Conn, u *url.URL, requestHeader http.Header, readBufSize, writeBufSize int) (c *Conn, response *http.Response, err error) {
d := Dialer{
ReadBufferSize: readBufSize,
WriteBufferSize: writeBufSize,
NetDial: func(net, addr string) (net.Conn, error) {
return netConn, nil
},
}
return d.Dial(u.String(), requestHeader)
}
// A Dialer contains options for connecting to WebSocket server.
type Dialer struct {
// NetDial specifies the dial function for creating TCP connections. If
// NetDial is nil, net.Dial is used.
NetDial func(network, addr string) (net.Conn, error)
// NetDialContext specifies the dial function for creating TCP connections. If
// NetDialContext is nil, net.DialContext is used.
NetDialContext func(ctx context.Context, network, addr string) (net.Conn, error)
// Proxy specifies a function to return a proxy for a given
// Request. If the function returns a non-nil error, the
// request is aborted with the provided error.
// If Proxy is nil or returns a nil *URL, no proxy is used.
Proxy func(*http.Request) (*url.URL, error)
// TLSClientConfig specifies the TLS configuration to use with tls.Client.
// If nil, the default configuration is used.
TLSClientConfig *tls.Config
// HandshakeTimeout specifies the duration for the handshake to complete.
HandshakeTimeout time.Duration
// ReadBufferSize and WriteBufferSize specify I/O buffer sizes. If a buffer
// size is zero, then a useful default size is used. The I/O buffer sizes
// do not limit the size of the messages that can be sent or received.
ReadBufferSize, WriteBufferSize int
// WriteBufferPool is a pool of buffers for write operations. If the value
// is not set, then write buffers are allocated to the connection for the
// lifetime of the connection.
//
// A pool is most useful when the application has a modest volume of writes
// across a large number of connections.
//
// Applications should use a single pool for each unique value of
// WriteBufferSize.
WriteBufferPool BufferPool
// Subprotocols specifies the client's requested subprotocols.
Subprotocols []string
// EnableCompression specifies if the client should attempt to negotiate
// per message compression (RFC 7692). Setting this value to true does not
// guarantee that compression will be supported. Currently only "no context
// takeover" modes are supported.
EnableCompression bool
// Jar specifies the cookie jar.
// If Jar is nil, cookies are not sent in requests and ignored
// in responses.
Jar http.CookieJar
}
// Dial creates a new client connection by calling DialContext with a background context.
func (d *Dialer) Dial(urlStr string, requestHeader http.Header) (*Conn, *http.Response, error) {
return d.DialContext(context.Background(), urlStr, requestHeader)
}
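// Example (an illustrative sketch, not part of the upstream source): a Dialer
// configured with a handshake timeout, a requested subprotocol and an Origin
// header; the URL and origin are placeholders:
//
//	d := &websocket.Dialer{
//		Proxy:            http.ProxyFromEnvironment,
//		HandshakeTimeout: 10 * time.Second,
//		Subprotocols:     []string{"chat"},
//	}
//	header := http.Header{"Origin": {"https://example.org"}}
//	conn, _, err := d.Dial("wss://example.org/ws", header)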
var errMalformedURL = errors.New("malformed ws or wss URL")
// hostPortNoPort returns the host of u in two forms: hostPort always carries a
// port (adding the scheme's default when u has none), while hostNoPort never does.
func hostPortNoPort(u *url.URL) (hostPort, hostNoPort string) {
hostPort = u.Host
hostNoPort = u.Host
if i := strings.LastIndex(u.Host, ":"); i > strings.LastIndex(u.Host, "]") {
hostNoPort = hostNoPort[:i]
} else {
switch u.Scheme {
case "wss":
hostPort += ":443"
case "https":
hostPort += ":443"
default:
hostPort += ":80"
}
}
return hostPort, hostNoPort
}
// DefaultDialer is a dialer with all fields set to the default values.
var DefaultDialer = &Dialer{
Proxy: http.ProxyFromEnvironment,
HandshakeTimeout: 45 * time.Second,
}
// nilDialer is the dialer to use when the receiver is nil.
var nilDialer = *DefaultDialer
// DialContext creates a new client connection. Use requestHeader to specify the
// origin (Origin), subprotocols (Sec-WebSocket-Protocol) and cookies (Cookie).
// Use the response.Header to get the selected subprotocol
// (Sec-WebSocket-Protocol) and cookies (Set-Cookie).
//
// The context will be used in the request and in the Dialer
//
// If the WebSocket handshake fails, ErrBadHandshake is returned along with a
// non-nil *http.Response so that callers can handle redirects, authentication,
// etcetera. The response body may not contain the entire response and does not
// need to be closed by the application.
func (d *Dialer) DialContext(ctx context.Context, urlStr string, requestHeader http.Header) (*Conn, *http.Response, error) {
if d == nil {
d = &nilDialer
}
challengeKey, err := generateChallengeKey()
if err != nil {
return nil, nil, err
}
u, err := url.Parse(urlStr)
if err != nil {
return nil, nil, err
}
switch u.Scheme {
case "ws":
u.Scheme = "http"
case "wss":
u.Scheme = "https"
default:
return nil, nil, errMalformedURL
}
if u.User != nil {
// User name and password are not allowed in websocket URIs.
return nil, nil, errMalformedURL
}
req := &http.Request{
Method: "GET",
URL: u,
Proto: "HTTP/1.1",
ProtoMajor: 1,
ProtoMinor: 1,
Header: make(http.Header),
Host: u.Host,
}
req = req.WithContext(ctx)
// Set the cookies present in the cookie jar of the dialer
if d.Jar != nil {
for _, cookie := range d.Jar.Cookies(u) {
req.AddCookie(cookie)
}
}
// Set the request headers using the capitalization for names and values in
// RFC examples. Although the capitalization shouldn't matter, there are
// servers that depend on it. The Header.Set method is not used because the
// method canonicalizes the header names.
req.Header["Upgrade"] = []string{"websocket"}
req.Header["Connection"] = []string{"Upgrade"}
req.Header["Sec-WebSocket-Key"] = []string{challengeKey}
req.Header["Sec-WebSocket-Version"] = []string{"13"}
if len(d.Subprotocols) > 0 {
req.Header["Sec-WebSocket-Protocol"] = []string{strings.Join(d.Subprotocols, ", ")}
}
for k, vs := range requestHeader {
switch {
case k == "Host":
if len(vs) > 0 {
req.Host = vs[0]
}
case k == "Upgrade" ||
k == "Connection" ||
k == "Sec-Websocket-Key" ||
k == "Sec-Websocket-Version" ||
k == "Sec-Websocket-Extensions" ||
(k == "Sec-Websocket-Protocol" && len(d.Subprotocols) > 0):
return nil, nil, errors.New("websocket: duplicate header not allowed: " + k)
case k == "Sec-Websocket-Protocol":
req.Header["Sec-WebSocket-Protocol"] = vs
default:
req.Header[k] = vs
}
}
if d.EnableCompression {
req.Header["Sec-WebSocket-Extensions"] = []string{"permessage-deflate; server_no_context_takeover; client_no_context_takeover"}
}
if d.HandshakeTimeout != 0 {
var cancel func()
ctx, cancel = context.WithTimeout(ctx, d.HandshakeTimeout)
defer cancel()
}
// Get network dial function.
var netDial func(network, add string) (net.Conn, error)
if d.NetDialContext != nil {
netDial = func(network, addr string) (net.Conn, error) {
return d.NetDialContext(ctx, network, addr)
}
} else if d.NetDial != nil {
netDial = d.NetDial
} else {
netDialer := &net.Dialer{}
netDial = func(network, addr string) (net.Conn, error) {
return netDialer.DialContext(ctx, network, addr)
}
}
// If needed, wrap the dial function to set the connection deadline.
if deadline, ok := ctx.Deadline(); ok {
forwardDial := netDial
netDial = func(network, addr string) (net.Conn, error) {
c, err := forwardDial(network, addr)
if err != nil {
return nil, err
}
err = c.SetDeadline(deadline)
if err != nil {
c.Close()
return nil, err
}
return c, nil
}
}
// If needed, wrap the dial function to connect through a proxy.
if d.Proxy != nil {
proxyURL, err := d.Proxy(req)
if err != nil {
return nil, nil, err
}
if proxyURL != nil {
dialer, err := proxy_FromURL(proxyURL, netDialerFunc(netDial))
if err != nil {
return nil, nil, err
}
netDial = dialer.Dial
}
}
hostPort, hostNoPort := hostPortNoPort(u)
trace := httptrace.ContextClientTrace(ctx)
if trace != nil && trace.GetConn != nil {
trace.GetConn(hostPort)
}
netConn, err := netDial("tcp", hostPort)
if trace != nil && trace.GotConn != nil {
trace.GotConn(httptrace.GotConnInfo{
Conn: netConn,
})
}
if err != nil {
return nil, nil, err
}
defer func() {
if netConn != nil {
netConn.Close()
}
}()
if u.Scheme == "https" {
cfg := cloneTLSConfig(d.TLSClientConfig)
if cfg.ServerName == "" {
cfg.ServerName = hostNoPort
}
tlsConn := tls.Client(netConn, cfg)
netConn = tlsConn
var err error
if trace != nil {
err = doHandshakeWithTrace(trace, tlsConn, cfg)
} else {
err = doHandshake(tlsConn, cfg)
}
if err != nil {
return nil, nil, err
}
}
conn := newConn(netConn, false, d.ReadBufferSize, d.WriteBufferSize, d.WriteBufferPool, nil, nil)
if err := req.Write(netConn); err != nil {
return nil, nil, err
}
if trace != nil && trace.GotFirstResponseByte != nil {
if peek, err := conn.br.Peek(1); err == nil && len(peek) == 1 {
trace.GotFirstResponseByte()
}
}
resp, err := http.ReadResponse(conn.br, req)
if err != nil {
return nil, nil, err
}
if d.Jar != nil {
if rc := resp.Cookies(); len(rc) > 0 {
d.Jar.SetCookies(u, rc)
}
}
if resp.StatusCode != 101 ||
!strings.EqualFold(resp.Header.Get("Upgrade"), "websocket") ||
!strings.EqualFold(resp.Header.Get("Connection"), "upgrade") ||
resp.Header.Get("Sec-Websocket-Accept") != computeAcceptKey(challengeKey) {
// Before closing the network connection on return from this
// function, slurp up some of the response to aid application
// debugging.
buf := make([]byte, 1024)
n, _ := io.ReadFull(resp.Body, buf)
resp.Body = ioutil.NopCloser(bytes.NewReader(buf[:n]))
return nil, resp, ErrBadHandshake
}
for _, ext := range parseExtensions(resp.Header) {
if ext[""] != "permessage-deflate" {
continue
}
_, snct := ext["server_no_context_takeover"]
_, cnct := ext["client_no_context_takeover"]
if !snct || !cnct {
return nil, resp, errInvalidCompression
}
conn.newCompressionWriter = compressNoContextTakeover
conn.newDecompressionReader = decompressNoContextTakeover
break
}
resp.Body = ioutil.NopCloser(bytes.NewReader([]byte{}))
conn.subprotocol = resp.Header.Get("Sec-Websocket-Protocol")
netConn.SetDeadline(time.Time{})
netConn = nil // to avoid close in defer.
return conn, resp, nil
}
func doHandshake(tlsConn *tls.Conn, cfg *tls.Config) error {
if err := tlsConn.Handshake(); err != nil {
return err
}
if !cfg.InsecureSkipVerify {
if err := tlsConn.VerifyHostname(cfg.ServerName); err != nil {
return err
}
}
return nil
}

16
vendor/github.com/gorilla/websocket/client_clone.go generated vendored Normal file
View File

@ -0,0 +1,16 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build go1.8
package websocket
import "crypto/tls"
func cloneTLSConfig(cfg *tls.Config) *tls.Config {
if cfg == nil {
return &tls.Config{}
}
return cfg.Clone()
}

View File

@ -0,0 +1,38 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !go1.8
package websocket
import "crypto/tls"
// cloneTLSConfig clones all public fields except the fields
// SessionTicketsDisabled and SessionTicketKey. This avoids copying the
// sync.Mutex in the sync.Once and makes it safe to call cloneTLSConfig on a
// config in active use.
func cloneTLSConfig(cfg *tls.Config) *tls.Config {
if cfg == nil {
return &tls.Config{}
}
return &tls.Config{
Rand: cfg.Rand,
Time: cfg.Time,
Certificates: cfg.Certificates,
NameToCertificate: cfg.NameToCertificate,
GetCertificate: cfg.GetCertificate,
RootCAs: cfg.RootCAs,
NextProtos: cfg.NextProtos,
ServerName: cfg.ServerName,
ClientAuth: cfg.ClientAuth,
ClientCAs: cfg.ClientCAs,
InsecureSkipVerify: cfg.InsecureSkipVerify,
CipherSuites: cfg.CipherSuites,
PreferServerCipherSuites: cfg.PreferServerCipherSuites,
ClientSessionCache: cfg.ClientSessionCache,
MinVersion: cfg.MinVersion,
MaxVersion: cfg.MaxVersion,
CurvePreferences: cfg.CurvePreferences,
}
}

148
vendor/github.com/gorilla/websocket/compression.go generated vendored Normal file
View File

@ -0,0 +1,148 @@
// Copyright 2017 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"compress/flate"
"errors"
"io"
"strings"
"sync"
)
const (
minCompressionLevel = -2 // flate.HuffmanOnly not defined in Go < 1.6
maxCompressionLevel = flate.BestCompression
defaultCompressionLevel = 1
)
var (
flateWriterPools [maxCompressionLevel - minCompressionLevel + 1]sync.Pool
flateReaderPool = sync.Pool{New: func() interface{} {
return flate.NewReader(nil)
}}
)
func decompressNoContextTakeover(r io.Reader) io.ReadCloser {
const tail =
// Add four bytes as specified in RFC
"\x00\x00\xff\xff" +
// Add final block to squelch unexpected EOF error from flate reader.
"\x01\x00\x00\xff\xff"
fr, _ := flateReaderPool.Get().(io.ReadCloser)
fr.(flate.Resetter).Reset(io.MultiReader(r, strings.NewReader(tail)), nil)
return &flateReadWrapper{fr}
}
func isValidCompressionLevel(level int) bool {
return minCompressionLevel <= level && level <= maxCompressionLevel
}
func compressNoContextTakeover(w io.WriteCloser, level int) io.WriteCloser {
p := &flateWriterPools[level-minCompressionLevel]
tw := &truncWriter{w: w}
fw, _ := p.Get().(*flate.Writer)
if fw == nil {
fw, _ = flate.NewWriter(tw, level)
} else {
fw.Reset(tw)
}
return &flateWriteWrapper{fw: fw, tw: tw, p: p}
}
// truncWriter is an io.Writer that writes all but the last four bytes of the
// stream to another io.Writer.
type truncWriter struct {
w io.WriteCloser
n int
p [4]byte
}
func (w *truncWriter) Write(p []byte) (int, error) {
n := 0
// fill buffer first for simplicity.
if w.n < len(w.p) {
n = copy(w.p[w.n:], p)
p = p[n:]
w.n += n
if len(p) == 0 {
return n, nil
}
}
m := len(p)
if m > len(w.p) {
m = len(w.p)
}
if nn, err := w.w.Write(w.p[:m]); err != nil {
return n + nn, err
}
copy(w.p[:], w.p[m:])
copy(w.p[len(w.p)-m:], p[len(p)-m:])
nn, err := w.w.Write(p[:len(p)-m])
return n + nn, err
}
type flateWriteWrapper struct {
fw *flate.Writer
tw *truncWriter
p *sync.Pool
}
func (w *flateWriteWrapper) Write(p []byte) (int, error) {
if w.fw == nil {
return 0, errWriteClosed
}
return w.fw.Write(p)
}
func (w *flateWriteWrapper) Close() error {
if w.fw == nil {
return errWriteClosed
}
err1 := w.fw.Flush()
w.p.Put(w.fw)
w.fw = nil
if w.tw.p != [4]byte{0, 0, 0xff, 0xff} {
return errors.New("websocket: internal error, unexpected bytes at end of flate stream")
}
err2 := w.tw.w.Close()
if err1 != nil {
return err1
}
return err2
}
type flateReadWrapper struct {
fr io.ReadCloser
}
func (r *flateReadWrapper) Read(p []byte) (int, error) {
if r.fr == nil {
return 0, io.ErrClosedPipe
}
n, err := r.fr.Read(p)
if err == io.EOF {
// Preemptively place the reader back in the pool. This helps with
// scenarios where the application does not call NextReader() soon after
// this final read.
r.Close()
}
return n, err
}
func (r *flateReadWrapper) Close() error {
if r.fr == nil {
return io.ErrClosedPipe
}
err := r.fr.Close()
flateReaderPool.Put(r.fr)
r.fr = nil
return err
}

1165
vendor/github.com/gorilla/websocket/conn.go generated vendored Normal file

File diff suppressed because it is too large

15
vendor/github.com/gorilla/websocket/conn_write.go generated vendored Normal file
View File

@ -0,0 +1,15 @@
// Copyright 2016 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build go1.8
package websocket
import "net"
func (c *Conn) writeBufs(bufs ...[]byte) error {
b := net.Buffers(bufs)
_, err := b.WriteTo(c.conn)
return err
}

View File

@ -0,0 +1,18 @@
// Copyright 2016 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !go1.8
package websocket
func (c *Conn) writeBufs(bufs ...[]byte) error {
for _, buf := range bufs {
if len(buf) > 0 {
if _, err := c.conn.Write(buf); err != nil {
return err
}
}
}
return nil
}

180
vendor/github.com/gorilla/websocket/doc.go generated vendored Normal file
View File

@ -0,0 +1,180 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package websocket implements the WebSocket protocol defined in RFC 6455.
//
// Overview
//
// The Conn type represents a WebSocket connection. A server application calls
// the Upgrader.Upgrade method from an HTTP request handler to get a *Conn:
//
// var upgrader = websocket.Upgrader{
// ReadBufferSize: 1024,
// WriteBufferSize: 1024,
// }
//
// func handler(w http.ResponseWriter, r *http.Request) {
// conn, err := upgrader.Upgrade(w, r, nil)
// if err != nil {
// log.Println(err)
// return
// }
// ... Use conn to send and receive messages.
// }
//
// Call the connection's WriteMessage and ReadMessage methods to send and
// receive messages as a slice of bytes. This snippet of code shows how to echo
// messages using these methods:
//
// for {
// messageType, p, err := conn.ReadMessage()
// if err != nil {
// log.Println(err)
// return
// }
// if err := conn.WriteMessage(messageType, p); err != nil {
// log.Println(err)
// return
// }
// }
//
// In the above snippet of code, p is a []byte and messageType is an int with value
// websocket.BinaryMessage or websocket.TextMessage.
//
// An application can also send and receive messages using the io.WriteCloser
// and io.Reader interfaces. To send a message, call the connection NextWriter
// method to get an io.WriteCloser, write the message to the writer and close
// the writer when done. To receive a message, call the connection NextReader
// method to get an io.Reader and read until io.EOF is returned. This snippet
// shows how to echo messages using the NextWriter and NextReader methods:
//
// for {
// messageType, r, err := conn.NextReader()
// if err != nil {
// return
// }
// w, err := conn.NextWriter(messageType)
// if err != nil {
// return err
// }
// if _, err := io.Copy(w, r); err != nil {
// return err
// }
// if err := w.Close(); err != nil {
// return err
// }
// }
//
// Data Messages
//
// The WebSocket protocol distinguishes between text and binary data messages.
// Text messages are interpreted as UTF-8 encoded text. The interpretation of
// binary messages is left to the application.
//
// This package uses the TextMessage and BinaryMessage integer constants to
// identify the two data message types. The ReadMessage and NextReader methods
// return the type of the received message. The messageType argument to the
// WriteMessage and NextWriter methods specifies the type of a sent message.
//
// It is the application's responsibility to ensure that text messages are
// valid UTF-8 encoded text.
//
// Control Messages
//
// The WebSocket protocol defines three types of control messages: close, ping
// and pong. Call the connection WriteControl, WriteMessage or NextWriter
// methods to send a control message to the peer.
//
// Connections handle received close messages by calling the handler function
// set with the SetCloseHandler method and by returning a *CloseError from the
// NextReader, ReadMessage or the message Read method. The default close
// handler sends a close message to the peer.
//
// Connections handle received ping messages by calling the handler function
// set with the SetPingHandler method. The default ping handler sends a pong
// message to the peer.
//
// Connections handle received pong messages by calling the handler function
// set with the SetPongHandler method. The default pong handler does nothing.
// If an application sends ping messages, then the application should set a
// pong handler to receive the corresponding pong.
//
// The control message handler functions are called from the NextReader,
// ReadMessage and message reader Read methods. The default close and ping
// handlers can block these methods for a short time when the handler writes to
// the connection.
//
// The application must read the connection to process close, ping and pong
// messages sent from the peer. If the application is not otherwise interested
// in messages from the peer, then the application should start a goroutine to
// read and discard messages from the peer. A simple example is:
//
// func readLoop(c *websocket.Conn) {
// for {
// if _, _, err := c.NextReader(); err != nil {
// c.Close()
// break
// }
// }
// }
//
// Concurrency
//
// Connections support one concurrent reader and one concurrent writer.
//
// Applications are responsible for ensuring that no more than one goroutine
// calls the write methods (NextWriter, SetWriteDeadline, WriteMessage,
// WriteJSON, EnableWriteCompression, SetCompressionLevel) concurrently and
// that no more than one goroutine calls the read methods (NextReader,
// SetReadDeadline, ReadMessage, ReadJSON, SetPongHandler, SetPingHandler)
// concurrently.
//
// The Close and WriteControl methods can be called concurrently with all other
// methods.
//
// Origin Considerations
//
// Web browsers allow Javascript applications to open a WebSocket connection to
// any host. It's up to the server to enforce an origin policy using the Origin
// request header sent by the browser.
//
// The Upgrader calls the function specified in the CheckOrigin field to check
// the origin. If the CheckOrigin function returns false, then the Upgrade
// method fails the WebSocket handshake with HTTP status 403.
//
// If the CheckOrigin field is nil, then the Upgrader uses a safe default: fail
// the handshake if the Origin request header is present and the Origin host is
// not equal to the Host request header.
//
// The deprecated package-level Upgrade function does not perform origin
// checking. The application is responsible for checking the Origin header
// before calling the Upgrade function.
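//
// For example (an illustrative sketch, not part of the upstream documentation),
// a server that only accepts browser connections from one known origin could
// set:
//
//	var upgrader = websocket.Upgrader{
//		CheckOrigin: func(r *http.Request) bool {
//			return r.Header.Get("Origin") == "https://app.example.org"
//		},
//	}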
//
// Compression EXPERIMENTAL
//
// Per message compression extensions (RFC 7692) are experimentally supported
// by this package in a limited capacity. Setting the EnableCompression option
// to true in Dialer or Upgrader will attempt to negotiate per message deflate
// support.
//
// var upgrader = websocket.Upgrader{
// EnableCompression: true,
// }
//
// If compression was successfully negotiated with the connection's peer, any
// message received in compressed form will be automatically decompressed.
// All Read methods will return uncompressed bytes.
//
// Per message compression of messages written to a connection can be enabled
// or disabled by calling the corresponding Conn method:
//
// conn.EnableWriteCompression(false)
//
// Currently this package does not support compression with "context takeover".
// This means that messages must be compressed and decompressed in isolation,
// without retaining sliding window or dictionary state across messages. For
// more details refer to RFC 7692.
//
// Use of compression is experimental and may result in decreased performance.
package websocket

60
vendor/github.com/gorilla/websocket/json.go generated vendored Normal file
View File

@ -0,0 +1,60 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"encoding/json"
"io"
)
// WriteJSON writes the JSON encoding of v as a message.
//
// Deprecated: Use c.WriteJSON instead.
func WriteJSON(c *Conn, v interface{}) error {
return c.WriteJSON(v)
}
// WriteJSON writes the JSON encoding of v as a message.
//
// See the documentation for encoding/json Marshal for details about the
// conversion of Go values to JSON.
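//
// A minimal usage sketch (the event type and the connection variable conn are
// illustrative):
//
//  type event struct {
//      Name string `json:"name"`
//  }
//  if err := conn.WriteJSON(event{Name: "ping"}); err != nil {
//      log.Println(err)
//  }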
func (c *Conn) WriteJSON(v interface{}) error {
w, err := c.NextWriter(TextMessage)
if err != nil {
return err
}
err1 := json.NewEncoder(w).Encode(v)
err2 := w.Close()
if err1 != nil {
return err1
}
return err2
}
// ReadJSON reads the next JSON-encoded message from the connection and stores
// it in the value pointed to by v.
//
// Deprecated: Use c.ReadJSON instead.
func ReadJSON(c *Conn, v interface{}) error {
return c.ReadJSON(v)
}
// ReadJSON reads the next JSON-encoded message from the connection and stores
// it in the value pointed to by v.
//
// See the documentation for the encoding/json Unmarshal function for details
// about the conversion of JSON to a Go value.
func (c *Conn) ReadJSON(v interface{}) error {
_, r, err := c.NextReader()
if err != nil {
return err
}
err = json.NewDecoder(r).Decode(v)
if err == io.EOF {
// One value is expected in the message.
err = io.ErrUnexpectedEOF
}
return err
}

54
vendor/github.com/gorilla/websocket/mask.go generated vendored Normal file
View File

@ -0,0 +1,54 @@
// Copyright 2016 The Gorilla WebSocket Authors. All rights reserved. Use of
// this source code is governed by a BSD-style license that can be found in the
// LICENSE file.
// +build !appengine
package websocket
import "unsafe"
const wordSize = int(unsafe.Sizeof(uintptr(0)))
func maskBytes(key [4]byte, pos int, b []byte) int {
// Mask one byte at a time for small buffers.
if len(b) < 2*wordSize {
for i := range b {
b[i] ^= key[pos&3]
pos++
}
return pos & 3
}
// Mask one byte at a time to word boundary.
if n := int(uintptr(unsafe.Pointer(&b[0]))) % wordSize; n != 0 {
n = wordSize - n
for i := range b[:n] {
b[i] ^= key[pos&3]
pos++
}
b = b[n:]
}
// Create aligned word size key.
var k [wordSize]byte
for i := range k {
k[i] = key[(pos+i)&3]
}
kw := *(*uintptr)(unsafe.Pointer(&k))
// Mask one word at a time.
n := (len(b) / wordSize) * wordSize
for i := 0; i < n; i += wordSize {
*(*uintptr)(unsafe.Pointer(uintptr(unsafe.Pointer(&b[0])) + uintptr(i))) ^= kw
}
// Mask one byte at a time for remaining bytes.
b = b[n:]
for i := range b {
b[i] ^= key[pos&3]
pos++
}
return pos & 3
}

15
vendor/github.com/gorilla/websocket/mask_safe.go generated vendored Normal file
View File

@ -0,0 +1,15 @@
// Copyright 2016 The Gorilla WebSocket Authors. All rights reserved. Use of
// this source code is governed by a BSD-style license that can be found in the
// LICENSE file.
// +build appengine
package websocket
func maskBytes(key [4]byte, pos int, b []byte) int {
for i := range b {
b[i] ^= key[pos&3]
pos++
}
return pos & 3
}

102
vendor/github.com/gorilla/websocket/prepared.go generated vendored Normal file
View File

@ -0,0 +1,102 @@
// Copyright 2017 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"bytes"
"net"
"sync"
"time"
)
// PreparedMessage caches on-the-wire representations of a message payload.
// Use PreparedMessage to efficiently send a message payload to multiple
// connections. PreparedMessage is especially useful when compression is used
// because the CPU- and memory-expensive compression operation can be executed
// only once for a given set of compression options.
type PreparedMessage struct {
messageType int
data []byte
mu sync.Mutex
frames map[prepareKey]*preparedFrame
}
// prepareKey defines a unique set of options to cache prepared frames in PreparedMessage.
type prepareKey struct {
isServer bool
compress bool
compressionLevel int
}
// preparedFrame contains data in wire representation.
type preparedFrame struct {
once sync.Once
data []byte
}
// NewPreparedMessage returns an initialized PreparedMessage. You can then send
// it to a connection using the WritePreparedMessage method. A valid wire
// representation is calculated lazily, only once for a given set of
// connection options.
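//
// A minimal broadcast sketch (the payload and conns variables are
// illustrative):
//
//  pm, err := websocket.NewPreparedMessage(websocket.TextMessage, payload)
//  if err != nil {
//      return err
//  }
//  for _, c := range conns {
//      c.WritePreparedMessage(pm)
//  }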
func NewPreparedMessage(messageType int, data []byte) (*PreparedMessage, error) {
pm := &PreparedMessage{
messageType: messageType,
frames: make(map[prepareKey]*preparedFrame),
data: data,
}
// Prepare a plain server frame.
_, frameData, err := pm.frame(prepareKey{isServer: true, compress: false})
if err != nil {
return nil, err
}
// To protect against caller modifying the data argument, remember the data
// copied to the plain server frame.
pm.data = frameData[len(frameData)-len(data):]
return pm, nil
}
func (pm *PreparedMessage) frame(key prepareKey) (int, []byte, error) {
pm.mu.Lock()
frame, ok := pm.frames[key]
if !ok {
frame = &preparedFrame{}
pm.frames[key] = frame
}
pm.mu.Unlock()
var err error
frame.once.Do(func() {
// Prepare a frame using a 'fake' connection.
// TODO: Refactor code in conn.go to allow more direct construction of
// the frame.
mu := make(chan bool, 1)
mu <- true
var nc prepareConn
c := &Conn{
conn: &nc,
mu: mu,
isServer: key.isServer,
compressionLevel: key.compressionLevel,
enableWriteCompression: true,
writeBuf: make([]byte, defaultWriteBufferSize+maxFrameHeaderSize),
}
if key.compress {
c.newCompressionWriter = compressNoContextTakeover
}
err = c.WriteMessage(pm.messageType, pm.data)
frame.data = nc.buf.Bytes()
})
return pm.messageType, frame.data, err
}
type prepareConn struct {
buf bytes.Buffer
net.Conn
}
func (pc *prepareConn) Write(p []byte) (int, error) { return pc.buf.Write(p) }
func (pc *prepareConn) SetWriteDeadline(t time.Time) error { return nil }

77
vendor/github.com/gorilla/websocket/proxy.go generated vendored Normal file
View File

@ -0,0 +1,77 @@
// Copyright 2017 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"bufio"
"encoding/base64"
"errors"
"net"
"net/http"
"net/url"
"strings"
)
type netDialerFunc func(network, addr string) (net.Conn, error)
func (fn netDialerFunc) Dial(network, addr string) (net.Conn, error) {
return fn(network, addr)
}
func init() {
proxy_RegisterDialerType("http", func(proxyURL *url.URL, forwardDialer proxy_Dialer) (proxy_Dialer, error) {
return &httpProxyDialer{proxyURL: proxyURL, fowardDial: forwardDialer.Dial}, nil
})
}
type httpProxyDialer struct {
proxyURL *url.URL
fowardDial func(network, addr string) (net.Conn, error)
}
func (hpd *httpProxyDialer) Dial(network string, addr string) (net.Conn, error) {
hostPort, _ := hostPortNoPort(hpd.proxyURL)
conn, err := hpd.fowardDial(network, hostPort)
if err != nil {
return nil, err
}
connectHeader := make(http.Header)
if user := hpd.proxyURL.User; user != nil {
proxyUser := user.Username()
if proxyPassword, passwordSet := user.Password(); passwordSet {
credential := base64.StdEncoding.EncodeToString([]byte(proxyUser + ":" + proxyPassword))
connectHeader.Set("Proxy-Authorization", "Basic "+credential)
}
}
connectReq := &http.Request{
Method: "CONNECT",
URL: &url.URL{Opaque: addr},
Host: addr,
Header: connectHeader,
}
if err := connectReq.Write(conn); err != nil {
conn.Close()
return nil, err
}
// Read response. It's OK to use and discard the buffered reader here because
// the remote server does not speak until spoken to.
br := bufio.NewReader(conn)
resp, err := http.ReadResponse(br, connectReq)
if err != nil {
conn.Close()
return nil, err
}
if resp.StatusCode != 200 {
conn.Close()
f := strings.SplitN(resp.Status, " ", 2)
return nil, errors.New(f[1])
}
return conn, nil
}

363
vendor/github.com/gorilla/websocket/server.go generated vendored Normal file
View File

@ -0,0 +1,363 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"bufio"
"errors"
"io"
"net/http"
"net/url"
"strings"
"time"
)
// HandshakeError describes an error with the handshake from the peer.
type HandshakeError struct {
message string
}
func (e HandshakeError) Error() string { return e.message }
// Upgrader specifies parameters for upgrading an HTTP connection to a
// WebSocket connection.
type Upgrader struct {
// HandshakeTimeout specifies the duration for the handshake to complete.
HandshakeTimeout time.Duration
// ReadBufferSize and WriteBufferSize specify I/O buffer sizes. If a buffer
// size is zero, then buffers allocated by the HTTP server are used. The
// I/O buffer sizes do not limit the size of the messages that can be sent
// or received.
ReadBufferSize, WriteBufferSize int
// WriteBufferPool is a pool of buffers for write operations. If the value
// is not set, then write buffers are allocated to the connection for the
// lifetime of the connection.
//
// A pool is most useful when the application has a modest volume of writes
// across a large number of connections.
//
// Applications should use a single pool for each unique value of
// WriteBufferSize.
WriteBufferPool BufferPool
// Subprotocols specifies the server's supported protocols in order of
// preference. If this field is not nil, then the Upgrade method negotiates a
// subprotocol by selecting the first match in this list with a protocol
// requested by the client. If there's no match, then no protocol is
// negotiated (the Sec-Websocket-Protocol header is not included in the
// handshake response).
Subprotocols []string
// Error specifies the function for generating HTTP error responses. If Error
// is nil, then http.Error is used to generate the HTTP response.
Error func(w http.ResponseWriter, r *http.Request, status int, reason error)
// CheckOrigin returns true if the request Origin header is acceptable. If
// CheckOrigin is nil, then a safe default is used: return false if the
// Origin request header is present and the origin host is not equal to
// request Host header.
//
// A CheckOrigin function should carefully validate the request origin to
// prevent cross-site request forgery.
CheckOrigin func(r *http.Request) bool
// EnableCompression specifies whether the server should attempt to negotiate
// per-message compression (RFC 7692). Setting this value to true does not
// guarantee that compression will be supported. Currently only "no context
// takeover" modes are supported.
EnableCompression bool
}
func (u *Upgrader) returnError(w http.ResponseWriter, r *http.Request, status int, reason string) (*Conn, error) {
err := HandshakeError{reason}
if u.Error != nil {
u.Error(w, r, status, err)
} else {
w.Header().Set("Sec-Websocket-Version", "13")
http.Error(w, http.StatusText(status), status)
}
return nil, err
}
// checkSameOrigin returns true if the origin is not set or is equal to the request host.
func checkSameOrigin(r *http.Request) bool {
origin := r.Header["Origin"]
if len(origin) == 0 {
return true
}
u, err := url.Parse(origin[0])
if err != nil {
return false
}
return equalASCIIFold(u.Host, r.Host)
}
func (u *Upgrader) selectSubprotocol(r *http.Request, responseHeader http.Header) string {
if u.Subprotocols != nil {
clientProtocols := Subprotocols(r)
for _, serverProtocol := range u.Subprotocols {
for _, clientProtocol := range clientProtocols {
if clientProtocol == serverProtocol {
return clientProtocol
}
}
}
} else if responseHeader != nil {
return responseHeader.Get("Sec-Websocket-Protocol")
}
return ""
}
// Upgrade upgrades the HTTP server connection to the WebSocket protocol.
//
// The responseHeader is included in the response to the client's upgrade
// request. Use the responseHeader to specify cookies (Set-Cookie) and the
// application negotiated subprotocol (Sec-WebSocket-Protocol).
//
// If the upgrade fails, then Upgrade replies to the client with an HTTP error
// response.
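//
// A typical handler is sketched below (the upgrader variable and the amount of
// error handling are illustrative):
//
//  func serveWs(w http.ResponseWriter, r *http.Request) {
//      c, err := upgrader.Upgrade(w, r, nil)
//      if err != nil {
//          log.Println(err)
//          return
//      }
//      defer c.Close()
//      // read from and write to c here
//  }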
func (u *Upgrader) Upgrade(w http.ResponseWriter, r *http.Request, responseHeader http.Header) (*Conn, error) {
const badHandshake = "websocket: the client is not using the websocket protocol: "
if !tokenListContainsValue(r.Header, "Connection", "upgrade") {
return u.returnError(w, r, http.StatusBadRequest, badHandshake+"'upgrade' token not found in 'Connection' header")
}
if !tokenListContainsValue(r.Header, "Upgrade", "websocket") {
return u.returnError(w, r, http.StatusBadRequest, badHandshake+"'websocket' token not found in 'Upgrade' header")
}
if r.Method != "GET" {
return u.returnError(w, r, http.StatusMethodNotAllowed, badHandshake+"request method is not GET")
}
if !tokenListContainsValue(r.Header, "Sec-Websocket-Version", "13") {
return u.returnError(w, r, http.StatusBadRequest, "websocket: unsupported version: 13 not found in 'Sec-Websocket-Version' header")
}
if _, ok := responseHeader["Sec-Websocket-Extensions"]; ok {
return u.returnError(w, r, http.StatusInternalServerError, "websocket: application specific 'Sec-WebSocket-Extensions' headers are unsupported")
}
checkOrigin := u.CheckOrigin
if checkOrigin == nil {
checkOrigin = checkSameOrigin
}
if !checkOrigin(r) {
return u.returnError(w, r, http.StatusForbidden, "websocket: request origin not allowed by Upgrader.CheckOrigin")
}
challengeKey := r.Header.Get("Sec-Websocket-Key")
if challengeKey == "" {
return u.returnError(w, r, http.StatusBadRequest, "websocket: not a websocket handshake: `Sec-WebSocket-Key' header is missing or blank")
}
subprotocol := u.selectSubprotocol(r, responseHeader)
// Negotiate PMCE
var compress bool
if u.EnableCompression {
for _, ext := range parseExtensions(r.Header) {
if ext[""] != "permessage-deflate" {
continue
}
compress = true
break
}
}
h, ok := w.(http.Hijacker)
if !ok {
return u.returnError(w, r, http.StatusInternalServerError, "websocket: response does not implement http.Hijacker")
}
var brw *bufio.ReadWriter
netConn, brw, err := h.Hijack()
if err != nil {
return u.returnError(w, r, http.StatusInternalServerError, err.Error())
}
if brw.Reader.Buffered() > 0 {
netConn.Close()
return nil, errors.New("websocket: client sent data before handshake is complete")
}
var br *bufio.Reader
if u.ReadBufferSize == 0 && bufioReaderSize(netConn, brw.Reader) > 256 {
// Reuse hijacked buffered reader as connection reader.
br = brw.Reader
}
buf := bufioWriterBuffer(netConn, brw.Writer)
var writeBuf []byte
if u.WriteBufferPool == nil && u.WriteBufferSize == 0 && len(buf) >= maxFrameHeaderSize+256 {
// Reuse hijacked write buffer as connection buffer.
writeBuf = buf
}
c := newConn(netConn, true, u.ReadBufferSize, u.WriteBufferSize, u.WriteBufferPool, br, writeBuf)
c.subprotocol = subprotocol
if compress {
c.newCompressionWriter = compressNoContextTakeover
c.newDecompressionReader = decompressNoContextTakeover
}
// Use larger of hijacked buffer and connection write buffer for header.
p := buf
if len(c.writeBuf) > len(p) {
p = c.writeBuf
}
p = p[:0]
p = append(p, "HTTP/1.1 101 Switching Protocols\r\nUpgrade: websocket\r\nConnection: Upgrade\r\nSec-WebSocket-Accept: "...)
p = append(p, computeAcceptKey(challengeKey)...)
p = append(p, "\r\n"...)
if c.subprotocol != "" {
p = append(p, "Sec-WebSocket-Protocol: "...)
p = append(p, c.subprotocol...)
p = append(p, "\r\n"...)
}
if compress {
p = append(p, "Sec-WebSocket-Extensions: permessage-deflate; server_no_context_takeover; client_no_context_takeover\r\n"...)
}
for k, vs := range responseHeader {
if k == "Sec-Websocket-Protocol" {
continue
}
for _, v := range vs {
p = append(p, k...)
p = append(p, ": "...)
for i := 0; i < len(v); i++ {
b := v[i]
if b <= 31 {
// prevent response splitting.
b = ' '
}
p = append(p, b)
}
p = append(p, "\r\n"...)
}
}
p = append(p, "\r\n"...)
// Clear deadlines set by HTTP server.
netConn.SetDeadline(time.Time{})
if u.HandshakeTimeout > 0 {
netConn.SetWriteDeadline(time.Now().Add(u.HandshakeTimeout))
}
if _, err = netConn.Write(p); err != nil {
netConn.Close()
return nil, err
}
if u.HandshakeTimeout > 0 {
netConn.SetWriteDeadline(time.Time{})
}
return c, nil
}
// Upgrade upgrades the HTTP server connection to the WebSocket protocol.
//
// Deprecated: Use websocket.Upgrader instead.
//
// Upgrade does not perform origin checking. The application is responsible for
// checking the Origin header before calling Upgrade. An example implementation
// of the same origin policy check is:
//
// if req.Header.Get("Origin") != "http://"+req.Host {
// http.Error(w, "Origin not allowed", http.StatusForbidden)
// return
// }
//
// If the endpoint supports subprotocols, then the application is responsible
// for negotiating the protocol used on the connection. Use the Subprotocols()
// function to get the subprotocols requested by the client. Use the
// Sec-Websocket-Protocol response header to specify the subprotocol selected
// by the application.
//
// The responseHeader is included in the response to the client's upgrade
// request. Use the responseHeader to specify cookies (Set-Cookie) and the
// negotiated subprotocol (Sec-Websocket-Protocol).
//
// The connection buffers IO to the underlying network connection. The
// readBufSize and writeBufSize parameters specify the size of the buffers to
// use. Messages can be larger than the buffers.
//
// If the request is not a valid WebSocket handshake, then Upgrade returns an
// error of type HandshakeError. Applications should handle this error by
// replying to the client with an HTTP error response.
func Upgrade(w http.ResponseWriter, r *http.Request, responseHeader http.Header, readBufSize, writeBufSize int) (*Conn, error) {
u := Upgrader{ReadBufferSize: readBufSize, WriteBufferSize: writeBufSize}
u.Error = func(w http.ResponseWriter, r *http.Request, status int, reason error) {
// don't return errors to maintain backwards compatibility
}
u.CheckOrigin = func(r *http.Request) bool {
// allow all connections by default
return true
}
return u.Upgrade(w, r, responseHeader)
}
// Subprotocols returns the subprotocols requested by the client in the
// Sec-Websocket-Protocol header.
func Subprotocols(r *http.Request) []string {
h := strings.TrimSpace(r.Header.Get("Sec-Websocket-Protocol"))
if h == "" {
return nil
}
protocols := strings.Split(h, ",")
for i := range protocols {
protocols[i] = strings.TrimSpace(protocols[i])
}
return protocols
}
// IsWebSocketUpgrade returns true if the client requested upgrade to the
// WebSocket protocol.
func IsWebSocketUpgrade(r *http.Request) bool {
return tokenListContainsValue(r.Header, "Connection", "upgrade") &&
tokenListContainsValue(r.Header, "Upgrade", "websocket")
}
// bufioReaderSize returns the size of a bufio.Reader.
func bufioReaderSize(originalReader io.Reader, br *bufio.Reader) int {
// This code assumes that peek on a reset reader returns
// bufio.Reader.buf[:0].
// TODO: Use bufio.Reader.Size() after Go 1.10
br.Reset(originalReader)
if p, err := br.Peek(0); err == nil {
return cap(p)
}
return 0
}
// writeHook is an io.Writer that records the last slice passed to it via
// io.Writer.Write.
type writeHook struct {
p []byte
}
func (wh *writeHook) Write(p []byte) (int, error) {
wh.p = p
return len(p), nil
}
// bufioWriterBuffer grabs the buffer from a bufio.Writer.
func bufioWriterBuffer(originalWriter io.Writer, bw *bufio.Writer) []byte {
// This code assumes that bufio.Writer.buf[:1] is passed to the
// bufio.Writer's underlying writer.
var wh writeHook
bw.Reset(&wh)
bw.WriteByte(0)
bw.Flush()
bw.Reset(originalWriter)
return wh.p[:cap(wh.p)]
}

19
vendor/github.com/gorilla/websocket/trace.go generated vendored Normal file
View File

@ -0,0 +1,19 @@
// +build go1.8
package websocket
import (
"crypto/tls"
"net/http/httptrace"
)
func doHandshakeWithTrace(trace *httptrace.ClientTrace, tlsConn *tls.Conn, cfg *tls.Config) error {
if trace.TLSHandshakeStart != nil {
trace.TLSHandshakeStart()
}
err := doHandshake(tlsConn, cfg)
if trace.TLSHandshakeDone != nil {
trace.TLSHandshakeDone(tlsConn.ConnectionState(), err)
}
return err
}

12
vendor/github.com/gorilla/websocket/trace_17.go generated vendored Normal file
View File

@ -0,0 +1,12 @@
// +build !go1.8
package websocket
import (
"crypto/tls"
"net/http/httptrace"
)
func doHandshakeWithTrace(trace *httptrace.ClientTrace, tlsConn *tls.Conn, cfg *tls.Config) error {
return doHandshake(tlsConn, cfg)
}

237
vendor/github.com/gorilla/websocket/util.go generated vendored Normal file
View File

@ -0,0 +1,237 @@
// Copyright 2013 The Gorilla WebSocket Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package websocket
import (
"crypto/rand"
"crypto/sha1"
"encoding/base64"
"io"
"net/http"
"strings"
"unicode/utf8"
)
var keyGUID = []byte("258EAFA5-E914-47DA-95CA-C5AB0DC85B11")
func computeAcceptKey(challengeKey string) string {
h := sha1.New()
h.Write([]byte(challengeKey))
h.Write(keyGUID)
return base64.StdEncoding.EncodeToString(h.Sum(nil))
}
func generateChallengeKey() (string, error) {
p := make([]byte, 16)
if _, err := io.ReadFull(rand.Reader, p); err != nil {
return "", err
}
return base64.StdEncoding.EncodeToString(p), nil
}
// Octet types from RFC 2616.
var octetTypes [256]byte
const (
isTokenOctet = 1 << iota
isSpaceOctet
)
func init() {
// From RFC 2616
//
// OCTET = <any 8-bit sequence of data>
// CHAR = <any US-ASCII character (octets 0 - 127)>
// CTL = <any US-ASCII control character (octets 0 - 31) and DEL (127)>
// CR = <US-ASCII CR, carriage return (13)>
// LF = <US-ASCII LF, linefeed (10)>
// SP = <US-ASCII SP, space (32)>
// HT = <US-ASCII HT, horizontal-tab (9)>
// <"> = <US-ASCII double-quote mark (34)>
// CRLF = CR LF
// LWS = [CRLF] 1*( SP | HT )
// TEXT = <any OCTET except CTLs, but including LWS>
// separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <">
// | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT
// token = 1*<any CHAR except CTLs or separators>
// qdtext = <any TEXT except <">>
for c := 0; c < 256; c++ {
var t byte
isCtl := c <= 31 || c == 127
isChar := 0 <= c && c <= 127
isSeparator := strings.IndexRune(" \t\"(),/:;<=>?@[]\\{}", rune(c)) >= 0
if strings.IndexRune(" \t\r\n", rune(c)) >= 0 {
t |= isSpaceOctet
}
if isChar && !isCtl && !isSeparator {
t |= isTokenOctet
}
octetTypes[c] = t
}
}
func skipSpace(s string) (rest string) {
i := 0
for ; i < len(s); i++ {
if octetTypes[s[i]]&isSpaceOctet == 0 {
break
}
}
return s[i:]
}
func nextToken(s string) (token, rest string) {
i := 0
for ; i < len(s); i++ {
if octetTypes[s[i]]&isTokenOctet == 0 {
break
}
}
return s[:i], s[i:]
}
func nextTokenOrQuoted(s string) (value string, rest string) {
if !strings.HasPrefix(s, "\"") {
return nextToken(s)
}
s = s[1:]
for i := 0; i < len(s); i++ {
switch s[i] {
case '"':
return s[:i], s[i+1:]
case '\\':
p := make([]byte, len(s)-1)
j := copy(p, s[:i])
escape := true
for i = i + 1; i < len(s); i++ {
b := s[i]
switch {
case escape:
escape = false
p[j] = b
j++
case b == '\\':
escape = true
case b == '"':
return string(p[:j]), s[i+1:]
default:
p[j] = b
j++
}
}
return "", ""
}
}
return "", ""
}
// equalASCIIFold returns true if s is equal to t with ASCII case folding.
func equalASCIIFold(s, t string) bool {
for s != "" && t != "" {
sr, size := utf8.DecodeRuneInString(s)
s = s[size:]
tr, size := utf8.DecodeRuneInString(t)
t = t[size:]
if sr == tr {
continue
}
if 'A' <= sr && sr <= 'Z' {
sr = sr + 'a' - 'A'
}
if 'A' <= tr && tr <= 'Z' {
tr = tr + 'a' - 'A'
}
if sr != tr {
return false
}
}
return s == t
}
// tokenListContainsValue returns true if the 1#token header with the given
// name contains a token equal to value with ASCII case folding.
func tokenListContainsValue(header http.Header, name string, value string) bool {
headers:
for _, s := range header[name] {
for {
var t string
t, s = nextToken(skipSpace(s))
if t == "" {
continue headers
}
s = skipSpace(s)
if s != "" && s[0] != ',' {
continue headers
}
if equalASCIIFold(t, value) {
return true
}
if s == "" {
continue headers
}
s = s[1:]
}
}
return false
}
// parseExtensions parses WebSocket extensions from a header.
func parseExtensions(header http.Header) []map[string]string {
// From RFC 6455:
//
// Sec-WebSocket-Extensions = extension-list
// extension-list = 1#extension
// extension = extension-token *( ";" extension-param )
// extension-token = registered-token
// registered-token = token
// extension-param = token [ "=" (token | quoted-string) ]
// ;When using the quoted-string syntax variant, the value
// ;after quoted-string unescaping MUST conform to the
// ;'token' ABNF.
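//
// As a worked example, the illustrative header value
//
//  permessage-deflate; client_max_window_bits, x-custom
//
// parses to:
//
//  []map[string]string{
//      {"": "permessage-deflate", "client_max_window_bits": ""},
//      {"": "x-custom"},
//  }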
var result []map[string]string
headers:
for _, s := range header["Sec-Websocket-Extensions"] {
for {
var t string
t, s = nextToken(skipSpace(s))
if t == "" {
continue headers
}
ext := map[string]string{"": t}
for {
s = skipSpace(s)
if !strings.HasPrefix(s, ";") {
break
}
var k string
k, s = nextToken(skipSpace(s[1:]))
if k == "" {
continue headers
}
s = skipSpace(s)
var v string
if strings.HasPrefix(s, "=") {
v, s = nextTokenOrQuoted(skipSpace(s[1:]))
s = skipSpace(s)
}
if s != "" && s[0] != ',' && s[0] != ';' {
continue headers
}
ext[k] = v
}
if s != "" && s[0] != ',' {
continue headers
}
result = append(result, ext)
if s == "" {
continue headers
}
s = s[1:]
}
}
return result
}

473
vendor/github.com/gorilla/websocket/x_net_proxy.go generated vendored Normal file
View File

@ -0,0 +1,473 @@
// Code generated by golang.org/x/tools/cmd/bundle. DO NOT EDIT.
//go:generate bundle -o x_net_proxy.go golang.org/x/net/proxy
// Package proxy provides support for a variety of protocols to proxy network
// data.
//
package websocket
import (
"errors"
"io"
"net"
"net/url"
"os"
"strconv"
"strings"
"sync"
)
type proxy_direct struct{}
// Direct is a direct proxy: one that makes network connections directly.
var proxy_Direct = proxy_direct{}
func (proxy_direct) Dial(network, addr string) (net.Conn, error) {
return net.Dial(network, addr)
}
// A PerHost directs connections to a default Dialer unless the host name
// requested matches one of a number of exceptions.
type proxy_PerHost struct {
def, bypass proxy_Dialer
bypassNetworks []*net.IPNet
bypassIPs []net.IP
bypassZones []string
bypassHosts []string
}
// NewPerHost returns a PerHost Dialer that directs connections to either
// defaultDialer or bypass, depending on whether the connection matches one of
// the configured rules.
func proxy_NewPerHost(defaultDialer, bypass proxy_Dialer) *proxy_PerHost {
return &proxy_PerHost{
def: defaultDialer,
bypass: bypass,
}
}
// Dial connects to the address addr on the given network through either
// defaultDialer or bypass.
func (p *proxy_PerHost) Dial(network, addr string) (c net.Conn, err error) {
host, _, err := net.SplitHostPort(addr)
if err != nil {
return nil, err
}
return p.dialerForRequest(host).Dial(network, addr)
}
func (p *proxy_PerHost) dialerForRequest(host string) proxy_Dialer {
if ip := net.ParseIP(host); ip != nil {
for _, net := range p.bypassNetworks {
if net.Contains(ip) {
return p.bypass
}
}
for _, bypassIP := range p.bypassIPs {
if bypassIP.Equal(ip) {
return p.bypass
}
}
return p.def
}
for _, zone := range p.bypassZones {
if strings.HasSuffix(host, zone) {
return p.bypass
}
if host == zone[1:] {
// For a zone ".example.com", we match "example.com"
// too.
return p.bypass
}
}
for _, bypassHost := range p.bypassHosts {
if bypassHost == host {
return p.bypass
}
}
return p.def
}
// AddFromString parses a string that contains comma-separated values
// specifying hosts that should use the bypass proxy. Each value is either an
// IP address, a CIDR range, a zone (*.example.com) or a host name
// (localhost). A best effort is made to parse the string and errors are
// ignored.
func (p *proxy_PerHost) AddFromString(s string) {
hosts := strings.Split(s, ",")
for _, host := range hosts {
host = strings.TrimSpace(host)
if len(host) == 0 {
continue
}
if strings.Contains(host, "/") {
// We assume that it's a CIDR address like 127.0.0.0/8
if _, net, err := net.ParseCIDR(host); err == nil {
p.AddNetwork(net)
}
continue
}
if ip := net.ParseIP(host); ip != nil {
p.AddIP(ip)
continue
}
if strings.HasPrefix(host, "*.") {
p.AddZone(host[1:])
continue
}
p.AddHost(host)
}
}
// AddIP specifies an IP address that will use the bypass proxy. Note that
// this will only take effect if a literal IP address is dialed. A connection
// to a named host will never match an IP.
func (p *proxy_PerHost) AddIP(ip net.IP) {
p.bypassIPs = append(p.bypassIPs, ip)
}
// AddNetwork specifies an IP range that will use the bypass proxy. Note that
// this will only take effect if a literal IP address is dialed. A connection
// to a named host will never match.
func (p *proxy_PerHost) AddNetwork(net *net.IPNet) {
p.bypassNetworks = append(p.bypassNetworks, net)
}
// AddZone specifies a DNS suffix that will use the bypass proxy. A zone of
// "example.com" matches "example.com" and all of its subdomains.
func (p *proxy_PerHost) AddZone(zone string) {
if strings.HasSuffix(zone, ".") {
zone = zone[:len(zone)-1]
}
if !strings.HasPrefix(zone, ".") {
zone = "." + zone
}
p.bypassZones = append(p.bypassZones, zone)
}
// AddHost specifies a host name that will use the bypass proxy.
func (p *proxy_PerHost) AddHost(host string) {
if strings.HasSuffix(host, ".") {
host = host[:len(host)-1]
}
p.bypassHosts = append(p.bypassHosts, host)
}
// A Dialer is a means to establish a connection.
type proxy_Dialer interface {
// Dial connects to the given address via the proxy.
Dial(network, addr string) (c net.Conn, err error)
}
// Auth contains authentication parameters that specific Dialers may require.
type proxy_Auth struct {
User, Password string
}
// FromEnvironment returns the dialer specified by the proxy-related variables
// in the environment.
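//
// For example (an illustrative shell invocation, not part of this package):
//
//  ALL_PROXY=socks5://127.0.0.1:1080 NO_PROXY=localhost,*.internal ./yourprogram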
func proxy_FromEnvironment() proxy_Dialer {
allProxy := proxy_allProxyEnv.Get()
if len(allProxy) == 0 {
return proxy_Direct
}
proxyURL, err := url.Parse(allProxy)
if err != nil {
return proxy_Direct
}
proxy, err := proxy_FromURL(proxyURL, proxy_Direct)
if err != nil {
return proxy_Direct
}
noProxy := proxy_noProxyEnv.Get()
if len(noProxy) == 0 {
return proxy
}
perHost := proxy_NewPerHost(proxy, proxy_Direct)
perHost.AddFromString(noProxy)
return perHost
}
// proxySchemes is a map from URL schemes to a function that creates a Dialer
// from a URL with such a scheme.
var proxy_proxySchemes map[string]func(*url.URL, proxy_Dialer) (proxy_Dialer, error)
// RegisterDialerType takes a URL scheme and a function to generate Dialers from
// a URL with that scheme and a forwarding Dialer. Registered schemes are used
// by FromURL.
func proxy_RegisterDialerType(scheme string, f func(*url.URL, proxy_Dialer) (proxy_Dialer, error)) {
if proxy_proxySchemes == nil {
proxy_proxySchemes = make(map[string]func(*url.URL, proxy_Dialer) (proxy_Dialer, error))
}
proxy_proxySchemes[scheme] = f
}
// FromURL returns a Dialer given a URL specification and an underlying
// Dialer for it to make network requests.
func proxy_FromURL(u *url.URL, forward proxy_Dialer) (proxy_Dialer, error) {
var auth *proxy_Auth
if u.User != nil {
auth = new(proxy_Auth)
auth.User = u.User.Username()
if p, ok := u.User.Password(); ok {
auth.Password = p
}
}
switch u.Scheme {
case "socks5":
return proxy_SOCKS5("tcp", u.Host, auth, forward)
}
// If the scheme doesn't match any of the built-in schemes, see if it
// was registered by another package.
if proxy_proxySchemes != nil {
if f, ok := proxy_proxySchemes[u.Scheme]; ok {
return f(u, forward)
}
}
return nil, errors.New("proxy: unknown scheme: " + u.Scheme)
}
var (
proxy_allProxyEnv = &proxy_envOnce{
names: []string{"ALL_PROXY", "all_proxy"},
}
proxy_noProxyEnv = &proxy_envOnce{
names: []string{"NO_PROXY", "no_proxy"},
}
)
// envOnce looks up an environment variable (optionally by multiple
// names) once. It mitigates expensive lookups on some platforms
// (e.g. Windows).
// (Borrowed from net/http/transport.go)
type proxy_envOnce struct {
names []string
once sync.Once
val string
}
func (e *proxy_envOnce) Get() string {
e.once.Do(e.init)
return e.val
}
func (e *proxy_envOnce) init() {
for _, n := range e.names {
e.val = os.Getenv(n)
if e.val != "" {
return
}
}
}
// SOCKS5 returns a Dialer that makes SOCKSv5 connections to the given address
// with an optional username and password. See RFC 1928 and RFC 1929.
func proxy_SOCKS5(network, addr string, auth *proxy_Auth, forward proxy_Dialer) (proxy_Dialer, error) {
s := &proxy_socks5{
network: network,
addr: addr,
forward: forward,
}
if auth != nil {
s.user = auth.User
s.password = auth.Password
}
return s, nil
}
type proxy_socks5 struct {
user, password string
network, addr string
forward proxy_Dialer
}
const proxy_socks5Version = 5
const (
proxy_socks5AuthNone = 0
proxy_socks5AuthPassword = 2
)
const proxy_socks5Connect = 1
const (
proxy_socks5IP4 = 1
proxy_socks5Domain = 3
proxy_socks5IP6 = 4
)
var proxy_socks5Errors = []string{
"",
"general failure",
"connection forbidden",
"network unreachable",
"host unreachable",
"connection refused",
"TTL expired",
"command not supported",
"address type not supported",
}
// Dial connects to the address addr on the given network via the SOCKS5 proxy.
func (s *proxy_socks5) Dial(network, addr string) (net.Conn, error) {
switch network {
case "tcp", "tcp6", "tcp4":
default:
return nil, errors.New("proxy: no support for SOCKS5 proxy connections of type " + network)
}
conn, err := s.forward.Dial(s.network, s.addr)
if err != nil {
return nil, err
}
if err := s.connect(conn, addr); err != nil {
conn.Close()
return nil, err
}
return conn, nil
}
// connect takes an existing connection to a socks5 proxy server,
// and commands the server to extend that connection to target,
// which must be a canonical address with a host and port.
func (s *proxy_socks5) connect(conn net.Conn, target string) error {
host, portStr, err := net.SplitHostPort(target)
if err != nil {
return err
}
port, err := strconv.Atoi(portStr)
if err != nil {
return errors.New("proxy: failed to parse port number: " + portStr)
}
if port < 1 || port > 0xffff {
return errors.New("proxy: port number out of range: " + portStr)
}
// the size here is just an estimate
buf := make([]byte, 0, 6+len(host))
buf = append(buf, proxy_socks5Version)
if len(s.user) > 0 && len(s.user) < 256 && len(s.password) < 256 {
buf = append(buf, 2 /* num auth methods */, proxy_socks5AuthNone, proxy_socks5AuthPassword)
} else {
buf = append(buf, 1 /* num auth methods */, proxy_socks5AuthNone)
}
if _, err := conn.Write(buf); err != nil {
return errors.New("proxy: failed to write greeting to SOCKS5 proxy at " + s.addr + ": " + err.Error())
}
if _, err := io.ReadFull(conn, buf[:2]); err != nil {
return errors.New("proxy: failed to read greeting from SOCKS5 proxy at " + s.addr + ": " + err.Error())
}
if buf[0] != 5 {
return errors.New("proxy: SOCKS5 proxy at " + s.addr + " has unexpected version " + strconv.Itoa(int(buf[0])))
}
if buf[1] == 0xff {
return errors.New("proxy: SOCKS5 proxy at " + s.addr + " requires authentication")
}
// See RFC 1929
if buf[1] == proxy_socks5AuthPassword {
buf = buf[:0]
buf = append(buf, 1 /* password protocol version */)
buf = append(buf, uint8(len(s.user)))
buf = append(buf, s.user...)
buf = append(buf, uint8(len(s.password)))
buf = append(buf, s.password...)
if _, err := conn.Write(buf); err != nil {
return errors.New("proxy: failed to write authentication request to SOCKS5 proxy at " + s.addr + ": " + err.Error())
}
if _, err := io.ReadFull(conn, buf[:2]); err != nil {
return errors.New("proxy: failed to read authentication reply from SOCKS5 proxy at " + s.addr + ": " + err.Error())
}
if buf[1] != 0 {
return errors.New("proxy: SOCKS5 proxy at " + s.addr + " rejected username/password")
}
}
buf = buf[:0]
buf = append(buf, proxy_socks5Version, proxy_socks5Connect, 0 /* reserved */)
if ip := net.ParseIP(host); ip != nil {
if ip4 := ip.To4(); ip4 != nil {
buf = append(buf, proxy_socks5IP4)
ip = ip4
} else {
buf = append(buf, proxy_socks5IP6)
}
buf = append(buf, ip...)
} else {
if len(host) > 255 {
return errors.New("proxy: destination host name too long: " + host)
}
buf = append(buf, proxy_socks5Domain)
buf = append(buf, byte(len(host)))
buf = append(buf, host...)
}
buf = append(buf, byte(port>>8), byte(port))
if _, err := conn.Write(buf); err != nil {
return errors.New("proxy: failed to write connect request to SOCKS5 proxy at " + s.addr + ": " + err.Error())
}
if _, err := io.ReadFull(conn, buf[:4]); err != nil {
return errors.New("proxy: failed to read connect reply from SOCKS5 proxy at " + s.addr + ": " + err.Error())
}
failure := "unknown error"
if int(buf[1]) < len(proxy_socks5Errors) {
failure = proxy_socks5Errors[buf[1]]
}
if len(failure) > 0 {
return errors.New("proxy: SOCKS5 proxy at " + s.addr + " failed to connect: " + failure)
}
bytesToDiscard := 0
switch buf[3] {
case proxy_socks5IP4:
bytesToDiscard = net.IPv4len
case proxy_socks5IP6:
bytesToDiscard = net.IPv6len
case proxy_socks5Domain:
_, err := io.ReadFull(conn, buf[:1])
if err != nil {
return errors.New("proxy: failed to read domain length from SOCKS5 proxy at " + s.addr + ": " + err.Error())
}
bytesToDiscard = int(buf[0])
default:
return errors.New("proxy: got unknown address type " + strconv.Itoa(int(buf[3])) + " from SOCKS5 proxy at " + s.addr)
}
if cap(buf) < bytesToDiscard {
buf = make([]byte, bytesToDiscard)
} else {
buf = buf[:bytesToDiscard]
}
if _, err := io.ReadFull(conn, buf); err != nil {
return errors.New("proxy: failed to read address from SOCKS5 proxy at " + s.addr + ": " + err.Error())
}
// Also need to discard the port number
if _, err := io.ReadFull(conn, buf[:2]); err != nil {
return errors.New("proxy: failed to read port from SOCKS5 proxy at " + s.addr + ": " + err.Error())
}
return nil
}

1
vendor/github.com/mattn/go-mastodon/.gitignore generated vendored Normal file
View File

@ -0,0 +1 @@
*.exe

8
vendor/github.com/mattn/go-mastodon/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,8 @@
language: go
go:
- tip
before_install:
- go get github.com/mattn/goveralls
- go get golang.org/x/tools/cmd/cover
script:
- $HOME/gopath/bin/goveralls -repotoken u2dqXvOxbIBr8eGxCjcgTkkN2JOSGx1fy

21
vendor/github.com/mattn/go-mastodon/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017 Yasuhiro Matsumoto
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

142
vendor/github.com/mattn/go-mastodon/README.md generated vendored Normal file
View File

@ -0,0 +1,142 @@
# go-mastodon
[![Build Status](https://travis-ci.org/mattn/go-mastodon.svg?branch=master)](https://travis-ci.org/mattn/go-mastodon)
[![Coverage Status](https://coveralls.io/repos/github/mattn/go-mastodon/badge.svg?branch=master)](https://coveralls.io/github/mattn/go-mastodon?branch=master)
[![GoDoc](https://godoc.org/github.com/mattn/go-mastodon?status.svg)](http://godoc.org/github.com/mattn/go-mastodon)
[![Go Report Card](https://goreportcard.com/badge/github.com/mattn/go-mastodon)](https://goreportcard.com/report/github.com/mattn/go-mastodon)
## Usage
### Application
```go
package main
import (
"context"
"fmt"
"log"
"github.com/mattn/go-mastodon"
)
func main() {
app, err := mastodon.RegisterApp(context.Background(), &mastodon.AppConfig{
Server: "https://mstdn.jp",
ClientName: "client-name",
Scopes: "read write follow",
Website: "https://github.com/mattn/go-mastodon",
})
if err != nil {
log.Fatal(err)
}
fmt.Printf("client-id : %s\n", app.ClientID)
fmt.Printf("client-secret: %s\n", app.ClientSecret)
}
```
### Client
```go
package main
import (
"context"
"fmt"
"log"
"github.com/mattn/go-mastodon"
)
func main() {
c := mastodon.NewClient(&mastodon.Config{
Server: "https://mstdn.jp",
ClientID: "client-id",
ClientSecret: "client-secret",
})
err := c.Authenticate(context.Background(), "your-email", "your-password")
if err != nil {
log.Fatal(err)
}
timeline, err := c.GetTimelineHome(context.Background(), nil)
if err != nil {
log.Fatal(err)
}
for i := len(timeline) - 1; i >= 0; i-- {
fmt.Println(timeline[i])
}
}
```
## Status of implementations
* [x] GET /api/v1/accounts/:id
* [x] GET /api/v1/accounts/verify_credentials
* [x] PATCH /api/v1/accounts/update_credentials
* [x] GET /api/v1/accounts/:id/followers
* [x] GET /api/v1/accounts/:id/following
* [x] GET /api/v1/accounts/:id/statuses
* [x] POST /api/v1/accounts/:id/follow
* [x] POST /api/v1/accounts/:id/unfollow
* [x] GET /api/v1/accounts/:id/block
* [x] GET /api/v1/accounts/:id/unblock
* [x] GET /api/v1/accounts/:id/mute
* [x] GET /api/v1/accounts/:id/unmute
* [x] GET /api/v1/accounts/:id/lists
* [x] GET /api/v1/accounts/relationships
* [x] GET /api/v1/accounts/search
* [x] POST /api/v1/apps
* [x] GET /api/v1/blocks
* [x] GET /api/v1/favourites
* [x] GET /api/v1/follow_requests
* [x] POST /api/v1/follow_requests/:id/authorize
* [x] POST /api/v1/follow_requests/:id/reject
* [x] POST /api/v1/follows
* [x] GET /api/v1/instance
* [x] GET /api/v1/instance/activity
* [x] GET /api/v1/instance/peers
* [x] GET /api/v1/lists
* [x] GET /api/v1/lists/:id/accounts
* [x] GET /api/v1/lists/:id
* [x] POST /api/v1/lists
* [x] PUT /api/v1/lists/:id
* [x] DELETE /api/v1/lists/:id
* [x] POST /api/v1/lists/:id/accounts
* [x] DELETE /api/v1/lists/:id/accounts
* [x] POST /api/v1/media
* [x] GET /api/v1/mutes
* [x] GET /api/v1/notifications
* [x] GET /api/v1/notifications/:id
* [x] POST /api/v1/notifications/clear
* [x] GET /api/v1/reports
* [x] POST /api/v1/reports
* [x] GET /api/v1/search
* [x] GET /api/v1/statuses/:id
* [x] GET /api/v1/statuses/:id/context
* [x] GET /api/v1/statuses/:id/card
* [x] GET /api/v1/statuses/:id/reblogged_by
* [x] GET /api/v1/statuses/:id/favourited_by
* [x] POST /api/v1/statuses
* [x] DELETE /api/v1/statuses/:id
* [x] POST /api/v1/statuses/:id/reblog
* [x] POST /api/v1/statuses/:id/unreblog
* [x] POST /api/v1/statuses/:id/favourite
* [x] POST /api/v1/statuses/:id/unfavourite
* [x] GET /api/v1/timelines/home
* [x] GET /api/v1/timelines/public
* [x] GET /api/v1/timelines/tag/:hashtag
* [x] GET /api/v1/timelines/list/:id
## Installation
```
$ go get github.com/mattn/go-mastodon
```
## License
MIT
## Author
Yasuhiro Matsumoto (a.k.a. mattn)

314
vendor/github.com/mattn/go-mastodon/accounts.go generated vendored Normal file
View File

@ -0,0 +1,314 @@
package mastodon
import (
"context"
"fmt"
"net/http"
"net/url"
"strconv"
"time"
)
// Account holds information for a Mastodon account.
type Account struct {
ID ID `json:"id"`
Username string `json:"username"`
Acct string `json:"acct"`
DisplayName string `json:"display_name"`
Locked bool `json:"locked"`
CreatedAt time.Time `json:"created_at"`
FollowersCount int64 `json:"followers_count"`
FollowingCount int64 `json:"following_count"`
StatusesCount int64 `json:"statuses_count"`
Note string `json:"note"`
URL string `json:"url"`
Avatar string `json:"avatar"`
AvatarStatic string `json:"avatar_static"`
Header string `json:"header"`
HeaderStatic string `json:"header_static"`
Emojis []Emoji `json:"emojis"`
Moved *Account `json:"moved"`
Fields []Field `json:"fields"`
Bot bool `json:"bot"`
}
// Field is a Mastodon account profile field.
type Field struct {
Name string `json:"name"`
Value string `json:"value"`
VerifiedAt time.Time `json:"verified_at"`
}
// AccountSource holds the source values of a Mastodon account profile.
type AccountSource struct {
Privacy *string `json:"privacy"`
Sensitive *bool `json:"sensitive"`
Language *string `json:"language"`
Note *string `json:"note"`
Fields *[]Field `json:"fields"`
}
// GetAccount returns the Account.
func (c *Client) GetAccount(ctx context.Context, id ID) (*Account, error) {
var account Account
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/accounts/%s", url.PathEscape(string(id))), nil, &account, nil)
if err != nil {
return nil, err
}
return &account, nil
}
// GetAccountCurrentUser returns the Account of the current user.
func (c *Client) GetAccountCurrentUser(ctx context.Context) (*Account, error) {
var account Account
err := c.doAPI(ctx, http.MethodGet, "/api/v1/accounts/verify_credentials", nil, &account, nil)
if err != nil {
return nil, err
}
return &account, nil
}
// Profile is a struct for updating profiles.
type Profile struct {
// If a field is nil, it will not be updated.
// If it points to an empty value, the field is updated with that empty value.
DisplayName *string
Note *string
Locked *bool
Fields *[]Field
Source *AccountSource
// Set these fields to the base64-encoded string of the image.
Avatar string
Header string
}
// AccountUpdate updates the information of the current user.
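//
// A minimal usage sketch as called from a client program (the client c, the
// context ctx and the values are illustrative):
//
//  name := "New display name"
//  locked := false
//  _, err := c.AccountUpdate(ctx, &mastodon.Profile{
//      DisplayName: &name,
//      Locked:      &locked,
//  })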
func (c *Client) AccountUpdate(ctx context.Context, profile *Profile) (*Account, error) {
params := url.Values{}
if profile.DisplayName != nil {
params.Set("display_name", *profile.DisplayName)
}
if profile.Note != nil {
params.Set("note", *profile.Note)
}
if profile.Locked != nil {
params.Set("locked", strconv.FormatBool(*profile.Locked))
}
if profile.Fields != nil {
for idx, field := range *profile.Fields {
params.Set(fmt.Sprintf("fields_attributes[%d][name]", idx), field.Name)
params.Set(fmt.Sprintf("fields_attributes[%d][value]", idx), field.Value)
}
}
if profile.Source != nil {
if profile.Source.Privacy != nil {
params.Set("source[privacy]", *profile.Source.Privacy)
}
if profile.Source.Sensitive != nil {
params.Set("source[sensitive]", strconv.FormatBool(*profile.Source.Sensitive))
}
if profile.Source.Language != nil {
params.Set("source[language]", *profile.Source.Language)
}
}
if profile.Avatar != "" {
params.Set("avatar", profile.Avatar)
}
if profile.Header != "" {
params.Set("header", profile.Header)
}
var account Account
err := c.doAPI(ctx, http.MethodPatch, "/api/v1/accounts/update_credentials", params, &account, nil)
if err != nil {
return nil, err
}
return &account, nil
}
// GetAccountStatuses returns statuses by the specified account.
func (c *Client) GetAccountStatuses(ctx context.Context, id ID, pg *Pagination) ([]*Status, error) {
var statuses []*Status
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/accounts/%s/statuses", url.PathEscape(string(id))), nil, &statuses, pg)
if err != nil {
return nil, err
}
return statuses, nil
}
// GetAccountFollowers returns the followers list.
func (c *Client) GetAccountFollowers(ctx context.Context, id ID, pg *Pagination) ([]*Account, error) {
var accounts []*Account
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/accounts/%s/followers", url.PathEscape(string(id))), nil, &accounts, pg)
if err != nil {
return nil, err
}
return accounts, nil
}
// GetAccountFollowing returns the following list.
func (c *Client) GetAccountFollowing(ctx context.Context, id ID, pg *Pagination) ([]*Account, error) {
var accounts []*Account
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/accounts/%s/following", url.PathEscape(string(id))), nil, &accounts, pg)
if err != nil {
return nil, err
}
return accounts, nil
}
// GetBlocks returns the block list.
func (c *Client) GetBlocks(ctx context.Context, pg *Pagination) ([]*Account, error) {
var accounts []*Account
err := c.doAPI(ctx, http.MethodGet, "/api/v1/blocks", nil, &accounts, pg)
if err != nil {
return nil, err
}
return accounts, nil
}
// Relationship holds information about the relationship to the account.
type Relationship struct {
ID ID `json:"id"`
Following bool `json:"following"`
FollowedBy bool `json:"followed_by"`
Blocking bool `json:"blocking"`
Muting bool `json:"muting"`
MutingNotifications bool `json:"muting_notifications"`
Requested bool `json:"requested"`
DomainBlocking bool `json:"domain_blocking"`
ShowingReblogs bool `json:"showing_reblogs"`
Endorsed bool `json:"endorsed"`
}
// AccountFollow follows the account.
func (c *Client) AccountFollow(ctx context.Context, id ID) (*Relationship, error) {
var relationship Relationship
err := c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/accounts/%s/follow", url.PathEscape(string(id))), nil, &relationship, nil)
if err != nil {
return nil, err
}
return &relationship, nil
}
// AccountUnfollow unfollows the account.
func (c *Client) AccountUnfollow(ctx context.Context, id ID) (*Relationship, error) {
var relationship Relationship
err := c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/accounts/%s/unfollow", url.PathEscape(string(id))), nil, &relationship, nil)
if err != nil {
return nil, err
}
return &relationship, nil
}
// AccountBlock blocks the account.
func (c *Client) AccountBlock(ctx context.Context, id ID) (*Relationship, error) {
var relationship Relationship
err := c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/accounts/%s/block", url.PathEscape(string(id))), nil, &relationship, nil)
if err != nil {
return nil, err
}
return &relationship, nil
}
// AccountUnblock unblocks the account.
func (c *Client) AccountUnblock(ctx context.Context, id ID) (*Relationship, error) {
var relationship Relationship
err := c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/accounts/%s/unblock", url.PathEscape(string(id))), nil, &relationship, nil)
if err != nil {
return nil, err
}
return &relationship, nil
}
// AccountMute mutes the account.
func (c *Client) AccountMute(ctx context.Context, id ID) (*Relationship, error) {
var relationship Relationship
err := c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/accounts/%s/mute", url.PathEscape(string(id))), nil, &relationship, nil)
if err != nil {
return nil, err
}
return &relationship, nil
}
// AccountUnmute unmutes the account.
func (c *Client) AccountUnmute(ctx context.Context, id ID) (*Relationship, error) {
var relationship Relationship
err := c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/accounts/%s/unmute", url.PathEscape(string(id))), nil, &relationship, nil)
if err != nil {
return nil, err
}
return &relationship, nil
}
// GetAccountRelationships returns relationships for the given accounts.
func (c *Client) GetAccountRelationships(ctx context.Context, ids []string) ([]*Relationship, error) {
params := url.Values{}
for _, id := range ids {
params.Add("id[]", id)
}
var relationships []*Relationship
err := c.doAPI(ctx, http.MethodGet, "/api/v1/accounts/relationships", params, &relationships, nil)
if err != nil {
return nil, err
}
return relationships, nil
}
// AccountsSearch searches accounts by query.
func (c *Client) AccountsSearch(ctx context.Context, q string, limit int64) ([]*Account, error) {
params := url.Values{}
params.Set("q", q)
params.Set("limit", fmt.Sprint(limit))
var accounts []*Account
err := c.doAPI(ctx, http.MethodGet, "/api/v1/accounts/search", params, &accounts, nil)
if err != nil {
return nil, err
}
return accounts, nil
}
// FollowRemoteUser sends a follow request.
func (c *Client) FollowRemoteUser(ctx context.Context, uri string) (*Account, error) {
params := url.Values{}
params.Set("uri", uri)
var account Account
err := c.doAPI(ctx, http.MethodPost, "/api/v1/follows", params, &account, nil)
if err != nil {
return nil, err
}
return &account, nil
}
// GetFollowRequests returns follow requests.
func (c *Client) GetFollowRequests(ctx context.Context, pg *Pagination) ([]*Account, error) {
var accounts []*Account
err := c.doAPI(ctx, http.MethodGet, "/api/v1/follow_requests", nil, &accounts, pg)
if err != nil {
return nil, err
}
return accounts, nil
}
// FollowRequestAuthorize authorizes the follow request of the user with the given id.
func (c *Client) FollowRequestAuthorize(ctx context.Context, id ID) error {
return c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/follow_requests/%s/authorize", url.PathEscape(string(id))), nil, nil, nil)
}
// FollowRequestReject rejects the follow request of the user with the given id.
func (c *Client) FollowRequestReject(ctx context.Context, id ID) error {
return c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/follow_requests/%s/reject", url.PathEscape(string(id))), nil, nil, nil)
}
// GetMutes returns the list of users muted by the current user.
func (c *Client) GetMutes(ctx context.Context, pg *Pagination) ([]*Account, error) {
var accounts []*Account
err := c.doAPI(ctx, http.MethodGet, "/api/v1/mutes", nil, &accounts, pg)
if err != nil {
return nil, err
}
return accounts, nil
}

96
vendor/github.com/mattn/go-mastodon/apps.go generated vendored Normal file
View File

@ -0,0 +1,96 @@
package mastodon
import (
"context"
"encoding/json"
"net/http"
"net/url"
"path"
"strings"
)
// AppConfig is a setting for registering applications.
type AppConfig struct {
http.Client
Server string
ClientName string
// Where the user should be redirected after authorization (for no redirect, use urn:ietf:wg:oauth:2.0:oob)
RedirectURIs string
// This can be a space-separated list of items listed on the /settings/applications/new page of any Mastodon
// instance. "read", "write", and "follow" are top-level scopes that include all the permissions of the more
// specific scopes like "read:favourites", "write:statuses", and "write:follows".
Scopes string
// Optional.
Website string
}
// Application is a Mastodon application.
type Application struct {
ID ID `json:"id"`
RedirectURI string `json:"redirect_uri"`
ClientID string `json:"client_id"`
ClientSecret string `json:"client_secret"`
// AuthURI is not part of the Mastodon API; it is generated by go-mastodon.
AuthURI string `json:"auth_uri,omitempty"`
}
// RegisterApp returns the mastodon application.
func RegisterApp(ctx context.Context, appConfig *AppConfig) (*Application, error) {
params := url.Values{}
params.Set("client_name", appConfig.ClientName)
if appConfig.RedirectURIs == "" {
params.Set("redirect_uris", "urn:ietf:wg:oauth:2.0:oob")
} else {
params.Set("redirect_uris", appConfig.RedirectURIs)
}
params.Set("scopes", appConfig.Scopes)
params.Set("website", appConfig.Website)
u, err := url.Parse(appConfig.Server)
if err != nil {
return nil, err
}
u.Path = path.Join(u.Path, "/api/v1/apps")
req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(params.Encode()))
if err != nil {
return nil, err
}
req = req.WithContext(ctx)
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
resp, err := appConfig.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, parseAPIError("bad request", resp)
}
var app Application
err = json.NewDecoder(resp.Body).Decode(&app)
if err != nil {
return nil, err
}
u, err = url.Parse(appConfig.Server)
if err != nil {
return nil, err
}
u.Path = path.Join(u.Path, "/oauth/authorize")
u.RawQuery = url.Values{
"scope": {appConfig.Scopes},
"response_type": {"code"},
"redirect_uri": {app.RedirectURI},
"client_id": {app.ClientID},
}.Encode()
app.AuthURI = u.String()
return &app, nil
}

25
vendor/github.com/mattn/go-mastodon/compat.go generated vendored Normal file
View File

@ -0,0 +1,25 @@
package mastodon
import (
"encoding/json"
"fmt"
)
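// ID is an entity identifier. Servers may encode IDs either as JSON strings or
// as JSON numbers; UnmarshalJSON below accepts both and normalizes numeric IDs
// to their decimal string form.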
type ID string
func (id *ID) UnmarshalJSON(data []byte) error {
if len(data) > 0 && data[0] == '"' && data[len(data)-1] == '"' {
var s string
if err := json.Unmarshal(data, &s); err != nil {
return err
}
*id = ID(s)
return nil
}
var n int64
if err := json.Unmarshal(data, &n); err != nil {
return err
}
*id = ID(fmt.Sprint(n))
return nil
}

16
vendor/github.com/mattn/go-mastodon/go.mod generated vendored Normal file
View File

@ -0,0 +1,16 @@
module github.com/mattn/go-mastodon
go 1.12
require (
github.com/PuerkitoBio/goquery v1.5.0
github.com/fatih/color v1.7.0
github.com/gorilla/websocket v1.4.0
github.com/mattn/go-colorable v0.1.1 // indirect
github.com/mattn/go-isatty v0.0.7 // indirect
github.com/mattn/go-tty v0.0.0-20190424173100-523744f04859
github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80
github.com/urfave/cli v1.20.0
golang.org/x/net v0.0.0-20190509222800-a4d6f7feada5
golang.org/x/sys v0.0.0-20190509141414-a5b02f93d862 // indirect
)

31
vendor/github.com/mattn/go-mastodon/go.sum generated vendored Normal file
View File

@ -0,0 +1,31 @@
github.com/PuerkitoBio/goquery v1.5.0 h1:uGvmFXOA73IKluu/F84Xd1tt/z07GYm8X49XKHP7EJk=
github.com/PuerkitoBio/goquery v1.5.0/go.mod h1:qD2PgZ9lccMbQlc7eEOjaeRlFQON7xY8kdmcsrnKqMg=
github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/mattn/go-colorable v0.1.1 h1:G1f5SKeVxmagw/IyvzvtZE4Gybcc4Tr1tf7I8z0XgOg=
github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc=
github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-tty v0.0.0-20190424173100-523744f04859 h1:smQbSzmT3EHl4EUwtFwFGmGIpiYgIiiPeVv1uguIQEE=
github.com/mattn/go-tty v0.0.0-20190424173100-523744f04859/go.mod h1:XPvLUNfbS4fJH25nqRHfWLMa1ONC8Amw+mIA639KxkE=
github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 h1:nrZ3ySNYwJbSpD6ce9duiP+QkD3JuLCcWkdaehUS/3Y=
github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80/go.mod h1:iFyPdL66DjUD96XmzVL3ZntbzcflLnznH0fr99w5VqE=
github.com/urfave/cli v1.20.0 h1:fDqGv3UG/4jbVl/QkFwEdddtEDjh/5Ov6X+0B/3bPaw=
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190509222800-a4d6f7feada5 h1:6M3SDHlHHDCx2PcQw3S4KsR170vGqDhJDOmpVd4Hjak=
golang.org/x/net v0.0.0-20190509222800-a4d6f7feada5/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a h1:1BGLXjeY4akVXGgbC9HugT3Jv3hCI0z56oJR5vAMgBU=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190509141414-a5b02f93d862 h1:rM0ROo5vb9AdYJi1110yjWGMej9ITfKddS89P3Fkhug=
golang.org/x/sys v0.0.0-20190509141414-a5b02f93d862/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=

55
vendor/github.com/mattn/go-mastodon/helper.go generated vendored Normal file
View File

@ -0,0 +1,55 @@
package mastodon
import (
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"net/http"
"os"
)
// Base64EncodeFileName returns the base64 data URI format string of the file with the file name.
func Base64EncodeFileName(filename string) (string, error) {
file, err := os.Open(filename)
if err != nil {
return "", err
}
defer file.Close()
return Base64Encode(file)
}
// Base64Encode returns the base64 data URI format string of the file.
func Base64Encode(file *os.File) (string, error) {
fi, err := file.Stat()
if err != nil {
return "", err
}
d := make([]byte, fi.Size())
_, err = file.Read(d)
if err != nil {
return "", err
}
return "data:" + http.DetectContentType(d) +
";base64," + base64.StdEncoding.EncodeToString(d), nil
}
// String is a helper function to get the pointer value of a string.
func String(v string) *string { return &v }
func parseAPIError(prefix string, resp *http.Response) error {
errMsg := fmt.Sprintf("%s: %s", prefix, resp.Status)
var e struct {
Error string `json:"error"`
}
json.NewDecoder(resp.Body).Decode(&e)
if e.Error != "" {
errMsg = fmt.Sprintf("%s: %s", errMsg, e.Error)
}
return errors.New(errMsg)
}

65
vendor/github.com/mattn/go-mastodon/instance.go generated vendored Normal file
View File

@ -0,0 +1,65 @@
package mastodon
import (
"context"
"net/http"
)
// Instance holds information about a mastodon instance.
type Instance struct {
URI string `json:"uri"`
Title string `json:"title"`
Description string `json:"description"`
EMail string `json:"email"`
Version string `json:"version,omitempty"`
Thumbnail string `json:"thumbnail,omitempty"`
URLs map[string]string `json:"urls,omitempty"`
Stats *InstanceStats `json:"stats,omitempty"`
Languages []string `json:"languages"`
ContactAccount *Account `json:"account"`
}
// InstanceStats hold information for mastodon instance stats.
type InstanceStats struct {
UserCount int64 `json:"user_count"`
StatusCount int64 `json:"status_count"`
DomainCount int64 `json:"domain_count"`
}
// GetInstance return Instance.
func (c *Client) GetInstance(ctx context.Context) (*Instance, error) {
var instance Instance
err := c.doAPI(ctx, http.MethodGet, "/api/v1/instance", nil, &instance, nil)
if err != nil {
return nil, err
}
return &instance, nil
}
// WeeklyActivity hold information for mastodon weekly activity.
type WeeklyActivity struct {
Week Unixtime `json:"week"`
Statuses int64 `json:"statuses,string"`
Logins int64 `json:"logins,string"`
Registrations int64 `json:"registrations,string"`
}
// GetInstanceActivity return instance activity.
func (c *Client) GetInstanceActivity(ctx context.Context) ([]*WeeklyActivity, error) {
var activity []*WeeklyActivity
err := c.doAPI(ctx, http.MethodGet, "/api/v1/instance/activity", nil, &activity, nil)
if err != nil {
return nil, err
}
return activity, nil
}
// GetInstancePeers return instance peers.
func (c *Client) GetInstancePeers(ctx context.Context) ([]string, error) {
var peers []string
err := c.doAPI(ctx, http.MethodGet, "/api/v1/instance/peers", nil, &peers, nil)
if err != nil {
return nil, err
}
return peers, nil
}

107
vendor/github.com/mattn/go-mastodon/lists.go generated vendored Normal file
View File

@ -0,0 +1,107 @@
package mastodon
import (
"context"
"fmt"
"net/http"
"net/url"
)
// List is metadata for a list of users.
type List struct {
ID ID `json:"id"`
Title string `json:"title"`
}
// GetLists returns all the lists on the current account.
func (c *Client) GetLists(ctx context.Context) ([]*List, error) {
var lists []*List
err := c.doAPI(ctx, http.MethodGet, "/api/v1/lists", nil, &lists, nil)
if err != nil {
return nil, err
}
return lists, nil
}
// GetAccountLists returns the lists containing a given account.
func (c *Client) GetAccountLists(ctx context.Context, id ID) ([]*List, error) {
var lists []*List
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/accounts/%s/lists", url.PathEscape(string(id))), nil, &lists, nil)
if err != nil {
return nil, err
}
return lists, nil
}
// GetListAccounts returns the accounts in a given list.
func (c *Client) GetListAccounts(ctx context.Context, id ID) ([]*Account, error) {
var accounts []*Account
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/lists/%s/accounts", url.PathEscape(string(id))), url.Values{"limit": {"0"}}, &accounts, nil)
if err != nil {
return nil, err
}
return accounts, nil
}
// GetList retrieves a list by ID.
func (c *Client) GetList(ctx context.Context, id ID) (*List, error) {
var list List
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/lists/%s", url.PathEscape(string(id))), nil, &list, nil)
if err != nil {
return nil, err
}
return &list, nil
}
// CreateList creates a new list with a given title.
func (c *Client) CreateList(ctx context.Context, title string) (*List, error) {
params := url.Values{}
params.Set("title", title)
var list List
err := c.doAPI(ctx, http.MethodPost, "/api/v1/lists", params, &list, nil)
if err != nil {
return nil, err
}
return &list, nil
}
// RenameList assigns a new title to a list.
func (c *Client) RenameList(ctx context.Context, id ID, title string) (*List, error) {
params := url.Values{}
params.Set("title", title)
var list List
err := c.doAPI(ctx, http.MethodPut, fmt.Sprintf("/api/v1/lists/%s", url.PathEscape(string(id))), params, &list, nil)
if err != nil {
return nil, err
}
return &list, nil
}
// DeleteList removes a list.
func (c *Client) DeleteList(ctx context.Context, id ID) error {
return c.doAPI(ctx, http.MethodDelete, fmt.Sprintf("/api/v1/lists/%s", url.PathEscape(string(id))), nil, nil, nil)
}
// AddToList adds accounts to a list.
//
// Only accounts already followed by the user can be added to a list.
func (c *Client) AddToList(ctx context.Context, list ID, accounts ...ID) error {
params := url.Values{}
for _, acct := range accounts {
params.Add("account_ids", string(acct))
}
return c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/lists/%s/accounts", url.PathEscape(string(list))), params, nil, nil)
}
// RemoveFromList removes accounts from a list.
func (c *Client) RemoveFromList(ctx context.Context, list ID, accounts ...ID) error {
params := url.Values{}
for _, acct := range accounts {
params.Add("account_ids", string(acct))
}
return c.doAPI(ctx, http.MethodDelete, fmt.Sprintf("/api/v1/lists/%s/accounts", url.PathEscape(string(list))), params, nil, nil)
}
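Together, the calls above cover the full list lifecycle. A hedged sketch that creates a list, adds a hypothetical, already-followed account, and reads the membership back, assuming an authenticated client:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/mattn/go-mastodon"
)

func main() {
	ctx := context.Background()
	c := mastodon.NewClient(&mastodon.Config{
		Server:      "https://example-pleroma.net", // placeholder
		AccessToken: "access-token",                // placeholder
	})

	list, err := c.CreateList(ctx, "rss-sources")
	if err != nil {
		log.Fatal(err)
	}

	// AddToList only accepts accounts the authenticated user already follows.
	if err := c.AddToList(ctx, list.ID, mastodon.ID("12345")); err != nil { // hypothetical account ID
		log.Fatal(err)
	}

	accounts, err := c.GetListAccounts(ctx, list.ID)
	if err != nil {
		log.Fatal(err)
	}
	for _, a := range accounts {
		fmt.Println(a.Acct)
	}
}
```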

387
vendor/github.com/mattn/go-mastodon/mastodon.go generated vendored Normal file
View File

@ -0,0 +1,387 @@
// Package mastodon provides functions and structs for accessing the mastodon API.
package mastodon
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"mime/multipart"
"net/http"
"net/url"
"os"
"path"
"path/filepath"
"strconv"
"strings"
"time"
"github.com/tomnomnom/linkheader"
)
// Config is a setting for accessing the mastodon APIs.
type Config struct {
Server string
ClientID string
ClientSecret string
AccessToken string
}
// Client is an API client for mastodon.
type Client struct {
http.Client
config *Config
}
func (c *Client) doAPI(ctx context.Context, method string, uri string, params interface{}, res interface{}, pg *Pagination) error {
u, err := url.Parse(c.config.Server)
if err != nil {
return err
}
u.Path = path.Join(u.Path, uri)
var req *http.Request
ct := "application/x-www-form-urlencoded"
if values, ok := params.(url.Values); ok {
var body io.Reader
if method == http.MethodGet {
if pg != nil {
values = pg.setValues(values)
}
u.RawQuery = values.Encode()
} else {
body = strings.NewReader(values.Encode())
}
req, err = http.NewRequest(method, u.String(), body)
if err != nil {
return err
}
} else if file, ok := params.(string); ok {
f, err := os.Open(file)
if err != nil {
return err
}
defer f.Close()
var buf bytes.Buffer
mw := multipart.NewWriter(&buf)
part, err := mw.CreateFormFile("file", filepath.Base(file))
if err != nil {
return err
}
_, err = io.Copy(part, f)
if err != nil {
return err
}
err = mw.Close()
if err != nil {
return err
}
req, err = http.NewRequest(method, u.String(), &buf)
if err != nil {
return err
}
ct = mw.FormDataContentType()
} else if reader, ok := params.(io.Reader); ok {
var buf bytes.Buffer
mw := multipart.NewWriter(&buf)
part, err := mw.CreateFormFile("file", "upload")
if err != nil {
return err
}
_, err = io.Copy(part, reader)
if err != nil {
return err
}
err = mw.Close()
if err != nil {
return err
}
req, err = http.NewRequest(method, u.String(), &buf)
if err != nil {
return err
}
ct = mw.FormDataContentType()
} else {
if method == http.MethodGet && pg != nil {
u.RawQuery = pg.toValues().Encode()
}
req, err = http.NewRequest(method, u.String(), nil)
if err != nil {
return err
}
}
req = req.WithContext(ctx)
req.Header.Set("Authorization", "Bearer "+c.config.AccessToken)
if params != nil {
req.Header.Set("Content-Type", ct)
}
var resp *http.Response
backoff := 1000 * time.Millisecond
for {
resp, err = c.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
// handle status code 429, which indicates the server is throttling
// our requests. Do an exponential backoff and retry the request.
if resp.StatusCode == 429 {
if backoff > time.Hour {
break
}
backoff *= 2
select {
case <-time.After(backoff):
case <-ctx.Done():
return ctx.Err()
}
continue
}
break
}
if resp.StatusCode != http.StatusOK {
return parseAPIError("bad request", resp)
} else if res == nil {
return nil
} else if pg != nil {
if lh := resp.Header.Get("Link"); lh != "" {
pg2, err := newPagination(lh)
if err != nil {
return err
}
*pg = *pg2
}
}
return json.NewDecoder(resp.Body).Decode(&res)
}
// NewClient returns a new mastodon API client.
func NewClient(config *Config) *Client {
return &Client{
Client: *http.DefaultClient,
config: config,
}
}
// Authenticate obtains an access token for the API using the password grant.
func (c *Client) Authenticate(ctx context.Context, username, password string) error {
params := url.Values{
"client_id": {c.config.ClientID},
"client_secret": {c.config.ClientSecret},
"grant_type": {"password"},
"username": {username},
"password": {password},
"scope": {"read write follow"},
}
return c.authenticate(ctx, params)
}
// AuthenticateToken logs in using a grant token returned by Application.AuthURI.
//
// redirectURI should be the same as Application.RedirectURI.
func (c *Client) AuthenticateToken(ctx context.Context, authCode, redirectURI string) error {
params := url.Values{
"client_id": {c.config.ClientID},
"client_secret": {c.config.ClientSecret},
"grant_type": {"authorization_code"},
"code": {authCode},
"redirect_uri": {redirectURI},
}
return c.authenticate(ctx, params)
}
func (c *Client) authenticate(ctx context.Context, params url.Values) error {
u, err := url.Parse(c.config.Server)
if err != nil {
return err
}
u.Path = path.Join(u.Path, "/oauth/token")
req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(params.Encode()))
if err != nil {
return err
}
req = req.WithContext(ctx)
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
resp, err := c.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return parseAPIError("bad authorization", resp)
}
var res struct {
AccessToken string `json:"access_token"`
}
err = json.NewDecoder(resp.Body).Decode(&res)
if err != nil {
return err
}
c.config.AccessToken = res.AccessToken
return nil
}
// Toot is the struct used to post a status.
type Toot struct {
Status string `json:"status"`
InReplyToID ID `json:"in_reply_to_id"`
MediaIDs []ID `json:"media_ids"`
Sensitive bool `json:"sensitive"`
SpoilerText string `json:"spoiler_text"`
Visibility string `json:"visibility"`
}
// Mention hold information for mention.
type Mention struct {
URL string `json:"url"`
Username string `json:"username"`
Acct string `json:"acct"`
ID ID `json:"id"`
}
// Tag hold information for tag.
type Tag struct {
Name string `json:"name"`
URL string `json:"url"`
History []History `json:"history"`
}
// History hold information for history.
type History struct {
Day string `json:"day"`
Uses int64 `json:"uses"`
Accounts int64 `json:"accounts"`
}
// Attachment hold information for attachment.
type Attachment struct {
ID ID `json:"id"`
Type string `json:"type"`
URL string `json:"url"`
RemoteURL string `json:"remote_url"`
PreviewURL string `json:"preview_url"`
TextURL string `json:"text_url"`
Description string `json:"description"`
Meta AttachmentMeta `json:"meta"`
}
// AttachmentMeta holds information for attachment metadata.
type AttachmentMeta struct {
Original AttachmentSize `json:"original"`
Small AttachmentSize `json:"small"`
}
// AttachmentSize holds information for attachment size.
type AttachmentSize struct {
Width int64 `json:"width"`
Height int64 `json:"height"`
Size string `json:"size"`
Aspect float64 `json:"aspect"`
}
// Emoji hold information for CustomEmoji.
type Emoji struct {
ShortCode string `json:"shortcode"`
StaticURL string `json:"static_url"`
URL string `json:"url"`
VisibleInPicker bool `json:"visible_in_picker"`
}
// Results hold information for search result.
type Results struct {
Accounts []*Account `json:"accounts"`
Statuses []*Status `json:"statuses"`
Hashtags []string `json:"hashtags"`
}
// Pagination is a struct for specifying the get range.
type Pagination struct {
MaxID ID
SinceID ID
MinID ID
Limit int64
}
func newPagination(rawlink string) (*Pagination, error) {
if rawlink == "" {
return nil, errors.New("empty link header")
}
p := &Pagination{}
for _, link := range linkheader.Parse(rawlink) {
switch link.Rel {
case "next":
maxID, err := getPaginationID(link.URL, "max_id")
if err != nil {
return nil, err
}
p.MaxID = maxID
case "prev":
sinceID, err := getPaginationID(link.URL, "since_id")
if err != nil {
return nil, err
}
p.SinceID = sinceID
minID, err := getPaginationID(link.URL, "min_id")
if err != nil {
return nil, err
}
p.MinID = minID
}
}
return p, nil
}
func getPaginationID(rawurl, key string) (ID, error) {
u, err := url.Parse(rawurl)
if err != nil {
return "", err
}
val := u.Query().Get(key)
if val == "" {
return "", nil
}
id, err := strconv.ParseInt(val, 10, 64)
if err != nil {
return "", err
}
return ID(fmt.Sprint(id)), nil
}
func (p *Pagination) toValues() url.Values {
return p.setValues(url.Values{})
}
func (p *Pagination) setValues(params url.Values) url.Values {
if p.MaxID != "" {
params.Set("max_id", string(p.MaxID))
}
if p.SinceID != "" {
params.Set("since_id", string(p.SinceID))
}
if p.MinID != "" {
params.Set("min_id", string(p.MinID))
}
if p.Limit > 0 {
params.Set("limit", fmt.Sprint(p.Limit))
}
return params
}
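Putting the pieces above together, a typical session creates a client, exchanges user credentials for a token via Authenticate (the password grant shown above), and then calls the REST helpers. A minimal sketch with placeholder values, not taken from any real pod:

```go
package main

import (
	"context"
	"log"

	"github.com/mattn/go-mastodon"
)

func main() {
	ctx := context.Background()

	c := mastodon.NewClient(&mastodon.Config{
		Server:       "https://example-pleroma.net", // placeholder pod
		ClientID:     "client-id",                   // placeholder
		ClientSecret: "client-secret",               // placeholder
	})

	// Password grant, as implemented by Authenticate above; the access token
	// ends up in the client config and is sent by doAPI on every call.
	if err := c.Authenticate(ctx, "username", "password"); err != nil { // placeholder credentials
		log.Fatal(err)
	}

	status, err := c.PostStatus(ctx, &mastodon.Toot{
		Status:     "Hello from a go-mastodon sketch",
		Visibility: "public",
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Println("posted status", status.ID)
}
```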

42
vendor/github.com/mattn/go-mastodon/notification.go generated vendored Normal file
View File

@ -0,0 +1,42 @@
package mastodon
import (
"context"
"fmt"
"net/http"
"time"
)
// Notification holds information for a mastodon notification.
type Notification struct {
ID ID `json:"id"`
Type string `json:"type"`
CreatedAt time.Time `json:"created_at"`
Account Account `json:"account"`
Status *Status `json:"status"`
}
// GetNotifications return notifications.
func (c *Client) GetNotifications(ctx context.Context, pg *Pagination) ([]*Notification, error) {
var notifications []*Notification
err := c.doAPI(ctx, http.MethodGet, "/api/v1/notifications", nil, &notifications, pg)
if err != nil {
return nil, err
}
return notifications, nil
}
// GetNotification return notification.
func (c *Client) GetNotification(ctx context.Context, id ID) (*Notification, error) {
var notification Notification
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/notifications/%v", id), nil, &notification, nil)
if err != nil {
return nil, err
}
return &notification, nil
}
// ClearNotifications clear notifications.
func (c *Client) ClearNotifications(ctx context.Context) error {
return c.doAPI(ctx, http.MethodPost, "/api/v1/notifications/clear", nil, nil, nil)
}

39
vendor/github.com/mattn/go-mastodon/report.go generated vendored Normal file
View File

@ -0,0 +1,39 @@
package mastodon
import (
"context"
"net/http"
"net/url"
)
// Report holds information for a mastodon report.
type Report struct {
ID int64 `json:"id"`
ActionTaken bool `json:"action_taken"`
}
// GetReports returns the reports filed by the current user.
func (c *Client) GetReports(ctx context.Context) ([]*Report, error) {
var reports []*Report
err := c.doAPI(ctx, http.MethodGet, "/api/v1/reports", nil, &reports, nil)
if err != nil {
return nil, err
}
return reports, nil
}
// Report files a report against the given account and statuses.
func (c *Client) Report(ctx context.Context, accountID ID, ids []ID, comment string) (*Report, error) {
params := url.Values{}
params.Set("account_id", string(accountID))
for _, id := range ids {
params.Add("status_ids[]", string(id))
}
params.Set("comment", comment)
var report Report
err := c.doAPI(ctx, http.MethodPost, "/api/v1/reports", params, &report, nil)
if err != nil {
return nil, err
}
return &report, nil
}

297
vendor/github.com/mattn/go-mastodon/status.go generated vendored Normal file
View File

@ -0,0 +1,297 @@
package mastodon
import (
"context"
"fmt"
"io"
"net/http"
"net/url"
"time"
)
// Status is the struct that holds a status.
type Status struct {
ID ID `json:"id"`
URI string `json:"uri"`
URL string `json:"url"`
Account Account `json:"account"`
InReplyToID interface{} `json:"in_reply_to_id"`
InReplyToAccountID interface{} `json:"in_reply_to_account_id"`
Reblog *Status `json:"reblog"`
Content string `json:"content"`
CreatedAt time.Time `json:"created_at"`
Emojis []Emoji `json:"emojis"`
RepliesCount int64 `json:"replies_count"`
ReblogsCount int64 `json:"reblogs_count"`
FavouritesCount int64 `json:"favourites_count"`
Reblogged interface{} `json:"reblogged"`
Favourited interface{} `json:"favourited"`
Muted interface{} `json:"muted"`
Sensitive bool `json:"sensitive"`
SpoilerText string `json:"spoiler_text"`
Visibility string `json:"visibility"`
MediaAttachments []Attachment `json:"media_attachments"`
Mentions []Mention `json:"mentions"`
Tags []Tag `json:"tags"`
Card *Card `json:"card"`
Application Application `json:"application"`
Language string `json:"language"`
Pinned interface{} `json:"pinned"`
}
// Context hold information for mastodon context.
type Context struct {
Ancestors []*Status `json:"ancestors"`
Descendants []*Status `json:"descendants"`
}
// Card hold information for mastodon card.
type Card struct {
URL string `json:"url"`
Title string `json:"title"`
Description string `json:"description"`
Image string `json:"image"`
Type string `json:"type"`
AuthorName string `json:"author_name"`
AuthorURL string `json:"author_url"`
ProviderName string `json:"provider_name"`
ProviderURL string `json:"provider_url"`
HTML string `json:"html"`
Width int64 `json:"width"`
Height int64 `json:"height"`
}
// GetFavourites return the favorite list of the current user.
func (c *Client) GetFavourites(ctx context.Context, pg *Pagination) ([]*Status, error) {
var statuses []*Status
err := c.doAPI(ctx, http.MethodGet, "/api/v1/favourites", nil, &statuses, pg)
if err != nil {
return nil, err
}
return statuses, nil
}
// GetStatus return status specified by id.
func (c *Client) GetStatus(ctx context.Context, id ID) (*Status, error) {
var status Status
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/statuses/%s", id), nil, &status, nil)
if err != nil {
return nil, err
}
return &status, nil
}
// GetStatusContext returns the context (ancestors and descendants) of the status specified by id.
func (c *Client) GetStatusContext(ctx context.Context, id ID) (*Context, error) {
var context Context
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/statuses/%s/context", id), nil, &context, nil)
if err != nil {
return nil, err
}
return &context, nil
}
// GetStatusCard returns the preview card of the status specified by id.
func (c *Client) GetStatusCard(ctx context.Context, id ID) (*Card, error) {
var card Card
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/statuses/%s/card", id), nil, &card, nil)
if err != nil {
return nil, err
}
return &card, nil
}
// GetRebloggedBy returns the account list of the user who reblogged the toot of id.
func (c *Client) GetRebloggedBy(ctx context.Context, id ID, pg *Pagination) ([]*Account, error) {
var accounts []*Account
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/statuses/%s/reblogged_by", id), nil, &accounts, pg)
if err != nil {
return nil, err
}
return accounts, nil
}
// GetFavouritedBy returns the account list of the user who liked the toot of id.
func (c *Client) GetFavouritedBy(ctx context.Context, id ID, pg *Pagination) ([]*Account, error) {
var accounts []*Account
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/statuses/%s/favourited_by", id), nil, &accounts, pg)
if err != nil {
return nil, err
}
return accounts, nil
}
// Reblog reblogs the toot with the given id and returns the status of the reblog.
func (c *Client) Reblog(ctx context.Context, id ID) (*Status, error) {
var status Status
err := c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/statuses/%s/reblog", id), nil, &status, nil)
if err != nil {
return nil, err
}
return &status, nil
}
// Unreblog removes the reblog of the toot with the given id and returns the status of the original toot.
func (c *Client) Unreblog(ctx context.Context, id ID) (*Status, error) {
var status Status
err := c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/statuses/%s/unreblog", id), nil, &status, nil)
if err != nil {
return nil, err
}
return &status, nil
}
// Favourite favourites the toot with the given id and returns its status.
func (c *Client) Favourite(ctx context.Context, id ID) (*Status, error) {
var status Status
err := c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/statuses/%s/favourite", id), nil, &status, nil)
if err != nil {
return nil, err
}
return &status, nil
}
// Unfavourite removes the favourite from the toot with the given id and returns its status.
func (c *Client) Unfavourite(ctx context.Context, id ID) (*Status, error) {
var status Status
err := c.doAPI(ctx, http.MethodPost, fmt.Sprintf("/api/v1/statuses/%s/unfavourite", id), nil, &status, nil)
if err != nil {
return nil, err
}
return &status, nil
}
// GetTimelineHome return statuses from home timeline.
func (c *Client) GetTimelineHome(ctx context.Context, pg *Pagination) ([]*Status, error) {
var statuses []*Status
err := c.doAPI(ctx, http.MethodGet, "/api/v1/timelines/home", nil, &statuses, pg)
if err != nil {
return nil, err
}
return statuses, nil
}
// GetTimelinePublic return statuses from public timeline.
func (c *Client) GetTimelinePublic(ctx context.Context, isLocal bool, pg *Pagination) ([]*Status, error) {
params := url.Values{}
if isLocal {
params.Set("local", "t")
}
var statuses []*Status
err := c.doAPI(ctx, http.MethodGet, "/api/v1/timelines/public", params, &statuses, pg)
if err != nil {
return nil, err
}
return statuses, nil
}
// GetTimelineHashtag return statuses from tagged timeline.
func (c *Client) GetTimelineHashtag(ctx context.Context, tag string, isLocal bool, pg *Pagination) ([]*Status, error) {
params := url.Values{}
if isLocal {
params.Set("local", "t")
}
var statuses []*Status
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/timelines/tag/%s", url.PathEscape(tag)), params, &statuses, pg)
if err != nil {
return nil, err
}
return statuses, nil
}
// GetTimelineList return statuses from a list timeline.
func (c *Client) GetTimelineList(ctx context.Context, id ID, pg *Pagination) ([]*Status, error) {
var statuses []*Status
err := c.doAPI(ctx, http.MethodGet, fmt.Sprintf("/api/v1/timelines/list/%s", url.PathEscape(string(id))), nil, &statuses, pg)
if err != nil {
return nil, err
}
return statuses, nil
}
// GetTimelineMedia return statuses from media timeline.
// NOTE: This is an experimental feature of pawoo.net.
func (c *Client) GetTimelineMedia(ctx context.Context, isLocal bool, pg *Pagination) ([]*Status, error) {
params := url.Values{}
params.Set("media", "t")
if isLocal {
params.Set("local", "t")
}
var statuses []*Status
err := c.doAPI(ctx, http.MethodGet, "/api/v1/timelines/public", params, &statuses, pg)
if err != nil {
return nil, err
}
return statuses, nil
}
// PostStatus posts the toot.
func (c *Client) PostStatus(ctx context.Context, toot *Toot) (*Status, error) {
params := url.Values{}
params.Set("status", toot.Status)
if toot.InReplyToID != "" {
params.Set("in_reply_to_id", string(toot.InReplyToID))
}
if toot.MediaIDs != nil {
for _, media := range toot.MediaIDs {
params.Add("media_ids[]", string(media))
}
}
if toot.Visibility != "" {
params.Set("visibility", fmt.Sprint(toot.Visibility))
}
if toot.Sensitive {
params.Set("sensitive", "true")
}
if toot.SpoilerText != "" {
params.Set("spoiler_text", toot.SpoilerText)
}
var status Status
err := c.doAPI(ctx, http.MethodPost, "/api/v1/statuses", params, &status, nil)
if err != nil {
return nil, err
}
return &status, nil
}
// DeleteStatus deletes the toot.
func (c *Client) DeleteStatus(ctx context.Context, id ID) error {
return c.doAPI(ctx, http.MethodDelete, fmt.Sprintf("/api/v1/statuses/%s", id), nil, nil, nil)
}
// Search searches content with the given query.
func (c *Client) Search(ctx context.Context, q string, resolve bool) (*Results, error) {
params := url.Values{}
params.Set("q", q)
params.Set("resolve", fmt.Sprint(resolve))
var results Results
err := c.doAPI(ctx, http.MethodGet, "/api/v1/search", params, &results, nil)
if err != nil {
return nil, err
}
return &results, nil
}
// UploadMedia uploads a media attachment from a file.
func (c *Client) UploadMedia(ctx context.Context, file string) (*Attachment, error) {
var attachment Attachment
err := c.doAPI(ctx, http.MethodPost, "/api/v1/media", file, &attachment, nil)
if err != nil {
return nil, err
}
return &attachment, nil
}
// UploadMediaFromReader uploads a media attachment from an io.Reader.
func (c *Client) UploadMediaFromReader(ctx context.Context, reader io.Reader) (*Attachment, error) {
var attachment Attachment
err := c.doAPI(ctx, http.MethodPost, "/api/v1/media", reader, &attachment, nil)
if err != nil {
return nil, err
}
return &attachment, nil
}
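All the timeline getters above accept the Pagination struct defined in mastodon.go; after each call doAPI overwrites it from the Link response header, so the same value can drive a loop over progressively older pages. A hedged sketch of that loop (server and token are placeholders):

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/mattn/go-mastodon"
)

func main() {
	ctx := context.Background()
	c := mastodon.NewClient(&mastodon.Config{
		Server:      "https://example-pleroma.net", // placeholder
		AccessToken: "access-token",                // placeholder
	})

	pg := mastodon.Pagination{Limit: 40}
	for page := 0; page < 3; page++ { // fetch at most three pages
		statuses, err := c.GetTimelineHome(ctx, &pg)
		if err != nil {
			log.Fatal(err)
		}
		for _, s := range statuses {
			fmt.Println(s.CreatedAt.Format("2006-01-02"), s.Account.Acct, s.URL)
		}
		// doAPI replaces pg with the values from the Link header;
		// an empty MaxID means there is no older page left.
		if pg.MaxID == "" {
			break
		}
	}
}
```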

166
vendor/github.com/mattn/go-mastodon/streaming.go generated vendored Normal file
View File

@ -0,0 +1,166 @@
package mastodon
import (
"bufio"
"context"
"encoding/json"
"io"
"net/http"
"net/url"
"path"
"strings"
)
// UpdateEvent is struct for passing status event to app.
type UpdateEvent struct {
Status *Status `json:"status"`
}
func (e *UpdateEvent) event() {}
// NotificationEvent is struct for passing notification event to app.
type NotificationEvent struct {
Notification *Notification `json:"notification"`
}
func (e *NotificationEvent) event() {}
// DeleteEvent is struct for passing deletion event to app.
type DeleteEvent struct{ ID ID }
func (e *DeleteEvent) event() {}
// ErrorEvent is struct for passing errors to app.
type ErrorEvent struct{ err error }
func (e *ErrorEvent) event() {}
func (e *ErrorEvent) Error() string { return e.err.Error() }
// Event is interface passing events to app.
type Event interface {
event()
}
func handleReader(q chan Event, r io.Reader) error {
var name string
s := bufio.NewScanner(r)
for s.Scan() {
line := s.Text()
token := strings.SplitN(line, ":", 2)
if len(token) != 2 {
continue
}
switch strings.TrimSpace(token[0]) {
case "event":
name = strings.TrimSpace(token[1])
case "data":
var err error
switch name {
case "update":
var status Status
err = json.Unmarshal([]byte(token[1]), &status)
if err == nil {
q <- &UpdateEvent{&status}
}
case "notification":
var notification Notification
err = json.Unmarshal([]byte(token[1]), &notification)
if err == nil {
q <- &NotificationEvent{&notification}
}
case "delete":
q <- &DeleteEvent{ID: ID(strings.TrimSpace(token[1]))}
}
if err != nil {
q <- &ErrorEvent{err}
}
}
}
return s.Err()
}
func (c *Client) streaming(ctx context.Context, p string, params url.Values) (chan Event, error) {
u, err := url.Parse(c.config.Server)
if err != nil {
return nil, err
}
u.Path = path.Join(u.Path, "/api/v1/streaming", p)
u.RawQuery = params.Encode()
req, err := http.NewRequest(http.MethodGet, u.String(), nil)
if err != nil {
return nil, err
}
req = req.WithContext(ctx)
req.Header.Set("Authorization", "Bearer "+c.config.AccessToken)
q := make(chan Event)
go func() {
defer close(q)
for {
select {
case <-ctx.Done():
return
default:
}
c.doStreaming(req, q)
}
}()
return q, nil
}
func (c *Client) doStreaming(req *http.Request, q chan Event) {
resp, err := c.Do(req)
if err != nil {
q <- &ErrorEvent{err}
return
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
q <- &ErrorEvent{parseAPIError("bad request", resp)}
return
}
err = handleReader(q, resp.Body)
if err != nil {
q <- &ErrorEvent{err}
}
}
// StreamingUser return channel to read events on home.
func (c *Client) StreamingUser(ctx context.Context) (chan Event, error) {
return c.streaming(ctx, "user", nil)
}
// StreamingPublic return channel to read events on public.
func (c *Client) StreamingPublic(ctx context.Context, isLocal bool) (chan Event, error) {
p := "public"
if isLocal {
p = path.Join(p, "local")
}
return c.streaming(ctx, p, nil)
}
// StreamingHashtag return channel to read events on tagged timeline.
func (c *Client) StreamingHashtag(ctx context.Context, tag string, isLocal bool) (chan Event, error) {
params := url.Values{}
params.Set("tag", tag)
p := "hashtag"
if isLocal {
p = path.Join(p, "local")
}
return c.streaming(ctx, p, params)
}
// StreamingList return channel to read events on a list.
func (c *Client) StreamingList(ctx context.Context, id ID) (chan Event, error) {
params := url.Values{}
params.Set("list", string(id))
return c.streaming(ctx, "list", params)
}
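The channel returned by the streaming helpers above carries the concrete event types declared at the top of this file, so a consumer normally ranges over it with a type switch. A hedged sketch, again with placeholder server and token:

```go
package main

import (
	"context"
	"log"

	"github.com/mattn/go-mastodon"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	c := mastodon.NewClient(&mastodon.Config{
		Server:      "https://example-pleroma.net", // placeholder
		AccessToken: "access-token",                // placeholder
	})

	events, err := c.StreamingUser(ctx)
	if err != nil {
		log.Fatal(err)
	}
	// The channel is closed when ctx is cancelled (see the goroutine above).
	for e := range events {
		switch ev := e.(type) {
		case *mastodon.UpdateEvent:
			log.Println("new status:", ev.Status.URL)
		case *mastodon.NotificationEvent:
			log.Println("notification:", ev.Notification.Type)
		case *mastodon.DeleteEvent:
			log.Println("deleted status:", ev.ID)
		case *mastodon.ErrorEvent:
			log.Println("stream error:", ev.Error())
		}
	}
}
```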

195
vendor/github.com/mattn/go-mastodon/streaming_ws.go generated vendored Normal file
View File

@ -0,0 +1,195 @@
package mastodon
import (
"context"
"encoding/json"
"fmt"
"net/url"
"path"
"strings"
"github.com/gorilla/websocket"
)
// WSClient is a WebSocket client.
type WSClient struct {
websocket.Dialer
client *Client
}
// NewWSClient return WebSocket client.
func (c *Client) NewWSClient() *WSClient { return &WSClient{client: c} }
// Stream is a struct of data that flows in streaming.
type Stream struct {
Event string `json:"event"`
Payload interface{} `json:"payload"`
}
// StreamingWSUser return channel to read events on home using WebSocket.
func (c *WSClient) StreamingWSUser(ctx context.Context) (chan Event, error) {
return c.streamingWS(ctx, "user", "")
}
// StreamingWSPublic return channel to read events on public using WebSocket.
func (c *WSClient) StreamingWSPublic(ctx context.Context, isLocal bool) (chan Event, error) {
s := "public"
if isLocal {
s += ":local"
}
return c.streamingWS(ctx, s, "")
}
// StreamingWSHashtag return channel to read events on tagged timeline using WebSocket.
func (c *WSClient) StreamingWSHashtag(ctx context.Context, tag string, isLocal bool) (chan Event, error) {
s := "hashtag"
if isLocal {
s += ":local"
}
return c.streamingWS(ctx, s, tag)
}
// StreamingWSList return channel to read events on a list using WebSocket.
func (c *WSClient) StreamingWSList(ctx context.Context, id ID) (chan Event, error) {
return c.streamingWS(ctx, "list", string(id))
}
func (c *WSClient) streamingWS(ctx context.Context, stream, tag string) (chan Event, error) {
params := url.Values{}
params.Set("access_token", c.client.config.AccessToken)
params.Set("stream", stream)
if tag != "" {
params.Set("tag", tag)
}
u, err := changeWebSocketScheme(c.client.config.Server)
if err != nil {
return nil, err
}
u.Path = path.Join(u.Path, "/api/v1/streaming")
u.RawQuery = params.Encode()
q := make(chan Event)
go func() {
defer close(q)
for {
err := c.handleWS(ctx, u.String(), q)
if err != nil {
return
}
}
}()
return q, nil
}
func (c *WSClient) handleWS(ctx context.Context, rawurl string, q chan Event) error {
conn, err := c.dialRedirect(rawurl)
if err != nil {
q <- &ErrorEvent{err: err}
// End.
return err
}
// Close the WebSocket when the context is canceled.
go func() {
<-ctx.Done()
conn.Close()
}()
for {
select {
case <-ctx.Done():
q <- &ErrorEvent{err: ctx.Err()}
// End.
return ctx.Err()
default:
}
var s Stream
err := conn.ReadJSON(&s)
if err != nil {
q <- &ErrorEvent{err: err}
// Reconnect.
break
}
err = nil
switch s.Event {
case "update":
var status Status
err = json.Unmarshal([]byte(s.Payload.(string)), &status)
if err == nil {
q <- &UpdateEvent{Status: &status}
}
case "notification":
var notification Notification
err = json.Unmarshal([]byte(s.Payload.(string)), &notification)
if err == nil {
q <- &NotificationEvent{Notification: &notification}
}
case "delete":
if f, ok := s.Payload.(float64); ok {
q <- &DeleteEvent{ID: ID(fmt.Sprint(int64(f)))}
} else {
q <- &DeleteEvent{ID: ID(strings.TrimSpace(s.Payload.(string)))}
}
}
if err != nil {
q <- &ErrorEvent{err}
}
}
return nil
}
func (c *WSClient) dialRedirect(rawurl string) (conn *websocket.Conn, err error) {
for {
conn, rawurl, err = c.dial(rawurl)
if err != nil {
return nil, err
} else if conn != nil {
return conn, nil
}
}
}
func (c *WSClient) dial(rawurl string) (*websocket.Conn, string, error) {
conn, resp, err := c.Dial(rawurl, nil)
if err != nil && err != websocket.ErrBadHandshake {
return nil, "", err
}
defer resp.Body.Close()
if loc := resp.Header.Get("Location"); loc != "" {
u, err := changeWebSocketScheme(loc)
if err != nil {
return nil, "", err
}
return nil, u.String(), nil
}
return conn, "", err
}
func changeWebSocketScheme(rawurl string) (*url.URL, error) {
u, err := url.Parse(rawurl)
if err != nil {
return nil, err
}
switch u.Scheme {
case "http":
u.Scheme = "ws"
case "https":
u.Scheme = "wss"
}
return u, nil
}

20
vendor/github.com/mattn/go-mastodon/unixtime.go generated vendored Normal file
View File

@ -0,0 +1,20 @@
package mastodon
import (
"strconv"
"time"
)
type Unixtime time.Time
func (t *Unixtime) UnmarshalJSON(data []byte) error {
if len(data) > 0 && data[0] == '"' && data[len(data)-1] == '"' {
data = data[1 : len(data)-1]
}
ts, err := strconv.ParseInt(string(data), 10, 64)
if err != nil {
return err
}
*t = Unixtime(time.Unix(ts, 0))
return nil
}

31
vendor/github.com/mmcdole/gofeed/.gitignore generated vendored Normal file
View File

@ -0,0 +1,31 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe
*.test
*.prof
.DS_STORE
cmd/ftest/ftest
# Goland specific files
.idea

24
vendor/github.com/mmcdole/gofeed/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,24 @@
language: go
env:
global:
- GO111MODULE="on"
go:
- 1.11.x
- 1.12.x
- 1.13.x
- tip
matrix:
allow_failures:
- go: tip
fast_finish: true
before_install:
- go get github.com/mattn/goveralls
script:
- go install ./...
- go test -v ./...
- $GOPATH/bin/goveralls -service=travis-ci

21
vendor/github.com/mmcdole/gofeed/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2016 mmcdole
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

265
vendor/github.com/mmcdole/gofeed/README.md generated vendored Normal file
View File

@ -0,0 +1,265 @@
# gofeed
[![Build Status](https://travis-ci.org/mmcdole/gofeed.svg?branch=master)](https://travis-ci.org/mmcdole/gofeed) [![Coverage Status](https://coveralls.io/repos/github/mmcdole/gofeed/badge.svg?branch=master)](https://coveralls.io/github/mmcdole/gofeed?branch=master) [![Go Report Card](https://goreportcard.com/badge/github.com/mmcdole/gofeed)](https://goreportcard.com/report/github.com/mmcdole/gofeed) [![](https://godoc.org/github.com/mmcdole/gofeed?status.svg)](http://godoc.org/github.com/mmcdole/gofeed) [![License](http://img.shields.io/:license-mit-blue.svg)](http://doge.mit-license.org)
The `gofeed` library is a robust feed parser that supports parsing both [RSS](https://en.wikipedia.org/wiki/RSS) and [Atom](https://en.wikipedia.org/wiki/Atom_(standard)) feeds. The library provides a universal `gofeed.Parser` that will parse and convert all feed types into a hybrid `gofeed.Feed` model. You also have the option of utilizing the feed specific `atom.Parser` or `rss.Parser` parsers which generate `atom.Feed` and `rss.Feed` respectively.
## Table of Contents
- [Features](#features)
- [Overview](#overview)
- [Basic Usage](#basic-usage)
- [Advanced Usage](#advanced-usage)
- [Extensions](#extensions)
- [Invalid Feeds](#invalid-feeds)
- [Default Mappings](#default-mappings)
- [Dependencies](#dependencies)
- [License](#license)
- [Credits](#credits)
## Features
#### Supported feed types:
* RSS 0.90
* Netscape RSS 0.91
* Userland RSS 0.91
* RSS 0.92
* RSS 0.93
* RSS 0.94
* RSS 1.0
* RSS 2.0
* Atom 0.3
* Atom 1.0
#### Extension Support
The `gofeed` library provides support for parsing several popular predefined extensions into ready-made structs, including [Dublin Core](http://dublincore.org/documents/dces/) and [Apple's iTunes](https://help.apple.com/itc/podcasts_connect/#/itcb54353390).
It parses all other feed extensions in a generic way (see the [Extensions](#extensions) section for more details).
#### Invalid Feeds
A best-effort attempt is made at parsing broken and invalid XML feeds. Currently, `gofeed` can successfully parse feeds with the following issues:
- Unescaped/Naked Markup in feed elements
- Undeclared namespace prefixes
- Missing closing tags on certain elements
- Illegal tags within feed elements without namespace prefixes
- Missing "required" elements as specified by the respective feed specs.
- Incorrect date formats
## Overview
The `gofeed` library consists of a universal feed parser and several feed-specific parsers. Which one you choose depends entirely on your use case. If you will be handling both RSS and Atom feeds then it makes sense to use the `gofeed.Parser`. If you know ahead of time that you will only be parsing one feed type then it would make sense to use `rss.Parser` or `atom.Parser`.
#### Universal Feed Parser
The universal `gofeed.Parser` works in 3 stages: detection, parsing and translation. It first detects the feed type that it is currently parsing. Then it uses a feed-specific parser to parse the feed into its true representation, which will be either an `rss.Feed` or an `atom.Feed`. These models cover every field possible for their respective feed types. Finally, they are *translated* into a `gofeed.Feed` model that is a hybrid of both feed types. Performing the universal feed parsing in these 3 stages allows for more flexibility and keeps the code base more maintainable by separating RSS and Atom parsing into separate packages.
![Diagram](docs/sequence.png)
The translation step is done by anything which adheres to the `gofeed.Translator` interface. The `DefaultRSSTranslator` and `DefaultAtomTranslator` are used behind the scenes when you use the `gofeed.Parser` with its default settings. You can see how they translate fields from ```atom.Feed``` or ```rss.Feed``` to the universal ```gofeed.Feed``` struct in the [Default Mappings](#default-mappings) section. However, should you disagree with the way certain fields are translated you can easily supply your own `gofeed.Translator` and override this behavior. See the [Advanced Usage](#advanced-usage) section for an example how to do this.
#### Feed Specific Parsers
The `gofeed` library provides two feed-specific parsers: `atom.Parser` and `rss.Parser`. If the hybrid `gofeed.Feed` model that the universal `gofeed.Parser` produces does not contain a field from the `atom.Feed` or `rss.Feed` model that you require, it might be beneficial to use the feed-specific parsers. When using the `atom.Parser` or `rss.Parser` directly, you can access all of the fields found in the `atom.Feed` and `rss.Feed` models. It is also marginally faster because you are able to skip the translation step.
## Basic Usage
#### Universal Feed Parser
The most common usage scenario will be to use ```gofeed.Parser``` to parse an arbitrary RSS or Atom feed into the hybrid ```gofeed.Feed``` model. This hybrid model allows you to treat RSS and Atom feeds the same.
##### Parse a feed from a URL:
```go
fp := gofeed.NewParser()
feed, _ := fp.ParseURL("http://feeds.twit.tv/twit.xml")
fmt.Println(feed.Title)
```
##### Parse a feed from a string:
```go
feedData := `<rss version="2.0">
<channel>
<title>Sample Feed</title>
</channel>
</rss>`
fp := gofeed.NewParser()
feed, _ := fp.ParseString(feedData)
fmt.Println(feed.Title)
```
##### Parse a feed from an io.Reader:
```go
file, _ := os.Open("/path/to/a/file.xml")
defer file.Close()
fp := gofeed.NewParser()
feed, _ := fp.Parse(file)
fmt.Println(feed.Title)
```
##### Parse a feed from a URL with a 60s timeout:
```go
ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
defer cancel()
fp := gofeed.NewParser()
feed, _ := fp.ParseURLWithContext("http://feeds.twit.tv/twit.xml", ctx)
fmt.Println(feed.Title)
```
#### Feed Specific Parsers
You can easily use the `rss.Parser` and `atom.Parser` directly if you have a usage scenario that requires it:
##### Parse an RSS feed into an `rss.Feed`
```go
feedData := `<rss version="2.0">
<channel>
<webMaster>example@site.com (Example Name)</webMaster>
</channel>
</rss>`
fp := rss.Parser{}
rssFeed, _ := fp.Parse(strings.NewReader(feedData))
fmt.Println(rssFeed.WebMaster)
```
##### Parse an Atom feed into an `atom.Feed`
```go
feedData := `<feed xmlns="http://www.w3.org/2005/Atom">
<subtitle>Example Atom</subtitle>
</feed>`
fp := atom.Parser{}
atomFeed, _ := fp.Parse(strings.NewReader(feedData))
fmt.Println(atomFeed.Subtitle)
```
## Advanced Usage
##### Parse a feed while using a custom translator
The mappings and precedence order that are outlined in the [Default Mappings](#default-mappings) section are provided by the following two structs: `DefaultRSSTranslator` and `DefaultAtomTranslator`. If you have fields that you think should have a different precedence, or if you want to make a translator that is aware of an unsupported extension you can do this by specifying your own RSS or Atom translator when using the `gofeed.Parser`.
Here is a simple example of creating a custom `Translator` that makes the `/rss/channel/itunes:author` field have a higher precedence than the `/rss/channel/managingEditor` field in RSS feeds. We will wrap the existing `DefaultRSSTranslator` since we only want to change the behavior for a single field.
First we must define a custom translator:
```go
import (
"fmt"
"github.com/mmcdole/gofeed"
"github.com/mmcdole/gofeed/rss"
)
type MyCustomTranslator struct {
defaultTranslator *gofeed.DefaultRSSTranslator
}
func NewMyCustomTranslator() *MyCustomTranslator {
t := &MyCustomTranslator{}
// We create a DefaultRSSTranslator internally so we can wrap its Translate
// call since we only want to modify the precedence for a single field.
t.defaultTranslator = &gofeed.DefaultRSSTranslator{}
return t
}
func (ct *MyCustomTranslator) Translate(feed interface{}) (*gofeed.Feed, error) {
rss, found := feed.(*rss.Feed)
if !found {
return nil, fmt.Errorf("Feed did not match expected type of *rss.Feed")
}
f, err := ct.defaultTranslator.Translate(rss)
if err != nil {
return nil, err
}
if rss.ITunesExt != nil && rss.ITunesExt.Author != "" {
f.Author = rss.ITunesExt.Author
} else {
f.Author = rss.ManagingEditor
}
return f, nil
}
```
Next you must configure your `gofeed.Parser` to utilize the new `gofeed.Translator`:
```go
feedData := `<rss version="2.0">
<channel>
<managingEditor>Ender Wiggin</managingEditor>
<itunes:author>Valentine Wiggin</itunes:author>
</channel>
</rss>`
fp := gofeed.NewParser()
fp.RSSTranslator = NewMyCustomTranslator()
feed, _ := fp.ParseString(feedData)
fmt.Println(feed.Author) // Valentine Wiggin
```
## Extensions
Every element which does not belong to the feed's default namespace is considered an extension by `gofeed`. These are parsed and stored in a tree-like structure located at `Feed.Extensions` and `Item.Extensions`. These fields should allow you to access and read any custom extension elements.
In addition to the generic handling of extensions, `gofeed` also has built-in support for parsing certain popular extensions into their own structs for convenience. It currently supports the [Dublin Core](http://dublincore.org/documents/dces/) and [Apple iTunes](https://help.apple.com/itc/podcasts_connect/#/itcb54353390) extensions, which you can access at `Feed.ITunesExt`, `Feed.DublinCoreExt`, `Item.ITunesExt` and `Item.DublinCoreExt`.
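As a sketch of what reading both kinds of data can look like (the `media` namespace and attribute names below are only illustrative):

```go
feed, _ := gofeed.NewParser().ParseURL("http://feeds.twit.tv/twit.xml")

// Predefined extensions are parsed into dedicated structs.
if feed.ITunesExt != nil {
	fmt.Println(feed.ITunesExt.Author)
}

// Everything else lands in the generic tree:
// map[namespace prefix]map[element name][]ext.Extension
for _, item := range feed.Items {
	if media, ok := item.Extensions["media"]; ok {
		for _, thumb := range media["thumbnail"] {
			fmt.Println(thumb.Attrs["url"])
		}
	}
}
```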
## Default Mappings
The ```DefaultRSSTranslator``` and the ```DefaultAtomTranslator``` map the following ```rss.Feed``` and ```atom.Feed``` fields to their respective ```gofeed.Feed``` fields. They are listed in order of precedence (highest to lowest):
`gofeed.Feed` | RSS | Atom
--- | --- | ---
Title | /rss/channel/title<br>/rdf:RDF/channel/title<br>/rss/channel/dc:title<br>/rdf:RDF/channel/dc:title | /feed/title
Description | /rss/channel/description<br>/rdf:RDF/channel/description<br>/rss/channel/itunes:subtitle | /feed/subtitle<br>/feed/tagline
Link | /rss/channel/link<br>/rdf:RDF/channel/link | /feed/link[@rel=”alternate”]/@href<br>/feed/link[not(@rel)]/@href
FeedLink | /rss/channel/atom:link[@rel="self"]/@href<br>/rdf:RDF/channel/atom:link[@rel="self"]/@href | /feed/link[@rel="self"]/@href
Updated | /rss/channel/lastBuildDate<br>/rss/channel/dc:date<br>/rdf:RDF/channel/dc:date | /feed/updated<br>/feed/modified
Published | /rss/channel/pubDate |
Author | /rss/channel/managingEditor<br>/rss/channel/webMaster<br>/rss/channel/dc:author<br>/rdf:RDF/channel/dc:author<br>/rss/channel/dc:creator<br>/rdf:RDF/channel/dc:creator<br>/rss/channel/itunes:author | /feed/author
Language | /rss/channel/language<br>/rss/channel/dc:language<br>/rdf:RDF/channel/dc:language | /feed/@xml:lang
Image | /rss/channel/image<br>/rdf:RDF/image<br>/rss/channel/itunes:image | /feed/logo
Copyright | /rss/channel/copyright<br>/rss/channel/dc:rights<br>/rdf:RDF/channel/dc:rights | /feed/rights<br>/feed/copyright
Generator | /rss/channel/generator | /feed/generator
Categories | /rss/channel/category<br>/rss/channel/itunes:category<br>/rss/channel/itunes:keywords<br>/rss/channel/dc:subject<br>/rdf:RDF/channel/dc:subject | /feed/category
`gofeed.Item` | RSS | Atom
--- | --- | ---
Title | /rss/channel/item/title<br>/rdf:RDF/item/title<br>/rdf:RDF/item/dc:title<br>/rss/channel/item/dc:title | /feed/entry/title
Description | /rss/channel/item/description<br>/rdf:RDF/item/description<br>/rss/channel/item/dc:description<br>/rdf:RDF/item/dc:description | /feed/entry/summary
Content | /rss/channel/item/content:encoded | /feed/entry/content
Link | /rss/channel/item/link<br>/rdf:RDF/item/link | /feed/entry/link[@rel=”alternate”]/@href<br>/feed/entry/link[not(@rel)]/@href
Updated | /rss/channel/item/dc:date<br>/rdf:RDF/rdf:item/dc:date | /feed/entry/modified<br>/feed/entry/updated
Published | /rss/channel/item/pubDate<br>/rss/channel/item/dc:date | /feed/entry/published<br>/feed/entry/issued
Author | /rss/channel/item/author<br>/rss/channel/item/dc:author<br>/rdf:RDF/item/dc:author<br>/rss/channel/item/dc:creator<br>/rdf:RDF/item/dc:creator<br>/rss/channel/item/itunes:author | /feed/entry/author
GUID | /rss/channel/item/guid | /feed/entry/id
Image | /rss/channel/item/itunes:image<br>/rss/channel/item/media:image |
Categories | /rss/channel/item/category<br>/rss/channel/item/dc:subject<br>/rss/channel/item/itunes:keywords<br>/rdf:RDF/channel/item/dc:subject | /feed/entry/category
Enclosures | /rss/channel/item/enclosure | /feed/entry/link[@rel=”enclosure”]
## Dependencies
* [goxpp](https://github.com/mmcdole/goxpp) - XML Pull Parser
* [goquery](https://github.com/PuerkitoBio/goquery) - Go jQuery-like interface
* [testify](https://github.com/stretchr/testify) - Unit test enhancements
## License
This project is licensed under the [MIT License](https://raw.githubusercontent.com/mmcdole/gofeed/master/LICENSE)
## Credits
* [cristoper](https://github.com/cristoper) for his work on implementing xml:base relative URI handling.
* [Mark Pilgrim](https://en.wikipedia.org/wiki/Mark_Pilgrim) and [Kurt McKee](http://kurtmckee.org) for their work on the excellent [Universal Feed Parser](https://github.com/kurtmckee/feedparser) Python library. This library was the inspiration for the `gofeed` library.
* [Dan MacTough](http://blog.mact.me) for his work on [node-feedparser](https://github.com/danmactough/node-feedparser). It provided inspiration for the set of fields that should be covered in the hybrid `gofeed.Feed` model.
* [Matt Jibson](https://mattjibson.com/) for his date parsing function in the [goread](https://github.com/mjibson/goread) project.
* [Jim Teeuwen](https://github.com/jteeuwen) for his method of representing arbitrary feed extensions in the [go-pkg-rss](https://github.com/jteeuwen/go-pkg-rss) library.

114
vendor/github.com/mmcdole/gofeed/atom/feed.go generated vendored Normal file
View File

@ -0,0 +1,114 @@
package atom
import (
"encoding/json"
"time"
"github.com/mmcdole/gofeed/extensions"
)
// Feed is an Atom Feed
type Feed struct {
Title string `json:"title,omitempty"`
ID string `json:"id,omitempty"`
Updated string `json:"updated,omitempty"`
UpdatedParsed *time.Time `json:"updatedParsed,omitempty"`
Subtitle string `json:"subtitle,omitempty"`
Links []*Link `json:"links,omitempty"`
Language string `json:"language,omitempty"`
Generator *Generator `json:"generator,omitempty"`
Icon string `json:"icon,omitempty"`
Logo string `json:"logo,omitempty"`
Rights string `json:"rights,omitempty"`
Contributors []*Person `json:"contributors,omitempty"`
Authors []*Person `json:"authors,omitempty"`
Categories []*Category `json:"categories,omitempty"`
Entries []*Entry `json:"entries"`
Extensions ext.Extensions `json:"extensions,omitempty"`
Version string `json:"version"`
}
func (f Feed) String() string {
json, _ := json.MarshalIndent(f, "", " ")
return string(json)
}
// Entry is an Atom Entry
type Entry struct {
Title string `json:"title,omitempty"`
ID string `json:"id,omitempty"`
Updated string `json:"updated,omitempty"`
UpdatedParsed *time.Time `json:"updatedParsed,omitempty"`
Summary string `json:"summary,omitempty"`
Authors []*Person `json:"authors,omitempty"`
Contributors []*Person `json:"contributors,omitempty"`
Categories []*Category `json:"categories,omitempty"`
Links []*Link `json:"links,omitempty"`
Rights string `json:"rights,omitempty"`
Published string `json:"published,omitempty"`
PublishedParsed *time.Time `json:"publishedParsed,omitempty"`
Source *Source `json:"source,omitempty"`
Content *Content `json:"content,omitempty"`
Extensions ext.Extensions `json:"extensions,omitempty"`
}
// Category is category metadata for Feeds and Entries
type Category struct {
Term string `json:"term,omitempty"`
Scheme string `json:"scheme,omitempty"`
Label string `json:"label,omitempty"`
}
// Person represents a person in an Atom feed
// for things like Authors, Contributors, etc
type Person struct {
Name string `json:"name,omitempty"`
Email string `json:"email,omitempty"`
URI string `json:"uri,omitempty"`
}
// Link is an Atom link that defines a reference
// from an entry or feed to a Web resource
type Link struct {
Href string `json:"href,omitempty"`
Hreflang string `json:"hreflang,omitempty"`
Rel string `json:"rel,omitempty"`
Type string `json:"type,omitempty"`
Title string `json:"title,omitempty"`
Length string `json:"length,omitempty"`
}
// Content either contains or links to the content of
// the entry
type Content struct {
Src string `json:"src,omitempty"`
Type string `json:"type,omitempty"`
Value string `json:"value,omitempty"`
}
// Generator identifies the agent used to generate a
// feed, for debugging and other purposes.
type Generator struct {
Value string `json:"value,omitempty"`
URI string `json:"uri,omitempty"`
Version string `json:"version,omitempty"`
}
// Source contains the feed information for another
// feed if a given entry came from that feed.
type Source struct {
Title string `json:"title,omitempty"`
ID string `json:"id,omitempty"`
Updated string `json:"updated,omitempty"`
UpdatedParsed *time.Time `json:"updatedParsed,omitempty"`
Subtitle string `json:"subtitle,omitempty"`
Links []*Link `json:"links,omitempty"`
Generator *Generator `json:"generator,omitempty"`
Icon string `json:"icon,omitempty"`
Logo string `json:"logo,omitempty"`
Rights string `json:"rights,omitempty"`
Contributors []*Person `json:"contributors,omitempty"`
Authors []*Person `json:"authors,omitempty"`
Categories []*Category `json:"categories,omitempty"`
Extensions ext.Extensions `json:"extensions,omitempty"`
}

761
vendor/github.com/mmcdole/gofeed/atom/parser.go generated vendored Normal file
View File

@ -0,0 +1,761 @@
package atom
import (
"encoding/base64"
"io"
"strings"
"github.com/PuerkitoBio/goquery"
ext "github.com/mmcdole/gofeed/extensions"
"github.com/mmcdole/gofeed/internal/shared"
xpp "github.com/mmcdole/goxpp"
)
var (
// Atom elements which contain URIs
// https://tools.ietf.org/html/rfc4287
uriElements = map[string]bool{
"icon": true,
"id": true,
"logo": true,
"uri": true,
"url": true, // atom 0.3
}
// Atom attributes which contain URIs
// https://tools.ietf.org/html/rfc4287
atomURIAttrs = map[string]bool{
"href": true,
"scheme": true,
"src": true,
"uri": true,
}
)
// Parser is an Atom Parser
type Parser struct {
base *shared.XMLBase
}
// Parse parses an xml feed into an atom.Feed
func (ap *Parser) Parse(feed io.Reader) (*Feed, error) {
p := xpp.NewXMLPullParser(feed, false, shared.NewReaderLabel)
ap.base = &shared.XMLBase{URIAttrs: atomURIAttrs}
_, err := ap.base.FindRoot(p)
if err != nil {
return nil, err
}
return ap.parseRoot(p)
}
func (ap *Parser) parseRoot(p *xpp.XMLPullParser) (*Feed, error) {
if err := p.Expect(xpp.StartTag, "feed"); err != nil {
return nil, err
}
atom := &Feed{}
atom.Entries = []*Entry{}
atom.Version = ap.parseVersion(p)
atom.Language = ap.parseLanguage(p)
contributors := []*Person{}
authors := []*Person{}
categories := []*Category{}
links := []*Link{}
extensions := ext.Extensions{}
for {
tok, err := ap.base.NextTag(p)
if err != nil {
return nil, err
}
if tok == xpp.EndTag {
break
}
if tok == xpp.StartTag {
name := strings.ToLower(p.Name)
if shared.IsExtension(p) {
e, err := shared.ParseExtension(extensions, p)
if err != nil {
return nil, err
}
extensions = e
} else if name == "title" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
atom.Title = result
} else if name == "id" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
atom.ID = result
} else if name == "updated" ||
name == "modified" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
atom.Updated = result
date, err := shared.ParseDate(result)
if err == nil {
utcDate := date.UTC()
atom.UpdatedParsed = &utcDate
}
} else if name == "subtitle" ||
name == "tagline" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
atom.Subtitle = result
} else if name == "link" {
result, err := ap.parseLink(p)
if err != nil {
return nil, err
}
links = append(links, result)
} else if name == "generator" {
result, err := ap.parseGenerator(p)
if err != nil {
return nil, err
}
atom.Generator = result
} else if name == "icon" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
atom.Icon = result
} else if name == "logo" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
atom.Logo = result
} else if name == "rights" ||
name == "copyright" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
atom.Rights = result
} else if name == "contributor" {
result, err := ap.parsePerson("contributor", p)
if err != nil {
return nil, err
}
contributors = append(contributors, result)
} else if name == "author" {
result, err := ap.parsePerson("author", p)
if err != nil {
return nil, err
}
authors = append(authors, result)
} else if name == "category" {
result, err := ap.parseCategory(p)
if err != nil {
return nil, err
}
categories = append(categories, result)
} else if name == "entry" {
result, err := ap.parseEntry(p)
if err != nil {
return nil, err
}
atom.Entries = append(atom.Entries, result)
} else {
err := p.Skip()
if err != nil {
return nil, err
}
}
}
}
if len(categories) > 0 {
atom.Categories = categories
}
if len(authors) > 0 {
atom.Authors = authors
}
if len(contributors) > 0 {
atom.Contributors = contributors
}
if len(links) > 0 {
atom.Links = links
}
if len(extensions) > 0 {
atom.Extensions = extensions
}
if err := p.Expect(xpp.EndTag, "feed"); err != nil {
return nil, err
}
return atom, nil
}
func (ap *Parser) parseEntry(p *xpp.XMLPullParser) (*Entry, error) {
if err := p.Expect(xpp.StartTag, "entry"); err != nil {
return nil, err
}
entry := &Entry{}
contributors := []*Person{}
authors := []*Person{}
categories := []*Category{}
links := []*Link{}
extensions := ext.Extensions{}
for {
tok, err := ap.base.NextTag(p)
if err != nil {
return nil, err
}
if tok == xpp.EndTag {
break
}
if tok == xpp.StartTag {
name := strings.ToLower(p.Name)
if shared.IsExtension(p) {
e, err := shared.ParseExtension(extensions, p)
if err != nil {
return nil, err
}
extensions = e
} else if name == "title" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
entry.Title = result
} else if name == "id" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
entry.ID = result
} else if name == "rights" ||
name == "copyright" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
entry.Rights = result
} else if name == "summary" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
entry.Summary = result
} else if name == "source" {
result, err := ap.parseSource(p)
if err != nil {
return nil, err
}
entry.Source = result
} else if name == "updated" ||
name == "modified" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
entry.Updated = result
date, err := shared.ParseDate(result)
if err == nil {
utcDate := date.UTC()
entry.UpdatedParsed = &utcDate
}
} else if name == "contributor" {
result, err := ap.parsePerson("contributor", p)
if err != nil {
return nil, err
}
contributors = append(contributors, result)
} else if name == "author" {
result, err := ap.parsePerson("author", p)
if err != nil {
return nil, err
}
authors = append(authors, result)
} else if name == "category" {
result, err := ap.parseCategory(p)
if err != nil {
return nil, err
}
categories = append(categories, result)
} else if name == "link" {
result, err := ap.parseLink(p)
if err != nil {
return nil, err
}
links = append(links, result)
} else if name == "published" ||
name == "issued" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
entry.Published = result
date, err := shared.ParseDate(result)
if err == nil {
utcDate := date.UTC()
entry.PublishedParsed = &utcDate
}
} else if name == "content" {
result, err := ap.parseContent(p)
if err != nil {
return nil, err
}
entry.Content = result
} else {
err := p.Skip()
if err != nil {
return nil, err
}
}
}
}
if len(categories) > 0 {
entry.Categories = categories
}
if len(authors) > 0 {
entry.Authors = authors
}
if len(links) > 0 {
entry.Links = links
}
if len(contributors) > 0 {
entry.Contributors = contributors
}
if len(extensions) > 0 {
entry.Extensions = extensions
}
if err := p.Expect(xpp.EndTag, "entry"); err != nil {
return nil, err
}
return entry, nil
}
func (ap *Parser) parseSource(p *xpp.XMLPullParser) (*Source, error) {
if err := p.Expect(xpp.StartTag, "source"); err != nil {
return nil, err
}
source := &Source{}
contributors := []*Person{}
authors := []*Person{}
categories := []*Category{}
links := []*Link{}
extensions := ext.Extensions{}
for {
tok, err := ap.base.NextTag(p)
if err != nil {
return nil, err
}
if tok == xpp.EndTag {
break
}
if tok == xpp.StartTag {
name := strings.ToLower(p.Name)
if shared.IsExtension(p) {
e, err := shared.ParseExtension(extensions, p)
if err != nil {
return nil, err
}
extensions = e
} else if name == "title" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
source.Title = result
} else if name == "id" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
source.ID = result
} else if name == "updated" ||
name == "modified" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
source.Updated = result
date, err := shared.ParseDate(result)
if err == nil {
utcDate := date.UTC()
source.UpdatedParsed = &utcDate
}
} else if name == "subtitle" ||
name == "tagline" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
source.Subtitle = result
} else if name == "link" {
result, err := ap.parseLink(p)
if err != nil {
return nil, err
}
links = append(links, result)
} else if name == "generator" {
result, err := ap.parseGenerator(p)
if err != nil {
return nil, err
}
source.Generator = result
} else if name == "icon" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
source.Icon = result
} else if name == "logo" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
source.Logo = result
} else if name == "rights" ||
name == "copyright" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
source.Rights = result
} else if name == "contributor" {
result, err := ap.parsePerson("contributor", p)
if err != nil {
return nil, err
}
contributors = append(contributors, result)
} else if name == "author" {
result, err := ap.parsePerson("author", p)
if err != nil {
return nil, err
}
authors = append(authors, result)
} else if name == "category" {
result, err := ap.parseCategory(p)
if err != nil {
return nil, err
}
categories = append(categories, result)
} else {
err := p.Skip()
if err != nil {
return nil, err
}
}
}
}
if len(categories) > 0 {
source.Categories = categories
}
if len(authors) > 0 {
source.Authors = authors
}
if len(contributors) > 0 {
source.Contributors = contributors
}
if len(links) > 0 {
source.Links = links
}
if len(extensions) > 0 {
source.Extensions = extensions
}
if err := p.Expect(xpp.EndTag, "source"); err != nil {
return nil, err
}
return source, nil
}
func (ap *Parser) parseContent(p *xpp.XMLPullParser) (*Content, error) {
c := &Content{}
c.Type = p.Attribute("type")
c.Src = p.Attribute("src")
text, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
c.Value = text
return c, nil
}
func (ap *Parser) parsePerson(name string, p *xpp.XMLPullParser) (*Person, error) {
if err := p.Expect(xpp.StartTag, name); err != nil {
return nil, err
}
person := &Person{}
for {
tok, err := ap.base.NextTag(p)
if err != nil {
return nil, err
}
if tok == xpp.EndTag {
break
}
if tok == xpp.StartTag {
name := strings.ToLower(p.Name)
if name == "name" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
person.Name = result
} else if name == "email" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
person.Email = result
} else if name == "uri" ||
name == "url" ||
name == "homepage" {
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
person.URI = result
} else {
err := p.Skip()
if err != nil {
return nil, err
}
}
}
}
if err := p.Expect(xpp.EndTag, name); err != nil {
return nil, err
}
return person, nil
}
func (ap *Parser) parseLink(p *xpp.XMLPullParser) (*Link, error) {
if err := p.Expect(xpp.StartTag, "link"); err != nil {
return nil, err
}
l := &Link{}
l.Href = p.Attribute("href")
l.Hreflang = p.Attribute("hreflang")
l.Type = p.Attribute("type")
l.Length = p.Attribute("length")
l.Title = p.Attribute("title")
l.Rel = p.Attribute("rel")
if l.Rel == "" {
l.Rel = "alternate"
}
if err := p.Skip(); err != nil {
return nil, err
}
if err := p.Expect(xpp.EndTag, "link"); err != nil {
return nil, err
}
return l, nil
}
func (ap *Parser) parseCategory(p *xpp.XMLPullParser) (*Category, error) {
if err := p.Expect(xpp.StartTag, "category"); err != nil {
return nil, err
}
c := &Category{}
c.Term = p.Attribute("term")
c.Scheme = p.Attribute("scheme")
c.Label = p.Attribute("label")
if err := p.Skip(); err != nil {
return nil, err
}
if err := p.Expect(xpp.EndTag, "category"); err != nil {
return nil, err
}
return c, nil
}
func (ap *Parser) parseGenerator(p *xpp.XMLPullParser) (*Generator, error) {
if err := p.Expect(xpp.StartTag, "generator"); err != nil {
return nil, err
}
g := &Generator{}
uri := p.Attribute("uri") // Atom 1.0
url := p.Attribute("url") // Atom 0.3
if uri != "" {
g.URI = uri
} else if url != "" {
g.URI = url
}
g.Version = p.Attribute("version")
result, err := ap.parseAtomText(p)
if err != nil {
return nil, err
}
g.Value = result
if err := p.Expect(xpp.EndTag, "generator"); err != nil {
return nil, err
}
return g, nil
}
func (ap *Parser) parseAtomText(p *xpp.XMLPullParser) (string, error) {
var text struct {
Type string `xml:"type,attr"`
Mode string `xml:"mode,attr"`
InnerXML string `xml:",innerxml"`
}
err := p.DecodeElement(&text)
if err != nil {
return "", err
}
result := text.InnerXML
result = strings.TrimSpace(result)
lowerType := strings.ToLower(text.Type)
lowerMode := strings.ToLower(text.Mode)
if strings.Contains(result, "<![CDATA[") {
result = shared.StripCDATA(result)
if lowerType == "html" || strings.Contains(lowerType, "xhtml") {
result, _ = ap.base.ResolveHTML(result)
}
} else {
// decode non-CDATA contents depending on type
if lowerType == "text" ||
strings.HasPrefix(lowerType, "text/") ||
(lowerType == "" && lowerMode == "") {
result, err = shared.DecodeEntities(result)
} else if strings.Contains(lowerType, "xhtml") {
result = ap.stripWrappingDiv(result)
result, _ = ap.base.ResolveHTML(result)
} else if lowerType == "html" {
result = ap.stripWrappingDiv(result)
result, err = shared.DecodeEntities(result)
if err == nil {
result, _ = ap.base.ResolveHTML(result)
}
} else {
decodedStr, err := base64.StdEncoding.DecodeString(result)
if err == nil {
result = string(decodedStr)
}
}
}
// resolve relative URIs in URI-containing elements according to xml:base
name := strings.ToLower(p.Name)
if uriElements[name] {
resolved, err := ap.base.ResolveURL(result)
if err == nil {
result = resolved
}
}
return result, err
}
func (ap *Parser) parseLanguage(p *xpp.XMLPullParser) string {
return p.Attribute("lang")
}
func (ap *Parser) parseVersion(p *xpp.XMLPullParser) string {
ver := p.Attribute("version")
if ver != "" {
return ver
}
ns := p.Attribute("xmlns")
if ns == "http://purl.org/atom/ns#" {
return "0.3"
}
if ns == "http://www.w3.org/2005/Atom" {
return "1.0"
}
return ""
}
func (ap *Parser) stripWrappingDiv(content string) (result string) {
result = content
r := strings.NewReader(result)
doc, err := goquery.NewDocumentFromReader(r)
if err == nil {
root := doc.Find("body").Children()
if root.Is("div") && root.Siblings().Size() == 0 {
html, err := root.Unwrap().Html()
if err == nil {
result = html
}
}
}
return
}
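The atom.Parser above can also be used on its own when the input is already known to be Atom. A minimal sketch; the inline XML document is invented for illustration:

```
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/mmcdole/gofeed/atom"
)

func main() {
	// Tiny hand-written Atom document, just for illustration.
	raw := `<feed xmlns="http://www.w3.org/2005/Atom">
  <title>Example</title>
  <entry><title>First post</title><id>urn:example:1</id></entry>
</feed>`

	ap := &atom.Parser{}
	feed, err := ap.Parse(strings.NewReader(raw))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(feed.Title, feed.Version, len(feed.Entries))
}
```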

48
vendor/github.com/mmcdole/gofeed/detector.go generated vendored Normal file

@ -0,0 +1,48 @@
package gofeed
import (
"io"
"strings"
"github.com/mmcdole/gofeed/internal/shared"
xpp "github.com/mmcdole/goxpp"
)
// FeedType represents one of the possible feed
// types that we can detect.
type FeedType int
const (
// FeedTypeUnknown represents a feed that could not have its
// type determined.
FeedTypeUnknown FeedType = iota
// FeedTypeAtom represents an Atom feed
FeedTypeAtom
// FeedTypeRSS represents an RSS feed
FeedTypeRSS
)
// DetectFeedType attempts to determine the type of feed
// by looking for specific xml elements unique to the
// various feed types.
func DetectFeedType(feed io.Reader) FeedType {
p := xpp.NewXMLPullParser(feed, false, shared.NewReaderLabel)
xmlBase := shared.XMLBase{}
_, err := xmlBase.FindRoot(p)
if err != nil {
return FeedTypeUnknown
}
name := strings.ToLower(p.Name)
switch name {
case "rdf":
return FeedTypeRSS
case "rss":
return FeedTypeRSS
case "feed":
return FeedTypeAtom
default:
return FeedTypeUnknown
}
}
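DetectFeedType is what the universal Parser (parser.go below) calls first, but it can also be used directly. A short sketch; the XML snippet is made up:

```
package main

import (
	"fmt"
	"strings"

	"github.com/mmcdole/gofeed"
)

func main() {
	snippet := `<rss version="2.0"><channel><title>Example</title></channel></rss>`

	switch gofeed.DetectFeedType(strings.NewReader(snippet)) {
	case gofeed.FeedTypeRSS:
		fmt.Println("looks like RSS")
	case gofeed.FeedTypeAtom:
		fmt.Println("looks like Atom")
	default:
		fmt.Println("could not detect the feed type")
	}
}
```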


@ -0,0 +1,45 @@
package ext
// DublinCoreExtension represents a feed extension
// for the Dublin Core specification.
type DublinCoreExtension struct {
Title []string `json:"title,omitempty"`
Creator []string `json:"creator,omitempty"`
Author []string `json:"author,omitempty"`
Subject []string `json:"subject,omitempty"`
Description []string `json:"description,omitempty"`
Publisher []string `json:"publisher,omitempty"`
Contributor []string `json:"contributor,omitempty"`
Date []string `json:"date,omitempty"`
Type []string `json:"type,omitempty"`
Format []string `json:"format,omitempty"`
Identifier []string `json:"identifier,omitempty"`
Source []string `json:"source,omitempty"`
Language []string `json:"language,omitempty"`
Relation []string `json:"relation,omitempty"`
Coverage []string `json:"coverage,omitempty"`
Rights []string `json:"rights,omitempty"`
}
// NewDublinCoreExtension creates a new DublinCoreExtension
// given the generic extension map for the "dc" prefix.
func NewDublinCoreExtension(extensions map[string][]Extension) *DublinCoreExtension {
dc := &DublinCoreExtension{}
dc.Title = parseTextArrayExtension("title", extensions)
dc.Creator = parseTextArrayExtension("creator", extensions)
dc.Author = parseTextArrayExtension("author", extensions)
dc.Subject = parseTextArrayExtension("subject", extensions)
dc.Description = parseTextArrayExtension("description", extensions)
dc.Publisher = parseTextArrayExtension("publisher", extensions)
dc.Contributor = parseTextArrayExtension("contributor", extensions)
dc.Date = parseTextArrayExtension("date", extensions)
dc.Type = parseTextArrayExtension("type", extensions)
dc.Format = parseTextArrayExtension("format", extensions)
dc.Identifier = parseTextArrayExtension("identifier", extensions)
dc.Source = parseTextArrayExtension("source", extensions)
dc.Language = parseTextArrayExtension("language", extensions)
dc.Relation = parseTextArrayExtension("relation", extensions)
dc.Coverage = parseTextArrayExtension("coverage", extensions)
dc.Rights = parseTextArrayExtension("rights", extensions)
return dc
}
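NewDublinCoreExtension is normally driven by the parsers, which hand it the generic extension map built by ParseExtension (see the shared extension parser further down); the translated gofeed.Feed and gofeed.Item then expose the result through their DublinCoreExt field. A hand-built sketch of that map, shaped like what a single dc:creator element would produce:

```
package main

import (
	"fmt"

	ext "github.com/mmcdole/gofeed/extensions"
)

func main() {
	// Hand-built "dc" extension map, mimicking the parser output
	// for <dc:creator>Jane Doe</dc:creator>.
	dc := map[string][]ext.Extension{
		"creator": {{Name: "creator", Value: "Jane Doe"}},
	}

	dcExt := ext.NewDublinCoreExtension(dc)
	fmt.Println("dc:creator:", dcExt.Creator) // [Jane Doe]
}
```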


@ -0,0 +1,46 @@
package ext
// Extensions is the generic extension map for Feeds and Items.
// The first map is for the element namespace prefix (e.g., itunes).
// The second map is for the element name (e.g., author).
type Extensions map[string]map[string][]Extension
// Extension represents a single XML element that was in a
// non-default namespace in a Feed or Item/Entry.
type Extension struct {
Name string `json:"name"`
Value string `json:"value"`
Attrs map[string]string `json:"attrs"`
Children map[string][]Extension `json:"children"`
}
func parseTextExtension(name string, extensions map[string][]Extension) (value string) {
if extensions == nil {
return
}
matches, ok := extensions[name]
if !ok || len(matches) == 0 {
return
}
match := matches[0]
return match.Value
}
func parseTextArrayExtension(name string, extensions map[string][]Extension) (values []string) {
if extensions == nil {
return
}
matches, ok := extensions[name]
if !ok || len(matches) == 0 {
return
}
values = []string{}
for _, m := range matches {
values = append(values, m.Value)
}
return
}
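Elements in namespaces without a dedicated struct stay in this generic two-level map: prefix first, element name second. A small self-contained sketch of its shape; the media/thumbnail keys are only an example:

```
package main

import (
	"fmt"

	ext "github.com/mmcdole/gofeed/extensions"
)

func main() {
	// prefix -> element name -> list of Extension values
	exts := ext.Extensions{
		"media": {
			"thumbnail": {
				{Name: "thumbnail", Attrs: map[string]string{"url": "http://example.com/t.png"}},
			},
		},
	}

	if thumbs, ok := exts["media"]["thumbnail"]; ok && len(thumbs) > 0 {
		fmt.Println("thumbnail url:", thumbs[0].Attrs["url"])
	}
}
```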

150
vendor/github.com/mmcdole/gofeed/extensions/itunes.go generated vendored Normal file

@ -0,0 +1,150 @@
package ext
// ITunesFeedExtension is a set of iTunes extension
// fields for RSS feeds.
type ITunesFeedExtension struct {
Author string `json:"author,omitempty"`
Block string `json:"block,omitempty"`
Categories []*ITunesCategory `json:"categories,omitempty"`
Explicit string `json:"explicit,omitempty"`
Keywords string `json:"keywords,omitempty"`
Owner *ITunesOwner `json:"owner,omitempty"`
Subtitle string `json:"subtitle,omitempty"`
Summary string `json:"summary,omitempty"`
Image string `json:"image,omitempty"`
Complete string `json:"complete,omitempty"`
NewFeedURL string `json:"newFeedUrl,omitempty"`
Type string `json:"type,omitempty"`
}
// ITunesItemExtension is a set of iTunes extension
// fields for RSS items.
type ITunesItemExtension struct {
Author string `json:"author,omitempty"`
Block string `json:"block,omitempty"`
Duration string `json:"duration,omitempty"`
Explicit string `json:"explicit,omitempty"`
Keywords string `json:"keywords,omitempty"`
Subtitle string `json:"subtitle,omitempty"`
Summary string `json:"summary,omitempty"`
Image string `json:"image,omitempty"`
IsClosedCaptioned string `json:"isClosedCaptioned,omitempty"`
Episode string `json:"episode,omitempty"`
Season string `json:"season,omitempty"`
Order string `json:"order,omitempty"`
EpisodeType string `json:"episodeType,omitempty"`
}
// ITunesCategory is a category element for itunes feeds.
type ITunesCategory struct {
Text string `json:"text,omitempty"`
Subcategory *ITunesCategory `json:"subcategory,omitempty"`
}
// ITunesOwner is the owner of a particular itunes feed.
type ITunesOwner struct {
Email string `json:"email,omitempty"`
Name string `json:"name,omitempty"`
}
// NewITunesFeedExtension creates an ITunesFeedExtension given an
// extension map for the "itunes" key.
func NewITunesFeedExtension(extensions map[string][]Extension) *ITunesFeedExtension {
feed := &ITunesFeedExtension{}
feed.Author = parseTextExtension("author", extensions)
feed.Block = parseTextExtension("block", extensions)
feed.Explicit = parseTextExtension("explicit", extensions)
feed.Keywords = parseTextExtension("keywords", extensions)
feed.Subtitle = parseTextExtension("subtitle", extensions)
feed.Summary = parseTextExtension("summary", extensions)
feed.Image = parseImage(extensions)
feed.Complete = parseTextExtension("complete", extensions)
feed.NewFeedURL = parseTextExtension("new-feed-url", extensions)
feed.Categories = parseCategories(extensions)
feed.Owner = parseOwner(extensions)
feed.Type = parseTextExtension("type", extensions)
return feed
}
// NewITunesItemExtension creates an ITunesItemExtension given an
// extension map for the "itunes" key.
func NewITunesItemExtension(extensions map[string][]Extension) *ITunesItemExtension {
entry := &ITunesItemExtension{}
entry.Author = parseTextExtension("author", extensions)
entry.Block = parseTextExtension("block", extensions)
entry.Duration = parseTextExtension("duration", extensions)
entry.Explicit = parseTextExtension("explicit", extensions)
entry.Subtitle = parseTextExtension("subtitle", extensions)
entry.Summary = parseTextExtension("summary", extensions)
entry.Keywords = parseTextExtension("keywords", extensions)
entry.Image = parseImage(extensions)
entry.IsClosedCaptioned = parseTextExtension("isClosedCaptioned", extensions)
entry.Episode = parseTextExtension("episode", extensions)
entry.Season = parseTextExtension("season", extensions)
entry.Order = parseTextExtension("order", extensions)
entry.EpisodeType = parseTextExtension("episodeType", extensions)
return entry
}
func parseImage(extensions map[string][]Extension) (image string) {
if extensions == nil {
return
}
matches, ok := extensions["image"]
if !ok || len(matches) == 0 {
return
}
image = matches[0].Attrs["href"]
return
}
func parseOwner(extensions map[string][]Extension) (owner *ITunesOwner) {
if extensions == nil {
return
}
matches, ok := extensions["owner"]
if !ok || len(matches) == 0 {
return
}
owner = &ITunesOwner{}
if name, ok := matches[0].Children["name"]; ok {
owner.Name = name[0].Value
}
if email, ok := matches[0].Children["email"]; ok {
owner.Email = email[0].Value
}
return
}
func parseCategories(extensions map[string][]Extension) (categories []*ITunesCategory) {
if extensions == nil {
return
}
matches, ok := extensions["category"]
if !ok || len(matches) == 0 {
return
}
categories = []*ITunesCategory{}
for _, cat := range matches {
c := &ITunesCategory{}
if text, ok := cat.Attrs["text"]; ok {
c.Text = text
}
if subs, ok := cat.Children["category"]; ok {
s := &ITunesCategory{}
if text, ok := subs[0].Attrs["text"]; ok {
s.Text = text
}
c.Subcategory = s
}
categories = append(categories, c)
}
return
}
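For podcast feeds, these iTunes fields surface on the translated gofeed.Feed and gofeed.Item as ITunesExt (see feed.go below). A hedged sketch, assuming the feed actually carries itunes: tags; the URL is a placeholder:

```
package main

import (
	"fmt"
	"log"

	"github.com/mmcdole/gofeed"
)

func main() {
	// Placeholder URL; substitute a real podcast RSS feed.
	feed, err := gofeed.NewParser().ParseURL("https://example.com/podcast.xml")
	if err != nil {
		log.Fatal(err)
	}

	if feed.ITunesExt != nil {
		fmt.Println("itunes:author:", feed.ITunesExt.Author)
	}
	for _, item := range feed.Items {
		if item.ITunesExt != nil {
			fmt.Println(item.Title, "-", item.ITunesExt.Duration)
		}
	}
}
```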

104
vendor/github.com/mmcdole/gofeed/feed.go generated vendored Normal file

@ -0,0 +1,104 @@
package gofeed
import (
"encoding/json"
"time"
"github.com/mmcdole/gofeed/extensions"
)
// Feed is the universal Feed type that atom.Feed
// and rss.Feed get translated to. It represents
// a web feed.
// Sorting with sort.Sort will order the Items by
// oldest to newest publish time.
type Feed struct {
Title string `json:"title,omitempty"`
Description string `json:"description,omitempty"`
Link string `json:"link,omitempty"`
FeedLink string `json:"feedLink,omitempty"`
Updated string `json:"updated,omitempty"`
UpdatedParsed *time.Time `json:"updatedParsed,omitempty"`
Published string `json:"published,omitempty"`
PublishedParsed *time.Time `json:"publishedParsed,omitempty"`
Author *Person `json:"author,omitempty"`
Language string `json:"language,omitempty"`
Image *Image `json:"image,omitempty"`
Copyright string `json:"copyright,omitempty"`
Generator string `json:"generator,omitempty"`
Categories []string `json:"categories,omitempty"`
DublinCoreExt *ext.DublinCoreExtension `json:"dcExt,omitempty"`
ITunesExt *ext.ITunesFeedExtension `json:"itunesExt,omitempty"`
Extensions ext.Extensions `json:"extensions,omitempty"`
Custom map[string]string `json:"custom,omitempty"`
Items []*Item `json:"items"`
FeedType string `json:"feedType"`
FeedVersion string `json:"feedVersion"`
}
func (f Feed) String() string {
json, _ := json.MarshalIndent(f, "", " ")
return string(json)
}
// Item is the universal Item type that atom.Entry
// and rss.Item get translated to. It represents
// a single entry in a given feed.
type Item struct {
Title string `json:"title,omitempty"`
Description string `json:"description,omitempty"`
Content string `json:"content,omitempty"`
Link string `json:"link,omitempty"`
Updated string `json:"updated,omitempty"`
UpdatedParsed *time.Time `json:"updatedParsed,omitempty"`
Published string `json:"published,omitempty"`
PublishedParsed *time.Time `json:"publishedParsed,omitempty"`
Author *Person `json:"author,omitempty"`
GUID string `json:"guid,omitempty"`
Image *Image `json:"image,omitempty"`
Categories []string `json:"categories,omitempty"`
Enclosures []*Enclosure `json:"enclosures,omitempty"`
DublinCoreExt *ext.DublinCoreExtension `json:"dcExt,omitempty"`
ITunesExt *ext.ITunesItemExtension `json:"itunesExt,omitempty"`
Extensions ext.Extensions `json:"extensions,omitempty"`
Custom map[string]string `json:"custom,omitempty"`
}
// Person is an individual specified in a feed
// (e.g. an author)
type Person struct {
Name string `json:"name,omitempty"`
Email string `json:"email,omitempty"`
}
// Image is an image that is the artwork for a given
// feed or item.
type Image struct {
URL string `json:"url,omitempty"`
Title string `json:"title,omitempty"`
}
// Enclosure is a file associated with a given Item.
type Enclosure struct {
URL string `json:"url,omitempty"`
Length string `json:"length,omitempty"`
Type string `json:"type,omitempty"`
}
// Len returns the length of Items.
func (f Feed) Len() int {
return len(f.Items)
}
// Less compares PublishedParsed of Items[i], Items[k]
// and returns true if Items[i] is less than Items[k].
func (f Feed) Less(i, k int) bool {
return f.Items[i].PublishedParsed.Before(
*f.Items[k].PublishedParsed,
)
}
// Swap swaps Items[i] and Items[k].
func (f Feed) Swap(i, k int) {
f.Items[i], f.Items[k] = f.Items[k], f.Items[i]
}
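Since Feed implements sort.Interface over Items (Len/Less/Swap above), a parsed feed can be ordered oldest-to-newest with the standard library. Less dereferences PublishedParsed, so this sketch (with a placeholder URL) only sorts when every item has a parsed publish date:

```
package main

import (
	"fmt"
	"log"
	"sort"

	"github.com/mmcdole/gofeed"
)

func main() {
	// Placeholder URL; any RSS or Atom feed works.
	feed, err := gofeed.NewParser().ParseURL("https://example.com/feed.xml")
	if err != nil {
		log.Fatal(err)
	}

	// Less panics on a nil PublishedParsed, so only sort when
	// every item carries a parsed publish date.
	sortable := true
	for _, it := range feed.Items {
		if it.PublishedParsed == nil {
			sortable = false
			break
		}
	}
	if sortable {
		sort.Sort(*feed) // oldest first, per Less above
	}

	for _, it := range feed.Items {
		fmt.Println(it.Published, it.Title)
	}
}
```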

12
vendor/github.com/mmcdole/gofeed/go.mod generated vendored Normal file

@ -0,0 +1,12 @@
module github.com/mmcdole/gofeed
require (
github.com/PuerkitoBio/goquery v1.5.0
github.com/codegangsta/cli v1.20.0
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/stretchr/testify v1.2.2
golang.org/x/net v0.0.0-20181220203305-927f97764cc3
golang.org/x/text v0.3.0
)

20
vendor/github.com/mmcdole/gofeed/go.sum generated vendored Normal file

@ -0,0 +1,20 @@
github.com/PuerkitoBio/goquery v1.5.0 h1:uGvmFXOA73IKluu/F84Xd1tt/z07GYm8X49XKHP7EJk=
github.com/PuerkitoBio/goquery v1.5.0/go.mod h1:qD2PgZ9lccMbQlc7eEOjaeRlFQON7xY8kdmcsrnKqMg=
github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
github.com/codegangsta/cli v1.20.0 h1:iX1FXEgwzd5+XN6wk5cVHOGQj6Q3Dcp20lUeS4lHNTw=
github.com/codegangsta/cli v1.20.0/go.mod h1:/qJNoX69yVSKu5o4jLyXAENLRyk1uhi7zkbQ3slBdOA=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf h1:sWGE2v+hO0Nd4yFU/S/mDBM5plIU8v/Qhfz41hkDIAI=
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf/go.mod h1:pasqhqstspkosTneA62Nc+2p9SOBBYAPbnmRRWPQ0V8=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3 h1:eH6Eip3UpmR+yM/qI9Ijluzb1bNv/cAU/n+6l8tRSis=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=


@ -0,0 +1,19 @@
package shared
import (
"io"
"golang.org/x/net/html/charset"
)
func NewReaderLabel(label string, input io.Reader) (io.Reader, error) {
conv, err := charset.NewReaderLabel(label, input)
if err != nil {
return nil, err
}
// Optionally wrap the charset decoder reader with an XML sanitizer (disabled here):
//clean := NewXMLSanitizerReader(conv)
return conv, nil
}
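NewReaderLabel is a thin wrapper around golang.org/x/net/html/charset, which converts a stream from the encoding named in the XML declaration (or HTTP header) to UTF-8. A standalone sketch of the underlying call; the Latin-1 bytes are made up:

```
package main

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"log"

	"golang.org/x/net/html/charset"
)

func main() {
	// "café" encoded as ISO-8859-1 (0xE9 is é), for illustration only.
	latin1 := []byte{'c', 'a', 'f', 0xE9}

	r, err := charset.NewReaderLabel("iso-8859-1", bytes.NewReader(latin1))
	if err != nil {
		log.Fatal(err)
	}
	utf8Bytes, err := ioutil.ReadAll(r)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(utf8Bytes)) // café
}
```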


@ -0,0 +1,219 @@
package shared
import (
"fmt"
"strings"
"time"
)
// DateFormats taken from github.com/mjibson/goread
var dateFormats = []string{
time.RFC822, // RSS
time.RFC822Z, // RSS
time.RFC3339, // Atom
time.UnixDate,
time.RubyDate,
time.RFC850,
time.RFC1123Z,
time.RFC1123,
time.ANSIC,
"Mon, January 2 2006 15:04:05 -0700",
"Mon, Jan 2 2006 15:04:05 -700",
"Mon, Jan 2 2006 15:04:05 -0700",
"Mon Jan 2 15:04 2006",
"Mon Jan 02, 2006 3:04 pm",
"Mon Jan 02 2006 15:04:05 -0700",
"Monday, January 2, 2006 03:04 PM",
"Monday, January 2, 2006",
"Monday, January 02, 2006",
"Monday, 2 January 2006 15:04:05 -0700",
"Monday, 2 Jan 2006 15:04:05 -0700",
"Monday, 02 January 2006 15:04:05 -0700",
"Monday, 02 January 2006 15:04:05",
"Mon, 2 January 2006, 15:04 -0700",
"Mon, 2 January 2006 15:04:05 -0700",
"Mon, 2 January 2006",
"Mon, 2 Jan 2006 3:04:05 PM -0700",
"Mon, 2 Jan 2006 15:4:5 -0700 GMT",
"Mon, 2, Jan 2006 15:4",
"Mon, 2 Jan 2006, 15:04 -0700",
"Mon, 2 Jan 2006 15:04 -0700",
"Mon, 2 Jan 2006 15:04:05 UT",
"Mon, 2 Jan 2006 15:04:05 -0700 MST",
"Mon, 2 Jan 2006 15:04:05-0700",
"Mon, 2 Jan 2006 15:04:05 -0700",
"Mon, 2 Jan 2006 15:04:05",
"Mon, 2 Jan 2006 15:04",
"Mon,2 Jan 2006",
"Mon, 2 Jan 2006",
"Mon, 2 Jan 06 15:04:05 -0700",
"Mon, 2006-01-02 15:04",
"Mon, 02 January 2006",
"Mon, 02 Jan 2006 15 -0700",
"Mon, 02 Jan 2006 15:04 -0700",
"Mon, 02 Jan 2006 15:04:05 Z",
"Mon, 02 Jan 2006 15:04:05 UT",
"Mon, 02 Jan 2006 15:04:05 MST-07:00",
"Mon, 02 Jan 2006 15:04:05 MST -0700",
"Mon, 02 Jan 2006 15:04:05 GMT-0700",
"Mon,02 Jan 2006 15:04:05 -0700",
"Mon, 02 Jan 2006 15:04:05 -0700",
"Mon, 02 Jan 2006 15:04:05 -07:00",
"Mon, 02 Jan 2006 15:04:05 --0700",
"Mon 02 Jan 2006 15:04:05 -0700",
"Mon, 02 Jan 2006 15:04:05 -07",
"Mon, 02 Jan 2006 15:04:05 00",
"Mon, 02 Jan 2006 15:04:05",
"Mon, 02 Jan 2006",
"January 2, 2006 3:04 PM",
"January 2, 2006, 3:04 p.m.",
"January 2, 2006 15:04:05",
"January 2, 2006 03:04 PM",
"January 2, 2006",
"January 02, 2006 15:04",
"January 02, 2006 03:04 PM",
"January 02, 2006",
"Jan 2, 2006 3:04:05 PM",
"Jan 2, 2006",
"Jan 02 2006 03:04:05PM",
"Jan 02, 2006",
"6/1/2 15:04",
"6-1-2 15:04",
"2 January 2006 15:04:05 -0700",
"2 January 2006",
"2 Jan 2006 15:04:05 Z",
"2 Jan 2006 15:04:05 -0700",
"2 Jan 2006",
"2.1.2006 15:04:05",
"2/1/2006",
"2-1-2006",
"2006 January 02",
"2006-1-2T15:04:05Z",
"2006-1-2 15:04:05",
"2006-1-2",
"2006-1-02T15:04:05Z",
"2006-01-02T15:04Z",
"2006-01-02T15:04-07:00",
"2006-01-02T15:04:05Z",
"2006-01-02T15:04:05-07:00:00",
"2006-01-02T15:04:05:-0700",
"2006-01-02T15:04:05-0700",
"2006-01-02T15:04:05-07:00",
"2006-01-02T15:04:05 -0700",
"2006-01-02T15:04:05:00",
"2006-01-02T15:04:05",
"2006-01-02 at 15:04:05",
"2006-01-02 15:04:05Z",
"2006-01-02 15:04:05-0700",
"2006-01-02 15:04:05-07:00",
"2006-01-02 15:04:05 -0700",
"2006-01-02 15:04",
"2006-01-02 00:00:00.0 15:04:05.0 -0700",
"2006/01/02",
"2006-01-02",
"15:04 02.01.2006 -0700",
"1/2/2006 3:04:05 PM",
"1/2/2006",
"06/1/2 15:04",
"06-1-2 15:04",
"02 Monday, Jan 2006 15:04",
"02 Jan 2006 15:04:05 UT",
"02 Jan 2006 15:04:05 -0700",
"02 Jan 2006 15:04:05",
"02 Jan 2006",
"02.01.2006 15:04:05",
"02/01/2006 15:04:05",
"02.01.2006 15:04",
"02/01/2006 - 15:04",
"02.01.2006 -0700",
"02/01/2006",
"02-01-2006",
"01/02/2006 3:04 PM",
"01/02/2006 - 15:04",
"01/02/2006",
"01-02-2006",
}
// Named zones cannot be loaded consistently, so handle them separately
var dateFormatsWithNamedZone = []string{
"Mon, January 02, 2006, 15:04:05 MST",
"Mon, January 02, 2006 15:04:05 MST",
"Mon, Jan 2, 2006 15:04 MST",
"Mon, Jan 2 2006 15:04 MST",
"Mon, Jan 2, 2006 15:04:05 MST",
"Mon Jan 2 15:04:05 2006 MST",
"Mon, Jan 02,2006 15:04:05 MST",
"Monday, January 2, 2006 15:04:05 MST",
"Monday, 2 January 2006 15:04:05 MST",
"Monday, 2 Jan 2006 15:04:05 MST",
"Monday, 02 January 2006 15:04:05 MST",
"Mon, 2 January 2006 15:04 MST",
"Mon, 2 January 2006, 15:04:05 MST",
"Mon, 2 January 2006 15:04:05 MST",
"Mon, 2 Jan 2006 15:4:5 MST",
"Mon, 2 Jan 2006 15:04 MST",
"Mon, 2 Jan 2006 15:04:05MST",
"Mon, 2 Jan 2006 15:04:05 MST",
"Mon 2 Jan 2006 15:04:05 MST",
"mon,2 Jan 2006 15:04:05 MST",
"Mon, 2 Jan 15:04:05 MST",
"Mon, 2 Jan 06 15:04:05 MST",
"Mon,02 January 2006 14:04:05 MST",
"Mon, 02 Jan 2006 3:04:05 PM MST",
"Mon,02 Jan 2006 15:04 MST",
"Mon, 02 Jan 2006 15:04 MST",
"Mon, 02 Jan 2006, 15:04:05 MST",
"Mon, 02 Jan 2006 15:04:05MST",
"Mon, 02 Jan 2006 15:04:05 MST",
"Mon , 02 Jan 2006 15:04:05 MST",
"Mon, 02 Jan 06 15:04:05 MST",
"January 2, 2006 15:04:05 MST",
"January 02, 2006 15:04:05 MST",
"Jan 2, 2006 3:04:05 PM MST",
"Jan 2, 2006 15:04:05 MST",
"2 January 2006 15:04:05 MST",
"2 Jan 2006 15:04:05 MST",
"2006-01-02 15:04:05 MST",
"1/2/2006 3:04:05 PM MST",
"1/2/2006 15:04:05 MST",
"02 Jan 2006 15:04 MST",
"02 Jan 2006 15:04:05 MST",
"02/01/2006 15:04 MST",
"02-01-2006 15:04:05 MST",
"01/02/2006 15:04:05 MST",
}
// ParseDate parses a given date string using a large
// list of commonly found feed date formats.
func ParseDate(ds string) (t time.Time, err error) {
d := strings.TrimSpace(ds)
if d == "" {
return t, fmt.Errorf("Date string is empty")
}
for _, f := range dateFormats {
if t, err = time.Parse(f, d); err == nil {
return
}
}
for _, f := range dateFormatsWithNamedZone {
t, err = time.Parse(f, d)
if err != nil {
continue
}
// This is a format match! Now try to load the timezone name
loc, err := time.LoadLocation(t.Location().String())
if err != nil {
// We couldn't load the TZ name. Just use UTC instead...
return t, nil
}
if t, err = time.ParseInLocation(f, ds, loc); err == nil {
return t, nil
}
// This should not be reachable
}
err = fmt.Errorf("Failed to parse date: %s", ds)
return
}
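ParseDate sits in gofeed's internal/shared package, so it cannot be imported from outside the module; the technique, though, is a plain fallback over time.Parse layouts. A trimmed-down standalone sketch of the same idea (only a handful of layouts, not the full list above):

```
package main

import (
	"fmt"
	"time"
)

// parseFeedDate tries a few common feed date layouts in order and
// returns the first successful parse.
func parseFeedDate(s string) (time.Time, error) {
	layouts := []string{
		time.RFC1123Z,         // RSS with numeric zone
		time.RFC1123,          // RSS with named zone
		time.RFC3339,          // Atom
		"2006-01-02 15:04:05", // common sloppy variant
	}
	var lastErr error
	for _, l := range layouts {
		t, err := time.Parse(l, s)
		if err == nil {
			return t, nil
		}
		lastErr = err
	}
	return time.Time{}, lastErr
}

func main() {
	t, err := parseFeedDate("Mon, 02 Jan 2006 15:04:05 -0700")
	fmt.Println(t, err)
}
```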


@ -0,0 +1,176 @@
package shared
import (
"strings"
"github.com/mmcdole/gofeed/extensions"
"github.com/mmcdole/goxpp"
)
// IsExtension returns whether or not the current
// XML element is an extension element (if it has a
// non-empty prefix).
func IsExtension(p *xpp.XMLPullParser) bool {
space := strings.TrimSpace(p.Space)
if prefix, ok := p.Spaces[space]; ok {
return !(prefix == "" || prefix == "rss" || prefix == "rdf" || prefix == "content")
}
return p.Space != ""
}
// ParseExtension parses the current element of the
// XMLPullParser as an extension element and updates
// the extension map
func ParseExtension(fe ext.Extensions, p *xpp.XMLPullParser) (ext.Extensions, error) {
prefix := prefixForNamespace(p.Space, p)
result, err := parseExtensionElement(p)
if err != nil {
return nil, err
}
// Ensure the extension prefix map exists
if _, ok := fe[prefix]; !ok {
fe[prefix] = map[string][]ext.Extension{}
}
// Ensure the extension element slice exists
if _, ok := fe[prefix][p.Name]; !ok {
fe[prefix][p.Name] = []ext.Extension{}
}
fe[prefix][p.Name] = append(fe[prefix][p.Name], result)
return fe, nil
}
func parseExtensionElement(p *xpp.XMLPullParser) (e ext.Extension, err error) {
if err = p.Expect(xpp.StartTag, "*"); err != nil {
return e, err
}
e.Name = p.Name
e.Children = map[string][]ext.Extension{}
e.Attrs = map[string]string{}
for _, attr := range p.Attrs {
// TODO: Is it alright that we are stripping
// namespace information from attributes?
e.Attrs[attr.Name.Local] = attr.Value
}
for {
tok, err := p.Next()
if err != nil {
return e, err
}
if tok == xpp.EndTag {
break
}
if tok == xpp.StartTag {
child, err := parseExtensionElement(p)
if err != nil {
return e, err
}
if _, ok := e.Children[child.Name]; !ok {
e.Children[child.Name] = []ext.Extension{}
}
e.Children[child.Name] = append(e.Children[child.Name], child)
} else if tok == xpp.Text {
e.Value += p.Text
}
}
e.Value = strings.TrimSpace(e.Value)
if err = p.Expect(xpp.EndTag, e.Name); err != nil {
return e, err
}
return e, nil
}
func prefixForNamespace(space string, p *xpp.XMLPullParser) string {
// First we check if the global namespace map
// contains an entry for this namespace/prefix.
// This way we can use the canonical prefix for this
// ns instead of the one defined in the feed.
if prefix, ok := canonicalNamespaces[space]; ok {
return prefix
}
// Next we check if the feed itself defined this
// namespace and return it if we have a result.
if prefix, ok := p.Spaces[space]; ok {
return prefix
}
// Lastly, any namespace which is not defined in the
// feed will be the prefix itself when using Go's
// xml.Decoder.Token() method.
return space
}
// Namespaces taken from github.com/kurtmckee/feedparser
// These are used for determining canonical namespace prefixes
// for many of the popular RSS/Atom extensions.
//
// These canonical prefixes override any prefixes used in the feed itself.
var canonicalNamespaces = map[string]string{
"http://webns.net/mvcb/": "admin",
"http://purl.org/rss/1.0/modules/aggregation/": "ag",
"http://purl.org/rss/1.0/modules/annotate/": "annotate",
"http://media.tangent.org/rss/1.0/": "audio",
"http://backend.userland.com/blogChannelModule": "blogChannel",
"http://creativecommons.org/ns#license": "cc",
"http://web.resource.org/cc/": "cc",
"http://cyber.law.harvard.edu/rss/creativeCommonsRssModule.html": "creativeCommons",
"http://backend.userland.com/creativeCommonsRssModule": "creativeCommons",
"http://purl.org/rss/1.0/modules/company": "co",
"http://purl.org/rss/1.0/modules/content/": "content",
"http://my.theinfo.org/changed/1.0/rss/": "cp",
"http://purl.org/dc/elements/1.1/": "dc",
"http://purl.org/dc/terms/": "dcterms",
"http://purl.org/rss/1.0/modules/email/": "email",
"http://purl.org/rss/1.0/modules/event/": "ev",
"http://rssnamespace.org/feedburner/ext/1.0": "feedburner",
"http://freshmeat.net/rss/fm/": "fm",
"http://xmlns.com/foaf/0.1/": "foaf",
"http://www.w3.org/2003/01/geo/wgs84_pos#": "geo",
"http://www.georss.org/georss": "georss",
"http://www.opengis.net/gml": "gml",
"http://postneo.com/icbm/": "icbm",
"http://purl.org/rss/1.0/modules/image/": "image",
"http://www.itunes.com/DTDs/PodCast-1.0.dtd": "itunes",
"http://example.com/DTDs/PodCast-1.0.dtd": "itunes",
"http://purl.org/rss/1.0/modules/link/": "l",
"http://search.yahoo.com/mrss": "media",
"http://search.yahoo.com/mrss/": "media",
"http://madskills.com/public/xml/rss/module/pingback/": "pingback",
"http://prismstandard.org/namespaces/1.2/basic/": "prism",
"http://www.w3.org/1999/02/22-rdf-syntax-ns#": "rdf",
"http://www.w3.org/2000/01/rdf-schema#": "rdfs",
"http://purl.org/rss/1.0/modules/reference/": "ref",
"http://purl.org/rss/1.0/modules/richequiv/": "reqv",
"http://purl.org/rss/1.0/modules/search/": "search",
"http://purl.org/rss/1.0/modules/slash/": "slash",
"http://schemas.xmlsoap.org/soap/envelope/": "soap",
"http://purl.org/rss/1.0/modules/servicestatus/": "ss",
"http://hacks.benhammersley.com/rss/streaming/": "str",
"http://purl.org/rss/1.0/modules/subscription/": "sub",
"http://purl.org/rss/1.0/modules/syndication/": "sy",
"http://schemas.pocketsoap.com/rss/myDescModule/": "szf",
"http://purl.org/rss/1.0/modules/taxonomy/": "taxo",
"http://purl.org/rss/1.0/modules/threading/": "thr",
"http://purl.org/rss/1.0/modules/textinput/": "ti",
"http://madskills.com/public/xml/rss/module/trackback/": "trackback",
"http://wellformedweb.org/commentAPI/": "wfw",
"http://purl.org/rss/1.0/modules/wiki/": "wiki",
"http://www.w3.org/1999/xhtml": "xhtml",
"http://www.w3.org/1999/xlink": "xlink",
"http://www.w3.org/XML/1998/namespace": "xml",
"http://podlove.org/simple-chapters": "psc",
}


@ -0,0 +1,204 @@
package shared
import (
"bytes"
"errors"
"fmt"
"regexp"
"strconv"
"strings"
xpp "github.com/mmcdole/goxpp"
)
var (
emailNameRgx = regexp.MustCompile(`^([^@]+@[^\s]+)\s+\(([^@]+)\)$`)
nameEmailRgx = regexp.MustCompile(`^([^@]+)\s+\(([^@]+@[^)]+)\)$`)
nameOnlyRgx = regexp.MustCompile(`^([^@()]+)$`)
emailOnlyRgx = regexp.MustCompile(`^([^@()]+@[^@()]+)$`)
TruncatedEntity = errors.New("truncated entity")
InvalidNumericReference = errors.New("invalid numeric reference")
)
const CDATA_START = "<![CDATA["
const CDATA_END = "]]>"
// ParseText is a helper function for parsing the text
// from the current element of the XMLPullParser.
// This function can handle parsing naked XML text from
// an element.
func ParseText(p *xpp.XMLPullParser) (string, error) {
var text struct {
Type string `xml:"type,attr"`
InnerXML string `xml:",innerxml"`
}
err := p.DecodeElement(&text)
if err != nil {
return "", err
}
result := text.InnerXML
result = strings.TrimSpace(result)
if strings.Contains(result, CDATA_START) {
return StripCDATA(result), nil
}
return DecodeEntities(result)
}
// StripCDATA removes CDATA tags from the string;
// content outside of CDATA tags is passed through DecodeEntities.
func StripCDATA(str string) string {
buf := bytes.NewBuffer([]byte{})
curr := 0
for curr < len(str) {
start := indexAt(str, CDATA_START, curr)
if start == -1 {
dec, _ := DecodeEntities(str[curr:])
buf.Write([]byte(dec))
return buf.String()
}
end := indexAt(str, CDATA_END, start)
if end == -1 {
dec, _ := DecodeEntities(str[curr:])
buf.Write([]byte(dec))
return buf.String()
}
buf.Write([]byte(str[start+len(CDATA_START) : end]))
curr = curr + end + len(CDATA_END)
}
return buf.String()
}
// DecodeEntities decodes escaped XML entities
// in a string and returns the unescaped string
func DecodeEntities(str string) (string, error) {
data := []byte(str)
buf := bytes.NewBuffer([]byte{})
for len(data) > 0 {
// Find the next entity
idx := bytes.IndexByte(data, '&')
if idx == -1 {
buf.Write(data)
break
}
buf.Write(data[:idx])
data = data[idx:]
// If there is only the '&' left here
if len(data) == 1 {
buf.Write(data)
return buf.String(), nil
}
// Find the end of the entity
end := bytes.IndexByte(data, ';')
if end == -1 {
// it's not an entity, just a plain old '&', possibly with extra bytes
buf.Write(data)
return buf.String(), nil
}
// Check if there is a space somewhere within the 'entity'.
// If there is, then skip the whole thing since it's not a real entity.
if strings.Contains(string(data[1:end]), " ") {
buf.Write(data)
return buf.String(), nil
} else {
if data[1] == '#' {
// Numerical character reference
var str string
base := 10
if len(data) > 2 && data[2] == 'x' {
str = string(data[3:end])
base = 16
} else {
str = string(data[2:end])
}
i, err := strconv.ParseUint(str, base, 32)
if err != nil {
return "", InvalidNumericReference
}
buf.WriteRune(rune(i))
} else {
// Predefined entity
name := string(data[1:end])
var c byte
switch name {
case "lt":
c = '<'
case "gt":
c = '>'
case "quot":
c = '"'
case "apos":
c = '\''
case "amp":
c = '&'
default:
return "", fmt.Errorf("unknown predefined "+
"entity &%s;", name)
}
buf.WriteByte(c)
}
}
// Skip the entity
data = data[end+1:]
}
return buf.String(), nil
}
// ParseNameAddress parses name/email strings commonly
// found in RSS feeds of the format "Example Name (example@site.com)"
// and other variations of this format.
func ParseNameAddress(nameAddressText string) (name string, address string) {
if nameAddressText == "" {
return
}
if emailNameRgx.MatchString(nameAddressText) {
result := emailNameRgx.FindStringSubmatch(nameAddressText)
address = result[1]
name = result[2]
} else if nameEmailRgx.MatchString(nameAddressText) {
result := nameEmailRgx.FindStringSubmatch(nameAddressText)
name = result[1]
address = result[2]
} else if nameOnlyRgx.MatchString(nameAddressText) {
result := nameOnlyRgx.FindStringSubmatch(nameAddressText)
name = result[1]
} else if emailOnlyRgx.MatchString(nameAddressText) {
result := emailOnlyRgx.FindStringSubmatch(nameAddressText)
address = result[1]
}
return
}
func indexAt(str, substr string, start int) int {
idx := strings.Index(str[start:], substr)
if idx > -1 {
idx += start
}
return idx
}
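ParseNameAddress is also internal to gofeed, but the pattern it describes ("Example Name (example@site.com)") is easy to reproduce. A simplified standalone sketch that only handles the name-then-email form:

```
package main

import (
	"fmt"
	"regexp"
)

// nameEmail matches the common "Name (email@host)" author format;
// a simplified standalone version of the helper above.
var nameEmail = regexp.MustCompile(`^([^@]+)\s+\(([^@]+@[^)]+)\)$`)

func main() {
	if m := nameEmail.FindStringSubmatch("John Doe (john@example.com)"); m != nil {
		fmt.Println("name:", m[1], "email:", m[2])
	}
}
```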


@ -0,0 +1,258 @@
package shared
import (
"bytes"
"fmt"
"golang.org/x/net/html"
"net/url"
"strings"
"github.com/mmcdole/goxpp"
)
var (
// HTML attributes which contain URIs
// https://pythonhosted.org/feedparser/resolving-relative-links.html
// To catch every possible URI attribute is non-trivial:
// https://stackoverflow.com/questions/2725156/complete-list-of-html-tag-attributes-which-have-a-url-value
htmlURIAttrs = map[string]bool{
"action": true,
"background": true,
"cite": true,
"codebase": true,
"data": true,
"href": true,
"poster": true,
"profile": true,
"scheme": true,
"src": true,
"uri": true,
"usemap": true,
}
)
type urlStack []*url.URL
func (s *urlStack) push(u *url.URL) {
*s = append([]*url.URL{u}, *s...)
}
func (s *urlStack) pop() *url.URL {
if s == nil || len(*s) == 0 {
return nil
}
var top *url.URL
top, *s = (*s)[0], (*s)[1:]
return top
}
func (s *urlStack) top() *url.URL {
if s == nil || len(*s) == 0 {
return nil
}
return (*s)[0]
}
type XMLBase struct {
stack urlStack
URIAttrs map[string]bool
}
// FindRoot iterates through the tokens of an xml document until
// it encounters its first StartTag event. It returns an error
// if it reaches EndDocument before finding a tag.
func (b *XMLBase) FindRoot(p *xpp.XMLPullParser) (event xpp.XMLEventType, err error) {
for {
event, err = b.NextTag(p)
if err != nil {
return event, err
}
if event == xpp.StartTag {
break
}
if event == xpp.EndDocument {
return event, fmt.Errorf("Failed to find root node before document end.")
}
}
return
}
// XMLBase.NextTag iterates through the tokens until it reaches a StartTag or
// EndTag. It maintains the urlStack upon encountering StartTag and EndTags, so
// that the top of the stack (accessible through the CurrentBase() and
// CurrentBaseURL() methods) is the absolute base URI by which relative URIs
// should be resolved.
//
// NextTag is similar to goxpp's NextTag method except it won't throw an error
// if the next immediate token isn't a Start/EndTag. Instead, it will continue
// to consume tokens until it hits a Start/EndTag or EndDocument.
func (b *XMLBase) NextTag(p *xpp.XMLPullParser) (event xpp.XMLEventType, err error) {
for {
if p.Event == xpp.EndTag {
// Pop xml:base after each end tag
b.pop()
}
event, err = p.Next()
if err != nil {
return event, err
}
if event == xpp.EndTag {
break
}
if event == xpp.StartTag {
base := parseBase(p)
err = b.push(base)
if err != nil {
return
}
err = b.resolveAttrs(p)
if err != nil {
return
}
break
}
if event == xpp.EndDocument {
return event, fmt.Errorf("Failed to find NextTag before reaching the end of the document.")
}
}
return
}
func parseBase(p *xpp.XMLPullParser) string {
xmlURI := "http://www.w3.org/XML/1998/namespace"
for _, attr := range p.Attrs {
if attr.Name.Local == "base" && attr.Name.Space == xmlURI {
return attr.Value
}
}
return ""
}
func (b *XMLBase) push(base string) error {
newURL, err := url.Parse(base)
if err != nil {
return err
}
topURL := b.CurrentBaseURL()
if topURL != nil {
newURL = topURL.ResolveReference(newURL)
}
b.stack.push(newURL)
return nil
}
// returns the popped base URL
func (b *XMLBase) pop() string {
url := b.stack.pop()
if url != nil {
return url.String()
}
return ""
}
func (b *XMLBase) CurrentBaseURL() *url.URL {
return b.stack.top()
}
func (b *XMLBase) CurrentBase() string {
if url := b.CurrentBaseURL(); url != nil {
return url.String()
}
return ""
}
// resolve the given string as a URL relative to current base
func (b *XMLBase) ResolveURL(u string) (string, error) {
if b.CurrentBase() == "" {
return u, nil
}
relURL, err := url.Parse(u)
if err != nil {
return u, err
}
curr := b.CurrentBaseURL()
if curr.Path != "" && u != "" && curr.Path[len(curr.Path)-1] != '/' {
// There's no reason someone would use a path in xml:base if they
// didn't mean for it to be a directory
curr.Path = curr.Path + "/"
}
absURL := b.CurrentBaseURL().ResolveReference(relURL)
return absURL.String(), nil
}
// resolve relative URI attributes according to xml:base
func (b *XMLBase) resolveAttrs(p *xpp.XMLPullParser) error {
for i, attr := range p.Attrs {
lowerName := strings.ToLower(attr.Name.Local)
if b.URIAttrs[lowerName] {
absURL, err := b.ResolveURL(attr.Value)
if err != nil {
return err
}
p.Attrs[i].Value = absURL
}
}
return nil
}
// ResolveHTML transforms HTML by resolving any relative URIs in attributes.
// If an error occurs during parsing or serialization, then the original string
// is returned along with the error.
func (b *XMLBase) ResolveHTML(relHTML string) (string, error) {
if b.CurrentBase() == "" {
return relHTML, nil
}
htmlReader := strings.NewReader(relHTML)
doc, err := html.Parse(htmlReader)
if err != nil {
return relHTML, err
}
var visit func(*html.Node)
// recursively traverse HTML resolving any relative URIs in attributes
visit = func(n *html.Node) {
if n.Type == html.ElementNode {
for i, a := range n.Attr {
if htmlURIAttrs[a.Key] {
absVal, err := b.ResolveURL(a.Val)
if err == nil {
n.Attr[i].Val = absVal
}
break
}
}
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
visit(c)
}
}
visit(doc)
var w bytes.Buffer
err = html.Render(&w, doc)
if err != nil {
return relHTML, err
}
// html.Render() always writes a complete html5 document, so strip the html
// and body tags
absHTML := w.String()
absHTML = strings.TrimPrefix(absHTML, "<html><head></head><body>")
absHTML = strings.TrimSuffix(absHTML, "</body></html>")
return absHTML, err
}
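The xml:base machinery above ultimately rests on net/url's ResolveReference: each xml:base value is pushed onto a stack and relative URIs are resolved against the top of it. A tiny standalone illustration of that resolution step:

```
package main

import (
	"fmt"
	"log"
	"net/url"
)

func main() {
	base, err := url.Parse("http://example.com/feed/")
	if err != nil {
		log.Fatal(err)
	}
	rel, err := url.Parse("../images/logo.png")
	if err != nil {
		log.Fatal(err)
	}
	// Prints http://example.com/images/logo.png
	fmt.Println(base.ResolveReference(rel))
}
```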


@ -0,0 +1,23 @@
package shared
import (
"io"
"golang.org/x/text/transform"
)
// NewXMLSanitizerReader creates an io.Reader that
// wraps another io.Reader and removes illegal xml
// characters from the io stream.
func NewXMLSanitizerReader(xml io.Reader) io.Reader {
isIllegal := func(r rune) bool {
return !(r == 0x09 ||
r == 0x0A ||
r == 0x0D ||
r >= 0x20 && r <= 0xDF77 ||
r >= 0xE000 && r <= 0xFFFD ||
r >= 0x10000 && r <= 0x10FFFF)
}
t := transform.Chain(transform.RemoveFunc(isIllegal))
return transform.NewReader(xml, t)
}
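The sanitizer drops runes that are not legal XML characters. The same filter can be written with strings.Map when the document is already in memory; this sketch uses the XML 1.0 character ranges (note the predicate above uses 0xDF77 as the upper bound of the first range, where the XML spec ends it at 0xD7FF):

```
package main

import (
	"fmt"
	"strings"
)

// dropIllegalXML removes runes outside the XML 1.0 character ranges.
func dropIllegalXML(s string) string {
	return strings.Map(func(r rune) rune {
		legal := r == 0x09 || r == 0x0A || r == 0x0D ||
			(r >= 0x20 && r <= 0xD7FF) ||
			(r >= 0xE000 && r <= 0xFFFD) ||
			(r >= 0x10000 && r <= 0x10FFFF)
		if legal {
			return r
		}
		return -1 // dropped
	}, s)
}

func main() {
	fmt.Println(dropIllegalXML("ok\x00\x0Btext")) // oktext
}
```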

165
vendor/github.com/mmcdole/gofeed/parser.go generated vendored Normal file

@ -0,0 +1,165 @@
package gofeed
import (
"bytes"
"context"
"errors"
"fmt"
"io"
"net/http"
"strings"
"github.com/mmcdole/gofeed/atom"
"github.com/mmcdole/gofeed/rss"
)
// ErrFeedTypeNotDetected is returned when the detection system cannot figure
// out the feed format
var ErrFeedTypeNotDetected = errors.New("Failed to detect feed type")
// HTTPError represents an HTTP error returned by a server.
type HTTPError struct {
StatusCode int
Status string
}
func (err HTTPError) Error() string {
return fmt.Sprintf("http error: %s", err.Status)
}
// Parser is a universal feed parser that detects
// a given feed type, parses it, and translates it
// to the universal feed type.
type Parser struct {
AtomTranslator Translator
RSSTranslator Translator
Client *http.Client
rp *rss.Parser
ap *atom.Parser
}
// NewParser creates a universal feed parser.
func NewParser() *Parser {
fp := Parser{
rp: &rss.Parser{},
ap: &atom.Parser{},
}
return &fp
}
// Parse parses an RSS or Atom feed into
// the universal gofeed.Feed. It takes an
// io.Reader which should return the xml content.
func (f *Parser) Parse(feed io.Reader) (*Feed, error) {
// Wrap the feed io.Reader in a io.TeeReader
// so we can capture all the bytes read by the
// DetectFeedType function and construct a new
// reader with those bytes intact for when we
// attempt to parse the feeds.
var buf bytes.Buffer
tee := io.TeeReader(feed, &buf)
feedType := DetectFeedType(tee)
// Glue the read bytes from the detect function
// back into a new reader
r := io.MultiReader(&buf, feed)
switch feedType {
case FeedTypeAtom:
return f.parseAtomFeed(r)
case FeedTypeRSS:
return f.parseRSSFeed(r)
}
return nil, ErrFeedTypeNotDetected
}
// ParseURL fetches the contents of a given url and
// attempts to parse the response into the universal feed type.
func (f *Parser) ParseURL(feedURL string) (feed *Feed, err error) {
return f.ParseURLWithContext(feedURL, context.Background())
}
// ParseURLWithContext fetches contents of a given url and
// attempts to parse the response into the universal feed type.
// The request can be canceled or timed out via the given context.
func (f *Parser) ParseURLWithContext(feedURL string, ctx context.Context) (feed *Feed, err error) {
client := f.httpClient()
req, err := http.NewRequest("GET", feedURL, nil)
if err != nil {
return nil, err
}
req = req.WithContext(ctx)
req.Header.Set("User-Agent", "Gofeed/1.0")
resp, err := client.Do(req)
if err != nil {
return nil, err
}
if resp != nil {
defer func() {
ce := resp.Body.Close()
if ce != nil {
err = ce
}
}()
}
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
return nil, HTTPError{
StatusCode: resp.StatusCode,
Status: resp.Status,
}
}
return f.Parse(resp.Body)
}
// ParseString parses a feed XML string into the
// universal feed type.
func (f *Parser) ParseString(feed string) (*Feed, error) {
return f.Parse(strings.NewReader(feed))
}
func (f *Parser) parseAtomFeed(feed io.Reader) (*Feed, error) {
af, err := f.ap.Parse(feed)
if err != nil {
return nil, err
}
return f.atomTrans().Translate(af)
}
func (f *Parser) parseRSSFeed(feed io.Reader) (*Feed, error) {
rf, err := f.rp.Parse(feed)
if err != nil {
return nil, err
}
return f.rssTrans().Translate(rf)
}
func (f *Parser) atomTrans() Translator {
if f.AtomTranslator != nil {
return f.AtomTranslator
}
f.AtomTranslator = &DefaultAtomTranslator{}
return f.AtomTranslator
}
func (f *Parser) rssTrans() Translator {
if f.RSSTranslator != nil {
return f.RSSTranslator
}
f.RSSTranslator = &DefaultRSSTranslator{}
return f.RSSTranslator
}
func (f *Parser) httpClient() *http.Client {
if f.Client != nil {
return f.Client
}
f.Client = &http.Client{}
return f.Client
}
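This universal Parser is the piece a feed bot such as Zorg actually calls: detect, parse, translate. A short usage sketch with a per-request timeout; the URL is a placeholder:

```
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/mmcdole/gofeed"
)

func main() {
	fp := gofeed.NewParser()

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	// Placeholder URL; any RSS or Atom feed works.
	feed, err := fp.ParseURLWithContext("https://example.com/feed.xml", ctx)
	if err != nil {
		// HTTP-level failures surface as gofeed.HTTPError.
		if he, ok := err.(gofeed.HTTPError); ok {
			log.Fatalf("server replied %d: %s", he.StatusCode, he.Status)
		}
		log.Fatal(err)
	}

	fmt.Println(feed.Title)
	for _, item := range feed.Items {
		fmt.Println(" *", item.Title, item.Link)
	}
}
```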

120
vendor/github.com/mmcdole/gofeed/rss/feed.go generated vendored Normal file

@ -0,0 +1,120 @@
package rss
import (
"encoding/json"
"time"
"github.com/mmcdole/gofeed/extensions"
)
// Feed is an RSS Feed
type Feed struct {
Title string `json:"title,omitempty"`
Link string `json:"link,omitempty"`
Description string `json:"description,omitempty"`
Language string `json:"language,omitempty"`
Copyright string `json:"copyright,omitempty"`
ManagingEditor string `json:"managingEditor,omitempty"`
WebMaster string `json:"webMaster,omitempty"`
PubDate string `json:"pubDate,omitempty"`
PubDateParsed *time.Time `json:"pubDateParsed,omitempty"`
LastBuildDate string `json:"lastBuildDate,omitempty"`
LastBuildDateParsed *time.Time `json:"lastBuildDateParsed,omitempty"`
Categories []*Category `json:"categories,omitempty"`
Generator string `json:"generator,omitempty"`
Docs string `json:"docs,omitempty"`
TTL string `json:"ttl,omitempty"`
Image *Image `json:"image,omitempty"`
Rating string `json:"rating,omitempty"`
SkipHours []string `json:"skipHours,omitempty"`
SkipDays []string `json:"skipDays,omitempty"`
Cloud *Cloud `json:"cloud,omitempty"`
TextInput *TextInput `json:"textInput,omitempty"`
DublinCoreExt *ext.DublinCoreExtension `json:"dcExt,omitempty"`
ITunesExt *ext.ITunesFeedExtension `json:"itunesExt,omitempty"`
Extensions ext.Extensions `json:"extensions,omitempty"`
Items []*Item `json:"items"`
Version string `json:"version"`
}
func (f Feed) String() string {
json, _ := json.MarshalIndent(f, "", " ")
return string(json)
}
// Item is an RSS Item
type Item struct {
Title string `json:"title,omitempty"`
Link string `json:"link,omitempty"`
Description string `json:"description,omitempty"`
Content string `json:"content,omitempty"`
Author string `json:"author,omitempty"`
Categories []*Category `json:"categories,omitempty"`
Comments string `json:"comments,omitempty"`
Enclosure *Enclosure `json:"enclosure,omitempty"`
GUID *GUID `json:"guid,omitempty"`
PubDate string `json:"pubDate,omitempty"`
PubDateParsed *time.Time `json:"pubDateParsed,omitempty"`
Source *Source `json:"source,omitempty"`
DublinCoreExt *ext.DublinCoreExtension `json:"dcExt,omitempty"`
ITunesExt *ext.ITunesItemExtension `json:"itunesExt,omitempty"`
Extensions ext.Extensions `json:"extensions,omitempty"`
}
// Image is an image that represents the feed
type Image struct {
URL string `json:"url,omitempty"`
Link string `json:"link,omitempty"`
Title string `json:"title,omitempty"`
Width string `json:"width,omitempty"`
Height string `json:"height,omitempty"`
Description string `json:"description,omitempty"`
}
// Enclosure is a media object that is attached to
// the item
type Enclosure struct {
URL string `json:"url,omitempty"`
Length string `json:"length,omitempty"`
Type string `json:"type,omitempty"`
}
// GUID is a unique identifier for an item
type GUID struct {
Value string `json:"value,omitempty"`
IsPermalink string `json:"isPermalink,omitempty"`
}
// Source contains feed information for another
// feed if a given item came from that feed
type Source struct {
Title string `json:"title,omitempty"`
URL string `json:"url,omitempty"`
}
// Category is category metadata for Feeds and Entries
type Category struct {
Domain string `json:"domain,omitempty"`
Value string `json:"value,omitempty"`
}
// TextInput specifies a text input box that
// can be displayed with the channel
type TextInput struct {
Title string `json:"title,omitempty"`
Description string `json:"description,omitempty"`
Name string `json:"name,omitempty"`
Link string `json:"link,omitempty"`
}
// Cloud allows processes to register with a
// cloud to be notified of updates to the channel,
// implementing a lightweight publish-subscribe protocol
// for RSS feeds
type Cloud struct {
Domain string `json:"domain,omitempty"`
Port string `json:"port,omitempty"`
Path string `json:"path,omitempty"`
RegisterProcedure string `json:"registerProcedure,omitempty"`
Protocol string `json:"protocol,omitempty"`
}
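
The rss.Feed and rss.Item structs hold the raw channel fields before they are translated into the generic gofeed.Feed, and String() pretty-prints a feed as indented JSON. A quick sketch, with made-up values; normally the rss.Parser fills these fields in from the XML:

```
package main

import (
	"fmt"

	"github.com/mmcdole/gofeed/rss"
)

func main() {
	// Populate a couple of raw RSS fields by hand.
	f := rss.Feed{
		Title:   "Example channel",
		Link:    "https://example.com/",
		Version: "2.0",
		Items: []*rss.Item{
			{Title: "First post", Link: "https://example.com/1"},
		},
	}

	// String() marshals the struct with json.MarshalIndent.
	fmt.Println(f.String())
}
```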

770
vendor/github.com/mmcdole/gofeed/rss/parser.go generated vendored Normal file
View File

@ -0,0 +1,770 @@
package rss
import (
"fmt"
"io"
"strings"
ext "github.com/mmcdole/gofeed/extensions"
"github.com/mmcdole/gofeed/internal/shared"
xpp "github.com/mmcdole/goxpp"
)
// Parser is an RSS parser
type Parser struct {
base *shared.XMLBase
}
// Parse parses an xml feed into an rss.Feed
func (rp *Parser) Parse(feed io.Reader) (*Feed, error) {
p := xpp.NewXMLPullParser(feed, false, shared.NewReaderLabel)
rp.base = &shared.XMLBase{}
_, err := rp.base.FindRoot(p)
if err != nil {
return nil, err
}
return rp.parseRoot(p)
}
func (rp *Parser) parseRoot(p *xpp.XMLPullParser) (*Feed, error) {
rssErr := p.Expect(xpp.StartTag, "rss")
rdfErr := p.Expect(xpp.StartTag, "rdf")
if rssErr != nil && rdfErr != nil {
return nil, fmt.Errorf("%s or %s", rssErr.Error(), rdfErr.Error())
}
// Items found in feed root
var channel *Feed
var textinput *TextInput
var image *Image
items := []*Item{}
ver := rp.parseVersion(p)
for {
tok, err := rp.base.NextTag(p)
if err != nil {
return nil, err
}
if tok == xpp.EndTag {
break
}
if tok == xpp.StartTag {
// Skip any extensions found in the feed root.
if shared.IsExtension(p) {
p.Skip()
continue
}
name := strings.ToLower(p.Name)
if name == "channel" {
channel, err = rp.parseChannel(p)
if err != nil {
return nil, err
}
} else if name == "item" {
item, err := rp.parseItem(p)
if err != nil {
return nil, err
}
items = append(items, item)
} else if name == "textinput" {
textinput, err = rp.parseTextInput(p)
if err != nil {
return nil, err
}
} else if name == "image" {
image, err = rp.parseImage(p)
if err != nil {
return nil, err
}
} else {
p.Skip()
}
}
}
rssErr = p.Expect(xpp.EndTag, "rss")
rdfErr = p.Expect(xpp.EndTag, "rdf")
if rssErr != nil && rdfErr != nil {
return nil, fmt.Errorf("%s or %s", rssErr.Error(), rdfErr.Error())
}
if channel == nil {
channel = &Feed{}
channel.Items = []*Item{}
}
if len(items) > 0 {
channel.Items = append(channel.Items, items...)
}
if textinput != nil {
channel.TextInput = textinput
}
if image != nil {
channel.Image = image
}
channel.Version = ver
return channel, nil
}
func (rp *Parser) parseChannel(p *xpp.XMLPullParser) (rss *Feed, err error) {
if err = p.Expect(xpp.StartTag, "channel"); err != nil {
return nil, err
}
rss = &Feed{}
rss.Items = []*Item{}
extensions := ext.Extensions{}
categories := []*Category{}
for {
tok, err := rp.base.NextTag(p)
if err != nil {
return nil, err
}
if tok == xpp.EndTag {
break
}
if tok == xpp.StartTag {
name := strings.ToLower(p.Name)
if shared.IsExtension(p) {
ext, err := shared.ParseExtension(extensions, p)
if err != nil {
return nil, err
}
extensions = ext
} else if name == "title" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.Title = result
} else if name == "description" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.Description = result
} else if name == "link" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.Link = result
} else if name == "language" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.Language = result
} else if name == "copyright" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.Copyright = result
} else if name == "managingeditor" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.ManagingEditor = result
} else if name == "webmaster" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.WebMaster = result
} else if name == "pubdate" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.PubDate = result
date, err := shared.ParseDate(result)
if err == nil {
utcDate := date.UTC()
rss.PubDateParsed = &utcDate
}
} else if name == "lastbuilddate" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.LastBuildDate = result
date, err := shared.ParseDate(result)
if err == nil {
utcDate := date.UTC()
rss.LastBuildDateParsed = &utcDate
}
} else if name == "generator" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.Generator = result
} else if name == "docs" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.Docs = result
} else if name == "ttl" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.TTL = result
} else if name == "rating" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
rss.Rating = result
} else if name == "skiphours" {
result, err := rp.parseSkipHours(p)
if err != nil {
return nil, err
}
rss.SkipHours = result
} else if name == "skipdays" {
result, err := rp.parseSkipDays(p)
if err != nil {
return nil, err
}
rss.SkipDays = result
} else if name == "item" {
result, err := rp.parseItem(p)
if err != nil {
return nil, err
}
rss.Items = append(rss.Items, result)
} else if name == "cloud" {
result, err := rp.parseCloud(p)
if err != nil {
return nil, err
}
rss.Cloud = result
} else if name == "category" {
result, err := rp.parseCategory(p)
if err != nil {
return nil, err
}
categories = append(categories, result)
} else if name == "image" {
result, err := rp.parseImage(p)
if err != nil {
return nil, err
}
rss.Image = result
} else if name == "textinput" {
result, err := rp.parseTextInput(p)
if err != nil {
return nil, err
}
rss.TextInput = result
} else {
// Skip element as it isn't an extension and not
// part of the spec
p.Skip()
}
}
}
if err = p.Expect(xpp.EndTag, "channel"); err != nil {
return nil, err
}
if len(categories) > 0 {
rss.Categories = categories
}
if len(extensions) > 0 {
rss.Extensions = extensions
if itunes, ok := rss.Extensions["itunes"]; ok {
rss.ITunesExt = ext.NewITunesFeedExtension(itunes)
}
if dc, ok := rss.Extensions["dc"]; ok {
rss.DublinCoreExt = ext.NewDublinCoreExtension(dc)
}
}
return rss, nil
}
func (rp *Parser) parseItem(p *xpp.XMLPullParser) (item *Item, err error) {
if err = p.Expect(xpp.StartTag, "item"); err != nil {
return nil, err
}
item = &Item{}
extensions := ext.Extensions{}
categories := []*Category{}
for {
tok, err := rp.base.NextTag(p)
if err != nil {
return nil, err
}
if tok == xpp.EndTag {
break
}
if tok == xpp.StartTag {
name := strings.ToLower(p.Name)
if shared.IsExtension(p) {
ext, err := shared.ParseExtension(extensions, p)
if err != nil {
return nil, err
}
item.Extensions = ext
} else if name == "title" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
item.Title = result
} else if name == "description" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
item.Description = result
} else if name == "encoded" {
space := strings.TrimSpace(p.Space)
if prefix, ok := p.Spaces[space]; ok && prefix == "content" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
item.Content = result
}
} else if name == "link" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
item.Link = result
} else if name == "author" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
item.Author = result
} else if name == "comments" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
item.Comments = result
} else if name == "pubdate" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
item.PubDate = result
date, err := shared.ParseDate(result)
if err == nil {
utcDate := date.UTC()
item.PubDateParsed = &utcDate
}
} else if name == "source" {
result, err := rp.parseSource(p)
if err != nil {
return nil, err
}
item.Source = result
} else if name == "enclosure" {
result, err := rp.parseEnclosure(p)
if err != nil {
return nil, err
}
item.Enclosure = result
} else if name == "guid" {
result, err := rp.parseGUID(p)
if err != nil {
return nil, err
}
item.GUID = result
} else if name == "category" {
result, err := rp.parseCategory(p)
if err != nil {
return nil, err
}
categories = append(categories, result)
} else {
// Skip any elements not part of the item spec
p.Skip()
}
}
}
if len(categories) > 0 {
item.Categories = categories
}
if len(extensions) > 0 {
item.Extensions = extensions
if itunes, ok := item.Extensions["itunes"]; ok {
item.ITunesExt = ext.NewITunesItemExtension(itunes)
}
if dc, ok := item.Extensions["dc"]; ok {
item.DublinCoreExt = ext.NewDublinCoreExtension(dc)
}
}
if err = p.Expect(xpp.EndTag, "item"); err != nil {
return nil, err
}
return item, nil
}
func (rp *Parser) parseSource(p *xpp.XMLPullParser) (source *Source, err error) {
if err = p.Expect(xpp.StartTag, "source"); err != nil {
return nil, err
}
source = &Source{}
source.URL = p.Attribute("url")
result, err := shared.ParseText(p)
if err != nil {
return source, err
}
source.Title = result
if err = p.Expect(xpp.EndTag, "source"); err != nil {
return nil, err
}
return source, nil
}
func (rp *Parser) parseEnclosure(p *xpp.XMLPullParser) (enclosure *Enclosure, err error) {
if err = p.Expect(xpp.StartTag, "enclosure"); err != nil {
return nil, err
}
enclosure = &Enclosure{}
enclosure.URL = p.Attribute("url")
enclosure.Length = p.Attribute("length")
enclosure.Type = p.Attribute("type")
// Ignore any enclosure text
_, err = p.NextText()
if err != nil {
return enclosure, err
}
if err = p.Expect(xpp.EndTag, "enclosure"); err != nil {
return nil, err
}
return enclosure, nil
}
func (rp *Parser) parseImage(p *xpp.XMLPullParser) (image *Image, err error) {
if err = p.Expect(xpp.StartTag, "image"); err != nil {
return nil, err
}
image = &Image{}
for {
tok, err := rp.base.NextTag(p)
if err != nil {
return image, err
}
if tok == xpp.EndTag {
break
}
if tok == xpp.StartTag {
name := strings.ToLower(p.Name)
if name == "url" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
image.URL = result
} else if name == "title" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
image.Title = result
} else if name == "link" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
image.Link = result
} else if name == "width" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
image.Width = result
} else if name == "height" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
image.Height = result
} else if name == "description" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
image.Description = result
} else {
p.Skip()
}
}
}
if err = p.Expect(xpp.EndTag, "image"); err != nil {
return nil, err
}
return image, nil
}
func (rp *Parser) parseGUID(p *xpp.XMLPullParser) (guid *GUID, err error) {
if err = p.Expect(xpp.StartTag, "guid"); err != nil {
return nil, err
}
guid = &GUID{}
guid.IsPermalink = p.Attribute("isPermalink")
result, err := shared.ParseText(p)
if err != nil {
return
}
guid.Value = result
if err = p.Expect(xpp.EndTag, "guid"); err != nil {
return nil, err
}
return guid, nil
}
func (rp *Parser) parseCategory(p *xpp.XMLPullParser) (cat *Category, err error) {
if err = p.Expect(xpp.StartTag, "category"); err != nil {
return nil, err
}
cat = &Category{}
cat.Domain = p.Attribute("domain")
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
cat.Value = result
if err = p.Expect(xpp.EndTag, "category"); err != nil {
return nil, err
}
return cat, nil
}
func (rp *Parser) parseTextInput(p *xpp.XMLPullParser) (*TextInput, error) {
if err := p.Expect(xpp.StartTag, "textinput"); err != nil {
return nil, err
}
ti := &TextInput{}
for {
tok, err := rp.base.NextTag(p)
if err != nil {
return nil, err
}
if tok == xpp.EndTag {
break
}
if tok == xpp.StartTag {
name := strings.ToLower(p.Name)
if name == "title" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
ti.Title = result
} else if name == "description" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
ti.Description = result
} else if name == "name" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
ti.Name = result
} else if name == "link" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
ti.Link = result
} else {
p.Skip()
}
}
}
if err := p.Expect(xpp.EndTag, "textinput"); err != nil {
return nil, err
}
return ti, nil
}
func (rp *Parser) parseSkipHours(p *xpp.XMLPullParser) ([]string, error) {
if err := p.Expect(xpp.StartTag, "skiphours"); err != nil {
return nil, err
}
hours := []string{}
for {
tok, err := rp.base.NextTag(p)
if err != nil {
return nil, err
}
if tok == xpp.EndTag {
break
}
if tok == xpp.StartTag {
name := strings.ToLower(p.Name)
if name == "hour" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
hours = append(hours, result)
} else {
p.Skip()
}
}
}
if err := p.Expect(xpp.EndTag, "skiphours"); err != nil {
return nil, err
}
return hours, nil
}
func (rp *Parser) parseSkipDays(p *xpp.XMLPullParser) ([]string, error) {
if err := p.Expect(xpp.StartTag, "skipdays"); err != nil {
return nil, err
}
days := []string{}
for {
tok, err := rp.base.NextTag(p)
if err != nil {
return nil, err
}
if tok == xpp.EndTag {
break
}
if tok == xpp.StartTag {
name := strings.ToLower(p.Name)
if name == "day" {
result, err := shared.ParseText(p)
if err != nil {
return nil, err
}
days = append(days, result)
} else {
p.Skip()
}
}
}
if err := p.Expect(xpp.EndTag, "skipdays"); err != nil {
return nil, err
}
return days, nil
}
func (rp *Parser) parseCloud(p *xpp.XMLPullParser) (*Cloud, error) {
if err := p.Expect(xpp.StartTag, "cloud"); err != nil {
return nil, err
}
cloud := &Cloud{}
cloud.Domain = p.Attribute("domain")
cloud.Port = p.Attribute("port")
cloud.Path = p.Attribute("path")
cloud.RegisterProcedure = p.Attribute("registerProcedure")
cloud.Protocol = p.Attribute("protocol")
rp.base.NextTag(p)
if err := p.Expect(xpp.EndTag, "cloud"); err != nil {
return nil, err
}
return cloud, nil
}
func (rp *Parser) parseVersion(p *xpp.XMLPullParser) (ver string) {
name := strings.ToLower(p.Name)
if name == "rss" {
ver = p.Attribute("version")
} else if name == "rdf" {
ns := p.Attribute("xmlns")
if ns == "http://channel.netscape.com/rdf/simple/0.9/" ||
ns == "http://my.netscape.com/rdf/simple/0.9/" {
ver = "0.9"
} else if ns == "http://purl.org/rss/1.0/" {
ver = "1.0"
}
}
return
}
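
The parser is a pull parser: parseRoot walks the document with NextTag, dispatches each start tag by its lower-cased name, and skips anything that is neither an extension nor part of the RSS spec. It can also be exercised directly, without going through the top-level gofeed.Parser; a minimal sketch with an inline document:

```
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/mmcdole/gofeed/rss"
)

func main() {
	raw := `<rss version="2.0">
  <channel>
    <title>Example</title>
    <item><title>Hello</title><link>https://example.com/1</link></item>
  </channel>
</rss>`

	// A zero-value rss.Parser is usable; Parse sets up its own XMLBase.
	p := &rss.Parser{}
	feed, err := p.Parse(strings.NewReader(raw))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(feed.Version, feed.Title, len(feed.Items))
}
```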

686
vendor/github.com/mmcdole/gofeed/translator.go generated vendored Normal file
View File

@ -0,0 +1,686 @@
package gofeed
import (
"fmt"
"strings"
"time"
"github.com/mmcdole/gofeed/atom"
ext "github.com/mmcdole/gofeed/extensions"
"github.com/mmcdole/gofeed/internal/shared"
"github.com/mmcdole/gofeed/rss"
)
// Translator converts a particular feed (atom.Feed or rss.Feed)
// into the generic Feed struct
type Translator interface {
Translate(feed interface{}) (*Feed, error)
}
// DefaultRSSTranslator converts an rss.Feed struct
// into the generic Feed struct.
//
// This default implementation defines a set of
// mapping rules between rss.Feed -> Feed
// for each of the fields in Feed.
type DefaultRSSTranslator struct{}
// Translate converts an RSS feed into the universal
// feed type.
func (t *DefaultRSSTranslator) Translate(feed interface{}) (*Feed, error) {
rss, found := feed.(*rss.Feed)
if !found {
return nil, fmt.Errorf("Feed did not match expected type of *rss.Feed")
}
result := &Feed{}
result.Title = t.translateFeedTitle(rss)
result.Description = t.translateFeedDescription(rss)
result.Link = t.translateFeedLink(rss)
result.FeedLink = t.translateFeedFeedLink(rss)
result.Updated = t.translateFeedUpdated(rss)
result.UpdatedParsed = t.translateFeedUpdatedParsed(rss)
result.Published = t.translateFeedPublished(rss)
result.PublishedParsed = t.translateFeedPublishedParsed(rss)
result.Author = t.translateFeedAuthor(rss)
result.Language = t.translateFeedLanguage(rss)
result.Image = t.translateFeedImage(rss)
result.Copyright = t.translateFeedCopyright(rss)
result.Generator = t.translateFeedGenerator(rss)
result.Categories = t.translateFeedCategories(rss)
result.Items = t.translateFeedItems(rss)
result.ITunesExt = rss.ITunesExt
result.DublinCoreExt = rss.DublinCoreExt
result.Extensions = rss.Extensions
result.FeedVersion = rss.Version
result.FeedType = "rss"
return result, nil
}
func (t *DefaultRSSTranslator) translateFeedItem(rssItem *rss.Item) (item *Item) {
item = &Item{}
item.Title = t.translateItemTitle(rssItem)
item.Description = t.translateItemDescription(rssItem)
item.Content = t.translateItemContent(rssItem)
item.Link = t.translateItemLink(rssItem)
item.Published = t.translateItemPublished(rssItem)
item.PublishedParsed = t.translateItemPublishedParsed(rssItem)
item.Author = t.translateItemAuthor(rssItem)
item.GUID = t.translateItemGUID(rssItem)
item.Image = t.translateItemImage(rssItem)
item.Categories = t.translateItemCategories(rssItem)
item.Enclosures = t.translateItemEnclosures(rssItem)
item.DublinCoreExt = rssItem.DublinCoreExt
item.ITunesExt = rssItem.ITunesExt
item.Extensions = rssItem.Extensions
return
}
func (t *DefaultRSSTranslator) translateFeedTitle(rss *rss.Feed) (title string) {
if rss.Title != "" {
title = rss.Title
} else if rss.DublinCoreExt != nil && rss.DublinCoreExt.Title != nil {
title = t.firstEntry(rss.DublinCoreExt.Title)
}
return
}
func (t *DefaultRSSTranslator) translateFeedDescription(rss *rss.Feed) (desc string) {
return rss.Description
}
func (t *DefaultRSSTranslator) translateFeedLink(rss *rss.Feed) (link string) {
if rss.Link != "" {
link = rss.Link
} else if rss.ITunesExt != nil && rss.ITunesExt.Subtitle != "" {
link = rss.ITunesExt.Subtitle
}
return
}
func (t *DefaultRSSTranslator) translateFeedFeedLink(rss *rss.Feed) (link string) {
atomExtensions := t.extensionsForKeys([]string{"atom", "atom10", "atom03"}, rss.Extensions)
for _, ex := range atomExtensions {
if links, ok := ex["link"]; ok {
for _, l := range links {
if l.Attrs["rel"] == "self" {
link = l.Attrs["href"]
}
}
}
}
return
}
func (t *DefaultRSSTranslator) translateFeedUpdated(rss *rss.Feed) (updated string) {
if rss.LastBuildDate != "" {
updated = rss.LastBuildDate
} else if rss.DublinCoreExt != nil && rss.DublinCoreExt.Date != nil {
updated = t.firstEntry(rss.DublinCoreExt.Date)
}
return
}
func (t *DefaultRSSTranslator) translateFeedUpdatedParsed(rss *rss.Feed) (updated *time.Time) {
if rss.LastBuildDateParsed != nil {
updated = rss.LastBuildDateParsed
} else if rss.DublinCoreExt != nil && rss.DublinCoreExt.Date != nil {
dateText := t.firstEntry(rss.DublinCoreExt.Date)
date, err := shared.ParseDate(dateText)
if err == nil {
updated = &date
}
}
return
}
func (t *DefaultRSSTranslator) translateFeedPublished(rss *rss.Feed) (published string) {
return rss.PubDate
}
func (t *DefaultRSSTranslator) translateFeedPublishedParsed(rss *rss.Feed) (published *time.Time) {
return rss.PubDateParsed
}
func (t *DefaultRSSTranslator) translateFeedAuthor(rss *rss.Feed) (author *Person) {
if rss.ManagingEditor != "" {
name, address := shared.ParseNameAddress(rss.ManagingEditor)
author = &Person{}
author.Name = name
author.Email = address
} else if rss.WebMaster != "" {
name, address := shared.ParseNameAddress(rss.WebMaster)
author = &Person{}
author.Name = name
author.Email = address
} else if rss.DublinCoreExt != nil && rss.DublinCoreExt.Author != nil {
dcAuthor := t.firstEntry(rss.DublinCoreExt.Author)
name, address := shared.ParseNameAddress(dcAuthor)
author = &Person{}
author.Name = name
author.Email = address
} else if rss.DublinCoreExt != nil && rss.DublinCoreExt.Creator != nil {
dcCreator := t.firstEntry(rss.DublinCoreExt.Creator)
name, address := shared.ParseNameAddress(dcCreator)
author = &Person{}
author.Name = name
author.Email = address
} else if rss.ITunesExt != nil && rss.ITunesExt.Author != "" {
name, address := shared.ParseNameAddress(rss.ITunesExt.Author)
author = &Person{}
author.Name = name
author.Email = address
}
return
}
func (t *DefaultRSSTranslator) translateFeedLanguage(rss *rss.Feed) (language string) {
if rss.Language != "" {
language = rss.Language
} else if rss.DublinCoreExt != nil && rss.DublinCoreExt.Language != nil {
language = t.firstEntry(rss.DublinCoreExt.Language)
}
return
}
func (t *DefaultRSSTranslator) translateFeedImage(rss *rss.Feed) (image *Image) {
if rss.Image != nil {
image = &Image{}
image.Title = rss.Image.Title
image.URL = rss.Image.URL
} else if rss.ITunesExt != nil && rss.ITunesExt.Image != "" {
image = &Image{}
image.URL = rss.ITunesExt.Image
}
return
}
func (t *DefaultRSSTranslator) translateFeedCopyright(rss *rss.Feed) (rights string) {
if rss.Copyright != "" {
rights = rss.Copyright
} else if rss.DublinCoreExt != nil && rss.DublinCoreExt.Rights != nil {
rights = t.firstEntry(rss.DublinCoreExt.Rights)
}
return
}
func (t *DefaultRSSTranslator) translateFeedGenerator(rss *rss.Feed) (generator string) {
return rss.Generator
}
func (t *DefaultRSSTranslator) translateFeedCategories(rss *rss.Feed) (categories []string) {
cats := []string{}
if rss.Categories != nil {
for _, c := range rss.Categories {
cats = append(cats, c.Value)
}
}
if rss.ITunesExt != nil && rss.ITunesExt.Keywords != "" {
keywords := strings.Split(rss.ITunesExt.Keywords, ",")
for _, k := range keywords {
cats = append(cats, k)
}
}
if rss.ITunesExt != nil && rss.ITunesExt.Categories != nil {
for _, c := range rss.ITunesExt.Categories {
cats = append(cats, c.Text)
if c.Subcategory != nil {
cats = append(cats, c.Subcategory.Text)
}
}
}
if rss.DublinCoreExt != nil && rss.DublinCoreExt.Subject != nil {
for _, c := range rss.DublinCoreExt.Subject {
cats = append(cats, c)
}
}
if len(cats) > 0 {
categories = cats
}
return
}
func (t *DefaultRSSTranslator) translateFeedItems(rss *rss.Feed) (items []*Item) {
items = []*Item{}
for _, i := range rss.Items {
items = append(items, t.translateFeedItem(i))
}
return
}
func (t *DefaultRSSTranslator) translateItemTitle(rssItem *rss.Item) (title string) {
if rssItem.Title != "" {
title = rssItem.Title
} else if rssItem.DublinCoreExt != nil && rssItem.DublinCoreExt.Title != nil {
title = t.firstEntry(rssItem.DublinCoreExt.Title)
}
return
}
func (t *DefaultRSSTranslator) translateItemDescription(rssItem *rss.Item) (desc string) {
if rssItem.Description != "" {
desc = rssItem.Description
} else if rssItem.DublinCoreExt != nil && rssItem.DublinCoreExt.Description != nil {
desc = t.firstEntry(rssItem.DublinCoreExt.Description)
}
return
}
func (t *DefaultRSSTranslator) translateItemContent(rssItem *rss.Item) (content string) {
return rssItem.Content
}
func (t *DefaultRSSTranslator) translateItemLink(rssItem *rss.Item) (link string) {
return rssItem.Link
}
func (t *DefaultRSSTranslator) translateItemUpdated(rssItem *rss.Item) (updated string) {
if rssItem.DublinCoreExt != nil && rssItem.DublinCoreExt.Date != nil {
updated = t.firstEntry(rssItem.DublinCoreExt.Date)
}
return updated
}
func (t *DefaultRSSTranslator) translateItemUpdatedParsed(rssItem *rss.Item) (updated *time.Time) {
if rssItem.DublinCoreExt != nil && rssItem.DublinCoreExt.Date != nil {
updatedText := t.firstEntry(rssItem.DublinCoreExt.Date)
updatedDate, err := shared.ParseDate(updatedText)
if err == nil {
updated = &updatedDate
}
}
return
}
func (t *DefaultRSSTranslator) translateItemPublished(rssItem *rss.Item) (pubDate string) {
if rssItem.PubDate != "" {
return rssItem.PubDate
} else if rssItem.DublinCoreExt != nil && rssItem.DublinCoreExt.Date != nil {
return t.firstEntry(rssItem.DublinCoreExt.Date)
}
return
}
func (t *DefaultRSSTranslator) translateItemPublishedParsed(rssItem *rss.Item) (pubDate *time.Time) {
if rssItem.PubDateParsed != nil {
return rssItem.PubDateParsed
} else if rssItem.DublinCoreExt != nil && rssItem.DublinCoreExt.Date != nil {
pubDateText := t.firstEntry(rssItem.DublinCoreExt.Date)
pubDateParsed, err := shared.ParseDate(pubDateText)
if err == nil {
pubDate = &pubDateParsed
}
}
return
}
func (t *DefaultRSSTranslator) translateItemAuthor(rssItem *rss.Item) (author *Person) {
if rssItem.Author != "" {
name, address := shared.ParseNameAddress(rssItem.Author)
author = &Person{}
author.Name = name
author.Email = address
} else if rssItem.DublinCoreExt != nil && rssItem.DublinCoreExt.Author != nil {
dcAuthor := t.firstEntry(rssItem.DublinCoreExt.Author)
name, address := shared.ParseNameAddress(dcAuthor)
author = &Person{}
author.Name = name
author.Email = address
} else if rssItem.DublinCoreExt != nil && rssItem.DublinCoreExt.Creator != nil {
dcCreator := t.firstEntry(rssItem.DublinCoreExt.Creator)
name, address := shared.ParseNameAddress(dcCreator)
author = &Person{}
author.Name = name
author.Email = address
} else if rssItem.ITunesExt != nil && rssItem.ITunesExt.Author != "" {
name, address := shared.ParseNameAddress(rssItem.ITunesExt.Author)
author = &Person{}
author.Name = name
author.Email = address
}
return
}
func (t *DefaultRSSTranslator) translateItemGUID(rssItem *rss.Item) (guid string) {
if rssItem.GUID != nil {
guid = rssItem.GUID.Value
}
return
}
func (t *DefaultRSSTranslator) translateItemImage(rssItem *rss.Item) (image *Image) {
if rssItem.ITunesExt != nil && rssItem.ITunesExt.Image != "" {
image = &Image{}
image.URL = rssItem.ITunesExt.Image
}
return
}
func (t *DefaultRSSTranslator) translateItemCategories(rssItem *rss.Item) (categories []string) {
cats := []string{}
if rssItem.Categories != nil {
for _, c := range rssItem.Categories {
cats = append(cats, c.Value)
}
}
if rssItem.ITunesExt != nil && rssItem.ITunesExt.Keywords != "" {
keywords := strings.Split(rssItem.ITunesExt.Keywords, ",")
for _, k := range keywords {
cats = append(cats, k)
}
}
if rssItem.DublinCoreExt != nil && rssItem.DublinCoreExt.Subject != nil {
for _, c := range rssItem.DublinCoreExt.Subject {
cats = append(cats, c)
}
}
if len(cats) > 0 {
categories = cats
}
return
}
func (t *DefaultRSSTranslator) translateItemEnclosures(rssItem *rss.Item) (enclosures []*Enclosure) {
if rssItem.Enclosure != nil {
e := &Enclosure{}
e.URL = rssItem.Enclosure.URL
e.Type = rssItem.Enclosure.Type
e.Length = rssItem.Enclosure.Length
enclosures = []*Enclosure{e}
}
return
}
func (t *DefaultRSSTranslator) extensionsForKeys(keys []string, extensions ext.Extensions) (matches []map[string][]ext.Extension) {
matches = []map[string][]ext.Extension{}
if extensions == nil {
return
}
for _, key := range keys {
if match, ok := extensions[key]; ok {
matches = append(matches, match)
}
}
return
}
func (t *DefaultRSSTranslator) firstEntry(entries []string) (value string) {
if entries == nil {
return
}
if len(entries) == 0 {
return
}
return entries[0]
}
// DefaultAtomTranslator converts an atom.Feed struct
// into the generic Feed struct.
//
// This default implementation defines a set of
// mapping rules between atom.Feed -> Feed
// for each of the fields in Feed.
type DefaultAtomTranslator struct{}
// Translate converts an Atom feed into the universal
// feed type.
func (t *DefaultAtomTranslator) Translate(feed interface{}) (*Feed, error) {
atom, found := feed.(*atom.Feed)
if !found {
return nil, fmt.Errorf("Feed did not match expected type of *atom.Feed")
}
result := &Feed{}
result.Title = t.translateFeedTitle(atom)
result.Description = t.translateFeedDescription(atom)
result.Link = t.translateFeedLink(atom)
result.FeedLink = t.translateFeedFeedLink(atom)
result.Updated = t.translateFeedUpdated(atom)
result.UpdatedParsed = t.translateFeedUpdatedParsed(atom)
result.Author = t.translateFeedAuthor(atom)
result.Language = t.translateFeedLanguage(atom)
result.Image = t.translateFeedImage(atom)
result.Copyright = t.translateFeedCopyright(atom)
result.Categories = t.translateFeedCategories(atom)
result.Generator = t.translateFeedGenerator(atom)
result.Items = t.translateFeedItems(atom)
result.Extensions = atom.Extensions
result.FeedVersion = atom.Version
result.FeedType = "atom"
return result, nil
}
func (t *DefaultAtomTranslator) translateFeedItem(entry *atom.Entry) (item *Item) {
item = &Item{}
item.Title = t.translateItemTitle(entry)
item.Description = t.translateItemDescription(entry)
item.Content = t.translateItemContent(entry)
item.Link = t.translateItemLink(entry)
item.Updated = t.translateItemUpdated(entry)
item.UpdatedParsed = t.translateItemUpdatedParsed(entry)
item.Published = t.translateItemPublished(entry)
item.PublishedParsed = t.translateItemPublishedParsed(entry)
item.Author = t.translateItemAuthor(entry)
item.GUID = t.translateItemGUID(entry)
item.Image = t.translateItemImage(entry)
item.Categories = t.translateItemCategories(entry)
item.Enclosures = t.translateItemEnclosures(entry)
item.Extensions = entry.Extensions
return
}
func (t *DefaultAtomTranslator) translateFeedTitle(atom *atom.Feed) (title string) {
return atom.Title
}
func (t *DefaultAtomTranslator) translateFeedDescription(atom *atom.Feed) (desc string) {
return atom.Subtitle
}
func (t *DefaultAtomTranslator) translateFeedLink(atom *atom.Feed) (link string) {
l := t.firstLinkWithType("alternate", atom.Links)
if l != nil {
link = l.Href
}
return
}
func (t *DefaultAtomTranslator) translateFeedFeedLink(atom *atom.Feed) (link string) {
feedLink := t.firstLinkWithType("self", atom.Links)
if feedLink != nil {
link = feedLink.Href
}
return
}
func (t *DefaultAtomTranslator) translateFeedUpdated(atom *atom.Feed) (updated string) {
return atom.Updated
}
func (t *DefaultAtomTranslator) translateFeedUpdatedParsed(atom *atom.Feed) (updated *time.Time) {
return atom.UpdatedParsed
}
func (t *DefaultAtomTranslator) translateFeedAuthor(atom *atom.Feed) (author *Person) {
a := t.firstPerson(atom.Authors)
if a != nil {
feedAuthor := Person{}
feedAuthor.Name = a.Name
feedAuthor.Email = a.Email
author = &feedAuthor
}
return
}
func (t *DefaultAtomTranslator) translateFeedLanguage(atom *atom.Feed) (language string) {
return atom.Language
}
func (t *DefaultAtomTranslator) translateFeedImage(atom *atom.Feed) (image *Image) {
if atom.Logo != "" {
feedImage := Image{}
feedImage.URL = atom.Logo
image = &feedImage
}
return
}
func (t *DefaultAtomTranslator) translateFeedCopyright(atom *atom.Feed) (rights string) {
return atom.Rights
}
func (t *DefaultAtomTranslator) translateFeedGenerator(atom *atom.Feed) (generator string) {
if atom.Generator != nil {
if atom.Generator.Value != "" {
generator += atom.Generator.Value
}
if atom.Generator.Version != "" {
generator += " v" + atom.Generator.Version
}
if atom.Generator.URI != "" {
generator += " " + atom.Generator.URI
}
generator = strings.TrimSpace(generator)
}
return
}
func (t *DefaultAtomTranslator) translateFeedCategories(atom *atom.Feed) (categories []string) {
if atom.Categories != nil {
categories = []string{}
for _, c := range atom.Categories {
categories = append(categories, c.Term)
}
}
return
}
func (t *DefaultAtomTranslator) translateFeedItems(atom *atom.Feed) (items []*Item) {
items = []*Item{}
for _, entry := range atom.Entries {
items = append(items, t.translateFeedItem(entry))
}
return
}
func (t *DefaultAtomTranslator) translateItemTitle(entry *atom.Entry) (title string) {
return entry.Title
}
func (t *DefaultAtomTranslator) translateItemDescription(entry *atom.Entry) (desc string) {
return entry.Summary
}
func (t *DefaultAtomTranslator) translateItemContent(entry *atom.Entry) (content string) {
if entry.Content != nil {
content = entry.Content.Value
}
return
}
func (t *DefaultAtomTranslator) translateItemLink(entry *atom.Entry) (link string) {
l := t.firstLinkWithType("alternate", entry.Links)
if l != nil {
link = l.Href
}
return
}
func (t *DefaultAtomTranslator) translateItemUpdated(entry *atom.Entry) (updated string) {
return entry.Updated
}
func (t *DefaultAtomTranslator) translateItemUpdatedParsed(entry *atom.Entry) (updated *time.Time) {
return entry.UpdatedParsed
}
func (t *DefaultAtomTranslator) translateItemPublished(entry *atom.Entry) (updated string) {
return entry.Published
}
func (t *DefaultAtomTranslator) translateItemPublishedParsed(entry *atom.Entry) (updated *time.Time) {
return entry.PublishedParsed
}
func (t *DefaultAtomTranslator) translateItemAuthor(entry *atom.Entry) (author *Person) {
a := t.firstPerson(entry.Authors)
if a != nil {
author = &Person{}
author.Name = a.Name
author.Email = a.Email
}
return
}
func (t *DefaultAtomTranslator) translateItemGUID(entry *atom.Entry) (guid string) {
return entry.ID
}
func (t *DefaultAtomTranslator) translateItemImage(entry *atom.Entry) (image *Image) {
return nil
}
func (t *DefaultAtomTranslator) translateItemCategories(entry *atom.Entry) (categories []string) {
if entry.Categories != nil {
categories = []string{}
for _, c := range entry.Categories {
categories = append(categories, c.Term)
}
}
return
}
func (t *DefaultAtomTranslator) translateItemEnclosures(entry *atom.Entry) (enclosures []*Enclosure) {
if entry.Links != nil {
enclosures = []*Enclosure{}
for _, e := range entry.Links {
if e.Rel == "enclosure" {
enclosure := &Enclosure{}
enclosure.URL = e.Href
enclosure.Length = e.Length
enclosure.Type = e.Type
enclosures = append(enclosures, enclosure)
}
}
if len(enclosures) == 0 {
enclosures = nil
}
}
return
}
func (t *DefaultAtomTranslator) firstLinkWithType(linkType string, links []*atom.Link) *atom.Link {
if links == nil {
return nil
}
for _, link := range links {
if link.Rel == linkType {
return link
}
}
return nil
}
func (t *DefaultAtomTranslator) firstPerson(persons []*atom.Person) (person *atom.Person) {
if persons == nil || len(persons) == 0 {
return
}
person = persons[0]
return
}
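
Because gofeed.Parser exposes RSSTranslator and AtomTranslator fields (see the accessors near the top of this diff), the default mapping can be wrapped or replaced. A hedged sketch of a custom translator that decorates DefaultRSSTranslator; the type name and title prefix are invented for illustration:

```
package main

import (
	"fmt"
	"log"

	"github.com/mmcdole/gofeed"
)

// taggedRSSTranslator wraps the default mapping and prefixes the feed title.
type taggedRSSTranslator struct {
	defaultTranslator *gofeed.DefaultRSSTranslator
}

func (t *taggedRSSTranslator) Translate(feed interface{}) (*gofeed.Feed, error) {
	f, err := t.defaultTranslator.Translate(feed)
	if err != nil {
		return nil, err
	}
	f.Title = "[zorg] " + f.Title
	return f, nil
}

func main() {
	fp := gofeed.NewParser()
	fp.RSSTranslator = &taggedRSSTranslator{defaultTranslator: &gofeed.DefaultRSSTranslator{}}

	feed, err := fp.ParseString(`<rss version="2.0"><channel><title>Example</title></channel></rss>`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(feed.Title) // prints "[zorg] Example"
}
```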

24
vendor/github.com/mmcdole/goxpp/.gitignore generated vendored Normal file
View File

@ -0,0 +1,24 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe
*.test
*.prof

17
vendor/github.com/mmcdole/goxpp/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,17 @@
language: go
go:
- tip
- 1.6
- 1.5
- 1.4
install:
- go get github.com/stretchr/testify/assert
- go get golang.org/x/tools/cmd/cover
- go get github.com/mattn/goveralls
script:
- go test -v -covermode=count -coverprofile=coverage.out
- $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci -repotoken=$COVERALLS_TOKEN
env:
global:
secure: IZqRp8DmY6LLP+9PTjttLFuCeA/IqW4qQWGnNqyc8nwKoqbHTVY/MZav9WrocGBZZGdI/zLghnE3wp2bywPyBoBPJv9oq7NUZ47DGpdkaLazgP9O68dtshOZ/zDZU7kSbGDF6mePVYjPjkBkWFyt13np89n/bB0L0zdsQE3bKukJ0lSnN32aOjphL+nFEfxM5ghnOuZ7ZDyXTjehQNkNZG73T2ttic8pxi1M+xxl1FXODNgTz0D6qs2ZdKSjJeE9n5iOJqNxIRfW1iIXPn8L2UQBV1+8aohxGy22flwz1ZCO2MZJLqdR1apGZdqVrYhKjxOnyyRWfEX3mpl6/EiW1gLqPgpzjKPuu/wiwfUJOBdFbrn5WGoR6f16XJ6bmxo4NGUEtBXeZz932HWl8XXD+CAGfLNw1NAabH1HpNAYBd4CFpiIi5RtU5sRtumzXwjvgHxlRhwIRb6jWqWezBbRL10MrnhnUyyXu1AWV7LrSvPDbXZ/5NL4/fbW/Piop1vhuNhHauLvZxWa1yv9q1CN8Uad8KEiJcjRj6lac7CCTspoEaPEVlL01tJZDllga1XQCJcRJVPSzt0qEzG2XqhwjWaRkbMLqLXJ9/0YsY/QU9BA0mtPwo+2e2J8ZRmqqXNatysNk9l5sH5TBk3lP+n4jFtI0nGndCxwqPHFGsjhPsE=

Some files were not shown because too many files have changed in this diff.