Merge pull request #24 from 030/18-reddit
[GH-6][GH-18] Extended the example and added Reddit RSS parsing.
Commit 81ac155986
3 changed files with 39 additions and 12 deletions
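In short, Read, InsecureRead and ReadWithClient now take an extra reddit flag; when it is true the request is sent with a descriptive User-Agent header so Reddit does not answer the feed request with 429 Too Many Requests. A minimal sketch of the new call site (the import path below is an assumption and may differ for this repository):

package main

import (
	"fmt"

	rss "github.com/ungerik/go-rss" // assumed import path; adjust to this repo's module path
)

func main() {
	// The second argument is the new reddit flag: it tells the library to set
	// the custom User-Agent header that Reddit requires for its feeds.
	resp, err := rss.Read("https://reddit.com/.rss", true)
	if err != nil {
		panic(err)
	}

	// Reddit serves Atom, so the response is parsed with rss.Atom,
	// exactly as the extended example below does.
	feed, err := rss.Atom(resp)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", feed)
}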
@@ -1,3 +1,4 @@
+https://reddit.com/.rss
 http://blog.golang.org/feed.atom
 http://feeds.nos.nl/nosnieuwsalgemeen
 https://aws.amazon.com/blogs/devops/feed
@@ -13,4 +14,5 @@ https://ubuntu.com/blog/feed
 https://www.docker.com/blog/feed
 https://www.theregister.co.uk/data_centre/bofh/headlines.atom
 https://www.theregister.co.uk/devops/headlines.atom
 https://xkcd.com/rss.xml
+https://www.filmvandaag.nl/feeds/rss/nieuws
@@ -4,6 +4,7 @@ import (
 	"bufio"
 	"fmt"
 	"log"
+	"net/url"
 	"os"
 	"path/filepath"
 
@@ -20,15 +21,27 @@ func main() {
 
 	scanner := bufio.NewScanner(file)
 	for scanner.Scan() {
-		url := scanner.Text()
-		ext := filepath.Ext(url)
+		stringURL := scanner.Text()
+		ext := filepath.Ext(stringURL)
 
-		resp, err := rss.Read(url)
+		u, err := url.Parse(stringURL)
+		if err != nil {
+			panic(err)
+		}
+
+		fmt.Println("\n" + u.Host)
+
+		reddit := false
+		if u.Host == "reddit.com" {
+			reddit = true
+		}
+
+		resp, err := rss.Read(stringURL, reddit)
 		if err != nil {
 			fmt.Println(err)
 		}
 
-		if ext == ".atom" {
+		if ext == ".atom" || u.Host == "reddit.com" {
 			feed, err := rss.Atom(resp)
 			if err != nil {
 				fmt.Println(err)
@@ -50,7 +63,7 @@ func main() {
 			if err != nil {
 				fmt.Println(err)
 			}
-			fmt.Println(time.String() + " " + item.Title)
+			fmt.Println(time.String() + " " + item.Title + " " + item.Link)
 		}
 	}
 }
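Reassembled outside the diff, the extended example loop looks roughly like this (a sketch: the feed-list filename urls.txt and the import path are placeholders, and the non-Atom branch of the original example is omitted):

package main

import (
	"bufio"
	"fmt"
	"log"
	"net/url"
	"os"
	"path/filepath"

	rss "github.com/ungerik/go-rss" // assumed import path; adjust to this repo's module path
)

func main() {
	// "urls.txt" is a placeholder for the feed list changed in the first diff above.
	file, err := os.Open("urls.txt")
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()

	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		stringURL := scanner.Text()
		ext := filepath.Ext(stringURL)

		u, err := url.Parse(stringURL)
		if err != nil {
			panic(err)
		}
		fmt.Println("\n" + u.Host)

		// Only Reddit needs the custom User-Agent, so the flag is derived from the host.
		reddit := u.Host == "reddit.com"

		resp, err := rss.Read(stringURL, reddit)
		if err != nil {
			fmt.Println(err)
			continue
		}

		// Reddit serves Atom, so it is handled like the *.atom feeds.
		if ext == ".atom" || reddit {
			feed, err := rss.Atom(resp)
			if err != nil {
				fmt.Println(err)
				continue
			}
			fmt.Printf("%+v\n", feed)
		}
	}
}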
rss.go (24 lines changed)
@@ -52,24 +52,36 @@ func (d Date) MustFormat(format string) string {
 }
 
 //Read a string url and returns a Channel struct, error
-func Read(url string) (*http.Response, error) {
-	return ReadWithClient(url, http.DefaultClient)
+func Read(url string, reddit bool) (*http.Response, error) {
+	return ReadWithClient(url, http.DefaultClient, reddit)
 }
 
 //InsecureRead reads without certificate check
-func InsecureRead(url string) (*http.Response, error) {
+func InsecureRead(url string, reddit bool) (*http.Response, error) {
 	tr := &http.Transport{
 		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
 	}
 	client := &http.Client{Transport: tr}
 
-	return ReadWithClient(url, client)
+	return ReadWithClient(url, client, reddit)
 }
 
 //ReadWithClient a string url and custom client that must match the Fetcher interface
 //returns a Channel struct, error
-func ReadWithClient(url string, client Fetcher) (*http.Response, error) {
-	response, err := client.Get(url)
+func ReadWithClient(url string, client *http.Client, reddit bool) (*http.Response, error) {
+	req, err := http.NewRequest("GET", url, nil)
+	if err != nil {
+		return nil, err
+	}
+
+	// This header is required to read Reddit Feeds, see:
+	// https://www.reddit.com/r/redditdev/comments/5w60r1/error_429_too_many_requests_i_havent_made_many/
+	// Note: a random string is required to prevent occurrence of 'Too Many Requests' response.
+	if reddit {
+		req.Header.Set("user-agent", "hello:myappname:v0.0 (by /u/ocelost)")
+	}
+
+	response, err := client.Do(req)
 	if err != nil {
 		return nil, err
 	}
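For callers that bring their own HTTP client, ReadWithClient now takes a *http.Client (instead of the previous Fetcher interface) plus the reddit flag. A small usage sketch under the same import-path assumption:

package main

import (
	"fmt"
	"net/http"
	"time"

	rss "github.com/ungerik/go-rss" // assumed import path; adjust to this repo's module path
)

func main() {
	// A custom client, e.g. with a timeout; the library now uses client.Do(req).
	client := &http.Client{Timeout: 10 * time.Second}

	// Non-Reddit feed: pass false so no extra User-Agent header is set.
	resp, err := rss.ReadWithClient("https://xkcd.com/rss.xml", client, false)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}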