-
-
Notifications
You must be signed in to change notification settings - Fork 100
/
html_page_parser.go
58 lines (44 loc) · 1019 Bytes
/
html_page_parser.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
package main
import (
"bytes"
"net/url"
"github.com/yhat/scrape"
"golang.org/x/net/html"
"golang.org/x/net/html/atom"
)
// htmlPageParser turns text/html response bodies into pages, delegating
// link extraction to its linkFinder.
type htmlPageParser struct {
// linkFinder extracts links from a parsed HTML document tree.
linkFinder linkFinder
}
// newHtmlPageParser creates an htmlPageParser that uses f to extract links
// from parsed HTML documents.
func newHtmlPageParser(f linkFinder) *htmlPageParser {
	p := htmlPageParser{linkFinder: f}
	return &p
}
// Parse parses an HTML document fetched from u and returns a page holding
// the fragment identifiers (id/name attributes) defined in the document and
// the links found in it. Bodies whose content type is not "text/html" are
// ignored and yield (nil, nil). A parse failure of the body or of a
// <base href> value is returned as an error.
//
// Unlike a naive implementation, Parse does not mutate the caller's *url.URL:
// the fragment is stripped on a local copy.
func (p htmlPageParser) Parse(u *url.URL, typ string, body []byte) (page, error) {
	if typ != "text/html" {
		return nil, nil
	}

	n, err := html.Parse(bytes.NewReader(body))
	if err != nil {
		return nil, err
	}

	// Work on a copy so stripping the fragment does not alter the
	// caller-owned URL value.
	pageURL := *u
	pageURL.Fragment = ""

	// Collect every id and name attribute value as a valid fragment target.
	frs := map[string]struct{}{}
	scrape.FindAllNested(n, func(n *html.Node) bool {
		for _, a := range []string{"id", "name"} {
			if s := scrape.Attr(n, a); s != "" {
				frs[s] = struct{}{}
			}
		}
		return false
	})

	// A <base> element, if present, changes the URL that relative links
	// are resolved against.
	base := &pageURL
	if bn, ok := scrape.Find(n, func(n *html.Node) bool {
		return n.DataAtom == atom.Base
	}); ok {
		href, err := url.Parse(scrape.Attr(bn, "href"))
		if err != nil {
			return nil, err
		}
		base = base.ResolveReference(href)
	}

	return newHtmlPage(&pageURL, frs, p.linkFinder.Find(n, base)), nil
}