main.go
package main

import (
	"fmt"
	"net/http"
	"net/url"
	"os"
	"strings"

	"golang.org/x/net/html"
)

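// getHref returns the value of the href attribute on the token, if one is present.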
func getHref(t html.Token) (ok bool, href string) {
	for _, a := range t.Attr {
		if a.Key == "href" {
			href = a.Val
			ok = true
		}
	}
	return
}

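// getAnchorTags reports whether the token is an anchor (<a>) tag.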
func getAnchorTags(t html.Token) (isAnchor bool) {
	isAnchor = t.Data == "a"
	return isAnchor
}

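// filterDomainLink reports whether link has the same hostname as crawlUrl.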
func filterDomainLink(crawlUrl string, link string) (ok bool) {
	u, err1 := url.Parse(crawlUrl)
	l, err2 := url.Parse(link)
	if err1 != nil || err2 != nil {
		fmt.Println("ERROR: Failed to parse URLs for domain comparison")
		return
	}
	urlDomain := u.Hostname()
	linkDomain := l.Hostname()
	if urlDomain == linkDomain {
		ok = true
	}
	return ok
}

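// crawl fetches crawlUrl, scans the HTML for anchor tags, and sends same-domain
// links on ch. It signals chFinished when done, whether or not the fetch succeeded.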
func crawl(crawlUrl string, ch chan string, chFinished chan bool) {
	resp, err := http.Get(crawlUrl)
	// Always signal completion, even if the fetch failed.
	defer func() {
		chFinished <- true
	}()
	if err != nil {
		fmt.Println("ERROR: Failed to crawl \""+crawlUrl+"\":", err)
		return
	}
	b := resp.Body
	defer b.Close()
	z := html.NewTokenizer(b)
	for {
		tt := z.Next()
		switch {
		case tt == html.ErrorToken:
			// End of document (or a parse error): stop tokenizing.
			return
		case tt == html.StartTagToken:
			t := z.Token()
			anchorTags := getAnchorTags(t)
			if !anchorTags {
				continue
			}
			ok, link := getHref(t)
			if !ok {
				continue
			}
			// Keep only absolute http(s) links that stay on the same domain.
			hasProto := strings.HasPrefix(link, "http")
			if hasProto {
				isSameDomain := filterDomainLink(crawlUrl, link)
				if isSameDomain {
					ch <- link
				}
			}
		}
	}
}

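// main crawls every URL passed on the command line concurrently, deduplicates
// the same-domain links found, and prints them.
//
// Usage: go run main.go <url> [<url> ...]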
func main() {
	linkUrls := make(map[string]bool)
	pageUrls := os.Args[1:]
	fmt.Println("\nPage", pageUrls)
	chUrls := make(chan string)
	chFinished := make(chan bool)
	// Kick off one crawler goroutine per URL.
	for _, crawlUrl := range pageUrls {
		go crawl(crawlUrl, chUrls, chFinished)
	}
	// Collect links until every crawler has signalled completion.
	for c := 0; c < len(pageUrls); {
		select {
		case crawlUrl := <-chUrls:
			linkUrls[crawlUrl] = true
		case <-chFinished:
			c++
		}
	}
	fmt.Println("\nTotal", len(linkUrls), "links on this page:")
	for crawlUrl := range linkUrls {
		fmt.Println(" - " + crawlUrl)
	}
	close(chUrls)
}