parse_url.go
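// Command parse_url is a small concurrent crawler: for each seed URL passed
// on the command line it fetches the page, tokenizes the HTML, and collects
// href attribute values from matching start tags.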
package main

import (
	"fmt"
	"net/http"
	"os"
	"strings"

	"golang.org/x/net/html"
)
// Helper function to pull the href attribute from a Token
func getHref(t html.Token) (ok bool, href string) {
	// Iterate over all of the Token's attributes until we find an "href"
	for _, a := range t.Attr {
		if a.Key == "href" {
			href = a.Val
			ok = true
		}
	}
	// "bare" return will return the variables (ok, href) as defined in
	// the function definition
	return
}
// getX mirrors getHref: it also pulls the "href" attribute from a Token.
func getX(t html.Token) (ok bool, value string) {
	for _, a := range t.Attr {
		if a.Key == "href" {
			value = a.Val
			ok = true
		}
	}
	return
}
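// Sketch (not part of the original program): getHref and getX both scan
// Token.Attr for a fixed key, so a single helper could serve both call sites.
// The name getAttr and the key parameter are editorial additions.
func getAttr(t html.Token, key string) (ok bool, val string) {
	for _, a := range t.Attr {
		if a.Key == key {
			return true, a.Val
		}
	}
	return false, ""
}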
// Extract all http** links from a given webpage
func crawl(url string, ch chan string, chFinished chan bool) {
	resp, err := http.Get(url)

	defer func() {
		// Notify that we're done after this function
		chFinished <- true
	}()

	if err != nil {
		fmt.Println("ERROR: Failed to crawl \"" + url + "\"")
		return
	}

	b := resp.Body
	defer b.Close() // close Body when the function returns

	z := html.NewTokenizer(b)
	for {
		tt := z.Next()
		switch {
		case tt == html.ErrorToken:
			// End of the document, we're done
			return
		case tt == html.StartTagToken:
			t := z.Token()
			if t.Data != "p" {
				continue
			}
			fmt.Printf("t: %+v\n", t)

			ok, v := getX(t)
			if !ok {
				continue
			}

			// Make sure the url begins with http**
			if !strings.HasPrefix(v, "http") {
				continue
			}
			ch <- v
		}
	}
}
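// Sketch (not called anywhere): the doc comment on crawl says the goal is to
// extract links, and href attributes usually live on <a> start tags rather
// than on <p> tags. A tag filter restricted to anchors could look like this;
// extractAnchorHref is a hypothetical name.
func extractAnchorHref(t html.Token) (ok bool, href string) {
	if t.Type != html.StartTagToken || t.Data != "a" {
		return false, ""
	}
	return getHref(t)
}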
func main() {
	foundUrls := make(map[string]bool)
	seedUrls := os.Args[1:]

	// Channels
	chUrls := make(chan string)
	chFinished := make(chan bool)

	// Kick off the crawl process (concurrently)
	for _, url := range seedUrls {
		go crawl(url, chUrls, chFinished)
	}

	// Subscribe to both channels
	for c := 0; c < len(seedUrls); {
		select {
		case url := <-chUrls:
			foundUrls[url] = true
		case <-chFinished:
			c++
		}
	}

	// We're done! Print the results...
	fmt.Println("\nFound", len(foundUrls), "unique urls:\n")
	for url := range foundUrls {
		fmt.Println(" - " + url)
	}

	close(chUrls)
}
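// Example usage (the URLs below are illustrative):
//
//	go run parse_url.go https://example.com https://example.org
//
// Each argument in os.Args[1:] is crawled in its own goroutine; hrefs sent on
// chUrls are de-duplicated in foundUrls, and main exits once every crawl has
// signalled chFinished.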