main.go
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package main

import (
	"runtime"

	log "github.com/Sirupsen/logrus"

	"github.com/lebarde/scrapix/scraplib"
	//"github.com/spf13/cobra"
)

func main() {
	// Use all CPUs; this has been the default since Go 1.5, so the call
	// only matters on older toolchains.
	runtime.GOMAXPROCS(runtime.NumCPU())

	// Logrus logs at Info level by default, so lower it or the Debug
	// output below never appears.
	log.SetLevel(log.DebugLevel)

	VERSION := "alpha 1"
	log.Debug("Scrapix v.", VERSION)
	// TODO: read the command line and act on it.

	// Read the config file and populate the data. The data structure is
	// a map of Url - see url.go (assumed shape sketched at the end of
	// this file).
	urls := scraplib.UrlsConfig(scraplib.ReadConfig())

	// Fetch every URL concurrently, one Crawl goroutine per entry.
	log.Debug("Launching ", len(urls), " requests...")
	for _, u := range urls {
		go scraplib.Crawl(u /*, chUrls, chFinished*/)
	}

	// Wait for every crawl to signal completion on its channel.
	for _, u := range urls {
		<-u.ChFinished
	}

	// We're done! Print the results.
	log.Debug("Found some urls:")
	for _, u := range urls {
		if u.Found {
			log.Debug(" - " + u.Address)
		}
	}

	// Check the error before deferring Close, so a failed InitDatabase
	// does not leave us closing a nil handle.
	db, err := scraplib.InitDatabase()
	checkErr(err)
	defer db.Close()

	// TODO: compare inside the database, then show the results.
	db.DbUpdateUrls(urls)
}

// checkErr panics on any non-nil error.
func checkErr(err error) {
	if err != nil {
		panic(err)
	}
}
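
// For reference, a minimal sketch of the shape this file assumes for
// scraplib.Url and the crawl handshake. The names and types below are
// guesses taken from the usage in main; the real definition lives in
// scraplib/url.go and may differ.
//
//	type Url struct {
//		Address    string    // address to fetch
//		Found      bool      // set by Crawl when the address resolved
//		ChFinished chan bool // Crawl signals here (send or close) when done
//	}
//
// For the Found flag set inside Crawl to be visible back in main, the
// urls map presumably holds pointers (*Url) or Url otherwise has
// reference semantics; the wait loop in main then simply receives once
// from each ChFinished.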