-
Notifications
You must be signed in to change notification settings - Fork 2
/
Program.cs
49 lines (41 loc) · 1.65 KB
/
Program.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
using System;
using Crawler.Implementation;
namespace Crawler
{
    /// <summary>
    /// Example entry point: crawls a website and downloads every image found,
    /// following only pages whose URL begins with the configured site root.
    /// </summary>
    class Program
    {
        static void Main()
        {
            // This example crawls this website and downloads
            // all images on the website and all pages that
            // begin with the url provided.
            const string website = "https://cajor.dk/";
            // These rules only allow urls that begin with the
            // website, which should be every page on the site.
            var crawlerRules = new ImageCrawlerRules(website);
            // Configure the processing rules fully before handing them to the
            // processor, so the configuration reads top-to-bottom.
            var crawlerProcessorRules = new ImageCrawlerProcesserRules();
            // Only download images with the following extensions.
            crawlerProcessorRules.AllowedExtensions.Add(".png");
            crawlerProcessorRules.AllowedExtensions.Add(".jpg");
            crawlerProcessorRules.AllowedExtensions.Add(".jpeg");
            crawlerProcessorRules.AllowedExtensions.Add(".gif");
            crawlerProcessorRules.AllowedExtensions.Add(".bmp");
            // We only want pictures larger than 300x300.
            // This is checked after downloading the image.
            crawlerProcessorRules.MinWidth = 300;
            crawlerProcessorRules.MinHeight = 300;
            // Create a processer with rules on how images are handled.
            // NOTE(review): output directory is hard-coded — adjust for your machine.
            var crawlerProcesser = new ImageCrawlerProcesser(crawlerProcessorRules, @"P:\Pictures\Crawler");
            // Create a crawler with the rules we have created above, and a thread
            // for each processor core.
            using (var crawler = new ImageCrawler(crawlerRules, crawlerProcesser, Environment.ProcessorCount))
            {
                // Start the crawler.
                crawler.Start(new Uri(website));
                // The crawler should now be crawling ;)
                Console.WriteLine("Crawler started, press a key to close...");
                // ReadKey matches the prompt ("press a key"); ReadLine would
                // have required the user to press Enter.
                Console.ReadKey();
            }
        }
    }
}