Commit 877a32b

fix: respect recursive rules (#26)
1 parent: ddf1fa4

7 files changed: 45 additions, 2 deletions


package.json

Lines changed: 2 additions & 0 deletions

```diff
@@ -26,6 +26,7 @@
     "gaxios": "^1.6.0",
     "is-absolute-url": "^2.1.0",
     "meow": "^5.0.0",
+    "server-destroy": "^1.0.1",
     "update-notifier": "^2.5.0"
   },
   "devDependencies": {
@@ -37,6 +38,7 @@
     "@types/mocha": "^5.2.5",
     "@types/nock": "^9.3.1",
     "@types/node": "^11.9.3",
+    "@types/server-destroy": "^1.0.0",
     "@types/sinon": "^7.0.5",
     "@types/update-notifier": "^2.5.0",
     "codecov": "^3.1.0",
```

src/index.ts

Lines changed: 5 additions & 2 deletions

```diff
@@ -1,6 +1,7 @@
 import {EventEmitter} from 'events';
 import * as gaxios from 'gaxios';
 import * as http from 'http';
+import enableDestroy = require('server-destroy');
 
 import {getLinks} from './links';
 
@@ -55,6 +56,7 @@ export class LinkChecker extends EventEmitter {
     if (!options.path.startsWith('http')) {
       const port = options.port || 5000 + Math.round(Math.random() * 1000);
       server = await this.startWebServer(options.path, port);
+      enableDestroy(server);
       options.path = `http://localhost:${port}`;
     }
     const results = await this.crawl(
@@ -64,7 +66,7 @@ export class LinkChecker extends EventEmitter {
       passed: results.filter(x => x.state === LinkState.BROKEN).length === 0
     };
     if (server) {
-      server.close();
+      server.destroy();
     }
     return result;
   }
@@ -141,7 +143,8 @@ export class LinkChecker extends EventEmitter {
     const urls = getLinks(data, opts.checkOptions.path);
     for (const url of urls) {
       // only crawl links that start with the same host
-      const crawl = url.startsWith(opts.checkOptions.path);
+      const crawl = opts.checkOptions.recurse! &&
+          url.startsWith(opts.checkOptions.path);
       await this.crawl({
         url,
         crawl,
```
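Two changes land here. First, `server.close()` only stops the listener and waits for open keep-alive sockets to drain, which can leave the process hanging after a local scan finishes; `server-destroy` adds a `destroy()` method that also ends any tracked sockets. Second, a link is now only crawled when `recurse` was requested *and* it stays on the same root, instead of same-root alone. A minimal standalone sketch of the server-destroy behavior (hypothetical example, not code from this repo):

```ts
import * as http from 'http';
import enableDestroy = require('server-destroy');

const server = http.createServer((req, res) => {
  res.end('ok');
});

// Patches the server: tracks every socket as it connects and adds
// server.destroy(), which closes the listener and ends all open
// connections at once.
enableDestroy(server);

server.listen(5000, () => {
  // ... issue requests against http://localhost:5000 ...
  // With plain server.close(), a keep-alive agent could keep the
  // process alive; destroy() shuts everything down immediately.
  server.destroy();
});
```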

test/fixtures/recurse/fake.html

Lines changed: 6 additions & 0 deletions

```diff
@@ -0,0 +1,6 @@
+<html>
+  <body>
+    <a href='/nothere.html'></a>
+    <a href='http://www.google.com'></a>
+  </body>
+</html>
```

test/fixtures/recurse/first.html

Lines changed: 6 additions & 0 deletions

```diff
@@ -0,0 +1,6 @@
+<html>
+  <body>
+    <a href='/'></a>
+    <a href='/second.html'></a>
+  </body>
+</html>
```

test/fixtures/recurse/index.html

Lines changed: 5 additions & 0 deletions

```diff
@@ -0,0 +1,5 @@
+<html>
+  <body>
+    <a href="first.html">go right over there</a>
+  </body>
+</html>
```

test/fixtures/recurse/second.html

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
<html>
2+
<body>
3+
<a href='http://fake.local'>Link to another castle</a>
4+
</body>
5+
</html>
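Taken together, the fixtures form a small site for the recursion tests: index.html links to first.html, which links back to / and on to second.html, whose only link leaves the site for http://fake.local (a host the test below intercepts with nock).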

test/test.ts

Lines changed: 16 additions & 0 deletions

```diff
@@ -75,4 +75,20 @@ describe('linkinator', () => {
     assert.strictEqual(
         results.links.filter(x => x.state === LinkState.OK).length, 2);
   });
+
+  it('should perform a recursive scan', async () => {
+    // This test is making sure that we do a recursive scan of links,
+    // but also that we don't follow links to another site
+    const scope = nock('http://fake.local')
+        .get('/')
+        .replyWithFile(200, 'test/fixtures/recurse/fake.html');
+    const results = await check({path: 'test/fixtures/recurse', recurse: true});
+    assert.strictEqual(results.links.length, 5);
+    scope.done();
+  });
+
+  it('should not recurse by default', async () => {
+    const results = await check({path: 'test/fixtures/recurse'});
+    assert.strictEqual(results.links.length, 2);
+  });
 });
```
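For context, this is how the new option is exercised from the public API; a sketch assuming the `check` and `LinkState` exports and the result shape used by test/test.ts above:

```ts
import {check, LinkState} from 'linkinator';

async function main() {
  // recurse: true makes the checker follow links that stay on the
  // same root; cross-site links are still checked, just not crawled.
  const results = await check({path: 'test/fixtures/recurse', recurse: true});
  console.log(`passed: ${results.passed}`);
  for (const link of results.links) {
    if (link.state === LinkState.BROKEN) {
      console.log(`broken: ${link.url}`);
    }
  }
}

main().catch(console.error);
```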
