Commit 3af8c9a2
Authored 10 years ago by ale
Parent: 7c1b1f70

    move URLInfo logic into the Crawler itself
Showing 3 changed files, with 47 additions and 35 deletions:

    cmd/crawl/crawl.go  (+1, -1)
    cmd/links/links.go  (+1, -1)
    crawler.go          (+45, -33)
cmd/crawl/crawl.go (+1, -1)

@@ -135,7 +135,7 @@ func main() {
     saver := NewSaveHandler(w)
 
-    crawler, err := crawl.NewCrawler("crawldb", seeds, scope, crawl.FetcherFunc(http.Get), saver)
+    crawler, err := crawl.NewCrawler("crawldb", seeds, scope, crawl.FetcherFunc(http.Get), crawl.NewRedirectHandler(saver))
     if err != nil {
         log.Fatal(err)
     }
cmd/links/links.go (+1, -1)

@@ -44,7 +44,7 @@ func main() {
         crawl.NewSeedScope(seeds),
     }
 
-    crawler, err := crawl.NewCrawler("crawldb", seeds, scope, crawl.FetcherFunc(http.Get), crawl.HandlerFunc(extractLinks))
+    crawler, err := crawl.NewCrawler("crawldb", seeds, scope, crawl.FetcherFunc(http.Get), crawl.NewRedirectHandler(crawl.HandlerFunc(extractLinks)))
     if err != nil {
         log.Fatal(err)
     }
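Both commands change in the same way: NewCrawler no longer wraps the supplied handler internally (the old standardPageHandler), so redirect-following is now composed explicitly by the caller. A minimal sketch of the new wiring, assuming the import path git.autistici.org/ale/crawl and a hypothetical logging leaf handler; everything else (MustParseURLs, NewSeedScope, Scope, FetcherFunc, HandlerFunc, NewRedirectHandler, Run) is taken from this commit's diff:

package main

import (
    "log"
    "net/http"

    "git.autistici.org/ale/crawl" // assumed import path
)

func main() {
    seeds := crawl.MustParseURLs([]string{"http://example.com/"})
    scope := []crawl.Scope{
        crawl.NewSeedScope(seeds),
    }

    // Hypothetical leaf handler; behind NewRedirectHandler it is only
    // invoked for responses with HTTP status 200.
    leaf := crawl.HandlerFunc(func(c *crawl.Crawler, u string, depth int, resp *http.Response, err error) error {
        log.Printf("crawled %s (depth %d)", u, depth)
        return nil
    })

    // Redirect-following is now opt-in: wrap the leaf handler explicitly.
    crawler, err := crawl.NewCrawler("crawldb", seeds, scope,
        crawl.FetcherFunc(http.Get), crawl.NewRedirectHandler(leaf))
    if err != nil {
        log.Fatal(err)
    }
    crawler.Run(3)
}

Without the crawl.NewRedirectHandler wrapper, the leaf handler is invoked for every fetch result, including fetch errors and non-200 responses — that is the new low-level contract of the Handler interface shown in crawler.go below.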
crawler.go (+45, -33)

@@ -5,6 +5,7 @@ import (
     "encoding/gob"
     "errors"
     "fmt"
+    "io"
     "log"
     "net/http"
     "net/url"

@@ -76,6 +77,7 @@ type URLInfo struct {
     Error error
 }
 
+// A Fetcher retrieves contents from remote URLs.
 type Fetcher interface {
     Fetch(string) (*http.Response, error)
 }

@@ -86,6 +88,9 @@ func (f FetcherFunc) Fetch(u string) (*http.Response, error) {
     return f(u)
 }
 
+// A Handler processes crawled contents. Any errors returned by public
+// implementations of this interface are considered permanent and will
+// not cause the URL to be fetched again.
 type Handler interface {
     Handle(*Crawler, string, int, *http.Response, error) error
 }

@@ -96,7 +101,7 @@ func (f HandlerFunc) Handle(db *Crawler, u string, depth int, resp *http.Respons
     return f(db, u, depth, resp, err)
 }
 
-// UrlDb is the database of crawled URLs (either pending or done).
+// The Crawler object contains the crawler state.
 type Crawler struct {
     db    *gobDB
     seeds []*url.URL

@@ -107,19 +112,12 @@ type Crawler struct {
     enqueueMx sync.Mutex
 }
 
-type QueuePair struct {
+type queuePair struct {
     Key   []byte
     URL   string
     Depth int
 }
 
-// Update this URLInfo entry in the crawl database.
-func (c *Crawler) UpdateURL(info *URLInfo) error {
-    wo := levigo.NewWriteOptions()
-    defer wo.Close()
-    return c.db.PutObj(wo, []byte(fmt.Sprintf("url/%s", info.URL)), info)
-}
-
 // Enqueue a (possibly new) URL for processing.
 func (c *Crawler) Enqueue(u *url.URL, depth int) {
     // Normalize the URL.

@@ -153,13 +151,13 @@ func (c *Crawler) Enqueue(u *url.URL, depth int) {
     // URL will fail.
     wo := levigo.NewWriteOptions()
     defer wo.Close()
-    c.db.PutObj(wo, qkey, &QueuePair{Key: qkey, URL: urlStr, Depth: depth})
+    c.db.PutObj(wo, qkey, &queuePair{Key: qkey, URL: urlStr, Depth: depth})
     c.db.PutObj(wo, ukey, &info)
 }
 
 // Scan the queue for URLs until there are no more.
-func (c *Crawler) process() <-chan QueuePair {
-    ch := make(chan QueuePair)
+func (c *Crawler) process() <-chan queuePair {
+    ch := make(chan queuePair)
     go func() {
         queuePrefix := []byte("queue/")
         for range time.Tick(2 * time.Second) {

@@ -173,7 +171,7 @@ func (c *Crawler) process() <-chan QueuePair {
             iter := c.db.NewPrefixIterator(ro, queuePrefix)
             for ; iter.Valid(); iter.Next() {
-                var p QueuePair
+                var p queuePair
                 if err := iter.Value(&p); err != nil {
                     continue
                 }

@@ -195,27 +193,42 @@ func (c *Crawler) process() <-chan QueuePair {
 }
 
 // Main worker loop.
-func (c *Crawler) urlHandler(queue <-chan QueuePair) {
+func (c *Crawler) urlHandler(queue <-chan queuePair) {
     for p := range queue {
+        // Retrieve the URLInfo object from the crawl db.
+        // Ignore errors, we can work with an empty object.
+        urlkey := []byte(fmt.Sprintf("url/%s", p.URL))
+        var info URLInfo
+        ro := levigo.NewReadOptions()
+        c.db.GetObj(ro, urlkey, &info)
+        info.CrawledAt = time.Now()
+        info.URL = p.URL
+
         // Fetch the URL and handle it. Make sure to Close the
         // response body (even if it gets replaced in the
         // Response object).
         fmt.Printf("%s\n", p.URL)
         httpResp, httpErr := c.fetcher.Fetch(p.URL)
-        respBody := httpResp.Body
-        err := c.handler.Handle(c, p.URL, p.Depth, httpResp, httpErr)
-        if httpErr == nil {
-            respBody.Close()
-        }
+        var respBody io.ReadCloser
+        if httpErr == nil {
+            respBody = httpResp.Body
+            info.StatusCode = httpResp.StatusCode
+        }
 
-        // Remove the URL from the queue if the handler was successful.
-        if err == nil {
-            wo := levigo.NewWriteOptions()
+        // Invoke the handler (even if the fetcher errored out).
+        info.Error = c.handler.Handle(c, p.URL, p.Depth, httpResp, httpErr)
+
+        wo := levigo.NewWriteOptions()
+        if httpErr == nil {
+            respBody.Close()
+
+            // Remove the URL from the queue if the fetcher was successful.
             c.db.Delete(wo, p.Key)
-            wo.Close()
         } else {
-            log.Printf("error handling %s: %v", p.URL, err)
+            log.Printf("error retrieving %s: %v", p.URL, httpErr)
         }
+        c.db.PutObj(wo, urlkey, &info)
+        wo.Close()
     }
 }

@@ -225,7 +238,7 @@ func MustParseURLs(urls []string) []*url.URL {
     for _, s := range urls {
         u, err := url.Parse(s)
         if err != nil {
-            log.Fatalf("error parsing seed \"%s\": %v", s, err)
+            log.Fatalf("error parsing URL \"%s\": %v", s, err)
         }
         parsed = append(parsed, u)
     }

@@ -242,7 +255,7 @@ func NewCrawler(path string, seeds []*url.URL, scopes []Scope, f Fetcher, h Hand
     c := &Crawler{
         db:      db,
         fetcher: f,
-        handler: &standardPageHandler{h},
+        handler: h,
         seeds:   seeds,
         scopes:  scopes,
     }

@@ -270,16 +283,12 @@ func (c *Crawler) Run(concurrency int) {
     wg.Wait()
 }
 
 // Standard page handler, follows redirects and invokes a child
 // handler when status == 200.
-type standardPageHandler struct {
+type redirectHandler struct {
     h Handler
 }
 
-func (wrap *standardPageHandler) Handle(c *Crawler, u string, depth int, resp *http.Response, err error) error {
-    info := &URLInfo{URL: u, CrawledAt: time.Now()}
+func (wrap *redirectHandler) Handle(c *Crawler, u string, depth int, resp *http.Response, err error) error {
     if err == nil {
-        info.StatusCode = resp.StatusCode
         if resp.StatusCode == 200 {
             err = wrap.h.Handle(c, u, depth, resp, err)
         } else if resp.StatusCode > 300 && resp.StatusCode < 400 {

@@ -296,8 +305,11 @@ func (wrap *standardPageHandler) Handle(c *Crawler, u string, depth int, resp *h
             err = errors.New(resp.Status)
         }
     }
-    info.Error = err
-
-    c.UpdateURL(info)
-    return nil
+    return err
 }
+
+// NewRedirectHandler returns a Handler that follows HTTP redirects,
+// and will call the wrapped handler on every request with HTTP status 200.
+func NewRedirectHandler(wrap Handler) Handler {
+    return &redirectHandler{wrap}
+}
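The crawler.go changes move the URLInfo bookkeeping out of the wrapper handler and into the worker loop itself: urlHandler now loads the url/<URL> record, stamps CrawledAt and StatusCode, stores the handler's return value in URLInfo.Error, and deletes the queue entry only when the fetch itself succeeded. In other words, transient fetch errors leave the URL queued for a later retry, while a handler's error is recorded as permanent. A sketch of a leaf handler written against this contract (statusHandler and its package are illustrative, not part of the commit; import path assumed as above):

package crawlhandlers

import (
    "io"
    "io/ioutil"
    "net/http"

    "git.autistici.org/ale/crawl" // assumed import path
)

// statusHandler is a hypothetical leaf Handler. Behind NewRedirectHandler
// it only ever sees status-200 responses; used directly, it must also
// cope with fetch errors.
func statusHandler(c *crawl.Crawler, u string, depth int, resp *http.Response, err error) error {
    if err != nil {
        // Fetch error: the worker loop logs it and keeps the URL
        // queued for retry, so there is nothing to record here.
        return nil
    }
    // Drain the body so the connection can be reused; the worker loop
    // closes it after the handler returns.
    _, copyErr := io.Copy(ioutil.Discard, resp.Body)
    // A non-nil return value is stored in URLInfo.Error and treated as
    // permanent: the URL will not be fetched again.
    return copyErr
}

// Compile-time check that statusHandler satisfies the Handler interface.
var _ crawl.Handler = crawl.HandlerFunc(statusHandler)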