Crawler
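
The entry point below reads the start URL as the first positional argument, selects a sequential or parallel crawling strategy via flags, and prints how many pages were crawled.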

package main

import (
    "flag"
    "fmt"
    "log"

    "webcrawler/crawler"
    "webcrawler/model"
    "webcrawler/urlutils"
)

func main() {
    strategy := flag.String("strat", "par", "par for parallel OR seq for sequential crawling strategy")
    routineMultiplier := flag.Int("m", 1, "Goroutine multiplier (1x logical CPUs by default); only used by the parallel strategy")
    // Parse flags before touching positional arguments: the flag package
    // stops at the first non-flag argument, so parsing must happen first.
    flag.Parse()

    if flag.NArg() < 1 {
        log.Fatal("URL must be provided as the first positional argument")
    }

    baseURL := flag.Arg(0)
    page := model.NewBasePage(baseURL)
    urlutils.BASE_URL = baseURL

    pages := crawler.Crawl(&page, *strategy, *routineMultiplier)
    fmt.Printf("Crawled: %d\n", len(pages))
}
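
Because Go's flag package stops parsing at the first non-flag argument, the flags must come before the URL on the command line, e.g. -strat seq -m 2 followed by the start URL. The -m multiplier only takes effect with the parallel strategy.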
