Web Scraper

Go has become increasingly popular thanks to its flexibility and power, so today we will build a simple web scraper in Go. We start by making a GET request to the website we want to scrape.

package main

import (
        "fmt"
        "io/ioutil"
        "log"
        "net/http"
)

func main() {
        //Make the GET request
        resp, err := http.Get("https://www.example.com")
        Check(err)

        //close the connection stream when main returns
        defer resp.Body.Close()

        //read the response body into a byte slice
        resultsInBytes, err := ioutil.ReadAll(resp.Body)
        Check(err)

        //convert the bytes into a readable string
        results := string(resultsInBytes)

        //display the website's HTML
        fmt.Println(results)
}

//Check stops the program if an error occurred
func Check(err error) {
        if err != nil {
                log.Fatal(err)
        }
}
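If you save the code as, say, main.go (a filename of your choosing), running go run main.go should print the raw HTML of example.com to your terminal.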

We can make the scraper more useful by storing the downloaded page in a file. This time we will use the io package to copy the response body into the file, instead of the ioutil package.

package main

import (
        "fmt"
        "io"
        "log"
        "net/http"
        "os"
)

func main() {
        //create the file the page will be saved into
        f, err := os.Create("filename.html")
        Check(err)

        //close the file when main returns
        defer f.Close()

        //Make the GET request
        resp, err := http.Get("https://www.example.com")
        Check(err)

        //close the connection stream when main returns
        defer resp.Body.Close()

        //stream the response body straight into the file
        bytesWritten, err := io.Copy(f, resp.Body)
        Check(err)

        //report how much was saved
        fmt.Println("saved", bytesWritten, "bytes to filename.html")
}

//Check stops the program if an error occurred
func Check(err error) {
        if err != nil {
                log.Fatal(err)
        }
}
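As a small optional improvement that is not part of the snippets above, you could also check the HTTP status code right after the GET request, so that an error page is not silently printed or saved as if it were the content you wanted. A minimal sketch, placed just after the Check(err) call for the request, might look like this:

        //optional: make sure the server actually returned the page
        if resp.StatusCode != http.StatusOK {
                log.Fatal("unexpected status: ", resp.Status)
        }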

This post is also available on DEV.