Don't overthink crawlers: a crawler just simulates another client's request and response, so the key to writing a crawler in Go is writing the request.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"time"
)

func Get(url string) string {
	client := &http.Client{Timeout: 5 * time.Second} // 5-second timeout, the equivalent of the timeout parameter in other crawler libraries

	// Build the request first so headers can be attached before it is sent;
	// POST works the same way.
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		panic(err)
	}
	// Add header fields; cookies go straight into the request headers as well.
	req.Header.Add("Cookie", "xxxxxx")
	req.Header.Add("User-Agent", "xxx")
	req.Header.Add("X-Requested-With", "xxxx")

	resp, err := client.Do(req) // send the request
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// On success, read the response body in 512-byte chunks until EOF.
	var buffer [512]byte
	result := bytes.NewBuffer(nil)
	for {
		n, err := resp.Body.Read(buffer[0:])
		result.Write(buffer[0:n])
		if err == io.EOF {
			break
		} else if err != nil {
			panic(err)
		}
	}
	return result.String()
}

func main() {
	fmt.Println(Get("http://www.baidu.com"))
}
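Writing the raw Cookie string into the header works; net/http also offers a structured alternative via http.Cookie and Request.AddCookie. A minimal sketch (the cookie name and value here are placeholders):

package main

import (
	"fmt"
	"net/http"
)

func main() {
	req, err := http.NewRequest("GET", "http://www.baidu.com", nil)
	if err != nil {
		panic(err)
	}
	// AddCookie serializes the cookie into the request's "Cookie" header.
	req.AddCookie(&http.Cookie{Name: "session_id", Value: "xxxxxx"}) // placeholder name/value
	fmt.Println(req.Header.Get("Cookie"))                            // prints: session_id=xxxxxx
}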
// The rest of the file (package clause and imports) is the same as above, so it is omitted here.
func Post(url string, data interface{}, contentType string) string {
	client := &http.Client{Timeout: 5 * time.Second}

	// Serialize the payload to JSON and send it as the request body.
	jsonStr, _ := json.Marshal(data)
	resp, err := client.Post(url, contentType, bytes.NewBuffer(jsonStr))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	result, _ := ioutil.ReadAll(resp.Body)
	return string(result)
}
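For completeness, a hedged usage sketch of the Post helper: the payload struct, its fields, and the endpoint are made-up examples (httpbin.org/post simply echoes the request back), and this main stands in for the one shown earlier.

// Hypothetical payload type for illustration; any JSON-serializable value works.
type loginForm struct {
	User string `json:"user"`
	Pass string `json:"pass"`
}

func main() {
	// httpbin.org/post echoes the request, which is convenient for testing.
	body := Post("https://httpbin.org/post", loginForm{User: "alice", Pass: "secret"}, "application/json")
	fmt.Println(body)
}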