Add fetcher, support for downloading forum pages
This commit is contained in:
58
domains/fetcher/v1/request.go
Normal file
58
domains/fetcher/v1/request.go
Normal file
@@ -0,0 +1,58 @@
|
||||
// NNM-Club torrent files mass downloader
|
||||
// Created for Uploaders group
|
||||
// Copyright (c) 2012-2019 Vladimir "fat0troll" Hodakov
|
||||
|
||||
package fetcherv1
|
||||
|
||||
import (
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"golang.org/x/text/encoding/charmap"
|
||||
"golang.org/x/text/transform"
|
||||
)
|
||||
|
||||
func downloadAdditionalPages() {
|
||||
for i := range forumPagesLinks {
|
||||
forumPage, _ := strconv.Atoi(i)
|
||||
dlog.Info().Int("номер страницы", forumPage).Msg("Скачивается ещё одна страница форума")
|
||||
pageFile, err := dumpForumPage("https://" + c.Config.URL + "/forum/" + forumPagesLinks[i])
|
||||
if err != nil {
|
||||
dlog.Fatal().Err(err).Msg("Не удалось загрузить страницу форума")
|
||||
}
|
||||
|
||||
_ = setQuerier(pageFile, "forumPage", forumPage)
|
||||
}
|
||||
}
|
||||
|
||||
func dumpForumPage(url string) (string, error) {
|
||||
req, err := http.NewRequest(http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
for i := range c.Cookies {
|
||||
req.AddCookie(c.Cookies[i])
|
||||
}
|
||||
|
||||
resp, err := dclient.Do(req)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
tempF, err := ioutil.TempFile("", "massdl-*")
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer tempF.Close()
|
||||
|
||||
respInUTF8 := transform.NewReader(resp.Body, charmap.Windows1251.NewDecoder())
|
||||
_, err = io.Copy(tempF, respInUTF8)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return tempF.Name(), nil
|
||||
}
|
Reference in New Issue
Block a user