Restructured code
This commit is contained in:
parent
6ed4f74b88
commit
a3cf924918
|
@ -13,5 +13,4 @@
|
|||
*.out
|
||||
|
||||
# Dependency directories (remove the comment below to include it)
|
||||
# vendor/
|
||||
|
||||
vendor/
|
||||
|
|
|
@ -0,0 +1,57 @@
|
|||
package mangadex
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
// cacheEnabled controls whether API responses are read from and
// written to the on-disk cache. Caching is on by default.
var cacheEnabled = true

// EnableCache turns on reading and writing of the response cache.
func EnableCache() {
	cacheEnabled = true
}

// DisableCache turns off reading and writing of the response cache.
func DisableCache() {
	cacheEnabled = false
}
|
||||
|
||||
func getCachePath() string {
|
||||
userCacheDir, errCache := os.UserCacheDir()
|
||||
if errCache != nil {
|
||||
logrus.Fatalf("Unable to retrieve cache directory: %s", errCache)
|
||||
}
|
||||
return filepath.Join(userCacheDir, "go-mangadex")
|
||||
}
|
||||
|
||||
func getCachePathFor(mangadexURL *url.URL) string {
|
||||
fileName := getCacheFilename(mangadexURL)
|
||||
return filepath.Join(getCachePath(), fileName)
|
||||
}
|
||||
|
||||
func getCacheFilename(mangadexURL *url.URL) string {
|
||||
return strings.ReplaceAll(mangadexURL.Path, "/", "_")
|
||||
}
|
||||
|
||||
func cacheExists(mangadexURL *url.URL) bool {
|
||||
stat, err := os.Stat(getCachePathFor(mangadexURL))
|
||||
if os.IsNotExist(err) {
|
||||
return false
|
||||
}
|
||||
return !stat.IsDir()
|
||||
}
|
||||
|
||||
func initCache() {
|
||||
cachePath := getCachePath()
|
||||
_, err := os.Stat(cachePath)
|
||||
if os.IsNotExist(err) {
|
||||
logrus.Infof("Cache directory does not exist, creating. [%s]", cachePath)
|
||||
errCachePath := os.MkdirAll(cachePath, 0755)
|
||||
if errCachePath != nil {
|
||||
logrus.Errorf("Cache directory couldn't be generated, caching will likely fail: %s", errCachePath)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,91 @@
|
|||
package mangadex
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
// APIBaseURL is the root of the MangaDex v2 REST API; endpoint paths
// are appended to it when building request URLs.
const APIBaseURL = "https://api.mangadex.org/v2/"
|
||||
|
||||
func doRequest(method string, requestURL string) (*MangaDexResponse, error) {
|
||||
result := MangaDexResponse{}
|
||||
parsedURL, errParse := url.Parse(requestURL)
|
||||
if errParse != nil {
|
||||
return &result, errParse
|
||||
}
|
||||
|
||||
if cacheEnabled {
|
||||
initCache()
|
||||
if cacheExists(parsedURL) {
|
||||
cacheData, errRead := ioutil.ReadFile(getCachePathFor(parsedURL))
|
||||
if errRead != nil {
|
||||
logrus.Fatalf("Error reading cache for URL: %s [%s]: %s", parsedURL.String(), getCacheFilename(parsedURL), errRead)
|
||||
}
|
||||
errJSON := json.Unmarshal(cacheData, &result)
|
||||
if errJSON != nil {
|
||||
logrus.Fatalf("Error parsing JSON from cache: %s: %s", getCacheFilename(parsedURL), errJSON)
|
||||
}
|
||||
logrus.Debugf("Request loaded from cache: %s", parsedURL.String())
|
||||
return &result, nil
|
||||
} else {
|
||||
logrus.Debugf("Cache not found for %s", parsedURL.String())
|
||||
}
|
||||
}
|
||||
|
||||
logrus.Tracef("Making request %s", parsedURL)
|
||||
request := http.Request{
|
||||
Method: method,
|
||||
URL: parsedURL,
|
||||
Header: map[string][]string{
|
||||
"User-Agent": {"go-mangadex/0.0.1"},
|
||||
},
|
||||
}
|
||||
|
||||
response, errResponse := http.DefaultClient.Do(&request)
|
||||
if errResponse != nil {
|
||||
logrus.Tracef("Request error: %s", errResponse)
|
||||
return &result, errResponse
|
||||
}
|
||||
|
||||
if response.StatusCode != 200 {
|
||||
logrus.Tracef("Response status code not successful: %d", response.StatusCode)
|
||||
logrus.Tracef("Response body: %s", response.Body)
|
||||
return &result, errors.New(strconv.Itoa(response.StatusCode))
|
||||
}
|
||||
|
||||
logrus.Tracef("Response status code: %s", response.Status)
|
||||
|
||||
if response.Body != nil {
|
||||
defer response.Body.Close()
|
||||
}
|
||||
|
||||
body, errRead := ioutil.ReadAll(response.Body)
|
||||
if errRead != nil {
|
||||
logrus.Errorf("Error reading body: %s", errRead)
|
||||
return &result, errRead
|
||||
}
|
||||
|
||||
logrus.Tracef("Response body: %s", body)
|
||||
|
||||
// Write cache
|
||||
logrus.Infof("Writting cache for %s", parsedURL.String())
|
||||
logrus.Infof("Writting cache to: %s", getCacheFilename(parsedURL))
|
||||
errWriteCache := ioutil.WriteFile(getCachePathFor(parsedURL), body, 0644)
|
||||
if errWriteCache != nil {
|
||||
logrus.Warnf("Can't write to cache: %s", errWriteCache)
|
||||
}
|
||||
|
||||
errJSON := json.Unmarshal(body, &result)
|
||||
if errJSON != nil {
|
||||
logrus.Errorf("Error parsing body: %s", errJSON)
|
||||
return &result, errJSON
|
||||
}
|
||||
|
||||
return &result, nil
|
||||
}
|
256
main.go
256
main.go
|
@ -1,256 +0,0 @@
|
|||
package mangadex
|
||||
|
||||
import (
|
||||
"crypto/sha1"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
const APIBaseURL = "https://api.mangadex.org/v2/"
|
||||
|
||||
var cacheEnabled bool = true
|
||||
|
||||
func EnableCache() {
|
||||
cacheEnabled = true
|
||||
}
|
||||
|
||||
func DisableCache() {
|
||||
cacheEnabled = false
|
||||
}
|
||||
|
||||
func getCachePath() string {
|
||||
userCacheDir, errCache := os.UserCacheDir()
|
||||
if errCache != nil {
|
||||
logrus.Fatalf("Unable to retrieve cache directory: %s", errCache)
|
||||
}
|
||||
return filepath.Join(userCacheDir, "go-mangadex")
|
||||
}
|
||||
|
||||
func getCachePathFor(mangadexURL string) string {
|
||||
fileName := getCacheFilename(mangadexURL)
|
||||
return filepath.Join(getCachePath(), fileName)
|
||||
}
|
||||
|
||||
// getCacheFilename maps a request URL to a cache file name by hashing
// the URL with SHA-1 and returning the hex digest. The hash is used
// only to obtain a filesystem-safe, collision-resistant name, not for
// security.
func getCacheFilename(mangadexURL string) string {
	// hash.Hash.Write is documented to never return an error, so the
	// previous error check (and its logrus call) was dead code;
	// sha1.Sum sidesteps the Write call entirely.
	digest := sha1.Sum([]byte(mangadexURL))
	return fmt.Sprintf("%x", digest)
}
|
||||
|
||||
func cacheExists(mangadexURL string) bool {
|
||||
stat, err := os.Stat(getCachePathFor(mangadexURL))
|
||||
if os.IsNotExist(err) {
|
||||
return false
|
||||
}
|
||||
return !stat.IsDir()
|
||||
}
|
||||
|
||||
func initCache() {
|
||||
cachePath := getCachePath()
|
||||
_, err := os.Stat(cachePath)
|
||||
if os.IsNotExist(err) {
|
||||
logrus.Infof("Cache directory does not exist, creating. [%s]", cachePath)
|
||||
errCachePath := os.MkdirAll(cachePath, 0755)
|
||||
if errCachePath != nil {
|
||||
logrus.Errorf("Cache directory couldn't be generated, caching will likely fail: %s", errCachePath)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func doRequest(method string, requestURL string) (*MangaDexResponse, error) {
|
||||
result := MangaDexResponse{}
|
||||
parsedURL, errParse := url.Parse(requestURL)
|
||||
if errParse != nil {
|
||||
return &result, errParse
|
||||
}
|
||||
|
||||
if cacheEnabled {
|
||||
initCache()
|
||||
if cacheExists(parsedURL.String()) {
|
||||
cacheData, errRead := ioutil.ReadFile(getCachePathFor(parsedURL.String()))
|
||||
if errRead != nil {
|
||||
logrus.Fatalf("Error reading cache for URL: %s [%s]: %s", parsedURL.String(), getCacheFilename(parsedURL.String()), errRead)
|
||||
}
|
||||
errJSON := json.Unmarshal(cacheData, &result)
|
||||
if errJSON != nil {
|
||||
logrus.Fatalf("Error parsing JSON from cache: %s: %s", getCacheFilename(parsedURL.String()), errJSON)
|
||||
}
|
||||
logrus.Debugf("Request loaded from cache: %s", parsedURL.String())
|
||||
return &result, nil
|
||||
} else {
|
||||
logrus.Debugf("Cache not found for %s", parsedURL.String())
|
||||
}
|
||||
}
|
||||
|
||||
logrus.Tracef("Making request %s", parsedURL)
|
||||
request := http.Request{
|
||||
Method: method,
|
||||
URL: parsedURL,
|
||||
Header: map[string][]string{
|
||||
"User-Agent": {"go-mangadex/0.0.1"},
|
||||
},
|
||||
}
|
||||
|
||||
response, errResponse := http.DefaultClient.Do(&request)
|
||||
if errResponse != nil {
|
||||
logrus.Tracef("Request error: %s", errResponse)
|
||||
return &result, errResponse
|
||||
}
|
||||
|
||||
if response.StatusCode != 200 {
|
||||
logrus.Tracef("Response status code not successful: %d", response.StatusCode)
|
||||
logrus.Tracef("Response body: %s", response.Body)
|
||||
return &result, errors.New(strconv.Itoa(response.StatusCode))
|
||||
}
|
||||
|
||||
logrus.Tracef("Response status code: %s", response.Status)
|
||||
|
||||
if response.Body != nil {
|
||||
defer response.Body.Close()
|
||||
}
|
||||
|
||||
body, errRead := ioutil.ReadAll(response.Body)
|
||||
if errRead != nil {
|
||||
logrus.Errorf("Error reading body: %s", errRead)
|
||||
return &result, errRead
|
||||
}
|
||||
|
||||
logrus.Tracef("Response body: %s", body)
|
||||
|
||||
// Write cache
|
||||
if cacheEnabled {
|
||||
logrus.Infof("Writting cache for %s", parsedURL.String())
|
||||
logrus.Infof("Writting cache to: %s", getCacheFilename(parsedURL.String()))
|
||||
errWriteCache := ioutil.WriteFile(getCachePathFor(parsedURL.String()), body, 0644)
|
||||
if errWriteCache != nil {
|
||||
logrus.Warnf("Can't write to cache: %s", errWriteCache)
|
||||
}
|
||||
}
|
||||
|
||||
errJSON := json.Unmarshal(body, &result)
|
||||
if errJSON != nil {
|
||||
logrus.Errorf("Error parsing body: %s", errJSON)
|
||||
return &result, errJSON
|
||||
}
|
||||
|
||||
return &result, nil
|
||||
}
|
||||
|
||||
type GetChaptersParams struct {
|
||||
Limit int `json:"limit"`
|
||||
Page int `json:"p"`
|
||||
BlockGroups bool `json:"blockgroups"`
|
||||
}
|
||||
|
||||
func NewGetChaptersParams() GetChaptersParams {
|
||||
return GetChaptersParams{
|
||||
Limit: 100,
|
||||
Page: 0,
|
||||
BlockGroups: false,
|
||||
}
|
||||
}
|
||||
|
||||
func (params *GetChaptersParams) validate() {
|
||||
if params.Limit < 1 || params.Limit > 100 {
|
||||
params.Limit = 100
|
||||
}
|
||||
}
|
||||
|
||||
func (params *GetChaptersParams) asQueryParams() url.Values {
|
||||
queryParams := url.Values{}
|
||||
|
||||
if params.Page > 0 {
|
||||
queryParams.Add("p", strconv.FormatInt(int64(params.Page), 10))
|
||||
}
|
||||
queryParams.Add("limit", strconv.FormatInt(int64(params.Limit), 10))
|
||||
if params.BlockGroups {
|
||||
queryParams.Add("blockgroups", strconv.FormatBool(params.BlockGroups))
|
||||
}
|
||||
|
||||
return queryParams
|
||||
}
|
||||
|
||||
func (manga *Manga) GetChapters(params GetChaptersParams) ([]MangaChapterList, []MangaGroup, error) {
|
||||
var mangaChaptersResult []MangaChapterList
|
||||
var mangaGroupsResult []MangaGroup
|
||||
params.validate()
|
||||
|
||||
response, errRequest := doRequest("GET", APIBaseURL+path.Join("manga", strconv.Itoa(manga.ID), "chapters")+"?"+params.asQueryParams().Encode())
|
||||
if errRequest != nil {
|
||||
logrus.Errorf("Request error: %s", errRequest)
|
||||
return mangaChaptersResult, mangaGroupsResult, errRequest
|
||||
}
|
||||
|
||||
var mangaDexChaptersResponse MangaDexChaptersResponse
|
||||
|
||||
errJSON := json.Unmarshal(response.Data, &mangaDexChaptersResponse)
|
||||
if errJSON != nil {
|
||||
logrus.Errorf("Error parsing JSON: %s", errJSON)
|
||||
return mangaChaptersResult, mangaGroupsResult, errJSON
|
||||
}
|
||||
|
||||
return mangaDexChaptersResponse.Chapters, mangaDexChaptersResponse.Groups, nil
|
||||
}
|
||||
|
||||
func (manga *Manga) GetChapter(chapter string) (MangaChapterDetail, error) {
|
||||
var result MangaChapterDetail
|
||||
|
||||
response, errRequest := doRequest("GET", APIBaseURL+path.Join("chapter", chapter))
|
||||
if errRequest != nil {
|
||||
logrus.Errorf("Request error: %s", errRequest)
|
||||
return result, errRequest
|
||||
}
|
||||
|
||||
errJSON := json.Unmarshal(response.Data, &result)
|
||||
if errJSON != nil {
|
||||
logrus.Errorf("Error parsing JSON: %s", errJSON)
|
||||
return result, errJSON
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func GetManga(mangaID int) (Manga, error) {
|
||||
result := Manga{}
|
||||
response, errRequest := doRequest("GET", APIBaseURL+path.Join("manga", strconv.Itoa(mangaID)))
|
||||
if errRequest != nil {
|
||||
logrus.Errorf("Request error: %s", errRequest)
|
||||
return result, errRequest
|
||||
}
|
||||
|
||||
errJSON := json.Unmarshal(response.Data, &result)
|
||||
if errJSON != nil {
|
||||
logrus.Errorf("Error parsing JSON: %s", errJSON)
|
||||
return result, errJSON
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (manga *Manga) GetCovers() ([]MangaCover, error) {
|
||||
var result []MangaCover
|
||||
response, errRequest := doRequest("GET", APIBaseURL+path.Join("manga", strconv.Itoa(manga.ID), "covers"))
|
||||
if errRequest != nil {
|
||||
logrus.Errorf("Request error: %s", errRequest)
|
||||
return result, errRequest
|
||||
}
|
||||
|
||||
errJSON := json.Unmarshal(response.Data, &result)
|
||||
if errJSON != nil {
|
||||
logrus.Errorf("Error parsing JSON: %s", errJSON)
|
||||
return result, errJSON
|
||||
}
|
||||
return result, nil
|
||||
}
|
|
@ -0,0 +1,89 @@
|
|||
package mangadex
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"path"
|
||||
"strconv"
|
||||
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
// GetChapters - Requests the chapters and groups for the provided manga instance.
// Requires a ChaptersParams argument to work through the pagination and other request
// parameters. Returns the chapter list, the scanlation groups referenced by those
// chapters, and any request/decoding error.
func (manga *Manga) GetChapters(params ChaptersParams) ([]MangaChapterList, []MangaGroup, error) {
	var mangaChaptersResult []MangaChapterList
	var mangaGroupsResult []MangaGroup
	// Clamp Limit into the accepted range before building the query string.
	params.validate()

	response, errRequest := doRequest("GET", APIBaseURL+path.Join("manga", strconv.Itoa(manga.ID), "chapters")+"?"+params.asQueryParams().Encode())
	if errRequest != nil {
		logrus.Errorf("Request error: %s", errRequest)
		return mangaChaptersResult, mangaGroupsResult, errRequest
	}

	var mangaDexChaptersResponse MangaDexChaptersResponse

	// response.Data holds the raw JSON payload; decode it into the chapters envelope.
	errJSON := json.Unmarshal(response.Data, &mangaDexChaptersResponse)
	if errJSON != nil {
		logrus.Errorf("Error parsing JSON: %s", errJSON)
		return mangaChaptersResult, mangaGroupsResult, errJSON
	}

	return mangaDexChaptersResponse.Chapters, mangaDexChaptersResponse.Groups, nil
}
|
||||
|
||||
// GetChapter retrieves the specific chapter detail from the provided Manga.
// This function returns a more detailed chapter object since the list only returns information
// but the detail endpoint is needed in order to get the pages and servers where those are stored.
// The chapter argument is the chapter identifier used in the /chapter/{id} endpoint.
func (manga *Manga) GetChapter(chapter string) (MangaChapterDetail, error) {
	var result MangaChapterDetail

	response, errRequest := doRequest("GET", APIBaseURL+path.Join("chapter", chapter))
	if errRequest != nil {
		logrus.Errorf("Request error: %s", errRequest)
		return result, errRequest
	}

	// response.Data holds the raw JSON payload; decode it into the detail struct.
	errJSON := json.Unmarshal(response.Data, &result)
	if errJSON != nil {
		logrus.Errorf("Error parsing JSON: %s", errJSON)
		return result, errJSON
	}

	return result, nil
}
|
||||
|
||||
// GetCovers requests the covers for the provided manga.
// Returns the zero-value slice together with the error when the request
// or the JSON decoding fails.
func (manga *Manga) GetCovers() ([]MangaCover, error) {
	var result []MangaCover
	response, errRequest := doRequest("GET", APIBaseURL+path.Join("manga", strconv.Itoa(manga.ID), "covers"))
	if errRequest != nil {
		logrus.Errorf("Request error: %s", errRequest)
		return result, errRequest
	}

	// response.Data holds the raw JSON payload; decode it into the cover list.
	errJSON := json.Unmarshal(response.Data, &result)
	if errJSON != nil {
		logrus.Errorf("Error parsing JSON: %s", errJSON)
		return result, errJSON
	}
	return result, nil
}
|
||||
|
||||
// GetManga retrieves the manga information for the provided ID.
|
||||
func GetManga(mangaID int) (Manga, error) {
|
||||
result := Manga{}
|
||||
response, errRequest := doRequest("GET", APIBaseURL+path.Join("manga", strconv.Itoa(mangaID)))
|
||||
if errRequest != nil {
|
||||
logrus.Errorf("Request error: %s", errRequest)
|
||||
return result, errRequest
|
||||
}
|
||||
|
||||
errJSON := json.Unmarshal(response.Data, &result)
|
||||
if errJSON != nil {
|
||||
logrus.Errorf("Error parsing JSON: %s", errJSON)
|
||||
return result, errJSON
|
||||
}
|
||||
return result, nil
|
||||
}
|
40
types.go
40
types.go
|
@ -1,6 +1,10 @@
|
|||
package mangadex
|
||||
|
||||
import "encoding/json"
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/url"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type MangaDexResponse struct {
|
||||
// Same as HTTP status code
|
||||
|
@ -140,3 +144,37 @@ type MangaGroup struct {
|
|||
LastUpdated int `json:"lastUpdated"`
|
||||
Banner string `json:"banner"`
|
||||
}
|
||||
|
||||
// ChaptersParams carries the query parameters accepted by the manga
// chapters listing endpoint.
type ChaptersParams struct {
	Limit       int  `json:"limit"`       // page size; clamped to 1..100 by validate()
	Page        int  `json:"p"`           // page index; 0 is omitted from the generated query
	BlockGroups bool `json:"blockgroups"` // only emitted in the query when true
}
|
||||
|
||||
func NewChaptersParams() ChaptersParams {
|
||||
return ChaptersParams{
|
||||
Limit: 100,
|
||||
Page: 0,
|
||||
BlockGroups: false,
|
||||
}
|
||||
}
|
||||
|
||||
func (params *ChaptersParams) validate() {
|
||||
if params.Limit < 1 || params.Limit > 100 {
|
||||
params.Limit = 100
|
||||
}
|
||||
}
|
||||
|
||||
func (params *ChaptersParams) asQueryParams() url.Values {
|
||||
queryParams := url.Values{}
|
||||
|
||||
if params.Page > 0 {
|
||||
queryParams.Add("p", strconv.FormatInt(int64(params.Page), 10))
|
||||
}
|
||||
queryParams.Add("limit", strconv.FormatInt(int64(params.Limit), 10))
|
||||
if params.BlockGroups {
|
||||
queryParams.Add("blockgroups", strconv.FormatBool(params.BlockGroups))
|
||||
}
|
||||
|
||||
return queryParams
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue