396 lines
11 KiB
Go
396 lines
11 KiB
Go
package cli
|
|
|
|
import (
|
|
"archive/zip"
|
|
"errors"
|
|
"flag"
|
|
"fmt"
|
|
"io"
|
|
"io/ioutil"
|
|
"net/http"
|
|
"os"
|
|
"path"
|
|
"path/filepath"
|
|
"sort"
|
|
"strconv"
|
|
"strings"
|
|
"time"
|
|
|
|
"github.com/fmartingr/go-mangadex"
|
|
"github.com/fmartingr/mangadex2cbr/pkg/tasks"
|
|
"github.com/sirupsen/logrus"
|
|
)
|
|
|
|
// CliOPtions holds the command-line options for the converter.
// NOTE(review): the name has a typo ("OPtions" -> "Options"); renaming is a
// breaking change for any external callers, so it is left as-is — confirm
// usage before fixing.
type CliOPtions struct {
	// MangaID is the MangaDex manga identifier to convert.
	MangaID string
}
|
|
|
|
// Defaults applied when the corresponding CLI flag is absent or invalid.
const (
	defaultLogLevel string = "INFO"
	defaultLanguage string = "gb"
)
|
|
|
|
// fileExists reports whether filename exists and is a regular file
// (directories yield false).
func fileExists(filename string) bool {
	info, err := os.Stat(filename)
	if err != nil {
		// Treat every stat failure as "does not exist". The original only
		// checked os.IsNotExist, so any other error (e.g. permission
		// denied) fell through and dereferenced a nil FileInfo — a panic.
		return false
	}
	return !info.IsDir()
}
|
|
|
|
func DownloadFile(url string, destinationPath string) error {
|
|
response, err := http.Get(url)
|
|
if err != nil {
|
|
logrus.Errorf("Error downloading page: %s", err)
|
|
return err
|
|
}
|
|
|
|
if response.StatusCode != 200 {
|
|
//logrus.Errorf("Status code not OK: %d", response.StatusCode)
|
|
return errors.New("Status code not 200")
|
|
}
|
|
|
|
if response.Body != nil {
|
|
defer response.Body.Close()
|
|
}
|
|
|
|
file, errOpen := os.Create(destinationPath)
|
|
if err != nil {
|
|
logrus.Errorf("Error opening file: %s", errOpen)
|
|
return errOpen
|
|
}
|
|
|
|
defer file.Close()
|
|
|
|
body, errBody := ioutil.ReadAll(response.Body)
|
|
if errBody != nil {
|
|
logrus.Errorf("Failing read body from response: %s", errBody)
|
|
return err
|
|
}
|
|
|
|
_, errWrite := file.Write(body)
|
|
if errWrite != nil {
|
|
return errWrite
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
func Start() {
|
|
logLevelFlag := flag.String("log-level", defaultLogLevel, "Log level")
|
|
mangaIDFlag := flag.Int("manga-id", 0, "Manga ID to convert")
|
|
noCacheFlag := flag.Bool("no-cache", false, "Cache requests to mangadex")
|
|
languageFlag := flag.String("language", defaultLanguage, "Language to fetch chapters")
|
|
outputPath := "Output"
|
|
flag.Parse()
|
|
|
|
cwd, errCwd := os.Getwd()
|
|
if errCwd != nil {
|
|
logrus.Fatalf("Error retrieving current working directory: %s", errCwd)
|
|
}
|
|
outputPath = filepath.Join(cwd, outputPath)
|
|
|
|
if !*noCacheFlag {
|
|
mangadex.EnableCache()
|
|
}
|
|
|
|
logLevel, errLogLevel := logrus.ParseLevel(*logLevelFlag)
|
|
if errLogLevel != nil {
|
|
logrus.Warnf("Incorrect loglevel %s, using default %s", *logLevelFlag, defaultLogLevel)
|
|
} else {
|
|
logrus.SetLevel(logLevel)
|
|
}
|
|
|
|
if *mangaIDFlag == 0 {
|
|
logrus.Error("You should set -manga-id")
|
|
logrus.Exit(1)
|
|
}
|
|
|
|
logrus.Infof("Getting information for Manga ID: %d", *mangaIDFlag)
|
|
manga, err := mangadex.GetManga(*mangaIDFlag)
|
|
if err != nil {
|
|
panic(err)
|
|
}
|
|
|
|
// TODO: Allow selecting a name from AlternativeNames
|
|
|
|
logrus.Infof("Found! %s", manga.Title)
|
|
logrus.Infof("Getting chapter information...")
|
|
|
|
var mangaChapters []mangadex.MangaChapterList
|
|
mangaGroups := map[int]mangadex.MangaGroup{}
|
|
chapterParams := mangadex.NewChaptersParams()
|
|
|
|
selectedLanguage := *languageFlag
|
|
var fetchChaptersTasks []tasks.Task
|
|
|
|
fetchChaptersTasks = tasks.TaskPush(fetchChaptersTasks, tasks.Task{Arguments: map[string]string{"page": "1"}})
|
|
|
|
for len(fetchChaptersTasks) > 0 {
|
|
var task tasks.Task
|
|
task, fetchChaptersTasks = tasks.TaskPop(fetchChaptersTasks)
|
|
logrus.Infof("Fetching chapters (page %s)", task.Arguments["page"])
|
|
page, errPage := strconv.Atoi(task.Arguments["page"])
|
|
if errPage != nil {
|
|
logrus.Error("Can't convert page to int: %s: %s", task.Arguments["page"], errPage)
|
|
}
|
|
chapterParams.Page = page
|
|
|
|
chapters, groups, errChapters := manga.GetChapters(chapterParams)
|
|
if errChapters != nil {
|
|
logrus.Errorf("Error retrieving manga chapters: %s", errChapters)
|
|
}
|
|
|
|
for chapter := range chapters {
|
|
if chapters[chapter].Language == selectedLanguage {
|
|
mangaChapters = append(mangaChapters, chapters[chapter])
|
|
}
|
|
}
|
|
|
|
for group := range groups {
|
|
_, exists := mangaGroups[groups[group].ID]
|
|
if !exists {
|
|
mangaGroups[groups[group].ID] = groups[group]
|
|
}
|
|
}
|
|
|
|
// If we have the total number of items we try the next page
|
|
if len(chapters) == chapterParams.Limit {
|
|
fetchChaptersTasks = tasks.TaskPush(fetchChaptersTasks, tasks.Task{Arguments: map[string]string{"page": strconv.Itoa(page + 1)}})
|
|
}
|
|
}
|
|
|
|
logrus.Printf("Found following groups: ")
|
|
for group := range mangaGroups {
|
|
logrus.Printf(" %6d: %s", mangaGroups[group].ID, mangaGroups[group].Name)
|
|
}
|
|
|
|
// TODO: Select groups for digitalization, Using all for testing
|
|
selectedGroups := make([]int, 0, len(mangaGroups))
|
|
for k := range mangaGroups {
|
|
selectedGroups = append(selectedGroups, k)
|
|
}
|
|
|
|
logrus.Infof("Selected groups: %d", selectedGroups)
|
|
|
|
logrus.Infof("Calculating Volumes...")
|
|
|
|
mangaVolumeChapter := map[string]mangadex.MangaChapterList{}
|
|
var mangaVolumeChapterKeys []string
|
|
|
|
// Using keys to get track of which chapters we already have in store, and for sorting
|
|
// through chatpers and volumes as well. Also keys are useful as page prefixes.
|
|
|
|
var downloadChaptersTasks []tasks.Task
|
|
for chapter := range mangaChapters {
|
|
// Sorting fix for chapters that do not contain "decimals"
|
|
// Usually chapters are just 1, 2, 3... but in some ocassions the chapters are sorted like
|
|
// 1, 1.1, 1.2, 2, ... and that make a mess when sorting. Making the "non-decimal" chapters
|
|
// fake "decimals" solves this issue easily for us.
|
|
|
|
// TODO Delete me
|
|
// if mangaChapters[chapter].Volume != "1" {
|
|
// continue
|
|
// }
|
|
|
|
if !strings.Contains(mangaChapters[chapter].Chapter, ".") {
|
|
mangaChapters[chapter].Chapter += ".0"
|
|
}
|
|
volumeChapterKey := fmt.Sprintf("%04s_%08s", mangaChapters[chapter].Volume, mangaChapters[chapter].Chapter)
|
|
_, exists := mangaVolumeChapter[volumeChapterKey]
|
|
if !exists {
|
|
logrus.Debugf("Collecting volume %4s chapter %4s from group %7d", mangaChapters[chapter].Volume, mangaChapters[chapter].Chapter, mangaChapters[chapter].Groups)
|
|
mangaVolumeChapter[volumeChapterKey] = mangaChapters[chapter]
|
|
mangaVolumeChapterKeys = append(mangaVolumeChapterKeys, volumeChapterKey)
|
|
downloadChaptersTasks = tasks.TaskPush(
|
|
downloadChaptersTasks,
|
|
tasks.Task{
|
|
Arguments: map[string]string{
|
|
"chapterID": strconv.Itoa(mangaChapters[chapter].ID),
|
|
"volume": mangaChapters[chapter].Volume,
|
|
"key": volumeChapterKey,
|
|
}})
|
|
}
|
|
}
|
|
|
|
logrus.Debugf("Sorting by volume and chapter")
|
|
sort.Strings(mangaVolumeChapterKeys)
|
|
|
|
mangaOutputPath := filepath.Join(outputPath, manga.Title)
|
|
|
|
logrus.Infof("Downloading chapters and calculating pages ")
|
|
|
|
var downloadPagesTasks []tasks.Task
|
|
|
|
for len(downloadChaptersTasks) > 0 {
|
|
var task tasks.Task
|
|
task, downloadChaptersTasks = tasks.TaskPop(downloadChaptersTasks)
|
|
|
|
// TODO: Delete me
|
|
// if task.Arguments["volume"] != "1" {
|
|
// continue
|
|
// }
|
|
|
|
chapter, errChapterDetail := manga.GetChapter(task.Arguments["chapterID"])
|
|
if errChapterDetail != nil {
|
|
logrus.Errorf("Failed getting chapter detail: %s", errChapterDetail)
|
|
}
|
|
|
|
volumeOutputPath := filepath.Join(mangaOutputPath, fmt.Sprintf("%s - Volume %s", manga.Title, chapter.Volume))
|
|
|
|
logrus.Infof("Processing Volume %s Chapter %s", chapter.Volume, chapter.Chapter)
|
|
logrus.Tracef("Processing %s", task.Arguments["key"])
|
|
|
|
errMkdir := os.MkdirAll(volumeOutputPath, 0766)
|
|
if errMkdir != nil {
|
|
logrus.Fatalf("Error creating output directory: %s", errMkdir)
|
|
}
|
|
|
|
for page := range chapter.Pages {
|
|
extension := path.Ext(chapter.Pages[page])
|
|
pageFilename := fmt.Sprintf("%s_%03d%s", task.Arguments["key"], page, extension)
|
|
pageDestinationPath := filepath.Join(volumeOutputPath, pageFilename)
|
|
downloadPagesTasks = tasks.TaskPush(downloadPagesTasks, tasks.Task{
|
|
Arguments: map[string]string{
|
|
"filename": pageFilename,
|
|
"destinationPath": pageDestinationPath,
|
|
"url": chapter.Server + path.Join(chapter.Hash, chapter.Pages[page]),
|
|
"urlFallback": chapter.ServerFallback + path.Join(chapter.Hash, chapter.Pages[page]),
|
|
},
|
|
})
|
|
}
|
|
}
|
|
|
|
logrus.Infof("Downloading %d pages", len(downloadPagesTasks))
|
|
|
|
for len(downloadPagesTasks) > 0 {
|
|
var task tasks.Task
|
|
task, downloadPagesTasks = tasks.TaskPop(downloadPagesTasks)
|
|
destinationPath := task.Arguments["destinationPath"]
|
|
if !fileExists(task.Arguments["destinationPath"]) {
|
|
logrus.Tracef("Downloading page from %s", task.Arguments["url"])
|
|
logrus.Tracef("Downlading page to %s", destinationPath)
|
|
errDownload := DownloadFile(task.Arguments["url"], destinationPath)
|
|
if errDownload != nil {
|
|
// TODO: Distinguish between 420 errors (GONE -> Use fallback) and other errors to keep retrying on the
|
|
// original (potentially MangaDex@HOME) urls.
|
|
_, exists := task.Arguments["urlFallback"]
|
|
if exists {
|
|
logrus.Warnf("Using fallback for page %s", task.Arguments["filename"])
|
|
downloadPagesTasks = tasks.TaskPush(downloadPagesTasks, tasks.Task{
|
|
Arguments: map[string]string{
|
|
"destinationPath": task.Arguments["destinationPath"],
|
|
"url": task.Arguments["urlFallback"],
|
|
"key": task.Arguments["key"],
|
|
},
|
|
})
|
|
} else {
|
|
logrus.Errorf("Error downloading page: %s", errDownload)
|
|
}
|
|
}
|
|
time.Sleep(100 * time.Millisecond)
|
|
}
|
|
}
|
|
|
|
logrus.Info("Downloading covers")
|
|
covers, errCovers := manga.GetCovers()
|
|
if errCovers != nil {
|
|
logrus.Errorf("Error getting cover information: %s", errCovers)
|
|
}
|
|
|
|
for cover := range covers {
|
|
extension := path.Ext(covers[cover].URL)
|
|
coverOutputPath := filepath.Join(mangaOutputPath, fmt.Sprintf("%s - Volume %s", manga.Title, covers[cover].Volume), fmt.Sprintf("0000%s", extension))
|
|
if !fileExists(coverOutputPath) {
|
|
logrus.Tracef("Downloading cover from %s", covers[cover].URL)
|
|
logrus.Tracef("Writting cover to: %s", coverOutputPath)
|
|
errDownload := DownloadFile(covers[cover].URL, coverOutputPath)
|
|
if errDownload != nil {
|
|
logrus.Errorf("Error downloading cover: %s", errDownload)
|
|
}
|
|
time.Sleep(100 * time.Millisecond)
|
|
}
|
|
}
|
|
|
|
// TODO: Zip folders into CBR files
|
|
logrus.Info("Compressing volumes")
|
|
|
|
files, errReadVolumes := ioutil.ReadDir(mangaOutputPath)
|
|
if errReadVolumes != nil {
|
|
logrus.Errorf("Error reading path with volume files: %s", errReadVolumes)
|
|
}
|
|
|
|
for file := range files {
|
|
if files[file].IsDir() {
|
|
destinationPath := filepath.Join(mangaOutputPath, files[file].Name()) + ".cbr"
|
|
if !fileExists(destinationPath) {
|
|
_, errZip := zipVolume(destinationPath)
|
|
if errZip != nil {
|
|
logrus.Errorf("Error packing CBR file: %s", errZip)
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
}
|
|
|
|
func zipVolume(destinationPath string) (string, error) {
|
|
logrus.Tracef("Packing volume %s", path.Base(destinationPath))
|
|
pagesPath := strings.Replace(destinationPath, path.Ext(destinationPath), "", 1)
|
|
|
|
newZipFile, errCreate := os.Create(destinationPath)
|
|
if errCreate != nil {
|
|
return "", errCreate
|
|
}
|
|
defer newZipFile.Close()
|
|
|
|
zipWriter := zip.NewWriter(newZipFile)
|
|
defer zipWriter.Close()
|
|
|
|
files, errReadFiles := ioutil.ReadDir(pagesPath)
|
|
if errReadFiles != nil {
|
|
return "", errReadFiles
|
|
}
|
|
|
|
for _, file := range files {
|
|
if errAddFile := AddFileToZip(zipWriter, filepath.Join(pagesPath, file.Name())); errAddFile != nil {
|
|
return "", errAddFile
|
|
}
|
|
}
|
|
|
|
return "", nil
|
|
}
|
|
|
|
func AddFileToZip(zipWriter *zip.Writer, filename string) error {
|
|
|
|
fileToZip, err := os.Open(filename)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
defer fileToZip.Close()
|
|
|
|
// Get the file information
|
|
info, err := fileToZip.Stat()
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
header, err := zip.FileInfoHeader(info)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
// Using FileInfoHeader() above only uses the basename of the file. If we want
|
|
// to preserve the folder structure we can overwrite this with the full path.
|
|
header.Name = filename
|
|
|
|
// Change to deflate to gain better compression
|
|
// see http://golang.org/pkg/archive/zip/#pkg-constants
|
|
header.Method = zip.Deflate
|
|
|
|
writer, err := zipWriter.CreateHeader(header)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
_, err = io.Copy(writer, fileToZip)
|
|
return err
|
|
}
|