commit bdc166c445

    yep good
@@ -0,0 +1,3 @@
dl
dist*
vendor
@@ -0,0 +1,17 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.


[[projects]]
  digest = "1:6f9339c912bbdda81302633ad7e99a28dfa5a639c864061f1929510a9a64aa74"
  name = "github.com/dustin/go-humanize"
  packages = ["."]
  pruneopts = "UT"
  revision = "9f541cc9db5d55bce703bd99987c9d5cb8eea45e"
  version = "v1.0.0"

[solve-meta]
  analyzer-name = "dep"
  analyzer-version = 1
  input-imports = ["github.com/dustin/go-humanize"]
  solver-name = "gps-cdcl"
  solver-version = 1
@@ -0,0 +1,34 @@
# Gopkg.toml example
#
# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
#   name = "github.com/user/project"
#   version = "1.0.0"
#
# [[constraint]]
#   name = "github.com/user/project2"
#   branch = "dev"
#   source = "github.com/myfork/project2"
#
# [[override]]
#   name = "github.com/x/y"
#   version = "2.4.0"
#
# [prune]
#   non-go = false
#   go-tests = true
#   unused-packages = true


[[constraint]]
  name = "github.com/dustin/go-humanize"
  version = "1.0.0"

[prune]
  go-tests = true
  unused-packages = true
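With the manifest and lock above in place, dep resolves github.com/dustin/go-humanize at v1.0.0 and vendors it locally, which is why `vendor` (along with `dl` and `dist*`) is ignored at the top of the diff. A minimal sketch of the workflow, assuming the dep tool is installed:

```
dep ensure   # populate vendor/ from Gopkg.toml and Gopkg.lock
```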
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2019 TJ Horner

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -0,0 +1,34 @@
.PHONY: dist dist-win dist-macos dist-linux ensure-dist-dir build install uninstall

GOBUILD=go build -ldflags="-s -w"
INSTALLPATH=/usr/local/bin

ensure-dist-dir:
	@- mkdir -p dist

dist-win: ensure-dist-dir
	# Build for Windows x64
	GOOS=windows GOARCH=amd64 $(GOBUILD) -o dist/e6dl-windows-amd64.exe *.go

dist-macos: ensure-dist-dir
	# Build for macOS x64
	GOOS=darwin GOARCH=amd64 $(GOBUILD) -o dist/e6dl-darwin-amd64 *.go

dist-linux: ensure-dist-dir
	# Build for Linux x64
	GOOS=linux GOARCH=amd64 $(GOBUILD) -o dist/e6dl-linux-amd64 *.go

dist: dist-win dist-macos dist-linux

build:
	@- mkdir -p bin
	$(GOBUILD) -o bin/e6dl *.go
	@- chmod +x bin/e6dl

install: build
	mv bin/e6dl $(INSTALLPATH)/e6dl
	@- rm -rf bin
	@echo "e6dl was installed to $(INSTALLPATH)/e6dl. Run make uninstall to get rid of it, or just remove the binary yourself."

uninstall:
	rm $(INSTALLPATH)/e6dl
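The Makefile builds everything with go build -ldflags="-s -w" to strip symbol tables and debug info from the binaries. Typical invocations would look roughly like this (the sudo is an assumption, since /usr/local/bin is usually root-owned):

```
make build          # compile bin/e6dl for the host platform
sudo make install   # move the binary to /usr/local/bin/e6dl
make dist           # cross-compile amd64 binaries for Windows, macOS and Linux into dist/
```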
@@ -0,0 +1,27 @@
# e6dl

This is a command line tool for downloading posts that match certain tags on e621 or e926.

It does basically the same thing as [this tool](https://www.npmjs.com/package/e6dl) except it was written in Go and the output is a lot less pretty.

I made this because I wanted to rewrite one of my previous projects in Go, so I decided to start with this one since it's a pretty small and simple command line tool.

## Installing, Building, etc.

See [here](https://github.com/tjhorner/nplcsv/blob/master/README.md) since it uses the same Makefile.

## Usage

```
Usage of e6dl:
  --concurrents int
        Maximum amount of concurrent downloads (default 5)
  --limit int
        Maximum amount of posts to grab from e621 (default 10)
  --out string
        The directory to write the downloaded posts to (default "dl")
  --sfw
        Download posts from e926 instead of e621.
  --tags string
        Tags to search for
```
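For reference, a typical invocation built from the flags above might look like the following; the tag string and values are purely illustrative:

```
e6dl --tags "wolf rating:s" --limit 50 --concurrents 10 --out wolves
```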
@@ -0,0 +1,76 @@
package main

import (
	"fmt"
	"io/ioutil"
	"path"
	"strconv"
	"strings"
	"sync"

	"github.com/dustin/go-humanize"
)

// BeginDownload is meant to be called as a goroutine and begins the post download process.
func BeginDownload(posts *[]Post, saveDirectory *string, maxConcurrents *int) {
	var wg sync.WaitGroup
	var completed int

	total := len(*posts)

	// Distribute the posts based on the number of workers
	ppw := len(*posts) / *maxConcurrents
	mod := len(*posts) % *maxConcurrents

	for i := 0; i < *maxConcurrents; i++ {
		postsLower := i * ppw
		postsUpper := i*ppw + ppw

		if i == *maxConcurrents-1 {
			// Give the last worker the remaining posts
			// TODO: compensate it for labor
			postsUpper += mod
		}

		wg.Add(1)
		go work(i+1, (*posts)[postsLower:postsUpper], *saveDirectory, &completed, &total, &wg)
	}

	wg.Wait()
}

func work(wn int, posts []Post, directory string, completed *int, total *int, wg *sync.WaitGroup) {
	defer wg.Done()

	for _, post := range posts {
		*completed++
		fmt.Printf("[%d/%d] [w%d] Downloading post %d (%s)...\n", *completed, *total, wn, post.ID, humanize.Bytes(uint64(post.FileSize)))
		downloadPost(&post, directory)
	}
}

func downloadPost(post *Post, directory string) {
	pathSliced := strings.Split(post.FileURL, ".")
	extension := pathSliced[len(pathSliced)-1]

	resp, err := HTTPGet(post.FileURL)
	if err != nil {
		fmt.Println("Unable to download, skipping...")
		return
	}
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Println("Unable to read post response body, skipping...")
		return
	}

	savePath := path.Join(directory, strconv.Itoa(post.ID)+"."+extension)

	err = ioutil.WriteFile(savePath, body, 0755)
	if err != nil {
		fmt.Printf("Error: could not write to file: %v\n", err)
		return
	}
}
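The split in BeginDownload is plain integer division: with 23 posts and 5 workers, ppw is 4 and mod is 3, so workers 1 through 4 each take a slice of 4 posts and the last worker takes the remaining 7. Note also that, despite its doc comment, BeginDownload is called synchronously from main and blocks on wg.Wait(), and that the shared completed counter is incremented from several goroutines without synchronization, so the printed progress numbers are subject to a data race. Below is a minimal, self-contained sketch of the same fan-out with an atomic counter; it is illustrative only, and the stand-in item slice and names are assumptions rather than the committed code:

```go
package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

func main() {
	items := make([]int, 23) // stand-in for the post slice
	workers := 5

	var wg sync.WaitGroup
	var completed int64 // shared progress counter, updated atomically

	ppw := len(items) / workers // items per worker
	mod := len(items) % workers // remainder goes to the last worker

	for i := 0; i < workers; i++ {
		lower, upper := i*ppw, i*ppw+ppw
		if i == workers-1 {
			upper += mod
		}

		wg.Add(1)
		go func(wn int, chunk []int) {
			defer wg.Done()
			for range chunk {
				n := atomic.AddInt64(&completed, 1) // race-free increment
				fmt.Printf("[%d/%d] [w%d] working...\n", n, len(items), wn)
			}
		}(i+1, items[lower:upper])
	}

	wg.Wait()
}
```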
@@ -0,0 +1,87 @@
package main

import (
	"encoding/json"
	"io/ioutil"
	"net/http"
	"strconv"
)

// Post represents an e621 post object returned by the e621 API.
type Post struct {
	ID             int            `json:"id"`
	Tags           string         `json:"tags"`
	LockedTags     bool           `json:"locked_tags"`
	Description    string         `json:"description"`
	CreatedAt      SerializedDate `json:"created_at"`
	CreatorID      int            `json:"creator_id"`
	Author         string         `json:"author"`
	Change         int            `json:"change"`
	Source         string         `json:"source"`
	Score          int            `json:"score"`
	FavoritesCount int            `json:"fav_count"`
	MD5Hash        string         `json:"md5"`
	FileSize       int            `json:"file_size"`
	FileURL        string         `json:"file_url"`
	FileExt        string         `json:"file_ext"`
	PreviewURL     string         `json:"preview_url"`
	PreviewHeight  int            `json:"preview_height"`
	PreviewWidth   int            `json:"preview_width"`
	Rating         string         `json:"rating"`
	Status         string         `json:"status"`
	Width          int            `json:"width"`
	Height         int            `json:"height"`
	HasComments    bool           `json:"has_comments"`
	HasNotes       bool           `json:"has_notes"`
	HasChildren    bool           `json:"has_children"`
	Children       string         `json:"children"`
	ParentID       int            `json:"parent_id"`
	Artist         []string       `json:"artist"`
	Sources        []string       `json:"sources"`
}

// SerializedDate represents a serialized date passed via JSON
type SerializedDate struct {
	JSONClass   string `json:"json_class"`
	Seconds     int    `json:"s"`
	Nanoseconds int    `json:"n"`
}

// GetPostsForTags gets a list of e621 Posts
func GetPostsForTags(tags string, limit int, sfw bool) ([]Post, error) {
	client := &http.Client{}

	var domain string

	if sfw {
		domain = "e926.net"
	} else {
		domain = "e621.net"
	}

	req, _ := http.NewRequest("GET", "https://"+domain+"/post/index.json", nil)
	req.Header.Set("User-Agent", "e6dl: go edition (@tjhorner on Telegram)")

	qs := req.URL.Query()
	qs.Add("tags", tags)
	qs.Add("limit", strconv.Itoa(limit))

	req.URL.RawQuery = qs.Encode()

	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}

	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}

	var posts []Post
	json.Unmarshal(body, &posts)

	return posts, nil
}
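One gap worth flagging in GetPostsForTags: the error from json.Unmarshal is discarded, so a non-JSON response (an HTML error page, for instance) would silently come back as an empty post list. A minimal sketch of stricter handling, using a hypothetical decodePosts helper in the same package:

```go
package main

import "encoding/json"

// decodePosts is a hypothetical helper illustrating stricter handling:
// it surfaces a malformed response instead of silently returning no posts.
func decodePosts(body []byte) ([]Post, error) {
	var posts []Post
	if err := json.Unmarshal(body, &posts); err != nil {
		return nil, err
	}
	return posts, nil
}
```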
@@ -0,0 +1,42 @@
package main

import (
	"flag"
	"fmt"
	"os"
	"path"
)

func main() {
	// define cmd line flags
	tags := flag.String("tags", "", "Tags to search for")
	maxConcurrents := flag.Int("concurrents", 5, "Maximum amount of concurrent downloads")
	postLimit := flag.Int("limit", 10, "Maximum amount of posts to grab from e621")
	saveDirectory := flag.String("out", "dl", "The directory to write the downloaded posts to")
	sfw := flag.Bool("sfw", false, "Download posts from e926 instead of e621.")

	flag.Parse()

	fmt.Printf("Fetching posts for \"%v\" (limit %v)\n", *tags, *postLimit)

	posts, err := GetPostsForTags(*tags, *postLimit, *sfw)
	if err != nil {
		fmt.Println(err)
		os.Exit(1)
	}

	fmt.Printf("Found %d posts. Starting download with %d workers...\n\n", len(posts), *maxConcurrents)

	cwd, _ := os.Getwd()
	absSaveDir := path.Join(cwd, *saveDirectory)

	err = os.MkdirAll(absSaveDir, 0755)
	if err != nil {
		fmt.Printf("Cannot create output directory (%s). Do you have the right permissions?\n", absSaveDir)
		os.Exit(1)
	}

	BeginDownload(&posts, saveDirectory, maxConcurrents)

	fmt.Printf("All done! %d posts downloaded and saved.\n", len(posts))
}
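Nothing in main requires --tags to be set, so an empty invocation simply queries the API with an empty tag list. A small guard could sit directly after flag.Parse(); the snippet below is illustrative and reuses the flag variables already defined above:

```go
// sketch: refuse to run without a tag query
if *tags == "" {
	fmt.Println("no tags specified; pass them with --tags \"some tags\"")
	flag.Usage()
	os.Exit(1)
}
```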
@@ -0,0 +1,16 @@
package main

import (
	"net/http"
)

// HTTPGet is a helper function that automatically adds the
// tool's UA to an HTTP GET request
func HTTPGet(url string) (*http.Response, error) {
	client := &http.Client{}

	req, _ := http.NewRequest("GET", url, nil)
	req.Header.Set("User-Agent", "e6dl: go edition (@tjhorner on Telegram)")

	return client.Do(req)
}
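Both HTTPGet and GetPostsForTags ignore the error returned by http.NewRequest. It is only non-nil for a malformed URL, but since file URLs come from an external API, propagating it costs little. A hypothetical variant (a sketch, not the committed helper):

```go
package main

import "net/http"

// httpGetChecked is a hypothetical variant of HTTPGet that propagates
// the request-construction error instead of discarding it.
func httpGetChecked(url string) (*http.Response, error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", "e6dl: go edition (@tjhorner on Telegram)")

	return http.DefaultClient.Do(req)
}
```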