replace whitespaces with tabs

This commit is contained in:
m3philis
2025-04-03 15:23:34 +02:00
parent 4700488489
commit 693290bc03

View File

@@ -1,217 +1,216 @@
package main package main
import ( import (
"encoding/json" "encoding/json"
"flag" "flag"
"fmt" "fmt"
"io" "io"
"io/ioutil" "log"
"log" "net/http"
"net/http" "net/url"
"net/url" "os"
"os" "os/user"
"os/user" "strconv"
"strconv" "strings"
"strings" "sync"
"sync" "time"
"time"
) )
// Global vars

// wg tracks the in-flight download goroutines for the current result page
// so main can wait for all of them before requesting the next page.
var wg sync.WaitGroup

// picture is the subset of a booru post's JSON this tool uses. It carries
// both APIs' dimension field names: one site populates width/height, the
// other image_width/image_height (parseMaps falls back from the former to
// the latter when the former decode as zero).
type picture struct {
	FileURL     string `json:"file_url"` // direct URL of the full-size image file
	Width       int    `json:"width"`
	Height      int    `json:"height"`
	ImageWidth  int    `json:"image_width"`
	ImageHeight int    `json:"image_height"`
	FileExt     string `json:"file_ext"`   // file extension; not referenced by the visible code
	Tags        string `json:"tag_string"` // space-separated tag list; not referenced by the visible code
}
// main function to download pictures // main function to download pictures
func main() { func main() {
// define flags and parse them // define flags and parse them
var path string var path string
var safemode bool var safemode bool
var tags string var tags string
var aspect string var aspect string
var site string var site string
// variables for downloading // variables for downloading
picHits := 1 picHits := 1
page := 1 page := 1
ratio := 0.0 ratio := 0.0
flag.StringVar(&path, "dir", "unnamed", "Directory to safe pictures. Default is %HOME/pictures/konachan/unnamed") flag.StringVar(&path, "dir", "unnamed", "Directory to safe pictures. Default is %HOME/pictures/konachan/unnamed")
flag.BoolVar(&safemode, "safe", false, "Safemode to filter NSFW pictures. Default is false") flag.BoolVar(&safemode, "safe", false, "Safemode to filter NSFW pictures. Default is false")
flag.StringVar(&tags, "tags", "", "Tags used to filter search query.") flag.StringVar(&tags, "tags", "", "Tags used to filter search query.")
flag.StringVar(&aspect, "aspect", "", "Aspect ratio pics should have") flag.StringVar(&aspect, "aspect", "", "Aspect ratio pics should have")
flag.StringVar(&site, "site", "konachan", "Site to crawl from, either konachan or danbooru") flag.StringVar(&site, "site", "konachan", "Site to crawl from, either konachan or danbooru")
flag.Parse() flag.Parse()
// set home directory and create it to save pictures in // set home directory and create it to save pictures in
homepath, err := user.Current() homepath, err := user.Current()
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }
filepath := strings.Join([]string{homepath.HomeDir, "pictures", site, strings.TrimSuffix(path, "\n")}, "/") filepath := strings.Join([]string{homepath.HomeDir, "pictures", site, strings.TrimSuffix(path, "\n")}, "/")
os.MkdirAll(filepath, 0700) os.MkdirAll(filepath, 0700)
// edit tags array to met API requirement // edit tags array to met API requirement
tags = strings.Replace(tags, ",", "+", -1) tags = strings.Replace(tags, ",", "+", -1)
tags = strings.Replace(tags, "=", ":", -1) tags = strings.Replace(tags, "=", ":", -1)
tags = strings.TrimSuffix(tags, "\n") tags = strings.TrimSuffix(tags, "\n")
// calculate aspect ratio // calculate aspect ratio
if isFlagPassed("aspect") { if isFlagPassed("aspect") {
aspectSlice := strings.Split(aspect, ":") aspectSlice := strings.Split(aspect, ":")
widthF, _ := strconv.ParseFloat(aspectSlice[0], 64) widthF, _ := strconv.ParseFloat(aspectSlice[0], 64)
heightF, _ := strconv.ParseFloat(aspectSlice[1], 64) heightF, _ := strconv.ParseFloat(aspectSlice[1], 64)
ratio = widthF / heightF ratio = widthF / heightF
} else { } else {
ratio = 0.0 ratio = 0.0
} }
for picHits > 0 { for picHits > 0 {
fmt.Println("Page: ", page) fmt.Println("Page: ", page)
website := fmt.Sprintf("https://konachan.com/post.json?page=%d&tags=%s", page, tags) website := fmt.Sprintf("https://konachan.com/post.json?page=%d&tags=%s", page, tags)
if safemode { if safemode {
website = fmt.Sprintf("https://konachan.com/post.json?page=%d&tags=%s+rating:safe", page, tags) website = fmt.Sprintf("https://konachan.com/post.json?page=%d&tags=%s+rating:safe", page, tags)
} }
if site == "danbooru" { if site == "danbooru" {
website = fmt.Sprintf("https://danbooru.donmai.us/posts.json?page=%d&tags=%s", page, tags) website = fmt.Sprintf("https://danbooru.donmai.us/posts.json?page=%d&tags=%s", page, tags)
if safemode { if safemode {
website = fmt.Sprintf("https://danbooru.donmai.us/posts.json?page=%d&tags=%s+rating:safe", page, tags) website = fmt.Sprintf("https://danbooru.donmai.us/posts.json?page=%d&tags=%s+rating:safe", page, tags)
} }
} }
picList := openConnection(website) picList := openConnection(website)
pictures, count := parseMaps(picList, ratio) pictures, count := parseMaps(picList, ratio)
picHits = count picHits = count
page++ page++
wg.Add(len(pictures)) wg.Add(len(pictures))
for _, pic := range pictures { for _, pic := range pictures {
go downloadPic(pic, filepath) go downloadPic(pic, filepath)
} }
wg.Wait() wg.Wait()
time.Sleep(1 * time.Second) time.Sleep(1 * time.Second)
} }
} }
// isFlagPassed reports whether the flag called name was explicitly set on
// the command line. flag.Visit only walks flags that were actually set, so
// a flag left at its default is reported as not passed.
func isFlagPassed(name string) bool {
	passed := false
	flag.Visit(func(f *flag.Flag) {
		passed = passed || f.Name == name
	})
	return passed
}
// function to create the connection to konachan and get the API response // function to create the connection to konachan and get the API response
func openConnection(url string) []picture { func openConnection(url string) []picture {
var f []picture var f []picture
result, err := http.Get(url) result, err := http.Get(url)
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }
defer result.Body.Close() defer result.Body.Close()
data, err := ioutil.ReadAll(result.Body) data, err := io.ReadAll(result.Body)
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }
if err = json.Unmarshal(data, &f); err != nil { if err = json.Unmarshal(data, &f); err != nil {
panic(err) panic(err)
} }
return f return f
} }
// function to parse the json response and extract only the file url // function to parse the json response and extract only the file url
func parseMaps(f []picture, ratio float64) ([]string, int) { func parseMaps(f []picture, ratio float64) ([]string, int) {
fileURLs := []string{} fileURLs := []string{}
picCount := 0 picCount := 0
if isFlagPassed("aspect") { if isFlagPassed("aspect") {
for _, pic := range f { for _, pic := range f {
picCount++ picCount++
picWidthF := 1.0 picWidthF := 1.0
picHeightF := 1.0 picHeightF := 1.0
if (pic.Width != 0 && pic.Height != 0) { if pic.Width != 0 && pic.Height != 0 {
picWidthF = float64(pic.Width) picWidthF = float64(pic.Width)
picHeightF = float64(pic.Height) picHeightF = float64(pic.Height)
} else { } else {
picWidthF = float64(pic.ImageWidth) picWidthF = float64(pic.ImageWidth)
picHeightF = float64(pic.ImageHeight) picHeightF = float64(pic.ImageHeight)
} }
if (picWidthF / picHeightF) == ratio { if (picWidthF / picHeightF) == ratio {
fileURL := pic.FileURL fileURL := pic.FileURL
fileURLs = append(fileURLs, fileURL) fileURLs = append(fileURLs, fileURL)
} }
} }
} else { } else {
for _, pic := range f { for _, pic := range f {
picCount++ picCount++
fileURL := pic.FileURL fileURL := pic.FileURL
fileURLs = append(fileURLs, fileURL) fileURLs = append(fileURLs, fileURL)
} }
} }
return fileURLs, picCount return fileURLs, picCount
} }
// function to download and sace the pictures to disk // function to download and sace the pictures to disk
func downloadPic(picURL string, filepath string) { func downloadPic(picURL string, filepath string) {
defer wg.Done() defer wg.Done()
picName, err := url.PathUnescape(strings.Split(picURL, "/")[len(strings.Split(picURL, "/"))-1]) picName, err := url.PathUnescape(strings.Split(picURL, "/")[len(strings.Split(picURL, "/"))-1])
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }
if _, err := os.Stat(filepath + "/" + picName); err == nil { if _, err := os.Stat(filepath + "/" + picName); err == nil {
return return
} }
result, err := http.Get(picURL) result, err := http.Get(picURL)
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }
defer result.Body.Close() defer result.Body.Close()
//fmt.Println(result.Status) //fmt.Println(result.Status)
if result.StatusCode != 200 { if result.StatusCode != 200 {
wg.Add(1) wg.Add(1)
go downloadPic(picURL, filepath) go downloadPic(picURL, filepath)
return return
} }
file, err := os.Create(filepath + "/" + picName) file, err := os.Create(filepath + "/" + picName)
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }
_, err = io.Copy(file, result.Body) _, err = io.Copy(file, result.Body)
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }
file.Close() file.Close()
fmt.Printf("Downloading: %s\n", picName) fmt.Printf("Downloading: %s\n", picName)
} }