package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"path/filepath"
	"strings"
	"sync"
)

// Concurrent downloader.
// Description: download files from multiple URLs at the same time. Each
// download task is handled by its own goroutine; sync.WaitGroup waits for
// all downloads to complete.
// Skills: goroutines, sync.WaitGroup, HTTP requests, error handling.

// wg counts the live worker goroutines; main blocks on it until every
// worker has drained the task channel and exited.
// NOTE(review): package-level mutable state — consider passing the
// WaitGroup (or an errgroup) explicitly to worker instead.
var wg sync.WaitGroup

// uploadDir is the directory downloaded files are written into; it is
// created on demand by downloadFile.
var uploadDir = "uploads"

// downloadFile fetches the resource at url over HTTP and saves the
// response body into uploadDir under a unique name derived from the last
// path segment of the final (post-redirect) URL.
//
// Unlike the original, any failure aborts only this one download: calling
// log.Fatalln from a worker goroutine would terminate the entire process
// (skipping every deferred Body.Close) on the first bad URL.
func downloadFile(url string) {
	// NOTE(review): http.Get uses the default client, which has no
	// timeout — a stalled server can block a worker forever. Confirm
	// whether a per-request timeout should be added.
	res, err := http.Get(url)
	if err != nil {
		log.Println("创建请求失败:", err.Error())
		return
	}
	defer res.Body.Close()

	if res.StatusCode != http.StatusOK {
		log.Println("响应错误")
		return
	}
	if res.Header.Get("Content-Type") == "" {
		log.Println("不存在content-type")
		return
	}

	// Name the file after everything following the last '/' of the URL
	// actually fetched (res.Request reflects redirects).
	urlPath := res.Request.URL.String()
	lastIndex := strings.LastIndex(urlPath, "/")
	fileName := urlPath[lastIndex+1:]

	// Bug fix: the original tested os.MkdirAll's result but logged the
	// stale err from http.Get (always nil at this point), hiding the
	// real cause of the failure.
	if mkErr := os.MkdirAll(uploadDir, 0777); mkErr != nil {
		log.Println(mkErr)
		return
	}

	// Bug fix: the same URL is downloaded by many goroutines at once, so
	// all of them writing to one shared path would interleave bytes and
	// corrupt the file. CreateTemp gives each download its own unique
	// file; filepath.Base strips any separators so the name cannot
	// escape uploadDir.
	fd, err := os.CreateTemp(uploadDir, filepath.Base(fileName)+"-*")
	if err != nil {
		log.Println(err)
		return
	}
	defer fd.Close()

	// Stream the body straight to disk — constant memory regardless of
	// file size.
	if _, err = io.Copy(fd, res.Body); err != nil {
		log.Println("写入文件失败: ", err.Error())
		return
	}
	fmt.Println("文件保存在: ", fd.Name())
}

// worker consumes URLs from the tasks channel one at a time, downloading
// each, and signals the shared WaitGroup once the channel is closed and
// fully drained.
func worker(id int, tasks <-chan string) {
	defer wg.Done()
	for {
		u, ok := <-tasks
		if !ok {
			// Producer closed the channel: no more work.
			return
		}
		downloadFile(u)
		log.Println("worker id: ", id, " download file successfully")
	}
}
// produce enqueues the full urls list onto tasks, repeating it `times`
// rounds, then closes the channel so the consumers can drain and exit.
// Only this sender closes tasks, per channel-ownership convention.
func produce(tasks chan<- string, urls []string, times int) {
	for round := 0; round < times; round++ {
		for i := 0; i < len(urls); i++ {
			tasks <- urls[i]
		}
	}
	close(tasks)
}
// main wires a bounded pool of 20 workers to a single producer through a
// small buffered channel, then blocks until every queued download has
// been processed (workers exit when the producer closes the channel).
func main() {
	urls := []string{
		"https://imgapi.xl0408.top/index.php",
		"https://www.dmoe.cc/random.php",
		"https://img.paulzzh.com/touhou/random",
	}

	const numWorkers = 20
	tasks := make(chan string, 5)

	wg.Add(numWorkers)
	for id := 0; id < numWorkers; id++ {
		go worker(id, tasks)
	}
	go produce(tasks, urls, 100)

	wg.Wait()
}