commit ce230479c7fb4982ae92d61e7e7dfc551969ce1d
Author: AzenKain
Date:   Wed Dec 31 16:06:58 2025 +0700

    init

diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..df13a43
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,4 @@
+build:
+	@echo Building windows binary...
+	go build -trimpath -ldflags="-s -w" .
+	@echo Done!
diff --git a/api/as.go b/api/as.go
new file mode 100644
index 0000000..e94c2fd
--- /dev/null
+++ b/api/as.go
@@ -0,0 +1,35 @@
+package api
+
+import (
+	"fmt"
+	"hakushi-crawl/utils"
+	"log"
+)
+
+func GetASEventInfoApi(eventId string, locale string) (*any, error) {
+	url := fmt.Sprintf(
+		"https://api.hakush.in/hsr/data/%s/boss/%s.json",
+		locale,
+		eventId,
+	)
+
+	data, err := utils.GetDataJSON[any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return data, nil
+}
+
+func GetListASEventApi() (map[string]*any, error) {
+	url := "https://api.hakush.in/hsr/data/maze_boss.json"
+
+	data, err := utils.GetDataJSON[map[string]*any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return *data, nil
+}
diff --git a/api/character.go b/api/character.go
new file mode 100644
index 0000000..b3a34f3
--- /dev/null
+++ b/api/character.go
@@ -0,0 +1,35 @@
+package api
+
+import (
+	"fmt"
+	"hakushi-crawl/utils"
+	"log"
+)
+
+func GetCharacterInfoApi(avatarId string, locale string) (*any, error) {
+	url := fmt.Sprintf(
+		"https://api.hakush.in/hsr/data/%s/character/%s.json",
+		locale,
+		avatarId,
+	)
+
+	data, err := utils.GetDataJSON[any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return data, nil
+}
+
+func GetListCharacterApi() (map[string]*any, error) {
+	url := "https://api.hakush.in/hsr/data/character.json"
+
+	data, err := utils.GetDataJSON[map[string]*any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return *data, nil
+}
diff --git a/api/lightcone.go b/api/lightcone.go
new file mode 100644
index 0000000..fe9fb46
--- /dev/null
+++ b/api/lightcone.go
@@ -0,0 +1,35 @@
+package api
+
+import (
+	"fmt"
+	"hakushi-crawl/utils"
+	"log"
+)
+
+func GetLightconeInfoApi(lightconeId string, locale string) (*any, error) {
+	url := fmt.Sprintf(
+		"https://api.hakush.in/hsr/data/%s/lightcone/%s.json",
+		locale,
+		lightconeId,
+	)
+
+	data, err := utils.GetDataJSON[any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return data, nil
+}
+
+func GetListLightconeApi() (map[string]*any, error) {
+	url := "https://api.hakush.in/hsr/data/lightcone.json"
+
+	data, err := utils.GetDataJSON[map[string]*any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return *data, nil
+}
diff --git a/api/moc.go b/api/moc.go
new file mode 100644
index 0000000..bb82b86
--- /dev/null
+++ b/api/moc.go
@@ -0,0 +1,35 @@
+package api
+
+import (
+	"fmt"
+	"hakushi-crawl/utils"
+	"log"
+)
+
+func GetMOCEventInfoApi(eventId string, locale string) (*any, error) {
+	url := fmt.Sprintf(
+		"https://api.hakush.in/hsr/data/%s/maze/%s.json",
+		locale,
+		eventId,
+	)
+
+	data, err := utils.GetDataJSON[any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return data, nil
+}
+
+func GetListMOCEventApi() (map[string]*any, error) {
+	url := "https://api.hakush.in/hsr/data/maze.json"
+
+	data, err := utils.GetDataJSON[map[string]*any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return *data, nil
+}
diff --git a/api/monster.go b/api/monster.go
new file mode 100644
index 0000000..83273b5
--- /dev/null
+++ b/api/monster.go
@@ -0,0 +1,35 @@
+package api
+
+import (
+	"fmt"
+	"hakushi-crawl/utils"
+	"log"
+)
+
+func GetMonsterInfoApi(monsterId string, locale string) (*any, error) {
+	url := fmt.Sprintf(
+		"https://api.hakush.in/hsr/data/%s/monster/%s.json",
+		locale,
+		monsterId,
+	)
+
+	data, err := utils.GetDataJSON[any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return data, nil
+}
+
+func GetListMonsterApi() (map[string]*any, error) {
+	url := "https://api.hakush.in/hsr/data/monster.json"
+
+	data, err := utils.GetDataJSON[map[string]*any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return *data, nil
+}
diff --git a/api/peak.go b/api/peak.go
new file mode 100644
index 0000000..2e0c991
--- /dev/null
+++ b/api/peak.go
@@ -0,0 +1,35 @@
+package api
+
+import (
+	"fmt"
+	"hakushi-crawl/utils"
+	"log"
+)
+
+func GetPeakEventInfoApi(eventId string, locale string) (*any, error) {
+	url := fmt.Sprintf(
+		"https://api.hakush.in/hsr/data/%s/peak/%s.json",
+		locale,
+		eventId,
+	)
+
+	data, err := utils.GetDataJSON[any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return data, nil
+}
+
+func GetListPeakEventApi() (map[string]*any, error) {
+	url := "https://api.hakush.in/hsr/data/maze_peak.json"
+
+	data, err := utils.GetDataJSON[map[string]*any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return *data, nil
+}
diff --git a/api/pf.go b/api/pf.go
new file mode 100644
index 0000000..e077887
--- /dev/null
+++ b/api/pf.go
@@ -0,0 +1,35 @@
+package api
+
+import (
+	"fmt"
+	"hakushi-crawl/utils"
+	"log"
+)
+
+func GetPFEventInfoApi(eventId string, locale string) (*any, error) {
+	url := fmt.Sprintf(
+		"https://api.hakush.in/hsr/data/%s/story/%s.json",
+		locale,
+		eventId,
+	)
+
+	data, err := utils.GetDataJSON[any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return data, nil
+}
+
+func GetListPFEventApi() (map[string]*any, error) {
+	url := "https://api.hakush.in/hsr/data/maze_extra.json"
+
+	data, err := utils.GetDataJSON[map[string]*any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return *data, nil
+}
diff --git a/api/relic.go b/api/relic.go
new file mode 100644
index 0000000..f72828c
--- /dev/null
+++ b/api/relic.go
@@ -0,0 +1,35 @@
+package api
+
+import (
+	"fmt"
+	"hakushi-crawl/utils"
+	"log"
+)
+
+func GetRelicInfoApi(relicId string, locale string) (*any, error) {
+	url := fmt.Sprintf(
+		"https://api.hakush.in/hsr/data/%s/relicset/%s.json",
+		locale,
+		relicId,
+	)
+
+	data, err := utils.GetDataJSON[any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return data, nil
+}
+
+func GetListRelicsetApi() (map[string]*any, error) {
+	url := "https://api.hakush.in/hsr/data/relicset.json"
+
+	data, err := utils.GetDataJSON[map[string]*any](url)
+	if err != nil {
+		log.Println(err)
+		return nil, nil
+	}
+
+	return *data, nil
+}
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..014b71b
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,3 @@
+module hakushi-crawl
+
+go 1.25.5
diff --git a/hakushi-crawl.exe b/hakushi-crawl.exe
new file mode 100644
index 0000000..4de84be
Binary files /dev/null and b/hakushi-crawl.exe differ
diff --git a/internal/as.go b/internal/as.go
new file mode 100644
index 0000000..975cc3b
--- /dev/null
+++ b/internal/as.go
@@ -0,0 +1,54 @@
+package internal
+
+import (
+	"encoding/json"
+	"fmt"
+	"hakushi-crawl/api"
+	"os"
+	"sync"
+)
+
+func SaveDataASEvent(locale string) error {
+	listItem, err := api.GetListASEventApi()
+	if err != nil {
+		return err
+	}
+
+	mapItemDetail := make(map[string]*any)
+
+	var mu sync.Mutex
+
+	jobs := make(chan string)
+	wg := sync.WaitGroup{}
+
+	workerCount := 12
+
+	for range workerCount {
+		wg.Go(func() {
+			for itemId := range jobs {
+				itemDetail, err := api.GetASEventInfoApi(itemId, locale)
+				if err != nil || itemDetail == nil {
+					continue
+				}
+				mu.Lock()
+				mapItemDetail[itemId] = itemDetail
+				mu.Unlock()
+			}
+		})
+	}
+
+	for itemId := range listItem {
+		jobs <- itemId
+	}
+	close(jobs)
+
+	wg.Wait()
+	fileName := fmt.Sprintf("as.%s.json", locale)
+
+	data, err := json.Marshal(mapItemDetail)
+	if err != nil {
+		return err
+	}
+
+	return os.WriteFile(fileName, data, 0644)
+}
diff --git a/internal/character.go b/internal/character.go
new file mode 100644
index 0000000..7ad3cfc
--- /dev/null
+++ b/internal/character.go
@@ -0,0 +1,54 @@
+package internal
+
+import (
+	"encoding/json"
+	"fmt"
+	"hakushi-crawl/api"
+	"os"
+	"sync"
+)
+
+func SaveDataCharacter(locale string) error {
+	listItem, err := api.GetListCharacterApi()
+	if err != nil {
+		return err
+	}
+
+	mapItemDetail := make(map[string]*any)
+	var mu sync.Mutex
+
+	jobs := make(chan string)
+	wg := sync.WaitGroup{}
+
+	workerCount := 12
+
+	for range workerCount {
+		wg.Go(func() {
+			for itemId := range jobs {
+				itemDetail, err := api.GetCharacterInfoApi(itemId, locale)
+				if err != nil || itemDetail == nil {
+					continue
+				}
+				mu.Lock()
+				mapItemDetail[itemId] = itemDetail
+				mu.Unlock()
+			}
+		})
+	}
+
+	for itemId := range listItem {
+		jobs <- itemId
+	}
+	close(jobs)
+
+	wg.Wait()
+
+	fileName := fmt.Sprintf("characters.%s.json", locale)
+
+	data, err := json.Marshal(mapItemDetail)
+	if err != nil {
+		return err
+	}
+
+	return os.WriteFile(fileName, data, 0644)
+}
diff --git a/internal/lightcone.go b/internal/lightcone.go
new file mode 100644
index 0000000..608798a
--- /dev/null
+++ b/internal/lightcone.go
@@ -0,0 +1,55 @@
+package internal
+
+import (
+	"encoding/json"
+	"fmt"
+	"hakushi-crawl/api"
+	"os"
+	"sync"
+)
+
+func SaveDataLightcone(locale string) error {
+	listItem, err := api.GetListLightconeApi()
+	if err != nil {
+		return err
+	}
+
+	mapItemDetail := make(map[string]*any)
+
+	var mu sync.Mutex
+
+	jobs := make(chan string)
+	wg := sync.WaitGroup{}
+
+	workerCount := 12
+
+	for range workerCount {
+		wg.Go(func() {
+			for itemId := range jobs {
+				itemDetail, err := api.GetLightconeInfoApi(itemId, locale)
+				if err != nil || itemDetail == nil {
+					continue
+				}
+				mu.Lock()
+				mapItemDetail[itemId] = itemDetail
+				mu.Unlock()
+			}
+		})
+	}
+
+	for itemId := range listItem {
+		jobs <- itemId
+	}
+	close(jobs)
+
+	wg.Wait()
+
+	fileName := fmt.Sprintf("lightcones.%s.json", locale)
+
+	data, err := json.Marshal(mapItemDetail)
+	if err != nil {
+		return err
+	}
+
+	return os.WriteFile(fileName, data, 0644)
+}
diff --git a/internal/moc.go b/internal/moc.go
new file mode 100644
index 0000000..c5c780e
--- /dev/null
+++ b/internal/moc.go
@@ -0,0 +1,55 @@
+package internal
+
+import (
+	"encoding/json"
+	"fmt"
+	"hakushi-crawl/api"
+	"os"
+	"sync"
+)
+
+func SaveDataMOCEvent(locale string) error {
+	listItem, err := api.GetListMOCEventApi()
+	if err != nil {
+		return err
+	}
+
+	mapItemDetail := make(map[string]*any)
+
+	var mu sync.Mutex
+
+	jobs := make(chan string)
+	wg := sync.WaitGroup{}
+
+	workerCount := 12
+
+	for range workerCount {
+		wg.Go(func() {
+			for itemId := range jobs {
+				itemDetail, err := api.GetMOCEventInfoApi(itemId, locale)
+				if err != nil || itemDetail == nil {
+					continue
+				}
+				mu.Lock()
+				mapItemDetail[itemId] = itemDetail
+				mu.Unlock()
+			}
+		})
+	}
+
+	for itemId := range listItem {
+		jobs <- itemId
+	}
+	close(jobs)
+
+	wg.Wait()
+
+	fileName := fmt.Sprintf("moc.%s.json", locale)
+
+	data, err := json.Marshal(mapItemDetail)
+	if err != nil {
+		return err
+	}
+
+	return os.WriteFile(fileName, data, 0644)
+}
diff --git a/internal/monster.go b/internal/monster.go
new file mode 100644
index 0000000..5895631
--- /dev/null
+++ b/internal/monster.go
@@ -0,0 +1,55 @@
+package internal
+
+import (
+	"encoding/json"
+	"fmt"
+	"hakushi-crawl/api"
+	"os"
+	"sync"
+)
+
+func SaveDataMonster(locale string) error {
+	listItem, err := api.GetListMonsterApi()
+	if err != nil {
+		return err
+	}
+
+	mapItemDetail := make(map[string]*any)
+
+	var mu sync.Mutex
+
+	jobs := make(chan string)
+	wg := sync.WaitGroup{}
+
+	workerCount := 12
+
+	for range workerCount {
+		wg.Go(func() {
+			for itemId := range jobs {
+				itemDetail, err := api.GetMonsterInfoApi(itemId, locale)
+				if err != nil || itemDetail == nil {
+					continue
+				}
+				mu.Lock()
+				mapItemDetail[itemId] = itemDetail
+				mu.Unlock()
+			}
+		})
+	}
+
+	for itemId := range listItem {
+		jobs <- itemId
+	}
+	close(jobs)
+
+	wg.Wait()
+
+	fileName := fmt.Sprintf("monsters.%s.json", locale)
+
+	data, err := json.Marshal(mapItemDetail)
+	if err != nil {
+		return err
+	}
+
+	return os.WriteFile(fileName, data, 0644)
+}
diff --git a/internal/peak.go b/internal/peak.go
new file mode 100644
index 0000000..c7b9b63
--- /dev/null
+++ b/internal/peak.go
@@ -0,0 +1,55 @@
+package internal
+
+import (
+	"encoding/json"
+	"fmt"
+	"hakushi-crawl/api"
+	"os"
+	"sync"
+)
+
+func SaveDataPeakEvent(locale string) error {
+	listItem, err := api.GetListPeakEventApi()
+	if err != nil {
+		return err
+	}
+
+	mapItemDetail := make(map[string]*any)
+
+	var mu sync.Mutex
+
+	jobs := make(chan string)
+	wg := sync.WaitGroup{}
+
+	workerCount := 12
+
+	for range workerCount {
+		wg.Go(func() {
+			for itemId := range jobs {
+				itemDetail, err := api.GetPeakEventInfoApi(itemId, locale)
+				if err != nil || itemDetail == nil {
+					continue
+				}
+				mu.Lock()
+				mapItemDetail[itemId] = itemDetail
+				mu.Unlock()
+			}
+		})
+	}
+
+	for itemId := range listItem {
+		jobs <- itemId
+	}
+	close(jobs)
+
+	wg.Wait()
+
+	fileName := fmt.Sprintf("peak.%s.json", locale)
+
+	data, err := json.Marshal(mapItemDetail)
+	if err != nil {
+		return err
+	}
+
+	return os.WriteFile(fileName, data, 0644)
+}
diff --git a/internal/pf.go b/internal/pf.go
new file mode 100644
index 0000000..f6cb987
--- /dev/null
+++ b/internal/pf.go
@@ -0,0 +1,55 @@
+package internal
+
+import (
+	"encoding/json"
+	"fmt"
+	"hakushi-crawl/api"
+	"os"
+	"sync"
+)
+
+func SaveDataPFEvent(locale string) error {
+	listItem, err := api.GetListPFEventApi()
+	if err != nil {
+		return err
+	}
+
+	mapItemDetail := make(map[string]*any)
+
+	var mu sync.Mutex
+
+	jobs := make(chan string)
+	wg := sync.WaitGroup{}
+
+	workerCount := 12
+
+	for range workerCount {
+		wg.Go(func() {
+			for itemId := range jobs {
+				itemDetail, err := api.GetPFEventInfoApi(itemId, locale)
+				if err != nil || itemDetail == nil {
+					continue
+				}
+				mu.Lock()
+				mapItemDetail[itemId] = itemDetail
+				mu.Unlock()
+			}
+		})
+	}
+
+	for itemId := range listItem {
+		jobs <- itemId
+	}
+	close(jobs)
+
+	wg.Wait()
+
+	fileName := fmt.Sprintf("pf.%s.json", locale)
+
+	data, err := json.Marshal(mapItemDetail)
+	if err != nil {
+		return err
+	}
+
+	return os.WriteFile(fileName, data, 0644)
+}
diff --git a/internal/relic.go b/internal/relic.go
new file mode 100644
index 0000000..1e9b8b0
--- /dev/null
+++ b/internal/relic.go
@@ -0,0 +1,55 @@
+package internal
+
+import (
+	"encoding/json"
+	"fmt"
+	"hakushi-crawl/api"
+	"os"
+	"sync"
+)
+
+func SaveDataRelic(locale string) error {
+	listItem, err := api.GetListRelicsetApi()
+	if err != nil {
+		return err
+	}
+
+	mapItemDetail := make(map[string]*any)
+
+	var mu sync.Mutex
+
+	jobs := make(chan string)
+	wg := sync.WaitGroup{}
+
+	workerCount := 12
+
+	for range workerCount {
+		wg.Go(func() {
+			for itemId := range jobs {
+				itemDetail, err := api.GetRelicInfoApi(itemId, locale)
+				if err != nil || itemDetail == nil {
+					continue
+				}
+				mu.Lock()
+				mapItemDetail[itemId] = itemDetail
+				mu.Unlock()
+			}
+		})
+	}
+
+	for itemId := range listItem {
+		jobs <- itemId
+	}
+	close(jobs)
+
+	wg.Wait()
+
+	fileName := fmt.Sprintf("relics.%s.json", locale)
+
+	data, err := json.Marshal(mapItemDetail)
+	if err != nil {
+		return err
+	}
+
+	return os.WriteFile(fileName, data, 0644)
+}
diff --git a/main.go b/main.go
new file mode 100644
index 0000000..60a1272
--- /dev/null
+++ b/main.go
@@ -0,0 +1,38 @@
+package main
+
+import (
+	"fmt"
+	"hakushi-crawl/internal"
+)
+
+func main() {
+	listLocale := []string{
+		"en", "jp", "cn", "kr",
+	}
+	for _, locale := range listLocale {
+		fmt.Printf("Fetching character data for %s\n", locale)
+		internal.SaveDataCharacter(locale)
+
+		fmt.Printf("Fetching lightcone data for %s\n", locale)
+		internal.SaveDataLightcone(locale)
+
+		fmt.Printf("Fetching relic data for %s\n", locale)
+		internal.SaveDataRelic(locale)
+
+		fmt.Printf("Fetching monster data for %s\n", locale)
+		internal.SaveDataMonster(locale)
+
+		fmt.Printf("Fetching moc data for %s\n", locale)
+		internal.SaveDataMOCEvent(locale)
+
+		fmt.Printf("Fetching pf data for %s\n", locale)
+		internal.SaveDataPFEvent(locale)
+
+		fmt.Printf("Fetching as data for %s\n", locale)
+		internal.SaveDataASEvent(locale)
+
+		fmt.Printf("Fetching peak data for %s\n", locale)
+		internal.SaveDataPeakEvent(locale)
+	}
+	fmt.Println("DONE!")
+}
diff --git a/utils/utils.go b/utils/utils.go
new file mode 100644
index 0000000..dc5906f
--- /dev/null
+++ b/utils/utils.go
@@ -0,0 +1,34 @@
+package utils
+
+import (
+	"encoding/json"
+	"fmt"
+	"net/http"
+)
+
+func GetDataJSON[T any](url string) (*T, error) {
+	req, err := http.NewRequest("GET", url, nil)
+	if err != nil {
+		return nil, err
+	}
+
+	req.Header.Set("Content-Type", "application/json")
+
+	client := &http.Client{}
+	resp, err := client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode != http.StatusOK {
+		return nil, fmt.Errorf("http status %d", resp.StatusCode)
+	}
+
+	var data T
+	if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
+		return nil, err
+	}
+
+	return &data, nil
+}