package main

import (
	"context"
	"database/sql"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strconv"
	"time"

	_ "modernc.org/sqlite" // registers the pure-Go SQLite driver

	synchronizator "git.alecodes.page/alecodes/synchronizator/pkg"
)

// PokeApiListResponse is the generic envelope returned by PokeAPI list endpoints.
type PokeApiListResponse[T any] struct {
	Count    uint64 `json:"count"`
	Next     string `json:"next"`
	Previous string `json:"previous"`
	Results  []T    `json:"results"`
}

type Pokedex struct {
	Name string `json:"name"`
	Url  string `json:"url"`
}

type PokeApiPokedexResponse struct {
	Pokemons []Pokemon `json:"pokemon_entries"`
}

type PokemonSpecies struct {
	Name string `json:"name"`
	Url  string `json:"url"`
}

type Pokemon struct {
	Id             int            `json:"entry_number"`
	PokemonSpecies PokemonSpecies `json:"pokemon_species"`
}

// getPokedexs fetches one page of pokedexes from PokeAPI and maps each entry
// to a synchronizator.Collection, storing the raw pokedex JSON as metadata.
func getPokedexs(
	ctx context.Context,
	pagination synchronizator.Pagination,
) (synchronizator.FetchCollectionResponse, error) {
	payload := synchronizator.FetchCollectionResponse{
		Pagination: pagination,
	}

	params := url.Values{}
	params.Add("offset", strconv.FormatUint(pagination.Offset, 10))
	params.Add("limit", strconv.FormatUint(pagination.Limit, 10))

	resp, err := http.Get("https://pokeapi.co/api/v2/pokedex?" + params.Encode())
	if err != nil {
		return payload, err
	}

	body, err := io.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		return payload, err
	}

	var data PokeApiListResponse[Pokedex]
	if err := json.Unmarshal(body, &data); err != nil {
		return payload, err
	}

	collections := make([]*synchronizator.Collection, 0, len(data.Results))
	for _, pokedex := range data.Results {
		metadata, err := json.Marshal(pokedex)
		if err != nil {
			return payload, err
		}

		collectionName := "Pokedex_" + pokedex.Name
		collections = append(collections, synchronizator.NewCollection(collectionName, metadata))
	}

	// Advance the pagination window and report the total count so the caller
	// knows when the last page has been reached.
	payload.Offset += pagination.Limit
	payload.Total = data.Count
	payload.Response = collections

	return payload, nil
}

// getPokemons downloads the pokedex referenced by the collection metadata and
// maps every pokemon entry to a synchronizator.Node. The pagination value is
// returned unchanged because the pokedex detail endpoint is not paginated.
func getPokemons(
	metadata []byte,
	pagination synchronizator.Pagination,
) ([]*synchronizator.Node, synchronizator.Pagination, error) {
	pokedex := &Pokedex{}
	if err := json.Unmarshal(metadata, pokedex); err != nil {
		return nil, pagination, err
	}

	resp, err := http.Get(pokedex.Url)
	if err != nil {
		return nil, pagination, err
	}

	body, err := io.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		return nil, pagination, err
	}

	var data PokeApiPokedexResponse
	if err := json.Unmarshal(body, &data); err != nil {
		return nil, pagination, err
	}

	nodes := make([]*synchronizator.Node, 0, len(data.Pokemons))
	for _, pokemon := range data.Pokemons {
		metadata, err := json.Marshal(pokemon)
		if err != nil {
			return nil, pagination, err
		}

		nodes = append(nodes, synchronizator.NewNode(pokemon.PokemonSpecies.Name, metadata))
	}

	return nodes, pagination, nil
}

func main() {
	start := time.Now()
	defer func() {
		fmt.Printf("\n\nExecution took: %s\n", time.Since(start))
	}()

	connection, err := sql.Open("sqlite", "db.sql")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer connection.Close()

	opts := synchronizator.DefaultOptions
	// opts.Log_level = synchronizator.DEBUG
	opts.DANGEROUSLY_DROP_TABLES = true

	sync, err := synchronizator.New(connection, opts)
	if err != nil {
		fmt.Println(err)
		return
	}

	pokeApi, err := sync.NewPlatform("pokeapi", nil)
	if err != nil {
		fmt.Println(err)
		return
	}

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	pagination := synchronizator.StartPagination

	// Worker pool configuration: five concurrent workers, up to three retries
	// with a 2s base backoff, rate limited to five requests per minute, and a
	// 2s timeout per request.
	poolConfig := &synchronizator.WorkConfig{
		AmountOfWorkers: 5,
		MaxRetries:      3,
		BaseRetryTime:   time.Second * 2,
		RateLimit:       synchronizator.NewRateLimiter(5, time.Minute),
		Timeout:         time.Second * 2,
	}

	err = pokeApi.FetchCollections(ctx, getPokedexs, pagination, poolConfig)
	if err != nil {
		fmt.Println(err)
		return
	}

	// Once the collections are synced, each pokedex could be hydrated with
	// its pokemon entries, e.g.:
	//
	// for _, pokedex := range pokeApi.Collections {
	// 	if pokedex.IsDefault() {
	// 		continue
	// 	}
	//
	// 	err = pokedex.FetchNodes(getPokemons, synchronizator.StartPagination)
	// 	if err != nil {
	// 		fmt.Println(err)
	// 		return
	// 	}
	// }
}