fix: update readwise example to work without duplicating nodes

this is a workaround to the fact that Readwise uses cursor pagination
instead of offset pagination; proper handling will be added later
This commit is contained in:
Alexander Navarro 2024-12-02 09:34:12 -03:00
parent 92c9814e2a
commit 8f424d45f7
8 changed files with 42 additions and 9 deletions

View file

@ -7,5 +7,5 @@ meta {
get {
url: https://readwise.io/api/v3/list/
body: none
auth: none
auth: inherit
}

View file

@ -0,0 +1,15 @@
meta {
name: Highlights export
type: http
seq: 3
}
get {
url: https://readwise.io/api/v3/list/?withHtmlContent=true
body: none
auth: inherit
}
params:query {
withHtmlContent: true
}

View file

@ -0,0 +1,9 @@
auth {
mode: apikey
}
auth:apikey {
key: Authorization
value: Token {{API_KEY}}
placement: header
}

View file

@ -1,3 +1,3 @@
vars:secret [
API-KEY
API_KEY
]

View file

@ -22,6 +22,7 @@ type ReadwiseCursor struct {
}
type ReadwiseApiResponse struct {
Detail string `json:"detail"`
Count uint64 `json:"count"`
NextPageCursor string `json:"nextPageCursor"`
Results []ReadwiseDocument `json:"results"`
@ -70,6 +71,7 @@ func getReadwiseDocuments(
var documents []*synchronizator.Node
params := url.Values{}
params.Add("withHtmlContent", "true")
if cursor.Cursor != "" {
params.Add("pageCursor", cursor.Cursor)
}
@ -99,6 +101,14 @@ func getReadwiseDocuments(
return payload, err
}
if resp.StatusCode > 201 {
return payload, fmt.Errorf(
"Request failed with status %v: %v",
resp.StatusCode,
data.Detail,
)
}
var rawData RawReadwiseApiResponse
err = json.Unmarshal(body, &rawData)
if err != nil {
@ -164,14 +174,15 @@ func main() {
}
pagination := synchronizator.StartPagination
pagination.Pages = 0
pagination.Pages = 1
pagination.Offset = 100
pagination.Total = 100
pagination.Limit = 100
pool_config := &synchronizator.WorkConfig{
AmountOfWorkers: 5,
MaxRetries: 1,
BaseRetryTime: time.Second * 2,
RateLimit: synchronizator.NewRateLimiter(20, time.Minute),
MaxRetries: 2,
BaseRetryTime: time.Second * 30,
RateLimit: synchronizator.NewRateLimiter(10, time.Minute),
Timeout: time.Second * 2,
}

View file

@ -114,7 +114,7 @@ func (collection *Collection) FetchNodes(
collection.childs = slices.Concat(collection.childs, values)
fmt.Printf("Nodes: %v\n", len(collection.childs))
fmt.Printf("Nodes added: %d, Nodes in collection: %d\n", len(values), len(collection.childs))
err = BulkCreateNode(collection._conn, values)
if err != nil {

View file

@ -133,7 +133,6 @@ func fetchWithPagination[T any](
current_page += page_offset
for current_page <= start_pagination.Pages {
fmt.Printf("Total pages: %v, Current page: %v\n", start_pagination.Pages, current_page)
page := start_pagination
page.Offset = current_page * page.Limit
tasks <- page

View file

@ -240,7 +240,6 @@ func workUnitDispatcher[T, S any](
config.TasksProcessed.Add(1)
case <-ctx.Done():
fmt.Println("context done")
return
}
}