generated from alecodes/base-template
feat: add basic readwise example
This commit is contained in:
parent
f58db2ecaa
commit
92c9814e2a
10 changed files with 273 additions and 248 deletions
11
examples/mock_data/Readwise/Document List.bru
Normal file
11
examples/mock_data/Readwise/Document List.bru
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
meta {
|
||||
name: Document List
|
||||
type: http
|
||||
seq: 2
|
||||
}
|
||||
|
||||
get {
|
||||
url: https://readwise.io/api/v3/list/
|
||||
body: none
|
||||
auth: none
|
||||
}
|
||||
9
examples/mock_data/Readwise/bruno.json
Normal file
9
examples/mock_data/Readwise/bruno.json
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"version": "1",
|
||||
"name": "Readwise",
|
||||
"type": "collection",
|
||||
"ignore": [
|
||||
"node_modules",
|
||||
".git"
|
||||
]
|
||||
}
|
||||
3
examples/mock_data/Readwise/environments/Readwise.bru
Normal file
3
examples/mock_data/Readwise/environments/Readwise.bru
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
vars:secret [
|
||||
API-KEY
|
||||
]
|
||||
195
examples/readwise/main.go
Normal file
195
examples/readwise/main.go
Normal file
|
|
@ -0,0 +1,195 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"time"
|
||||
|
||||
_ "modernc.org/sqlite"
|
||||
|
||||
synchronizator "git.alecodes.page/alecodes/synchronizator/pkg"
|
||||
)
|
||||
|
||||
// API_TOKEN is the Readwise access token sent in the Authorization header.
// NOTE(review): empty by default — fill in before running; consider reading
// it from an environment variable instead of committing a token to source.
const API_TOKEN = ""
|
||||
|
||||
// ReadwiseCursor holds the mutable pagination cursor shared (via context)
// between successive calls to getReadwiseDocuments.
type ReadwiseCursor struct {
	// Cursor is the opaque nextPageCursor returned by the Readwise API;
	// empty means there are no further pages.
	Cursor string
}
|
||||
|
||||
// ReadwiseApiResponse is the typed shape of one Readwise Reader "list" API
// page: a total count, the cursor for the next page, and the decoded
// documents.
type ReadwiseApiResponse struct {
	Count          uint64             `json:"count"`
	NextPageCursor string             `json:"nextPageCursor"`
	Results        []ReadwiseDocument `json:"results"`
}
|
||||
|
||||
// RawReadwiseApiResponse mirrors ReadwiseApiResponse but keeps every result
// as raw JSON, so the original per-document payload can be stored verbatim
// alongside the typed decoding.
type RawReadwiseApiResponse struct {
	Count          uint64            `json:"count"`
	NextPageCursor string            `json:"nextPageCursor"`
	Results        []json.RawMessage `json:"results"` // each result kept as raw, undecoded JSON
}
|
||||
|
||||
// ReadwiseDocument is the subset of a Readwise Reader document that this
// example decodes. Fields present in the API response but not needed yet are
// kept below as commented-out placeholders for future use.
type ReadwiseDocument struct {
	Id    string `json:"id"`
	Url   string `json:"url"`
	Title string `json:"title"`
	// Author string `json:"author"`
	// Source string `json:"source"`
	// Category string `json:"category"`
	Location string `json:"location"`
	// Tags map[string]string `json:"tags"`
	// SiteName string `json:"site_name"`
	// CreatedAt string `json:"created_at"`
	// UpdatedAt string `json:"updated_at"`
	// Summary string `json:"summary"`
	SourceUrl string `json:"source_url"`
	// Notes string `json:"notes"`
	// ParentId interface{} `json:"parent_id"`
	// SavedAt string `json:"saved_at"`
	// LastMovedAt string `json:"last_moved_at"`
}
|
||||
|
||||
func getReadwiseDocuments(
|
||||
ctx context.Context,
|
||||
pagination synchronizator.Pagination,
|
||||
) (synchronizator.FetchNodesResponse, error) {
|
||||
payload := synchronizator.FetchNodesResponse{
|
||||
Pagination: pagination,
|
||||
}
|
||||
|
||||
cursor, ok := ctx.Value("readwise-cursor").(*ReadwiseCursor)
|
||||
|
||||
if !ok {
|
||||
return payload, fmt.Errorf("Couldn't retreive cursor from context!")
|
||||
}
|
||||
|
||||
var documents []*synchronizator.Node
|
||||
|
||||
params := url.Values{}
|
||||
if cursor.Cursor != "" {
|
||||
params.Add("pageCursor", cursor.Cursor)
|
||||
}
|
||||
|
||||
url := "https://readwise.io/api/v3/list?" + params.Encode()
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
fmt.Println("Error creating request:", err)
|
||||
return payload, err
|
||||
}
|
||||
|
||||
// Add the authorization header
|
||||
req.Header.Set("Authorization", "Token "+API_TOKEN)
|
||||
|
||||
client := &http.Client{}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return payload, err
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
resp.Body.Close()
|
||||
|
||||
var data ReadwiseApiResponse
|
||||
err = json.Unmarshal(body, &data)
|
||||
if err != nil {
|
||||
return payload, err
|
||||
}
|
||||
|
||||
var rawData RawReadwiseApiResponse
|
||||
err = json.Unmarshal(body, &rawData)
|
||||
if err != nil {
|
||||
return payload, err
|
||||
}
|
||||
|
||||
cursor.Cursor = data.NextPageCursor
|
||||
|
||||
documents = make([]*synchronizator.Node, 0, len(data.Results))
|
||||
|
||||
for i, document := range data.Results {
|
||||
metadata, err := json.Marshal(document)
|
||||
if err != nil {
|
||||
return payload, err
|
||||
}
|
||||
|
||||
node := synchronizator.NewNode(
|
||||
document.Title,
|
||||
"DOCUMENT",
|
||||
metadata,
|
||||
rawData.Results[i],
|
||||
)
|
||||
documents = append(documents, node)
|
||||
}
|
||||
|
||||
payload.Response = documents
|
||||
|
||||
return payload, nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
start := time.Now()
|
||||
|
||||
defer func() {
|
||||
elapsed := time.Now().Sub(start)
|
||||
fmt.Printf("\n\nExecution time took: %s", elapsed)
|
||||
}()
|
||||
|
||||
connection, err := sql.Open("sqlite", "readwise.sql")
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
defer connection.Close()
|
||||
|
||||
opts := synchronizator.DefaultOptions
|
||||
// opts.Log_level = synchronizator.DEBUG
|
||||
opts.DANGEROUSLY_DROP_TABLES = true
|
||||
|
||||
sync, err := synchronizator.New(connection, opts)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
readwiseReader, err := sync.NewPlatform("readwise_reader", nil, nil)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return
|
||||
}
|
||||
|
||||
pagination := synchronizator.StartPagination
|
||||
pagination.Pages = 0
|
||||
pagination.Total = 100
|
||||
pagination.Limit = 100
|
||||
pool_config := &synchronizator.WorkConfig{
|
||||
AmountOfWorkers: 5,
|
||||
MaxRetries: 1,
|
||||
BaseRetryTime: time.Second * 2,
|
||||
RateLimit: synchronizator.NewRateLimiter(20, time.Minute),
|
||||
Timeout: time.Second * 2,
|
||||
}
|
||||
|
||||
collection, err := readwiseReader.GetDefaultCollection()
|
||||
|
||||
cursor := &ReadwiseCursor{}
|
||||
|
||||
ctx := context.WithValue(context.Background(), "readwise-cursor", cursor)
|
||||
|
||||
for {
|
||||
err = collection.FetchNodes(ctx, getReadwiseDocuments, pagination, pool_config)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return
|
||||
}
|
||||
|
||||
if cursor.Cursor == "" {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,222 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
synchronizator "git.alecodes.page/alecodes/synchronizator/pkg"
|
||||
_ "modernc.org/sqlite"
|
||||
)
|
||||
|
||||
// ProgrammingLanguage is a node payload identified solely by its name.
type ProgrammingLanguage struct {
	Name string
}

// ToNode serializes the language into the (class, name, metadata) triple
// expected by the synchronizator node API. The metadata is a fixed
// placeholder payload.
func (p *ProgrammingLanguage) ToNode() (string, string, []byte, error) {
	// NOTE(review): this marshals a Go string, producing double-encoded
	// JSON (`"{\"test\": \"foo\"}"`); kept as-is to preserve behavior.
	placeholder, err := json.Marshal("{\"test\": \"foo\"}")
	if err != nil {
		return "", "", nil, err
	}
	return "PROGRAMMING_LANGUAGE", p.Name, placeholder, nil
}

// FromNode restores the language from a stored node, rejecting any class
// other than PROGRAMMING_LANGUAGE. The metadata payload is ignored.
func (p *ProgrammingLanguage) FromNode(_class string, name string, metadata []byte) error {
	if _class != "PROGRAMMING_LANGUAGE" {
		return fmt.Errorf("invalid class %s", _class)
	}
	p.Name = name
	return nil
}
|
||||
|
||||
// Library is a node payload with a name, a category, and free-form JSON
// metadata.
type Library struct {
	Name     string                 `json:"name"`
	Category string                 `json:"category"`
	Metadata map[string]interface{} `json:"metadata"`
}

// ToNode serializes the library into the (class, name, metadata) triple
// expected by the synchronizator node API; the metadata map is encoded as
// JSON.
func (l *Library) ToNode() (string, string, []byte, error) {
	encoded, err := json.Marshal(l.Metadata)
	if err != nil {
		return "", "", nil, err
	}
	return "LIBRARY", l.Name, encoded, nil
}

// FromNode restores the library from a stored node, rejecting any class
// other than LIBRARY and decoding the metadata JSON back into the map.
// NOTE(review): Category is not part of the triple and is therefore not
// restored here.
func (l *Library) FromNode(_class string, name string, metadata []byte) error {
	if _class != "LIBRARY" {
		return fmt.Errorf("invalid class %s", _class)
	}
	if err := json.Unmarshal(metadata, &l.Metadata); err != nil {
		return err
	}
	l.Name = name
	return nil
}
|
||||
|
||||
// Relationship marker types used as edge classes between nodes.
type (
	// BelognsTo marks a "belongs to" relationship.
	// NOTE(review): the name is misspelled ("Belogns" → "Belongs") —
	// renaming would break external references, so it is left as-is here.
	BelognsTo struct{}
	// IsSame marks an equivalence relationship between nodes.
	IsSame struct{}
)
|
||||
|
||||
func main2() {
|
||||
connection, err := sql.Open("sqlite", "db.sql")
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
defer connection.Close()
|
||||
|
||||
opts := synchronizator.DefaultOptions
|
||||
// opts.Log_level = synchronizator.DEBUG
|
||||
opts.DANGEROUSLY_DROP_TABLES = true
|
||||
|
||||
sync, err := synchronizator.New(connection, opts)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
languages, err := loadData()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
}
|
||||
|
||||
for language, libraries := range languages {
|
||||
_, err := generateCollection(
|
||||
&ProgrammingLanguage{Name: strings.ToUpper(language)},
|
||||
libraries,
|
||||
sync,
|
||||
)
|
||||
if err != nil {
|
||||
println(err)
|
||||
}
|
||||
|
||||
// fmt.Fprintf(
|
||||
// os.Stderr,
|
||||
// "libraries_collection%+v\n",
|
||||
// libraries_collection,
|
||||
// )
|
||||
}
|
||||
|
||||
golang, err := sync.GetNode(1)
|
||||
if err != nil {
|
||||
println(err)
|
||||
}
|
||||
fmt.Println("%v", golang)
|
||||
relationships, err := golang.GetOutRelations()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for _, relationship := range relationships {
|
||||
fmt.Printf("%v -> %v -> %v\n", relationship.From, relationship.GetClass(), relationship.To)
|
||||
}
|
||||
}
|
||||
|
||||
// generateCollection Main example of the usage of the synchronizator package
|
||||
func generateCollection(
|
||||
language *ProgrammingLanguage,
|
||||
libraries []Library,
|
||||
sync *synchronizator.DB,
|
||||
) (*synchronizator.Collection, error) {
|
||||
language_libraries, err := sync.NewCollection(language)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, library := range libraries {
|
||||
node, err := sync.NewNode(&library)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
data := &Library{}
|
||||
if err := node.Unmarshall(data); err != nil {
|
||||
println(err)
|
||||
}
|
||||
|
||||
if err := language_libraries.AddChild(node); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return language_libraries, nil
|
||||
}
|
||||
|
||||
func loadData() (map[string][]Library, error) {
|
||||
// Find all CSV files
|
||||
files, err := filepath.Glob("examples/mock_data/*.csv")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to glob files: %w", err)
|
||||
}
|
||||
|
||||
result := make(map[string][]Library)
|
||||
|
||||
for _, file := range files {
|
||||
// Load CSV file
|
||||
libraries, err := processCSVFile(file)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to process %s: %w", file, err)
|
||||
}
|
||||
|
||||
// Use base filename without extension as language_name
|
||||
language_name := filepath.Base(file)
|
||||
language_name = language_name[:len(language_name)-len(filepath.Ext(language_name))]
|
||||
|
||||
result[language_name] = libraries
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func processCSVFile(filename string) ([]Library, error) {
|
||||
file, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
reader := csv.NewReader(file)
|
||||
|
||||
// Skip header
|
||||
_, err = reader.Read()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var libraries []Library
|
||||
|
||||
// Read records
|
||||
for {
|
||||
record, err := reader.Read()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse metadata JSON
|
||||
var metadata map[string]interface{}
|
||||
if err := json.Unmarshal([]byte(record[2]), &metadata); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse metadata: %w", err)
|
||||
}
|
||||
|
||||
library := Library{
|
||||
Name: record[0],
|
||||
Category: record[1],
|
||||
Metadata: metadata,
|
||||
}
|
||||
libraries = append(libraries, library)
|
||||
}
|
||||
|
||||
return libraries, nil
|
||||
}
|
||||
Reference in a new issue