path: root/backend/internal/leader/manager/ibd/auth/auth.go
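// Package auth implements the leader-side manager that periodically enqueues
// IBD cookie (re-)authentication tasks for workers to process.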
package auth

import (
	"context"
	"log/slog"
	"time"

	"github.com/ansg191/ibd-trader/backend/internal/database"
	"github.com/ansg191/ibd-trader/backend/internal/redis/taskqueue"

	"github.com/redis/go-redis/v9"
	"github.com/robfig/cron/v3"
)

const (
	// Queue is the name of the Redis task queue that authentication tasks are enqueued to.
	Queue = "auth-queue"

	// QueueEncoding is the encoding used for TaskInfo payloads on the queue.
	QueueEncoding = taskqueue.EncodingJSON
)

// Manager is responsible for sending authentication tasks to the workers.
type Manager struct {
	queue    taskqueue.TaskQueue[TaskInfo]
	db       database.Executor
	schedule cron.Schedule
}

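// New creates a Manager that enqueues authentication tasks onto the
// "auth-queue" Redis task queue (JSON-encoded) for workers to process.
//
// A minimal wiring sketch from the leader's setup code (the Redis address and
// cron spec are illustrative assumptions, and db is assumed to be an
// already-configured database.Executor):
//
//	rClient := redis.NewClient(&redis.Options{Addr: "localhost:6379"})
//	schedule, err := cron.ParseStandard("0 6 * * *") // daily at 06:00
//	if err != nil {
//		return err
//	}
//	mgr, err := auth.New(ctx, db, rClient, schedule)
//	if err != nil {
//		return err
//	}
//	go mgr.Run(ctx) // Run blocks until ctx is cancelled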
func New(
	ctx context.Context,
	db database.Executor,
	rClient *redis.Client,
	schedule cron.Schedule,
) (*Manager, error) {
	queue, err := taskqueue.New(
		ctx,
		rClient,
		Queue,
		"auth-manager",
		taskqueue.WithEncoding[TaskInfo](QueueEncoding),
	)
	if err != nil {
		return nil, err
	}

	return &Manager{
		queue:    queue,
		db:       db,
		schedule: schedule,
	}, nil
}

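// Run executes the scheduling loop: it waits until the schedule's next
// activation time, enqueues an authentication task for every user with IBD
// credentials, and repeats until ctx is cancelled.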
func (m *Manager) Run(ctx context.Context) {
	for {
		now := time.Now()
		// Find the next time
		nextTime := m.schedule.Next(now)
		if nextTime.IsZero() {
			// No upcoming activation; retry tomorrow, but honor cancellation while waiting.
			select {
			case <-time.After(time.Until(now.AddDate(0, 0, 1))):
			case <-ctx.Done():
				return
			}
			continue
		}

		timer := time.NewTimer(nextTime.Sub(now))
		slog.DebugContext(ctx, "waiting for next Auth scrape", "next_exec", nextTime)

		select {
		case <-timer.C:
			// Bound this run by the schedule's following activation time.
			nextExec := m.schedule.Next(nextTime)
			m.scrapeCookies(ctx, nextExec)
		case <-ctx.Done():
			// Stop the timer, draining its channel if it already fired.
			if !timer.Stop() {
				<-timer.C
			}
			return
		}
	}
}

// scrapeCookies enqueues a cookie-refresh task for every user with IBD
// credentials.
//
// Workers consume these tasks from the auth queue, check whether each user's
// cookies are still valid, and, if the cookies are invalid or missing,
// re-authenticate the user and update the cookies in the database. The
// deadline bounds this run to the schedule's next activation time.
func (m *Manager) scrapeCookies(ctx context.Context, deadline time.Time) {
	ctx, cancel := context.WithDeadline(ctx, deadline)
	defer cancel()

	// Get all users with IBD credentials
	users, err := database.ListUsers(ctx, m.db, true)
	if err != nil {
		slog.ErrorContext(ctx, "failed to get users", "error", err)
		return
	}

	// Create a new task for each user
	for _, user := range users {
		task := TaskInfo{
			UserSubject: user.Subject,
		}

		// Enqueue the task
		_, err := m.queue.Enqueue(ctx, task)
		if err != nil {
			slog.ErrorContext(ctx, "failed to enqueue task", "error", err)
		}
	}

	slog.InfoContext(ctx, "enqueued tasks for all users")
}

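// TaskInfo is the payload enqueued for each user's authentication task. With
// QueueEncoding (EncodingJSON) it is serialized as, for example (the subject
// value is an illustrative placeholder):
//
//	{"user_subject": "auth0|example-user"}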
type TaskInfo struct {
	UserSubject string `json:"user_subject"`
}