feat: add additional column ordering to keysetpagination (#640)
Co-authored-by: Patrik <[email protected]>
jonas-jonas and zepatrik authored Dec 1, 2022
1 parent d5dfdaa commit d608f03
Showing 5 changed files with 305 additions and 38 deletions.
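
For orientation, here is a minimal sketch, not part of this commit, of how the new pieces might be wired together in an HTTP handler. The MyItem model, the listItems handler, the column names, the GetPaginator constructor, and the import path github.com/ory/x/pagination/keysetpagination are illustrative assumptions; only Parse, WithColumn, Paginate, Result, and Header are taken from this diff:

package example

import (
	"encoding/json"
	"net/http"

	"github.com/gobuffalo/pop/v6"
	"github.com/ory/x/pagination/keysetpagination"
)

// MyItem stands in for any model whose PageToken() returns a keysetpagination.PageToken.
type MyItem struct {
	ID        string `db:"id"`
	CreatedAt string `db:"created_at"`
}

func (i MyItem) PageToken() keysetpagination.PageToken {
	return keysetpagination.MapPageToken{"id": i.ID, "created_at": i.CreatedAt}
}

func listItems(w http.ResponseWriter, r *http.Request, conn *pop.Connection) error {
	// Parse now takes a PageTokenConstructor so it can decode the incoming page_token.
	opts, err := keysetpagination.Parse(r.URL.Query(), keysetpagination.NewMapPageToken)
	if err != nil {
		return err
	}
	// New in this commit: order by an additional column before the mandatory id ordering.
	opts = append(opts, keysetpagination.WithColumn("created_at", keysetpagination.OrderDescending))
	paginator := keysetpagination.GetPaginator(opts...) // assumed constructor that applies the options

	var items []MyItem
	if err := conn.Where("deleted_at IS NULL").
		Scope(keysetpagination.Paginate[MyItem](paginator)).
		All(&items); err != nil {
		return err
	}

	items, next := keysetpagination.Result(items, paginator)
	keysetpagination.Header(w, r.URL, next)
	return json.NewEncoder(w).Encode(items)
}
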
16 changes: 12 additions & 4 deletions pagination/keysetpagination/header.go
@@ -83,18 +83,26 @@ func header(u *url.URL, rel, token string, size int) string {
// It contains links to the first and next page, if one exists.
func Header(w http.ResponseWriter, u *url.URL, p *Paginator) {
size := p.Size()
w.Header().Set("Link", header(u, "first", p.defaultToken, size))
w.Header().Set("Link", header(u, "first", p.defaultToken.Encode(), size))

if !p.IsLast() {
w.Header().Add("Link", header(u, "next", p.Token(), size))
w.Header().Add("Link", header(u, "next", p.Token().Encode(), size))
}
}

// Parse returns the pagination options from the URL query.
-func Parse(q url.Values) ([]Option, error) {
+func Parse(q url.Values, p PageTokenConstructor) ([]Option, error) {
var opts []Option
if q.Has("page_token") {
-opts = append(opts, WithToken(q.Get("page_token")))
+pageToken, err := url.QueryUnescape(q.Get("page_token"))
+if err != nil {
+return nil, errors.WithStack(err)
+}
+parsed, err := p(pageToken)
+if err != nil {
+return nil, errors.WithStack(err)
+}
+opts = append(opts, WithToken(parsed))
}
if q.Has("page_size") {
size, err := strconv.Atoi(q.Get("page_size"))
44 changes: 44 additions & 0 deletions pagination/keysetpagination/header_test.go
@@ -0,0 +1,44 @@
// Copyright © 2022 Ory Corp
// SPDX-License-Identifier: Apache-2.0

package keysetpagination

import (
"net/http/httptest"
"net/url"
"testing"

"github.com/instana/testify/assert"
"github.com/stretchr/testify/require"
)

func TestHeader(t *testing.T) {
p := &Paginator{
defaultToken: StringPageToken("default"),
token: StringPageToken("next"),
size: 2,
}

u, err := url.Parse("http://ory.sh/")
require.NoError(t, err)

r := httptest.NewRecorder()

Header(r, u, p)

links := r.HeaderMap["Link"]
require.Len(t, links, 2)
assert.Contains(t, links[0], "page_token=default")
assert.Contains(t, links[1], "page_token=next")

t.Run("with isLast", func(t *testing.T) {
p.isLast = true

Header(r, u, p)

links := r.HeaderMap["Link"]
require.Len(t, links, 1)
assert.Contains(t, links[0], "page_token=default")
})

}
74 changes: 74 additions & 0 deletions pagination/keysetpagination/page_token.go
@@ -0,0 +1,74 @@
// Copyright © 2022 Ory Corp
// SPDX-License-Identifier: Apache-2.0

package keysetpagination

import (
"encoding/base64"
"fmt"
"strings"
)

type PageToken interface {
Parse(string) map[string]string
Encode() string
}

var _ PageToken = new(StringPageToken)
var _ PageToken = new(MapPageToken)

type StringPageToken string

func (s StringPageToken) Parse(idField string) map[string]string {
return map[string]string{idField: string(s)}
}

func (s StringPageToken) Encode() string {
return string(s)
}

func NewStringPageToken(s string) (PageToken, error) {
return StringPageToken(s), nil
}

type MapPageToken map[string]string

func (m MapPageToken) Parse(_ string) map[string]string {
return map[string]string(m)
}

const pageTokenColumnDelim = "/"

func (m MapPageToken) Encode() string {
elems := make([]string, 0, len(m))
for k, v := range m {
elems = append(elems, fmt.Sprintf("%s=%s", k, v))
}

// For now, use base64 instead of URL escaping: the timestamp format we need to use can contain a `+` sign,
// which is decoded as a space in URL query strings, so it does not round-trip cleanly through the Go library's query (un)escaping.
return base64.RawStdEncoding.EncodeToString([]byte(strings.Join(elems, pageTokenColumnDelim)))
}

func NewMapPageToken(s string) (PageToken, error) {
b, err := base64.RawStdEncoding.DecodeString(s)
if err != nil {
return nil, err
}
tokens := strings.Split(string(b), pageTokenColumnDelim)

r := map[string]string{}

for _, p := range tokens {
if columnName, value, found := strings.Cut(p, "="); found {
r[columnName] = value
}
}

return MapPageToken(r), nil
}

var _ PageTokenConstructor = NewMapPageToken
var _ PageTokenConstructor = NewStringPageToken

type PageTokenConstructor = func(string) (PageToken, error)
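
As an aside, not part of the commit, a small round-trip sketch of MapPageToken based on the code above: Encode joins key=value pairs with "/" (in map iteration order) and base64-encodes the result with RawStdEncoding, and NewMapPageToken reverses that. The key names and values here are made up:

package example

import (
	"fmt"

	"github.com/ory/x/pagination/keysetpagination"
)

func main() {
	token := keysetpagination.MapPageToken{
		"id":         "abc",
		"created_at": "2022-12-01T10:00:00+01:00",
	}

	// Base64 of e.g. "created_at=2022-12-01T10:00:00+01:00/id=abc"; pair order is not guaranteed.
	encoded := token.Encode()
	fmt.Println(encoded)

	decoded, err := keysetpagination.NewMapPageToken(encoded)
	if err != nil {
		panic(err)
	}
	// MapPageToken.Parse ignores its idField argument and returns the whole map.
	fmt.Println(decoded.Parse("id")["created_at"]) // 2022-12-01T10:00:00+01:00
}
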
92 changes: 83 additions & 9 deletions pagination/keysetpagination/paginator.go
@@ -4,23 +4,51 @@
package keysetpagination

import (
"errors"
"fmt"

"github.com/gobuffalo/pop/v6"
"github.com/gobuffalo/pop/v6/columns"
)

type (
-Item interface{ PageToken() string }
+Item interface{ PageToken() PageToken }
+
+Order string
+
+columnOrdering struct {
+name string
+order Order
+}
Paginator struct {
-token, defaultToken string
+token, defaultToken PageToken
size, defaultSize, maxSize int
isLast bool
+additionalColumn columnOrdering
}
Option func(*Paginator) *Paginator
)

-func (p *Paginator) Token() string {
-if p.token == "" {
+var ErrUnknownOrder = errors.New("unknown order")
+
+const (
+OrderDescending Order = "DESC"
+OrderAscending Order = "ASC"
+)
+
+func (o Order) extract() (string, string, error) {
+switch o {
+case OrderAscending:
+return ">", string(o), nil
+case OrderDescending:
+return "<", string(o), nil
+default:
+return "", "", ErrUnknownOrder
+}
+}
+
+func (p *Paginator) Token() PageToken {
+if p.token == nil {
return p.defaultToken
}
return p.token
@@ -51,22 +79,58 @@ func (p *Paginator) ToOptions() []Option {
WithDefaultToken(p.defaultToken),
WithDefaultSize(p.defaultSize),
WithMaxSize(p.maxSize),
+WithColumn(p.additionalColumn.name, p.additionalColumn.order),
withIsLast(p.isLast),
}
}

+func (p *Paginator) multipleOrderFieldsQuery(q *pop.Query, idField string, cols map[string]*columns.Column, quote func(string) string) {
+tokenParts := p.Token().Parse(idField)
+idValue := tokenParts[idField]
+
+column, ok := cols[p.additionalColumn.name]
+if !ok {
+q.Where(fmt.Sprintf(`%s > ?`, quote(idField)), idValue)
+return
+}
+
+quoteName := quote(column.Name)
+
+value, ok := tokenParts[column.Name]
+
+if !ok {
+q.Where(fmt.Sprintf(`%s > ?`, quote(idField)), idValue)
+return
+}
+
+sign, keyword, err := p.additionalColumn.order.extract()
+if err != nil {
+q.Where(fmt.Sprintf(`%s > ?`, quote(idField)), idValue)
+return
+}
+
+q.
+Where(fmt.Sprintf("%s %s ? OR (%s = ? AND %s > ?)", quoteName, sign, quoteName, quote(idField)), value, value, idValue).
+Order(fmt.Sprintf("%s %s", quoteName, keyword))
+
+}

// Paginate returns a function that paginates a pop.Query.
// Usage:
//
// q := c.Where("foo = ?", foo).Scope(keysetpagination.Paginate[Item](paginator))
func Paginate[I Item](p *Paginator) pop.ScopeFunc {
var item I
id := (&pop.Model{Value: item}).IDField()
model := &pop.Model{Value: item}
id := model.IDField()
return func(q *pop.Query) *pop.Query {
eid := q.Connection.Dialect.Quote(id)

+p.multipleOrderFieldsQuery(q, id, model.Columns().Cols, q.Connection.Dialect.Quote)

return q.
-Limit(p.Size()+1).
-Where(fmt.Sprintf(`%s > ?`, eid), p.Token()).
+Limit(p.Size() + 1).
+// we always need to order by the id field last
Order(fmt.Sprintf(`%s ASC`, eid))
}
}
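
To make the combined effect of multipleOrderFieldsQuery and Paginate concrete, here is a rough sketch, not part of the diff, of the query shape for an id column "id" and an additional "created_at" DESC ordering; identifier quoting and placeholder syntax depend on the pop dialect:

// With an additional column configured and a token that carries both values:
//
//	WHERE created_at < ? OR (created_at = ? AND id > ?)
//	ORDER BY created_at DESC, id ASC
//	LIMIT <page size + 1>
//
// Without an additional column (or when the token has no value for it, or the
// order is unknown), the scope falls back to the previous single-column behavior:
//
//	WHERE id > ?
//	ORDER BY id ASC
//	LIMIT <page size + 1>
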
@@ -92,7 +156,7 @@ func Result[I Item](items []I, p *Paginator) ([]I, *Paginator) {
}
}

-func WithDefaultToken(t string) Option {
+func WithDefaultToken(t PageToken) Option {
return func(opts *Paginator) *Paginator {
opts.defaultToken = t
return opts
@@ -113,7 +177,7 @@ func WithMaxSize(size int) Option {
}
}

-func WithToken(t string) Option {
+func WithToken(t PageToken) Option {
return func(opts *Paginator) *Paginator {
opts.token = t
return opts
@@ -127,6 +191,16 @@ func WithSize(size int) Option {
}
}

+func WithColumn(name string, order Order) Option {
+return func(opts *Paginator) *Paginator {
+opts.additionalColumn = columnOrdering{
+name: name,
+order: order,
+}
+return opts
+}
+}

func withIsLast(isLast bool) Option {
return func(opts *Paginator) *Paginator {
opts.isLast = isLast