UPDATE: Change cursor-based pagination to offset-based pagination, because the frontend does not implement cursor pagination
All checks were successful
Build and Release / release (push) Successful in 1m3s

This commit is contained in:
2026-04-08 13:35:18 +07:00
parent ff478f33b4
commit 82241b432e
17 changed files with 683 additions and 310 deletions

View File

@@ -11,6 +11,11 @@ type PreSignedCompleteDto struct {
}
type SearchMediaDto struct {
CursorPaginationDto
Search string `json:"search" query:"search" validate:"omitempty,min=2,max=200"`
PaginationDto
Sort string `json:"sort" query:"sort" validate:"omitempty,oneof=id created_at updated_at size original_name storage_key mime_type"`
Search string `json:"search" query:"search" validate:"omitempty,min=2,max=200"`
UserIDs []string `json:"user_ids" query:"user_ids" validate:"omitempty,dive,uuid"`
MimeType string `json:"mime_type" query:"mime_type" validate:"omitempty,max=100"`
MinSize *int64 `json:"min_size" query:"min_size" validate:"omitempty,min=0"`
MaxSize *int64 `json:"max_size" query:"max_size" validate:"omitempty,min=0,gtefield=MinSize"`
}

View File

@@ -1,5 +1,7 @@
package request
import "time"
type UpdateProfileDto struct {
DisplayName string `json:"display_name" validate:"omitempty,min=2,max=50"`
FullName string `json:"full_name" validate:"omitempty,min=2,max=100"`
@@ -21,21 +23,18 @@ type ChangeRoleDto struct {
Roles []string `json:"role_ids" validate:"required,min=1,dive,required,uuid"`
}
// GetAllUserDto carries the filters accepted by the list-all-users endpoint.
// NOTE(review): this DTO still embeds CursorPaginationDto while the sibling
// search DTOs in this commit moved to the offset-based PaginationDto —
// confirm whether this endpoint was meant to migrate as well.
type GetAllUserDto struct {
CursorPaginationDto
IsDeleted *bool `json:"is_deleted" query:"is_deleted" validate:"omitempty"` // optional soft-delete filter; nil means "any"
RoleIDs []string `json:"role_ids" query:"role_ids" validate:"omitempty,dive,uuid"` // optional role filter; each entry must be a UUID
}
type CursorPaginationDto struct {
Cursor string `json:"cursor" query:"cursor" validate:"omitempty,uuid"`
Limit int `json:"limit" query:"limit" validate:"required,min=1,max=100"`
Sort string `json:"sort" query:"sort" validate:"omitempty,oneof=id created_at updated_at"`
Order string `json:"order" query:"order" validate:"omitempty,oneof=asc desc"`
// PaginationDto holds the common offset-based pagination query parameters,
// intended for embedding in search/list request DTOs.
type PaginationDto struct {
Page int `json:"page" query:"page" validate:"omitempty,min=1"` // 1-based page number; optional, services default it to 1
Limit int `json:"limit" query:"limit" validate:"required,min=1,max=100"` // page size; required, capped at 100
Order string `json:"order" query:"order" validate:"omitempty,oneof=asc desc"` // sort direction; optional, "asc" or "desc"
}
type SearchUserDto struct {
CursorPaginationDto
Search string `json:"search" query:"search" validate:"omitempty,min=2,max=200"`
IsDeleted *bool `json:"is_deleted" query:"is_deleted" validate:"omitempty"`
RoleIDs []string `json:"role_ids" query:"role_ids" validate:"omitempty,dive,uuid"`
PaginationDto
Sort string `json:"sort" query:"sort" validate:"omitempty,oneof=id created_at updated_at email is_deleted auth_provider"`
Search string `json:"search" query:"search" validate:"omitempty,min=2,max=200"`
IsDeleted *bool `json:"is_deleted" query:"is_deleted" validate:"omitempty"`
RoleIDs []string `json:"role_ids" query:"role_ids" validate:"omitempty,dive,uuid"`
AuthProvider string `json:"auth_provider" query:"auth_provider" validate:"omitempty"`
CreatedFrom *time.Time `json:"created_from" query:"created_from" validate:"omitempty"`
CreatedTo *time.Time `json:"created_to" query:"created_to" validate:"omitempty"`
}

View File

@@ -13,18 +13,45 @@ type CommonResponse struct {
}
type JWTClaims struct {
UId string `json:"uid"`
Roles []constants.Role `json:"roles"`
TokenVersion int32 `json:"token_version"`
UId string `json:"uid"`
Roles []constants.Role `json:"roles"`
TokenVersion int32 `json:"token_version"`
jwt.RegisteredClaims
}
type PaginatedResponse struct {
Data any `json:"data"`
Status bool `json:"status"`
Message string `json:"message"`
Pagination struct {
NextCursor string `json:"next_cursor"`
HasMore bool `json:"has_more"`
} `json:"pagination"`
// PaginationMeta describes the offset-based pagination state returned
// alongside a list payload.
type PaginationMeta struct {
	CurrentPage  int   `json:"current_page"`  // 1-based page that was served
	PageSize     int   `json:"page_size"`     // number of records per page
	TotalRecords int64 `json:"total_records"` // total matching records across all pages
	TotalPages   int   `json:"total_pages"`   // ceil(TotalRecords / PageSize)
}

// PaginatedResponse is the standard envelope for paginated list endpoints.
type PaginatedResponse struct {
	Status     bool            `json:"status"`
	Message    string          `json:"message"`
	Data       any             `json:"data"`
	Pagination *PaginationMeta `json:"pagination"`
}

// BuildPaginatedResponse wraps data in a successful PaginatedResponse.
// It normalizes page to at least 1, defaults a non-positive limit to 10,
// clamps a negative totalRecords to 0, and computes TotalPages with
// ceiling division.
func BuildPaginatedResponse(data any, totalRecords int64, page int, limit int) *PaginatedResponse {
	if page < 1 {
		page = 1
	}
	if limit < 1 {
		limit = 10
	}
	if totalRecords < 0 {
		// Defensive: a negative count would otherwise produce a
		// negative TotalPages in the response.
		totalRecords = 0
	}
	totalPages := int((totalRecords + int64(limit) - 1) / int64(limit))
	return &PaginatedResponse{
		Status:  true,
		Message: "Success",
		Data:    data,
		Pagination: &PaginationMeta{
			CurrentPage:  page,
			PageSize:     limit,
			TotalRecords: totalRecords,
			TotalPages:   totalPages,
		},
	}
}

View File

@@ -11,6 +11,43 @@ import (
"github.com/jackc/pgx/v5/pgtype"
)
const countMedias = `-- name: CountMedias :one
SELECT count(*)
FROM medias
WHERE
($1::uuid[] IS NULL OR user_id = ANY($1::uuid[]))
AND ($2::text IS NULL OR mime_type ILIKE $2::text || '%')
AND ($3::bigint IS NULL OR size >= $3::bigint)
AND ($4::bigint IS NULL OR size <= $4::bigint)
AND (
$5::text IS NULL OR
id::text ILIKE '%' || $5::text || '%' OR
original_name ILIKE '%' || $5::text || '%' OR
storage_key ILIKE '%' || $5::text || '%'
)
`
// CountMediasParams holds the optional filters for the CountMedias query.
// Invalid (zero-value) pgtype fields are treated as NULL by the query and
// disable the corresponding filter.
type CountMediasParams struct {
UserIds []pgtype.UUID `json:"user_ids"` // restrict to these owners; nil matches all users
MimeType pgtype.Text `json:"mime_type"` // prefix match on mime_type (ILIKE $2 || '%')
MinSize pgtype.Int8 `json:"min_size"` // inclusive lower bound on size
MaxSize pgtype.Int8 `json:"max_size"` // inclusive upper bound on size
SearchText pgtype.Text `json:"search_text"` // substring match on id, original_name, or storage_key
}
// CountMedias returns the number of media rows matching the optional
// user, mime-type, size, and free-text filters in arg.
func (q *Queries) CountMedias(ctx context.Context, arg CountMediasParams) (int64, error) {
	var total int64
	err := q.db.QueryRow(ctx, countMedias,
		arg.UserIds,
		arg.MimeType,
		arg.MinSize,
		arg.MaxSize,
		arg.SearchText,
	).Scan(&total)
	return total, err
}
const createMedia = `-- name: CreateMedia :one
INSERT INTO medias (
user_id, storage_key, original_name, mime_type, size, file_metadata
@@ -122,62 +159,69 @@ func (q *Queries) GetMediasByUserID(ctx context.Context, userID pgtype.UUID) ([]
}
const searchMedias = `-- name: SearchMedias :many
SELECT id, user_id, storage_key, original_name, mime_type, size, file_metadata, created_at, updated_at
SELECT
id, user_id, storage_key, original_name, mime_type, size, file_metadata, created_at, updated_at
FROM medias
WHERE
($1::uuid IS NULL OR id > $1::uuid)
($1::uuid[] IS NULL OR user_id = ANY($1::uuid[]))
AND ($2::text IS NULL OR mime_type ILIKE $2::text || '%')
AND ($3::bigint IS NULL OR size >= $3::bigint)
AND ($4::bigint IS NULL OR size <= $4::bigint)
AND (
$2::text IS NULL OR
original_name ILIKE '%' || $2::text || '%' OR
storage_key ILIKE '%' || $2::text || '%'
$5::text IS NULL OR
id::text ILIKE '%' || $5::text || '%' OR
original_name ILIKE '%' || $5::text || '%' OR
storage_key ILIKE '%' || $5::text || '%'
)
ORDER BY
-- id
CASE
WHEN $3 = 'id' AND $4 = 'asc' THEN id
END ASC,
CASE
WHEN $3 = 'id' AND $4 = 'desc' THEN id
END DESC,
CASE WHEN $6 = 'id' AND $7 = 'asc' THEN id END ASC,
CASE WHEN $6 = 'id' AND $7 = 'desc' THEN id END DESC,
-- created_at
CASE
WHEN $3 = 'created_at' AND $4 = 'asc' THEN created_at
END ASC,
CASE
WHEN $3 = 'created_at' AND $4 = 'desc' THEN created_at
END DESC,
CASE WHEN $6 = 'created_at' AND $7 = 'asc' THEN created_at END ASC,
CASE WHEN $6 = 'created_at' AND $7 = 'desc' THEN created_at END DESC,
-- updated_at
CASE
WHEN $3 = 'updated_at' AND $4 = 'asc' THEN updated_at
END ASC,
CASE
WHEN $3 = 'updated_at' AND $4 = 'desc' THEN updated_at
END DESC,
CASE WHEN $6 = 'updated_at' AND $7 = 'asc' THEN updated_at END ASC,
CASE WHEN $6 = 'updated_at' AND $7 = 'desc' THEN updated_at END DESC,
-- fallback
CASE WHEN $6 = 'size' AND $7 = 'asc' THEN size END ASC,
CASE WHEN $6 = 'size' AND $7 = 'desc' THEN size END DESC,
CASE WHEN $6 = 'original_name' AND $7 = 'asc' THEN original_name END ASC,
CASE WHEN $6 = 'original_name' AND $7 = 'desc' THEN original_name END DESC,
CASE WHEN $6 = 'storage_key' AND $7 = 'asc' THEN storage_key END ASC,
CASE WHEN $6 = 'storage_key' AND $7 = 'desc' THEN storage_key END DESC,
CASE WHEN $6 = 'mime_type' AND $7 = 'asc' THEN mime_type END ASC,
CASE WHEN $6 = 'mime_type' AND $7 = 'desc' THEN mime_type END DESC,
id ASC
LIMIT $5
LIMIT $9
OFFSET $8
`
type SearchMediasParams struct {
Cursor pgtype.UUID `json:"cursor"`
SearchText pgtype.Text `json:"search_text"`
Sort interface{} `json:"sort"`
Order interface{} `json:"order"`
Limit int32 `json:"limit"`
UserIds []pgtype.UUID `json:"user_ids"`
MimeType pgtype.Text `json:"mime_type"`
MinSize pgtype.Int8 `json:"min_size"`
MaxSize pgtype.Int8 `json:"max_size"`
SearchText pgtype.Text `json:"search_text"`
Sort interface{} `json:"sort"`
Order interface{} `json:"order"`
Offset int32 `json:"offset"`
Limit int32 `json:"limit"`
}
func (q *Queries) SearchMedias(ctx context.Context, arg SearchMediasParams) ([]Media, error) {
rows, err := q.db.Query(ctx, searchMedias,
arg.Cursor,
arg.UserIds,
arg.MimeType,
arg.MinSize,
arg.MaxSize,
arg.SearchText,
arg.Sort,
arg.Order,
arg.Offset,
arg.Limit,
)
if err != nil {

View File

@@ -11,6 +11,60 @@ import (
"github.com/jackc/pgx/v5/pgtype"
)
const countUsers = `-- name: CountUsers :one
SELECT count(*)
FROM users u
WHERE
($1::boolean IS NULL OR u.is_deleted = $1::boolean)
AND (
$2::uuid[] IS NULL OR
EXISTS (
SELECT 1 FROM user_roles ur2
WHERE ur2.user_id = u.id
AND ur2.role_id = ANY($2::uuid[])
)
)
AND ($3::text IS NULL OR u.auth_provider = $3::text)
AND ($4::timestamp IS NULL OR u.created_at >= $4::timestamp)
AND ($5::timestamp IS NULL OR u.created_at <= $5::timestamp)
AND (
$6::text IS NULL OR
u.id::text ILIKE '%' || $6::text || '%' OR
u.email ILIKE '%' || $6::text || '%' OR
EXISTS (
SELECT 1 FROM user_profiles p
WHERE p.user_id = u.id
AND (
p.full_name ILIKE '%' || $6::text || '%' OR
p.phone ILIKE '%' || $6::text || '%'
)
)
)
`
// CountUsersParams holds the optional filters for the CountUsers query.
// Invalid (zero-value) pgtype fields are treated as NULL by the query and
// disable the corresponding filter.
type CountUsersParams struct {
IsDeleted pgtype.Bool `json:"is_deleted"` // soft-delete filter; invalid matches both states
RoleIds []pgtype.UUID `json:"role_ids"` // match users holding any of these roles (EXISTS on user_roles)
AuthProvider pgtype.Text `json:"auth_provider"` // exact match on auth_provider
CreatedFrom pgtype.Timestamp `json:"created_from"` // inclusive lower bound on created_at
CreatedTo pgtype.Timestamp `json:"created_to"` // inclusive upper bound on created_at
SearchText pgtype.Text `json:"search_text"` // substring match on id, email, profile full_name, or phone
}
// CountUsers returns the number of user rows matching the optional
// deletion-state, role, auth-provider, creation-window, and free-text
// filters in arg.
func (q *Queries) CountUsers(ctx context.Context, arg CountUsersParams) (int64, error) {
	var total int64
	err := q.db.QueryRow(ctx, countUsers,
		arg.IsDeleted,
		arg.RoleIds,
		arg.AuthProvider,
		arg.CreatedFrom,
		arg.CreatedTo,
		arg.SearchText,
	).Scan(&total)
	return total, err
}
const createUserProfile = `-- name: CreateUserProfile :one
INSERT INTO user_profiles (
user_id,
@@ -304,11 +358,12 @@ SELECT
u.email,
u.password_hash,
u.token_version,
u.google_id,
u.auth_provider,
u.refresh_token,
u.is_deleted,
u.created_at,
u.updated_at,
(
SELECT json_build_object(
'display_name', p.display_name,
@@ -323,7 +378,6 @@ SELECT
FROM user_profiles p
WHERE p.user_id = u.id
) AS profile,
(
SELECT COALESCE(
json_agg(json_build_object('id', r.id, 'name', r.name)),
@@ -333,67 +387,62 @@ SELECT
JOIN roles r ON ur.role_id = r.id
WHERE ur.user_id = u.id
) AS roles
FROM users u
WHERE
($1::uuid IS NULL OR u.id > $1::uuid)
AND ($2::boolean IS NULL OR u.is_deleted = $2::boolean)
($1::boolean IS NULL OR u.is_deleted = $1::boolean)
AND (
$3::uuid[] IS NULL OR
$2::uuid[] IS NULL OR
EXISTS (
SELECT 1 FROM user_roles ur2
WHERE ur2.user_id = u.id
AND ur2.role_id = ANY($3::uuid[])
AND ur2.role_id = ANY($2::uuid[])
)
)
AND ($4::uuid IS NULL OR u.id = $4::uuid)
AND ($3::text IS NULL OR u.auth_provider = $3::text)
AND ($4::timestamp IS NULL OR u.created_at >= $4::timestamp)
AND ($5::timestamp IS NULL OR u.created_at <= $5::timestamp)
AND (
$5::text IS NULL OR
u.email ILIKE '%' || $5::text || '%'
$6::text IS NULL OR
u.id::text ILIKE '%' || $6::text || '%' OR
u.email ILIKE '%' || $6::text || '%' OR
EXISTS (
SELECT 1 FROM user_profiles p
WHERE p.user_id = u.id
AND (
p.full_name ILIKE '%' || $6::text || '%' OR
p.phone ILIKE '%' || $6::text || '%'
)
)
)
ORDER BY
-- id
CASE
WHEN $6 = 'id' AND $7 = 'asc' THEN id
END ASC,
CASE
WHEN $6 = 'id' AND $7 = 'desc' THEN id
END DESC,
-- created_at
CASE
WHEN $6 = 'created_at' AND $7 = 'asc' THEN u.created_at
END ASC,
CASE
WHEN $6 = 'created_at' AND $7 = 'desc' THEN u.created_at
END DESC,
-- updated_at
CASE
WHEN $6 = 'updated_at' AND $7 = 'asc' THEN u.updated_at
END ASC,
CASE
WHEN $6 = 'updated_at' AND $7 = 'desc' THEN u.updated_at
END DESC,
-- fallback
CASE WHEN $7 = 'id' AND $8 = 'asc' THEN u.id END ASC,
CASE WHEN $7 = 'id' AND $8 = 'desc' THEN u.id END DESC,
CASE WHEN $7 = 'created_at' AND $8 = 'asc' THEN u.created_at END ASC,
CASE WHEN $7 = 'created_at' AND $8 = 'desc' THEN u.created_at END DESC,
CASE WHEN $7 = 'updated_at' AND $8 = 'asc' THEN u.updated_at END ASC,
CASE WHEN $7 = 'updated_at' AND $8 = 'desc' THEN u.updated_at END DESC,
CASE WHEN $7 = 'email' AND $8 = 'asc' THEN u.email END ASC,
CASE WHEN $7 = 'email' AND $8 = 'desc' THEN u.email END DESC,
CASE WHEN $7 = 'is_deleted' AND $8 = 'asc' THEN u.is_deleted END ASC,
CASE WHEN $7 = 'is_deleted' AND $8 = 'desc' THEN u.is_deleted END DESC,
CASE WHEN $7 = 'auth_provider' AND $8 = 'asc' THEN u.auth_provider END ASC,
CASE WHEN $7 = 'auth_provider' AND $8 = 'desc' THEN u.auth_provider END DESC,
u.id ASC
LIMIT $8
LIMIT $10
OFFSET $9
`
type SearchUsersParams struct {
Cursor pgtype.UUID `json:"cursor"`
IsDeleted pgtype.Bool `json:"is_deleted"`
RoleIds []pgtype.UUID `json:"role_ids"`
SearchID pgtype.UUID `json:"search_id"`
SearchText pgtype.Text `json:"search_text"`
Sort interface{} `json:"sort"`
Order interface{} `json:"order"`
Limit int32 `json:"limit"`
IsDeleted pgtype.Bool `json:"is_deleted"`
RoleIds []pgtype.UUID `json:"role_ids"`
AuthProvider pgtype.Text `json:"auth_provider"`
CreatedFrom pgtype.Timestamp `json:"created_from"`
CreatedTo pgtype.Timestamp `json:"created_to"`
SearchText pgtype.Text `json:"search_text"`
Sort interface{} `json:"sort"`
Order interface{} `json:"order"`
Offset int32 `json:"offset"`
Limit int32 `json:"limit"`
}
type SearchUsersRow struct {
@@ -401,6 +450,8 @@ type SearchUsersRow struct {
Email string `json:"email"`
PasswordHash pgtype.Text `json:"password_hash"`
TokenVersion int32 `json:"token_version"`
GoogleID pgtype.Text `json:"google_id"`
AuthProvider string `json:"auth_provider"`
RefreshToken pgtype.Text `json:"refresh_token"`
IsDeleted bool `json:"is_deleted"`
CreatedAt pgtype.Timestamptz `json:"created_at"`
@@ -411,13 +462,15 @@ type SearchUsersRow struct {
func (q *Queries) SearchUsers(ctx context.Context, arg SearchUsersParams) ([]SearchUsersRow, error) {
rows, err := q.db.Query(ctx, searchUsers,
arg.Cursor,
arg.IsDeleted,
arg.RoleIds,
arg.SearchID,
arg.AuthProvider,
arg.CreatedFrom,
arg.CreatedTo,
arg.SearchText,
arg.Sort,
arg.Order,
arg.Offset,
arg.Limit,
)
if err != nil {
@@ -432,6 +485,8 @@ func (q *Queries) SearchUsers(ctx context.Context, arg SearchUsersParams) ([]Sea
&i.Email,
&i.PasswordHash,
&i.TokenVersion,
&i.GoogleID,
&i.AuthProvider,
&i.RefreshToken,
&i.IsDeleted,
&i.CreatedAt,

View File

@@ -18,6 +18,7 @@ type MediaRepository interface {
GetByID(ctx context.Context, id pgtype.UUID) (*models.MediaEntity, error)
GetByUserID(ctx context.Context, userId pgtype.UUID) ([]*models.MediaEntity, error)
Search(ctx context.Context, params sqlc.SearchMediasParams) ([]*models.MediaEntity, error)
Count(ctx context.Context, params sqlc.CountMediasParams) (int64, error)
Delete(ctx context.Context, id pgtype.UUID) error
Create(ctx context.Context, params sqlc.CreateMediaParams) (*models.MediaEntity, error)
}
@@ -85,6 +86,7 @@ func (r *mediaRepository) GetByID(ctx context.Context, id pgtype.UUID) (*models.
var media models.MediaEntity
err := r.c.Get(ctx, cacheId, &media)
if err == nil {
_ = r.c.Set(ctx, cacheId, media, constants.NormalCacheDuration)
return &media, nil
}
@@ -118,11 +120,10 @@ func (r *mediaRepository) Create(ctx context.Context, params sqlc.CreateMediaPar
go func() {
bgCtx := context.Background()
_ = r.c.DelByPattern(bgCtx, "media:target*")
_ = r.c.DelByPattern(bgCtx, "media:userId:*")
_ = r.c.DelByPattern(bgCtx, "media:search*")
_ = r.c.DelByPattern(bgCtx, "media:count*")
}()
media := models.MediaEntity{
ID: convert.UUIDToString(row.ID),
UserID: convert.UUIDToString(row.UserID),
@@ -155,7 +156,16 @@ func (r *mediaRepository) Search(ctx context.Context, params sqlc.SearchMediasPa
queryKey := r.generateQueryKey("media:search", params)
var cachedIDs []string
if err := r.c.Get(ctx, queryKey, &cachedIDs); err == nil && len(cachedIDs) > 0 {
return r.getByIDsWithFallback(ctx, cachedIDs)
listItem, err := r.getByIDsWithFallback(ctx, cachedIDs)
if err != nil {
return nil, err
}
newCachedIDs := make([]string, len(listItem))
for i, media := range listItem {
newCachedIDs[i] = media.ID
}
_ = r.c.Set(ctx, queryKey, newCachedIDs, constants.ListCacheDuration)
return listItem, err
}
rows, err := r.q.SearchMedias(ctx, params)
@@ -195,11 +205,35 @@ func (r *mediaRepository) Search(ctx context.Context, params sqlc.SearchMediasPa
return medias, nil
}
// Count returns the total number of media rows matching params, serving
// from cache when possible. A cache hit rewrites the entry to slide its
// TTL; a miss falls through to the database and stores the result.
// Cache write errors are deliberately ignored (best-effort caching).
func (r *mediaRepository) Count(ctx context.Context, params sqlc.CountMediasParams) (int64, error) {
	key := r.generateQueryKey("media:count", params)

	var cached int64
	if r.c.Get(ctx, key, &cached) == nil {
		// Hit: refresh the expiration window before returning.
		_ = r.c.Set(ctx, key, cached, constants.ListCacheDuration)
		return cached, nil
	}

	total, err := r.q.CountMedias(ctx, params)
	if err != nil {
		return 0, err
	}
	_ = r.c.Set(ctx, key, total, constants.ListCacheDuration)
	return total, nil
}
func (r *mediaRepository) GetByUserID(ctx context.Context, userId pgtype.UUID) ([]*models.MediaEntity, error) {
queryKey := fmt.Sprintf("media:userId:%s", convert.UUIDToString(userId))
var cachedIDs []string
if err := r.c.Get(ctx, queryKey, &cachedIDs); err == nil && len(cachedIDs) > 0 {
return r.getByIDsWithFallback(ctx, cachedIDs)
listItem, err := r.getByIDsWithFallback(ctx, cachedIDs)
if err != nil {
return nil, err
}
newCachedIDs := make([]string, len(listItem))
for i, media := range listItem {
newCachedIDs[i] = media.ID
}
_ = r.c.Set(ctx, queryKey, newCachedIDs, constants.ListCacheDuration)
return listItem, nil
}
rows, err := r.q.GetMediasByUserID(ctx, userId)

View File

@@ -97,6 +97,7 @@ func (r *roleRepository) GetByID(ctx context.Context, id pgtype.UUID) (*models.R
var role models.RoleEntity
err := r.c.Get(ctx, cacheId, &role)
if err == nil {
_ = r.c.Set(ctx, cacheId, role, constants.NormalCacheDuration)
return &role, nil
}
@@ -122,6 +123,7 @@ func (r *roleRepository) GetByname(ctx context.Context, name string) (*models.Ro
var role models.RoleEntity
err := r.c.Get(ctx, cacheId, &role)
if err == nil {
_ = r.c.Set(ctx, cacheId, role, constants.NormalCacheDuration)
return &role, nil
}
row, err := r.q.GetRoleByName(ctx, name)
@@ -146,6 +148,11 @@ func (r *roleRepository) Create(ctx context.Context, name string) (*models.RoleE
if err != nil {
return nil, err
}
go func() {
bgCtx := context.Background()
_ = r.c.DelByPattern(bgCtx, "role:all*")
}()
role := models.RoleEntity{
ID: convert.UUIDToString(row.ID),
Name: row.Name,
@@ -183,24 +190,52 @@ func (r *roleRepository) Update(ctx context.Context, params sqlc.UpdateRoleParam
}
func (r *roleRepository) All(ctx context.Context) ([]*models.RoleEntity, error) {
queryKey := "role:all"
var cachedIDs []string
if err := r.c.Get(ctx, queryKey, &cachedIDs); err == nil && len(cachedIDs) > 0 {
listItem, err := r.getByIDsWithFallback(ctx, cachedIDs)
if err != nil {
return nil, err
}
newCachedIDs := make([]string, len(listItem))
for i, media := range listItem {
newCachedIDs[i] = media.ID
}
_ = r.c.Set(ctx, queryKey, newCachedIDs, constants.ListCacheDuration)
return listItem, err
}
rows, err := r.q.GetRoles(ctx)
if err != nil {
return nil, err
}
var roles []*models.RoleEntity
var ids []string
roleToCache := make(map[string]any)
var users []*models.RoleEntity
for _, row := range rows {
user := &models.RoleEntity{
role := &models.RoleEntity{
ID: convert.UUIDToString(row.ID),
Name: row.Name,
IsDeleted: row.IsDeleted,
CreatedAt: convert.TimeToPtr(row.CreatedAt),
UpdatedAt: convert.TimeToPtr(row.UpdatedAt),
}
users = append(users, user)
ids = append(ids, role.ID)
roles = append(roles, role)
roleToCache[fmt.Sprintf("role:id:%s", role.ID)] = role
}
return users, nil
if len(roleToCache) > 0 {
_ = r.c.MSet(ctx, roleToCache, constants.NormalCacheDuration)
}
if len(ids) > 0 {
_ = r.c.Set(ctx, queryKey, ids, constants.ListCacheDuration)
}
return roles, nil
}
func (r *roleRepository) Delete(ctx context.Context, id pgtype.UUID) error {

View File

@@ -20,6 +20,7 @@ type UserRepository interface {
GetByIDWithoutDeleted(ctx context.Context, id pgtype.UUID) (*models.UserEntity, error)
GetByEmail(ctx context.Context, email string) (*models.UserEntity, error)
Search(ctx context.Context, params sqlc.SearchUsersParams) ([]*models.UserEntity, error)
Count(ctx context.Context, params sqlc.CountUsersParams) (int64, error)
UpsertUser(ctx context.Context, params sqlc.UpsertUserParams) (*models.UserEntity, error)
CreateProfile(ctx context.Context, params sqlc.CreateUserProfileParams) (*models.UserProfileSimple, error)
UpdateProfile(ctx context.Context, params sqlc.UpdateUserProfileParams) (*models.UserEntity, error)
@@ -205,9 +206,8 @@ func (r *userRepository) UpsertUser(ctx context.Context, params sqlc.UpsertUserP
}
go func() {
bgCtx := context.Background()
_ = r.c.DelByPattern(bgCtx, "user:all*")
_ = r.c.DelByPattern(bgCtx, "user:search*")
_ = r.c.DelByPattern(bgCtx, "user:count*")
}()
return &models.UserEntity{
@@ -320,6 +320,22 @@ func (r *userRepository) Search(ctx context.Context, params sqlc.SearchUsersPara
return users, nil
}
// Count returns the total number of users matching params, serving from
// cache when possible. Cache write errors are ignored (best-effort).
//
// Behavior is aligned with mediaRepository.Count: a hit refreshes the
// entry's TTL (sliding expiration), and the value is stored under the
// list-cache TTL rather than NormalCacheDuration, since this count is
// tied to list queries and must not outlive their cached results.
func (r *userRepository) Count(ctx context.Context, params sqlc.CountUsersParams) (int64, error) {
	queryKey := r.generateQueryKey("user:count", params)
	var count int64
	if err := r.c.Get(ctx, queryKey, &count); err == nil {
		// Slide the expiration window on hit, as the media repository does.
		_ = r.c.Set(ctx, queryKey, count, constants.ListCacheDuration)
		return count, nil
	}
	count, err := r.q.CountUsers(ctx, params)
	if err != nil {
		return 0, err
	}
	_ = r.c.Set(ctx, queryKey, count, constants.ListCacheDuration)
	return count, nil
}
func (r *userRepository) Delete(ctx context.Context, id pgtype.UUID) error {
user, err := r.GetByID(ctx, id)
if err != nil {

View File

@@ -21,9 +21,10 @@ import (
"strings"
"github.com/gofiber/fiber/v3"
"github.com/rs/zerolog/log"
"github.com/google/uuid"
"github.com/jackc/pgx/v5/pgtype"
"github.com/rs/zerolog/log"
"golang.org/x/sync/errgroup"
)
type MediaService interface {
@@ -111,60 +112,86 @@ func (m *mediaService) GetMediaByUserID(ctx context.Context, id string) ([]*resp
return models.MediaEntitiesToResponse(medias), nil
}
func (m *mediaService) SearchMedia(ctx context.Context, dto *request.SearchMediaDto) (*response.PaginatedResponse, error) {
arg := sqlc.SearchMediasParams{
Limit: int32(dto.Limit + 1),
}
func (m *mediaService) fillSearchArgs(arg *sqlc.SearchMediasParams, dto *request.SearchMediaDto) {
if dto.Sort != "" {
arg.Sort = pgtype.Text{String: dto.Sort, Valid: true}
} else {
arg.Sort = pgtype.Text{String: "id", Valid: true}
}
if dto.Order != "" {
arg.Order = pgtype.Text{String: dto.Order, Valid: true}
} else {
arg.Order = pgtype.Text{String: "asc", Valid: true}
arg.Order = pgtype.Text{String: "asc", Valid: true}
if dto.Order == "desc" {
arg.Order = pgtype.Text{String: "desc", Valid: true}
}
if dto.Cursor != "" {
pgID, err := convert.StringToUUID(dto.Cursor)
if err != nil {
return nil, fiber.NewError(fiber.StatusBadRequest, "Invalid cursor format")
if dto.MimeType != "" {
arg.MimeType = pgtype.Text{String: dto.MimeType, Valid: true}
}
if dto.MaxSize != nil {
arg.MaxSize = pgtype.Int8{Int64: *dto.MaxSize, Valid: true}
}
if dto.MinSize != nil {
arg.MinSize = pgtype.Int8{Int64: *dto.MinSize, Valid: true}
}
if len(dto.UserIDs) > 0 {
for _, id := range dto.UserIDs {
if u, err := convert.StringToUUID(id); err == nil {
arg.UserIds = append(arg.UserIds, u)
}
}
arg.Cursor = pgID
}
if dto.Search != "" {
arg.SearchText = pgtype.Text{String: dto.Search, Valid: true}
}
}
rows, err := m.mediaRepo.Search(ctx, arg)
if err != nil {
func (m *mediaService) SearchMedia(ctx context.Context, dto *request.SearchMediaDto) (*response.PaginatedResponse, error) {
if dto.Page < 1 {
dto.Page = 1
}
offset := (dto.Page - 1) * dto.Limit
arg := sqlc.SearchMediasParams{
Limit: int32(dto.Limit),
Offset: int32(offset),
}
m.fillSearchArgs(&arg, dto)
var rows []*models.MediaEntity
var totalRecords int64
g, gCtx := errgroup.WithContext(ctx)
g.Go(func() error {
var err error
rows, err = m.mediaRepo.Search(gCtx, arg)
return err
})
g.Go(func() error {
countArg := sqlc.CountMediasParams{
UserIds: arg.UserIds,
MimeType: arg.MimeType,
MinSize: arg.MinSize,
MaxSize: arg.MaxSize,
SearchText: arg.SearchText,
}
var err error
totalRecords, err = m.mediaRepo.Count(gCtx, countArg)
return err
})
if err := g.Wait(); err != nil {
return nil, err
}
hasMore := false
var nextCursor string
if len(rows) > dto.Limit {
hasMore = true
nextCursor = rows[dto.Limit-1].ID
rows = rows[:dto.Limit]
}
res := &response.PaginatedResponse{
Data: rows,
Status: true,
Message: "",
}
res.Pagination.HasMore = hasMore
res.Pagination.NextCursor = nextCursor
return res, nil
return response.BuildPaginatedResponse(rows, totalRecords, dto.Page, dto.Limit), nil
}
func (m *mediaService) UploadServerSide(ctx context.Context, userId string, fileHeader *multipart.FileHeader) (*response.MediaResponse, error) {
userIdUUID, err := convert.StringToUUID(userId)
if err != nil {

View File

@@ -12,6 +12,7 @@ import (
"github.com/gofiber/fiber/v3"
"github.com/jackc/pgx/v5/pgtype"
"golang.org/x/crypto/bcrypt"
"golang.org/x/sync/errgroup"
)
type UserService interface {
@@ -208,81 +209,90 @@ func (u *userService) RestoreUser(ctx context.Context, userId string) (*response
return user.ToResponse(), nil
}
func (u *userService) SearchUser(ctx context.Context, dto *request.SearchUserDto) (*response.PaginatedResponse, error) {
arg := sqlc.SearchUsersParams{
Limit: int32(dto.Limit + 1),
}
func (m *userService) fillSearchArgs(arg *sqlc.SearchUsersParams, dto *request.SearchUserDto) {
if dto.Sort != "" {
arg.Sort = pgtype.Text{String: dto.Sort, Valid: true}
} else {
arg.Sort = pgtype.Text{String: "id", Valid: true}
}
if dto.Order != "" {
arg.Order = pgtype.Text{String: dto.Order, Valid: true}
} else {
arg.Order = pgtype.Text{String: "asc", Valid: true}
arg.Order = pgtype.Text{String: "asc", Valid: true}
if dto.Order == "desc" {
arg.Order = pgtype.Text{String: "desc", Valid: true}
}
if dto.Cursor != "" {
pgID, err := convert.StringToUUID(dto.Cursor)
if err != nil {
return nil, fiber.NewError(fiber.StatusBadRequest, "Invalid cursor format")
}
arg.Cursor = pgID
if dto.AuthProvider != "" {
arg.AuthProvider = pgtype.Text{String: dto.AuthProvider, Valid: true}
}
if dto.Search != "" {
pgID, err := convert.StringToUUID(dto.Search)
if err == nil {
arg.SearchID = pgID
} else {
arg.SearchText = pgtype.Text{String: dto.Search, Valid: true}
}
if dto.CreatedFrom != nil {
arg.CreatedFrom = pgtype.Timestamp{Time: *dto.CreatedFrom, Valid: true}
}
if dto.CreatedTo != nil {
arg.CreatedTo = pgtype.Timestamp{Time: *dto.CreatedTo, Valid: true}
}
if dto.IsDeleted != nil {
arg.IsDeleted = pgtype.Bool{Bool: *dto.IsDeleted, Valid: true}
}
if len(dto.RoleIDs) > 0 {
var pgRoleIDs []pgtype.UUID
for _, idStr := range dto.RoleIDs {
pgID, err := convert.StringToUUID(idStr)
if err != nil {
continue
for _, id := range dto.RoleIDs {
if u, err := convert.StringToUUID(id); err == nil {
arg.RoleIds = append(arg.RoleIds, u)
}
pgRoleIDs = append(pgRoleIDs, pgID)
}
arg.RoleIds = pgRoleIDs
}
rows, err := u.userRepo.Search(ctx, arg)
if err != nil {
if dto.Search != "" {
arg.SearchText = pgtype.Text{String: dto.Search, Valid: true}
}
}
func (u *userService) SearchUser(ctx context.Context, dto *request.SearchUserDto) (*response.PaginatedResponse, error) {
if dto.Page < 1 {
dto.Page = 1
}
offset := (dto.Page - 1) * dto.Limit
arg := sqlc.SearchUsersParams{
Limit: int32(dto.Limit),
Offset: int32(offset),
}
u.fillSearchArgs(&arg, dto)
var rows []*models.UserEntity
var totalRecords int64
g, gCtx := errgroup.WithContext(ctx)
g.Go(func() error {
var err error
rows, err = u.userRepo.Search(gCtx, arg)
return err
})
g.Go(func() error {
countArg := sqlc.CountUsersParams{
RoleIds: arg.RoleIds,
AuthProvider: arg.AuthProvider,
CreatedFrom: arg.CreatedFrom,
CreatedTo: arg.CreatedTo,
IsDeleted: arg.IsDeleted,
SearchText: arg.SearchText,
}
var err error
totalRecords, err = u.userRepo.Count(gCtx, countArg)
return err
})
if err := g.Wait(); err != nil {
return nil, err
}
hasMore := false
var nextCursor string
if len(rows) > dto.Limit {
hasMore = true
nextCursor = rows[dto.Limit-1].ID
rows = rows[:dto.Limit]
}
users := models.UsersEntityToResponse(rows)
res := &response.PaginatedResponse{
Data: users,
Status: true,
Message: "",
}
res.Pagination.HasMore = hasMore
res.Pagination.NextCursor = nextCursor
return res, nil
return response.BuildPaginatedResponse(rows, totalRecords, dto.Page, dto.Limit), nil
}
func (u *userService) GetUserByID(ctx context.Context, userId string) (*response.UserResponse, error) {