Slow progress, but we're getting there.
@@ -153,7 +153,7 @@ func (ctrl *V1Controller) HandleEntityAttachmentDelete() errchain.HandlerFunc {
 // @Tags Items Attachments
 // @Param id path string true "Item ID"
 // @Param attachment_id path string true "Attachment ID"
-// @Param payload body repo.ItemAttachmentUpdate true "Attachment Update"
+// @Param payload body repo.EntityAttachmentUpdate true "Attachment Update"
 // @Success 200 {object} repo.ItemOut
 // @Router /v1/entities/{id}/attachments/{attachment_id} [PUT]
 // @Security Bearer
@@ -222,7 +222,7 @@ func (ctrl *V1Controller) handleEntityAttachmentsHandler(w http.ResponseWriter,

 // Update Attachment Handler
 case http.MethodPut:
-var attachment repo.ItemAttachmentUpdate
+var attachment repo.EntityAttachmentUpdate
 err = server.Decode(r, &attachment)
 if err != nil {
 log.Err(err).Msg("failed to decode attachment")
@@ -156,7 +156,7 @@ func (ctrl *V1Controller) HandleItemAttachmentDelete() errchain.HandlerFunc {
 // @Tags Items Attachments
 // @Param id path string true "Item ID"
 // @Param attachment_id path string true "Attachment ID"
-// @Param payload body repo.ItemAttachmentUpdate true "Attachment Update"
+// @Param payload body repo.EntityAttachmentUpdate true "Attachment Update"
 // @Success 200 {object} repo.ItemOut
 // @Router /v1/items/{id}/attachments/{attachment_id} [PUT]
 // @Security Bearer
@@ -226,7 +226,7 @@ func (ctrl *V1Controller) handleItemAttachmentsHandler(w http.ResponseWriter, r

 // Update Attachment Handler
 case http.MethodPut:
-var attachment repo.ItemAttachmentUpdate
+var attachment repo.EntityAttachmentUpdate
 err = server.Decode(r, &attachment)
 if err != nil {
 log.Err(err).Msg("failed to decode attachment")
@@ -19,7 +19,7 @@ func (svc *ItemService) AttachmentPath(ctx context.Context, gid uuid.UUID, attac
 return attachment, nil
 }

-func (svc *ItemService) AttachmentUpdate(ctx Context, gid uuid.UUID, itemID uuid.UUID, data *repo.ItemAttachmentUpdate) (repo.ItemOut, error) {
+func (svc *ItemService) AttachmentUpdate(ctx Context, gid uuid.UUID, itemID uuid.UUID, data *repo.EntityAttachmentUpdate) (repo.ItemOut, error) {
 // Update Attachment
 attachment, err := svc.repo.Attachments.Update(ctx, gid, data.ID, data)
 if err != nil {
@@ -47,7 +47,7 @@ func (svc *ItemService) AttachmentAdd(ctx Context, itemID uuid.UUID, filename st
 }

 // Create the attachment
-_, err = svc.repo.Attachments.Create(ctx, itemID, repo.ItemCreateAttachment{Title: filename, Content: file}, attachmentType, primary)
+_, err = svc.repo.Attachments.Create(ctx, itemID, repo.EntityCreateAttachment{Title: filename, Content: file}, attachmentType, primary)
 if err != nil {
 log.Err(err).Msg("failed to create attachment")
 }
@@ -4,9 +4,10 @@ import (
 "context"
 "database/sql"
 "fmt"
+"time"
+
 "github.com/google/uuid"
 "github.com/pressly/goose/v3"
-"time"
 )

 //nolint:gochecknoinits
@@ -106,7 +107,7 @@ func Up20250831120023(ctx context.Context, tx *sql.Tx) error {
 }
 defer rows.Close()

-// Process each group and create default entity types
+// Process each group and create default entity types, and perform migrations that depend on entity types information
 for rows.Next() {
 var groupID string
 if err := rows.Scan(&groupID); err != nil {
@@ -149,27 +150,58 @@ func Up20250831120023(ctx context.Context, tx *sql.Tx) error {
if err != nil {
return fmt.Errorf("failed to migrate locations to entities for group %s: %w", groupID, err)
}

// Migrate existing items to entities
_, err = tx.ExecContext(ctx, `
INSERT INTO "entities" (
"id", "created_at", "updated_at", "name", "description",
"import_ref", "notes", "quantity", "insured", "archived", "asset_id",
"serial_number", "model_number", "manufacturer", "lifetime_warranty",
"warranty_expires", "warranty_details", "purchase_time", "purchase_from",
"purchase_price", "sold_time", "sold_to", "sold_price", "sold_notes",
"group_entities", "entity_type"
)
SELECT
i."id", i."created_at", i."updated_at", i."name", i."description",
i."import_ref", i."notes", i."quantity", i."insured", i."archived", i."asset_id",
i."serial_number", i."model_number", i."manufacturer", i."lifetime_warranty",
i."warranty_expires", i."warranty_details", i."purchase_time", i."purchase_from",
i."purchase_price", i."sold_time", i."sold_to", i."sold_price", i."sold_notes",
i."group_items", ?
FROM "items" i
WHERE i."group_items" = ?
`, itemTypeID, groupID)
if err != nil {
return fmt.Errorf("failed to migrate items to entities for group %s: %w", groupID, err)
}

// Migrate existing locations to entities
_, err = tx.ExecContext(ctx, `
INSERT INTO "entities" (
"id", "created_at", "updated_at", "name", "description",
"group_entities", "entity_type"
)
SELECT l.id, l.created_at, l.updated_at, l.name, l.description, l.group_locations, ? FROM "locations" l WHERE l."group_locations" = ?
`, locTypeID, groupID)
if err != nil {
return fmt.Errorf("failed to migrate locations to entities for group %s: %w", groupID, err)
}
}

// Drop old tables
_, err = tx.ExecContext(ctx, `DROP TABLE IF EXISTS "items"`)
if err != nil {
return fmt.Errorf("failed to drop items table: %w", err)
}

_, err = tx.ExecContext(ctx, `DROP TABLE IF EXISTS "locations"`)
if err != nil {
return fmt.Errorf("failed to drop locations table: %w", err)
}

return nil
}

func Down20250831120023(ctx context.Context, tx *sql.Tx) error {
// Drop tables in reverse order to avoid foreign key constraints
_, err := tx.ExecContext(ctx, `DROP TABLE IF EXISTS "entity_fields";`)
if err != nil {
return fmt.Errorf("failed to drop entity_fields table: %w", err)
}

_, err = tx.ExecContext(ctx, `DROP TABLE IF EXISTS "entities";`)
if err != nil {
return fmt.Errorf("failed to drop entities table: %w", err)
}

_, err = tx.ExecContext(ctx, `DROP TABLE IF EXISTS "entity_types";`)
if err != nil {
return fmt.Errorf("failed to drop entity_types table: %w", err)
}

return nil
}
@@ -1,11 +1,12 @@
 package repo

 import (
+"time"
+
 "github.com/google/uuid"
 "github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
 "github.com/sysadminsmedia/homebox/backend/internal/data/ent"
 "github.com/sysadminsmedia/homebox/backend/internal/data/types"
-"time"
 )

 type EntitiesRepository struct {
@@ -72,7 +73,7 @@ type (
 Notes string `json:"notes"`

 // Edges
-Attachments []ItemAttachment `json:"attachments,omitempty" extensions:"x-nullable,x-omitempty"`
-Fields []EntityField `json:"fields,omitempty" extensions:"x-nullable,x-omitempty"`
+Attachments []EntityAttachment `json:"attachments,omitempty" extensions:"x-nullable,x-omitempty"`
+Fields []EntityField `json:"fields,omitempty" extensions:"x-nullable,x-omitempty"`
 }
 )
@@ -58,7 +58,7 @@ type AttachmentRepo struct {
 }

 type (
-ItemAttachment struct {
+EntityAttachment struct {
 ID uuid.UUID `json:"id"`
 CreatedAt time.Time `json:"createdAt"`
 UpdatedAt time.Time `json:"updatedAt"`
@@ -70,21 +70,21 @@ type (
 Thumbnail *ent.Attachment `json:"thumbnail,omitempty"`
 }

-ItemAttachmentUpdate struct {
+EntityAttachmentUpdate struct {
 ID uuid.UUID `json:"-"`
 Type string `json:"type"`
 Title string `json:"title"`
 Primary bool `json:"primary"`
 }

-ItemCreateAttachment struct {
+EntityCreateAttachment struct {
 Title string `json:"title"`
 Content io.Reader `json:"content"`
 }
 )

-func ToItemAttachment(attachment *ent.Attachment) ItemAttachment {
-return ItemAttachment{
+func ToItemAttachment(attachment *ent.Attachment) EntityAttachment {
+return EntityAttachment{
 ID: attachment.ID,
 CreatedAt: attachment.CreatedAt,
 UpdatedAt: attachment.UpdatedAt,
@@ -138,7 +138,7 @@ func (r *AttachmentRepo) GetConnString() string {
 return r.storage.ConnString
 }

-func (r *AttachmentRepo) Create(ctx context.Context, itemID uuid.UUID, doc ItemCreateAttachment, typ attachment.Type, primary bool) (*ent.Attachment, error) {
+func (r *AttachmentRepo) Create(ctx context.Context, itemID uuid.UUID, doc EntityCreateAttachment, typ attachment.Type, primary bool) (*ent.Attachment, error) {
 tx, err := r.db.Tx(ctx)
 if err != nil {
 return nil, err
@@ -312,7 +312,7 @@ func (r *AttachmentRepo) Get(ctx context.Context, gid uuid.UUID, id uuid.UUID) (
 }
 }

-func (r *AttachmentRepo) Update(ctx context.Context, gid uuid.UUID, id uuid.UUID, data *ItemAttachmentUpdate) (*ent.Attachment, error) {
+func (r *AttachmentRepo) Update(ctx context.Context, gid uuid.UUID, id uuid.UUID, data *EntityAttachmentUpdate) (*ent.Attachment, error) {
 // Validate that the attachment belongs to the specified group
 _, err := r.db.Attachment.Query().
 Where(
@@ -753,7 +753,7 @@ func (r *AttachmentRepo) CreateMissingThumbnails(ctx context.Context, groupId uu
 return count, nil
 }

-func (r *AttachmentRepo) UploadFile(ctx context.Context, itemGroup *ent.Group, doc ItemCreateAttachment) (string, error) {
+func (r *AttachmentRepo) UploadFile(ctx context.Context, itemGroup *ent.Group, doc EntityCreateAttachment) (string, error) {
 // Prepare for the hashing of the file contents
 hashOut := make([]byte, 32)

@@ -869,7 +869,7 @@ func (r *AttachmentRepo) processThumbnailFromImage(ctx context.Context, groupId
 return "", err
 }

-thumbnailFile, err := r.UploadFile(ctx, group, ItemCreateAttachment{
+thumbnailFile, err := r.UploadFile(ctx, group, EntityCreateAttachment{
 Title: fmt.Sprintf("%s-thumb", title),
 Content: bytes.NewReader(contentBytes),
 })
@@ -56,7 +56,7 @@ func TestAttachmentRepo_Create(t *testing.T) {
 }
 for _, tt := range tests {
 t.Run(tt.name, func(t *testing.T) {
-got, _ := tRepos.Attachments.Create(tt.args.ctx, tt.args.itemID, ItemCreateAttachment{Title: "Test", Content: strings.NewReader("This is a test")}, tt.args.typ, false)
+got, _ := tRepos.Attachments.Create(tt.args.ctx, tt.args.itemID, EntityCreateAttachment{Title: "Test", Content: strings.NewReader("This is a test")}, tt.args.typ, false)
 // TODO: Figure out how this works and fix the test later
 // if (err != nil) != tt.wantErr {
 // t.Errorf("AttachmentRepo.Create() error = %v, wantErr %v", err, tt.wantErr)
@@ -92,7 +92,7 @@ func useAttachments(t *testing.T, n int) []*ent.Attachment {

 attachments := make([]*ent.Attachment, n)
 for i := 0; i < n; i++ {
-attach, err := tRepos.Attachments.Create(context.Background(), item.ID, ItemCreateAttachment{Title: "Test", Content: strings.NewReader("Test String")}, attachment.TypePhoto, true)
+attach, err := tRepos.Attachments.Create(context.Background(), item.ID, EntityCreateAttachment{Title: "Test", Content: strings.NewReader("Test String")}, attachment.TypePhoto, true)
 require.NoError(t, err)
 attachments[i] = attach

@@ -107,7 +107,7 @@ func TestAttachmentRepo_Update(t *testing.T) {

 for _, typ := range []attachment.Type{"photo", "manual", "warranty", "attachment"} {
 t.Run(string(typ), func(t *testing.T) {
-_, err := tRepos.Attachments.Update(context.Background(), tGroup.ID, entity.ID, &ItemAttachmentUpdate{
+_, err := tRepos.Attachments.Update(context.Background(), tGroup.ID, entity.ID, &EntityAttachmentUpdate{
 Type: string(typ),
 })

@@ -136,7 +136,7 @@ func TestAttachmentRepo_EnsureSinglePrimaryAttachment(t *testing.T) {
 attachments := useAttachments(t, 2)

 setAndVerifyPrimary := func(primaryAttachmentID, nonPrimaryAttachmentID uuid.UUID) {
-primaryAttachment, err := tRepos.Attachments.Update(ctx, tGroup.ID, primaryAttachmentID, &ItemAttachmentUpdate{
+primaryAttachment, err := tRepos.Attachments.Update(ctx, tGroup.ID, primaryAttachmentID, &EntityAttachmentUpdate{
 Type: attachment.TypePhoto.String(),
 Primary: true,
 })
@@ -158,11 +158,11 @@ func TestAttachmentRepo_UpdateNonPhotoDoesNotAffectPrimaryPhoto(t *testing.T) {
 item := useItems(t, 1)[0]

 // Create a photo attachment that will be primary
-photoAttachment, err := tRepos.Attachments.Create(ctx, item.ID, ItemCreateAttachment{Title: "Test Photo", Content: strings.NewReader("Photo content")}, attachment.TypePhoto, true)
+photoAttachment, err := tRepos.Attachments.Create(ctx, item.ID, EntityCreateAttachment{Title: "Test Photo", Content: strings.NewReader("Photo content")}, attachment.TypePhoto, true)
 require.NoError(t, err)

 // Create a manual attachment (non-photo)
-manualAttachment, err := tRepos.Attachments.Create(ctx, item.ID, ItemCreateAttachment{Title: "Test Manual", Content: strings.NewReader("Manual content")}, attachment.TypeManual, false)
+manualAttachment, err := tRepos.Attachments.Create(ctx, item.ID, EntityCreateAttachment{Title: "Test Manual", Content: strings.NewReader("Manual content")}, attachment.TypeManual, false)
 require.NoError(t, err)

 // Cleanup
@@ -177,7 +177,7 @@ func TestAttachmentRepo_UpdateNonPhotoDoesNotAffectPrimaryPhoto(t *testing.T) {
 assert.True(t, photoAttachment.Primary)

 // Update the manual attachment (this should NOT affect the photo's primary status)
-_, err = tRepos.Attachments.Update(ctx, tGroup.ID, manualAttachment.ID, &ItemAttachmentUpdate{
+_, err = tRepos.Attachments.Update(ctx, tGroup.ID, manualAttachment.ID, &EntityAttachmentUpdate{
 Type: attachment.TypeManual.String(),
 Title: "Updated Manual",
 Primary: false, // This should have no effect since it's not a photo
@@ -200,7 +200,7 @@ func TestAttachmentRepo_AddingPDFAfterPhotoKeepsPhotoAsPrimary(t *testing.T) {
 item := useItems(t, 1)[0]

 // Step 1: Upload a photo first (this should become primary since it's the first photo)
-photoAttachment, err := tRepos.Attachments.Create(ctx, item.ID, ItemCreateAttachment{Title: "Item Photo", Content: strings.NewReader("Photo content")}, attachment.TypePhoto, false)
+photoAttachment, err := tRepos.Attachments.Create(ctx, item.ID, EntityCreateAttachment{Title: "Item Photo", Content: strings.NewReader("Photo content")}, attachment.TypePhoto, false)
 require.NoError(t, err)

 // Cleanup
@@ -214,7 +214,7 @@ func TestAttachmentRepo_AddingPDFAfterPhotoKeepsPhotoAsPrimary(t *testing.T) {
 assert.True(t, photoAttachment.Primary, "First photo should automatically become primary")

 // Step 2: Add a PDF receipt (this should NOT affect the photo's primary status)
-pdfAttachment, err := tRepos.Attachments.Create(ctx, item.ID, ItemCreateAttachment{Title: "Receipt PDF", Content: strings.NewReader("PDF content")}, attachment.TypeReceipt, false)
+pdfAttachment, err := tRepos.Attachments.Create(ctx, item.ID, EntityCreateAttachment{Title: "Receipt PDF", Content: strings.NewReader("PDF content")}, attachment.TypeReceipt, false)
 require.NoError(t, err)

 // Add to cleanup
@@ -246,10 +246,10 @@ func TestAttachmentRepo_SettingPhotoPrimaryStillWorks(t *testing.T) {
 item := useItems(t, 1)[0]

 // Create two photo attachments
-photo1, err := tRepos.Attachments.Create(ctx, item.ID, ItemCreateAttachment{Title: "Photo 1", Content: strings.NewReader("Photo 1 content")}, attachment.TypePhoto, false)
+photo1, err := tRepos.Attachments.Create(ctx, item.ID, EntityCreateAttachment{Title: "Photo 1", Content: strings.NewReader("Photo 1 content")}, attachment.TypePhoto, false)
 require.NoError(t, err)

-photo2, err := tRepos.Attachments.Create(ctx, item.ID, ItemCreateAttachment{Title: "Photo 2", Content: strings.NewReader("Photo 2 content")}, attachment.TypePhoto, false)
+photo2, err := tRepos.Attachments.Create(ctx, item.ID, EntityCreateAttachment{Title: "Photo 2", Content: strings.NewReader("Photo 2 content")}, attachment.TypePhoto, false)
 require.NoError(t, err)

 // Cleanup
@@ -268,7 +268,7 @@ func TestAttachmentRepo_SettingPhotoPrimaryStillWorks(t *testing.T) {
 assert.False(t, photo2.Primary)

 // Now set photo2 as primary (this should work and remove primary from photo1)
-photo2, err = tRepos.Attachments.Update(ctx, tGroup.ID, photo2.ID, &ItemAttachmentUpdate{
+photo2, err = tRepos.Attachments.Update(ctx, tGroup.ID, photo2.ID, &EntityAttachmentUpdate{
 Type: attachment.TypePhoto.String(),
 Title: "Photo 2",
 Primary: true,
@@ -180,8 +180,8 @@ type (
 // Extras
 Notes string `json:"notes"`

-Attachments []ItemAttachment `json:"attachments"`
-Fields []ItemField `json:"fields"`
+Attachments []EntityAttachment `json:"attachments"`
+Fields []ItemField `json:"fields"`
 }
 )

@@ -264,7 +264,7 @@ func mapFields(fields []*ent.EntityField) []ItemField {
 }

 func mapItemOut(item *ent.Entity) ItemOut {
-var attachments []ItemAttachment
+var attachments []EntityAttachment
 if item.Edges.Attachments != nil {
 attachments = mapEach(item.Edges.Attachments, ToItemAttachment)
 }
@@ -106,30 +106,32 @@ type LocationQuery struct {
 func (r *LocationRepository) GetAll(ctx context.Context, gid uuid.UUID, filter LocationQuery) ([]LocationOutCount, error) {
 query := `--sql
 SELECT
-id,
-name,
-description,
-created_at,
-updated_at,
+entities.id,
+entities.name,
+entities.description,
+entities.created_at,
+entities.updated_at,
 (
 SELECT
 SUM(entities.quantity)
 FROM
 entities
 WHERE
-entities.location_entities = entities.id
+entities.entity_children = entities.id
 AND entities.archived = false
 ) as item_count
 FROM
-locations
+entities
+JOIN entity_types ON entities.entity_type = entity_types.id
+AND entity_types.is_location = true
 WHERE
-locations.group_locations = $1 {{ FILTER_CHILDREN }}
+entities.group_entities = $1 {{ FILTER_CHILDREN }}
 ORDER BY
-locations.name ASC
+entities.name ASC
 `

 if filter.FilterChildren {
-query = strings.Replace(query, "{{ FILTER_CHILDREN }}", "AND locations.location_children IS NULL", 1)
+query = strings.Replace(query, "{{ FILTER_CHILDREN }}", "AND entities.entity_children IS NULL", 1)
 } else {
 query = strings.Replace(query, "{{ FILTER_CHILDREN }}", "", 1)
 }
@@ -282,16 +284,20 @@ type ItemPath struct {

 func (r *LocationRepository) PathForLoc(ctx context.Context, gid, locID uuid.UUID) ([]ItemPath, error) {
 query := `WITH RECURSIVE location_path AS (
-SELECT id, name, location_children
-FROM locations
-WHERE id = $1 -- Replace ? with the ID of the item's location
-AND group_locations = $2 -- Replace ? with the ID of the group
+SELECT e.id, e.name, e.entity_children
+FROM entities e
+JOIN entity_types et ON e.entity_type = et.id
+WHERE e.id = $1
+AND e.group_entities = $2
+AND et.is_location = true

 UNION ALL

-SELECT loc.id, loc.name, loc.location_children
-FROM locations loc
-JOIN location_path lp ON loc.id = lp.location_children
+SELECT e.id, e.name, e.entity_children
+FROM entities e
+JOIN entity_types et ON e.entity_type = et.id
+JOIN location_path lp ON e.id = lp.entity_children
+WHERE et.is_location = true
 )

 SELECT id, name
@@ -331,24 +337,28 @@ func (r *LocationRepository) Tree(ctx context.Context, gid uuid.UUID, tq TreeQue
 query := `
 WITH recursive location_tree(id, NAME, parent_id, level, node_type) AS
 (
-SELECT id,
-NAME,
-location_children AS parent_id,
+SELECT e.id,
+e.NAME,
+e.entity_children AS parent_id,
 0 AS level,
 'location' AS node_type
-FROM locations
-WHERE location_children IS NULL
-AND group_locations = $1
+FROM entities e
+JOIN entity_types et ON e.entity_type = et.id
+WHERE e.entity_children IS NULL
+AND et.is_location = true
+AND e.group_entities = $1
 UNION ALL
 SELECT c.id,
 c.NAME,
-c.location_children AS parent_id,
+c.entity_children AS parent_id,
 level + 1,
 'location' AS node_type
-FROM locations c
+FROM entities c
+JOIN entity_types et ON c.entity_type = et.id
 JOIN location_tree p
-ON c.location_children = p.id
-WHERE level < 10 -- prevent infinite loop & excessive recursion
+ON c.entity_children = p.id
+WHERE et.is_location = true
+AND level < 10 -- prevent infinite loop & excessive recursion
 ){{ WITH_ITEMS }}

 SELECT id,
@@ -370,26 +380,30 @@ func (r *LocationRepository) Tree(ctx context.Context, gid uuid.UUID, tq TreeQue
 if tq.WithItems {
 itemQuery := `, item_tree(id, NAME, parent_id, level, node_type) AS
 (
-SELECT id,
-NAME,
-location_items as parent_id,
+SELECT e.id,
+e.NAME,
+e.entity_children as parent_id,
 0 AS level,
 'item' AS node_type
-FROM items
-WHERE item_children IS NULL
-AND location_items IN (SELECT id FROM location_tree)
+FROM entities e
+JOIN entity_types et ON e.entity_type = et.id
+WHERE e.entity_children IS NULL
+AND et.is_location = false
+AND e.entity_children IN (SELECT id FROM location_tree)

 UNION ALL

 SELECT c.id,
 c.NAME,
-c.item_children AS parent_id,
+c.entity_children AS parent_id,
 level + 1,
 'item' AS node_type
-FROM items c
+FROM entities c
+JOIN entity_types et ON c.entity_type = et.id
 JOIN item_tree p
-ON c.item_children = p.id
-WHERE c.item_children IS NOT NULL
+ON c.entity_children = p.id
+WHERE c.entity_children IS NOT NULL
+AND et.is_location = false
 AND level < 10 -- prevent infinite loop & excessive recursion
 )`