Compare commits

58 Commits

Author SHA1 Message Date
Hayden
3cfd274212 fix failing tests 2023-03-06 21:06:18 -09:00
Hayden
e71146df3d set validate + order fields by name 2023-03-06 20:55:40 -09:00
Hayden
8c72d66d87 add custom validator 2023-03-06 20:55:29 -09:00
Hayden
b6c2db0e83 refactor 2023-03-06 11:46:01 -09:00
Hayden
883468e04c refactor schema edges 2023-03-06 11:43:56 -09:00
Hayden
ccd40ffcac audit and update documentation + improve format 2023-03-06 11:21:39 -09:00
Hayden
9c6421a9ff include link to API docs 2023-03-06 10:40:35 -09:00
Hayden
c18d72b8a9 delete test file 2023-03-06 10:38:44 -09:00
Hayden
737007b156 support group edges via scaffold 2023-03-05 21:56:53 -09:00
Hayden
9fa4da819f refactor schema folder 2023-03-05 21:47:31 -09:00
Hayden
77fb37ebf4 remove notifier service 2023-03-05 21:30:44 -09:00
Hayden
7167d200ea add scaffold schema reference 2023-03-05 21:30:16 -09:00
Hayden
e1f9c2997b improve documentation 2023-03-05 21:21:07 -09:00
Hayden
c2f432cb4d use badges instead of text 2023-03-05 21:21:07 -09:00
Hayden
b6f44dfe58 wip: notifiers UI 2023-03-05 21:21:06 -09:00
Hayden
30b1879c35 reverse checkbox label display 2023-03-05 21:21:06 -09:00
Hayden
913912a1d9 go tidy 2023-03-05 21:21:06 -09:00
Hayden
fa069845fa update errors to fix validation panic 2023-03-05 21:21:06 -09:00
Hayden
882f86f6f4 new routes for notifiers 2023-03-05 21:21:06 -09:00
Hayden
d79d0b45bf refactor adapters to fit more common use cases 2023-03-05 21:21:06 -09:00
Hayden
b0a9c510ad introduce experimental adapter pattern for hdlrs 2023-03-05 21:21:05 -09:00
Hayden
b82fbb01f5 notifier repo 2023-03-05 21:21:05 -09:00
Hayden
4d744c0e4b new mapper helpers 2023-03-05 21:21:05 -09:00
Hayden
9f59a5d393 db migration 2023-03-05 21:21:05 -09:00
Hayden
37857682e6 add group_id to notifier 2023-03-05 21:21:05 -09:00
Hayden
43b34e2899 go mod tidy 2023-03-05 21:21:05 -09:00
Hayden
65d0b8fe4f gen: migrations 2023-03-05 21:20:31 -09:00
Hayden
791d6979b6 gen: ent code 2023-03-05 21:20:30 -09:00
Hayden
b491ff9ec1 update schema files 2023-03-05 21:20:30 -09:00
Hayden
f169f1c710 wip: shoutrrr wrapper (may remove) 2023-03-05 21:20:30 -09:00
Hayden
4725d9c823 introduce scaffold for new models 2023-03-05 21:20:30 -09:00
renovate[bot]
14315cb88a fix(deps): update module golang.org/x/crypto to v0.7.0 (#336)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-03-05 16:49:16 -09:00
Hayden
cf536393f5 fix datetime display issues (again) (#324) 2023-02-27 19:52:56 -09:00
Hayden
025521431e feat: add scheduled maintenance tasks (#320)
* add scheduled maintenance tasks

* fix failing typecheck
2023-02-26 18:42:23 -09:00
Hayden
70297b9d27 feat: more-currency-support (#316)
* add polish and turkish lira

* add romanian lei

* code-gen
2023-02-25 18:13:52 -09:00
Hayden
729293745f fix: table row background (#315) 2023-02-25 18:07:03 -09:00
Hayden
a6bcb36c5b feat: import export rewrite (#290)
* WIP: initial work

* refactoring

* fix failing JS tests

* update import docs

* fix import headers

* fix column headers

* update refs on import

* remove demo status

* finnnneeeee

* formatting
2023-02-25 17:54:40 -09:00
Adrian
a005fa5b9b feat: add currency swiss francs (#311) 2023-02-25 10:46:27 -09:00
renovate[bot]
a2dfa9dcef chore(deps): update dependency vitest to ^0.29.0 (#312)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-25 10:45:56 -09:00
renovate[bot]
32216f63bd fix(deps): update module github.com/stretchr/testify to v1.8.2 (#313)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-25 10:45:35 -09:00
renovate[bot]
ef190e26df fix(deps): update github.com/gocarina/gocsv digest to bcce7dc (#307)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-19 18:04:36 -09:00
Hayden
c3e3702a7e refactor: change icon for locations to tree view 2023-02-18 10:56:28 -09:00
Hayden
fb57120067 feat: support item nesting in tree view (#306) 2023-02-18 10:55:14 -09:00
Hayden
9d9b05d8a6 fix: several layout issues (#305)
* fix login version issue

* allow wrapping on action menu
2023-02-18 10:09:19 -09:00
Hayden
3ac6c7c858 fix: use item quantity as count mechanism (#304) 2023-02-18 09:57:09 -09:00
Hayden
859d3b9ffe fix label store (#303) 2023-02-18 09:47:04 -09:00
renovate[bot]
6cfa6c9fc8 chore(deps): update dependency nuxt to v3.2.2 (#298)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-17 21:58:09 -09:00
Hayden
12975ce26e feat: change auth to use cookies (#301)
* frontend cookie implementation

* accept cookies for authentication

* remove auth store

* add self attr
2023-02-17 21:57:21 -09:00
Hayden
bd321af29f chore: developer cleanup (#300)
* new PR tasks

* add homebox to know words

* formatting

* bump deps

* generate db models

* ts errors

* drop id

* fix accessor

* drop unused time field

* change CI

* add expected error

* add type check

* resolve serveral type errors

* hoise in CI
2023-02-17 21:41:01 -09:00
renovate[bot]
88f9ff90d4 fix(deps): update module entgo.io/ent to v0.11.8 (#272)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-16 10:43:44 -09:00
renovate[bot]
354f1adbee chore(deps): update dependency nuxt to v3.2.0 (#259)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-16 10:43:22 -09:00
renovate[bot]
2a62a43493 fix(deps): update dependency dompurify to v3 (#277)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-16 10:42:48 -09:00
Kyle Brown
673db41f37 build(docker): add image source label (#288)
This adds the `org.opencontainers.image.source` label pointing to this repository for cross-referencing information. For example: https://github.com/renovatebot/renovate/blob/main/lib/modules/datasource/docker/readme.md
2023-02-16 10:36:40 -09:00
renovate[bot]
830ce2b0a9 fix(deps): update module github.com/ardanlabs/conf/v3 to v3.1.4 (#291)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-02-16 10:35:54 -09:00
Hayden
e8e6a425dd fix: button display on light mode (#293) 2023-02-16 10:28:52 -09:00
Hayden
da00db0608 fix: code generation and type processing (#292)
regular expressions are order specific and when applied in a random order you can get a variety of outputs. Using a list preserves order and ensures that the data-contracts.ts file is deterministic.
2023-02-16 10:13:09 -09:00
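The explanation on da00db0608 is worth a concrete illustration: Go's map iteration order is randomized, so chaining regex replacements out of a map can yield different generated output on every run, whereas a slice fixes the order. A minimal, self-contained sketch follows; the patterns and input are invented for illustration and are not taken from the repo's process-types script:

```go
package main

import (
	"fmt"
	"regexp"
)

// replacement pairs a compiled pattern with its substitution text.
type replacement struct {
	pattern *regexp.Regexp
	with    string
}

func main() {
	// Hypothetical rewrite rules. Order matters: the second rule matches
	// text that only exists after the first rule has run.
	ordered := []replacement{
		{regexp.MustCompile(`\bfoo\b`), "bar"},
		{regexp.MustCompile(`\bbar\b`), "baz"},
	}

	src := "foo"
	for _, r := range ordered {
		// Iterating a slice is deterministic; ranging over a
		// map[*regexp.Regexp]string here would not be.
		src = r.pattern.ReplaceAllString(src, r.with)
	}

	fmt.Println(src) // always "baz"; with the rules reversed it would be "bar"
}
```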
Hayden
efd7069fe4 feat: hide registration button when disabled (#287)
* add allow registration to API Summary

* code gen

* use env for troubleshooting

* disable registration toggle based on backend
2023-02-15 08:58:38 -09:00
Hayden
dd349aa98e fix #285 (#286) 2023-02-15 08:52:13 -09:00
254 changed files with 15055 additions and 4588 deletions

View File

@@ -4,7 +4,33 @@ on:
workflow_call:
jobs:
Frontend:
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: pnpm/action-setup@v2.2.4
with:
version: 6.0.2
- name: Install dependencies
run: pnpm install --shamefully-hoist
working-directory: frontend
- name: Run Lint
run: pnpm run lint:ci
working-directory: frontend
- name: Run Typecheck
run: pnpm run typecheck
working-directory: frontend
integration-tests:
name: Integration Tests
runs-on: ubuntu-latest
steps:
- name: Checkout
@@ -34,9 +60,5 @@ jobs:
run: pnpm install
working-directory: frontend
- name: Run linter 👀
run: pnpm lint
working-directory: "frontend"
- name: Run Integration Tests
run: task test:ci

View File

@@ -0,0 +1,33 @@
---
# yaml-language-server: $schema=https://hay-kot.github.io/scaffold/schema.json
messages:
pre: |
# Ent Model Generation
With Boilerplate!
post: |
Complete!
questions:
- name: "model"
prompt:
message: "What is the name of the model? (PascalCase)"
required: true
- name: "by_group"
prompt:
confirm: "Include a Group Edge? (group_id -> id)"
required: true
rewrites:
- from: 'templates/model.go'
to: 'backend/internal/data/ent/schema/{{ lower .Scaffold.model }}.go'
inject:
- name: "Insert Groups Edge"
path: 'backend/internal/data/ent/schema/group.go'
at: // $scaffold_edge
template: |
{{- if .Scaffold.by_group -}}
owned("{{ lower .Scaffold.model }}s", {{ .Scaffold.model }}.Type),
{{- end -}}

View File

@@ -0,0 +1,40 @@
package schema
import (
"entgo.io/ent"
"github.com/hay-kot/homebox/backend/internal/data/ent/schema/mixins"
)
type {{ .Scaffold.model }} struct {
ent.Schema
}
func ({{ .Scaffold.model }}) Mixin() []ent.Mixin {
return []ent.Mixin{
mixins.BaseMixin{},
{{- if .Scaffold.by_group }}
GroupMixin{ref: "{{ snakecase .Scaffold.model }}s"},
{{- end }}
}
}
// Fields of the {{ .Scaffold.model }}.
func ({{ .Scaffold.model }}) Fields() []ent.Field {
return []ent.Field{
// field.String("name").
}
}
// Edges of the {{ .Scaffold.model }}.
func ({{ .Scaffold.model }}) Edges() []ent.Edge {
return []ent.Edge{
// edge.From("group", Group.Type).
}
}
func ({{ .Scaffold.model }}) Indexes() []ent.Index {
return []ent.Index{
// index.Fields("token"),
}
}

View File

@@ -9,7 +9,8 @@
"README.md": "LICENSE, SECURITY.md"
},
"cSpell.words": [
"debughandlers"
"debughandlers",
"Homebox"
],
// use ESLint to format code on save
"editor.formatOnSave": false,

View File

@@ -41,9 +41,10 @@ COPY --from=builder /go/bin/api /app
RUN chmod +x /app/api
LABEL Name=homebox Version=0.0.1
LABEL org.opencontainers.image.source="https://github.com/hay-kot/homebox"
EXPOSE 7745
WORKDIR /app
VOLUME [ "/data" ]
ENTRYPOINT [ "/app/api" ]
CMD [ "/data/config.yml" ]
CMD [ "/data/config.yml" ]

View File

@@ -2,6 +2,7 @@ version: "3"
env:
HBOX_STORAGE_SQLITE_URL: .data/homebox.db?_fk=1
HBOX_OPTIONS_ALLOW_REGISTRATION: true
UNSAFE_DISABLE_PASSWORD_PROJECTION: "yes_i_am_sure"
tasks:
setup:
@@ -27,6 +28,7 @@ tasks:
--path ./backend/app/api/static/docs/swagger.json \
--output ./frontend/lib/api/types
- go run ./scripts/process-types/*.go ./frontend/lib/api/types/data-contracts.ts
- cp ./backend/app/api/static/docs/swagger.json docs/docs/api/openapi-2.0.json
sources:
- "./backend/app/api/**/*"
- "./backend/internal/data/**"
@@ -107,6 +109,16 @@ tasks:
cmds:
- cd frontend && pnpm dev
ui:fix:
desc: Runs prettier and eslint on the frontend
cmds:
- cd frontend && pnpm run lint:fix
ui:check:
desc: Runs type checking
cmds:
- cd frontend && pnpm run typecheck
test:ci:
desc: Runs end-to-end test on a live server (only for use in CI)
cmds:
@@ -115,3 +127,12 @@ tasks:
- sleep 5
- cd frontend && pnpm run test:ci
silent: true
pr:
desc: Runs all tasks required for a PR
cmds:
- task: generate
- task: go:all
- task: ui:check
- task: ui:fix
- task: test:ci

View File

@@ -2,7 +2,6 @@ package main
import (
"context"
"encoding/csv"
"strings"
"github.com/hay-kot/homebox/backend/internal/core/services"
@@ -10,7 +9,7 @@ import (
)
func (a *app) SetupDemo() {
csvText := `Import Ref,Location,Labels,Quantity,Name,Description,Insured,Serial Number,Model Number,Manufacturer,Notes,Purchase From,Purchased Price,Purchased Time,Lifetime Warranty,Warranty Expires,Warranty Details,Sold To,Sold Price,Sold Time,Sold Notes
csvText := `HB.import_ref,HB.location,HB.labels,HB.quantity,HB.name,HB.description,HB.insured,HB.serial_number,HB.model_number,HB.manufacturer,HB.notes,HB.purchase_from,HB.purchase_price,HB.purchase_time,HB.lifetime_warranty,HB.warranty_expires,HB.warranty_details,HB.sold_to,HB.sold_price,HB.sold_time,HB.sold_notes
,Garage,IOT;Home Assistant; Z-Wave,1,Zooz Universal Relay ZEN17,"Zooz 700 Series Z-Wave Universal Relay ZEN17 for Awnings, Garage Doors, Sprinklers, and More | 2 NO-C-NC Relays (20A, 10A) | Signal Repeater | Hub Required (Compatible with SmartThings and Hubitat)",,,ZEN17,Zooz,,Amazon,39.95,10/13/2021,,,,,,,
,Living Room,IOT;Home Assistant; Z-Wave,1,Zooz Motion Sensor,"Zooz Z-Wave Plus S2 Motion Sensor ZSE18 with Magnetic Mount, Works with Vera and SmartThings",,,ZSE18,Zooz,,Amazon,29.95,10/15/2021,,,,,,,
,Office,IOT;Home Assistant; Z-Wave,1,Zooz 110v Power Switch,"Zooz Z-Wave Plus Power Switch ZEN15 for 110V AC Units, Sump Pumps, Humidifiers, and More",,,ZEN15,Zooz,,Amazon,39.95,10/13/2021,,,,,,,
@@ -19,13 +18,11 @@ func (a *app) SetupDemo() {
,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,"UltraPro Z-Wave Smart Rocker Light Dimmer with QuickFit and SimpleWire, 3-Way Ready, Compatible with Alexa, Google Assistant, ZWave Hub Required, Repeater/Range Extender, White Paddle Only, 39351",,,39351,Honeywell,,Amazon,65.98,09/30/0202,,,,,,,
`
var (
registration = services.UserRegistration{
Email: "demo@example.com",
Name: "Demo",
Password: "demo",
}
)
registration := services.UserRegistration{
Email: "demo@example.com",
Name: "Demo",
Password: "demo",
}
// First check if we've already setup a demo user and skip if so
_, err := a.services.User.Login(context.Background(), registration.Email, registration.Password)
@@ -42,17 +39,7 @@ func (a *app) SetupDemo() {
token, _ := a.services.User.Login(context.Background(), registration.Email, registration.Password)
self, _ := a.services.User.GetSelf(context.Background(), token.Raw)
// Read CSV Text
reader := csv.NewReader(strings.NewReader(csvText))
reader.Comma = ','
records, err := reader.ReadAll()
if err != nil {
log.Err(err).Msg("Failed to read CSV")
log.Fatal().Msg("Failed to setup demo")
}
_, err = a.services.Items.CsvImport(context.Background(), self.GroupID, records)
_, err = a.services.Items.CsvImport(context.Background(), self.GroupID, strings.NewReader(csvText))
if err != nil {
log.Err(err).Msg("Failed to import CSV")
log.Fatal().Msg("Failed to setup demo")

View File

@@ -44,12 +44,13 @@ type (
}
ApiSummary struct {
Healthy bool `json:"health"`
Versions []string `json:"versions"`
Title string `json:"title"`
Message string `json:"message"`
Build Build `json:"build"`
Demo bool `json:"demo"`
Healthy bool `json:"health"`
Versions []string `json:"versions"`
Title string `json:"title"`
Message string `json:"message"`
Build Build `json:"build"`
Demo bool `json:"demo"`
AllowRegistration bool `json:"allowRegistration"`
}
)
@@ -74,19 +75,21 @@ func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, options ..
}
// HandleBase godoc
// @Summary Retrieves the basic information about the API
// @Tags Base
// @Produce json
// @Success 200 {object} ApiSummary
// @Router /v1/status [GET]
//
// @Summary Application Info
// @Tags Base
// @Produce json
// @Success 200 {object} ApiSummary
// @Router /v1/status [GET]
func (ctrl *V1Controller) HandleBase(ready ReadyFunc, build Build) server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
return server.Respond(w, http.StatusOK, ApiSummary{
Healthy: ready(),
Title: "Go API Template",
Message: "Welcome to the Go API Template Application!",
Build: build,
Demo: ctrl.isDemo,
Healthy: ready(),
Title: "Homebox",
Message: "Track, Manage, and Organize your shit",
Build: build,
Demo: ctrl.isDemo,
AllowRegistration: ctrl.allowRegistration,
})
}
}
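For context on the `AllowRegistration` field added above (it is what commit efd7069fe4's frontend change reads to hide the registration button), here is a hedged sketch of the `/api/v1/status` payload shape. The struct is a trimmed copy of the fields shown in the diff, `Build` and `Versions` are left out for brevity, and the values are purely illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of the ApiSummary fields from the diff above; Build and
// Versions are omitted to keep the example short.
type ApiSummary struct {
	Healthy           bool   `json:"health"`
	Title             string `json:"title"`
	Message           string `json:"message"`
	Demo              bool   `json:"demo"`
	AllowRegistration bool   `json:"allowRegistration"`
}

func main() {
	// Illustrative values only; the real handler fills these from the
	// controller's configuration.
	summary := ApiSummary{
		Healthy:           true,
		Title:             "Homebox",
		Message:           "Track, Manage, and Organize your shit",
		AllowRegistration: false,
	}

	out, _ := json.MarshalIndent(summary, "", "  ")
	fmt.Println(string(out))
}
```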

View File

@@ -21,7 +21,7 @@ func (ctrl *V1Controller) routeID(r *http.Request) (uuid.UUID, error) {
func (ctrl *V1Controller) routeUUID(r *http.Request, key string) (uuid.UUID, error) {
ID, err := uuid.Parse(chi.URLParam(r, key))
if err != nil {
return uuid.Nil, validate.NewInvalidRouteKeyError(key)
return uuid.Nil, validate.NewRouteKeyError(key)
}
return ID, nil
}

View File

@@ -1,8 +1,10 @@
package v1
import (
"context"
"net/http"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/pkgs/server"
@@ -13,20 +15,13 @@ type ActionAmountResult struct {
Completed int `json:"completed"`
}
// HandleGroupInvitationsCreate godoc
// @Summary Ensures all items in the database have an asset id
// @Tags Group
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/ensure-asset-ids [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleEnsureAssetID() server.HandlerFunc {
func actionHandlerFactory(ref string, fn func(context.Context, uuid.UUID) (int, error)) server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
totalCompleted, err := ctrl.svc.Items.EnsureAssetID(ctx, ctx.GID)
totalCompleted, err := fn(ctx, ctx.GID)
if err != nil {
log.Err(err).Msg("failed to ensure asset id")
log.Err(err).Str("action_ref", ref).Msg("failed to run action")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
@@ -34,23 +29,41 @@ func (ctrl *V1Controller) HandleEnsureAssetID() server.HandlerFunc {
}
}
// HandleEnsureAssetID godoc
//
// @Summary Ensure Asset IDs
// @Description Ensures all items in the database have an asset ID
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/ensure-asset-ids [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleEnsureAssetID() server.HandlerFunc {
return actionHandlerFactory("ensure asset IDs", ctrl.svc.Items.EnsureAssetID)
}
// HandleEnsureImportRefs godoc
//
// @Summary Ensures Import Refs
// @Description Ensures all items in the database have an import ref
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/ensure-import-refs [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleEnsureImportRefs() server.HandlerFunc {
return actionHandlerFactory("ensure import refs", ctrl.svc.Items.EnsureImportRef)
}
// HandleItemDateZeroOut godoc
// @Summary Resets all item date fields to the beginning of the day
// @Tags Group
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/zero-item-time-fields [Post]
// @Security Bearer
//
// @Summary Zero Out Time Fields
// @Description Resets all item date fields to the beginning of the day
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/zero-item-time-fields [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleItemDateZeroOut() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
totalCompleted, err := ctrl.repo.Items.ZeroOutTimeFields(ctx, ctx.GID)
if err != nil {
log.Err(err).Msg("failed to ensure asset id")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
return server.Respond(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted})
}
return actionHandlerFactory("zero out date time", ctrl.repo.Items.ZeroOutTimeFields)
}

View File

@@ -14,14 +14,15 @@ import (
"github.com/rs/zerolog/log"
)
// HandleItemGet godocs
// @Summary Gets an item by Asset ID
// @Tags Assets
// @Produce json
// @Param id path string true "Asset ID"
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/assets/{id} [GET]
// @Security Bearer
// HandleAssetGet godocs
//
// @Summary Get Item by Asset ID
// @Tags Items
// @Produce json
// @Param id path string true "Asset ID"
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/assets/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleAssetGet() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
@@ -49,7 +50,6 @@ func (ctrl *V1Controller) HandleAssetGet() server.HandlerFunc {
return server.Respond(w, http.StatusBadRequest, "Invalid page size")
}
}
items, err := ctrl.repo.Items.QueryByAssetID(r.Context(), ctx.GID, repo.AssetID(assetId), int(page), int(pageSize))
if err != nil {

View File

@@ -26,15 +26,16 @@ type (
)
// HandleAuthLogin godoc
// @Summary User Login
// @Tags Authentication
// @Accept x-www-form-urlencoded
// @Accept application/json
// @Param username formData string false "string" example(admin@admin.com)
// @Param password formData string false "string" example(admin)
// @Produce json
// @Success 200 {object} TokenResponse
// @Router /v1/users/login [POST]
//
// @Summary User Login
// @Tags Authentication
// @Accept x-www-form-urlencoded
// @Accept application/json
// @Param username formData string false "string" example(admin@admin.com)
// @Param password formData string false "string" example(admin)
// @Produce json
// @Success 200 {object} TokenResponse
// @Router /v1/users/login [POST]
func (ctrl *V1Controller) HandleAuthLogin() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
loginForm := &LoginForm{}
@@ -50,7 +51,6 @@ func (ctrl *V1Controller) HandleAuthLogin() server.HandlerFunc {
loginForm.Password = r.PostFormValue("password")
case server.ContentJSON:
err := server.Decode(r, loginForm)
if err != nil {
log.Err(err).Msg("failed to decode login form")
}
@@ -72,7 +72,6 @@ func (ctrl *V1Controller) HandleAuthLogin() server.HandlerFunc {
}
newToken, err := ctrl.svc.User.Login(r.Context(), strings.ToLower(loginForm.Username), loginForm.Password)
if err != nil {
return validate.NewRequestError(errors.New("authentication failed"), http.StatusInternalServerError)
}
@@ -86,11 +85,12 @@ func (ctrl *V1Controller) HandleAuthLogin() server.HandlerFunc {
}
// HandleAuthLogout godoc
// @Summary User Logout
// @Tags Authentication
// @Success 204
// @Router /v1/users/logout [POST]
// @Security Bearer
//
// @Summary User Logout
// @Tags Authentication
// @Success 204
// @Router /v1/users/logout [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleAuthLogout() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
token := services.UseTokenCtx(r.Context())
@@ -108,13 +108,14 @@ func (ctrl *V1Controller) HandleAuthLogout() server.HandlerFunc {
}
// HandleAuthLogout godoc
// @Summary User Token Refresh
// @Description handleAuthRefresh returns a handler that will issue a new token from an existing token.
// @Description This does not validate that the user still exists within the database.
// @Tags Authentication
// @Success 200
// @Router /v1/users/refresh [GET]
// @Security Bearer
//
// @Summary User Token Refresh
// @Description handleAuthRefresh returns a handler that will issue a new token from an existing token.
// @Description This does not validate that the user still exists within the database.
// @Tags Authentication
// @Success 200
// @Router /v1/users/refresh [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleAuthRefresh() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
requestToken := services.UseTokenCtx(r.Context())

View File

@@ -25,24 +25,26 @@ type (
)
// HandleGroupGet godoc
// @Summary Get the current user's group
// @Tags Group
// @Produce json
// @Success 200 {object} repo.Group
// @Router /v1/groups [Get]
// @Security Bearer
//
// @Summary Get Group
// @Tags Group
// @Produce json
// @Success 200 {object} repo.Group
// @Router /v1/groups [Get]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupGet() server.HandlerFunc {
return ctrl.handleGroupGeneral()
}
// HandleGroupUpdate godoc
// @Summary Updates some fields of the current users group
// @Tags Group
// @Produce json
// @Param payload body repo.GroupUpdate true "User Data"
// @Success 200 {object} repo.Group
// @Router /v1/groups [Put]
// @Security Bearer
//
// @Summary Update Group
// @Tags Group
// @Produce json
// @Param payload body repo.GroupUpdate true "User Data"
// @Success 200 {object} repo.Group
// @Router /v1/groups [Put]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupUpdate() server.HandlerFunc {
return ctrl.handleGroupGeneral()
}
@@ -81,13 +83,14 @@ func (ctrl *V1Controller) handleGroupGeneral() server.HandlerFunc {
}
// HandleGroupInvitationsCreate godoc
// @Summary Get the current user
// @Tags Group
// @Produce json
// @Param payload body GroupInvitationCreate true "User Data"
// @Success 200 {object} GroupInvitation
// @Router /v1/groups/invitations [Post]
// @Security Bearer
//
// @Summary Create Group Invitation
// @Tags Group
// @Produce json
// @Param payload body GroupInvitationCreate true "User Data"
// @Success 200 {object} GroupInvitation
// @Router /v1/groups/invitations [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupInvitationsCreate() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
data := GroupInvitationCreate{}

View File

@@ -2,6 +2,7 @@ package v1
import (
"database/sql"
"encoding/csv"
"errors"
"net/http"
"strings"
@@ -14,19 +15,19 @@ import (
)
// HandleItemsGetAll godoc
// @Summary Get All Items
// @Tags Items
// @Produce json
// @Param q query string false "search string"
// @Param page query int false "page number"
// @Param pageSize query int false "items per page"
// @Param labels query []string false "label Ids" collectionFormat(multi)
// @Param locations query []string false "location Ids" collectionFormat(multi)
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/items [GET]
// @Security Bearer
//
// @Summary Query All Items
// @Tags Items
// @Produce json
// @Param q query string false "search string"
// @Param page query int false "page number"
// @Param pageSize query int false "items per page"
// @Param labels query []string false "label Ids" collectionFormat(multi)
// @Param locations query []string false "location Ids" collectionFormat(multi)
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/items [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsGetAll() server.HandlerFunc {
extractQuery := func(r *http.Request) repo.ItemQuery {
params := r.URL.Query()
@@ -87,13 +88,14 @@ func (ctrl *V1Controller) HandleItemsGetAll() server.HandlerFunc {
}
// HandleItemsCreate godoc
// @Summary Create a new item
// @Tags Items
// @Produce json
// @Param payload body repo.ItemCreate true "Item Data"
// @Success 200 {object} repo.ItemSummary
// @Router /v1/items [POST]
// @Security Bearer
//
// @Summary Create Item
// @Tags Items
// @Produce json
// @Param payload body repo.ItemCreate true "Item Data"
// @Success 200 {object} repo.ItemSummary
// @Router /v1/items [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsCreate() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
createData := repo.ItemCreate{}
@@ -114,38 +116,41 @@ func (ctrl *V1Controller) HandleItemsCreate() server.HandlerFunc {
}
// HandleItemGet godocs
// @Summary Gets a item and fields
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [GET]
// @Security Bearer
//
// @Summary Get Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemGet() server.HandlerFunc {
return ctrl.handleItemsGeneral()
}
// HandleItemDelete godocs
// @Summary deletes a item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 204
// @Router /v1/items/{id} [DELETE]
// @Security Bearer
//
// @Summary Delete Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 204
// @Router /v1/items/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemDelete() server.HandlerFunc {
return ctrl.handleItemsGeneral()
}
// HandleItemUpdate godocs
// @Summary updates a item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.ItemUpdate true "Item Data"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [PUT]
// @Security Bearer
//
// @Summary Update Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.ItemUpdate true "Item Data"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemUpdate() server.HandlerFunc {
return ctrl.handleItemsGeneral()
}
@@ -193,13 +198,14 @@ func (ctrl *V1Controller) handleItemsGeneral() server.HandlerFunc {
}
// HandleGetAllCustomFieldNames godocs
// @Summary imports items into the database
// @Tags Items
// @Produce json
// @Success 200
// @Router /v1/items/fields [GET]
// @Success 200 {object} []string
// @Security Bearer
//
// @Summary Get All Custom Field Names
// @Tags Items
// @Produce json
// @Success 200
// @Router /v1/items/fields [GET]
// @Success 200 {object} []string
// @Security Bearer
func (ctrl *V1Controller) HandleGetAllCustomFieldNames() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
@@ -214,13 +220,14 @@ func (ctrl *V1Controller) HandleGetAllCustomFieldNames() server.HandlerFunc {
}
// HandleGetAllCustomFieldValues godocs
// @Summary imports items into the database
// @Tags Items
// @Produce json
// @Success 200
// @Router /v1/items/fields/values [GET]
// @Success 200 {object} []string
// @Security Bearer
//
// @Summary Get All Custom Field Values
// @Tags Items
// @Produce json
// @Success 200
// @Router /v1/items/fields/values [GET]
// @Success 200 {object} []string
// @Security Bearer
func (ctrl *V1Controller) HandleGetAllCustomFieldValues() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
@@ -235,16 +242,16 @@ func (ctrl *V1Controller) HandleGetAllCustomFieldValues() server.HandlerFunc {
}
// HandleItemsImport godocs
// @Summary imports items into the database
// @Tags Items
// @Produce json
// @Success 204
// @Param csv formData file true "Image to upload"
// @Router /v1/items/import [Post]
// @Security Bearer
//
// @Summary Import Items
// @Tags Items
// @Produce json
// @Success 204
// @Param csv formData file true "Image to upload"
// @Router /v1/items/import [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsImport() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
err := r.ParseMultipartForm(ctrl.maxUploadSize << 20)
if err != nil {
log.Err(err).Msg("failed to parse multipart form")
@@ -257,15 +264,9 @@ func (ctrl *V1Controller) HandleItemsImport() server.HandlerFunc {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
data, err := services.ReadCsv(file)
if err != nil {
log.Err(err).Msg("failed to read csv")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
user := services.UseUserCtx(r.Context())
_, err = ctrl.svc.Items.CsvImport(r.Context(), user.GroupID, data)
_, err = ctrl.svc.Items.CsvImport(r.Context(), user.GroupID, file)
if err != nil {
log.Err(err).Msg("failed to import items")
return validate.NewRequestError(err, http.StatusInternalServerError)
@@ -274,3 +275,27 @@ func (ctrl *V1Controller) HandleItemsImport() server.HandlerFunc {
return server.Respond(w, http.StatusNoContent, nil)
}
}
// HandleItemsExport godocs
//
// @Summary Export Items
// @Tags Items
// @Success 200 {string} string "text/csv"
// @Router /v1/items/export [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsExport() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
csvData, err := ctrl.svc.Items.ExportTSV(r.Context(), ctx.GID)
if err != nil {
log.Err(err).Msg("failed to export items")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
w.Header().Set("Content-Type", "text/tsv")
w.Header().Set("Content-Disposition", "attachment;filename=homebox-items.tsv")
writer := csv.NewWriter(w)
return writer.WriteAll(csvData)
}
}

View File

@@ -18,18 +18,19 @@ type (
}
)
// HandleItemsImport godocs
// @Summary imports items into the database
// @Tags Items Attachments
// @Produce json
// @Param id path string true "Item ID"
// @Param file formData file true "File attachment"
// @Param type formData string true "Type of file"
// @Param name formData string true "name of the file including extension"
// @Success 200 {object} repo.ItemOut
// @Failure 422 {object} server.ErrorResponse
// @Router /v1/items/{id}/attachments [POST]
// @Security Bearer
// HandleItemAttachmentCreate godocs
//
// @Summary Create Item Attachment
// @Tags Items Attachments
// @Produce json
// @Param id path string true "Item ID"
// @Param file formData file true "File attachment"
// @Param type formData string true "Type of file"
// @Param name formData string true "name of the file including extension"
// @Success 200 {object} repo.ItemOut
// @Failure 422 {object} server.ErrorResponse
// @Router /v1/items/{id}/attachments [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentCreate() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
err := r.ParseMultipartForm(ctrl.maxUploadSize << 20)
@@ -82,7 +83,6 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() server.HandlerFunc {
attachment.Type(attachmentType),
file,
)
if err != nil {
log.Err(err).Msg("failed to add attachment")
return validate.NewRequestError(err, http.StatusInternalServerError)
@@ -93,39 +93,42 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() server.HandlerFunc {
}
// HandleItemAttachmentGet godocs
// @Summary retrieves an attachment for an item
// @Tags Items Attachments
// @Produce application/octet-stream
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 200 {object} ItemAttachmentToken
// @Router /v1/items/{id}/attachments/{attachment_id} [GET]
// @Security Bearer
//
// @Summary Get Item Attachment
// @Tags Items Attachments
// @Produce application/octet-stream
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 200 {object} ItemAttachmentToken
// @Router /v1/items/{id}/attachments/{attachment_id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentGet() server.HandlerFunc {
return ctrl.handleItemAttachmentsHandler
}
// HandleItemAttachmentDelete godocs
// @Summary retrieves an attachment for an item
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 204
// @Router /v1/items/{id}/attachments/{attachment_id} [DELETE]
// @Security Bearer
//
// @Summary Delete Item Attachment
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 204
// @Router /v1/items/{id}/attachments/{attachment_id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentDelete() server.HandlerFunc {
return ctrl.handleItemAttachmentsHandler
}
// HandleItemAttachmentUpdate godocs
// @Summary retrieves an attachment for an item
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Param payload body repo.ItemAttachmentUpdate true "Attachment Update"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id}/attachments/{attachment_id} [PUT]
// @Security Bearer
//
// @Summary Update Item Attachment
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Param payload body repo.ItemAttachmentUpdate true "Attachment Update"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id}/attachments/{attachment_id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentUpdate() server.HandlerFunc {
return ctrl.handleItemAttachmentsHandler
}

View File

@@ -12,12 +12,13 @@ import (
)
// HandleLabelsGetAll godoc
// @Summary Get All Labels
// @Tags Labels
// @Produce json
// @Success 200 {object} server.Results{items=[]repo.LabelOut}
// @Router /v1/labels [GET]
// @Security Bearer
//
// @Summary Get All Labels
// @Tags Labels
// @Produce json
// @Success 200 {object} server.Results{items=[]repo.LabelOut}
// @Router /v1/labels [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelsGetAll() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
user := services.UseUserCtx(r.Context())
@@ -31,13 +32,14 @@ func (ctrl *V1Controller) HandleLabelsGetAll() server.HandlerFunc {
}
// HandleLabelsCreate godoc
// @Summary Create a new label
// @Tags Labels
// @Produce json
// @Param payload body repo.LabelCreate true "Label Data"
// @Success 200 {object} repo.LabelSummary
// @Router /v1/labels [POST]
// @Security Bearer
//
// @Summary Create Label
// @Tags Labels
// @Produce json
// @Param payload body repo.LabelCreate true "Label Data"
// @Success 200 {object} repo.LabelSummary
// @Router /v1/labels [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelsCreate() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
createData := repo.LabelCreate{}
@@ -58,37 +60,40 @@ func (ctrl *V1Controller) HandleLabelsCreate() server.HandlerFunc {
}
// HandleLabelDelete godocs
// @Summary deletes a label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 204
// @Router /v1/labels/{id} [DELETE]
// @Security Bearer
//
// @Summary Delete Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 204
// @Router /v1/labels/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelDelete() server.HandlerFunc {
return ctrl.handleLabelsGeneral()
}
// HandleLabelGet godocs
// @Summary Gets a label and fields
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [GET]
// @Security Bearer
//
// @Summary Get Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelGet() server.HandlerFunc {
return ctrl.handleLabelsGeneral()
}
// HandleLabelUpdate godocs
// @Summary updates a label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [PUT]
// @Security Bearer
//
// @Summary Update Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelUpdate() server.HandlerFunc {
return ctrl.handleLabelsGeneral()
}

View File

@@ -12,13 +12,14 @@ import (
)
// HandleLocationTreeQuery godoc
// @Summary Get All Locations
// @Tags Locations
// @Produce json
// @Param withItems query bool false "include items in response tree"
// @Success 200 {object} server.Results{items=[]repo.TreeItem}
// @Router /v1/locations/tree [GET]
// @Security Bearer
//
// @Summary Get Locations Tree
// @Tags Locations
// @Produce json
// @Param withItems query bool false "include items in response tree"
// @Success 200 {object} server.Results{items=[]repo.TreeItem}
// @Router /v1/locations/tree [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationTreeQuery() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
user := services.UseUserCtx(r.Context())
@@ -34,7 +35,6 @@ func (ctrl *V1Controller) HandleLocationTreeQuery() server.HandlerFunc {
WithItems: withItems,
},
)
if err != nil {
log.Err(err).Msg("failed to get locations tree")
return validate.NewRequestError(err, http.StatusInternalServerError)
@@ -45,13 +45,14 @@ func (ctrl *V1Controller) HandleLocationTreeQuery() server.HandlerFunc {
}
// HandleLocationGetAll godoc
// @Summary Get All Locations
// @Tags Locations
// @Produce json
// @Param filterChildren query bool false "Filter locations with parents"
// @Success 200 {object} server.Results{items=[]repo.LocationOutCount}
// @Router /v1/locations [GET]
// @Security Bearer
//
// @Summary Get All Locations
// @Tags Locations
// @Produce json
// @Param filterChildren query bool false "Filter locations with parents"
// @Success 200 {object} server.Results{items=[]repo.LocationOutCount}
// @Router /v1/locations [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationGetAll() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
user := services.UseUserCtx(r.Context())
@@ -73,13 +74,14 @@ func (ctrl *V1Controller) HandleLocationGetAll() server.HandlerFunc {
}
// HandleLocationCreate godoc
// @Summary Create a new location
// @Tags Locations
// @Produce json
// @Param payload body repo.LocationCreate true "Location Data"
// @Success 200 {object} repo.LocationSummary
// @Router /v1/locations [POST]
// @Security Bearer
//
// @Summary Create Location
// @Tags Locations
// @Produce json
// @Param payload body repo.LocationCreate true "Location Data"
// @Success 200 {object} repo.LocationSummary
// @Router /v1/locations [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationCreate() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
createData := repo.LocationCreate{}
@@ -100,38 +102,41 @@ func (ctrl *V1Controller) HandleLocationCreate() server.HandlerFunc {
}
// HandleLocationDelete godocs
// @Summary deletes a location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 204
// @Router /v1/locations/{id} [DELETE]
// @Security Bearer
//
// @Summary Delete Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 204
// @Router /v1/locations/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationDelete() server.HandlerFunc {
return ctrl.handleLocationGeneral()
}
// HandleLocationGet godocs
// @Summary Gets a location and fields
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [GET]
// @Security Bearer
//
// @Summary Get Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationGet() server.HandlerFunc {
return ctrl.handleLocationGeneral()
}
// HandleLocationUpdate godocs
// @Summary updates a location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Param payload body repo.LocationUpdate true "Location Data"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [PUT]
// @Security Bearer
//
// @Summary Update Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Param payload body repo.LocationUpdate true "Location Data"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationUpdate() server.HandlerFunc {
return ctrl.handleLocationGeneral()
}

View File

@@ -2,6 +2,7 @@ package v1
import (
"net/http"
"strconv"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
@@ -11,47 +12,51 @@ import (
)
// HandleMaintenanceGetLog godoc
// @Summary Get Maintenance Log
// @Tags Maintenance
// @Produce json
// @Success 200 {object} repo.MaintenanceLog
// @Router /v1/items/{id}/maintenance [GET]
// @Security Bearer
//
// @Summary Get Maintenance Log
// @Tags Maintenance
// @Produce json
// @Success 200 {object} repo.MaintenanceLog
// @Router /v1/items/{id}/maintenance [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceLogGet() server.HandlerFunc {
return ctrl.handleMaintenanceLog()
}
// HandleMaintenanceEntryCreate godoc
// @Summary Create Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryCreate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance [POST]
// @Security Bearer
//
// @Summary Create Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryCreate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryCreate() server.HandlerFunc {
return ctrl.handleMaintenanceLog()
}
// HandleMaintenanceEntryDelete godoc
// @Summary Delete Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Success 204
// @Router /v1/items/{id}/maintenance/{entry_id} [DELETE]
// @Security Bearer
//
// @Summary Delete Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Success 204
// @Router /v1/items/{id}/maintenance/{entry_id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryDelete() server.HandlerFunc {
return ctrl.handleMaintenanceLog()
}
// HandleMaintenanceEntryUpdate godoc
// @Summary Update Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance/{entry_id} [PUT]
// @Security Bearer
//
// @Summary Update Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance/{entry_id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() server.HandlerFunc {
return ctrl.handleMaintenanceLog()
}
@@ -66,7 +71,14 @@ func (ctrl *V1Controller) handleMaintenanceLog() server.HandlerFunc {
switch r.Method {
case http.MethodGet:
mlog, err := ctrl.repo.MaintEntry.GetLog(ctx, itemID)
completed, _ := strconv.ParseBool(r.URL.Query().Get("completed"))
scheduled, _ := strconv.ParseBool(r.URL.Query().Get("scheduled"))
query := repo.MaintenanceLogQuery{
Completed: completed,
Scheduled: scheduled,
}
mlog, err := ctrl.repo.MaintEntry.GetLog(ctx, itemID, query)
if err != nil {
log.Err(err).Msg("failed to get items")
return validate.NewRequestError(err, http.StatusInternalServerError)

View File

@@ -0,0 +1,106 @@
package v1
import (
"context"
"net/http"
"github.com/containrrr/shoutrrr"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/homebox/backend/pkgs/server"
)
// HandleGetUserNotifiers godoc
//
// @Summary Get Notifiers
// @Tags Notifiers
// @Produce json
// @Success 200 {object} server.Results{items=[]repo.NotifierOut}
// @Router /v1/notifiers [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGetUserNotifiers() server.HandlerFunc {
fn := func(ctx context.Context, _ struct{}) ([]repo.NotifierOut, error) {
user := services.UseUserCtx(ctx)
return ctrl.repo.Notifiers.GetByUser(ctx, user.ID)
}
return adapters.Query(fn, http.StatusOK)
}
// HandleCreateNotifier godoc
//
// @Summary Create Notifier
// @Tags Notifiers
// @Produce json
// @Param payload body repo.NotifierCreate true "Notifier Data"
// @Success 200 {object} repo.NotifierOut
// @Router /v1/notifiers [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleCreateNotifier() server.HandlerFunc {
fn := func(ctx context.Context, in repo.NotifierCreate) (repo.NotifierOut, error) {
auth := services.NewContext(ctx)
return ctrl.repo.Notifiers.Create(ctx, auth.GID, auth.UID, in)
}
return adapters.Action(fn, http.StatusCreated)
}
// HandleDeleteNotifier godocs
//
// @Summary Delete a Notifier
// @Tags Notifiers
// @Param id path string true "Notifier ID"
// @Success 204
// @Router /v1/notifiers/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleDeleteNotifier() server.HandlerFunc {
fn := func(ctx context.Context, ID uuid.UUID) (any, error) {
auth := services.NewContext(ctx)
return nil, ctrl.repo.Notifiers.Delete(ctx, auth.UID, ID)
}
return adapters.CommandID("id", fn, http.StatusNoContent)
}
// HandleUpdateNotifier godocs
//
// @Summary Update Notifier
// @Tags Notifiers
// @Param id path string true "Notifier ID"
// @Param payload body repo.NotifierUpdate true "Notifier Data"
// @Success 200 {object} repo.NotifierOut
// @Router /v1/notifiers/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUpdateNotifier() server.HandlerFunc {
fn := func(ctx context.Context, ID uuid.UUID, in repo.NotifierUpdate) (repo.NotifierOut, error) {
auth := services.NewContext(ctx)
return ctrl.repo.Notifiers.Update(ctx, auth.UID, ID, in)
}
return adapters.ActionID("id", fn, http.StatusOK)
}
// HandlerNotifierTest godoc
//
// @Summary Test Notifier
// @Tags Notifiers
// @Produce json
// @Param id path string true "Notifier ID"
// @Param url query string true "URL"
// @Success 204
// @Router /v1/notifiers/test [POST]
// @Security Bearer
func (ctrl *V1Controller) HandlerNotifierTest() server.HandlerFunc {
type body struct {
URL string `json:"url" validate:"required"`
}
fn := func(ctx context.Context, q body) (any, error) {
err := shoutrrr.Send(q.URL, "Test message from Homebox")
return nil, err
}
return adapters.Action(fn, http.StatusOK)
}
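The `adapters` package referenced here is not part of this compare, so the following is only a rough sketch of the shape its `Action` helper appears to have, inferred from the call sites above: a generic body-in/result-out wrapper built on the existing `server` helpers. Every detail is an assumption, not a copy of the real implementation:

```go
package adapters

import (
	"context"
	"net/http"

	"github.com/hay-kot/homebox/backend/pkgs/server"
)

// Action is a guess at the adapter used by HandleCreateNotifier and
// HandlerNotifierTest: decode the JSON body into T, run fn, and respond
// with its result using the given status code.
func Action[T any, R any](fn func(ctx context.Context, body T) (R, error), status int) server.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) error {
		var body T
		if err := server.Decode(r, &body); err != nil {
			return err
		}

		result, err := fn(r.Context(), body)
		if err != nil {
			return err
		}

		return server.Respond(w, status, result)
	}
}
```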

View File

@@ -19,7 +19,7 @@ var qrcodeLogo []byte
// HandleGenerateQRCode godoc
//
// @Summary Encode data into QRCode
// @Summary Create QR Code
// @Tags Items
// @Produce json
// @Param data query string false "data to be encoded into qrcode"

View File

@@ -9,7 +9,7 @@ import (
// HandleBillOfMaterialsExport godoc
//
// @Summary Generates a Bill of Materials CSV
// @Summary Export Bill of Materials
// @Tags Reporting
// @Produce json
// @Success 200 {string} string "text/csv"
@@ -19,13 +19,13 @@ func (ctrl *V1Controller) HandleBillOfMaterialsExport() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
actor := services.UseUserCtx(r.Context())
csv, err := ctrl.svc.Reporting.BillOfMaterialsTSV(r.Context(), actor.GroupID)
csv, err := ctrl.svc.Items.ExportBillOfMaterialsTSV(r.Context(), actor.GroupID)
if err != nil {
return err
}
w.Header().Set("Content-Type", "text/csv")
w.Header().Set("Content-Disposition", "attachment; filename=bom.csv")
w.Header().Set("Content-Type", "text/tsv")
w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.tsv")
_, err = w.Write(csv)
return err
}

View File

@@ -10,12 +10,13 @@ import (
)
// HandleGroupGet godoc
// @Summary Get the current user's group statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/locations [GET]
// @Security Bearer
//
// @Summary Get Location Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/locations [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsLocations() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
@@ -29,13 +30,14 @@ func (ctrl *V1Controller) HandleGroupStatisticsLocations() server.HandlerFunc {
}
}
// HandleGroupGet godoc
// @Summary Get the current user's group statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/labels [GET]
// @Security Bearer
// HandleGroupStatisticsLabels godoc
//
// @Summary Get Label Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/labels [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsLabels() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
@@ -49,13 +51,14 @@ func (ctrl *V1Controller) HandleGroupStatisticsLabels() server.HandlerFunc {
}
}
// HandleGroupGet godoc
// @Summary Get the current user's group statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} repo.GroupStatistics
// @Router /v1/groups/statistics [GET]
// @Security Bearer
// HandleGroupStatistics godoc
//
// @Summary Get Group Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} repo.GroupStatistics
// @Router /v1/groups/statistics [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatistics() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
@@ -69,15 +72,16 @@ func (ctrl *V1Controller) HandleGroupStatistics() server.HandlerFunc {
}
}
// HandleGroupGet godoc
// @Summary Queries the changes overtime of the purchase price over time
// @Tags Statistics
// @Produce json
// @Success 200 {object} repo.ValueOverTime
// @Param start query string false "start date"
// @Param end query string false "end date"
// @Router /v1/groups/statistics/purchase-price [GET]
// @Security Bearer
// HandleGroupStatisticsPriceOverTime godoc
//
// @Summary Get Purchase Price Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} repo.ValueOverTime
// @Param start query string false "start date"
// @Param end query string false "end date"
// @Router /v1/groups/statistics/purchase-price [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsPriceOverTime() server.HandlerFunc {
parseDate := func(datestr string, defaultDate time.Time) (time.Time, error) {
if datestr == "" {

View File

@@ -12,13 +12,14 @@ import (
"github.com/rs/zerolog/log"
)
// HandleUserSelf godoc
// @Summary Get the current user
// @Tags User
// @Produce json
// @Param payload body services.UserRegistration true "User Data"
// @Success 204
// @Router /v1/users/register [Post]
// HandleUserRegistration godoc
//
// @Summary Register New User
// @Tags User
// @Produce json
// @Param payload body services.UserRegistration true "User Data"
// @Success 204
// @Router /v1/users/register [Post]
func (ctrl *V1Controller) HandleUserRegistration() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
regData := services.UserRegistration{}
@@ -43,12 +44,13 @@ func (ctrl *V1Controller) HandleUserRegistration() server.HandlerFunc {
}
// HandleUserSelf godoc
// @Summary Get the current user
// @Tags User
// @Produce json
// @Success 200 {object} server.Result{item=repo.UserOut}
// @Router /v1/users/self [GET]
// @Security Bearer
//
// @Summary Get User Self
// @Tags User
// @Produce json
// @Success 200 {object} server.Result{item=repo.UserOut}
// @Router /v1/users/self [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelf() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
token := services.UseTokenCtx(r.Context())
@@ -63,13 +65,14 @@ func (ctrl *V1Controller) HandleUserSelf() server.HandlerFunc {
}
// HandleUserSelfUpdate godoc
// @Summary Update the current user
// @Tags User
// @Produce json
// @Param payload body repo.UserUpdate true "User Data"
// @Success 200 {object} server.Result{item=repo.UserUpdate}
// @Router /v1/users/self [PUT]
// @Security Bearer
//
// @Summary Update Account
// @Tags User
// @Produce json
// @Param payload body repo.UserUpdate true "User Data"
// @Success 200 {object} server.Result{item=repo.UserUpdate}
// @Router /v1/users/self [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfUpdate() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
updateData := repo.UserUpdate{}
@@ -80,7 +83,6 @@ func (ctrl *V1Controller) HandleUserSelfUpdate() server.HandlerFunc {
actor := services.UseUserCtx(r.Context())
newData, err := ctrl.svc.User.UpdateSelf(r.Context(), actor.ID, updateData)
if err != nil {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
@@ -90,12 +92,13 @@ func (ctrl *V1Controller) HandleUserSelfUpdate() server.HandlerFunc {
}
// HandleUserSelfDelete godoc
// @Summary Deletes the user account
// @Tags User
// @Produce json
// @Success 204
// @Router /v1/users/self [DELETE]
// @Security Bearer
//
// @Summary Delete Account
// @Tags User
// @Produce json
// @Success 204
// @Router /v1/users/self [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfDelete() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
if ctrl.isDemo {
@@ -119,12 +122,13 @@ type (
)
// HandleUserSelfChangePassword godoc
// @Summary Updates the users password
// @Tags User
// @Success 204
// @Param payload body ChangePassword true "Password Payload"
// @Router /v1/users/change-password [PUT]
// @Security Bearer
//
// @Summary Change Password
// @Tags User
// @Success 204
// @Param payload body ChangePassword true "Password Payload"
// @Router /v1/users/change-password [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfChangePassword() server.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
if ctrl.isDemo {

View File

@@ -27,9 +27,9 @@ var (
buildTime = "now"
)
// @title Go API Templates
// @title Homebox API
// @version 1.0
// @description This is a simple Rest API Server Template that implements some basic User and Authentication patterns to help you get started and bootstrap your next project!.
// @description Track, Manage, and Organize your Shit.
// @contact.name Don't
// @license.name MIT
// @BasePath /api
@@ -57,7 +57,7 @@ func run(cfg *config.Config) error {
// =========================================================================
// Initialize Database & Repos
err := os.MkdirAll(cfg.Storage.Data, 0755)
err := os.MkdirAll(cfg.Storage.Data, 0o755)
if err != nil {
log.Fatal().Err(err).Msg("failed to create data directory")
}

View File

@@ -4,6 +4,7 @@ import (
"context"
"errors"
"net/http"
"net/url"
"strings"
"github.com/hay-kot/homebox/backend/internal/core/services"
@@ -15,9 +16,7 @@ type tokenHasKey struct {
key string
}
var (
hashedToken = tokenHasKey{key: "hashedToken"}
)
var hashedToken = tokenHasKey{key: "hashedToken"}
type RoleMode int
@@ -70,6 +69,45 @@ func (a *app) mwRoles(rm RoleMode, required ...string) server.Middleware {
}
}
type KeyFunc func(r *http.Request) (string, error)
func getBearer(r *http.Request) (string, error) {
auth := r.Header.Get("Authorization")
if auth == "" {
return "", errors.New("authorization header is required")
}
return auth, nil
}
func getQuery(r *http.Request) (string, error) {
token := r.URL.Query().Get("access_token")
if token == "" {
return "", errors.New("access_token query is required")
}
token, err := url.QueryUnescape(token)
if err != nil {
return "", errors.New("access_token query is required")
}
return token, nil
}
func getCookie(r *http.Request) (string, error) {
cookie, err := r.Cookie("hb.auth.token")
if err != nil {
return "", errors.New("access_token cookie is required")
}
token, err := url.QueryUnescape(cookie.Value)
if err != nil {
return "", errors.New("access_token cookie is required")
}
return token, nil
}
// mwAuthToken is a middleware that will check the database for a stateful token
// and attach its user to the request context, or return an appropriate error.
// Authorization support is by token via Headers, Query Parameters, or Cookies
@@ -77,26 +115,36 @@ func (a *app) mwRoles(rm RoleMode, required ...string) server.Middleware {
// Example:
// - header = "Bearer 1234567890"
// - query = "?access_token=1234567890"
// - cookie = hb.auth.token = 1234567890
func (a *app) mwAuthToken(next server.Handler) server.Handler {
return server.HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
requestToken := r.Header.Get("Authorization")
if requestToken == "" {
// check for query param
requestToken = r.URL.Query().Get("access_token")
if requestToken == "" {
return validate.NewRequestError(errors.New("Authorization header or query is required"), http.StatusUnauthorized)
keyFuncs := [...]KeyFunc{
getBearer,
getCookie,
getQuery,
}
var requestToken string
for _, keyFunc := range keyFuncs {
token, err := keyFunc(r)
if err == nil {
requestToken = token
break
}
}
if requestToken == "" {
return validate.NewRequestError(errors.New("Authorization header or query is required"), http.StatusUnauthorized)
}
requestToken = strings.TrimPrefix(requestToken, "Bearer ")
r = r.WithContext(context.WithValue(r.Context(), hashedToken, requestToken))
usr, err := a.services.User.GetSelf(r.Context(), requestToken)
// Check the database for the token
if err != nil {
return validate.NewRequestError(errors.New("Authorization header is required"), http.StatusUnauthorized)
return validate.NewRequestError(errors.New("valid authorization header is required"), http.StatusUnauthorized)
}
r = r.WithContext(services.SetUserCtx(r.Context(), &usr, requestToken))
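The new KeyFunc extractors mean the same token can now arrive via the Authorization header, the hb.auth.token cookie, or the access_token query parameter. A minimal client-side sketch (not part of this changeset; the base URL is assumed) of the three equivalent ways to authenticate:

package main

import (
	"net/http"
	"net/url"
)

// authedRequests builds one request per supported mechanism: header, cookie,
// and query parameter. All three end up in the same mwAuthToken code path.
func authedRequests(token string) []*http.Request {
	base := "http://localhost:7745/api/v1/users/self" // assumed address

	viaHeader, _ := http.NewRequest(http.MethodGet, base, nil)
	viaHeader.Header.Set("Authorization", "Bearer "+token)

	viaCookie, _ := http.NewRequest(http.MethodGet, base, nil)
	viaCookie.AddCookie(&http.Cookie{Name: "hb.auth.token", Value: url.QueryEscape(token)})

	viaQuery, _ := http.NewRequest(http.MethodGet, base+"?access_token="+url.QueryEscape(token), nil)

	return []*http.Request{viaHeader, viaCookie, viaQuery}
}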

View File

@@ -89,6 +89,7 @@ func (a *app) mountRoutes(repos *repo.AllRepos) {
a.server.Post(v1Base("/actions/ensure-asset-ids"), v1Ctrl.HandleEnsureAssetID(), userMW...)
a.server.Post(v1Base("/actions/zero-item-time-fields"), v1Ctrl.HandleItemDateZeroOut(), userMW...)
a.server.Post(v1Base("/actions/ensure-import-refs"), v1Ctrl.HandleEnsureImportRefs(), userMW...)
a.server.Get(v1Base("/locations"), v1Ctrl.HandleLocationGetAll(), userMW...)
a.server.Post(v1Base("/locations"), v1Ctrl.HandleLocationCreate(), userMW...)
@@ -106,6 +107,7 @@ func (a *app) mountRoutes(repos *repo.AllRepos) {
a.server.Get(v1Base("/items"), v1Ctrl.HandleItemsGetAll(), userMW...)
a.server.Post(v1Base("/items"), v1Ctrl.HandleItemsCreate(), userMW...)
a.server.Post(v1Base("/items/import"), v1Ctrl.HandleItemsImport(), userMW...)
a.server.Get(v1Base("/items/export"), v1Ctrl.HandleItemsExport(), userMW...)
a.server.Get(v1Base("/items/fields"), v1Ctrl.HandleGetAllCustomFieldNames(), userMW...)
a.server.Get(v1Base("/items/fields/values"), v1Ctrl.HandleGetAllCustomFieldValues(), userMW...)
@@ -124,6 +126,13 @@ func (a *app) mountRoutes(repos *repo.AllRepos) {
a.server.Get(v1Base("/asset/{id}"), v1Ctrl.HandleAssetGet(), userMW...)
// Notifiers
a.server.Get(v1Base("/notifiers"), v1Ctrl.HandleGetUserNotifiers(), userMW...)
a.server.Post(v1Base("/notifiers"), v1Ctrl.HandleCreateNotifier(), userMW...)
a.server.Put(v1Base("/notifiers/{id}"), v1Ctrl.HandleUpdateNotifier(), userMW...)
a.server.Delete(v1Base("/notifiers/{id}"), v1Ctrl.HandleDeleteNotifier(), userMW...)
a.server.Post(v1Base("/notifiers/test"), v1Ctrl.HandlerNotifierTest(), userMW...)
// Asset-Like endpoints
a.server.Get(
v1Base("/qrcode"),

View File

@@ -28,13 +28,39 @@ const docTemplate = `{
"Bearer": []
}
],
"description": "Ensures all items in the database have an asset ID",
"produces": [
"application/json"
],
"tags": [
"Group"
"Actions"
],
"summary": "Ensures all items in the database have an asset id",
"summary": "Ensure Asset IDs",
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/v1.ActionAmountResult"
}
}
}
}
},
"/v1/actions/ensure-import-refs": {
"post": {
"security": [
{
"Bearer": []
}
],
"description": "Ensures all items in the database have an import ref",
"produces": [
"application/json"
],
"tags": [
"Actions"
],
"summary": "Ensures Import Refs",
"responses": {
"200": {
"description": "OK",
@@ -52,13 +78,14 @@ const docTemplate = `{
"Bearer": []
}
],
"description": "Resets all item date fields to the beginning of the day",
"produces": [
"application/json"
],
"tags": [
"Group"
"Actions"
],
"summary": "Resets all item date fields to the beginning of the day",
"summary": "Zero Out Time Fields",
"responses": {
"200": {
"description": "OK",
@@ -80,9 +107,9 @@ const docTemplate = `{
"application/json"
],
"tags": [
"Assets"
"Items"
],
"summary": "Gets an item by Asset ID",
"summary": "Get Item by Asset ID",
"parameters": [
{
"type": "string",
@@ -115,7 +142,7 @@ const docTemplate = `{
"tags": [
"Group"
],
"summary": "Get the current user's group",
"summary": "Get Group",
"responses": {
"200": {
"description": "OK",
@@ -137,7 +164,7 @@ const docTemplate = `{
"tags": [
"Group"
],
"summary": "Updates some fields of the current users group",
"summary": "Update Group",
"parameters": [
{
"description": "User Data",
@@ -172,7 +199,7 @@ const docTemplate = `{
"tags": [
"Group"
],
"summary": "Get the current user",
"summary": "Create Group Invitation",
"parameters": [
{
"description": "User Data",
@@ -207,7 +234,7 @@ const docTemplate = `{
"tags": [
"Statistics"
],
"summary": "Get the current user's group statistics",
"summary": "Get Group Statistics",
"responses": {
"200": {
"description": "OK",
@@ -231,7 +258,7 @@ const docTemplate = `{
"tags": [
"Statistics"
],
"summary": "Get the current user's group statistics",
"summary": "Get Label Statistics",
"responses": {
"200": {
"description": "OK",
@@ -258,7 +285,7 @@ const docTemplate = `{
"tags": [
"Statistics"
],
"summary": "Get the current user's group statistics",
"summary": "Get Location Statistics",
"responses": {
"200": {
"description": "OK",
@@ -285,7 +312,7 @@ const docTemplate = `{
"tags": [
"Statistics"
],
"summary": "Queries the changes overtime of the purchase price over time",
"summary": "Get Purchase Price Statistics",
"parameters": [
{
"type": "string",
@@ -323,7 +350,7 @@ const docTemplate = `{
"tags": [
"Items"
],
"summary": "Get All Items",
"summary": "Query All Items",
"parameters": [
{
"type": "string",
@@ -385,7 +412,7 @@ const docTemplate = `{
"tags": [
"Items"
],
"summary": "Create a new item",
"summary": "Create Item",
"parameters": [
{
"description": "Item Data",
@@ -407,6 +434,27 @@ const docTemplate = `{
}
}
},
"/v1/items/export": {
"get": {
"security": [
{
"Bearer": []
}
],
"tags": [
"Items"
],
"summary": "Export Items",
"responses": {
"200": {
"description": "text/csv",
"schema": {
"type": "string"
}
}
}
}
},
"/v1/items/fields": {
"get": {
"security": [
@@ -420,7 +468,7 @@ const docTemplate = `{
"tags": [
"Items"
],
"summary": "imports items into the database",
"summary": "Get All Custom Field Names",
"responses": {
"200": {
"description": "OK",
@@ -447,7 +495,7 @@ const docTemplate = `{
"tags": [
"Items"
],
"summary": "imports items into the database",
"summary": "Get All Custom Field Values",
"responses": {
"200": {
"description": "OK",
@@ -474,7 +522,7 @@ const docTemplate = `{
"tags": [
"Items"
],
"summary": "imports items into the database",
"summary": "Import Items",
"parameters": [
{
"type": "file",
@@ -504,7 +552,7 @@ const docTemplate = `{
"tags": [
"Items"
],
"summary": "Gets a item and fields",
"summary": "Get Item",
"parameters": [
{
"type": "string",
@@ -535,7 +583,7 @@ const docTemplate = `{
"tags": [
"Items"
],
"summary": "updates a item",
"summary": "Update Item",
"parameters": [
{
"type": "string",
@@ -575,7 +623,7 @@ const docTemplate = `{
"tags": [
"Items"
],
"summary": "deletes a item",
"summary": "Delete Item",
"parameters": [
{
"type": "string",
@@ -605,7 +653,7 @@ const docTemplate = `{
"tags": [
"Items Attachments"
],
"summary": "imports items into the database",
"summary": "Create Item Attachment",
"parameters": [
{
"type": "string",
@@ -665,7 +713,7 @@ const docTemplate = `{
"tags": [
"Items Attachments"
],
"summary": "retrieves an attachment for an item",
"summary": "Get Item Attachment",
"parameters": [
{
"type": "string",
@@ -700,7 +748,7 @@ const docTemplate = `{
"tags": [
"Items Attachments"
],
"summary": "retrieves an attachment for an item",
"summary": "Update Item Attachment",
"parameters": [
{
"type": "string",
@@ -744,7 +792,7 @@ const docTemplate = `{
"tags": [
"Items Attachments"
],
"summary": "retrieves an attachment for an item",
"summary": "Delete Item Attachment",
"parameters": [
{
"type": "string",
@@ -929,7 +977,7 @@ const docTemplate = `{
"tags": [
"Labels"
],
"summary": "Create a new label",
"summary": "Create Label",
"parameters": [
{
"description": "Label Data",
@@ -964,7 +1012,7 @@ const docTemplate = `{
"tags": [
"Labels"
],
"summary": "Gets a label and fields",
"summary": "Get Label",
"parameters": [
{
"type": "string",
@@ -995,7 +1043,7 @@ const docTemplate = `{
"tags": [
"Labels"
],
"summary": "updates a label",
"summary": "Update Label",
"parameters": [
{
"type": "string",
@@ -1026,7 +1074,7 @@ const docTemplate = `{
"tags": [
"Labels"
],
"summary": "deletes a label",
"summary": "Delete Label",
"parameters": [
{
"type": "string",
@@ -1101,7 +1149,7 @@ const docTemplate = `{
"tags": [
"Locations"
],
"summary": "Create a new location",
"summary": "Create Location",
"parameters": [
{
"description": "Location Data",
@@ -1136,7 +1184,7 @@ const docTemplate = `{
"tags": [
"Locations"
],
"summary": "Get All Locations",
"summary": "Get Locations Tree",
"parameters": [
{
"type": "boolean",
@@ -1183,7 +1231,7 @@ const docTemplate = `{
"tags": [
"Locations"
],
"summary": "Gets a location and fields",
"summary": "Get Location",
"parameters": [
{
"type": "string",
@@ -1214,7 +1262,7 @@ const docTemplate = `{
"tags": [
"Locations"
],
"summary": "updates a location",
"summary": "Update Location",
"parameters": [
{
"type": "string",
@@ -1254,7 +1302,7 @@ const docTemplate = `{
"tags": [
"Locations"
],
"summary": "deletes a location",
"summary": "Delete Location",
"parameters": [
{
"type": "string",
@@ -1271,6 +1319,179 @@ const docTemplate = `{
}
}
},
"/v1/notifiers": {
"get": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Notifiers"
],
"summary": "Get Notifiers",
"responses": {
"200": {
"description": "OK",
"schema": {
"allOf": [
{
"$ref": "#/definitions/server.Results"
},
{
"type": "object",
"properties": {
"items": {
"type": "array",
"items": {
"$ref": "#/definitions/repo.NotifierOut"
}
}
}
}
]
}
}
}
},
"post": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Notifiers"
],
"summary": "Create Notifier",
"parameters": [
{
"description": "Notifier Data",
"name": "payload",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/repo.NotifierCreate"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/repo.NotifierOut"
}
}
}
}
},
"/v1/notifiers/test": {
"post": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Notifiers"
],
"summary": "Test Notifier",
"parameters": [
{
"type": "string",
"description": "Notifier ID",
"name": "id",
"in": "path",
"required": true
},
{
"type": "string",
"description": "URL",
"name": "url",
"in": "query",
"required": true
}
],
"responses": {
"204": {
"description": "No Content"
}
}
}
},
"/v1/notifiers/{id}": {
"put": {
"security": [
{
"Bearer": []
}
],
"tags": [
"Notifiers"
],
"summary": "Update Notifier",
"parameters": [
{
"type": "string",
"description": "Notifier ID",
"name": "id",
"in": "path",
"required": true
},
{
"description": "Notifier Data",
"name": "payload",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/repo.NotifierUpdate"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/repo.NotifierOut"
}
}
}
},
"delete": {
"security": [
{
"Bearer": []
}
],
"tags": [
"Notifiers"
],
"summary": "Delete a Notifier",
"parameters": [
{
"type": "string",
"description": "Notifier ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"204": {
"description": "No Content"
}
}
}
},
"/v1/qrcode": {
"get": {
"security": [
@@ -1284,7 +1505,7 @@ const docTemplate = `{
"tags": [
"Items"
],
"summary": "Encode data into QRCode",
"summary": "Create QR Code",
"parameters": [
{
"type": "string",
@@ -1316,7 +1537,7 @@ const docTemplate = `{
"tags": [
"Reporting"
],
"summary": "Generates a Bill of Materials CSV",
"summary": "Export Bill of Materials",
"responses": {
"200": {
"description": "text/csv",
@@ -1335,7 +1556,7 @@ const docTemplate = `{
"tags": [
"Base"
],
"summary": "Retrieves the basic information about the API",
"summary": "Application Info",
"responses": {
"200": {
"description": "OK",
@@ -1356,7 +1577,7 @@ const docTemplate = `{
"tags": [
"User"
],
"summary": "Updates the users password",
"summary": "Change Password",
"parameters": [
{
"description": "Password Payload",
@@ -1459,7 +1680,7 @@ const docTemplate = `{
"tags": [
"User"
],
"summary": "Get the current user",
"summary": "Register New User",
"parameters": [
{
"description": "User Data",
@@ -1491,7 +1712,7 @@ const docTemplate = `{
"tags": [
"User"
],
"summary": "Get the current user",
"summary": "Get User Self",
"responses": {
"200": {
"description": "OK",
@@ -1525,7 +1746,7 @@ const docTemplate = `{
"tags": [
"User"
],
"summary": "Update the current user",
"summary": "Update Account",
"parameters": [
{
"description": "User Data",
@@ -1570,7 +1791,7 @@ const docTemplate = `{
"tags": [
"User"
],
"summary": "Deletes the user account",
"summary": "Delete Account",
"responses": {
"204": {
"description": "No Content"
@@ -1722,9 +1943,6 @@ const docTemplate = `{
"textValue": {
"type": "string"
},
"timeValue": {
"type": "string"
},
"type": {
"type": "string"
}
@@ -2162,13 +2380,14 @@ const docTemplate = `{
"repo.MaintenanceEntry": {
"type": "object",
"properties": {
"completedDate": {
"description": "Sold",
"type": "string"
},
"cost": {
"type": "string",
"example": "0"
},
"date": {
"type": "string"
},
"description": {
"type": "string"
},
@@ -2177,42 +2396,56 @@ const docTemplate = `{
},
"name": {
"type": "string"
},
"scheduledDate": {
"description": "Sold",
"type": "string"
}
}
},
"repo.MaintenanceEntryCreate": {
"type": "object",
"properties": {
"completedDate": {
"description": "Sold",
"type": "string"
},
"cost": {
"type": "string",
"example": "0"
},
"date": {
"type": "string"
},
"description": {
"type": "string"
},
"name": {
"type": "string"
},
"scheduledDate": {
"description": "Sold",
"type": "string"
}
}
},
"repo.MaintenanceEntryUpdate": {
"type": "object",
"properties": {
"completedDate": {
"description": "Sold",
"type": "string"
},
"cost": {
"type": "string",
"example": "0"
},
"date": {
"type": "string"
},
"description": {
"type": "string"
},
"name": {
"type": "string"
},
"scheduledDate": {
"description": "Sold",
"type": "string"
}
}
},
@@ -2236,6 +2469,72 @@ const docTemplate = `{
}
}
},
"repo.NotifierCreate": {
"type": "object",
"required": [
"name",
"url"
],
"properties": {
"isActive": {
"type": "boolean"
},
"name": {
"type": "string",
"maxLength": 255,
"minLength": 1
},
"url": {
"type": "string"
}
}
},
"repo.NotifierOut": {
"type": "object",
"properties": {
"createdAt": {
"type": "string"
},
"groupId": {
"type": "string"
},
"id": {
"type": "string"
},
"isActive": {
"type": "boolean"
},
"name": {
"type": "string"
},
"updatedAt": {
"type": "string"
},
"userId": {
"type": "string"
}
}
},
"repo.NotifierUpdate": {
"type": "object",
"required": [
"name"
],
"properties": {
"isActive": {
"type": "boolean"
},
"name": {
"type": "string",
"maxLength": 255,
"minLength": 1
},
"url": {
"type": "string",
"x-nullable": true
}
}
},
"repo.PaginationResult-repo_ItemSummary": {
"type": "object",
"properties": {
@@ -2425,6 +2724,9 @@ const docTemplate = `{
"v1.ApiSummary": {
"type": "object",
"properties": {
"allowRegistration": {
"type": "boolean"
},
"build": {
"$ref": "#/definitions/v1.Build"
},
@@ -2537,8 +2839,8 @@ var SwaggerInfo = &swag.Spec{
Host: "",
BasePath: "/api",
Schemes: []string{},
Title: "Go API Templates",
Description: "This is a simple Rest API Server Template that implements some basic User and Authentication patterns to help you get started and bootstrap your next project!.",
Title: "Homebox API",
Description: "Track, Manage, and Organize your Shit.",
InfoInstanceName: "swagger",
SwaggerTemplate: docTemplate,
}

View File

@@ -1,8 +1,8 @@
{
"swagger": "2.0",
"info": {
"description": "This is a simple Rest API Server Template that implements some basic User and Authentication patterns to help you get started and bootstrap your next project!.",
"title": "Go API Templates",
"description": "Track, Manage, and Organize your Shit.",
"title": "Homebox API",
"contact": {
"name": "Don't"
},
@@ -20,13 +20,39 @@
"Bearer": []
}
],
"description": "Ensures all items in the database have an asset ID",
"produces": [
"application/json"
],
"tags": [
"Group"
"Actions"
],
"summary": "Ensures all items in the database have an asset id",
"summary": "Ensure Asset IDs",
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/v1.ActionAmountResult"
}
}
}
}
},
"/v1/actions/ensure-import-refs": {
"post": {
"security": [
{
"Bearer": []
}
],
"description": "Ensures all items in the database have an import ref",
"produces": [
"application/json"
],
"tags": [
"Actions"
],
"summary": "Ensures Import Refs",
"responses": {
"200": {
"description": "OK",
@@ -44,13 +70,14 @@
"Bearer": []
}
],
"description": "Resets all item date fields to the beginning of the day",
"produces": [
"application/json"
],
"tags": [
"Group"
"Actions"
],
"summary": "Resets all item date fields to the beginning of the day",
"summary": "Zero Out Time Fields",
"responses": {
"200": {
"description": "OK",
@@ -72,9 +99,9 @@
"application/json"
],
"tags": [
"Assets"
"Items"
],
"summary": "Gets an item by Asset ID",
"summary": "Get Item by Asset ID",
"parameters": [
{
"type": "string",
@@ -107,7 +134,7 @@
"tags": [
"Group"
],
"summary": "Get the current user's group",
"summary": "Get Group",
"responses": {
"200": {
"description": "OK",
@@ -129,7 +156,7 @@
"tags": [
"Group"
],
"summary": "Updates some fields of the current users group",
"summary": "Update Group",
"parameters": [
{
"description": "User Data",
@@ -164,7 +191,7 @@
"tags": [
"Group"
],
"summary": "Get the current user",
"summary": "Create Group Invitation",
"parameters": [
{
"description": "User Data",
@@ -199,7 +226,7 @@
"tags": [
"Statistics"
],
"summary": "Get the current user's group statistics",
"summary": "Get Group Statistics",
"responses": {
"200": {
"description": "OK",
@@ -223,7 +250,7 @@
"tags": [
"Statistics"
],
"summary": "Get the current user's group statistics",
"summary": "Get Label Statistics",
"responses": {
"200": {
"description": "OK",
@@ -250,7 +277,7 @@
"tags": [
"Statistics"
],
"summary": "Get the current user's group statistics",
"summary": "Get Location Statistics",
"responses": {
"200": {
"description": "OK",
@@ -277,7 +304,7 @@
"tags": [
"Statistics"
],
"summary": "Queries the changes overtime of the purchase price over time",
"summary": "Get Purchase Price Statistics",
"parameters": [
{
"type": "string",
@@ -315,7 +342,7 @@
"tags": [
"Items"
],
"summary": "Get All Items",
"summary": "Query All Items",
"parameters": [
{
"type": "string",
@@ -377,7 +404,7 @@
"tags": [
"Items"
],
"summary": "Create a new item",
"summary": "Create Item",
"parameters": [
{
"description": "Item Data",
@@ -399,6 +426,27 @@
}
}
},
"/v1/items/export": {
"get": {
"security": [
{
"Bearer": []
}
],
"tags": [
"Items"
],
"summary": "Export Items",
"responses": {
"200": {
"description": "text/csv",
"schema": {
"type": "string"
}
}
}
}
},
"/v1/items/fields": {
"get": {
"security": [
@@ -412,7 +460,7 @@
"tags": [
"Items"
],
"summary": "imports items into the database",
"summary": "Get All Custom Field Names",
"responses": {
"200": {
"description": "OK",
@@ -439,7 +487,7 @@
"tags": [
"Items"
],
"summary": "imports items into the database",
"summary": "Get All Custom Field Values",
"responses": {
"200": {
"description": "OK",
@@ -466,7 +514,7 @@
"tags": [
"Items"
],
"summary": "imports items into the database",
"summary": "Import Items",
"parameters": [
{
"type": "file",
@@ -496,7 +544,7 @@
"tags": [
"Items"
],
"summary": "Gets a item and fields",
"summary": "Get Item",
"parameters": [
{
"type": "string",
@@ -527,7 +575,7 @@
"tags": [
"Items"
],
"summary": "updates a item",
"summary": "Update Item",
"parameters": [
{
"type": "string",
@@ -567,7 +615,7 @@
"tags": [
"Items"
],
"summary": "deletes a item",
"summary": "Delete Item",
"parameters": [
{
"type": "string",
@@ -597,7 +645,7 @@
"tags": [
"Items Attachments"
],
"summary": "imports items into the database",
"summary": "Create Item Attachment",
"parameters": [
{
"type": "string",
@@ -657,7 +705,7 @@
"tags": [
"Items Attachments"
],
"summary": "retrieves an attachment for an item",
"summary": "Get Item Attachment",
"parameters": [
{
"type": "string",
@@ -692,7 +740,7 @@
"tags": [
"Items Attachments"
],
"summary": "retrieves an attachment for an item",
"summary": "Update Item Attachment",
"parameters": [
{
"type": "string",
@@ -736,7 +784,7 @@
"tags": [
"Items Attachments"
],
"summary": "retrieves an attachment for an item",
"summary": "Delete Item Attachment",
"parameters": [
{
"type": "string",
@@ -921,7 +969,7 @@
"tags": [
"Labels"
],
"summary": "Create a new label",
"summary": "Create Label",
"parameters": [
{
"description": "Label Data",
@@ -956,7 +1004,7 @@
"tags": [
"Labels"
],
"summary": "Gets a label and fields",
"summary": "Get Label",
"parameters": [
{
"type": "string",
@@ -987,7 +1035,7 @@
"tags": [
"Labels"
],
"summary": "updates a label",
"summary": "Update Label",
"parameters": [
{
"type": "string",
@@ -1018,7 +1066,7 @@
"tags": [
"Labels"
],
"summary": "deletes a label",
"summary": "Delete Label",
"parameters": [
{
"type": "string",
@@ -1093,7 +1141,7 @@
"tags": [
"Locations"
],
"summary": "Create a new location",
"summary": "Create Location",
"parameters": [
{
"description": "Location Data",
@@ -1128,7 +1176,7 @@
"tags": [
"Locations"
],
"summary": "Get All Locations",
"summary": "Get Locations Tree",
"parameters": [
{
"type": "boolean",
@@ -1175,7 +1223,7 @@
"tags": [
"Locations"
],
"summary": "Gets a location and fields",
"summary": "Get Location",
"parameters": [
{
"type": "string",
@@ -1206,7 +1254,7 @@
"tags": [
"Locations"
],
"summary": "updates a location",
"summary": "Update Location",
"parameters": [
{
"type": "string",
@@ -1246,7 +1294,7 @@
"tags": [
"Locations"
],
"summary": "deletes a location",
"summary": "Delete Location",
"parameters": [
{
"type": "string",
@@ -1263,6 +1311,179 @@
}
}
},
"/v1/notifiers": {
"get": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Notifiers"
],
"summary": "Get Notifiers",
"responses": {
"200": {
"description": "OK",
"schema": {
"allOf": [
{
"$ref": "#/definitions/server.Results"
},
{
"type": "object",
"properties": {
"items": {
"type": "array",
"items": {
"$ref": "#/definitions/repo.NotifierOut"
}
}
}
}
]
}
}
}
},
"post": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Notifiers"
],
"summary": "Create Notifier",
"parameters": [
{
"description": "Notifier Data",
"name": "payload",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/repo.NotifierCreate"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/repo.NotifierOut"
}
}
}
}
},
"/v1/notifiers/test": {
"post": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Notifiers"
],
"summary": "Test Notifier",
"parameters": [
{
"type": "string",
"description": "Notifier ID",
"name": "id",
"in": "path",
"required": true
},
{
"type": "string",
"description": "URL",
"name": "url",
"in": "query",
"required": true
}
],
"responses": {
"204": {
"description": "No Content"
}
}
}
},
"/v1/notifiers/{id}": {
"put": {
"security": [
{
"Bearer": []
}
],
"tags": [
"Notifiers"
],
"summary": "Update Notifier",
"parameters": [
{
"type": "string",
"description": "Notifier ID",
"name": "id",
"in": "path",
"required": true
},
{
"description": "Notifier Data",
"name": "payload",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/repo.NotifierUpdate"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/repo.NotifierOut"
}
}
}
},
"delete": {
"security": [
{
"Bearer": []
}
],
"tags": [
"Notifiers"
],
"summary": "Delete a Notifier",
"parameters": [
{
"type": "string",
"description": "Notifier ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"204": {
"description": "No Content"
}
}
}
},
"/v1/qrcode": {
"get": {
"security": [
@@ -1276,7 +1497,7 @@
"tags": [
"Items"
],
"summary": "Encode data into QRCode",
"summary": "Create QR Code",
"parameters": [
{
"type": "string",
@@ -1308,7 +1529,7 @@
"tags": [
"Reporting"
],
"summary": "Generates a Bill of Materials CSV",
"summary": "Export Bill of Materials",
"responses": {
"200": {
"description": "text/csv",
@@ -1327,7 +1548,7 @@
"tags": [
"Base"
],
"summary": "Retrieves the basic information about the API",
"summary": "Application Info",
"responses": {
"200": {
"description": "OK",
@@ -1348,7 +1569,7 @@
"tags": [
"User"
],
"summary": "Updates the users password",
"summary": "Change Password",
"parameters": [
{
"description": "Password Payload",
@@ -1451,7 +1672,7 @@
"tags": [
"User"
],
"summary": "Get the current user",
"summary": "Register New User",
"parameters": [
{
"description": "User Data",
@@ -1483,7 +1704,7 @@
"tags": [
"User"
],
"summary": "Get the current user",
"summary": "Get User Self",
"responses": {
"200": {
"description": "OK",
@@ -1517,7 +1738,7 @@
"tags": [
"User"
],
"summary": "Update the current user",
"summary": "Update Account",
"parameters": [
{
"description": "User Data",
@@ -1562,7 +1783,7 @@
"tags": [
"User"
],
"summary": "Deletes the user account",
"summary": "Delete Account",
"responses": {
"204": {
"description": "No Content"
@@ -1714,9 +1935,6 @@
"textValue": {
"type": "string"
},
"timeValue": {
"type": "string"
},
"type": {
"type": "string"
}
@@ -2154,13 +2372,14 @@
"repo.MaintenanceEntry": {
"type": "object",
"properties": {
"completedDate": {
"description": "Sold",
"type": "string"
},
"cost": {
"type": "string",
"example": "0"
},
"date": {
"type": "string"
},
"description": {
"type": "string"
},
@@ -2169,42 +2388,56 @@
},
"name": {
"type": "string"
},
"scheduledDate": {
"description": "Sold",
"type": "string"
}
}
},
"repo.MaintenanceEntryCreate": {
"type": "object",
"properties": {
"completedDate": {
"description": "Sold",
"type": "string"
},
"cost": {
"type": "string",
"example": "0"
},
"date": {
"type": "string"
},
"description": {
"type": "string"
},
"name": {
"type": "string"
},
"scheduledDate": {
"description": "Sold",
"type": "string"
}
}
},
"repo.MaintenanceEntryUpdate": {
"type": "object",
"properties": {
"completedDate": {
"description": "Sold",
"type": "string"
},
"cost": {
"type": "string",
"example": "0"
},
"date": {
"type": "string"
},
"description": {
"type": "string"
},
"name": {
"type": "string"
},
"scheduledDate": {
"description": "Sold",
"type": "string"
}
}
},
@@ -2228,6 +2461,72 @@
}
}
},
"repo.NotifierCreate": {
"type": "object",
"required": [
"name",
"url"
],
"properties": {
"isActive": {
"type": "boolean"
},
"name": {
"type": "string",
"maxLength": 255,
"minLength": 1
},
"url": {
"type": "string"
}
}
},
"repo.NotifierOut": {
"type": "object",
"properties": {
"createdAt": {
"type": "string"
},
"groupId": {
"type": "string"
},
"id": {
"type": "string"
},
"isActive": {
"type": "boolean"
},
"name": {
"type": "string"
},
"updatedAt": {
"type": "string"
},
"userId": {
"type": "string"
}
}
},
"repo.NotifierUpdate": {
"type": "object",
"required": [
"name"
],
"properties": {
"isActive": {
"type": "boolean"
},
"name": {
"type": "string",
"maxLength": 255,
"minLength": 1
},
"url": {
"type": "string",
"x-nullable": true
}
}
},
"repo.PaginationResult-repo_ItemSummary": {
"type": "object",
"properties": {
@@ -2417,6 +2716,9 @@
"v1.ApiSummary": {
"type": "object",
"properties": {
"allowRegistration": {
"type": "boolean"
},
"build": {
"$ref": "#/definitions/v1.Build"
},

View File

@@ -93,8 +93,6 @@ definitions:
type: integer
textValue:
type: string
timeValue:
type: string
type:
type: string
type: object
@@ -392,41 +390,53 @@ definitions:
type: object
repo.MaintenanceEntry:
properties:
completedDate:
description: Sold
type: string
cost:
example: "0"
type: string
date:
type: string
description:
type: string
id:
type: string
name:
type: string
scheduledDate:
description: Sold
type: string
type: object
repo.MaintenanceEntryCreate:
properties:
completedDate:
description: Sold
type: string
cost:
example: "0"
type: string
date:
type: string
description:
type: string
name:
type: string
scheduledDate:
description: Sold
type: string
type: object
repo.MaintenanceEntryUpdate:
properties:
completedDate:
description: Sold
type: string
cost:
example: "0"
type: string
date:
type: string
description:
type: string
name:
type: string
scheduledDate:
description: Sold
type: string
type: object
repo.MaintenanceLog:
properties:
@@ -441,6 +451,51 @@ definitions:
itemId:
type: string
type: object
repo.NotifierCreate:
properties:
isActive:
type: boolean
name:
maxLength: 255
minLength: 1
type: string
url:
type: string
required:
- name
- url
type: object
repo.NotifierOut:
properties:
createdAt:
type: string
groupId:
type: string
id:
type: string
isActive:
type: boolean
name:
type: string
updatedAt:
type: string
userId:
type: string
type: object
repo.NotifierUpdate:
properties:
isActive:
type: boolean
name:
maxLength: 255
minLength: 1
type: string
url:
type: string
x-nullable: true
required:
- name
type: object
repo.PaginationResult-repo_ItemSummary:
properties:
items:
@@ -564,6 +619,8 @@ definitions:
type: object
v1.ApiSummary:
properties:
allowRegistration:
type: boolean
build:
$ref: '#/definitions/v1.Build'
demo:
@@ -628,16 +685,15 @@ definitions:
info:
contact:
name: Don't
description: This is a simple Rest API Server Template that implements some basic
User and Authentication patterns to help you get started and bootstrap your next
project!.
description: Track, Manage, and Organize your Shit.
license:
name: MIT
title: Go API Templates
title: Homebox API
version: "1.0"
paths:
/v1/actions/ensure-asset-ids:
post:
description: Ensures all items in the database have an asset ID
produces:
- application/json
responses:
@@ -647,11 +703,27 @@ paths:
$ref: '#/definitions/v1.ActionAmountResult'
security:
- Bearer: []
summary: Ensures all items in the database have an asset id
summary: Ensure Asset IDs
tags:
- Group
- Actions
/v1/actions/ensure-import-refs:
post:
description: Ensures all items in the database have an import ref
produces:
- application/json
responses:
"200":
description: OK
schema:
$ref: '#/definitions/v1.ActionAmountResult'
security:
- Bearer: []
summary: Ensures Import Refs
tags:
- Actions
/v1/actions/zero-item-time-fields:
post:
description: Resets all item date fields to the beginning of the day
produces:
- application/json
responses:
@@ -661,9 +733,9 @@ paths:
$ref: '#/definitions/v1.ActionAmountResult'
security:
- Bearer: []
summary: Resets all item date fields to the beginning of the day
summary: Zero Out Time Fields
tags:
- Group
- Actions
/v1/assets/{id}:
get:
parameters:
@@ -681,9 +753,9 @@ paths:
$ref: '#/definitions/repo.PaginationResult-repo_ItemSummary'
security:
- Bearer: []
summary: Gets an item by Asset ID
summary: Get Item by Asset ID
tags:
- Assets
- Items
/v1/groups:
get:
produces:
@@ -695,7 +767,7 @@ paths:
$ref: '#/definitions/repo.Group'
security:
- Bearer: []
summary: Get the current user's group
summary: Get Group
tags:
- Group
put:
@@ -715,7 +787,7 @@ paths:
$ref: '#/definitions/repo.Group'
security:
- Bearer: []
summary: Updates some fields of the current users group
summary: Update Group
tags:
- Group
/v1/groups/invitations:
@@ -736,7 +808,7 @@ paths:
$ref: '#/definitions/v1.GroupInvitation'
security:
- Bearer: []
summary: Get the current user
summary: Create Group Invitation
tags:
- Group
/v1/groups/statistics:
@@ -750,7 +822,7 @@ paths:
$ref: '#/definitions/repo.GroupStatistics'
security:
- Bearer: []
summary: Get the current user's group statistics
summary: Get Group Statistics
tags:
- Statistics
/v1/groups/statistics/labels:
@@ -766,7 +838,7 @@ paths:
type: array
security:
- Bearer: []
summary: Get the current user's group statistics
summary: Get Label Statistics
tags:
- Statistics
/v1/groups/statistics/locations:
@@ -782,7 +854,7 @@ paths:
type: array
security:
- Bearer: []
summary: Get the current user's group statistics
summary: Get Location Statistics
tags:
- Statistics
/v1/groups/statistics/purchase-price:
@@ -805,7 +877,7 @@ paths:
$ref: '#/definitions/repo.ValueOverTime'
security:
- Bearer: []
summary: Queries the changes overtime of the purchase price over time
summary: Get Purchase Price Statistics
tags:
- Statistics
/v1/items:
@@ -846,7 +918,7 @@ paths:
$ref: '#/definitions/repo.PaginationResult-repo_ItemSummary'
security:
- Bearer: []
summary: Get All Items
summary: Query All Items
tags:
- Items
post:
@@ -866,7 +938,7 @@ paths:
$ref: '#/definitions/repo.ItemSummary'
security:
- Bearer: []
summary: Create a new item
summary: Create Item
tags:
- Items
/v1/items/{id}:
@@ -884,7 +956,7 @@ paths:
description: No Content
security:
- Bearer: []
summary: deletes a item
summary: Delete Item
tags:
- Items
get:
@@ -903,7 +975,7 @@ paths:
$ref: '#/definitions/repo.ItemOut'
security:
- Bearer: []
summary: Gets a item and fields
summary: Get Item
tags:
- Items
put:
@@ -928,7 +1000,7 @@ paths:
$ref: '#/definitions/repo.ItemOut'
security:
- Bearer: []
summary: updates a item
summary: Update Item
tags:
- Items
/v1/items/{id}/attachments:
@@ -967,7 +1039,7 @@ paths:
$ref: '#/definitions/server.ErrorResponse'
security:
- Bearer: []
summary: imports items into the database
summary: Create Item Attachment
tags:
- Items Attachments
/v1/items/{id}/attachments/{attachment_id}:
@@ -988,7 +1060,7 @@ paths:
description: No Content
security:
- Bearer: []
summary: retrieves an attachment for an item
summary: Delete Item Attachment
tags:
- Items Attachments
get:
@@ -1012,7 +1084,7 @@ paths:
$ref: '#/definitions/v1.ItemAttachmentToken'
security:
- Bearer: []
summary: retrieves an attachment for an item
summary: Get Item Attachment
tags:
- Items Attachments
put:
@@ -1040,7 +1112,7 @@ paths:
$ref: '#/definitions/repo.ItemOut'
security:
- Bearer: []
summary: retrieves an attachment for an item
summary: Update Item Attachment
tags:
- Items Attachments
/v1/items/{id}/maintenance:
@@ -1109,6 +1181,18 @@ paths:
summary: Update Maintenance Entry
tags:
- Maintenance
/v1/items/export:
get:
responses:
"200":
description: text/csv
schema:
type: string
security:
- Bearer: []
summary: Export Items
tags:
- Items
/v1/items/fields:
get:
produces:
@@ -1122,7 +1206,7 @@ paths:
type: array
security:
- Bearer: []
summary: imports items into the database
summary: Get All Custom Field Names
tags:
- Items
/v1/items/fields/values:
@@ -1138,7 +1222,7 @@ paths:
type: array
security:
- Bearer: []
summary: imports items into the database
summary: Get All Custom Field Values
tags:
- Items
/v1/items/import:
@@ -1156,7 +1240,7 @@ paths:
description: No Content
security:
- Bearer: []
summary: imports items into the database
summary: Import Items
tags:
- Items
/v1/labels:
@@ -1197,7 +1281,7 @@ paths:
$ref: '#/definitions/repo.LabelSummary'
security:
- Bearer: []
summary: Create a new label
summary: Create Label
tags:
- Labels
/v1/labels/{id}:
@@ -1215,7 +1299,7 @@ paths:
description: No Content
security:
- Bearer: []
summary: deletes a label
summary: Delete Label
tags:
- Labels
get:
@@ -1234,7 +1318,7 @@ paths:
$ref: '#/definitions/repo.LabelOut'
security:
- Bearer: []
summary: Gets a label and fields
summary: Get Label
tags:
- Labels
put:
@@ -1253,7 +1337,7 @@ paths:
$ref: '#/definitions/repo.LabelOut'
security:
- Bearer: []
summary: updates a label
summary: Update Label
tags:
- Labels
/v1/locations:
@@ -1299,7 +1383,7 @@ paths:
$ref: '#/definitions/repo.LocationSummary'
security:
- Bearer: []
summary: Create a new location
summary: Create Location
tags:
- Locations
/v1/locations/{id}:
@@ -1317,7 +1401,7 @@ paths:
description: No Content
security:
- Bearer: []
summary: deletes a location
summary: Delete Location
tags:
- Locations
get:
@@ -1336,7 +1420,7 @@ paths:
$ref: '#/definitions/repo.LocationOut'
security:
- Bearer: []
summary: Gets a location and fields
summary: Get Location
tags:
- Locations
put:
@@ -1361,7 +1445,7 @@ paths:
$ref: '#/definitions/repo.LocationOut'
security:
- Bearer: []
summary: updates a location
summary: Update Location
tags:
- Locations
/v1/locations/tree:
@@ -1387,9 +1471,112 @@ paths:
type: object
security:
- Bearer: []
summary: Get All Locations
summary: Get Locations Tree
tags:
- Locations
/v1/notifiers:
get:
produces:
- application/json
responses:
"200":
description: OK
schema:
allOf:
- $ref: '#/definitions/server.Results'
- properties:
items:
items:
$ref: '#/definitions/repo.NotifierOut'
type: array
type: object
security:
- Bearer: []
summary: Get Notifiers
tags:
- Notifiers
post:
parameters:
- description: Notifier Data
in: body
name: payload
required: true
schema:
$ref: '#/definitions/repo.NotifierCreate'
produces:
- application/json
responses:
"200":
description: OK
schema:
$ref: '#/definitions/repo.NotifierOut'
security:
- Bearer: []
summary: Create Notifier
tags:
- Notifiers
/v1/notifiers/{id}:
delete:
parameters:
- description: Notifier ID
in: path
name: id
required: true
type: string
responses:
"204":
description: No Content
security:
- Bearer: []
summary: Delete a Notifier
tags:
- Notifiers
put:
parameters:
- description: Notifier ID
in: path
name: id
required: true
type: string
- description: Notifier Data
in: body
name: payload
required: true
schema:
$ref: '#/definitions/repo.NotifierUpdate'
responses:
"200":
description: OK
schema:
$ref: '#/definitions/repo.NotifierOut'
security:
- Bearer: []
summary: Update Notifier
tags:
- Notifiers
/v1/notifiers/test:
post:
parameters:
- description: Notifier ID
in: path
name: id
required: true
type: string
- description: URL
in: query
name: url
required: true
type: string
produces:
- application/json
responses:
"204":
description: No Content
security:
- Bearer: []
summary: Test Notifier
tags:
- Notifiers
/v1/qrcode:
get:
parameters:
@@ -1406,7 +1593,7 @@ paths:
type: string
security:
- Bearer: []
summary: Encode data into QRCode
summary: Create QR Code
tags:
- Items
/v1/reporting/bill-of-materials:
@@ -1420,7 +1607,7 @@ paths:
type: string
security:
- Bearer: []
summary: Generates a Bill of Materials CSV
summary: Export Bill of Materials
tags:
- Reporting
/v1/status:
@@ -1432,7 +1619,7 @@ paths:
description: OK
schema:
$ref: '#/definitions/v1.ApiSummary'
summary: Retrieves the basic information about the API
summary: Application Info
tags:
- Base
/v1/users/change-password:
@@ -1449,7 +1636,7 @@ paths:
description: No Content
security:
- Bearer: []
summary: Updates the users password
summary: Change Password
tags:
- User
/v1/users/login:
@@ -1515,7 +1702,7 @@ paths:
responses:
"204":
description: No Content
summary: Get the current user
summary: Register New User
tags:
- User
/v1/users/self:
@@ -1527,7 +1714,7 @@ paths:
description: No Content
security:
- Bearer: []
summary: Deletes the user account
summary: Delete Account
tags:
- User
get:
@@ -1545,7 +1732,7 @@ paths:
type: object
security:
- Bearer: []
summary: Get the current user
summary: Get User Self
tags:
- User
put:
@@ -1570,7 +1757,7 @@ paths:
type: object
security:
- Bearer: []
summary: Update the current user
summary: Update Account
tags:
- User
securityDefinitions:

View File

@@ -4,20 +4,22 @@ go 1.19
require (
ariga.io/atlas v0.9.1-0.20230119145809-92243f7c55cb
entgo.io/ent v0.11.7
github.com/ardanlabs/conf/v3 v3.1.3
entgo.io/ent v0.11.8
github.com/ardanlabs/conf/v3 v3.1.4
github.com/containrrr/shoutrrr v0.7.1
github.com/go-chi/chi/v5 v5.0.8
github.com/go-playground/validator/v10 v10.11.2
github.com/gocarina/gocsv v0.0.0-20230123225133-763e25b40669
github.com/gocarina/gocsv v0.0.0-20230219202803-bcce7dc8d0bb
github.com/google/uuid v1.3.0
github.com/gorilla/schema v1.2.0
github.com/mattn/go-sqlite3 v1.14.16
github.com/rs/zerolog v1.29.0
github.com/stretchr/testify v1.8.1
github.com/stretchr/testify v1.8.2
github.com/swaggo/http-swagger v1.3.3
github.com/swaggo/swag v1.8.10
github.com/yeqown/go-qrcode/v2 v2.2.1
github.com/yeqown/go-qrcode/writer/standard v1.2.1
golang.org/x/crypto v0.6.0
golang.org/x/crypto v0.7.0
)
require (
@@ -25,6 +27,7 @@ require (
github.com/agext/levenshtein v1.2.3 // indirect
github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/fatih/color v1.13.0 // indirect
github.com/fogleman/gg v1.3.0 // indirect
github.com/go-openapi/inflect v0.19.0 // indirect
github.com/go-openapi/jsonpointer v0.19.5 // indirect
@@ -48,10 +51,10 @@ require (
github.com/yeqown/reedsolomon v1.0.0 // indirect
github.com/zclconf/go-cty v1.12.1 // indirect
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5 // indirect
golang.org/x/mod v0.7.0 // indirect
golang.org/x/net v0.6.0 // indirect
golang.org/x/sys v0.5.0 // indirect
golang.org/x/text v0.7.0 // indirect
golang.org/x/tools v0.4.0 // indirect
golang.org/x/mod v0.8.0 // indirect
golang.org/x/net v0.8.0 // indirect
golang.org/x/sys v0.6.0 // indirect
golang.org/x/text v0.8.0 // indirect
golang.org/x/tools v0.6.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

File diff suppressed because it is too large

View File

@@ -1,16 +1,13 @@
package services
import (
"github.com/hay-kot/homebox/backend/internal/core/services/reporting"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/rs/zerolog/log"
)
type AllServices struct {
User *UserService
Group *GroupService
Items *ItemService
Reporting *reporting.ReportingService
User *UserService
Group *GroupService
Items *ItemService
}
type OptionsFunc func(*options)
@@ -45,7 +42,5 @@ func New(repos *repo.AllRepos, opts ...OptionsFunc) *AllServices {
repo: repos,
autoIncrementAssetID: options.autoIncrementAssetID,
},
// TODO: don't use global logger
Reporting: reporting.NewReportingService(repos, &log.Logger),
}
}

View File

@@ -0,0 +1,5 @@
HB.location,HB.name,HB.quantity,HB.description,HB.field.Custom Field 1,HB.field.Custom Field 2,HB.field.Custom Field 3
loc,Item 1,1,Description 1,Value 1[1],Value 1[2],Value 1[3]
loc,Item 2,2,Description 2,Value 2[1],Value 2[2],Value 2[3]
loc,Item 3,3,Description 3,Value 3[1],Value 3[2],Value 3[3]

View File

@@ -0,0 +1,4 @@
HB.location,HB.name,HB.quantity,HB.description
loc,Item 1,1,Description 1
loc,Item 2,2,Description 2
loc,Item 3,3,Description 3

View File

@@ -0,0 +1,4 @@
HB.name,HB.asset_id,HB.location,HB.labels
Item 1,1,Path / To / Location 1,L1 ; L2 ; L3
Item 2,000-002,Path /To/ Location 2,L1;L2;L3
Item 3,1000-003,Path / To /Location 3 , L1;L2; L3

View File

@@ -0,0 +1,42 @@
package reporting
import (
"github.com/gocarina/gocsv"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/types"
)
// =================================================================================================
type BillOfMaterialsEntry struct {
PurchaseDate types.Date `csv:"Purchase Date"`
Name string `csv:"Name"`
Description string `csv:"Description"`
Manufacturer string `csv:"Manufacturer"`
SerialNumber string `csv:"Serial Number"`
ModelNumber string `csv:"Model Number"`
Quantity int `csv:"Quantity"`
Price float64 `csv:"Price"`
TotalPrice float64 `csv:"Total Price"`
}
// BillOfMaterialsTSV returns a byte slice of the Bill of Materials for the given items
// See BillOfMaterialsEntry for the format of the output
func BillOfMaterialsTSV(entities []repo.ItemOut) ([]byte, error) {
bomEntries := make([]BillOfMaterialsEntry, len(entities))
for i, entity := range entities {
bomEntries[i] = BillOfMaterialsEntry{
PurchaseDate: entity.PurchaseTime,
Name: entity.Name,
Description: entity.Description,
Manufacturer: entity.Manufacturer,
SerialNumber: entity.SerialNumber,
ModelNumber: entity.ModelNumber,
Quantity: entity.Quantity,
Price: entity.PurchasePrice,
TotalPrice: entity.PurchasePrice * float64(entity.Quantity),
}
}
return gocsv.MarshalBytes(&bomEntries)
}
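A minimal sketch of how a handler might serve this export, assuming the items were already fetched for the caller's group and the usual net/http, repo, and reporting imports; the function name and header choice are illustrative, not the controller code from this changeset:

// writeBOM encodes the Bill of Materials and writes it out as a CSV download.
func writeBOM(w http.ResponseWriter, items []repo.ItemOut) error {
	body, err := reporting.BillOfMaterialsTSV(items)
	if err != nil {
		return err
	}
	w.Header().Set("Content-Type", "text/csv")
	_, err = w.Write(body)
	return err
}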

View File

@@ -0,0 +1,93 @@
package reporting
import (
"bytes"
"encoding/csv"
"errors"
"io"
"strings"
)
var (
ErrNoHomeboxHeaders = errors.New("no headers found")
ErrMissingRequiredHeaders = errors.New("missing required headers `HB.location` or `HB.name`")
)
// determineSeparator determines the separator used in the CSV file
// It returns the separator as a rune and an error if it could not be determined
//
// It is assumed that the first row is the header row and that the separator is the same
// for all rows.
//
// Supported separators are `,` and `\t`
func determineSeparator(data []byte) (rune, error) {
// First row
firstRow := bytes.Split(data, []byte("\n"))[0]
// find first comma or /t
comma := bytes.IndexByte(firstRow, ',')
tab := bytes.IndexByte(firstRow, '\t')
switch {
case comma == -1 && tab == -1:
return 0, errors.New("could not determine separator")
case tab > comma:
return '\t', nil
default:
return ',', nil
}
}
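// For illustration only (inputs made up, not taken from the test fixtures),
// the first row decides the delimiter:
//
//	sep, _ := determineSeparator([]byte("HB.name,HB.location\nItem 1,loc"))  // ','
//	sep, _ = determineSeparator([]byte("HB.name\tHB.location\nItem 1\tloc")) // '\t'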
// readRawCsv reads a CSV file and returns the raw data as a 2D string array
// It determines the separator used in the CSV file and returns an error if
// it could not be determined
func readRawCsv(r io.Reader) ([][]string, error) {
data, err := io.ReadAll(r)
if err != nil {
return nil, err
}
reader := csv.NewReader(bytes.NewReader(data))
// Determine separator
sep, err := determineSeparator(data)
if err != nil {
return nil, err
}
reader.Comma = sep
return reader.ReadAll()
}
// parseHeaders parses the homebox headers from the CSV file and returns a map of the headers
// and their column index as well as a list of the field headers (HB.field.*) in the order
// they appear in the CSV file
//
// It returns an error if no homebox headers are found
func parseHeaders(headers []string) (hbHeaders map[string]int, fieldHeaders []string, err error) {
hbHeaders = map[string]int{} // initialize map
for col, h := range headers {
if strings.HasPrefix(h, "HB.field.") {
fieldHeaders = append(fieldHeaders, h)
}
if strings.HasPrefix(h, "HB.") {
hbHeaders[h] = col
}
}
required := []string{"HB.location", "HB.name"}
for _, h := range required {
if _, ok := hbHeaders[h]; !ok {
return nil, nil, ErrMissingRequiredHeaders
}
}
if len(hbHeaders) == 0 {
return nil, nil, ErrNoHomeboxHeaders
}
return hbHeaders, fieldHeaders, nil
}
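A small sketch of what parseHeaders accepts and returns, written as if it lived inside this package; the header names below are invented for illustration:

func exampleParseHeaders() {
	headers := []string{"HB.location", "HB.name", "HB.field.Color", "Ignored"}
	hb, fields, err := parseHeaders(headers)
	// hb     -> map[HB.field.Color:2 HB.location:0 HB.name:1]
	// fields -> [HB.field.Color]
	// err    -> nil (both required headers are present)
	_, _, _ = hb, fields, err
}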

View File

@@ -0,0 +1,85 @@
package reporting
import (
"strings"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/types"
)
type ExportItemFields struct {
Name string
Value string
}
type ExportTSVRow struct {
ImportRef string `csv:"HB.import_ref"`
Location LocationString `csv:"HB.location"`
LabelStr LabelString `csv:"HB.labels"`
AssetID repo.AssetID `csv:"HB.asset_id"`
Archived bool `csv:"HB.archived"`
Name string `csv:"HB.name"`
Quantity int `csv:"HB.quantity"`
Description string `csv:"HB.description"`
Insured bool `csv:"HB.insured"`
Notes string `csv:"HB.notes"`
PurchasePrice float64 `csv:"HB.purchase_price"`
PurchaseFrom string `csv:"HB.purchase_from"`
PurchaseTime types.Date `csv:"HB.purchase_time"`
Manufacturer string `csv:"HB.manufacturer"`
ModelNumber string `csv:"HB.model_number"`
SerialNumber string `csv:"HB.serial_number"`
LifetimeWarranty bool `csv:"HB.lifetime_warranty"`
WarrantyExpires types.Date `csv:"HB.warranty_expires"`
WarrantyDetails string `csv:"HB.warranty_details"`
SoldTo string `csv:"HB.sold_to"`
SoldPrice float64 `csv:"HB.sold_price"`
SoldTime types.Date `csv:"HB.sold_time"`
SoldNotes string `csv:"HB.sold_notes"`
Fields []ExportItemFields `csv:"-"`
}
// ============================================================================
// LabelString is a string slice that is used to represent a list of labels.
//
// For example, a list of labels "Important; Work" would be represented as a
// LabelString with the following values:
//
// LabelString{"Important", "Work"}
type LabelString []string
func parseLabelString(s string) LabelString {
v, _ := parseSeparatedString(s, ";")
return v
}
func (ls LabelString) String() string {
return strings.Join(ls, "; ")
}
// ============================================================================
// LocationString is a string slice that is used to represent a location
// hierarchy.
//
// For example, a location hierarchy of "Home / Bedroom / Desk" would be
// represented as a LocationString with the following values:
//
// LocationString{"Home", "Bedroom", "Desk"}
type LocationString []string
func parseLocationString(s string) LocationString {
v, _ := parseSeparatedString(s, "/")
return v
}
func (csf LocationString) String() string {
return strings.Join(csf, " / ")
}
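A quick sketch of the two parsers on values shaped like the import fixtures; exact whitespace trimming depends on parseSeparatedString, which lives elsewhere in this package and is not shown in this diff:

func exampleStrings() {
	labels := parseLabelString("L1;L2;L3")                   // LabelString{"L1", "L2", "L3"}
	location := parseLocationString("Home / Bedroom / Desk") // assuming trimming: LocationString{"Home", "Bedroom", "Desk"}
	_ = labels.String()   // "L1; L2; L3"
	_ = location.String() // "Home / Bedroom / Desk"
}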

View File

@@ -0,0 +1,310 @@
package reporting
import (
"fmt"
"io"
"reflect"
"sort"
"strconv"
"strings"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/types"
"github.com/rs/zerolog/log"
)
// IOSheet is the representation of a CSV/TSV sheet that is used for importing/exporting
// items from homebox. It is used to read/write the data from/to a CSV/TSV file given
// the standard format of the file.
//
// See ExportTSVRow for the format of the data in the sheet.
type IOSheet struct {
headers []string
custom []int
index map[string]int
Rows []ExportTSVRow
}
func (s *IOSheet) indexHeaders() {
s.index = make(map[string]int)
for i, h := range s.headers {
if strings.HasPrefix(h, "HB.field") {
s.custom = append(s.custom, i)
}
if strings.HasPrefix(h, "HB.") {
s.index[h] = i
}
}
}
func (s *IOSheet) GetColumn(str string) (col int, ok bool) {
if s.index == nil {
s.indexHeaders()
}
col, ok = s.index[str]
return
}
// Read reads a CSV/TSV and populates the "Rows" field with the data from the sheet
// Custom Fields are supported via the `HB.field.*` headers; the field's "Name" is the part
// after the `HB.field.` prefix. Custom Fields with no value (including empty strings) are
// excluded from the row.Fields slice.
//
// Note that:
// - the first row is assumed to be the header
// - at least 1 row of data is required
// - rows and columns must be rectangular (i.e. all rows must have the same number of columns)
func (s *IOSheet) Read(data io.Reader) error {
sheet, err := readRawCsv(data)
if err != nil {
return err
}
if len(sheet) < 2 {
return fmt.Errorf("sheet must have at least 1 row of data (header + 1)")
}
s.headers = sheet[0]
s.Rows = make([]ExportTSVRow, len(sheet)-1)
for i, row := range sheet[1:] {
if len(row) != len(s.headers) {
return fmt.Errorf("row has %d columns, expected %d", len(row), len(s.headers))
}
rowData := ExportTSVRow{}
st := reflect.TypeOf(ExportTSVRow{})
for i := 0; i < st.NumField(); i++ {
field := st.Field(i)
tag := field.Tag.Get("csv")
if tag == "" || tag == "-" {
continue
}
col, ok := s.GetColumn(tag)
if !ok {
continue
}
val := row[col]
var v interface{}
switch field.Type {
case reflect.TypeOf(""):
v = val
case reflect.TypeOf(int(0)):
v = parseInt(val)
case reflect.TypeOf(bool(false)):
v = parseBool(val)
case reflect.TypeOf(float64(0)):
v = parseFloat(val)
// Custom Types
case reflect.TypeOf(types.Date{}):
v = types.DateFromString(val)
case reflect.TypeOf(repo.AssetID(0)):
v, _ = repo.ParseAssetID(val)
case reflect.TypeOf(LocationString{}):
v = parseLocationString(val)
case reflect.TypeOf(LabelString{}):
v = parseLabelString(val)
}
log.Debug().
Str("tag", tag).
Interface("val", v).
Str("type", fmt.Sprintf("%T", v)).
Msg("parsed value")
// Nil values are not allowed at the moment. This may change.
if v == nil {
return fmt.Errorf("could not convert %q to %s", val, field.Type)
}
ptrField := reflect.ValueOf(&rowData).Elem().Field(i)
ptrField.Set(reflect.ValueOf(v))
}
for _, col := range s.custom {
colName := strings.TrimPrefix(s.headers[col], "HB.field.")
customVal := row[col]
if customVal == "" {
continue
}
rowData.Fields = append(rowData.Fields, ExportItemFields{
Name: colName,
Value: customVal,
})
}
s.Rows[i] = rowData
}
return nil
}
// ReadItems populates the sheet's Rows and headers from the given items,
// collecting any custom field names into sorted HB.field.* columns.
func (s *IOSheet) ReadItems(items []repo.ItemOut) {
s.Rows = make([]ExportTSVRow, len(items))
extraHeaders := map[string]struct{}{}
for i := range items {
item := items[i]
// TODO: Support fetching nested locations
locString := LocationString{item.Location.Name}
labelString := make([]string, len(item.Labels))
for i, l := range item.Labels {
labelString[i] = l.Name
}
customFields := make([]ExportItemFields, len(item.Fields))
for i, f := range item.Fields {
extraHeaders[f.Name] = struct{}{}
customFields[i] = ExportItemFields{
Name: f.Name,
Value: f.TextValue,
}
}
s.Rows[i] = ExportTSVRow{
// fill struct
Location: locString,
LabelStr: labelString,
ImportRef: item.ImportRef,
AssetID: item.AssetID,
Name: item.Name,
Quantity: item.Quantity,
Description: item.Description,
Insured: item.Insured,
Archived: item.Archived,
PurchasePrice: item.PurchasePrice,
PurchaseFrom: item.PurchaseFrom,
PurchaseTime: item.PurchaseTime,
Manufacturer: item.Manufacturer,
ModelNumber: item.ModelNumber,
SerialNumber: item.SerialNumber,
LifetimeWarranty: item.LifetimeWarranty,
WarrantyExpires: item.WarrantyExpires,
WarrantyDetails: item.WarrantyDetails,
SoldTo: item.SoldTo,
SoldTime: item.SoldTime,
SoldPrice: item.SoldPrice,
SoldNotes: item.SoldNotes,
Fields: customFields,
}
}
// Extract and sort additional headers for deterministic output
customHeaders := make([]string, 0, len(extraHeaders))
for k := range extraHeaders {
customHeaders = append(customHeaders, k)
}
sort.Strings(customHeaders)
st := reflect.TypeOf(ExportTSVRow{})
// Write headers
for i := 0; i < st.NumField(); i++ {
field := st.Field(i)
tag := field.Tag.Get("csv")
if tag == "" || tag == "-" {
continue
}
s.headers = append(s.headers, tag)
}
for _, h := range customHeaders {
s.headers = append(s.headers, "HB.field."+h)
}
}
// TSV returns the current sheet as a 2D string slice, header row first,
// ready to be written out as CSV/TSV.
func (s *IOSheet) TSV() ([][]string, error) {
memcsv := make([][]string, len(s.Rows)+1)
memcsv[0] = s.headers
// use struct tags in rows to determine column order
for i, row := range s.Rows {
rowIdx := i + 1
memcsv[rowIdx] = make([]string, len(s.headers))
st := reflect.TypeOf(row)
for i := 0; i < st.NumField(); i++ {
field := st.Field(i)
tag := field.Tag.Get("csv")
if tag == "" || tag == "-" {
continue
}
col, ok := s.GetColumn(tag)
if !ok {
continue
}
val := reflect.ValueOf(row).Field(i)
var v string
switch field.Type {
case reflect.TypeOf(""):
v = val.String()
case reflect.TypeOf(int(0)):
v = strconv.Itoa(int(val.Int()))
case reflect.TypeOf(bool(false)):
v = strconv.FormatBool(val.Bool())
case reflect.TypeOf(float64(0)):
v = strconv.FormatFloat(val.Float(), 'f', -1, 64)
// Custom Types
case reflect.TypeOf(types.Date{}):
v = val.Interface().(types.Date).String()
case reflect.TypeOf(repo.AssetID(0)):
v = val.Interface().(repo.AssetID).String()
case reflect.TypeOf(LocationString{}):
v = val.Interface().(LocationString).String()
case reflect.TypeOf(LabelString{}):
v = val.Interface().(LabelString).String()
default:
log.Debug().Str("type", field.Type.String()).Msg("unknown type")
}
memcsv[rowIdx][col] = v
}
for _, f := range row.Fields {
col, ok := s.GetColumn("HB.field." + f.Name)
if !ok {
continue
}
memcsv[i+1][col] = f.Value
}
}
return memcsv, nil
}
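// Editor's note (illustration, not part of the diff): a minimal sketch of using the
// IOSheet API above end to end — parse an uploaded CSV/TSV and re-serialize it as
// tab-separated records. Only Read, Rows, and TSV are taken from the code above;
// the writer handling is an assumption and presumes "encoding/csv" and "io" are imported.
func roundTripSketch(r io.Reader, w io.Writer) error {
	sheet := &IOSheet{}
	if err := sheet.Read(r); err != nil {
		return err
	}
	records, err := sheet.TSV() // first row is the header row
	if err != nil {
		return err
	}
	cw := csv.NewWriter(w)
	cw.Comma = '\t'
	return cw.WriteAll(records) // WriteAll flushes and returns any write error
}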

View File

@@ -0,0 +1,226 @@
package reporting
import (
"bytes"
"reflect"
"testing"
_ "embed"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/stretchr/testify/assert"
)
var (
//go:embed .testdata/import/minimal.csv
minimalImportCSV []byte
//go:embed .testdata/import/fields.csv
customFieldImportCSV []byte
//go:embed .testdata/import/types.csv
customTypesImportCSV []byte
//go:embed .testdata/import.csv
CSVData_Comma []byte
//go:embed .testdata/import.tsv
CSVData_Tab []byte
)
func TestSheet_Read(t *testing.T) {
tests := []struct {
name string
data []byte
want []ExportTSVRow
wantErr bool
}{
{
name: "minimal import",
data: minimalImportCSV,
want: []ExportTSVRow{
{Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1"},
{Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2"},
{Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3"},
},
},
{
name: "custom field import",
data: customFieldImportCSV,
want: []ExportTSVRow{
{
Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1",
Fields: []ExportItemFields{
{Name: "Custom Field 1", Value: "Value 1[1]"},
{Name: "Custom Field 2", Value: "Value 1[2]"},
{Name: "Custom Field 3", Value: "Value 1[3]"},
},
},
{
Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2",
Fields: []ExportItemFields{
{Name: "Custom Field 1", Value: "Value 2[1]"},
{Name: "Custom Field 2", Value: "Value 2[2]"},
{Name: "Custom Field 3", Value: "Value 2[3]"},
},
},
{
Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3",
Fields: []ExportItemFields{
{Name: "Custom Field 1", Value: "Value 3[1]"},
{Name: "Custom Field 2", Value: "Value 3[2]"},
{Name: "Custom Field 3", Value: "Value 3[3]"},
},
},
},
},
{
name: "custom types import",
data: customTypesImportCSV,
want: []ExportTSVRow{
{
Name: "Item 1",
AssetID: repo.AssetID(1),
Location: LocationString{"Path", "To", "Location 1"},
LabelStr: LabelString{"L1", "L2", "L3"},
},
{
Name: "Item 2",
AssetID: repo.AssetID(2),
Location: LocationString{"Path", "To", "Location 2"},
LabelStr: LabelString{"L1", "L2", "L3"},
},
{
Name: "Item 3",
AssetID: repo.AssetID(1000003),
Location: LocationString{"Path", "To", "Location 3"},
LabelStr: LabelString{"L1", "L2", "L3"},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
reader := bytes.NewReader(tt.data)
sheet := &IOSheet{}
err := sheet.Read(reader)
switch {
case tt.wantErr:
assert.Error(t, err)
default:
assert.NoError(t, err)
assert.ElementsMatch(t, tt.want, sheet.Rows)
}
})
}
}
func Test_parseHeaders(t *testing.T) {
tests := []struct {
name string
rawHeaders []string
wantHbHeaders map[string]int
wantFieldHeaders []string
wantErr bool
}{
{
name: "no homebox headers",
rawHeaders: []string{"Header 1", "Header 2", "Header 3"},
wantHbHeaders: nil,
wantFieldHeaders: nil,
wantErr: true,
},
{
name: "field headers only",
rawHeaders: []string{"HB.location", "HB.name", "HB.field.1", "HB.field.2", "HB.field.3"},
wantHbHeaders: map[string]int{
"HB.location": 0,
"HB.name": 1,
"HB.field.1": 2,
"HB.field.2": 3,
"HB.field.3": 4,
},
wantFieldHeaders: []string{"HB.field.1", "HB.field.2", "HB.field.3"},
wantErr: false,
},
{
name: "mixed headers",
rawHeaders: []string{"Header 1", "HB.name", "Header 2", "HB.field.2", "Header 3", "HB.field.3", "HB.location"},
wantHbHeaders: map[string]int{
"HB.name": 1,
"HB.field.2": 3,
"HB.field.3": 5,
"HB.location": 6,
},
wantFieldHeaders: []string{"HB.field.2", "HB.field.3"},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gotHbHeaders, gotFieldHeaders, err := parseHeaders(tt.rawHeaders)
if (err != nil) != tt.wantErr {
t.Errorf("parseHeaders() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !reflect.DeepEqual(gotHbHeaders, tt.wantHbHeaders) {
t.Errorf("parseHeaders() gotHbHeaders = %v, want %v", gotHbHeaders, tt.wantHbHeaders)
}
if !reflect.DeepEqual(gotFieldHeaders, tt.wantFieldHeaders) {
t.Errorf("parseHeaders() gotFieldHeaders = %v, want %v", gotFieldHeaders, tt.wantFieldHeaders)
}
})
}
}
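// Editor's note: parseHeaders itself is not shown in this diff. As an illustration only,
// an implementation consistent with the cases above could look like the sketch below.
// The name parseHeadersSketch is hypothetical and "strings"/"errors" are assumed imports;
// the repository's real function may differ.
func parseHeadersSketch(headers []string) (map[string]int, []string, error) {
	var hbHeaders map[string]int
	var fieldHeaders []string
	for col, h := range headers {
		if !strings.HasPrefix(h, "HB.") {
			continue
		}
		if hbHeaders == nil {
			hbHeaders = map[string]int{}
		}
		hbHeaders[h] = col // remember the column index of every HB.* header
		if strings.HasPrefix(h, "HB.field.") {
			fieldHeaders = append(fieldHeaders, h) // custom-field headers, in column order
		}
	}
	if hbHeaders == nil {
		return nil, nil, errors.New("no headers found with the HB. prefix")
	}
	return hbHeaders, fieldHeaders, nil
}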
func Test_determineSeparator(t *testing.T) {
type args struct {
data []byte
}
tests := []struct {
name string
args args
want rune
wantErr bool
}{
{
name: "comma",
args: args{
data: CSVData_Comma,
},
want: ',',
wantErr: false,
},
{
name: "tab",
args: args{
data: CSVData_Tab,
},
want: '\t',
wantErr: false,
},
{
name: "invalid",
args: args{
data: []byte("a;b;c"),
},
want: 0,
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := determineSeparator(tt.args.data)
if (err != nil) != tt.wantErr {
t.Errorf("determineSeparator() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Errorf("determineSeparator() = %v, want %v", got, tt.want)
}
})
}
}

View File

@@ -1,85 +0,0 @@
package reporting
import (
"context"
"encoding/csv"
"io"
"time"
"github.com/gocarina/gocsv"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/rs/zerolog"
)
type ReportingService struct {
repos *repo.AllRepos
l *zerolog.Logger
}
func NewReportingService(repos *repo.AllRepos, l *zerolog.Logger) *ReportingService {
gocsv.SetCSVWriter(func(out io.Writer) *gocsv.SafeCSVWriter {
writer := csv.NewWriter(out)
writer.Comma = '\t'
return gocsv.NewSafeCSVWriter(writer)
})
return &ReportingService{
repos: repos,
l: l,
}
}
// =================================================================================================
// NullableTime is a custom type that implements the MarshalCSV interface
// so that nullable time.Time fields are written as empty strings in the CSV
// output instead of "0001-01-01". It also overrides the default CSV output format (YYYY-MM-DD).
type NullableTime time.Time
func (t NullableTime) MarshalCSV() (string, error) {
if time.Time(t).IsZero() {
return "", nil
}
// YYYY-MM-DD
return time.Time(t).Format("2006-01-02"), nil
}
type BillOfMaterialsEntry struct {
PurchaseDate NullableTime `csv:"Purchase Date"`
Name string `csv:"Name"`
Description string `csv:"Description"`
Manufacturer string `csv:"Manufacturer"`
SerialNumber string `csv:"Serial Number"`
ModelNumber string `csv:"Model Number"`
Quantity int `csv:"Quantity"`
Price float64 `csv:"Price"`
TotalPrice float64 `csv:"Total Price"`
}
// BillOfMaterialsTSV returns a byte slice of the Bill of Materials for a given GID in TSV format
// See BillOfMaterialsEntry for the format of the output
func (rs *ReportingService) BillOfMaterialsTSV(ctx context.Context, GID uuid.UUID) ([]byte, error) {
entities, err := rs.repos.Items.GetAll(ctx, GID)
if err != nil {
rs.l.Debug().Err(err).Msg("failed to get all items for BOM Csv Reporting")
return nil, err
}
bomEntries := make([]BillOfMaterialsEntry, len(entities))
for i, entity := range entities {
bomEntries[i] = BillOfMaterialsEntry{
PurchaseDate: NullableTime(entity.PurchaseTime),
Name: entity.Name,
Description: entity.Description,
Manufacturer: entity.Manufacturer,
SerialNumber: entity.SerialNumber,
ModelNumber: entity.ModelNumber,
Quantity: entity.Quantity,
Price: entity.PurchasePrice,
TotalPrice: entity.PurchasePrice * float64(entity.Quantity),
}
}
return gocsv.MarshalBytes(&bomEntries)
}
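// Editor's note: although this file is removed in this change set, the NullableTime
// marshaler it defined is easy to illustrate. Sketch only; assumes "fmt" and "time"
// are imported alongside the types above.
func nullableTimeSketch() {
	empty, _ := NullableTime(time.Time{}).MarshalCSV()                               // zero value stays blank
	set, _ := NullableTime(time.Date(2023, 3, 5, 0, 0, 0, 0, time.UTC)).MarshalCSV() // "2023-03-05"
	fmt.Printf("%q %q\n", empty, set)
}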

View File

@@ -0,0 +1,38 @@
package reporting
import (
"strconv"
"strings"
)
func parseSeparatedString(s string, sep string) ([]string, error) {
list := strings.Split(s, sep)
csf := make([]string, 0, len(list))
for _, s := range list {
trimmed := strings.TrimSpace(s)
if trimmed != "" {
csf = append(csf, trimmed)
}
}
return csf, nil
}
func parseFloat(s string) float64 {
if s == "" {
return 0
}
f, _ := strconv.ParseFloat(s, 64)
return f
}
func parseBool(s string) bool {
b, _ := strconv.ParseBool(s)
return b
}
func parseInt(s string) int {
i, _ := strconv.Atoi(s)
return i
}
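// Editor's note (sketch, not part of the diff): the helpers above intentionally swallow
// parse errors and fall back to zero values, so a partially filled import row still loads.
// Note that strconv.ParseBool does not accept "yes", unlike the older services-package
// parseBool removed elsewhere in this change set. Assumes "fmt" is imported.
func lenientParseSketch() {
	fmt.Println(parseInt(""))       // 0
	fmt.Println(parseFloat("12.5")) // 12.5
	fmt.Println(parseBool("yes"))   // false
	fmt.Println(parseBool("1"))     // true
}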

View File

@@ -0,0 +1,65 @@
package reporting
import (
"reflect"
"testing"
)
func Test_parseSeparatedString(t *testing.T) {
type args struct {
s string
sep string
}
tests := []struct {
name string
args args
want []string
wantErr bool
}{
{
name: "comma",
args: args{
s: "a,b,c",
sep: ",",
},
want: []string{"a", "b", "c"},
wantErr: false,
},
{
name: "trimmed comma",
args: args{
s: "a, b, c",
sep: ",",
},
want: []string{"a", "b", "c"},
},
{
name: "excessive whitespace",
args: args{
s: " a, b, c ",
sep: ",",
},
want: []string{"a", "b", "c"},
},
{
name: "empty",
args: args{
s: "",
sep: ",",
},
want: []string{},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := parseSeparatedString(tt.args.s, tt.args.sep)
if (err != nil) != tt.wantErr {
t.Errorf("parseSeparatedString() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("parseSeparatedString() = %v, want %v", got, tt.want)
}
})
}
}

View File

@@ -3,10 +3,13 @@ package services
import (
"context"
"errors"
"fmt"
"io"
"strings"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services/reporting"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/rs/zerolog/log"
)
var (
@@ -37,7 +40,6 @@ func (svc *ItemService) Create(ctx Context, item repo.ItemCreate) (repo.ItemOut,
func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int, error) {
items, err := svc.repo.Items.GetAllZeroAssetID(ctx, GID)
if err != nil {
return 0, err
}
@@ -61,190 +63,290 @@ func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int,
return finished, nil
}
func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data [][]string) (int, error) {
loaded := []csvRow{}
// Skip first row
for _, row := range data[1:] {
// Skip empty rows
if len(row) == 0 {
continue
}
if len(row) != NumOfCols {
return 0, ErrInvalidCsv
}
r := newCsvRow(row)
loaded = append(loaded, r)
}
// validate rows
var errMap = map[int][]error{}
var hasErr bool
for i, r := range loaded {
errs := r.validate()
if len(errs) > 0 {
hasErr = true
lineNum := i + 2
errMap[lineNum] = errs
}
}
if hasErr {
for lineNum, errs := range errMap {
for _, err := range errs {
log.Error().Err(err).Int("line", lineNum).Msg("csv import error")
}
}
}
// Bootstrap the locations and labels so we can reuse the created IDs for the items
locations := map[string]uuid.UUID{}
existingLocation, err := svc.repo.Locations.GetAll(ctx, GID, repo.LocationQuery{})
func (svc *ItemService) EnsureImportRef(ctx context.Context, GID uuid.UUID) (int, error) {
ids, err := svc.repo.Items.GetAllZeroImportRef(ctx, GID)
if err != nil {
return 0, err
}
for _, loc := range existingLocation {
locations[loc.Name] = loc.ID
finished := 0
for _, itemID := range ids {
ref := uuid.New().String()[0:8]
err = svc.repo.Items.Patch(ctx, GID, itemID, repo.ItemPatch{ImportRef: &ref})
if err != nil {
return 0, err
}
finished++
}
labels := map[string]uuid.UUID{}
existingLabels, err := svc.repo.Labels.GetAll(ctx, GID)
return finished, nil
}
func serializeLocation[T ~[]string](location T) string {
return strings.Join(location, "/")
}
// CsvImport imports items from a CSV file using the standard defined format.
//
// CsvImport applies the following rules/operations:
//
// 1. If the item does not exist, it is created.
// 2. If the item has an ImportRef that already exists, it is skipped.
// 3. Locations and Labels are created if they do not exist.
func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Reader) (int, error) {
sheet := reporting.IOSheet{}
err := sheet.Read(data)
if err != nil {
return 0, err
}
for _, label := range existingLabels {
labels[label.Name] = label.ID
}
for _, row := range loaded {
// ========================================
// Labels
// Locations
if _, exists := locations[row.Location]; !exists {
result, err := svc.repo.Locations.Create(ctx, GID, repo.LocationCreate{
Name: row.Location,
Description: "",
})
if err != nil {
return 0, err
}
locations[row.Location] = result.ID
labelMap := make(map[string]uuid.UUID)
{
labels, err := svc.repo.Labels.GetAll(ctx, GID)
if err != nil {
return 0, err
}
// Labels
for _, label := range row.getLabels() {
if _, exists := labels[label]; exists {
continue
}
result, err := svc.repo.Labels.Create(ctx, GID, repo.LabelCreate{
Name: label,
Description: "",
})
if err != nil {
return 0, err
}
labels[label] = result.ID
for _, label := range labels {
labelMap[label.Name] = label.ID
}
}
highest := repo.AssetID(-1)
// ========================================
// Locations
locationMap := make(map[string]uuid.UUID)
{
locations, err := svc.repo.Locations.Tree(ctx, GID, repo.TreeQuery{WithItems: false})
if err != nil {
return 0, err
}
// Traverse the tree and build a map of location full paths to IDs,
// where the full path is the location names joined by slashes.
var traverse func(location *repo.TreeItem, path []string)
traverse = func(location *repo.TreeItem, path []string) {
path = append(path, location.Name)
locationMap[serializeLocation(path)] = location.ID
for _, child := range location.Children {
traverse(child, path)
}
}
for _, location := range locations {
traverse(&location, []string{})
}
}
// ========================================
// Import items
// Asset ID Pre-Check
highestAID := repo.AssetID(-1)
if svc.autoIncrementAssetID {
highest, err = svc.repo.Items.GetHighestAssetID(ctx, GID)
highestAID, err = svc.repo.Items.GetHighestAssetID(ctx, GID)
if err != nil {
return 0, err
}
}
// Create the items
var count int
for _, row := range loaded {
// Check Import Ref
if row.Item.ImportRef != "" {
exists, err := svc.repo.Items.CheckRef(ctx, GID, row.Item.ImportRef)
if exists {
continue
}
finished := 0
for i := range sheet.Rows {
row := sheet.Rows[i]
createRequired := true
// ========================================
// Preflight check for existing item
if row.ImportRef != "" {
exists, err := svc.repo.Items.CheckRef(ctx, GID, row.ImportRef)
if err != nil {
log.Err(err).Msg("error checking import ref")
return 0, fmt.Errorf("error checking for existing item with ref %q: %w", row.ImportRef, err)
}
if exists {
createRequired = false
}
}
locationID := locations[row.Location]
labelIDs := []uuid.UUID{}
for _, label := range row.getLabels() {
labelIDs = append(labelIDs, labels[label])
// ========================================
// Pre-Create Labels as necessary
labelIds := make([]uuid.UUID, len(row.LabelStr))
for j := range row.LabelStr {
label := row.LabelStr[j]
id, ok := labelMap[label]
if !ok {
newLabel, err := svc.repo.Labels.Create(ctx, GID, repo.LabelCreate{Name: label})
if err != nil {
return 0, err
}
id = newLabel.ID
}
labelIds[j] = id
labelMap[label] = id
}
log.Info().
Str("name", row.Item.Name).
Str("location", row.Location).
Msgf("Creating Item: %s", row.Item.Name)
// ========================================
// Pre-Create Locations as necessary
path := serializeLocation(row.Location)
data := repo.ItemCreate{
ImportRef: row.Item.ImportRef,
Name: row.Item.Name,
Description: row.Item.Description,
LabelIDs: labelIDs,
LocationID: locationID,
locationID, ok := locationMap[path]
if !ok { // Walk the LocationStr path element by element and create any locations that do not exist yet.
paths := []string{}
for i, pathElement := range row.Location {
paths = append(paths, pathElement)
path := serializeLocation(paths)
locationID, ok = locationMap[path]
if !ok {
parentID := uuid.Nil
// Get the parent ID
if i > 0 {
parentPath := serializeLocation(row.Location[:i])
parentID = locationMap[parentPath]
}
newLocation, err := svc.repo.Locations.Create(ctx, GID, repo.LocationCreate{
ParentID: parentID,
Name: pathElement,
})
if err != nil {
return 0, err
}
locationID = newLocation.ID
}
locationMap[path] = locationID
}
locationID, ok = locationMap[path]
if !ok {
return 0, errors.New("failed to create location")
}
}
if svc.autoIncrementAssetID {
highest++
data.AssetID = highest
var effAID repo.AssetID
if svc.autoIncrementAssetID && row.AssetID.Nil() {
effAID = highestAID + 1
highestAID++
} else {
effAID = row.AssetID
}
result, err := svc.repo.Items.Create(ctx, GID, data)
// ========================================
// Create Item
var item repo.ItemOut
switch {
case createRequired:
newItem := repo.ItemCreate{
ImportRef: row.ImportRef,
Name: row.Name,
Description: row.Description,
AssetID: effAID,
LocationID: locationID,
LabelIDs: labelIds,
}
if err != nil {
return count, err
item, err = svc.repo.Items.Create(ctx, GID, newItem)
if err != nil {
return 0, err
}
default:
item, err = svc.repo.Items.GetByRef(ctx, GID, row.ImportRef)
if err != nil {
return 0, err
}
}
// Update the item with the rest of the data
_, err = svc.repo.Items.UpdateByGroup(ctx, GID, repo.ItemUpdate{
// Edges
if item.ID == uuid.Nil {
panic("item ID is nil on import - this should never happen")
}
fields := make([]repo.ItemField, len(row.Fields))
for i := range row.Fields {
fields[i] = repo.ItemField{
Name: row.Fields[i].Name,
Type: "text",
TextValue: row.Fields[i].Value,
}
}
updateItem := repo.ItemUpdate{
ID: item.ID,
LabelIDs: labelIds,
LocationID: locationID,
LabelIDs: labelIDs,
AssetID: data.AssetID,
// General Fields
ID: result.ID,
Name: result.Name,
Description: result.Description,
Insured: row.Item.Insured,
Notes: row.Item.Notes,
Quantity: row.Item.Quantity,
Name: row.Name,
Description: row.Description,
AssetID: effAID,
Insured: row.Insured,
Quantity: row.Quantity,
Archived: row.Archived,
// Identifies the item as imported
SerialNumber: row.Item.SerialNumber,
ModelNumber: row.Item.ModelNumber,
Manufacturer: row.Item.Manufacturer,
PurchasePrice: row.PurchasePrice,
PurchaseFrom: row.PurchaseFrom,
PurchaseTime: row.PurchaseTime,
// Purchase
PurchaseFrom: row.Item.PurchaseFrom,
PurchasePrice: row.Item.PurchasePrice,
PurchaseTime: row.Item.PurchaseTime,
Manufacturer: row.Manufacturer,
ModelNumber: row.ModelNumber,
SerialNumber: row.SerialNumber,
// Warranty
LifetimeWarranty: row.Item.LifetimeWarranty,
WarrantyExpires: row.Item.WarrantyExpires,
WarrantyDetails: row.Item.WarrantyDetails,
LifetimeWarranty: row.LifetimeWarranty,
WarrantyExpires: row.WarrantyExpires,
WarrantyDetails: row.WarrantyDetails,
SoldTo: row.Item.SoldTo,
SoldPrice: row.Item.SoldPrice,
SoldTime: row.Item.SoldTime,
SoldNotes: row.Item.SoldNotes,
})
SoldTo: row.SoldTo,
SoldTime: row.SoldTime,
SoldPrice: row.SoldPrice,
SoldNotes: row.SoldNotes,
if err != nil {
return count, err
Notes: row.Notes,
Fields: fields,
}
count++
item, err = svc.repo.Items.UpdateByGroup(ctx, GID, updateItem)
if err != nil {
return 0, err
}
finished++
}
return count, nil
return finished, nil
}
func (svc *ItemService) ExportTSV(ctx context.Context, GID uuid.UUID) ([][]string, error) {
items, err := svc.repo.Items.GetAll(ctx, GID)
if err != nil {
return nil, err
}
sheet := reporting.IOSheet{}
sheet.ReadItems(items)
return sheet.TSV()
}
func (svc *ItemService) ExportBillOfMaterialsTSV(ctx context.Context, GID uuid.UUID) ([]byte, error) {
items, err := svc.repo.Items.GetAll(ctx, GID)
if err != nil {
return nil, err
}
return reporting.BillOfMaterialsTSV(items)
}
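// Editor's note (illustration only): CsvImport now accepts an io.Reader instead of
// pre-parsed [][]string rows, so callers can stream an uploaded file straight into the
// service. The names svc, groupID, and file below are placeholders, not part of the diff.
func csvImportSketch(ctx context.Context, svc *ItemService, groupID uuid.UUID, file io.Reader) error {
	finished, err := svc.CsvImport(ctx, groupID, file)
	if err != nil {
		return fmt.Errorf("csv import failed: %w", err)
	}
	log.Info().Int("imported", finished).Msg("csv import complete")
	return nil
}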

View File

@@ -58,5 +58,4 @@ func TestItemService_AddAttachment(t *testing.T) {
bts, err := os.ReadFile(storedPath)
assert.NoError(t, err)
assert.Equal(t, contents, string(bts))
}

View File

@@ -1,151 +0,0 @@
package services
import (
"bytes"
"encoding/csv"
"errors"
"io"
"strconv"
"strings"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/types"
)
func determineSeparator(data []byte) (rune, error) {
// First row
firstRow := bytes.Split(data, []byte("\n"))[0]
// find the first comma or tab
comma := bytes.IndexByte(firstRow, ',')
tab := bytes.IndexByte(firstRow, '\t')
switch {
case comma == -1 && tab == -1:
return 0, errors.New("could not determine separator")
case tab > comma:
return '\t', nil
default:
return ',', nil
}
}
func ReadCsv(r io.Reader) ([][]string, error) {
data, err := io.ReadAll(r)
if err != nil {
return nil, err
}
reader := csv.NewReader(bytes.NewReader(data))
// Determine separator
sep, err := determineSeparator(data)
if err != nil {
return nil, err
}
reader.Comma = sep
return reader.ReadAll()
}
var ErrInvalidCsv = errors.New("invalid csv")
const NumOfCols = 21
func parseFloat(s string) float64 {
if s == "" {
return 0
}
f, _ := strconv.ParseFloat(s, 64)
return f
}
func parseBool(s string) bool {
switch strings.ToLower(s) {
case "true", "yes", "1":
return true
default:
return false
}
}
func parseInt(s string) int {
i, _ := strconv.Atoi(s)
return i
}
type csvRow struct {
Item repo.ItemOut
Location string
LabelStr string
}
func newCsvRow(row []string) csvRow {
return csvRow{
Location: row[1],
LabelStr: row[2],
Item: repo.ItemOut{
ItemSummary: repo.ItemSummary{
ImportRef: row[0],
Quantity: parseInt(row[3]),
Name: row[4],
Description: row[5],
Insured: parseBool(row[6]),
PurchasePrice: parseFloat(row[12]),
},
SerialNumber: row[7],
ModelNumber: row[8],
Manufacturer: row[9],
Notes: row[10],
PurchaseFrom: row[11],
PurchaseTime: types.DateFromString(row[13]),
LifetimeWarranty: parseBool(row[14]),
WarrantyExpires: types.DateFromString(row[15]),
WarrantyDetails: row[16],
SoldTo: row[17],
SoldPrice: parseFloat(row[18]),
SoldTime: types.DateFromString(row[19]),
SoldNotes: row[20],
},
}
}
func (c csvRow) getLabels() []string {
split := strings.Split(c.LabelStr, ";")
// Trim each
for i, s := range split {
split[i] = strings.TrimSpace(s)
}
// Remove empty
for i, s := range split {
if s == "" {
split = append(split[:i], split[i+1:]...)
}
}
return split
}
func (c csvRow) validate() []error {
var errs []error
add := func(err error) {
errs = append(errs, err)
}
required := func(s string, name string) {
if s == "" {
add(errors.New(name + " is required"))
}
}
required(c.Location, "Location")
required(c.Item.Name, "Name")
return errs
}

View File

@@ -1,164 +0,0 @@
package services
import (
"bytes"
_ "embed"
"encoding/csv"
"fmt"
"reflect"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
//go:embed .testdata/import.csv
var CSVData_Comma []byte
//go:embed .testdata/import.tsv
var CSVData_Tab []byte
func loadcsv() [][]string {
reader := csv.NewReader(bytes.NewReader(CSVData_Comma))
records, err := reader.ReadAll()
if err != nil {
panic(err)
}
return records
}
func Test_CorrectDateParsing(t *testing.T) {
t.Parallel()
expected := []time.Time{
time.Date(2021, 10, 13, 0, 0, 0, 0, time.UTC),
time.Date(2021, 10, 15, 0, 0, 0, 0, time.UTC),
time.Date(2021, 10, 13, 0, 0, 0, 0, time.UTC),
time.Date(2020, 10, 21, 0, 0, 0, 0, time.UTC),
time.Date(2020, 10, 14, 0, 0, 0, 0, time.UTC),
time.Date(2020, 9, 30, 0, 0, 0, 0, time.UTC),
}
records := loadcsv()
for i, record := range records {
if i == 0 {
continue
}
entity := newCsvRow(record)
expected := expected[i-1]
assert.Equal(t, expected, entity.Item.PurchaseTime.Time(), fmt.Sprintf("Failed on row %d", i))
assert.Equal(t, expected, entity.Item.WarrantyExpires.Time(), fmt.Sprintf("Failed on row %d", i))
assert.Equal(t, expected, entity.Item.SoldTime.Time(), fmt.Sprintf("Failed on row %d", i))
}
}
func Test_csvRow_getLabels(t *testing.T) {
type fields struct {
LabelStr string
}
tests := []struct {
name string
fields fields
want []string
}{
{
name: "basic test",
fields: fields{
LabelStr: "IOT;Home Assistant;Z-Wave",
},
want: []string{"IOT", "Home Assistant", "Z-Wave"},
},
{
name: "no labels",
fields: fields{
LabelStr: "",
},
want: []string{},
},
{
name: "single label",
fields: fields{
LabelStr: "IOT",
},
want: []string{"IOT"},
},
{
name: "trailing semicolon",
fields: fields{
LabelStr: "IOT;",
},
want: []string{"IOT"},
},
{
name: "whitespace",
fields: fields{
LabelStr: " IOT; Home Assistant; Z-Wave ",
},
want: []string{"IOT", "Home Assistant", "Z-Wave"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := csvRow{
LabelStr: tt.fields.LabelStr,
}
if got := c.getLabels(); !reflect.DeepEqual(got, tt.want) {
t.Errorf("csvRow.getLabels() = %v, want %v", got, tt.want)
}
})
}
}
func Test_determineSeparator(t *testing.T) {
type args struct {
data []byte
}
tests := []struct {
name string
args args
want rune
wantErr bool
}{
{
name: "comma",
args: args{
data: CSVData_Comma,
},
want: ',',
wantErr: false,
},
{
name: "tab",
args: args{
data: CSVData_Tab,
},
want: '\t',
wantErr: false,
},
{
name: "invalid",
args: args{
data: []byte("a;b;c"),
},
want: 0,
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := determineSeparator(tt.args.data)
if (err != nil) != tt.wantErr {
t.Errorf("determineSeparator() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Errorf("determineSeparator() = %v, want %v", got, tt.want)
}
})
}
}

View File

@@ -1,78 +0,0 @@
package services
import (
"context"
"testing"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/stretchr/testify/assert"
)
func TestItemService_CsvImport(t *testing.T) {
data := loadcsv()
svc := &ItemService{
repo: tRepos,
}
count, err := svc.CsvImport(context.Background(), tGroup.ID, data)
assert.Equal(t, 6, count)
assert.NoError(t, err)
// Check import refs are deduplicated
count, err = svc.CsvImport(context.Background(), tGroup.ID, data)
assert.Equal(t, 0, count)
assert.NoError(t, err)
items, err := svc.repo.Items.GetAll(context.Background(), tGroup.ID)
assert.NoError(t, err)
t.Cleanup(func() {
for _, item := range items {
err := svc.repo.Items.Delete(context.Background(), item.ID)
assert.NoError(t, err)
}
})
assert.Equal(t, len(items), 6)
dataCsv := []csvRow{}
for _, item := range data {
dataCsv = append(dataCsv, newCsvRow(item))
}
allLocation, err := tRepos.Locations.GetAll(context.Background(), tGroup.ID, repo.LocationQuery{})
assert.NoError(t, err)
locNames := []string{}
for _, loc := range allLocation {
locNames = append(locNames, loc.Name)
}
allLabels, err := tRepos.Labels.GetAll(context.Background(), tGroup.ID)
assert.NoError(t, err)
labelNames := []string{}
for _, label := range allLabels {
labelNames = append(labelNames, label.Name)
}
ids := []uuid.UUID{}
t.Cleanup((func() {
for _, id := range ids {
err := svc.repo.Items.Delete(context.Background(), id)
assert.NoError(t, err)
}
}))
for _, item := range items {
assert.Contains(t, locNames, item.Location.Name)
for _, label := range item.Labels {
assert.Contains(t, labelNames, label.Name)
}
for _, csvRow := range dataCsv {
if csvRow.Item.Name == item.Name {
assert.Equal(t, csvRow.Item.Description, item.Description)
assert.Equal(t, csvRow.Item.Quantity, item.Quantity)
assert.Equal(t, csvRow.Item.Insured, item.Insured)
}
}
}
}

View File

@@ -139,7 +139,6 @@ func (svc *UserService) UpdateSelf(ctx context.Context, ID uuid.UUID, data repo.
// User Authentication
func (svc *UserService) createSessionToken(ctx context.Context, userId uuid.UUID) (UserAuthTokenDetail, error) {
attachmentToken := hasher.GenerateToken()
attachmentData := repo.UserAuthTokenCreate{
UserID: userId,
@@ -173,7 +172,6 @@ func (svc *UserService) createSessionToken(ctx context.Context, userId uuid.UUID
func (svc *UserService) Login(ctx context.Context, username, password string) (UserAuthTokenDetail, error) {
usr, err := svc.repos.Users.GetOneEmail(ctx, username)
if err != nil {
// SECURITY: Perform a dummy hash comparison so response times stay constant whether or not the user exists
hasher.CheckPasswordHash("not-a-real-password", "not-a-real-password")
@@ -197,7 +195,6 @@ func (svc *UserService) RenewToken(ctx context.Context, token string) (UserAuthT
hash := hasher.HashToken(token)
dbToken, err := svc.repos.AuthTokens.GetUserFromToken(ctx, hash)
if err != nil {
return UserAuthTokenDetail{}, ErrorInvalidToken
}
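// Editor's note (sketch only): the SECURITY comment above refers to a timing-equalization
// pattern — when the user lookup fails, a dummy hash comparison is still performed so the
// failure path costs roughly the same time as a real password check. Everything here except
// hasher.CheckPasswordHash is a placeholder, not the service's real API.
func constantTimeLoginSketch(lookupErr error, password, storedHash string) bool {
	if lookupErr != nil {
		// Unknown user: burn roughly the same time as a real comparison so callers
		// cannot distinguish "unknown user" from "wrong password" by latency.
		hasher.CheckPasswordHash("not-a-real-password", "not-a-real-password")
		return false
	}
	return hasher.CheckPasswordHash(password, storedHash)
}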

View File

@@ -189,9 +189,3 @@ func (a *Attachment) String() string {
// Attachments is a parsable slice of Attachment.
type Attachments []*Attachment
func (a Attachments) config(cfg config) {
for _i := range a {
a[_i].config = cfg
}
}

View File

@@ -205,13 +205,7 @@ func (ac *AttachmentCreate) sqlSave(ctx context.Context) (*Attachment, error) {
func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
var (
_node = &Attachment{config: ac.config}
_spec = &sqlgraph.CreateSpec{
Table: attachment.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: attachment.FieldID,
},
}
_spec = sqlgraph.NewCreateSpec(attachment.Table, sqlgraph.NewFieldSpec(attachment.FieldID, field.TypeUUID))
)
if id, ok := ac.mutation.ID(); ok {
_node.ID = id

View File

@@ -40,15 +40,7 @@ func (ad *AttachmentDelete) ExecX(ctx context.Context) int {
}
func (ad *AttachmentDelete) sqlExec(ctx context.Context) (int, error) {
_spec := &sqlgraph.DeleteSpec{
Node: &sqlgraph.NodeSpec{
Table: attachment.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: attachment.FieldID,
},
},
}
_spec := sqlgraph.NewDeleteSpec(attachment.Table, sqlgraph.NewFieldSpec(attachment.FieldID, field.TypeUUID))
if ps := ad.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {

View File

@@ -227,10 +227,12 @@ func (aq *AttachmentQuery) AllX(ctx context.Context) []*Attachment {
}
// IDs executes the query and returns a list of Attachment IDs.
func (aq *AttachmentQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
var ids []uuid.UUID
func (aq *AttachmentQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error) {
if aq.ctx.Unique == nil && aq.path != nil {
aq.Unique(true)
}
ctx = setContextOp(ctx, aq.ctx, "IDs")
if err := aq.Select(attachment.FieldID).Scan(ctx, &ids); err != nil {
if err = aq.Select(attachment.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
@@ -525,20 +527,12 @@ func (aq *AttachmentQuery) sqlCount(ctx context.Context) (int, error) {
}
func (aq *AttachmentQuery) querySpec() *sqlgraph.QuerySpec {
_spec := &sqlgraph.QuerySpec{
Node: &sqlgraph.NodeSpec{
Table: attachment.Table,
Columns: attachment.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: attachment.FieldID,
},
},
From: aq.sql,
Unique: true,
}
_spec := sqlgraph.NewQuerySpec(attachment.Table, attachment.Columns, sqlgraph.NewFieldSpec(attachment.FieldID, field.TypeUUID))
_spec.From = aq.sql
if unique := aq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if aq.path != nil {
_spec.Unique = true
}
if fields := aq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))

View File

@@ -146,16 +146,7 @@ func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
if err := au.check(); err != nil {
return n, err
}
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: attachment.Table,
Columns: attachment.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: attachment.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(attachment.Table, attachment.Columns, sqlgraph.NewFieldSpec(attachment.FieldID, field.TypeUUID))
if ps := au.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
@@ -318,6 +309,12 @@ func (auo *AttachmentUpdateOne) ClearDocument() *AttachmentUpdateOne {
return auo
}
// Where appends a list of predicates to the AttachmentUpdateOne builder.
func (auo *AttachmentUpdateOne) Where(ps ...predicate.Attachment) *AttachmentUpdateOne {
auo.mutation.Where(ps...)
return auo
}
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (auo *AttachmentUpdateOne) Select(field string, fields ...string) *AttachmentUpdateOne {
@@ -381,16 +378,7 @@ func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment,
if err := auo.check(); err != nil {
return _node, err
}
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: attachment.Table,
Columns: attachment.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: attachment.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(attachment.Table, attachment.Columns, sqlgraph.NewFieldSpec(attachment.FieldID, field.TypeUUID))
id, ok := auo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Attachment.id" for update`)}

View File

@@ -133,9 +133,3 @@ func (ar *AuthRoles) String() string {
// AuthRolesSlice is a parsable slice of AuthRoles.
type AuthRolesSlice []*AuthRoles
func (ar AuthRolesSlice) config(cfg config) {
for _i := range ar {
ar[_i].config = cfg
}
}

View File

@@ -129,13 +129,7 @@ func (arc *AuthRolesCreate) sqlSave(ctx context.Context) (*AuthRoles, error) {
func (arc *AuthRolesCreate) createSpec() (*AuthRoles, *sqlgraph.CreateSpec) {
var (
_node = &AuthRoles{config: arc.config}
_spec = &sqlgraph.CreateSpec{
Table: authroles.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: authroles.FieldID,
},
}
_spec = sqlgraph.NewCreateSpec(authroles.Table, sqlgraph.NewFieldSpec(authroles.FieldID, field.TypeInt))
)
if value, ok := arc.mutation.Role(); ok {
_spec.SetField(authroles.FieldRole, field.TypeEnum, value)

View File

@@ -40,15 +40,7 @@ func (ard *AuthRolesDelete) ExecX(ctx context.Context) int {
}
func (ard *AuthRolesDelete) sqlExec(ctx context.Context) (int, error) {
_spec := &sqlgraph.DeleteSpec{
Node: &sqlgraph.NodeSpec{
Table: authroles.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: authroles.FieldID,
},
},
}
_spec := sqlgraph.NewDeleteSpec(authroles.Table, sqlgraph.NewFieldSpec(authroles.FieldID, field.TypeInt))
if ps := ard.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {

View File

@@ -203,10 +203,12 @@ func (arq *AuthRolesQuery) AllX(ctx context.Context) []*AuthRoles {
}
// IDs executes the query and returns a list of AuthRoles IDs.
func (arq *AuthRolesQuery) IDs(ctx context.Context) ([]int, error) {
var ids []int
func (arq *AuthRolesQuery) IDs(ctx context.Context) (ids []int, err error) {
if arq.ctx.Unique == nil && arq.path != nil {
arq.Unique(true)
}
ctx = setContextOp(ctx, arq.ctx, "IDs")
if err := arq.Select(authroles.FieldID).Scan(ctx, &ids); err != nil {
if err = arq.Select(authroles.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
@@ -450,20 +452,12 @@ func (arq *AuthRolesQuery) sqlCount(ctx context.Context) (int, error) {
}
func (arq *AuthRolesQuery) querySpec() *sqlgraph.QuerySpec {
_spec := &sqlgraph.QuerySpec{
Node: &sqlgraph.NodeSpec{
Table: authroles.Table,
Columns: authroles.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: authroles.FieldID,
},
},
From: arq.sql,
Unique: true,
}
_spec := sqlgraph.NewQuerySpec(authroles.Table, authroles.Columns, sqlgraph.NewFieldSpec(authroles.FieldID, field.TypeInt))
_spec.From = arq.sql
if unique := arq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if arq.path != nil {
_spec.Unique = true
}
if fields := arq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))

View File

@@ -114,16 +114,7 @@ func (aru *AuthRolesUpdate) sqlSave(ctx context.Context) (n int, err error) {
if err := aru.check(); err != nil {
return n, err
}
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: authroles.Table,
Columns: authroles.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: authroles.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(authroles.Table, authroles.Columns, sqlgraph.NewFieldSpec(authroles.FieldID, field.TypeInt))
if ps := aru.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
@@ -233,6 +224,12 @@ func (aruo *AuthRolesUpdateOne) ClearToken() *AuthRolesUpdateOne {
return aruo
}
// Where appends a list of predicates to the AuthRolesUpdateOne builder.
func (aruo *AuthRolesUpdateOne) Where(ps ...predicate.AuthRoles) *AuthRolesUpdateOne {
aruo.mutation.Where(ps...)
return aruo
}
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (aruo *AuthRolesUpdateOne) Select(field string, fields ...string) *AuthRolesUpdateOne {
@@ -281,16 +278,7 @@ func (aruo *AuthRolesUpdateOne) sqlSave(ctx context.Context) (_node *AuthRoles,
if err := aruo.check(); err != nil {
return _node, err
}
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: authroles.Table,
Columns: authroles.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: authroles.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(authroles.Table, authroles.Columns, sqlgraph.NewFieldSpec(authroles.FieldID, field.TypeInt))
id, ok := aruo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "AuthRoles.id" for update`)}

View File

@@ -190,9 +190,3 @@ func (at *AuthTokens) String() string {
// AuthTokensSlice is a parsable slice of AuthTokens.
type AuthTokensSlice []*AuthTokens
func (at AuthTokensSlice) config(cfg config) {
for _i := range at {
at[_i].config = cfg
}
}

View File

@@ -219,13 +219,7 @@ func (atc *AuthTokensCreate) sqlSave(ctx context.Context) (*AuthTokens, error) {
func (atc *AuthTokensCreate) createSpec() (*AuthTokens, *sqlgraph.CreateSpec) {
var (
_node = &AuthTokens{config: atc.config}
_spec = &sqlgraph.CreateSpec{
Table: authtokens.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: authtokens.FieldID,
},
}
_spec = sqlgraph.NewCreateSpec(authtokens.Table, sqlgraph.NewFieldSpec(authtokens.FieldID, field.TypeUUID))
)
if id, ok := atc.mutation.ID(); ok {
_node.ID = id

View File

@@ -40,15 +40,7 @@ func (atd *AuthTokensDelete) ExecX(ctx context.Context) int {
}
func (atd *AuthTokensDelete) sqlExec(ctx context.Context) (int, error) {
_spec := &sqlgraph.DeleteSpec{
Node: &sqlgraph.NodeSpec{
Table: authtokens.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: authtokens.FieldID,
},
},
}
_spec := sqlgraph.NewDeleteSpec(authtokens.Table, sqlgraph.NewFieldSpec(authtokens.FieldID, field.TypeUUID))
if ps := atd.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {

View File

@@ -228,10 +228,12 @@ func (atq *AuthTokensQuery) AllX(ctx context.Context) []*AuthTokens {
}
// IDs executes the query and returns a list of AuthTokens IDs.
func (atq *AuthTokensQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
var ids []uuid.UUID
func (atq *AuthTokensQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error) {
if atq.ctx.Unique == nil && atq.path != nil {
atq.Unique(true)
}
ctx = setContextOp(ctx, atq.ctx, "IDs")
if err := atq.Select(authtokens.FieldID).Scan(ctx, &ids); err != nil {
if err = atq.Select(authtokens.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
@@ -522,20 +524,12 @@ func (atq *AuthTokensQuery) sqlCount(ctx context.Context) (int, error) {
}
func (atq *AuthTokensQuery) querySpec() *sqlgraph.QuerySpec {
_spec := &sqlgraph.QuerySpec{
Node: &sqlgraph.NodeSpec{
Table: authtokens.Table,
Columns: authtokens.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: authtokens.FieldID,
},
},
From: atq.sql,
Unique: true,
}
_spec := sqlgraph.NewQuerySpec(authtokens.Table, authtokens.Columns, sqlgraph.NewFieldSpec(authtokens.FieldID, field.TypeUUID))
_spec.From = atq.sql
if unique := atq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if atq.path != nil {
_spec.Unique = true
}
if fields := atq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))

View File

@@ -149,16 +149,7 @@ func (atu *AuthTokensUpdate) defaults() {
}
func (atu *AuthTokensUpdate) sqlSave(ctx context.Context) (n int, err error) {
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: authtokens.Table,
Columns: authtokens.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: authtokens.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(authtokens.Table, authtokens.Columns, sqlgraph.NewFieldSpec(authtokens.FieldID, field.TypeUUID))
if ps := atu.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
@@ -346,6 +337,12 @@ func (atuo *AuthTokensUpdateOne) ClearRoles() *AuthTokensUpdateOne {
return atuo
}
// Where appends a list of predicates to the AuthTokensUpdateOne builder.
func (atuo *AuthTokensUpdateOne) Where(ps ...predicate.AuthTokens) *AuthTokensUpdateOne {
atuo.mutation.Where(ps...)
return atuo
}
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (atuo *AuthTokensUpdateOne) Select(field string, fields ...string) *AuthTokensUpdateOne {
@@ -390,16 +387,7 @@ func (atuo *AuthTokensUpdateOne) defaults() {
}
func (atuo *AuthTokensUpdateOne) sqlSave(ctx context.Context) (_node *AuthTokens, err error) {
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: authtokens.Table,
Columns: authtokens.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: authtokens.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(authtokens.Table, authtokens.Columns, sqlgraph.NewFieldSpec(authtokens.FieldID, field.TypeUUID))
id, ok := atuo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "AuthTokens.id" for update`)}

View File

@@ -22,6 +22,7 @@ import (
"github.com/hay-kot/homebox/backend/internal/data/ent/label"
"github.com/hay-kot/homebox/backend/internal/data/ent/location"
"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
"github.com/hay-kot/homebox/backend/internal/data/ent/notifier"
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
"entgo.io/ent/dialect"
@@ -56,6 +57,8 @@ type Client struct {
Location *LocationClient
// MaintenanceEntry is the client for interacting with the MaintenanceEntry builders.
MaintenanceEntry *MaintenanceEntryClient
// Notifier is the client for interacting with the Notifier builders.
Notifier *NotifierClient
// User is the client for interacting with the User builders.
User *UserClient
}
@@ -82,6 +85,7 @@ func (c *Client) init() {
c.Label = NewLabelClient(c.config)
c.Location = NewLocationClient(c.config)
c.MaintenanceEntry = NewMaintenanceEntryClient(c.config)
c.Notifier = NewNotifierClient(c.config)
c.User = NewUserClient(c.config)
}
@@ -127,6 +131,7 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) {
Label: NewLabelClient(cfg),
Location: NewLocationClient(cfg),
MaintenanceEntry: NewMaintenanceEntryClient(cfg),
Notifier: NewNotifierClient(cfg),
User: NewUserClient(cfg),
}, nil
}
@@ -158,6 +163,7 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error)
Label: NewLabelClient(cfg),
Location: NewLocationClient(cfg),
MaintenanceEntry: NewMaintenanceEntryClient(cfg),
Notifier: NewNotifierClient(cfg),
User: NewUserClient(cfg),
}, nil
}
@@ -198,6 +204,7 @@ func (c *Client) Use(hooks ...Hook) {
c.Label.Use(hooks...)
c.Location.Use(hooks...)
c.MaintenanceEntry.Use(hooks...)
c.Notifier.Use(hooks...)
c.User.Use(hooks...)
}
@@ -215,6 +222,7 @@ func (c *Client) Intercept(interceptors ...Interceptor) {
c.Label.Intercept(interceptors...)
c.Location.Intercept(interceptors...)
c.MaintenanceEntry.Intercept(interceptors...)
c.Notifier.Intercept(interceptors...)
c.User.Intercept(interceptors...)
}
@@ -243,6 +251,8 @@ func (c *Client) Mutate(ctx context.Context, m Mutation) (Value, error) {
return c.Location.mutate(ctx, m)
case *MaintenanceEntryMutation:
return c.MaintenanceEntry.mutate(ctx, m)
case *NotifierMutation:
return c.Notifier.mutate(ctx, m)
case *UserMutation:
return c.User.mutate(ctx, m)
default:
@@ -266,7 +276,7 @@ func (c *AttachmentClient) Use(hooks ...Hook) {
c.hooks.Attachment = append(c.hooks.Attachment, hooks...)
}
// Use adds a list of query interceptors to the interceptors stack.
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `attachment.Intercept(f(g(h())))`.
func (c *AttachmentClient) Intercept(interceptors ...Interceptor) {
c.inters.Attachment = append(c.inters.Attachment, interceptors...)
@@ -416,7 +426,7 @@ func (c *AuthRolesClient) Use(hooks ...Hook) {
c.hooks.AuthRoles = append(c.hooks.AuthRoles, hooks...)
}
// Use adds a list of query interceptors to the interceptors stack.
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `authroles.Intercept(f(g(h())))`.
func (c *AuthRolesClient) Intercept(interceptors ...Interceptor) {
c.inters.AuthRoles = append(c.inters.AuthRoles, interceptors...)
@@ -550,7 +560,7 @@ func (c *AuthTokensClient) Use(hooks ...Hook) {
c.hooks.AuthTokens = append(c.hooks.AuthTokens, hooks...)
}
// Use adds a list of query interceptors to the interceptors stack.
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `authtokens.Intercept(f(g(h())))`.
func (c *AuthTokensClient) Intercept(interceptors ...Interceptor) {
c.inters.AuthTokens = append(c.inters.AuthTokens, interceptors...)
@@ -700,7 +710,7 @@ func (c *DocumentClient) Use(hooks ...Hook) {
c.hooks.Document = append(c.hooks.Document, hooks...)
}
// Use adds a list of query interceptors to the interceptors stack.
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `document.Intercept(f(g(h())))`.
func (c *DocumentClient) Intercept(interceptors ...Interceptor) {
c.inters.Document = append(c.inters.Document, interceptors...)
@@ -850,7 +860,7 @@ func (c *GroupClient) Use(hooks ...Hook) {
c.hooks.Group = append(c.hooks.Group, hooks...)
}
// Use adds a list of query interceptors to the interceptors stack.
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `group.Intercept(f(g(h())))`.
func (c *GroupClient) Intercept(interceptors ...Interceptor) {
c.inters.Group = append(c.inters.Group, interceptors...)
@@ -1023,6 +1033,22 @@ func (c *GroupClient) QueryInvitationTokens(gr *Group) *GroupInvitationTokenQuer
return query
}
// QueryNotifiers queries the notifiers edge of a Group.
func (c *GroupClient) QueryNotifiers(gr *Group) *NotifierQuery {
query := (&NotifierClient{config: c.config}).Query()
query.path = func(context.Context) (fromV *sql.Selector, _ error) {
id := gr.ID
step := sqlgraph.NewStep(
sqlgraph.From(group.Table, group.FieldID, id),
sqlgraph.To(notifier.Table, notifier.FieldID),
sqlgraph.Edge(sqlgraph.O2M, false, group.NotifiersTable, group.NotifiersColumn),
)
fromV = sqlgraph.Neighbors(gr.driver.Dialect(), step)
return fromV, nil
}
return query
}
// Hooks returns the client hooks.
func (c *GroupClient) Hooks() []Hook {
return c.hooks.Group
@@ -1064,7 +1090,7 @@ func (c *GroupInvitationTokenClient) Use(hooks ...Hook) {
c.hooks.GroupInvitationToken = append(c.hooks.GroupInvitationToken, hooks...)
}
// Use adds a list of query interceptors to the interceptors stack.
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `groupinvitationtoken.Intercept(f(g(h())))`.
func (c *GroupInvitationTokenClient) Intercept(interceptors ...Interceptor) {
c.inters.GroupInvitationToken = append(c.inters.GroupInvitationToken, interceptors...)
@@ -1198,7 +1224,7 @@ func (c *ItemClient) Use(hooks ...Hook) {
c.hooks.Item = append(c.hooks.Item, hooks...)
}
// Use adds a list of query interceptors to the interceptors stack.
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `item.Intercept(f(g(h())))`.
func (c *ItemClient) Intercept(interceptors ...Interceptor) {
c.inters.Item = append(c.inters.Item, interceptors...)
@@ -1275,6 +1301,22 @@ func (c *ItemClient) GetX(ctx context.Context, id uuid.UUID) *Item {
return obj
}
// QueryGroup queries the group edge of a Item.
func (c *ItemClient) QueryGroup(i *Item) *GroupQuery {
query := (&GroupClient{config: c.config}).Query()
query.path = func(context.Context) (fromV *sql.Selector, _ error) {
id := i.ID
step := sqlgraph.NewStep(
sqlgraph.From(item.Table, item.FieldID, id),
sqlgraph.To(group.Table, group.FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, item.GroupTable, item.GroupColumn),
)
fromV = sqlgraph.Neighbors(i.driver.Dialect(), step)
return fromV, nil
}
return query
}
// QueryParent queries the parent edge of a Item.
func (c *ItemClient) QueryParent(i *Item) *ItemQuery {
query := (&ItemClient{config: c.config}).Query()
@@ -1307,22 +1349,6 @@ func (c *ItemClient) QueryChildren(i *Item) *ItemQuery {
return query
}
// QueryGroup queries the group edge of a Item.
func (c *ItemClient) QueryGroup(i *Item) *GroupQuery {
query := (&GroupClient{config: c.config}).Query()
query.path = func(context.Context) (fromV *sql.Selector, _ error) {
id := i.ID
step := sqlgraph.NewStep(
sqlgraph.From(item.Table, item.FieldID, id),
sqlgraph.To(group.Table, group.FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, item.GroupTable, item.GroupColumn),
)
fromV = sqlgraph.Neighbors(i.driver.Dialect(), step)
return fromV, nil
}
return query
}
// QueryLabel queries the label edge of a Item.
func (c *ItemClient) QueryLabel(i *Item) *LabelQuery {
query := (&LabelClient{config: c.config}).Query()
@@ -1444,7 +1470,7 @@ func (c *ItemFieldClient) Use(hooks ...Hook) {
c.hooks.ItemField = append(c.hooks.ItemField, hooks...)
}
// Use adds a list of query interceptors to the interceptors stack.
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `itemfield.Intercept(f(g(h())))`.
func (c *ItemFieldClient) Intercept(interceptors ...Interceptor) {
c.inters.ItemField = append(c.inters.ItemField, interceptors...)
@@ -1578,7 +1604,7 @@ func (c *LabelClient) Use(hooks ...Hook) {
c.hooks.Label = append(c.hooks.Label, hooks...)
}
// Use adds a list of query interceptors to the interceptors stack.
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `label.Intercept(f(g(h())))`.
func (c *LabelClient) Intercept(interceptors ...Interceptor) {
c.inters.Label = append(c.inters.Label, interceptors...)
@@ -1728,7 +1754,7 @@ func (c *LocationClient) Use(hooks ...Hook) {
c.hooks.Location = append(c.hooks.Location, hooks...)
}
// Use adds a list of query interceptors to the interceptors stack.
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `location.Intercept(f(g(h())))`.
func (c *LocationClient) Intercept(interceptors ...Interceptor) {
c.inters.Location = append(c.inters.Location, interceptors...)
@@ -1805,6 +1831,22 @@ func (c *LocationClient) GetX(ctx context.Context, id uuid.UUID) *Location {
return obj
}
// QueryGroup queries the group edge of a Location.
func (c *LocationClient) QueryGroup(l *Location) *GroupQuery {
query := (&GroupClient{config: c.config}).Query()
query.path = func(context.Context) (fromV *sql.Selector, _ error) {
id := l.ID
step := sqlgraph.NewStep(
sqlgraph.From(location.Table, location.FieldID, id),
sqlgraph.To(group.Table, group.FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, location.GroupTable, location.GroupColumn),
)
fromV = sqlgraph.Neighbors(l.driver.Dialect(), step)
return fromV, nil
}
return query
}
// QueryParent queries the parent edge of a Location.
func (c *LocationClient) QueryParent(l *Location) *LocationQuery {
query := (&LocationClient{config: c.config}).Query()
@@ -1837,22 +1879,6 @@ func (c *LocationClient) QueryChildren(l *Location) *LocationQuery {
return query
}
// QueryGroup queries the group edge of a Location.
func (c *LocationClient) QueryGroup(l *Location) *GroupQuery {
query := (&GroupClient{config: c.config}).Query()
query.path = func(context.Context) (fromV *sql.Selector, _ error) {
id := l.ID
step := sqlgraph.NewStep(
sqlgraph.From(location.Table, location.FieldID, id),
sqlgraph.To(group.Table, group.FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, location.GroupTable, location.GroupColumn),
)
fromV = sqlgraph.Neighbors(l.driver.Dialect(), step)
return fromV, nil
}
return query
}
// QueryItems queries the items edge of a Location.
func (c *LocationClient) QueryItems(l *Location) *ItemQuery {
query := (&ItemClient{config: c.config}).Query()
@@ -1910,7 +1936,7 @@ func (c *MaintenanceEntryClient) Use(hooks ...Hook) {
c.hooks.MaintenanceEntry = append(c.hooks.MaintenanceEntry, hooks...)
}
// Use adds a list of query interceptors to the interceptors stack.
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `maintenanceentry.Intercept(f(g(h())))`.
func (c *MaintenanceEntryClient) Intercept(interceptors ...Interceptor) {
c.inters.MaintenanceEntry = append(c.inters.MaintenanceEntry, interceptors...)
@@ -2028,6 +2054,156 @@ func (c *MaintenanceEntryClient) mutate(ctx context.Context, m *MaintenanceEntry
}
}
// NotifierClient is a client for the Notifier schema.
type NotifierClient struct {
config
}
// NewNotifierClient returns a client for the Notifier from the given config.
func NewNotifierClient(c config) *NotifierClient {
return &NotifierClient{config: c}
}
// Use adds a list of mutation hooks to the hooks stack.
// A call to `Use(f, g, h)` equals to `notifier.Hooks(f(g(h())))`.
func (c *NotifierClient) Use(hooks ...Hook) {
c.hooks.Notifier = append(c.hooks.Notifier, hooks...)
}
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` equals to `notifier.Intercept(f(g(h())))`.
func (c *NotifierClient) Intercept(interceptors ...Interceptor) {
c.inters.Notifier = append(c.inters.Notifier, interceptors...)
}
// Create returns a builder for creating a Notifier entity.
func (c *NotifierClient) Create() *NotifierCreate {
mutation := newNotifierMutation(c.config, OpCreate)
return &NotifierCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// CreateBulk returns a builder for creating a bulk of Notifier entities.
func (c *NotifierClient) CreateBulk(builders ...*NotifierCreate) *NotifierCreateBulk {
return &NotifierCreateBulk{config: c.config, builders: builders}
}
// Update returns an update builder for Notifier.
func (c *NotifierClient) Update() *NotifierUpdate {
mutation := newNotifierMutation(c.config, OpUpdate)
return &NotifierUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// UpdateOne returns an update builder for the given entity.
func (c *NotifierClient) UpdateOne(n *Notifier) *NotifierUpdateOne {
mutation := newNotifierMutation(c.config, OpUpdateOne, withNotifier(n))
return &NotifierUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// UpdateOneID returns an update builder for the given id.
func (c *NotifierClient) UpdateOneID(id uuid.UUID) *NotifierUpdateOne {
mutation := newNotifierMutation(c.config, OpUpdateOne, withNotifierID(id))
return &NotifierUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// Delete returns a delete builder for Notifier.
func (c *NotifierClient) Delete() *NotifierDelete {
mutation := newNotifierMutation(c.config, OpDelete)
return &NotifierDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
}
// DeleteOne returns a builder for deleting the given entity.
func (c *NotifierClient) DeleteOne(n *Notifier) *NotifierDeleteOne {
return c.DeleteOneID(n.ID)
}
// DeleteOneID returns a builder for deleting the given entity by its id.
func (c *NotifierClient) DeleteOneID(id uuid.UUID) *NotifierDeleteOne {
builder := c.Delete().Where(notifier.ID(id))
builder.mutation.id = &id
builder.mutation.op = OpDeleteOne
return &NotifierDeleteOne{builder}
}
// Query returns a query builder for Notifier.
func (c *NotifierClient) Query() *NotifierQuery {
return &NotifierQuery{
config: c.config,
ctx: &QueryContext{Type: TypeNotifier},
inters: c.Interceptors(),
}
}
// Get returns a Notifier entity by its id.
func (c *NotifierClient) Get(ctx context.Context, id uuid.UUID) (*Notifier, error) {
return c.Query().Where(notifier.ID(id)).Only(ctx)
}
// GetX is like Get, but panics if an error occurs.
func (c *NotifierClient) GetX(ctx context.Context, id uuid.UUID) *Notifier {
obj, err := c.Get(ctx, id)
if err != nil {
panic(err)
}
return obj
}
// QueryGroup queries the group edge of a Notifier.
func (c *NotifierClient) QueryGroup(n *Notifier) *GroupQuery {
query := (&GroupClient{config: c.config}).Query()
query.path = func(context.Context) (fromV *sql.Selector, _ error) {
id := n.ID
step := sqlgraph.NewStep(
sqlgraph.From(notifier.Table, notifier.FieldID, id),
sqlgraph.To(group.Table, group.FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, notifier.GroupTable, notifier.GroupColumn),
)
fromV = sqlgraph.Neighbors(n.driver.Dialect(), step)
return fromV, nil
}
return query
}
// QueryUser queries the user edge of a Notifier.
func (c *NotifierClient) QueryUser(n *Notifier) *UserQuery {
query := (&UserClient{config: c.config}).Query()
query.path = func(context.Context) (fromV *sql.Selector, _ error) {
id := n.ID
step := sqlgraph.NewStep(
sqlgraph.From(notifier.Table, notifier.FieldID, id),
sqlgraph.To(user.Table, user.FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, notifier.UserTable, notifier.UserColumn),
)
fromV = sqlgraph.Neighbors(n.driver.Dialect(), step)
return fromV, nil
}
return query
}
// Hooks returns the client hooks.
func (c *NotifierClient) Hooks() []Hook {
return c.hooks.Notifier
}
// Interceptors returns the client interceptors.
func (c *NotifierClient) Interceptors() []Interceptor {
return c.inters.Notifier
}
func (c *NotifierClient) mutate(ctx context.Context, m *NotifierMutation) (Value, error) {
switch m.Op() {
case OpCreate:
return (&NotifierCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
case OpUpdate:
return (&NotifierUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
case OpUpdateOne:
return (&NotifierUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
case OpDelete, OpDeleteOne:
return (&NotifierDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
default:
return nil, fmt.Errorf("ent: unknown Notifier mutation op: %q", m.Op())
}
}
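For orientation, a minimal sketch of how this generated client is typically driven from application code. The SetName/SetURL/SetUserID/SetGroupID setters and the google/uuid import path are assumptions about the Notifier schema, which is not part of this diff:
// Sketch only; not part of the generated file.
package example
import (
	"context"
	"github.com/google/uuid" // assumed uuid package
	"github.com/hay-kot/homebox/backend/internal/data/ent"
)
func createAndFetchNotifier(ctx context.Context, client *ent.Client, userID, groupID uuid.UUID) (*ent.Notifier, error) {
	n, err := client.Notifier.Create().
		SetName("shoutrrr").          // assumed schema field
		SetURL("discord://token@id"). // assumed schema field
		SetUserID(userID).            // assumed edge setter
		SetGroupID(groupID).          // assumed edge setter
		Save(ctx)
	if err != nil {
		return nil, err
	}
	// Get is shorthand for Query().Where(notifier.ID(id)).Only(ctx), as generated above.
	return client.Notifier.Get(ctx, n.ID)
}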
// UserClient is a client for the User schema.
type UserClient struct {
config
@@ -2044,7 +2220,7 @@ func (c *UserClient) Use(hooks ...Hook) {
c.hooks.User = append(c.hooks.User, hooks...)
}
// Use adds a list of query interceptors to the interceptors stack.
// Intercept adds a list of query interceptors to the interceptors stack.
// A call to `Intercept(f, g, h)` is equivalent to `user.Intercept(f(g(h())))`.
func (c *UserClient) Intercept(interceptors ...Interceptor) {
c.inters.User = append(c.inters.User, interceptors...)
@@ -2153,6 +2329,22 @@ func (c *UserClient) QueryAuthTokens(u *User) *AuthTokensQuery {
return query
}
// QueryNotifiers queries the notifiers edge of a User.
func (c *UserClient) QueryNotifiers(u *User) *NotifierQuery {
query := (&NotifierClient{config: c.config}).Query()
query.path = func(context.Context) (fromV *sql.Selector, _ error) {
id := u.ID
step := sqlgraph.NewStep(
sqlgraph.From(user.Table, user.FieldID, id),
sqlgraph.To(notifier.Table, notifier.FieldID),
sqlgraph.Edge(sqlgraph.O2M, false, user.NotifiersTable, user.NotifiersColumn),
)
fromV = sqlgraph.Neighbors(u.driver.Dialect(), step)
return fromV, nil
}
return query
}
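A small sketch of the new edge query in use; it calls exactly the generated method above, so only the wrapping function is illustrative:
// Sketch only: QueryNotifiers builds a NotifierQuery rooted at the given user.
func notifiersForUser(ctx context.Context, client *ent.Client, u *ent.User) ([]*ent.Notifier, error) {
	return client.User.QueryNotifiers(u).All(ctx)
}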
// Hooks returns the client hooks.
func (c *UserClient) Hooks() []Hook {
return c.hooks.User

View File

@@ -38,6 +38,7 @@ type (
Label []ent.Hook
Location []ent.Hook
MaintenanceEntry []ent.Hook
Notifier []ent.Hook
User []ent.Hook
}
inters struct {
@@ -52,6 +53,7 @@ type (
Label []ent.Interceptor
Location []ent.Interceptor
MaintenanceEntry []ent.Interceptor
Notifier []ent.Interceptor
User []ent.Interceptor
}
)
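These new Notifier slices are where hooks and interceptors registered through the client are stored. A minimal sketch of registering a hook from consuming code, using the ent.Mutator/ent.MutateFunc aliases re-exported by the generated package (the standard library log import is assumed):
// Sketch only: logs every Notifier mutation before handing it to the next mutator.
func registerNotifierHook(client *ent.Client) {
	client.Notifier.Use(func(next ent.Mutator) ent.Mutator {
		return ent.MutateFunc(func(ctx context.Context, m ent.Mutation) (ent.Value, error) {
			log.Printf("notifier mutation: %s", m.Op())
			return next.Mutate(ctx, m)
		})
	})
}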

View File

@@ -185,9 +185,3 @@ func (d *Document) String() string {
// Documents is a parsable slice of Document.
type Documents []*Document
func (d Documents) config(cfg config) {
for _i := range d {
d[_i].config = cfg
}
}

View File

@@ -208,13 +208,7 @@ func (dc *DocumentCreate) sqlSave(ctx context.Context) (*Document, error) {
func (dc *DocumentCreate) createSpec() (*Document, *sqlgraph.CreateSpec) {
var (
_node = &Document{config: dc.config}
_spec = &sqlgraph.CreateSpec{
Table: document.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: document.FieldID,
},
}
_spec = sqlgraph.NewCreateSpec(document.Table, sqlgraph.NewFieldSpec(document.FieldID, field.TypeUUID))
)
if id, ok := dc.mutation.ID(); ok {
_node.ID = id

View File

@@ -40,15 +40,7 @@ func (dd *DocumentDelete) ExecX(ctx context.Context) int {
}
func (dd *DocumentDelete) sqlExec(ctx context.Context) (int, error) {
_spec := &sqlgraph.DeleteSpec{
Node: &sqlgraph.NodeSpec{
Table: document.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: document.FieldID,
},
},
}
_spec := sqlgraph.NewDeleteSpec(document.Table, sqlgraph.NewFieldSpec(document.FieldID, field.TypeUUID))
if ps := dd.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {

View File

@@ -228,10 +228,12 @@ func (dq *DocumentQuery) AllX(ctx context.Context) []*Document {
}
// IDs executes the query and returns a list of Document IDs.
func (dq *DocumentQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
var ids []uuid.UUID
func (dq *DocumentQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error) {
if dq.ctx.Unique == nil && dq.path != nil {
dq.Unique(true)
}
ctx = setContextOp(ctx, dq.ctx, "IDs")
if err := dq.Select(document.FieldID).Scan(ctx, &ids); err != nil {
if err = dq.Select(document.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
@@ -526,20 +528,12 @@ func (dq *DocumentQuery) sqlCount(ctx context.Context) (int, error) {
}
func (dq *DocumentQuery) querySpec() *sqlgraph.QuerySpec {
_spec := &sqlgraph.QuerySpec{
Node: &sqlgraph.NodeSpec{
Table: document.Table,
Columns: document.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: document.FieldID,
},
},
From: dq.sql,
Unique: true,
}
_spec := sqlgraph.NewQuerySpec(document.Table, document.Columns, sqlgraph.NewFieldSpec(document.FieldID, field.TypeUUID))
_spec.From = dq.sql
if unique := dq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if dq.path != nil {
_spec.Unique = true
}
if fields := dq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))

View File

@@ -165,16 +165,7 @@ func (du *DocumentUpdate) sqlSave(ctx context.Context) (n int, err error) {
if err := du.check(); err != nil {
return n, err
}
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: document.Table,
Columns: document.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: document.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(document.Table, document.Columns, sqlgraph.NewFieldSpec(document.FieldID, field.TypeUUID))
if ps := du.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
@@ -376,6 +367,12 @@ func (duo *DocumentUpdateOne) RemoveAttachments(a ...*Attachment) *DocumentUpdat
return duo.RemoveAttachmentIDs(ids...)
}
// Where appends a list of predicates to the DocumentUpdateOne builder.
func (duo *DocumentUpdateOne) Where(ps ...predicate.Document) *DocumentUpdateOne {
duo.mutation.Where(ps...)
return duo
}
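Where on the UpdateOne builders is new in this ent version; a sketch of using it to guard a single-row update (document.Title and SetTitle are assumed generated members, shown purely for illustration):
// Sketch only: the update is applied only when the extra predicate matches;
// otherwise Save returns a NotFoundError for the Document.
func renameDocument(ctx context.Context, client *ent.Client, id uuid.UUID) (*ent.Document, error) {
	return client.Document.UpdateOneID(id).
		Where(document.Title("old name")). // assumed predicate
		SetTitle("new name").              // assumed field setter
		Save(ctx)
}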
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (duo *DocumentUpdateOne) Select(field string, fields ...string) *DocumentUpdateOne {
@@ -441,16 +438,7 @@ func (duo *DocumentUpdateOne) sqlSave(ctx context.Context) (_node *Document, err
if err := duo.check(); err != nil {
return _node, err
}
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: document.Table,
Columns: document.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: document.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(document.Table, document.Columns, sqlgraph.NewFieldSpec(document.FieldID, field.TypeUUID))
id, ok := duo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Document.id" for update`)}

View File

@@ -22,6 +22,7 @@ import (
"github.com/hay-kot/homebox/backend/internal/data/ent/label"
"github.com/hay-kot/homebox/backend/internal/data/ent/location"
"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
"github.com/hay-kot/homebox/backend/internal/data/ent/notifier"
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)
@@ -61,6 +62,7 @@ func columnChecker(table string) func(string) error {
label.Table: label.ValidColumn,
location.Table: location.ValidColumn,
maintenanceentry.Table: maintenanceentry.ValidColumn,
notifier.Table: notifier.ValidColumn,
user.Table: user.ValidColumn,
}
check, ok := checks[table]

View File

@@ -44,9 +44,11 @@ type GroupEdges struct {
Documents []*Document `json:"documents,omitempty"`
// InvitationTokens holds the value of the invitation_tokens edge.
InvitationTokens []*GroupInvitationToken `json:"invitation_tokens,omitempty"`
// Notifiers holds the value of the notifiers edge.
Notifiers []*Notifier `json:"notifiers,omitempty"`
// loadedTypes holds the information for reporting if a
// type was loaded (or requested) in eager-loading or not.
loadedTypes [6]bool
loadedTypes [7]bool
}
// UsersOrErr returns the Users value or an error if the edge
@@ -103,6 +105,15 @@ func (e GroupEdges) InvitationTokensOrErr() ([]*GroupInvitationToken, error) {
return nil, &NotLoadedError{edge: "invitation_tokens"}
}
// NotifiersOrErr returns the Notifiers value or an error if the edge
// was not loaded in eager-loading.
func (e GroupEdges) NotifiersOrErr() ([]*Notifier, error) {
if e.loadedTypes[6] {
return e.Notifiers, nil
}
return nil, &NotLoadedError{edge: "notifiers"}
}
// scanValues returns the types for scanning values from sql.Rows.
func (*Group) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
@@ -194,6 +205,11 @@ func (gr *Group) QueryInvitationTokens() *GroupInvitationTokenQuery {
return NewGroupClient(gr.config).QueryInvitationTokens(gr)
}
// QueryNotifiers queries the "notifiers" edge of the Group entity.
func (gr *Group) QueryNotifiers() *NotifierQuery {
return NewGroupClient(gr.config).QueryNotifiers(gr)
}
// Update returns a builder for updating this Group.
// Note that you need to call Group.Unwrap() before calling this method if this Group
// was returned from a transaction, and the transaction was committed or rolled back.
@@ -234,9 +250,3 @@ func (gr *Group) String() string {
// Groups is a parsable slice of Group.
type Groups []*Group
func (gr Groups) config(cfg config) {
for _i := range gr {
gr[_i].config = cfg
}
}

View File

@@ -34,6 +34,8 @@ const (
EdgeDocuments = "documents"
// EdgeInvitationTokens holds the string denoting the invitation_tokens edge name in mutations.
EdgeInvitationTokens = "invitation_tokens"
// EdgeNotifiers holds the string denoting the notifiers edge name in mutations.
EdgeNotifiers = "notifiers"
// Table holds the table name of the group in the database.
Table = "groups"
// UsersTable is the table that holds the users relation/edge.
@@ -78,6 +80,13 @@ const (
InvitationTokensInverseTable = "group_invitation_tokens"
// InvitationTokensColumn is the table column denoting the invitation_tokens relation/edge.
InvitationTokensColumn = "group_invitation_tokens"
// NotifiersTable is the table that holds the notifiers relation/edge.
NotifiersTable = "notifiers"
// NotifiersInverseTable is the table name for the Notifier entity.
// It exists in this package in order to avoid circular dependency with the "notifier" package.
NotifiersInverseTable = "notifiers"
// NotifiersColumn is the table column denoting the notifiers relation/edge.
NotifiersColumn = "group_id"
)
// Columns holds all SQL columns for group fields.
@@ -132,6 +141,10 @@ const (
CurrencyInr Currency = "inr"
CurrencyRmb Currency = "rmb"
CurrencyBgn Currency = "bgn"
CurrencyChf Currency = "chf"
CurrencyPln Currency = "pln"
CurrencyTry Currency = "try"
CurrencyRon Currency = "ron"
)
func (c Currency) String() string {
@@ -141,7 +154,7 @@ func (c Currency) String() string {
// CurrencyValidator is a validator for the "currency" field enum values. It is called by the builders before save.
func CurrencyValidator(c Currency) error {
switch c {
case CurrencyUsd, CurrencyEur, CurrencyGbp, CurrencyJpy, CurrencyZar, CurrencyAud, CurrencyNok, CurrencySek, CurrencyDkk, CurrencyInr, CurrencyRmb, CurrencyBgn:
case CurrencyUsd, CurrencyEur, CurrencyGbp, CurrencyJpy, CurrencyZar, CurrencyAud, CurrencyNok, CurrencySek, CurrencyDkk, CurrencyInr, CurrencyRmb, CurrencyBgn, CurrencyChf, CurrencyPln, CurrencyTry, CurrencyRon:
return nil
default:
return fmt.Errorf("group: invalid enum value for currency field: %q", c)

View File

@@ -398,6 +398,33 @@ func HasInvitationTokensWith(preds ...predicate.GroupInvitationToken) predicate.
})
}
// HasNotifiers applies the HasEdge predicate on the "notifiers" edge.
func HasNotifiers() predicate.Group {
return predicate.Group(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.Edge(sqlgraph.O2M, false, NotifiersTable, NotifiersColumn),
)
sqlgraph.HasNeighbors(s, step)
})
}
// HasNotifiersWith applies the HasEdge predicate on the "notifiers" edge with given conditions (other predicates).
func HasNotifiersWith(preds ...predicate.Notifier) predicate.Group {
return predicate.Group(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(NotifiersInverseTable, FieldID),
sqlgraph.Edge(sqlgraph.O2M, false, NotifiersTable, NotifiersColumn),
)
sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
for _, p := range preds {
p(s)
}
})
})
}
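A sketch of the new predicates in a query; notifier.Name is an assumed generated predicate used only for illustration:
// Sketch only: groups whose notifiers match an extra condition.
// group.HasNotifiers() alone matches any group with at least one notifier.
func groupsWithDiscordNotifiers(ctx context.Context, client *ent.Client) ([]*ent.Group, error) {
	return client.Group.Query().
		Where(group.HasNotifiersWith(notifier.Name("discord"))). // assumed predicate
		All(ctx)
}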
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Group) predicate.Group {
return predicate.Group(func(s *sql.Selector) {

View File

@@ -17,6 +17,7 @@ import (
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
"github.com/hay-kot/homebox/backend/internal/data/ent/label"
"github.com/hay-kot/homebox/backend/internal/data/ent/location"
"github.com/hay-kot/homebox/backend/internal/data/ent/notifier"
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)
@@ -179,6 +180,21 @@ func (gc *GroupCreate) AddInvitationTokens(g ...*GroupInvitationToken) *GroupCre
return gc.AddInvitationTokenIDs(ids...)
}
// AddNotifierIDs adds the "notifiers" edge to the Notifier entity by IDs.
func (gc *GroupCreate) AddNotifierIDs(ids ...uuid.UUID) *GroupCreate {
gc.mutation.AddNotifierIDs(ids...)
return gc
}
// AddNotifiers adds the "notifiers" edges to the Notifier entity.
func (gc *GroupCreate) AddNotifiers(n ...*Notifier) *GroupCreate {
ids := make([]uuid.UUID, len(n))
for i := range n {
ids[i] = n[i].ID
}
return gc.AddNotifierIDs(ids...)
}
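A sketch of attaching notifiers while creating a group; SetName is an assumed Group field setter:
// Sketch only.
func createGroupWithNotifiers(ctx context.Context, client *ent.Client, ns ...*ent.Notifier) (*ent.Group, error) {
	return client.Group.Create().
		SetName("household"). // assumed schema field
		AddNotifiers(ns...).
		Save(ctx)
}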
// Mutation returns the GroupMutation object of the builder.
func (gc *GroupCreate) Mutation() *GroupMutation {
return gc.mutation
@@ -285,13 +301,7 @@ func (gc *GroupCreate) sqlSave(ctx context.Context) (*Group, error) {
func (gc *GroupCreate) createSpec() (*Group, *sqlgraph.CreateSpec) {
var (
_node = &Group{config: gc.config}
_spec = &sqlgraph.CreateSpec{
Table: group.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
}
_spec = sqlgraph.NewCreateSpec(group.Table, sqlgraph.NewFieldSpec(group.FieldID, field.TypeUUID))
)
if id, ok := gc.mutation.ID(); ok {
_node.ID = id
@@ -427,6 +437,25 @@ func (gc *GroupCreate) createSpec() (*Group, *sqlgraph.CreateSpec) {
}
_spec.Edges = append(_spec.Edges, edge)
}
if nodes := gc.mutation.NotifiersIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,
Inverse: false,
Table: group.NotifiersTable,
Columns: []string{group.NotifiersColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: notifier.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges = append(_spec.Edges, edge)
}
return _node, _spec
}

View File

@@ -40,15 +40,7 @@ func (gd *GroupDelete) ExecX(ctx context.Context) int {
}
func (gd *GroupDelete) sqlExec(ctx context.Context) (int, error) {
_spec := &sqlgraph.DeleteSpec{
Node: &sqlgraph.NodeSpec{
Table: group.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
_spec := sqlgraph.NewDeleteSpec(group.Table, sqlgraph.NewFieldSpec(group.FieldID, field.TypeUUID))
if ps := gd.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {

View File

@@ -18,6 +18,7 @@ import (
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
"github.com/hay-kot/homebox/backend/internal/data/ent/label"
"github.com/hay-kot/homebox/backend/internal/data/ent/location"
"github.com/hay-kot/homebox/backend/internal/data/ent/notifier"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)
@@ -35,6 +36,7 @@ type GroupQuery struct {
withLabels *LabelQuery
withDocuments *DocumentQuery
withInvitationTokens *GroupInvitationTokenQuery
withNotifiers *NotifierQuery
// intermediate query (i.e. traversal path).
sql *sql.Selector
path func(context.Context) (*sql.Selector, error)
@@ -203,6 +205,28 @@ func (gq *GroupQuery) QueryInvitationTokens() *GroupInvitationTokenQuery {
return query
}
// QueryNotifiers chains the current query on the "notifiers" edge.
func (gq *GroupQuery) QueryNotifiers() *NotifierQuery {
query := (&NotifierClient{config: gq.config}).Query()
query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
if err := gq.prepareQuery(ctx); err != nil {
return nil, err
}
selector := gq.sqlQuery(ctx)
if err := selector.Err(); err != nil {
return nil, err
}
step := sqlgraph.NewStep(
sqlgraph.From(group.Table, group.FieldID, selector),
sqlgraph.To(notifier.Table, notifier.FieldID),
sqlgraph.Edge(sqlgraph.O2M, false, group.NotifiersTable, group.NotifiersColumn),
)
fromU = sqlgraph.SetNeighbors(gq.driver.Dialect(), step)
return fromU, nil
}
return query
}
// First returns the first Group entity from the query.
// Returns a *NotFoundError when no Group was found.
func (gq *GroupQuery) First(ctx context.Context) (*Group, error) {
@@ -323,10 +347,12 @@ func (gq *GroupQuery) AllX(ctx context.Context) []*Group {
}
// IDs executes the query and returns a list of Group IDs.
func (gq *GroupQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
var ids []uuid.UUID
func (gq *GroupQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error) {
if gq.ctx.Unique == nil && gq.path != nil {
gq.Unique(true)
}
ctx = setContextOp(ctx, gq.ctx, "IDs")
if err := gq.Select(group.FieldID).Scan(ctx, &ids); err != nil {
if err = gq.Select(group.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
@@ -399,6 +425,7 @@ func (gq *GroupQuery) Clone() *GroupQuery {
withLabels: gq.withLabels.Clone(),
withDocuments: gq.withDocuments.Clone(),
withInvitationTokens: gq.withInvitationTokens.Clone(),
withNotifiers: gq.withNotifiers.Clone(),
// clone intermediate query.
sql: gq.sql.Clone(),
path: gq.path,
@@ -471,6 +498,17 @@ func (gq *GroupQuery) WithInvitationTokens(opts ...func(*GroupInvitationTokenQue
return gq
}
// WithNotifiers tells the query-builder to eager-load the nodes that are connected to
// the "notifiers" edge. The optional arguments are used to configure the query builder of the edge.
func (gq *GroupQuery) WithNotifiers(opts ...func(*NotifierQuery)) *GroupQuery {
query := (&NotifierClient{config: gq.config}).Query()
for _, opt := range opts {
opt(query)
}
gq.withNotifiers = query
return gq
}
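A sketch of eager-loading the new edge and reading it back through the generated Edges accessor:
// Sketch only.
func groupNotifiers(ctx context.Context, client *ent.Client, id uuid.UUID) ([]*ent.Notifier, error) {
	g, err := client.Group.Query().
		Where(group.ID(id)).
		WithNotifiers(). // populates g.Edges.Notifiers in a follow-up query
		Only(ctx)
	if err != nil {
		return nil, err
	}
	// NotifiersOrErr returns a NotLoadedError if WithNotifiers was not requested.
	return g.Edges.NotifiersOrErr()
}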
// GroupBy is used to group vertices by one or more fields/columns.
// It is often used with aggregate functions, like: count, max, mean, min, sum.
//
@@ -549,13 +587,14 @@ func (gq *GroupQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Group,
var (
nodes = []*Group{}
_spec = gq.querySpec()
loadedTypes = [6]bool{
loadedTypes = [7]bool{
gq.withUsers != nil,
gq.withLocations != nil,
gq.withItems != nil,
gq.withLabels != nil,
gq.withDocuments != nil,
gq.withInvitationTokens != nil,
gq.withNotifiers != nil,
}
)
_spec.ScanValues = func(columns []string) ([]any, error) {
@@ -620,6 +659,13 @@ func (gq *GroupQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Group,
return nil, err
}
}
if query := gq.withNotifiers; query != nil {
if err := gq.loadNotifiers(ctx, query, nodes,
func(n *Group) { n.Edges.Notifiers = []*Notifier{} },
func(n *Group, e *Notifier) { n.Edges.Notifiers = append(n.Edges.Notifiers, e) }); err != nil {
return nil, err
}
}
return nodes, nil
}
@@ -809,6 +855,33 @@ func (gq *GroupQuery) loadInvitationTokens(ctx context.Context, query *GroupInvi
}
return nil
}
func (gq *GroupQuery) loadNotifiers(ctx context.Context, query *NotifierQuery, nodes []*Group, init func(*Group), assign func(*Group, *Notifier)) error {
fks := make([]driver.Value, 0, len(nodes))
nodeids := make(map[uuid.UUID]*Group)
for i := range nodes {
fks = append(fks, nodes[i].ID)
nodeids[nodes[i].ID] = nodes[i]
if init != nil {
init(nodes[i])
}
}
query.Where(predicate.Notifier(func(s *sql.Selector) {
s.Where(sql.InValues(group.NotifiersColumn, fks...))
}))
neighbors, err := query.All(ctx)
if err != nil {
return err
}
for _, n := range neighbors {
fk := n.GroupID
node, ok := nodeids[fk]
if !ok {
return fmt.Errorf(`unexpected foreign-key "group_id" returned %v for node %v`, fk, n.ID)
}
assign(node, n)
}
return nil
}
func (gq *GroupQuery) sqlCount(ctx context.Context) (int, error) {
_spec := gq.querySpec()
@@ -820,20 +893,12 @@ func (gq *GroupQuery) sqlCount(ctx context.Context) (int, error) {
}
func (gq *GroupQuery) querySpec() *sqlgraph.QuerySpec {
_spec := &sqlgraph.QuerySpec{
Node: &sqlgraph.NodeSpec{
Table: group.Table,
Columns: group.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
From: gq.sql,
Unique: true,
}
_spec := sqlgraph.NewQuerySpec(group.Table, group.Columns, sqlgraph.NewFieldSpec(group.FieldID, field.TypeUUID))
_spec.From = gq.sql
if unique := gq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if gq.path != nil {
_spec.Unique = true
}
if fields := gq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))

View File

@@ -18,6 +18,7 @@ import (
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
"github.com/hay-kot/homebox/backend/internal/data/ent/label"
"github.com/hay-kot/homebox/backend/internal/data/ent/location"
"github.com/hay-kot/homebox/backend/internal/data/ent/notifier"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)
@@ -151,6 +152,21 @@ func (gu *GroupUpdate) AddInvitationTokens(g ...*GroupInvitationToken) *GroupUpd
return gu.AddInvitationTokenIDs(ids...)
}
// AddNotifierIDs adds the "notifiers" edge to the Notifier entity by IDs.
func (gu *GroupUpdate) AddNotifierIDs(ids ...uuid.UUID) *GroupUpdate {
gu.mutation.AddNotifierIDs(ids...)
return gu
}
// AddNotifiers adds the "notifiers" edges to the Notifier entity.
func (gu *GroupUpdate) AddNotifiers(n ...*Notifier) *GroupUpdate {
ids := make([]uuid.UUID, len(n))
for i := range n {
ids[i] = n[i].ID
}
return gu.AddNotifierIDs(ids...)
}
// Mutation returns the GroupMutation object of the builder.
func (gu *GroupUpdate) Mutation() *GroupMutation {
return gu.mutation
@@ -282,6 +298,27 @@ func (gu *GroupUpdate) RemoveInvitationTokens(g ...*GroupInvitationToken) *Group
return gu.RemoveInvitationTokenIDs(ids...)
}
// ClearNotifiers clears all "notifiers" edges to the Notifier entity.
func (gu *GroupUpdate) ClearNotifiers() *GroupUpdate {
gu.mutation.ClearNotifiers()
return gu
}
// RemoveNotifierIDs removes the "notifiers" edge to Notifier entities by IDs.
func (gu *GroupUpdate) RemoveNotifierIDs(ids ...uuid.UUID) *GroupUpdate {
gu.mutation.RemoveNotifierIDs(ids...)
return gu
}
// RemoveNotifiers removes "notifiers" edges to Notifier entities.
func (gu *GroupUpdate) RemoveNotifiers(n ...*Notifier) *GroupUpdate {
ids := make([]uuid.UUID, len(n))
for i := range n {
ids[i] = n[i].ID
}
return gu.RemoveNotifierIDs(ids...)
}
// Save executes the query and returns the number of nodes affected by the update operation.
func (gu *GroupUpdate) Save(ctx context.Context) (int, error) {
gu.defaults()
@@ -337,16 +374,7 @@ func (gu *GroupUpdate) sqlSave(ctx context.Context) (n int, err error) {
if err := gu.check(); err != nil {
return n, err
}
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: group.Table,
Columns: group.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(group.Table, group.Columns, sqlgraph.NewFieldSpec(group.FieldID, field.TypeUUID))
if ps := gu.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
@@ -687,6 +715,60 @@ func (gu *GroupUpdate) sqlSave(ctx context.Context) (n int, err error) {
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if gu.mutation.NotifiersCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,
Inverse: false,
Table: group.NotifiersTable,
Columns: []string{group.NotifiersColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: notifier.FieldID,
},
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := gu.mutation.RemovedNotifiersIDs(); len(nodes) > 0 && !gu.mutation.NotifiersCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,
Inverse: false,
Table: group.NotifiersTable,
Columns: []string{group.NotifiersColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: notifier.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := gu.mutation.NotifiersIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,
Inverse: false,
Table: group.NotifiersTable,
Columns: []string{group.NotifiersColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: notifier.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if n, err = sqlgraph.UpdateNodes(ctx, gu.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{group.Label}
@@ -823,6 +905,21 @@ func (guo *GroupUpdateOne) AddInvitationTokens(g ...*GroupInvitationToken) *Grou
return guo.AddInvitationTokenIDs(ids...)
}
// AddNotifierIDs adds the "notifiers" edge to the Notifier entity by IDs.
func (guo *GroupUpdateOne) AddNotifierIDs(ids ...uuid.UUID) *GroupUpdateOne {
guo.mutation.AddNotifierIDs(ids...)
return guo
}
// AddNotifiers adds the "notifiers" edges to the Notifier entity.
func (guo *GroupUpdateOne) AddNotifiers(n ...*Notifier) *GroupUpdateOne {
ids := make([]uuid.UUID, len(n))
for i := range n {
ids[i] = n[i].ID
}
return guo.AddNotifierIDs(ids...)
}
// Mutation returns the GroupMutation object of the builder.
func (guo *GroupUpdateOne) Mutation() *GroupMutation {
return guo.mutation
@@ -954,6 +1051,33 @@ func (guo *GroupUpdateOne) RemoveInvitationTokens(g ...*GroupInvitationToken) *G
return guo.RemoveInvitationTokenIDs(ids...)
}
// ClearNotifiers clears all "notifiers" edges to the Notifier entity.
func (guo *GroupUpdateOne) ClearNotifiers() *GroupUpdateOne {
guo.mutation.ClearNotifiers()
return guo
}
// RemoveNotifierIDs removes the "notifiers" edge to Notifier entities by IDs.
func (guo *GroupUpdateOne) RemoveNotifierIDs(ids ...uuid.UUID) *GroupUpdateOne {
guo.mutation.RemoveNotifierIDs(ids...)
return guo
}
// RemoveNotifiers removes "notifiers" edges to Notifier entities.
func (guo *GroupUpdateOne) RemoveNotifiers(n ...*Notifier) *GroupUpdateOne {
ids := make([]uuid.UUID, len(n))
for i := range n {
ids[i] = n[i].ID
}
return guo.RemoveNotifierIDs(ids...)
}
// Where appends a list of predicates to the GroupUpdateOne builder.
func (guo *GroupUpdateOne) Where(ps ...predicate.Group) *GroupUpdateOne {
guo.mutation.Where(ps...)
return guo
}
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (guo *GroupUpdateOne) Select(field string, fields ...string) *GroupUpdateOne {
@@ -1016,16 +1140,7 @@ func (guo *GroupUpdateOne) sqlSave(ctx context.Context) (_node *Group, err error
if err := guo.check(); err != nil {
return _node, err
}
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: group.Table,
Columns: group.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(group.Table, group.Columns, sqlgraph.NewFieldSpec(group.FieldID, field.TypeUUID))
id, ok := guo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Group.id" for update`)}
@@ -1383,6 +1498,60 @@ func (guo *GroupUpdateOne) sqlSave(ctx context.Context) (_node *Group, err error
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if guo.mutation.NotifiersCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,
Inverse: false,
Table: group.NotifiersTable,
Columns: []string{group.NotifiersColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: notifier.FieldID,
},
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := guo.mutation.RemovedNotifiersIDs(); len(nodes) > 0 && !guo.mutation.NotifiersCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,
Inverse: false,
Table: group.NotifiersTable,
Columns: []string{group.NotifiersColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: notifier.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := guo.mutation.NotifiersIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2M,
Inverse: false,
Table: group.NotifiersTable,
Columns: []string{group.NotifiersColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: notifier.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
_node = &Group{config: guo.config}
_spec.Assign = _node.assignValues
_spec.ScanValues = _node.scanValues

View File

@@ -182,9 +182,3 @@ func (git *GroupInvitationToken) String() string {
// GroupInvitationTokens is a parsable slice of GroupInvitationToken.
type GroupInvitationTokens []*GroupInvitationToken
func (git GroupInvitationTokens) config(cfg config) {
for _i := range git {
git[_i].config = cfg
}
}

View File

@@ -220,13 +220,7 @@ func (gitc *GroupInvitationTokenCreate) sqlSave(ctx context.Context) (*GroupInvi
func (gitc *GroupInvitationTokenCreate) createSpec() (*GroupInvitationToken, *sqlgraph.CreateSpec) {
var (
_node = &GroupInvitationToken{config: gitc.config}
_spec = &sqlgraph.CreateSpec{
Table: groupinvitationtoken.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: groupinvitationtoken.FieldID,
},
}
_spec = sqlgraph.NewCreateSpec(groupinvitationtoken.Table, sqlgraph.NewFieldSpec(groupinvitationtoken.FieldID, field.TypeUUID))
)
if id, ok := gitc.mutation.ID(); ok {
_node.ID = id

View File

@@ -40,15 +40,7 @@ func (gitd *GroupInvitationTokenDelete) ExecX(ctx context.Context) int {
}
func (gitd *GroupInvitationTokenDelete) sqlExec(ctx context.Context) (int, error) {
_spec := &sqlgraph.DeleteSpec{
Node: &sqlgraph.NodeSpec{
Table: groupinvitationtoken.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: groupinvitationtoken.FieldID,
},
},
}
_spec := sqlgraph.NewDeleteSpec(groupinvitationtoken.Table, sqlgraph.NewFieldSpec(groupinvitationtoken.FieldID, field.TypeUUID))
if ps := gitd.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {

View File

@@ -203,10 +203,12 @@ func (gitq *GroupInvitationTokenQuery) AllX(ctx context.Context) []*GroupInvitat
}
// IDs executes the query and returns a list of GroupInvitationToken IDs.
func (gitq *GroupInvitationTokenQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
var ids []uuid.UUID
func (gitq *GroupInvitationTokenQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error) {
if gitq.ctx.Unique == nil && gitq.path != nil {
gitq.Unique(true)
}
ctx = setContextOp(ctx, gitq.ctx, "IDs")
if err := gitq.Select(groupinvitationtoken.FieldID).Scan(ctx, &ids); err != nil {
if err = gitq.Select(groupinvitationtoken.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
@@ -450,20 +452,12 @@ func (gitq *GroupInvitationTokenQuery) sqlCount(ctx context.Context) (int, error
}
func (gitq *GroupInvitationTokenQuery) querySpec() *sqlgraph.QuerySpec {
_spec := &sqlgraph.QuerySpec{
Node: &sqlgraph.NodeSpec{
Table: groupinvitationtoken.Table,
Columns: groupinvitationtoken.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: groupinvitationtoken.FieldID,
},
},
From: gitq.sql,
Unique: true,
}
_spec := sqlgraph.NewQuerySpec(groupinvitationtoken.Table, groupinvitationtoken.Columns, sqlgraph.NewFieldSpec(groupinvitationtoken.FieldID, field.TypeUUID))
_spec.From = gitq.sql
if unique := gitq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if gitq.path != nil {
_spec.Unique = true
}
if fields := gitq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))

View File

@@ -144,16 +144,7 @@ func (gitu *GroupInvitationTokenUpdate) defaults() {
}
func (gitu *GroupInvitationTokenUpdate) sqlSave(ctx context.Context) (n int, err error) {
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: groupinvitationtoken.Table,
Columns: groupinvitationtoken.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: groupinvitationtoken.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(groupinvitationtoken.Table, groupinvitationtoken.Columns, sqlgraph.NewFieldSpec(groupinvitationtoken.FieldID, field.TypeUUID))
if ps := gitu.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
@@ -308,6 +299,12 @@ func (gituo *GroupInvitationTokenUpdateOne) ClearGroup() *GroupInvitationTokenUp
return gituo
}
// Where appends a list of predicates to the GroupInvitationTokenUpdateOne builder.
func (gituo *GroupInvitationTokenUpdateOne) Where(ps ...predicate.GroupInvitationToken) *GroupInvitationTokenUpdateOne {
gituo.mutation.Where(ps...)
return gituo
}
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (gituo *GroupInvitationTokenUpdateOne) Select(field string, fields ...string) *GroupInvitationTokenUpdateOne {
@@ -352,16 +349,7 @@ func (gituo *GroupInvitationTokenUpdateOne) defaults() {
}
func (gituo *GroupInvitationTokenUpdateOne) sqlSave(ctx context.Context) (_node *GroupInvitationToken, err error) {
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: groupinvitationtoken.Table,
Columns: groupinvitationtoken.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: groupinvitationtoken.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(groupinvitationtoken.Table, groupinvitationtoken.Columns, sqlgraph.NewFieldSpec(groupinvitationtoken.FieldID, field.TypeUUID))
id, ok := gituo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "GroupInvitationToken.id" for update`)}

View File

@@ -48,6 +48,10 @@ func (me *MaintenanceEntry) GetID() uuid.UUID {
return me.ID
}
func (n *Notifier) GetID() uuid.UUID {
return n.ID
}
func (u *User) GetID() uuid.UUID {
return u.ID
}

View File

@@ -141,6 +141,18 @@ func (f MaintenanceEntryFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.V
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.MaintenanceEntryMutation", m)
}
// The NotifierFunc type is an adapter to allow the use of an ordinary
// function as a Notifier mutator.
type NotifierFunc func(context.Context, *ent.NotifierMutation) (ent.Value, error)
// Mutate calls f(ctx, m).
func (f NotifierFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
if mv, ok := m.(*ent.NotifierMutation); ok {
return f(ctx, mv)
}
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.NotifierMutation", m)
}
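A sketch of the adapter in use inside a hook; the empty-fields check is only for illustration, and the fmt import is assumed:
// Sketch only.
func exampleNotifierHook() ent.Hook {
	return func(next ent.Mutator) ent.Mutator {
		return hook.NotifierFunc(func(ctx context.Context, m *ent.NotifierMutation) (ent.Value, error) {
			if len(m.Fields()) == 0 {
				return nil, fmt.Errorf("notifier: refusing empty mutation")
			}
			return next.Mutate(ctx, m)
		})
	}
}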
// The UserFunc type is an adapter to allow the use of an ordinary
// function as a User mutator.
type UserFunc func(context.Context, *ent.UserMutation) (ent.Value, error)

View File

@@ -75,12 +75,12 @@ type Item struct {
// ItemEdges holds the relations/edges for other nodes in the graph.
type ItemEdges struct {
// Group holds the value of the group edge.
Group *Group `json:"group,omitempty"`
// Parent holds the value of the parent edge.
Parent *Item `json:"parent,omitempty"`
// Children holds the value of the children edge.
Children []*Item `json:"children,omitempty"`
// Group holds the value of the group edge.
Group *Group `json:"group,omitempty"`
// Label holds the value of the label edge.
Label []*Label `json:"label,omitempty"`
// Location holds the value of the location edge.
@@ -96,10 +96,23 @@ type ItemEdges struct {
loadedTypes [8]bool
}
// GroupOrErr returns the Group value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e ItemEdges) GroupOrErr() (*Group, error) {
if e.loadedTypes[0] {
if e.Group == nil {
// Edge was loaded but was not found.
return nil, &NotFoundError{label: group.Label}
}
return e.Group, nil
}
return nil, &NotLoadedError{edge: "group"}
}
// ParentOrErr returns the Parent value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e ItemEdges) ParentOrErr() (*Item, error) {
if e.loadedTypes[0] {
if e.loadedTypes[1] {
if e.Parent == nil {
// Edge was loaded but was not found.
return nil, &NotFoundError{label: item.Label}
@@ -112,25 +125,12 @@ func (e ItemEdges) ParentOrErr() (*Item, error) {
// ChildrenOrErr returns the Children value or an error if the edge
// was not loaded in eager-loading.
func (e ItemEdges) ChildrenOrErr() ([]*Item, error) {
if e.loadedTypes[1] {
if e.loadedTypes[2] {
return e.Children, nil
}
return nil, &NotLoadedError{edge: "children"}
}
// GroupOrErr returns the Group value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e ItemEdges) GroupOrErr() (*Group, error) {
if e.loadedTypes[2] {
if e.Group == nil {
// Edge was loaded but was not found.
return nil, &NotFoundError{label: group.Label}
}
return e.Group, nil
}
return nil, &NotLoadedError{edge: "group"}
}
// LabelOrErr returns the Label value or an error if the edge
// was not loaded in eager-loading.
func (e ItemEdges) LabelOrErr() ([]*Label, error) {
@@ -388,6 +388,11 @@ func (i *Item) assignValues(columns []string, values []any) error {
return nil
}
// QueryGroup queries the "group" edge of the Item entity.
func (i *Item) QueryGroup() *GroupQuery {
return NewItemClient(i.config).QueryGroup(i)
}
// QueryParent queries the "parent" edge of the Item entity.
func (i *Item) QueryParent() *ItemQuery {
return NewItemClient(i.config).QueryParent(i)
@@ -398,11 +403,6 @@ func (i *Item) QueryChildren() *ItemQuery {
return NewItemClient(i.config).QueryChildren(i)
}
// QueryGroup queries the "group" edge of the Item entity.
func (i *Item) QueryGroup() *GroupQuery {
return NewItemClient(i.config).QueryGroup(i)
}
// QueryLabel queries the "label" edge of the Item entity.
func (i *Item) QueryLabel() *LabelQuery {
return NewItemClient(i.config).QueryLabel(i)
@@ -525,9 +525,3 @@ func (i *Item) String() string {
// Items is a parsable slice of Item.
type Items []*Item
func (i Items) config(cfg config) {
for _i := range i {
i[_i].config = cfg
}
}

View File

@@ -59,12 +59,12 @@ const (
FieldSoldPrice = "sold_price"
// FieldSoldNotes holds the string denoting the sold_notes field in the database.
FieldSoldNotes = "sold_notes"
// EdgeGroup holds the string denoting the group edge name in mutations.
EdgeGroup = "group"
// EdgeParent holds the string denoting the parent edge name in mutations.
EdgeParent = "parent"
// EdgeChildren holds the string denoting the children edge name in mutations.
EdgeChildren = "children"
// EdgeGroup holds the string denoting the group edge name in mutations.
EdgeGroup = "group"
// EdgeLabel holds the string denoting the label edge name in mutations.
EdgeLabel = "label"
// EdgeLocation holds the string denoting the location edge name in mutations.
@@ -77,6 +77,13 @@ const (
EdgeAttachments = "attachments"
// Table holds the table name of the item in the database.
Table = "items"
// GroupTable is the table that holds the group relation/edge.
GroupTable = "items"
// GroupInverseTable is the table name for the Group entity.
// It exists in this package in order to avoid circular dependency with the "group" package.
GroupInverseTable = "groups"
// GroupColumn is the table column denoting the group relation/edge.
GroupColumn = "group_items"
// ParentTable is the table that holds the parent relation/edge.
ParentTable = "items"
// ParentColumn is the table column denoting the parent relation/edge.
@@ -85,13 +92,6 @@ const (
ChildrenTable = "items"
// ChildrenColumn is the table column denoting the children relation/edge.
ChildrenColumn = "item_children"
// GroupTable is the table that holds the group relation/edge.
GroupTable = "items"
// GroupInverseTable is the table name for the Group entity.
// It exists in this package in order to avoid circular dependency with the "group" package.
GroupInverseTable = "groups"
// GroupColumn is the table column denoting the group relation/edge.
GroupColumn = "group_items"
// LabelTable is the table that holds the label relation/edge. The primary key declared below.
LabelTable = "label_items"
// LabelInverseTable is the table name for the Label entity.

View File

@@ -1406,6 +1406,33 @@ func SoldNotesContainsFold(v string) predicate.Item {
return predicate.Item(sql.FieldContainsFold(FieldSoldNotes, v))
}
// HasGroup applies the HasEdge predicate on the "group" edge.
func HasGroup() predicate.Item {
return predicate.Item(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn),
)
sqlgraph.HasNeighbors(s, step)
})
}
// HasGroupWith applies the HasEdge predicate on the "group" edge with given conditions (other predicates).
func HasGroupWith(preds ...predicate.Group) predicate.Item {
return predicate.Item(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(GroupInverseTable, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn),
)
sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
for _, p := range preds {
p(s)
}
})
})
}
// HasParent applies the HasEdge predicate on the "parent" edge.
func HasParent() predicate.Item {
return predicate.Item(func(s *sql.Selector) {
@@ -1460,33 +1487,6 @@ func HasChildrenWith(preds ...predicate.Item) predicate.Item {
})
}
// HasGroup applies the HasEdge predicate on the "group" edge.
func HasGroup() predicate.Item {
return predicate.Item(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn),
)
sqlgraph.HasNeighbors(s, step)
})
}
// HasGroupWith applies the HasEdge predicate on the "group" edge with given conditions (other predicates).
func HasGroupWith(preds ...predicate.Group) predicate.Item {
return predicate.Item(func(s *sql.Selector) {
step := sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
sqlgraph.To(GroupInverseTable, FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, GroupTable, GroupColumn),
)
sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
for _, p := range preds {
p(s)
}
})
})
}
// HasLabel applies the HasEdge predicate on the "label" edge.
func HasLabel() predicate.Item {
return predicate.Item(func(s *sql.Selector) {

View File

@@ -355,6 +355,17 @@ func (ic *ItemCreate) SetNillableID(u *uuid.UUID) *ItemCreate {
return ic
}
// SetGroupID sets the "group" edge to the Group entity by ID.
func (ic *ItemCreate) SetGroupID(id uuid.UUID) *ItemCreate {
ic.mutation.SetGroupID(id)
return ic
}
// SetGroup sets the "group" edge to the Group entity.
func (ic *ItemCreate) SetGroup(g *Group) *ItemCreate {
return ic.SetGroupID(g.ID)
}
// SetParentID sets the "parent" edge to the Item entity by ID.
func (ic *ItemCreate) SetParentID(id uuid.UUID) *ItemCreate {
ic.mutation.SetParentID(id)
@@ -389,17 +400,6 @@ func (ic *ItemCreate) AddChildren(i ...*Item) *ItemCreate {
return ic.AddChildIDs(ids...)
}
// SetGroupID sets the "group" edge to the Group entity by ID.
func (ic *ItemCreate) SetGroupID(id uuid.UUID) *ItemCreate {
ic.mutation.SetGroupID(id)
return ic
}
// SetGroup sets the "group" edge to the Group entity.
func (ic *ItemCreate) SetGroup(g *Group) *ItemCreate {
return ic.SetGroupID(g.ID)
}
// AddLabelIDs adds the "label" edge to the Label entity by IDs.
func (ic *ItemCreate) AddLabelIDs(ids ...uuid.UUID) *ItemCreate {
ic.mutation.AddLabelIDs(ids...)
@@ -665,13 +665,7 @@ func (ic *ItemCreate) sqlSave(ctx context.Context) (*Item, error) {
func (ic *ItemCreate) createSpec() (*Item, *sqlgraph.CreateSpec) {
var (
_node = &Item{config: ic.config}
_spec = &sqlgraph.CreateSpec{
Table: item.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: item.FieldID,
},
}
_spec = sqlgraph.NewCreateSpec(item.Table, sqlgraph.NewFieldSpec(item.FieldID, field.TypeUUID))
)
if id, ok := ic.mutation.ID(); ok {
_node.ID = id
@@ -769,6 +763,26 @@ func (ic *ItemCreate) createSpec() (*Item, *sqlgraph.CreateSpec) {
_spec.SetField(item.FieldSoldNotes, field.TypeString, value)
_node.SoldNotes = value
}
if nodes := ic.mutation.GroupIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: item.GroupTable,
Columns: []string{item.GroupColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_node.group_items = &nodes[0]
_spec.Edges = append(_spec.Edges, edge)
}
if nodes := ic.mutation.ParentIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
@@ -808,26 +822,6 @@ func (ic *ItemCreate) createSpec() (*Item, *sqlgraph.CreateSpec) {
}
_spec.Edges = append(_spec.Edges, edge)
}
if nodes := ic.mutation.GroupIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: item.GroupTable,
Columns: []string{item.GroupColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_node.group_items = &nodes[0]
_spec.Edges = append(_spec.Edges, edge)
}
if nodes := ic.mutation.LabelIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2M,

View File

@@ -40,15 +40,7 @@ func (id *ItemDelete) ExecX(ctx context.Context) int {
}
func (id *ItemDelete) sqlExec(ctx context.Context) (int, error) {
_spec := &sqlgraph.DeleteSpec{
Node: &sqlgraph.NodeSpec{
Table: item.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: item.FieldID,
},
},
}
_spec := sqlgraph.NewDeleteSpec(item.Table, sqlgraph.NewFieldSpec(item.FieldID, field.TypeUUID))
if ps := id.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {

View File

@@ -29,9 +29,9 @@ type ItemQuery struct {
order []OrderFunc
inters []Interceptor
predicates []predicate.Item
withGroup *GroupQuery
withParent *ItemQuery
withChildren *ItemQuery
withGroup *GroupQuery
withLabel *LabelQuery
withLocation *LocationQuery
withFields *ItemFieldQuery
@@ -74,6 +74,28 @@ func (iq *ItemQuery) Order(o ...OrderFunc) *ItemQuery {
return iq
}
// QueryGroup chains the current query on the "group" edge.
func (iq *ItemQuery) QueryGroup() *GroupQuery {
query := (&GroupClient{config: iq.config}).Query()
query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
if err := iq.prepareQuery(ctx); err != nil {
return nil, err
}
selector := iq.sqlQuery(ctx)
if err := selector.Err(); err != nil {
return nil, err
}
step := sqlgraph.NewStep(
sqlgraph.From(item.Table, item.FieldID, selector),
sqlgraph.To(group.Table, group.FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, item.GroupTable, item.GroupColumn),
)
fromU = sqlgraph.SetNeighbors(iq.driver.Dialect(), step)
return fromU, nil
}
return query
}
// QueryParent chains the current query on the "parent" edge.
func (iq *ItemQuery) QueryParent() *ItemQuery {
query := (&ItemClient{config: iq.config}).Query()
@@ -118,28 +140,6 @@ func (iq *ItemQuery) QueryChildren() *ItemQuery {
return query
}
// QueryGroup chains the current query on the "group" edge.
func (iq *ItemQuery) QueryGroup() *GroupQuery {
query := (&GroupClient{config: iq.config}).Query()
query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
if err := iq.prepareQuery(ctx); err != nil {
return nil, err
}
selector := iq.sqlQuery(ctx)
if err := selector.Err(); err != nil {
return nil, err
}
step := sqlgraph.NewStep(
sqlgraph.From(item.Table, item.FieldID, selector),
sqlgraph.To(group.Table, group.FieldID),
sqlgraph.Edge(sqlgraph.M2O, true, item.GroupTable, item.GroupColumn),
)
fromU = sqlgraph.SetNeighbors(iq.driver.Dialect(), step)
return fromU, nil
}
return query
}
// QueryLabel chains the current query on the "label" edge.
func (iq *ItemQuery) QueryLabel() *LabelQuery {
query := (&LabelClient{config: iq.config}).Query()
@@ -370,10 +370,12 @@ func (iq *ItemQuery) AllX(ctx context.Context) []*Item {
}
// IDs executes the query and returns a list of Item IDs.
func (iq *ItemQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
var ids []uuid.UUID
func (iq *ItemQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error) {
if iq.ctx.Unique == nil && iq.path != nil {
iq.Unique(true)
}
ctx = setContextOp(ctx, iq.ctx, "IDs")
if err := iq.Select(item.FieldID).Scan(ctx, &ids); err != nil {
if err = iq.Select(item.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
@@ -440,9 +442,9 @@ func (iq *ItemQuery) Clone() *ItemQuery {
order: append([]OrderFunc{}, iq.order...),
inters: append([]Interceptor{}, iq.inters...),
predicates: append([]predicate.Item{}, iq.predicates...),
withGroup: iq.withGroup.Clone(),
withParent: iq.withParent.Clone(),
withChildren: iq.withChildren.Clone(),
withGroup: iq.withGroup.Clone(),
withLabel: iq.withLabel.Clone(),
withLocation: iq.withLocation.Clone(),
withFields: iq.withFields.Clone(),
@@ -454,6 +456,17 @@ func (iq *ItemQuery) Clone() *ItemQuery {
}
}
// WithGroup tells the query-builder to eager-load the nodes that are connected to
// the "group" edge. The optional arguments are used to configure the query builder of the edge.
func (iq *ItemQuery) WithGroup(opts ...func(*GroupQuery)) *ItemQuery {
query := (&GroupClient{config: iq.config}).Query()
for _, opt := range opts {
opt(query)
}
iq.withGroup = query
return iq
}
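Only the ordering of the generated members changed here; usage of the group edge is unchanged. A short sketch:
// Sketch only: eager-load the owning group for every item.
func itemsWithGroup(ctx context.Context, client *ent.Client) ([]*ent.Item, error) {
	return client.Item.Query().
		WithGroup(). // populates Edges.Group, readable via GroupOrErr()
		All(ctx)
}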
// WithParent tells the query-builder to eager-load the nodes that are connected to
// the "parent" edge. The optional arguments are used to configure the query builder of the edge.
func (iq *ItemQuery) WithParent(opts ...func(*ItemQuery)) *ItemQuery {
@@ -476,17 +489,6 @@ func (iq *ItemQuery) WithChildren(opts ...func(*ItemQuery)) *ItemQuery {
return iq
}
// WithGroup tells the query-builder to eager-load the nodes that are connected to
// the "group" edge. The optional arguments are used to configure the query builder of the edge.
func (iq *ItemQuery) WithGroup(opts ...func(*GroupQuery)) *ItemQuery {
query := (&GroupClient{config: iq.config}).Query()
for _, opt := range opts {
opt(query)
}
iq.withGroup = query
return iq
}
// WithLabel tells the query-builder to eager-load the nodes that are connected to
// the "label" edge. The optional arguments are used to configure the query builder of the edge.
func (iq *ItemQuery) WithLabel(opts ...func(*LabelQuery)) *ItemQuery {
@@ -622,9 +624,9 @@ func (iq *ItemQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Item, e
withFKs = iq.withFKs
_spec = iq.querySpec()
loadedTypes = [8]bool{
iq.withGroup != nil,
iq.withParent != nil,
iq.withChildren != nil,
iq.withGroup != nil,
iq.withLabel != nil,
iq.withLocation != nil,
iq.withFields != nil,
@@ -632,7 +634,7 @@ func (iq *ItemQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Item, e
iq.withAttachments != nil,
}
)
if iq.withParent != nil || iq.withGroup != nil || iq.withLocation != nil {
if iq.withGroup != nil || iq.withParent != nil || iq.withLocation != nil {
withFKs = true
}
if withFKs {
@@ -656,6 +658,12 @@ func (iq *ItemQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Item, e
if len(nodes) == 0 {
return nodes, nil
}
if query := iq.withGroup; query != nil {
if err := iq.loadGroup(ctx, query, nodes, nil,
func(n *Item, e *Group) { n.Edges.Group = e }); err != nil {
return nil, err
}
}
if query := iq.withParent; query != nil {
if err := iq.loadParent(ctx, query, nodes, nil,
func(n *Item, e *Item) { n.Edges.Parent = e }); err != nil {
@@ -669,12 +677,6 @@ func (iq *ItemQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Item, e
return nil, err
}
}
if query := iq.withGroup; query != nil {
if err := iq.loadGroup(ctx, query, nodes, nil,
func(n *Item, e *Group) { n.Edges.Group = e }); err != nil {
return nil, err
}
}
if query := iq.withLabel; query != nil {
if err := iq.loadLabel(ctx, query, nodes,
func(n *Item) { n.Edges.Label = []*Label{} },
@@ -712,6 +714,38 @@ func (iq *ItemQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Item, e
return nodes, nil
}
func (iq *ItemQuery) loadGroup(ctx context.Context, query *GroupQuery, nodes []*Item, init func(*Item), assign func(*Item, *Group)) error {
ids := make([]uuid.UUID, 0, len(nodes))
nodeids := make(map[uuid.UUID][]*Item)
for i := range nodes {
if nodes[i].group_items == nil {
continue
}
fk := *nodes[i].group_items
if _, ok := nodeids[fk]; !ok {
ids = append(ids, fk)
}
nodeids[fk] = append(nodeids[fk], nodes[i])
}
if len(ids) == 0 {
return nil
}
query.Where(group.IDIn(ids...))
neighbors, err := query.All(ctx)
if err != nil {
return err
}
for _, n := range neighbors {
nodes, ok := nodeids[n.ID]
if !ok {
return fmt.Errorf(`unexpected foreign-key "group_items" returned %v`, n.ID)
}
for i := range nodes {
assign(nodes[i], n)
}
}
return nil
}
func (iq *ItemQuery) loadParent(ctx context.Context, query *ItemQuery, nodes []*Item, init func(*Item), assign func(*Item, *Item)) error {
ids := make([]uuid.UUID, 0, len(nodes))
nodeids := make(map[uuid.UUID][]*Item)
@@ -775,38 +809,6 @@ func (iq *ItemQuery) loadChildren(ctx context.Context, query *ItemQuery, nodes [
}
return nil
}
func (iq *ItemQuery) loadGroup(ctx context.Context, query *GroupQuery, nodes []*Item, init func(*Item), assign func(*Item, *Group)) error {
ids := make([]uuid.UUID, 0, len(nodes))
nodeids := make(map[uuid.UUID][]*Item)
for i := range nodes {
if nodes[i].group_items == nil {
continue
}
fk := *nodes[i].group_items
if _, ok := nodeids[fk]; !ok {
ids = append(ids, fk)
}
nodeids[fk] = append(nodeids[fk], nodes[i])
}
if len(ids) == 0 {
return nil
}
query.Where(group.IDIn(ids...))
neighbors, err := query.All(ctx)
if err != nil {
return err
}
for _, n := range neighbors {
nodes, ok := nodeids[n.ID]
if !ok {
return fmt.Errorf(`unexpected foreign-key "group_items" returned %v`, n.ID)
}
for i := range nodes {
assign(nodes[i], n)
}
}
return nil
}
func (iq *ItemQuery) loadLabel(ctx context.Context, query *LabelQuery, nodes []*Item, init func(*Item), assign func(*Item, *Label)) error {
edgeIDs := make([]driver.Value, len(nodes))
byID := make(map[uuid.UUID]*Item)
@@ -1000,20 +1002,12 @@ func (iq *ItemQuery) sqlCount(ctx context.Context) (int, error) {
}
func (iq *ItemQuery) querySpec() *sqlgraph.QuerySpec {
_spec := &sqlgraph.QuerySpec{
Node: &sqlgraph.NodeSpec{
Table: item.Table,
Columns: item.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: item.FieldID,
},
},
From: iq.sql,
Unique: true,
}
_spec := sqlgraph.NewQuerySpec(item.Table, item.Columns, sqlgraph.NewFieldSpec(item.FieldID, field.TypeUUID))
_spec.From = iq.sql
if unique := iq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if iq.path != nil {
_spec.Unique = true
}
if fields := iq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))

View File

@@ -67,6 +67,26 @@ func (iu *ItemUpdate) ClearDescription() *ItemUpdate {
return iu
}
// SetImportRef sets the "import_ref" field.
func (iu *ItemUpdate) SetImportRef(s string) *ItemUpdate {
iu.mutation.SetImportRef(s)
return iu
}
// SetNillableImportRef sets the "import_ref" field if the given value is not nil.
func (iu *ItemUpdate) SetNillableImportRef(s *string) *ItemUpdate {
if s != nil {
iu.SetImportRef(*s)
}
return iu
}
// ClearImportRef clears the value of the "import_ref" field.
func (iu *ItemUpdate) ClearImportRef() *ItemUpdate {
iu.mutation.ClearImportRef()
return iu
}
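With import_ref now settable on updates, a minimal sketch of tagging rows after an import; the value is illustrative and real code would normally scope the update with a Where predicate:
// Sketch only: Save on the bulk update builder returns the number of affected rows.
func tagImportedItems(ctx context.Context, client *ent.Client) (int, error) {
	return client.Item.Update().
		SetImportRef("import-2023-03"). // subject to item.ImportRefValidator, as checked below
		Save(ctx)
}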
// SetNotes sets the "notes" field.
func (iu *ItemUpdate) SetNotes(s string) *ItemUpdate {
iu.mutation.SetNotes(s)
@@ -413,6 +433,17 @@ func (iu *ItemUpdate) ClearSoldNotes() *ItemUpdate {
return iu
}
// SetGroupID sets the "group" edge to the Group entity by ID.
func (iu *ItemUpdate) SetGroupID(id uuid.UUID) *ItemUpdate {
iu.mutation.SetGroupID(id)
return iu
}
// SetGroup sets the "group" edge to the Group entity.
func (iu *ItemUpdate) SetGroup(g *Group) *ItemUpdate {
return iu.SetGroupID(g.ID)
}
// SetParentID sets the "parent" edge to the Item entity by ID.
func (iu *ItemUpdate) SetParentID(id uuid.UUID) *ItemUpdate {
iu.mutation.SetParentID(id)
@@ -447,17 +478,6 @@ func (iu *ItemUpdate) AddChildren(i ...*Item) *ItemUpdate {
return iu.AddChildIDs(ids...)
}
// SetGroupID sets the "group" edge to the Group entity by ID.
func (iu *ItemUpdate) SetGroupID(id uuid.UUID) *ItemUpdate {
iu.mutation.SetGroupID(id)
return iu
}
// SetGroup sets the "group" edge to the Group entity.
func (iu *ItemUpdate) SetGroup(g *Group) *ItemUpdate {
return iu.SetGroupID(g.ID)
}
// AddLabelIDs adds the "label" edge to the Label entity by IDs.
func (iu *ItemUpdate) AddLabelIDs(ids ...uuid.UUID) *ItemUpdate {
iu.mutation.AddLabelIDs(ids...)
@@ -542,6 +562,12 @@ func (iu *ItemUpdate) Mutation() *ItemMutation {
return iu.mutation
}
// ClearGroup clears the "group" edge to the Group entity.
func (iu *ItemUpdate) ClearGroup() *ItemUpdate {
iu.mutation.ClearGroup()
return iu
}
// ClearParent clears the "parent" edge to the Item entity.
func (iu *ItemUpdate) ClearParent() *ItemUpdate {
iu.mutation.ClearParent()
@@ -569,12 +595,6 @@ func (iu *ItemUpdate) RemoveChildren(i ...*Item) *ItemUpdate {
return iu.RemoveChildIDs(ids...)
}
// ClearGroup clears the "group" edge to the Group entity.
func (iu *ItemUpdate) ClearGroup() *ItemUpdate {
iu.mutation.ClearGroup()
return iu
}
// ClearLabel clears all "label" edges to the Label entity.
func (iu *ItemUpdate) ClearLabel() *ItemUpdate {
iu.mutation.ClearLabel()
@@ -713,6 +733,11 @@ func (iu *ItemUpdate) check() error {
return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Item.description": %w`, err)}
}
}
if v, ok := iu.mutation.ImportRef(); ok {
if err := item.ImportRefValidator(v); err != nil {
return &ValidationError{Name: "import_ref", err: fmt.Errorf(`ent: validator failed for field "Item.import_ref": %w`, err)}
}
}
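// Reviewer note: ImportRefValidator is the schema-defined validator for import_ref,
// which this change exposes on the update builder; running it in check() rejects an
// invalid value before sqlSave builds the UPDATE statement, mirroring the other
// validated string fields.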
if v, ok := iu.mutation.Notes(); ok {
if err := item.NotesValidator(v); err != nil {
return &ValidationError{Name: "notes", err: fmt.Errorf(`ent: validator failed for field "Item.notes": %w`, err)}
@@ -753,16 +778,7 @@ func (iu *ItemUpdate) sqlSave(ctx context.Context) (n int, err error) {
if err := iu.check(); err != nil {
return n, err
}
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: item.Table,
Columns: item.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: item.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(item.Table, item.Columns, sqlgraph.NewFieldSpec(item.FieldID, field.TypeUUID))
if ps := iu.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
@@ -782,6 +798,9 @@ func (iu *ItemUpdate) sqlSave(ctx context.Context) (n int, err error) {
if iu.mutation.DescriptionCleared() {
_spec.ClearField(item.FieldDescription, field.TypeString)
}
if value, ok := iu.mutation.ImportRef(); ok {
_spec.SetField(item.FieldImportRef, field.TypeString, value)
}
if iu.mutation.ImportRefCleared() {
_spec.ClearField(item.FieldImportRef, field.TypeString)
}
@@ -884,6 +903,41 @@ func (iu *ItemUpdate) sqlSave(ctx context.Context) (n int, err error) {
if iu.mutation.SoldNotesCleared() {
_spec.ClearField(item.FieldSoldNotes, field.TypeString)
}
if iu.mutation.GroupCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: item.GroupTable,
Columns: []string{item.GroupColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := iu.mutation.GroupIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: item.GroupTable,
Columns: []string{item.GroupColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
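// Reviewer note: the M2O group edge is persisted by first appending a Clear edge spec
// when GroupCleared() is set and then an Add spec carrying the new group ID; because
// this is the inverse side of the edge, the write lands on the group_items foreign key
// of the item row rather than on the groups table.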
if iu.mutation.ParentCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
@@ -973,41 +1027,6 @@ func (iu *ItemUpdate) sqlSave(ctx context.Context) (n int, err error) {
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if iu.mutation.GroupCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: item.GroupTable,
Columns: []string{item.GroupColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := iu.mutation.GroupIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: item.GroupTable,
Columns: []string{item.GroupColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if iu.mutation.LabelCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2M,
@@ -1311,6 +1330,26 @@ func (iuo *ItemUpdateOne) ClearDescription() *ItemUpdateOne {
return iuo
}
// SetImportRef sets the "import_ref" field.
func (iuo *ItemUpdateOne) SetImportRef(s string) *ItemUpdateOne {
iuo.mutation.SetImportRef(s)
return iuo
}
// SetNillableImportRef sets the "import_ref" field if the given value is not nil.
func (iuo *ItemUpdateOne) SetNillableImportRef(s *string) *ItemUpdateOne {
if s != nil {
iuo.SetImportRef(*s)
}
return iuo
}
// ClearImportRef clears the value of the "import_ref" field.
func (iuo *ItemUpdateOne) ClearImportRef() *ItemUpdateOne {
iuo.mutation.ClearImportRef()
return iuo
}
// SetNotes sets the "notes" field.
func (iuo *ItemUpdateOne) SetNotes(s string) *ItemUpdateOne {
iuo.mutation.SetNotes(s)
@@ -1657,6 +1696,17 @@ func (iuo *ItemUpdateOne) ClearSoldNotes() *ItemUpdateOne {
return iuo
}
// SetGroupID sets the "group" edge to the Group entity by ID.
func (iuo *ItemUpdateOne) SetGroupID(id uuid.UUID) *ItemUpdateOne {
iuo.mutation.SetGroupID(id)
return iuo
}
// SetGroup sets the "group" edge to the Group entity.
func (iuo *ItemUpdateOne) SetGroup(g *Group) *ItemUpdateOne {
return iuo.SetGroupID(g.ID)
}
// SetParentID sets the "parent" edge to the Item entity by ID.
func (iuo *ItemUpdateOne) SetParentID(id uuid.UUID) *ItemUpdateOne {
iuo.mutation.SetParentID(id)
@@ -1691,17 +1741,6 @@ func (iuo *ItemUpdateOne) AddChildren(i ...*Item) *ItemUpdateOne {
return iuo.AddChildIDs(ids...)
}
// SetGroupID sets the "group" edge to the Group entity by ID.
func (iuo *ItemUpdateOne) SetGroupID(id uuid.UUID) *ItemUpdateOne {
iuo.mutation.SetGroupID(id)
return iuo
}
// SetGroup sets the "group" edge to the Group entity.
func (iuo *ItemUpdateOne) SetGroup(g *Group) *ItemUpdateOne {
return iuo.SetGroupID(g.ID)
}
// AddLabelIDs adds the "label" edge to the Label entity by IDs.
func (iuo *ItemUpdateOne) AddLabelIDs(ids ...uuid.UUID) *ItemUpdateOne {
iuo.mutation.AddLabelIDs(ids...)
@@ -1786,6 +1825,12 @@ func (iuo *ItemUpdateOne) Mutation() *ItemMutation {
return iuo.mutation
}
// ClearGroup clears the "group" edge to the Group entity.
func (iuo *ItemUpdateOne) ClearGroup() *ItemUpdateOne {
iuo.mutation.ClearGroup()
return iuo
}
// ClearParent clears the "parent" edge to the Item entity.
func (iuo *ItemUpdateOne) ClearParent() *ItemUpdateOne {
iuo.mutation.ClearParent()
@@ -1813,12 +1858,6 @@ func (iuo *ItemUpdateOne) RemoveChildren(i ...*Item) *ItemUpdateOne {
return iuo.RemoveChildIDs(ids...)
}
// ClearGroup clears the "group" edge to the Group entity.
func (iuo *ItemUpdateOne) ClearGroup() *ItemUpdateOne {
iuo.mutation.ClearGroup()
return iuo
}
// ClearLabel clears all "label" edges to the Label entity.
func (iuo *ItemUpdateOne) ClearLabel() *ItemUpdateOne {
iuo.mutation.ClearLabel()
@@ -1909,6 +1948,12 @@ func (iuo *ItemUpdateOne) RemoveAttachments(a ...*Attachment) *ItemUpdateOne {
return iuo.RemoveAttachmentIDs(ids...)
}
// Where appends a list of predicates to the ItemUpdateOne builder.
func (iuo *ItemUpdateOne) Where(ps ...predicate.Item) *ItemUpdateOne {
iuo.mutation.Where(ps...)
return iuo
}
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (iuo *ItemUpdateOne) Select(field string, fields ...string) *ItemUpdateOne {
@@ -1964,6 +2009,11 @@ func (iuo *ItemUpdateOne) check() error {
return &ValidationError{Name: "description", err: fmt.Errorf(`ent: validator failed for field "Item.description": %w`, err)}
}
}
if v, ok := iuo.mutation.ImportRef(); ok {
if err := item.ImportRefValidator(v); err != nil {
return &ValidationError{Name: "import_ref", err: fmt.Errorf(`ent: validator failed for field "Item.import_ref": %w`, err)}
}
}
if v, ok := iuo.mutation.Notes(); ok {
if err := item.NotesValidator(v); err != nil {
return &ValidationError{Name: "notes", err: fmt.Errorf(`ent: validator failed for field "Item.notes": %w`, err)}
@@ -2004,16 +2054,7 @@ func (iuo *ItemUpdateOne) sqlSave(ctx context.Context) (_node *Item, err error)
if err := iuo.check(); err != nil {
return _node, err
}
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: item.Table,
Columns: item.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: item.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(item.Table, item.Columns, sqlgraph.NewFieldSpec(item.FieldID, field.TypeUUID))
id, ok := iuo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Item.id" for update`)}
@@ -2050,6 +2091,9 @@ func (iuo *ItemUpdateOne) sqlSave(ctx context.Context) (_node *Item, err error)
if iuo.mutation.DescriptionCleared() {
_spec.ClearField(item.FieldDescription, field.TypeString)
}
if value, ok := iuo.mutation.ImportRef(); ok {
_spec.SetField(item.FieldImportRef, field.TypeString, value)
}
if iuo.mutation.ImportRefCleared() {
_spec.ClearField(item.FieldImportRef, field.TypeString)
}
@@ -2152,6 +2196,41 @@ func (iuo *ItemUpdateOne) sqlSave(ctx context.Context) (_node *Item, err error)
if iuo.mutation.SoldNotesCleared() {
_spec.ClearField(item.FieldSoldNotes, field.TypeString)
}
if iuo.mutation.GroupCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: item.GroupTable,
Columns: []string{item.GroupColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := iuo.mutation.GroupIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: item.GroupTable,
Columns: []string{item.GroupColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if iuo.mutation.ParentCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
@@ -2241,41 +2320,6 @@ func (iuo *ItemUpdateOne) sqlSave(ctx context.Context) (_node *Item, err error)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if iuo.mutation.GroupCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: item.GroupTable,
Columns: []string{item.GroupColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := iuo.mutation.GroupIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: item.GroupTable,
Columns: []string{item.GroupColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: group.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if iuo.mutation.LabelCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2M,

@@ -228,9 +228,3 @@ func (_if *ItemField) String() string {
// ItemFields is a parsable slice of ItemField.
type ItemFields []*ItemField
func (_if ItemFields) config(cfg config) {
for _i := range _if {
_if[_i].config = cfg
}
}

@@ -291,13 +291,7 @@ func (ifc *ItemFieldCreate) sqlSave(ctx context.Context) (*ItemField, error) {
func (ifc *ItemFieldCreate) createSpec() (*ItemField, *sqlgraph.CreateSpec) {
var (
_node = &ItemField{config: ifc.config}
_spec = &sqlgraph.CreateSpec{
Table: itemfield.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: itemfield.FieldID,
},
}
_spec = sqlgraph.NewCreateSpec(itemfield.Table, sqlgraph.NewFieldSpec(itemfield.FieldID, field.TypeUUID))
)
if id, ok := ifc.mutation.ID(); ok {
_node.ID = id

@@ -40,15 +40,7 @@ func (ifd *ItemFieldDelete) ExecX(ctx context.Context) int {
}
func (ifd *ItemFieldDelete) sqlExec(ctx context.Context) (int, error) {
_spec := &sqlgraph.DeleteSpec{
Node: &sqlgraph.NodeSpec{
Table: itemfield.Table,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: itemfield.FieldID,
},
},
}
_spec := sqlgraph.NewDeleteSpec(itemfield.Table, sqlgraph.NewFieldSpec(itemfield.FieldID, field.TypeUUID))
if ps := ifd.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {

@@ -203,10 +203,12 @@ func (ifq *ItemFieldQuery) AllX(ctx context.Context) []*ItemField {
}
// IDs executes the query and returns a list of ItemField IDs.
func (ifq *ItemFieldQuery) IDs(ctx context.Context) ([]uuid.UUID, error) {
var ids []uuid.UUID
func (ifq *ItemFieldQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error) {
if ifq.ctx.Unique == nil && ifq.path != nil {
ifq.Unique(true)
}
ctx = setContextOp(ctx, ifq.ctx, "IDs")
if err := ifq.Select(itemfield.FieldID).Scan(ctx, &ids); err != nil {
if err = ifq.Select(itemfield.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
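// Reviewer note: the IDs hunk above interleaves the old and new lines of the method:
// the signature moves to named returns (ids []uuid.UUID, err error), and when no
// explicit Unique setting was given and the query was built from an edge path, it now
// calls ifq.Unique(true) so edge traversals should not return duplicate IDs.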
@@ -450,20 +452,12 @@ func (ifq *ItemFieldQuery) sqlCount(ctx context.Context) (int, error) {
}
func (ifq *ItemFieldQuery) querySpec() *sqlgraph.QuerySpec {
_spec := &sqlgraph.QuerySpec{
Node: &sqlgraph.NodeSpec{
Table: itemfield.Table,
Columns: itemfield.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: itemfield.FieldID,
},
},
From: ifq.sql,
Unique: true,
}
_spec := sqlgraph.NewQuerySpec(itemfield.Table, itemfield.Columns, sqlgraph.NewFieldSpec(itemfield.FieldID, field.TypeUUID))
_spec.From = ifq.sql
if unique := ifq.ctx.Unique; unique != nil {
_spec.Unique = *unique
} else if ifq.path != nil {
_spec.Unique = true
}
if fields := ifq.ctx.Fields; len(fields) > 0 {
_spec.Node.Columns = make([]string, 0, len(fields))

@@ -238,16 +238,7 @@ func (ifu *ItemFieldUpdate) sqlSave(ctx context.Context) (n int, err error) {
if err := ifu.check(); err != nil {
return n, err
}
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: itemfield.Table,
Columns: itemfield.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: itemfield.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(itemfield.Table, itemfield.Columns, sqlgraph.NewFieldSpec(itemfield.FieldID, field.TypeUUID))
if ps := ifu.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
@@ -489,6 +480,12 @@ func (ifuo *ItemFieldUpdateOne) ClearItem() *ItemFieldUpdateOne {
return ifuo
}
// Where appends a list of predicates to the ItemFieldUpdateOne builder.
func (ifuo *ItemFieldUpdateOne) Where(ps ...predicate.ItemField) *ItemFieldUpdateOne {
ifuo.mutation.Where(ps...)
return ifuo
}
// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (ifuo *ItemFieldUpdateOne) Select(field string, fields ...string) *ItemFieldUpdateOne {
@@ -561,16 +558,7 @@ func (ifuo *ItemFieldUpdateOne) sqlSave(ctx context.Context) (_node *ItemField,
if err := ifuo.check(); err != nil {
return _node, err
}
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: itemfield.Table,
Columns: itemfield.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: itemfield.FieldID,
},
},
}
_spec := sqlgraph.NewUpdateSpec(itemfield.Table, itemfield.Columns, sqlgraph.NewFieldSpec(itemfield.FieldID, field.TypeUUID))
id, ok := ifuo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "ItemField.id" for update`)}

Some files were not shown because too many files have changed in this diff.