mirror of https://github.com/sysadminsmedia/homebox.git
synced 2026-01-04 03:54:53 +01:00

Compare commits: 65 commits

| Author | SHA1 | Date |
|---|---|---|
| | 2a43ee1840 | |
| | 9a6f8546e2 | |
| | 10ce256b6a | |
| | cb2c6ef8f2 | |
| | abad8ad1ee | |
| | b02b39c1b3 | |
| | b290175bb0 | |
| | e4aa38b264 | |
| | e60f005990 | |
| | 7dfaa0298b | |
| | fbe7382acd | |
| | 1003223b47 | |
| | 3c532896f5 | |
| | 4ba1a263c8 | |
| | 94f0123d9c | |
| | 1f6782f8be | |
| | ec8703114f | |
| | 5cd7792701 | |
| | d82c52df26 | |
| | 033c17552b | |
| | 2355438962 | |
| | 2a6773d1d6 | |
| | c8c07e2878 | |
| | a3c05c3497 | |
| | ab0647fe68 | |
| | 0b616225a6 | |
| | dc9c7b76f2 | |
| | b99102e093 | |
| | 3077602f93 | |
| | 2bd6ff580a | |
| | 35941583c8 | |
| | d576c89c7e | |
| | ff355f3cd8 | |
| | 03dc7fa841 | |
| | 7aaaa346ab | |
| | 27309e61da | |
| | 61816acdaa | |
| | c31410727b | |
| | 4557df86ed | |
| | b8910f1b21 | |
| | 48e4f8da2a | |
| | 1e0158c27e | |
| | 4fb3ddd661 | |
| | 690005de06 | |
| | 23da976494 | |
| | f0b8bb8b7f | |
| | ecc9fa1959 | |
| | 7068a85dfb | |
| | c73922c754 | |
| | ae2179c01c | |
| | 09e056a3fb | |
| | 4abfc76865 | |
| | aa48c958d7 | |
| | 2bd6d0a9e5 | |
| | 88275620f2 | |
| | 5a058250e6 | |
| | afd7a10003 | |
| | 8eedd1e39d | |
| | fedeb1a7e5 | |
| | 69b31a3be5 | |
| | 31d306ca05 | |
| | 1bfb716cea | |
| | 13b1524c56 | |
| | b18599b6f4 | |
| | 473027c1ae | |

7  .gitattributes (vendored)

@@ -1,3 +1,4 @@
-backend/internal/data/ent/** linguist-generated=true
+backend/internal/data/ent/** linguist-generated
-backend/internal/data/ent/schema/** linguist-generated=false
+backend/internal/data/ent/schema/** -linguist-generated
-frontend/lib/api/types/** linguist-generated=true
+frontend/lib/api/types/** linguist-generated
+docs/** linguist-documentation

10  .github/ISSUE_TEMPLATE/internal.md (vendored, new file)

@@ -0,0 +1,10 @@
---
name: "🛠️ Internal / Developer Issue"
about: "Unstructured issue for project members only. Outside contributors: please use a standard template."
title: "[INT]: "
labels: ["internal"]
assignees: []
---

**Summary:**
[Write here]

432  .github/instructions/backend-app-api-handlers.instructions.md (vendored, new file)

@@ -0,0 +1,432 @@
---
applyTo: '/backend/app/api/handlers/**/*'
---

# Backend API Handlers Instructions (`/backend/app/api/handlers/v1/`)

## Overview

API handlers are the HTTP layer that processes requests, calls services, and returns responses. All handlers use the V1 API pattern with Swagger documentation for auto-generation.

## Architecture Flow

```
HTTP Request → Router → Middleware → Handler → Service → Repository → Database
                                        ↓
                                  HTTP Response
```

## Directory Structure

```
backend/app/api/
├── routes.go                     # Route definitions and middleware
├── handlers/
│   └── v1/
│       ├── controller.go         # V1Controller struct and dependencies
│       ├── v1_ctrl_items.go      # Item endpoints
│       ├── v1_ctrl_users.go      # User endpoints
│       ├── v1_ctrl_locations.go  # Location endpoints
│       ├── v1_ctrl_auth.go       # Authentication endpoints
│       ├── helpers.go            # HTTP helper functions
│       ├── query_params.go       # Query parameter parsing
│       └── assets/               # Asset handling
```

## Handler Structure

### V1Controller

All handlers are methods on `V1Controller`:

```go
type V1Controller struct {
    svc  *services.AllServices // Service layer
    repo *repo.AllRepos        // Direct repo access (rare)
    bus  *eventbus.EventBus    // Event publishing
}

func (ctrl *V1Controller) HandleItemCreate() errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        // Handler logic
    }
}
```

### Swagger Documentation

**CRITICAL:** Every handler must have Swagger comments for API doc generation:

```go
// HandleItemsGetAll godoc
//
// @Summary  Query All Items
// @Tags     Items
// @Produce  json
// @Param    q        query    string false "search string"
// @Param    page     query    int    false "page number"
// @Param    pageSize query    int    false "items per page"
// @Success  200      {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router   /v1/items [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        // ...
    }
}
```

**After modifying Swagger comments, ALWAYS run:**
```bash
task generate  # Regenerates Swagger docs and TypeScript types
```

## Standard Handler Pattern

### 1. Decode Request

```go
func (ctrl *V1Controller) HandleItemCreate() errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        var itemData repo.ItemCreate
        if err := server.Decode(r, &itemData); err != nil {
            return validate.NewRequestError(err, http.StatusBadRequest)
        }

        // ... rest of handler
    }
}
```

### 2. Extract Context

```go
// Get current user from request (added by auth middleware)
user := ctrl.CurrentUser(r)

// Create service context with group/user IDs
ctx := services.NewContext(r.Context(), user)
```

### 3. Call Service

```go
result, err := ctrl.svc.Items.Create(ctx, itemData)
if err != nil {
    return validate.NewRequestError(err, http.StatusInternalServerError)
}
```

### 4. Return Response

```go
return server.JSON(w, result, http.StatusCreated)
```

## Common Handler Patterns

### GET - Single Item

```go
// HandleItemGet godoc
//
// @Summary  Get Item
// @Tags     Items
// @Produce  json
// @Param    id  path     string true "Item ID"
// @Success  200 {object} repo.ItemOut
// @Router   /v1/items/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        id, err := ctrl.RouteUUID(r, "id")
        if err != nil {
            return err
        }

        ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
        item, err := ctrl.svc.Items.Get(ctx, id)
        if err != nil {
            return validate.NewRequestError(err, http.StatusNotFound)
        }

        return server.JSON(w, item, http.StatusOK)
    }
}
```

### GET - List with Pagination

```go
func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        // Parse query parameters
        query := extractItemQuery(r)

        ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
        items, err := ctrl.svc.Items.GetAll(ctx, query)
        if err != nil {
            return err
        }

        return server.JSON(w, items, http.StatusOK)
    }
}

// Helper to extract query params
func extractItemQuery(r *http.Request) repo.ItemQuery {
    params := r.URL.Query()
    return repo.ItemQuery{
        Page:        queryIntOrNegativeOne(params.Get("page")),
        PageSize:    queryIntOrNegativeOne(params.Get("pageSize")),
        Search:      params.Get("q"),
        LocationIDs: queryUUIDList(params, "locations"),
    }
}
```

### POST - Create

```go
// HandleItemCreate godoc
//
// @Summary  Create Item
// @Tags     Items
// @Accept   json
// @Produce  json
// @Param    payload body     repo.ItemCreate true "Item Data"
// @Success  201     {object} repo.ItemOut
// @Router   /v1/items [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemCreate() errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        var data repo.ItemCreate
        if err := server.Decode(r, &data); err != nil {
            return validate.NewRequestError(err, http.StatusBadRequest)
        }

        ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
        item, err := ctrl.svc.Items.Create(ctx, data)
        if err != nil {
            return err
        }

        return server.JSON(w, item, http.StatusCreated)
    }
}
```

### PUT - Update

```go
// HandleItemUpdate godoc
//
// @Summary  Update Item
// @Tags     Items
// @Accept   json
// @Produce  json
// @Param    id      path     string          true "Item ID"
// @Param    payload body     repo.ItemUpdate true "Item Data"
// @Success  200     {object} repo.ItemOut
// @Router   /v1/items/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        id, err := ctrl.RouteUUID(r, "id")
        if err != nil {
            return err
        }

        var data repo.ItemUpdate
        if err := server.Decode(r, &data); err != nil {
            return validate.NewRequestError(err, http.StatusBadRequest)
        }

        ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
        item, err := ctrl.svc.Items.Update(ctx, id, data)
        if err != nil {
            return err
        }

        return server.JSON(w, item, http.StatusOK)
    }
}
```

### DELETE

```go
// HandleItemDelete godoc
//
// @Summary  Delete Item
// @Tags     Items
// @Param    id path string true "Item ID"
// @Success  204
// @Router   /v1/items/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemDelete() errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        id, err := ctrl.RouteUUID(r, "id")
        if err != nil {
            return err
        }

        ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
        err = ctrl.svc.Items.Delete(ctx, id)
        if err != nil {
            return err
        }

        return server.JSON(w, nil, http.StatusNoContent)
    }
}
```

### File Upload

```go
func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        id, err := ctrl.RouteUUID(r, "id")
        if err != nil {
            return err
        }

        // Parse multipart form
        err = r.ParseMultipartForm(32 << 20) // 32MB max
        if err != nil {
            return err
        }

        file, header, err := r.FormFile("file")
        if err != nil {
            return err
        }
        defer file.Close()

        ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
        attachment, err := ctrl.svc.Items.CreateAttachment(ctx, id, file, header.Filename)
        if err != nil {
            return err
        }

        return server.JSON(w, attachment, http.StatusCreated)
    }
}
```

## Routing

Routes are defined in `backend/app/api/routes.go`:

```go
func (a *app) mountRoutes(repos *repo.AllRepos, svc *services.AllServices) {
    v1 := v1.NewControllerV1(svc, repos)

    a.server.Get("/api/v1/items", v1.HandleItemsGetAll())
    a.server.Post("/api/v1/items", v1.HandleItemCreate())
    a.server.Get("/api/v1/items/{id}", v1.HandleItemGet())
    a.server.Put("/api/v1/items/{id}", v1.HandleItemUpdate())
    a.server.Delete("/api/v1/items/{id}", v1.HandleItemDelete())
}
```

## Helper Functions

### Query Parameter Parsing

Located in `query_params.go`:

```go
func queryIntOrNegativeOne(s string) int
func queryBool(s string) bool
func queryUUIDList(params url.Values, key string) []uuid.UUID
```
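
These are signatures only; the implementations live in `query_params.go`. A minimal sketch of what helpers with these signatures typically do (illustrative, not the project's verbatim code; it assumes `strconv`, `net/url`, and `github.com/google/uuid` are imported):

```go
// Illustrative sketches only; see query_params.go for the real implementations.

func queryIntOrNegativeOne(s string) int {
    i, err := strconv.Atoi(s)
    if err != nil {
        return -1 // signals "not provided" to the repository layer
    }
    return i
}

func queryBool(s string) bool {
    b, _ := strconv.ParseBool(s)
    return b
}

func queryUUIDList(params url.Values, key string) []uuid.UUID {
    var ids []uuid.UUID
    for _, s := range params[key] {
        id, err := uuid.Parse(s)
        if err != nil {
            continue // skip malformed IDs rather than failing the request
        }
        ids = append(ids, id)
    }
    return ids
}
```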

### Response Helpers

```go
// From httpkit/server
server.JSON(w, data, statusCode)          // JSON response
server.Respond(w, statusCode)             // Empty response
validate.NewRequestError(err, statusCode) // Error response
```

### Authentication

```go
user := ctrl.CurrentUser(r) // Get authenticated user (from middleware)
```

## Adding a New Endpoint

### 1. Create Handler

In `backend/app/api/handlers/v1/v1_ctrl_myentity.go`:

```go
// HandleMyEntityCreate godoc
//
// @Summary  Create MyEntity
// @Tags     MyEntity
// @Accept   json
// @Produce  json
// @Param    payload body     repo.MyEntityCreate true "Data"
// @Success  201     {object} repo.MyEntityOut
// @Router   /v1/my-entity [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleMyEntityCreate() errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        var data repo.MyEntityCreate
        if err := server.Decode(r, &data); err != nil {
            return validate.NewRequestError(err, http.StatusBadRequest)
        }

        ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
        result, err := ctrl.svc.MyEntity.Create(ctx, data)
        if err != nil {
            return err
        }

        return server.JSON(w, result, http.StatusCreated)
    }
}
```

### 2. Add Route

In `backend/app/api/routes.go`:

```go
a.server.Post("/api/v1/my-entity", v1.HandleMyEntityCreate())
```

### 3. Generate Docs

```bash
task generate  # Generates Swagger docs and TypeScript types
```

### 4. Test

```bash
task go:build  # Verify builds
task go:test   # Run tests
```

## Critical Rules

1. **ALWAYS add Swagger comments** - required for API docs and TypeScript type generation
2. **Run `task generate` after handler changes** - updates API documentation
3. **Use services, not repos directly** - handlers call services, services call repos
4. **Always use `services.Context`** - includes auth and multi-tenancy
5. **Handle errors properly** - use `validate.NewRequestError()` with appropriate status codes
6. **Validate input** - decode and validate request bodies
7. **Return correct status codes** - 200 OK, 201 Created, 204 No Content, 400 Bad Request, 404 Not Found

## Common Issues

- **"Missing Swagger docs"** → Add `@Summary`, `@Tags`, `@Router` comments, run `task generate`
- **TypeScript types outdated** → Run `task generate` to regenerate
- **Auth failures** → Ensure route has auth middleware and `@Security Bearer`
- **CORS errors** → Check middleware configuration in `routes.go`

341  .github/instructions/backend-internal-core-services.instructions.md (vendored, new file)

@@ -0,0 +1,341 @@
---
applyTo: '/backend/internal/core/services/**/*'
---

# Backend Services Layer Instructions (`/backend/internal/core/services/`)

## Overview

The services layer contains business logic that orchestrates between repositories and API handlers. Services handle complex operations, validation, and cross-cutting concerns.

## Architecture Pattern

```
Handler (API) → Service (Business Logic) → Repository (Data Access) → Database
```

**Separation of concerns:**
- **Handlers** (`backend/app/api/handlers/v1/`) - HTTP request/response, routing, auth
- **Services** (`backend/internal/core/services/`) - Business logic, orchestration
- **Repositories** (`backend/internal/data/repo/`) - Database operations, queries

## Directory Structure

```
backend/internal/core/services/
├── all.go                        # Service aggregation
├── service_items.go              # Item business logic
├── service_items_attachments.go  # Item attachments logic
├── service_user.go               # User management logic
├── service_group.go              # Group management logic
├── service_background.go         # Background tasks
├── contexts.go                   # Service context types
├── reporting/                    # Reporting subsystem
│   ├── eventbus/                 # Event bus for notifications
│   └── *.go                      # Report generation logic
└── *_test.go                     # Service tests
```

## Service Structure

### Standard Pattern

```go
type ItemService struct {
    repo                 *repo.AllRepos // Access to all repositories
    filepath             string         // File storage path
    autoIncrementAssetID bool           // Feature flags
}

func (svc *ItemService) Create(ctx Context, item repo.ItemCreate) (repo.ItemOut, error) {
    // 1. Validation
    if item.Name == "" {
        return repo.ItemOut{}, errors.New("name required")
    }

    // 2. Business logic
    if svc.autoIncrementAssetID {
        highest, err := svc.repo.Items.GetHighestAssetID(ctx, ctx.GID)
        if err != nil {
            return repo.ItemOut{}, err
        }
        item.AssetID = highest + 1
    }

    // 3. Repository call
    return svc.repo.Items.Create(ctx, ctx.GID, item)
}
```

### Service Context

Services use a custom `Context` type that extends `context.Context`:

```go
type Context struct {
    context.Context
    GID uuid.UUID // Group ID for multi-tenancy
    UID uuid.UUID // User ID for audit
}
```

**Always use `Context` from services package, not raw `context.Context`.**
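
`services.NewContext`, used throughout the handler examples, builds this `Context` from a request context plus the authenticated user. A minimal sketch, assuming the user value exposes `GroupID` and `ID` fields; check `contexts.go` for the real constructor:

```go
// Sketch of the constructor behind services.NewContext(r.Context(), user).
// The parameter type and field names are assumptions; see contexts.go.
func NewContext(ctx context.Context, user repo.UserOut) Context {
    return Context{
        Context: ctx,
        GID:     user.GroupID, // group ID drives multi-tenancy filters
        UID:     user.ID,      // user ID is kept for auditing
    }
}
```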

## Common Service Patterns

### 1. CRUD with Business Logic

```go
func (svc *ItemService) Update(ctx Context, id uuid.UUID, data repo.ItemUpdate) (repo.ItemOut, error) {
    // Fetch existing
    existing, err := svc.repo.Items.Get(ctx, id)
    if err != nil {
        return repo.ItemOut{}, err
    }

    // Business rules
    if existing.Archived && data.Quantity != nil {
        return repo.ItemOut{}, errors.New("cannot modify archived items")
    }

    // Update
    return svc.repo.Items.Update(ctx, id, data)
}
```

### 2. Orchestrating Multiple Repositories

```go
func (svc *ItemService) CreateWithAttachment(ctx Context, item repo.ItemCreate, file io.Reader) (repo.ItemOut, error) {
    // Create item
    created, err := svc.repo.Items.Create(ctx, ctx.GID, item)
    if err != nil {
        return repo.ItemOut{}, err
    }

    // Upload attachment
    attachment, err := svc.repo.Attachments.Create(ctx, created.ID, file)
    if err != nil {
        // Rollback - delete item
        _ = svc.repo.Items.Delete(ctx, created.ID)
        return repo.ItemOut{}, err
    }

    created.Attachments = []repo.AttachmentOut{attachment}
    return created, nil
}
```

### 3. Background Tasks

```go
func (svc *ItemService) EnsureAssetID(ctx context.Context, gid uuid.UUID) (int, error) {
    // Get items without asset IDs
    items, err := svc.repo.Items.GetAllZeroAssetID(ctx, gid)
    if err != nil {
        return 0, err
    }

    // Batch assign
    highest, err := svc.repo.Items.GetHighestAssetID(ctx, gid)
    if err != nil {
        return 0, err
    }
    for _, item := range items {
        highest++
        _, _ = svc.repo.Items.Update(ctx, item.ID, repo.ItemUpdate{
            AssetID: &highest,
        })
    }

    return len(items), nil
}
```

### 4. Event Publishing

Services can publish events to the event bus:

```go
func (svc *ItemService) Delete(ctx Context, id uuid.UUID) error {
    err := svc.repo.Items.Delete(ctx, id)
    if err != nil {
        return err
    }

    // Publish event for notifications
    svc.repo.Bus.Publish(eventbus.Event{
        Type: "item.deleted",
        Data: map[string]interface{}{"id": id},
    })

    return nil
}
```

## Service Aggregation

All services are bundled in `all.go`:

```go
type AllServices struct {
    User  *UserService
    Group *GroupService
    Items *ItemService
    // ... other services
}

func New(repos *repo.AllRepos, filepath string) *AllServices {
    return &AllServices{
        User:  &UserService{repo: repos},
        Items: &ItemService{repo: repos, filepath: filepath},
        // ...
    }
}
```

**Accessed in handlers via:**
```go
ctrl.svc.Items.Create(ctx, itemData)
```

## Working with Services from Handlers

Handlers call services, not repositories directly:

```go
// In backend/app/api/handlers/v1/v1_ctrl_items.go
func (ctrl *V1Controller) HandleItemCreate() errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        var itemData repo.ItemCreate
        if err := server.Decode(r, &itemData); err != nil {
            return err
        }

        // Get context with group/user IDs
        ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))

        // Call service (not repository)
        item, err := ctrl.svc.Items.Create(ctx, itemData)
        if err != nil {
            return err
        }

        return server.JSON(w, item, http.StatusCreated)
    }
}
```

## Testing Services

Service tests mock repositories using interfaces:

```go
func TestItemService_Create(t *testing.T) {
    mockRepo := &mockItemRepo{
        CreateFunc: func(ctx context.Context, gid uuid.UUID, data repo.ItemCreate) (repo.ItemOut, error) {
            return repo.ItemOut{ID: uuid.New(), Name: data.Name}, nil
        },
    }

    svc := &ItemService{repo: &repo.AllRepos{Items: mockRepo}}

    ctx := services.Context{GID: uuid.New(), UID: uuid.New()}
    result, err := svc.Create(ctx, repo.ItemCreate{Name: "Test"})

    assert.NoError(t, err)
    assert.Equal(t, "Test", result.Name)
}
```
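
The `mockItemRepo` used above is not shown in full here. A sketch of the function-field style it implies, assuming the `Items` field of `repo.AllRepos` accepts an interface (names and shape are illustrative):

```go
// Illustrative mock: each method delegates to an injectable function field,
// so individual tests can stub only the calls they care about.
type mockItemRepo struct {
    CreateFunc func(ctx context.Context, gid uuid.UUID, data repo.ItemCreate) (repo.ItemOut, error)
}

func (m *mockItemRepo) Create(ctx context.Context, gid uuid.UUID, data repo.ItemCreate) (repo.ItemOut, error) {
    return m.CreateFunc(ctx, gid, data)
}
```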

**Run service tests:**
```bash
cd backend && go test ./internal/core/services -v
```

## Adding a New Service

### 1. Create Service File

Create `backend/internal/core/services/service_myentity.go`:

```go
package services

type MyEntityService struct {
    repo *repo.AllRepos
}

func (svc *MyEntityService) Create(ctx Context, data repo.MyEntityCreate) (repo.MyEntityOut, error) {
    // Business logic here
    return svc.repo.MyEntity.Create(ctx, ctx.GID, data)
}
```

### 2. Add to AllServices

Edit `backend/internal/core/services/all.go`:

```go
type AllServices struct {
    // ... existing services
    MyEntity *MyEntityService
}

func New(repos *repo.AllRepos, filepath string) *AllServices {
    return &AllServices{
        // ... existing services
        MyEntity: &MyEntityService{repo: repos},
    }
}
```

### 3. Use in Handler

In `backend/app/api/handlers/v1/`:

```go
func (ctrl *V1Controller) HandleMyEntityCreate() errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
        result, err := ctrl.svc.MyEntity.Create(ctx, data)
        // ...
    }
}
```

### 4. Run Tests

```bash
task generate  # If you modified schemas
task go:test   # Run all tests
```

## Common Service Responsibilities

**Services should:**
- ✅ Contain business logic and validation
- ✅ Orchestrate multiple repository calls
- ✅ Handle transactions (when needed)
- ✅ Publish events for side effects
- ✅ Enforce access control and multi-tenancy
- ✅ Transform data between API and repository formats

**Services should NOT:**
- ❌ Handle HTTP requests/responses (that's handlers)
- ❌ Construct SQL queries (that's repositories)
- ❌ Import handler packages (creates circular deps)
- ❌ Directly access the database (use repositories)

## Critical Rules

1. **Always use `services.Context`** - includes group/user IDs for multi-tenancy
2. **Services call repos, handlers call services** - maintains layer separation
3. **No direct database access** - always go through repositories
4. **Business logic goes here** - not in handlers or repositories
5. **Test services independently** - mock repository dependencies

## Common Patterns to Follow

- **Validation:** Check business rules before calling the repository
- **Error wrapping:** Add context to repository errors
- **Logging:** Use `log.Ctx(ctx)` for contextual logging
- **Transactions:** Use `repo.WithTx()` for multi-step operations (see the sketch after this list)
- **Events:** Publish to the event bus for notifications/side effects
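
`repo.WithTx()` is mentioned above but not spelled out in this file. A hedged sketch of how a service might use such a helper, assuming a callback-style signature; the real API may differ:

```go
// Sketch only - assumes a helper shaped like WithTx(ctx, func(tx *repo.AllRepos) error).
// Check the repo package for the actual signature before relying on this.
func (svc *ItemService) CreateWithAttachmentTx(ctx Context, item repo.ItemCreate, file io.Reader) error {
    return svc.repo.WithTx(ctx, func(tx *repo.AllRepos) error {
        created, err := tx.Items.Create(ctx, ctx.GID, item)
        if err != nil {
            return err // returning an error rolls the transaction back
        }
        _, err = tx.Attachments.Create(ctx, created.ID, file)
        return err // nil commits; non-nil rolls back
    })
}
```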

239  .github/instructions/backend-internal-data.instructions.md (vendored, new file)

@@ -0,0 +1,239 @@
---
applyTo: 'backend/internal/data/**/*'
---

# Backend Data Layer Instructions (`/backend/internal/data/`)

## Overview

This directory contains the data access layer using **Ent ORM** (entity framework). It follows a clear separation between schema definitions, generated code, and repository implementations.

## Directory Structure

```
backend/internal/data/
├── ent/                   # Ent ORM generated code (DO NOT EDIT)
│   ├── schema/            # Schema definitions (EDIT THESE)
│   │   ├── item.go        # Item entity schema
│   │   ├── user.go        # User entity schema
│   │   ├── location.go    # Location entity schema
│   │   ├── label.go       # Label entity schema
│   │   └── mixins/        # Reusable schema mixins
│   ├── *.go               # Generated entity code
│   └── migrate/           # Generated migrations
├── repo/                  # Repository pattern implementations
│   ├── repos_all.go       # Aggregates all repositories
│   ├── repo_items.go      # Item repository
│   ├── repo_users.go      # User repository
│   ├── repo_locations.go  # Location repository
│   └── *_test.go          # Repository tests
├── migrations/            # Manual SQL migrations
│   ├── sqlite3/           # SQLite-specific migrations
│   └── postgres/          # PostgreSQL-specific migrations
└── types/                 # Custom data types
```

## Ent ORM Workflow

### 1. Defining Schemas (`ent/schema/`)

**ALWAYS edit schema files here** - these define your database entities:

```go
// Example: backend/internal/data/ent/schema/item.go
type Item struct {
    ent.Schema
}

func (Item) Fields() []ent.Field {
    return []ent.Field{
        field.String("name").NotEmpty(),
        field.Int("quantity").Default(1),
        field.Bool("archived").Default(false),
    }
}

func (Item) Edges() []ent.Edge {
    return []ent.Edge{
        edge.From("location", Location.Type).Ref("items").Unique(),
        edge.From("labels", Label.Type).Ref("items"),
    }
}

func (Item) Indexes() []ent.Index {
    return []ent.Index{
        index.Fields("name"),
        index.Fields("archived"),
    }
}
```

**Common schema patterns** (a sketch combining them follows this list):
- Use `mixins.BaseMixin{}` for `id`, `created_at`, `updated_at` fields
- Use `mixins.DetailsMixin{}` for `name` and `description` fields
- Use `GroupMixin{ref: "items"}` to link entities to groups
- Add indexes for frequently queried fields
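
A hedged sketch of a schema wired up with the mixins listed above; the mixin names are taken from this list, and the exact types and import paths should be checked against `ent/schema/mixins/`:

```go
// Illustrative schema using the mixins listed above; names may differ
// slightly from the real mixins package.
type Thing struct {
    ent.Schema
}

func (Thing) Mixin() []ent.Mixin {
    return []ent.Mixin{
        mixins.BaseMixin{},        // id, created_at, updated_at
        mixins.DetailsMixin{},     // name, description
        GroupMixin{ref: "things"}, // links rows to a group for multi-tenancy
    }
}

func (Thing) Indexes() []ent.Index {
    return []ent.Index{
        index.Fields("name"), // index frequently queried fields
    }
}
```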

### 2. Generating Code

**After modifying any schema file, ALWAYS run:**

```bash
task generate
```

This:
1. Runs `go generate ./...` in `backend/internal/` (generates Ent code)
2. Generates Swagger docs from API handlers
3. Generates TypeScript types for frontend

**Generated files you'll see:**
- `ent/*.go` - Entity types, builders, queries
- `ent/migrate/migrate.go` - Auto migrations
- `ent/predicate/predicate.go` - Query predicates

**NEVER edit generated files directly** - changes will be overwritten.

### 3. Using Generated Code in Repositories

Repositories in `repo/` use the generated Ent client:

```go
// Example: backend/internal/data/repo/repo_items.go
type ItemsRepository struct {
    db  *ent.Client
    bus *eventbus.EventBus
}

func (r *ItemsRepository) Create(ctx context.Context, gid uuid.UUID, data ItemCreate) (ItemOut, error) {
    entity, err := r.db.Item.Create().
        SetName(data.Name).
        SetQuantity(data.Quantity).
        SetGroupID(gid).
        Save(ctx)

    return mapToItemOut(entity), err
}
```

## Repository Pattern

### Structure

Each entity typically has:
- **Repository struct** (`ItemsRepository`) - holds DB client and dependencies
- **Input types** (`ItemCreate`, `ItemUpdate`) - API input DTOs
- **Output types** (`ItemOut`, `ItemSummary`) - API response DTOs
- **Query types** (`ItemQuery`) - search/filter parameters
- **Mapper functions** (`mapToItemOut`) - convert Ent entities to output DTOs (a sketch follows this list)
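
Mapper functions are plain conversions from generated Ent entities to output DTOs. A minimal sketch of `mapToItemOut`, assuming `ItemOut` carries the fields used elsewhere in these instructions; the real struct has more:

```go
// Illustrative mapper - the real ItemOut also carries labels, location, etc.
func mapToItemOut(e *ent.Item) ItemOut {
    return ItemOut{
        ID:        e.ID,
        Name:      e.Name,
        Quantity:  e.Quantity,
        CreatedAt: e.CreatedAt,
        UpdatedAt: e.UpdatedAt,
    }
}
```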

### Key Methods

Repositories typically implement:
- `Create(ctx, gid, input)` - Create new entity
- `Get(ctx, id)` - Get single entity by ID
- `GetAll(ctx, gid, query)` - Query with pagination/filters
- `Update(ctx, id, input)` - Update entity
- `Delete(ctx, id)` - Delete entity

### Working with Ent Queries

**Loading relationships (edges):**
```go
items, err := r.db.Item.Query().
    WithLocation(). // Load location edge
    WithLabels().   // Load labels edge
    WithChildren(). // Load child items
    Where(item.GroupIDEQ(gid)).
    All(ctx)
```

**Filtering:**
```go
query := r.db.Item.Query().
    Where(
        item.GroupIDEQ(gid),
        item.ArchivedEQ(false),
        item.NameContainsFold(search),
    )
```

**Ordering and pagination:**
```go
items, err := query.
    Order(ent.Desc(item.FieldCreatedAt)).
    Limit(pageSize).
    Offset((page - 1) * pageSize).
    All(ctx)
```

## Common Workflows

### Adding a New Entity

1. **Create schema:** `backend/internal/data/ent/schema/myentity.go`
2. **Run:** `task generate` (generates Ent code)
3. **Create repository:** `backend/internal/data/repo/repo_myentity.go`
4. **Add to AllRepos:** Edit `repo/repos_all.go` to include the new repo
5. **Run tests:** `task go:test`

### Adding Fields to Existing Entity

1. **Edit schema:** `backend/internal/data/ent/schema/item.go`
   ```go
   field.String("new_field").Optional()
   ```
2. **Run:** `task generate`
3. **Update repository:** Add the field to input/output types in `repo/repo_items.go`
4. **Update mappers:** Ensure mapper functions handle the new field
5. **Run tests:** `task go:test`

### Adding Relationships (Edges)

1. **Edit both schemas:**
   ```go
   // In item.go
   edge.From("location", Location.Type).Ref("items").Unique()

   // In location.go
   edge.To("items", Item.Type)
   ```
2. **Run:** `task generate`
3. **Use in queries:** `.WithLocation()` to load the edge
4. **Run tests:** `task go:test`

## Testing

Repository tests use `enttest` for in-memory SQLite:

```go
func TestItemRepo(t *testing.T) {
    client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&_fk=1")
    defer client.Close()

    repo := &ItemsRepository{db: client}
    // Test methods...
}
```

**Run repository tests:**
```bash
cd backend && go test ./internal/data/repo -v
```

## Critical Rules

1. **ALWAYS run `task generate` after schema changes** - builds will fail otherwise
2. **NEVER edit files in `ent/` except `ent/schema/`** - they're generated
3. **Use repositories, not raw Ent queries, in services/handlers** - maintains separation
4. **Include `group_id` in all queries** - ensures multi-tenancy
5. **Use `.WithX()` to load edges** - avoids N+1 queries
6. **Test with both SQLite and PostgreSQL** - CI tests both

## Common Errors

- **"undefined: ent.ItemX"** → Run `task generate` after schema changes
- **Migration conflicts** → Check `migrations/` for manual migration files
- **Foreign key violations** → Ensure edges are properly defined in both schemas
- **Slow queries** → Add indexes in the schema's `Indexes()` method

157  .github/instructions/code.instructions.md (vendored, new file)

@@ -0,0 +1,157 @@
# Homebox Repository Instructions for Coding Agents

## Repository Overview

**Type**: Full-stack home inventory management web app (monorepo)
**Size**: ~265 Go files, ~371 TypeScript/Vue files
**Build Tool**: Task (Taskfile.yml) - **ALWAYS use `task` commands**
**Database**: SQLite (default) or PostgreSQL

### Stack
- **Backend** (`/backend`): Go 1.24+, Chi router, Ent ORM, port 7745
- **Frontend** (`/frontend`): Nuxt 4, Vue 3, TypeScript, Tailwind CSS, pnpm 9.1.4+, dev proxies to backend

## Critical Build & Validation Commands

### Initial Setup (Run Once)
```bash
task setup  # Installs swag, goose, Go deps, pnpm deps
```

### Code Generation (Required Before Backend Work)
```bash
task generate  # Generates Ent ORM, Swagger docs, TypeScript types
```
**ALWAYS run after**: schema changes, API handler changes, before backend server/tests
**Note**: "TypeSpecDef is nil" warnings are normal - ignore them

### Backend Commands
```bash
task go:build  # Build binary (60-90s)
task go:test   # Unit tests (5-10s)
task go:lint   # golangci-lint (6m timeout in CI)
task go:all    # Tidy + lint + test
task go:run    # Start server (SQLite)
task pr        # Full PR validation (3-5 min)
```

### Frontend Commands
```bash
task ui:dev    # Dev server port 3000
task ui:check  # Type checking
task ui:fix    # eslint --fix + prettier
task ui:watch  # Vitest watch mode
```
**Lint**: Max 1 warning in CI (`pnpm run lint:ci`)

### Testing
```bash
task test:ci   # Integration tests (15-30s + startup)
task test:e2e  # Playwright E2E (60s+ per shard, needs playwright install)
task pr        # Full PR validation: generate + go:all + ui:check + ui:fix + test:ci (3-5 min)
```

## Project Structure

### Key Root Files
- `Taskfile.yml` - All commands (always use `task`)
- `docker-compose.yml`, `Dockerfile*` - Docker configs
- `CONTRIBUTING.md` - Contribution guidelines

### Backend Structure (`/backend`)
```
backend/
├── app/
│   ├── api/              # Main API application
│   │   ├── main.go       # Entry point
│   │   ├── routes.go     # Route definitions
│   │   ├── handlers/     # HTTP handlers (v1 API)
│   │   ├── static/       # Swagger docs, embedded frontend
│   │   └── providers/    # Service providers
│   └── tools/
│       └── typegen/      # TypeScript type generation tool
├── internal/
│   ├── core/
│   │   └── services/     # Business logic layer
│   ├── data/
│   │   ├── ent/          # Ent ORM generated code + schemas
│   │   │   └── schema/   # Schema definitions (edit these)
│   │   └── repo/         # Repository pattern implementations
│   ├── sys/              # System utilities (config, validation)
│   └── web/              # Web middleware
├── pkgs/                 # Reusable packages
├── go.mod, go.sum        # Go dependencies
└── .golangci.yml         # Linter configuration
```

**Patterns**: Schema/API changes → edit source → `task generate`. Never edit generated code in `ent/`.

### Frontend Structure (`/frontend`)
```
frontend/
├── app.vue               # Root component
├── nuxt.config.ts        # Nuxt configuration
├── package.json          # Frontend dependencies
├── components/           # Vue components (auto-imported)
├── pages/                # File-based routing
├── layouts/              # Layout components
├── composables/          # Vue composables (auto-imported)
├── stores/               # Pinia state stores
├── lib/
│   └── api/
│       └── types/        # Generated TypeScript API types
├── locales/              # i18n translations
├── test/                 # Vitest + Playwright tests
├── eslint.config.mjs     # ESLint configuration
└── tailwind.config.js    # Tailwind configuration
```

**Patterns**: Auto-imports for `components/` and `composables/`. API types are auto-generated - never edit them manually.

## CI/CD Workflows

PR checks (`.github/workflows/pull-requests.yaml`) on `main`/`vnext`:
1. **Backend**: Go 1.24, golangci-lint, `task go:build`, `task go:coverage`
2. **Frontend**: Lint (max 1 warning), typecheck, `task test:ci` (SQLite + PostgreSQL v15-17)
3. **E2E**: 4 sharded Playwright runs (60min timeout)

All must pass before merge.

## Common Pitfalls

1. **Missing tools**: Run `task setup` first (installs swag, goose, deps)
2. **Stale generated code**: Always run `task generate` after schema/API changes
3. **Test failures**: Integration tests may fail on the first run (race condition) - retry
4. **Port in use**: Backend uses 7745 - kill the existing process
5. **SQLite locked**: Delete `.data/homebox.db-*` files
6. **Clean build**: `rm -rf build/ backend/app/api/static/public/ frontend/.nuxt`

## Environment Variables

Backend defaults in `Taskfile.yml` (an example of setting them is sketched after this list):
- `HBOX_LOG_LEVEL=debug`
- `HBOX_DATABASE_DRIVER=sqlite3` (or `postgres`)
- `HBOX_DATABASE_SQLITE_PATH=.data/homebox.db?_pragma=busy_timeout=1000&_pragma=journal_mode=WAL&_fk=1`
- PostgreSQL: `HBOX_DATABASE_*` vars for username/password/host/port/database
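
A hedged example of exporting these variables in a shell before starting the backend; only the names listed above are taken from this document, so confirm the full set in `Taskfile.yml` and the configuration docs:

```bash
# Illustrative only - confirm the exact HBOX_DATABASE_* names in Taskfile.yml / docs.
export HBOX_LOG_LEVEL=info
export HBOX_DATABASE_DRIVER=postgres
# plus the remaining HBOX_DATABASE_* connection settings
# (username, password, host, port, database)
```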

## Validation Checklist

Before PR:
- [ ] `task generate` after schema/API changes
- [ ] `task pr` passes (includes lint, test, typecheck)
- [ ] No build artifacts committed (check `.gitignore`)
- [ ] Code matches existing patterns

## Quick Reference

**Dev environment**: `task go:run` (terminal 1) + `task ui:dev` (terminal 2)

**API changes**: Edit handlers → add Swagger comments → `task generate` → `task go:build` → `task go:test`

**Schema changes**: Edit `ent/schema/*.go` → `task generate` → update repo methods → `task go:test`

**Specific tests**: `cd backend && go test ./path -v` or `cd frontend && pnpm run test:watch`

## Trust These Instructions

These instructions are validated and current. Only explore further if the information is incomplete, incorrect, or you encounter undocumented errors. Use `task --list-all` to see all commands.

480  .github/instructions/frontend.instructions.md (vendored, new file)

@@ -0,0 +1,480 @@
---
applyTo: 'frontend/**/*'
---

# Frontend Components & Pages Instructions (`/frontend/`)

## Overview

The frontend is a Nuxt 4 application with Vue 3 and TypeScript. It uses auto-imports for components and composables, file-based routing, and generated TypeScript types from the backend API.

## Directory Structure

```
frontend/
├── components/          # Vue components (auto-imported)
│   ├── Item/            # Item-related components
│   ├── Location/        # Location components
│   ├── Label/           # Label components
│   ├── Form/            # Form components
│   └── ui/              # Shadcn-vue UI components
├── pages/               # File-based routes (auto-routing)
│   ├── index.vue        # Home page (/)
│   ├── items.vue        # Items list (/items)
│   ├── item/
│   │   └── [id].vue     # Item detail (/item/:id)
│   ├── locations.vue    # Locations list (/locations)
│   └── profile.vue      # User profile (/profile)
├── composables/         # Vue composables (auto-imported)
│   ├── use-api.ts       # API client wrapper
│   ├── use-auth.ts      # Authentication
│   └── use-user-api.ts  # User API helpers
├── stores/              # Pinia state management
│   ├── auth.ts          # Auth state
│   └── preferences.ts   # User preferences
├── lib/
│   └── api/
│       └── types/       # Generated TypeScript types (DO NOT EDIT)
├── layouts/             # Layout components
│   └── default.vue      # Default layout
├── locales/             # i18n translations
├── test/                # Tests (Vitest + Playwright)
└── nuxt.config.ts       # Nuxt configuration
```

## Auto-Imports

### Components

Components in `components/` are **automatically imported** - no import statement needed:

```vue
<!-- components/Item/Card.vue -->
<template>
  <div class="item-card">{{ item.name }}</div>
</template>

<!-- pages/items.vue - NO import needed -->
<template>
  <ItemCard :item="item" />
</template>
```

**Naming convention:** Nested path becomes component name
- `components/Item/Card.vue` → `<ItemCard />`
- `components/Form/TextField.vue` → `<FormTextField />`

### Composables

Composables in `composables/` are **automatically imported**:

```ts
// composables/use-items.ts
export function useItems() {
  const api = useUserApi()

  async function getItems() {
    const { data } = await api.items.getAll()
    return data
  }

  return { getItems }
}

// pages/items.vue - NO import needed
const { getItems } = useItems()
const items = await getItems()
```

## File-Based Routing

Pages in `pages/` automatically become routes:

```
pages/index.vue         → /
pages/items.vue         → /items
pages/item/[id].vue     → /item/:id
pages/locations.vue     → /locations
pages/location/[id].vue → /location/:id
pages/profile.vue       → /profile
```

### Dynamic Routes

Use square brackets for dynamic segments:

```vue
<!-- pages/item/[id].vue -->
<script setup lang="ts">
const route = useRoute()
const id = route.params.id

const { data: item } = await useUserApi().items.getOne(id)
</script>

<template>
  <div>
    <h1>{{ item.name }}</h1>
  </div>
</template>
```

## API Integration

### Generated Types

API types are auto-generated from backend Swagger docs:

```ts
// lib/api/types/data-contracts.ts (GENERATED - DO NOT EDIT)
export interface ItemOut {
  id: string
  name: string
  quantity: number
  createdAt: Date | string
  updatedAt: Date | string
}

export interface ItemCreate {
  name: string
  quantity?: number
  locationId?: string
}
```

**Regenerate after backend API changes:**
```bash
task generate  # Runs in backend, updates frontend/lib/api/types/
```

### Using the API Client

The `useUserApi()` composable provides typed API access:

```vue
<script setup lang="ts">
import type { ItemCreate, ItemOut } from '~/lib/api/types/data-contracts'

const api = useUserApi()

// GET all items
const { data: items } = await api.items.getAll({
  q: 'search term',
  page: 1,
  pageSize: 20
})

// GET single item
const { data: item } = await api.items.getOne(itemId)

// POST create item
const newItem: ItemCreate = {
  name: 'New Item',
  quantity: 1
}
const { data: created } = await api.items.create(newItem)

// PUT update item
const { data: updated } = await api.items.update(itemId, {
  quantity: 5
})

// DELETE item
await api.items.delete(itemId)
</script>
```

## Component Patterns

### Standard Vue 3 Composition API

```vue
<script setup lang="ts">
import { ref, computed } from 'vue'
import type { ItemOut } from '~/lib/api/types/data-contracts'

// Props
interface Props {
  item: ItemOut
  editable?: boolean
}
const props = defineProps<Props>()

// Emits
interface Emits {
  (e: 'update', item: ItemOut): void
  (e: 'delete', id: string): void
}
const emit = defineEmits<Emits>()

// State
const isEditing = ref(false)
const localItem = ref({ ...props.item })

// Computed
const displayName = computed(() => {
  return props.item.name.toUpperCase()
})

// Methods
function handleSave() {
  emit('update', localItem.value)
  isEditing.value = false
}
</script>

<template>
  <div class="item-card">
    <h3>{{ displayName }}</h3>
    <p v-if="!isEditing">Quantity: {{ item.quantity }}</p>

    <input
      v-if="isEditing"
      v-model.number="localItem.quantity"
      type="number"
    />

    <button v-if="editable" @click="isEditing = !isEditing">
      {{ isEditing ? 'Cancel' : 'Edit' }}
    </button>
    <button v-if="isEditing" @click="handleSave">Save</button>
  </div>
</template>

<style scoped>
.item-card {
  padding: 1rem;
  border: 1px solid #ccc;
  border-radius: 0.5rem;
}
</style>
```

### Using Pinia Stores

```vue
<script setup lang="ts">
import { useAuthStore } from '~/stores/auth'

const authStore = useAuthStore()

// Access state
const user = computed(() => authStore.user)
const isLoggedIn = computed(() => authStore.isLoggedIn)

// Call actions
async function logout() {
  await authStore.logout()
  navigateTo('/login')
}
</script>
```
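
The auth store itself lives in `stores/auth.ts` and is not shown here. A minimal options-style Pinia store sketch matching the usage above; the state, getter, and action names are assumptions based on that usage:

```ts
// stores/auth.ts - illustrative sketch, not the project's verbatim store.
import { defineStore } from 'pinia'

export const useAuthStore = defineStore('auth', {
  state: () => ({
    // whatever the API returns for the current user; shape assumed here
    user: null as { id: string; name: string } | null,
  }),
  getters: {
    isLoggedIn: state => state.user !== null,
  },
  actions: {
    async logout() {
      // call the logout endpoint here, then clear local state
      this.user = null
    },
  },
})
```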

### Form Handling

```vue
<script setup lang="ts">
import { useForm } from 'vee-validate'
import type { ItemCreate } from '~/lib/api/types/data-contracts'

const api = useUserApi()

const { values, errors, handleSubmit } = useForm<ItemCreate>({
  initialValues: {
    name: '',
    quantity: 1
  }
})

const onSubmit = handleSubmit(async (values) => {
  try {
    const { data } = await api.items.create(values)
    navigateTo(`/item/${data.id}`)
  } catch (error) {
    console.error('Failed to create item:', error)
  }
})
</script>

<template>
  <form @submit.prevent="onSubmit">
    <input v-model="values.name" type="text" placeholder="Item name" />
    <span v-if="errors.name">{{ errors.name }}</span>

    <input v-model.number="values.quantity" type="number" />
    <span v-if="errors.quantity">{{ errors.quantity }}</span>

    <button type="submit">Create Item</button>
  </form>
</template>
```

## Styling

### Tailwind CSS

The project uses Tailwind CSS for styling:

```vue
<template>
  <div class="flex items-center justify-between p-4 bg-white rounded-lg shadow-md">
    <h3 class="text-lg font-semibold text-gray-900">{{ item.name }}</h3>
    <span class="text-sm text-gray-500">Qty: {{ item.quantity }}</span>
  </div>
</template>
```

### Shadcn-vue Components

UI components from `components/ui/` (Shadcn-vue):

```vue
<script setup lang="ts">
import { Button } from '@/components/ui/button'
import { Card, CardContent, CardHeader } from '@/components/ui/card'
</script>

<template>
  <Card>
    <CardHeader>
      <h3>{{ item.name }}</h3>
    </CardHeader>
    <CardContent>
      <p>{{ item.description }}</p>
      <Button @click="handleEdit">Edit</Button>
    </CardContent>
  </Card>
</template>
```

## Testing

### Vitest (Unit/Integration)

Tests use Vitest with the backend API running:

```ts
// test/items.test.ts
import { describe, it, expect } from 'vitest'
import { useUserApi } from '~/composables/use-user-api'

describe('Items API', () => {
  it('should create and fetch item', async () => {
    const api = useUserApi()

    // Create item
    const { data: created } = await api.items.create({
      name: 'Test Item',
      quantity: 1
    })

    expect(created.name).toBe('Test Item')

    // Fetch item
    const { data: fetched } = await api.items.getOne(created.id)
    expect(fetched.id).toBe(created.id)
  })
})
```

**Run tests:**
```bash
task ui:watch                    # Watch mode
cd frontend && pnpm run test:ci  # CI mode
```

### Playwright (E2E)

E2E tests in `test/`:

```ts
// test/e2e/items.spec.ts
import { test, expect } from '@playwright/test'

test('should create new item', async ({ page }) => {
  await page.goto('/items')

  await page.click('button:has-text("New Item")')
  await page.fill('input[name="name"]', 'Test Item')
  await page.fill('input[name="quantity"]', '5')
  await page.click('button:has-text("Save")')

  await expect(page.locator('text=Test Item')).toBeVisible()
})
```

**Run E2E tests:**
```bash
task test:e2e  # Full E2E suite
```

## Adding a New Feature

### 1. Update Backend API

Make backend changes first (schema, service, handler):
```bash
# Edit backend files
task generate  # Regenerates TypeScript types
```

### 2. Create Component

Create `components/MyFeature/Card.vue`:
```vue
<script setup lang="ts">
import type { MyFeatureOut } from '~/lib/api/types/data-contracts'

interface Props {
  feature: MyFeatureOut
}
defineProps<Props>()
</script>

<template>
  <div>{{ feature.name }}</div>
</template>
```

### 3. Create Page

Create `pages/my-feature/[id].vue`:
```vue
<script setup lang="ts">
const route = useRoute()
const api = useUserApi()

const { data: feature } = await api.myFeature.getOne(route.params.id)
</script>

<template>
  <MyFeatureCard :feature="feature" />
</template>
```

### 4. Test

```bash
task ui:check  # Type checking
task ui:fix    # Linting
task ui:watch  # Run tests
```

## Critical Rules
1. **Never edit generated types** - `lib/api/types/` is auto-generated; run `task generate` after backend changes
2. **No manual imports for components/composables** - auto-imported from `components/` and `composables/`
3. **Use TypeScript** - all `.vue` files use `<script setup lang="ts">`
4. **Follow file-based routing** - pages in `pages/` become routes automatically
5. **Use `useUserApi()` for API calls** - provides a typed, authenticated API client (see the sketch after this list)
6. **Max 1 linting warning in CI** - run `task ui:fix` before committing
7. **Test with backend running** - integration tests need the API server
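
To see rules 1, 3, and 5 working together, here is a minimal sketch of a typed call inside a `<script setup lang="ts">` block. It mirrors the patterns shown earlier; the `ItemOut` type name is an assumption used for illustration and should be checked against the generated contracts.

```ts
// Illustrative sketch only - `ItemOut` is an assumed name for a generated type.
import type { ItemOut } from '~/lib/api/types/data-contracts' // generated, never edit by hand (rule 1)

// useUserApi() is auto-imported (rule 2) and returns the typed, authenticated client (rule 5)
const api = useUserApi()

const { data } = await api.items.getOne('some-item-id')
const item: ItemOut = data // typed end to end via the generated contracts (rule 3)
console.log(item.name)
```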

## Common Issues

- **"Type not found"** → Run `task generate` to regenerate types from backend
- **Component not found** → Check naming (nested path = component name)
- **API call fails** → Ensure backend is running (`task go:run`)
- **Lint errors** → Run `task ui:fix` to auto-fix
- **Type errors** → Run `task ui:check` for detailed errors
259
.github/scripts/upgrade-test/README.md
vendored
Normal file
259
.github/scripts/upgrade-test/README.md
vendored
Normal file
@@ -0,0 +1,259 @@
# HomeBox Upgrade Testing Workflow

This document describes the automated upgrade testing workflow for HomeBox.

## Overview

The upgrade test workflow is designed to ensure data integrity and functionality when upgrading HomeBox from one version to another. It automatically:

1. Deploys a stable version of HomeBox
2. Creates test data (users, items, locations, labels, notifiers, attachments)
3. Upgrades to the latest version from the main branch
4. Verifies all data and functionality remain intact

## Workflow File

**Location**: `.github/workflows/upgrade-test.yaml`

## Trigger Conditions

The workflow runs:

- **Daily**: Automatically at 2 AM UTC (via cron schedule)
- **Manual**: Can be triggered manually via GitHub Actions UI
- **On Push**: When changes are made to the workflow files or test scripts

## Test Scenarios

### 1. Environment Setup

- Pulls the latest stable HomeBox Docker image from GHCR
- Starts the application with test configuration
- Ensures the service is healthy and ready

### 2. Data Creation

The workflow creates comprehensive test data using the `create-test-data.sh` script:

#### Users and Groups

- **Group 1**: 5 users (user1@homebox.test through user5@homebox.test)
- **Group 2**: 2 users (user6@homebox.test and user7@homebox.test)
- All users have password: `TestPassword123!`

#### Locations

- **Group 1**: Living Room, Garage
- **Group 2**: Home Office

#### Labels

- **Group 1**: Electronics, Important
- **Group 2**: Work Equipment

#### Items

- **Group 1**: 5 items (Laptop Computer, Power Drill, TV Remote, Tool Box, Coffee Maker)
- **Group 2**: 2 items (Monitor, Keyboard)

#### Attachments

- Multiple attachments added to various items (receipts, manuals, warranties)

#### Notifiers

- **Group 1**: Test notifier named "TESTING"

### 3. Upgrade Process

1. Stops the stable version container
2. Builds a fresh image from the current main branch
3. Copies the database to a new location
4. Starts the new version with the existing data

### 4. Verification Tests

The Playwright test suite (`upgrade-verification.spec.ts`) verifies that:

- ✅ **User Authentication**: All 7 users can log in with their credentials (a simplified sketch of this check follows the list)
- ✅ **Data Persistence**: All items, locations, and labels are present
- ✅ **Attachments**: File attachments are correctly associated with items
- ✅ **Notifiers**: The "TESTING" notifier is still configured
- ✅ **UI Functionality**: Version display and theme switching work correctly
- ✅ **Data Isolation**: Groups can only see their own data
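
For orientation, the sketch below shows roughly how the user-authentication check can consume the generated test data. It is an illustration of the flow, not the contents of `upgrade-verification.spec.ts`; the login-form selectors and the post-login URL are assumptions.

```ts
// Hypothetical sketch of the login verification, not the real spec file.
import { test, expect } from '@playwright/test'
import { readFileSync } from 'node:fs'

// TEST_DATA_FILE is exported by the workflow (default: /tmp/test-users.json);
// its shape is documented in "Test Data File" below.
const testData = JSON.parse(
  readFileSync(process.env.TEST_DATA_FILE ?? '/tmp/test-users.json', 'utf-8')
)

for (const user of testData.users) {
  test(`${user.email} can still log in after the upgrade`, async ({ page }) => {
    await page.goto('/')
    // Selectors here are assumptions for illustration purposes.
    await page.fill('input[name="email"]', user.email)
    await page.fill('input[name="password"]', user.password)
    await page.click('button[type="submit"]')
    await expect(page).toHaveURL(/home/) // assumed post-login route
  })
}
```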

## Test Data File

The setup script generates a JSON file at `/tmp/test-users.json` containing:

```json
{
  "users": [
    {
      "email": "user1@homebox.test",
      "password": "TestPassword123!",
      "token": "...",
      "group": "1"
    },
    ...
  ],
  "locations": {
    "group1": ["location-id-1", "location-id-2"],
    "group2": ["location-id-3"]
  },
  "labels": {...},
  "items": {...},
  "notifiers": {...}
}
```

This file is used by the Playwright tests to verify data integrity.

## Scripts

### create-test-data.sh

**Location**: `.github/scripts/upgrade-test/create-test-data.sh`

**Purpose**: Creates all test data via the HomeBox REST API

**Environment Variables**:

- `HOMEBOX_URL`: Base URL of the HomeBox instance (default: http://localhost:7745)
- `TEST_DATA_FILE`: Path to output JSON file (default: /tmp/test-users.json)

**Requirements**:

- `curl`: For API calls
- `jq`: For JSON processing

**Usage**:
```bash
export HOMEBOX_URL=http://localhost:7745
./.github/scripts/upgrade-test/create-test-data.sh
```

## Running Tests Locally

To run the upgrade tests locally:

### Prerequisites

```bash
# Install dependencies
sudo apt-get install -y jq curl docker.io

# Install pnpm and Playwright
cd frontend
pnpm install
pnpm exec playwright install --with-deps chromium
```

### Run the test

```bash
# Start stable version
docker run -d \
  --name homebox-test \
  -p 7745:7745 \
  -e HBOX_OPTIONS_ALLOW_REGISTRATION=true \
  -v /tmp/homebox-data:/data \
  ghcr.io/sysadminsmedia/homebox:latest

# Wait for startup
sleep 10

# Create test data
export HOMEBOX_URL=http://localhost:7745
./.github/scripts/upgrade-test/create-test-data.sh

# Stop container
docker stop homebox-test
docker rm homebox-test

# Build new version
docker build -t homebox:test .

# Start new version with existing data
docker run -d \
  --name homebox-test \
  -p 7745:7745 \
  -e HBOX_OPTIONS_ALLOW_REGISTRATION=true \
  -v /tmp/homebox-data:/data \
  homebox:test

# Wait for startup
sleep 10

# Run verification tests
cd frontend
TEST_DATA_FILE=/tmp/test-users.json \
E2E_BASE_URL=http://localhost:7745 \
pnpm exec playwright test \
  --project=chromium \
  test/upgrade/upgrade-verification.spec.ts

# Cleanup
docker stop homebox-test
docker rm homebox-test
```

## Artifacts

The workflow produces several artifacts:

1. **playwright-report-upgrade-test**: HTML report of test results
2. **playwright-traces**: Detailed traces for debugging failures
3. **Docker logs**: Collected on failure for troubleshooting

## Failure Scenarios

The workflow will fail if:

- The stable version fails to start
- Test data creation fails
- The new version fails to start with existing data
- Any verification test fails
- Database migrations fail

## Troubleshooting

### Test Data Creation Fails

Check the Docker logs:
```bash
docker logs homebox-old
```

Verify the API is accessible:
```bash
curl http://localhost:7745/api/v1/status
```

### Verification Tests Fail

1. Download the Playwright report from GitHub Actions artifacts
2. Review the HTML report for detailed failure information
3. Check traces for visual debugging

### Database Issues

If migrations fail:
```bash
# Check database file
ls -lh /tmp/homebox-data-new/homebox.db

# Check Docker logs for migration errors
docker logs homebox-new
```

## Future Enhancements

Potential improvements:

- [ ] Test multiple upgrade paths (e.g., v0.10 → v0.11 → v0.12)
- [ ] Test with PostgreSQL backend in addition to SQLite
- [ ] Add performance benchmarks
- [ ] Test with larger datasets
- [ ] Add API-level verification in addition to UI tests
- [ ] Test backup and restore functionality

## Related Files

- `.github/workflows/upgrade-test.yaml` - Main workflow definition
- `.github/scripts/upgrade-test/create-test-data.sh` - Data generation script
- `frontend/test/upgrade/upgrade-verification.spec.ts` - Playwright verification tests
- `.github/workflows/e2e-partial.yaml` - Standard E2E test workflow (for reference)

## Support

For issues or questions about this workflow:

1. Check the GitHub Actions run logs
2. Review this documentation
3. Open an issue in the repository
413
.github/scripts/upgrade-test/create-test-data.sh
vendored
Executable file
413
.github/scripts/upgrade-test/create-test-data.sh
vendored
Executable file
@@ -0,0 +1,413 @@
#!/bin/bash

# Script to create test data in HomeBox for upgrade testing
# This script creates users, items, attachments, notifiers, locations, and labels

set -e

HOMEBOX_URL="${HOMEBOX_URL:-http://localhost:7745}"
API_URL="${HOMEBOX_URL}/api/v1"
TEST_DATA_FILE="${TEST_DATA_FILE:-/tmp/test-users.json}"

echo "Creating test data in HomeBox at $HOMEBOX_URL"

# Function to make API calls with error handling
api_call() {
    local method=$1
    local endpoint=$2
    local data=$3
    local token=$4

    if [ -n "$token" ]; then
        if [ -n "$data" ]; then
            curl -s -X "$method" \
                -H "Authorization: Bearer $token" \
                -H "Content-Type: application/json" \
                -d "$data" \
                "$API_URL$endpoint"
        else
            curl -s -X "$method" \
                -H "Authorization: Bearer $token" \
                -H "Content-Type: application/json" \
                "$API_URL$endpoint"
        fi
    else
        if [ -n "$data" ]; then
            curl -s -X "$method" \
                -H "Content-Type: application/json" \
                -d "$data" \
                "$API_URL$endpoint"
        else
            curl -s -X "$method" \
                -H "Content-Type: application/json" \
                "$API_URL$endpoint"
        fi
    fi
}

# Function to register a user and get token
register_user() {
    local email=$1
    local name=$2
    local password=$3
    local group_token=$4

    echo "Registering user: $email" >&2

    local payload="{\"email\":\"$email\",\"name\":\"$name\",\"password\":\"$password\""

    if [ -n "$group_token" ]; then
        payload="$payload,\"groupToken\":\"$group_token\""
    fi

    payload="$payload}"

    local response=$(curl -s -X POST \
        -H "Content-Type: application/json" \
        -d "$payload" \
        "$API_URL/users/register")

    echo "$response"
}

# Function to login and get token
login_user() {
    local email=$1
    local password=$2

    echo "Logging in user: $email" >&2

    local response=$(curl -s -X POST \
        -H "Content-Type: application/json" \
        -d "{\"username\":\"$email\",\"password\":\"$password\"}" \
        "$API_URL/users/login")

    echo "$response" | jq -r '.token // empty'
}

# Function to create an item
create_item() {
    local token=$1
    local name=$2
    local description=$3
    local location_id=$4

    echo "Creating item: $name" >&2

    local payload="{\"name\":\"$name\",\"description\":\"$description\""

    if [ -n "$location_id" ]; then
        payload="$payload,\"locationId\":\"$location_id\""
    fi

    payload="$payload}"

    local response=$(curl -s -X POST \
        -H "Authorization: Bearer $token" \
        -H "Content-Type: application/json" \
        -d "$payload" \
        "$API_URL/items")

    echo "$response"
}

# Function to create a location
create_location() {
    local token=$1
    local name=$2
    local description=$3

    echo "Creating location: $name" >&2

    local response=$(curl -s -X POST \
        -H "Authorization: Bearer $token" \
        -H "Content-Type: application/json" \
        -d "{\"name\":\"$name\",\"description\":\"$description\"}" \
        "$API_URL/locations")

    echo "$response"
}

# Function to create a label
create_label() {
    local token=$1
    local name=$2
    local description=$3

    echo "Creating label: $name" >&2

    local response=$(curl -s -X POST \
        -H "Authorization: Bearer $token" \
        -H "Content-Type: application/json" \
        -d "{\"name\":\"$name\",\"description\":\"$description\"}" \
        "$API_URL/labels")

    echo "$response"
}

# Function to create a notifier
create_notifier() {
    local token=$1
    local name=$2
    local url=$3

    echo "Creating notifier: $name" >&2

    local response=$(curl -s -X POST \
        -H "Authorization: Bearer $token" \
        -H "Content-Type: application/json" \
        -d "{\"name\":\"$name\",\"url\":\"$url\",\"isActive\":true}" \
        "$API_URL/groups/notifiers")

    echo "$response"
}

# Function to attach a file to an item (creates a dummy attachment)
attach_file_to_item() {
    local token=$1
    local item_id=$2
    local filename=$3

    echo "Creating attachment for item: $item_id" >&2

    # Create a temporary file with some content
    local temp_file=$(mktemp)
    echo "This is a test attachment for $filename" > "$temp_file"

    local response=$(curl -s -X POST \
        -H "Authorization: Bearer $token" \
        -F "file=@$temp_file" \
        -F "type=attachment" \
        -F "name=$filename" \
        "$API_URL/items/$item_id/attachments")

    rm -f "$temp_file"

    echo "$response"
}

# Initialize test data storage
echo "{\"users\":[]}" > "$TEST_DATA_FILE"

echo "=== Step 1: Create first group with 5 users ==="

# Register first user (creates a new group)
user1_response=$(register_user "user1@homebox.test" "User One" "TestPassword123!")
user1_token=$(echo "$user1_response" | jq -r '.token // empty')
group_token=$(echo "$user1_response" | jq -r '.group.inviteToken // empty')

if [ -z "$user1_token" ]; then
    echo "Failed to register first user"
    echo "Response: $user1_response"
    exit 1
fi

echo "First user registered with token. Group token: $group_token"

# Store user1 data
jq --arg email "user1@homebox.test" \
   --arg password "TestPassword123!" \
   --arg token "$user1_token" \
   --arg group "1" \
   '.users += [{"email":$email,"password":$password,"token":$token,"group":$group}]' \
   "$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"

# Register 4 more users in the same group
for i in {2..5}; do
    echo "Registering user$i in group 1..."
    user_response=$(register_user "user${i}@homebox.test" "User $i" "TestPassword123!" "$group_token")
    user_token=$(echo "$user_response" | jq -r '.token // empty')

    if [ -z "$user_token" ]; then
        echo "Failed to register user$i"
        echo "Response: $user_response"
    else
        echo "user$i registered successfully"
        # Store user data
        jq --arg email "user${i}@homebox.test" \
           --arg password "TestPassword123!" \
           --arg token "$user_token" \
           --arg group "1" \
           '.users += [{"email":$email,"password":$password,"token":$token,"group":$group}]' \
           "$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"
    fi
done

echo "=== Step 2: Create second group with 2 users ==="

# Register first user of second group
user6_response=$(register_user "user6@homebox.test" "User Six" "TestPassword123!")
user6_token=$(echo "$user6_response" | jq -r '.token // empty')
group2_token=$(echo "$user6_response" | jq -r '.group.inviteToken // empty')

if [ -z "$user6_token" ]; then
    echo "Failed to register user6"
    echo "Response: $user6_response"
    exit 1
fi

echo "user6 registered with token. Group 2 token: $group2_token"

# Store user6 data
jq --arg email "user6@homebox.test" \
   --arg password "TestPassword123!" \
   --arg token "$user6_token" \
   --arg group "2" \
   '.users += [{"email":$email,"password":$password,"token":$token,"group":$group}]' \
   "$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"

# Register second user in group 2
user7_response=$(register_user "user7@homebox.test" "User Seven" "TestPassword123!" "$group2_token")
user7_token=$(echo "$user7_response" | jq -r '.token // empty')

if [ -z "$user7_token" ]; then
    echo "Failed to register user7"
    echo "Response: $user7_response"
else
    echo "user7 registered successfully"
    # Store user7 data
    jq --arg email "user7@homebox.test" \
       --arg password "TestPassword123!" \
       --arg token "$user7_token" \
       --arg group "2" \
       '.users += [{"email":$email,"password":$password,"token":$token,"group":$group}]' \
       "$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"
fi

echo "=== Step 3: Create locations for each group ==="

# Create locations for group 1 (using user1's token)
location1=$(create_location "$user1_token" "Living Room" "Main living area")
location1_id=$(echo "$location1" | jq -r '.id // empty')
echo "Created location: Living Room (ID: $location1_id)"

location2=$(create_location "$user1_token" "Garage" "Storage and tools")
location2_id=$(echo "$location2" | jq -r '.id // empty')
echo "Created location: Garage (ID: $location2_id)"

# Create location for group 2 (using user6's token)
location3=$(create_location "$user6_token" "Home Office" "Work from home space")
location3_id=$(echo "$location3" | jq -r '.id // empty')
echo "Created location: Home Office (ID: $location3_id)"

# Store locations
jq --arg loc1 "$location1_id" \
   --arg loc2 "$location2_id" \
   --arg loc3 "$location3_id" \
   '.locations = {"group1":[$loc1,$loc2],"group2":[$loc3]}' \
   "$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"

echo "=== Step 4: Create labels for each group ==="

# Create labels for group 1
label1=$(create_label "$user1_token" "Electronics" "Electronic devices")
label1_id=$(echo "$label1" | jq -r '.id // empty')
echo "Created label: Electronics (ID: $label1_id)"

label2=$(create_label "$user1_token" "Important" "High priority items")
label2_id=$(echo "$label2" | jq -r '.id // empty')
echo "Created label: Important (ID: $label2_id)"

# Create label for group 2
label3=$(create_label "$user6_token" "Work Equipment" "Items for work")
label3_id=$(echo "$label3" | jq -r '.id // empty')
echo "Created label: Work Equipment (ID: $label3_id)"

# Store labels
jq --arg lab1 "$label1_id" \
   --arg lab2 "$label2_id" \
   --arg lab3 "$label3_id" \
   '.labels = {"group1":[$lab1,$lab2],"group2":[$lab3]}' \
   "$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"

echo "=== Step 5: Create test notifier ==="

# Create notifier for group 1
notifier1=$(create_notifier "$user1_token" "TESTING" "https://example.com/webhook")
notifier1_id=$(echo "$notifier1" | jq -r '.id // empty')
echo "Created notifier: TESTING (ID: $notifier1_id)"

# Store notifier
jq --arg not1 "$notifier1_id" \
   '.notifiers = {"group1":[$not1]}' \
   "$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"

echo "=== Step 6: Create items for all users ==="

# Create items for users in group 1
declare -A user_tokens
user_tokens[1]=$user1_token
user_tokens[2]=$(echo "$user1_token") # Users in same group share data, but we'll use user1 token
user_tokens[3]=$(echo "$user1_token")
user_tokens[4]=$(echo "$user1_token")
user_tokens[5]=$(echo "$user1_token")

# Items for group 1 users
echo "Creating items for group 1..."
item1=$(create_item "$user1_token" "Laptop Computer" "Dell XPS 15 for work" "$location1_id")
item1_id=$(echo "$item1" | jq -r '.id // empty')
echo "Created item: Laptop Computer (ID: $item1_id)"

item2=$(create_item "$user1_token" "Power Drill" "DeWalt 20V cordless drill" "$location2_id")
item2_id=$(echo "$item2" | jq -r '.id // empty')
echo "Created item: Power Drill (ID: $item2_id)"

item3=$(create_item "$user1_token" "TV Remote" "Samsung TV remote control" "$location1_id")
item3_id=$(echo "$item3" | jq -r '.id // empty')
echo "Created item: TV Remote (ID: $item3_id)"

item4=$(create_item "$user1_token" "Tool Box" "Red metal tool box with tools" "$location2_id")
item4_id=$(echo "$item4" | jq -r '.id // empty')
echo "Created item: Tool Box (ID: $item4_id)"

item5=$(create_item "$user1_token" "Coffee Maker" "Breville espresso machine" "$location1_id")
item5_id=$(echo "$item5" | jq -r '.id // empty')
echo "Created item: Coffee Maker (ID: $item5_id)"

# Items for group 2 users
echo "Creating items for group 2..."
item6=$(create_item "$user6_token" "Monitor" "27 inch 4K monitor" "$location3_id")
item6_id=$(echo "$item6" | jq -r '.id // empty')
echo "Created item: Monitor (ID: $item6_id)"

item7=$(create_item "$user6_token" "Keyboard" "Mechanical keyboard" "$location3_id")
item7_id=$(echo "$item7" | jq -r '.id // empty')
echo "Created item: Keyboard (ID: $item7_id)"

# Store items
jq --argjson group1_items "[\"$item1_id\",\"$item2_id\",\"$item3_id\",\"$item4_id\",\"$item5_id\"]" \
   --argjson group2_items "[\"$item6_id\",\"$item7_id\"]" \
   '.items = {"group1":$group1_items,"group2":$group2_items}' \
   "$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"

echo "=== Step 7: Add attachments to items ==="

# Add attachments for group 1 items
echo "Adding attachments to group 1 items..."
attach_file_to_item "$user1_token" "$item1_id" "laptop-receipt.pdf"
attach_file_to_item "$user1_token" "$item1_id" "laptop-warranty.pdf"
attach_file_to_item "$user1_token" "$item2_id" "drill-manual.pdf"
attach_file_to_item "$user1_token" "$item3_id" "remote-guide.pdf"
attach_file_to_item "$user1_token" "$item4_id" "toolbox-inventory.txt"

# Add attachments for group 2 items
echo "Adding attachments to group 2 items..."
attach_file_to_item "$user6_token" "$item6_id" "monitor-receipt.pdf"
attach_file_to_item "$user6_token" "$item7_id" "keyboard-manual.pdf"

echo "=== Test Data Creation Complete ==="
echo "Test data file saved to: $TEST_DATA_FILE"
echo "Summary:"
echo " - Users created: 7 (5 in group 1, 2 in group 2)"
echo " - Locations created: 3"
echo " - Labels created: 3"
echo " - Notifiers created: 1"
echo " - Items created: 7"
echo " - Attachments created: 7"

# Display the test data file for verification
echo ""
echo "Test data:"
cat "$TEST_DATA_FILE" | jq '.'

exit 0
16
.github/workflows/binaries-publish.yaml
vendored
16
.github/workflows/binaries-publish.yaml
vendored
@@ -17,19 +17,17 @@ jobs:
|
|||||||
id-token: write
|
id-token: write
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Set up Go
|
- name: Set up Go
|
||||||
uses: actions/setup-go@v5
|
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c
|
||||||
with:
|
with:
|
||||||
go-version: "1.24"
|
go-version: "1.24"
|
||||||
cache-dependency-path: backend/go.mod
|
cache-dependency-path: backend/go.mod
|
||||||
|
|
||||||
- uses: pnpm/action-setup@v2
|
- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
|
||||||
with:
|
|
||||||
version: 9.15.3
|
|
||||||
|
|
||||||
- name: Build Frontend and Copy to Backend
|
- name: Build Frontend and Copy to Backend
|
||||||
working-directory: frontend
|
working-directory: frontend
|
||||||
@@ -51,7 +49,7 @@ jobs:
|
|||||||
- name: Run GoReleaser
|
- name: Run GoReleaser
|
||||||
id: releaser
|
id: releaser
|
||||||
if: startsWith(github.ref, 'refs/tags/')
|
if: startsWith(github.ref, 'refs/tags/')
|
||||||
uses: goreleaser/goreleaser-action@v5
|
uses: goreleaser/goreleaser-action@e435ccd777264be153ace6237001ef4d979d3a7a
|
||||||
with:
|
with:
|
||||||
workdir: "backend"
|
workdir: "backend"
|
||||||
distribution: goreleaser
|
distribution: goreleaser
|
||||||
@@ -75,7 +73,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Run GoReleaser No Release
|
- name: Run GoReleaser No Release
|
||||||
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
|
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
|
||||||
uses: goreleaser/goreleaser-action@v5
|
uses: goreleaser/goreleaser-action@e435ccd777264be153ace6237001ef4d979d3a7a
|
||||||
with:
|
with:
|
||||||
workdir: "backend"
|
workdir: "backend"
|
||||||
distribution: goreleaser
|
distribution: goreleaser
|
||||||
@@ -93,7 +91,7 @@ jobs:
|
|||||||
actions: read # To read the workflow path.
|
actions: read # To read the workflow path.
|
||||||
id-token: write # To sign the provenance.
|
id-token: write # To sign the provenance.
|
||||||
contents: write # To add assets to a release.
|
contents: write # To add assets to a release.
|
||||||
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.9.0
|
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@f7dd8c54c2067bafc12ca7a55595d5ee9b75204a
|
||||||
with:
|
with:
|
||||||
base64-subjects: "${{ needs.goreleaser.outputs.hashes }}"
|
base64-subjects: "${{ needs.goreleaser.outputs.hashes }}"
|
||||||
upload-assets: true # upload to a new release
|
upload-assets: true # upload to a new release
|
||||||
@@ -105,7 +103,7 @@ jobs:
|
|||||||
permissions: read-all
|
permissions: read-all
|
||||||
steps:
|
steps:
|
||||||
- name: Install the verifier
|
- name: Install the verifier
|
||||||
uses: slsa-framework/slsa-verifier/actions/installer@v2.4.0
|
uses: slsa-framework/slsa-verifier/actions/installer@ea584f4502babc6f60d9bc799dbbb13c1caa9ee6
|
||||||
|
|
||||||
- name: Download assets
|
- name: Download assets
|
||||||
env:
|
env:
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ jobs:
|
|||||||
permissions:
|
permissions:
|
||||||
packages: write
|
packages: write
|
||||||
steps:
|
steps:
|
||||||
- uses: dataaxiom/ghcr-cleanup-action@v1
|
- uses: dataaxiom/ghcr-cleanup-action@cd0cdb900b5dbf3a6f2cc869f0dbb0b8211f50c4
|
||||||
with:
|
with:
|
||||||
dry-run: true
|
dry-run: true
|
||||||
delete-ghost-images: true
|
delete-ghost-images: true
|
||||||
@@ -32,7 +32,7 @@ jobs:
|
|||||||
permissions:
|
permissions:
|
||||||
packages: write
|
packages: write
|
||||||
steps:
|
steps:
|
||||||
- uses: dataaxiom/ghcr-cleanup-action@v1
|
- uses: dataaxiom/ghcr-cleanup-action@cd0cdb900b5dbf3a6f2cc869f0dbb0b8211f50c4
|
||||||
with:
|
with:
|
||||||
dry-run: false
|
dry-run: false
|
||||||
delete-untagged: true
|
delete-untagged: true
|
||||||
|
|||||||
14
.github/workflows/copilot-setup-steps.yml
vendored
14
.github/workflows/copilot-setup-steps.yml
vendored
@@ -26,25 +26,23 @@ jobs:
|
|||||||
# If you do not check out your code, Copilot will do this for you.
|
# If you do not check out your code, Copilot will do this for you.
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
|
||||||
|
|
||||||
- name: Set up Node.js
|
- name: Set up Node.js
|
||||||
uses: actions/setup-node@v4
|
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f
|
||||||
with:
|
with:
|
||||||
node-version: "22"
|
node-version: "24"
|
||||||
|
|
||||||
- uses: pnpm/action-setup@v3.0.0
|
- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
|
||||||
with:
|
|
||||||
version: 9.12.2
|
|
||||||
|
|
||||||
- name: Set up Go
|
- name: Set up Go
|
||||||
uses: actions/setup-go@v5
|
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c
|
||||||
with:
|
with:
|
||||||
go-version: "1.24"
|
go-version: "1.24"
|
||||||
cache-dependency-path: backend/go.mod
|
cache-dependency-path: backend/go.mod
|
||||||
|
|
||||||
- name: Install Task
|
- name: Install Task
|
||||||
uses: arduino/setup-task@v1
|
uses: arduino/setup-task@b91d5d2c96a56797b48ac1e0e89220bf64044611
|
||||||
with:
|
with:
|
||||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
|||||||
28
.github/workflows/docker-publish-hardened.yaml
vendored
28
.github/workflows/docker-publish-hardened.yaml
vendored
@@ -33,7 +33,7 @@ env:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ matrix.runner }}
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
packages: write
|
packages: write
|
||||||
@@ -43,10 +43,11 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
platform:
|
include:
|
||||||
- linux/amd64
|
- platform: linux/amd64
|
||||||
- linux/arm64
|
runner: ubuntu-latest
|
||||||
- linux/arm/v7
|
- platform: linux/arm64
|
||||||
|
runner: ubuntu-24.04-arm
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Enable Debug Logs
|
- name: Enable Debug Logs
|
||||||
@@ -56,7 +57,7 @@ jobs:
|
|||||||
ACTIONS_STEP_DEBUG: true
|
ACTIONS_STEP_DEBUG: true
|
||||||
|
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
|
||||||
|
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
@@ -77,6 +78,15 @@ jobs:
|
|||||||
images: |
|
images: |
|
||||||
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
|
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
|
||||||
name=${{ env.GHCR_REPO }}
|
name=${{ env.GHCR_REPO }}
|
||||||
|
tags: |
|
||||||
|
type=ref,event=branch
|
||||||
|
type=ref,event=pr
|
||||||
|
type=semver,pattern={{version}}
|
||||||
|
type=semver,pattern={{major}}.{{minor}}
|
||||||
|
type=semver,pattern={{major}}
|
||||||
|
type=schedule,pattern=nightly
|
||||||
|
flavor: |
|
||||||
|
suffix=-hardened,onlatest=true
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
|
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
|
||||||
@@ -123,7 +133,7 @@ jobs:
|
|||||||
annotations: ${{ steps.meta.outputs.annotations }}
|
annotations: ${{ steps.meta.outputs.annotations }}
|
||||||
|
|
||||||
- name: Attest platform-specific images
|
- name: Attest platform-specific images
|
||||||
uses: actions/attest-build-provenance@v1
|
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
|
||||||
if: github.event_name != 'pull_request'
|
if: github.event_name != 'pull_request'
|
||||||
with:
|
with:
|
||||||
subject-name: ${{ env.GHCR_REPO }}
|
subject-name: ${{ env.GHCR_REPO }}
|
||||||
@@ -216,7 +226,7 @@ jobs:
|
|||||||
echo "digest=$digest" >> $GITHUB_OUTPUT
|
echo "digest=$digest" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Attest GHCR images
|
- name: Attest GHCR images
|
||||||
uses: actions/attest-build-provenance@v1
|
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
|
||||||
if: github.event_name != 'pull_request'
|
if: github.event_name != 'pull_request'
|
||||||
with:
|
with:
|
||||||
subject-name: ${{ env.GHCR_REPO }}
|
subject-name: ${{ env.GHCR_REPO }}
|
||||||
@@ -240,7 +250,7 @@ jobs:
|
|||||||
echo "digest=$digest" >> $GITHUB_OUTPUT
|
echo "digest=$digest" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Attest Dockerhub images
|
- name: Attest Dockerhub images
|
||||||
uses: actions/attest-build-provenance@v1
|
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
|
||||||
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
|
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
|
||||||
with:
|
with:
|
||||||
subject-name: docker.io/${{ env.DOCKERHUB_REPO }}
|
subject-name: docker.io/${{ env.DOCKERHUB_REPO }}
|
||||||
|
|||||||
52
.github/workflows/docker-publish-rootless.yaml
vendored
52
.github/workflows/docker-publish-rootless.yaml
vendored
@@ -37,7 +37,7 @@ env:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ matrix.runner }}
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
packages: write
|
packages: write
|
||||||
@@ -47,10 +47,11 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
platform:
|
include:
|
||||||
- linux/amd64
|
- platform: linux/amd64
|
||||||
- linux/arm64
|
runner: ubuntu-latest
|
||||||
- linux/arm/v7
|
- platform: linux/arm64
|
||||||
|
runner: ubuntu-24.04-arm
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Enable Debug Logs
|
- name: Enable Debug Logs
|
||||||
@@ -60,7 +61,7 @@ jobs:
|
|||||||
ACTIONS_STEP_DEBUG: true
|
ACTIONS_STEP_DEBUG: true
|
||||||
|
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
|
||||||
|
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
@@ -75,40 +76,49 @@ jobs:
|
|||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v5
|
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f
|
||||||
with:
|
with:
|
||||||
images: |
|
images: |
|
||||||
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
|
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
|
||||||
name=${{ env.GHCR_REPO }}
|
name=${{ env.GHCR_REPO }}
|
||||||
|
tags: |
|
||||||
|
type=ref,event=branch
|
||||||
|
type=ref,event=pr
|
||||||
|
type=semver,pattern={{version}}
|
||||||
|
type=semver,pattern={{major}}.{{minor}}
|
||||||
|
type=semver,pattern={{major}}
|
||||||
|
type=schedule,pattern=nightly
|
||||||
|
flavor: |
|
||||||
|
suffix=-rootless,onlatest=true
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
|
||||||
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
|
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
username: ${{ secrets.DOCKER_USERNAME }}
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||||
|
|
||||||
- name: Login to GHCR
|
- name: Login to GHCR
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v3
|
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392
|
||||||
with:
|
with:
|
||||||
image: ghcr.io/sysadminsmedia/binfmt:latest
|
image: ghcr.io/sysadminsmedia/binfmt:latest
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435
|
||||||
with:
|
with:
|
||||||
driver-opts: |
|
driver-opts: |
|
||||||
image=ghcr.io/sysadminsmedia/buildkit:master
|
image=ghcr.io/sysadminsmedia/buildkit:master
|
||||||
|
|
||||||
- name: Build and push by digest
|
- name: Build and push by digest
|
||||||
id: build
|
id: build
|
||||||
uses: docker/build-push-action@v6
|
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83
|
||||||
with:
|
with:
|
||||||
context: . # Explicitly specify the build context
|
context: . # Explicitly specify the build context
|
||||||
file: ./Dockerfile.rootless # Explicitly specify the Dockerfile
|
file: ./Dockerfile.rootless # Explicitly specify the Dockerfile
|
||||||
@@ -125,7 +135,7 @@ jobs:
|
|||||||
annotations: ${{ steps.meta.outputs.annotations }}
|
annotations: ${{ steps.meta.outputs.annotations }}
|
||||||
|
|
||||||
- name: Attest platform-specific images
|
- name: Attest platform-specific images
|
||||||
uses: actions/attest-build-provenance@v1
|
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
|
||||||
if: github.event_name != 'pull_request'
|
if: github.event_name != 'pull_request'
|
||||||
with:
|
with:
|
||||||
subject-name: ${{ env.GHCR_REPO }}
|
subject-name: ${{ env.GHCR_REPO }}
|
||||||
@@ -139,7 +149,7 @@ jobs:
|
|||||||
touch "/tmp/digests/${digest#sha256:}"
|
touch "/tmp/digests/${digest#sha256:}"
|
||||||
|
|
||||||
- name: Upload digest
|
- name: Upload digest
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
|
||||||
with:
|
with:
|
||||||
name: digests-${{ env.PLATFORM_PAIR }}
|
name: digests-${{ env.PLATFORM_PAIR }}
|
||||||
path: /tmp/digests/*
|
path: /tmp/digests/*
|
||||||
@@ -159,35 +169,35 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Download digests
|
- name: Download digests
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||||
with:
|
with:
|
||||||
path: /tmp/digests
|
path: /tmp/digests
|
||||||
pattern: digests-*
|
pattern: digests-*
|
||||||
merge-multiple: true
|
merge-multiple: true
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
|
||||||
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
|
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
username: ${{ secrets.DOCKER_USERNAME }}
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||||
|
|
||||||
- name: Login to GHCR
|
- name: Login to GHCR
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435
|
||||||
with:
|
with:
|
||||||
driver-opts: |
|
driver-opts: |
|
||||||
image=ghcr.io/sysadminsmedia/buildkit:master
|
image=ghcr.io/sysadminsmedia/buildkit:master
|
||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v5
|
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f
|
||||||
with:
|
with:
|
||||||
images: |
|
images: |
|
||||||
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
|
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
|
||||||
@@ -218,7 +228,7 @@ jobs:
|
|||||||
echo "digest=$digest" >> $GITHUB_OUTPUT
|
echo "digest=$digest" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Attest GHCR images
|
- name: Attest GHCR images
|
||||||
uses: actions/attest-build-provenance@v1
|
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
|
||||||
if: github.event_name != 'pull_request'
|
if: github.event_name != 'pull_request'
|
||||||
with:
|
with:
|
||||||
subject-name: ${{ env.GHCR_REPO }}
|
subject-name: ${{ env.GHCR_REPO }}
|
||||||
@@ -242,7 +252,7 @@ jobs:
|
|||||||
echo "digest=$digest" >> $GITHUB_OUTPUT
|
echo "digest=$digest" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Attest Dockerhub images
|
- name: Attest Dockerhub images
|
||||||
uses: actions/attest-build-provenance@v1
|
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
|
||||||
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
|
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
|
||||||
with:
|
with:
|
||||||
subject-name: docker.io/${{ env.DOCKERHUB_REPO }}
|
subject-name: docker.io/${{ env.DOCKERHUB_REPO }}
|
||||||
|
|||||||
50
.github/workflows/docker-publish.yaml
vendored
50
.github/workflows/docker-publish.yaml
vendored
@@ -37,7 +37,7 @@ permissions:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ matrix.runner }}
|
||||||
permissions:
|
permissions:
|
||||||
contents: read # Allows access to repository contents (read-only)
|
contents: read # Allows access to repository contents (read-only)
|
||||||
packages: write # Allows pushing to GHCR
|
packages: write # Allows pushing to GHCR
|
||||||
@@ -47,14 +47,15 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
platform:
|
include:
|
||||||
- linux/amd64
|
- platform: linux/amd64
|
||||||
- linux/arm64
|
runner: ubuntu-latest
|
||||||
- linux/arm/v7
|
- platform: linux/arm64
|
||||||
|
runner: ubuntu-24.04-arm
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
|
||||||
|
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
@@ -70,40 +71,47 @@ jobs:
|
|||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v5
|
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f
|
||||||
with:
|
with:
|
||||||
images: |
|
images: |
|
||||||
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
|
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
|
||||||
name=${{ env.GHCR_REPO }}
|
name=${{ env.GHCR_REPO }}
|
||||||
|
tags: |
|
||||||
|
type=ref,event=branch
|
||||||
|
type=ref,event=pr
|
||||||
|
type=semver,pattern={{version}}
|
||||||
|
type=semver,pattern={{major}}.{{minor}}
|
||||||
|
type=semver,pattern={{major}}
|
||||||
|
type=schedule,pattern=nightly
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
|
||||||
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
|
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
username: ${{ secrets.DOCKER_USERNAME }}
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||||
|
|
||||||
- name: Login to GHCR
|
- name: Login to GHCR
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v3
|
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392
|
||||||
with:
|
with:
|
||||||
image: ghcr.io/sysadminsmedia/binfmt:latest
|
image: ghcr.io/sysadminsmedia/binfmt:latest
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435
|
||||||
with:
|
with:
|
||||||
driver-opts: |
|
driver-opts: |
|
||||||
image=ghcr.io/sysadminsmedia/buildkit:latest
|
image=ghcr.io/sysadminsmedia/buildkit:latest
|
||||||
|
|
||||||
- name: Build and push by digest
|
- name: Build and push by digest
|
||||||
id: build
|
id: build
|
||||||
uses: docker/build-push-action@v6
|
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83
with:
platforms: ${{ matrix.platform }}
labels: ${{ steps.meta.outputs.labels }}
@@ -118,7 +126,7 @@ jobs:
annotations: ${{ steps.meta.outputs.annotations }}

- name: Attest platform-specific images
-uses: actions/attest-build-provenance@v1
+uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
if: github.event_name != 'pull_request'
with:
subject-name: ${{ env.GHCR_REPO }}
@@ -132,7 +140,7 @@ jobs:
touch "/tmp/digests/${digest#sha256:}"

- name: Upload digest
-uses: actions/upload-artifact@v4
+uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
with:
name: digests-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
@@ -152,35 +160,35 @@ jobs:

steps:
- name: Download digests
-uses: actions/download-artifact@v4
+uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true

- name: Login to Docker Hub
-uses: docker/login-action@v3
+uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}

- name: Login to GHCR
-uses: docker/login-action@v3
+uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Set up Docker Buildx
-uses: docker/setup-buildx-action@v3
+uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435
with:
driver-opts: |
image=ghcr.io/sysadminsmedia/buildkit:master

- name: Docker meta
id: meta
-uses: docker/metadata-action@v5
+uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f
with:
images: |
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
@@ -209,7 +217,7 @@ jobs:
echo "digest=$digest" >> $GITHUB_OUTPUT

- name: Attest GHCR images
-uses: actions/attest-build-provenance@v1
+uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
if: github.event_name != 'pull_request'
with:
subject-name: ${{ env.GHCR_REPO }}
@@ -233,7 +241,7 @@ jobs:
echo "digest=$digest" >> $GITHUB_OUTPUT

- name: Attest Dockerhub images
-uses: actions/attest-build-provenance@v1
+uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
with:
subject-name: docker.io/${{ env.DOCKERHUB_REPO }}
.github/workflows/e2e-partial.yaml (vendored, 34 changed lines)
@@ -1,5 +1,11 @@
name: E2E (Playwright)

+permissions:
+contents: read
+actions: read
+checks: write
+pull-requests: write

on:
workflow_call:

@@ -15,28 +21,26 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
-uses: actions/checkout@v4
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
with:
fetch-depth: 0

- name: Install Task
-uses: arduino/setup-task@v1
+uses: arduino/setup-task@b91d5d2c96a56797b48ac1e0e89220bf64044611
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}

- name: Set up Go
-uses: actions/setup-go@v5
+uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c
with:
-go-version: "1.23"
+go-version: "1.24"
cache-dependency-path: backend/go.mod

-- uses: actions/setup-node@v4
+- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f
with:
node-version: lts/*

-- uses: pnpm/action-setup@v3.0.0
+- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
-with:
-version: 9.12.2

- name: Install dependencies
run: pnpm install
@@ -49,7 +53,7 @@ jobs:
- name: Run E2E Tests
run: task test:e2e -- --shard=${{ matrix.shardIndex }}/${{ matrix.shardTotal }}

-- uses: actions/upload-artifact@v4
+- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
name: Upload partial Playwright report
if: ${{ !cancelled() }}
with:
@@ -64,20 +68,18 @@ jobs:
name: Merge Playwright Reports
runs-on: ubuntu-latest
steps:
-- uses: actions/checkout@v4
+- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
-- uses: actions/setup-node@v4
+- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f
with:
node-version: lts/*
-- uses: pnpm/action-setup@v3.0.0
+- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
-with:
-version: 9.12.2

- name: Install dependencies
run: pnpm install
working-directory: frontend

- name: Download blob reports from GitHub Actions Artifacts
-uses: actions/download-artifact@v4
+uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
path: frontend/all-blob-reports
pattern: blob-report-*
@@ -88,7 +90,7 @@ jobs:
working-directory: frontend

- name: Upload HTML report
-uses: actions/upload-artifact@v4
+uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
with:
name: html-report--attempt-${{ github.run_attempt }}
path: frontend/playwright-report
.github/workflows/issue-gatekeeper.yml (new file, vendored, 50 lines)
@@ -0,0 +1,50 @@
name: Issue Gatekeeper

permissions:
issues: write

on:
issues:
types: [ opened ]

jobs:
check-permissions:
runs-on: ubuntu-latest
steps:
- name: Verify Internal Template Use
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd
with:
script: |
const { owner, repo } = context.repo;
const issue_number = context.issue.number;
const actor = context.payload.sender.login;

// 1. Get user permission level
const { data: perms } = await github.rest.repos.getCollaboratorPermissionLevel({
owner,
repo,
username: actor
});

const isMember = ['admin', 'write'].includes(perms.permission);
const body = context.payload.issue.body || "";

// 2. Check if they used the internal template (or if the issue is blank)
// We detect this by checking for our specific template string or the 'internal' label
const usedInternal = context.payload.issue.labels.some(l => l.name === 'internal');

if (usedInternal && !isMember) {
await github.rest.issues.createComment({
owner,
repo,
issue_number,
body: `@${actor}, the "Internal" template is restricted to project members. Please use one of the standard bug or feature templates for this repository.`
});

await github.rest.issues.update({
owner,
repo,
issue_number,
state: 'closed'
});
}
.github/workflows/partial-backend.yaml (vendored, 14 changed lines)
@@ -1,5 +1,11 @@
name: Go Build/Test

+permissions:
+contents: read
+actions: read
+checks: write
+pull-requests: write

on:
workflow_call:

@@ -7,21 +13,21 @@ jobs:
Go:
runs-on: ubuntu-latest
steps:
-- uses: actions/checkout@v4
+- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8

- name: Set up Go
-uses: actions/setup-go@v5
+uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c
with:
go-version: "1.24"
cache-dependency-path: backend/go.mod

- name: Install Task
-uses: arduino/setup-task@v1
+uses: arduino/setup-task@b91d5d2c96a56797b48ac1e0e89220bf64044611
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}

- name: golangci-lint
-uses: golangci/golangci-lint-action@v7
+uses: golangci/golangci-lint-action@1e7e51e771db61008b38414a730f564565cf7c20
with:
# Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
version: latest
.github/workflows/partial-frontend.yaml (vendored, 42 changed lines)
@@ -1,5 +1,11 @@
name: Frontend

+permissions:
+contents: read
+actions: read
+checks: write
+pull-requests: write

on:
workflow_call:

@@ -9,13 +15,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
-uses: actions/checkout@v4
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
with:
fetch-depth: 0

-- uses: pnpm/action-setup@v3.0.0
+- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
-with:
-version: 9.12.2

- name: Install dependencies
run: pnpm install
@@ -48,28 +52,26 @@ jobs:
--health-retries 5
steps:
- name: Checkout
-uses: actions/checkout@v4
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
with:
fetch-depth: 0

- name: Install Task
-uses: arduino/setup-task@v1
+uses: arduino/setup-task@b91d5d2c96a56797b48ac1e0e89220bf64044611
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}

- name: Set up Go
-uses: actions/setup-go@v5
+uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c
with:
-go-version: "1.23"
+go-version: "1.24"
cache-dependency-path: backend/go.mod

-- uses: actions/setup-node@v4
+- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f
with:
-node-version: 18
+node-version: lts/*

-- uses: pnpm/action-setup@v3.0.0
+- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
-with:
-version: 9.12.2

- name: Install dependencies
run: pnpm install
@@ -99,28 +101,26 @@ jobs:
- 5432:5432
steps:
- name: Checkout
-uses: actions/checkout@v4
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
with:
fetch-depth: 0

- name: Install Task
-uses: arduino/setup-task@v1
+uses: arduino/setup-task@b91d5d2c96a56797b48ac1e0e89220bf64044611
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}

- name: Set up Go
-uses: actions/setup-go@v5
+uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c
with:
-go-version: "1.23"
+go-version: "1.24"
cache-dependency-path: backend/go.mod

-- uses: actions/setup-node@v4
+- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f
with:
node-version: lts/*

-- uses: pnpm/action-setup@v3.0.0
+- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
-with:
-version: 9.12.2

- name: Install dependencies
run: pnpm install
.github/workflows/pull-requests.yaml (vendored, 6 changed lines)
@@ -1,5 +1,11 @@
name: Pull Request CI

+permissions:
+contents: read
+actions: read
+checks: write
+pull-requests: write

on:
pull_request:
branches:
.github/workflows/update-currencies.yml (vendored, 6 changed lines)
@@ -15,12 +15,12 @@ jobs:

steps:
- name: Checkout code
-uses: actions/checkout@v4
+uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
with:
fetch-depth: 0

- name: Set up Python
-uses: actions/setup-python@v5
+uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548
with:
python-version: '3.8'
cache: 'pip'
@@ -44,7 +44,7 @@ jobs:

- name: Create Pull Request
if: env.changed == 'true'
-uses: peter-evans/create-pull-request@v7
+uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725
with:
token: ${{ secrets.GITHUB_TOKEN }}
branch: update-currencies
.github/workflows/upgrade-test.yaml (new file, vendored, 177 lines)
@@ -0,0 +1,177 @@
#name: HomeBox Upgrade Test

# on:
# schedule:
# Run daily at 2 AM UTC
# - cron: '0 2 * * *'
# workflow_dispatch: # Allow manual trigger
# push:
# branches:
# - main
# paths:
# - '.github/workflows/upgrade-test.yaml'
# - '.github/scripts/upgrade-test/**'

jobs:
upgrade-test:
name: Test Upgrade Path
runs-on: ubuntu-latest
timeout-minutes: 60
permissions:
contents: read # Read repository contents
packages: read # Pull Docker images from GHCR

steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: lts/*

- name: Install pnpm
uses: pnpm/action-setup@v3.0.0
with:
version: 9.12.2

- name: Install Playwright
run: |
cd frontend
pnpm install
pnpm exec playwright install --with-deps chromium

- name: Create test data directory
run: |
mkdir -p /tmp/homebox-data-old
mkdir -p /tmp/homebox-data-new
chmod -R 777 /tmp/homebox-data-old
chmod -R 777 /tmp/homebox-data-new

# Step 1: Pull and deploy latest stable version
- name: Pull latest stable HomeBox image
run: |
docker pull ghcr.io/sysadminsmedia/homebox:latest

- name: Start HomeBox (stable version)
run: |
docker run -d \
--name homebox-old \
--restart unless-stopped \
-p 7745:7745 \
-e HBOX_LOG_LEVEL=debug \
-e HBOX_OPTIONS_ALLOW_REGISTRATION=true \
-e TZ=UTC \
-v /tmp/homebox-data-old:/data \
ghcr.io/sysadminsmedia/homebox:latest

# Wait for the service to be ready
timeout 60 bash -c 'until curl -f http://localhost:7745/api/v1/status; do sleep 2; done'
echo "HomeBox stable version is ready"

# Step 2: Create test data
- name: Create test data
run: |
chmod +x .github/scripts/upgrade-test/create-test-data.sh
.github/scripts/upgrade-test/create-test-data.sh
env:
HOMEBOX_URL: http://localhost:7745

- name: Verify initial data creation
run: |
echo "Verifying test data was created..."
# Check if database file exists and has content
if [ -f /tmp/homebox-data-old/homebox.db ]; then
ls -lh /tmp/homebox-data-old/homebox.db
echo "Database file exists"
else
echo "Database file not found!"
exit 1
fi

- name: Stop old HomeBox instance
run: |
docker stop homebox-old
docker rm homebox-old

# Step 3: Build latest version from main branch
- name: Build HomeBox from main branch
run: |
docker build \
--build-arg VERSION=main \
--build-arg COMMIT=${{ github.sha }} \
--build-arg BUILD_TIME="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
-t homebox:test \
-f Dockerfile \
.

# Step 4: Copy data and start new version
- name: Copy data to new location
run: |
cp -r /tmp/homebox-data-old/* /tmp/homebox-data-new/
chmod -R 777 /tmp/homebox-data-new

- name: Start HomeBox (new version)
run: |
docker run -d \
--name homebox-new \
--restart unless-stopped \
-p 7745:7745 \
-e HBOX_LOG_LEVEL=debug \
-e HBOX_OPTIONS_ALLOW_REGISTRATION=true \
-e TZ=UTC \
-v /tmp/homebox-data-new:/data \
homebox:test

# Wait for the service to be ready
timeout 60 bash -c 'until curl -f http://localhost:7745/api/v1/status; do sleep 2; done'
echo "HomeBox new version is ready"

# Step 5: Run verification tests with Playwright
- name: Run verification tests
run: |
cd frontend
TEST_DATA_FILE=/tmp/test-users.json \
E2E_BASE_URL=http://localhost:7745 \
pnpm exec playwright test \
-c ./test/playwright.config.ts \
--project=chromium \
test/upgrade/upgrade-verification.spec.ts
env:
HOMEBOX_URL: http://localhost:7745

- name: Upload Playwright report
uses: actions/upload-artifact@v4
if: always()
with:
name: playwright-report-upgrade-test
path: frontend/playwright-report/
retention-days: 30

- name: Upload test traces
uses: actions/upload-artifact@v4
if: always()
with:
name: playwright-traces
path: frontend/test-results/
retention-days: 7

- name: Collect logs on failure
if: failure()
run: |
echo "=== Docker logs for new version ==="
docker logs homebox-new || true
echo "=== Database content ==="
ls -la /tmp/homebox-data-new/ || true

- name: Cleanup
if: always()
run: |
docker stop homebox-new || true
docker rm homebox-new || true
docker rmi homebox:test || true
@@ -8,7 +8,7 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRI
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
-github.com/sysadminsmedia/homebox/backend v0.0.0-20251212183312-2d1d3d927bfd h1:QULUJSgHc4rSlTjb2qYT6FIgwDWFCqEpnYqc/ltsrkk=
+github.com/sysadminsmedia/homebox/backend v0.0.0-20251228172914-2a6773d1d610 h1:kNLtnxaPaOryBUZ7RgUHPQVWxIExXYR/q9pYCbum5Vk=
-github.com/sysadminsmedia/homebox/backend v0.0.0-20251212183312-2d1d3d927bfd/go.mod h1:jB+tPmHtPDM1VnAjah0gvcRfP/s7c+rtQwpA8cvZD/U=
+github.com/sysadminsmedia/homebox/backend v0.0.0-20251228172914-2a6773d1d610/go.mod h1:9zHHw5TNttw5Kn4Wks+SxwXmJPz6PgGNbnB4BtF1Z4c=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@@ -21,6 +21,9 @@
<img src="https://img.shields.io/mastodon/follow/110749314839831923?domain=infosec.exchange"/>
<img src="https://img.shields.io/lemmy/homebox%40lemmy.world?label=lemmy"/>
</p>
+<p align="center" style="width: 100%;">
+<a href="https://www.pikapods.com/pods?run=homebox"><img src="https://www.pikapods.com/static/run-button.svg"/></a>
+</p>

## What is HomeBox

@@ -17,8 +17,6 @@ builds:
- freebsd
goarch:
- amd64
-- "386"
-- arm
- arm64
- riscv64
flags:
@@ -28,20 +26,9 @@ builds:
- -X main.version={{.Version}}
- -X main.commit={{.Commit}}
- -X main.date={{.Date}}
-ignore:
-- goos: windows
-goarch: arm
-- goos: windows
-goarch: "386"
-- goos: freebsd
-goarch: arm
-- goos: freebsd
-goarch: "386"
tags:
- >-
{{- if eq .Arch "riscv64" }}nodynamic
-{{- else if eq .Arch "arm" }}nodynamic
-{{- else if eq .Arch "386" }}nodynamic
{{- else if eq .Os "freebsd" }}nodynamic
{{ end }}

@@ -62,7 +49,6 @@ archives:
{{ .ProjectName }}_
{{- title .Os }}_
{{- if eq .Arch "amd64" }}x86_64
-{{- else if eq .Arch "386" }}i386
{{- else }}{{ .Arch }}{{ end }}
{{- if .Arm }}v{{ .Arm }}{{ end }}
# use zip for windows archives
@@ -2,6 +2,7 @@ package v1

import (
"context"
+"errors"
"net/http"

"github.com/google/uuid"
@@ -9,6 +10,7 @@ import (
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
+"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

@@ -94,3 +96,64 @@ func (ctrl *V1Controller) HandleSetPrimaryPhotos() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleCreateMissingThumbnails() errchain.HandlerFunc {
return actionHandlerFactory("create missing thumbnails", ctrl.repo.Attachments.CreateMissingThumbnails)
}
+
+// WipeInventoryOptions represents the options for wiping inventory
+type WipeInventoryOptions struct {
+WipeLabels bool `json:"wipeLabels"`
+WipeLocations bool `json:"wipeLocations"`
+WipeMaintenance bool `json:"wipeMaintenance"`
+}
+
+// HandleWipeInventory godoc
+//
+// @Summary Wipe Inventory
+// @Description Deletes all items in the inventory
+// @Tags Actions
+// @Produce json
+// @Param options body WipeInventoryOptions false "Wipe options"
+// @Success 200 {object} ActionAmountResult
+// @Router /v1/actions/wipe-inventory [Post]
+// @Security Bearer
+func (ctrl *V1Controller) HandleWipeInventory() errchain.HandlerFunc {
+return func(w http.ResponseWriter, r *http.Request) error {
+if ctrl.isDemo {
+return validate.NewRequestError(errors.New("wipe inventory is not allowed in demo mode"), http.StatusForbidden)
+}
+
+ctx := services.NewContext(r.Context())
+
+// Check if user is owner
+if !ctx.User.IsOwner {
+return validate.NewRequestError(errors.New("only group owners can wipe inventory"), http.StatusForbidden)
+}
+
+// Parse options from request body
+var options WipeInventoryOptions
+if err := server.Decode(r, &options); err != nil {
+// If no body provided, use default (false for all)
+options = WipeInventoryOptions{
+WipeLabels: false,
+WipeLocations: false,
+WipeMaintenance: false,
+}
+}
+
+totalCompleted, err := ctrl.repo.Items.WipeInventory(ctx, ctx.GID, options.WipeLabels, options.WipeLocations, options.WipeMaintenance)
+if err != nil {
+log.Err(err).Str("action_ref", "wipe inventory").Msg("failed to run action")
+return validate.NewRequestError(err, http.StatusInternalServerError)
+}
+
+// Publish mutation events for wiped resources
+if ctrl.bus != nil {
+if options.WipeLabels {
+ctrl.bus.Publish(eventbus.EventLabelMutation, eventbus.GroupMutationEvent{GID: ctx.GID})
+}
+if options.WipeLocations {
+ctrl.bus.Publish(eventbus.EventLocationMutation, eventbus.GroupMutationEvent{GID: ctx.GID})
+}
+}
+
+return server.JSON(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted})
+}
+}
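For reference, a minimal sketch of how a client might call the wipe-inventory endpoint added above. The `/api` prefix and port are inferred from the status check used elsewhere in this compare (`http://localhost:7745/api/v1/status`); the bearer token and the `completed` JSON field name on `ActionAmountResult` are assumptions, not part of the diff.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Request body mirrors v1.WipeInventoryOptions from the diff above.
	opts := map[string]bool{
		"wipeLabels":      true,
		"wipeLocations":   false,
		"wipeMaintenance": false,
	}
	payload, _ := json.Marshal(opts)

	req, err := http.NewRequest(http.MethodPost,
		"http://localhost:7745/api/v1/actions/wipe-inventory", // assumed host and base path
		bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer <token>") // hypothetical token of a group owner
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Field name assumed from ActionAmountResult{Completed: ...} in the handler.
	var result struct {
		Completed int `json:"completed"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		panic(err)
	}
	fmt.Println("items wiped:", result.Completed)
}
```

Note that the handler only accepts the request from a group owner and outside demo mode, so a 403 response is the expected result for any other caller.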
@@ -108,7 +108,7 @@ func run(cfg *config.Config) error {
return err
}

-if strings.ToLower(cfg.Database.Driver) == "postgres" {
+if strings.ToLower(cfg.Database.Driver) == config.DriverPostgres {
if !validatePostgresSSLMode(cfg.Database.SslMode) {
log.Error().Str("sslmode", cfg.Database.SslMode).Msg("invalid sslmode")
return fmt.Errorf("invalid sslmode: %s", cfg.Database.SslMode)
@@ -108,6 +108,7 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
r.Post("/actions/ensure-import-refs", chain.ToHandlerFunc(v1Ctrl.HandleEnsureImportRefs(), userMW...))
r.Post("/actions/set-primary-photos", chain.ToHandlerFunc(v1Ctrl.HandleSetPrimaryPhotos(), userMW...))
r.Post("/actions/create-missing-thumbnails", chain.ToHandlerFunc(v1Ctrl.HandleCreateMissingThumbnails(), userMW...))
+r.Post("/actions/wipe-inventory", chain.ToHandlerFunc(v1Ctrl.HandleWipeInventory(), userMW...))

r.Get("/locations", chain.ToHandlerFunc(v1Ctrl.HandleLocationGetAll(), userMW...))
r.Post("/locations", chain.ToHandlerFunc(v1Ctrl.HandleLocationCreate(), userMW...))
@@ -41,7 +41,7 @@ func setupStorageDir(cfg *config.Config) error {
func setupDatabaseURL(cfg *config.Config) (string, error) {
databaseURL := ""
switch strings.ToLower(cfg.Database.Driver) {
-case "sqlite3":
+case config.DriverSqlite3:
databaseURL = cfg.Database.SqlitePath
dbFilePath := strings.Split(cfg.Database.SqlitePath, "?")[0]
dbDir := filepath.Dir(dbFilePath)
@@ -49,7 +49,7 @@ func setupDatabaseURL(cfg *config.Config) (string, error) {
log.Error().Err(err).Str("path", dbDir).Msg("failed to create SQLite database directory")
return "", fmt.Errorf("failed to create SQLite database directory: %w", err)
}
-case "postgres":
+case config.DriverPostgres:
databaseURL = fmt.Sprintf("host=%s port=%s dbname=%s sslmode=%s", cfg.Database.Host, cfg.Database.Port, cfg.Database.Database, cfg.Database.SslMode)
if cfg.Database.Username != "" {
databaseURL += fmt.Sprintf(" user=%s", cfg.Database.Username)
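The `config.DriverSqlite3` and `config.DriverPostgres` constants that these hunks switch to are not shown in this compare; presumably they are plain named constants in the backend config package, along these lines (hypothetical sketch, not part of the diff):

```go
// Hypothetical sketch; the actual declarations live in the backend config
// package and are not visible in this compare.
package config

const (
	DriverSqlite3  = "sqlite3"
	DriverPostgres = "postgres"
)
```

Centralizing the driver names removes the scattered string literals the old `switch` and `if` statements compared against.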
@@ -118,6 +118,41 @@ const docTemplate = `{
}
}
},
+"/v1/actions/wipe-inventory": {
+"post": {
+"security": [
+{
+"Bearer": []
+}
+],
+"description": "Deletes all items in the inventory",
+"produces": [
+"application/json"
+],
+"tags": [
+"Actions"
+],
+"summary": "Wipe Inventory",
+"parameters": [
+{
+"description": "Wipe options",
+"name": "options",
+"in": "body",
+"schema": {
+"$ref": "#/definitions/v1.WipeInventoryOptions"
+}
+}
+],
+"responses": {
+"200": {
+"description": "OK",
+"schema": {
+"$ref": "#/definitions/v1.ActionAmountResult"
+}
+}
+}
+}
+},
"/v1/actions/zero-item-time-fields": {
"post": {
"security": [
@@ -5184,6 +5219,20 @@ const docTemplate = `{
}
}
},
+"v1.WipeInventoryOptions": {
+"type": "object",
+"properties": {
+"wipeLabels": {
+"type": "boolean"
+},
+"wipeLocations": {
+"type": "boolean"
+},
+"wipeMaintenance": {
+"type": "boolean"
+}
+}
+},
"v1.Wrapped": {
"type": "object",
"properties": {
@@ -114,6 +114,42 @@
}
}
},
+"/v1/actions/wipe-inventory": {
+"post": {
+"security": [
+{
+"Bearer": []
+}
+],
+"description": "Deletes all items in the inventory",
+"tags": [
+"Actions"
+],
+"summary": "Wipe Inventory",
+"requestBody": {
+"content": {
+"application/json": {
+"schema": {
+"$ref": "#/components/schemas/v1.WipeInventoryOptions"
+}
+}
+},
+"description": "Wipe options"
+},
+"responses": {
+"200": {
+"description": "OK",
+"content": {
+"application/json": {
+"schema": {
+"$ref": "#/components/schemas/v1.ActionAmountResult"
+}
+}
+}
+}
+}
+}
+},
"/v1/actions/zero-item-time-fields": {
"post": {
"security": [
@@ -5381,6 +5417,20 @@
}
}
},
+"v1.WipeInventoryOptions": {
+"type": "object",
+"properties": {
+"wipeLabels": {
+"type": "boolean"
+},
+"wipeLocations": {
+"type": "boolean"
+},
+"wipeMaintenance": {
+"type": "boolean"
+}
+}
+},
"v1.Wrapped": {
"type": "object",
"properties": {
@@ -67,6 +67,27 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/v1.ActionAmountResult"
+/v1/actions/wipe-inventory:
+post:
+security:
+- Bearer: []
+description: Deletes all items in the inventory
+tags:
+- Actions
+summary: Wipe Inventory
+requestBody:
+content:
+application/json:
+schema:
+$ref: "#/components/schemas/v1.WipeInventoryOptions"
+description: Wipe options
+responses:
+"200":
+description: OK
+content:
+application/json:
+schema:
+$ref: "#/components/schemas/v1.ActionAmountResult"
/v1/actions/zero-item-time-fields:
post:
security:
@@ -3449,6 +3470,15 @@ components:
type: string
token:
type: string
+v1.WipeInventoryOptions:
+type: object
+properties:
+wipeLabels:
+type: boolean
+wipeLocations:
+type: boolean
+wipeMaintenance:
+type: boolean
v1.Wrapped:
type: object
properties:
@@ -116,6 +116,41 @@
}
}
},
+"/v1/actions/wipe-inventory": {
+"post": {
+"security": [
+{
+"Bearer": []
+}
+],
+"description": "Deletes all items in the inventory",
+"produces": [
+"application/json"
+],
+"tags": [
+"Actions"
+],
+"summary": "Wipe Inventory",
+"parameters": [
+{
+"description": "Wipe options",
+"name": "options",
+"in": "body",
+"schema": {
+"$ref": "#/definitions/v1.WipeInventoryOptions"
+}
+}
+],
+"responses": {
+"200": {
+"description": "OK",
+"schema": {
+"$ref": "#/definitions/v1.ActionAmountResult"
+}
+}
+}
+}
+},
"/v1/actions/zero-item-time-fields": {
"post": {
"security": [
@@ -5182,6 +5217,20 @@
}
}
},
+"v1.WipeInventoryOptions": {
+"type": "object",
+"properties": {
+"wipeLabels": {
+"type": "boolean"
+},
+"wipeLocations": {
+"type": "boolean"
+},
+"wipeMaintenance": {
+"type": "boolean"
+}
+}
+},
"v1.Wrapped": {
"type": "object",
"properties": {
@@ -1867,6 +1867,15 @@ definitions:
token:
type: string
type: object
+v1.WipeInventoryOptions:
+properties:
+wipeLabels:
+type: boolean
+wipeLocations:
+type: boolean
+wipeMaintenance:
+type: boolean
+type: object
v1.Wrapped:
properties:
item: {}
@@ -1947,6 +1956,27 @@ paths:
summary: Set Primary Photos
tags:
- Actions
+/v1/actions/wipe-inventory:
+post:
+description: Deletes all items in the inventory
+parameters:
+- description: Wipe options
+in: body
+name: options
+schema:
+$ref: '#/definitions/v1.WipeInventoryOptions'
+produces:
+- application/json
+responses:
+"200":
+description: OK
+schema:
+$ref: '#/definitions/v1.ActionAmountResult'
+security:
+- Bearer: []
+summary: Wipe Inventory
+tags:
+- Actions
/v1/actions/zero-item-time-fields:
post:
description: Resets all item date fields to the beginning of the day
|||||||
@@ -10,31 +10,22 @@ cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIi
|
|||||||
cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c=
|
cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c=
|
||||||
cloud.google.com/go/compute/metadata v0.9.0 h1:pDUj4QMoPejqq20dK0Pg2N4yG9zIkYGdBtwLoEkH9Zs=
|
cloud.google.com/go/compute/metadata v0.9.0 h1:pDUj4QMoPejqq20dK0Pg2N4yG9zIkYGdBtwLoEkH9Zs=
|
||||||
cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10=
|
cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10=
|
||||||
cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8=
|
|
||||||
cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE=
|
|
||||||
cloud.google.com/go/iam v1.5.3 h1:+vMINPiDF2ognBJ97ABAYYwRgsaqxPbQDlMnbHMjolc=
|
cloud.google.com/go/iam v1.5.3 h1:+vMINPiDF2ognBJ97ABAYYwRgsaqxPbQDlMnbHMjolc=
|
||||||
cloud.google.com/go/iam v1.5.3/go.mod h1:MR3v9oLkZCTlaqljW6Eb2d3HGDGK5/bDv93jhfISFvU=
|
cloud.google.com/go/iam v1.5.3/go.mod h1:MR3v9oLkZCTlaqljW6Eb2d3HGDGK5/bDv93jhfISFvU=
|
||||||
cloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc=
|
|
||||||
cloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA=
|
|
||||||
cloud.google.com/go/logging v1.13.1 h1:O7LvmO0kGLaHY/gq8cV7T0dyp6zJhYAOtZPX4TF3QtY=
|
cloud.google.com/go/logging v1.13.1 h1:O7LvmO0kGLaHY/gq8cV7T0dyp6zJhYAOtZPX4TF3QtY=
|
||||||
cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE=
|
cloud.google.com/go/logging v1.13.1/go.mod h1:XAQkfkMBxQRjQek96WLPNze7vsOmay9H5PqfsNYDqvw=
|
||||||
cloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY=
|
|
||||||
cloud.google.com/go/longrunning v0.7.0 h1:FV0+SYF1RIj59gyoWDRi45GiYUMM3K1qO51qoboQT1E=
|
cloud.google.com/go/longrunning v0.7.0 h1:FV0+SYF1RIj59gyoWDRi45GiYUMM3K1qO51qoboQT1E=
|
||||||
cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM=
|
cloud.google.com/go/longrunning v0.7.0/go.mod h1:ySn2yXmjbK9Ba0zsQqunhDkYi0+9rlXIwnoAf+h+TPY=
|
||||||
cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U=
|
|
||||||
cloud.google.com/go/monitoring v1.24.3 h1:dde+gMNc0UhPZD1Azu6at2e79bfdztVDS5lvhOdsgaE=
|
cloud.google.com/go/monitoring v1.24.3 h1:dde+gMNc0UhPZD1Azu6at2e79bfdztVDS5lvhOdsgaE=
|
||||||
cloud.google.com/go/monitoring v1.24.3/go.mod h1:nYP6W0tm3N9H/bOw8am7t62YTzZY+zUeQ+Bi6+2eonI=
|
cloud.google.com/go/monitoring v1.24.3/go.mod h1:nYP6W0tm3N9H/bOw8am7t62YTzZY+zUeQ+Bi6+2eonI=
|
||||||
cloud.google.com/go/pubsub v1.50.0 h1:hnYpOIxVlgVD1Z8LN7est4DQZK3K6tvZNurZjIVjUe0=
|
|
||||||
cloud.google.com/go/pubsub v1.50.0/go.mod h1:Di2Y+nqXBpIS+dXUEJPQzLh8PbIQZMLE9IVUFhf2zmM=
|
|
||||||
cloud.google.com/go/pubsub v1.50.1 h1:fzbXpPyJnSGvWXF1jabhQeXyxdbCIkXTpjXHy7xviBM=
|
cloud.google.com/go/pubsub v1.50.1 h1:fzbXpPyJnSGvWXF1jabhQeXyxdbCIkXTpjXHy7xviBM=
|
||||||
cloud.google.com/go/pubsub v1.50.1/go.mod h1:6YVJv3MzWJUVdvQXG081sFvS0dWQOdnV+oTo++q/xFk=
|
cloud.google.com/go/pubsub v1.50.1/go.mod h1:6YVJv3MzWJUVdvQXG081sFvS0dWQOdnV+oTo++q/xFk=
|
||||||
cloud.google.com/go/pubsub/v2 v2.2.1 h1:3brZcshL3fIiD1qOxAE2QW9wxsfjioy014x4yC9XuYI=
|
cloud.google.com/go/pubsub/v2 v2.2.1 h1:3brZcshL3fIiD1qOxAE2QW9wxsfjioy014x4yC9XuYI=
|
||||||
cloud.google.com/go/pubsub/v2 v2.2.1/go.mod h1:O5f0KHG9zDheZAd3z5rlCRhxt2JQtB+t/IYLKK3Bpvw=
|
cloud.google.com/go/pubsub/v2 v2.2.1/go.mod h1:O5f0KHG9zDheZAd3z5rlCRhxt2JQtB+t/IYLKK3Bpvw=
|
||||||
cloud.google.com/go/storage v1.56.0 h1:iixmq2Fse2tqxMbWhLWC9HfBj1qdxqAmiK8/eqtsLxI=
|
cloud.google.com/go/storage v1.56.0 h1:iixmq2Fse2tqxMbWhLWC9HfBj1qdxqAmiK8/eqtsLxI=
|
||||||
cloud.google.com/go/storage v1.56.0/go.mod h1:Tpuj6t4NweCLzlNbw9Z9iwxEkrSem20AetIeH/shgVU=
|
cloud.google.com/go/storage v1.56.0/go.mod h1:Tpuj6t4NweCLzlNbw9Z9iwxEkrSem20AetIeH/shgVU=
|
||||||
cloud.google.com/go/trace v1.11.6 h1:2O2zjPzqPYAHrn3OKl029qlqG6W8ZdYaOWRyr8NgMT4=
|
|
||||||
cloud.google.com/go/trace v1.11.6/go.mod h1:GA855OeDEBiBMzcckLPE2kDunIpC72N+Pq8WFieFjnI=
|
|
||||||
cloud.google.com/go/trace v1.11.7 h1:kDNDX8JkaAG3R2nq1lIdkb7FCSi1rCmsEtKVsty7p+U=
|
cloud.google.com/go/trace v1.11.7 h1:kDNDX8JkaAG3R2nq1lIdkb7FCSi1rCmsEtKVsty7p+U=
|
||||||
|
cloud.google.com/go/trace v1.11.7/go.mod h1:TNn9d5V3fQVf6s4SCveVMIBS2LJUqo73GACmq/Tky0s=
|
||||||
entgo.io/ent v0.14.5 h1:Rj2WOYJtCkWyFo6a+5wB3EfBRP0rnx1fMk6gGA0UUe4=
|
entgo.io/ent v0.14.5 h1:Rj2WOYJtCkWyFo6a+5wB3EfBRP0rnx1fMk6gGA0UUe4=
|
||||||
entgo.io/ent v0.14.5/go.mod h1:zTzLmWtPvGpmSwtkaayM2cm5m819NdM7z7tYPq3vN0U=
|
entgo.io/ent v0.14.5/go.mod h1:zTzLmWtPvGpmSwtkaayM2cm5m819NdM7z7tYPq3vN0U=
|
||||||
github.com/Azure/azure-amqp-common-go/v3 v3.2.3 h1:uDF62mbd9bypXWi19V1bN5NZEO84JqgmI5G73ibAmrk=
|
github.com/Azure/azure-amqp-common-go/v3 v3.2.3 h1:uDF62mbd9bypXWi19V1bN5NZEO84JqgmI5G73ibAmrk=
|
||||||
@@ -88,8 +79,6 @@ github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7l
|
|||||||
github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
|
github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
|
||||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
|
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
|
||||||
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
|
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
|
||||||
github.com/ardanlabs/conf/v3 v3.9.0 h1:aRBYHeD39/OkuaEXYIEoi4wvF3OnS7jUAPxXyLfEu20=
|
|
||||||
github.com/ardanlabs/conf/v3 v3.9.0/go.mod h1:XlL9P0quWP4m1weOVFmlezabinbZLI05niDof/+Ochk=
|
|
||||||
github.com/ardanlabs/conf/v3 v3.10.0 h1:qIrJ/WBmH/hFQ/IX4xH9LX9LzwK44T9aEOy78M+4S+0=
|
github.com/ardanlabs/conf/v3 v3.10.0 h1:qIrJ/WBmH/hFQ/IX4xH9LX9LzwK44T9aEOy78M+4S+0=
|
||||||
github.com/ardanlabs/conf/v3 v3.10.0/go.mod h1:XlL9P0quWP4m1weOVFmlezabinbZLI05niDof/+Ochk=
|
github.com/ardanlabs/conf/v3 v3.10.0/go.mod h1:XlL9P0quWP4m1weOVFmlezabinbZLI05niDof/+Ochk=
|
||||||
github.com/aws/aws-sdk-go-v2 v1.39.6 h1:2JrPCVgWJm7bm83BDwY5z8ietmeJUbh3O2ACnn+Xsqk=
|
github.com/aws/aws-sdk-go-v2 v1.39.6 h1:2JrPCVgWJm7bm83BDwY5z8ietmeJUbh3O2ACnn+Xsqk=
|
||||||
@@ -183,14 +172,10 @@ github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzP
|
|||||||
github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k=
|
github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k=
|
||||||
github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
|
github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
|
||||||
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
|
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
|
||||||
github.com/gabriel-vasile/mimetype v1.4.11 h1:AQvxbp830wPhHTqc1u7nzoLT+ZFxGY7emj5DR5DYFik=
|
|
||||||
github.com/gabriel-vasile/mimetype v1.4.11/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
|
|
||||||
github.com/gabriel-vasile/mimetype v1.4.12 h1:e9hWvmLYvtp846tLHam2o++qitpguFiYCKbn0w9jyqw=
|
github.com/gabriel-vasile/mimetype v1.4.12 h1:e9hWvmLYvtp846tLHam2o++qitpguFiYCKbn0w9jyqw=
|
||||||
github.com/gabriel-vasile/mimetype v1.4.12/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
|
github.com/gabriel-vasile/mimetype v1.4.12/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
|
||||||
github.com/gen2brain/avif v0.4.4 h1:Ga/ss7qcWWQm2bxFpnjYjhJsNfZrWs5RsyklgFjKRSE=
|
github.com/gen2brain/avif v0.4.4 h1:Ga/ss7qcWWQm2bxFpnjYjhJsNfZrWs5RsyklgFjKRSE=
|
||||||
github.com/gen2brain/avif v0.4.4/go.mod h1:/XCaJcjZraQwKVhpu9aEd9aLOssYOawLvhMBtmHVGqk=
|
github.com/gen2brain/avif v0.4.4/go.mod h1:/XCaJcjZraQwKVhpu9aEd9aLOssYOawLvhMBtmHVGqk=
|
||||||
github.com/gen2brain/heic v0.4.6 h1:sNh3mfaEZLmDJnFc5WoLxCzh/wj5GwfJScPfvF5CNJE=
|
|
||||||
github.com/gen2brain/heic v0.4.6/go.mod h1:ECnpqbqLu0qSje4KSNWUUDK47UPXPzl80T27GWGEL5I=
|
|
||||||
github.com/gen2brain/heic v0.4.7 h1:xw/e9R3HdIvb+uEhRDMRJdviYnB3ODe/VwL8SYLaMGc=
|
github.com/gen2brain/heic v0.4.7 h1:xw/e9R3HdIvb+uEhRDMRJdviYnB3ODe/VwL8SYLaMGc=
|
||||||
github.com/gen2brain/heic v0.4.7/go.mod h1:ECnpqbqLu0qSje4KSNWUUDK47UPXPzl80T27GWGEL5I=
|
github.com/gen2brain/heic v0.4.7/go.mod h1:ECnpqbqLu0qSje4KSNWUUDK47UPXPzl80T27GWGEL5I=
|
||||||
github.com/gen2brain/jpegxl v0.4.5 h1:TWpVEn5xkIfsswzkjHBArd0Cc9AE0tbjBSoa0jDsrbo=
|
github.com/gen2brain/jpegxl v0.4.5 h1:TWpVEn5xkIfsswzkjHBArd0Cc9AE0tbjBSoa0jDsrbo=
|
||||||
@@ -210,16 +195,10 @@ github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
|
|||||||
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
|
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
|
||||||
github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4=
|
github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4=
|
||||||
github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4=
|
github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4=
|
||||||
github.com/go-openapi/jsonpointer v0.22.3 h1:dKMwfV4fmt6Ah90zloTbUKWMD+0he+12XYAsPotrkn8=
|
|
||||||
github.com/go-openapi/jsonpointer v0.22.3/go.mod h1:0lBbqeRsQ5lIanv3LHZBrmRGHLHcQoOXQnf88fHlGWo=
|
|
||||||
github.com/go-openapi/jsonpointer v0.22.4 h1:dZtK82WlNpVLDW2jlA1YCiVJFVqkED1MegOUy9kR5T4=
|
github.com/go-openapi/jsonpointer v0.22.4 h1:dZtK82WlNpVLDW2jlA1YCiVJFVqkED1MegOUy9kR5T4=
|
||||||
github.com/go-openapi/jsonpointer v0.22.4/go.mod h1:elX9+UgznpFhgBuaMQ7iu4lvvX1nvNsesQ3oxmYTw80=
|
github.com/go-openapi/jsonpointer v0.22.4/go.mod h1:elX9+UgznpFhgBuaMQ7iu4lvvX1nvNsesQ3oxmYTw80=
|
||||||
github.com/go-openapi/jsonreference v0.21.3 h1:96Dn+MRPa0nYAR8DR1E03SblB5FJvh7W6krPI0Z7qMc=
|
|
||||||
github.com/go-openapi/jsonreference v0.21.3/go.mod h1:RqkUP0MrLf37HqxZxrIAtTWW4ZJIK1VzduhXYBEeGc4=
|
|
||||||
github.com/go-openapi/jsonreference v0.21.4 h1:24qaE2y9bx/q3uRK/qN+TDwbok1NhbSmGjjySRCHtC8=
|
github.com/go-openapi/jsonreference v0.21.4 h1:24qaE2y9bx/q3uRK/qN+TDwbok1NhbSmGjjySRCHtC8=
|
||||||
github.com/go-openapi/jsonreference v0.21.4/go.mod h1:rIENPTjDbLpzQmQWCj5kKj3ZlmEh+EFVbz3RTUh30/4=
|
github.com/go-openapi/jsonreference v0.21.4/go.mod h1:rIENPTjDbLpzQmQWCj5kKj3ZlmEh+EFVbz3RTUh30/4=
|
||||||
github.com/go-openapi/spec v0.22.1 h1:beZMa5AVQzRspNjvhe5aG1/XyBSMeX1eEOs7dMoXh/k=
|
|
||||||
github.com/go-openapi/spec v0.22.1/go.mod h1:c7aeIQT175dVowfp7FeCvXXnjN/MrpaONStibD2WtDA=
|
|
||||||
github.com/go-openapi/spec v0.22.3 h1:qRSmj6Smz2rEBxMnLRBMeBWxbbOvuOoElvSvObIgwQc=
|
github.com/go-openapi/spec v0.22.3 h1:qRSmj6Smz2rEBxMnLRBMeBWxbbOvuOoElvSvObIgwQc=
|
||||||
github.com/go-openapi/spec v0.22.3/go.mod h1:iIImLODL2loCh3Vnox8TY2YWYJZjMAKYyLH2Mu8lOZs=
|
github.com/go-openapi/spec v0.22.3/go.mod h1:iIImLODL2loCh3Vnox8TY2YWYJZjMAKYyLH2Mu8lOZs=
|
||||||
github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM=
|
github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM=
|
||||||
@@ -249,8 +228,6 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o
|
|||||||
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
||||||
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
||||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||||
github.com/go-playground/validator/v10 v10.28.0 h1:Q7ibns33JjyW48gHkuFT91qX48KG0ktULL6FgHdG688=
|
|
||||||
github.com/go-playground/validator/v10 v10.28.0/go.mod h1:GoI6I1SjPBh9p7ykNE/yj3fFYbyDOpwMn5KXd+m2hUU=
|
|
||||||
github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w=
|
github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w=
|
||||||
github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM=
|
github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM=
|
||||||
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
|
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
|
||||||
@@ -290,8 +267,6 @@ github.com/google/wire v0.7.0 h1:JxUKI6+CVBgCO2WToKy/nQk0sS+amI9z9EjVmdaocj4=
|
|||||||
github.com/google/wire v0.7.0/go.mod h1:n6YbUQD9cPKTnHXEBN2DXlOp/mVADhVErcMFb0v3J18=
|
github.com/google/wire v0.7.0/go.mod h1:n6YbUQD9cPKTnHXEBN2DXlOp/mVADhVErcMFb0v3J18=
|
||||||
github.com/googleapis/enterprise-certificate-proxy v0.3.7 h1:zrn2Ee/nWmHulBx5sAVrGgAa0f2/R35S4DJwfFaUPFQ=
|
github.com/googleapis/enterprise-certificate-proxy v0.3.7 h1:zrn2Ee/nWmHulBx5sAVrGgAa0f2/R35S4DJwfFaUPFQ=
|
||||||
github.com/googleapis/enterprise-certificate-proxy v0.3.7/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA=
|
github.com/googleapis/enterprise-certificate-proxy v0.3.7/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA=
|
||||||
github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo=
|
|
||||||
github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc=
|
|
||||||
github.com/googleapis/gax-go/v2 v2.16.0 h1:iHbQmKLLZrexmb0OSsNGTeSTS0HO4YvFOG8g5E4Zd0Y=
|
github.com/googleapis/gax-go/v2 v2.16.0 h1:iHbQmKLLZrexmb0OSsNGTeSTS0HO4YvFOG8g5E4Zd0Y=
|
||||||
github.com/googleapis/gax-go/v2 v2.16.0/go.mod h1:o1vfQjjNZn4+dPnRdl/4ZD7S9414Y4xA+a/6Icj6l14=
|
github.com/googleapis/gax-go/v2 v2.16.0/go.mod h1:o1vfQjjNZn4+dPnRdl/4ZD7S9414Y4xA+a/6Icj6l14=
|
||||||
github.com/gorilla/schema v1.4.1 h1:jUg5hUjCSDZpNGLuXQOgIWGdlgrIdYvgQ0wZtdK1M3E=
|
github.com/gorilla/schema v1.4.1 h1:jUg5hUjCSDZpNGLuXQOgIWGdlgrIdYvgQ0wZtdK1M3E=
|
||||||
@@ -362,8 +337,6 @@ github.com/nats-io/jwt/v2 v2.5.0 h1:WQQ40AAlqqfx+f6ku+i0pOVm+ASirD4fUh+oQsiE9Ak=
|
|||||||
github.com/nats-io/jwt/v2 v2.5.0/go.mod h1:24BeQtRwxRV8ruvC4CojXlx/WQ/VjuwlYiH+vu/+ibI=
|
github.com/nats-io/jwt/v2 v2.5.0/go.mod h1:24BeQtRwxRV8ruvC4CojXlx/WQ/VjuwlYiH+vu/+ibI=
|
||||||
github.com/nats-io/nats-server/v2 v2.9.23 h1:6Wj6H6QpP9FMlpCyWUaNu2yeZ/qGj+mdRkZ1wbikExU=
|
github.com/nats-io/nats-server/v2 v2.9.23 h1:6Wj6H6QpP9FMlpCyWUaNu2yeZ/qGj+mdRkZ1wbikExU=
|
||||||
github.com/nats-io/nats-server/v2 v2.9.23/go.mod h1:wEjrEy9vnqIGE4Pqz4/c75v9Pmaq7My2IgFmnykc4C0=
|
github.com/nats-io/nats-server/v2 v2.9.23/go.mod h1:wEjrEy9vnqIGE4Pqz4/c75v9Pmaq7My2IgFmnykc4C0=
|
||||||
github.com/nats-io/nats.go v1.47.0 h1:YQdADw6J/UfGUd2Oy6tn4Hq6YHxCaJrVKayxxFqYrgM=
|
|
||||||
github.com/nats-io/nats.go v1.47.0/go.mod h1:iRWIPokVIFbVijxuMQq4y9ttaBTMe0SFdlZfMDd+33g=
|
|
||||||
github.com/nats-io/nats.go v1.48.0 h1:pSFyXApG+yWU/TgbKCjmm5K4wrHu86231/w84qRVR+U=
|
github.com/nats-io/nats.go v1.48.0 h1:pSFyXApG+yWU/TgbKCjmm5K4wrHu86231/w84qRVR+U=
|
||||||
github.com/nats-io/nats.go v1.48.0/go.mod h1:iRWIPokVIFbVijxuMQq4y9ttaBTMe0SFdlZfMDd+33g=
|
github.com/nats-io/nats.go v1.48.0/go.mod h1:iRWIPokVIFbVijxuMQq4y9ttaBTMe0SFdlZfMDd+33g=
|
||||||
github.com/nats-io/nkeys v0.4.12 h1:nssm7JKOG9/x4J8II47VWCL1Ds29avyiQDRn0ckMvDc=
|
github.com/nats-io/nkeys v0.4.12 h1:nssm7JKOG9/x4J8II47VWCL1Ds29avyiQDRn0ckMvDc=
|
||||||
@@ -380,8 +353,6 @@ github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE=
|
|||||||
github.com/onsi/gomega v1.27.6/go.mod h1:PIQNjfQwkP3aQAH7lf7j87O/5FiNr+ZR8+ipb+qQlhg=
|
github.com/onsi/gomega v1.27.6/go.mod h1:PIQNjfQwkP3aQAH7lf7j87O/5FiNr+ZR8+ipb+qQlhg=
|
||||||
github.com/philhofer/fwd v1.2.0 h1:e6DnBTl7vGY+Gz322/ASL4Gyp1FspeMvx1RNDoToZuM=
|
github.com/philhofer/fwd v1.2.0 h1:e6DnBTl7vGY+Gz322/ASL4Gyp1FspeMvx1RNDoToZuM=
|
||||||
github.com/philhofer/fwd v1.2.0/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM=
|
github.com/philhofer/fwd v1.2.0/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM=
|
||||||
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
|
|
||||||
github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
|
||||||
github.com/pierrec/lz4/v4 v4.1.23 h1:oJE7T90aYBGtFNrI8+KbETnPymobAhzRrR8Mu8n1yfU=
|
github.com/pierrec/lz4/v4 v4.1.23 h1:oJE7T90aYBGtFNrI8+KbETnPymobAhzRrR8Mu8n1yfU=
|
||||||
github.com/pierrec/lz4/v4 v4.1.23/go.mod h1:EoQMVJgeeEOMsCqCzqFm2O0cJvljX2nGZjcRIPL34O4=
|
github.com/pierrec/lz4/v4 v4.1.23/go.mod h1:EoQMVJgeeEOMsCqCzqFm2O0cJvljX2nGZjcRIPL34O4=
|
||||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
|
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
|
||||||
@@ -437,8 +408,6 @@ github.com/swaggo/http-swagger/v2 v2.0.2 h1:FKCdLsl+sFCx60KFsyM0rDarwiUSZ8DqbfSy
|
|||||||
github.com/swaggo/http-swagger/v2 v2.0.2/go.mod h1:r7/GBkAWIfK6E/OLnE8fXnviHiDeAHmgIyooa4xm3AQ=
|
github.com/swaggo/http-swagger/v2 v2.0.2/go.mod h1:r7/GBkAWIfK6E/OLnE8fXnviHiDeAHmgIyooa4xm3AQ=
|
||||||
github.com/swaggo/swag v1.16.6 h1:qBNcx53ZaX+M5dxVyTrgQ0PJ/ACK+NzhwcbieTt+9yI=
|
github.com/swaggo/swag v1.16.6 h1:qBNcx53ZaX+M5dxVyTrgQ0PJ/ACK+NzhwcbieTt+9yI=
|
||||||
github.com/swaggo/swag v1.16.6/go.mod h1:ngP2etMK5a0P3QBizic5MEwpRmluJZPHjXcMoj4Xesg=
|
github.com/swaggo/swag v1.16.6/go.mod h1:ngP2etMK5a0P3QBizic5MEwpRmluJZPHjXcMoj4Xesg=
|
||||||
github.com/tetratelabs/wazero v1.10.1 h1:2DugeJf6VVk58KTPszlNfeeN8AhhpwcZqkJj2wwFuH8=
|
|
||||||
github.com/tetratelabs/wazero v1.10.1/go.mod h1:DRm5twOQ5Gr1AoEdSi0CLjDQF1J9ZAuyqFIjl1KKfQU=
|
|
||||||
github.com/tetratelabs/wazero v1.11.0 h1:+gKemEuKCTevU4d7ZTzlsvgd1uaToIDtlQlmNbwqYhA=
|
github.com/tetratelabs/wazero v1.11.0 h1:+gKemEuKCTevU4d7ZTzlsvgd1uaToIDtlQlmNbwqYhA=
|
||||||
github.com/tetratelabs/wazero v1.11.0/go.mod h1:eV28rsN8Q+xwjogd7f4/Pp4xFxO7uOGbLcD/LzB1wiU=
|
github.com/tetratelabs/wazero v1.11.0/go.mod h1:eV28rsN8Q+xwjogd7f4/Pp4xFxO7uOGbLcD/LzB1wiU=
|
||||||
github.com/tinylib/msgp v1.6.1 h1:ESRv8eL3u+DNHUoSAAQRE50Hm162zqAnBoGv9PzScPY=
|
github.com/tinylib/msgp v1.6.1 h1:ESRv8eL3u+DNHUoSAAQRE50Hm162zqAnBoGv9PzScPY=
|
||||||
@@ -476,26 +445,16 @@ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.6
|
|||||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0/go.mod h1:ru6KHrNtNHxM4nD/vd6QrLVWgKhxPYgblq4VAtNawTQ=
|
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0/go.mod h1:ru6KHrNtNHxM4nD/vd6QrLVWgKhxPYgblq4VAtNawTQ=
|
||||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 h1:Hf9xI/XLML9ElpiHVDNwvqI0hIFlzV8dgIr35kV1kRU=
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 h1:Hf9xI/XLML9ElpiHVDNwvqI0hIFlzV8dgIr35kV1kRU=
|
||||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0/go.mod h1:NfchwuyNoMcZ5MLHwPrODwUF1HWCXWrL31s8gSAdIKY=
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0/go.mod h1:NfchwuyNoMcZ5MLHwPrODwUF1HWCXWrL31s8gSAdIKY=
|
||||||
go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8=
|
|
||||||
go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM=
|
|
||||||
go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
|
go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
|
||||||
go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8=
|
go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8=
|
||||||
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.37.0 h1:6VjV6Et+1Hd2iLZEPtdV7vie80Yyqf7oikJLjQ/myi0=
|
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.37.0 h1:6VjV6Et+1Hd2iLZEPtdV7vie80Yyqf7oikJLjQ/myi0=
|
||||||
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.37.0/go.mod h1:u8hcp8ji5gaM/RfcOo8z9NMnf1pVLfVY7lBY2VOGuUU=
|
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.37.0/go.mod h1:u8hcp8ji5gaM/RfcOo8z9NMnf1pVLfVY7lBY2VOGuUU=
|
||||||
go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA=
|
|
||||||
go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI=
|
|
||||||
go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0=
|
go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0=
|
||||||
go.opentelemetry.io/otel/metric v1.39.0/go.mod h1:jrZSWL33sD7bBxg1xjrqyDjnuzTUB0x1nBERXd7Ftcs=
|
go.opentelemetry.io/otel/metric v1.39.0/go.mod h1:jrZSWL33sD7bBxg1xjrqyDjnuzTUB0x1nBERXd7Ftcs=
|
||||||
go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E=
|
|
||||||
go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg=
|
|
||||||
go.opentelemetry.io/otel/sdk v1.39.0 h1:nMLYcjVsvdui1B/4FRkwjzoRVsMK8uL/cj0OyhKzt18=
|
go.opentelemetry.io/otel/sdk v1.39.0 h1:nMLYcjVsvdui1B/4FRkwjzoRVsMK8uL/cj0OyhKzt18=
|
||||||
go.opentelemetry.io/otel/sdk v1.39.0/go.mod h1:vDojkC4/jsTJsE+kh+LXYQlbL8CgrEcwmt1ENZszdJE=
|
go.opentelemetry.io/otel/sdk v1.39.0/go.mod h1:vDojkC4/jsTJsE+kh+LXYQlbL8CgrEcwmt1ENZszdJE=
|
||||||
go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM=
|
|
||||||
go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
|
|
||||||
go.opentelemetry.io/otel/sdk/metric v1.39.0 h1:cXMVVFVgsIf2YL6QkRF4Urbr/aMInf+2WKg+sEJTtB8=
|
go.opentelemetry.io/otel/sdk/metric v1.39.0 h1:cXMVVFVgsIf2YL6QkRF4Urbr/aMInf+2WKg+sEJTtB8=
|
||||||
go.opentelemetry.io/otel/sdk/metric v1.39.0/go.mod h1:xq9HEVH7qeX69/JnwEfp6fVq5wosJsY1mt4lLfYdVew=
|
go.opentelemetry.io/otel/sdk/metric v1.39.0/go.mod h1:xq9HEVH7qeX69/JnwEfp6fVq5wosJsY1mt4lLfYdVew=
|
||||||
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
|
|
||||||
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
|
|
||||||
go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
|
go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
|
||||||
go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
|
go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
|
||||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||||
@@ -516,21 +475,13 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk
|
|||||||
golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
|
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
|
||||||
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
|
|
||||||
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
|
|
||||||
golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU=
|
golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU=
|
||||||
golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0=
|
golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0=
|
||||||
golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39 h1:DHNhtq3sNNzrvduZZIiFyXWOL9IWaDPHqTnLJp+rCBY=
|
|
||||||
golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39/go.mod h1:46edojNIoXTNOhySWIWdix628clX9ODXwPsQuG6hsK0=
|
|
||||||
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 h1:fQsdNF2N+/YewlRZiricy4P1iimyPKZ/xwniHj8Q2a0=
|
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 h1:fQsdNF2N+/YewlRZiricy4P1iimyPKZ/xwniHj8Q2a0=
|
||||||
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93/go.mod h1:EPRbTFwzwjXj9NpYyyrvenVh9Y+GFeEvMNh7Xuz7xgU=
|
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93/go.mod h1:EPRbTFwzwjXj9NpYyyrvenVh9Y+GFeEvMNh7Xuz7xgU=
|
||||||
golang.org/x/image v0.33.0 h1:LXRZRnv1+zGd5XBUVRFmYEphyyKJjQjCRiOuAP3sZfQ=
|
|
||||||
golang.org/x/image v0.33.0/go.mod h1:DD3OsTYT9chzuzTQt+zMcOlBHgfoKQb1gry8p76Y1sc=
|
|
||||||
golang.org/x/image v0.34.0 h1:33gCkyw9hmwbZJeZkct8XyR11yH889EQt/QH4VmXMn8=
|
golang.org/x/image v0.34.0 h1:33gCkyw9hmwbZJeZkct8XyR11yH889EQt/QH4VmXMn8=
|
||||||
golang.org/x/image v0.34.0/go.mod h1:2RNFBZRB+vnwwFil8GkMdRvrJOFd1AzdZI6vOY+eJVU=
|
golang.org/x/image v0.34.0/go.mod h1:2RNFBZRB+vnwwFil8GkMdRvrJOFd1AzdZI6vOY+eJVU=
|
||||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
|
|
||||||
golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
|
|
||||||
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
|
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
|
||||||
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
|
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
|
||||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||||
@@ -540,18 +491,12 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
|
|||||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
|
|
||||||
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
|
|
||||||
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
|
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
|
||||||
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
|
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
|
||||||
golang.org/x/oauth2 v0.33.0 h1:4Q+qn+E5z8gPRJfmRy7C2gGG3T4jIprK6aSYgTXGRpo=
|
|
||||||
golang.org/x/oauth2 v0.33.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
|
||||||
golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw=
|
golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw=
|
||||||
golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
||||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
|
|
||||||
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
|
||||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
@@ -567,8 +512,6 @@ golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
|||||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
|
||||||
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
|
||||||
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
|
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
|
||||||
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
@@ -578,8 +521,6 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
|||||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
|
|
||||||
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
|
|
||||||
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
|
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
|
||||||
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
|
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
|
||||||
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
|
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
|
||||||
@@ -587,8 +528,6 @@ golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
|
|||||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ=
|
|
||||||
golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
|
|
||||||
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
||||||
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
@@ -597,28 +536,16 @@ golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhS
|
|||||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
|
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
|
||||||
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
||||||
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
|
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
|
||||||
google.golang.org/api v0.257.0 h1:8Y0lzvHlZps53PEaw+G29SsQIkuKrumGWs9puiexNAA=
|
|
||||||
google.golang.org/api v0.257.0/go.mod h1:4eJrr+vbVaZSqs7vovFd1Jb/A6ml6iw2e6FBYf3GAO4=
|
|
||||||
google.golang.org/api v0.258.0 h1:IKo1j5FBlN74fe5isA2PVozN3Y5pwNKriEgAXPOkDAc=
|
google.golang.org/api v0.258.0 h1:IKo1j5FBlN74fe5isA2PVozN3Y5pwNKriEgAXPOkDAc=
|
||||||
google.golang.org/api v0.258.0/go.mod h1:qhOMTQEZ6lUps63ZNq9jhODswwjkjYYguA7fA3TBFww=
|
google.golang.org/api v0.258.0/go.mod h1:qhOMTQEZ6lUps63ZNq9jhODswwjkjYYguA7fA3TBFww=
|
||||||
google.golang.org/genproto v0.0.0-20250715232539-7130f93afb79 h1:Nt6z9UHqSlIdIGJdz6KhTIs2VRx/iOsA5iE8bmQNcxs=
|
|
||||||
google.golang.org/genproto v0.0.0-20250715232539-7130f93afb79/go.mod h1:kTmlBHMPqR5uCZPBvwa2B18mvubkjyY3CRLI0c6fj0s=
|
|
||||||
google.golang.org/genproto v0.0.0-20251202230838-ff82c1b0f217 h1:GvESR9BIyHUahIb0NcTum6itIWtdoglGX+rnGxm2934=
|
google.golang.org/genproto v0.0.0-20251202230838-ff82c1b0f217 h1:GvESR9BIyHUahIb0NcTum6itIWtdoglGX+rnGxm2934=
|
||||||
google.golang.org/genproto v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:yJ2HH4EHEDTd3JiLmhds6NkJ17ITVYOdV3m3VKOnws0=
|
google.golang.org/genproto v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:yJ2HH4EHEDTd3JiLmhds6NkJ17ITVYOdV3m3VKOnws0=
|
||||||
google.golang.org/genproto/googleapis/api v0.0.0-20251022142026-3a174f9686a8 h1:mepRgnBZa07I4TRuomDE4sTIYieg/osKmzIf4USdWS4=
|
|
||||||
google.golang.org/genproto/googleapis/api v0.0.0-20251022142026-3a174f9686a8/go.mod h1:fDMmzKV90WSg1NbozdqrE64fkuTv6mlq2zxo9ad+3yo=
|
|
||||||
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 h1:fCvbg86sFXwdrl5LgVcTEvNC+2txB5mgROGmRL5mrls=
|
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 h1:fCvbg86sFXwdrl5LgVcTEvNC+2txB5mgROGmRL5mrls=
|
||||||
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:+rXWjjaukWZun3mLfjmVnQi18E1AsFbDN9QdJ5YXLto=
|
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:+rXWjjaukWZun3mLfjmVnQi18E1AsFbDN9QdJ5YXLto=
|
||||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217 h1:gRkg/vSppuSQoDjxyiGfN4Upv/h/DQmIR10ZU8dh4Ww=
|
|
||||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk=
|
|
||||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20251222181119-0a764e51fe1b h1:Mv8VFug0MP9e5vUxfBcE3vUkV6CImK3cMNMIDFjmzxU=
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20251222181119-0a764e51fe1b h1:Mv8VFug0MP9e5vUxfBcE3vUkV6CImK3cMNMIDFjmzxU=
|
||||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20251222181119-0a764e51fe1b/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ=
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20251222181119-0a764e51fe1b/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ=
|
||||||
google.golang.org/grpc v1.77.0 h1:wVVY6/8cGA6vvffn+wWK5ToddbgdU3d8MNENr4evgXM=
|
|
||||||
google.golang.org/grpc v1.77.0/go.mod h1:z0BY1iVj0q8E1uSQCjL9cppRj+gnZjzDnzV0dHhrNig=
|
|
||||||
google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc=
|
google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc=
|
||||||
google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U=
|
google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U=
|
||||||
google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE=
|
|
||||||
google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
|
||||||
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
|
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
|
||||||
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
@@ -640,8 +567,6 @@ modernc.org/gc/v3 v3.1.1 h1:k8T3gkXWY9sEiytKhcgyiZ2L0DTyCQ/nvX+LoCljoRE=
|
|||||||
modernc.org/gc/v3 v3.1.1/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY=
|
modernc.org/gc/v3 v3.1.1/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY=
|
||||||
modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
|
modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
|
||||||
modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
|
modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
|
||||||
modernc.org/libc v1.67.1 h1:bFaqOaa5/zbWYJo8aW0tXPX21hXsngG2M7mckCnFSVk=
|
|
||||||
modernc.org/libc v1.67.1/go.mod h1:QvvnnJ5P7aitu0ReNpVIEyesuhmDLQ8kaEoyMjIFZJA=
|
|
||||||
modernc.org/libc v1.67.2 h1:ZbNmly1rcbjhot5jlOZG0q4p5VwFfjwWqZ5rY2xxOXo=
|
modernc.org/libc v1.67.2 h1:ZbNmly1rcbjhot5jlOZG0q4p5VwFfjwWqZ5rY2xxOXo=
|
||||||
modernc.org/libc v1.67.2/go.mod h1:QvvnnJ5P7aitu0ReNpVIEyesuhmDLQ8kaEoyMjIFZJA=
|
modernc.org/libc v1.67.2/go.mod h1:QvvnnJ5P7aitu0ReNpVIEyesuhmDLQ8kaEoyMjIFZJA=
|
||||||
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
|
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
|
||||||
@@ -652,8 +577,6 @@ modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
|
|||||||
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
|
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
|
||||||
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
|
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
|
||||||
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
|
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
|
||||||
modernc.org/sqlite v1.40.1 h1:VfuXcxcUWWKRBuP8+BR9L7VnmusMgBNNnBYGEe9w/iY=
|
|
||||||
modernc.org/sqlite v1.40.1/go.mod h1:9fjQZ0mB1LLP0GYrp39oOJXx/I2sxEnZtzCmEQIKvGE=
|
|
||||||
modernc.org/sqlite v1.41.0 h1:bJXddp4ZpsqMsNN1vS0jWo4IJTZzb8nWpcgvyCFG9Ck=
|
modernc.org/sqlite v1.41.0 h1:bJXddp4ZpsqMsNN1vS0jWo4IJTZzb8nWpcgvyCFG9Ck=
|
||||||
modernc.org/sqlite v1.41.0/go.mod h1:9fjQZ0mB1LLP0GYrp39oOJXx/I2sxEnZtzCmEQIKvGE=
|
modernc.org/sqlite v1.41.0/go.mod h1:9fjQZ0mB1LLP0GYrp39oOJXx/I2sxEnZtzCmEQIKvGE=
|
||||||
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
|
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
|
||||||
|
|||||||
backend/internal/data/ent/item_predicates.go (5 changes, generated)
@@ -4,6 +4,7 @@ import (
 	"entgo.io/ent/dialect/sql"
 	"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
 	"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
+	conf "github.com/sysadminsmedia/homebox/backend/internal/sys/config"
 	"github.com/sysadminsmedia/homebox/backend/pkgs/textutils"
 )
 
@@ -24,7 +25,7 @@ func AccentInsensitiveContains(field string, searchValue string) predicate.Item
 		dialect := s.Dialect()
 
 		switch dialect {
-		case "sqlite3":
+		case conf.DriverSqlite3:
 			// For SQLite, we'll create a custom normalization function using REPLACE
 			// to handle common accented characters
 			normalizeFunc := buildSQLiteNormalizeExpression(s.C(field))
@@ -32,7 +33,7 @@ func AccentInsensitiveContains(field string, searchValue string) predicate.Item
 				"LOWER("+normalizeFunc+") LIKE ?",
 				"%"+normalizedSearch+"%",
 			))
-		case "postgres":
+		case conf.DriverPostgres:
 			// For PostgreSQL, use REPLACE-based normalization to avoid unaccent dependency
 			normalizeFunc := buildGenericNormalizeExpression(s.C(field))
 			// Use sql.P() for proper PostgreSQL parameter binding ($1, $2, etc.)
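The predicate above rewrites both the column expression and the search term through a chain of REPLACE calls so that accented characters compare equal to their ASCII counterparts. A minimal standalone Go sketch of the same normalization idea is shown below; `normalizeAccents` is an illustrative helper, not the repository's `buildSQLiteNormalizeExpression`, and the character table is deliberately abbreviated.

```go
package main

import (
	"fmt"
	"strings"
)

// normalizeAccents mirrors the REPLACE-based approach used in the predicate:
// lowercase the input, then map common accented characters to ASCII before comparing.
func normalizeAccents(s string) string {
	r := strings.NewReplacer(
		"á", "a", "à", "a", "â", "a", "ä", "a",
		"é", "e", "è", "e", "ê", "e", "ë", "e",
		"í", "i", "ï", "i", "ó", "o", "ô", "o", "ö", "o",
		"ú", "u", "ü", "u", "ç", "c", "ñ", "n",
	)
	return r.Replace(strings.ToLower(s))
}

func main() {
	// "Télévision" should match a search for "television".
	fmt.Println(strings.Contains(normalizeAccents("Télévision"), normalizeAccents("television"))) // true
}
```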
@@ -6,6 +6,7 @@ import (
 	"fmt"
 
 	"github.com/rs/zerolog/log"
+	"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
 )
 
 //go:embed all:postgres
@@ -21,9 +22,9 @@ var sqliteFiles embed.FS
 // embedded file system containing the migration files for the specified dialect.
 func Migrations(dialect string) (embed.FS, error) {
 	switch dialect {
-	case "postgres":
+	case config.DriverPostgres:
 		return postgresFiles, nil
-	case "sqlite3":
+	case config.DriverSqlite3:
 		return sqliteFiles, nil
 	default:
 		log.Error().Str("dialect", dialect).Msg("unknown sql dialect")
@@ -1,4 +1,6 @@
 -- +goose Up
+-- +goose no transaction
+PRAGMA foreign_keys=OFF;
 -- SQLite doesn't support ALTER COLUMN directly, so we need to recreate the table
 -- Create a temporary table with the new schema
 CREATE TABLE users_temp (
@@ -29,4 +31,5 @@ DROP TABLE users;
 ALTER TABLE users_temp RENAME TO users;
 
 -- Recreate the unique index
 CREATE UNIQUE INDEX users_email_key on users (email);
+PRAGMA foreign_keys=ON;
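The `Migrations(dialect)` change swaps string literals for the shared driver constants when choosing which embedded migration set to return. The sketch below shows the same dialect switch in isolation; it substitutes `fstest.MapFS` stand-ins for the real `//go:embed` filesystems so it runs on its own, and `migrationsFor` is an illustrative name, not the project's function.

```go
package main

import (
	"fmt"
	"io/fs"
	"testing/fstest"
)

// Stand-in filesystems for the embedded migration sets; the real code uses embed.FS.
var (
	sqliteFiles   = fstest.MapFS{"sqlite/0001_init.sql": &fstest.MapFile{Data: []byte("-- +goose Up")}}
	postgresFiles = fstest.MapFS{"postgres/0001_init.sql": &fstest.MapFile{Data: []byte("-- +goose Up")}}
)

// migrationsFor mirrors the dialect switch in Migrations(dialect).
func migrationsFor(dialect string) (fs.FS, error) {
	switch dialect {
	case "postgres":
		return postgresFiles, nil
	case "sqlite3":
		return sqliteFiles, nil
	default:
		return nil, fmt.Errorf("unknown sql dialect: %q", dialect)
	}
}

func main() {
	m, err := migrationsFor("sqlite3")
	if err != nil {
		panic(err)
	}
	names, _ := fs.Glob(m, "sqlite/*.sql")
	fmt.Println(names) // [sqlite/0001_init.sql]
}
```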
@@ -809,6 +809,88 @@ func (e *ItemsRepository) DeleteByGroup(ctx context.Context, gid, id uuid.UUID)
 	return err
 }
 
+func (e *ItemsRepository) WipeInventory(ctx context.Context, gid uuid.UUID, wipeLabels bool, wipeLocations bool, wipeMaintenance bool) (int, error) {
+	deleted := 0
+
+	// Wipe maintenance records if requested
+	// IMPORTANT: Must delete maintenance records BEFORE items since they are linked to items
+	if wipeMaintenance {
+		maintenanceCount, err := e.db.MaintenanceEntry.Delete().
+			Where(maintenanceentry.HasItemWith(item.HasGroupWith(group.ID(gid)))).
+			Exec(ctx)
+		if err != nil {
+			log.Err(err).Msg("failed to delete maintenance entries during wipe inventory")
+		} else {
+			log.Info().Int("count", maintenanceCount).Msg("deleted maintenance entries during wipe inventory")
+			deleted += maintenanceCount
+		}
+	}
+
+	// Get all items for the group
+	items, err := e.db.Item.Query().
+		Where(item.HasGroupWith(group.ID(gid))).
+		WithAttachments().
+		All(ctx)
+	if err != nil {
+		return 0, err
+	}
+
+	// Delete each item with its attachments
+	// Note: We manually delete attachments and items instead of calling DeleteByGroup
+	// to continue processing remaining items even if some deletions fail
+	for _, itm := range items {
+		// Delete all attachments first
+		for _, att := range itm.Edges.Attachments {
+			err := e.attachments.Delete(ctx, gid, itm.ID, att.ID)
+			if err != nil {
+				log.Err(err).Str("attachment_id", att.ID.String()).Msg("failed to delete attachment during wipe inventory")
+				// Continue with other attachments even if one fails
+			}
+		}
+
+		// Delete the item
+		_, err = e.db.Item.
+			Delete().
+			Where(
+				item.ID(itm.ID),
+				item.HasGroupWith(group.ID(gid)),
+			).Exec(ctx)
+		if err != nil {
+			log.Err(err).Str("item_id", itm.ID.String()).Msg("failed to delete item during wipe inventory")
+			// Skip to next item without incrementing counter
+			continue
+		}
+
+		// Only increment counter if deletion succeeded
+		deleted++
+	}
+
+	// Wipe labels if requested
+	if wipeLabels {
+		labelCount, err := e.db.Label.Delete().Where(label.HasGroupWith(group.ID(gid))).Exec(ctx)
+		if err != nil {
+			log.Err(err).Msg("failed to delete labels during wipe inventory")
+		} else {
+			log.Info().Int("count", labelCount).Msg("deleted labels during wipe inventory")
+			deleted += labelCount
+		}
+	}
+
+	// Wipe locations if requested
+	if wipeLocations {
+		locationCount, err := e.db.Location.Delete().Where(location.HasGroupWith(group.ID(gid))).Exec(ctx)
+		if err != nil {
+			log.Err(err).Msg("failed to delete locations during wipe inventory")
+		} else {
+			log.Info().Int("count", locationCount).Msg("deleted locations during wipe inventory")
+			deleted += locationCount
+		}
+	}
+
+	e.publishMutationEvent(gid)
+	return deleted, nil
+}
+
 func (e *ItemsRepository) UpdateByGroup(ctx context.Context, gid uuid.UUID, data ItemUpdate) (ItemOut, error) {
 	q := e.db.Item.Update().Where(item.ID(data.ID), item.HasGroupWith(group.ID(gid))).
 		SetName(data.Name).
|||||||
@@ -398,4 +398,161 @@ func TestItemsRepository_DeleteByGroupWithAttachments(t *testing.T) {
|
|||||||
require.Error(t, err)
|
require.Error(t, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestItemsRepository_WipeInventory(t *testing.T) {
|
||||||
|
// Create test data: items, labels, locations, and maintenance entries
|
||||||
|
|
||||||
|
// Create locations
|
||||||
|
loc1, err := tRepos.Locations.Create(context.Background(), tGroup.ID, LocationCreate{
|
||||||
|
Name: "Test Location 1",
|
||||||
|
Description: "Test location for wipe test",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
loc2, err := tRepos.Locations.Create(context.Background(), tGroup.ID, LocationCreate{
|
||||||
|
Name: "Test Location 2",
|
||||||
|
Description: "Another test location",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Create labels
|
||||||
|
label1, err := tRepos.Labels.Create(context.Background(), tGroup.ID, LabelCreate{
|
||||||
|
Name: "Test Label 1",
|
||||||
|
Description: "Test label for wipe test",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
label2, err := tRepos.Labels.Create(context.Background(), tGroup.ID, LabelCreate{
|
||||||
|
Name: "Test Label 2",
|
||||||
|
Description: "Another test label",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Create items
|
||||||
|
item1, err := tRepos.Items.Create(context.Background(), tGroup.ID, ItemCreate{
|
||||||
|
Name: "Test Item 1",
|
||||||
|
Description: "Test item for wipe test",
|
||||||
|
LocationID: loc1.ID,
|
||||||
|
LabelIDs: []uuid.UUID{label1.ID},
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
item2, err := tRepos.Items.Create(context.Background(), tGroup.ID, ItemCreate{
|
||||||
|
Name: "Test Item 2",
|
||||||
|
Description: "Another test item",
|
||||||
|
LocationID: loc2.ID,
|
||||||
|
LabelIDs: []uuid.UUID{label2.ID},
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Create maintenance entries for items
|
||||||
|
_, err = tRepos.MaintEntry.Create(context.Background(), item1.ID, MaintenanceEntryCreate{
|
||||||
|
CompletedDate: types.DateFromTime(time.Now()),
|
||||||
|
Name: "Test Maintenance 1",
|
||||||
|
Description: "Test maintenance entry",
|
||||||
|
Cost: 100.0,
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = tRepos.MaintEntry.Create(context.Background(), item2.ID, MaintenanceEntryCreate{
|
||||||
|
CompletedDate: types.DateFromTime(time.Now()),
|
||||||
|
Name: "Test Maintenance 2",
|
||||||
|
Description: "Another test maintenance entry",
|
||||||
|
Cost: 200.0,
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Test 1: Wipe inventory with all options enabled
|
||||||
|
t.Run("wipe all including labels, locations, and maintenance", func(t *testing.T) {
|
||||||
|
deleted, err := tRepos.Items.WipeInventory(context.Background(), tGroup.ID, true, true, true)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Greater(t, deleted, 0, "Should have deleted at least some entities")
|
||||||
|
|
||||||
|
// Verify items are deleted
|
||||||
|
_, err = tRepos.Items.GetOneByGroup(context.Background(), tGroup.ID, item1.ID)
|
||||||
|
require.Error(t, err, "Item 1 should be deleted")
|
||||||
|
|
||||||
|
_, err = tRepos.Items.GetOneByGroup(context.Background(), tGroup.ID, item2.ID)
|
||||||
|
require.Error(t, err, "Item 2 should be deleted")
|
||||||
|
|
||||||
|
// Verify maintenance entries are deleted (query by item ID, should return empty)
|
||||||
|
maint1List, err := tRepos.MaintEntry.GetMaintenanceByItemID(context.Background(), tGroup.ID, item1.ID, MaintenanceFilters{})
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Empty(t, maint1List, "Maintenance entry 1 should be deleted")
|
||||||
|
|
||||||
|
maint2List, err := tRepos.MaintEntry.GetMaintenanceByItemID(context.Background(), tGroup.ID, item2.ID, MaintenanceFilters{})
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Empty(t, maint2List, "Maintenance entry 2 should be deleted")
|
||||||
|
|
||||||
|
// Verify labels are deleted
|
||||||
|
_, err = tRepos.Labels.GetOneByGroup(context.Background(), tGroup.ID, label1.ID)
|
||||||
|
require.Error(t, err, "Label 1 should be deleted")
|
||||||
|
|
||||||
|
_, err = tRepos.Labels.GetOneByGroup(context.Background(), tGroup.ID, label2.ID)
|
||||||
|
require.Error(t, err, "Label 2 should be deleted")
|
||||||
|
|
||||||
|
// Verify locations are deleted
|
||||||
|
_, err = tRepos.Locations.Get(context.Background(), loc1.ID)
|
||||||
|
require.Error(t, err, "Location 1 should be deleted")
|
||||||
|
|
||||||
|
_, err = tRepos.Locations.Get(context.Background(), loc2.ID)
|
||||||
|
require.Error(t, err, "Location 2 should be deleted")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestItemsRepository_WipeInventory_OnlyItems(t *testing.T) {
|
||||||
|
// Create test data
|
||||||
|
loc, err := tRepos.Locations.Create(context.Background(), tGroup.ID, LocationCreate{
|
||||||
|
Name: "Test Location",
|
||||||
|
Description: "Test location for wipe test",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
label, err := tRepos.Labels.Create(context.Background(), tGroup.ID, LabelCreate{
|
||||||
|
Name: "Test Label",
|
||||||
|
Description: "Test label for wipe test",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
item, err := tRepos.Items.Create(context.Background(), tGroup.ID, ItemCreate{
|
||||||
|
Name: "Test Item",
|
||||||
|
Description: "Test item for wipe test",
|
||||||
|
LocationID: loc.ID,
|
||||||
|
LabelIDs: []uuid.UUID{label.ID},
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = tRepos.MaintEntry.Create(context.Background(), item.ID, MaintenanceEntryCreate{
|
||||||
|
CompletedDate: types.DateFromTime(time.Now()),
|
||||||
|
Name: "Test Maintenance",
|
||||||
|
Description: "Test maintenance entry",
|
||||||
|
Cost: 100.0,
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Test: Wipe inventory with only items (no labels, locations, or maintenance)
|
||||||
|
deleted, err := tRepos.Items.WipeInventory(context.Background(), tGroup.ID, false, false, false)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Greater(t, deleted, 0, "Should have deleted at least the item")
|
||||||
|
|
||||||
|
// Verify item is deleted
|
||||||
|
_, err = tRepos.Items.GetOneByGroup(context.Background(), tGroup.ID, item.ID)
|
||||||
|
require.Error(t, err, "Item should be deleted")
|
||||||
|
|
||||||
|
// Verify maintenance entry is deleted due to cascade
|
||||||
|
maintList, err := tRepos.MaintEntry.GetMaintenanceByItemID(context.Background(), tGroup.ID, item.ID, MaintenanceFilters{})
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Empty(t, maintList, "Maintenance entry should be cascade deleted with item")
|
||||||
|
|
||||||
|
// Verify label still exists
|
||||||
|
_, err = tRepos.Labels.GetOneByGroup(context.Background(), tGroup.ID, label.ID)
|
||||||
|
require.NoError(t, err, "Label should still exist")
|
||||||
|
|
||||||
|
// Verify location still exists
|
||||||
|
_, err = tRepos.Locations.Get(context.Background(), loc.ID)
|
||||||
|
require.NoError(t, err, "Location should still exist")
|
||||||
|
|
||||||
|
// Cleanup
|
||||||
|
_ = tRepos.Labels.DeleteByGroup(context.Background(), tGroup.ID, label.ID)
|
||||||
|
_ = tRepos.Locations.delete(context.Background(), loc.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
backend/internal/data/repo/repo_wipe_integration_test.go (new file, 194 additions)
|
|||||||
|
package repo
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestWipeInventory_Integration tests the complete wipe inventory flow
|
||||||
|
func TestWipeInventory_Integration(t *testing.T) {
|
||||||
|
// Create test data: locations, labels, items with maintenance
|
||||||
|
|
||||||
|
// 1. Create locations
|
||||||
|
loc1, err := tRepos.Locations.Create(context.Background(), tGroup.ID, LocationCreate{
|
||||||
|
Name: "Test Garage",
|
||||||
|
Description: "Garage location",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
loc2, err := tRepos.Locations.Create(context.Background(), tGroup.ID, LocationCreate{
|
||||||
|
Name: "Test Basement",
|
||||||
|
Description: "Basement location",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// 2. Create labels
|
||||||
|
label1, err := tRepos.Labels.Create(context.Background(), tGroup.ID, LabelCreate{
|
||||||
|
Name: "Test Electronics",
|
||||||
|
Description: "Electronics label",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
label2, err := tRepos.Labels.Create(context.Background(), tGroup.ID, LabelCreate{
|
||||||
|
Name: "Test Tools",
|
||||||
|
Description: "Tools label",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// 3. Create items
|
||||||
|
item1, err := tRepos.Items.Create(context.Background(), tGroup.ID, ItemCreate{
|
||||||
|
Name: "Test Laptop",
|
||||||
|
Description: "Work laptop",
|
||||||
|
LocationID: loc1.ID,
|
||||||
|
LabelIDs: []uuid.UUID{label1.ID},
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
item2, err := tRepos.Items.Create(context.Background(), tGroup.ID, ItemCreate{
|
||||||
|
Name: "Test Drill",
|
||||||
|
Description: "Power drill",
|
||||||
|
LocationID: loc2.ID,
|
||||||
|
LabelIDs: []uuid.UUID{label2.ID},
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
item3, err := tRepos.Items.Create(context.Background(), tGroup.ID, ItemCreate{
|
||||||
|
Name: "Test Monitor",
|
||||||
|
Description: "Computer monitor",
|
||||||
|
LocationID: loc1.ID,
|
||||||
|
LabelIDs: []uuid.UUID{label1.ID},
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// 4. Create maintenance entries
|
||||||
|
_, err = tRepos.MaintEntry.Create(context.Background(), item1.ID, MaintenanceEntryCreate{
|
||||||
|
CompletedDate: types.DateFromTime(time.Now()),
|
||||||
|
Name: "Laptop cleaning",
|
||||||
|
Description: "Cleaned keyboard and screen",
|
||||||
|
Cost: 0,
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = tRepos.MaintEntry.Create(context.Background(), item2.ID, MaintenanceEntryCreate{
|
||||||
|
CompletedDate: types.DateFromTime(time.Now()),
|
||||||
|
Name: "Drill maintenance",
|
||||||
|
Description: "Oiled motor",
|
||||||
|
Cost: 5.00,
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = tRepos.MaintEntry.Create(context.Background(), item3.ID, MaintenanceEntryCreate{
|
||||||
|
CompletedDate: types.DateFromTime(time.Now()),
|
||||||
|
Name: "Monitor calibration",
|
||||||
|
Description: "Color calibration",
|
||||||
|
Cost: 0,
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// 5. Verify items exist
|
||||||
|
allItems, err := tRepos.Items.GetAll(context.Background(), tGroup.ID)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.GreaterOrEqual(t, len(allItems), 3, "Should have at least 3 items")
|
||||||
|
|
||||||
|
// 6. Verify maintenance entries exist
|
||||||
|
maint1List, err := tRepos.MaintEntry.GetMaintenanceByItemID(context.Background(), tGroup.ID, item1.ID, MaintenanceFilters{})
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.NotEmpty(t, maint1List, "Item 1 should have maintenance records")
|
||||||
|
|
||||||
|
maint2List, err := tRepos.MaintEntry.GetMaintenanceByItemID(context.Background(), tGroup.ID, item2.ID, MaintenanceFilters{})
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.NotEmpty(t, maint2List, "Item 2 should have maintenance records")
|
||||||
|
|
||||||
|
// 7. Test wipe inventory with all options enabled
|
||||||
|
deleted, err := tRepos.Items.WipeInventory(context.Background(), tGroup.ID, true, true, true)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Greater(t, deleted, 0, "Should have deleted entities")
|
||||||
|
|
||||||
|
// 8. Verify all items are deleted
|
||||||
|
allItemsAfter, err := tRepos.Items.GetAll(context.Background(), tGroup.ID)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, 0, len(allItemsAfter), "All items should be deleted")
|
||||||
|
|
||||||
|
// 9. Verify maintenance entries are deleted
|
||||||
|
maint1After, err := tRepos.MaintEntry.GetMaintenanceByItemID(context.Background(), tGroup.ID, item1.ID, MaintenanceFilters{})
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Empty(t, maint1After, "Item 1 maintenance records should be deleted")
|
||||||
|
|
||||||
|
// 10. Verify labels are deleted
|
||||||
|
_, err = tRepos.Labels.GetOneByGroup(context.Background(), tGroup.ID, label1.ID)
|
||||||
|
require.Error(t, err, "Label 1 should be deleted")
|
||||||
|
|
||||||
|
_, err = tRepos.Labels.GetOneByGroup(context.Background(), tGroup.ID, label2.ID)
|
||||||
|
require.Error(t, err, "Label 2 should be deleted")
|
||||||
|
|
||||||
|
// 11. Verify locations are deleted
|
||||||
|
_, err = tRepos.Locations.Get(context.Background(), loc1.ID)
|
||||||
|
require.Error(t, err, "Location 1 should be deleted")
|
||||||
|
|
||||||
|
_, err = tRepos.Locations.Get(context.Background(), loc2.ID)
|
||||||
|
require.Error(t, err, "Location 2 should be deleted")
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestWipeInventory_SelectiveWipe tests wiping only certain entity types
|
||||||
|
func TestWipeInventory_SelectiveWipe(t *testing.T) {
|
||||||
|
// Create test data
|
||||||
|
loc, err := tRepos.Locations.Create(context.Background(), tGroup.ID, LocationCreate{
|
||||||
|
Name: "Test Office",
|
||||||
|
Description: "Office location",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
label, err := tRepos.Labels.Create(context.Background(), tGroup.ID, LabelCreate{
|
||||||
|
Name: "Test Important",
|
||||||
|
Description: "Important label",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
item, err := tRepos.Items.Create(context.Background(), tGroup.ID, ItemCreate{
|
||||||
|
Name: "Test Computer",
|
||||||
|
Description: "Desktop computer",
|
||||||
|
LocationID: loc.ID,
|
||||||
|
LabelIDs: []uuid.UUID{label.ID},
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = tRepos.MaintEntry.Create(context.Background(), item.ID, MaintenanceEntryCreate{
|
||||||
|
CompletedDate: types.DateFromTime(time.Now()),
|
||||||
|
Name: "System update",
|
||||||
|
Description: "OS update",
|
||||||
|
Cost: 0,
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Test: Wipe only items (keep labels and locations)
|
||||||
|
deleted, err := tRepos.Items.WipeInventory(context.Background(), tGroup.ID, false, false, false)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Greater(t, deleted, 0, "Should have deleted at least items")
|
||||||
|
|
||||||
|
// Verify item is deleted
|
||||||
|
_, err = tRepos.Items.GetOneByGroup(context.Background(), tGroup.ID, item.ID)
|
||||||
|
require.Error(t, err, "Item should be deleted")
|
||||||
|
|
||||||
|
// Verify maintenance is cascade deleted
|
||||||
|
maintList, err := tRepos.MaintEntry.GetMaintenanceByItemID(context.Background(), tGroup.ID, item.ID, MaintenanceFilters{})
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Empty(t, maintList, "Maintenance should be cascade deleted")
|
||||||
|
|
||||||
|
// Verify label still exists
|
||||||
|
_, err = tRepos.Labels.GetOneByGroup(context.Background(), tGroup.ID, label.ID)
|
||||||
|
require.NoError(t, err, "Label should still exist")
|
||||||
|
|
||||||
|
// Verify location still exists
|
||||||
|
_, err = tRepos.Locations.Get(context.Background(), loc.ID)
|
||||||
|
require.NoError(t, err, "Location should still exist")
|
||||||
|
|
||||||
|
// Cleanup
|
||||||
|
_ = tRepos.Labels.DeleteByGroup(context.Background(), tGroup.ID, label.ID)
|
||||||
|
_ = tRepos.Locations.delete(context.Background(), loc.ID)
|
||||||
|
}
|
||||||
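Both test files above exercise the wipe flags one combination at a time. A table-driven variant could cover all flag combinations in a single test; the standalone sketch below shows that shape against a stub with a WipeInventory-style signature, since the real repository fixtures (`tRepos`, `tGroup`) are not available outside the package. All names here are illustrative.

```go
package wipe

import "testing"

// wiper has the shape of WipeInventory's flag-driven signature.
type wiper func(wipeLabels, wipeLocations, wipeMaintenance bool) (int, error)

func TestWipeFlagCombinations(t *testing.T) {
	// Stub that pretends one item plus one extra entity per enabled flag is removed.
	stub := wiper(func(labels, locations, maintenance bool) (int, error) {
		n := 1
		for _, f := range []bool{labels, locations, maintenance} {
			if f {
				n++
			}
		}
		return n, nil
	})

	cases := []struct {
		name                           string
		labels, locations, maintenance bool
		want                           int
	}{
		{"items only", false, false, false, 1},
		{"items and labels", true, false, false, 2},
		{"everything", true, true, true, 4},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got, err := stub(tc.labels, tc.locations, tc.maintenance)
			if err != nil || got != tc.want {
				t.Fatalf("got %d, %v; want %d", got, err, tc.want)
			}
		})
	}
}
```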
@@ -1,7 +1,8 @@
 package config
 
 const (
 	DriverSqlite3 = "sqlite3"
+	DriverPostgres = "postgres"
 )
 
 type Storage struct {
@@ -43,6 +43,7 @@ export default defineConfig({
       nav: [
         { text: 'API Docs', link: '/en/api' },
         { text: 'Demo', link: 'https://demo.homebox.software' },
+        { text: 'Blog', link: 'https://sysadminsjournal.com/tag/homebox/' }
       ],
 
       sidebar: {
@@ -116,6 +116,41 @@
                 }
             }
         },
+        "/v1/actions/wipe-inventory": {
+            "post": {
+                "security": [
+                    {
+                        "Bearer": []
+                    }
+                ],
+                "description": "Deletes all items in the inventory",
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Actions"
+                ],
+                "summary": "Wipe Inventory",
+                "parameters": [
+                    {
+                        "description": "Wipe options",
+                        "name": "options",
+                        "in": "body",
+                        "schema": {
+                            "$ref": "#/definitions/v1.WipeInventoryOptions"
+                        }
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/v1.ActionAmountResult"
+                        }
+                    }
+                }
+            }
+        },
         "/v1/actions/zero-item-time-fields": {
             "post": {
                 "security": [
@@ -4032,7 +4067,8 @@
             "properties": {
                 "defaultDescription": {
                     "type": "string",
-                    "maxLength": 1000
+                    "maxLength": 1000,
+                    "x-nullable": true
                 },
                 "defaultInsured": {
                     "type": "boolean"
@@ -4041,34 +4077,41 @@
                     "type": "array",
                     "items": {
                         "type": "string"
-                    }
+                    },
+                    "x-nullable": true
                 },
                 "defaultLifetimeWarranty": {
                     "type": "boolean"
                 },
                 "defaultLocationId": {
                     "description": "Default location and labels",
-                    "type": "string"
+                    "type": "string",
+                    "x-nullable": true
                 },
                 "defaultManufacturer": {
                     "type": "string",
-                    "maxLength": 255
+                    "maxLength": 255,
+                    "x-nullable": true
                 },
                 "defaultModelNumber": {
                     "type": "string",
-                    "maxLength": 255
+                    "maxLength": 255,
+                    "x-nullable": true
                 },
                 "defaultName": {
                     "type": "string",
-                    "maxLength": 255
+                    "maxLength": 255,
+                    "x-nullable": true
                 },
                 "defaultQuantity": {
                     "description": "Default values for items",
-                    "type": "integer"
+                    "type": "integer",
+                    "x-nullable": true
                 },
                 "defaultWarrantyDetails": {
                     "type": "string",
-                    "maxLength": 1000
+                    "maxLength": 1000,
+                    "x-nullable": true
                 },
                 "description": {
                     "type": "string",
@@ -4209,7 +4252,8 @@
             "properties": {
                 "defaultDescription": {
                     "type": "string",
-                    "maxLength": 1000
+                    "maxLength": 1000,
+                    "x-nullable": true
                 },
                 "defaultInsured": {
                     "type": "boolean"
@@ -4218,34 +4262,41 @@
                     "type": "array",
                     "items": {
                         "type": "string"
-                    }
+                    },
+                    "x-nullable": true
                 },
                 "defaultLifetimeWarranty": {
                     "type": "boolean"
                 },
                 "defaultLocationId": {
                     "description": "Default location and labels",
-                    "type": "string"
+                    "type": "string",
+                    "x-nullable": true
                 },
                 "defaultManufacturer": {
                     "type": "string",
-                    "maxLength": 255
+                    "maxLength": 255,
+                    "x-nullable": true
                 },
                 "defaultModelNumber": {
                     "type": "string",
-                    "maxLength": 255
+                    "maxLength": 255,
+                    "x-nullable": true
                 },
                 "defaultName": {
                     "type": "string",
-                    "maxLength": 255
+                    "maxLength": 255,
+                    "x-nullable": true
                 },
                 "defaultQuantity": {
                     "description": "Default values for items",
-                    "type": "integer"
+                    "type": "integer",
+                    "x-nullable": true
                 },
                 "defaultWarrantyDetails": {
                     "type": "string",
-                    "maxLength": 1000
+                    "maxLength": 1000,
+                    "x-nullable": true
                 },
                 "description": {
                     "type": "string",
@@ -5166,6 +5217,20 @@
                 }
             }
         },
+        "v1.WipeInventoryOptions": {
+            "type": "object",
+            "properties": {
+                "wipeLabels": {
+                    "type": "boolean"
+                },
+                "wipeLocations": {
+                    "type": "boolean"
+                },
+                "wipeMaintenance": {
+                    "type": "boolean"
+                }
+            }
+        },
        "v1.Wrapped": {
            "type": "object",
            "properties": {
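The JSON spec above defines the new POST /v1/actions/wipe-inventory operation: a Bearer-authenticated request with a v1.WipeInventoryOptions body that returns a v1.ActionAmountResult. A minimal Go client sketch against that contract is shown below; the base URL, port, token placeholder, and the `completed` field name of ActionAmountResult are assumptions, not values confirmed by the diff.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// WipeInventoryOptions mirrors the v1.WipeInventoryOptions schema from the spec above.
type WipeInventoryOptions struct {
	WipeLabels      bool `json:"wipeLabels"`
	WipeLocations   bool `json:"wipeLocations"`
	WipeMaintenance bool `json:"wipeMaintenance"`
}

func main() {
	// Placeholder host and token for a hypothetical Homebox instance.
	body, _ := json.Marshal(WipeInventoryOptions{WipeLabels: true, WipeLocations: true, WipeMaintenance: true})
	req, _ := http.NewRequest(http.MethodPost, "http://localhost:7745/api/v1/actions/wipe-inventory", bytes.NewReader(body))
	req.Header.Set("Authorization", "Bearer <token>")
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Assumed shape of v1.ActionAmountResult.
	var result struct {
		Completed int `json:"completed"`
	}
	_ = json.NewDecoder(resp.Body).Decode(&result)
	fmt.Println("entities removed:", result.Completed)
}
```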
@@ -1084,32 +1084,40 @@ definitions:
       defaultDescription:
         maxLength: 1000
         type: string
+        x-nullable: true
       defaultInsured:
         type: boolean
       defaultLabelIds:
         items:
           type: string
         type: array
+        x-nullable: true
       defaultLifetimeWarranty:
         type: boolean
       defaultLocationId:
         description: Default location and labels
         type: string
+        x-nullable: true
       defaultManufacturer:
         maxLength: 255
         type: string
+        x-nullable: true
       defaultModelNumber:
         maxLength: 255
         type: string
+        x-nullable: true
       defaultName:
         maxLength: 255
         type: string
+        x-nullable: true
       defaultQuantity:
         description: Default values for items
         type: integer
+        x-nullable: true
       defaultWarrantyDetails:
         maxLength: 1000
         type: string
+        x-nullable: true
       description:
         maxLength: 1000
         type: string
@@ -1205,32 +1213,40 @@ definitions:
       defaultDescription:
         maxLength: 1000
         type: string
+        x-nullable: true
       defaultInsured:
         type: boolean
       defaultLabelIds:
         items:
           type: string
         type: array
+        x-nullable: true
       defaultLifetimeWarranty:
         type: boolean
       defaultLocationId:
         description: Default location and labels
         type: string
+        x-nullable: true
       defaultManufacturer:
         maxLength: 255
         type: string
+        x-nullable: true
       defaultModelNumber:
         maxLength: 255
         type: string
+        x-nullable: true
       defaultName:
         maxLength: 255
         type: string
+        x-nullable: true
       defaultQuantity:
         description: Default values for items
         type: integer
+        x-nullable: true
       defaultWarrantyDetails:
         maxLength: 1000
         type: string
+        x-nullable: true
       description:
         maxLength: 1000
         type: string
@@ -1851,6 +1867,15 @@ definitions:
       token:
         type: string
     type: object
+  v1.WipeInventoryOptions:
+    properties:
+      wipeLabels:
+        type: boolean
+      wipeLocations:
+        type: boolean
+      wipeMaintenance:
+        type: boolean
+    type: object
   v1.Wrapped:
     properties:
       item: {}
@@ -1931,6 +1956,27 @@ paths:
       summary: Set Primary Photos
       tags:
         - Actions
+  /v1/actions/wipe-inventory:
+    post:
+      description: Deletes all items in the inventory
+      parameters:
+        - description: Wipe options
+          in: body
+          name: options
+          schema:
+            $ref: '#/definitions/v1.WipeInventoryOptions'
+      produces:
+        - application/json
+      responses:
+        "200":
+          description: OK
+          schema:
+            $ref: '#/definitions/v1.ActionAmountResult'
+      security:
+        - Bearer: []
+      summary: Wipe Inventory
+      tags:
+        - Actions
   /v1/actions/zero-item-time-fields:
     post:
       description: Resets all item date fields to the beginning of the day
@@ -114,6 +114,42 @@
         }
       }
     },
+    "/v1/actions/wipe-inventory": {
+      "post": {
+        "security": [
+          {
+            "Bearer": []
+          }
+        ],
+        "description": "Deletes all items in the inventory",
+        "tags": [
+          "Actions"
+        ],
+        "summary": "Wipe Inventory",
+        "requestBody": {
+          "content": {
+            "application/json": {
+              "schema": {
+                "$ref": "#/components/schemas/v1.WipeInventoryOptions"
+              }
+            }
+          },
+          "description": "Wipe options"
+        },
+        "responses": {
+          "200": {
+            "description": "OK",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/v1.ActionAmountResult"
+                }
+              }
+            }
+          }
+        }
+      }
+    },
     "/v1/actions/zero-item-time-fields": {
       "post": {
         "security": [
@@ -5381,6 +5417,20 @@
         }
       }
     },
+    "v1.WipeInventoryOptions": {
+      "type": "object",
+      "properties": {
+        "wipeLabels": {
+          "type": "boolean"
+        },
+        "wipeLocations": {
+          "type": "boolean"
+        },
+        "wipeMaintenance": {
+          "type": "boolean"
+        }
+      }
+    },
     "v1.Wrapped": {
       "type": "object",
       "properties": {
@@ -67,6 +67,27 @@ paths:
             application/json:
               schema:
                 $ref: "#/components/schemas/v1.ActionAmountResult"
+  /v1/actions/wipe-inventory:
+    post:
+      security:
+        - Bearer: []
+      description: Deletes all items in the inventory
+      tags:
+        - Actions
+      summary: Wipe Inventory
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/v1.WipeInventoryOptions"
+        description: Wipe options
+      responses:
+        "200":
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/v1.ActionAmountResult"
   /v1/actions/zero-item-time-fields:
     post:
       security:
@@ -3449,6 +3470,15 @@ components:
          type: string
         token:
           type: string
+    v1.WipeInventoryOptions:
+      type: object
+      properties:
+        wipeLabels:
+          type: boolean
+        wipeLocations:
+          type: boolean
+        wipeMaintenance:
+          type: boolean
     v1.Wrapped:
       type: object
       properties:
@@ -116,6 +116,41 @@
         }
       }
     },
+    "/v1/actions/wipe-inventory": {
+      "post": {
+        "security": [
+          {
+            "Bearer": []
+          }
+        ],
+        "description": "Deletes all items in the inventory",
+        "produces": [
+          "application/json"
+        ],
+        "tags": [
+          "Actions"
+        ],
+        "summary": "Wipe Inventory",
+        "parameters": [
+          {
+            "description": "Wipe options",
+            "name": "options",
+            "in": "body",
+            "schema": {
+              "$ref": "#/definitions/v1.WipeInventoryOptions"
+            }
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "OK",
+            "schema": {
+              "$ref": "#/definitions/v1.ActionAmountResult"
+            }
+          }
+        }
+      }
+    },
     "/v1/actions/zero-item-time-fields": {
       "post": {
         "security": [
@@ -5182,6 +5217,20 @@
         }
       }
     },
+    "v1.WipeInventoryOptions": {
+      "type": "object",
+      "properties": {
+        "wipeLabels": {
+          "type": "boolean"
+        },
+        "wipeLocations": {
+          "type": "boolean"
+        },
+        "wipeMaintenance": {
+          "type": "boolean"
+        }
+      }
+    },
     "v1.Wrapped": {
       "type": "object",
       "properties": {
@@ -1867,6 +1867,15 @@ definitions:
       token:
         type: string
     type: object
+  v1.WipeInventoryOptions:
+    properties:
+      wipeLabels:
+        type: boolean
+      wipeLocations:
+        type: boolean
+      wipeMaintenance:
+        type: boolean
+    type: object
   v1.Wrapped:
     properties:
       item: {}
@@ -1947,6 +1956,27 @@ paths:
       summary: Set Primary Photos
       tags:
         - Actions
+  /v1/actions/wipe-inventory:
+    post:
+      description: Deletes all items in the inventory
+      parameters:
+        - description: Wipe options
+          in: body
+          name: options
+          schema:
+            $ref: '#/definitions/v1.WipeInventoryOptions'
+      produces:
+        - application/json
+      responses:
+        "200":
+          description: OK
+          schema:
+            $ref: '#/definitions/v1.ActionAmountResult'
+      security:
+        - Bearer: []
+      summary: Wipe Inventory
+      tags:
+        - Actions
   /v1/actions/zero-item-time-fields:
     post:
       description: Resets all item date fields to the beginning of the day
@@ -1,12 +1,17 @@
 <script setup lang="ts">
 import type { TreeItem } from "~~/lib/api/types/data-contracts";
 import LocationTreeNode from "./Node.vue";
+import { Button } from "~~/components/ui/button";
+import { useDialog } from "~/components/ui/dialog-provider";
+import { DialogID } from "~/components/ui/dialog-provider/utils";

 type Props = {
   locs: TreeItem[];
   treeId: string;
 };

+const { openDialog } = useDialog();
+
 const props = defineProps<Props>();

 const collator = new Intl.Collator(undefined, { numeric: true, sensitivity: "base" });
@@ -19,9 +24,31 @@

 <template>
   <div>
-    <p v-if="sortedLocs.length === 0" class="text-center text-sm">
-      {{ $t("location.tree.no_locations") }}
-    </p>
-    <LocationTreeNode v-for="item in sortedLocs" :key="item.id" :item="item" :tree-id="treeId" />
+    <div
+      v-if="sortedLocs.length === 0"
+      class="py-6 text-center text-sm text-muted-foreground"
+      role="status"
+      aria-live="polite"
+    >
+      <p class="mx-auto max-w-xs">
+        {{ $t("components.location.tree.no_locations") }}
+      </p>
+      <Button
+        class="mt-3"
+        variant="outline"
+        size="sm"
+        type="button"
+        :aria-label="$t('components.location.create_modal.title') || $t('global.create')"
+        @click="openDialog(DialogID.CreateLocation)"
+      >
+        {{ $t("components.location.create_modal.title") || $t("global.create") }}
+      </Button>
+    </div>
+
+    <ul role="tree" :aria-labelledby="treeId" class="space-y-1">
+      <li v-for="item in sortedLocs" :key="item.id" role="treeitem">
+        <LocationTreeNode :item="item" :tree-id="treeId" />
+      </li>
+    </ul>
   </div>
 </template>

129 frontend/components/WipeInventoryDialog.vue Normal file
@@ -0,0 +1,129 @@
+<template>
+  <AlertDialog :open="dialog" @update:open="handleOpenChange">
+    <AlertDialogContent>
+      <AlertDialogHeader>
+        <AlertDialogTitle>{{ $t("tools.actions_set.wipe_inventory") }}</AlertDialogTitle>
+        <AlertDialogDescription>
+          {{ $t("tools.actions_set.wipe_inventory_confirm") }}
+        </AlertDialogDescription>
+      </AlertDialogHeader>
+
+      <div class="space-y-2">
+        <div class="flex items-center space-x-2">
+          <input
+            id="wipe-labels-checkbox"
+            v-model="wipeLabels"
+            type="checkbox"
+            class="size-4 rounded border-gray-300"
+          />
+          <label for="wipe-labels-checkbox" class="cursor-pointer text-sm font-medium">
+            {{ $t("tools.actions_set.wipe_inventory_labels") }}
+          </label>
+        </div>
+
+        <div class="flex items-center space-x-2">
+          <input
+            id="wipe-locations-checkbox"
+            v-model="wipeLocations"
+            type="checkbox"
+            class="size-4 rounded border-gray-300"
+          />
+          <label for="wipe-locations-checkbox" class="cursor-pointer text-sm font-medium">
+            {{ $t("tools.actions_set.wipe_inventory_locations") }}
+          </label>
+        </div>
+
+        <div class="flex items-center space-x-2">
+          <input
+            id="wipe-maintenance-checkbox"
+            v-model="wipeMaintenance"
+            type="checkbox"
+            class="size-4 rounded border-gray-300"
+          />
+          <label for="wipe-maintenance-checkbox" class="cursor-pointer text-sm font-medium">
+            {{ $t("tools.actions_set.wipe_inventory_maintenance") }}
+          </label>
+        </div>
+      </div>
+
+      <p class="text-sm text-gray-600">
+        {{ $t("tools.actions_set.wipe_inventory_note") }}
+      </p>
+
+      <AlertDialogFooter>
+        <AlertDialogCancel @click="close">
+          {{ $t("global.cancel") }}
+        </AlertDialogCancel>
+        <Button @click="confirm">
+          {{ $t("global.confirm") }}
+        </Button>
+      </AlertDialogFooter>
+    </AlertDialogContent>
+  </AlertDialog>
+</template>
+
+<script setup lang="ts">
+import { DialogID } from "~/components/ui/dialog-provider/utils";
+import { useDialog } from "~/components/ui/dialog-provider";
+import {
+  AlertDialog,
+  AlertDialogCancel,
+  AlertDialogContent,
+  AlertDialogDescription,
+  AlertDialogFooter,
+  AlertDialogHeader,
+  AlertDialogTitle,
+} from "@/components/ui/alert-dialog";
+import { Button } from "@/components/ui/button";
+
+const { registerOpenDialogCallback, closeDialog, addAlert, removeAlert } = useDialog();
+
+const dialog = ref(false);
+const wipeLabels = ref(false);
+const wipeLocations = ref(false);
+const wipeMaintenance = ref(false);
+const isConfirming = ref(false);
+
+registerOpenDialogCallback(DialogID.WipeInventory, () => {
+  dialog.value = true;
+  wipeLabels.value = false;
+  wipeLocations.value = false;
+  wipeMaintenance.value = false;
+  isConfirming.value = false;
+});
+
+watch(
+  dialog,
+  val => {
+    if (val) {
+      addAlert("wipe-inventory-dialog");
+    } else {
+      removeAlert("wipe-inventory-dialog");
+    }
+  },
+  { immediate: true }
+);
+
+function handleOpenChange(open: boolean) {
+  if (!open && !isConfirming.value) {
+    close();
+  }
+}
+
+function close() {
+  dialog.value = false;
+  closeDialog(DialogID.WipeInventory, undefined);
+}
+
+function confirm() {
+  isConfirming.value = true;
+  const result = {
+    wipeLabels: wipeLabels.value,
+    wipeLocations: wipeLocations.value,
+    wipeMaintenance: wipeMaintenance.value,
+  };
+  closeDialog(DialogID.WipeInventory, result);
+  dialog.value = false;
+  isConfirming.value = false;
+}
+</script>
@@ -26,6 +26,7 @@ export enum DialogID {
   UpdateLocation = "update-location",
   UpdateTemplate = "update-template",
   ItemChangeDetails = "item-table-updater",
+  WipeInventory = "wipe-inventory",
 }

 /**
@@ -71,6 +72,7 @@ export type DialogResultMap = {
   [DialogID.ItemImage]?: { action: "delete"; id: string };
   [DialogID.EditMaintenance]?: boolean;
   [DialogID.ItemChangeDetails]?: boolean;
+  [DialogID.WipeInventory]?: { wipeLabels: boolean; wipeLocations: boolean; wipeMaintenance: boolean };
 };

 /** Helpers to split IDs by requirement */
@@ -8,6 +8,7 @@
     <ModalConfirm />
     <OutdatedModal v-if="status" :status="status" />
     <ItemCreateModal />
+    <WipeInventoryDialog />
     <LabelCreateModal />
     <LocationCreateModal />
     <ItemBarcodeModal />
@@ -216,6 +217,7 @@
 import ModalConfirm from "~/components/ModalConfirm.vue";
 import OutdatedModal from "~/components/App/OutdatedModal.vue";
 import ItemCreateModal from "~/components/Item/CreateModal.vue";
+import WipeInventoryDialog from "~/components/WipeInventoryDialog.vue";

 import LabelCreateModal from "~/components/Label/CreateModal.vue";
 import LocationCreateModal from "~/components/Location/CreateModal.vue";
@@ -31,4 +31,14 @@ export class ActionsAPI extends BaseAPI {
       url: route("/actions/create-missing-thumbnails"),
     });
   }
+
+  wipeInventory(options?: { wipeLabels?: boolean; wipeLocations?: boolean; wipeMaintenance?: boolean }) {
+    return this.http.post<
+      { wipeLabels?: boolean; wipeLocations?: boolean; wipeMaintenance?: boolean },
+      ActionAmountResult
+    >({
+      url: route("/actions/wipe-inventory"),
+      body: options || {},
+    });
+  }
 }

6 frontend/lib/api/types/data-contracts.ts generated
@@ -1150,6 +1150,12 @@ export interface TokenResponse {
   token: string;
 }

+export interface WipeInventoryOptions {
+  wipeLabels: boolean;
+  wipeLocations: boolean;
+  wipeMaintenance: boolean;
+}
+
 export interface Wrapped {
   item: any;
 }
@@ -226,7 +226,7 @@
       "clear": "Clear Location Selection"
     },
     "tree": {
-      "no_locations": "No locations available. Add new locations through the\n '<span class=\"link-primary\">'Create'</span>' button on the navigation bar."
+      "no_locations": "No locations available. Use the Create button to add a new location."
     }
   },
   "quick_menu": {
@@ -735,12 +735,23 @@
       "set_primary_photo_button": "Set Primary Photo",
       "set_primary_photo_confirm": "Are you sure you want to set primary photos? This can take a while and cannot be undone.",
       "set_primary_photo_sub": "In version v0.10.0 of Homebox, the primary image field was added to attachments of type photo. This action will set the primary image field to the first image in the attachments array in the database, if it is not already set. '<a class=\"link\" href=\"https://github.com/hay-kot/homebox/pull/576\">'See GitHub PR #576'</a>'",
+      "wipe_inventory": "Wipe Inventory",
+      "wipe_inventory_button": "Wipe Inventory",
+      "wipe_inventory_confirm": "Are you sure you want to wipe your entire inventory? This will delete all items and cannot be undone.",
+      "wipe_inventory_labels": "Also wipe all labels (tags)",
+      "wipe_inventory_locations": "Also wipe all locations",
+      "wipe_inventory_maintenance": "Also wipe all maintenance records",
+      "wipe_inventory_note": "Note: Only group owners can perform this action.",
+      "wipe_inventory_sub": "Permanently deletes all items in your inventory. This action is irreversible and will remove all item data including attachments and photos.",
       "zero_datetimes": "Zero Item Date Times",
       "zero_datetimes_button": "Zero Item Date Times",
       "zero_datetimes_confirm": "Are you sure you want to reset all date and time values? This can take a while and cannot be undone.",
       "zero_datetimes_sub": "Resets the time value for all date time fields in your inventory to the beginning of the date. This is to fix a bug that was introduced early on in the development of the site that caused the time value to be stored with the time which caused issues with date fields displaying accurate values. '<a class=\"link\" href=\"https://github.com/hay-kot/homebox/issues/236\" target=\"_blank\">'See Github Issue #236 for more details.'</a>'"
     },
     "actions_sub": "Apply Actions to your inventory in bulk. These are irreversible actions. '<b>'Be careful.'</b>'",
+    "demo_mode_error": {
+      "wipe_inventory": "Inventory, labels, locations and maintenance records cannot be wiped whilst Homebox is in demo mode. Please ensure that you are not in demo mode and try again."
+    },
     "import_export": "Import/Export",
     "import_export_set": {
       "export": "Export Inventory",
@@ -768,7 +779,9 @@
       "failed_ensure_ids": "Failed to ensure asset IDs.",
       "failed_ensure_import_refs": "Failed to ensure import refs.",
       "failed_set_primary_photos": "Failed to set primary photos.",
-      "failed_zero_datetimes": "Failed to reset date and time values."
+      "failed_wipe_inventory": "Failed to wipe inventory.",
+      "failed_zero_datetimes": "Failed to reset date and time values.",
+      "wipe_inventory_success": "Successfully wiped inventory. { results } items deleted."
     }
   }
 }
@@ -115,7 +115,9 @@
     return;
   }

-  item.value.quantity = newQuantity;
+  if (resp.data) {
+    item.value = resp.data;
+  }
 }

 type FilteredAttachments = {
@@ -90,6 +90,12 @@
         <div v-html="DOMPurify.sanitize($t('tools.actions_set.create_missing_thumbnails_sub'))" />
         <template #button> {{ $t("tools.actions_set.create_missing_thumbnails_button") }} </template>
       </DetailAction>
+      <DetailAction @action="wipeInventory">
+        <template #title> {{ $t("tools.actions_set.wipe_inventory") }} </template>
+        <!-- eslint-disable-next-line vue/no-v-html -->
+        <div v-html="DOMPurify.sanitize($t('tools.actions_set.wipe_inventory_sub'))" />
+        <template #button> {{ $t("tools.actions_set.wipe_inventory_button") }} </template>
+      </DetailAction>
     </div>
   </BaseCard>
 </BaseContainer>
@@ -126,6 +132,13 @@
 const api = useUserApi();
 const confirm = useConfirm();

+// Fetch status to check for demo mode
+const pubApi = usePublicApi();
+const { data: status } = useAsyncData(async () => {
+  const { data } = await pubApi.status();
+  return data;
+});
+
 function getBillOfMaterials() {
   const url = api.reports.billOfMaterialsURL();
   window.open(url, "_blank");
@@ -220,6 +233,35 @@

   toast.success(t("tools.toast.asset_success", { results: result.data.completed }));
 }
+
+async function wipeInventory() {
+  // Check if in demo mode
+  if (status.value?.demo) {
+    await confirm.open(t("tools.demo_mode_error.wipe_inventory"));
+    return;
+  }
+
+  openDialog(DialogID.WipeInventory, {
+    onClose: async result => {
+      if (!result) {
+        return;
+      }
+
+      const apiResult = await api.actions.wipeInventory({
+        wipeLabels: result.wipeLabels,
+        wipeLocations: result.wipeLocations,
+        wipeMaintenance: result.wipeMaintenance,
+      });
+
+      if (apiResult.error) {
+        toast.error(t("tools.toast.failed_wipe_inventory"));
+        return;
+      }
+
+      toast.success(t("tools.toast.wipe_inventory_success", { results: apiResult.data.completed }));
+    },
+  });
+}
 </script>

 <style scoped></style>

9 frontend/pnpm-lock.yaml generated
@@ -5235,8 +5235,8 @@ packages:
     resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==}
     engines: {node: '>=6'}

-  qs@6.14.0:
-    resolution: {integrity: sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==}
+  qs@6.14.1:
+    resolution: {integrity: sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==}
     engines: {node: '>=0.6'}

   quansync@0.2.11:
@@ -6289,6 +6289,7 @@ packages:
   vue-i18n@11.2.7:
     resolution: {integrity: sha512-LPv8bAY5OA0UvFEXl4vBQOBqJzRrlExy92tWgRuwW7tbykHf7CH71G2Y4TM2OwGcIS4+hyqKHS2EVBqaYwPY9Q==}
     engines: {node: '>= 16'}
+    deprecated: This version is NOT deprecated. Previous deprecation was a mistake.
     peerDependencies:
       vue: ^3.0.0

@@ -11388,7 +11389,7 @@ snapshots:
       micro-api-client: 3.3.0
       node-fetch: 3.3.2
       p-wait-for: 5.0.2
-      qs: 6.14.0
+      qs: 6.14.1
     optional: true

   nitropack@2.12.9(@netlify/blobs@9.1.2):
@@ -12198,7 +12199,7 @@ snapshots:

   punycode@2.3.1: {}

-  qs@6.14.0:
+  qs@6.14.1:
     dependencies:
       side-channel: 1.1.0
     optional: true

182 frontend/test/e2e/wipe-inventory.browser.spec.ts Normal file
@@ -0,0 +1,182 @@
+import type { Page } from "@playwright/test";
+import { expect, test } from "@playwright/test";
+
+const STATUS_ROUTE = "**/api/v1/status";
+const WIPE_ROUTE = "**/api/v1/actions/wipe-inventory";
+
+const buildStatusResponse = (demo: boolean) => ({
+  allowRegistration: true,
+  build: { buildTime: new Date().toISOString(), commit: "test", version: "v0.0.0" },
+  demo,
+  health: true,
+  labelPrinting: false,
+  latest: { date: new Date().toISOString(), version: "v0.0.0" },
+  message: "",
+  oidc: { allowLocal: true, autoRedirect: false, buttonText: "", enabled: false },
+  title: "Homebox",
+  versions: [],
+});
+
+async function mockStatus(page: Page, demo: boolean) {
+  await page.route(STATUS_ROUTE, route => {
+    route.fulfill({
+      status: 200,
+      contentType: "application/json",
+      body: JSON.stringify(buildStatusResponse(demo)),
+    });
+  });
+}
+
+async function login(page: Page, email = "demo@example.com", password = "demo") {
+  await page.goto("/home");
+  await expect(page).toHaveURL("/");
+  await page.fill("input[type='text']", email);
+  await page.fill("input[type='password']", password);
+  await page.click("button[type='submit']");
+  await expect(page).toHaveURL("/home");
+}
+
+async function openWipeInventory(page: Page) {
+  await page.goto("/tools");
+  await page.waitForLoadState("networkidle");
+  await page.evaluate(() => window.scrollTo(0, document.body.scrollHeight));
+
+  const wipeButton = page.getByRole("button", { name: "Wipe Inventory" }).last();
+  await expect(wipeButton).toBeVisible();
+  await wipeButton.click();
+}
+
+test.describe("Wipe Inventory", () => {
+  test("shows demo mode warning without wipe options", async ({ page }) => {
+    await mockStatus(page, true);
+    await login(page);
+    await openWipeInventory(page);
+
+    await expect(
+      page.getByText(
+        "Inventory, labels, locations and maintenance records cannot be wiped whilst Homebox is in demo mode.",
+        { exact: false }
+      )
+    ).toBeVisible();
+
+    await expect(page.locator("input#wipe-labels-checkbox")).toHaveCount(0);
+    await expect(page.locator("input#wipe-locations-checkbox")).toHaveCount(0);
+    await expect(page.locator("input#wipe-maintenance-checkbox")).toHaveCount(0);
+  });
+
+  test.describe("production mode", () => {
+    test.beforeEach(async ({ page }) => {
+      await mockStatus(page, false);
+      await login(page);
+    });
+
+    test("renders wipe options and submits all flags", async ({ page }) => {
+      await page.route(WIPE_ROUTE, route => {
+        route.fulfill({ status: 200, contentType: "application/json", body: JSON.stringify({ completed: 0 }) });
+      });
+
+      await openWipeInventory(page);
+      await expect(page.getByText("Wipe Inventory").first()).toBeVisible();
+
+      const labels = page.locator("input#wipe-labels-checkbox");
+      const locations = page.locator("input#wipe-locations-checkbox");
+      const maintenance = page.locator("input#wipe-maintenance-checkbox");
+
+      await expect(labels).toBeVisible();
+      await expect(locations).toBeVisible();
+      await expect(maintenance).toBeVisible();
+
+      await labels.check();
+      await locations.check();
+      await maintenance.check();
+
+      const requestPromise = page.waitForRequest(WIPE_ROUTE);
+      await page.getByRole("button", { name: "Confirm" }).last().click();
+      const request = await requestPromise;
+
+      expect(request.postDataJSON()).toEqual({
+        wipeLabels: true,
+        wipeLocations: true,
+        wipeMaintenance: true,
+      });
+
+      await expect(page.locator("[role='status']").first()).toBeVisible();
+    });
+
+    test("blocks wipe attempts from non-owners", async ({ page }) => {
+      await page.route(WIPE_ROUTE, route => {
+        route.fulfill({
+          status: 403,
+          contentType: "application/json",
+          body: JSON.stringify({ message: "forbidden" }),
+        });
+      });
+
+      await openWipeInventory(page);
+
+      const requestPromise = page.waitForRequest(WIPE_ROUTE);
+      await page.getByRole("button", { name: "Confirm" }).last().click();
+      await requestPromise;
+
+      await expect(page.getByText("Failed to wipe inventory.")).toBeVisible();
+    });
+
+    const checkboxCases = [
+      {
+        name: "labels only",
+        selection: { labels: true, locations: false, maintenance: false },
+      },
+      {
+        name: "locations only",
+        selection: { labels: false, locations: true, maintenance: false },
+      },
+      {
+        name: "maintenance only",
+        selection: { labels: false, locations: false, maintenance: true },
+      },
+    ];
+
+    for (const scenario of checkboxCases) {
+      test(`submits correct flags when ${scenario.name} is selected`, async ({ page }) => {
+        await page.route(WIPE_ROUTE, route => {
+          route.fulfill({ status: 200, contentType: "application/json", body: JSON.stringify({ completed: 0 }) });
+        });
+
+        await openWipeInventory(page);
+        await expect(page.getByText("Wipe Inventory").first()).toBeVisible();
+
+        const labels = page.locator("input#wipe-labels-checkbox");
+        const locations = page.locator("input#wipe-locations-checkbox");
+        const maintenance = page.locator("input#wipe-maintenance-checkbox");
+
+        if (scenario.selection.labels) {
+          await labels.check();
+        } else {
+          await labels.uncheck();
+        }
+
+        if (scenario.selection.locations) {
+          await locations.check();
+        } else {
+          await locations.uncheck();
+        }
+
+        if (scenario.selection.maintenance) {
+          await maintenance.check();
+        } else {
+          await maintenance.uncheck();
+        }
+
+        const requestPromise = page.waitForRequest(WIPE_ROUTE);
+        await page.getByRole("button", { name: "Confirm" }).last().click();
+        const request = await requestPromise;
+
+        expect(request.postDataJSON()).toEqual({
+          wipeLabels: scenario.selection.labels,
+          wipeLocations: scenario.selection.locations,
+          wipeMaintenance: scenario.selection.maintenance,
+        });
+      });
+    }
+  });
+});

418 frontend/test/upgrade/upgrade-verification.spec.ts Normal file
@@ -0,0 +1,418 @@
+/**
+ * HomeBox Upgrade Verification Tests
+ *
+ * NOTE: These tests are ONLY meant to run in the upgrade-test workflow.
+ * They require test data to be pre-created by the create-test-data.sh script.
+ * These tests are stored in test/upgrade/ (not test/e2e/) to prevent them
+ * from running during normal E2E test runs.
+ */
+
+import { expect, test } from "@playwright/test";
+import * as fs from "fs";
+
+// Load test data created by the setup script
+const testDataPath = process.env.TEST_DATA_FILE || "/tmp/test-users.json";
+
+interface TestUser {
+  email: string;
+  password: string;
+  token: string;
+  group: string;
+}
+
+interface TestData {
+  users?: TestUser[];
+  locations?: Record<string, string[]>;
+  labels?: Record<string, string[]>;
+  items?: Record<string, string[]>;
+  notifiers?: Record<string, string[]>;
+}
+
+let testData: TestData = {};
+
+test.beforeAll(() => {
+  if (fs.existsSync(testDataPath)) {
+    const rawData = fs.readFileSync(testDataPath, "utf-8");
+    testData = JSON.parse(rawData);
+    console.log("Loaded test data:", JSON.stringify(testData, null, 2));
+  } else {
+    console.error(`Test data file not found at ${testDataPath}`);
+    throw new Error("Test data file not found");
+  }
+});
+
+test.describe("HomeBox Upgrade Verification", () => {
+  test("verify all users can log in", async ({ page }) => {
+    // Test each user from the test data
+    for (const user of testData.users || []) {
+      await page.goto("/");
+      await expect(page).toHaveURL("/");
+
+      // Wait for login form to be ready
+      await page.waitForSelector("input[type='text']", { state: "visible" });
+
+      // Fill in login form
+      await page.fill("input[type='text']", user.email);
+      await page.fill("input[type='password']", user.password);
+      await page.click("button[type='submit']");
+
+      // Wait for navigation to home page
+      await expect(page).toHaveURL("/home", { timeout: 10000 });
+
+      console.log(`✓ User ${user.email} logged in successfully`);
+
+      // Navigate back to login for next user
+      await page.goto("/");
+      await page.waitForSelector("input[type='text']", { state: "visible" });
+    }
+  });
+
+  test("verify application version is displayed", async ({ page }) => {
+    // Login as first user
+    const firstUser = testData.users?.[0];
+    if (!firstUser) {
+      throw new Error("No users found in test data");
+    }
+
+    await page.goto("/");
+    await page.fill("input[type='text']", firstUser.email);
+    await page.fill("input[type='password']", firstUser.password);
+    await page.click("button[type='submit']");
+    await expect(page).toHaveURL("/home", { timeout: 10000 });
+
+    // Look for version in footer or about section
+    // The version might be in the footer or a settings page
+    // Check if footer exists and contains version info
+    const footer = page.locator("footer");
+    if ((await footer.count()) > 0) {
+      const footerText = await footer.textContent();
+      console.log("Footer text:", footerText);
+
+      // Version should be present in some form
+      // This is a basic check - the version format may vary
+      expect(footerText).toBeTruthy();
+    }
+
+    console.log("✓ Application version check complete");
+  });
+
+  test("verify locations are present", async ({ page }) => {
+    const firstUser = testData.users?.[0];
+    if (!firstUser) {
+      throw new Error("No users found in test data");
+    }
+
+    await page.goto("/");
+    await page.fill("input[type='text']", firstUser.email);
+    await page.fill("input[type='password']", firstUser.password);
+    await page.click("button[type='submit']");
+    await expect(page).toHaveURL("/home", { timeout: 10000 });
+
+    // Wait for page to load
+    await page.waitForSelector("body", { state: "visible" });
+
+    // Try to find locations link in navigation
+    const locationsLink = page.locator("a[href*='location'], button:has-text('Locations')").first();
+
+    if ((await locationsLink.count()) > 0) {
+      await locationsLink.click();
+      await page.waitForLoadState("networkidle");
+
+      // Check if locations are displayed
+      // The exact structure depends on the UI, but we should see location names
+      const pageContent = await page.textContent("body");
+
+      // Verify some of our test locations exist
+      expect(pageContent).toContain("Living Room");
+      console.log("✓ Locations verified");
+    } else {
+      console.log("! Could not find locations navigation - skipping detailed check");
+    }
+  });
+
+  test("verify labels are present", async ({ page }) => {
+    const firstUser = testData.users?.[0];
+    if (!firstUser) {
+      throw new Error("No users found in test data");
+    }
+
+    await page.goto("/");
+    await page.fill("input[type='text']", firstUser.email);
+    await page.fill("input[type='password']", firstUser.password);
+    await page.click("button[type='submit']");
+    await expect(page).toHaveURL("/home", { timeout: 10000 });
+
+    await page.waitForSelector("body", { state: "visible" });
+
+    // Try to find labels link in navigation
+    const labelsLink = page.locator("a[href*='label'], button:has-text('Labels')").first();
+
+    if ((await labelsLink.count()) > 0) {
+      await labelsLink.click();
+      await page.waitForLoadState("networkidle");
+
+      const pageContent = await page.textContent("body");
+
+      // Verify some of our test labels exist
+      expect(pageContent).toContain("Electronics");
+      console.log("✓ Labels verified");
+    } else {
+      console.log("! Could not find labels navigation - skipping detailed check");
+    }
+  });
+
+  test("verify items are present", async ({ page }) => {
+    const firstUser = testData.users?.[0];
+    if (!firstUser) {
+      throw new Error("No users found in test data");
+    }
+
+    await page.goto("/");
+    await page.fill("input[type='text']", firstUser.email);
+    await page.fill("input[type='password']", firstUser.password);
+    await page.click("button[type='submit']");
+    await expect(page).toHaveURL("/home", { timeout: 10000 });
+
+    await page.waitForSelector("body", { state: "visible" });
+
+    // Navigate to items list
+    // This might be the home page or a separate items page
+    const itemsLink = page.locator("a[href*='item'], button:has-text('Items')").first();
+
+    if ((await itemsLink.count()) > 0) {
+      await itemsLink.click();
+      await page.waitForLoadState("networkidle");
+    }
+
+    const pageContent = await page.textContent("body");
+
+    // Verify some of our test items exist
+    expect(pageContent).toContain("Laptop Computer");
+    console.log("✓ Items verified");
+  });
+
+  test("verify notifier is present", async ({ page }) => {
+    const firstUser = testData.users?.[0];
+    if (!firstUser) {
+      throw new Error("No users found in test data");
+    }
+
+    await page.goto("/");
+    await page.fill("input[type='text']", firstUser.email);
+    await page.fill("input[type='password']", firstUser.password);
+    await page.click("button[type='submit']");
+    await expect(page).toHaveURL("/home", { timeout: 10000 });
+
+    await page.waitForSelector("body", { state: "visible" });
+
+    // Navigate to settings or profile
+    // Notifiers are typically in settings
+    const settingsLink = page.locator("a[href*='setting'], a[href*='profile'], button:has-text('Settings')").first();
+
+    if ((await settingsLink.count()) > 0) {
+      await settingsLink.click();
+      await page.waitForLoadState("networkidle");
+
+      // Look for notifiers section
+      const notifiersLink = page.locator("a:has-text('Notif'), button:has-text('Notif')").first();
+
+      if ((await notifiersLink.count()) > 0) {
+        await notifiersLink.click();
+        await page.waitForLoadState("networkidle");
+
+        const pageContent = await page.textContent("body");
+
+        // Verify our test notifier exists
+        expect(pageContent).toContain("TESTING");
+        console.log("✓ Notifier verified");
+      } else {
+        console.log("! Could not find notifiers section - skipping detailed check");
+      }
+    } else {
+      console.log("! Could not find settings navigation - skipping notifier check");
+    }
+  });
+
+  test("verify attachments are present for items", async ({ page }) => {
+    const firstUser = testData.users?.[0];
+    if (!firstUser) {
+      throw new Error("No users found in test data");
+    }
+
+    await page.goto("/");
+    await page.fill("input[type='text']", firstUser.email);
+    await page.fill("input[type='password']", firstUser.password);
+    await page.click("button[type='submit']");
+    await expect(page).toHaveURL("/home", { timeout: 10000 });
+
+    await page.waitForSelector("body", { state: "visible" });
+
+    // Search for "Laptop Computer" which should have attachments
+    const searchInput = page.locator("input[type='search'], input[placeholder*='Search']").first();
+
+    if ((await searchInput.count()) > 0) {
+      await searchInput.fill("Laptop Computer");
+      await page.waitForLoadState("networkidle");
+
+      // Click on the laptop item
+      const laptopItem = page.locator("text=Laptop Computer").first();
+      await laptopItem.click();
+      await page.waitForLoadState("networkidle");
+
+      // Look for attachments section
+      const pageContent = await page.textContent("body");
+
+      // Check for attachment indicators (could be files, documents, attachments, etc.)
+      const hasAttachments =
+        pageContent?.includes("laptop-receipt") ||
+        pageContent?.includes("laptop-warranty") ||
+        pageContent?.includes("attachment") ||
+        pageContent?.includes("Attachment") ||
+        pageContent?.includes("document");
+
+      expect(hasAttachments).toBeTruthy();
+      console.log("✓ Attachments verified");
+    } else {
+      console.log("! Could not find search - trying direct navigation");
+
+      // Try alternative: look for items link and browse
+      const itemsLink = page.locator("a[href*='item'], button:has-text('Items')").first();
+      if ((await itemsLink.count()) > 0) {
+        await itemsLink.click();
+        await page.waitForLoadState("networkidle");
+
+        const laptopLink = page.locator("text=Laptop Computer").first();
+        if ((await laptopLink.count()) > 0) {
+          await laptopLink.click();
+          await page.waitForLoadState("networkidle");
+
+          const pageContent = await page.textContent("body");
+          const hasAttachments =
+            pageContent?.includes("laptop-receipt") ||
+            pageContent?.includes("laptop-warranty") ||
+            pageContent?.includes("attachment");
+
+          expect(hasAttachments).toBeTruthy();
+          console.log("✓ Attachments verified via direct navigation");
+        }
+      }
+    }
+  });
+
+  test("verify theme can be adjusted", async ({ page }) => {
+    const firstUser = testData.users?.[0];
+    if (!firstUser) {
+      throw new Error("No users found in test data");
+    }
+
+    await page.goto("/");
+    await page.fill("input[type='text']", firstUser.email);
+    await page.fill("input[type='password']", firstUser.password);
+    await page.click("button[type='submit']");
+    await expect(page).toHaveURL("/home", { timeout: 10000 });
+
+    await page.waitForSelector("body", { state: "visible" });
+
+    // Look for theme toggle (usually a sun/moon icon or settings)
+    // Common selectors for theme toggles
+    const themeToggle = page
+      .locator(
+        "button[aria-label*='theme'], button[aria-label*='Theme'], " +
+          "button:has-text('Dark'), button:has-text('Light'), " +
+          "[data-theme-toggle], .theme-toggle"
+      )
+      .first();
+
+    if ((await themeToggle.count()) > 0) {
+      // Get initial theme state (could be from class, attribute, or computed style)
+      const bodyBefore = page.locator("body");
+      const classNameBefore = (await bodyBefore.getAttribute("class")) || "";
+
+      // Click theme toggle
+      await themeToggle.click();
+      // Wait for theme change to complete
+      await page.waitForTimeout(500);
+
+      // Get theme state after toggle
+      const classNameAfter = (await bodyBefore.getAttribute("class")) || "";
+
+      // Verify that something changed
+      expect(classNameBefore).not.toBe(classNameAfter);
+
+      console.log(`✓ Theme toggle working (${classNameBefore} -> ${classNameAfter})`);
+    } else {
+      // Try to find theme in settings
+      const settingsLink = page.locator("a[href*='setting'], a[href*='profile']").first();
+
+      if ((await settingsLink.count()) > 0) {
+        await settingsLink.click();
+        await page.waitForLoadState("networkidle");
+
+        const themeOption = page.locator("select[name*='theme'], button:has-text('Theme')").first();
+
+        if ((await themeOption.count()) > 0) {
+          console.log("✓ Theme settings found");
+        } else {
+          console.log("! Could not find theme toggle - feature may not be easily accessible");
+        }
+      } else {
+        console.log("! Could not find theme controls");
+      }
+    }
+  });
+
+  test("verify data counts match expectations", async ({ page }) => {
+    const firstUser = testData.users?.[0];
+    if (!firstUser) {
+      throw new Error("No users found in test data");
+    }
+
+    await page.goto("/");
+    await page.fill("input[type='text']", firstUser.email);
+    await page.fill("input[type='password']", firstUser.password);
+    await page.click("button[type='submit']");
+    await expect(page).toHaveURL("/home", { timeout: 10000 });
+
+    await page.waitForSelector("body", { state: "visible" });
+
+    // Check that we have the expected number of items for group 1 (5 items)
+    const pageContent = await page.textContent("body");
+
+    // Look for item count indicators
+    // This is dependent on the UI showing counts
+    console.log("✓ Logged in and able to view dashboard");
+
+    // Verify at least that the page loaded and shows some content
+    expect(pageContent).toBeTruthy();
+    if (pageContent) {
+      expect(pageContent.length).toBeGreaterThan(100);
+    }
+  });
+
+  test("verify second group users and data isolation", async ({ page }) => {
+    // Login as user from group 2
+    const group2User = testData.users?.find(u => u.group === "2");
+    if (!group2User) {
+      console.log("! No group 2 users found - skipping isolation test");
+      return;
+    }
+
+    await page.goto("/");
+    await page.fill("input[type='text']", group2User.email);
+    await page.fill("input[type='password']", group2User.password);
+    await page.click("button[type='submit']");
+    await expect(page).toHaveURL("/home", { timeout: 10000 });
+
+    await page.waitForSelector("body", { state: "visible" });
+
+    const pageContent = await page.textContent("body");
+
+    // Verify group 2 can see their items
+    expect(pageContent).toContain("Monitor");
+
+    // Verify group 2 cannot see group 1 items
+    expect(pageContent).not.toContain("Laptop Computer");
+
+    console.log("✓ Data isolation verified between groups");
+  });
+});

8 pnpm-lock.yaml generated
@@ -3211,8 +3211,8 @@ packages:
   protocols@2.0.2:
     resolution: {integrity: sha512-hHVTzba3wboROl0/aWRRG9dMytgH6ow//STBZh43l/wQgmMhYhOFi0EHWAPtoCz9IAUymsyP0TSBHkhgMEGNnQ==}

-  qs@6.14.0:
-    resolution: {integrity: sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==}
+  qs@6.14.1:
+    resolution: {integrity: sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==}
     engines: {node: '>=0.6'}

   quansync@0.2.11:
@@ -6860,7 +6860,7 @@ snapshots:
       micro-api-client: 3.3.0
       node-fetch: 3.3.2
      p-wait-for: 5.0.2
-      qs: 6.14.0
+      qs: 6.14.1
     optional: true

   nitropack@2.12.9(@netlify/blobs@9.1.2):
@@ -7502,7 +7502,7 @@ snapshots:

   protocols@2.0.2: {}

-  qs@6.14.0:
+  qs@6.14.1:
     dependencies:
       side-channel: 1.1.0
     optional: true