Compare commits

..

1 Commit

Author | SHA1 | Message | Date
tonya | 646a80f494 | feat: begin work on language updates | 2025-01-04 13:11:23 +00:00
710 changed files with 27600 additions and 119046 deletions

View File

@@ -1,3 +1,5 @@
FROM mcr.microsoft.com/vscode/devcontainers/typescript-node:22-bullseye
# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
ARG VARIANT=16-bullseye
FROM mcr.microsoft.com/vscode/devcontainers/typescript-node:0-${VARIANT}
RUN sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b ~/.local/bin
RUN sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b ~/.local/bin

View File

@@ -3,7 +3,13 @@
{
"name": "Node.js & TypeScript",
"build": {
"dockerfile": "Dockerfile"
"dockerfile": "Dockerfile",
// Update 'VARIANT' to pick a Node version: 18, 16, 14.
// Append -bullseye or -buster to pin to an OS version.
// Use -bullseye variants on local arm64/Apple Silicon.
"args": {
"VARIANT": "18-bullseye"
}
},
// Configure tool-specific properties.
@@ -29,6 +35,6 @@
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "node",
"features": {
"ghcr.io/devcontainers/features/go:1": "1.24"
"ghcr.io/devcontainers/features/go:1": "1.21"
}
}

3
.gitattributes vendored
View File

@@ -1,3 +0,0 @@
backend/internal/data/ent/** linguist-generated=true
backend/internal/data/ent/schema/** linguist-generated=false
frontend/lib/api/types/** linguist-generated=true

40
.github/AGENTS.md vendored
View File

@@ -1,40 +0,0 @@
This is a Go-based repository with a VueJS frontend client built with Vite and Nuxt, using ShadCN components.
To make life easier, a Taskfile is included covering the majority of development commands.
Please follow these guidelines when contributing:
## Required Before Each Commit
- Generate Swagger Files: `task swag --force`
- Generate JS API Client: `task typescript-types --force`
- Lint Golang: `task go:lint`
- Lint frontend: `task ui:fix`
## Repository Structure
### Backend
- `backend/`: Contains the backend folders
- `backend/app`: Contains main app code including API endpoints
- `backend/internal/core`: Contains basic services such as currencies
- `backend/data`: Contains all information related to data, including `ent` schemas, repos, migrations, etc.
- `backend/data/migrations`: Contains migration data; the `sqlite3` sub-folder contains the SQLite migrations and the `postgres` sub-folder the Postgres migrations. BOTH are REQUIRED.
- `backend/data/ent/schema`: Contains the actual `ent` data models.
- `backend/data/repo`: Contains the data repositories
- `backend/pkgs`: Contains general helper functions and services
### Frontend
- `frontend/`: Contains initial frontend files
- `frontend/components`: Contains the ShadCN components
- `frontend/locales`: Contains the i18n JSON for languages
- `frontend/pages`: Contains VueJS pages
- `frontend/test`: Contains Playwright setup
- `frontend/test/e2e`: Contains actual Playwright test files
### Docs
- `docs/`: Contains VitePress based documentation
## Key Guidelines
1. Follow best practices for the various programming languages
2. Maintain existing code structure and organization when possible
3. Use dependency injection when reasonable
4. Write tests for new functionality and after fixing bugs to validate they're fixed
5. Document changes to the `docs/` folder when appropriate

1
.github/FUNDING.yml vendored
View File

@@ -1,2 +1 @@
open_collective: homebox
github: [tankerkiller125,katosdev,tonyaellie]

View File

@@ -58,17 +58,6 @@ body:
- Other
validations:
required: true
- type: dropdown
id: database
attributes:
label: Database Type
description: What database backend are you using?
multiple: false
options:
- SQLite
- PostgreSQL
validations:
required: true
- type: dropdown
id: arch
attributes:

View File

@@ -1,11 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: GitHub Community Support
url: https://github.com/sysadminsmedia/homebox/discussions/categories/support
about: Get support for issues here
- name: Feature Requests
url: https://github.com/sysadminsmedia/homebox/discussions/categories/ideas
about: Have an idea for Homebox? Share it in our discussions forum. If we decide to take it on we will create an issue for it.
- name: Translate
url: https://translate.sysadminsmedia.com
about: Help us translate Homebox! All contributions and all languages welcome!

View File

@@ -1,4 +1,9 @@
---
name: "Feature Request"
description: "Submit a feature request for the current release"
labels: ["⬆️ enhancement"]
projects: ["sysadminsmedia/2"]
type: "Enhancement"
body:
- type: textarea
id: problem-statement
@@ -26,5 +31,9 @@ body:
label: Contributions
description: Please confirm the following
options:
- label: I have searched through existing ideas in the discussions to check if this is a duplicate
- label: I have searched through existing issues and feature requests to see if my idea has already been proposed.
required: true
- label: If this feature is accepted, I would be willing to help implement and maintain this feature.
required: false
- label: If this feature is accepted, I'm willing to sponsor the development of this feature.
required: false

View File

@@ -1,10 +0,0 @@
---
name: "🛠️ Internal / Developer Issue"
about: "Unstructured issue for project members only. Outside contributors: please use a standard template."
title: "[INT]: "
labels: ["internal"]
assignees: []
---
**Summary:**
[Write here]

View File

@@ -1,432 +0,0 @@
---
applyTo: '/backend/app/api/handlers/**/*'
---
# Backend API Handlers Instructions (`/backend/app/api/handlers/v1/`)
## Overview
API handlers are the HTTP layer that processes requests, calls services, and returns responses. All handlers use the V1 API pattern with Swagger documentation for auto-generation.
## Architecture Flow
```
HTTP Request → Router → Middleware → Handler → Service → Repository → Database
HTTP Response
```
## Directory Structure
```
backend/app/api/
├── routes.go # Route definitions and middleware
├── handlers/
│ └── v1/
│ ├── controller.go # V1Controller struct and dependencies
│ ├── v1_ctrl_items.go # Item endpoints
│ ├── v1_ctrl_users.go # User endpoints
│ ├── v1_ctrl_locations.go # Location endpoints
│ ├── v1_ctrl_auth.go # Authentication endpoints
│ ├── helpers.go # HTTP helper functions
│ ├── query_params.go # Query parameter parsing
│ └── assets/ # Asset handling
```
## Handler Structure
### V1Controller
All handlers are methods on `V1Controller`:
```go
type V1Controller struct {
svc *services.AllServices // Service layer
repo *repo.AllRepos // Direct repo access (rare)
bus *eventbus.EventBus // Event publishing
}
func (ctrl *V1Controller) HandleItemCreate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
// Handler logic
}
}
```
### Swagger Documentation
**CRITICAL:** Every handler must have Swagger comments for API doc generation:
```go
// HandleItemsGetAll godoc
//
// @Summary Query All Items
// @Tags Items
// @Produce json
// @Param q query string false "search string"
// @Param page query int false "page number"
// @Param pageSize query int false "items per page"
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/items [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
// ...
}
}
```
**After modifying Swagger comments, ALWAYS run:**
```bash
task generate # Regenerates Swagger docs and TypeScript types
```
## Standard Handler Pattern
### 1. Decode Request
```go
func (ctrl *V1Controller) HandleItemCreate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
var itemData repo.ItemCreate
if err := server.Decode(r, &itemData); err != nil {
return validate.NewRequestError(err, http.StatusBadRequest)
}
// ... rest of handler
}
}
```
### 2. Extract Context
```go
// Get current user from request (added by auth middleware)
user := ctrl.CurrentUser(r)
// Create service context with group/user IDs
ctx := services.NewContext(r.Context(), user)
```
### 3. Call Service
```go
result, err := ctrl.svc.Items.Create(ctx, itemData)
if err != nil {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
```
### 4. Return Response
```go
return server.JSON(w, result, http.StatusCreated)
```
## Common Handler Patterns
### GET - Single Item
```go
// HandleItemGet godoc
//
// @Summary Get Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
id, err := ctrl.RouteUUID(r, "id")
if err != nil {
return err
}
ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
item, err := ctrl.svc.Items.Get(ctx, id)
if err != nil {
return validate.NewRequestError(err, http.StatusNotFound)
}
return server.JSON(w, item, http.StatusOK)
}
}
```
### GET - List with Pagination
```go
func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
// Parse query parameters
query := extractItemQuery(r)
ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
items, err := ctrl.svc.Items.GetAll(ctx, query)
if err != nil {
return err
}
return server.JSON(w, items, http.StatusOK)
}
}
// Helper to extract query params
func extractItemQuery(r *http.Request) repo.ItemQuery {
params := r.URL.Query()
return repo.ItemQuery{
Page: queryIntOrNegativeOne(params.Get("page")),
PageSize: queryIntOrNegativeOne(params.Get("pageSize")),
Search: params.Get("q"),
LocationIDs: queryUUIDList(params, "locations"),
}
}
```
### POST - Create
```go
// HandleItemCreate godoc
//
// @Summary Create Item
// @Tags Items
// @Accept json
// @Produce json
// @Param payload body repo.ItemCreate true "Item Data"
// @Success 201 {object} repo.ItemOut
// @Router /v1/items [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemCreate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
var data repo.ItemCreate
if err := server.Decode(r, &data); err != nil {
return validate.NewRequestError(err, http.StatusBadRequest)
}
ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
item, err := ctrl.svc.Items.Create(ctx, data)
if err != nil {
return err
}
return server.JSON(w, item, http.StatusCreated)
}
}
```
### PUT - Update
```go
// HandleItemUpdate godoc
//
// @Summary Update Item
// @Tags Items
// @Accept json
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.ItemUpdate true "Item Data"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
id, err := ctrl.RouteUUID(r, "id")
if err != nil {
return err
}
var data repo.ItemUpdate
if err := server.Decode(r, &data); err != nil {
return validate.NewRequestError(err, http.StatusBadRequest)
}
ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
item, err := ctrl.svc.Items.Update(ctx, id, data)
if err != nil {
return err
}
return server.JSON(w, item, http.StatusOK)
}
}
```
### DELETE
```go
// HandleItemDelete godoc
//
// @Summary Delete Item
// @Tags Items
// @Param id path string true "Item ID"
// @Success 204
// @Router /v1/items/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemDelete() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
id, err := ctrl.RouteUUID(r, "id")
if err != nil {
return err
}
ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
err = ctrl.svc.Items.Delete(ctx, id)
if err != nil {
return err
}
return server.JSON(w, nil, http.StatusNoContent)
}
}
```
### File Upload
```go
func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
id, err := ctrl.RouteUUID(r, "id")
if err != nil {
return err
}
// Parse multipart form
err = r.ParseMultipartForm(32 << 20) // 32MB max
if err != nil {
return err
}
file, header, err := r.FormFile("file")
if err != nil {
return err
}
defer file.Close()
ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
attachment, err := ctrl.svc.Items.CreateAttachment(ctx, id, file, header.Filename)
if err != nil {
return err
}
return server.JSON(w, attachment, http.StatusCreated)
}
}
```
## Routing
Routes are defined in `backend/app/api/routes.go`:
```go
func (a *app) mountRoutes(repos *repo.AllRepos, svc *services.AllServices) {
v1 := v1.NewControllerV1(svc, repos)
a.server.Get("/api/v1/items", v1.HandleItemsGetAll())
a.server.Post("/api/v1/items", v1.HandleItemCreate())
a.server.Get("/api/v1/items/{id}", v1.HandleItemGet())
a.server.Put("/api/v1/items/{id}", v1.HandleItemUpdate())
a.server.Delete("/api/v1/items/{id}", v1.HandleItemDelete())
}
```
## Helper Functions
### Query Parameter Parsing
Located in `query_params.go`:
```go
func queryIntOrNegativeOne(s string) int
func queryBool(s string) bool
func queryUUIDList(params url.Values, key string) []uuid.UUID
```
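As an illustration, here is a minimal sketch of how such helpers could be implemented; the exact bodies in `query_params.go` may differ, and the imports shown are assumptions.
```go
import (
	"net/url"
	"strconv"

	"github.com/google/uuid"
)

// queryIntOrNegativeOne parses s as an int, returning -1 when missing or invalid.
func queryIntOrNegativeOne(s string) int {
	i, err := strconv.Atoi(s)
	if err != nil {
		return -1
	}
	return i
}

// queryBool parses s as a boolean, defaulting to false on error.
func queryBool(s string) bool {
	b, _ := strconv.ParseBool(s)
	return b
}

// queryUUIDList collects all valid UUIDs passed for the given key, skipping malformed values.
func queryUUIDList(params url.Values, key string) []uuid.UUID {
	var ids []uuid.UUID
	for _, v := range params[key] {
		id, err := uuid.Parse(v)
		if err != nil {
			continue
		}
		ids = append(ids, id)
	}
	return ids
}
```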
### Response Helpers
```go
// From httpkit/server
server.JSON(w, data, statusCode) // JSON response
server.Respond(w, statusCode) // Empty response
validate.NewRequestError(err, statusCode) // Error response
```
### Authentication
```go
user := ctrl.CurrentUser(r) // Get authenticated user (from middleware)
```
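A hedged sketch of what `CurrentUser` typically looks like: the auth middleware stores the user on the request context and the handler reads it back. The context key and helper names here are assumptions, not the actual implementation.
```go
// Hypothetical sketch; the real context key and user type may differ.
type ctxKey int

const userCtxKey ctxKey = iota

// Middleware side: attach the authenticated user to the request context.
func withUser(r *http.Request, user repo.UserOut) *http.Request {
	return r.WithContext(context.WithValue(r.Context(), userCtxKey, user))
}

// Handler side: read the user back out.
func (ctrl *V1Controller) CurrentUser(r *http.Request) repo.UserOut {
	user, _ := r.Context().Value(userCtxKey).(repo.UserOut)
	return user
}
```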
## Adding a New Endpoint
### 1. Create Handler
In `backend/app/api/handlers/v1/v1_ctrl_myentity.go`:
```go
// HandleMyEntityCreate godoc
//
// @Summary Create MyEntity
// @Tags MyEntity
// @Accept json
// @Produce json
// @Param payload body repo.MyEntityCreate true "Data"
// @Success 201 {object} repo.MyEntityOut
// @Router /v1/my-entity [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleMyEntityCreate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
var data repo.MyEntityCreate
if err := server.Decode(r, &data); err != nil {
return validate.NewRequestError(err, http.StatusBadRequest)
}
ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
result, err := ctrl.svc.MyEntity.Create(ctx, data)
if err != nil {
return err
}
return server.JSON(w, result, http.StatusCreated)
}
}
```
### 2. Add Route
In `backend/app/api/routes.go`:
```go
a.server.Post("/api/v1/my-entity", v1.HandleMyEntityCreate())
```
### 3. Generate Docs
```bash
task generate # Generates Swagger docs and TypeScript types
```
### 4. Test
```bash
task go:build # Verify builds
task go:test # Run tests
```
## Critical Rules
1. **ALWAYS add Swagger comments** - required for API docs and TypeScript type generation
2. **Run `task generate` after handler changes** - updates API documentation
3. **Use services, not repos directly** - handlers call services, services call repos
4. **Always use `services.Context`** - includes auth and multi-tenancy
5. **Handle errors properly** - use `validate.NewRequestError()` with appropriate status codes
6. **Validate input** - decode and validate request bodies
7. **Return correct status codes** - 200 OK, 201 Created, 204 No Content, 400 Bad Request, 404 Not Found
## Common Issues
- **"Missing Swagger docs"** → Add `@Summary`, `@Tags`, `@Router` comments, run `task generate`
- **TypeScript types outdated** → Run `task generate` to regenerate
- **Auth failures** → Ensure route has auth middleware and `@Security Bearer`
- **CORS errors** → Check middleware configuration in `routes.go`

View File

@@ -1,341 +0,0 @@
---
applyTo: '/backend/internal/core/services/**/*'
---
# Backend Services Layer Instructions (`/backend/internal/core/services/`)
## Overview
The services layer contains business logic that orchestrates between repositories and API handlers. Services handle complex operations, validation, and cross-cutting concerns.
## Architecture Pattern
```
Handler (API) → Service (Business Logic) → Repository (Data Access) → Database
```
**Separation of concerns:**
- **Handlers** (`backend/app/api/handlers/v1/`) - HTTP request/response, routing, auth
- **Services** (`backend/internal/core/services/`) - Business logic, orchestration
- **Repositories** (`backend/internal/data/repo/`) - Database operations, queries
## Directory Structure
```
backend/internal/core/services/
├── all.go # Service aggregation
├── service_items.go # Item business logic
├── service_items_attachments.go # Item attachments logic
├── service_user.go # User management logic
├── service_group.go # Group management logic
├── service_background.go # Background tasks
├── contexts.go # Service context types
├── reporting/ # Reporting subsystem
│ ├── eventbus/ # Event bus for notifications
│ └── *.go # Report generation logic
└── *_test.go # Service tests
```
## Service Structure
### Standard Pattern
```go
type ItemService struct {
repo *repo.AllRepos // Access to all repositories
filepath string // File storage path
autoIncrementAssetID bool // Feature flags
}
func (svc *ItemService) Create(ctx Context, item repo.ItemCreate) (repo.ItemOut, error) {
// 1. Validation
if item.Name == "" {
return repo.ItemOut{}, errors.New("name required")
}
// 2. Business logic
if svc.autoIncrementAssetID {
highest, err := svc.repo.Items.GetHighestAssetID(ctx, ctx.GID)
if err != nil {
return repo.ItemOut{}, err
}
item.AssetID = highest + 1
}
// 3. Repository call
return svc.repo.Items.Create(ctx, ctx.GID, item)
}
```
### Service Context
Services use a custom `Context` type that extends `context.Context`:
```go
type Context struct {
context.Context
GID uuid.UUID // Group ID for multi-tenancy
UID uuid.UUID // User ID for audit
}
```
**Always use `Context` from services package, not raw `context.Context`.**
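A plausible sketch of how `NewContext` could populate these IDs from the authenticated user (the `repo.UserOut` field names here are assumptions):
```go
// Hypothetical constructor; the real services.NewContext may differ slightly.
func NewContext(ctx context.Context, user repo.UserOut) Context {
	return Context{
		Context: ctx,
		GID:     user.GroupID, // group ID for multi-tenancy
		UID:     user.ID,      // user ID for audit
	}
}
```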
## Common Service Patterns
### 1. CRUD with Business Logic
```go
func (svc *ItemService) Update(ctx Context, id uuid.UUID, data repo.ItemUpdate) (repo.ItemOut, error) {
// Fetch existing
existing, err := svc.repo.Items.Get(ctx, id)
if err != nil {
return repo.ItemOut{}, err
}
// Business rules
if existing.Archived && data.Quantity != nil {
return repo.ItemOut{}, errors.New("cannot modify archived items")
}
// Update
return svc.repo.Items.Update(ctx, id, data)
}
```
### 2. Orchestrating Multiple Repositories
```go
func (svc *ItemService) CreateWithAttachment(ctx Context, item repo.ItemCreate, file io.Reader) (repo.ItemOut, error) {
// Create item
created, err := svc.repo.Items.Create(ctx, ctx.GID, item)
if err != nil {
return repo.ItemOut{}, err
}
// Upload attachment
attachment, err := svc.repo.Attachments.Create(ctx, created.ID, file)
if err != nil {
// Rollback - delete item
_ = svc.repo.Items.Delete(ctx, created.ID)
return repo.ItemOut{}, err
}
created.Attachments = []repo.AttachmentOut{attachment}
return created, nil
}
```
### 3. Background Tasks
```go
func (svc *ItemService) EnsureAssetID(ctx context.Context, gid uuid.UUID) (int, error) {
// Get items without asset IDs
items, err := svc.repo.Items.GetAllZeroAssetID(ctx, gid)
if err != nil {
return 0, err
}
// Batch assign
highest, err := svc.repo.Items.GetHighestAssetID(ctx, gid)
if err != nil {
return 0, err
}
for _, item := range items {
highest++
_ = svc.repo.Items.Update(ctx, item.ID, repo.ItemUpdate{
AssetID: &highest,
})
}
return len(items), nil
}
```
### 4. Event Publishing
Services can publish events to the event bus:
```go
func (svc *ItemService) Delete(ctx Context, id uuid.UUID) error {
err := svc.repo.Items.Delete(ctx, id)
if err != nil {
return err
}
// Publish event for notifications
svc.repo.Bus.Publish(eventbus.Event{
Type: "item.deleted",
Data: map[string]interface{}{"id": id},
})
return nil
}
```
## Service Aggregation
All services are bundled in `all.go`:
```go
type AllServices struct {
User *UserService
Group *GroupService
Items *ItemService
// ... other services
}
func New(repos *repo.AllRepos, filepath string) *AllServices {
return &AllServices{
User: &UserService{repo: repos},
Items: &ItemService{repo: repos, filepath: filepath},
// ...
}
}
```
**Accessed in handlers via:**
```go
ctrl.svc.Items.Create(ctx, itemData)
```
## Working with Services from Handlers
Handlers call services, not repositories directly:
```go
// In backend/app/api/handlers/v1/v1_ctrl_items.go
func (ctrl *V1Controller) HandleItemCreate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
var itemData repo.ItemCreate
if err := server.Decode(r, &itemData); err != nil {
return err
}
// Get context with group/user IDs
ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
// Call service (not repository)
item, err := ctrl.svc.Items.Create(ctx, itemData)
if err != nil {
return err
}
return server.JSON(w, item, http.StatusCreated)
}
}
```
## Testing Services
Service tests mock repositories using interfaces:
```go
func TestItemService_Create(t *testing.T) {
mockRepo := &mockItemRepo{
CreateFunc: func(ctx context.Context, gid uuid.UUID, data repo.ItemCreate) (repo.ItemOut, error) {
return repo.ItemOut{ID: uuid.New(), Name: data.Name}, nil
},
}
svc := &ItemService{repo: &repo.AllRepos{Items: mockRepo}}
ctx := services.Context{GID: uuid.New(), UID: uuid.New()}
result, err := svc.Create(ctx, repo.ItemCreate{Name: "Test"})
assert.NoError(t, err)
assert.Equal(t, "Test", result.Name)
}
```
**Run service tests:**
```bash
cd backend && go test ./internal/core/services -v
```
## Adding a New Service
### 1. Create Service File
Create `backend/internal/core/services/service_myentity.go`:
```go
package services
type MyEntityService struct {
repo *repo.AllRepos
}
func (svc *MyEntityService) Create(ctx Context, data repo.MyEntityCreate) (repo.MyEntityOut, error) {
// Business logic here
return svc.repo.MyEntity.Create(ctx, ctx.GID, data)
}
```
### 2. Add to AllServices
Edit `backend/internal/core/services/all.go`:
```go
type AllServices struct {
// ... existing services
MyEntity *MyEntityService
}
func New(repos *repo.AllRepos, filepath string) *AllServices {
return &AllServices{
// ... existing services
MyEntity: &MyEntityService{repo: repos},
}
}
```
### 3. Use in Handler
In `backend/app/api/handlers/v1/`:
```go
func (ctrl *V1Controller) HandleMyEntityCreate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context(), ctrl.CurrentUser(r))
result, err := ctrl.svc.MyEntity.Create(ctx, data)
// ...
}
}
```
### 4. Run Tests
```bash
task generate # If you modified schemas
task go:test # Run all tests
```
## Common Service Responsibilities
**Services should:**
- ✅ Contain business logic and validation
- ✅ Orchestrate multiple repository calls
- ✅ Handle transactions (when needed)
- ✅ Publish events for side effects
- ✅ Enforce access control and multi-tenancy
- ✅ Transform data between API and repository formats
**Services should NOT:**
- ❌ Handle HTTP requests/responses (that's handlers)
- ❌ Construct SQL queries (that's repositories)
- ❌ Import handler packages (creates circular deps)
- ❌ Directly access database (use repositories)
## Critical Rules
1. **Always use `services.Context`** - includes group/user IDs for multi-tenancy
2. **Services call repos, handlers call services** - maintains layer separation
3. **No direct database access** - always through repositories
4. **Business logic goes here** - not in handlers or repositories
5. **Test services independently** - mock repository dependencies
## Common Patterns to Follow
- **Validation:** Check business rules before calling repository
- **Error wrapping:** Add context to repository errors
- **Logging:** Use `log.Ctx(ctx)` for contextual logging
- **Transactions:** Use `repo.WithTx()` for multi-step operations
- **Events:** Publish to event bus for notifications/side effects

View File

@@ -1,239 +0,0 @@
---
applyTo: 'backend/internal/data/**/*'
---
# Backend Data Layer Instructions (`/backend/internal/data/`)
## Overview
This directory contains the data access layer using **Ent ORM** (entity framework). It follows a clear separation between schema definitions, generated code, and repository implementations.
## Directory Structure
```
backend/internal/data/
├── ent/ # Ent ORM generated code (DO NOT EDIT)
│ ├── schema/ # Schema definitions (EDIT THESE)
│ │ ├── item.go # Item entity schema
│ │ ├── user.go # User entity schema
│ │ ├── location.go # Location entity schema
│ │ ├── label.go # Label entity schema
│ │ └── mixins/ # Reusable schema mixins
│ ├── *.go # Generated entity code
│ └── migrate/ # Generated migrations
├── repo/ # Repository pattern implementations
│ ├── repos_all.go # Aggregates all repositories
│ ├── repo_items.go # Item repository
│ ├── repo_users.go # User repository
│ ├── repo_locations.go # Location repository
│ └── *_test.go # Repository tests
├── migrations/ # Manual SQL migrations
│ ├── sqlite3/ # SQLite-specific migrations
│ └── postgres/ # PostgreSQL-specific migrations
└── types/ # Custom data types
```
## Ent ORM Workflow
### 1. Defining Schemas (`ent/schema/`)
**ALWAYS edit schema files here** - these define your database entities:
```go
// Example: backend/internal/data/ent/schema/item.go
type Item struct {
ent.Schema
}
func (Item) Fields() []ent.Field {
return []ent.Field{
field.String("name").NotEmpty(),
field.Int("quantity").Default(1),
field.Bool("archived").Default(false),
}
}
func (Item) Edges() []ent.Edge {
return []ent.Edge{
edge.From("location", Location.Type).Ref("items").Unique(),
edge.From("labels", Label.Type).Ref("items"),
}
}
func (Item) Indexes() []ent.Index {
return []ent.Index{
index.Fields("name"),
index.Fields("archived"),
}
}
```
**Common schema patterns:**
- Use `mixins.BaseMixin{}` for `id`, `created_at`, `updated_at` fields
- Use `mixins.DetailsMixin{}` for `name` and `description` fields
- Use `GroupMixin{ref: "items"}` to link entities to groups
- Add indexes for frequently queried fields
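For example, a schema following these patterns might look roughly like this (the entity name and the `GroupMixin` edge ref are illustrative):
```go
// Illustrative schema composed from the mixins listed above.
type Asset struct {
	ent.Schema
}

func (Asset) Mixin() []ent.Mixin {
	return []ent.Mixin{
		mixins.BaseMixin{},        // id, created_at, updated_at
		mixins.DetailsMixin{},     // name, description
		GroupMixin{ref: "assets"}, // link to the owning group (hypothetical ref)
	}
}

func (Asset) Fields() []ent.Field {
	return []ent.Field{
		field.Bool("archived").Default(false),
	}
}

func (Asset) Indexes() []ent.Index {
	return []ent.Index{
		index.Fields("archived"), // index a frequently queried field
	}
}
```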
### 2. Generating Code
**After modifying any schema file, ALWAYS run:**
```bash
task generate
```
This:
1. Runs `go generate ./...` in `backend/internal/` (generates Ent code)
2. Generates Swagger docs from API handlers
3. Generates TypeScript types for frontend
**Generated files you'll see:**
- `ent/*.go` - Entity types, builders, queries
- `ent/migrate/migrate.go` - Auto migrations
- `ent/predicate/predicate.go` - Query predicates
**NEVER edit generated files directly** - changes will be overwritten.
### 3. Using Generated Code in Repositories
Repositories in `repo/` use the generated Ent client:
```go
// Example: backend/internal/data/repo/repo_items.go
type ItemsRepository struct {
db *ent.Client
bus *eventbus.EventBus
}
func (r *ItemsRepository) Create(ctx context.Context, gid uuid.UUID, data ItemCreate) (ItemOut, error) {
entity, err := r.db.Item.Create().
SetName(data.Name).
SetQuantity(data.Quantity).
SetGroupID(gid).
Save(ctx)
return mapToItemOut(entity), err
}
```
## Repository Pattern
### Structure
Each entity typically has:
- **Repository struct** (`ItemsRepository`) - holds DB client and dependencies
- **Input types** (`ItemCreate`, `ItemUpdate`) - API input DTOs
- **Output types** (`ItemOut`, `ItemSummary`) - API response DTOs
- **Query types** (`ItemQuery`) - search/filter parameters
- **Mapper functions** (`mapToItemOut`) - converts Ent entities to output DTOs
### Key Methods
Repositories typically implement:
- `Create(ctx, gid, input)` - Create new entity
- `Get(ctx, id)` - Get single entity by ID
- `GetAll(ctx, gid, query)` - Query with pagination/filters
- `Update(ctx, id, input)` - Update entity
- `Delete(ctx, id)` - Delete entity
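Put together, a repository for an entity typically looks something like the condensed sketch below; the field names and query details are illustrative, not the actual `repo_items.go` code.
```go
// Illustrative DTOs, mapper, and a Get method; real repositories have more fields and methods.
type ItemCreate struct {
	Name     string `json:"name"`
	Quantity int    `json:"quantity"`
}

type ItemOut struct {
	ID       uuid.UUID `json:"id"`
	Name     string    `json:"name"`
	Quantity int       `json:"quantity"`
}

func mapToItemOut(e *ent.Item) ItemOut {
	return ItemOut{ID: e.ID, Name: e.Name, Quantity: e.Quantity}
}

func (r *ItemsRepository) Get(ctx context.Context, id uuid.UUID) (ItemOut, error) {
	entity, err := r.db.Item.Query().
		Where(item.IDEQ(id)).
		Only(ctx)
	if err != nil {
		return ItemOut{}, err
	}
	return mapToItemOut(entity), nil
}
```
Keeping the mapper separate from the query keeps Ent entities out of the API layer, so handlers and services only ever see the output DTOs.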
### Working with Ent Queries
**Loading relationships (edges):**
```go
items, err := r.db.Item.Query().
WithLocation(). // Load location edge
WithLabels(). // Load labels edge
WithChildren(). // Load child items
Where(item.GroupIDEQ(gid)).
All(ctx)
```
**Filtering:**
```go
query := r.db.Item.Query().
Where(
item.GroupIDEQ(gid),
item.ArchivedEQ(false),
item.NameContainsFold(search),
)
```
**Ordering and pagination:**
```go
items, err := query.
Order(ent.Desc(item.FieldCreatedAt)).
Limit(pageSize).
Offset((page - 1) * pageSize).
All(ctx)
```
## Common Workflows
### Adding a New Entity
1. **Create schema:** `backend/internal/data/ent/schema/myentity.go`
2. **Run:** `task generate` (generates Ent code)
3. **Create repository:** `backend/internal/data/repo/repo_myentity.go`
4. **Add to AllRepos:** Edit `repo/repos_all.go` to include the new repo (see the sketch after this list)
5. **Run tests:** `task go:test`
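For step 4, the change to `repo/repos_all.go` would typically look like this sketch (the field and constructor names are illustrative, not the actual code):
```go
// Illustrative; the actual AllRepos struct and constructor differ.
type AllRepos struct {
	Items      *ItemsRepository
	MyEntities *MyEntityRepository // newly added repository
}

func New(db *ent.Client) *AllRepos {
	return &AllRepos{
		Items:      &ItemsRepository{db: db},
		MyEntities: &MyEntityRepository{db: db},
	}
}
```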
### Adding Fields to Existing Entity
1. **Edit schema:** `backend/internal/data/ent/schema/item.go`
```go
field.String("new_field").Optional()
```
2. **Run:** `task generate`
3. **Update repository:** Add field to input/output types in `repo/repo_items.go`
4. **Update mappers:** Ensure mapper functions handle new field
5. **Run tests:** `task go:test`
### Adding Relationships (Edges)
1. **Edit both schemas:**
```go
// In item.go
edge.From("location", Location.Type).Ref("items").Unique()
// In location.go
edge.To("items", Item.Type)
```
2. **Run:** `task generate`
3. **Use in queries:** `.WithLocation()` to load the edge
4. **Run tests:** `task go:test`
## Testing
Repository tests use `enttest` for in-memory SQLite:
```go
func TestItemRepo(t *testing.T) {
client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&_fk=1")
defer client.Close()
repo := &ItemsRepository{db: client}
// Test methods...
}
```
**Run repository tests:**
```bash
cd backend && go test ./internal/data/repo -v
```
## Critical Rules
1. **ALWAYS run `task generate` after schema changes** - builds will fail otherwise
2. **NEVER edit files in `ent/` except `ent/schema/`** - they're generated
3. **Use repositories, not raw Ent queries in services/handlers** - maintains separation
4. **Include `group_id` in all queries** - ensures multi-tenancy
5. **Use `.WithX()` to load edges** - avoids N+1 queries
6. **Test with both SQLite and PostgreSQL** - CI tests both
## Common Errors
- **"undefined: ent.ItemX"** → Run `task generate` after schema changes
- **Migration conflicts** → Check `migrations/` for manual migration files
- **Foreign key violations** → Ensure edges are properly defined in both schemas
- **Slow queries** → Add indexes in schema `Indexes()` method

View File

@@ -1,157 +0,0 @@
# Homebox Repository Instructions for Coding Agents
## Repository Overview
**Type**: Full-stack home inventory management web app (monorepo)
**Size**: ~265 Go files, ~371 TypeScript/Vue files
**Build Tool**: Task (Taskfile.yml) - **ALWAYS use `task` commands**
**Database**: SQLite (default) or PostgreSQL
### Stack
- **Backend** (`/backend`): Go 1.24+, Chi router, Ent ORM, port 7745
- **Frontend** (`/frontend`): Nuxt 4, Vue 3, TypeScript, Tailwind CSS, pnpm 9.1.4+, dev proxies to backend
## Critical Build & Validation Commands
### Initial Setup (Run Once)
```bash
task setup # Installs swag, goose, Go deps, pnpm deps
```
### Code Generation (Required Before Backend Work)
```bash
task generate # Generates Ent ORM, Swagger docs, TypeScript types
```
**ALWAYS run after**: schema changes, API handler changes, before backend server/tests
**Note**: "TypeSpecDef is nil" warnings are normal - ignore them
### Backend Commands
```bash
task go:build # Build binary (60-90s)
task go:test # Unit tests (5-10s)
task go:lint # golangci-lint (6m timeout in CI)
task go:all # Tidy + lint + test
task go:run # Start server (SQLite)
task pr # Full PR validation (3-5 min)
```
### Frontend Commands
```bash
task ui:dev # Dev server port 3000
task ui:check # Type checking
task ui:fix # eslint --fix + prettier
task ui:watch # Vitest watch mode
```
**Lint**: Max 1 warning in CI (`pnpm run lint:ci`)
### Testing
```bash
task test:ci # Integration tests (15-30s + startup)
task test:e2e # Playwright E2E (60s+ per shard, needs playwright install)
task pr # Full PR validation: generate + go:all + ui:check + ui:fix + test:ci (3-5 min)
```
## Project Structure
### Key Root Files
- `Taskfile.yml` - All commands (always use `task`)
- `docker-compose.yml`, `Dockerfile*` - Docker configs
- `CONTRIBUTING.md` - Contribution guidelines
### Backend Structure (`/backend`)
```
backend/
├── app/
│ ├── api/ # Main API application
│ │ ├── main.go # Entry point
│ │ ├── routes.go # Route definitions
│ │ ├── handlers/ # HTTP handlers (v1 API)
│ │ ├── static/ # Swagger docs, embedded frontend
│ │ └── providers/ # Service providers
│ └── tools/
│ └── typegen/ # TypeScript type generation tool
├── internal/
│ ├── core/
│ │ └── services/ # Business logic layer
│ ├── data/
│ │ ├── ent/ # Ent ORM generated code + schemas
│ │ │ └── schema/ # Schema definitions (edit these)
│ │ └── repo/ # Repository pattern implementations
│ ├── sys/ # System utilities (config, validation)
│ └── web/ # Web middleware
├── pkgs/ # Reusable packages
├── go.mod, go.sum # Go dependencies
└── .golangci.yml # Linter configuration
```
**Patterns**: Schema/API changes → edit source → `task generate`. Never edit generated code in `ent/`.
### Frontend Structure (`/frontend`)
```
frontend/
├── app.vue # Root component
├── nuxt.config.ts # Nuxt configuration
├── package.json # Frontend dependencies
├── components/ # Vue components (auto-imported)
├── pages/ # File-based routing
├── layouts/ # Layout components
├── composables/ # Vue composables (auto-imported)
├── stores/ # Pinia state stores
├── lib/
│ └── api/
│ └── types/ # Generated TypeScript API types
├── locales/ # i18n translations
├── test/ # Vitest + Playwright tests
├── eslint.config.mjs # ESLint configuration
└── tailwind.config.js # Tailwind configuration
```
**Patterns**: Auto-imports for `components/` and `composables/`. API types auto-generated - never edit manually.
## CI/CD Workflows
PR checks (`.github/workflows/pull-requests.yaml`) on `main`/`vnext`:
1. **Backend**: Go 1.24, golangci-lint, `task go:build`, `task go:coverage`
2. **Frontend**: Lint (max 1 warning), typecheck, `task test:ci` (SQLite + PostgreSQL v15-17)
3. **E2E**: 4 sharded Playwright runs (60min timeout)
All must pass before merge.
## Common Pitfalls
1. **Missing tools**: Run `task setup` first (installs swag, goose, deps)
2. **Stale generated code**: Always `task generate` after schema/API changes
3. **Test failures**: Integration tests may fail first run (race condition) - retry
4. **Port in use**: Backend uses 7745 - kill existing process
5. **SQLite locked**: Delete `.data/homebox.db-*` files
6. **Clean build**: `rm -rf build/ backend/app/api/static/public/ frontend/.nuxt`
## Environment Variables
Backend defaults in `Taskfile.yml`:
- `HBOX_LOG_LEVEL=debug`
- `HBOX_DATABASE_DRIVER=sqlite3` (or `postgres`)
- `HBOX_DATABASE_SQLITE_PATH=.data/homebox.db?_pragma=busy_timeout=1000&_pragma=journal_mode=WAL&_fk=1`
- PostgreSQL: `HBOX_DATABASE_*` vars for username/password/host/port/database
## Validation Checklist
Before PR:
- [ ] `task generate` after schema/API changes
- [ ] `task pr` passes (includes lint, test, typecheck)
- [ ] No build artifacts committed (check `.gitignore`)
- [ ] Code matches existing patterns
## Quick Reference
**Dev environment**: `task go:run` (terminal 1) + `task ui:dev` (terminal 2)
**API changes**: Edit handlers → add Swagger comments → `task generate` → `task go:build` → `task go:test`
**Schema changes**: Edit `ent/schema/*.go` → `task generate` → update repo methods → `task go:test`
**Specific tests**: `cd backend && go test ./path -v` or `cd frontend && pnpm run test:watch`
## Trust These Instructions
Instructions are validated and current. Only explore further if info is incomplete, incorrect, or you encounter undocumented errors. Use `task --list-all` for all commands.

View File

@@ -1,480 +0,0 @@
---
applyTo: 'frontend/**/*'
---
# Frontend Components & Pages Instructions (`/frontend/`)
## Overview
The frontend is a Nuxt 4 application with Vue 3 and TypeScript. It uses auto-imports for components and composables, file-based routing, and generated TypeScript types from the backend API.
## Directory Structure
```
frontend/
├── components/ # Vue components (auto-imported)
│ ├── Item/ # Item-related components
│ ├── Location/ # Location components
│ ├── Label/ # Label components
│ ├── Form/ # Form components
│ └── ui/ # Shadcn-vue UI components
├── pages/ # File-based routes (auto-routing)
│ ├── index.vue # Home page (/)
│ ├── items.vue # Items list (/items)
│ ├── item/
│ │ └── [id].vue # Item detail (/item/:id)
│ ├── locations.vue # Locations list (/locations)
│ └── profile.vue # User profile (/profile)
├── composables/ # Vue composables (auto-imported)
│ ├── use-api.ts # API client wrapper
│ ├── use-auth.ts # Authentication
│ └── use-user-api.ts # User API helpers
├── stores/ # Pinia state management
│ ├── auth.ts # Auth state
│ └── preferences.ts # User preferences
├── lib/
│ └── api/
│ └── types/ # Generated TypeScript types (DO NOT EDIT)
├── layouts/ # Layout components
│ └── default.vue # Default layout
├── locales/ # i18n translations
├── test/ # Tests (Vitest + Playwright)
└── nuxt.config.ts # Nuxt configuration
```
## Auto-Imports
### Components
Components in `components/` are **automatically imported** - no import statement needed:
```vue
<!-- components/Item/Card.vue -->
<template>
<div class="item-card">{{ item.name }}</div>
</template>
<!-- pages/items.vue - NO import needed -->
<template>
<ItemCard :item="item" />
</template>
```
**Naming convention:** Nested path becomes component name
- `components/Item/Card.vue``<ItemCard />`
- `components/Form/TextField.vue``<FormTextField />`
### Composables
Composables in `composables/` are **automatically imported**:
```ts
// composables/use-items.ts
export function useItems() {
const api = useUserApi()
async function getItems() {
const { data } = await api.items.getAll()
return data
}
return { getItems }
}
// pages/items.vue - NO import needed
const { getItems } = useItems()
const items = await getItems()
```
## File-Based Routing
Pages in `pages/` automatically become routes:
```
pages/index.vue → /
pages/items.vue → /items
pages/item/[id].vue → /item/:id
pages/locations.vue → /locations
pages/location/[id].vue → /location/:id
pages/profile.vue → /profile
```
### Dynamic Routes
Use square brackets for dynamic segments:
```vue
<!-- pages/item/[id].vue -->
<script setup lang="ts">
const route = useRoute()
const id = route.params.id
const { data: item } = await useUserApi().items.getOne(id)
</script>
<template>
<div>
<h1>{{ item.name }}</h1>
</div>
</template>
```
## API Integration
### Generated Types
API types are auto-generated from backend Swagger docs:
```ts
// lib/api/types/data-contracts.ts (GENERATED - DO NOT EDIT)
export interface ItemOut {
id: string
name: string
quantity: number
createdAt: Date | string
updatedAt: Date | string
}
export interface ItemCreate {
name: string
quantity?: number
locationId?: string
}
```
**Regenerate after backend API changes:**
```bash
task generate # Runs in backend, updates frontend/lib/api/types/
```
### Using the API Client
The `useUserApi()` composable provides typed API access:
```vue
<script setup lang="ts">
import type { ItemCreate, ItemOut } from '~/lib/api/types/data-contracts'
const api = useUserApi()
// GET all items
const { data: items } = await api.items.getAll({
q: 'search term',
page: 1,
pageSize: 20
})
// GET single item
const { data: item } = await api.items.getOne(itemId)
// POST create item
const newItem: ItemCreate = {
name: 'New Item',
quantity: 1
}
const { data: created } = await api.items.create(newItem)
// PUT update item
const { data: updated } = await api.items.update(itemId, {
quantity: 5
})
// DELETE item
await api.items.delete(itemId)
</script>
```
## Component Patterns
### Standard Vue 3 Composition API
```vue
<script setup lang="ts">
import { ref, computed } from 'vue'
import type { ItemOut } from '~/lib/api/types/data-contracts'
// Props
interface Props {
item: ItemOut
editable?: boolean
}
const props = defineProps<Props>()
// Emits
interface Emits {
(e: 'update', item: ItemOut): void
(e: 'delete', id: string): void
}
const emit = defineEmits<Emits>()
// State
const isEditing = ref(false)
const localItem = ref({ ...props.item })
// Computed
const displayName = computed(() => {
return props.item.name.toUpperCase()
})
// Methods
function handleSave() {
emit('update', localItem.value)
isEditing.value = false
}
</script>
<template>
<div class="item-card">
<h3>{{ displayName }}</h3>
<p v-if="!isEditing">Quantity: {{ item.quantity }}</p>
<input
v-if="isEditing"
v-model.number="localItem.quantity"
type="number"
/>
<button v-if="editable" @click="isEditing = !isEditing">
{{ isEditing ? 'Cancel' : 'Edit' }}
</button>
<button v-if="isEditing" @click="handleSave">Save</button>
</div>
</template>
<style scoped>
.item-card {
padding: 1rem;
border: 1px solid #ccc;
border-radius: 0.5rem;
}
</style>
```
### Using Pinia Stores
```vue
<script setup lang="ts">
import { useAuthStore } from '~/stores/auth'
const authStore = useAuthStore()
// Access state
const user = computed(() => authStore.user)
const isLoggedIn = computed(() => authStore.isLoggedIn)
// Call actions
async function logout() {
await authStore.logout()
navigateTo('/login')
}
</script>
```
### Form Handling
```vue
<script setup lang="ts">
import { useForm } from 'vee-validate'
import type { ItemCreate } from '~/lib/api/types/data-contracts'
const api = useUserApi()
const { values, errors, handleSubmit } = useForm<ItemCreate>({
initialValues: {
name: '',
quantity: 1
}
})
const onSubmit = handleSubmit(async (values) => {
try {
const { data } = await api.items.create(values)
navigateTo(`/item/${data.id}`)
} catch (error) {
console.error('Failed to create item:', error)
}
})
</script>
<template>
<form @submit.prevent="onSubmit">
<input v-model="values.name" type="text" placeholder="Item name" />
<span v-if="errors.name">{{ errors.name }}</span>
<input v-model.number="values.quantity" type="number" />
<span v-if="errors.quantity">{{ errors.quantity }}</span>
<button type="submit">Create Item</button>
</form>
</template>
```
## Styling
### Tailwind CSS
The project uses Tailwind CSS for styling:
```vue
<template>
<div class="flex items-center justify-between p-4 bg-white rounded-lg shadow-md">
<h3 class="text-lg font-semibold text-gray-900">{{ item.name }}</h3>
<span class="text-sm text-gray-500">Qty: {{ item.quantity }}</span>
</div>
</template>
```
### Shadcn-vue Components
UI components from `components/ui/` (Shadcn-vue):
```vue
<script setup lang="ts">
import { Button } from '@/components/ui/button'
import { Card, CardContent, CardHeader } from '@/components/ui/card'
</script>
<template>
<Card>
<CardHeader>
<h3>{{ item.name }}</h3>
</CardHeader>
<CardContent>
<p>{{ item.description }}</p>
<Button @click="handleEdit">Edit</Button>
</CardContent>
</Card>
</template>
```
## Testing
### Vitest (Unit/Integration)
Tests use Vitest with the backend API running:
```ts
// test/items.test.ts
import { describe, it, expect } from 'vitest'
import { useUserApi } from '~/composables/use-user-api'
describe('Items API', () => {
it('should create and fetch item', async () => {
const api = useUserApi()
// Create item
const { data: created } = await api.items.create({
name: 'Test Item',
quantity: 1
})
expect(created.name).toBe('Test Item')
// Fetch item
const { data: fetched } = await api.items.getOne(created.id)
expect(fetched.id).toBe(created.id)
})
})
```
**Run tests:**
```bash
task ui:watch # Watch mode
cd frontend && pnpm run test:ci # CI mode
```
### Playwright (E2E)
E2E tests in `test/`:
```ts
// test/e2e/items.spec.ts
import { test, expect } from '@playwright/test'
test('should create new item', async ({ page }) => {
await page.goto('/items')
await page.click('button:has-text("New Item")')
await page.fill('input[name="name"]', 'Test Item')
await page.fill('input[name="quantity"]', '5')
await page.click('button:has-text("Save")')
await expect(page.locator('text=Test Item')).toBeVisible()
})
```
**Run E2E tests:**
```bash
task test:e2e # Full E2E suite
```
## Adding a New Feature
### 1. Update Backend API
Make backend changes first (schema, service, handler):
```bash
# Edit backend files
task generate # Regenerates TypeScript types
```
### 2. Create Component
Create `components/MyFeature/Card.vue`:
```vue
<script setup lang="ts">
import type { MyFeatureOut } from '~/lib/api/types/data-contracts'
interface Props {
feature: MyFeatureOut
}
defineProps<Props>()
</script>
<template>
<div>{{ feature.name }}</div>
</template>
```
### 3. Create Page
Create `pages/my-feature/[id].vue`:
```vue
<script setup lang="ts">
const route = useRoute()
const api = useUserApi()
const { data: feature } = await api.myFeature.getOne(route.params.id)
</script>
<template>
<MyFeatureCard :feature="feature" />
</template>
```
### 4. Test
```bash
task ui:check # Type checking
task ui:fix # Linting
task ui:watch # Run tests
```
## Critical Rules
1. **Never edit generated types** - `lib/api/types/` is auto-generated, run `task generate` after backend changes
2. **No manual imports for components/composables** - auto-imported from `components/` and `composables/`
3. **Use TypeScript** - all `.vue` files use `<script setup lang="ts">`
4. **Follow file-based routing** - pages in `pages/` become routes automatically
5. **Use `useUserApi()` for API calls** - provides typed, authenticated API client
6. **Max 1 linting warning in CI** - run `task ui:fix` before committing
7. **Test with backend running** - integration tests need API server
## Common Issues
- **"Type not found"** → Run `task generate` to regenerate types from backend
- **Component not found** → Check naming (nested path = component name)
- **API call fails** → Ensure backend is running (`task go:run`)
- **Lint errors** → Run `task ui:fix` to auto-fix
- **Type errors** → Run `task ui:check` for detailed errors

Binary files not shown: 10 screenshot images removed (sizes 53, 29, 81, 87, 83, 95, 61, 108, 85, and 86 KiB).
View File

@@ -1,8 +0,0 @@
# Screenshots
These screenshots are taken from our public [Demo](https://demo.homebox.software) instance.
Whilst we make every effort to keep these maintained and up to date, they may be outdated or missing functionality, so we always advise reviewing our demo instances:
- [Demo](https://demo.homebox.software)
- [Nightly](https://nightly.homebox.software)
- [VNext](https://vnext.homebox.software/)

View File

@@ -1,213 +1,65 @@
#!/usr/bin/env python3
import csv
import io
import json
import logging
import os
import sys
from pathlib import Path
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
API_URL = 'https://restcountries.com/v3.1/all?fields=name,common,currencies'
# Default to a pinned commit for supply-chain security
DEFAULT_ISO_4217_URL = 'https://raw.githubusercontent.com/datasets/currency-codes/052b3088938ba32028a14e75040c286c5e142145/data/codes-all.csv'
ISO_4217_URL = os.environ.get('ISO_4217_URL', DEFAULT_ISO_4217_URL)
SAVE_PATH = Path('backend/internal/core/currencies/currencies.json')
TIMEOUT = 10 # seconds
# Known currency decimal overrides
CURRENCY_DECIMAL_OVERRIDES = {
"BTC": 8, # Bitcoin uses 8 decimal places
"JPY": 0, # Japanese Yen has no decimal places
"BHD": 3, # Bahraini Dinar uses 3 decimal places
}
DEFAULT_DECIMALS = 2
MIN_DECIMALS = 0
MAX_DECIMALS = 6
def setup_logging():
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s %(levelname)s: %(message)s'
)
def get_currency_decimals(code, iso_data):
"""
Get the decimal places for a currency code.
Checks overrides first, then ISO data, then uses default.
Clamps result to safe range [MIN_DECIMALS, MAX_DECIMALS].
"""
# Normalize the input code
normalized_code = (code or "").strip().upper()
# First check overrides
if normalized_code in CURRENCY_DECIMAL_OVERRIDES:
decimals = CURRENCY_DECIMAL_OVERRIDES[normalized_code]
# Then check ISO data
elif normalized_code in iso_data:
decimals = iso_data[normalized_code]
# Finally use default
else:
decimals = DEFAULT_DECIMALS
# Ensure it's an integer and clamp to safe range
try:
decimals = int(decimals)
except (ValueError, TypeError):
decimals = DEFAULT_DECIMALS
return max(MIN_DECIMALS, min(MAX_DECIMALS, decimals))
def fetch_iso_4217_data():
"""
Fetch ISO 4217 currency data to get minor units (decimal places).
Returns a dict mapping currency code to minor units.
"""
# Log the resolved URL for transparency
logging.info("Fetching ISO 4217 data from: %s", ISO_4217_URL)
if not ISO_4217_URL.lower().startswith("https://"):
logging.error("Refusing non-HTTPS ISO_4217_URL: %s", ISO_4217_URL)
return {}
session = requests.Session()
retries = Retry(
total=3,
backoff_factor=1,
status_forcelist=[429, 500, 502, 503, 504],
allowed_methods=frozenset(['GET'])
)
session.mount('https://', HTTPAdapter(max_retries=retries))
try:
# Add Accept header for CSV content
headers = {'Accept': 'text/csv'}
resp = session.get(ISO_4217_URL, timeout=TIMEOUT, headers=headers)
resp.raise_for_status()
except requests.exceptions.RequestException as e:
logging.error("Failed to fetch ISO 4217 data: %s", e)
return {}
# Parse CSV data
iso_data = {}
try:
# Decode with utf-8-sig to strip BOM if present
csv_content = resp.content.decode('utf-8-sig')
csv_reader = csv.DictReader(io.StringIO(csv_content))
for row in csv_reader:
code = row.get('AlphabeticCode', '').strip()
minor_unit = row.get('MinorUnit', '').strip()
if code and minor_unit != 'N.A.':
try:
# Convert minor unit to int (decimal places)
iso_data[code] = int(minor_unit) if minor_unit.isdigit() else 2
except (ValueError, TypeError):
iso_data[code] = 2 # Default to 2 if parsing fails
logging.info("Successfully loaded decimal data for %d currencies from ISO 4217", len(iso_data))
return iso_data
except Exception as e:
logging.error("Failed to parse ISO 4217 CSV data: %s", e)
return {}
import json
import os
def fetch_currencies():
# First, fetch ISO 4217 data for decimal places
iso_data = fetch_iso_4217_data()
session = requests.Session()
retries = Retry(
total=3,
backoff_factor=1,
status_forcelist=[429, 500, 502, 503, 504],
allowed_methods=frozenset(['GET'])
)
session.mount('https://', HTTPAdapter(max_retries=retries))
try:
resp = session.get(API_URL, timeout=TIMEOUT)
resp.raise_for_status()
response = requests.get('https://restcountries.com/v3.1/all')
response.raise_for_status()
except requests.exceptions.Timeout:
print("Request to the API timed out.")
return []
except requests.exceptions.RequestException as e:
logging.error("API request failed: %s", e)
return None # signal fatal
print(f"An error occurred while making the request: {e}")
return []
try:
countries = resp.json()
except ValueError as e:
logging.error("Failed to parse JSON response: %s", e)
return None # signal fatal
countries = response.json()
except json.JSONDecodeError:
print("Failed to decode JSON from the response.")
return []
results = []
currencies_list = []
for country in countries:
country_name = country.get('name', {}).get('common') or "Unknown"
for code, info in country.get('currencies', {}).items():
# Get decimal places using the helper function
decimals = get_currency_decimals(code, iso_data)
results.append({
'code': code,
'local': country_name,
'symbol': info.get('symbol', ''),
'name': info.get('name', ''),
'decimals': decimals
country_name = country.get('name', {}).get('common')
country_currencies = country.get('currencies', {})
for currency_code, currency_info in country_currencies.items():
symbol = currency_info.get('symbol', '')
currencies_list.append({
'code': currency_code,
'local': country_name,
'symbol': symbol,
'name': currency_info.get('name')
})
# sort by country name for consistency
return sorted(results, key=lambda x: x['local'].lower())
return currencies_list
def load_existing(path: Path):
if not path.exists():
return []
def save_currencies(currencies, file_path):
try:
with path.open('r', encoding='utf-8') as f:
os.makedirs(os.path.dirname(file_path), exist_ok=True)
with open(file_path, 'w', encoding='utf-8') as f:
json.dump(currencies, f, ensure_ascii=False, indent=4)
except IOError as e:
print(f"An error occurred while writing to the file: {e}")
def load_existing_currencies(file_path):
try:
with open(file_path, 'r', encoding='utf-8') as f:
return json.load(f)
except (IOError, ValueError) as e:
logging.warning("Could not load existing file (%s): %s", path, e)
return []
def save_currencies(data, path: Path):
path.parent.mkdir(parents=True, exist_ok=True)
with path.open('w', encoding='utf-8') as f:
json.dump(data, f, ensure_ascii=False, indent=4)
logging.info("Wrote %d entries to %s", len(data), path)
except (IOError, json.JSONDecodeError):
return [] # Return an empty list if file doesn't exist or is invalid
def main():
setup_logging()
logging.info("🔄 Starting currency update")
existing = load_existing(SAVE_PATH)
new = fetch_currencies()
# Fatal API / parse error → exit non-zero so the workflow will fail
if new is None:
logging.error("Aborting: failed to fetch or parse API data.")
sys.exit(1)
# Empty array → log & exit zero (no file change)
if not new:
logging.warning("API returned empty list; skipping write.")
sys.exit(0)
# Identical to existing → nothing to do
if new == existing:
logging.info("Up-to-date; skipping write.")
sys.exit(0)
# Otherwise actually overwrite
save_currencies(new, SAVE_PATH)
logging.info("✅ Currency file updated.")
save_path = 'backend/internal/core/currencies/currencies.json'
existing_currencies = load_existing_currencies(save_path)
new_currencies = fetch_currencies()
if new_currencies == existing_currencies:
print("Currencies up-to-date with API, skipping commit.")
else:
save_currencies(new_currencies, save_path)
print("Currencies updated and saved.")
if __name__ == "__main__":
main()
main()

View File

@@ -1,349 +0,0 @@
#!/usr/bin/env python3
"""
Script to automatically update language names in the English translation file.
Queries Weblate for translation completion and language names.
Only adds languages with >=80% completion to en.json.
"""
import json
import logging
import sys
from pathlib import Path
from typing import Dict, List, Optional
import requests
from babel import Locale, UnknownLocaleError
LOCALES_DIR = Path('frontend/locales')
EN_JSON_PATH = LOCALES_DIR / 'en.json'
WEBLATE_API_URL = 'https://translate.sysadminsmedia.com/api'
WEBLATE_PROJECT = 'homebox'
WEBLATE_COMPONENT = 'frontend'
COMPLETION_THRESHOLD = 80.0 # Minimum completion percentage to include language
TIMEOUT = 10 # seconds
def setup_logging():
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s %(levelname)s: %(message)s'
)
def get_locale_files() -> List[str]:
"""Get all locale codes from JSON files in the locales directory."""
if not LOCALES_DIR.exists():
logging.error("Locales directory not found: %s", LOCALES_DIR)
return []
locale_codes = []
for file in sorted(LOCALES_DIR.glob('*.json')):
# Extract locale code from filename (e.g., "en.json" -> "en")
locale_code = file.stem
# Validate locale code format - should not contain dots
if '.' not in locale_code:
locale_codes.append(locale_code)
else:
logging.warning("Skipping invalid locale code: %s", locale_code)
logging.info("Found %d locale files", len(locale_codes))
return sorted(locale_codes)
def fetch_weblate_translations() -> Optional[Dict[str, Dict]]:
"""
Fetch translation statistics from Weblate API.
Returns:
Dict mapping locale code to translation data (percent, name, native_name)
or None if API is unavailable
"""
url = f"{WEBLATE_API_URL}/components/{WEBLATE_PROJECT}/{WEBLATE_COMPONENT}/translations/"
try:
# Weblate API may require pagination
translations = {}
page_url = url
while page_url:
logging.info("Fetching translations from Weblate: %s", page_url)
resp = requests.get(page_url, timeout=TIMEOUT)
if resp.status_code != 200:
logging.warning("Weblate API returned status %d", resp.status_code)
return None
data = resp.json()
for trans in data.get('results', []):
# Weblate uses underscores, we use hyphens
locale_code = trans.get('language_code', '').replace('_', '-')
percent = trans.get('translated_percent', 0.0)
lang_info = trans.get('language', {})
english_name = lang_info.get('name', '')
native_name = lang_info.get('native', '')
translations[locale_code] = {
'percent': percent,
'english_name': english_name,
'native_name': native_name
}
# Check for next page
page_url = data.get('next')
logging.info("Fetched %d translations from Weblate", len(translations))
return translations
except requests.exceptions.RequestException as e:
logging.warning("Failed to fetch from Weblate API: %s", e)
return None
except Exception as e:
logging.error("Unexpected error fetching Weblate data: %s", e)
return None
def get_language_name_from_babel(locale_code: str) -> Optional[str]:
"""
Get the language name using Babel in format "English (Native)".
Special handling for variants that need disambiguation (Portuguese, Chinese).
Args:
locale_code: Language/locale code (e.g., 'en', 'pt-BR', 'zh-CN')
Returns:
Language name in format "English (Native)" or None if cannot parse
"""
try:
# Special handling for ar-AA (non-standard code, use standard 'ar')
if locale_code == 'ar-AA':
locale = Locale.parse('ar')
else:
# Parse locale code using Babel
locale = Locale.parse(locale_code.replace('-', '_'))
# Get English display name
english_name = locale.get_display_name('en')
# Get native display name
native_name = locale.get_display_name(locale)
if not english_name:
return None
# Special handling for Portuguese variants (distinguish Brazil vs Portugal)
if locale_code == 'pt-BR':
native_base = native_name.split('(')[0].strip() if '(' in native_name else native_name
return f"Portuguese — Brazil ({native_base})"
elif locale_code == 'pt-PT':
native_base = native_name.split('(')[0].strip() if '(' in native_name else native_name
return f"Portuguese — Portugal ({native_base})"
# Special handling for Chinese variants (distinguish Simplified/Traditional and regions)
if locale_code == 'zh-CN':
native_base = native_name.split('(')[0].strip() if '(' in native_name else native_name
return f"Chinese — Simplified ({native_base})"
elif locale_code == 'zh-TW':
native_base = native_name.split('(')[0].strip() if '(' in native_name else native_name
return f"Chinese — Traditional ({native_base})"
elif locale_code == 'zh-HK':
native_base = native_name.split('(')[0].strip() if '(' in native_name else native_name
return f"Chinese — Hong Kong ({native_base})"
elif locale_code == 'zh-MO':
native_base = native_name.split('(')[0].strip() if '(' in native_name else native_name
return f"Chinese — Macau ({native_base})"
# Format: "English (Native)" if native name differs and is available
if native_name and native_name != english_name:
# Clean up nested parentheses for complex locales
if '(' in english_name and '(' in native_name:
# For cases like "Japanese (Japan) (日本語 (日本))"
# Simplify to "Japanese (日本語)"
english_base = english_name.split('(')[0].strip()
native_base = native_name.split('(')[0].strip()
return f"{english_base} ({native_base})"
else:
return f"{english_name} ({native_name})"
else:
return english_name
except (UnknownLocaleError, ValueError) as e:
logging.debug("Could not parse locale '%s' with Babel: %s", locale_code, e)
return None
def get_language_name(locale_code: str, weblate_data: Optional[Dict] = None) -> Optional[str]:
"""
Get the display name for a locale code.
Priority: Weblate API > Babel > None
Args:
locale_code: Language/locale code (e.g., 'en', 'pt-BR', 'zh-CN')
weblate_data: Translation data from Weblate (if available)
Returns:
Language name in format "English (Native)" or None if invalid
"""
# Validate locale code format
if '.' in locale_code or locale_code.startswith('languages.'):
logging.error("Invalid locale code format: %s", locale_code)
return None
# Try Weblate first
if weblate_data and locale_code in weblate_data:
english_name = weblate_data[locale_code].get('english_name', '')
native_name = weblate_data[locale_code].get('native_name', '')
if english_name:
# Format: "English (Native)" if both names available and different
if native_name and native_name != english_name:
return f"{english_name} ({native_name})"
else:
return english_name
# Fallback to Babel
babel_name = get_language_name_from_babel(locale_code)
if babel_name:
return babel_name
# If all else fails, return None (don't guess)
logging.warning("Could not determine language name for: %s", locale_code)
return None
def load_en_json() -> dict:
"""Load the English translation JSON file."""
if not EN_JSON_PATH.exists():
logging.error("English translation file not found: %s", EN_JSON_PATH)
return {}
try:
with EN_JSON_PATH.open('r', encoding='utf-8') as f:
return json.load(f)
except (IOError, json.JSONDecodeError) as e:
logging.error("Failed to load %s: %s", EN_JSON_PATH, e)
return {}
def save_en_json(data: dict):
"""Save the English translation JSON file."""
try:
with EN_JSON_PATH.open('w', encoding='utf-8') as f:
# Use 4-space indentation to match existing file format
json.dump(data, f, ensure_ascii=False, indent=4)
# Add newline at end of file
f.write('\n')
logging.info("Saved updated en.json")
except IOError as e:
logging.error("Failed to save %s: %s", EN_JSON_PATH, e)
sys.exit(1)
def update_language_names(en_data: dict, locale_codes: List[str], weblate_data: Optional[Dict] = None) -> bool:
"""
Update the languages section in en.json.
- Add new languages with >=80% completion (from Weblate) or that exist as locale files
- Never remove existing entries (even if completion drops below 80%)
Args:
en_data: The parsed en.json data
locale_codes: List of all locale codes from files
weblate_data: Translation data from Weblate (if available)
Returns:
True if changes were made, False otherwise
"""
# Ensure languages section exists
if 'languages' not in en_data:
en_data['languages'] = {}
logging.info("Created 'languages' section in en.json")
languages = en_data['languages']
original_languages = languages.copy()
# Process each locale file
added_count = 0
skipped_count = 0
for locale_code in locale_codes:
# Skip if already in languages (never remove existing entries)
if locale_code in languages:
continue
# Check Weblate completion threshold if data available
if weblate_data and locale_code in weblate_data:
percent = weblate_data[locale_code].get('percent', 0.0)
if percent < COMPLETION_THRESHOLD:
logging.info("Skipping %s: %.1f%% completion (threshold: %.1f%%)",
locale_code, percent, COMPLETION_THRESHOLD)
skipped_count += 1
continue
else:
logging.info("Including %s: %.1f%% completion", locale_code, percent)
else:
# If Weblate data not available, include locale file but log warning
logging.info("Including %s: Weblate data not available, locale file exists", locale_code)
# Get language name
language_name = get_language_name(locale_code, weblate_data)
if language_name:
languages[locale_code] = language_name
logging.info("Added language: %s = %s", locale_code, language_name)
added_count += 1
else:
logging.warning("Skipping %s: could not determine language name", locale_code)
skipped_count += 1
# Sort languages alphabetically by key
en_data['languages'] = dict(sorted(languages.items()))
# Check if anything changed
changed = (original_languages != en_data['languages'])
if changed:
logging.info("Updated %d language names, skipped %d", added_count, skipped_count)
else:
logging.info("All languages already present, no changes needed")
return changed
def main():
setup_logging()
logging.info("🔄 Starting language names update")
# Get all locale files
locale_codes = get_locale_files()
if not locale_codes:
logging.error("No locale files found")
sys.exit(1)
# Load English translation file
en_data = load_en_json()
if not en_data:
logging.error("Failed to load English translation file")
sys.exit(1)
# Fetch Weblate translation statistics
weblate_data = fetch_weblate_translations()
if weblate_data:
logging.info("Successfully fetched Weblate data for %d languages", len(weblate_data))
else:
logging.warning("Weblate data not available, proceeding with locale files only")
# Update language names
changed = update_language_names(en_data, locale_codes, weblate_data)
if changed:
save_en_json(en_data)
logging.info("✅ Language names updated successfully")
else:
logging.info("✅ No updates needed, en.json is already up-to-date")
sys.exit(0)
if __name__ == "__main__":
main()

View File

@@ -1,259 +0,0 @@
# HomeBox Upgrade Testing Workflow
This document describes the automated upgrade testing workflow for HomeBox.
## Overview
The upgrade test workflow is designed to ensure data integrity and functionality when upgrading HomeBox from one version to another. It automatically:
1. Deploys a stable version of HomeBox
2. Creates test data (users, items, locations, labels, notifiers, attachments)
3. Upgrades to the latest version from the main branch
4. Verifies all data and functionality remain intact
## Workflow File
**Location**: `.github/workflows/upgrade-test.yaml`
## Trigger Conditions
The workflow runs:
- **Daily**: Automatically at 2 AM UTC (via cron schedule)
- **Manual**: Can be triggered manually via GitHub Actions UI
- **On Push**: When changes are made to the workflow files or test scripts
## Test Scenarios
### 1. Environment Setup
- Pulls the latest stable HomeBox Docker image from GHCR
- Starts the application with test configuration
- Ensures the service is healthy and ready
### 2. Data Creation
The workflow creates comprehensive test data using the `create-test-data.sh` script:
#### Users and Groups
- **Group 1**: 5 users (user1@homebox.test through user5@homebox.test)
- **Group 2**: 2 users (user6@homebox.test and user7@homebox.test)
- All users have password: `TestPassword123!`
#### Locations
- **Group 1**: Living Room, Garage
- **Group 2**: Home Office
#### Labels
- **Group 1**: Electronics, Important
- **Group 2**: Work Equipment
#### Items
- **Group 1**: 5 items (Laptop Computer, Power Drill, TV Remote, Tool Box, Coffee Maker)
- **Group 2**: 2 items (Monitor, Keyboard)
#### Attachments
- Multiple attachments added to various items (receipts, manuals, warranties)
#### Notifiers
- **Group 1**: Test notifier named "TESTING"
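All of this data is created through the public REST API, so nothing here depends on the shell script itself. As a rough TypeScript sketch of what one of these calls looks like (mirroring the login and item-creation `curl` calls in `create-test-data.sh`, documented under *Scripts* below — the endpoint paths and payload fields come from that script, while the helper functions and error handling are illustrative only):
```typescript
// Sketch only – mirrors two curl calls from create-test-data.sh using Node 18+'s built-in fetch.
const API_URL = `${process.env.HOMEBOX_URL ?? "http://localhost:7745"}/api/v1`;

// POST /users/login with {username, password}, returns a bearer token (as in login_user()).
async function login(email: string, password: string): Promise<string> {
  const res = await fetch(`${API_URL}/users/login`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ username: email, password }),
  });
  if (!res.ok) throw new Error(`login failed: ${res.status}`);
  const { token } = (await res.json()) as { token: string };
  return token;
}

// POST /items with {name, description, locationId?} (as in create_item()).
async function createItem(token: string, name: string, description: string, locationId?: string) {
  const res = await fetch(`${API_URL}/items`, {
    method: "POST",
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
    body: JSON.stringify({ name, description, ...(locationId ? { locationId } : {}) }),
  });
  if (!res.ok) throw new Error(`item creation failed: ${res.status}`);
  return res.json();
}

async function main() {
  const token = await login("user1@homebox.test", "TestPassword123!");
  await createItem(token, "Laptop Computer", "Dell XPS 15 for work");
}

main().catch(err => { console.error(err); process.exit(1); });
```
The workflow itself sticks with `curl` and `jq`, which keeps the setup job dependency-light.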
### 3. Upgrade Process
1. Stops the stable version container
2. Builds a fresh image from the current main branch
3. Copies the database to a new location
4. Starts the new version with the existing data
### 4. Verification Tests
The Playwright test suite (`upgrade-verification.spec.ts`) verifies:
- **User Authentication**: All 7 users can log in with their credentials
- **Data Persistence**: All items, locations, and labels are present
- **Attachments**: File attachments are correctly associated with items
- **Notifiers**: The "TESTING" notifier is still configured
- **UI Functionality**: Version display and theme switching work correctly
- **Data Isolation**: Groups can only see their own data
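As a rough illustration, one of these checks might look like the following. This is only a sketch: it assumes the test-data JSON file described in the next section and standard Playwright APIs, and the locators (`getByLabel("Email")`, the post-login URL) are placeholders rather than the exact ones used in `upgrade-verification.spec.ts`.
```typescript
import { readFileSync } from "node:fs";
import { test, expect } from "@playwright/test";

// Placeholder sketch – the real locators live in test/upgrade/upgrade-verification.spec.ts.
const dataFile = process.env.TEST_DATA_FILE ?? "/tmp/test-users.json";
const { users } = JSON.parse(readFileSync(dataFile, "utf-8")) as {
  users: { email: string; password: string }[];
};

for (const user of users) {
  test(`${user.email} can still log in after the upgrade`, async ({ page }) => {
    await page.goto("/"); // relies on E2E_BASE_URL being configured as the Playwright baseURL
    await page.getByLabel("Email").fill(user.email);        // assumed field label
    await page.getByLabel("Password").fill(user.password);  // assumed field label
    await page.getByRole("button", { name: /log ?in/i }).click();
    await expect(page).toHaveURL(/home/);                   // assumed post-login route
  });
}
```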
## Test Data File
The setup script generates a JSON file at `/tmp/test-users.json` containing:
```json
{
"users": [
{
"email": "user1@homebox.test",
"password": "TestPassword123!",
"token": "...",
"group": "1"
},
...
],
"locations": {
"group1": ["location-id-1", "location-id-2"],
"group2": ["location-id-3"]
},
"labels": {...},
"items": {...},
"notifiers": {...}
}
```
This file is used by the Playwright tests to verify data integrity.
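A small typed loader is enough for the tests to consume this file. The sketch below models the elided sections (`labels`, `items`, `notifiers`) after the same `group1`/`group2` pattern that `create-test-data.sh` writes for `locations`; the helper name is illustrative.
```typescript
import { readFileSync } from "node:fs";

// Shape of /tmp/test-users.json as written by create-test-data.sh. The sections elided
// above ("labels", "items", "notifiers") are assumed to follow the same group1/group2
// pattern the script uses for "locations".
interface UpgradeTestData {
  users: { email: string; password: string; token: string; group: string }[];
  locations: { group1: string[]; group2: string[] };
  labels: { group1: string[]; group2: string[] };
  items: { group1: string[]; group2: string[] };
  notifiers: { group1: string[] };
}

// Illustrative helper name; the real spec file may structure this differently.
export function loadTestData(
  path = process.env.TEST_DATA_FILE ?? "/tmp/test-users.json"
): UpgradeTestData {
  return JSON.parse(readFileSync(path, "utf-8")) as UpgradeTestData;
}

// Example: the five group-1 users that must still be able to log in.
// const group1Users = loadTestData().users.filter(u => u.group === "1");
```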
## Scripts
### create-test-data.sh
**Location**: `.github/scripts/upgrade-test/create-test-data.sh`
**Purpose**: Creates all test data via the HomeBox REST API
**Environment Variables**:
- `HOMEBOX_URL`: Base URL of the HomeBox instance (default: http://localhost:7745)
- `TEST_DATA_FILE`: Path to output JSON file (default: /tmp/test-users.json)
**Requirements**:
- `curl`: For API calls
- `jq`: For JSON processing
**Usage**:
```bash
export HOMEBOX_URL=http://localhost:7745
./.github/scripts/upgrade-test/create-test-data.sh
```
## Running Tests Locally
To run the upgrade tests locally:
### Prerequisites
```bash
# Install dependencies
sudo apt-get install -y jq curl docker.io
# Install pnpm and Playwright
cd frontend
pnpm install
pnpm exec playwright install --with-deps chromium
```
### Run the test
```bash
# Start stable version
docker run -d \
--name homebox-test \
-p 7745:7745 \
-e HBOX_OPTIONS_ALLOW_REGISTRATION=true \
-v /tmp/homebox-data:/data \
ghcr.io/sysadminsmedia/homebox:latest
# Wait for startup
sleep 10
# Create test data
export HOMEBOX_URL=http://localhost:7745
./.github/scripts/upgrade-test/create-test-data.sh
# Stop container
docker stop homebox-test
docker rm homebox-test
# Build new version
docker build -t homebox:test .
# Start new version with existing data
docker run -d \
--name homebox-test \
-p 7745:7745 \
-e HBOX_OPTIONS_ALLOW_REGISTRATION=true \
-v /tmp/homebox-data:/data \
homebox:test
# Wait for startup
sleep 10
# Run verification tests
cd frontend
TEST_DATA_FILE=/tmp/test-users.json \
E2E_BASE_URL=http://localhost:7745 \
pnpm exec playwright test \
--project=chromium \
test/upgrade/upgrade-verification.spec.ts
# Cleanup
docker stop homebox-test
docker rm homebox-test
```
## Artifacts
The workflow produces several artifacts:
1. **playwright-report-upgrade-test**: HTML report of test results
2. **playwright-traces**: Detailed traces for debugging failures
3. **Docker logs**: Collected on failure for troubleshooting
## Failure Scenarios
The workflow will fail if:
- The stable version fails to start
- Test data creation fails
- The new version fails to start with existing data
- Any verification test fails
- Database migrations fail
## Troubleshooting
### Test Data Creation Fails
Check the Docker logs:
```bash
docker logs homebox-old
```
Verify the API is accessible:
```bash
curl http://localhost:7745/api/v1/status
```
### Verification Tests Fail
1. Download the Playwright report from GitHub Actions artifacts
2. Review the HTML report for detailed failure information
3. Check traces for visual debugging
### Database Issues
If migrations fail:
```bash
# Check database file
ls -lh /tmp/homebox-data-new/homebox.db
# Check Docker logs for migration errors
docker logs homebox-new
```
## Future Enhancements
Potential improvements:
- [ ] Test multiple upgrade paths (e.g., v0.10 → v0.11 → v0.12)
- [ ] Test with PostgreSQL backend in addition to SQLite
- [ ] Add performance benchmarks
- [ ] Test with larger datasets
- [ ] Add API-level verification in addition to UI tests
- [ ] Test backup and restore functionality
## Related Files
- `.github/workflows/upgrade-test.yaml` - Main workflow definition
- `.github/scripts/upgrade-test/create-test-data.sh` - Data generation script
- `frontend/test/upgrade/upgrade-verification.spec.ts` - Playwright verification tests
- `.github/workflows/e2e-partial.yaml` - Standard E2E test workflow (for reference)
## Support
For issues or questions about this workflow:
1. Check the GitHub Actions run logs
2. Review this documentation
3. Open an issue in the repository

View File

@@ -1,413 +0,0 @@
#!/bin/bash
# Script to create test data in HomeBox for upgrade testing
# This script creates users, items, attachments, notifiers, locations, and labels
set -e
HOMEBOX_URL="${HOMEBOX_URL:-http://localhost:7745}"
API_URL="${HOMEBOX_URL}/api/v1"
TEST_DATA_FILE="${TEST_DATA_FILE:-/tmp/test-users.json}"
echo "Creating test data in HomeBox at $HOMEBOX_URL"
# Function to make API calls with error handling
api_call() {
local method=$1
local endpoint=$2
local data=$3
local token=$4
if [ -n "$token" ]; then
if [ -n "$data" ]; then
curl -s -X "$method" \
-H "Authorization: Bearer $token" \
-H "Content-Type: application/json" \
-d "$data" \
"$API_URL$endpoint"
else
curl -s -X "$method" \
-H "Authorization: Bearer $token" \
-H "Content-Type: application/json" \
"$API_URL$endpoint"
fi
else
if [ -n "$data" ]; then
curl -s -X "$method" \
-H "Content-Type: application/json" \
-d "$data" \
"$API_URL$endpoint"
else
curl -s -X "$method" \
-H "Content-Type: application/json" \
"$API_URL$endpoint"
fi
fi
}
# Function to register a user and get token
register_user() {
local email=$1
local name=$2
local password=$3
local group_token=$4
echo "Registering user: $email"
local payload="{\"email\":\"$email\",\"name\":\"$name\",\"password\":\"$password\""
if [ -n "$group_token" ]; then
payload="$payload,\"groupToken\":\"$group_token\""
fi
payload="$payload}"
local response=$(curl -s -X POST \
-H "Content-Type: application/json" \
-d "$payload" \
"$API_URL/users/register")
echo "$response"
}
# Function to login and get token
login_user() {
local email=$1
local password=$2
echo "Logging in user: $email" >&2
local response=$(curl -s -X POST \
-H "Content-Type: application/json" \
-d "{\"username\":\"$email\",\"password\":\"$password\"}" \
"$API_URL/users/login")
echo "$response" | jq -r '.token // empty'
}
# Function to create an item
create_item() {
local token=$1
local name=$2
local description=$3
local location_id=$4
echo "Creating item: $name" >&2
local payload="{\"name\":\"$name\",\"description\":\"$description\""
if [ -n "$location_id" ]; then
payload="$payload,\"locationId\":\"$location_id\""
fi
payload="$payload}"
local response=$(curl -s -X POST \
-H "Authorization: Bearer $token" \
-H "Content-Type: application/json" \
-d "$payload" \
"$API_URL/items")
echo "$response"
}
# Function to create a location
create_location() {
local token=$1
local name=$2
local description=$3
echo "Creating location: $name" >&2
local response=$(curl -s -X POST \
-H "Authorization: Bearer $token" \
-H "Content-Type: application/json" \
-d "{\"name\":\"$name\",\"description\":\"$description\"}" \
"$API_URL/locations")
echo "$response"
}
# Function to create a label
create_label() {
local token=$1
local name=$2
local description=$3
echo "Creating label: $name" >&2
local response=$(curl -s -X POST \
-H "Authorization: Bearer $token" \
-H "Content-Type: application/json" \
-d "{\"name\":\"$name\",\"description\":\"$description\"}" \
"$API_URL/labels")
echo "$response"
}
# Function to create a notifier
create_notifier() {
local token=$1
local name=$2
local url=$3
echo "Creating notifier: $name" >&2
local response=$(curl -s -X POST \
-H "Authorization: Bearer $token" \
-H "Content-Type: application/json" \
-d "{\"name\":\"$name\",\"url\":\"$url\",\"isActive\":true}" \
"$API_URL/groups/notifiers")
echo "$response"
}
# Function to attach a file to an item (creates a dummy attachment)
attach_file_to_item() {
local token=$1
local item_id=$2
local filename=$3
echo "Creating attachment for item: $item_id" >&2
# Create a temporary file with some content
local temp_file=$(mktemp)
echo "This is a test attachment for $filename" > "$temp_file"
local response=$(curl -s -X POST \
-H "Authorization: Bearer $token" \
-F "file=@$temp_file" \
-F "type=attachment" \
-F "name=$filename" \
"$API_URL/items/$item_id/attachments")
rm -f "$temp_file"
echo "$response"
}
# Initialize test data storage
echo "{\"users\":[]}" > "$TEST_DATA_FILE"
echo "=== Step 1: Create first group with 5 users ==="
# Register first user (creates a new group)
user1_response=$(register_user "user1@homebox.test" "User One" "TestPassword123!")
user1_token=$(echo "$user1_response" | jq -r '.token // empty')
group_token=$(echo "$user1_response" | jq -r '.group.inviteToken // empty')
if [ -z "$user1_token" ]; then
echo "Failed to register first user"
echo "Response: $user1_response"
exit 1
fi
echo "First user registered with token. Group token: $group_token"
# Store user1 data
jq --arg email "user1@homebox.test" \
--arg password "TestPassword123!" \
--arg token "$user1_token" \
--arg group "1" \
'.users += [{"email":$email,"password":$password,"token":$token,"group":$group}]' \
"$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"
# Register 4 more users in the same group
for i in {2..5}; do
echo "Registering user$i in group 1..."
user_response=$(register_user "user${i}@homebox.test" "User $i" "TestPassword123!" "$group_token")
user_token=$(echo "$user_response" | jq -r '.token // empty')
if [ -z "$user_token" ]; then
echo "Failed to register user$i"
echo "Response: $user_response"
else
echo "user$i registered successfully"
# Store user data
jq --arg email "user${i}@homebox.test" \
--arg password "TestPassword123!" \
--arg token "$user_token" \
--arg group "1" \
'.users += [{"email":$email,"password":$password,"token":$token,"group":$group}]' \
"$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"
fi
done
echo "=== Step 2: Create second group with 2 users ==="
# Register first user of second group
user6_response=$(register_user "user6@homebox.test" "User Six" "TestPassword123!")
user6_token=$(echo "$user6_response" | jq -r '.token // empty')
group2_token=$(echo "$user6_response" | jq -r '.group.inviteToken // empty')
if [ -z "$user6_token" ]; then
echo "Failed to register user6"
echo "Response: $user6_response"
exit 1
fi
echo "user6 registered with token. Group 2 token: $group2_token"
# Store user6 data
jq --arg email "user6@homebox.test" \
--arg password "TestPassword123!" \
--arg token "$user6_token" \
--arg group "2" \
'.users += [{"email":$email,"password":$password,"token":$token,"group":$group}]' \
"$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"
# Register second user in group 2
user7_response=$(register_user "user7@homebox.test" "User Seven" "TestPassword123!" "$group2_token")
user7_token=$(echo "$user7_response" | jq -r '.token // empty')
if [ -z "$user7_token" ]; then
echo "Failed to register user7"
echo "Response: $user7_response"
else
echo "user7 registered successfully"
# Store user7 data
jq --arg email "user7@homebox.test" \
--arg password "TestPassword123!" \
--arg token "$user7_token" \
--arg group "2" \
'.users += [{"email":$email,"password":$password,"token":$token,"group":$group}]' \
"$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"
fi
echo "=== Step 3: Create locations for each group ==="
# Create locations for group 1 (using user1's token)
location1=$(create_location "$user1_token" "Living Room" "Main living area")
location1_id=$(echo "$location1" | jq -r '.id // empty')
echo "Created location: Living Room (ID: $location1_id)"
location2=$(create_location "$user1_token" "Garage" "Storage and tools")
location2_id=$(echo "$location2" | jq -r '.id // empty')
echo "Created location: Garage (ID: $location2_id)"
# Create location for group 2 (using user6's token)
location3=$(create_location "$user6_token" "Home Office" "Work from home space")
location3_id=$(echo "$location3" | jq -r '.id // empty')
echo "Created location: Home Office (ID: $location3_id)"
# Store locations
jq --arg loc1 "$location1_id" \
--arg loc2 "$location2_id" \
--arg loc3 "$location3_id" \
'.locations = {"group1":[$loc1,$loc2],"group2":[$loc3]}' \
"$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"
echo "=== Step 4: Create labels for each group ==="
# Create labels for group 1
label1=$(create_label "$user1_token" "Electronics" "Electronic devices")
label1_id=$(echo "$label1" | jq -r '.id // empty')
echo "Created label: Electronics (ID: $label1_id)"
label2=$(create_label "$user1_token" "Important" "High priority items")
label2_id=$(echo "$label2" | jq -r '.id // empty')
echo "Created label: Important (ID: $label2_id)"
# Create label for group 2
label3=$(create_label "$user6_token" "Work Equipment" "Items for work")
label3_id=$(echo "$label3" | jq -r '.id // empty')
echo "Created label: Work Equipment (ID: $label3_id)"
# Store labels
jq --arg lab1 "$label1_id" \
--arg lab2 "$label2_id" \
--arg lab3 "$label3_id" \
'.labels = {"group1":[$lab1,$lab2],"group2":[$lab3]}' \
"$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"
echo "=== Step 5: Create test notifier ==="
# Create notifier for group 1
notifier1=$(create_notifier "$user1_token" "TESTING" "https://example.com/webhook")
notifier1_id=$(echo "$notifier1" | jq -r '.id // empty')
echo "Created notifier: TESTING (ID: $notifier1_id)"
# Store notifier
jq --arg not1 "$notifier1_id" \
'.notifiers = {"group1":[$not1]}' \
"$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"
echo "=== Step 6: Create items for all users ==="
# Create items for users in group 1
declare -A user_tokens
user_tokens[1]=$user1_token
user_tokens[2]=$(echo "$user1_token") # Users in same group share data, but we'll use user1 token
user_tokens[3]=$(echo "$user1_token")
user_tokens[4]=$(echo "$user1_token")
user_tokens[5]=$(echo "$user1_token")
# Items for group 1 users
echo "Creating items for group 1..."
item1=$(create_item "$user1_token" "Laptop Computer" "Dell XPS 15 for work" "$location1_id")
item1_id=$(echo "$item1" | jq -r '.id // empty')
echo "Created item: Laptop Computer (ID: $item1_id)"
item2=$(create_item "$user1_token" "Power Drill" "DeWalt 20V cordless drill" "$location2_id")
item2_id=$(echo "$item2" | jq -r '.id // empty')
echo "Created item: Power Drill (ID: $item2_id)"
item3=$(create_item "$user1_token" "TV Remote" "Samsung TV remote control" "$location1_id")
item3_id=$(echo "$item3" | jq -r '.id // empty')
echo "Created item: TV Remote (ID: $item3_id)"
item4=$(create_item "$user1_token" "Tool Box" "Red metal tool box with tools" "$location2_id")
item4_id=$(echo "$item4" | jq -r '.id // empty')
echo "Created item: Tool Box (ID: $item4_id)"
item5=$(create_item "$user1_token" "Coffee Maker" "Breville espresso machine" "$location1_id")
item5_id=$(echo "$item5" | jq -r '.id // empty')
echo "Created item: Coffee Maker (ID: $item5_id)"
# Items for group 2 users
echo "Creating items for group 2..."
item6=$(create_item "$user6_token" "Monitor" "27 inch 4K monitor" "$location3_id")
item6_id=$(echo "$item6" | jq -r '.id // empty')
echo "Created item: Monitor (ID: $item6_id)"
item7=$(create_item "$user6_token" "Keyboard" "Mechanical keyboard" "$location3_id")
item7_id=$(echo "$item7" | jq -r '.id // empty')
echo "Created item: Keyboard (ID: $item7_id)"
# Store items
jq --argjson group1_items "[\"$item1_id\",\"$item2_id\",\"$item3_id\",\"$item4_id\",\"$item5_id\"]" \
--argjson group2_items "[\"$item6_id\",\"$item7_id\"]" \
'.items = {"group1":$group1_items,"group2":$group2_items}' \
"$TEST_DATA_FILE" > "$TEST_DATA_FILE.tmp" && mv "$TEST_DATA_FILE.tmp" "$TEST_DATA_FILE"
echo "=== Step 7: Add attachments to items ==="
# Add attachments for group 1 items
echo "Adding attachments to group 1 items..."
attach_file_to_item "$user1_token" "$item1_id" "laptop-receipt.pdf"
attach_file_to_item "$user1_token" "$item1_id" "laptop-warranty.pdf"
attach_file_to_item "$user1_token" "$item2_id" "drill-manual.pdf"
attach_file_to_item "$user1_token" "$item3_id" "remote-guide.pdf"
attach_file_to_item "$user1_token" "$item4_id" "toolbox-inventory.txt"
# Add attachments for group 2 items
echo "Adding attachments to group 2 items..."
attach_file_to_item "$user6_token" "$item6_id" "monitor-receipt.pdf"
attach_file_to_item "$user6_token" "$item7_id" "keyboard-manual.pdf"
echo "=== Test Data Creation Complete ==="
echo "Test data file saved to: $TEST_DATA_FILE"
echo "Summary:"
echo " - Users created: 7 (5 in group 1, 2 in group 2)"
echo " - Locations created: 3"
echo " - Labels created: 3"
echo " - Notifiers created: 1"
echo " - Items created: 7"
echo " - Attachments created: 7"
# Display the test data file for verification
echo ""
echo "Test data:"
cat "$TEST_DATA_FILE" | jq '.'
exit 0

View File

@@ -1,131 +1,47 @@
name: Publish Release Binaries
on:
workflow_dispatch:
push:
tags: [ 'v*.*.*' ]
jobs:
# backend-tests:
# name: "Backend Server Tests"
# uses: sysadminsmedia/homebox/.github/workflows/partial-backend.yaml@main
# frontend-tests:
# name: "Frontend and End-to-End Tests"
# uses: sysadminsmedia/homebox/.github/workflows/partial-frontend.yaml@main
goreleaser:
name: goreleaser
runs-on: ubuntu-latest
outputs:
hashes: ${{ steps.binary.outputs.hashes }}
permissions:
contents: write
packages: write
id-token: write
steps:
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c
with:
go-version: "1.24"
cache-dependency-path: backend/go.mod
uses: actions/setup-go@v5
- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
- uses: pnpm/action-setup@v2
with:
version: 7.30.1
- name: Build Frontend and Copy to Backend
working-directory: frontend
run: |
pnpm install
pnpm install --shamefully-hoist
pnpm run build
cp -r ./.output/public ../backend/app/api/static/
- name: Install CoSign
working-directory: backend
run: |
go install github.com/sigstore/cosign/cmd/cosign@latest
- name: Install Syft
working-directory: backend
run: |
go install github.com/anchore/syft/cmd/syft@latest
- name: Run GoReleaser
id: releaser
if: startsWith(github.ref, 'refs/tags/')
uses: goreleaser/goreleaser-action@e435ccd777264be153ace6237001ef4d979d3a7a
uses: goreleaser/goreleaser-action@v5
with:
workdir: "backend"
distribution: goreleaser
version: "~> v2"
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COSIGN_PWD: ${{ secrets.COSIGN_PWD }}
COSIGN_YES: "true"
- name: Generate binary hashes
if: startsWith(github.ref, 'refs/tags/')
id: binary
env:
ARTIFACTS: "${{ steps.releaser.outputs.artifacts }}"
run: |
set -euo pipefail
checksum_file=$(echo "$ARTIFACTS" | jq -r '.[] | select (.type=="Checksum") | .path')
echo "hashes=$(cat $checksum_file | base64 -w0)" >> "$GITHUB_OUTPUT"
- name: Run GoReleaser No Release
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
uses: goreleaser/goreleaser-action@e435ccd777264be153ace6237001ef4d979d3a7a
with:
workdir: "backend"
distribution: goreleaser
version: "~> v2"
args: release --clean --snapshot --skip=publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COSIGN_PWD: ${{ secrets.COSIGN_PWD }}
COSIGN_YES: "true"
binary-provenance:
if: startsWith(github.ref, 'refs/tags/')
needs: [ goreleaser ]
permissions:
actions: read # To read the workflow path.
id-token: write # To sign the provenance.
contents: write # To add assets to a release.
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@f7dd8c54c2067bafc12ca7a55595d5ee9b75204a
with:
base64-subjects: "${{ needs.goreleaser.outputs.hashes }}"
upload-assets: true # upload to a new release
verification-with-slsa-verifier:
if: startsWith(github.ref, 'refs/tags/')
needs: [goreleaser, binary-provenance]
runs-on: ubuntu-latest
permissions: read-all
steps:
- name: Install the verifier
uses: slsa-framework/slsa-verifier/actions/installer@ea584f4502babc6f60d9bc799dbbb13c1caa9ee6
- name: Download assets
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PROVENANCE: "${{ needs.binary-provenance.outputs.provenance-name }}"
run: |
set -euo pipefail
gh -R "$GITHUB_REPOSITORY" release download "$GITHUB_REF_NAME" -p "*.tar.gz"
gh -R "$GITHUB_REPOSITORY" release download "$GITHUB_REF_NAME" -p "*.zip"
gh -R "$GITHUB_REPOSITORY" release download "$GITHUB_REF_NAME" -p "$PROVENANCE"
- name: Verify assets
env:
CHECKSUMS: ${{ needs.goreleaser.outputs.hashes }}
PROVENANCE: "${{ needs.binary-provenance.outputs.provenance-name }}"
run: |
set -euo pipefail
checksums=$(echo "$CHECKSUMS" | base64 -d)
while read -r line; do
fn=$(echo $line | cut -d ' ' -f2)
echo "Verifying $fn"
slsa-verifier verify-artifact --provenance-path "$PROVENANCE" \
--source-uri "github.com/$GITHUB_REPOSITORY" \
--source-tag "$GITHUB_REF_NAME" \
"$fn"
done <<<"$checksums"

View File

@@ -1,41 +0,0 @@
name: Docker Cleanup
on:
schedule:
- cron: '00 0 * * *'
workflow_dispatch:
jobs:
delete-old-images-main:
name: Delete Old Images for Main Repo
runs-on: ubuntu-latest
permissions:
packages: write
steps:
- uses: dataaxiom/ghcr-cleanup-action@cd0cdb900b5dbf3a6f2cc869f0dbb0b8211f50c4
with:
dry-run: true
delete-ghost-images: true
delete-partial-images: true
delete-orphaned-images: true
delete-untagged: true
validate: true
token: '${{ github.token }}'
package: homebox
use-regex: true
exclude-tags: latest,latest-rootless,main,main-rootless,nightly,nightly-rootless,*.*.*,0.*,0,*.*.*-rootless,0.*-rootless,0-rootless
older-than: 180 days
delete-old-images-devcache:
name: Delete Old Devcache Images
runs-on: ubuntu-latest
permissions:
packages: write
steps:
- uses: dataaxiom/ghcr-cleanup-action@cd0cdb900b5dbf3a6f2cc869f0dbb0b8211f50c4
with:
dry-run: false
delete-untagged: true
token: '${{ github.token }}'
package: devcache
older-than: 90 days

View File

@@ -1,50 +0,0 @@
name: "Copilot Setup Steps"
# Automatically run the setup steps when they are changed to allow for easy validation, and
# allow manual testing through the repository's "Actions" tab
on:
workflow_dispatch:
push:
paths:
- .github/workflows/copilot-setup-steps.yml
pull_request:
paths:
- .github/workflows/copilot-setup-steps.yml
jobs:
# The job MUST be called `copilot-setup-steps` or it will not be picked up by Copilot.
copilot-setup-steps:
runs-on: ubuntu-latest
# Set the permissions to the lowest permissions possible needed for your steps.
# Copilot will be given its own token for its operations.
permissions:
# If you want to clone the repository as part of your setup steps, for example to install dependencies, you'll need the `contents: read` permission. If you don't clone the repository in your setup steps, Copilot will do this for you automatically after the steps complete.
contents: read
# You can define any steps you want, and they will run before the agent starts.
# If you do not check out your code, Copilot will do this for you.
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
- name: Set up Node.js
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f
with:
node-version: "24"
- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
- name: Set up Go
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c
with:
go-version: "1.24"
cache-dependency-path: backend/go.mod
- name: Install Task
uses: arduino/setup-task@b91d5d2c96a56797b48ac1e0e89220bf64044611
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Perform setup
run: task setup

View File

@@ -0,0 +1,105 @@
name: Docker publish ARM
on:
schedule:
- cron: '00 0 * * *'
push:
branches: [ "main" ]
paths:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile'
- 'Dockerfile.rootless'
- '.dockerignore'
- '.github/workflows'
# Publish semver tags as releases.
tags: [ 'v*.*.*' ]
pull_request:
branches: [ "main" ]
paths:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile'
- 'Dockerfile.rootless'
- '.dockerignore'
- '.github/workflows'
env:
# Use docker.io for Docker Hub if empty
REGISTRY: ghcr.io
# github.repository as <account>/<repo>
IMAGE_NAME: ${{ github.repository }}
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
attestations: write
id-token: write
steps:
# Step 1: Checkout repository
- name: Checkout repository
uses: actions/checkout@v4
# Step 2: Set up Buildx without specifying driver
# Let it use default settings to avoid the 'no remote endpoint' issue
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.0.0
with:
install: true # Ensure Buildx is installed and set up properly
use: true # Use Buildx instance directly for this job
# Step 3: Login against Docker registry except on PR
- name: Log into registry ${{ env.REGISTRY }}
if: github.event_name != 'pull_request'
uses: docker/login-action@v3.0.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# Step 4: Extract metadata for Docker images
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@v5.0.0
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=schedule,pattern=nightly
flavor: |
suffix=-arm,onlatest=true
# Step 5: Build and push the Docker image
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v5.0.0
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/arm64,linux/arm/v7
cache-from: type=gha
cache-to: type=gha,mode=max
build-args: |
VERSION=${{ github.ref_name }}
COMMIT=${{ github.sha }}
# Step 6: Attest built image to prove build provenance
- name: Attest
uses: actions/attest-build-provenance@v1
id: attest
if: ${{ github.event_name != 'pull_request' }}
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
subject-digest: ${{ steps.build-and-push.outputs.digest }}
push-to-registry: true

View File

@@ -1,249 +0,0 @@
name: Docker publish hardened
on:
schedule:
- cron: '00 0 * * *'
push:
branches: [ "main" ]
paths:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile.hardened'
- '.dockerignore'
- '.github/workflows/docker-publish-hardened.yaml'
tags: [ 'v*.*.*' ]
pull_request:
branches: [ "main" ]
paths:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile.hardened'
- '.dockerignore'
- '.github/workflows/docker-publish-hardened.yaml'
permissions:
contents: read # Access to repository contents
packages: write # Write access for pushing to GHCR
id-token: write # Required for OIDC authentication (if used)
attestations: write # Required for signing and attestation (if needed)
env:
DOCKERHUB_REPO: sysadminsmedia/homebox
GHCR_REPO: ghcr.io/${{ github.repository }}
jobs:
build:
runs-on: ${{ matrix.runner }}
permissions:
contents: read
packages: write
id-token: write
attestations: write
strategy:
fail-fast: false
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
steps:
- name: Enable Debug Logs
run: echo "##[debug]Enabling debug logging"
env:
ACTIONS_RUNNER_DEBUG: true
ACTIONS_STEP_DEBUG: true
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
- name: Prepare
run: |
echo "BUILD_TIME=$(date -u +%Y-%m-%dT%H:%M:%SZ)" >> $GITHUB_ENV
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
branch=${{ github.event.pull_request.number || github.ref_name }}
echo "BRANCH=${branch//\//-}" >> $GITHUB_ENV
echo "DOCKERNAMES=${{ env.DOCKERHUB_REPO }},${{ env.GHCR_REPO }}" >> $GITHUB_ENV
if [[ "${{ github.event_name }}" != "schedule" ]] || [[ "${{ github.ref }}" != refs/tags/* ]]; then
echo "DOCKERNAMES=${{ env.GHCR_REPO }}" >> $GITHUB_ENV
fi
- name: Docker meta
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f
with:
images: |
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
name=${{ env.GHCR_REPO }}
- name: Login to Docker Hub
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GHCR
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392
with:
image: ghcr.io/sysadminsmedia/binfmt:latest
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435
with:
driver-opts: |
image=ghcr.io/sysadminsmedia/buildkit:master
- name: Build and push by digest
id: build
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83
with:
context: . # Explicitly specify the build context
file: ./Dockerfile.hardened # Explicitly specify the Dockerfile
platforms: ${{ matrix.platform }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,"name=${{ env.DOCKERNAMES }}",push-by-digest=true,name-canonical=true,push=${{ github.event_name != 'pull_request' }}
cache-from: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR }}-${{ env.BRANCH }}-hardened
cache-to: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR }}-${{ env.BRANCH }}-hardened,mode=max,ignore-error=true
build-args: |
VERSION=${{ github.ref_name }}
COMMIT=${{ github.sha }}
BUILD_TIME=${{ env.BUILD_TIME }}
provenance: mode=max
sbom: true
annotations: ${{ steps.meta.outputs.annotations }}
- name: Attest platform-specific images
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
if: github.event_name != 'pull_request'
with:
subject-name: ${{ env.GHCR_REPO }}
subject-digest: ${{ steps.build.outputs.digest }}
push-to-registry: true
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
with:
name: digests-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
if: github.event_name != 'pull_request'
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
attestations: write
needs:
- build
steps:
- name: Download digests
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- name: Login to Docker Hub
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GHCR
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435
with:
driver-opts: |
image=ghcr.io/sysadminsmedia/buildkit:master
- name: Docker meta
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f
with:
images: |
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
name=${{ env.GHCR_REPO }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=schedule,pattern=nightly
flavor: |
suffix=-hardened,onlatest=true
- name: Create manifest list and push GHCR
id: push-ghcr
working-directory: /tmp/digests
run: |
set -euo pipefail
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *) 2>&1 | tee /tmp/push-ghcr.out
digest=$(grep -oE 'sha256:[a-f0-9]{64}' /tmp/push-ghcr.out | head -n1 || true)
if [ -z "$digest" ]; then
echo "No digest found in imagetools output:"
cat /tmp/push-ghcr.out
exit 1
fi
echo "digest=$digest" >> $GITHUB_OUTPUT
- name: Attest GHCR images
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
if: github.event_name != 'pull_request'
with:
subject-name: ${{ env.GHCR_REPO }}
subject-digest: ${{ steps.push-ghcr.outputs.digest }}
push-to-registry: true
- name: Create manifest list and push Dockerhub
id: push-dockerhub
working-directory: /tmp/digests
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
run: |
set -euo pipefail
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.DOCKERHUB_REPO }}@sha256:%s ' *) 2>&1 | tee /tmp/push-dockerhub.out
digest=$(grep -oE 'sha256:[a-f0-9]{64}' /tmp/push-dockerhub.out | head -n1 || true)
if [ -z "$digest" ]; then
echo "No digest found in imagetools output:"
cat /tmp/push-dockerhub.out
exit 1
fi
echo "digest=$digest" >> $GITHUB_OUTPUT
- name: Attest Dockerhub images
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
with:
subject-name: docker.io/${{ env.DOCKERHUB_REPO }}
subject-digest: ${{ steps.push-dockerhub.outputs.digest }}
push-to-registry: true

View File

@@ -0,0 +1,104 @@
name: Docker publish rootless ARM
on:
schedule:
- cron: '00 0 * * *'
push:
branches: [ "main" ]
paths:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile'
- 'Dockerfile.rootless'
- '.dockerignore'
- '.github/workflows'
# Publish semver tags as releases.
tags: [ 'v*.*.*' ]
pull_request:
branches: [ "main" ]
paths:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile'
- 'Dockerfile.rootless'
- '.dockerignore'
- '.github/workflows'
env:
# Use docker.io for Docker Hub if empty
REGISTRY: ghcr.io
# github.repository as <account>/<repo>
IMAGE_NAME: ${{ github.repository }}
jobs:
build-rootless:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
attestations: write
id-token: write
steps:
# Step 1: Checkout repository
- name: Checkout repository
uses: actions/checkout@v4
# Step 2: Set up Buildx without specifying driver
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.0.0
with:
install: true # Ensure Buildx is installed and set up properly
use: true # Use Buildx instance directly for this job
# Step 3: Login to Docker registry except on PR
- name: Log into registry ${{ env.REGISTRY }}
if: github.event_name != 'pull_request'
uses: docker/login-action@v3.0.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# Step 4: Extract metadata for Docker images
- name: Extract Docker metadata
id: metadata
uses: docker/metadata-action@v5.0.0
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=schedule,pattern=nightly
flavor: |
suffix=-rootless-arm,onlatest=true
# Step 5: Build and push the Docker image
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v5.0.0
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
platforms: linux/arm64,linux/arm/v7
cache-from: type=gha
cache-to: type=gha,mode=max
build-args: |
VERSION=${{ github.ref_name }}
COMMIT=${{ github.sha }}
# Step 6: Attest built image to prove build provenance
- name: Attest
uses: actions/attest-build-provenance@v1
id: attest
if: ${{ github.event_name != 'pull_request' }}
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
subject-digest: ${{ steps.build-and-push.outputs.digest }}
push-to-registry: true

View File

@@ -8,191 +8,69 @@ on:
paths:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile'
- 'Dockerfile.rootless'
- '.dockerignore'
- '.github/workflows/docker-publish-rootless.yaml'
ignore:
- 'docs/**'
- '.github/workflows'
# Publish semver tags as releases.
tags: [ 'v*.*.*' ]
pull_request:
branches: [ "main" ]
paths:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile'
- 'Dockerfile.rootless'
- '.dockerignore'
- '.github/workflows/docker-publish-rootless.yaml'
ignore:
- 'docs/**'
- '.github/workflows'
permissions:
contents: read # Access to repository contents
packages: write # Write access for pushing to GHCR
id-token: write # Required for OIDC authentication (if used)
attestations: write # Required for signing and attestation (if needed)
env:
DOCKERHUB_REPO: sysadminsmedia/homebox
GHCR_REPO: ghcr.io/${{ github.repository }}
# Use docker.io for Docker Hub if empty
REGISTRY: ghcr.io
# github.repository as <account>/<repo>
IMAGE_NAME: ${{ github.repository }}
jobs:
build:
runs-on: ${{ matrix.runner }}
permissions:
contents: read
packages: write
id-token: write
attestations: write
strategy:
fail-fast: false
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
build-rootless:
steps:
- name: Enable Debug Logs
run: echo "##[debug]Enabling debug logging"
env:
ACTIONS_RUNNER_DEBUG: true
ACTIONS_STEP_DEBUG: true
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
branch=${{ github.event.pull_request.number || github.ref_name }}
echo "BRANCH=${branch//\//-}" >> $GITHUB_ENV
echo "DOCKERNAMES=${{ env.DOCKERHUB_REPO }},${{ env.GHCR_REPO }}" >> $GITHUB_ENV
if [[ "${{ github.event_name }}" != "schedule" ]] || [[ "${{ github.ref }}" != refs/tags/* ]]; then
echo "DOCKERNAMES=${{ env.GHCR_REPO }}" >> $GITHUB_ENV
fi
- name: Docker meta
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f
with:
images: |
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
name=${{ env.GHCR_REPO }}
- name: Login to Docker Hub
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GHCR
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392
with:
image: ghcr.io/sysadminsmedia/binfmt:latest
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435
with:
driver-opts: |
image=ghcr.io/sysadminsmedia/buildkit:master
- name: Build and push by digest
id: build
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83
with:
context: . # Explicitly specify the build context
file: ./Dockerfile.rootless # Explicitly specify the Dockerfile
platforms: ${{ matrix.platform }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,"name=${{ env.DOCKERNAMES }}",push-by-digest=true,name-canonical=true,push=${{ github.event_name != 'pull_request' }}
cache-from: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR }}-${{ env.BRANCH }}-rootless
cache-to: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR }}-${{ env.BRANCH }}-rootless,mode=max,ignore-error=true
build-args: |
VERSION=${{ github.ref_name }}
COMMIT=${{ github.sha }}
provenance: mode=max
sbom: true
annotations: ${{ steps.meta.outputs.annotations }}
- name: Attest platform-specific images
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
if: github.event_name != 'pull_request'
with:
subject-name: ${{ env.GHCR_REPO }}
subject-digest: ${{ steps.build.outputs.digest }}
push-to-registry: true
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
with:
name: digests-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
if: github.event_name != 'pull_request'
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
# This is used to complete the identity challenge
# with sigstore/fulcio when running outside of PRs.
attestations: write
needs:
- build
id-token: write
steps:
- name: Download digests
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- name: Checkout repository
uses: actions/checkout@v4
- name: Login to Docker Hub
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
# Set up BuildKit Docker container builder to be able to build
# multi-platform images and export cache
# https://github.com/docker/setup-buildx-action
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.0.0 # v3.0.0
- name: Login to GHCR
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
# Login against a Docker registry except on PR
# https://github.com/docker/login-action
- name: Log into registry ${{ env.REGISTRY }}
if: github.event_name != 'pull_request'
uses: docker/login-action@v3.0.0 # v3.0.0
with:
registry: ghcr.io
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435
# Extract metadata (tags, labels) for Docker
# https://github.com/docker/metadata-action
- name: Extract Docker metadata
id: metadata
uses: docker/metadata-action@v5.0.0 # v5.0.0
with:
driver-opts: |
image=ghcr.io/sysadminsmedia/buildkit:master
- name: Docker meta
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f
with:
images: |
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
name=${{ env.GHCR_REPO }}
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
type=ref,event=pr
@@ -202,50 +80,29 @@ jobs:
type=schedule,pattern=nightly
flavor: |
suffix=-rootless,onlatest=true
- name: Create manifest list and push GHCR
id: push-ghcr
working-directory: /tmp/digests
run: |
set -euo pipefail
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *) 2>&1 | tee /tmp/push-ghcr.out
digest=$(grep -oE 'sha256:[a-f0-9]{64}' /tmp/push-ghcr.out | head -n1 || true)
if [ -z "$digest" ]; then
echo "No digest found in imagetools output:"
cat /tmp/push-ghcr.out
exit 1
fi
echo "digest=$digest" >> $GITHUB_OUTPUT
- name: Attest GHCR images
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
if: github.event_name != 'pull_request'
# Build and push Docker image with Buildx (don't push on PR)
# https://github.com/docker/build-push-action
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v5.0.0 # v5.0.0
with:
subject-name: ${{ env.GHCR_REPO }}
subject-digest: ${{ steps.push-ghcr.outputs.digest }}
push-to-registry: true
context: .
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
platforms: linux/amd64
cache-from: type=gha
cache-to: type=gha,mode=max
build-args: |
VERSION=${{ github.ref_name }}
COMMIT=${{ github.sha }}
- name: Create manifest list and push Dockerhub
id: push-dockerhub
working-directory: /tmp/digests
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
run: |
set -euo pipefail
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.DOCKERHUB_REPO }}@sha256:%s ' *) 2>&1 | tee /tmp/push-dockerhub.out
digest=$(grep -oE 'sha256:[a-f0-9]{64}' /tmp/push-dockerhub.out | head -n1 || true)
if [ -z "$digest" ]; then
echo "No digest found in imagetools output:"
cat /tmp/push-dockerhub.out
exit 1
fi
echo "digest=$digest" >> $GITHUB_OUTPUT
- name: Attest Dockerhub images
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
- name: Attest
uses: actions/attest-build-provenance@v1
id: attest
if: ${{ github.event_name != 'pull_request' }}
with:
subject-name: docker.io/${{ env.DOCKERHUB_REPO }}
subject-digest: ${{ steps.push-dockerhub.outputs.digest }}
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
subject-digest: ${{ steps.build-and-push.outputs.digest }}
push-to-registry: true

View File

@@ -9,10 +9,10 @@ on:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile'
- 'Dockerfile.rootless'
- '.dockerignore'
- '.github/workflows/docker-publish.yaml'
ignore:
- 'docs/**'
- '.github/workflows'
# Publish semver tags as releases.
tags: [ 'v*.*.*' ]
pull_request:
branches: [ "main" ]
@@ -20,172 +20,56 @@ on:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile'
- 'Dockerfile.rootless'
- '.dockerignore'
- '.github/workflows/docker-publish.yaml'
ignore:
- 'docs/**'
- '.github/workflows'
env:
DOCKERHUB_REPO: sysadminsmedia/homebox
GHCR_REPO: ghcr.io/${{ github.repository }}
# Use docker.io for Docker Hub if empty
REGISTRY: ghcr.io
# github.repository as <account>/<repo>
IMAGE_NAME: ${{ github.repository }}
permissions:
contents: read # Access to repository contents
packages: write # Write access for pushing to GHCR
id-token: write # Required for OIDC authentication (if used)
attestations: write # Required for signing and attestation (if needed)
jobs:
build:
runs-on: ${{ matrix.runner }}
permissions:
contents: read # Allows access to repository contents (read-only)
packages: write # Allows pushing to GHCR
id-token: write # Allows identity token write access for authentication
attestations: write # Needed for signing and attestation (if required)
strategy:
fail-fast: false
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
# This is used to complete the identity challenge
# with sigstore/fulcio when running outside of PRs.
attestations: write
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
branch=${{ github.event.pull_request.number || github.ref_name }}
echo "BRANCH=${branch//\//-}" >> $GITHUB_ENV
echo "DOCKERNAMES=${{ env.DOCKERHUB_REPO }},${{ env.GHCR_REPO }}" >> $GITHUB_ENV
if [[ "${{ github.event_name }}" != "schedule" ]] || [[ "${{ github.ref }}" != refs/tags/* ]]; then
echo "DOCKERNAMES=${{ env.GHCR_REPO }}" >> $GITHUB_ENV
fi
- name: Docker meta
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f
with:
images: |
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
name=${{ env.GHCR_REPO }}
- name: Login to Docker Hub
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GHCR
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392
with:
image: ghcr.io/sysadminsmedia/binfmt:latest
uses: actions/checkout@v4
# Set up BuildKit Docker container builder to be able to build
# multi-platform images and export cache
# https://github.com/docker/setup-buildx-action
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435
with:
driver-opts: |
image=ghcr.io/sysadminsmedia/buildkit:latest
uses: docker/setup-buildx-action@v3.0.0 # v3.0.0
- name: Build and push by digest
id: build
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83
with:
platforms: ${{ matrix.platform }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,"name=${{ env.DOCKERNAMES }}",push-by-digest=true,name-canonical=true,push=${{ github.event_name != 'pull_request' }}
cache-from: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR }}-${{ env.BRANCH }}
cache-to: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR}}-${{ env.BRANCH }},mode=max,ignore-error=true
build-args: |
VERSION=${{ github.ref_name }}
COMMIT=${{ github.sha }}
provenance: mode=max
sbom: true
annotations: ${{ steps.meta.outputs.annotations }}
- name: Attest platform-specific images
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
# Login against a Docker registry except on PR
# https://github.com/docker/login-action
- name: Log into registry ${{ env.REGISTRY }}
if: github.event_name != 'pull_request'
uses: docker/login-action@v3.0.0 # v3.0.0
with:
subject-name: ${{ env.GHCR_REPO }}
subject-digest: ${{ steps.build.outputs.digest }}
push-to-registry: true
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
with:
name: digests-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
if: github.event_name != 'pull_request'
runs-on: ubuntu-latest
permissions:
contents: read # Allows access to repository contents (read-only)
packages: write # Allows pushing to GHCR
id-token: write # Allows identity token write access for authentication
attestations: write # Needed for signing and attestation (if required)
needs:
- build
steps:
- name: Download digests
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- name: Login to Docker Hub
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GHCR
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1
with:
registry: ghcr.io
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435
with:
driver-opts: |
image=ghcr.io/sysadminsmedia/buildkit:master
- name: Docker meta
# Extract metadata (tags, labels) for Docker
# https://github.com/docker/metadata-action
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f
uses: docker/metadata-action@v5.0.0 # v5.0.0
with:
images: |
name=${{ env.DOCKERHUB_REPO }},enable=${{ github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') }}
name=${{ env.GHCR_REPO }}
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
type=ref,event=pr
@@ -194,49 +78,28 @@ jobs:
type=semver,pattern={{major}}
type=schedule,pattern=nightly
- name: Create manifest list and push GHCR
id: push-ghcr
working-directory: /tmp/digests
run: |
set -euo pipefail
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *) 2>&1 | tee /tmp/push-ghcr.out
digest=$(grep -oE 'sha256:[a-f0-9]{64}' /tmp/push-ghcr.out | head -n1 || true)
if [ -z "$digest" ]; then
echo "No digest found in imagetools output:"
cat /tmp/push-ghcr.out
exit 1
fi
echo "digest=$digest" >> $GITHUB_OUTPUT
- name: Attest GHCR images
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
if: github.event_name != 'pull_request'
# Build and push Docker image with Buildx (don't push on PR)
# https://github.com/docker/build-push-action
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v5.0.0 # v5.0.0
with:
subject-name: ${{ env.GHCR_REPO }}
subject-digest: ${{ steps.push-ghcr.outputs.digest }}
push-to-registry: true
- name: Create manifest list and push Dockerhub
id: push-dockerhub
working-directory: /tmp/digests
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
run: |
set -euo pipefail
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.DOCKERHUB_REPO }}@sha256:%s ' *) 2>&1 | tee /tmp/push-dockerhub.out
digest=$(grep -oE 'sha256:[a-f0-9]{64}' /tmp/push-dockerhub.out | head -n1 || true)
if [ -z "$digest" ]; then
echo "No digest found in imagetools output:"
cat /tmp/push-dockerhub.out
exit 1
fi
echo "digest=$digest" >> $GITHUB_OUTPUT
- name: Attest Dockerhub images
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8
if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/'))
context: .
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64
cache-from: type=gha
cache-to: type=gha,mode=max
build-args: |
VERSION=${{ github.ref_name }}
COMMIT=${{ github.sha }}
- name: Attest
uses: actions/attest-build-provenance@v1
id: attest
if: ${{ github.event_name != 'pull_request' }}
with:
subject-name: docker.io/${{ env.DOCKERHUB_REPO }}
subject-digest: ${{ steps.push-dockerhub.outputs.digest }}
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
subject-digest: ${{ steps.build-and-push.outputs.digest }}
push-to-registry: true
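
The `jq -cr '.tags | map("-t " + .) | join(" ")'` expression above turns the metadata action's tag list into repeated `-t` flags for `docker buildx imagetools create`. A rough Go equivalent of that transformation (the sample JSON is made up for illustration):

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// metadata models only the field of DOCKER_METADATA_OUTPUT_JSON the step uses.
type metadata struct {
	Tags []string `json:"tags"`
}

// tagFlags reproduces the jq pipeline: each tag becomes "-t <tag>",
// joined into a single argument string.
func tagFlags(raw []byte) (string, error) {
	var m metadata
	if err := json.Unmarshal(raw, &m); err != nil {
		return "", err
	}
	parts := make([]string, 0, len(m.Tags))
	for _, t := range m.Tags {
		parts = append(parts, "-t "+t)
	}
	return strings.Join(parts, " "), nil
}

func main() {
	// Hypothetical metadata output, for illustration only.
	raw := []byte(`{"tags":["ghcr.io/sysadminsmedia/homebox:main","ghcr.io/sysadminsmedia/homebox:nightly"]}`)
	flags, err := tagFlags(raw)
	if err != nil {
		panic(err)
	}
	fmt.Println(flags)
}
```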

View File

@@ -1,97 +0,0 @@
name: E2E (Playwright)
permissions:
contents: read
actions: read
checks: write
pull-requests: write
on:
workflow_call:
jobs:
playwright-tests:
timeout-minutes: 60
strategy:
matrix:
shardIndex: [1,2,3,4]
shardTotal: [4]
name: E2E Playwright Testing ${{ matrix.shardIndex }}/${{ matrix.shardTotal }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
with:
fetch-depth: 0
- name: Install Task
uses: arduino/setup-task@b91d5d2c96a56797b48ac1e0e89220bf64044611
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Go
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c
with:
go-version: "1.24"
cache-dependency-path: backend/go.mod
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f
with:
node-version: lts/*
- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
- name: Install dependencies
run: pnpm install
working-directory: frontend
- name: Install Go Dependencies
run: go mod download
working-directory: backend
- name: Run E2E Tests
run: task test:e2e -- --shard=${{ matrix.shardIndex }}/${{ matrix.shardTotal }}
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
name: Upload partial Playwright report
if: ${{ !cancelled() }}
with:
name: blob-report-${{ matrix.shardIndex }}
path: frontend/blob-report/
retention-days: 2
merge-reports:
# Merge reports after playwright-tests, even if some shards have failed
if: ${{ !cancelled() }}
needs: [playwright-tests]
name: Merge Playwright Reports
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f
with:
node-version: lts/*
- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
- name: Install dependencies
run: pnpm install
working-directory: frontend
- name: Download blob reports from GitHub Actions Artifacts
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
with:
path: frontend/all-blob-reports
pattern: blob-report-*
merge-multiple: true
- name: Merge into HTML Report
run: pnpm exec playwright merge-reports --reporter html,github ./all-blob-reports
working-directory: frontend
- name: Upload HTML report
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
with:
name: html-report--attempt-${{ github.run_attempt }}
path: frontend/playwright-report
retention-days: 30
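
The deleted E2E job fans Playwright out across four shards with `--shard=<index>/<total>`; Playwright itself decides which tests land in which shard. Purely as an illustration of the idea (not Playwright's actual partitioning), a contiguous split of a test list looks roughly like this:

```go
package main

import "fmt"

// shardSlice returns the contiguous chunk of items assigned to the 1-based
// shard index out of total shards; every item lands in exactly one shard.
func shardSlice(items []string, index, total int) []string {
	n := len(items)
	start := n * (index - 1) / total
	end := n * index / total
	return items[start:end]
}

func main() {
	// Hypothetical spec file names, for illustration only.
	tests := []string{"auth.spec.ts", "items.spec.ts", "labels.spec.ts", "locations.spec.ts", "profile.spec.ts"}
	for i := 1; i <= 4; i++ {
		fmt.Printf("shard %d/4: %v\n", i, shardSlice(tests, i, 4))
	}
}
```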

View File

@@ -1,50 +0,0 @@
name: Issue Gatekeeper
permissions:
issues: write
on:
issues:
types: [ opened ]
jobs:
check-permissions:
runs-on: ubuntu-latest
steps:
- name: Verify Internal Template Use
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd
with:
script: |
const { owner, repo } = context.repo;
const issue_number = context.issue.number;
const actor = context.payload.sender.login;
// 1. Get user permission level
const { data: perms } = await github.rest.repos.getCollaboratorPermissionLevel({
owner,
repo,
username: actor
});
const isMember = ['admin', 'write'].includes(perms.permission);
const body = context.payload.issue.body || "";
// 2. Check if they used the internal template (or if the issue is blank)
// We detect this by checking for our specific template string or the 'internal' label
const usedInternal = context.payload.issue.labels.some(l => l.name === 'internal');
if (usedInternal && !isMember) {
await github.rest.issues.createComment({
owner,
repo,
issue_number,
body: `@${actor}, the "Internal" template is restricted to project members. Please use one of the standard bug or feature templates for this repository.`
});
await github.rest.issues.update({
owner,
repo,
issue_number,
state: 'closed'
});
}
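
The gatekeeper script above reduces to one decision: close the issue when the `internal` label is present but the opener lacks write or admin permission. The same check restated as a small Go function (a sketch of the logic only, not a GitHub API client):

```go
package main

import "fmt"

// shouldClose mirrors the gatekeeper's decision: an issue carrying the
// "internal" label is closed unless the opener has write or admin permission.
func shouldClose(permission string, labels []string) bool {
	isMember := permission == "admin" || permission == "write"
	usedInternal := false
	for _, l := range labels {
		if l == "internal" {
			usedInternal = true
			break
		}
	}
	return usedInternal && !isMember
}

func main() {
	fmt.Println(shouldClose("read", []string{"internal"}))  // true: comment and close
	fmt.Println(shouldClose("write", []string{"internal"})) // false: project member
	fmt.Println(shouldClose("read", []string{"bug"}))       // false: standard template
}
```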

View File

@@ -1,11 +1,5 @@
name: Go Build/Test
permissions:
contents: read
actions: read
checks: write
pull-requests: write
on:
workflow_call:
@@ -13,21 +7,20 @@ jobs:
Go:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
- uses: actions/checkout@v4
- name: Set up Go
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c
uses: actions/setup-go@v5
with:
go-version: "1.24"
cache-dependency-path: backend/go.mod
go-version: "1.21"
- name: Install Task
uses: arduino/setup-task@b91d5d2c96a56797b48ac1e0e89220bf64044611
uses: arduino/setup-task@v1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: golangci-lint
uses: golangci/golangci-lint-action@1e7e51e771db61008b38414a730f564565cf7c20
uses: golangci/golangci-lint-action@v4
with:
# Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
version: latest

View File

@@ -1,10 +1,4 @@
name: Frontend
permissions:
contents: read
actions: read
checks: write
pull-requests: write
name: Frontend / E2E
on:
workflow_call:
@@ -15,14 +9,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
- uses: pnpm/action-setup@v3.0.0
with:
version: 9.12.2
- name: Install dependencies
run: pnpm install
run: pnpm install --shamefully-hoist
working-directory: frontend
- name: Run Lint
@@ -36,42 +32,29 @@ jobs:
integration-tests:
name: Integration Tests
runs-on: ubuntu-latest
services:
postgres:
image: postgres:17
env:
POSTGRES_USER: homebox
POSTGRES_PASSWORD: homebox
POSTGRES_DB: homebox
ports:
- 5432:5432
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Install Task
uses: arduino/setup-task@b91d5d2c96a56797b48ac1e0e89220bf64044611
uses: arduino/setup-task@v1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Go
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c
uses: actions/setup-go@v5
with:
go-version: "1.24"
cache-dependency-path: backend/go.mod
go-version: "1.21"
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f
- uses: actions/setup-node@v4
with:
node-version: lts/*
node-version: 18
- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
- uses: pnpm/action-setup@v3.0.0
with:
version: 9.12.2
- name: Install dependencies
run: pnpm install
@@ -79,52 +62,3 @@ jobs:
- name: Run Integration Tests
run: task test:ci
integration-tests-pgsql:
strategy:
matrix:
database_version: [17,16,15]
name: Integration Tests PGSQL ${{ matrix.database_version }}
runs-on: ubuntu-latest
services:
postgres:
image: postgres:${{ matrix.database_version }}
env:
POSTGRES_USER: homebox
POSTGRES_PASSWORD: homebox
POSTGRES_DB: homebox
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
steps:
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
with:
fetch-depth: 0
- name: Install Task
uses: arduino/setup-task@b91d5d2c96a56797b48ac1e0e89220bf64044611
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Go
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c
with:
go-version: "1.24"
cache-dependency-path: backend/go.mod
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f
with:
node-version: lts/*
- uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061
- name: Install dependencies
run: pnpm install
working-directory: frontend
- name: Run Integration Tests
run: task test:ci:postgresql

View File

@@ -1,24 +1,13 @@
name: Pull Request CI
permissions:
contents: read
actions: read
checks: write
pull-requests: write
on:
pull_request:
branches:
- main
- vnext
paths:
- 'backend/**'
- 'frontend/**'
- '.github/workflows/partial-backend.yaml'
- '.github/workflows/partial-frontend.yaml'
- '.github/workflows/e2e-partial.yaml'
- '.github/workflows/pull-requests.yaml'
jobs:
backend-tests:
@@ -26,9 +15,5 @@ jobs:
uses: ./.github/workflows/partial-backend.yaml
frontend-tests:
name: "Frontend Tests"
uses: ./.github/workflows/partial-frontend.yaml
e2e-tests:
name: "End-to-End Playwright Tests"
uses: ./.github/workflows/e2e-partial.yaml
name: "Frontend and End-to-End Tests"
uses: ./.github/workflows/partial-frontend.yaml

View File

@@ -2,12 +2,8 @@ name: Update Currencies
on:
push:
branches: [ main ]
workflow_dispatch:
permissions:
contents: write
pull-requests: write
branches:
- main
jobs:
update-currencies:
@@ -15,46 +11,90 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
with:
fetch-depth: 0
uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548
uses: actions/setup-python@v2
with:
python-version: '3.8'
cache: 'pip'
cache-dependency-path: .github/workflows/update-currencies/requirements.txt
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r .github/workflows/update-currencies/requirements.txt
pip install requests
- name: Run currency update script
- name: Run currency fetch script
run: python .github/scripts/update_currencies.py
- name: Check for currencies.json changes
- name: Check for changes
id: check_changes
run: |
if git diff --quiet -- backend/internal/core/currencies/currencies.json; then
echo "changed=false" >> $GITHUB_ENV
if [[ $(git status --porcelain) ]]; then
echo "Changes detected."
echo "changes=true" >> $GITHUB_ENV
else
echo "changed=true" >> $GITHUB_ENV
echo "No changes detected."
echo "changes=false" >> $GITHUB_ENV
fi
- name: Create Pull Request
if: env.changed == 'true'
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725
with:
token: ${{ secrets.GITHUB_TOKEN }}
branch: update-currencies
base: main
title: "Update currencies.json"
commit-message: "chore: update currencies.json"
path: .
add-paths: |
backend/internal/core/currencies/currencies.json
- name: Delete existing update-currencies branch
run: |
if git show-ref --verify --quiet refs/heads/update-currencies; then
git branch -D update-currencies
echo "Deleted existing update-currencies branch."
else
echo "No existing update-currencies branch to delete."
fi
- name: No updates needed
if: env.changed == 'false'
run: echo "✅ currencies.json is already up-to-date"
- name: Create new update-currencies branch
if: env.changes == 'true'
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
# Create a new branch
git checkout -b update-currencies
git add backend/internal/core/currencies/currencies.json
git commit -m "Update currencies.json"
# Fetch the latest changes from the remote
git fetch origin
# Attempt to rebase with the latest changes
if git show-ref --verify --quiet refs/remotes/origin/update-currencies; then
if ! git rebase origin/update-currencies; then
echo "Rebase conflicts occurred. Please resolve them manually."
echo "To resolve conflicts, check out the 'update-currencies' branch locally."
exit 1
fi
else
echo "No existing remote branch 'update-currencies'. Skipping rebase."
fi
# Push the new branch to the remote
if ! git push --set-upstream origin update-currencies; then
echo "Push failed, trying to fetch and rebase again."
git fetch origin
if git show-ref --verify --quiet refs/remotes/origin/update-currencies; then
if ! git rebase origin/update-currencies; then
echo "Second rebase failed. Please resolve manually."
exit 1
fi
else
echo "No existing remote branch 'update-currencies'. Skipping rebase."
fi
if ! git push --set-upstream origin update-currencies; then
echo "Second push failed. Please resolve manually."
exit 1
fi
fi
# Create a pull request
curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
-X POST \
-d '{"title": "Update currencies", "head": "update-currencies", "base": "main"}' \
https://api.github.com/repos/${{ github.repository }}/pulls
- name: Notify no changes
if: env.changes == 'false'
run: echo "Currencies up-to-date with API, skipping commit."
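
The two variants above detect pending changes differently: the older script checks `git status --porcelain`, the newer one the exit code of `git diff --quiet -- <path>` (0 means clean, 1 means differences). A Go sketch of the `git diff --quiet` check, assuming `git` is on PATH and the process runs inside the repository:

```go
package main

import (
	"errors"
	"fmt"
	"os/exec"
)

// hasChanges reports whether the working tree differs from HEAD for path,
// interpreting `git diff --quiet` exit codes: 0 = clean, 1 = changes, else error.
func hasChanges(path string) (bool, error) {
	err := exec.Command("git", "diff", "--quiet", "--", path).Run()
	if err == nil {
		return false, nil
	}
	var exitErr *exec.ExitError
	if errors.As(err, &exitErr) && exitErr.ExitCode() == 1 {
		return true, nil
	}
	return false, err
}

func main() {
	changed, err := hasChanges("backend/internal/core/currencies/currencies.json")
	if err != nil {
		panic(err)
	}
	fmt.Println("changed:", changed)
}
```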

View File

@@ -1 +0,0 @@
requests

View File

@@ -1,70 +0,0 @@
name: Update Language Names
on:
push:
branches: [ main ]
paths:
- 'frontend/locales/*.json'
- '.github/scripts/update_language_names.py'
- '.github/workflows/update-language-names.yml'
workflow_dispatch:
permissions:
contents: write
pull-requests: write
jobs:
update-language-names:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548
with:
python-version: '3.8'
cache: 'pip'
cache-dependency-path: .github/workflows/update-languages/requirements.txt
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r .github/workflows/update-languages/requirements.txt
- name: Run language names update script
run: python .github/scripts/update_language_names.py
- name: Check for en.json changes
run: |
if git diff --quiet -- frontend/locales/en.json; then
echo "changed=false" >> $GITHUB_ENV
else
echo "changed=true" >> $GITHUB_ENV
fi
- name: Create Pull Request
if: env.changed == 'true'
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725
with:
token: ${{ secrets.GITHUB_TOKEN }}
branch: automation/update-language-file
base: main
title: "Update language names in en.json"
commit-message: "chore: update language names in en.json"
body: |
This PR automatically updates the language names in `frontend/locales/en.json` based on the available locale files.
New languages have been added to ensure all locale files have corresponding language names in the English translation file.
🤖 This PR was automatically created by the update-language-names workflow.
path: .
add-paths: |
frontend/locales/en.json
- name: No updates needed
if: env.changed == 'false'
run: echo "✅ en.json language names are already up-to-date"

View File

@@ -1,2 +0,0 @@
babel
requests

View File

@@ -1,177 +0,0 @@
#name: HomeBox Upgrade Test
# on:
# schedule:
# Run daily at 2 AM UTC
# - cron: '0 2 * * *'
# workflow_dispatch: # Allow manual trigger
# push:
# branches:
# - main
# paths:
# - '.github/workflows/upgrade-test.yaml'
# - '.github/scripts/upgrade-test/**'
jobs:
upgrade-test:
name: Test Upgrade Path
runs-on: ubuntu-latest
timeout-minutes: 60
permissions:
contents: read # Read repository contents
packages: read # Pull Docker images from GHCR
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: lts/*
- name: Install pnpm
uses: pnpm/action-setup@v3.0.0
with:
version: 9.12.2
- name: Install Playwright
run: |
cd frontend
pnpm install
pnpm exec playwright install --with-deps chromium
- name: Create test data directory
run: |
mkdir -p /tmp/homebox-data-old
mkdir -p /tmp/homebox-data-new
chmod -R 777 /tmp/homebox-data-old
chmod -R 777 /tmp/homebox-data-new
# Step 1: Pull and deploy latest stable version
- name: Pull latest stable HomeBox image
run: |
docker pull ghcr.io/sysadminsmedia/homebox:latest
- name: Start HomeBox (stable version)
run: |
docker run -d \
--name homebox-old \
--restart unless-stopped \
-p 7745:7745 \
-e HBOX_LOG_LEVEL=debug \
-e HBOX_OPTIONS_ALLOW_REGISTRATION=true \
-e TZ=UTC \
-v /tmp/homebox-data-old:/data \
ghcr.io/sysadminsmedia/homebox:latest
# Wait for the service to be ready
timeout 60 bash -c 'until curl -f http://localhost:7745/api/v1/status; do sleep 2; done'
echo "HomeBox stable version is ready"
# Step 2: Create test data
- name: Create test data
run: |
chmod +x .github/scripts/upgrade-test/create-test-data.sh
.github/scripts/upgrade-test/create-test-data.sh
env:
HOMEBOX_URL: http://localhost:7745
- name: Verify initial data creation
run: |
echo "Verifying test data was created..."
# Check if database file exists and has content
if [ -f /tmp/homebox-data-old/homebox.db ]; then
ls -lh /tmp/homebox-data-old/homebox.db
echo "Database file exists"
else
echo "Database file not found!"
exit 1
fi
- name: Stop old HomeBox instance
run: |
docker stop homebox-old
docker rm homebox-old
# Step 3: Build latest version from main branch
- name: Build HomeBox from main branch
run: |
docker build \
--build-arg VERSION=main \
--build-arg COMMIT=${{ github.sha }} \
--build-arg BUILD_TIME="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
-t homebox:test \
-f Dockerfile \
.
# Step 4: Copy data and start new version
- name: Copy data to new location
run: |
cp -r /tmp/homebox-data-old/* /tmp/homebox-data-new/
chmod -R 777 /tmp/homebox-data-new
- name: Start HomeBox (new version)
run: |
docker run -d \
--name homebox-new \
--restart unless-stopped \
-p 7745:7745 \
-e HBOX_LOG_LEVEL=debug \
-e HBOX_OPTIONS_ALLOW_REGISTRATION=true \
-e TZ=UTC \
-v /tmp/homebox-data-new:/data \
homebox:test
# Wait for the service to be ready
timeout 60 bash -c 'until curl -f http://localhost:7745/api/v1/status; do sleep 2; done'
echo "HomeBox new version is ready"
# Step 5: Run verification tests with Playwright
- name: Run verification tests
run: |
cd frontend
TEST_DATA_FILE=/tmp/test-users.json \
E2E_BASE_URL=http://localhost:7745 \
pnpm exec playwright test \
-c ./test/playwright.config.ts \
--project=chromium \
test/upgrade/upgrade-verification.spec.ts
env:
HOMEBOX_URL: http://localhost:7745
- name: Upload Playwright report
uses: actions/upload-artifact@v4
if: always()
with:
name: playwright-report-upgrade-test
path: frontend/playwright-report/
retention-days: 30
- name: Upload test traces
uses: actions/upload-artifact@v4
if: always()
with:
name: playwright-traces
path: frontend/test-results/
retention-days: 7
- name: Collect logs on failure
if: failure()
run: |
echo "=== Docker logs for new version ==="
docker logs homebox-new || true
echo "=== Database content ==="
ls -la /tmp/homebox-data-new/ || true
- name: Cleanup
if: always()
run: |
docker stop homebox-new || true
docker rm homebox-new || true
docker rmi homebox:test || true
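
Both container starts above are gated on the same readiness loop: poll `/api/v1/status` every two seconds until it responds, giving up after a minute. A similar wait written in Go (URL and timings taken from the workflow, the rest illustrative):

```go
package main

import (
	"fmt"
	"net/http"
	"os"
	"time"
)

// waitReady polls url until it returns a 2xx status or the deadline passes.
func waitReady(url string, deadline time.Duration) error {
	client := &http.Client{Timeout: 3 * time.Second}
	stop := time.Now().Add(deadline)
	for time.Now().Before(stop) {
		resp, err := client.Get(url)
		if err == nil {
			resp.Body.Close()
			if resp.StatusCode/100 == 2 {
				return nil
			}
		}
		time.Sleep(2 * time.Second)
	}
	return fmt.Errorf("service at %s not ready after %s", url, deadline)
}

func main() {
	if err := waitReady("http://localhost:7745/api/v1/status", 60*time.Second); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("HomeBox is ready")
}
```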

16
.gitignore vendored
View File

@@ -2,9 +2,6 @@
backend/.data/*
config.yml
homebox.db
homebox.db-journal
homebox.db-shm
homebox.db-wal
.idea
.DS_Store
test-mailer.json
@@ -36,7 +33,7 @@ go.work
backend/.env
build/*
# Output Directory for Nuxt/Frontend during build steps
# Output Directory for Nuxt/Frontend during build step
backend/app/api/public/*
!backend/app/api/public/.gitkeep
@@ -59,13 +56,4 @@ backend/app/api/static/public/*
!backend/app/api/static/public/.gitkeep
backend/api
docs/.vitepress/cache/
.data/
# Playwright
frontend/test-results/
frontend/playwright-report/
frontend/blob-report/
frontend/playwright/.cache/
__pycache__/
*.pyc
docs/.vitepress/cache/

View File

@@ -1,603 +0,0 @@
include:
- template: Jobs/SAST.gitlab-ci.yml
- component: $CI_SERVER_FQDN/components/code-quality-oss/codequality-os-scanners-integration/codequality-oss@1.1.4
- component: $CI_SERVER_FQDN/components/code-intelligence/golang-code-intel@v0.3.1
- component: $CI_SERVER_FQDN/components/code-intelligence/typescript-code-intel@v0.3.1
inputs:
node_version: 24
- component: $CI_SERVER_FQDN/components/secret-detection/secret-detection@2.1.0
variables:
GITLAB_ADVANCED_SAST_ENABLED: 'true'
ADVANCED_SAST_PARTIAL_SCAN: 'differential'
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: "/certs"
# Registry configuration - adjust as needed
CI_REGISTRY_IMAGE: $CI_REGISTRY/$CI_PROJECT_PATH
stages:
- test
- build-binaries
- build-docker
- release
# ==========================================
# Test Jobs
# ==========================================
# Backend Tests (Go)
test:backend:
stage: test
image: golang:1.24
cache:
key:
files:
- backend/go.sum
paths:
- backend/.go-pkg-cache/
policy: pull-push
before_script:
- export GOMODCACHE=$(pwd)/backend/.go-pkg-cache
# Install Task
- sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b /usr/local/bin
script:
- cd backend
- task go:lint
- task go:build
- task go:coverage
coverage: '/coverage: \d+.\d+% of statements/'
artifacts:
reports:
coverage_report:
coverage_format: cobertura
path: backend/coverage.out
paths:
- backend/coverage.out
expire_in: 7 days
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
# Frontend Lint and Typecheck
test:frontend:lint:
stage: test
image: node:22-alpine
cache:
key:
files:
- frontend/pnpm-lock.yaml
paths:
- frontend/node_modules/
- .pnpm-store/
policy: pull-push
before_script:
- npm install -g pnpm@9.15.3
- pnpm config set store-dir $(pwd)/.pnpm-store
script:
- cd frontend
- pnpm install --frozen-lockfile
- pnpm run lint:ci
- pnpm run typecheck
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
# Frontend Integration Tests (SQLite)
test:frontend:integration:
stage: test
image: node:22
cache:
- key:
files:
- frontend/pnpm-lock.yaml
paths:
- frontend/node_modules/
- .pnpm-store/
policy: pull-push
- key:
files:
- backend/go.sum
paths:
- backend/.go-pkg-cache/
policy: pull-push
before_script:
- npm install -g pnpm@9.15.3
- pnpm config set store-dir $(pwd)/.pnpm-store
# Install Task
- sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b /usr/local/bin
# Install Go
- wget -q https://go.dev/dl/go1.24.0.linux-amd64.tar.gz
- tar -C /usr/local -xzf go1.24.0.linux-amd64.tar.gz
- export PATH=$PATH:/usr/local/go/bin
- export GOMODCACHE=$(pwd)/backend/.go-pkg-cache
script:
- cd frontend
- pnpm install --frozen-lockfile
- cd ..
- task test:ci
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
# Frontend Integration Tests (PostgreSQL Matrix)
test:frontend:integration:postgresql:
stage: test
image: node:22
services:
- name: postgres:${POSTGRES_VERSION}
alias: postgres
variables:
POSTGRES_USER: homebox
POSTGRES_PASSWORD: homebox
POSTGRES_DB: homebox
POSTGRES_HOST_AUTH_METHOD: trust
parallel:
matrix:
- POSTGRES_VERSION: ["17", "16", "15"]
cache:
- key:
files:
- frontend/pnpm-lock.yaml
paths:
- frontend/node_modules/
- .pnpm-store/
policy: pull-push
- key:
files:
- backend/go.sum
paths:
- backend/.go-pkg-cache/
policy: pull-push
before_script:
- npm install -g pnpm@9.15.3
- pnpm config set store-dir $(pwd)/.pnpm-store
# Install Task
- sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b /usr/local/bin
# Install Go
- wget -q https://go.dev/dl/go1.24.0.linux-amd64.tar.gz
- tar -C /usr/local -xzf go1.24.0.linux-amd64.tar.gz
- export PATH=$PATH:/usr/local/go/bin
- export GOMODCACHE=$(pwd)/backend/.go-pkg-cache
script:
- cd frontend
- pnpm install --frozen-lockfile
- cd ..
- task test:ci:postgresql
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
# E2E Tests (Playwright) - Sharded
test:e2e:playwright:
stage: test
image: mcr.microsoft.com/playwright:v1.48.2-jammy
timeout: 1h
parallel:
matrix:
- SHARD_INDEX: ["1", "2", "3", "4"]
SHARD_TOTAL: "4"
cache:
- key:
files:
- frontend/pnpm-lock.yaml
paths:
- frontend/node_modules/
- .pnpm-store/
policy: pull-push
- key:
files:
- backend/go.sum
paths:
- backend/.go-pkg-cache/
policy: pull-push
before_script:
- npm install -g pnpm@9.15.3
- pnpm config set store-dir $(pwd)/.pnpm-store
# Install Task
- sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b /usr/local/bin
# Install Go
- wget -q https://go.dev/dl/go1.24.0.linux-amd64.tar.gz
- tar -C /usr/local -xzf go1.24.0.linux-amd64.tar.gz
- export PATH=$PATH:/usr/local/go/bin
- export GOMODCACHE=$(pwd)/backend/.go-pkg-cache
script:
- cd frontend
- pnpm install --frozen-lockfile
- cd ..
- cd backend && go mod download
- task test:e2e -- --shard=$SHARD_INDEX/$SHARD_TOTAL
artifacts:
when: always
paths:
- frontend/blob-report/
expire_in: 2 days
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
# E2E Reports Merge
test:e2e:merge-reports:
stage: test
image: mcr.microsoft.com/playwright:v1.48.2-jammy
needs:
- test:e2e:playwright
cache:
key:
files:
- frontend/pnpm-lock.yaml
paths:
- frontend/node_modules/
- .pnpm-store/
policy: pull
before_script:
- npm install -g pnpm@9.15.3
- pnpm config set store-dir $(pwd)/.pnpm-store
script:
- cd frontend
- pnpm install --frozen-lockfile
# Download all blob reports
- mkdir -p all-blob-reports
# GitLab automatically downloads artifacts from dependencies
- cp -r ../frontend/blob-report/* all-blob-reports/ || true
- pnpm exec playwright merge-reports --reporter html,github ./all-blob-reports || true
artifacts:
when: always
paths:
- frontend/playwright-report/
expire_in: 30 days
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
when: always
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
when: always
# Update Currencies (Scheduled Job)
update:currencies:
stage: test
image: python:3.11
cache:
key: python-currencies
paths:
- .pip-cache/
before_script:
- pip install --cache-dir .pip-cache -r .github/workflows/update-currencies/requirements.txt
script:
- python .github/scripts/update_currencies.py
- |
if git diff --quiet -- backend/internal/core/currencies/currencies.json; then
echo "✅ currencies.json is already up-to-date"
exit 0
else
echo "Changes detected in currencies.json"
git config user.name "GitLab CI"
git config user.email "ci@gitlab.com"
git checkout -b update-currencies-$CI_COMMIT_SHORT_SHA
git add backend/internal/core/currencies/currencies.json
git commit -m "chore: update currencies.json"
git push -o merge_request.create -o merge_request.target=$CI_DEFAULT_BRANCH -o merge_request.title="Update currencies.json" origin update-currencies-$CI_COMMIT_SHORT_SHA
fi
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $UPDATE_CURRENCIES == "true"
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $UPDATE_CURRENCIES == "true"
# ==========================================
# Binary Build with GoReleaser
# ==========================================
build:binaries:
stage: build-binaries
image: golang:1.24
cache:
- key:
files:
- frontend/pnpm-lock.yaml
paths:
- frontend/node_modules/
- .pnpm-store/
policy: pull-push
- key:
files:
- backend/go.sum
paths:
- backend/.go-pkg-cache/
policy: pull-push
before_script:
# Install Node.js and pnpm for frontend build
- curl -fsSL https://deb.nodesource.com/setup_22.x | bash -
- apt-get install -y nodejs
- npm install -g pnpm@9.15.3
# Configure pnpm store
- pnpm config set store-dir $(pwd)/.pnpm-store
# Install GoReleaser
- curl -sfL https://goreleaser.com/static/run | bash -s -- check
- curl -sfL https://goreleaser.com/static/run | bash -s -- --version
# Configure Go cache
- export GOMODCACHE=$(pwd)/backend/.go-pkg-cache
script:
# Build frontend
- cd frontend
- pnpm install --frozen-lockfile
- pnpm run build
- cp -r ./.output/public ../backend/app/api/static/
- cd ..
# Run GoReleaser
- cd backend
- |
if [ -n "$CI_COMMIT_TAG" ]; then
echo "Building release for tag: $CI_COMMIT_TAG"
curl -sfL https://goreleaser.com/static/run | bash -s -- release --clean --skip=publish
else
echo "Building snapshot"
curl -sfL https://goreleaser.com/static/run | bash -s -- release --clean --snapshot --skip=publish
fi
artifacts:
name: "homebox-binaries-$CI_COMMIT_REF_SLUG"
paths:
- backend/dist/
expire_in: 30 days
rules:
- if: $CI_COMMIT_TAG
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
# ==========================================
# Docker Build Jobs - Regular
# ==========================================
.docker_build_template:
stage: build-docker
image: docker:latest
services:
- docker:dind
before_script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
variables:
DOCKER_BUILDKIT: 1
DOCKERFILE: Dockerfile
IMAGE_SUFFIX: ""
script:
- export VERSION=${CI_COMMIT_TAG:-$CI_COMMIT_REF_NAME}
- export COMMIT=$CI_COMMIT_SHA
- export BUILD_TIME=$(date -u +%Y-%m-%dT%H:%M:%SZ)
- export CACHE_TAG=${IMAGE_SUFFIX:-regular}
# Build and push for the specific platform with layer caching
- |
docker buildx create --use --name builder-${CI_JOB_ID} || true
docker buildx build \
--platform $PLATFORM \
--build-arg VERSION=$VERSION \
--build-arg COMMIT=$COMMIT \
--build-arg BUILD_TIME=$BUILD_TIME \
--cache-from type=registry,ref=$CI_REGISTRY_IMAGE/cache:${PLATFORM_PAIR}-${CACHE_TAG}-$CI_COMMIT_REF_SLUG \
--cache-from type=registry,ref=$CI_REGISTRY_IMAGE/cache:${PLATFORM_PAIR}-${CACHE_TAG}-$CI_DEFAULT_BRANCH \
--cache-to type=registry,ref=$CI_REGISTRY_IMAGE/cache:${PLATFORM_PAIR}-${CACHE_TAG}-$CI_COMMIT_REF_SLUG,mode=max \
--file ./$DOCKERFILE \
--tag $CI_REGISTRY_IMAGE${IMAGE_SUFFIX}:${CI_COMMIT_REF_SLUG}-${PLATFORM_PAIR} \
--push \
.
docker buildx rm builder-${CI_JOB_ID} || true
rules:
- if: $CI_COMMIT_TAG
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
docker:build:amd64:
extends: .docker_build_template
variables:
PLATFORM: linux/amd64
PLATFORM_PAIR: linux-amd64
docker:build:arm64:
extends: .docker_build_template
variables:
PLATFORM: linux/arm64
PLATFORM_PAIR: linux-arm64
docker:build:armv7:
extends: .docker_build_template
variables:
PLATFORM: linux/arm/v7
PLATFORM_PAIR: linux-arm-v7
# ==========================================
# Docker Build Jobs - Rootless
# ==========================================
docker:build:rootless:amd64:
extends: .docker_build_template
variables:
PLATFORM: linux/amd64
PLATFORM_PAIR: linux-amd64
DOCKERFILE: Dockerfile.rootless
IMAGE_SUFFIX: -rootless
docker:build:rootless:arm64:
extends: .docker_build_template
variables:
PLATFORM: linux/arm64
PLATFORM_PAIR: linux-arm64
DOCKERFILE: Dockerfile.rootless
IMAGE_SUFFIX: -rootless
docker:build:rootless:armv7:
extends: .docker_build_template
variables:
PLATFORM: linux/arm/v7
PLATFORM_PAIR: linux-arm-v7
DOCKERFILE: Dockerfile.rootless
IMAGE_SUFFIX: -rootless
# ==========================================
# Docker Build Jobs - Hardened
# ==========================================
docker:build:hardened:amd64:
extends: .docker_build_template
variables:
PLATFORM: linux/amd64
PLATFORM_PAIR: linux-amd64
DOCKERFILE: Dockerfile.hardened
IMAGE_SUFFIX: -hardened
docker:build:hardened:arm64:
extends: .docker_build_template
variables:
PLATFORM: linux/arm64
PLATFORM_PAIR: linux-arm64
DOCKERFILE: Dockerfile.hardened
IMAGE_SUFFIX: -hardened
docker:build:hardened:armv7:
extends: .docker_build_template
variables:
PLATFORM: linux/arm/v7
PLATFORM_PAIR: linux-arm-v7
DOCKERFILE: Dockerfile.hardened
IMAGE_SUFFIX: -hardened
# ==========================================
# Docker Manifest Creation - Regular
# ==========================================
docker:manifest:
stage: release
image: docker:latest
services:
- docker:dind
before_script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
script:
- export VERSION=${CI_COMMIT_TAG:-$CI_COMMIT_REF_NAME}
# Create manifest for regular image
- |
docker manifest create $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG \
$CI_REGISTRY_IMAGE:${CI_COMMIT_REF_SLUG}-linux-amd64 \
$CI_REGISTRY_IMAGE:${CI_COMMIT_REF_SLUG}-linux-arm64 \
$CI_REGISTRY_IMAGE:${CI_COMMIT_REF_SLUG}-linux-arm-v7
docker manifest push $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
# Tag as latest on main branch or create version tags
- |
if [ "$CI_COMMIT_BRANCH" = "$CI_DEFAULT_BRANCH" ]; then
docker manifest create $CI_REGISTRY_IMAGE:latest \
$CI_REGISTRY_IMAGE:${CI_COMMIT_REF_SLUG}-linux-amd64 \
$CI_REGISTRY_IMAGE:${CI_COMMIT_REF_SLUG}-linux-arm64 \
$CI_REGISTRY_IMAGE:${CI_COMMIT_REF_SLUG}-linux-arm-v7
docker manifest push $CI_REGISTRY_IMAGE:latest
fi
- |
if [ -n "$CI_COMMIT_TAG" ]; then
# Create version tag
docker manifest create $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG \
$CI_REGISTRY_IMAGE:${CI_COMMIT_REF_SLUG}-linux-amd64 \
$CI_REGISTRY_IMAGE:${CI_COMMIT_REF_SLUG}-linux-arm64 \
$CI_REGISTRY_IMAGE:${CI_COMMIT_REF_SLUG}-linux-arm-v7
docker manifest push $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG
# Create major.minor tag if semantic version
MAJOR_MINOR=$(echo $CI_COMMIT_TAG | sed -E 's/^v?([0-9]+\.[0-9]+)\..*/\1/')
if [ -n "$MAJOR_MINOR" ]; then
docker manifest create $CI_REGISTRY_IMAGE:$MAJOR_MINOR \
$CI_REGISTRY_IMAGE:${CI_COMMIT_REF_SLUG}-linux-amd64 \
$CI_REGISTRY_IMAGE:${CI_COMMIT_REF_SLUG}-linux-arm64 \
$CI_REGISTRY_IMAGE:${CI_COMMIT_REF_SLUG}-linux-arm-v7
docker manifest push $CI_REGISTRY_IMAGE:$MAJOR_MINOR
fi
fi
needs:
- docker:build:amd64
- docker:build:arm64
- docker:build:armv7
rules:
- if: $CI_COMMIT_TAG
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
# ==========================================
# Docker Manifest Creation - Rootless
# ==========================================
docker:manifest:rootless:
stage: release
image: docker:latest
services:
- docker:dind
before_script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
script:
- export VERSION=${CI_COMMIT_TAG:-$CI_COMMIT_REF_NAME}
# Create manifest for rootless image
- |
docker manifest create $CI_REGISTRY_IMAGE-rootless:$CI_COMMIT_REF_SLUG \
$CI_REGISTRY_IMAGE-rootless:${CI_COMMIT_REF_SLUG}-linux-amd64 \
$CI_REGISTRY_IMAGE-rootless:${CI_COMMIT_REF_SLUG}-linux-arm64
docker manifest push $CI_REGISTRY_IMAGE-rootless:$CI_COMMIT_REF_SLUG
# Tag as latest on main branch or create version tags
- |
if [ "$CI_COMMIT_BRANCH" = "$CI_DEFAULT_BRANCH" ]; then
docker manifest create $CI_REGISTRY_IMAGE-rootless:latest \
$CI_REGISTRY_IMAGE-rootless:${CI_COMMIT_REF_SLUG}-linux-amd64 \
$CI_REGISTRY_IMAGE-rootless:${CI_COMMIT_REF_SLUG}-linux-arm64
docker manifest push $CI_REGISTRY_IMAGE-rootless:latest
fi
- |
if [ -n "$CI_COMMIT_TAG" ]; then
docker manifest create $CI_REGISTRY_IMAGE-rootless:$CI_COMMIT_TAG \
$CI_REGISTRY_IMAGE-rootless:${CI_COMMIT_REF_SLUG}-linux-amd64 \
$CI_REGISTRY_IMAGE-rootless:${CI_COMMIT_REF_SLUG}-linux-arm64
docker manifest push $CI_REGISTRY_IMAGE-rootless:$CI_COMMIT_TAG
MAJOR_MINOR=$(echo $CI_COMMIT_TAG | sed -E 's/^v?([0-9]+\.[0-9]+)\..*/\1/')
if [ -n "$MAJOR_MINOR" ]; then
docker manifest create $CI_REGISTRY_IMAGE-rootless:$MAJOR_MINOR \
$CI_REGISTRY_IMAGE-rootless:${CI_COMMIT_REF_SLUG}-linux-amd64 \
$CI_REGISTRY_IMAGE-rootless:${CI_COMMIT_REF_SLUG}-linux-arm64
docker manifest push $CI_REGISTRY_IMAGE-rootless:$MAJOR_MINOR
fi
fi
needs:
- docker:build:rootless:amd64
- docker:build:rootless:arm64
rules:
- if: $CI_COMMIT_TAG
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
# ==========================================
# Docker Manifest Creation - Hardened
# ==========================================
docker:manifest:hardened:
stage: release
image: docker:latest
services:
- docker:dind
before_script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
script:
- export VERSION=${CI_COMMIT_TAG:-$CI_COMMIT_REF_NAME}
# Create manifest for hardened image
- |
docker manifest create $CI_REGISTRY_IMAGE-hardened:$CI_COMMIT_REF_SLUG \
$CI_REGISTRY_IMAGE-hardened:${CI_COMMIT_REF_SLUG}-linux-amd64 \
$CI_REGISTRY_IMAGE-hardened:${CI_COMMIT_REF_SLUG}-linux-arm64
docker manifest push $CI_REGISTRY_IMAGE-hardened:$CI_COMMIT_REF_SLUG
# Tag as latest on main branch or create version tags
- |
if [ "$CI_COMMIT_BRANCH" = "$CI_DEFAULT_BRANCH" ]; then
docker manifest create $CI_REGISTRY_IMAGE-hardened:latest \
$CI_REGISTRY_IMAGE-hardened:${CI_COMMIT_REF_SLUG}-linux-amd64 \
$CI_REGISTRY_IMAGE-hardened:${CI_COMMIT_REF_SLUG}-linux-arm64
docker manifest push $CI_REGISTRY_IMAGE-hardened:latest
fi
- |
if [ -n "$CI_COMMIT_TAG" ]; then
docker manifest create $CI_REGISTRY_IMAGE-hardened:$CI_COMMIT_TAG \
$CI_REGISTRY_IMAGE-hardened:${CI_COMMIT_REF_SLUG}-linux-amd64 \
$CI_REGISTRY_IMAGE-hardened:${CI_COMMIT_REF_SLUG}-linux-arm64
docker manifest push $CI_REGISTRY_IMAGE-hardened:$CI_COMMIT_TAG
MAJOR_MINOR=$(echo $CI_COMMIT_TAG | sed -E 's/^v?([0-9]+\.[0-9]+)\..*/\1/')
if [ -n "$MAJOR_MINOR" ]; then
docker manifest create $CI_REGISTRY_IMAGE-hardened:$MAJOR_MINOR \
$CI_REGISTRY_IMAGE-hardened:${CI_COMMIT_REF_SLUG}-linux-amd64 \
$CI_REGISTRY_IMAGE-hardened:${CI_COMMIT_REF_SLUG}-linux-arm64
docker manifest push $CI_REGISTRY_IMAGE-hardened:$MAJOR_MINOR
fi
fi
needs:
- docker:build:hardened:amd64
- docker:build:hardened:arm64
rules:
- if: $CI_COMMIT_TAG
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
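
The manifest jobs above derive a `major.minor` tag from the release tag with `sed -E 's/^v?([0-9]+\.[0-9]+)\..*/\1/'`. The same extraction in Go, with sample tags for illustration (the sed version echoes the input unchanged when the tag is not semver, whereas this sketch reports no match):

```go
package main

import (
	"fmt"
	"regexp"
)

// semverRe mirrors the sed expression: an optional leading "v", then
// major.minor captured, then a dot and the rest of the version.
var semverRe = regexp.MustCompile(`^v?(\d+\.\d+)\..*`)

// majorMinor extracts "1.2" from tags like "v1.2.3" or "1.2.3".
func majorMinor(tag string) (string, bool) {
	m := semverRe.FindStringSubmatch(tag)
	if m == nil {
		return "", false
	}
	return m[1], true
}

func main() {
	for _, tag := range []string{"v0.15.2", "1.2.3", "nightly"} {
		mm, ok := majorMinor(tag)
		fmt.Printf("%s -> %q (matched: %v)\n", tag, mm, ok)
	}
}
```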

View File

@@ -1,14 +0,0 @@
entgo.io/ent v0.14.5 h1:Rj2WOYJtCkWyFo6a+5wB3EfBRP0rnx1fMk6gGA0UUe4=
entgo.io/ent v0.14.5/go.mod h1:zTzLmWtPvGpmSwtkaayM2cm5m819NdM7z7tYPq3vN0U=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/sysadminsmedia/homebox/backend v0.0.0-20251228172914-2a6773d1d610 h1:kNLtnxaPaOryBUZ7RgUHPQVWxIExXYR/q9pYCbum5Vk=
github.com/sysadminsmedia/homebox/backend v0.0.0-20251228172914-2a6773d1d610/go.mod h1:9zHHw5TNttw5Kn4Wks+SxwXmJPz6PgGNbnB4BtF1Z4c=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

8
.vscode/launch.json vendored
View File

@@ -16,12 +16,14 @@
"type": "go",
"request": "launch",
"mode": "debug",
"program": "${workspaceFolder}/backend/app/api/",
"program": "${workspaceRoot}/backend/app/api/",
"args": [],
"env": {
"HBOX_DEMO": "true",
"HBOX_LOG_LEVEL": "debug",
"HBOX_DEBUG_ENABLED": "true"
"HBOX_DEBUG_ENABLED": "true",
"HBOX_STORAGE_DATA": "${workspaceRoot}/backend/.data",
"HBOX_STORAGE_SQLITE_URL": "${workspaceRoot}/backend/.data/homebox.db?_fk=1"
},
"console": "integratedTerminal",
},
@@ -44,4 +46,4 @@
"console": "integratedTerminal",
}
]
}
}

View File

@@ -4,7 +4,7 @@
},
"explorer.fileNesting.enabled": true,
"explorer.fileNesting.patterns": {
"package.json": "package-lock.json, yarn.lock, eslint.config.mjs, tsconfig.json, .prettierrc, .editorconfig, pnpm-lock.yaml, postcss.config.js, tailwind.config.js",
"package.json": "package-lock.json, yarn.lock, .eslintrc.js, tsconfig.json, .prettierrc, .editorconfig, pnpm-lock.yaml, postcss.config.js, tailwind.config.js",
"docker-compose.yml": "Dockerfile, .dockerignore, docker-compose.dev.yml, docker-compose.yml",
"README.md": "LICENSE, SECURITY.md"
},
@@ -22,8 +22,6 @@
"editor.defaultFormatter": "dbaeumer.vscode-eslint"
},
"eslint.format.enable": true,
"eslint.validate": ["javascript", "typescript", "vue"],
"eslint.useFlatConfig": true,
"css.validate": false,
"tailwindCSS.includeLanguages": {
"vue": "html",
@@ -32,8 +30,5 @@
"editor.quickSuggestions": {
"strings": true
},
"tailwindCSS.experimental.configFile": "./frontend/tailwind.config.js",
"[go]": {
"editor.defaultFormatter": "golang.go"
},
"tailwindCSS.experimental.configFile": "./frontend/tailwind.config.js"
}

View File

@@ -60,7 +60,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
[support@sysadminsmedia.com](mailto:support@sysadminsmedia.com).
[support@sysadminemedia.com](mailto:support@sysadminemedia.com).
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the

View File

@@ -33,6 +33,8 @@ If you're using `taskfile` you can run `task --list-all` for a list of all comma
If you're using the taskfile, you can use the `task setup` command to run the required setup commands. Otherwise, you can review the commands required in the `Taskfile.yml` file.
Note that when installing dependencies with pnpm you must use the `--shamefully-hoist` flag. If you don't use this flag, you will get an error when running the frontend server.
### API Development Notes
start command `task go:run`
@@ -42,9 +44,9 @@ start command `task go:run`
### Frontend Development Notes
start command `task ui:dev`
start command `task: ui:dev`
1. The frontend is a Vue 3 app with Nuxt.js that uses Tailwind and Shadcn-vue for styling.
1. The frontend is a Vue 3 app with Nuxt.js that uses Tailwind and DaisyUI for styling.
2. We're using Vitest for our automated testing. You can run these with `task ui:watch`.
3. Tests require the API server to be running, and in some cases the first run will fail due to a race condition. If this happens, just run the tests again and they should pass.
@@ -52,4 +54,4 @@ start command `task ui:dev`
Create a new tag in GitHub with the version number vX.X.X. This will trigger a new release to be created.
Test -> Goreleaser -> Publish Release -> Trigger Docker Builds -> Deploy Docs + Fly.io Demo
Test -> Goreleaser -> Publish Release -> Trigger Docker Builds -> Deploy Docs + Fly.io Demo

View File

@@ -1,5 +1,5 @@
# Node dependencies stage
FROM public.ecr.aws/docker/library/node:22-alpine AS frontend-dependencies
FROM --platform=$TARGETPLATFORM node:18-alpine AS frontend-dependencies
WORKDIR /app
# Install pnpm globally (caching layer)
@@ -7,10 +7,10 @@ RUN npm install -g pnpm
# Copy package.json and lockfile to leverage caching
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile
RUN pnpm install --frozen-lockfile --shamefully-hoist
# Build Nuxt (frontend) stage
FROM public.ecr.aws/docker/library/node:22-alpine AS frontend-builder
FROM --platform=$TARGETPLATFORM node:18-alpine AS frontend-builder
WORKDIR /app
# Install pnpm globally again (it can reuse the cache if not changed)
@@ -22,7 +22,7 @@ COPY --from=frontend-dependencies /app/node_modules ./node_modules
RUN pnpm build
# Go dependencies stage
FROM public.ecr.aws/docker/library/golang:alpine AS builder-dependencies
FROM --platform=$TARGETPLATFORM golang:alpine AS builder-dependencies
WORKDIR /go/src/app
# Copy go.mod and go.sum for better caching
@@ -30,9 +30,7 @@ COPY ./backend/go.mod ./backend/go.sum ./
RUN go mod download
# Build API stage
FROM public.ecr.aws/docker/library/golang:alpine AS builder
ARG TARGETOS
ARG TARGETARCH
FROM --platform=$TARGETPLATFORM golang:alpine AS builder
ARG BUILD_TIME
ARG COMMIT
ARG VERSION
@@ -40,8 +38,7 @@ ARG VERSION
# Install necessary build tools
RUN apk update && \
apk upgrade && \
apk add --no-cache git build-base gcc g++ && \
if [ "$TARGETARCH" != "arm" ] || [ "$TARGETARCH" != "riscv64" ]; then apk --no-cache add libwebp libavif libheif libjxl; fi
apk add --no-cache git build-base gcc g++
WORKDIR /go/src/app
@@ -55,26 +52,19 @@ COPY --from=frontend-builder /app/.output/public ./app/api/static/public
# Use cache for Go build artifacts
RUN --mount=type=cache,target=/root/.cache/go-build \
if [ "$TARGETARCH" = "arm" ] || [ "$TARGETARCH" = "riscv64" ]; \
then echo "nodynamic" $TARGETOS $TARGETARCH; CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH go build \
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
-tags nodynamic -o /go/bin/api -v ./app/api/*.go; \
else \
echo $TARGETOS $TARGETARCH; CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH go build \
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
-o /go/bin/api -v ./app/api/*.go; \
fi
CGO_ENABLED=0 GOOS=linux go build \
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
-o /go/bin/api \
-v ./app/api/*.go
# Production stage
FROM public.ecr.aws/docker/library/alpine:latest
FROM --platform=$TARGETPLATFORM alpine:latest
ENV HBOX_MODE=production
ENV HBOX_STORAGE_CONN_STRING=file:///?no_tmp_dir=true
ENV HBOX_STORAGE_PREFIX_PATH=data
ENV HBOX_DATABASE_SQLITE_PATH=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite
ENV HBOX_STORAGE_DATA=/data/
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1
# Install necessary runtime dependencies
RUN apk --no-cache add ca-certificates wget && \
if [ "$TARGETARCH" != "arm" ] || [ "$TARGETARCH" != "riscv64" ]; then apk --no-cache add libwebp libavif libheif libjxl; fi
RUN apk --no-cache add ca-certificates wget
# Create application directory and copy over built Go binary
RUN mkdir /app
@@ -85,7 +75,7 @@ RUN chmod +x /app/api
LABEL Name=homebox Version=0.0.1
LABEL org.opencontainers.image.source="https://github.com/sysadminsmedia/homebox"
# Expose necessary ports for Homebox
# Expose necessary ports
EXPOSE 7745
WORKDIR /app
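
The `-ldflags "-X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION"` flags in both build variants overwrite string variables in the API's `main` package at link time. A sketch of what the receiving side of such a build typically looks like; the default values and the print line are placeholders, not Homebox's actual code:

```go
package main

import "fmt"

// Overwritten at link time via
// -ldflags "-X main.version=... -X main.commit=... -X main.buildTime=...".
// The defaults below are placeholders for plain local builds.
var (
	version   = "dev"
	commit    = "unknown"
	buildTime = "unknown"
)

func main() {
	fmt.Printf("homebox %s (commit %s, built %s)\n", version, commit, buildTime)
}
```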

View File

@@ -1,136 +0,0 @@
# ---------------------------------------
# Node dependencies stage
# ---------------------------------------
FROM public.ecr.aws/docker/library/node:22-alpine AS frontend-dependencies
WORKDIR /app
# Install pnpm globally (caching layer)
RUN npm install -g pnpm
# Copy package.json and lockfile to leverage caching
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile
# ---------------------------------------
# Build Nuxt (frontend) stage
# ---------------------------------------
FROM public.ecr.aws/docker/library/node:22-alpine AS frontend-builder
WORKDIR /app
# Install pnpm globally again (it can reuse the cache if not changed)
RUN npm install -g pnpm
# Copy over source files and node_modules from dependencies stage
COPY frontend .
COPY --from=frontend-dependencies /app/node_modules ./node_modules
RUN pnpm build
# ---------------------------------------
# Go dependencies stage
# ---------------------------------------
FROM public.ecr.aws/docker/library/golang:alpine AS builder-dependencies
WORKDIR /go/src/app
# Copy go.mod and go.sum for better caching
COPY ./backend/go.mod ./backend/go.sum ./
RUN go mod download
# ---------------------------------------
# Build API + healthcheck stage
# ---------------------------------------
FROM public.ecr.aws/docker/library/golang:alpine AS builder
ARG TARGETOS
ARG TARGETARCH
ARG BUILD_TIME
ARG COMMIT
ARG VERSION
# Install necessary build tools
RUN apk update && \
apk upgrade && \
apk add --no-cache git build-base gcc g++
WORKDIR /go/src/app
# Copy Go modules (from dependencies stage) and source code
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
COPY ./backend .
# Clear old public files and copy new ones from frontend build
RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public
# Use cache for Go build artifacts to build Homebox API
RUN --mount=type=cache,target=/root/.cache/go-build \
CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH go build \
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
-tags nodynamic -o /go/bin/api -v ./app/api/*.go
RUN chmod +x /go/bin/api
RUN mkdir /app
RUN mkdir /data
# ---------- Build static healthcheck helper ----------
# A small Go program that GETs the status URL and exits 0 on 2xx.
RUN cat > /tmp/healthcheck.go <<'EOF'
package main
import (
"fmt"
"net/http"
"os"
"time"
)
func main() {
url := "http://127.0.0.1:7745/api/v1/status"
if len(os.Args) > 1 { url = os.Args[1] }
c := &http.Client{ Timeout: 3 * time.Second }
resp, err := c.Get(url)
if err != nil { fmt.Fprintln(os.Stderr, err); os.Exit(1) }
resp.Body.Close()
if resp.StatusCode/100 != 2 {
fmt.Fprintln(os.Stderr, "unexpected status:", resp.StatusCode)
os.Exit(1)
}
}
EOF
RUN --mount=type=cache,target=/root/.cache/go-build \
CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH \
go build -ldflags "-s -w" -o /go/bin/hc /tmp/healthcheck.go
# ---------------------------------------
# Production stage
# ---------------------------------------
FROM gcr.io/distroless/static:nonroot
ENV HBOX_MODE=production
ENV HBOX_STORAGE_CONN_STRING=file:///?no_tmp_dir=true
ENV HBOX_STORAGE_PREFIX_PATH=data
ENV HBOX_DATABASE_SQLITE_PATH=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite
# Create application directory and copy over built Go binary and assets
COPY --from=builder --chown=65532:65532 /app /app
COPY --from=builder --chown=65532:65532 --chmod=755 /go/bin/api /app
COPY --from=builder --chown=65532:65532 /data /data
# Copy the healthcheck helper
COPY --from=builder --chown=65532:65532 --chmod=755 /go/bin/hc /app/healthcheck
# Labels and configuration for the final image
LABEL Name=homebox Version=0.0.1
LABEL org.opencontainers.image.source="https://github.com/sysadminsmedia/homebox"
# Expose necessary ports for Homebox
EXPOSE 7745
WORKDIR /app
# Persist volume for data
VOLUME [ "/data" ]
# Entrypoint and CMD
USER 65532
ENTRYPOINT [ "/app/api" ]
CMD [ "/data/config.yml" ]
# JSON exec-form healthcheck (no shell, no wget)
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
CMD ["/app/healthcheck", "http://127.0.0.1:7745/api/v1/status"]

View File

@@ -1,108 +1,73 @@
# Node dependencies stage
FROM public.ecr.aws/docker/library/node:22-alpine AS frontend-dependencies
# Node dependencies
FROM node:18-alpine AS frontend-dependencies
WORKDIR /app
# Install pnpm globally (caching layer)
RUN npm install -g pnpm
# Copy package.json and lockfile to leverage caching
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile
RUN pnpm install --frozen-lockfile --shamefully-hoist
# Build Nuxt (frontend) stage
FROM public.ecr.aws/docker/library/node:22-alpine AS frontend-builder
# Build Nuxt
FROM node:18-alpine AS frontend-builder
WORKDIR /app
# Install pnpm globally again (it can reuse the cache if not changed)
RUN npm install -g pnpm
# Copy over source files and node_modules from dependencies stage
COPY frontend .
COPY frontend ./
COPY --from=frontend-dependencies /app/node_modules ./node_modules
RUN pnpm build
# Go dependencies stage
FROM public.ecr.aws/docker/library/golang:alpine AS builder-dependencies
# Build Go dependencies
FROM golang:alpine AS builder-dependencies
WORKDIR /go/src/app
# Copy go.mod and go.sum for better caching
COPY ./backend/go.mod ./backend/go.sum ./
RUN go mod download
# Build API stage
FROM public.ecr.aws/docker/library/golang:alpine AS builder
ARG TARGETOS
ARG TARGETARCH
# Build API
FROM golang:alpine AS builder
ARG BUILD_TIME
ARG COMMIT
ARG VERSION
# Install necessary build tools
RUN apk update && \
apk upgrade && \
apk add --no-cache git build-base gcc g++ && \
if [ "$TARGETARCH" != "arm" ] || [ "$TARGETARCH" != "riscv64" ]; then apk --no-cache add libwebp libavif libheif libjxl; fi
RUN apk update && apk upgrade && apk add --no-cache git build-base gcc g++
WORKDIR /go/src/app
# Copy Go modules (from dependencies stage) and source code
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
COPY ./backend .
# Clear old public files and copy new ones from frontend build
RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
# Use cache for Go build artifacts
# Use cache for Go build
RUN --mount=type=cache,target=/root/.cache/go-build \
if [ "$TARGETARCH" = "arm" ] || [ "$TARGETARCH" = "riscv64" ]; \
then echo "nodynamic" $TARGETOS $TARGETARCH; CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH go build \
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
-tags nodynamic -o /go/bin/api -v ./app/api/*.go; \
else \
echo $TARGETOS $TARGETARCH; CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH go build \
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
-o /go/bin/api -v ./app/api/*.go; \
fi
CGO_ENABLED=0 GOOS=linux go build \
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
-o /go/bin/api ./app/api/*.go
RUN mkdir /data
# Production stage with distroless
FROM gcr.io/distroless/static:latest
# Production stage
FROM public.ecr.aws/docker/library/alpine:latest
ENV HBOX_MODE=production
ENV HBOX_STORAGE_CONN_STRING=file:///?no_tmp_dir=true
ENV HBOX_STORAGE_PREFIX_PATH=data
ENV HBOX_DATABASE_SQLITE_PATH=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite
ENV HBOX_STORAGE_DATA=/data/
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_fk=1
# Install necessary runtime dependencies
RUN apk --no-cache add ca-certificates wget && \
if [ "$TARGETARCH" != "arm" ] || [ "$TARGETARCH" != "riscv64" ]; then apk --no-cache add libwebp libavif libheif libjxl; fi
# Create a nonroot user with UID/GID 65532
RUN addgroup -g 65532 nonroot && adduser -u 65532 -G nonroot -S nonroot
# Create application directory and copy over built Go binary
RUN mkdir /app
# Copy the binary and data directory, change ownership
COPY --from=builder --chown=nonroot /go/bin/api /app
COPY --from=builder --chown=nonroot /data /data
RUN chmod +x /app/api
# Labels and configuration for the final image
# Add wget to the image
# Note: If using distroless, this may not be applicable
# as distroless images do not include package managers.
# This line may be omitted if you're relying on another way to handle healthchecks.
COPY --from=alpine:latest /bin/wget /usr/bin/wget
LABEL Name=homebox Version=0.0.1
LABEL org.opencontainers.image.source="https://github.com/sysadminsmedia/homebox"
# Expose necessary ports for Homebox
EXPOSE 7745
WORKDIR /app
# Healthcheck configuration
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
CMD [ "wget", "--no-verbose", "--tries=1", "-O", "-", "http://localhost:7745/api/v1/status" ]
HEALTHCHECK --interval=30s \
--timeout=5s \
--start-period=5s \
--retries=3 \
CMD ["/usr/bin/wget", "--no-verbose", "--tries=1", "-O", "-", "http://localhost:7745/api/v1/status"]
# Persist volume for data
VOLUME [ "/data" ]
VOLUME ["/data"]
# Entrypoint and CMD
# Drop root and run as a low-privileged user
USER nonroot
ENTRYPOINT [ "/app/api" ]
CMD [ "/data/config.yml" ]
ENTRYPOINT ["/app"]
CMD ["/data/config.yml"]

View File

@@ -2,59 +2,32 @@
<img src="/docs/public/lilbox.svg" height="200"/>
</div>
<h1 align="center" style="margin-top: -10px;"> HomeBox </h1>
<p align="center" style="width: 100%;">
<h1 align="center" style="margin-top: -10px"> HomeBox </h1>
<p align="center" style="width: 100;">
<a href="https://homebox.software/en/">Docs</a>
|
<a href="https://demo.homebox.software">Demo</a>
|
<a href="https://discord.gg/aY4DCkpNA9">Discord</a>
</p>
<p align="center" style="width: 100%;">
<img src="https://img.shields.io/github/check-runs/sysadminsmedia/homebox/main" alt="Github Checks"/>
<img src="https://img.shields.io/github/license/sysadminsmedia/homebox"/>
<img src="https://img.shields.io/github/v/release/sysadminsmedia/homebox?sort=semver&display_name=release"/>
<img src="https://img.shields.io/weblate/progress/homebox?server=https%3A%2F%2Ftranslate.sysadminsmedia.com"/>
</p>
<p align="center" style="width: 100%;">
<img src="https://img.shields.io/reddit/subreddit-subscribers/homebox"/>
<img src="https://img.shields.io/mastodon/follow/110749314839831923?domain=infosec.exchange"/>
<img src="https://img.shields.io/lemmy/homebox%40lemmy.world?label=lemmy"/>
</p>
## What is HomeBox
HomeBox is the inventory and organization system built for the Home User! With a focus on simplicity and ease of use, Homebox is the perfect solution for your home inventory, organization, and management needs. While developing this project, we've tried to keep the following principles in mind:
Homebox is the inventory and organization system built for the Home User! With a focus on simplicity and ease of use, Homebox is the perfect solution for your home inventory, organization, and management needs. While developing this project, I've tried to keep the following principles in mind:
- 🧘 _Simple but Expandable_ - Homebox is designed to be simple and easy to use. No complicated setup or configuration required. But expandable to whatever level of infrastructure you want to put into it.
- 🚀 _Blazingly Fast_ - Homebox is written in Go, which makes it extremely fast and requires minimal resources to deploy. In general, idle memory usage is less than 50MB for the whole container.
- 📦 _Portable_ - Homebox is designed to be portable and run anywhere. We use SQLite and an embedded Web UI to make it easy to deploy, use, and back up.
- _Simple_ - Homebox is designed to be simple and easy to use. No complicated setup or configuration required. Use either a single docker container, or deploy yourself by compiling the binary for your platform of choice.
- _Blazingly Fast_ - Homebox is written in Go, which makes it extremely fast and requires minimal resources to deploy. In general, idle memory usage is less than 50MB for the whole container.
- _Portable_ - Homebox is designed to be portable and run anywhere. We use SQLite and an embedded Web UI to make it easy to deploy, use, and back up.
### Key Features
- 📇 Rich Organization - Organize your items into categories, locations, and tags. You can also create custom fields to store additional information about your items.
- 🔍 Powerful Search - Quickly find items in your inventory using the powerful search feature.
- 📸 Image Upload - Upload images of your items to make it easy to identify them.
- 📄 Document and Warranty Tracking - Keep track of important documents and warranties for your items.
- 💰 Purchase & Maintenance Tracking - Track purchase dates, prices, and maintenance schedules for your items.
- 📱 Responsive Design - Homebox is designed to work on any device, including desktops, tablets, and smartphones.
## Screenshots
![Login Screen](.github/screenshots/1.png)
![Dashboard](.github/screenshots/2.png)
![Item View](.github/screenshots/3.png)
![Create Item](.github/screenshots/9.png)
![Search](.github/screenshots/8.png)
You can also try the demo instances of Homebox:
- [Demo](https://demo.homebox.software)
- [Nightly](https://nightly.homebox.software)
# Screenshots
Check out screenshots of the project [here](https://imgur.com/a/5gLWt2j).
## Quick Start
[Configuration & Docker Compose](https://homebox.software/en/quick-start.html)
```bash
# If using the rootless or hardened image, ensure data
# If using the rootless image, ensure data
# folder has correct permissions
mkdir -p /path/to/data/folder
chown 65532:65532 -R /path/to/data/folder
@@ -66,7 +39,6 @@ docker run -d \
--volume /path/to/data/folder/:/data \
ghcr.io/sysadminsmedia/homebox:latest
# ghcr.io/sysadminsmedia/homebox:latest-rootless
# ghcr.io/sysadminsmedia/homebox:latest-hardened
```
<!-- CONTRIBUTING -->
@@ -75,20 +47,14 @@ docker run -d \
Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.
To get started with code based contributions, please see our [contributing guide](https://homebox.software/en/contribute/get-started.html).
If you are not a coder and can't help translate, you can still contribute financially. Financial contributions help us maintain the project and keep demos running.
If you are not a coder, you can still contribute financially. Financial contributions help me prioritize working on this project over others and help me know that there is a real demand for project development.
## Help us Translate
We want to make sure that Homebox is available in as many languages as possible. If you are interested in helping us translate Homebox, please help us via our [Weblate instance](https://translate.sysadminsmedia.com/projects/homebox/).
[![Translation status](https://translate.sysadminsmedia.com/widget/homebox/multi-auto.svg)](https://translate.sysadminsmedia.com/engage/homebox/)
[![Translation status](http://translate.sysadminsmedia.com/widget/homebox/multi-auto.svg)](http://translate.sysadminsmedia.com/engage/homebox/)
## Credits
- Original project by [@hay-kot](https://github.com/hay-kot)
- Logo by [@lakotelman](https://github.com/lakotelman)
### Contributors
<a href="https://github.com/sysadminsmedia/homebox/graphs/contributors">
<img src="https://contrib.rocks/image?repo=sysadminsmedia/homebox" />
</a>

View File

@@ -2,8 +2,7 @@ version: "3"
env:
HBOX_LOG_LEVEL: debug
HBOX_DATABASE_DRIVER: sqlite3
HBOX_DATABASE_SQLITE_PATH: .data/homebox.db?_pragma=busy_timeout=1000&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite
HBOX_STORAGE_SQLITE_URL: .data/homebox.db?_pragma=busy_timeout=1000&_pragma=journal_mode=WAL&_fk=1
HBOX_OPTIONS_ALLOW_REGISTRATION: true
UNSAFE_DISABLE_PASSWORD_PROJECTION: "yes_i_am_sure"
tasks:
@@ -11,9 +10,8 @@ tasks:
desc: Install development dependencies
cmds:
- go install github.com/swaggo/swag/cmd/swag@latest
- go install github.com/pressly/goose/v3/cmd/goose@v3.8.0
- cd backend && go mod tidy
- cd frontend && pnpm install
- cd frontend && pnpm install --shamefully-hoist
swag:
desc: Generate swagger docs
@@ -23,23 +21,19 @@ tasks:
INTERNAL: "../../../internal"
PKGS: "../../../pkgs"
cmds:
- swag fmt --dir={{ .API }}
- swag init --dir={{ .API }},{{ .INTERNAL }}/core/services,{{ .INTERNAL }}/data/repo --parseDependency
- npx -y -p swagger2openapi swagger2openapi --outfile ./docs/openapi-3.json ./docs/swagger.json
- npx -y -p swagger2openapi swagger2openapi --yaml --outfile ./docs/openapi-3.yaml ./docs/swagger.json
- cp -r ./docs/swagger.json ../../../../docs/en/api/swagger-2.0.json
- cp -r ./docs/swagger.yaml ../../../../docs/en/api/swagger-2.0.yaml
- cp -r ./docs/openapi-3.json ../../../../docs/en/api/openapi-3.0.json
- cp -r ./docs/openapi-3.yaml ../../../../docs/en/api/openapi-3.0.yaml
sources:
- "./backend/app/api/**/*"
- "./backend/internal/data/**"
- "./backend/internal/core/services/**/*"
- "./backend/app/tools/typegen/main.go"
typescript-types:
desc: Generates typescript types from swagger definition
cmds:
- |
pnpm dlx swagger-typescript-api generate \
npx swagger-typescript-api \
--no-client \
--modular \
--path ./backend/app/api/static/docs/swagger.json \
@@ -55,12 +49,9 @@ tasks:
cmds:
- task: swag
- task: typescript-types
- cp ./backend/app/api/static/docs/swagger.json docs/en/api/openapi-2.0.json
- cp ./backend/app/api/static/docs/swagger.yaml docs/en/api/openapi-2.0.yaml
- cp ./backend/app/api/static/docs/swagger.json docs/docs/api/openapi-2.0.json
go:run:
env:
HBOX_DEMO: true
desc: Starts the backend api server (depends on generate task)
dir: backend
deps:
@@ -69,48 +60,11 @@ tasks:
- go run ./app/api/ {{ .CLI_ARGS }}
silent: false
go:run:postgresql:
env:
HBOX_DEMO: true
HBOX_DATABASE_DRIVER: postgres
HBOX_DATABASE_USERNAME: homebox
HBOX_DATABASE_PASSWORD: homebox
HBOX_DATABASE_DATABASE: homebox
HBOX_DATABASE_HOST: localhost
HBOX_DATABASE_PORT: 5432
HBOX_DATABASE_SSL_MODE: disable
desc: Starts the backend api server with postgresql (depends on generate task)
dir: backend
deps:
- generate
cmds:
- go run ./app/api/ {{ .CLI_ARGS }}
silent: false
go:ci:
env:
HBOX_DEMO: true
desc: Runs all go test and lint related tasks
dir: backend
cmds:
- go run ./app/api/ {{ .CLI_ARGS }} &
silent: true
go:ci:with-frontend:
desc: Run backend with frontend in CI mode
dir: frontend
cmds:
- pnpm install
- pnpm run build
- cp -r ./.output/public ../backend/app/api/static/
- task: go:ci
silent: true
go:test:
desc: Runs all go tests using gotestsum - supports passing gotestsum args
dir: backend
cmds:
- go test {{ .CLI_ARGS }} ./...
- gotestsum {{ .CLI_ARGS }} ./...
go:coverage:
desc: Runs all go tests with -race flag and generates a coverage report
@@ -153,6 +107,13 @@ tasks:
sources:
- "./backend/internal/data/ent/schema/**/*"
db:migration:
desc: Runs the database diff engine to generate a SQL migration files
deps:
- db:generate
cmds:
- cd backend && go run app/tools/migrations/main.go {{ .CLI_ARGS }}
ui:watch:
desc: Starts the vitest test runner in watch mode
dir: frontend
@@ -163,14 +124,7 @@ tasks:
desc: Run frontend development server
dir: frontend
cmds:
- pnpm dev --no-fork
ui:ci:
desc: Run frontend build in CI mode
dir: frontend
cmds:
- pnpm dev &
silent: true
- pnpm dev
ui:fix:
desc: Runs prettier and eslint on the frontend
@@ -189,37 +143,10 @@ tasks:
cmds:
- cd backend && go build ./app/api
- backend/api &
- sleep 15
- sleep 5
- cd frontend && pnpm run test:ci
silent: true
test:ci:postgresql:
env:
HBOX_DATABASE_DRIVER: postgres
HBOX_DATABASE_USERNAME: homebox
HBOX_DATABASE_PASSWORD: homebox
HBOX_DATABASE_DATABASE: homebox
HBOX_DATABASE_HOST: 127.0.0.1
HBOX_DATABASE_PORT: 5432
HBOX_DATABASE_SSL_MODE: disable
desc: Runs end-to-end test on a live server with postgresql (only for use in CI)
cmds:
- cd backend && go build ./app/api
- backend/api &
- sleep 15
- cd frontend && pnpm run test:ci
silent: true
test:e2e:
desc: Runs end-to-end test on a live server
dir: frontend
cmds:
- task: go:ci:with-frontend
- pnpm exec playwright install-deps
- pnpm exec playwright install
- sleep 30
- TEST_SHUTDOWN_API_SERVER=true E2E_BASE_URL=http://localhost:7745 pnpm exec playwright test -c ./test/playwright.config.ts {{ .CLI_ARGS }}
pr:
desc: Runs all tasks required for a PR
cmds:

View File

@@ -1,81 +1,74 @@
version: "2"
run:
timeout: 10m
linters-settings:
goconst:
min-len: 5
min-occurrences: 5
exhaustive:
default-signifies-exhaustive: true
revive:
ignore-generated-header: false
severity: warning
confidence: 3
depguard:
rules:
main:
deny:
- pkg: io/util
desc: |
Deprecated: As of Go 1.16, the same functionality is now provided by
package io or package os, and those implementations should be
preferred in new code. See the specific function documentation for
details.
gocritic:
enabled-checks:
- ruleguard
testifylint:
enable-all: true
tagalign:
order:
- json
- schema
- yaml
- yml
- toml
- validate
linters:
default: none
disable-all: true
enable:
- asciicheck
- bodyclose
- copyloopvar
- depguard
- dogsled
- errcheck
- errorlint
- exhaustive
- copyloopvar
- gochecknoinits
- goconst
- gocritic
- gocyclo
- gofmt
- goprintffuncname
- gosimple
- govet
- ineffassign
- misspell
- nakedret
- revive
- sqlclosecheck
- staticcheck
- stylecheck
- tagalign
- testifylint
- typecheck
- typecheck
- unconvert
- unused
- whitespace
- zerologlint
settings:
depguard:
rules:
main:
deny:
- pkg: io/util
desc: |
Deprecated: As of Go 1.16, the same functionality is now provided by
package io or package os, and those implementations should be
preferred in new code. See the specific function documentation for
details.
exhaustive:
default-signifies-exhaustive: true
goconst:
min-len: 5
min-occurrences: 5
gocritic:
enabled-checks:
- ruleguard
revive:
confidence: 3
severity: warning
tagalign:
order:
- json
- schema
- yaml
- yml
- toml
- validate
testifylint:
enable-all: true
exclusions:
generated: lax
paths:
- internal/data/ent.*
- third_party$
- builtin$
- examples$
- sqlclosecheck
issues:
exclude-use-default: false
exclude-dirs:
- internal/data/ent.*
fix: true
formatters:
enable:
- gofmt
exclusions:
generated: lax
paths:
- internal/data/ent.*
- third_party$
- builtin$
- examples$

View File

@@ -1,5 +1,3 @@
version: 2
# This is an example .goreleaser.yml file with some sensible defaults.
# Make sure to check the documentation at https://goreleaser.com
before:
@@ -14,56 +12,35 @@ builds:
- linux
- windows
- darwin
- freebsd
goarch:
- amd64
- "386"
- arm
- arm64
- riscv64
flags:
- -trimpath
ldflags:
- -s -w
- -X main.version={{.Version}}
- -X main.commit={{.Commit}}
- -X main.date={{.Date}}
tags:
- >-
{{- if eq .Arch "riscv64" }}nodynamic
{{- else if eq .Os "freebsd" }}nodynamic
{{ end }}
ignore:
- goos: windows
goarch: arm
- goos: windows
goarch: "386"
signs:
- cmd: cosign
signature: "${artifact}.sigstore.json"
args:
- sign-blob
- "--bundle=${signature}"
- "${artifact}"
- "--yes"
artifacts: checksum
output: true
archives:
- formats: [ 'tar.gz' ]
- format: tar.gz
# this name template makes the OS and Arch compatible with the results of uname.
name_template: >-
{{ .ProjectName }}_
{{- title .Os }}_
{{- if eq .Arch "amd64" }}x86_64
{{- else if eq .Arch "386" }}i386
{{- else }}{{ .Arch }}{{ end }}
{{- if .Arm }}v{{ .Arm }}{{ end }}
# use zip for windows archives
format_overrides:
- goos: windows
formats: [ 'zip' ]
sboms:
- artifacts: archive
release:
extra_files:
- glob: dist/*.sig
format: zip
checksum:
name_template: 'checksums.txt'
snapshot:
version_template: "{{ incpatch .Version }}-next"
name_template: "{{ incpatch .Version }}-next"
changelog:
sort: asc
filters:

View File

@@ -23,9 +23,8 @@ func NewTask(name string, interval time.Duration, fn func(context.Context)) *Bac
}
func (tsk *BackgroundTask) Start(ctx context.Context) error {
tsk.Fn(ctx)
timer := time.NewTimer(tsk.Interval)
for {
select {
case <-ctx.Done():

View File

@@ -10,11 +10,9 @@ import (
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/app/api/providers"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
"github.com/olahol/melody"
)
@@ -74,8 +72,6 @@ type V1Controller struct {
allowRegistration bool
bus *eventbus.EventBus
url string
config *config.Config
oidcProvider *providers.OIDCProvider
}
type (
@@ -88,63 +84,38 @@ type (
}
APISummary struct {
Healthy bool `json:"health"`
Versions []string `json:"versions"`
Title string `json:"title"`
Message string `json:"message"`
Build Build `json:"build"`
Latest services.Latest `json:"latest"`
Demo bool `json:"demo"`
AllowRegistration bool `json:"allowRegistration"`
LabelPrinting bool `json:"labelPrinting"`
OIDC OIDCStatus `json:"oidc"`
}
OIDCStatus struct {
Enabled bool `json:"enabled"`
ButtonText string `json:"buttonText,omitempty"`
AutoRedirect bool `json:"autoRedirect,omitempty"`
AllowLocal bool `json:"allowLocal"`
Healthy bool `json:"health"`
Versions []string `json:"versions"`
Title string `json:"title"`
Message string `json:"message"`
Build Build `json:"build"`
Demo bool `json:"demo"`
AllowRegistration bool `json:"allowRegistration"`
}
)
func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, bus *eventbus.EventBus, config *config.Config, options ...func(*V1Controller)) *V1Controller {
func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, bus *eventbus.EventBus, options ...func(*V1Controller)) *V1Controller {
ctrl := &V1Controller{
repo: repos,
svc: svc,
allowRegistration: true,
bus: bus,
config: config,
}
for _, opt := range options {
opt(ctrl)
}
ctrl.initOIDCProvider()
return ctrl
}
func (ctrl *V1Controller) initOIDCProvider() {
if ctrl.config.OIDC.Enabled {
oidcProvider, err := providers.NewOIDCProvider(ctrl.svc.User, &ctrl.config.OIDC, &ctrl.config.Options, ctrl.cookieSecure)
if err != nil {
log.Err(err).Msg("failed to initialize OIDC provider at startup")
} else {
ctrl.oidcProvider = oidcProvider
log.Info().Msg("OIDC provider initialized successfully at startup")
}
}
}
// HandleBase godoc
//
// @Summary Application Info
// @Tags Base
// @Produce json
// @Success 200 {object} APISummary
// @Router /v1/status [GET]
// @Summary Application Info
// @Tags Base
// @Produce json
// @Success 200 {object} APISummary
// @Router /v1/status [GET]
func (ctrl *V1Controller) HandleBase(ready ReadyFunc, build Build) errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
return server.JSON(w, http.StatusOK, APISummary{
@@ -152,27 +123,19 @@ func (ctrl *V1Controller) HandleBase(ready ReadyFunc, build Build) errchain.Hand
Title: "Homebox",
Message: "Track, Manage, and Organize your Things",
Build: build,
Latest: ctrl.svc.BackgroundService.GetLatestVersion(),
Demo: ctrl.isDemo,
AllowRegistration: ctrl.allowRegistration,
LabelPrinting: ctrl.config.LabelMaker.PrintCommand != nil,
OIDC: OIDCStatus{
Enabled: ctrl.config.OIDC.Enabled,
ButtonText: ctrl.config.OIDC.ButtonText,
AutoRedirect: ctrl.config.OIDC.AutoRedirect,
AllowLocal: ctrl.config.Options.AllowLocalLogin,
},
})
}
}
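Based on the summary struct above, querying the status endpoint and the rough shape of its response look like this; the values below are illustrative only, and the nested build/latest objects are elided.

```bash
curl -s http://127.0.0.1:7745/api/v1/status
# Illustrative response shape (values are examples; build/latest elided):
# {
#   "health": true,
#   "versions": ["v1"],
#   "title": "Homebox",
#   "message": "Track, Manage, and Organize your Things",
#   "build": { ... },
#   "latest": { ... },
#   "demo": false,
#   "allowRegistration": true,
#   "labelPrinting": false,
#   "oidc": { "enabled": false, "allowLocal": true }
# }
```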
// HandleCurrency godoc
//
// @Summary Currency
// @Tags Base
// @Produce json
// @Success 200 {object} currencies.Currency
// @Router /v1/currency [GET]
// @Summary Currency
// @Tags Base
// @Produce json
// @Success 200 {object} currencies.Currency
// @Router /v1/currency [GET]
func (ctrl *V1Controller) HandleCurrency() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
// Set Cache for 10 Minutes

View File

@@ -1,30 +0,0 @@
package v1
import (
"net/url"
"github.com/rs/zerolog/log"
)
func GetHBURL(refererHeader, fallback string) (hbURL string) {
hbURL = refererHeader
if hbURL == "" {
hbURL = fallback
}
return stripPathFromURL(hbURL)
}
// stripPathFromURL removes the path from a URL.
// ex. https://example.com/tools -> https://example.com
func stripPathFromURL(rawURL string) string {
parsedURL, err := url.Parse(rawURL)
if err != nil {
log.Err(err).Msg("failed to parse URL")
return ""
}
strippedURL := url.URL{Scheme: parsedURL.Scheme, Host: parsedURL.Host}
return strippedURL.String()
}

View File

@@ -2,7 +2,6 @@ package v1
import (
"context"
"errors"
"net/http"
"github.com/google/uuid"
@@ -10,7 +9,6 @@ import (
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)
@@ -34,126 +32,52 @@ func actionHandlerFactory(ref string, fn func(context.Context, uuid.UUID) (int,
// HandleEnsureAssetID godoc
//
// @Summary Ensure Asset IDs
// @Description Ensures all items in the database have an asset ID
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/ensure-asset-ids [Post]
// @Security Bearer
// @Summary Ensure Asset IDs
// @Description Ensures all items in the database have an asset ID
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/ensure-asset-ids [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleEnsureAssetID() errchain.HandlerFunc {
return actionHandlerFactory("ensure asset IDs", ctrl.svc.Items.EnsureAssetID)
}
// HandleEnsureImportRefs godoc
//
// @Summary Ensures Import Refs
// @Description Ensures all items in the database have an import ref
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/ensure-import-refs [Post]
// @Security Bearer
// @Summary Ensures Import Refs
// @Description Ensures all items in the database have an import ref
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/ensure-import-refs [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleEnsureImportRefs() errchain.HandlerFunc {
return actionHandlerFactory("ensure import refs", ctrl.svc.Items.EnsureImportRef)
}
// HandleItemDateZeroOut godoc
//
// @Summary Zero Out Time Fields
// @Description Resets all item date fields to the beginning of the day
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/zero-item-time-fields [Post]
// @Security Bearer
// @Summary Zero Out Time Fields
// @Description Resets all item date fields to the beginning of the day
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/zero-item-time-fields [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleItemDateZeroOut() errchain.HandlerFunc {
return actionHandlerFactory("zero out date time", ctrl.repo.Items.ZeroOutTimeFields)
}
// HandleSetPrimaryPhotos godoc
//
// @Summary Set Primary Photos
// @Description Sets the first photo of each item as the primary photo
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/set-primary-photos [Post]
// @Security Bearer
// @Summary Set Primary Photos
// @Description Sets the first photo of each item as the primary photo
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/set-primary-photos [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleSetPrimaryPhotos() errchain.HandlerFunc {
return actionHandlerFactory("ensure asset IDs", ctrl.repo.Items.SetPrimaryPhotos)
}
// HandleCreateMissingThumbnails godoc
//
// @Summary Create Missing Thumbnails
// @Description Creates thumbnails for items that are missing them
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/create-missing-thumbnails [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleCreateMissingThumbnails() errchain.HandlerFunc {
return actionHandlerFactory("create missing thumbnails", ctrl.repo.Attachments.CreateMissingThumbnails)
}
// WipeInventoryOptions represents the options for wiping inventory
type WipeInventoryOptions struct {
WipeLabels bool `json:"wipeLabels"`
WipeLocations bool `json:"wipeLocations"`
WipeMaintenance bool `json:"wipeMaintenance"`
}
// HandleWipeInventory godoc
//
// @Summary Wipe Inventory
// @Description Deletes all items in the inventory
// @Tags Actions
// @Produce json
// @Param options body WipeInventoryOptions false "Wipe options"
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/wipe-inventory [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleWipeInventory() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
if ctrl.isDemo {
return validate.NewRequestError(errors.New("wipe inventory is not allowed in demo mode"), http.StatusForbidden)
}
ctx := services.NewContext(r.Context())
// Check if user is owner
if !ctx.User.IsOwner {
return validate.NewRequestError(errors.New("only group owners can wipe inventory"), http.StatusForbidden)
}
// Parse options from request body
var options WipeInventoryOptions
if err := server.Decode(r, &options); err != nil {
// If no body provided, use default (false for all)
options = WipeInventoryOptions{
WipeLabels: false,
WipeLocations: false,
WipeMaintenance: false,
}
}
totalCompleted, err := ctrl.repo.Items.WipeInventory(ctx, ctx.GID, options.WipeLabels, options.WipeLocations, options.WipeMaintenance)
if err != nil {
log.Err(err).Str("action_ref", "wipe inventory").Msg("failed to run action")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
// Publish mutation events for wiped resources
if ctrl.bus != nil {
if options.WipeLabels {
ctrl.bus.Publish(eventbus.EventLabelMutation, eventbus.GroupMutationEvent{GID: ctx.GID})
}
if options.WipeLocations {
ctrl.bus.Publish(eventbus.EventLocationMutation, eventbus.GroupMutationEvent{GID: ctx.GID})
}
}
return server.JSON(w, http.StatusOK, ActionAmountResult{Completed: totalCompleted})
}
}
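A hedged sketch of exercising this endpoint as a group owner; host, port, and the $TOKEN bearer token are placeholders, and omitting the body falls back to the all-false defaults handled above.

```bash
# Hypothetical request; $TOKEN must belong to a group owner.
curl -X POST http://127.0.0.1:7745/api/v1/actions/wipe-inventory \
  -H "Authorization: Bearer $TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"wipeLabels": true, "wipeLocations": false, "wipeMaintenance": true}'
```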

View File

@@ -17,13 +17,13 @@ import (
// HandleAssetGet godocs
//
// @Summary Get Item by Asset ID
// @Tags Items
// @Produce json
// @Param id path string true "Asset ID"
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/assets/{id} [GET]
// @Security Bearer
// @Summary Get Item by Asset ID
// @Tags Items
// @Produce json
// @Param id path string true "Asset ID"
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/assets/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleAssetGet() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())

View File

@@ -2,7 +2,6 @@ package v1
import (
"errors"
"fmt"
"net/http"
"strconv"
"strings"
@@ -29,8 +28,8 @@ type (
}
LoginForm struct {
Username string `json:"username" example:"admin@admin.com"`
Password string `json:"password" example:"admin"`
Username string `json:"username"`
Password string `json:"password"`
StayLoggedIn bool `json:"stayLoggedIn"`
}
)
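For reference, a minimal JSON login against this form might look like the sketch below; the credentials mirror the swagger examples in the newer struct tags, and the host and port are assumed from the container defaults elsewhere in this diff.

```bash
# Hypothetical local login; adjust host, port, and credentials to your deployment.
curl -X POST http://127.0.0.1:7745/api/v1/users/login \
  -H "Content-Type: application/json" \
  -d '{"username": "admin@admin.com", "password": "admin", "stayLoggedIn": true}'
# On success the handler responds with a TokenResponse whose token is prefixed with "Bearer ".
```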
@@ -80,15 +79,17 @@ type AuthProvider interface {
// HandleAuthLogin godoc
//
// @Summary User Login
// @Tags Authentication
// @Accept x-www-form-urlencoded
// @Accept application/json
// @Param payload body LoginForm true "Login Data"
// @Param provider query string false "auth provider"
// @Produce json
// @Success 200 {object} TokenResponse
// @Router /v1/users/login [POST]
// @Summary User Login
// @Tags Authentication
// @Accept x-www-form-urlencoded
// @Accept application/json
// @Param username formData string false "string" example(admin@admin.com)
// @Param password formData string false "string" example(admin)
// @Param payload body LoginForm true "Login Data"
// @Param provider query string false "auth provider"
// @Produce json
// @Success 200 {object} TokenResponse
// @Router /v1/users/login [POST]
func (ctrl *V1Controller) HandleAuthLogin(ps ...AuthProvider) errchain.HandlerFunc {
if len(ps) == 0 {
panic("no auth providers provided")
@@ -107,11 +108,6 @@ func (ctrl *V1Controller) HandleAuthLogin(ps ...AuthProvider) errchain.HandlerFu
provider = "local"
}
// Block local only when disabled
if provider == "local" && !ctrl.config.Options.AllowLocalLogin {
return validate.NewRequestError(fmt.Errorf("local login is not enabled"), http.StatusForbidden)
}
// Get the provider
p, ok := providers[provider]
if !ok {
@@ -120,11 +116,11 @@ func (ctrl *V1Controller) HandleAuthLogin(ps ...AuthProvider) errchain.HandlerFu
newToken, err := p.Authenticate(w, r)
if err != nil {
log.Warn().Err(err).Msg("authentication failed")
return validate.NewUnauthorizedError()
log.Err(err).Msg("failed to authenticate")
return server.JSON(w, http.StatusInternalServerError, err.Error())
}
ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, true, newToken.AttachmentToken)
ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, true)
return server.JSON(w, http.StatusOK, TokenResponse{
Token: "Bearer " + newToken.Raw,
ExpiresAt: newToken.ExpiresAt,
@@ -135,11 +131,11 @@ func (ctrl *V1Controller) HandleAuthLogin(ps ...AuthProvider) errchain.HandlerFu
// HandleAuthLogout godoc
//
// @Summary User Logout
// @Tags Authentication
// @Success 204
// @Router /v1/users/logout [POST]
// @Security Bearer
// @Summary User Logout
// @Tags Authentication
// @Success 204
// @Router /v1/users/logout [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleAuthLogout() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
token := services.UseTokenCtx(r.Context())
@@ -159,13 +155,13 @@ func (ctrl *V1Controller) HandleAuthLogout() errchain.HandlerFunc {
// HandleAuthRefresh godoc
//
// @Summary User Token Refresh
// @Description handleAuthRefresh returns a handler that will issue a new token from an existing token.
// @Description This does not validate that the user still exists within the database.
// @Tags Authentication
// @Success 200
// @Router /v1/users/refresh [GET]
// @Security Bearer
// @Summary User Token Refresh
// @Description handleAuthRefresh returns a handler that will issue a new token from an existing token.
// @Description This does not validate that the user still exists within the database.
// @Tags Authentication
// @Success 200
// @Router /v1/users/refresh [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleAuthRefresh() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
requestToken := services.UseTokenCtx(r.Context())
@@ -178,7 +174,7 @@ func (ctrl *V1Controller) HandleAuthRefresh() errchain.HandlerFunc {
return validate.NewUnauthorizedError()
}
ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, false, newToken.AttachmentToken)
ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, false)
return server.JSON(w, http.StatusOK, newToken)
}
}
@@ -187,7 +183,7 @@ func noPort(host string) string {
return strings.Split(host, ":")[0]
}
func (ctrl *V1Controller) setCookies(w http.ResponseWriter, domain, token string, expires time.Time, remember bool, attachmentToken string) {
func (ctrl *V1Controller) setCookies(w http.ResponseWriter, domain, token string, expires time.Time, remember bool) {
http.SetCookie(w, &http.Cookie{
Name: cookieNameRemember,
Value: strconv.FormatBool(remember),
@@ -219,19 +215,6 @@ func (ctrl *V1Controller) setCookies(w http.ResponseWriter, domain, token string
HttpOnly: false,
Path: "/",
})
// Set attachment token cookie (accessible to frontend, not HttpOnly)
if attachmentToken != "" {
http.SetCookie(w, &http.Cookie{
Name: "hb.auth.attachment_token",
Value: attachmentToken,
Expires: expires,
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: false,
Path: "/",
})
}
}
func (ctrl *V1Controller) unsetCookies(w http.ResponseWriter, domain string) {
@@ -265,77 +248,4 @@ func (ctrl *V1Controller) unsetCookies(w http.ResponseWriter, domain string) {
HttpOnly: false,
Path: "/",
})
// Unset attachment token cookie
http.SetCookie(w, &http.Cookie{
Name: "hb.auth.attachment_token",
Value: "",
Expires: time.Unix(0, 0),
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: false,
Path: "/",
})
}
// HandleOIDCLogin godoc
//
// @Summary OIDC Login Initiation
// @Tags Authentication
// @Produce json
// @Success 302
// @Router /v1/users/login/oidc [GET]
func (ctrl *V1Controller) HandleOIDCLogin() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
// Forbidden if OIDC is not enabled
if !ctrl.config.OIDC.Enabled {
return validate.NewRequestError(fmt.Errorf("OIDC is not enabled"), http.StatusForbidden)
}
// Check if OIDC provider is available
if ctrl.oidcProvider == nil {
log.Error().Msg("OIDC provider not initialized")
return validate.NewRequestError(errors.New("OIDC provider not available"), http.StatusInternalServerError)
}
// Initiate OIDC flow
_, err := ctrl.oidcProvider.InitiateOIDCFlow(w, r)
return err
}
}
// HandleOIDCCallback godoc
//
// @Summary OIDC Callback Handler
// @Tags Authentication
// @Param code query string true "Authorization code"
// @Param state query string true "State parameter"
// @Success 302
// @Router /v1/users/login/oidc/callback [GET]
func (ctrl *V1Controller) HandleOIDCCallback() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
// Forbidden if OIDC is not enabled
if !ctrl.config.OIDC.Enabled {
return validate.NewRequestError(fmt.Errorf("OIDC is not enabled"), http.StatusForbidden)
}
// Check if OIDC provider is available
if ctrl.oidcProvider == nil {
log.Error().Msg("OIDC provider not initialized")
return validate.NewRequestError(errors.New("OIDC provider not available"), http.StatusInternalServerError)
}
// Handle callback
newToken, err := ctrl.oidcProvider.HandleCallback(w, r)
if err != nil {
log.Err(err).Msg("OIDC callback failed")
http.Redirect(w, r, "/?oidc_error=oidc_auth_failed", http.StatusFound)
return nil
}
// Set cookies and redirect to home
ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, true, newToken.AttachmentToken)
http.Redirect(w, r, "/home", http.StatusFound)
return nil
}
}
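Taken together, the two OIDC handlers implement a standard browser redirect flow. A rough command-line probe of the first leg is sketched below; the callback step is normally driven by the identity provider, not curl.

```bash
# Step 1: initiation should answer 302 with a Location pointing at the identity provider
# (403 if OIDC is disabled in the config).
curl -i http://127.0.0.1:7745/api/v1/users/login/oidc
# Step 2: the provider redirects the browser back to
#   /api/v1/users/login/oidc/callback?code=...&state=...
# which sets the auth cookies and redirects to /home on success,
# or to /?oidc_error=oidc_auth_failed on failure.
```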

View File

@@ -26,12 +26,12 @@ type (
// HandleGroupGet godoc
//
// @Summary Get Group
// @Tags Group
// @Produce json
// @Success 200 {object} repo.Group
// @Router /v1/groups [Get]
// @Security Bearer
// @Summary Get Group
// @Tags Group
// @Produce json
// @Success 200 {object} repo.Group
// @Router /v1/groups [Get]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupGet() errchain.HandlerFunc {
fn := func(r *http.Request) (repo.Group, error) {
auth := services.NewContext(r.Context())
@@ -43,13 +43,13 @@ func (ctrl *V1Controller) HandleGroupGet() errchain.HandlerFunc {
// HandleGroupUpdate godoc
//
// @Summary Update Group
// @Tags Group
// @Produce json
// @Param payload body repo.GroupUpdate true "User Data"
// @Success 200 {object} repo.Group
// @Router /v1/groups [Put]
// @Security Bearer
// @Summary Update Group
// @Tags Group
// @Produce json
// @Param payload body repo.GroupUpdate true "User Data"
// @Success 200 {object} repo.Group
// @Router /v1/groups [Put]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, body repo.GroupUpdate) (repo.Group, error) {
auth := services.NewContext(r.Context())
@@ -69,13 +69,13 @@ func (ctrl *V1Controller) HandleGroupUpdate() errchain.HandlerFunc {
// HandleGroupInvitationsCreate godoc
//
// @Summary Create Group Invitation
// @Tags Group
// @Produce json
// @Param payload body GroupInvitationCreate true "User Data"
// @Success 200 {object} GroupInvitation
// @Router /v1/groups/invitations [Post]
// @Security Bearer
// @Summary Create Group Invitation
// @Tags Group
// @Produce json
// @Param payload body GroupInvitationCreate true "User Data"
// @Success 200 {object} GroupInvitation
// @Router /v1/groups/invitations [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupInvitationsCreate() errchain.HandlerFunc {
fn := func(r *http.Request, body GroupInvitationCreate) (GroupInvitation, error) {
if body.ExpiresAt.IsZero() {

View File

@@ -1,164 +0,0 @@
package v1
import (
"net/http"
"github.com/google/uuid"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)
// HandleItemTemplatesGetAll godoc
//
// @Summary Get All Item Templates
// @Tags Item Templates
// @Produce json
// @Success 200 {object} []repo.ItemTemplateSummary
// @Router /v1/templates [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemTemplatesGetAll() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.ItemTemplateSummary, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.ItemTemplates.GetAll(r.Context(), auth.GID)
}
return adapters.Command(fn, http.StatusOK)
}
// HandleItemTemplatesGet godoc
//
// @Summary Get Item Template
// @Tags Item Templates
// @Produce json
// @Param id path string true "Template ID"
// @Success 200 {object} repo.ItemTemplateOut
// @Router /v1/templates/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemTemplatesGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.ItemTemplateOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.ItemTemplates.GetOne(r.Context(), auth.GID, ID)
}
return adapters.CommandID("id", fn, http.StatusOK)
}
// HandleItemTemplatesCreate godoc
//
// @Summary Create Item Template
// @Tags Item Templates
// @Produce json
// @Param payload body repo.ItemTemplateCreate true "Template Data"
// @Success 201 {object} repo.ItemTemplateOut
// @Router /v1/templates [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemTemplatesCreate() errchain.HandlerFunc {
fn := func(r *http.Request, body repo.ItemTemplateCreate) (repo.ItemTemplateOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.ItemTemplates.Create(r.Context(), auth.GID, body)
}
return adapters.Action(fn, http.StatusCreated)
}
// HandleItemTemplatesUpdate godoc
//
// @Summary Update Item Template
// @Tags Item Templates
// @Produce json
// @Param id path string true "Template ID"
// @Param payload body repo.ItemTemplateUpdate true "Template Data"
// @Success 200 {object} repo.ItemTemplateOut
// @Router /v1/templates/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemTemplatesUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.ItemTemplateUpdate) (repo.ItemTemplateOut, error) {
auth := services.NewContext(r.Context())
body.ID = ID
return ctrl.repo.ItemTemplates.Update(r.Context(), auth.GID, body)
}
return adapters.ActionID("id", fn, http.StatusOK)
}
// HandleItemTemplatesDelete godoc
//
// @Summary Delete Item Template
// @Tags Item Templates
// @Produce json
// @Param id path string true "Template ID"
// @Success 204
// @Router /v1/templates/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemTemplatesDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.ItemTemplates.Delete(r.Context(), auth.GID, ID)
return nil, err
}
return adapters.CommandID("id", fn, http.StatusNoContent)
}
type ItemTemplateCreateItemRequest struct {
Name string `json:"name" validate:"required,min=1,max=255"`
Description string `json:"description" validate:"max=1000"`
LocationID uuid.UUID `json:"locationId" validate:"required"`
LabelIDs []uuid.UUID `json:"labelIds"`
Quantity *int `json:"quantity"`
}
// HandleItemTemplatesCreateItem godoc
//
// @Summary Create Item from Template
// @Tags Item Templates
// @Produce json
// @Param id path string true "Template ID"
// @Param payload body ItemTemplateCreateItemRequest true "Item Data"
// @Success 201 {object} repo.ItemOut
// @Router /v1/templates/{id}/create-item [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemTemplatesCreateItem() errchain.HandlerFunc {
fn := func(r *http.Request, templateID uuid.UUID, body ItemTemplateCreateItemRequest) (repo.ItemOut, error) {
auth := services.NewContext(r.Context())
template, err := ctrl.repo.ItemTemplates.GetOne(r.Context(), auth.GID, templateID)
if err != nil {
return repo.ItemOut{}, err
}
quantity := template.DefaultQuantity
if body.Quantity != nil {
quantity = *body.Quantity
}
// Build custom fields from template
fields := make([]repo.ItemField, len(template.Fields))
for i, f := range template.Fields {
fields[i] = repo.ItemField{
Type: f.Type,
Name: f.Name,
TextValue: f.TextValue,
}
}
// Create item with all template data in a single transaction
return ctrl.repo.Items.CreateFromTemplate(r.Context(), auth.GID, repo.ItemCreateFromTemplate{
Name: body.Name,
Description: body.Description,
Quantity: quantity,
LocationID: body.LocationID,
LabelIDs: body.LabelIDs,
Insured: template.DefaultInsured,
Manufacturer: template.DefaultManufacturer,
ModelNumber: template.DefaultModelNumber,
LifetimeWarranty: template.DefaultLifetimeWarranty,
WarrantyDetails: template.DefaultWarrantyDetails,
Fields: fields,
})
}
return adapters.ActionID("id", fn, http.StatusCreated)
}
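A hedged example of creating an item from a template through this endpoint; the template and location UUIDs and the token are placeholders, and quantity may be omitted to fall back to the template's default quantity as handled above.

```bash
# Hypothetical request; UUIDs and token are placeholders.
curl -X POST "http://127.0.0.1:7745/api/v1/templates/<template-uuid>/create-item" \
  -H "Authorization: Bearer $TOKEN" \
  -H "Content-Type: application/json" \
  -d '{
        "name": "Cordless Drill",
        "description": "Garage tool",
        "locationId": "<location-uuid>",
        "labelIds": [],
        "quantity": 2
      }'
```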

View File

@@ -4,11 +4,10 @@ import (
"database/sql"
"encoding/csv"
"errors"
"fmt"
"math/big"
"net/http"
"net/url"
"strings"
"time"
"github.com/google/uuid"
"github.com/hay-kot/httpkit/errchain"
@@ -22,18 +21,18 @@ import (
// HandleItemsGetAll godoc
//
// @Summary Query All Items
// @Tags Items
// @Produce json
// @Param q query string false "search string"
// @Param page query int false "page number"
// @Param pageSize query int false "items per page"
// @Param labels query []string false "label Ids" collectionFormat(multi)
// @Param locations query []string false "location Ids" collectionFormat(multi)
// @Param parentIds query []string false "parent Ids" collectionFormat(multi)
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/items [GET]
// @Security Bearer
// @Summary Query All Items
// @Tags Items
// @Produce json
// @Param q query string false "search string"
// @Param page query int false "page number"
// @Param pageSize query int false "items per page"
// @Param labels query []string false "label Ids" collectionFormat(multi)
// @Param locations query []string false "location Ids" collectionFormat(multi)
// @Param parentIds query []string false "parent Ids" collectionFormat(multi)
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/items [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
extractQuery := func(r *http.Request) repo.ItemQuery {
params := r.URL.Query()
@@ -55,18 +54,16 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
}
v := repo.ItemQuery{
Page: queryIntOrNegativeOne(params.Get("page")),
PageSize: queryIntOrNegativeOne(params.Get("pageSize")),
Search: params.Get("q"),
LocationIDs: queryUUIDList(params, "locations"),
LabelIDs: queryUUIDList(params, "labels"),
NegateLabels: queryBool(params.Get("negateLabels")),
OnlyWithoutPhoto: queryBool(params.Get("onlyWithoutPhoto")),
OnlyWithPhoto: queryBool(params.Get("onlyWithPhoto")),
ParentItemIDs: queryUUIDList(params, "parentIds"),
IncludeArchived: queryBool(params.Get("includeArchived")),
Fields: filterFieldItems(params["fields"]),
OrderBy: params.Get("orderBy"),
Page: queryIntOrNegativeOne(params.Get("page")),
PageSize: queryIntOrNegativeOne(params.Get("pageSize")),
Search: params.Get("q"),
LocationIDs: queryUUIDList(params, "locations"),
LabelIDs: queryUUIDList(params, "labels"),
NegateLabels: queryBool(params.Get("negateLabels")),
ParentItemIDs: queryUUIDList(params, "parentIds"),
IncludeArchived: queryBool(params.Get("includeArchived")),
Fields: filterFieldItems(params["fields"]),
OrderBy: params.Get("orderBy"),
}
if strings.HasPrefix(v.Search, "#") {
@@ -88,9 +85,6 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
items, err := ctrl.repo.Items.QueryByGroup(ctx, ctx.GID, extractQuery(r))
totalPrice := new(big.Int)
for _, item := range items.Items {
if !item.SoldTime.IsZero() { // Skip items with a non-null SoldDate
continue
}
totalPrice.Add(totalPrice, big.NewInt(int64(item.PurchasePrice*100)))
}
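The extracted query parameters map directly onto the request URL; a sketch of a filtered query follows, with placeholder UUIDs and repeated labels/locations parameters per the collectionFormat(multi) convention in the swagger block above.

```bash
# Hypothetical query; UUIDs and token are placeholders.
curl -G http://127.0.0.1:7745/api/v1/items \
  -H "Authorization: Bearer $TOKEN" \
  --data-urlencode "q=drill" \
  --data-urlencode "page=1" \
  --data-urlencode "pageSize=50" \
  --data-urlencode "labels=<label-uuid>" \
  --data-urlencode "locations=<location-uuid>"
```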
@@ -112,13 +106,13 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
// HandleItemFullPath godoc
//
// @Summary Get the full path of an item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} []repo.ItemPath
// @Router /v1/items/{id}/path [GET]
// @Security Bearer
// @Summary Get the full path of an item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} []repo.ItemPath
// @Router /v1/items/{id}/path [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemFullPath() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) ([]repo.ItemPath, error) {
auth := services.NewContext(r.Context())
@@ -154,13 +148,13 @@ func (ctrl *V1Controller) HandleItemFullPath() errchain.HandlerFunc {
// HandleItemsCreate godoc
//
// @Summary Create Item
// @Tags Items
// @Produce json
// @Param payload body repo.ItemCreate true "Item Data"
// @Success 201 {object} repo.ItemSummary
// @Router /v1/items [POST]
// @Security Bearer
// @Summary Create Item
// @Tags Items
// @Produce json
// @Param payload body repo.ItemCreate true "Item Data"
// @Success 201 {object} repo.ItemSummary
// @Router /v1/items [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsCreate() errchain.HandlerFunc {
fn := func(r *http.Request, body repo.ItemCreate) (repo.ItemOut, error) {
return ctrl.svc.Items.Create(services.NewContext(r.Context()), body)
@@ -171,13 +165,13 @@ func (ctrl *V1Controller) HandleItemsCreate() errchain.HandlerFunc {
// HandleItemGet godocs
//
// @Summary Get Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [GET]
// @Security Bearer
// @Summary Get Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.ItemOut, error) {
auth := services.NewContext(r.Context())
@@ -190,13 +184,13 @@ func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc {
// HandleItemDelete godocs
//
// @Summary Delete Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 204
// @Router /v1/items/{id} [DELETE]
// @Security Bearer
// @Summary Delete Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 204
// @Router /v1/items/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
@@ -209,14 +203,14 @@ func (ctrl *V1Controller) HandleItemDelete() errchain.HandlerFunc {
// HandleItemUpdate godocs
//
// @Summary Update Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.ItemUpdate true "Item Data"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [PUT]
// @Security Bearer
// @Summary Update Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.ItemUpdate true "Item Data"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.ItemUpdate) (repo.ItemOut, error) {
auth := services.NewContext(r.Context())
@@ -230,14 +224,14 @@ func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc {
// HandleItemPatch godocs
//
// @Summary Update Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.ItemPatch true "Item Data"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [Patch]
// @Security Bearer
// @Summary Update Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.ItemPatch true "Item Data"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [Patch]
// @Security Bearer
func (ctrl *V1Controller) HandleItemPatch() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.ItemPatch) (repo.ItemOut, error) {
auth := services.NewContext(r.Context())
@@ -254,34 +248,15 @@ func (ctrl *V1Controller) HandleItemPatch() errchain.HandlerFunc {
return adapters.ActionID("id", fn, http.StatusOK)
}
// HandleItemDuplicate godocs
//
// @Summary Duplicate Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.DuplicateOptions true "Duplicate Options"
// @Success 201 {object} repo.ItemOut
// @Router /v1/items/{id}/duplicate [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemDuplicate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, options repo.DuplicateOptions) (repo.ItemOut, error) {
ctx := services.NewContext(r.Context())
return ctrl.svc.Items.Duplicate(ctx, ctx.GID, ID, options)
}
return adapters.ActionID("id", fn, http.StatusCreated)
}
// HandleGetAllCustomFieldNames godocs
//
// @Summary Get All Custom Field Names
// @Tags Items
// @Produce json
// @Success 200
// @Router /v1/items/fields [GET]
// @Success 200 {object} []string
// @Security Bearer
// @Summary Get All Custom Field Names
// @Tags Items
// @Produce json
// @Success 200
// @Router /v1/items/fields [GET]
// @Success 200 {object} []string
// @Security Bearer
func (ctrl *V1Controller) HandleGetAllCustomFieldNames() errchain.HandlerFunc {
fn := func(r *http.Request) ([]string, error) {
auth := services.NewContext(r.Context())
@@ -293,13 +268,13 @@ func (ctrl *V1Controller) HandleGetAllCustomFieldNames() errchain.HandlerFunc {
// HandleGetAllCustomFieldValues godocs
//
// @Summary Get All Custom Field Values
// @Tags Items
// @Produce json
// @Success 200
// @Router /v1/items/fields/values [GET]
// @Success 200 {object} []string
// @Security Bearer
// @Summary Get All Custom Field Values
// @Tags Items
// @Produce json
// @Success 200
// @Router /v1/items/fields/values [GET]
// @Success 200 {object} []string
// @Security Bearer
func (ctrl *V1Controller) HandleGetAllCustomFieldValues() errchain.HandlerFunc {
type query struct {
Field string `schema:"field" validate:"required"`
@@ -315,14 +290,13 @@ func (ctrl *V1Controller) HandleGetAllCustomFieldValues() errchain.HandlerFunc {
// HandleItemsImport godocs
//
// @Summary Import Items
// @Tags Items
// @Accept multipart/form-data
// @Produce json
// @Success 204
// @Param csv formData file true "CSV file to upload"
// @Router /v1/items/import [Post]
// @Security Bearer
// @Summary Import Items
// @Tags Items
// @Produce json
// @Success 204
// @Param csv formData file true "CSV file to upload"
// @Router /v1/items/import [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsImport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
err := r.ParseMultipartForm(ctrl.maxUploadSize << 20)
@@ -351,29 +325,49 @@ func (ctrl *V1Controller) HandleItemsImport() errchain.HandlerFunc {
// HandleItemsExport godocs
//
// @Summary Export Items
// @Tags Items
// @Success 200 {string} string "text/csv"
// @Router /v1/items/export [GET]
// @Security Bearer
// @Summary Export Items
// @Tags Items
// @Success 200 {string} string "text/csv"
// @Router /v1/items/export [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsExport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())
csvData, err := ctrl.svc.Items.ExportCSV(r.Context(), ctx.GID, GetHBURL(r.Header.Get("Referer"), ctrl.url))
csvData, err := ctrl.svc.Items.ExportCSV(r.Context(), ctx.GID, getHBURL(r.Header.Get("Referer"), ctrl.url))
if err != nil {
log.Err(err).Msg("failed to export items")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
timestamp := time.Now().Format("2006-01-02_15-04-05") // YYYY-MM-DD_HH-MM-SS format
filename := fmt.Sprintf("homebox-items_%s.csv", timestamp) // add timestamp to filename
w.Header().Set("Content-Type", "text/csv")
w.Header().Set("Content-Disposition", fmt.Sprintf("attachment;filename=%s", filename))
w.Header().Set("Content-Disposition", "attachment;filename=homebox-items.csv")
writer := csv.NewWriter(w)
writer.Comma = ','
return writer.WriteAll(csvData)
}
}
func getHBURL(refererHeader, fallback string) (hbURL string) {
hbURL = refererHeader
if hbURL == "" {
hbURL = fallback
}
return stripPathFromURL(hbURL)
}
// stripPathFromURL removes the path from a URL.
// ex. https://example.com/tools -> https://example.com
func stripPathFromURL(rawURL string) string {
parsedURL, err := url.Parse(rawURL)
if err != nil {
log.Err(err).Msg("failed to parse URL")
return ""
}
strippedURL := url.URL{Scheme: parsedURL.Scheme, Host: parsedURL.Host}
return strippedURL.String()
}

View File

@@ -3,9 +3,7 @@ package v1
import (
"errors"
"net/http"
"net/url"
"path/filepath"
"strconv"
"strings"
"github.com/hay-kot/httpkit/errchain"
@@ -15,13 +13,6 @@ import (
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
"gocloud.dev/blob"
_ "gocloud.dev/blob/azureblob"
_ "gocloud.dev/blob/fileblob"
_ "gocloud.dev/blob/gcsblob"
_ "gocloud.dev/blob/memblob"
_ "gocloud.dev/blob/s3blob"
)
type (
@@ -32,19 +23,17 @@ type (
// HandleItemAttachmentCreate godocs
//
// @Summary Create Item Attachment
// @Tags Items Attachments
// @Accept multipart/form-data
// @Produce json
// @Param id path string true "Item ID"
// @Param file formData file true "File attachment"
// @Param type formData string false "Type of file"
// @Param primary formData bool false "Is this the primary attachment"
// @Param name formData string true "name of the file including extension"
// @Success 200 {object} repo.ItemOut
// @Failure 422 {object} validate.ErrorResponse
// @Router /v1/items/{id}/attachments [POST]
// @Security Bearer
// @Summary Create Item Attachment
// @Tags Items Attachments
// @Produce json
// @Param id path string true "Item ID"
// @Param file formData file true "File attachment"
// @Param type formData string true "Type of file"
// @Param name formData string true "name of the file including extension"
// @Success 200 {object} repo.ItemOut
// @Failure 422 {object} validate.ErrorResponse
// @Router /v1/items/{id}/attachments [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
err := r.ParseMultipartForm(ctrl.maxUploadSize << 20)
@@ -83,19 +72,13 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
ext := filepath.Ext(attachmentName)
switch strings.ToLower(ext) {
case ".jpg", ".jpeg", ".png", ".webp", ".gif", ".bmp", ".tiff", ".avif", ".ico", ".heic", ".jxl":
case ".jpg", ".jpeg", ".png", ".webp", ".gif", ".bmp", ".tiff":
attachmentType = attachment.TypePhoto.String()
default:
attachmentType = attachment.TypeAttachment.String()
}
}
primary, err := strconv.ParseBool(r.FormValue("primary"))
if err != nil {
log.Debug().Msg("failed to parse primary from form")
primary = false
}
id, err := ctrl.routeID(r)
if err != nil {
return err
@@ -108,7 +91,6 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
id,
attachmentName,
attachment.Type(attachmentType),
primary,
file,
)
if err != nil {
@@ -122,41 +104,41 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
// HandleItemAttachmentGet godocs
//
// @Summary Get Item Attachment
// @Tags Items Attachments
// @Produce application/octet-stream
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 200 {object} ItemAttachmentToken
// @Router /v1/items/{id}/attachments/{attachment_id} [GET]
// @Security Bearer
// @Summary Get Item Attachment
// @Tags Items Attachments
// @Produce application/octet-stream
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 200 {object} ItemAttachmentToken
// @Router /v1/items/{id}/attachments/{attachment_id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentGet() errchain.HandlerFunc {
return ctrl.handleItemAttachmentsHandler
}
// HandleItemAttachmentDelete godocs
//
// @Summary Delete Item Attachment
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 204
// @Router /v1/items/{id}/attachments/{attachment_id} [DELETE]
// @Security Bearer
// @Summary Delete Item Attachment
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 204
// @Router /v1/items/{id}/attachments/{attachment_id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentDelete() errchain.HandlerFunc {
return ctrl.handleItemAttachmentsHandler
}
// HandleItemAttachmentUpdate godocs
//
// @Summary Update Item Attachment
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Param payload body repo.ItemAttachmentUpdate true "Attachment Update"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id}/attachments/{attachment_id} [PUT]
// @Security Bearer
// @Summary Update Item Attachment
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Param payload body repo.ItemAttachmentUpdate true "Attachment Update"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id}/attachments/{attachment_id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentUpdate() errchain.HandlerFunc {
return ctrl.handleItemAttachmentsHandler
}
@@ -175,39 +157,13 @@ func (ctrl *V1Controller) handleItemAttachmentsHandler(w http.ResponseWriter, r
ctx := services.NewContext(r.Context())
switch r.Method {
case http.MethodGet:
doc, err := ctrl.svc.Items.AttachmentPath(r.Context(), ctx.GID, attachmentID)
doc, err := ctrl.svc.Items.AttachmentPath(r.Context(), attachmentID)
if err != nil {
log.Err(err).Msg("failed to get attachment path")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
bucket, err := blob.OpenBucket(ctx, ctrl.repo.Attachments.GetConnString())
if err != nil {
log.Err(err).Msg("failed to open bucket")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
file, err := bucket.NewReader(ctx, ctrl.repo.Attachments.GetFullPath(doc.Path), nil)
if err != nil {
log.Err(err).Msg("failed to open file")
return validate.NewRequestError(err, http.StatusInternalServerError)
}
defer func(file *blob.Reader) {
err := file.Close()
if err != nil {
log.Err(err).Msg("failed to close file")
}
}(file)
defer func(bucket *blob.Bucket) {
err := bucket.Close()
if err != nil {
log.Err(err).Msg("failed to close bucket")
}
}(bucket)
// Set the Content-Disposition header for RFC6266 compliance
disposition := "inline; filename*=UTF-8''" + url.QueryEscape(doc.Title)
w.Header().Set("Content-Disposition", disposition)
http.ServeContent(w, r, doc.Title, doc.CreatedAt, file)
http.ServeFile(w, r, doc.Path)
return nil
// Delete Attachment Handler
@@ -230,9 +186,9 @@ func (ctrl *V1Controller) handleItemAttachmentsHandler(w http.ResponseWriter, r
}
attachment.ID = attachmentID
val, err := ctrl.svc.Items.AttachmentUpdate(ctx, ctx.GID, ID, &attachment)
val, err := ctrl.svc.Items.AttachmentUpdate(ctx, ID, &attachment)
if err != nil {
log.Err(err).Msg("failed to update attachment")
log.Err(err).Msg("failed to delete attachment")
return validate.NewRequestError(err, http.StatusInternalServerError)
}

View File

@@ -1,136 +0,0 @@
package v1
import (
"fmt"
"net/http"
"strconv"
"strings"
"github.com/go-chi/chi/v5"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
"github.com/sysadminsmedia/homebox/backend/pkgs/labelmaker"
)
func generateOrPrint(ctrl *V1Controller, w http.ResponseWriter, r *http.Request, title string, description string, url string) error {
params := labelmaker.NewGenerateParams(int(ctrl.config.LabelMaker.Width), int(ctrl.config.LabelMaker.Height), int(ctrl.config.LabelMaker.Margin), int(ctrl.config.LabelMaker.Padding), ctrl.config.LabelMaker.FontSize, title, description, url, ctrl.config.LabelMaker.DynamicLength, ctrl.config.LabelMaker.AdditionalInformation)
print := queryBool(r.URL.Query().Get("print"))
if print {
err := labelmaker.PrintLabel(ctrl.config, &params)
if err != nil {
return err
}
_, err = w.Write([]byte("Printed!"))
return err
} else {
return labelmaker.GenerateLabel(w, &params, ctrl.config)
}
}
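// Example: GET /v1/labelmaker/item/{id}?print=true sends the rendered label to
// the configured printer and responds with "Printed!"; without the flag the
// label image is written directly to the response body.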
// HandleGetLocationLabel godoc
//
// @Summary Get Location label
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Param print query bool false "Print this label, defaults to false"
// @Success 200 {string} string "image/png"
// @Router /v1/labelmaker/location/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGetLocationLabel() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ID, err := adapters.RouteUUID(r, "id")
if err != nil {
return err
}
auth := services.NewContext(r.Context())
location, err := ctrl.repo.Locations.GetOneByGroup(auth, auth.GID, ID)
if err != nil {
return err
}
hbURL := GetHBURL(r.Header.Get("Referer"), ctrl.url)
return generateOrPrint(ctrl, w, r, location.Name, "Homebox Location", fmt.Sprintf("%s/location/%s", hbURL, location.ID))
}
}
// HandleGetItemLabel godoc
//
// @Summary Get Item label
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param print query bool false "Print this label, defaults to false"
// @Success 200 {string} string "image/png"
// @Router /v1/labelmaker/item/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGetItemLabel() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ID, err := adapters.RouteUUID(r, "id")
if err != nil {
return err
}
auth := services.NewContext(r.Context())
item, err := ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
if err != nil {
return err
}
description := ""
if item.Location != nil {
description += fmt.Sprintf("\nLocation: %s", item.Location.Name)
}
hbURL := GetHBURL(r.Header.Get("Referer"), ctrl.url)
return generateOrPrint(ctrl, w, r, item.Name, description, fmt.Sprintf("%s/item/%s", hbURL, item.ID))
}
}
// HandleGetAssetLabel godoc
//
// @Summary Get Asset label
// @Tags Items
// @Produce json
// @Param id path string true "Asset ID"
// @Param print query bool false "Print this label, defaults to false"
// @Success 200 {string} string "image/png"
// @Router /v1/labelmaker/assets/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGetAssetLabel() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
assetIDParam := chi.URLParam(r, "id")
assetIDParam = strings.ReplaceAll(assetIDParam, "-", "")
assetID, err := strconv.ParseInt(assetIDParam, 10, 64)
if err != nil {
return err
}
auth := services.NewContext(r.Context())
item, err := ctrl.repo.Items.QueryByAssetID(auth, auth.GID, repo.AssetID(assetID), 0, 1)
if err != nil {
return err
}
if len(item.Items) == 0 {
return validate.NewRequestError(fmt.Errorf("failed to find asset id"), http.StatusNotFound)
}
description := item.Items[0].Name
if item.Items[0].Location != nil {
description += fmt.Sprintf("\nLocation: %s", item.Items[0].Location.Name)
}
hbURL := GetHBURL(r.Header.Get("Referer"), ctrl.url)
return generateOrPrint(ctrl, w, r, item.Items[0].AssetID.String(), description, fmt.Sprintf("%s/a/%s", hbURL, item.Items[0].AssetID.String()))
}
}
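// Example: an asset ID path value of "000-123" has its dashes stripped to
// "000123" before being parsed as a base-10 integer, so dashed and plain forms
// resolve to the same asset.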

View File

@@ -12,12 +12,12 @@ import (
// HandleLabelsGetAll godoc
//
// @Summary Get All Labels
// @Tags Labels
// @Produce json
// @Success 200 {object} []repo.LabelOut
// @Router /v1/labels [GET]
// @Security Bearer
// @Summary Get All Labels
// @Tags Labels
// @Produce json
// @Success 200 {object} []repo.LabelOut
// @Router /v1/labels [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelsGetAll() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.LabelSummary, error) {
auth := services.NewContext(r.Context())
@@ -29,13 +29,13 @@ func (ctrl *V1Controller) HandleLabelsGetAll() errchain.HandlerFunc {
// HandleLabelsCreate godoc
//
// @Summary Create Label
// @Tags Labels
// @Produce json
// @Param payload body repo.LabelCreate true "Label Data"
// @Success 200 {object} repo.LabelSummary
// @Router /v1/labels [POST]
// @Security Bearer
// @Summary Create Label
// @Tags Labels
// @Produce json
// @Param payload body repo.LabelCreate true "Label Data"
// @Success 200 {object} repo.LabelSummary
// @Router /v1/labels [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelsCreate() errchain.HandlerFunc {
fn := func(r *http.Request, data repo.LabelCreate) (repo.LabelOut, error) {
auth := services.NewContext(r.Context())
@@ -47,13 +47,13 @@ func (ctrl *V1Controller) HandleLabelsCreate() errchain.HandlerFunc {
// HandleLabelDelete godocs
//
// @Summary Delete Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 204
// @Router /v1/labels/{id} [DELETE]
// @Security Bearer
// @Summary Delete Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 204
// @Router /v1/labels/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
@@ -66,13 +66,13 @@ func (ctrl *V1Controller) HandleLabelDelete() errchain.HandlerFunc {
// HandleLabelGet godocs
//
// @Summary Get Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [GET]
// @Security Bearer
// @Summary Get Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.LabelOut, error) {
auth := services.NewContext(r.Context())
@@ -84,13 +84,13 @@ func (ctrl *V1Controller) HandleLabelGet() errchain.HandlerFunc {
// HandleLabelUpdate godocs
//
// @Summary Update Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [PUT]
// @Security Bearer
// @Summary Update Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, data repo.LabelUpdate) (repo.LabelOut, error) {
auth := services.NewContext(r.Context())

View File

@@ -0,0 +1,51 @@
package v1
import (
"encoding/json"
"net/http"
"github.com/google/uuid"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)
type Locales struct {
Locales []string `json:"locales"`
}
// HandleLocalesGetAll godoc
//
// @Summary Get All Locales
// @Tags Locales
// @Produce json
// @Success 200 {object} []Locales
// @Router /v1/locales [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocalesGetAll() errchain.HandlerFunc {
fn := func(r *http.Request) ([]Locales, error) {
// TODO: get a list of locales from files
return []Locales{}, nil
}
return adapters.Command(fn, http.StatusOK)
}
// HandleLocalesGet godoc
//
// @Summary Get Locale
// @Tags Locales
// @Produce json
// @Param id path string true "Locale ID"
// @Success 200 {object} interface{}
// @Router /v1/locales/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocalesGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (interface{}, error) {
// TODO: get the current locale
return nil, nil
}
return adapters.CommandID("id", fn, http.StatusOK)
}
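// A minimal sketch of how HandleLocalesGetAll could enumerate locales once the
// TODO above is implemented, assuming locale files ship as embedded JSON (the
// localesFS name is hypothetical):
//
//	entries, err := localesFS.ReadDir("locales")
//	if err != nil {
//		return nil, err
//	}
//	out := Locales{}
//	for _, e := range entries {
//		out.Locales = append(out.Locales, strings.TrimSuffix(e.Name(), ".json"))
//	}
//	return []Locales{out}, nil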

View File

@@ -14,13 +14,13 @@ import (
// HandleLocationTreeQuery godoc
//
// @Summary Get Locations Tree
// @Tags Locations
// @Produce json
// @Param withItems query bool false "include items in response tree"
// @Success 200 {object} []repo.TreeItem
// @Router /v1/locations/tree [GET]
// @Security Bearer
// @Summary Get Locations Tree
// @Tags Locations
// @Produce json
// @Param withItems query bool false "include items in response tree"
// @Success 200 {object} []repo.TreeItem
// @Router /v1/locations/tree [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationTreeQuery() errchain.HandlerFunc {
fn := func(r *http.Request, query repo.TreeQuery) ([]repo.TreeItem, error) {
auth := services.NewContext(r.Context())
@@ -32,13 +32,13 @@ func (ctrl *V1Controller) HandleLocationTreeQuery() errchain.HandlerFunc {
// HandleLocationGetAll godoc
//
// @Summary Get All Locations
// @Tags Locations
// @Produce json
// @Param filterChildren query bool false "Filter locations with parents"
// @Success 200 {object} []repo.LocationOutCount
// @Router /v1/locations [GET]
// @Security Bearer
// @Summary Get All Locations
// @Tags Locations
// @Produce json
// @Param filterChildren query bool false "Filter locations with parents"
// @Success 200 {object} []repo.LocationOutCount
// @Router /v1/locations [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationGetAll() errchain.HandlerFunc {
fn := func(r *http.Request, q repo.LocationQuery) ([]repo.LocationOutCount, error) {
auth := services.NewContext(r.Context())
@@ -50,13 +50,13 @@ func (ctrl *V1Controller) HandleLocationGetAll() errchain.HandlerFunc {
// HandleLocationCreate godoc
//
// @Summary Create Location
// @Tags Locations
// @Produce json
// @Param payload body repo.LocationCreate true "Location Data"
// @Success 200 {object} repo.LocationSummary
// @Router /v1/locations [POST]
// @Security Bearer
// @Summary Create Location
// @Tags Locations
// @Produce json
// @Param payload body repo.LocationCreate true "Location Data"
// @Success 200 {object} repo.LocationSummary
// @Router /v1/locations [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationCreate() errchain.HandlerFunc {
fn := func(r *http.Request, createData repo.LocationCreate) (repo.LocationOut, error) {
auth := services.NewContext(r.Context())
@@ -68,13 +68,13 @@ func (ctrl *V1Controller) HandleLocationCreate() errchain.HandlerFunc {
// HandleLocationDelete godoc
//
// @Summary Delete Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 204
// @Router /v1/locations/{id} [DELETE]
// @Security Bearer
// @Summary Delete Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 204
// @Router /v1/locations/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
@@ -99,11 +99,6 @@ func (ctrl *V1Controller) GetLocationWithPrice(auth context.Context, gid uuid.UU
}
for _, item := range items.Items {
// Skip items with a non-zero SoldTime
if !item.SoldTime.IsZero() {
continue
}
// Convert item.Quantity to float64 for multiplication
quantity := float64(item.Quantity)
itemTotal := big.NewInt(int64(item.PurchasePrice * quantity * 100))
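// Totals are accumulated in integer cents (price * quantity * 100) via math/big,
// e.g. two items purchased at 10.25 contribute big.NewInt(2050), which avoids
// accumulating floating-point error in the running sum.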
@@ -129,13 +124,13 @@ func (ctrl *V1Controller) GetLocationWithPrice(auth context.Context, gid uuid.UU
// HandleLocationGet godoc
//
// @Summary Get Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [GET]
// @Security Bearer
// @Summary Get Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.LocationOut, error) {
auth := services.NewContext(r.Context())
@@ -149,14 +144,14 @@ func (ctrl *V1Controller) HandleLocationGet() errchain.HandlerFunc {
// HandleLocationUpdate godoc
//
// @Summary Update Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Param payload body repo.LocationUpdate true "Location Data"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [PUT]
// @Security Bearer
// @Summary Update Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Param payload body repo.LocationUpdate true "Location Data"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.LocationUpdate) (repo.LocationOut, error) {
auth := services.NewContext(r.Context())

View File

@@ -12,14 +12,13 @@ import (
// HandleMaintenanceLogGet godoc
//
// @Summary Get Maintenance Log
// @Tags Item Maintenance
// @Produce json
// @Param id path string true "Item ID"
// @Param filters query repo.MaintenanceFilters false "which maintenance to retrieve"
// @Success 200 {array} repo.MaintenanceEntryWithDetails[]
// @Router /v1/items/{id}/maintenance [GET]
// @Security Bearer
// @Summary Get Maintenance Log
// @Tags Item Maintenance
// @Produce json
// @Param filters query repo.MaintenanceFilters false "which maintenance to retrieve"
// @Success 200 {array} repo.MaintenanceEntryWithDetails[]
// @Router /v1/items/{id}/maintenance [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceLogGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, filters repo.MaintenanceFilters) ([]repo.MaintenanceEntryWithDetails, error) {
auth := services.NewContext(r.Context())
@@ -31,14 +30,13 @@ func (ctrl *V1Controller) HandleMaintenanceLogGet() errchain.HandlerFunc {
// HandleMaintenanceEntryCreate godoc
//
// @Summary Create Maintenance Entry
// @Tags Item Maintenance
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.MaintenanceEntryCreate true "Entry Data"
// @Success 201 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance [POST]
// @Security Bearer
// @Summary Create Maintenance Entry
// @Tags Item Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryCreate true "Entry Data"
// @Success 201 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryCreate() errchain.HandlerFunc {
fn := func(r *http.Request, itemID uuid.UUID, body repo.MaintenanceEntryCreate) (repo.MaintenanceEntry, error) {
auth := services.NewContext(r.Context())

View File

@@ -12,13 +12,13 @@ import (
// HandleMaintenanceGetAll godoc
//
// @Summary Query All Maintenance
// @Tags Maintenance
// @Produce json
// @Param filters query repo.MaintenanceFilters false "which maintenance to retrieve"
// @Success 200 {array} repo.MaintenanceEntryWithDetails[]
// @Router /v1/maintenance [GET]
// @Security Bearer
// @Summary Query All Maintenance
// @Tags Maintenance
// @Produce json
// @Param filters query repo.MaintenanceFilters false "which maintenance to retrieve"
// @Success 200 {array} repo.MaintenanceEntryWithDetails[]
// @Router /v1/maintenance [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceGetAll() errchain.HandlerFunc {
fn := func(r *http.Request, filters repo.MaintenanceFilters) ([]repo.MaintenanceEntryWithDetails, error) {
auth := services.NewContext(r.Context())
@@ -30,14 +30,13 @@ func (ctrl *V1Controller) HandleMaintenanceGetAll() errchain.HandlerFunc {
// HandleMaintenanceEntryUpdate godoc
//
// @Summary Update Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param id path string true "Maintenance ID"
// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/maintenance/{id} [PUT]
// @Security Bearer
// @Summary Update Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/maintenance/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID, body repo.MaintenanceEntryUpdate) (repo.MaintenanceEntry, error) {
auth := services.NewContext(r.Context())
@@ -49,13 +48,12 @@ func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() errchain.HandlerFunc {
// HandleMaintenanceEntryDelete godoc
//
// @Summary Delete Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param id path string true "Maintenance ID"
// @Success 204
// @Router /v1/maintenance/{id} [DELETE]
// @Security Bearer
// @Summary Delete Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Success 204
// @Router /v1/maintenance/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryDelete() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())

View File

@@ -13,12 +13,12 @@ import (
// HandleGetUserNotifiers godoc
//
// @Summary Get Notifiers
// @Tags Notifiers
// @Produce json
// @Success 200 {object} []repo.NotifierOut
// @Router /v1/notifiers [GET]
// @Security Bearer
// @Summary Get Notifiers
// @Tags Notifiers
// @Produce json
// @Success 200 {object} []repo.NotifierOut
// @Router /v1/notifiers [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGetUserNotifiers() errchain.HandlerFunc {
fn := func(r *http.Request, _ struct{}) ([]repo.NotifierOut, error) {
user := services.UseUserCtx(r.Context())
@@ -30,13 +30,13 @@ func (ctrl *V1Controller) HandleGetUserNotifiers() errchain.HandlerFunc {
// HandleCreateNotifier godoc
//
// @Summary Create Notifier
// @Tags Notifiers
// @Produce json
// @Param payload body repo.NotifierCreate true "Notifier Data"
// @Success 200 {object} repo.NotifierOut
// @Router /v1/notifiers [POST]
// @Security Bearer
// @Summary Create Notifier
// @Tags Notifiers
// @Produce json
// @Param payload body repo.NotifierCreate true "Notifier Data"
// @Success 200 {object} repo.NotifierOut
// @Router /v1/notifiers [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleCreateNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, in repo.NotifierCreate) (repo.NotifierOut, error) {
auth := services.NewContext(r.Context())
@@ -48,12 +48,12 @@ func (ctrl *V1Controller) HandleCreateNotifier() errchain.HandlerFunc {
// HandleDeleteNotifier godocs
//
// @Summary Delete a Notifier
// @Tags Notifiers
// @Param id path string true "Notifier ID"
// @Success 204
// @Router /v1/notifiers/{id} [DELETE]
// @Security Bearer
// @Summary Delete a Notifier
// @Tags Notifiers
// @Param id path string true "Notifier ID"
// @Success 204
// @Router /v1/notifiers/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleDeleteNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
@@ -65,13 +65,13 @@ func (ctrl *V1Controller) HandleDeleteNotifier() errchain.HandlerFunc {
// HandleUpdateNotifier godocs
//
// @Summary Update Notifier
// @Tags Notifiers
// @Param id path string true "Notifier ID"
// @Param payload body repo.NotifierUpdate true "Notifier Data"
// @Success 200 {object} repo.NotifierOut
// @Router /v1/notifiers/{id} [PUT]
// @Security Bearer
// @Summary Update Notifier
// @Tags Notifiers
// @Param id path string true "Notifier ID"
// @Param payload body repo.NotifierUpdate true "Notifier Data"
// @Success 200 {object} repo.NotifierOut
// @Router /v1/notifiers/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUpdateNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, in repo.NotifierUpdate) (repo.NotifierOut, error) {
auth := services.NewContext(r.Context())
@@ -83,13 +83,14 @@ func (ctrl *V1Controller) HandleUpdateNotifier() errchain.HandlerFunc {
// HandlerNotifierTest godoc
//
// @Summary Test Notifier
// @Tags Notifiers
// @Produce json
// @Param url query string true "URL"
// @Success 204
// @Router /v1/notifiers/test [POST]
// @Security Bearer
// @Summary Test Notifier
// @Tags Notifiers
// @Produce json
// @Param id path string true "Notifier ID"
// @Param url query string true "URL"
// @Success 204
// @Router /v1/notifiers/test [POST]
// @Security Bearer
func (ctrl *V1Controller) HandlerNotifierTest() errchain.HandlerFunc {
type body struct {
URL string `json:"url" validate:"required"`

View File

@@ -1,332 +0,0 @@
package v1
import (
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"strings"
"time"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)
type UPCITEMDBResponse struct {
Code string `json:"code"`
Total int `json:"total"`
Offset int `json:"offset"`
Items []struct {
Ean string `json:"ean"`
Title string `json:"title"`
Description string `json:"description"`
Upc string `json:"upc"`
Brand string `json:"brand"`
Model string `json:"model"`
Color string `json:"color"`
Size string `json:"size"`
Dimension string `json:"dimension"`
Weight string `json:"weight"`
Category string `json:"category"`
LowestRecordedPrice float64 `json:"lowest_recorded_price"`
HighestRecordedPrice float64 `json:"highest_recorded_price"`
Images []string `json:"images"`
Offers []struct {
Merchant string `json:"merchant"`
Domain string `json:"domain"`
Title string `json:"title"`
Currency string `json:"currency"`
ListPrice string `json:"list_price"`
Price float64 `json:"price"`
Shipping string `json:"shipping"`
Condition string `json:"condition"`
Availability string `json:"availability"`
Link string `json:"link"`
UpdatedT int `json:"updated_t"`
} `json:"offers"`
Asin string `json:"asin"`
Elid string `json:"elid"`
} `json:"items"`
}
type BARCODESPIDER_COMResponse struct {
ItemResponse struct {
Code int `json:"code"`
Status string `json:"status"`
Message string `json:"message"`
} `json:"item_response"`
ItemAttributes struct {
Title string `json:"title"`
Upc string `json:"upc"`
Ean string `json:"ean"`
ParentCategory string `json:"parent_category"`
Category string `json:"category"`
Brand string `json:"brand"`
Model string `json:"model"`
Mpn string `json:"mpn"`
Manufacturer string `json:"manufacturer"`
Publisher string `json:"publisher"`
Asin string `json:"asin"`
Color string `json:"color"`
Size string `json:"size"`
Weight string `json:"weight"`
Image string `json:"image"`
IsAdult string `json:"is_adult"`
Description string `json:"description"`
} `json:"item_attributes"`
Stores []struct {
StoreName string `json:"store_name"`
Title string `json:"title"`
Image string `json:"image"`
Price string `json:"price"`
Currency string `json:"currency"`
Link string `json:"link"`
Updated string `json:"updated"`
} `json:"Stores"`
}
// HandleGenerateQRCode godoc
//
// @Summary Search EAN from Barcode
// @Tags Items
// @Produce json
// @Param data query string false "barcode to be searched"
// @Success 200 {object} []repo.BarcodeProduct
// @Router /v1/products/search-from-barcode [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleProductSearchFromBarcode(conf config.BarcodeAPIConf) errchain.HandlerFunc {
type query struct {
// 80 characters is the longest non-2D barcode length (GS1-128)
EAN string `schema:"productEAN" validate:"required,max=80"`
}
return func(w http.ResponseWriter, r *http.Request) error {
q, err := adapters.DecodeQuery[query](r)
if err != nil {
return err
}
const TIMEOUT_SEC = 10
log.Info().Msg("Processing barcode lookup request on: " + q.EAN)
// Search on UPCITEMDB
var products []repo.BarcodeProduct
// www.ean-search.org/: not free
// Example code: dewalt 5035048748428
upcitemdb := func(iEan string) ([]repo.BarcodeProduct, error) {
client := &http.Client{Timeout: TIMEOUT_SEC * time.Second}
resp, err := client.Get("https://api.upcitemdb.com/prod/trial/lookup?upc=" + iEan)
if err != nil {
return nil, err
}
defer func() {
err = errors.Join(err, resp.Body.Close())
}()
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("API returned status code: %d", resp.StatusCode)
}
// Read the response body.
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
// Uncomment the following lines for debugging:
// sb := string(body)
// log.Debug().Msg("Response: " + sb)
var result UPCITEMDBResponse
if err := json.Unmarshal(body, &result); err != nil { // Parse []byte to go struct pointer
log.Error().Msg("Can not unmarshal JSON")
}
var res []repo.BarcodeProduct
for _, it := range result.Items {
var p repo.BarcodeProduct
p.SearchEngineName = "upcitemdb.com"
p.Barcode = iEan
p.Item.Description = it.Description
p.Item.Name = it.Title
p.Manufacturer = it.Brand
p.ModelNumber = it.Model
if len(it.Images) != 0 {
p.ImageURL = it.Images[0]
}
res = append(res, p)
}
return res, nil
}
ps, err := upcitemdb(q.EAN)
if err != nil {
log.Error().Msg("Can not retrieve product from upcitemdb.com" + err.Error())
}
// Barcode spider implementation
barcodespider := func(tokenAPI string, iEan string) ([]repo.BarcodeProduct, error) {
if len(tokenAPI) == 0 {
return nil, errors.New("no api token configured for barcodespider. " +
"Please define the api token in environment variable HBOX_BARCODE_TOKEN_BARCODESPIDER")
}
req, err := http.NewRequest(
"GET", "https://api.barcodespider.com/v1/lookup?upc="+iEan, nil)
if err != nil {
return nil, err
}
req.Header.Add("token", tokenAPI)
client := &http.Client{Timeout: TIMEOUT_SEC * time.Second}
resp, err := client.Do(req)
if err != nil {
return nil, err
}
// Defer the call to Body.Close() and join any close error with the existing
// error so neither is overridden.
defer func() {
err = errors.Join(err, resp.Body.Close())
}()
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("barcodespider API returned status code: %d", resp.StatusCode)
}
// Read the response body.
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
// Uncomment the following lines for debugging:
// sb := string(body)
// log.Debug().Msg("Response: " + sb)
var result BARCODESPIDER_COMResponse
if err := json.Unmarshal(body, &result); err != nil { // Parse []byte to go struct pointer
log.Error().Msg("Can not unmarshal JSON")
}
// TODO: check 200 code on HTTP response.
var p repo.BarcodeProduct
p.Barcode = iEan
p.SearchEngineName = "barcodespider.com"
p.Item.Name = result.ItemAttributes.Title
p.Item.Description = result.ItemAttributes.Description
p.Manufacturer = result.ItemAttributes.Brand
p.ModelNumber = result.ItemAttributes.Model
p.ImageURL = result.ItemAttributes.Image
var res []repo.BarcodeProduct
res = append(res, p)
return res, nil
}
ps2, err := barcodespider(conf.TokenBarcodespider, q.EAN)
if err != nil {
log.Error().Msg("Can not retrieve product from barcodespider.com: " + err.Error())
}
// Merge everything.
products = append(products, ps...)
products = append(products, ps2...)
// Retrieve images if possible
for i := range products {
p := &products[i]
if len(p.ImageURL) == 0 {
continue
}
// Validate URL is HTTPS
u, err := url.Parse(p.ImageURL)
if err != nil || u.Scheme != "https" {
log.Warn().Msg("Skipping non-HTTPS image URL: " + p.ImageURL)
continue
}
client := &http.Client{Timeout: TIMEOUT_SEC * time.Second}
res, err := client.Get(p.ImageURL)
if err != nil {
log.Warn().Msg("Cannot fetch image for URL: " + p.ImageURL + ": " + err.Error())
// Skip this product's image; res is nil on error and must not be dereferenced.
continue
}
defer func() {
err = errors.Join(err, res.Body.Close())
}()
// Validate response
if res.StatusCode != http.StatusOK {
continue
}
// Check content type
contentType := res.Header.Get("Content-Type")
if !strings.HasPrefix(contentType, "image/") {
continue
}
// Limit image size to 8MB
limitedReader := io.LimitReader(res.Body, 8*1024*1024)
// Read data of image
bytes, err := io.ReadAll(limitedReader)
if err != nil {
log.Warn().Msg(err.Error())
continue
}
// Convert to Base64
var base64Encoding string
// Determine the content type of the image file
mimeType := http.DetectContentType(bytes)
// Prepend the appropriate URI scheme header depending
// on the MIME type
switch mimeType {
case "image/jpeg":
base64Encoding += "data:image/jpeg;base64,"
case "image/png":
base64Encoding += "data:image/png;base64,"
default:
continue
}
// Append the base64 encoded output
base64Encoding += base64.StdEncoding.EncodeToString(bytes)
p.ImageBase64 = base64Encoding
}
if len(products) != 0 {
return server.JSON(w, http.StatusOK, products)
}
return server.JSON(w, http.StatusNoContent, nil)
}
}
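// Example of the resulting data URI for a fetched PNG image:
// "data:image/png;base64,iVBORw0KGgo..." (MIME prefix followed by the base64
// payload), suitable for use directly as an <img> src.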

View File

@@ -20,13 +20,13 @@ var qrcodeLogo []byte
// HandleGenerateQRCode godoc
//
// @Summary Create QR Code
// @Tags Items
// @Produce json
// @Param data query string false "data to be encoded into qrcode"
// @Success 200 {string} string "image/jpeg"
// @Router /v1/qrcode [GET]
// @Security Bearer
// @Summary Create QR Code
// @Tags Items
// @Produce json
// @Param data query string false "data to be encoded into qrcode"
// @Success 200 {string} string "image/jpeg"
// @Router /v1/qrcode [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGenerateQRCode() errchain.HandlerFunc {
type query struct {
// 4,296 characters is the maximum length of a QR code

View File

@@ -8,12 +8,12 @@ import (
// HandleBillOfMaterialsExport godoc
//
// @Summary Export Bill of Materials
// @Tags Reporting
// @Produce json
// @Success 200 {string} string "text/csv"
// @Router /v1/reporting/bill-of-materials [GET]
// @Security Bearer
// @Summary Export Bill of Materials
// @Tags Reporting
// @Produce json
// @Success 200 {string} string "text/csv"
// @Router /v1/reporting/bill-of-materials [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleBillOfMaterialsExport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
actor := services.UseUserCtx(r.Context())

View File

@@ -14,12 +14,12 @@ import (
// HandleGroupStatisticsLocations godoc
//
// @Summary Get Location Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/locations [GET]
// @Security Bearer
// @Summary Get Location Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/locations [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsLocations() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.TotalsByOrganizer, error) {
auth := services.NewContext(r.Context())
@@ -31,12 +31,12 @@ func (ctrl *V1Controller) HandleGroupStatisticsLocations() errchain.HandlerFunc
// HandleGroupStatisticsLabels godoc
//
// @Summary Get Label Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/labels [GET]
// @Security Bearer
// @Summary Get Label Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/labels [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsLabels() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.TotalsByOrganizer, error) {
auth := services.NewContext(r.Context())
@@ -48,12 +48,12 @@ func (ctrl *V1Controller) HandleGroupStatisticsLabels() errchain.HandlerFunc {
// HandleGroupStatistics godoc
//
// @Summary Get Group Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} repo.GroupStatistics
// @Router /v1/groups/statistics [GET]
// @Security Bearer
// @Summary Get Group Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} repo.GroupStatistics
// @Router /v1/groups/statistics [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatistics() errchain.HandlerFunc {
fn := func(r *http.Request) (repo.GroupStatistics, error) {
auth := services.NewContext(r.Context())
@@ -65,14 +65,14 @@ func (ctrl *V1Controller) HandleGroupStatistics() errchain.HandlerFunc {
// HandleGroupStatisticsPriceOverTime godoc
//
// @Summary Get Purchase Price Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} repo.ValueOverTime
// @Param start query string false "start date"
// @Param end query string false "end date"
// @Router /v1/groups/statistics/purchase-price [GET]
// @Security Bearer
// @Summary Get Purchase Price Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} repo.ValueOverTime
// @Param start query string false "start date"
// @Param end query string false "end date"
// @Router /v1/groups/statistics/purchase-price [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsPriceOverTime() errchain.HandlerFunc {
parseDate := func(datestr string, defaultDate time.Time) (time.Time, error) {
if datestr == "" {

View File

@@ -15,20 +15,14 @@ import (
// HandleUserRegistration godoc
//
// @Summary Register New User
// @Tags User
// @Produce json
// @Param payload body services.UserRegistration true "User Data"
// @Success 204
// @Failure 403 {string} string "Local login is not enabled"
// @Router /v1/users/register [Post]
// @Summary Register New User
// @Tags User
// @Produce json
// @Param payload body services.UserRegistration true "User Data"
// @Success 204
// @Router /v1/users/register [Post]
func (ctrl *V1Controller) HandleUserRegistration() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
// Forbidden if local login is not enabled
if !ctrl.config.Options.AllowLocalLogin {
return validate.NewRequestError(fmt.Errorf("local login is not enabled"), http.StatusForbidden)
}
regData := services.UserRegistration{}
if err := server.Decode(r, &regData); err != nil {
@@ -52,12 +46,12 @@ func (ctrl *V1Controller) HandleUserRegistration() errchain.HandlerFunc {
// HandleUserSelf godoc
//
// @Summary Get User Self
// @Tags User
// @Produce json
// @Success 200 {object} Wrapped{item=repo.UserOut}
// @Router /v1/users/self [GET]
// @Security Bearer
// @Summary Get User Self
// @Tags User
// @Produce json
// @Success 200 {object} Wrapped{item=repo.UserOut}
// @Router /v1/users/self [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelf() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
token := services.UseTokenCtx(r.Context())
@@ -73,13 +67,13 @@ func (ctrl *V1Controller) HandleUserSelf() errchain.HandlerFunc {
// HandleUserSelfUpdate godoc
//
// @Summary Update Account
// @Tags User
// @Produce json
// @Param payload body repo.UserUpdate true "User Data"
// @Success 200 {object} Wrapped{item=repo.UserUpdate}
// @Router /v1/users/self [PUT]
// @Security Bearer
// @Summary Update Account
// @Tags User
// @Produce json
// @Param payload body repo.UserUpdate true "User Data"
// @Success 200 {object} Wrapped{item=repo.UserUpdate}
// @Router /v1/users/self [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfUpdate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
updateData := repo.UserUpdate{}
@@ -100,12 +94,12 @@ func (ctrl *V1Controller) HandleUserSelfUpdate() errchain.HandlerFunc {
// HandleUserSelfDelete godoc
//
// @Summary Delete Account
// @Tags User
// @Produce json
// @Success 204
// @Router /v1/users/self [DELETE]
// @Security Bearer
// @Summary Delete Account
// @Tags User
// @Produce json
// @Success 204
// @Router /v1/users/self [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfDelete() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
if ctrl.isDemo {
@@ -130,12 +124,12 @@ type (
// HandleUserSelfChangePassword godoc
//
// @Summary Change Password
// @Tags User
// @Success 204
// @Param payload body ChangePassword true "Password Payload"
// @Router /v1/users/change-password [PUT]
// @Security Bearer
// @Summary Change Password
// @Tags User
// @Success 204
// @Param payload body ChangePassword true "Password Payload"
// @Router /v1/users/change-password [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfChangePassword() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
if ctrl.isDemo {

View File

@@ -18,10 +18,7 @@ func (a *app) setupLogger() {
}
level, err := zerolog.ParseLevel(a.conf.Log.Level)
if err != nil {
log.Error().Err(err).Str("level", a.conf.Log.Level).Msg("invalid log level, falling back to info")
zerolog.SetGlobalLevel(zerolog.InfoLevel)
} else {
if err == nil {
zerolog.SetGlobalLevel(level)
}
}

View File

@@ -1,17 +1,18 @@
package main
import (
"bytes"
"context"
"errors"
"fmt"
"net/http"
"strings"
"os"
"path/filepath"
"time"
atlas "ariga.io/atlas/sql/migrate"
"entgo.io/ent/dialect/sql/schema"
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"
"github.com/pressly/goose/v3"
"github.com/sysadminsmedia/homebox/backend/internal/sys/analytics"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/graceful"
@@ -26,20 +27,8 @@ import (
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
"github.com/sysadminsmedia/homebox/backend/internal/web/mid"
"go.balki.me/anyhttp"
_ "github.com/lib/pq"
_ "github.com/sysadminsmedia/homebox/backend/internal/data/migrations/postgres"
_ "github.com/sysadminsmedia/homebox/backend/internal/data/migrations/sqlite3"
_ "github.com/sysadminsmedia/homebox/backend/pkgs/cgofreesqlite"
_ "gocloud.dev/pubsub/awssnssqs"
_ "gocloud.dev/pubsub/azuresb"
_ "gocloud.dev/pubsub/gcppubsub"
_ "gocloud.dev/pubsub/kafkapubsub"
_ "gocloud.dev/pubsub/mempubsub"
_ "gocloud.dev/pubsub/natspubsub"
_ "gocloud.dev/pubsub/rabbitpubsub"
)
var (
@@ -57,33 +46,15 @@ func build() string {
return fmt.Sprintf("%s, commit %s, built at %s", version, short, buildTime)
}
func validatePostgresSSLMode(sslMode string) bool {
validModes := map[string]bool{
"": true,
"disable": true,
"allow": true,
"prefer": true,
"require": true,
"verify-ca": true,
"verify-full": true,
}
return validModes[strings.ToLower(strings.TrimSpace(sslMode))]
}
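// Example: validatePostgresSSLMode(" Require ") returns true because the value
// is trimmed and lower-cased before the lookup, while "required" is not a valid
// libpq mode and returns false.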
// @title Homebox API
// @version 1.0
// @description Track, Manage, and Organize your Things.
// @contact.name Homebox Team
// @contact.url https://discord.homebox.software
// @host demo.homebox.software
// @schemes https http
// @BasePath /api
// @securityDefinitions.apikey Bearer
// @in header
// @name Authorization
// @description "Type 'Bearer TOKEN' to correctly set the API Key"
// @externalDocs.url https://homebox.software/en/api
// @title Homebox API
// @version 1.0
// @description Track, Manage, and Organize your Things.
// @contact.name Don't
// @BasePath /api
// @securityDefinitions.apikey Bearer
// @in header
// @name Authorization
// @description "Type 'Bearer TOKEN' to correctly set the API Key"
func main() {
zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack
@@ -103,64 +74,82 @@ func run(cfg *config.Config) error {
// =========================================================================
// Initialize Database & Repos
err := setupStorageDir(cfg)
err := os.MkdirAll(cfg.Storage.Data, 0o755)
if err != nil {
return err
log.Fatal().Err(err).Msg("failed to create data directory")
}
if strings.ToLower(cfg.Database.Driver) == config.DriverPostgres {
if !validatePostgresSSLMode(cfg.Database.SslMode) {
log.Error().Str("sslmode", cfg.Database.SslMode).Msg("invalid sslmode")
return fmt.Errorf("invalid sslmode: %s", cfg.Database.SslMode)
c, err := ent.Open("sqlite3", cfg.Storage.SqliteURL)
if err != nil {
log.Fatal().
Err(err).
Str("driver", "sqlite").
Str("url", cfg.Storage.SqliteURL).
Msg("failed opening connection to sqlite")
}
defer func(c *ent.Client) {
err := c.Close()
if err != nil {
log.Fatal().Err(err).Msg("failed to close database connection")
}
}
}(c)
databaseURL, err := setupDatabaseURL(cfg)
temp := filepath.Join(os.TempDir(), "migrations")
err = migrations.Write(temp)
if err != nil {
return err
}
c, err := ent.Open(strings.ToLower(cfg.Database.Driver), databaseURL)
dir, err := atlas.NewLocalDir(temp)
if err != nil {
return err
}
options := []schema.MigrateOption{
schema.WithDir(dir),
schema.WithDropColumn(true),
schema.WithDropIndex(true),
}
err = c.Schema.Create(context.Background(), options...)
if err != nil {
log.Error().
Err(err).
Str("driver", strings.ToLower(cfg.Database.Driver)).
Str("host", cfg.Database.Host).
Str("port", cfg.Database.Port).
Str("database", cfg.Database.Database).
Msg("failed opening connection to {driver} database at {host}:{port}/{database}")
return fmt.Errorf("failed opening connection to %s database at %s:%s/%s: %w",
strings.ToLower(cfg.Database.Driver),
cfg.Database.Host,
cfg.Database.Port,
cfg.Database.Database,
err,
)
}
migrationsFs, err := migrations.Migrations(strings.ToLower(cfg.Database.Driver))
if err != nil {
return fmt.Errorf("failed to get migrations for %s: %w", strings.ToLower(cfg.Database.Driver), err)
}
goose.SetBaseFS(migrationsFs)
err = goose.SetDialect(strings.ToLower(cfg.Database.Driver))
if err != nil {
log.Error().Str("driver", cfg.Database.Driver).Msg("unsupported database driver")
return fmt.Errorf("unsupported database driver: %s", cfg.Database.Driver)
}
err = goose.Up(c.Sql(), strings.ToLower(cfg.Database.Driver))
if err != nil {
log.Error().Err(err).Msg("failed to migrate database")
Str("driver", "sqlite").
Str("url", cfg.Storage.SqliteURL).
Msg("failed creating schema resources")
return err
}
collectFuncs, err := loadCurrencies(cfg)
err = os.RemoveAll(temp)
if err != nil {
log.Error().Err(err).Msg("failed to remove temporary directory for database migrations")
return err
}
collectFuncs := []currencies.CollectorFunc{
currencies.CollectDefaults(),
}
if cfg.Options.CurrencyConfig != "" {
log.Info().
Str("path", cfg.Options.CurrencyConfig).
Msg("loading currency config file")
content, err := os.ReadFile(cfg.Options.CurrencyConfig)
if err != nil {
log.Error().
Err(err).
Str("path", cfg.Options.CurrencyConfig).
Msg("failed to read currency config file")
return err
}
collectFuncs = append(collectFuncs, currencies.CollectJSON(bytes.NewReader(content)))
}
currencies, err := currencies.CollectionCurrencies(collectFuncs...)
if err != nil {
log.Error().
@@ -171,7 +160,7 @@ func run(cfg *config.Config) error {
app.bus = eventbus.New()
app.db = c
app.repos = repo.New(c, app.bus, cfg.Storage, cfg.Database.PubSubConnString, cfg.Thumbnail)
app.repos = repo.New(c, app.bus, cfg.Storage.Data)
app.services = services.New(
app.repos,
services.WithAutoIncrementAssetID(cfg.Options.AutoIncrementAssetID),
@@ -212,52 +201,91 @@ func run(cfg *config.Config) error {
_ = httpserver.Shutdown(context.Background())
}()
listener, addrType, addrCfg, err := anyhttp.GetListener(cfg.Web.Host)
if err == nil {
switch addrType {
case anyhttp.SystemdFD:
sysdCfg := addrCfg.(*anyhttp.SysdConfig)
if sysdCfg.IdleTimeout != nil {
log.Error().Msg("idle timeout not yet supported. Please remove and try again")
return errors.New("idle timeout not yet supported. Please remove and try again")
}
fallthrough
case anyhttp.UnixSocket:
log.Info().Msgf("Server is running on %s", cfg.Web.Host)
return httpserver.Serve(listener)
}
} else {
log.Debug().Msgf("anyhttp error: %v", err)
}
log.Info().Msgf("Server is running on %s:%s", cfg.Web.Host, cfg.Web.Port)
return httpserver.ListenAndServe()
})
// =========================================================================
// Start Reoccurring Tasks
registerRecurringTasks(app, cfg, runner)
// Send analytics if enabled at around midnight UTC
if cfg.Options.AllowAnalytics {
analyticsTime := time.Second
runner.AddPlugin(NewTask("send-analytics", analyticsTime, func(ctx context.Context) {
for {
now := time.Now().UTC()
nextMidnight := time.Date(now.Year(), now.Month(), now.Day()+1, 0, 0, 0, 0, time.UTC)
dur := time.Until(nextMidnight)
analyticsTime = dur
select {
case <-ctx.Done():
return
case <-time.After(dur):
log.Debug().Msg("running send analytics")
err := analytics.Send(version, build())
if err != nil {
log.Error().Err(err).Msg("failed to send analytics")
}
}
runner.AddFunc("eventbus", app.bus.Run)
runner.AddFunc("seed_database", func(ctx context.Context) error {
// TODO: Remove through external API that does setup
if cfg.Demo {
log.Info().Msg("Running in demo mode, creating demo data")
err := app.SetupDemo()
if err != nil {
log.Fatal().Msg(err.Error())
}
}
return nil
})
runner.AddPlugin(NewTask("purge-tokens", time.Duration(24)*time.Hour, func(ctx context.Context) {
_, err := app.repos.AuthTokens.PurgeExpiredTokens(ctx)
if err != nil {
log.Error().
Err(err).
Msg("failed to purge expired tokens")
}
}))
runner.AddPlugin(NewTask("purge-invitations", time.Duration(24)*time.Hour, func(ctx context.Context) {
_, err := app.repos.Groups.InvitationPurge(ctx)
if err != nil {
log.Error().
Err(err).
Msg("failed to purge expired invitations")
}
}))
runner.AddPlugin(NewTask("send-notifications", time.Duration(1)*time.Hour, func(ctx context.Context) {
now := time.Now()
if now.Hour() == 8 {
fmt.Println("run notifiers")
err := app.services.BackgroundService.SendNotifiersToday(context.Background())
if err != nil {
log.Error().
Err(err).
Msg("failed to send notifiers")
}
}
}))
// TODO: read from env var
if true {
runner.AddPlugin(NewTask("locale-update", time.Duration(24)*time.Hour, func(ctx context.Context) {
log.Debug().Msg("running locale update")
err := app.services.BackgroundService.UpdateLocales(ctx)
if err != nil {
log.Error().
Err(err).
Msg("failed to update locales")
}
}))
}
if cfg.Debug.Enabled {
runner.AddFunc("debug", func(ctx context.Context) error {
debugserver := http.Server{
Addr: fmt.Sprintf("%s:%s", cfg.Web.Host, cfg.Debug.Port),
Handler: app.debugRouter(),
ReadTimeout: cfg.Web.ReadTimeout,
WriteTimeout: cfg.Web.WriteTimeout,
IdleTimeout: cfg.Web.IdleTimeout,
}
go func() {
<-ctx.Done()
_ = debugserver.Shutdown(context.Background())
}()
log.Info().Msgf("Debug server is running on %s:%s", cfg.Web.Host, cfg.Debug.Port)
return debugserver.ListenAndServe()
})
}
return runner.Start(context.Background())
}

View File

@@ -1,626 +0,0 @@
package providers
import (
"context"
"crypto/rand"
"crypto/sha256"
"encoding/base64"
"fmt"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/coreos/go-oidc/v3/oidc"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
"golang.org/x/oauth2"
)
type OIDCProvider struct {
service *services.UserService
config *config.OIDCConf
options *config.Options
cookieSecure bool
provider *oidc.Provider
verifier *oidc.IDTokenVerifier
endpoint oauth2.Endpoint
}
type OIDCClaims struct {
Email string
Groups []string
Name string
Subject string
Issuer string
EmailVerified *bool
}
func NewOIDCProvider(service *services.UserService, config *config.OIDCConf, options *config.Options, cookieSecure bool) (*OIDCProvider, error) {
if !config.Enabled {
return nil, fmt.Errorf("OIDC is not enabled")
}
// Validate required configuration
if config.ClientID == "" {
return nil, fmt.Errorf("OIDC client ID is required when OIDC is enabled (set HBOX_OIDC_CLIENT_ID)")
}
if config.ClientSecret == "" {
return nil, fmt.Errorf("OIDC client secret is required when OIDC is enabled (set HBOX_OIDC_CLIENT_SECRET)")
}
if config.IssuerURL == "" {
return nil, fmt.Errorf("OIDC issuer URL is required when OIDC is enabled (set HBOX_OIDC_ISSUER_URL)")
}
ctx, cancel := context.WithTimeout(context.Background(), config.RequestTimeout)
defer cancel()
provider, err := oidc.NewProvider(ctx, config.IssuerURL)
if err != nil {
return nil, fmt.Errorf("failed to create OIDC provider from issuer URL: %w", err)
}
// Create ID token verifier
verifier := provider.Verifier(&oidc.Config{
ClientID: config.ClientID,
})
log.Info().
Str("issuer", config.IssuerURL).
Str("client_id", config.ClientID).
Str("scope", config.Scope).
Msg("OIDC provider initialized successfully with discovery")
return &OIDCProvider{
service: service,
config: config,
options: options,
cookieSecure: cookieSecure,
provider: provider,
verifier: verifier,
endpoint: provider.Endpoint(),
}, nil
}
func (p *OIDCProvider) Name() string {
return "oidc"
}
// Authenticate implements the AuthProvider interface but is not used for OIDC
// OIDC uses dedicated endpoints: GET /api/v1/users/login/oidc and GET /api/v1/users/login/oidc/callback
func (p *OIDCProvider) Authenticate(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error) {
_ = w
_ = r
return services.UserAuthTokenDetail{}, fmt.Errorf("OIDC authentication uses dedicated endpoints: /api/v1/users/login/oidc")
}
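// A minimal sketch of the PKCE values that pair with the code_verifier exchange
// in AuthenticateWithBaseURL, assuming the S256 challenge method (variable
// names are illustrative):
//
//	buf := make([]byte, 32)
//	_, _ = rand.Read(buf)
//	verifier := base64.RawURLEncoding.EncodeToString(buf)
//	sum := sha256.Sum256([]byte(verifier))
//	challenge := base64.RawURLEncoding.EncodeToString(sum[:])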
// AuthenticateWithBaseURL is the main authentication method that requires baseURL
// Called from handleCallback after state, nonce, and PKCE verification
func (p *OIDCProvider) AuthenticateWithBaseURL(baseURL, expectedNonce, pkceVerifier string, _ http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error) {
code := r.URL.Query().Get("code")
if code == "" {
return services.UserAuthTokenDetail{}, fmt.Errorf("missing authorization code")
}
// Get OAuth2 config for this request
oauth2Config := p.getOAuth2Config(baseURL)
// Exchange code for token with timeout and PKCE verifier
ctx, cancel := context.WithTimeout(r.Context(), p.config.RequestTimeout)
defer cancel()
token, err := oauth2Config.Exchange(ctx, code, oauth2.SetAuthURLParam("code_verifier", pkceVerifier))
if err != nil {
log.Err(err).Msg("failed to exchange OIDC code for token")
return services.UserAuthTokenDetail{}, fmt.Errorf("failed to exchange code for token")
}
// Extract ID token
idToken, ok := token.Extra("id_token").(string)
if !ok {
return services.UserAuthTokenDetail{}, fmt.Errorf("no id_token in response")
}
// Parse and validate the ID token using the library's verifier with timeout
verifyCtx, verifyCancel := context.WithTimeout(r.Context(), p.config.RequestTimeout)
defer verifyCancel()
idTokenStruct, err := p.verifier.Verify(verifyCtx, idToken)
if err != nil {
log.Err(err).Msg("failed to verify ID token")
return services.UserAuthTokenDetail{}, fmt.Errorf("failed to verify ID token")
}
// Extract claims from the verified token using dynamic parsing
var rawClaims map[string]interface{}
if err := idTokenStruct.Claims(&rawClaims); err != nil {
log.Err(err).Msg("failed to extract claims from ID token")
return services.UserAuthTokenDetail{}, fmt.Errorf("failed to extract claims from ID token")
}
// Attempt to retrieve UserInfo claims; use them as primary and fall back to ID token claims where they are missing or empty.
finalClaims := rawClaims
userInfoCtx, uiCancel := context.WithTimeout(r.Context(), p.config.RequestTimeout)
defer uiCancel()
userInfo, uiErr := p.provider.UserInfo(userInfoCtx, oauth2.StaticTokenSource(token))
if uiErr != nil {
log.Debug().Err(uiErr).Msg("OIDC UserInfo fetch failed; falling back to ID token claims")
} else {
var uiClaims map[string]interface{}
if err := userInfo.Claims(&uiClaims); err != nil {
log.Debug().Err(err).Msg("failed to decode UserInfo claims; falling back to ID token claims")
} else {
finalClaims = mergeOIDCClaims(uiClaims, rawClaims) // UserInfo first, then fill gaps from ID token
log.Debug().Int("userinfo_claims", len(uiClaims)).Int("id_token_claims", len(rawClaims)).Int("merged_claims", len(finalClaims)).Msg("merged UserInfo and ID token claims")
}
}
// Parse claims using configurable claim names (after merge)
claims, err := p.parseOIDCClaims(finalClaims)
if err != nil {
log.Err(err).Msg("failed to parse OIDC claims")
return services.UserAuthTokenDetail{}, fmt.Errorf("failed to parse OIDC claims: %w", err)
}
// Verify the nonce claim matches the expected value (the nonce comes only from the ID token and is preserved in the merged map)
tokenNonce, exists := finalClaims["nonce"]
if !exists {
log.Warn().Msg("nonce claim missing from ID token - possible replay attack")
return services.UserAuthTokenDetail{}, fmt.Errorf("nonce claim missing from token")
}
tokenNonceStr, ok := tokenNonce.(string)
if !ok {
log.Warn().Msg("nonce claim is not a string in ID token")
return services.UserAuthTokenDetail{}, fmt.Errorf("invalid nonce claim format")
}
if tokenNonceStr != expectedNonce {
log.Warn().Str("received", tokenNonceStr).Str("expected", expectedNonce).Msg("OIDC nonce mismatch - possible replay attack")
return services.UserAuthTokenDetail{}, fmt.Errorf("nonce parameter mismatch")
}
// Check if email is verified
if p.config.VerifyEmail {
if claims.EmailVerified == nil {
return services.UserAuthTokenDetail{}, fmt.Errorf("email verification status not found in token claims")
}
if !*claims.EmailVerified {
return services.UserAuthTokenDetail{}, fmt.Errorf("email not verified")
}
}
// Check group authorization if configured
if p.config.AllowedGroups != "" {
allowedGroups := strings.Split(p.config.AllowedGroups, ",")
if !p.hasAllowedGroup(claims.Groups, allowedGroups) {
log.Warn().
Strs("user_groups", claims.Groups).
Strs("allowed_groups", allowedGroups).
Str("user", claims.Email).
Msg("user not in allowed groups")
return services.UserAuthTokenDetail{}, fmt.Errorf("user not in allowed groups")
}
}
// Determine username from claims
email := claims.Email
if email == "" {
return services.UserAuthTokenDetail{}, fmt.Errorf("no email found in token claims")
}
if claims.Subject == "" {
return services.UserAuthTokenDetail{}, fmt.Errorf("no subject (sub) claim present")
}
if claims.Issuer == "" {
claims.Issuer = p.config.IssuerURL // fallback to configured issuer, though spec requires 'iss'
}
// Use the dedicated OIDC login method (issuer + subject identity)
sessionToken, err := p.service.LoginOIDC(r.Context(), claims.Issuer, claims.Subject, email, claims.Name)
if err != nil {
log.Err(err).Str("email", email).Str("issuer", claims.Issuer).Str("subject", claims.Subject).Msg("OIDC login failed")
return services.UserAuthTokenDetail{}, fmt.Errorf("OIDC login failed: %w", err)
}
return sessionToken, nil
}
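// parseOIDCClaims maps the raw claim set onto OIDCClaims using the configured
// claim names, falling back to the standard email, email_verified, name, and
// groups keys when no override is set.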
func (p *OIDCProvider) parseOIDCClaims(rawClaims map[string]interface{}) (OIDCClaims, error) {
var claims OIDCClaims
// Parse email claim
key := p.config.EmailClaim
if key == "" {
key = "email"
}
if emailValue, exists := rawClaims[key]; exists {
if email, ok := emailValue.(string); ok {
claims.Email = email
}
}
// Parse email_verified claim
if p.config.VerifyEmail {
key = p.config.EmailVerifiedClaim
if key == "" {
key = "email_verified"
}
if emailVerifiedValue, exists := rawClaims[key]; exists {
switch v := emailVerifiedValue.(type) {
case bool:
claims.EmailVerified = &v
case string:
if b, err := strconv.ParseBool(v); err == nil {
claims.EmailVerified = &b
}
}
}
}
// Parse name claim
key = p.config.NameClaim
if key == "" {
key = "name"
}
if nameValue, exists := rawClaims[key]; exists {
if name, ok := nameValue.(string); ok {
claims.Name = name
}
}
// Parse groups claim
key = p.config.GroupClaim
if key == "" {
key = "groups"
}
if groupsValue, exists := rawClaims[key]; exists {
switch groups := groupsValue.(type) {
case []interface{}:
for _, group := range groups {
if groupStr, ok := group.(string); ok {
claims.Groups = append(claims.Groups, groupStr)
}
}
case []string:
claims.Groups = groups
case string:
// Single group as string
claims.Groups = []string{groups}
}
}
// Parse subject claim (always "sub")
if subValue, exists := rawClaims["sub"]; exists {
if subject, ok := subValue.(string); ok {
claims.Subject = subject
}
}
// Parse issuer claim ("iss")
if issValue, exists := rawClaims["iss"]; exists {
if iss, ok := issValue.(string); ok {
claims.Issuer = iss
}
}
return claims, nil
}
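// hasAllowedGroup reports whether any of the user's groups appears in the
// configured allow-list; allowed group names are trimmed of surrounding
// whitespace before comparison, and an empty allow-list permits everyone.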
func (p *OIDCProvider) hasAllowedGroup(userGroups, allowedGroups []string) bool {
if len(allowedGroups) == 0 {
return true
}
allowedGroupsMap := make(map[string]bool)
for _, group := range allowedGroups {
allowedGroupsMap[strings.TrimSpace(group)] = true
}
for _, userGroup := range userGroups {
if allowedGroupsMap[userGroup] {
return true
}
}
return false
}
func (p *OIDCProvider) GetAuthURL(baseURL, state, nonce, pkceVerifier string) string {
oauth2Config := p.getOAuth2Config(baseURL)
pkceChallenge := generatePKCEChallenge(pkceVerifier)
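// The generated URL carries response_type=code, client_id, redirect_uri, scope,
// state, nonce, code_challenge, and code_challenge_method=S256.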
return oauth2Config.AuthCodeURL(state,
oidc.Nonce(nonce),
oauth2.SetAuthURLParam("code_challenge", pkceChallenge),
oauth2.SetAuthURLParam("code_challenge_method", "S256"))
}
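// getOAuth2Config builds the per-request oauth2.Config, deriving the redirect
// URL from the caller-facing base URL.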
func (p *OIDCProvider) getOAuth2Config(baseURL string) oauth2.Config {
// Construct full redirect URL with dedicated callback endpoint
redirectURL, err := url.JoinPath(baseURL, "/api/v1/users/login/oidc/callback")
if err != nil {
log.Err(err).Msg("failed to construct redirect URL")
return oauth2.Config{}
}
return oauth2.Config{
ClientID: p.config.ClientID,
ClientSecret: p.config.ClientSecret,
RedirectURL: redirectURL,
Endpoint: p.endpoint,
Scopes: strings.Fields(p.config.Scope),
}
}
// initiateOIDCFlow handles the initial OIDC authentication request by redirecting to the provider
func (p *OIDCProvider) initiateOIDCFlow(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error) {
// Generate state parameter for CSRF protection
state, err := generateSecureToken()
if err != nil {
log.Err(err).Msg("failed to generate OIDC state parameter")
return services.UserAuthTokenDetail{}, fmt.Errorf("internal server error")
}
// Generate nonce parameter for replay attack protection
nonce, err := generateSecureToken()
if err != nil {
log.Err(err).Msg("failed to generate OIDC nonce parameter")
return services.UserAuthTokenDetail{}, fmt.Errorf("internal server error")
}
// Generate PKCE verifier for code interception protection
pkceVerifier, err := generatePKCEVerifier()
if err != nil {
log.Err(err).Msg("failed to generate OIDC PKCE verifier")
return services.UserAuthTokenDetail{}, fmt.Errorf("internal server error")
}
// Get base URL from request
baseURL := p.getBaseURL(r)
u, _ := url.Parse(baseURL)
domain := u.Hostname()
if domain == "" {
domain = noPort(r.Host)
}
// Store state in session cookie for validation
http.SetCookie(w, &http.Cookie{
Name: "oidc_state",
Value: state,
Expires: time.Now().Add(p.config.StateExpiry),
Domain: domain,
Secure: p.isSecure(r),
HttpOnly: true,
Path: "/",
SameSite: http.SameSiteLaxMode,
})
// Store nonce in session cookie for validation
http.SetCookie(w, &http.Cookie{
Name: "oidc_nonce",
Value: nonce,
Expires: time.Now().Add(p.config.StateExpiry),
Domain: domain,
Secure: p.isSecure(r),
HttpOnly: true,
Path: "/",
SameSite: http.SameSiteLaxMode,
})
// Store PKCE verifier in session cookie for token exchange
http.SetCookie(w, &http.Cookie{
Name: "oidc_pkce_verifier",
Value: pkceVerifier,
Expires: time.Now().Add(p.config.StateExpiry),
Domain: domain,
Secure: p.isSecure(r),
HttpOnly: true,
Path: "/",
SameSite: http.SameSiteLaxMode,
})
// Generate auth URL and redirect
authURL := p.GetAuthURL(baseURL, state, nonce, pkceVerifier)
http.Redirect(w, r, authURL, http.StatusFound)
// Return empty token since this is a redirect response
return services.UserAuthTokenDetail{}, nil
}
// handleCallback processes the OAuth2 callback from the OIDC provider
func (p *OIDCProvider) handleCallback(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error) {
// Helper to clear the OIDC state, nonce, and PKCE verifier cookies using the computed domain
baseURL := p.getBaseURL(r)
u, _ := url.Parse(baseURL)
domain := u.Hostname()
if domain == "" {
domain = noPort(r.Host)
}
clearCookies := func() {
http.SetCookie(w, &http.Cookie{
Name: "oidc_state",
Value: "",
Expires: time.Unix(0, 0),
Domain: domain,
MaxAge: -1,
Secure: p.isSecure(r),
HttpOnly: true,
Path: "/",
SameSite: http.SameSiteLaxMode,
})
http.SetCookie(w, &http.Cookie{
Name: "oidc_nonce",
Value: "",
Expires: time.Unix(0, 0),
Domain: domain,
MaxAge: -1,
Secure: p.isSecure(r),
HttpOnly: true,
Path: "/",
SameSite: http.SameSiteLaxMode,
})
http.SetCookie(w, &http.Cookie{
Name: "oidc_pkce_verifier",
Value: "",
Expires: time.Unix(0, 0),
Domain: domain,
MaxAge: -1,
Secure: p.isSecure(r),
HttpOnly: true,
Path: "/",
SameSite: http.SameSiteLaxMode,
})
}
// Check for OAuth error responses first
if errCode := r.URL.Query().Get("error"); errCode != "" {
errDesc := r.URL.Query().Get("error_description")
log.Warn().Str("error", errCode).Str("description", errDesc).Msg("OIDC provider returned error")
clearCookies()
return services.UserAuthTokenDetail{}, fmt.Errorf("OIDC provider error: %s - %s", errCode, errDesc)
}
// Verify state parameter
stateCookie, err := r.Cookie("oidc_state")
if err != nil {
log.Warn().Err(err).Msg("OIDC state cookie not found - possible CSRF attack or expired session")
clearCookies()
return services.UserAuthTokenDetail{}, fmt.Errorf("state cookie not found")
}
stateParam := r.URL.Query().Get("state")
if stateParam == "" {
log.Warn().Msg("OIDC state parameter missing from callback")
clearCookies()
return services.UserAuthTokenDetail{}, fmt.Errorf("state parameter missing")
}
if stateParam != stateCookie.Value {
log.Warn().Str("received", stateParam).Str("expected", stateCookie.Value).Msg("OIDC state mismatch - possible CSRF attack")
clearCookies()
return services.UserAuthTokenDetail{}, fmt.Errorf("state parameter mismatch")
}
// Verify nonce parameter
nonceCookie, err := r.Cookie("oidc_nonce")
if err != nil {
log.Warn().Err(err).Msg("OIDC nonce cookie not found - possible replay attack or expired session")
clearCookies()
return services.UserAuthTokenDetail{}, fmt.Errorf("nonce cookie not found")
}
// Verify PKCE verifier parameter
pkceCookie, err := r.Cookie("oidc_pkce_verifier")
if err != nil {
log.Warn().Err(err).Msg("OIDC PKCE verifier cookie not found - possible code interception attack or expired session")
clearCookies()
return services.UserAuthTokenDetail{}, fmt.Errorf("PKCE verifier cookie not found")
}
// Clear cookies before proceeding to token verification
clearCookies()
// Use the existing callback logic but return the token instead of redirecting
return p.AuthenticateWithBaseURL(baseURL, nonceCookie.Value, pkceCookie.Value, w, r)
}
// Helper functions
func generateSecureToken() (string, error) {
// Generate 32 bytes of cryptographically secure random data
bytes := make([]byte, 32)
_, err := rand.Read(bytes)
if err != nil {
return "", fmt.Errorf("failed to generate secure random token: %w", err)
}
// Use URL-safe base64 encoding without padding for clean URLs
return base64.RawURLEncoding.EncodeToString(bytes), nil
}
// generatePKCEVerifier generates a cryptographically secure code verifier for PKCE
func generatePKCEVerifier() (string, error) {
// PKCE verifiers must be 43-128 characters; we use 43 for efficiency
// 32 bytes = 43 characters when base64url encoded without padding
bytes := make([]byte, 32)
_, err := rand.Read(bytes)
if err != nil {
return "", fmt.Errorf("failed to generate PKCE verifier: %w", err)
}
return base64.RawURLEncoding.EncodeToString(bytes), nil
}
// generatePKCEChallenge generates a code challenge from a verifier using S256 method
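// For reference, the RFC 7636 Appendix B test vector: verifier
// "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk" yields the challenge
// "E9Melhoa2OwvFrEMTJguCHaoeK1t8URWbuGJSstw-cM".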
func generatePKCEChallenge(verifier string) string {
hash := sha256.Sum256([]byte(verifier))
return base64.RawURLEncoding.EncodeToString(hash[:])
}
func noPort(host string) string {
return strings.Split(host, ":")[0]
}
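// getBaseURL reconstructs the externally visible base URL, preferring the
// configured hostname and, when proxy trust is enabled, the X-Forwarded-Proto
// and X-Forwarded-Host headers.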
func (p *OIDCProvider) getBaseURL(r *http.Request) string {
scheme := "http"
if r.TLS != nil {
scheme = "https"
} else if p.options.TrustProxy && r.Header.Get("X-Forwarded-Proto") == "https" {
scheme = "https"
}
host := r.Host
if p.options.Hostname != "" {
host = p.options.Hostname
} else if p.options.TrustProxy {
if xfHost := r.Header.Get("X-Forwarded-Host"); xfHost != "" {
host = xfHost
}
}
return scheme + "://" + host
}
func (p *OIDCProvider) isSecure(r *http.Request) bool {
_ = r
return p.cookieSecure
}
// InitiateOIDCFlow starts the OIDC authentication flow by redirecting to the provider
func (p *OIDCProvider) InitiateOIDCFlow(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error) {
return p.initiateOIDCFlow(w, r)
}
// HandleCallback processes the OIDC callback and returns the authenticated user token
func (p *OIDCProvider) HandleCallback(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error) {
return p.handleCallback(w, r)
}
func mergeOIDCClaims(primary, secondary map[string]interface{}) map[string]interface{} {
// primary has precedence; fill missing/empty values from secondary.
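// e.g. merging UserInfo claims {"email": "a@example.com"} over ID token claims
// {"email": "", "name": "Alice"} yields {"email": "a@example.com", "name": "Alice"}.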
merged := make(map[string]interface{}, len(primary)+len(secondary))
for k, v := range primary {
merged[k] = v
}
for k, v := range secondary {
if existing, ok := merged[k]; !ok || isEmptyClaim(existing) {
merged[k] = v
}
}
return merged
}
func isEmptyClaim(v interface{}) bool {
if v == nil {
return true
}
switch val := v.(type) {
case string:
return val == ""
case []interface{}:
return len(val) == 0
case []string:
return len(val) == 0
default:
return false
}
}

View File

@@ -1,151 +0,0 @@
package main
import (
"context"
"fmt"
"net/http"
"time"
"github.com/google/uuid"
"github.com/hay-kot/httpkit/graceful"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
"github.com/sysadminsmedia/homebox/backend/pkgs/utils"
"gocloud.dev/pubsub"
)
func registerRecurringTasks(app *app, cfg *config.Config, runner *graceful.Runner) {
runner.AddFunc("eventbus", app.bus.Run)
runner.AddFunc("seed_database", func(ctx context.Context) error {
if cfg.Demo {
log.Info().Msg("Running in demo mode, creating demo data")
err := app.SetupDemo()
if err != nil {
log.Error().Err(err).Msg("failed to setup demo data")
return fmt.Errorf("failed to setup demo data: %w", err)
}
}
return nil
})
runner.AddPlugin(NewTask("purge-tokens", 24*time.Hour, func(ctx context.Context) {
_, err := app.repos.AuthTokens.PurgeExpiredTokens(ctx)
if err != nil {
log.Error().Err(err).Msg("failed to purge expired tokens")
}
}))
runner.AddPlugin(NewTask("purge-invitations", 24*time.Hour, func(ctx context.Context) {
_, err := app.repos.Groups.InvitationPurge(ctx)
if err != nil {
log.Error().Err(err).Msg("failed to purge expired invitations")
}
}))
runner.AddPlugin(NewTask("send-notifications", time.Hour, func(ctx context.Context) {
now := time.Now()
if now.Hour() == 8 {
fmt.Println("run notifiers")
err := app.services.BackgroundService.SendNotifiersToday(context.Background())
if err != nil {
log.Error().Err(err).Msg("failed to send notifiers")
}
}
}))
if cfg.Thumbnail.Enabled {
runner.AddFunc("create-thumbnails-subscription", func(ctx context.Context) error {
pubsubString, err := utils.GenerateSubPubConn(cfg.Database.PubSubConnString, "thumbnails")
if err != nil {
log.Error().Err(err).Msg("failed to generate pubsub connection string")
return err
}
topic, err := pubsub.OpenTopic(ctx, pubsubString)
if err != nil {
return err
}
defer func(topic *pubsub.Topic, ctx context.Context) {
err := topic.Shutdown(ctx)
if err != nil {
log.Err(err).Msg("fail to shutdown pubsub topic")
}
}(topic, ctx)
subscription, err := pubsub.OpenSubscription(ctx, pubsubString)
if err != nil {
log.Err(err).Msg("failed to open pubsub topic")
return err
}
defer func(topic *pubsub.Subscription, ctx context.Context) {
err := topic.Shutdown(ctx)
if err != nil {
log.Err(err).Msg("fail to shutdown pubsub topic")
}
}(subscription, ctx)
for {
select {
case <-ctx.Done():
return ctx.Err()
default:
msg, err := subscription.Receive(ctx)
if err != nil {
log.Err(err).Msg("failed to receive message from pubsub topic")
continue
}
if msg == nil {
log.Warn().Msg("received nil message from pubsub topic")
continue
}
log.Debug().Msg("received thumbnail generation request from pubsub topic")
groupId, err := uuid.Parse(msg.Metadata["group_id"])
if err != nil {
log.Error().Err(err).Str("group_id", msg.Metadata["group_id"]).Msg("failed to parse group ID from message metadata")
}
attachmentId, err := uuid.Parse(msg.Metadata["attachment_id"])
if err != nil {
log.Error().Err(err).Str("attachment_id", msg.Metadata["attachment_id"]).Msg("failed to parse attachment ID from message metadata")
}
err = app.repos.Attachments.CreateThumbnail(ctx, groupId, attachmentId, msg.Metadata["title"], msg.Metadata["path"])
if err != nil {
log.Err(err).Msg("failed to create thumbnail")
}
msg.Ack()
}
}
})
}
if cfg.Options.GithubReleaseCheck {
runner.AddPlugin(NewTask("get-latest-github-release", time.Hour, func(ctx context.Context) {
log.Debug().Msg("running get latest github release")
err := app.services.BackgroundService.GetLatestGithubRelease(context.Background())
if err != nil {
log.Error().Err(err).Msg("failed to get latest github release")
}
}))
}
if cfg.Debug.Enabled {
runner.AddFunc("debug", func(ctx context.Context) error {
debugserver := http.Server{
Addr: fmt.Sprintf("%s:%s", cfg.Web.Host, cfg.Debug.Port),
Handler: app.debugRouter(),
ReadTimeout: cfg.Web.ReadTimeout,
WriteTimeout: cfg.Web.WriteTimeout,
IdleTimeout: cfg.Web.IdleTimeout,
}
go func() {
<-ctx.Done()
_ = debugserver.Shutdown(context.Background())
}()
log.Info().Msgf("Debug server is running on %s:%s", cfg.Web.Host, cfg.Debug.Port)
return debugserver.ListenAndServe()
})
// Print the configuration to the console
cfg.Print()
}
}

View File

@@ -52,7 +52,6 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
a.services,
a.repos,
a.bus,
a.conf,
v1.WithMaxUploadSize(a.conf.Web.MaxUploadSize),
v1.WithRegistration(a.conf.Options.AllowRegistration),
v1.WithDemoStatus(a.conf.Demo), // Disable Password Change in Demo Mode
@@ -75,11 +74,6 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
r.Post("/users/register", chain.ToHandlerFunc(v1Ctrl.HandleUserRegistration()))
r.Post("/users/login", chain.ToHandlerFunc(v1Ctrl.HandleAuthLogin(providers...)))
if a.conf.OIDC.Enabled {
r.Get("/users/login/oidc", chain.ToHandlerFunc(v1Ctrl.HandleOIDCLogin()))
r.Get("/users/login/oidc/callback", chain.ToHandlerFunc(v1Ctrl.HandleOIDCCallback()))
}
userMW := []errchain.Middleware{
a.mwAuthToken,
a.mwRoles(RoleModeOr, authroles.RoleUser.String()),
@@ -107,8 +101,6 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
r.Post("/actions/zero-item-time-fields", chain.ToHandlerFunc(v1Ctrl.HandleItemDateZeroOut(), userMW...))
r.Post("/actions/ensure-import-refs", chain.ToHandlerFunc(v1Ctrl.HandleEnsureImportRefs(), userMW...))
r.Post("/actions/set-primary-photos", chain.ToHandlerFunc(v1Ctrl.HandleSetPrimaryPhotos(), userMW...))
r.Post("/actions/create-missing-thumbnails", chain.ToHandlerFunc(v1Ctrl.HandleCreateMissingThumbnails(), userMW...))
r.Post("/actions/wipe-inventory", chain.ToHandlerFunc(v1Ctrl.HandleWipeInventory(), userMW...))
r.Get("/locations", chain.ToHandlerFunc(v1Ctrl.HandleLocationGetAll(), userMW...))
r.Post("/locations", chain.ToHandlerFunc(v1Ctrl.HandleLocationCreate(), userMW...))
@@ -135,7 +127,6 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
r.Put("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemUpdate(), userMW...))
r.Patch("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemPatch(), userMW...))
r.Delete("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemDelete(), userMW...))
r.Post("/items/{id}/duplicate", chain.ToHandlerFunc(v1Ctrl.HandleItemDuplicate(), userMW...))
r.Post("/items/{id}/attachments", chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentCreate(), userMW...))
r.Put("/items/{id}/attachments/{attachment_id}", chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentUpdate(), userMW...))
@@ -146,14 +137,6 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
r.Get("/assets/{id}", chain.ToHandlerFunc(v1Ctrl.HandleAssetGet(), userMW...))
// Item Templates
r.Get("/templates", chain.ToHandlerFunc(v1Ctrl.HandleItemTemplatesGetAll(), userMW...))
r.Post("/templates", chain.ToHandlerFunc(v1Ctrl.HandleItemTemplatesCreate(), userMW...))
r.Get("/templates/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemTemplatesGet(), userMW...))
r.Put("/templates/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemTemplatesUpdate(), userMW...))
r.Delete("/templates/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemTemplatesDelete(), userMW...))
r.Post("/templates/{id}/create-item", chain.ToHandlerFunc(v1Ctrl.HandleItemTemplatesCreateItem(), userMW...))
// Maintenance
r.Get("/maintenance", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceGetAll(), userMW...))
r.Put("/maintenance/{id}", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryUpdate(), userMW...))
@@ -172,19 +155,12 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
a.mwRoles(RoleModeOr, authroles.RoleUser.String(), authroles.RoleAttachments.String()),
}
r.Get("/products/search-from-barcode", chain.ToHandlerFunc(v1Ctrl.HandleProductSearchFromBarcode(a.conf.Barcode), userMW...))
r.Get("/qrcode", chain.ToHandlerFunc(v1Ctrl.HandleGenerateQRCode(), assetMW...))
r.Get(
"/items/{id}/attachments/{attachment_id}",
chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentGet(), assetMW...),
)
// Labelmaker
r.Get("/labelmaker/location/{id}", chain.ToHandlerFunc(v1Ctrl.HandleGetLocationLabel(), userMW...))
r.Get("/labelmaker/item/{id}", chain.ToHandlerFunc(v1Ctrl.HandleGetItemLabel(), userMW...))
r.Get("/labelmaker/asset/{id}", chain.ToHandlerFunc(v1Ctrl.HandleGetAssetLabel(), userMW...))
// Reporting Services
r.Get("/reporting/bill-of-materials", chain.ToHandlerFunc(v1Ctrl.HandleBillOfMaterialsExport(), userMW...))

View File

@@ -1,103 +0,0 @@
package main
import (
"bytes"
"fmt"
"os"
"path/filepath"
"strings"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
)
// setupStorageDir handles the creation and validation of the storage directory.
func setupStorageDir(cfg *config.Config) error {
if strings.HasPrefix(cfg.Storage.ConnString, "file:///./") {
raw := strings.TrimPrefix(cfg.Storage.ConnString, "file:///./")
clean := filepath.Clean(raw)
absBase, err := filepath.Abs(clean)
if err != nil {
log.Error().Err(err).Msg("failed to get absolute path for storage connection string")
return fmt.Errorf("failed to get absolute path for storage connection string: %w", err)
}
absBase = strings.ReplaceAll(absBase, "\\", "/")
storageDir := filepath.Join(absBase, cfg.Storage.PrefixPath)
storageDir = strings.ReplaceAll(storageDir, "\\", "/")
if !strings.HasPrefix(storageDir, absBase+"/") && storageDir != absBase {
log.Error().Str("path", storageDir).Msg("invalid storage path: you tried to use a prefix that is not a subdirectory of the base path")
return fmt.Errorf("invalid storage path: you tried to use a prefix that is not a subdirectory of the base path")
}
if err := os.MkdirAll(storageDir, 0o750); err != nil {
log.Error().Err(err).Msg("failed to create data directory")
return fmt.Errorf("failed to create data directory: %w", err)
}
}
return nil
}
// setupDatabaseURL returns the database URL and ensures any required directories exist.
func setupDatabaseURL(cfg *config.Config) (string, error) {
databaseURL := ""
switch strings.ToLower(cfg.Database.Driver) {
case config.DriverSqlite3:
databaseURL = cfg.Database.SqlitePath
dbFilePath := strings.Split(cfg.Database.SqlitePath, "?")[0]
dbDir := filepath.Dir(dbFilePath)
if err := os.MkdirAll(dbDir, 0o755); err != nil {
log.Error().Err(err).Str("path", dbDir).Msg("failed to create SQLite database directory")
return "", fmt.Errorf("failed to create SQLite database directory: %w", err)
}
case config.DriverPostgres:
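// Build a keyword/value DSN; with illustrative values this produces something like:
//   host=db port=5432 dbname=homebox sslmode=disable user=homebox password=secret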
databaseURL = fmt.Sprintf("host=%s port=%s dbname=%s sslmode=%s", cfg.Database.Host, cfg.Database.Port, cfg.Database.Database, cfg.Database.SslMode)
if cfg.Database.Username != "" {
databaseURL += fmt.Sprintf(" user=%s", cfg.Database.Username)
}
if cfg.Database.Password != "" {
databaseURL += fmt.Sprintf(" password=%s", cfg.Database.Password)
}
if cfg.Database.SslRootCert != "" {
if _, err := os.Stat(cfg.Database.SslRootCert); err != nil {
log.Error().Err(err).Str("path", cfg.Database.SslRootCert).Msg("SSL root certificate file is not accessible")
return "", fmt.Errorf("SSL root certificate file is not accessible: %w", err)
}
databaseURL += fmt.Sprintf(" sslrootcert=%s", cfg.Database.SslRootCert)
}
if cfg.Database.SslCert != "" {
if _, err := os.Stat(cfg.Database.SslCert); err != nil {
log.Error().Err(err).Str("path", cfg.Database.SslCert).Msg("SSL certificate file is not accessible")
return "", fmt.Errorf("SSL certificate file is not accessible: %w", err)
}
databaseURL += fmt.Sprintf(" sslcert=%s", cfg.Database.SslCert)
}
if cfg.Database.SslKey != "" {
if _, err := os.Stat(cfg.Database.SslKey); err != nil {
log.Error().Err(err).Str("path", cfg.Database.SslKey).Msg("SSL key file is not accessible")
return "", fmt.Errorf("SSL key file is not accessible: %w", err)
}
databaseURL += fmt.Sprintf(" sslkey=%s", cfg.Database.SslKey)
}
default:
log.Error().Str("driver", cfg.Database.Driver).Msg("unsupported database driver")
return "", fmt.Errorf("unsupported database driver: %s", cfg.Database.Driver)
}
return databaseURL, nil
}
// loadCurrencies loads currency data from config if provided.
func loadCurrencies(cfg *config.Config) ([]currencies.CollectorFunc, error) {
collectFuncs := []currencies.CollectorFunc{
currencies.CollectDefaults(),
}
if cfg.Options.CurrencyConfig != "" {
log.Info().Str("path", cfg.Options.CurrencyConfig).Msg("loading currency config file")
content, err := os.ReadFile(cfg.Options.CurrencyConfig)
if err != nil {
log.Error().Err(err).Str("path", cfg.Options.CurrencyConfig).Msg("failed to read currency config file")
return nil, err
}
collectFuncs = append(collectFuncs, currencies.CollectJSON(bytes.NewReader(content)))
}
return collectFuncs, nil
}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,45 @@
package main
import (
"context"
"fmt"
"log"
"os"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/migrate"
atlas "ariga.io/atlas/sql/migrate"
_ "ariga.io/atlas/sql/sqlite"
"entgo.io/ent/dialect"
"entgo.io/ent/dialect/sql/schema"
_ "github.com/mattn/go-sqlite3"
)
func main() {
ctx := context.Background()
// Create a local migration directory that understands the Atlas migration file format for replay.
dir, err := atlas.NewLocalDir("internal/data/migrations/migrations")
if err != nil {
log.Fatalf("failed creating atlas migration directory: %v", err)
}
// Migrate diff options.
opts := []schema.MigrateOption{
schema.WithDir(dir), // provide migration directory
schema.WithMigrationMode(schema.ModeReplay), // provide migration mode
schema.WithDialect(dialect.SQLite), // Ent dialect to use
schema.WithFormatter(atlas.DefaultFormatter),
schema.WithDropIndex(true),
schema.WithDropColumn(true),
}
if len(os.Args) != 2 {
log.Fatalln("migration name is required. Use: 'go run -mod=mod ent/migrate/main.go <name>'")
}
// Generate migrations using Atlas support for SQLite (note the Ent dialect option passed above).
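// Example invocation (the migration name "add_item_templates" is just a placeholder):
//   go run -mod=mod ent/migrate/main.go add_item_templates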
err = migrate.NamedDiff(ctx, "sqlite://.data/homebox.migration.db?_fk=1", os.Args[1], opts...)
if err != nil {
log.Fatalf("failed generating migration file: %v", err)
}
fmt.Println("Migration file generated successfully.")
}

View File

@@ -1,11 +0,0 @@
-----BEGIN ENCRYPTED SIGSTORE PRIVATE KEY-----
eyJrZGYiOnsibmFtZSI6InNjcnlwdCIsInBhcmFtcyI6eyJOIjo2NTUzNiwiciI6
OCwicCI6MX0sInNhbHQiOiJ3bmU3TTd2dndlL2FBS1piUEE2QktsdFNzMkhkSk9v
eXlvOTNLMnByRXdJPSJ9LCJjaXBoZXIiOnsibmFtZSI6Im5hY2wvc2VjcmV0Ym94
Iiwibm9uY2UiOiJoOWdIMHRsYk9zMnZIbVBTYk5zaGxBQU5TYUlkcVZoQiJ9LCJj
aXBoZXJ0ZXh0IjoiTERiQk5ac3ZlVnRMbTlQdkRTa2t6bzRrWGExVGRTTEY5VzVO
cGd6M05GNVJLRWlGRmJQRDJDYzhnTWNkRmkrTU8xd2FTUzFGWWdXU3BIdnI3QXZ3
K0tUTXVWLzhSZ1pnOE9ieHNJY2xKSlZldHRLTzdzWXY2aWgxM09iZlVBV0lQcGpS
ZUQ5UmE3WjJwbWd0SkpBdjl2dlk1RGNNeGRKcFFrOEY1UStLZytSbnhLRUd6Z1ZN
MWUxdjF3UGhsOWhVRGRMSFVSTzE5Z0w3aFE9PSJ9
-----END ENCRYPTED SIGSTORE PRIVATE KEY-----

View File

@@ -1,4 +0,0 @@
-----BEGIN PUBLIC KEY-----
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2DXKcerPznDayM+rMJ/25w+ubI8g
e3ZTbm07VqLFz6uI2vXqN8X7/72dygtJlUw07FpR0oLXaSia0adaywz1JA==
-----END PUBLIC KEY-----

View File

@@ -1,209 +1,76 @@
module github.com/sysadminsmedia/homebox/backend
go 1.24.0
toolchain go1.24.3
go 1.23.0
require (
entgo.io/ent v0.14.5
github.com/ardanlabs/conf/v3 v3.10.0
ariga.io/atlas v0.19.1
entgo.io/ent v0.14.1
github.com/ardanlabs/conf/v3 v3.1.8
github.com/containrrr/shoutrrr v0.8.0
github.com/coreos/go-oidc/v3 v3.17.0
github.com/evanoberholster/imagemeta v0.3.1
github.com/gen2brain/avif v0.4.4
github.com/gen2brain/heic v0.4.7
github.com/gen2brain/jpegxl v0.4.5
github.com/gen2brain/webp v0.5.5
github.com/go-chi/chi/v5 v5.2.3
github.com/go-playground/validator/v10 v10.30.1
github.com/go-chi/chi/v5 v5.1.0
github.com/go-playground/validator/v10 v10.22.1
github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0
github.com/google/uuid v1.6.0
github.com/gorilla/schema v1.4.1
github.com/hay-kot/httpkit v0.0.11
github.com/lib/pq v1.10.9
github.com/mattn/go-sqlite3 v1.14.32
github.com/olahol/melody v1.4.0
github.com/mattn/go-sqlite3 v1.14.24
github.com/olahol/melody v1.2.1
github.com/pkg/errors v0.9.1
github.com/pressly/goose/v3 v3.26.0
github.com/rs/zerolog v1.34.0
github.com/shirou/gopsutil/v4 v4.25.11
github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e
github.com/stretchr/testify v1.11.1
github.com/rs/zerolog v1.33.0
github.com/stretchr/testify v1.9.0
github.com/swaggo/http-swagger/v2 v2.0.2
github.com/swaggo/swag v1.16.6
github.com/yeqown/go-qrcode/v2 v2.2.5
github.com/yeqown/go-qrcode/writer/standard v1.3.0
github.com/zeebo/blake3 v0.2.4
go.balki.me/anyhttp v0.5.2
gocloud.dev v0.44.0
gocloud.dev/pubsub/kafkapubsub v0.44.0
gocloud.dev/pubsub/natspubsub v0.44.0
gocloud.dev/pubsub/rabbitpubsub v0.44.0
golang.org/x/crypto v0.46.0
golang.org/x/image v0.34.0
golang.org/x/oauth2 v0.34.0
golang.org/x/text v0.32.0
modernc.org/sqlite v1.41.0
github.com/swaggo/swag v1.16.3
github.com/yeqown/go-qrcode/v2 v2.2.4
github.com/yeqown/go-qrcode/writer/standard v1.2.4
golang.org/x/crypto v0.28.0
modernc.org/sqlite v1.33.1
)
require (
ariga.io/atlas v0.32.1-0.20250325101103-175b25e1c1b9 // indirect
cel.dev/expr v0.24.0 // indirect
cloud.google.com/go v0.121.6 // indirect
cloud.google.com/go/auth v0.17.0 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
cloud.google.com/go/compute/metadata v0.9.0 // indirect
cloud.google.com/go/iam v1.5.3 // indirect
cloud.google.com/go/monitoring v1.24.3 // indirect
cloud.google.com/go/pubsub v1.50.1 // indirect
cloud.google.com/go/pubsub/v2 v2.2.1 // indirect
cloud.google.com/go/storage v1.56.0 // indirect
github.com/Azure/azure-amqp-common-go/v3 v3.2.3 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.1 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
github.com/Azure/azure-sdk-for-go/sdk/messaging/azservicebus v1.9.1 // indirect
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.1 // indirect
github.com/Azure/go-amqp v1.4.0 // indirect
github.com/Azure/go-autorest v14.2.0+incompatible // indirect
github.com/Azure/go-autorest/autorest/to v0.4.1 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0 // indirect
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0 // indirect
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 // indirect
github.com/IBM/sarama v1.46.3 // indirect
github.com/KyleBanks/depth v1.2.1 // indirect
github.com/agext/levenshtein v1.2.3 // indirect
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
github.com/aws/aws-sdk-go-v2 v1.39.6 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3 // indirect
github.com/aws/aws-sdk-go-v2/config v1.31.17 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.18.21 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 // indirect
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.3 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.13 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.13 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.4 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13 // indirect
github.com/aws/aws-sdk-go-v2/service/s3 v1.89.2 // indirect
github.com/aws/aws-sdk-go-v2/service/sns v1.34.7 // indirect
github.com/aws/aws-sdk-go-v2/service/sqs v1.38.8 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.30.1 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 // indirect
github.com/aws/smithy-go v1.23.2 // indirect
github.com/bmatcuk/doublestar v1.3.4 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cncf/xds/go v0.0.0-20251022180443-0feb69152e9f // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/eapache/go-resiliency v1.7.0 // indirect
github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 // indirect
github.com/eapache/queue v1.1.0 // indirect
github.com/ebitengine/purego v0.9.1 // indirect
github.com/envoyproxy/go-control-plane/envoy v1.35.0 // indirect
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
github.com/fatih/color v1.18.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/fatih/color v1.16.0 // indirect
github.com/fogleman/gg v1.3.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.12 // indirect
github.com/go-jose/go-jose/v4 v4.1.3 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
github.com/go-openapi/inflect v0.19.0 // indirect
github.com/go-openapi/jsonpointer v0.22.4 // indirect
github.com/go-openapi/jsonreference v0.21.4 // indirect
github.com/go-openapi/spec v0.22.3 // indirect
github.com/go-openapi/swag/conv v0.25.4 // indirect
github.com/go-openapi/swag/jsonname v0.25.4 // indirect
github.com/go-openapi/swag/jsonutils v0.25.4 // indirect
github.com/go-openapi/swag/loading v0.25.4 // indirect
github.com/go-openapi/swag/stringutils v0.25.4 // indirect
github.com/go-openapi/swag/typeutils v0.25.4 // indirect
github.com/go-openapi/swag/yamlutils v0.25.4 // indirect
github.com/go-openapi/jsonpointer v0.20.0 // indirect
github.com/go-openapi/jsonreference v0.20.2 // indirect
github.com/go-openapi/spec v0.20.9 // indirect
github.com/go-openapi/swag v0.22.4 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/golang-jwt/jwt/v5 v5.2.3 // indirect
github.com/golang/snappy v1.0.0 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/s2a-go v0.1.9 // indirect
github.com/google/wire v0.7.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect
github.com/googleapis/gax-go/v2 v2.16.0 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
github.com/hashicorp/go-uuid v1.0.3 // indirect
github.com/hashicorp/hcl/v2 v2.18.1 // indirect
github.com/jcmturner/aescts/v2 v2.0.0 // indirect
github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect
github.com/jcmturner/gofork v1.7.6 // indirect
github.com/jcmturner/gokrb5/v8 v8.4.4 // indirect
github.com/jcmturner/rpc/v2 v2.0.3 // indirect
github.com/klauspost/compress v1.18.2 // indirect
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
github.com/google/go-cmp v0.6.0 // indirect
github.com/gorilla/websocket v1.5.1 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hashicorp/hcl/v2 v2.19.1 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mfridman/interpolate v0.0.2 // indirect
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
github.com/nats-io/nats.go v1.48.0 // indirect
github.com/nats-io/nkeys v0.4.12 // indirect
github.com/nats-io/nuid v1.0.1 // indirect
github.com/ncruces/go-strftime v1.0.0 // indirect
github.com/philhofer/fwd v1.2.0 // indirect
github.com/pierrec/lz4/v4 v4.1.23 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
github.com/rabbitmq/amqp091-go v1.10.0 // indirect
github.com/rcrowley/go-metrics v0.0.0-20250401214520-65e299d6c5c9 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
github.com/spiffe/go-spiffe/v2 v2.6.0 // indirect
github.com/swaggo/files/v2 v2.0.2 // indirect
github.com/tetratelabs/wazero v1.11.0 // indirect
github.com/tinylib/msgp v1.6.1 // indirect
github.com/tklauser/go-sysconf v0.3.16 // indirect
github.com/tklauser/numcpus v0.11.0 // indirect
github.com/rogpeppe/go-internal v1.11.0 // indirect
github.com/swaggo/files/v2 v2.0.0 // indirect
github.com/yeqown/reedsolomon v1.0.0 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
github.com/zclconf/go-cty v1.14.4 // indirect
github.com/zclconf/go-cty-yaml v1.1.0 // indirect
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
go.opentelemetry.io/contrib/detectors/gcp v1.38.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 // indirect
go.opentelemetry.io/otel v1.39.0 // indirect
go.opentelemetry.io/otel/metric v1.39.0 // indirect
go.opentelemetry.io/otel/sdk v1.39.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.39.0 // indirect
go.opentelemetry.io/otel/trace v1.39.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 // indirect
golang.org/x/mod v0.31.0 // indirect
golang.org/x/net v0.48.0 // indirect
golang.org/x/sync v0.19.0 // indirect
golang.org/x/sys v0.39.0 // indirect
golang.org/x/time v0.14.0 // indirect
golang.org/x/tools v0.40.0 // indirect
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
google.golang.org/api v0.258.0 // indirect
google.golang.org/genproto v0.0.0-20251202230838-ff82c1b0f217 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251222181119-0a764e51fe1b // indirect
google.golang.org/grpc v1.78.0 // indirect
google.golang.org/protobuf v1.36.11 // indirect
github.com/zclconf/go-cty v1.14.1 // indirect
golang.org/x/image v0.18.0 // indirect
golang.org/x/mod v0.20.0 // indirect
golang.org/x/net v0.28.0 // indirect
golang.org/x/sys v0.26.0 // indirect
golang.org/x/text v0.19.0 // indirect
golang.org/x/tools v0.24.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
modernc.org/libc v1.67.2 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6 // indirect
modernc.org/libc v1.55.3 // indirect
modernc.org/mathutil v1.6.0 // indirect
modernc.org/memory v1.8.0 // indirect
modernc.org/strutil v1.2.0 // indirect
modernc.org/token v1.1.0 // indirect
)

View File

@@ -1,235 +1,67 @@
ariga.io/atlas v0.32.1-0.20250325101103-175b25e1c1b9 h1:E0wvcUXTkgyN4wy4LGtNzMNGMytJN8afmIWXJVMi4cc=
ariga.io/atlas v0.32.1-0.20250325101103-175b25e1c1b9/go.mod h1:Oe1xWPuu5q9LzyrWfbZmEZxFYeu4BHTyzfjeW2aZp/w=
cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY=
cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw=
cloud.google.com/go v0.121.6 h1:waZiuajrI28iAf40cWgycWNgaXPO06dupuS+sgibK6c=
cloud.google.com/go v0.121.6/go.mod h1:coChdst4Ea5vUpiALcYKXEpR1S9ZgXbhEzzMcMR66vI=
cloud.google.com/go/auth v0.17.0 h1:74yCm7hCj2rUyyAocqnFzsAYXgJhrG26XCFimrc/Kz4=
cloud.google.com/go/auth v0.17.0/go.mod h1:6wv/t5/6rOPAX4fJiRjKkJCvswLwdet7G8+UGXt7nCQ=
cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc=
cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c=
cloud.google.com/go/compute/metadata v0.9.0 h1:pDUj4QMoPejqq20dK0Pg2N4yG9zIkYGdBtwLoEkH9Zs=
cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10=
cloud.google.com/go/iam v1.5.3 h1:+vMINPiDF2ognBJ97ABAYYwRgsaqxPbQDlMnbHMjolc=
cloud.google.com/go/iam v1.5.3/go.mod h1:MR3v9oLkZCTlaqljW6Eb2d3HGDGK5/bDv93jhfISFvU=
cloud.google.com/go/logging v1.13.1 h1:O7LvmO0kGLaHY/gq8cV7T0dyp6zJhYAOtZPX4TF3QtY=
cloud.google.com/go/logging v1.13.1/go.mod h1:XAQkfkMBxQRjQek96WLPNze7vsOmay9H5PqfsNYDqvw=
cloud.google.com/go/longrunning v0.7.0 h1:FV0+SYF1RIj59gyoWDRi45GiYUMM3K1qO51qoboQT1E=
cloud.google.com/go/longrunning v0.7.0/go.mod h1:ySn2yXmjbK9Ba0zsQqunhDkYi0+9rlXIwnoAf+h+TPY=
cloud.google.com/go/monitoring v1.24.3 h1:dde+gMNc0UhPZD1Azu6at2e79bfdztVDS5lvhOdsgaE=
cloud.google.com/go/monitoring v1.24.3/go.mod h1:nYP6W0tm3N9H/bOw8am7t62YTzZY+zUeQ+Bi6+2eonI=
cloud.google.com/go/pubsub v1.50.1 h1:fzbXpPyJnSGvWXF1jabhQeXyxdbCIkXTpjXHy7xviBM=
cloud.google.com/go/pubsub v1.50.1/go.mod h1:6YVJv3MzWJUVdvQXG081sFvS0dWQOdnV+oTo++q/xFk=
cloud.google.com/go/pubsub/v2 v2.2.1 h1:3brZcshL3fIiD1qOxAE2QW9wxsfjioy014x4yC9XuYI=
cloud.google.com/go/pubsub/v2 v2.2.1/go.mod h1:O5f0KHG9zDheZAd3z5rlCRhxt2JQtB+t/IYLKK3Bpvw=
cloud.google.com/go/storage v1.56.0 h1:iixmq2Fse2tqxMbWhLWC9HfBj1qdxqAmiK8/eqtsLxI=
cloud.google.com/go/storage v1.56.0/go.mod h1:Tpuj6t4NweCLzlNbw9Z9iwxEkrSem20AetIeH/shgVU=
cloud.google.com/go/trace v1.11.7 h1:kDNDX8JkaAG3R2nq1lIdkb7FCSi1rCmsEtKVsty7p+U=
cloud.google.com/go/trace v1.11.7/go.mod h1:TNn9d5V3fQVf6s4SCveVMIBS2LJUqo73GACmq/Tky0s=
entgo.io/ent v0.14.5 h1:Rj2WOYJtCkWyFo6a+5wB3EfBRP0rnx1fMk6gGA0UUe4=
entgo.io/ent v0.14.5/go.mod h1:zTzLmWtPvGpmSwtkaayM2cm5m819NdM7z7tYPq3vN0U=
github.com/Azure/azure-amqp-common-go/v3 v3.2.3 h1:uDF62mbd9bypXWi19V1bN5NZEO84JqgmI5G73ibAmrk=
github.com/Azure/azure-amqp-common-go/v3 v3.2.3/go.mod h1:7rPmbSfszeovxGfc5fSAXE4ehlXQZHpMja2OtxC2Tas=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.1 h1:Wc1ml6QlJs2BHQ/9Bqu1jiyggbsSjramq2oUmp5WeIo=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.1/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1/go.mod h1:JdM5psgjfBf5fo2uWOZhflPWyDBZ/O/CNAH9CtsuZE4=
github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2 h1:yz1bePFlP5Vws5+8ez6T3HWXPmwOK7Yvq8QxDBD3SKY=
github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2/go.mod h1:Pa9ZNPuoNu/GztvBSKk9J1cDJW6vk/n0zLtV4mgd8N8=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA=
github.com/Azure/azure-sdk-for-go/sdk/messaging/azservicebus v1.9.1 h1:CRZwf68N55u7ZZo3Xx2ynuqEA6k5GZfwsEUkU8qsAPk=
github.com/Azure/azure-sdk-for-go/sdk/messaging/azservicebus v1.9.1/go.mod h1:NydgUaroiShkgOcb+X6OUdS3RalWBrvDNtOyFHJtsZY=
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.8.0 h1:LR0kAX9ykz8G4YgLCaRDVJ3+n43R8MneB5dTy2konZo=
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.8.0/go.mod h1:DWAciXemNf++PQJLeXUB4HHH5OpsAh12HZnu2wXE1jA=
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.1 h1:lhZdRq7TIx0GJQvSyX2Si406vrYsov2FXGp/RnSEtcs=
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.1/go.mod h1:8cl44BDmi+effbARHMQjgOKA2AYvcohNm7KEt42mSV8=
github.com/Azure/go-amqp v0.17.0/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg=
github.com/Azure/go-amqp v1.4.0 h1:Xj3caqi4comOF/L1Uc5iuBxR/pB6KumejC01YQOqOR4=
github.com/Azure/go-amqp v1.4.0/go.mod h1:vZAogwdrkbyK3Mla8m/CxSc/aKdnTZ4IbPxl51Y5WZE=
github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs=
github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest/autorest v0.11.18/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA=
github.com/Azure/go-autorest/autorest/adal v0.9.13/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M=
github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74=
github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k=
github.com/Azure/go-autorest/autorest/to v0.4.1 h1:CxNHBqdzTr7rLtdrtb5CMjJcDut+WNGCVv7OmS5+lTc=
github.com/Azure/go-autorest/autorest/to v0.4.1/go.mod h1:EtaofgU4zmtvn1zT2ARsjRFdq9vXx0YWtmElwL+GZ9M=
github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8=
github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU=
github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1 h1:WJTmL004Abzc5wDB5VtZG2PJk5ndYDgVacGqfirKxjM=
github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1/go.mod h1:tCcJZ0uHAmvjsVYzEFivsRTN00oz5BEsRgQHu5JZ9WE=
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs=
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
ariga.io/atlas v0.19.1 h1:QzBHkakwzEhmPWOzNhw8Yr/Bbicj6Iq5hwEoNI/Jr9A=
ariga.io/atlas v0.19.1/go.mod h1:VPlcXdd4w2KqKnH54yEZcry79UAhpaWaxEsmn5JRNoE=
ariga.io/atlas v0.28.0 h1:qmn9tUyJypJkIw+X3ECUwDtkMTiFupgstHbjRN4xGH0=
ariga.io/atlas v0.28.0/go.mod h1:LOOp18LCL9r+VifvVlJqgYJwYl271rrXD9/wIyzJ8sw=
entgo.io/ent v0.12.5 h1:KREM5E4CSoej4zeGa88Ou/gfturAnpUv0mzAjch1sj4=
entgo.io/ent v0.12.5/go.mod h1:Y3JVAjtlIk8xVZYSn3t3mf8xlZIn5SAOXZQxD6kKI+Q=
entgo.io/ent v0.14.1 h1:fUERL506Pqr92EPHJqr8EYxbPioflJo6PudkrEA8a/s=
entgo.io/ent v0.14.1/go.mod h1:MH6XLG0KXpkcDQhKiHfANZSzR55TJyPL5IGNpI8wpco=
github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0 h1:sBEjpZlNHzK1voKq9695PJSX2o5NEXl7/OL3coiIY0c=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0/go.mod h1:P4WPRUkOhJC13W//jWpyfJNDAIpvRbAUIYLX/4jtlE0=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0 h1:owcC2UnmsZycprQ5RfRgjydWhuoxg71LUfyiQdijZuM=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0/go.mod h1:ZPpqegjbE99EPKsu3iUWV22A04wzGPcAY/ziSIQEEgs=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.53.0 h1:4LP6hvB4I5ouTbGgWtixJhgED6xdf67twf9PoY96Tbg=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.53.0/go.mod h1:jUZ5LYlw40WMd07qxcQJD5M40aUxrfwqQX1g7zxYnrQ=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 h1:Ron4zCA/yk6U7WOBXhTJcDpsUBG9npumK6xw2auFltQ=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0/go.mod h1:cSgYe11MCNYunTnRXrKiR/tHc0eoKjICUuWpNZoVCOo=
github.com/IBM/sarama v1.46.3 h1:njRsX6jNlnR+ClJ8XmkO+CM4unbrNr/2vB5KK6UA+IE=
github.com/IBM/sarama v1.46.3/go.mod h1:GTUYiF9DMOZVe3FwyGT+dtSPceGFIgA+sPc5u6CBwko=
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo=
github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
github.com/ardanlabs/conf/v3 v3.10.0 h1:qIrJ/WBmH/hFQ/IX4xH9LX9LzwK44T9aEOy78M+4S+0=
github.com/ardanlabs/conf/v3 v3.10.0/go.mod h1:XlL9P0quWP4m1weOVFmlezabinbZLI05niDof/+Ochk=
github.com/aws/aws-sdk-go-v2 v1.39.6 h1:2JrPCVgWJm7bm83BDwY5z8ietmeJUbh3O2ACnn+Xsqk=
github.com/aws/aws-sdk-go-v2 v1.39.6/go.mod h1:c9pm7VwuW0UPxAEYGyTmyurVcNrbF6Rt/wixFqDhcjE=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3 h1:DHctwEM8P8iTXFxC/QK0MRjwEpWQeM9yzidCRjldUz0=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3/go.mod h1:xdCzcZEtnSTKVDOmUZs4l/j3pSV6rpo1WXl5ugNsL8Y=
github.com/aws/aws-sdk-go-v2/config v1.31.17 h1:QFl8lL6RgakNK86vusim14P2k8BFSxjvUkcWLDjgz9Y=
github.com/aws/aws-sdk-go-v2/config v1.31.17/go.mod h1:V8P7ILjp/Uef/aX8TjGk6OHZN6IKPM5YW6S78QnRD5c=
github.com/aws/aws-sdk-go-v2/credentials v1.18.21 h1:56HGpsgnmD+2/KpG0ikvvR8+3v3COCwaF4r+oWwOeNA=
github.com/aws/aws-sdk-go-v2/credentials v1.18.21/go.mod h1:3YELwedmQbw7cXNaII2Wywd+YY58AmLPwX4LzARgmmA=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 h1:T1brd5dR3/fzNFAQch/iBKeX07/ffu/cLu+q+RuzEWk=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13/go.mod h1:Peg/GBAQ6JDt+RoBf4meB1wylmAipb7Kg2ZFakZTlwk=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.3 h1:4GNV1lhyELGjMz5ILMRxDvxvOaeo3Ux9Z69S1EgVMMQ=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.3/go.mod h1:br7KA6edAAqDGUYJ+zVVPAyMrPhnN+zdt17yTUT6FPw=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13 h1:a+8/MLcWlIxo1lF9xaGt3J/u3yOZx+CdSveSNwjhD40=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13/go.mod h1:oGnKwIYZ4XttyU2JWxFrwvhF6YKiK/9/wmE3v3Iu9K8=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.13 h1:HBSI2kDkMdWz4ZM7FjwE7e/pWDEZ+nR95x8Ztet1ooY=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.13/go.mod h1:YE94ZoDArI7awZqJzBAZ3PDD2zSfuP7w6P2knOzIn8M=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.13 h1:eg/WYAa12vqTphzIdWMzqYRVKKnCboVPRlvaybNCqPA=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.13/go.mod h1:/FDdxWhz1486obGrKKC1HONd7krpk38LBt+dutLcN9k=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 h1:x2Ibm/Af8Fi+BH+Hsn9TXGdT+hKbDd5XOTZxTMxDk7o=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3/go.mod h1:IW1jwyrQgMdhisceG8fQLmQIydcT/jWY21rFhzgaKwo=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.4 h1:NvMjwvv8hpGUILarKw7Z4Q0w1H9anXKsesMxtw++MA4=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.4/go.mod h1:455WPHSwaGj2waRSpQp7TsnpOnBfw8iDfPfbwl7KPJE=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 h1:kDqdFvMY4AtKoACfzIGD8A0+hbT41KTKF//gq7jITfM=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13/go.mod h1:lmKuogqSU3HzQCwZ9ZtcqOc5XGMqtDK7OIc2+DxiUEg=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13 h1:zhBJXdhWIFZ1acfDYIhu4+LCzdUS2Vbcum7D01dXlHQ=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13/go.mod h1:JaaOeCE368qn2Hzi3sEzY6FgAZVCIYcC2nwbro2QCh8=
github.com/aws/aws-sdk-go-v2/service/s3 v1.89.2 h1:xgBWsgaeUESl8A8k80p6yBdexMWDVeiDmJ/pkjohJ7c=
github.com/aws/aws-sdk-go-v2/service/s3 v1.89.2/go.mod h1:+wArOOrcHUevqdto9k1tKOF5++YTe9JEcPSc9Tx2ZSw=
github.com/aws/aws-sdk-go-v2/service/sns v1.34.7 h1:OBuZE9Wt8h2imuRktu+WfjiTGrnYdCIJg8IX92aalHE=
github.com/aws/aws-sdk-go-v2/service/sns v1.34.7/go.mod h1:4WYoZAhHt+dWYpoOQUgkUKfuQbE6Gg/hW4oXE0pKS9U=
github.com/aws/aws-sdk-go-v2/service/sqs v1.38.8 h1:80dpSqWMwx2dAm30Ib7J6ucz1ZHfiv5OCRwN/EnCOXQ=
github.com/aws/aws-sdk-go-v2/service/sqs v1.38.8/go.mod h1:IzNt/udsXlETCdvBOL0nmyMe2t9cGmXmZgsdoZGYYhI=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.1 h1:0JPwLz1J+5lEOfy/g0SURC9cxhbQ1lIMHMa+AHZSzz0=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.1/go.mod h1:fKvyjJcz63iL/ftA6RaM8sRCtN4r4zl4tjL3qw5ec7k=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5 h1:OWs0/j2UYR5LOGi88sD5/lhN6TDLG6SfA7CqsQO9zF0=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5/go.mod h1:klO+ejMvYsB4QATfEOIXk8WAEwN4N0aBfJpvC+5SZBo=
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 h1:mLlUgHn02ue8whiR4BmxxGJLR2gwU6s6ZzJ5wDamBUs=
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1/go.mod h1:E19xDjpzPZC7LS2knI9E6BaRFDK43Eul7vd6rSq2HWk=
github.com/aws/smithy-go v1.23.2 h1:Crv0eatJUQhaManss33hS5r40CG3ZFH+21XSkqMrIUM=
github.com/aws/smithy-go v1.23.2/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
github.com/bmatcuk/doublestar v1.3.4 h1:gPypJ5xD31uhX6Tf54sDPUOBXTqKH4c9aPY66CyQrS0=
github.com/bmatcuk/doublestar v1.3.4/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cncf/xds/go v0.0.0-20251022180443-0feb69152e9f h1:Y8xYupdHxryycyPlc9Y+bSQAYZnetRJ70VMVKm5CKI0=
github.com/cncf/xds/go v0.0.0-20251022180443-0feb69152e9f/go.mod h1:HlzOvOjVBOfTGSRXRyY0OiCS/3J1akRGQQpRO/7zyF4=
github.com/coder/websocket v1.8.13 h1:f3QZdXy7uGVz+4uCJy2nTZyM0yTBj8yANEHhqlXZ9FE=
github.com/coder/websocket v1.8.13/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs=
github.com/ardanlabs/conf/v3 v3.1.8 h1:r0KUV9/Hni5XdeWR2+A1BiedIDnry5CjezoqgJ0rnFQ=
github.com/ardanlabs/conf/v3 v3.1.8/go.mod h1:OIi6NK95fj8jKFPdZ/UmcPlY37JBg99hdP9o5XmNK9c=
github.com/containrrr/shoutrrr v0.8.0 h1:mfG2ATzIS7NR2Ec6XL+xyoHzN97H8WPjir8aYzJUSec=
github.com/containrrr/shoutrrr v0.8.0/go.mod h1:ioyQAyu1LJY6sILuNyKaQaw+9Ttik5QePU8atnAdO2o=
github.com/coreos/go-oidc/v3 v3.17.0 h1:hWBGaQfbi0iVviX4ibC7bk8OKT5qNr4klBaCHVNvehc=
github.com/coreos/go-oidc/v3 v3.17.0/go.mod h1:wqPbKFrVnE90vty060SB40FCJ8fTHTxSwyXJqZH+sI8=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/eapache/go-resiliency v1.7.0 h1:n3NRTnBn5N0Cbi/IeOHuQn9s2UwVUH7Ga0ZWcP+9JTA=
github.com/eapache/go-resiliency v1.7.0/go.mod h1:5yPzW0MIvSe0JDsv0v+DvcjEv2FyD6iZYSs1ZI+iQho=
github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 h1:Oy0F4ALJ04o5Qqpdz8XLIpNA3WM/iSIXqxtqo7UGVws=
github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3/go.mod h1:YvSRo5mw33fLEx1+DlK6L2VV43tJt5Eyel9n9XBcR+0=
github.com/eapache/queue v1.1.0 h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc=
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
github.com/ebitengine/purego v0.9.1 h1:a/k2f2HQU3Pi399RPW1MOaZyhKJL9w/xFpKAg4q1s0A=
github.com/ebitengine/purego v0.9.1/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
github.com/envoyproxy/go-control-plane v0.13.5-0.20251024222203-75eaa193e329 h1:K+fnvUM0VZ7ZFJf0n4L/BRlnsb9pL/GuDG6FqaH+PwM=
github.com/envoyproxy/go-control-plane v0.13.5-0.20251024222203-75eaa193e329/go.mod h1:Alz8LEClvR7xKsrq3qzoc4N0guvVNSS8KmSChGYr9hs=
github.com/envoyproxy/go-control-plane/envoy v1.35.0 h1:ixjkELDE+ru6idPxcHLj8LBVc2bFP7iBytj353BoHUo=
github.com/envoyproxy/go-control-plane/envoy v1.35.0/go.mod h1:09qwbGVuSWWAyN5t/b3iyVfz5+z8QWGrzkoqm/8SbEs=
github.com/envoyproxy/go-control-plane/ratelimit v0.1.0 h1:/G9QYbddjL25KvtKTv3an9lx6VBE2cnb8wp1vEGNYGI=
github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4=
github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8=
github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU=
github.com/evanoberholster/imagemeta v0.3.1 h1:E4GUjXcvlVMjP9joN25+bBNf3Al3MTTfMqCrDOCW+LE=
github.com/evanoberholster/imagemeta v0.3.1/go.mod h1:V0vtDJmjTqvwAYO8r+u33NRVIMXQb0qSqEfImoKEiXM=
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
github.com/fogleman/gg v1.3.0 h1:/7zJX8F6AaYQc57WQCyN9cAIz+4bCJGO9B+dyW29am8=
github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k=
github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
github.com/gabriel-vasile/mimetype v1.4.12 h1:e9hWvmLYvtp846tLHam2o++qitpguFiYCKbn0w9jyqw=
github.com/gabriel-vasile/mimetype v1.4.12/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
github.com/gen2brain/avif v0.4.4 h1:Ga/ss7qcWWQm2bxFpnjYjhJsNfZrWs5RsyklgFjKRSE=
github.com/gen2brain/avif v0.4.4/go.mod h1:/XCaJcjZraQwKVhpu9aEd9aLOssYOawLvhMBtmHVGqk=
github.com/gen2brain/heic v0.4.7 h1:xw/e9R3HdIvb+uEhRDMRJdviYnB3ODe/VwL8SYLaMGc=
github.com/gen2brain/heic v0.4.7/go.mod h1:ECnpqbqLu0qSje4KSNWUUDK47UPXPzl80T27GWGEL5I=
github.com/gen2brain/jpegxl v0.4.5 h1:TWpVEn5xkIfsswzkjHBArd0Cc9AE0tbjBSoa0jDsrbo=
github.com/gen2brain/jpegxl v0.4.5/go.mod h1:4kWYJ18xCEuO2vzocYdGpeqNJ990/Gjy3uLMg5TBN6I=
github.com/gen2brain/webp v0.5.5 h1:MvQR75yIPU/9nSqYT5h13k4URaJK3gf9tgz/ksRbyEg=
github.com/gen2brain/webp v0.5.5/go.mod h1:xOSMzp4aROt2KFW++9qcK/RBTOVC2S9tJG66ip/9Oc0=
github.com/go-chi/chi/v5 v5.2.3 h1:WQIt9uxdsAbgIYgid+BpYc+liqQZGMHRaUwp0JUcvdE=
github.com/go-chi/chi/v5 v5.2.3/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
github.com/go-jose/go-jose/v4 v4.1.3 h1:CVLmWDhDVRa6Mi/IgCgaopNosCaHz7zrMeF9MlZRkrs=
github.com/go-jose/go-jose/v4 v4.1.3/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
github.com/go-chi/chi/v5 v5.1.0 h1:acVI1TYaD+hhedDJ3r54HyA6sExp3HfXq7QWEEY/xMw=
github.com/go-chi/chi/v5 v5.1.0/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0=
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4=
github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4=
github.com/go-openapi/jsonpointer v0.22.4 h1:dZtK82WlNpVLDW2jlA1YCiVJFVqkED1MegOUy9kR5T4=
github.com/go-openapi/jsonpointer v0.22.4/go.mod h1:elX9+UgznpFhgBuaMQ7iu4lvvX1nvNsesQ3oxmYTw80=
github.com/go-openapi/jsonreference v0.21.4 h1:24qaE2y9bx/q3uRK/qN+TDwbok1NhbSmGjjySRCHtC8=
github.com/go-openapi/jsonreference v0.21.4/go.mod h1:rIENPTjDbLpzQmQWCj5kKj3ZlmEh+EFVbz3RTUh30/4=
github.com/go-openapi/spec v0.22.3 h1:qRSmj6Smz2rEBxMnLRBMeBWxbbOvuOoElvSvObIgwQc=
github.com/go-openapi/spec v0.22.3/go.mod h1:iIImLODL2loCh3Vnox8TY2YWYJZjMAKYyLH2Mu8lOZs=
github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM=
github.com/go-openapi/swag/conv v0.25.4 h1:/Dd7p0LZXczgUcC/Ikm1+YqVzkEeCc9LnOWjfkpkfe4=
github.com/go-openapi/swag/conv v0.25.4/go.mod h1:3LXfie/lwoAv0NHoEuY1hjoFAYkvlqI/Bn5EQDD3PPU=
github.com/go-openapi/swag/jsonname v0.25.4 h1:bZH0+MsS03MbnwBXYhuTttMOqk+5KcQ9869Vye1bNHI=
github.com/go-openapi/swag/jsonname v0.25.4/go.mod h1:GPVEk9CWVhNvWhZgrnvRA6utbAltopbKwDu8mXNUMag=
github.com/go-openapi/swag/jsonutils v0.25.4 h1:VSchfbGhD4UTf4vCdR2F4TLBdLwHyUDTd1/q4i+jGZA=
github.com/go-openapi/swag/jsonutils v0.25.4/go.mod h1:7OYGXpvVFPn4PpaSdPHJBtF0iGnbEaTk8AvBkoWnaAY=
github.com/go-openapi/swag/jsonutils/fixtures_test v0.25.4 h1:IACsSvBhiNJwlDix7wq39SS2Fh7lUOCJRmx/4SN4sVo=
github.com/go-openapi/swag/jsonutils/fixtures_test v0.25.4/go.mod h1:Mt0Ost9l3cUzVv4OEZG+WSeoHwjWLnarzMePNDAOBiM=
github.com/go-openapi/swag/loading v0.25.4 h1:jN4MvLj0X6yhCDduRsxDDw1aHe+ZWoLjW+9ZQWIKn2s=
github.com/go-openapi/swag/loading v0.25.4/go.mod h1:rpUM1ZiyEP9+mNLIQUdMiD7dCETXvkkC30z53i+ftTE=
github.com/go-openapi/swag/stringutils v0.25.4 h1:O6dU1Rd8bej4HPA3/CLPciNBBDwZj9HiEpdVsb8B5A8=
github.com/go-openapi/swag/stringutils v0.25.4/go.mod h1:GTsRvhJW5xM5gkgiFe0fV3PUlFm0dr8vki6/VSRaZK0=
github.com/go-openapi/swag/typeutils v0.25.4 h1:1/fbZOUN472NTc39zpa+YGHn3jzHWhv42wAJSN91wRw=
github.com/go-openapi/swag/typeutils v0.25.4/go.mod h1:Ou7g//Wx8tTLS9vG0UmzfCsjZjKhpjxayRKTHXf2pTE=
github.com/go-openapi/swag/yamlutils v0.25.4 h1:6jdaeSItEUb7ioS9lFoCZ65Cne1/RZtPBZ9A56h92Sw=
github.com/go-openapi/swag/yamlutils v0.25.4/go.mod h1:MNzq1ulQu+yd8Kl7wPOut/YHAAU/H6hL91fF+E2RFwc=
github.com/go-openapi/testify/enable/yaml/v2 v2.0.2 h1:0+Y41Pz1NkbTHz8NngxTuAXxEodtNSI1WG1c/m5Akw4=
github.com/go-openapi/testify/enable/yaml/v2 v2.0.2/go.mod h1:kme83333GCtJQHXQ8UKX3IBZu6z8T5Dvy5+CW3NLUUg=
github.com/go-openapi/testify/v2 v2.0.2 h1:X999g3jeLcoY8qctY/c/Z8iBHTbwLz7R2WXd6Ub6wls=
github.com/go-openapi/testify/v2 v2.0.2/go.mod h1:HCPmvFFnheKK2BuwSA0TbbdxJ3I16pjwMkYkP4Ywn54=
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs=
github.com/go-openapi/jsonpointer v0.20.0 h1:ESKJdU9ASRfaPNOPRx12IUyA1vn3R9GiE3KYD14BXdQ=
github.com/go-openapi/jsonpointer v0.20.0/go.mod h1:6PGzBjjIIumbLYysB73Klnms1mwnU4G3YHOECG3CedA=
github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo=
github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE=
github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k=
github.com/go-openapi/spec v0.20.9 h1:xnlYNQAwKd2VQRRfwTEI0DcK+2cbuvI/0c7jx3gA8/8=
github.com/go-openapi/spec v0.20.9/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA=
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
github.com/go-openapi/swag v0.22.4 h1:QLMzNJnMGPRNDCbySlcj1x01tzU8/9LTTL9hZZZogBU=
github.com/go-openapi/swag v0.22.4/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w=
github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM=
github.com/go-playground/validator/v10 v10.22.0 h1:k6HsTZ0sTnROkhS//R0O+55JgM8C4Bx7ia+JlgcnOao=
github.com/go-playground/validator/v10 v10.22.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA=
github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
@@ -237,349 +69,175 @@ github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3a
github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1 h1:FWNFq4fM1wPfcK40yHE5UO3RUdSNPaBC+j3PokzA6OQ=
github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1/go.mod h1:5YoVOkjYAQumqlV356Hj3xeYh4BdZuLE0/nRkf2NKkI=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/golang-jwt/jwt/v5 v5.2.3 h1:kkGXqQOBSDDWRhWNXTFpqGSCMyh/PLnqUvMGJPDJDs0=
github.com/golang-jwt/jwt/v5 v5.2.3/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc=
github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs=
github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/go-replayers/grpcreplay v1.3.0 h1:1Keyy0m1sIpqstQmgz307zhiJ1pV4uIlFds5weTmxbo=
github.com/google/go-replayers/grpcreplay v1.3.0/go.mod h1:v6NgKtkijC0d3e3RW8il6Sy5sqRVUwoQa4mHOGEy8DI=
github.com/google/go-replayers/httpreplay v1.2.0 h1:VM1wEyyjaoU53BwrOnaf9VhAyQQEEioJvFYxYcLRKzk=
github.com/google/go-replayers/httpreplay v1.2.0/go.mod h1:WahEFFZZ7a1P4VM1qEeHy+tME4bwyqPcwWbNlUI1Mcg=
github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc=
github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0=
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=
github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd h1:gbpYu9NMq8jhDVbvlGkMFWCjLFlqqEZjEmObmhUy6Vo=
github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/wire v0.7.0 h1:JxUKI6+CVBgCO2WToKy/nQk0sS+amI9z9EjVmdaocj4=
github.com/google/wire v0.7.0/go.mod h1:n6YbUQD9cPKTnHXEBN2DXlOp/mVADhVErcMFb0v3J18=
github.com/googleapis/enterprise-certificate-proxy v0.3.7 h1:zrn2Ee/nWmHulBx5sAVrGgAa0f2/R35S4DJwfFaUPFQ=
github.com/googleapis/enterprise-certificate-proxy v0.3.7/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA=
github.com/googleapis/gax-go/v2 v2.16.0 h1:iHbQmKLLZrexmb0OSsNGTeSTS0HO4YvFOG8g5E4Zd0Y=
github.com/googleapis/gax-go/v2 v2.16.0/go.mod h1:o1vfQjjNZn4+dPnRdl/4ZD7S9414Y4xA+a/6Icj6l14=
github.com/gorilla/schema v1.4.1 h1:jUg5hUjCSDZpNGLuXQOgIWGdlgrIdYvgQ0wZtdK1M3E=
github.com/gorilla/schema v1.4.1/go.mod h1:Dg5SSm5PV60mhF2NFaTV1xuYYj8tV8NOPRo4FggUMnM=
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/hashicorp/hcl/v2 v2.18.1 h1:6nxnOJFku1EuSawSD81fuviYUV8DxFr3fp2dUi3ZYSo=
github.com/hashicorp/hcl/v2 v2.18.1/go.mod h1:ThLC89FV4p9MPW804KVbe/cEXoQ8NZEh+JtMeeGErHE=
github.com/hashicorp/hcl/v2 v2.19.1 h1://i05Jqznmb2EXqa39Nsvyan2o5XyMowW5fnCKW5RPI=
github.com/hashicorp/hcl/v2 v2.19.1/go.mod h1:ThLC89FV4p9MPW804KVbe/cEXoQ8NZEh+JtMeeGErHE=
github.com/hay-kot/httpkit v0.0.11 h1:ZdB2uqsFBSDpfUoClGK5c5orjBjQkEVSXh7fZX5FKEk=
github.com/hay-kot/httpkit v0.0.11/go.mod h1:0kZdk5/swzdfqfg2c6pBWimcgeJ9PTyO97EbHnYl2Sw=
github.com/jarcoal/httpmock v1.3.0 h1:2RJ8GP0IIaWwcC9Fp2BmVi8Kog3v2Hn7VXM3fTd+nuc=
github.com/jarcoal/httpmock v1.3.0/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg=
github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8=
github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs=
github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo=
github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM=
github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg=
github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo=
github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o=
github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg=
github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh687T8=
github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs=
github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY=
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/keybase/go-keychain v0.0.1 h1:way+bWYa6lDppZoZcgMbYsvC7GxljxrskdNInRtuthU=
github.com/keybase/go-keychain v0.0.1/go.mod h1:PdEILRW3i9D8JcdM+FmY6RwkHGnhHxXwkPPMeUgOK1k=
github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk=
github.com/klauspost/compress v1.18.2/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs=
github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY=
github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g=
github.com/minio/highwayhash v1.0.2/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY=
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-sqlite3 v1.14.23 h1:gbShiuAP1W5j9UOksQ06aiiqPMxYecovVGwmTxWtuw0=
github.com/mattn/go-sqlite3 v1.14.23/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
github.com/nats-io/jwt/v2 v2.5.0 h1:WQQ40AAlqqfx+f6ku+i0pOVm+ASirD4fUh+oQsiE9Ak=
github.com/nats-io/jwt/v2 v2.5.0/go.mod h1:24BeQtRwxRV8ruvC4CojXlx/WQ/VjuwlYiH+vu/+ibI=
github.com/nats-io/nats-server/v2 v2.9.23 h1:6Wj6H6QpP9FMlpCyWUaNu2yeZ/qGj+mdRkZ1wbikExU=
github.com/nats-io/nats-server/v2 v2.9.23/go.mod h1:wEjrEy9vnqIGE4Pqz4/c75v9Pmaq7My2IgFmnykc4C0=
github.com/nats-io/nats.go v1.48.0 h1:pSFyXApG+yWU/TgbKCjmm5K4wrHu86231/w84qRVR+U=
github.com/nats-io/nats.go v1.48.0/go.mod h1:iRWIPokVIFbVijxuMQq4y9ttaBTMe0SFdlZfMDd+33g=
github.com/nats-io/nkeys v0.4.12 h1:nssm7JKOG9/x4J8II47VWCL1Ds29avyiQDRn0ckMvDc=
github.com/nats-io/nkeys v0.4.12/go.mod h1:MT59A1HYcjIcyQDJStTfaOY6vhy9XTUjOFo+SVsvpBg=
github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/olahol/melody v1.4.0 h1:Pa5SdeZL/zXPi1tJuMAPDbl4n3gQOThSL6G1p4qZ4SI=
github.com/olahol/melody v1.4.0/go.mod h1:GgkTl6Y7yWj/HtfD48Q5vLKPVoZOH+Qqgfa7CvJgJM4=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/olahol/melody v1.2.1 h1:xdwRkzHxf+B0w4TKbGpUSSkV516ZucQZJIWLztOWICQ=
github.com/olahol/melody v1.2.1/go.mod h1:GgkTl6Y7yWj/HtfD48Q5vLKPVoZOH+Qqgfa7CvJgJM4=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/onsi/ginkgo/v2 v2.9.2 h1:BA2GMJOtfGAfagzYtrAlufIP0lq6QERkFmHLMLPwFSU=
github.com/onsi/ginkgo/v2 v2.9.2/go.mod h1:WHcJJG2dIlcCqVfBAwUCrJxSPFb6v4azBwgxeMeDuts=
github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE=
github.com/onsi/gomega v1.27.6/go.mod h1:PIQNjfQwkP3aQAH7lf7j87O/5FiNr+ZR8+ipb+qQlhg=
github.com/philhofer/fwd v1.2.0 h1:e6DnBTl7vGY+Gz322/ASL4Gyp1FspeMvx1RNDoToZuM=
github.com/philhofer/fwd v1.2.0/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM=
github.com/pierrec/lz4/v4 v4.1.23 h1:oJE7T90aYBGtFNrI8+KbETnPymobAhzRrR8Mu8n1yfU=
github.com/pierrec/lz4/v4 v4.1.23/go.mod h1:EoQMVJgeeEOMsCqCzqFm2O0cJvljX2nGZjcRIPL34O4=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo=
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt9k/+g42oCprj/FisM4qX9L3sZB3upGN2ZU=
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
github.com/pressly/goose/v3 v3.26.0 h1:KJakav68jdH0WDvoAcj8+n61WqOIaPGgH0bJWS6jpmM=
github.com/pressly/goose/v3 v3.26.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
github.com/rabbitmq/amqp091-go v1.10.0 h1:STpn5XsHlHGcecLmMFCtg7mqq0RnD+zFr4uzukfVhBw=
github.com/rabbitmq/amqp091-go v1.10.0/go.mod h1:Hy4jKW5kQART1u+JkDTF9YYOQUHXqMuhrgxOEeS7G4o=
github.com/rcrowley/go-metrics v0.0.0-20250401214520-65e299d6c5c9 h1:bsUq1dX0N8AOIL7EB/X911+m4EHsnWEHeJ0c+3TTBrg=
github.com/rcrowley/go-metrics v0.0.0-20250401214520-65e299d6c5c9/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/redis/go-redis/v9 v9.8.0 h1:q3nRvjrlge/6UD7eTu/DSg2uYiU2mCL0G/uzBWqhicI=
github.com/redis/go-redis/v9 v9.8.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY=
github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ=
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
github.com/shirou/gopsutil/v4 v4.25.11 h1:X53gB7muL9Gnwwo2evPSE+SfOrltMoR6V3xJAXZILTY=
github.com/shirou/gopsutil/v4 v4.25.11/go.mod h1:EivAfP5x2EhLp2ovdpKSozecVXn1TmuG7SMzs/Wh4PU=
github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e h1:MRM5ITcdelLK2j1vwZ3Je0FKVCfqOLp5zO6trqMLYs0=
github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e/go.mod h1:XV66xRDqSt+GTGFMVlhk3ULuV0y9ZmzeVGR4mloJI3M=
github.com/spiffe/go-spiffe/v2 v2.6.0 h1:l+DolpxNWYgruGQVV0xsfeya3CsC7m8iBzDnMpsbLuo=
github.com/spiffe/go-spiffe/v2 v2.6.0/go.mod h1:gm2SeUoMZEtpnzPNs2Csc0D/gX33k1xIx7lEzqblHEs=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8=
github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/swaggo/files/v2 v2.0.2 h1:Bq4tgS/yxLB/3nwOMcul5oLEUKa877Ykgz3CJMVbQKU=
github.com/swaggo/files/v2 v2.0.2/go.mod h1:TVqetIzZsO9OhHX1Am9sRf9LdrFZqoK49N37KON/jr0=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/swaggo/files/v2 v2.0.0 h1:hmAt8Dkynw7Ssz46F6pn8ok6YmGZqHSVLZ+HQM7i0kw=
github.com/swaggo/files/v2 v2.0.0/go.mod h1:24kk2Y9NYEJ5lHuCra6iVwkMjIekMCaFq/0JQj66kyM=
github.com/swaggo/http-swagger/v2 v2.0.2 h1:FKCdLsl+sFCx60KFsyM0rDarwiUSZ8DqbfSyIKC9OBg=
github.com/swaggo/http-swagger/v2 v2.0.2/go.mod h1:r7/GBkAWIfK6E/OLnE8fXnviHiDeAHmgIyooa4xm3AQ=
github.com/swaggo/swag v1.16.6 h1:qBNcx53ZaX+M5dxVyTrgQ0PJ/ACK+NzhwcbieTt+9yI=
github.com/swaggo/swag v1.16.6/go.mod h1:ngP2etMK5a0P3QBizic5MEwpRmluJZPHjXcMoj4Xesg=
github.com/tetratelabs/wazero v1.11.0 h1:+gKemEuKCTevU4d7ZTzlsvgd1uaToIDtlQlmNbwqYhA=
github.com/tetratelabs/wazero v1.11.0/go.mod h1:eV28rsN8Q+xwjogd7f4/Pp4xFxO7uOGbLcD/LzB1wiU=
github.com/tinylib/msgp v1.6.1 h1:ESRv8eL3u+DNHUoSAAQRE50Hm162zqAnBoGv9PzScPY=
github.com/tinylib/msgp v1.6.1/go.mod h1:RSp0LW9oSxFut3KzESt5Voq4GVWyS+PSulT77roAqEA=
github.com/tklauser/go-sysconf v0.3.16 h1:frioLaCQSsF5Cy1jgRBrzr6t502KIIwQ0MArYICU0nA=
github.com/tklauser/go-sysconf v0.3.16/go.mod h1:/qNL9xxDhc7tx3HSRsLWNnuzbVfh3e7gh/BmM179nYI=
github.com/tklauser/numcpus v0.11.0 h1:nSTwhKH5e1dMNsCdVBukSZrURJRoHbSEQjdEbY+9RXw=
github.com/tklauser/numcpus v0.11.0/go.mod h1:z+LwcLq54uWZTX0u/bGobaV34u6V7KNlTZejzM6/3MQ=
github.com/yeqown/go-qrcode/v2 v2.2.5 h1:HCOe2bSjkhZyYoyyNaXNzh4DJZll6inVJQQw+8228Zk=
github.com/yeqown/go-qrcode/v2 v2.2.5/go.mod h1:uHpt9CM0V1HeXLz+Wg5MN50/sI/fQhfkZlOM+cOTHxw=
github.com/yeqown/go-qrcode/writer/standard v1.3.0 h1:chdyhEfRtUPgQtuPeaWVGQ/TQx4rE1PqeoW3U+53t34=
github.com/yeqown/go-qrcode/writer/standard v1.3.0/go.mod h1:O4MbzsotGCvy8upYPCR91j81dr5XLT7heuljcNXW+oQ=
github.com/swaggo/swag v1.16.3 h1:PnCYjPCah8FK4I26l2F/KQ4yz3sILcVUN3cTlBFA9Pg=
github.com/swaggo/swag v1.16.3/go.mod h1:DImHIuOFXKpMFAQjcC7FG4m3Dg4+QuUgUzJmKjI/gRk=
github.com/yeqown/go-qrcode/v2 v2.2.4 h1:cXdYlrhzHzVAnJHiwr/T6lAUmS9MtEStjEZBjArrvnc=
github.com/yeqown/go-qrcode/v2 v2.2.4/go.mod h1:uHpt9CM0V1HeXLz+Wg5MN50/sI/fQhfkZlOM+cOTHxw=
github.com/yeqown/go-qrcode/writer/standard v1.2.4 h1:41e/aLr1AMVWlug6oUMkDg2r0+dv5ofB7UaTkekKZBc=
github.com/yeqown/go-qrcode/writer/standard v1.2.4/go.mod h1:H8nLSGYUWBpNyBPjDcJzAanMzYBBYMFtrU2lwoSRn+k=
github.com/yeqown/reedsolomon v1.0.0 h1:x1h/Ej/uJnNu8jaX7GLHBWmZKCAWjEJTetkqaabr4B0=
github.com/yeqown/reedsolomon v1.0.0/go.mod h1:P76zpcn2TCuL0ul1Fso373qHRc69LKwAw/Iy6g1WiiM=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
github.com/zclconf/go-cty v1.14.1 h1:t9fyA35fwjjUMcmL5hLER+e/rEPqrbCK1/OSE4SI9KA=
github.com/zclconf/go-cty v1.14.1/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE=
github.com/zclconf/go-cty v1.14.4 h1:uXXczd9QDGsgu0i/QFR/hzI5NYCHLf6NQw/atrbnhq8=
github.com/zclconf/go-cty v1.14.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE=
github.com/zclconf/go-cty-yaml v1.1.0 h1:nP+jp0qPHv2IhUVqmQSzjvqAWcObN0KBkUl2rWBdig0=
github.com/zclconf/go-cty-yaml v1.1.0/go.mod h1:9YLUH4g7lOhVWqUbctnVlZ5KLpg7JAprQNgxSZ1Gyxs=
github.com/zeebo/assert v1.1.0 h1:hU1L1vLTHsnO8x8c9KAR5GmM5QscxHg5RNU5z5qbUWY=
github.com/zeebo/assert v1.1.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
github.com/zeebo/blake3 v0.2.4 h1:KYQPkhpRtcqh0ssGYcKLG1JYvddkEA8QwCM/yBqhaZI=
github.com/zeebo/blake3 v0.2.4/go.mod h1:7eeQ6d2iXWRGF6npfaxl2CU+xy2Fjo2gxeyZGCRUjcE=
github.com/zeebo/pcg v1.0.1 h1:lyqfGeWiv4ahac6ttHs+I5hwtH/+1mrhlCtVNQM2kHo=
github.com/zeebo/pcg v1.0.1/go.mod h1:09F0S9iiKrwn9rlI5yjLkmrug154/YRW6KnnXVDM/l4=
go.balki.me/anyhttp v0.5.2 h1:et4tCDXLeXpWfMNvRKG7ojfrnlr3du7cEaG966MLSpA=
go.balki.me/anyhttp v0.5.2/go.mod h1:JhfekOIjgVODoVqUCficjpIgmB3wwlB7jhN0eN2EZ/s=
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
go.opentelemetry.io/contrib/detectors/gcp v1.38.0 h1:ZoYbqX7OaA/TAikspPl3ozPI6iY6LiIY9I8cUfm+pJs=
go.opentelemetry.io/contrib/detectors/gcp v1.38.0/go.mod h1:SU+iU7nu5ud4oCb3LQOhIZ3nRLj6FNVrKgtflbaf2ts=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0 h1:rbRJ8BBoVMsQShESYZ0FkvcITu8X8QNwJogcLUmDNNw=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0/go.mod h1:ru6KHrNtNHxM4nD/vd6QrLVWgKhxPYgblq4VAtNawTQ=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 h1:Hf9xI/XLML9ElpiHVDNwvqI0hIFlzV8dgIr35kV1kRU=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0/go.mod h1:NfchwuyNoMcZ5MLHwPrODwUF1HWCXWrL31s8gSAdIKY=
go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8=
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.37.0 h1:6VjV6Et+1Hd2iLZEPtdV7vie80Yyqf7oikJLjQ/myi0=
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.37.0/go.mod h1:u8hcp8ji5gaM/RfcOo8z9NMnf1pVLfVY7lBY2VOGuUU=
go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0=
go.opentelemetry.io/otel/metric v1.39.0/go.mod h1:jrZSWL33sD7bBxg1xjrqyDjnuzTUB0x1nBERXd7Ftcs=
go.opentelemetry.io/otel/sdk v1.39.0 h1:nMLYcjVsvdui1B/4FRkwjzoRVsMK8uL/cj0OyhKzt18=
go.opentelemetry.io/otel/sdk v1.39.0/go.mod h1:vDojkC4/jsTJsE+kh+LXYQlbL8CgrEcwmt1ENZszdJE=
go.opentelemetry.io/otel/sdk/metric v1.39.0 h1:cXMVVFVgsIf2YL6QkRF4Urbr/aMInf+2WKg+sEJTtB8=
go.opentelemetry.io/otel/sdk/metric v1.39.0/go.mod h1:xq9HEVH7qeX69/JnwEfp6fVq5wosJsY1mt4lLfYdVew=
go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
gocloud.dev v0.44.0 h1:iVyMAqFl2r6xUy7M4mfqwlN+21UpJoEtgHEcfiLMUXs=
gocloud.dev v0.44.0/go.mod h1:ZmjROXGdC/eKZLF1N+RujDlFRx3D+4Av2thREKDMVxY=
gocloud.dev/pubsub/kafkapubsub v0.44.0 h1:nQvzfnEN6lCh4j2p+1t0OLS4nmC2U/Ji5aWHVwgkifg=
gocloud.dev/pubsub/kafkapubsub v0.44.0/go.mod h1:/gcNz6OG4HgcY+w2LXwwY4qaRMgtq+SXoPSQU2jOlcw=
gocloud.dev/pubsub/natspubsub v0.44.0 h1:1Us76ckkdgtiE1p1rJZ+38b9TQP051bmjAiQlFQzYrM=
gocloud.dev/pubsub/natspubsub v0.44.0/go.mod h1:PvVAGIhL14PWGwWIXX/zAK42ixr2/PKP4Q4yMiAUraQ=
gocloud.dev/pubsub/rabbitpubsub v0.44.0 h1:MpRIO6XJ/JTqrlUWt3CxwDe1LvaiXUVu4sS5cv4f/AM=
gocloud.dev/pubsub/rabbitpubsub v0.44.0/go.mod h1:BB9+qT3r6g4M5+4asiXaEeqw4QAOzsWusO5krYaqkdA=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU=
golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0=
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93 h1:fQsdNF2N+/YewlRZiricy4P1iimyPKZ/xwniHj8Q2a0=
golang.org/x/exp v0.0.0-20251219203646-944ab1f22d93/go.mod h1:EPRbTFwzwjXj9NpYyyrvenVh9Y+GFeEvMNh7Xuz7xgU=
golang.org/x/image v0.34.0 h1:33gCkyw9hmwbZJeZkct8XyR11yH889EQt/QH4VmXMn8=
golang.org/x/image v0.34.0/go.mod h1:2RNFBZRB+vnwwFil8GkMdRvrJOFd1AzdZI6vOY+eJVU=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw=
golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A=
golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70=
golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw=
golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
golang.org/x/image v0.18.0 h1:jGzIakQa/ZXI1I0Fxvaa9W7yP25TqT6cHIHn+6CqvSQ=
golang.org/x/image v0.18.0/go.mod h1:4yyo5vMFQjVjUcVk4jEQcU9MGy/rulF5WvUILseCM2E=
golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0=
golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE=
golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY=
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
google.golang.org/api v0.258.0 h1:IKo1j5FBlN74fe5isA2PVozN3Y5pwNKriEgAXPOkDAc=
google.golang.org/api v0.258.0/go.mod h1:qhOMTQEZ6lUps63ZNq9jhODswwjkjYYguA7fA3TBFww=
google.golang.org/genproto v0.0.0-20251202230838-ff82c1b0f217 h1:GvESR9BIyHUahIb0NcTum6itIWtdoglGX+rnGxm2934=
google.golang.org/genproto v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:yJ2HH4EHEDTd3JiLmhds6NkJ17ITVYOdV3m3VKOnws0=
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 h1:fCvbg86sFXwdrl5LgVcTEvNC+2txB5mgROGmRL5mrls=
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:+rXWjjaukWZun3mLfjmVnQi18E1AsFbDN9QdJ5YXLto=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251222181119-0a764e51fe1b h1:Mv8VFug0MP9e5vUxfBcE3vUkV6CImK3cMNMIDFjmzxU=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251222181119-0a764e51fe1b/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ=
google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc=
google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U=
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24=
golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ=
google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
modernc.org/cc/v4 v4.27.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/ccgo/v4 v4.30.1 h1:4r4U1J6Fhj98NKfSjnPUN7Ze2c6MnAdL0hWw6+LrJpc=
modernc.org/ccgo/v4 v4.30.1/go.mod h1:bIOeI1JL54Utlxn+LwrFyjCx2n2RDiYEaJVSrgdrRfM=
modernc.org/fileutil v1.3.40 h1:ZGMswMNc9JOCrcrakF1HrvmergNLAmxOPjizirpfqBA=
modernc.org/fileutil v1.3.40/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
modernc.org/gc/v3 v3.1.1 h1:k8T3gkXWY9sEiytKhcgyiZ2L0DTyCQ/nvX+LoCljoRE=
modernc.org/gc/v3 v3.1.1/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY=
modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
modernc.org/libc v1.67.2 h1:ZbNmly1rcbjhot5jlOZG0q4p5VwFfjwWqZ5rY2xxOXo=
modernc.org/libc v1.67.2/go.mod h1:QvvnnJ5P7aitu0ReNpVIEyesuhmDLQ8kaEoyMjIFZJA=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
modernc.org/sqlite v1.41.0 h1:bJXddp4ZpsqMsNN1vS0jWo4IJTZzb8nWpcgvyCFG9Ck=
modernc.org/sqlite v1.41.0/go.mod h1:9fjQZ0mB1LLP0GYrp39oOJXx/I2sxEnZtzCmEQIKvGE=
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE=
modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ=
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6 h1:5D53IMaUuA5InSeMu9eJtlQXS2NxAhyWQvkKEgXZhHI=
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6/go.mod h1:Qz0X07sNOR1jWYCrJMEnbW/X55x206Q7Vt4mz6/wHp4=
modernc.org/libc v1.55.3 h1:AzcW1mhlPNrRtjS5sS+eW2ISCgSOLLNyFzRh/V3Qj/U=
modernc.org/libc v1.55.3/go.mod h1:qFXepLhz+JjFThQ4kzwzOjA/y/artDeg+pcYnY+Q83w=
modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4=
modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo=
modernc.org/memory v1.8.0 h1:IqGTL6eFMaDZZhEWwcREgeMXYwmW83LYW8cROZYkg+E=
modernc.org/memory v1.8.0/go.mod h1:XPZ936zp5OMKGWPqbD3JShgd/ZoQ7899TUuQqxY+peU=
modernc.org/sqlite v1.33.0 h1:WWkA/T2G17okiLGgKAj4/RMIvgyMT19yQ038160IeYk=
modernc.org/sqlite v1.33.0/go.mod h1:9uQ9hF/pCZoYZK73D/ud5Z7cIRIILSZI8NdIemVMTX8=
modernc.org/sqlite v1.33.1 h1:trb6Z3YYoeM9eDL1O8do81kP+0ejv+YzgyFo+Gwy0nM=
modernc.org/sqlite v1.33.1/go.mod h1:pXV2xHxhzXZsgT/RtTFAPY6JJDEvOTcTdwADQCCWD4k=
modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA=
modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=

View File

@@ -7,7 +7,6 @@ import (
_ "embed"
"encoding/json"
"io"
"log"
"slices"
"strings"
"sync"
@@ -16,24 +15,6 @@ import (
//go:embed currencies.json
var defaults []byte

const (
	MinDecimals = 0
	MaxDecimals = 18
)

// clampDecimals ensures the decimals value is within a safe range [0, 18]
func clampDecimals(decimals int, code string) int {
	original := decimals
	if decimals < MinDecimals {
		decimals = MinDecimals
		log.Printf("WARNING: Currency %s had negative decimals (%d), normalized to %d", code, original, decimals)
	} else if decimals > MaxDecimals {
		decimals = MaxDecimals
		log.Printf("WARNING: Currency %s had excessive decimals (%d), normalized to %d", code, original, decimals)
	}
	return decimals
}

type CollectorFunc func() ([]Currency, error)

func CollectJSON(reader io.Reader) CollectorFunc {
@@ -44,11 +25,6 @@ func CollectJSON(reader io.Reader) CollectorFunc {
			return nil, err
		}

		// Clamp decimals during collection to ensure early normalization
		for i := range currencies {
			currencies[i].Decimals = clampDecimals(currencies[i].Decimals, currencies[i].Code)
		}

		return currencies, nil
	}
}
@@ -72,11 +48,10 @@ func CollectionCurrencies(collectors ...CollectorFunc) ([]Currency, error) {
}

type Currency struct {
	Name     string `json:"name"`
	Code     string `json:"code"`
	Local    string `json:"local"`
	Symbol   string `json:"symbol"`
	Decimals int    `json:"decimals"`
	Name   string `json:"name"`
	Code   string `json:"code"`
	Local  string `json:"local"`
	Symbol string `json:"symbol"`
}

type CurrencyRegistry struct {
@@ -87,10 +62,7 @@ type CurrencyRegistry struct {
func NewCurrencyService(currencies []Currency) *CurrencyRegistry {
	registry := make(map[string]Currency, len(currencies))
	for i := range currencies {
		// Clamp decimals to safe range before adding to registry
		currency := currencies[i]
		currency.Decimals = clampDecimals(currency.Decimals, currency.Code)
		registry[currency.Code] = currency
		registry[currencies[i].Code] = currencies[i]
	}

	return &CurrencyRegistry{

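For reference, a minimal standalone sketch (not part of the diff above) showing how the clampDecimals helper from this file behaves; the package name, main function, and sample currency codes/values are hypothetical and only illustrate the [0, 18] normalization:

package main

import (
	"fmt"
	"log"
)

const (
	MinDecimals = 0
	MaxDecimals = 18
)

// clampDecimals ensures the decimals value is within a safe range [0, 18],
// logging a warning whenever a value has to be normalized.
func clampDecimals(decimals int, code string) int {
	original := decimals
	if decimals < MinDecimals {
		decimals = MinDecimals
		log.Printf("WARNING: Currency %s had negative decimals (%d), normalized to %d", code, original, decimals)
	} else if decimals > MaxDecimals {
		decimals = MaxDecimals
		log.Printf("WARNING: Currency %s had excessive decimals (%d), normalized to %d", code, original, decimals)
	}
	return decimals
}

func main() {
	// Hypothetical sample inputs: one valid, one negative, one excessive.
	samples := []struct {
		code     string
		decimals int
	}{
		{"USD", 2},  // already valid, unchanged
		{"AAA", -3}, // negative, clamped to 0
		{"BBB", 42}, // excessive, clamped to 18
	}
	for _, s := range samples {
		fmt.Printf("%s: %d -> %d\n", s.code, s.decimals, clampDecimals(s.decimals, s.code))
	}
}
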
Some files were not shown because too many files have changed in this diff.