diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 5fd264a8..448dc1de 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -29,6 +29,6 @@
   // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
   "remoteUser": "node",
   "features": {
-    "ghcr.io/devcontainers/features/go:1": "1.21"
+    "ghcr.io/devcontainers/features/go:1": "1.24"
   }
 }
diff --git a/.github/AGENTS.md b/.github/AGENTS.md
new file mode 100644
index 00000000..3db03e75
--- /dev/null
+++ b/.github/AGENTS.md
@@ -0,0 +1,40 @@
+This is a Go based repository with a VueJS client for the frontend built with Vite and Nuxt, with ShadCN.
+
+To make life easier, the use of a Taskfile is included for the majority of development commands.
+
+Please follow these guidelines when contributing:
+
+## Required Before Each Commit
+- Generate Swagger Files: `task swag --force`
+- Generate JS API Client: `task typescript-types --force`
+- Lint Golang: `task go:lint`
+- Lint frontend: `task ui:fix`
+
+## Repository Structure
+### Backend
+- `backend/`: Contains the backend folders
+- `backend/app`: Contains main app code including API endpoints
+- `backend/internal/core`: Contains basic services such as currencies
+- `backend/data`: Contains all information related to data, including `ent` schemas, repos, migrations, etc.
+- `backend/data/migrations`: Contains migration data, the `sqlite3` sub-folder contains sqlite migrations, `postgres` sub-folder the postgres migrations, BOTH are REQUIRED.
+- `backend/data/ent/schema`: Contains the actual `ent` data models.
+- `backend/data/repo`: Contains the data repositories
+- `backend/pkgs`: Contains general helper functions and services
+
+### Frontend
+- `frontend/`: Contains initial frontend files
+- `frontend/components`: Contains the ShadCN components
+- `frontend/locales`: Contains the i18n JSON for languages
+- `frontend/pages`: Contains VueJS pages
+- `frontend/test`: Contains Playwright setup
+- `frontend/test/e2e`: Contains actual Playwright test files
+
+### Docs
+- `docs/`: Contains VitePress based documentation
+
+## Key Guidelines
+1. Follow best practices for the various programming languages
+2. Maintain existing code structure and organization when possible
+3. Use dependency injection when reasonable
+4. Write tests for new functionality and after fixing bugs to validate they're fixed
+5. Document changes to the `docs/` folder when appropriate
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/DISCUSSION_TEMPLATE/ideas.yml
similarity index 66%
rename from .github/ISSUE_TEMPLATE/feature_request.yml
rename to .github/DISCUSSION_TEMPLATE/ideas.yml
index 7be76a99..69ed6a1b 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.yml
+++ b/.github/DISCUSSION_TEMPLATE/ideas.yml
@@ -1,9 +1,4 @@
----
-name: "Feature Request"
-description: "Submit a feature request for the current release"
 labels: ["⬆️ enhancement"]
-projects: ["sysadminsmedia/2"]
-type: "Enhancement"
 body:
   - type: textarea
     id: problem-statement
@@ -31,9 +26,5 @@ body:
   - type: textarea
     id: contributions
     attributes:
       label: Contributions
       description: Please confirm the following options:
-        - label: I have searched through existing issues and feature requests to see if my idea has already been proposed.
+        - label: I have searched through existing ideas in the discussions to check if this is a duplicate
          required: true
-        - label: If this feature is accepted, I would be willing to help implement and maintain this feature.
-          required: false
-        - label: If this feature is accepted, I'm willing to sponsor the development of this feature.
-          required: false
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 00000000..5c6ca0ca
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,11 @@
+blank_issues_enabled: false
+contact_links:
+  - name: GitHub Community Support
+    url: https://github.com/sysadminsmedia/homebox/discussions/categories/support
+    about: Get support for issues here
+  - name: Feature Requests
+    url: https://github.com/sysadminsmedia/homebox/discussions/categories/ideas
+    about: Have an idea for Homebox? Share it in our discussions forum. If we decide to take it on we will create an issue for it.
+  - name: Translate
+    url: https://translate.sysadminsmedia.com
+    about: Help us translate Homebox! All contributions and all languages welcome!
diff --git a/.github/workflows/binaries-publish.yaml b/.github/workflows/binaries-publish.yaml
index 05e7b963..e435b491 100644
--- a/.github/workflows/binaries-publish.yaml
+++ b/.github/workflows/binaries-publish.yaml
@@ -17,10 +17,8 @@ jobs:
       - name: Set up Go
         uses: actions/setup-go@v5
         with:
-          go-version: "1.23"
-          cache-dependency-path: |
-            backend/go.mod
-            backend/go.sum
+          go-version: "1.24"
+          cache-dependency-path: backend/go.mod
 
       - uses: pnpm/action-setup@v2
         with:
diff --git a/.github/workflows/clear-stale-docker-images.yml b/.github/workflows/clear-stale-docker-images.yml
index 5a2dc40b..6811a5ff 100644
--- a/.github/workflows/clear-stale-docker-images.yml
+++ b/.github/workflows/clear-stale-docker-images.yml
@@ -7,46 +7,35 @@ on:
 jobs:
   delete-old-images-main:
-    name: Delete Untagged Images
+    name: Delete Old Images for Main Repo
     runs-on: ubuntu-latest
     permissions:
       packages: write
     steps:
-      - name: Fetch multi-platform package version SHAs
-        id: multi-arch-digests
-        run: |
-          package1=$(docker manifest inspect ghcr.io/sysadminsmedia/homebox | jq -r '.manifests.[] | .digest' | paste -s -d ' ' -)
-          echo "multi-arch-digests=$package1" >> $GITHUB_OUTPUT
-      - uses: snok/container-retention-policy@v3.0.0
+      - uses: dataaxiom/ghcr-cleanup-action@v1
         with:
-          skip-shas: ${{ steps.multi-arch-digests.outputs.multi-arch-digests }}
-          # The type of account. Can be either 'org' or 'personal'.
-          account: sysadminsmedia
-          # Image name to delete. Supports passing several names as a comma-separated list.
-          image-names: homebox
-          # The cut-off for which to delete images older than. For example '2 days ago UTC'. Timezone is required.
-          cut-off: 90d
-          # Personal access token with read and delete scopes.
-          token: ${{ secrets.CLEANUP_PAT }}
-          # Restrict deletions to images without specific tags. Supports Unix-shell style wildcards
-          skip-tags: "!latest,!latest-rootless,!0.*,!0.*-rootless,!main,!main-rootless,!vnext,!vnext-rootless,!0,!0-rootless" # optional
-          # Do not actually delete images. Print output showing what would have been deleted.
-          dry-run: true # optional, default is false
+          dry-run: true
+          delete-ghost-images: true
+          delete-partial-images: true
+          delete-orphaned-images: true
+          delete-untagged: true
+          validate: true
+          token: '${{ github.token }}'
+          package: homebox
+          use-regex: true
+          exclude-tags: latest,latest-rootless,main,main-rootless,nightly,nightly-rootless,*.*.*,0.*,0,*.*.*-rootless,0.*-rootless,0-rootless
+          older-than: 180 days
 
   delete-old-images-devcache:
-    name: Delete Cache Old Images
+    name: Delete Old Devcache Images
     runs-on: ubuntu-latest
     permissions:
       packages: write
     steps:
-      - uses: snok/container-retention-policy@v3.0.0
+      - uses: dataaxiom/ghcr-cleanup-action@v1
         with:
-          # The type of account. Can be either 'org' or 'personal'.
-          account: sysadminsmedia
-          image-names: devcache
-          # The cut-off for which to delete images older than. For example '2 days ago UTC'. Timezone is required.
-          cut-off: 90d
-          # Personal access token with read and delete scopes.
-          token: ${{ secrets.CLEANUP_PAT }}
-          # Do not actually delete images. Print output showing what would have been deleted.
-          dry-run: true # optional, default is false
+          dry-run: false
+          delete-untagged: true
+          token: '${{ github.token }}'
+          package: devcache
+          older-than: 90 days
diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml
new file mode 100644
index 00000000..7a9cd774
--- /dev/null
+++ b/.github/workflows/copilot-setup-steps.yml
@@ -0,0 +1,52 @@
+name: "Copilot Setup Steps"
+
+# Automatically run the setup steps when they are changed to allow for easy validation, and
+# allow manual testing through the repository's "Actions" tab
+on:
+  workflow_dispatch:
+  push:
+    paths:
+      - .github/workflows/copilot-setup-steps.yml
+  pull_request:
+    paths:
+      - .github/workflows/copilot-setup-steps.yml
+
+jobs:
+  # The job MUST be called `copilot-setup-steps` or it will not be picked up by Copilot.
+  copilot-setup-steps:
+    runs-on: ubuntu-latest
+
+    # Set the permissions to the lowest permissions possible needed for your steps.
+    # Copilot will be given its own token for its operations.
+    permissions:
+      # If you want to clone the repository as part of your setup steps, for example to install dependencies, you'll need the `contents: read` permission. If you don't clone the repository in your setup steps, Copilot will do this for you automatically after the steps complete.
+      contents: read
+
+    # You can define any steps you want, and they will run before the agent starts.
+    # If you do not check out your code, Copilot will do this for you.
+ steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: "22" + + - uses: pnpm/action-setup@v3.0.0 + with: + version: 9.12.2 + + - name: Set up Go + uses: actions/setup-go@v5 + with: + go-version: "1.24" + cache-dependency-path: backend/go.mod + + - name: Install Task + uses: arduino/setup-task@v1 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + + - name: Perform setup + run: task setup diff --git a/.github/workflows/docker-publish-rootless.yaml b/.github/workflows/docker-publish-rootless.yaml index 21839c54..41073bb2 100644 --- a/.github/workflows/docker-publish-rootless.yaml +++ b/.github/workflows/docker-publish-rootless.yaml @@ -33,7 +33,7 @@ permissions: env: DOCKERHUB_REPO: sysadminsmedia/homebox - GHCR_REPO: ghcr.io/sysadminsmedia/homebox + GHCR_REPO: ghcr.io/${{ github.repository }} jobs: build: @@ -83,7 +83,7 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 - if: github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') + if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/')) with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} @@ -159,7 +159,7 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 - if: github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') + if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/')) with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} @@ -204,7 +204,7 @@ jobs: - name: Create manifest list and push Dockerhub id: push-dockerhub working-directory: /tmp/digests - if: github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') + if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/')) run: | docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ $(printf '${{ env.DOCKERHUB_REPO }}@sha256:%s ' *) diff --git a/.github/workflows/docker-publish.yaml b/.github/workflows/docker-publish.yaml index 0613a063..f52287cb 100644 --- a/.github/workflows/docker-publish.yaml +++ b/.github/workflows/docker-publish.yaml @@ -27,7 +27,7 @@ on: env: DOCKERHUB_REPO: sysadminsmedia/homebox - GHCR_REPO: ghcr.io/sysadminsmedia/homebox + GHCR_REPO: ghcr.io/${{ github.repository }} permissions: contents: read # Access to repository contents @@ -78,7 +78,7 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 - if: github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') + if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/')) with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} @@ -152,6 +152,7 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 + if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/')) with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} @@ -194,8 +195,7 @@ jobs: - name: Create manifest list and push Dockerhub id: push-dockerhub working-directory: /tmp/digests - if: github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/') + if: (github.event_name == 'schedule' || startsWith(github.ref, 'refs/tags/')) run: | docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ $(printf '${{ env.DOCKERHUB_REPO }}@sha256:%s ' *) - diff --git a/.github/workflows/e2e-partial.yaml b/.github/workflows/e2e-partial.yaml index cc3b2917..f6ee9213 100644 --- a/.github/workflows/e2e-partial.yaml +++ b/.github/workflows/e2e-partial.yaml @@ -28,9 +28,7 @@ jobs: uses: actions/setup-go@v5 with: go-version: "1.23" - cache-dependency-path: | - backend/go.mod - backend/go.sum + cache-dependency-path: backend/go.mod - uses: actions/setup-node@v4 with: diff --git a/.github/workflows/partial-backend.yaml b/.github/workflows/partial-backend.yaml index 6ce3fbc4..7c620b08 100644 --- a/.github/workflows/partial-backend.yaml +++ b/.github/workflows/partial-backend.yaml @@ -12,10 +12,8 @@ jobs: - name: Set up Go uses: actions/setup-go@v5 with: - go-version: "1.23" - cache-dependency-path: | - backend/go.mod - backend/go.sum + go-version: "1.24" + cache-dependency-path: backend/go.mod - name: Install Task uses: arduino/setup-task@v1 @@ -37,8 +35,3 @@ jobs: - name: Test run: task go:coverage - - - name: Validate OpenAPI definition - uses: swaggerexpert/swagger-editor-validate@v1 - with: - definition-file: backend/app/api/static/docs/swagger.json diff --git a/.github/workflows/partial-frontend.yaml b/.github/workflows/partial-frontend.yaml index 948cefa8..b219fed3 100644 --- a/.github/workflows/partial-frontend.yaml +++ b/.github/workflows/partial-frontend.yaml @@ -61,9 +61,7 @@ jobs: uses: actions/setup-go@v5 with: go-version: "1.23" - cache-dependency-path: | - backend/go.mod - backend/go.sum + cache-dependency-path: backend/go.mod - uses: actions/setup-node@v4 with: @@ -114,9 +112,7 @@ jobs: uses: actions/setup-go@v5 with: go-version: "1.23" - cache-dependency-path: | - backend/go.mod - backend/go.sum + cache-dependency-path: backend/go.mod - uses: actions/setup-node@v4 with: diff --git a/.vscode/launch.json b/.vscode/launch.json index c621b0a3..32daf54d 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -16,14 +16,12 @@ "type": "go", "request": "launch", "mode": "debug", - "program": "${workspaceRoot}/backend/app/api/", + "program": "${workspaceFolder}/backend/app/api/", "args": [], "env": { "HBOX_DEMO": "true", "HBOX_LOG_LEVEL": "debug", - "HBOX_DEBUG_ENABLED": "true", - "HBOX_STORAGE_DATA": "${workspaceRoot}/backend/.data", - "HBOX_STORAGE_SQLITE_URL": "${workspaceRoot}/backend/.data/homebox.db?_fk=1&_time_format=sqlite" + "HBOX_DEBUG_ENABLED": "true" }, "console": "integratedTerminal", }, @@ -46,4 +44,4 @@ "console": "integratedTerminal", } ] -} \ No newline at end of file +} diff --git a/Dockerfile b/Dockerfile index cec76307..89f4249a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -31,6 +31,8 @@ RUN go mod download # Build API stage FROM public.ecr.aws/docker/library/golang:alpine AS builder +ARG TARGETOS +ARG TARGETARCH ARG BUILD_TIME ARG COMMIT ARG VERSION @@ -38,7 +40,8 @@ ARG VERSION # Install necessary build tools RUN apk update && \ apk upgrade && \ - apk add --no-cache git build-base gcc g++ + apk add --no-cache git build-base gcc g++ && \ + if [ "$TARGETARCH" != "arm" ] || [ "$TARGETARCH" != "riscv64" ]; then apk --no-cache add libwebp libavif libheif libjxl; fi WORKDIR /go/src/app @@ -52,19 +55,26 @@ COPY --from=frontend-builder /app/.output/public ./app/api/static/public # Use cache for Go build artifacts RUN --mount=type=cache,target=/root/.cache/go-build \ - CGO_ENABLED=0 GOOS=linux go build \ - -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \ - -o /go/bin/api \ - -v 
./app/api/*.go + if [ "$TARGETARCH" = "arm" ] || [ "$TARGETARCH" = "riscv64" ]; \ + then echo "nodynamic" $TARGETOS $TARGETARCH; CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH go build \ + -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \ + -tags nodynamic -o /go/bin/api -v ./app/api/*.go; \ + else \ + echo $TARGETOS $TARGETARCH; CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH go build \ + -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \ + -o /go/bin/api -v ./app/api/*.go; \ + fi # Production stage FROM public.ecr.aws/docker/library/alpine:latest ENV HBOX_MODE=production -ENV HBOX_STORAGE_DATA=/data/ +ENV HBOX_STORAGE_CONN_STRING=file:///?no_tmp_dir=true +ENV HBOX_STORAGE_PREFIX_PATH=data ENV HBOX_DATABASE_SQLITE_PATH=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite # Install necessary runtime dependencies -RUN apk --no-cache add ca-certificates wget +RUN apk --no-cache add ca-certificates wget && \ + if [ "$TARGETARCH" != "arm" ] || [ "$TARGETARCH" != "riscv64" ]; then apk --no-cache add libwebp libavif libheif libjxl; fi # Create application directory and copy over built Go binary RUN mkdir /app diff --git a/Dockerfile.rootless b/Dockerfile.rootless index 684e2cfd..06a5737d 100644 --- a/Dockerfile.rootless +++ b/Dockerfile.rootless @@ -31,6 +31,8 @@ RUN go mod download # Build API stage FROM public.ecr.aws/docker/library/golang:alpine AS builder +ARG TARGETOS +ARG TARGETARCH ARG BUILD_TIME ARG COMMIT ARG VERSION @@ -38,7 +40,8 @@ ARG VERSION # Install necessary build tools RUN apk update && \ apk upgrade && \ - apk add --no-cache git build-base gcc g++ + apk add --no-cache git build-base gcc g++ && \ + if [ "$TARGETARCH" != "arm" ] || [ "$TARGETARCH" != "riscv64" ]; then apk --no-cache add libwebp libavif libheif libjxl; fi WORKDIR /go/src/app @@ -52,21 +55,29 @@ COPY --from=frontend-builder /app/.output/public ./app/api/static/public # Use cache for Go build artifacts RUN --mount=type=cache,target=/root/.cache/go-build \ - CGO_ENABLED=0 GOOS=linux go build \ - -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \ - -o /go/bin/api \ - -v ./app/api/*.go + if [ "$TARGETARCH" = "arm" ] || [ "$TARGETARCH" = "riscv64" ]; \ + then echo "nodynamic" $TARGETOS $TARGETARCH; CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH go build \ + -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \ + -tags nodynamic -o /go/bin/api -v ./app/api/*.go; \ + else \ + echo $TARGETOS $TARGETARCH; CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH go build \ + -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \ + -o /go/bin/api -v ./app/api/*.go; \ + fi RUN mkdir /data # Production stage FROM public.ecr.aws/docker/library/alpine:latest ENV HBOX_MODE=production -ENV HBOX_STORAGE_DATA=/data/ +ENV HBOX_STORAGE_CONN_STRING=file:///?no_tmp_dir=true +ENV HBOX_STORAGE_PREFIX_PATH=data ENV HBOX_DATABASE_SQLITE_PATH=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite # Install necessary runtime dependencies -RUN apk --no-cache add ca-certificates wget +RUN apk --no-cache add ca-certificates wget && \ + if [ "$TARGETARCH" != "arm" ] || [ "$TARGETARCH" != "riscv64" ]; then apk --no-cache add libwebp libavif libheif libjxl; fi + # Create a nonroot user with UID/GID 65532 RUN addgroup -g 65532 nonroot && adduser -u 65532 -G 
nonroot -S nonroot diff --git a/Taskfile.yml b/Taskfile.yml index 13ec614c..f6c8fc5d 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -25,6 +25,8 @@ tasks: cmds: - swag fmt --dir={{ .API }} - swag init --dir={{ .API }},{{ .INTERNAL }}/core/services,{{ .INTERNAL }}/data/repo --parseDependency + - cp -r ./docs/swagger.json ../../../../docs/en/api/openapi-2.0.json + - cp -r ./docs/swagger.yaml ../../../../docs/en/api/openapi-2.0.yaml sources: - "./backend/app/api/**/*" - "./backend/internal/data/**" @@ -95,7 +97,7 @@ tasks: desc: Runs all go tests using gotestsum - supports passing gotestsum args dir: backend cmds: - - gotestsum {{ .CLI_ARGS }} ./... + - go test {{ .CLI_ARGS }} ./... go:coverage: desc: Runs all go tests with -race flag and generates a coverage report @@ -148,7 +150,7 @@ tasks: desc: Run frontend development server dir: frontend cmds: - - pnpm dev + - pnpm dev --no-fork ui:ci: desc: Run frontend build in CI mode @@ -174,7 +176,7 @@ tasks: cmds: - cd backend && go build ./app/api - backend/api & - - sleep 10 + - sleep 15 - cd frontend && pnpm run test:ci silent: true @@ -191,7 +193,7 @@ tasks: cmds: - cd backend && go build ./app/api - backend/api & - - sleep 10 + - sleep 15 - cd frontend && pnpm run test:ci silent: true diff --git a/backend/.goreleaser.yaml b/backend/.goreleaser.yaml index 625e722a..02c80bcb 100644 --- a/backend/.goreleaser.yaml +++ b/backend/.goreleaser.yaml @@ -14,16 +14,29 @@ builds: - linux - windows - darwin + - freebsd goarch: - amd64 - "386" - arm - arm64 + - riscv64 ignore: - goos: windows goarch: arm - goos: windows goarch: "386" + - goos: freebsd + goarch: arm + - goos: freebsd + goarch: "386" + tags: + - >- + {{- if eq .Arch "riscv64" }}nodynamic + {{- else if eq .Arch "arm" }}nodynamic + {{- else if eq .Arch "386" }}nodynamic + {{- else if eq .Os "freebsd" }}nodynamic + {{ end }} signs: - cmd: cosign @@ -54,7 +67,6 @@ archives: release: extra_files: - glob: dist/*.sig - checksum: name_template: 'checksums.txt' snapshot: diff --git a/backend/app/api/handlers/v1/v1_ctrl_actions.go b/backend/app/api/handlers/v1/v1_ctrl_actions.go index a134e490..9f53f496 100644 --- a/backend/app/api/handlers/v1/v1_ctrl_actions.go +++ b/backend/app/api/handlers/v1/v1_ctrl_actions.go @@ -81,3 +81,16 @@ func (ctrl *V1Controller) HandleItemDateZeroOut() errchain.HandlerFunc { func (ctrl *V1Controller) HandleSetPrimaryPhotos() errchain.HandlerFunc { return actionHandlerFactory("ensure asset IDs", ctrl.repo.Items.SetPrimaryPhotos) } + +// HandleCreateMissingThumbnails godoc +// +// @Summary Create Missing Thumbnails +// @Description Creates thumbnails for items that are missing them +// @Tags Actions +// @Produce json +// @Success 200 {object} ActionAmountResult +// @Router /v1/actions/create-missing-thumbnails [Post] +// @Security Bearer +func (ctrl *V1Controller) HandleCreateMissingThumbnails() errchain.HandlerFunc { + return actionHandlerFactory("create missing thumbnails", ctrl.repo.Attachments.CreateMissingThumbnails) +} diff --git a/backend/app/api/handlers/v1/v1_ctrl_auth.go b/backend/app/api/handlers/v1/v1_ctrl_auth.go index dce66340..64d556fe 100644 --- a/backend/app/api/handlers/v1/v1_ctrl_auth.go +++ b/backend/app/api/handlers/v1/v1_ctrl_auth.go @@ -79,15 +79,15 @@ type AuthProvider interface { // HandleAuthLogin godoc // -// @Summary User Login -// @Tags Authentication -// @Accept x-www-form-urlencoded -// @Accept application/json -// @Param payload body LoginForm true "Login Data" -// @Param provider query string false "auth provider" -// @Produce json -// 
@Success 200 {object} TokenResponse -// @Router /v1/users/login [POST] +// @Summary User Login +// @Tags Authentication +// @Accept x-www-form-urlencoded +// @Accept application/json +// @Param payload body LoginForm true "Login Data" +// @Param provider query string false "auth provider" +// @Produce json +// @Success 200 {object} TokenResponse +// @Router /v1/users/login [POST] func (ctrl *V1Controller) HandleAuthLogin(ps ...AuthProvider) errchain.HandlerFunc { if len(ps) == 0 { panic("no auth providers provided") diff --git a/backend/app/api/handlers/v1/v1_ctrl_items.go b/backend/app/api/handlers/v1/v1_ctrl_items.go index 316c0afd..b1874134 100644 --- a/backend/app/api/handlers/v1/v1_ctrl_items.go +++ b/backend/app/api/handlers/v1/v1_ctrl_items.go @@ -156,7 +156,7 @@ func (ctrl *V1Controller) HandleItemFullPath() errchain.HandlerFunc { // HandleItemsCreate godoc // -// @Summary (Deprecated) Create Item +// @Summary Create Item // @Tags Items // @Produce json // @Param payload body repo.ItemCreate true "Item Data" @@ -201,7 +201,6 @@ func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc { // @Success 204 // @Router /v1/items/{id} [DELETE] // @Security Bearer -// @Deprecated func (ctrl *V1Controller) HandleItemDelete() errchain.HandlerFunc { fn := func(r *http.Request, ID uuid.UUID) (any, error) { auth := services.NewContext(r.Context()) diff --git a/backend/app/api/handlers/v1/v1_ctrl_items_attachments.go b/backend/app/api/handlers/v1/v1_ctrl_items_attachments.go index 23df120f..e055ec63 100644 --- a/backend/app/api/handlers/v1/v1_ctrl_items_attachments.go +++ b/backend/app/api/handlers/v1/v1_ctrl_items_attachments.go @@ -3,6 +3,7 @@ package v1 import ( "errors" "net/http" + "net/url" "path/filepath" "strconv" "strings" @@ -14,6 +15,13 @@ import ( "github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment" "github.com/sysadminsmedia/homebox/backend/internal/data/repo" "github.com/sysadminsmedia/homebox/backend/internal/sys/validate" + + "gocloud.dev/blob" + _ "gocloud.dev/blob/azureblob" + _ "gocloud.dev/blob/fileblob" + _ "gocloud.dev/blob/gcsblob" + _ "gocloud.dev/blob/memblob" + _ "gocloud.dev/blob/s3blob" ) type ( @@ -24,19 +32,19 @@ type ( // HandleItemAttachmentCreate godocs // -// @Summary Create Item Attachment -// @Tags Items Attachments -// @Accept multipart/form-data -// @Produce json -// @Param id path string true "Item ID" -// @Param file formData file true "File attachment" -// @Param type formData string true "Type of file" -// @Param primary formData bool false "Is this the primary attachment" -// @Param name formData string true "name of the file including extension" -// @Success 200 {object} repo.ItemOut -// @Failure 422 {object} validate.ErrorResponse -// @Router /v1/items/{id}/attachments [POST] -// @Security Bearer +// @Summary Create Item Attachment +// @Tags Items Attachments +// @Accept multipart/form-data +// @Produce json +// @Param id path string true "Item ID" +// @Param file formData file true "File attachment" +// @Param type formData string false "Type of file" +// @Param primary formData bool false "Is this the primary attachment" +// @Param name formData string true "name of the file including extension" +// @Success 200 {object} repo.ItemOut +// @Failure 422 {object} validate.ErrorResponse +// @Router /v1/items/{id}/attachments [POST] +// @Security Bearer func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) error { err := 
r.ParseMultipartForm(ctrl.maxUploadSize << 20) @@ -75,7 +83,7 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc { ext := filepath.Ext(attachmentName) switch strings.ToLower(ext) { - case ".jpg", ".jpeg", ".png", ".webp", ".gif", ".bmp", ".tiff": + case ".jpg", ".jpeg", ".png", ".webp", ".gif", ".bmp", ".tiff", ".avif", ".ico", ".heic", ".jxl": attachmentType = attachment.TypePhoto.String() default: attachmentType = attachment.TypeAttachment.String() @@ -167,13 +175,39 @@ func (ctrl *V1Controller) handleItemAttachmentsHandler(w http.ResponseWriter, r ctx := services.NewContext(r.Context()) switch r.Method { case http.MethodGet: - doc, err := ctrl.svc.Items.AttachmentPath(r.Context(), attachmentID) + doc, err := ctrl.svc.Items.AttachmentPath(r.Context(), ctx.GID, attachmentID) if err != nil { log.Err(err).Msg("failed to get attachment path") return validate.NewRequestError(err, http.StatusInternalServerError) } - // w.Header().Set("Content-Disposition", "attachment; filename="+doc.Title) - http.ServeFile(w, r, doc.Path) + + bucket, err := blob.OpenBucket(ctx, ctrl.repo.Attachments.GetConnString()) + if err != nil { + log.Err(err).Msg("failed to open bucket") + return validate.NewRequestError(err, http.StatusInternalServerError) + } + file, err := bucket.NewReader(ctx, doc.Path, nil) + if err != nil { + log.Err(err).Msg("failed to open file") + return validate.NewRequestError(err, http.StatusInternalServerError) + } + defer func(file *blob.Reader) { + err := file.Close() + if err != nil { + log.Err(err).Msg("failed to close file") + } + }(file) + defer func(bucket *blob.Bucket) { + err := bucket.Close() + if err != nil { + log.Err(err).Msg("failed to close bucket") + } + }(bucket) + + // Set the Content-Disposition header for RFC6266 compliance + disposition := "attachment; filename*=UTF-8''" + url.QueryEscape(doc.Title) + w.Header().Set("Content-Disposition", disposition) + http.ServeContent(w, r, doc.Title, doc.CreatedAt, file) return nil // Delete Attachment Handler @@ -196,9 +230,9 @@ func (ctrl *V1Controller) handleItemAttachmentsHandler(w http.ResponseWriter, r } attachment.ID = attachmentID - val, err := ctrl.svc.Items.AttachmentUpdate(ctx, ID, &attachment) + val, err := ctrl.svc.Items.AttachmentUpdate(ctx, ctx.GID, ID, &attachment) if err != nil { - log.Err(err).Msg("failed to delete attachment") + log.Err(err).Msg("failed to update attachment") return validate.NewRequestError(err, http.StatusInternalServerError) } diff --git a/backend/app/api/handlers/v1/v1_ctrl_labelmaker.go b/backend/app/api/handlers/v1/v1_ctrl_labelmaker.go index ed4620a2..f3b1613b 100644 --- a/backend/app/api/handlers/v1/v1_ctrl_labelmaker.go +++ b/backend/app/api/handlers/v1/v1_ctrl_labelmaker.go @@ -35,14 +35,14 @@ func generateOrPrint(ctrl *V1Controller, w http.ResponseWriter, r *http.Request, // HandleGetLocationLabel godoc // -// @Summary Get Location label -// @Tags Locations -// @Produce json -// @Param id path string true "Location ID" -// @Param print query bool false "Print this label, defaults to false" -// @Success 200 {string} string "image/png" -// @Router /v1/labelmaker/location/{id} [GET] -// @Security Bearer +// @Summary Get Location label +// @Tags Locations +// @Produce json +// @Param id path string true "Location ID" +// @Param print query bool false "Print this label, defaults to false" +// @Success 200 {string} string "image/png" +// @Router /v1/labelmaker/location/{id} [GET] +// @Security Bearer func (ctrl *V1Controller) HandleGetLocationLabel() 
errchain.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) error { ID, err := adapters.RouteUUID(r, "id") @@ -63,14 +63,14 @@ func (ctrl *V1Controller) HandleGetLocationLabel() errchain.HandlerFunc { // HandleGetItemLabel godoc // -// @Summary Get Item label -// @Tags Items -// @Produce json -// @Param id path string true "Item ID" -// @Param print query bool false "Print this label, defaults to false" -// @Success 200 {string} string "image/png" -// @Router /v1/labelmaker/item/{id} [GET] -// @Security Bearer +// @Summary Get Item label +// @Tags Items +// @Produce json +// @Param id path string true "Item ID" +// @Param print query bool false "Print this label, defaults to false" +// @Success 200 {string} string "image/png" +// @Router /v1/labelmaker/item/{id} [GET] +// @Security Bearer func (ctrl *V1Controller) HandleGetItemLabel() errchain.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) error { ID, err := adapters.RouteUUID(r, "id") @@ -97,14 +97,14 @@ func (ctrl *V1Controller) HandleGetItemLabel() errchain.HandlerFunc { // HandleGetAssetLabel godoc // -// @Summary Get Asset label -// @Tags Items -// @Produce json -// @Param id path string true "Asset ID" -// @Param print query bool false "Print this label, defaults to false" -// @Success 200 {string} string "image/png" -// @Router /v1/labelmaker/assets/{id} [GET] -// @Security Bearer +// @Summary Get Asset label +// @Tags Items +// @Produce json +// @Param id path string true "Asset ID" +// @Param print query bool false "Print this label, defaults to false" +// @Success 200 {string} string "image/png" +// @Router /v1/labelmaker/assets/{id} [GET] +// @Security Bearer func (ctrl *V1Controller) HandleGetAssetLabel() errchain.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) error { assetIDParam := chi.URLParam(r, "id") diff --git a/backend/app/api/handlers/v1/v1_ctrl_maint_entry.go b/backend/app/api/handlers/v1/v1_ctrl_maint_entry.go index b981405b..b465b1c3 100644 --- a/backend/app/api/handlers/v1/v1_ctrl_maint_entry.go +++ b/backend/app/api/handlers/v1/v1_ctrl_maint_entry.go @@ -12,14 +12,14 @@ import ( // HandleMaintenanceLogGet godoc // -// @Summary Get Maintenance Log -// @Tags Item Maintenance -// @Produce json -// @Param id path string true "Item ID" -// @Param filters query repo.MaintenanceFilters false "which maintenance to retrieve" -// @Success 200 {array} repo.MaintenanceEntryWithDetails[] -// @Router /v1/items/{id}/maintenance [GET] -// @Security Bearer +// @Summary Get Maintenance Log +// @Tags Item Maintenance +// @Produce json +// @Param id path string true "Item ID" +// @Param filters query repo.MaintenanceFilters false "which maintenance to retrieve" +// @Success 200 {array} repo.MaintenanceEntryWithDetails[] +// @Router /v1/items/{id}/maintenance [GET] +// @Security Bearer func (ctrl *V1Controller) HandleMaintenanceLogGet() errchain.HandlerFunc { fn := func(r *http.Request, ID uuid.UUID, filters repo.MaintenanceFilters) ([]repo.MaintenanceEntryWithDetails, error) { auth := services.NewContext(r.Context()) @@ -31,14 +31,14 @@ func (ctrl *V1Controller) HandleMaintenanceLogGet() errchain.HandlerFunc { // HandleMaintenanceEntryCreate godoc // -// @Summary Create Maintenance Entry -// @Tags Item Maintenance -// @Produce json -// @Param id path string true "Item ID" -// @Param payload body repo.MaintenanceEntryCreate true "Entry Data" -// @Success 201 {object} repo.MaintenanceEntry -// @Router /v1/items/{id}/maintenance [POST] -// @Security Bearer +// @Summary Create 
Maintenance Entry +// @Tags Item Maintenance +// @Produce json +// @Param id path string true "Item ID" +// @Param payload body repo.MaintenanceEntryCreate true "Entry Data" +// @Success 201 {object} repo.MaintenanceEntry +// @Router /v1/items/{id}/maintenance [POST] +// @Security Bearer func (ctrl *V1Controller) HandleMaintenanceEntryCreate() errchain.HandlerFunc { fn := func(r *http.Request, itemID uuid.UUID, body repo.MaintenanceEntryCreate) (repo.MaintenanceEntry, error) { auth := services.NewContext(r.Context()) diff --git a/backend/app/api/handlers/v1/v1_ctrl_maintenance.go b/backend/app/api/handlers/v1/v1_ctrl_maintenance.go index 64ba29cc..becab81b 100644 --- a/backend/app/api/handlers/v1/v1_ctrl_maintenance.go +++ b/backend/app/api/handlers/v1/v1_ctrl_maintenance.go @@ -30,14 +30,14 @@ func (ctrl *V1Controller) HandleMaintenanceGetAll() errchain.HandlerFunc { // HandleMaintenanceEntryUpdate godoc // -// @Summary Update Maintenance Entry -// @Tags Maintenance -// @Produce json -// @Param id path string true "Maintenance ID" -// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data" -// @Success 200 {object} repo.MaintenanceEntry -// @Router /v1/maintenance/{id} [PUT] -// @Security Bearer +// @Summary Update Maintenance Entry +// @Tags Maintenance +// @Produce json +// @Param id path string true "Maintenance ID" +// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data" +// @Success 200 {object} repo.MaintenanceEntry +// @Router /v1/maintenance/{id} [PUT] +// @Security Bearer func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() errchain.HandlerFunc { fn := func(r *http.Request, entryID uuid.UUID, body repo.MaintenanceEntryUpdate) (repo.MaintenanceEntry, error) { auth := services.NewContext(r.Context()) @@ -49,13 +49,13 @@ func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() errchain.HandlerFunc { // HandleMaintenanceEntryDelete godoc // -// @Summary Delete Maintenance Entry -// @Tags Maintenance -// @Produce json -// @Param id path string true "Maintenance ID" -// @Success 204 -// @Router /v1/maintenance/{id} [DELETE] -// @Security Bearer +// @Summary Delete Maintenance Entry +// @Tags Maintenance +// @Produce json +// @Param id path string true "Maintenance ID" +// @Success 204 +// @Router /v1/maintenance/{id} [DELETE] +// @Security Bearer func (ctrl *V1Controller) HandleMaintenanceEntryDelete() errchain.HandlerFunc { fn := func(r *http.Request, entryID uuid.UUID) (any, error) { auth := services.NewContext(r.Context()) diff --git a/backend/app/api/handlers/v1/v1_ctrl_notifiers.go b/backend/app/api/handlers/v1/v1_ctrl_notifiers.go index 16072be4..a119e498 100644 --- a/backend/app/api/handlers/v1/v1_ctrl_notifiers.go +++ b/backend/app/api/handlers/v1/v1_ctrl_notifiers.go @@ -83,13 +83,13 @@ func (ctrl *V1Controller) HandleUpdateNotifier() errchain.HandlerFunc { // HandlerNotifierTest godoc // -// @Summary Test Notifier -// @Tags Notifiers -// @Produce json -// @Param url query string true "URL" -// @Success 204 -// @Router /v1/notifiers/test [POST] -// @Security Bearer +// @Summary Test Notifier +// @Tags Notifiers +// @Produce json +// @Param url query string true "URL" +// @Success 204 +// @Router /v1/notifiers/test [POST] +// @Security Bearer func (ctrl *V1Controller) HandlerNotifierTest() errchain.HandlerFunc { type body struct { URL string `json:"url" validate:"required"` diff --git a/backend/app/api/handlers/v1/v1_ctrl_product_search.go b/backend/app/api/handlers/v1/v1_ctrl_product_search.go new file mode 100644 index 00000000..248cf87e --- 
/dev/null +++ b/backend/app/api/handlers/v1/v1_ctrl_product_search.go @@ -0,0 +1,332 @@ +package v1 + +import ( + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "time" + + "github.com/hay-kot/httpkit/errchain" + "github.com/hay-kot/httpkit/server" + "github.com/rs/zerolog/log" + "github.com/sysadminsmedia/homebox/backend/internal/data/repo" + "github.com/sysadminsmedia/homebox/backend/internal/sys/config" + "github.com/sysadminsmedia/homebox/backend/internal/web/adapters" +) + +type UPCITEMDBResponse struct { + Code string `json:"code"` + Total int `json:"total"` + Offset int `json:"offset"` + Items []struct { + Ean string `json:"ean"` + Title string `json:"title"` + Description string `json:"description"` + Upc string `json:"upc"` + Brand string `json:"brand"` + Model string `json:"model"` + Color string `json:"color"` + Size string `json:"size"` + Dimension string `json:"dimension"` + Weight string `json:"weight"` + Category string `json:"category"` + LowestRecordedPrice float64 `json:"lowest_recorded_price"` + HighestRecordedPrice float64 `json:"highest_recorded_price"` + Images []string `json:"images"` + Offers []struct { + Merchant string `json:"merchant"` + Domain string `json:"domain"` + Title string `json:"title"` + Currency string `json:"currency"` + ListPrice string `json:"list_price"` + Price float64 `json:"price"` + Shipping string `json:"shipping"` + Condition string `json:"condition"` + Availability string `json:"availability"` + Link string `json:"link"` + UpdatedT int `json:"updated_t"` + } `json:"offers"` + Asin string `json:"asin"` + Elid string `json:"elid"` + } `json:"items"` +} + +type BARCODESPIDER_COMResponse struct { + ItemResponse struct { + Code int `json:"code"` + Status string `json:"status"` + Message string `json:"message"` + } `json:"item_response"` + ItemAttributes struct { + Title string `json:"title"` + Upc string `json:"upc"` + Ean string `json:"ean"` + ParentCategory string `json:"parent_category"` + Category string `json:"category"` + Brand string `json:"brand"` + Model string `json:"model"` + Mpn string `json:"mpn"` + Manufacturer string `json:"manufacturer"` + Publisher string `json:"publisher"` + Asin string `json:"asin"` + Color string `json:"color"` + Size string `json:"size"` + Weight string `json:"weight"` + Image string `json:"image"` + IsAdult string `json:"is_adult"` + Description string `json:"description"` + } `json:"item_attributes"` + Stores []struct { + StoreName string `json:"store_name"` + Title string `json:"title"` + Image string `json:"image"` + Price string `json:"price"` + Currency string `json:"currency"` + Link string `json:"link"` + Updated string `json:"updated"` + } `json:"Stores"` +} + +// HandleGenerateQRCode godoc +// +// @Summary Search EAN from Barcode +// @Tags Items +// @Produce json +// @Param data query string false "barcode to be searched" +// @Success 200 {object} []repo.BarcodeProduct +// @Router /v1/products/search-from-barcode [GET] +// @Security Bearer +func (ctrl *V1Controller) HandleProductSearchFromBarcode(conf config.BarcodeAPIConf) errchain.HandlerFunc { + type query struct { + // 80 characters is the longest non-2D barcode length (GS1-128) + EAN string `schema:"productEAN" validate:"required,max=80"` + } + + return func(w http.ResponseWriter, r *http.Request) error { + q, err := adapters.DecodeQuery[query](r) + if err != nil { + return err + } + + const TIMEOUT_SEC = 10 + + log.Info().Msg("Processing barcode lookup request on: " + q.EAN) + + // Search 
on UPCITEMDB + var products []repo.BarcodeProduct + + // www.ean-search.org/: not free + + // Example code: dewalt 5035048748428 + + upcitemdb := func(iEan string) ([]repo.BarcodeProduct, error) { + client := &http.Client{Timeout: TIMEOUT_SEC * time.Second} + resp, err := client.Get("https://api.upcitemdb.com/prod/trial/lookup?upc=" + iEan) + if err != nil { + return nil, err + } + + defer func() { + err = errors.Join(err, resp.Body.Close()) + }() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("API returned status code: %d", resp.StatusCode) + } + + // We Read the response body on the line below. + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + // Uncomment the following string for debug + // sb := string(body) + // log.Debug().Msg("Response: " + sb) + + var result UPCITEMDBResponse + if err := json.Unmarshal(body, &result); err != nil { // Parse []byte to go struct pointer + log.Error().Msg("Can not unmarshal JSON") + } + + var res []repo.BarcodeProduct + + for _, it := range result.Items { + var p repo.BarcodeProduct + p.SearchEngineName = "upcitemdb.com" + p.Barcode = iEan + + p.Item.Description = it.Description + p.Item.Name = it.Title + p.Manufacturer = it.Brand + p.ModelNumber = it.Model + if len(it.Images) != 0 { + p.ImageURL = it.Images[0] + } + + res = append(res, p) + } + + return res, nil + } + + ps, err := upcitemdb(q.EAN) + if err != nil { + log.Error().Msg("Can not retrieve product from upcitemdb.com" + err.Error()) + } + + // Barcode spider implementation + barcodespider := func(tokenAPI string, iEan string) ([]repo.BarcodeProduct, error) { + if len(tokenAPI) == 0 { + return nil, errors.New("no api token configured for barcodespider. " + + "Please define the api token in environment variable HBOX_BARCODE_TOKEN_BARCODESPIDER") + } + + req, err := http.NewRequest( + "GET", "https://api.barcodespider.com/v1/lookup?upc="+iEan, nil) + + if err != nil { + return nil, err + } + + req.Header.Add("token", tokenAPI) + + client := &http.Client{Timeout: TIMEOUT_SEC * time.Second} + + resp, err := client.Do(req) + if err != nil { + return nil, err + } + + // defer the call to Body.Close(). We also check the error code, and merge + // it with the other error in this code to avoid error overiding. + defer func() { + err = errors.Join(err, resp.Body.Close()) + }() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("barcodespider API returned status code: %d", resp.StatusCode) + } + + // We Read the response body on the line below. + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + // Uncomment the following string for debug + // sb := string(body) + // log.Debug().Msg("Response: " + sb) + + var result BARCODESPIDER_COMResponse + if err := json.Unmarshal(body, &result); err != nil { // Parse []byte to go struct pointer + log.Error().Msg("Can not unmarshal JSON") + } + + // TODO: check 200 code on HTTP response. 
+ var p repo.BarcodeProduct + p.Barcode = iEan + p.SearchEngineName = "barcodespider.com" + p.Item.Name = result.ItemAttributes.Title + p.Item.Description = result.ItemAttributes.Description + p.Manufacturer = result.ItemAttributes.Brand + p.ModelNumber = result.ItemAttributes.Model + p.ImageURL = result.ItemAttributes.Image + + var res []repo.BarcodeProduct + res = append(res, p) + + return res, nil + } + + ps2, err := barcodespider(conf.TokenBarcodespider, q.EAN) + if err != nil { + log.Error().Msg("Can not retrieve product from barcodespider.com: " + err.Error()) + } + + // Merge everything. + products = append(products, ps...) + + products = append(products, ps2...) + + // Retrieve images if possible + for i := range products { + p := &products[i] + + if len(p.ImageURL) == 0 { + continue + } + + // Validate URL is HTTPS + u, err := url.Parse(p.ImageURL) + if err != nil || u.Scheme != "https" { + log.Warn().Msg("Skipping non-HTTPS image URL: " + p.ImageURL) + continue + } + + client := &http.Client{Timeout: TIMEOUT_SEC * time.Second} + res, err := client.Get(p.ImageURL) + if err != nil { + log.Warn().Msg("Cannot fetch image for URL: " + p.ImageURL + ": " + err.Error()) + } + + defer func() { + err = errors.Join(err, res.Body.Close()) + }() + + // Validate response + if res.StatusCode != http.StatusOK { + continue + } + + // Check content type + contentType := res.Header.Get("Content-Type") + if !strings.HasPrefix(contentType, "image/") { + continue + } + + // Limit image size to 8MB + limitedReader := io.LimitReader(res.Body, 8*1024*1024) + + // Read data of image + bytes, err := io.ReadAll(limitedReader) + if err != nil { + log.Warn().Msg(err.Error()) + continue + } + + // Convert to Base64 + var base64Encoding string + + // Determine the content type of the image file + mimeType := http.DetectContentType(bytes) + + // Prepend the appropriate URI scheme header depending + // on the MIME type + switch mimeType { + case "image/jpeg": + base64Encoding += "data:image/jpeg;base64," + case "image/png": + base64Encoding += "data:image/png;base64," + default: + continue + } + + // Append the base64 encoded output + base64Encoding += base64.StdEncoding.EncodeToString(bytes) + + p.ImageBase64 = base64Encoding + } + + if len(products) != 0 { + return server.JSON(w, http.StatusOK, products) + } + + return server.JSON(w, http.StatusNoContent, nil) + } +} diff --git a/backend/app/api/logger.go b/backend/app/api/logger.go index 714cba44..4ed5d341 100644 --- a/backend/app/api/logger.go +++ b/backend/app/api/logger.go @@ -18,7 +18,10 @@ func (a *app) setupLogger() { } level, err := zerolog.ParseLevel(a.conf.Log.Level) - if err == nil { + if err != nil { + log.Error().Err(err).Str("level", a.conf.Log.Level).Msg("invalid log level, falling back to info") + zerolog.SetGlobalLevel(zerolog.InfoLevel) + } else { zerolog.SetGlobalLevel(level) } } diff --git a/backend/app/api/main.go b/backend/app/api/main.go index d50cc9d4..a3fa817a 100644 --- a/backend/app/api/main.go +++ b/backend/app/api/main.go @@ -1,18 +1,16 @@ package main import ( - "bytes" "context" + "errors" "fmt" - "net/http" - "os" - "strings" - "time" - - "github.com/pressly/goose/v3" - "github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5/middleware" + "github.com/pressly/goose/v3" + "github.com/sysadminsmedia/homebox/backend/internal/sys/analytics" + "net/http" + "strings" + "time" "github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/graceful" @@ -25,14 +23,22 @@ import ( "github.com/sysadminsmedia/homebox/backend/internal/data/ent" 
"github.com/sysadminsmedia/homebox/backend/internal/data/migrations" "github.com/sysadminsmedia/homebox/backend/internal/data/repo" - "github.com/sysadminsmedia/homebox/backend/internal/sys/analytics" "github.com/sysadminsmedia/homebox/backend/internal/sys/config" "github.com/sysadminsmedia/homebox/backend/internal/web/mid" + "go.balki.me/anyhttp" _ "github.com/lib/pq" _ "github.com/sysadminsmedia/homebox/backend/internal/data/migrations/postgres" _ "github.com/sysadminsmedia/homebox/backend/internal/data/migrations/sqlite3" _ "github.com/sysadminsmedia/homebox/backend/pkgs/cgofreesqlite" + + _ "gocloud.dev/pubsub/awssnssqs" + _ "gocloud.dev/pubsub/azuresb" + _ "gocloud.dev/pubsub/gcppubsub" + _ "gocloud.dev/pubsub/kafkapubsub" + _ "gocloud.dev/pubsub/mempubsub" + _ "gocloud.dev/pubsub/natspubsub" + _ "gocloud.dev/pubsub/rabbitpubsub" ) var ( @@ -63,19 +69,19 @@ func validatePostgresSSLMode(sslMode string) bool { return validModes[strings.ToLower(strings.TrimSpace(sslMode))] } -// @title Homebox API -// @version 1.0 -// @description Track, Manage, and Organize your Things. -// @contact.name Homebox Team -// @contact.url https://discord.homebox.software -// @host demo.homebox.software -// @schemes https http -// @BasePath /api -// @securityDefinitions.apikey Bearer -// @in header -// @name Authorization -// @description "Type 'Bearer TOKEN' to correctly set the API Key" -// @externalDocs.url https://homebox.software/en/api +// @title Homebox API +// @version 1.0 +// @description Track, Manage, and Organize your Things. +// @contact.name Homebox Team +// @contact.url https://discord.homebox.software +// @host demo.homebox.software +// @schemes https http +// @BasePath /api +// @securityDefinitions.apikey Bearer +// @in header +// @name Authorization +// @description "Type 'Bearer TOKEN' to correctly set the API Key" +// @externalDocs.url https://homebox.software/en/api func main() { zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack @@ -94,17 +100,9 @@ func run(cfg *config.Config) error { app := new(cfg) app.setupLogger() - if cfg.Options.AllowAnalytics { - analytics.Send(version, build()) - } - // ========================================================================= // Initialize Database & Repos - - err := os.MkdirAll(cfg.Storage.Data, 0o755) - if err != nil { - log.Fatal().Err(err).Msg("failed to create data directory") - } + setupStorageDir(cfg) if strings.ToLower(cfg.Database.Driver) == "postgres" { if !validatePostgresSSLMode(cfg.Database.SslMode) { @@ -112,16 +110,7 @@ func run(cfg *config.Config) error { } } - // Set up the database URL based on the driver because for some reason a common URL format is not used - databaseURL := "" - switch strings.ToLower(cfg.Database.Driver) { - case "sqlite3": - databaseURL = cfg.Database.SqlitePath - case "postgres": - databaseURL = fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=%s", cfg.Database.Host, cfg.Database.Port, cfg.Database.Username, cfg.Database.Password, cfg.Database.Database, cfg.Database.SslMode) - default: - log.Fatal().Str("driver", cfg.Database.Driver).Msg("unsupported database driver") - } + databaseURL := setupDatabaseURL(cfg) c, err := ent.Open(strings.ToLower(cfg.Database.Driver), databaseURL) if err != nil { @@ -147,25 +136,9 @@ func run(cfg *config.Config) error { return err } - collectFuncs := []currencies.CollectorFunc{ - currencies.CollectDefaults(), - } - - if cfg.Options.CurrencyConfig != "" { - log.Info(). - Str("path", cfg.Options.CurrencyConfig). 
- Msg("loading currency config file") - - content, err := os.ReadFile(cfg.Options.CurrencyConfig) - if err != nil { - log.Error(). - Err(err). - Str("path", cfg.Options.CurrencyConfig). - Msg("failed to read currency config file") - return err - } - - collectFuncs = append(collectFuncs, currencies.CollectJSON(bytes.NewReader(content))) + collectFuncs, err := loadCurrencies(cfg) + if err != nil { + return err } currencies, err := currencies.CollectionCurrencies(collectFuncs...) @@ -178,7 +151,7 @@ func run(cfg *config.Config) error { app.bus = eventbus.New() app.db = c - app.repos = repo.New(c, app.bus, cfg.Storage.Data) + app.repos = repo.New(c, app.bus, cfg.Storage, cfg.Database.PubSubConnString, cfg.Thumbnail) app.services = services.New( app.repos, services.WithAutoIncrementAssetID(cfg.Options.AutoIncrementAssetID), @@ -219,90 +192,52 @@ func run(cfg *config.Config) error { _ = httpserver.Shutdown(context.Background()) }() + listener, addrType, addrCfg, err := anyhttp.GetListener(cfg.Web.Host) + if err == nil { + switch addrType { + case anyhttp.SystemdFD: + sysdCfg := addrCfg.(*anyhttp.SysdConfig) + if sysdCfg.IdleTimeout != nil { + log.Error().Msg("idle timeout not yet supported. Please remove and try again") + return errors.New("idle timeout not yet supported. Please remove and try again") + } + fallthrough + case anyhttp.UnixSocket: + log.Info().Msgf("Server is running on %s", cfg.Web.Host) + return httpserver.Serve(listener) + } + } else { + log.Debug().Msgf("anyhttp error: %v", err) + } log.Info().Msgf("Server is running on %s:%s", cfg.Web.Host, cfg.Web.Port) return httpserver.ListenAndServe() }) - // ========================================================================= // Start Reoccurring Tasks + registerRecurringTasks(app, cfg, runner) - runner.AddFunc("eventbus", app.bus.Run) - - runner.AddFunc("seed_database", func(ctx context.Context) error { - // TODO: Remove through external API that does setup - if cfg.Demo { - log.Info().Msg("Running in demo mode, creating demo data") - err := app.SetupDemo() - if err != nil { - log.Fatal().Msg(err.Error()) - } - } - return nil - }) - - runner.AddPlugin(NewTask("purge-tokens", time.Duration(24)*time.Hour, func(ctx context.Context) { - _, err := app.repos.AuthTokens.PurgeExpiredTokens(ctx) - if err != nil { - log.Error(). - Err(err). - Msg("failed to purge expired tokens") - } - })) - - runner.AddPlugin(NewTask("purge-invitations", time.Duration(24)*time.Hour, func(ctx context.Context) { - _, err := app.repos.Groups.InvitationPurge(ctx) - if err != nil { - log.Error(). - Err(err). - Msg("failed to purge expired invitations") - } - })) - - runner.AddPlugin(NewTask("send-notifications", time.Duration(1)*time.Hour, func(ctx context.Context) { - now := time.Now() - - if now.Hour() == 8 { - fmt.Println("run notifiers") - err := app.services.BackgroundService.SendNotifiersToday(context.Background()) - if err != nil { - log.Error(). - Err(err). - Msg("failed to send notifiers") - } - } - })) - - if cfg.Options.GithubReleaseCheck { - runner.AddPlugin(NewTask("get-latest-github-release", time.Hour, func(ctx context.Context) { - log.Debug().Msg("running get latest github release") - err := app.services.BackgroundService.GetLatestGithubRelease(context.Background()) - if err != nil { - log.Error(). - Err(err). 
- Msg("failed to get latest github release") + // Send analytics if enabled at around midnight UTC + if cfg.Options.AllowAnalytics { + analyticsTime := time.Second + runner.AddPlugin(NewTask("send-analytics", analyticsTime, func(ctx context.Context) { + for { + now := time.Now().UTC() + nextMidnight := time.Date(now.Year(), now.Month(), now.Day()+1, 0, 0, 0, 0, time.UTC) + dur := time.Until(nextMidnight) + analyticsTime = dur + select { + case <-ctx.Done(): + return + case <-time.After(dur): + log.Debug().Msg("running send analytics") + err := analytics.Send(version, build()) + if err != nil { + log.Error().Err(err).Msg("failed to send analytics") + } + } } })) } - if cfg.Debug.Enabled { - runner.AddFunc("debug", func(ctx context.Context) error { - debugserver := http.Server{ - Addr: fmt.Sprintf("%s:%s", cfg.Web.Host, cfg.Debug.Port), - Handler: app.debugRouter(), - ReadTimeout: cfg.Web.ReadTimeout, - WriteTimeout: cfg.Web.WriteTimeout, - IdleTimeout: cfg.Web.IdleTimeout, - } - - go func() { - <-ctx.Done() - _ = debugserver.Shutdown(context.Background()) - }() - - log.Info().Msgf("Debug server is running on %s:%s", cfg.Web.Host, cfg.Debug.Port) - return debugserver.ListenAndServe() - }) - } - return runner.Start(context.Background()) } diff --git a/backend/app/api/recurring.go b/backend/app/api/recurring.go new file mode 100644 index 00000000..d6df8511 --- /dev/null +++ b/backend/app/api/recurring.go @@ -0,0 +1,150 @@ +package main + +import ( + "context" + "fmt" + "github.com/hay-kot/httpkit/graceful" + "github.com/sysadminsmedia/homebox/backend/internal/sys/config" + "net/http" + "time" + + "github.com/google/uuid" + "github.com/rs/zerolog/log" + "github.com/sysadminsmedia/homebox/backend/pkgs/utils" + "gocloud.dev/pubsub" +) + +func registerRecurringTasks(app *app, cfg *config.Config, runner *graceful.Runner) { + runner.AddFunc("eventbus", app.bus.Run) + + runner.AddFunc("seed_database", func(ctx context.Context) error { + if cfg.Demo { + log.Info().Msg("Running in demo mode, creating demo data") + err := app.SetupDemo() + if err != nil { + log.Fatal().Msg(err.Error()) + } + } + return nil + }) + + runner.AddPlugin(NewTask("purge-tokens", 24*time.Hour, func(ctx context.Context) { + _, err := app.repos.AuthTokens.PurgeExpiredTokens(ctx) + if err != nil { + log.Error().Err(err).Msg("failed to purge expired tokens") + } + })) + + runner.AddPlugin(NewTask("purge-invitations", 24*time.Hour, func(ctx context.Context) { + _, err := app.repos.Groups.InvitationPurge(ctx) + if err != nil { + log.Error().Err(err).Msg("failed to purge expired invitations") + } + })) + + runner.AddPlugin(NewTask("send-notifications", time.Hour, func(ctx context.Context) { + now := time.Now() + if now.Hour() == 8 { + fmt.Println("run notifiers") + err := app.services.BackgroundService.SendNotifiersToday(context.Background()) + if err != nil { + log.Error().Err(err).Msg("failed to send notifiers") + } + } + })) + + if cfg.Thumbnail.Enabled { + runner.AddFunc("create-thumbnails-subscription", func(ctx context.Context) error { + pubsubString, err := utils.GenerateSubPubConn(cfg.Database.PubSubConnString, "thumbnails") + if err != nil { + log.Error().Err(err).Msg("failed to generate pubsub connection string") + return err + } + topic, err := pubsub.OpenTopic(ctx, pubsubString) + if err != nil { + return err + } + defer func(topic *pubsub.Topic, ctx context.Context) { + err := topic.Shutdown(ctx) + if err != nil { + log.Err(err).Msg("fail to shutdown pubsub topic") + } + }(topic, ctx) + + subscription, err := 
pubsub.OpenSubscription(ctx, pubsubString) + if err != nil { + log.Err(err).Msg("failed to open pubsub topic") + return err + } + defer func(topic *pubsub.Subscription, ctx context.Context) { + err := topic.Shutdown(ctx) + if err != nil { + log.Err(err).Msg("fail to shutdown pubsub topic") + } + }(subscription, ctx) + + for { + select { + case <-ctx.Done(): + return ctx.Err() + default: + msg, err := subscription.Receive(ctx) + log.Debug().Msg("received thumbnail generation request from pubsub topic") + if err != nil { + log.Err(err).Msg("failed to receive message from pubsub topic") + continue + } + if msg == nil { + log.Warn().Msg("received nil message from pubsub topic") + continue + } + groupId, err := uuid.Parse(msg.Metadata["group_id"]) + if err != nil { + log.Error().Err(err).Str("group_id", msg.Metadata["group_id"]).Msg("failed to parse group ID from message metadata") + } + attachmentId, err := uuid.Parse(msg.Metadata["attachment_id"]) + if err != nil { + log.Error().Err(err).Str("attachment_id", msg.Metadata["attachment_id"]).Msg("failed to parse attachment ID from message metadata") + } + err = app.repos.Attachments.CreateThumbnail(ctx, groupId, attachmentId, msg.Metadata["title"], msg.Metadata["path"]) + if err != nil { + log.Err(err).Msg("failed to create thumbnail") + } + msg.Ack() + } + } + }) + } + + if cfg.Options.GithubReleaseCheck { + runner.AddPlugin(NewTask("get-latest-github-release", time.Hour, func(ctx context.Context) { + log.Debug().Msg("running get latest github release") + err := app.services.BackgroundService.GetLatestGithubRelease(context.Background()) + if err != nil { + log.Error().Err(err).Msg("failed to get latest github release") + } + })) + } + + if cfg.Debug.Enabled { + runner.AddFunc("debug", func(ctx context.Context) error { + debugserver := http.Server{ + Addr: fmt.Sprintf("%s:%s", cfg.Web.Host, cfg.Debug.Port), + Handler: app.debugRouter(), + ReadTimeout: cfg.Web.ReadTimeout, + WriteTimeout: cfg.Web.WriteTimeout, + IdleTimeout: cfg.Web.IdleTimeout, + } + + go func() { + <-ctx.Done() + _ = debugserver.Shutdown(context.Background()) + }() + + log.Info().Msgf("Debug server is running on %s:%s", cfg.Web.Host, cfg.Debug.Port) + return debugserver.ListenAndServe() + }) + // Print the configuration to the console + cfg.Print() + } +} diff --git a/backend/app/api/routes.go b/backend/app/api/routes.go index af92f54d..74f8595f 100644 --- a/backend/app/api/routes.go +++ b/backend/app/api/routes.go @@ -102,6 +102,7 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR r.Post("/actions/zero-item-time-fields", chain.ToHandlerFunc(v1Ctrl.HandleItemDateZeroOut(), userMW...)) r.Post("/actions/ensure-import-refs", chain.ToHandlerFunc(v1Ctrl.HandleEnsureImportRefs(), userMW...)) r.Post("/actions/set-primary-photos", chain.ToHandlerFunc(v1Ctrl.HandleSetPrimaryPhotos(), userMW...)) + r.Post("/actions/create-missing-thumbnails", chain.ToHandlerFunc(v1Ctrl.HandleCreateMissingThumbnails(), userMW...)) r.Get("/locations", chain.ToHandlerFunc(v1Ctrl.HandleLocationGetAll(), userMW...)) r.Post("/locations", chain.ToHandlerFunc(v1Ctrl.HandleLocationCreate(), userMW...)) @@ -156,6 +157,8 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR a.mwRoles(RoleModeOr, authroles.RoleUser.String(), authroles.RoleAttachments.String()), } + r.Get("/products/search-from-barcode", chain.ToHandlerFunc(v1Ctrl.HandleProductSearchFromBarcode(a.conf.Barcode), userMW...)) + r.Get("/qrcode", 
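The create-thumbnails-subscription worker above is a long-running consumer built on gocloud.dev/pubsub: it opens a subscription, blocks in Receive, reads the group and attachment IDs back out of the message metadata, and acknowledges each message once the thumbnail request has been handled. A minimal end-to-end sketch of that publish/receive pattern using the in-memory driver follows; the `mem://thumbnails` URL and the UUIDs are placeholders, since the real worker derives its connection string from cfg.Database.PubSubConnString via utils.GenerateSubPubConn.

```go
package main

import (
	"context"
	"log"

	"gocloud.dev/pubsub"
	_ "gocloud.dev/pubsub/mempubsub" // in-memory driver, for illustration only
)

func main() {
	ctx := context.Background()

	// Both sides open the same URL; "mem://thumbnails" stands in for the
	// connection string the application derives from its configuration.
	topic, err := pubsub.OpenTopic(ctx, "mem://thumbnails")
	if err != nil {
		log.Fatal(err)
	}
	defer topic.Shutdown(ctx)

	sub, err := pubsub.OpenSubscription(ctx, "mem://thumbnails")
	if err != nil {
		log.Fatal(err)
	}
	defer sub.Shutdown(ctx)

	// Producer side: request a thumbnail by publishing the IDs as metadata,
	// mirroring the fields the worker reads back out.
	err = topic.Send(ctx, &pubsub.Message{
		Body: []byte("thumbnail request"),
		Metadata: map[string]string{
			"group_id":      "11111111-1111-1111-1111-111111111111", // hypothetical IDs
			"attachment_id": "22222222-2222-2222-2222-222222222222",
			"title":         "receipt.jpg",
			"path":          "attachments/receipt.jpg",
		},
	})
	if err != nil {
		log.Fatal(err)
	}

	// Consumer side: receive one message, act on it, then acknowledge it.
	msg, err := sub.Receive(ctx)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("would create thumbnail for attachment %s in group %s",
		msg.Metadata["attachment_id"], msg.Metadata["group_id"])
	msg.Ack()
}
```

Receive honors context cancellation and returns an error once the context is done, which is how the graceful runner is able to shut the worker down.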
chain.ToHandlerFunc(v1Ctrl.HandleGenerateQRCode(), assetMW...)) r.Get( "/items/{id}/attachments/{attachment_id}", diff --git a/backend/app/api/setup.go b/backend/app/api/setup.go new file mode 100644 index 00000000..ecc7bad0 --- /dev/null +++ b/backend/app/api/setup.go @@ -0,0 +1,94 @@ +package main + +import ( + "bytes" + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/rs/zerolog/log" + "github.com/sysadminsmedia/homebox/backend/internal/core/currencies" + "github.com/sysadminsmedia/homebox/backend/internal/sys/config" +) + +// setupStorageDir handles the creation and validation of the storage directory. +func setupStorageDir(cfg *config.Config) { + if strings.HasPrefix(cfg.Storage.ConnString, "file:///./") { + raw := strings.TrimPrefix(cfg.Storage.ConnString, "file:///./") + clean := filepath.Clean(raw) + absBase, err := filepath.Abs(clean) + if err != nil { + log.Fatal().Err(err).Msg("failed to get absolute path for storage connection string") + } + absBase = strings.ReplaceAll(absBase, "\\", "/") + storageDir := filepath.Join(absBase, cfg.Storage.PrefixPath) + storageDir = strings.ReplaceAll(storageDir, "\\", "/") + if !strings.HasPrefix(storageDir, absBase+"/") && storageDir != absBase { + log.Fatal().Str("path", storageDir).Msg("invalid storage path: you tried to use a prefix that is not a subdirectory of the base path") + } + if err := os.MkdirAll(storageDir, 0o750); err != nil { + log.Fatal().Err(err).Msg("failed to create data directory") + } + } +} + +// setupDatabaseURL returns the database URL and ensures any required directories exist. +func setupDatabaseURL(cfg *config.Config) string { + databaseURL := "" + switch strings.ToLower(cfg.Database.Driver) { + case "sqlite3": + databaseURL = cfg.Database.SqlitePath + dbFilePath := strings.Split(cfg.Database.SqlitePath, "?")[0] + dbDir := filepath.Dir(dbFilePath) + if err := os.MkdirAll(dbDir, 0o755); err != nil { + log.Fatal().Err(err).Str("path", dbDir).Msg("failed to create SQLite database directory") + } + case "postgres": + databaseURL = fmt.Sprintf("host=%s port=%s dbname=%s sslmode=%s", cfg.Database.Host, cfg.Database.Port, cfg.Database.Database, cfg.Database.SslMode) + if cfg.Database.Username != "" { + databaseURL += fmt.Sprintf(" user=%s", cfg.Database.Username) + } + if cfg.Database.Password != "" { + databaseURL += fmt.Sprintf(" password=%s", cfg.Database.Password) + } + if cfg.Database.SslRootCert != "" { + if _, err := os.Stat(cfg.Database.SslRootCert); err != nil || !os.IsNotExist(err) { + log.Fatal().Err(err).Str("path", cfg.Database.SslRootCert).Msg("SSL root certificate file does not accessible") + } + databaseURL += fmt.Sprintf(" sslrootcert=%s", cfg.Database.SslRootCert) + } + if cfg.Database.SslCert != "" { + if _, err := os.Stat(cfg.Database.SslCert); err != nil || !os.IsNotExist(err) { + log.Fatal().Err(err).Str("path", cfg.Database.SslCert).Msg("SSL certificate file does not accessible") + } + databaseURL += fmt.Sprintf(" sslcert=%s", cfg.Database.SslCert) + } + if cfg.Database.SslKey != "" { + if _, err := os.Stat(cfg.Database.SslKey); err != nil || !os.IsNotExist(err) { + log.Fatal().Err(err).Str("path", cfg.Database.SslKey).Msg("SSL key file does not accessible") + } + databaseURL += fmt.Sprintf(" sslkey=%s", cfg.Database.SslKey) + } + default: + log.Fatal().Str("driver", cfg.Database.Driver).Msg("unsupported database driver") + } + return databaseURL +} + +// loadCurrencies loads currency data from config if provided. 
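For Postgres, setupDatabaseURL above assembles a libpq-style keyword/value connection string, appending the optional user, password and SSL settings only when they are configured. A small sketch of the same pattern with placeholder values; the host, credentials and database name below are illustrative, not project defaults.

```go
package main

import (
	"fmt"
	"strings"
)

// buildPostgresDSN mirrors the conditional string building in
// setupDatabaseURL: required keywords first, optional ones only when set.
func buildPostgresDSN(host, port, dbname, sslmode, user, password string) string {
	parts := []string{
		fmt.Sprintf("host=%s", host),
		fmt.Sprintf("port=%s", port),
		fmt.Sprintf("dbname=%s", dbname),
		fmt.Sprintf("sslmode=%s", sslmode),
	}
	if user != "" {
		parts = append(parts, fmt.Sprintf("user=%s", user))
	}
	if password != "" {
		parts = append(parts, fmt.Sprintf("password=%s", password))
	}
	return strings.Join(parts, " ")
}

func main() {
	// Prints: host=localhost port=5432 dbname=homebox sslmode=disable user=homebox password=secret
	fmt.Println(buildPostgresDSN("localhost", "5432", "homebox", "disable", "homebox", "secret"))
}
```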
+func loadCurrencies(cfg *config.Config) ([]currencies.CollectorFunc, error) { + collectFuncs := []currencies.CollectorFunc{ + currencies.CollectDefaults(), + } + if cfg.Options.CurrencyConfig != "" { + log.Info().Str("path", cfg.Options.CurrencyConfig).Msg("loading currency config file") + content, err := os.ReadFile(cfg.Options.CurrencyConfig) + if err != nil { + log.Error().Err(err).Str("path", cfg.Options.CurrencyConfig).Msg("failed to read currency config file") + return nil, err + } + collectFuncs = append(collectFuncs, currencies.CollectJSON(bytes.NewReader(content))) + } + return collectFuncs, nil +} diff --git a/backend/app/api/static/docs/docs.go b/backend/app/api/static/docs/docs.go index 7dd6f41c..569e8532 100644 --- a/backend/app/api/static/docs/docs.go +++ b/backend/app/api/static/docs/docs.go @@ -18,6 +18,31 @@ const docTemplate = `{ "host": "{{.Host}}", "basePath": "{{.BasePath}}", "paths": { + "/v1/actions/create-missing-thumbnails": { + "post": { + "security": [ + { + "Bearer": [] + } + ], + "description": "Creates thumbnails for items that are missing them", + "produces": [ + "application/json" + ], + "tags": [ + "Actions" + ], + "summary": "Create Missing Thumbnails", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/v1.ActionAmountResult" + } + } + } + } + }, "/v1/actions/ensure-asset-ids": { "post": { "security": [ @@ -780,8 +805,7 @@ const docTemplate = `{ "type": "string", "description": "Type of file", "name": "type", - "in": "formData", - "required": true + "in": "formData" }, { "type": "boolean", @@ -1804,6 +1828,41 @@ const docTemplate = `{ } } }, + "/v1/products/search-from-barcode": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Items" + ], + "summary": "Search EAN from Barcode", + "parameters": [ + { + "type": "string", + "description": "barcode to be searched", + "name": "data", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/repo.BarcodeProduct" + } + } + } + } + } + }, "/v1/qrcode": { "get": { "security": [ @@ -2114,6 +2173,42 @@ const docTemplate = `{ } }, "definitions": { + "attachment.Type": { + "type": "string", + "enum": [ + "attachment", + "photo", + "manual", + "warranty", + "attachment", + "receipt", + "thumbnail" + ], + "x-enum-varnames": [ + "DefaultType", + "TypePhoto", + "TypeManual", + "TypeWarranty", + "TypeAttachment", + "TypeReceipt", + "TypeThumbnail" + ] + }, + "authroles.Role": { + "type": "string", + "enum": [ + "user", + "admin", + "user", + "attachments" + ], + "x-enum-varnames": [ + "DefaultRole", + "RoleAdmin", + "RoleUser", + "RoleAttachments" + ] + }, "currencies.Currency": { "type": "object", "properties": { @@ -2131,6 +2226,926 @@ const docTemplate = `{ } } }, + "ent.Attachment": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the AttachmentQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.AttachmentEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "mime_type": { + "description": "MimeType holds the value of the \"mime_type\" field.", + "type": "string" + }, + "path": { + "description": "Path holds the value of the \"path\" 
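Among the regenerated Swagger paths above is GET /v1/products/search-from-barcode, which takes the scanned code in the data query parameter and returns an array of repo.BarcodeProduct results under Bearer authentication. A hedged client sketch against those definitions follows; the base URL, port, token and sample EAN are placeholders, and the struct mirrors only a few of the documented fields.

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"net/url"
)

// barcodeProduct covers a subset of the repo.BarcodeProduct fields from the
// generated Swagger definitions.
type barcodeProduct struct {
	Barcode          string `json:"barcode"`
	Manufacturer     string `json:"manufacturer"`
	ModelNumber      string `json:"modelNumber"`
	SearchEngineName string `json:"search_engine_name"`
}

func main() {
	base := "http://localhost:7745/api" // placeholder address
	token := "Bearer <api-token>"       // placeholder token

	u := base + "/v1/products/search-from-barcode?data=" + url.QueryEscape("4006381333931")
	req, err := http.NewRequest(http.MethodGet, u, nil)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Authorization", token)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var products []barcodeProduct
	if err := json.NewDecoder(resp.Body).Decode(&products); err != nil {
		log.Fatal(err)
	}
	for _, p := range products {
		fmt.Printf("%s: %s %s (via %s)\n", p.Barcode, p.Manufacturer, p.ModelNumber, p.SearchEngineName)
	}
}
```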
field.", + "type": "string" + }, + "primary": { + "description": "Primary holds the value of the \"primary\" field.", + "type": "boolean" + }, + "title": { + "description": "Title holds the value of the \"title\" field.", + "type": "string" + }, + "type": { + "description": "Type holds the value of the \"type\" field.", + "allOf": [ + { + "$ref": "#/definitions/attachment.Type" + } + ] + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.AttachmentEdges": { + "type": "object", + "properties": { + "item": { + "description": "Item holds the value of the item edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Item" + } + ] + }, + "thumbnail": { + "description": "Thumbnail holds the value of the thumbnail edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Attachment" + } + ] + } + } + }, + "ent.AuthRoles": { + "type": "object", + "properties": { + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the AuthRolesQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.AuthRolesEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "integer" + }, + "role": { + "description": "Role holds the value of the \"role\" field.", + "allOf": [ + { + "$ref": "#/definitions/authroles.Role" + } + ] + } + } + }, + "ent.AuthRolesEdges": { + "type": "object", + "properties": { + "token": { + "description": "Token holds the value of the token edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.AuthTokens" + } + ] + } + } + }, + "ent.AuthTokens": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the AuthTokensQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.AuthTokensEdges" + } + ] + }, + "expires_at": { + "description": "ExpiresAt holds the value of the \"expires_at\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "token": { + "description": "Token holds the value of the \"token\" field.", + "type": "array", + "items": { + "type": "integer" + } + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.AuthTokensEdges": { + "type": "object", + "properties": { + "roles": { + "description": "Roles holds the value of the roles edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.AuthRoles" + } + ] + }, + "user": { + "description": "User holds the value of the user edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.User" + } + ] + } + } + }, + "ent.Group": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "currency": { + "description": "Currency holds the value of the \"currency\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the GroupQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.GroupEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": 
"string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.GroupEdges": { + "type": "object", + "properties": { + "invitation_tokens": { + "description": "InvitationTokens holds the value of the invitation_tokens edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.GroupInvitationToken" + } + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + }, + "labels": { + "description": "Labels holds the value of the labels edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Label" + } + }, + "locations": { + "description": "Locations holds the value of the locations edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Location" + } + }, + "notifiers": { + "description": "Notifiers holds the value of the notifiers edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Notifier" + } + }, + "users": { + "description": "Users holds the value of the users edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.User" + } + } + } + }, + "ent.GroupInvitationToken": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the GroupInvitationTokenQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.GroupInvitationTokenEdges" + } + ] + }, + "expires_at": { + "description": "ExpiresAt holds the value of the \"expires_at\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "token": { + "description": "Token holds the value of the \"token\" field.", + "type": "array", + "items": { + "type": "integer" + } + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + }, + "uses": { + "description": "Uses holds the value of the \"uses\" field.", + "type": "integer" + } + } + }, + "ent.GroupInvitationTokenEdges": { + "type": "object", + "properties": { + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + } + } + }, + "ent.Item": { + "type": "object", + "properties": { + "archived": { + "description": "Archived holds the value of the \"archived\" field.", + "type": "boolean" + }, + "asset_id": { + "description": "AssetID holds the value of the \"asset_id\" field.", + "type": "integer" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the ItemQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.ItemEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "import_ref": { + "description": "ImportRef holds the value of the \"import_ref\" field.", + "type": "string" + }, + "insured": { + "description": "Insured holds the value of the \"insured\" field.", + "type": "boolean" + }, + "lifetime_warranty": { + "description": "LifetimeWarranty holds the 
value of the \"lifetime_warranty\" field.", + "type": "boolean" + }, + "manufacturer": { + "description": "Manufacturer holds the value of the \"manufacturer\" field.", + "type": "string" + }, + "model_number": { + "description": "ModelNumber holds the value of the \"model_number\" field.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "notes": { + "description": "Notes holds the value of the \"notes\" field.", + "type": "string" + }, + "purchase_from": { + "description": "PurchaseFrom holds the value of the \"purchase_from\" field.", + "type": "string" + }, + "purchase_price": { + "description": "PurchasePrice holds the value of the \"purchase_price\" field.", + "type": "number" + }, + "purchase_time": { + "description": "PurchaseTime holds the value of the \"purchase_time\" field.", + "type": "string" + }, + "quantity": { + "description": "Quantity holds the value of the \"quantity\" field.", + "type": "integer" + }, + "serial_number": { + "description": "SerialNumber holds the value of the \"serial_number\" field.", + "type": "string" + }, + "sold_notes": { + "description": "SoldNotes holds the value of the \"sold_notes\" field.", + "type": "string" + }, + "sold_price": { + "description": "SoldPrice holds the value of the \"sold_price\" field.", + "type": "number" + }, + "sold_time": { + "description": "SoldTime holds the value of the \"sold_time\" field.", + "type": "string" + }, + "sold_to": { + "description": "SoldTo holds the value of the \"sold_to\" field.", + "type": "string" + }, + "sync_child_items_locations": { + "description": "SyncChildItemsLocations holds the value of the \"sync_child_items_locations\" field.", + "type": "boolean" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + }, + "warranty_details": { + "description": "WarrantyDetails holds the value of the \"warranty_details\" field.", + "type": "string" + }, + "warranty_expires": { + "description": "WarrantyExpires holds the value of the \"warranty_expires\" field.", + "type": "string" + } + } + }, + "ent.ItemEdges": { + "type": "object", + "properties": { + "attachments": { + "description": "Attachments holds the value of the attachments edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Attachment" + } + }, + "children": { + "description": "Children holds the value of the children edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + }, + "fields": { + "description": "Fields holds the value of the fields edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.ItemField" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "label": { + "description": "Label holds the value of the label edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Label" + } + }, + "location": { + "description": "Location holds the value of the location edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Location" + } + ] + }, + "maintenance_entries": { + "description": "MaintenanceEntries holds the value of the maintenance_entries edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.MaintenanceEntry" + } + }, + "parent": { + "description": "Parent holds the value of the parent edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Item" + } + ] + } + } + }, + "ent.ItemField": { + "type": "object", + 
"properties": { + "boolean_value": { + "description": "BooleanValue holds the value of the \"boolean_value\" field.", + "type": "boolean" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the ItemFieldQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.ItemFieldEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "number_value": { + "description": "NumberValue holds the value of the \"number_value\" field.", + "type": "integer" + }, + "text_value": { + "description": "TextValue holds the value of the \"text_value\" field.", + "type": "string" + }, + "time_value": { + "description": "TimeValue holds the value of the \"time_value\" field.", + "type": "string" + }, + "type": { + "description": "Type holds the value of the \"type\" field.", + "allOf": [ + { + "$ref": "#/definitions/itemfield.Type" + } + ] + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.ItemFieldEdges": { + "type": "object", + "properties": { + "item": { + "description": "Item holds the value of the item edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Item" + } + ] + } + } + }, + "ent.Label": { + "type": "object", + "properties": { + "color": { + "description": "Color holds the value of the \"color\" field.", + "type": "string" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the LabelQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.LabelEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.LabelEdges": { + "type": "object", + "properties": { + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + } + } + }, + "ent.Location": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the LocationQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.LocationEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": 
{ + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.LocationEdges": { + "type": "object", + "properties": { + "children": { + "description": "Children holds the value of the children edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Location" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + }, + "parent": { + "description": "Parent holds the value of the parent edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Location" + } + ] + } + } + }, + "ent.MaintenanceEntry": { + "type": "object", + "properties": { + "cost": { + "description": "Cost holds the value of the \"cost\" field.", + "type": "number" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "date": { + "description": "Date holds the value of the \"date\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the MaintenanceEntryQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.MaintenanceEntryEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "item_id": { + "description": "ItemID holds the value of the \"item_id\" field.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "scheduled_date": { + "description": "ScheduledDate holds the value of the \"scheduled_date\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.MaintenanceEntryEdges": { + "type": "object", + "properties": { + "item": { + "description": "Item holds the value of the item edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Item" + } + ] + } + } + }, + "ent.Notifier": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the NotifierQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.NotifierEdges" + } + ] + }, + "group_id": { + "description": "GroupID holds the value of the \"group_id\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "is_active": { + "description": "IsActive holds the value of the \"is_active\" field.", + "type": "boolean" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + }, + "user_id": { + "description": "UserID holds the value of the \"user_id\" field.", + "type": "string" + } + } + }, + "ent.NotifierEdges": { + "type": "object", + "properties": { + "group": { + "description": "Group 
holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "user": { + "description": "User holds the value of the user edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.User" + } + ] + } + } + }, + "ent.User": { + "type": "object", + "properties": { + "activated_on": { + "description": "ActivatedOn holds the value of the \"activated_on\" field.", + "type": "string" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the UserQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.UserEdges" + } + ] + }, + "email": { + "description": "Email holds the value of the \"email\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "is_superuser": { + "description": "IsSuperuser holds the value of the \"is_superuser\" field.", + "type": "boolean" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "role": { + "description": "Role holds the value of the \"role\" field.", + "allOf": [ + { + "$ref": "#/definitions/user.Role" + } + ] + }, + "superuser": { + "description": "Superuser holds the value of the \"superuser\" field.", + "type": "boolean" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.UserEdges": { + "type": "object", + "properties": { + "auth_tokens": { + "description": "AuthTokens holds the value of the auth_tokens edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.AuthTokens" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "notifiers": { + "description": "Notifiers holds the value of the notifiers edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Notifier" + } + } + } + }, + "itemfield.Type": { + "type": "string", + "enum": [ + "text", + "number", + "boolean", + "time" + ], + "x-enum-varnames": [ + "TypeText", + "TypeNumber", + "TypeBoolean", + "TypeTime" + ] + }, + "repo.BarcodeProduct": { + "type": "object", + "properties": { + "barcode": { + "type": "string" + }, + "imageBase64": { + "type": "string" + }, + "imageURL": { + "type": "string" + }, + "item": { + "$ref": "#/definitions/repo.ItemCreate" + }, + "manufacturer": { + "type": "string" + }, + "modelNumber": { + "description": "Identifications", + "type": "string" + }, + "notes": { + "description": "Extras", + "type": "string" + }, + "search_engine_name": { + "type": "string" + } + } + }, "repo.Group": { "type": "object", "properties": { @@ -2194,12 +3209,18 @@ const docTemplate = `{ "id": { "type": "string" }, + "mimeType": { + "type": "string" + }, "path": { "type": "string" }, "primary": { "type": "boolean" }, + "thumbnail": { + "$ref": "#/definitions/ent.Attachment" + }, "title": { "type": "string" }, @@ -2314,7 +3335,9 @@ const docTemplate = `{ "type": "string" }, "imageId": { - "type": "string" + "type": "string", + "x-nullable": true, + "x-omitempty": true }, "insured": { "type": "boolean" @@ -2393,6 +3416,11 @@ const docTemplate = `{ "syncChildItemsLocations": { "type": "boolean" }, + "thumbnailId": { + "type": "string", + "x-nullable": true, + "x-omitempty": true + }, "updatedAt": { "type": "string" }, @@ -2451,7 +3479,9 @@ 
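In the item schemas above, imageId and thumbnailId are now annotated with x-nullable and x-omitempty, so generated clients are expected to treat them as optional rather than always-present strings. In Go terms, a matching response struct would use pointer fields with omitempty, roughly as sketched here (the struct and field selection are illustrative, not the project's generated client):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// itemSummary is an illustrative subset of the documented item fields; only
// the nullable IDs are shown.
type itemSummary struct {
	ID          string  `json:"id"`
	Name        string  `json:"name"`
	ImageID     *string `json:"imageId,omitempty"`
	ThumbnailID *string `json:"thumbnailId,omitempty"`
}

func main() {
	// With no image or thumbnail attached, both IDs are simply omitted.
	out, _ := json.Marshal(itemSummary{ID: "item-1", Name: "Cordless drill"})
	fmt.Println(string(out)) // {"id":"item-1","name":"Cordless drill"}
}
```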
const docTemplate = `{ "type": "string" }, "imageId": { - "type": "string" + "type": "string", + "x-nullable": true, + "x-omitempty": true }, "insured": { "type": "boolean" @@ -2485,6 +3515,11 @@ const docTemplate = `{ "description": "Sale details", "type": "string" }, + "thumbnailId": { + "type": "string", + "x-nullable": true, + "x-omitempty": true + }, "updatedAt": { "type": "string" } @@ -2633,6 +3668,9 @@ const docTemplate = `{ "repo.LabelOut": { "type": "object", "properties": { + "color": { + "type": "string" + }, "createdAt": { "type": "string" }, @@ -2653,6 +3691,9 @@ const docTemplate = `{ "repo.LabelSummary": { "type": "object", "properties": { + "color": { + "type": "string" + }, "createdAt": { "type": "string" }, @@ -3115,6 +4156,19 @@ const docTemplate = `{ } } }, + "user.Role": { + "type": "string", + "enum": [ + "user", + "user", + "owner" + ], + "x-enum-varnames": [ + "DefaultRole", + "RoleUser", + "RoleOwner" + ] + }, "v1.APISummary": { "type": "object", "properties": { diff --git a/backend/app/api/static/docs/swagger.json b/backend/app/api/static/docs/swagger.json index 92371e8f..1537b810 100644 --- a/backend/app/api/static/docs/swagger.json +++ b/backend/app/api/static/docs/swagger.json @@ -16,6 +16,31 @@ "host": "demo.homebox.software", "basePath": "/api", "paths": { + "/v1/actions/create-missing-thumbnails": { + "post": { + "security": [ + { + "Bearer": [] + } + ], + "description": "Creates thumbnails for items that are missing them", + "produces": [ + "application/json" + ], + "tags": [ + "Actions" + ], + "summary": "Create Missing Thumbnails", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/v1.ActionAmountResult" + } + } + } + } + }, "/v1/actions/ensure-asset-ids": { "post": { "security": [ @@ -778,8 +803,7 @@ "type": "string", "description": "Type of file", "name": "type", - "in": "formData", - "required": true + "in": "formData" }, { "type": "boolean", @@ -1802,6 +1826,41 @@ } } }, + "/v1/products/search-from-barcode": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Items" + ], + "summary": "Search EAN from Barcode", + "parameters": [ + { + "type": "string", + "description": "barcode to be searched", + "name": "data", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/repo.BarcodeProduct" + } + } + } + } + } + }, "/v1/qrcode": { "get": { "security": [ @@ -2112,6 +2171,42 @@ } }, "definitions": { + "attachment.Type": { + "type": "string", + "enum": [ + "attachment", + "photo", + "manual", + "warranty", + "attachment", + "receipt", + "thumbnail" + ], + "x-enum-varnames": [ + "DefaultType", + "TypePhoto", + "TypeManual", + "TypeWarranty", + "TypeAttachment", + "TypeReceipt", + "TypeThumbnail" + ] + }, + "authroles.Role": { + "type": "string", + "enum": [ + "user", + "admin", + "user", + "attachments" + ], + "x-enum-varnames": [ + "DefaultRole", + "RoleAdmin", + "RoleUser", + "RoleAttachments" + ] + }, "currencies.Currency": { "type": "object", "properties": { @@ -2129,6 +2224,926 @@ } } }, + "ent.Attachment": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the AttachmentQuery when eager-loading is set.", + "allOf": [ + { + "$ref": 
"#/definitions/ent.AttachmentEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "mime_type": { + "description": "MimeType holds the value of the \"mime_type\" field.", + "type": "string" + }, + "path": { + "description": "Path holds the value of the \"path\" field.", + "type": "string" + }, + "primary": { + "description": "Primary holds the value of the \"primary\" field.", + "type": "boolean" + }, + "title": { + "description": "Title holds the value of the \"title\" field.", + "type": "string" + }, + "type": { + "description": "Type holds the value of the \"type\" field.", + "allOf": [ + { + "$ref": "#/definitions/attachment.Type" + } + ] + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.AttachmentEdges": { + "type": "object", + "properties": { + "item": { + "description": "Item holds the value of the item edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Item" + } + ] + }, + "thumbnail": { + "description": "Thumbnail holds the value of the thumbnail edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Attachment" + } + ] + } + } + }, + "ent.AuthRoles": { + "type": "object", + "properties": { + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the AuthRolesQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.AuthRolesEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "integer" + }, + "role": { + "description": "Role holds the value of the \"role\" field.", + "allOf": [ + { + "$ref": "#/definitions/authroles.Role" + } + ] + } + } + }, + "ent.AuthRolesEdges": { + "type": "object", + "properties": { + "token": { + "description": "Token holds the value of the token edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.AuthTokens" + } + ] + } + } + }, + "ent.AuthTokens": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the AuthTokensQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.AuthTokensEdges" + } + ] + }, + "expires_at": { + "description": "ExpiresAt holds the value of the \"expires_at\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "token": { + "description": "Token holds the value of the \"token\" field.", + "type": "array", + "items": { + "type": "integer" + } + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.AuthTokensEdges": { + "type": "object", + "properties": { + "roles": { + "description": "Roles holds the value of the roles edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.AuthRoles" + } + ] + }, + "user": { + "description": "User holds the value of the user edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.User" + } + ] + } + } + }, + "ent.Group": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "currency": { + "description": "Currency holds the value of the \"currency\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe 
values are being populated by the GroupQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.GroupEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.GroupEdges": { + "type": "object", + "properties": { + "invitation_tokens": { + "description": "InvitationTokens holds the value of the invitation_tokens edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.GroupInvitationToken" + } + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + }, + "labels": { + "description": "Labels holds the value of the labels edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Label" + } + }, + "locations": { + "description": "Locations holds the value of the locations edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Location" + } + }, + "notifiers": { + "description": "Notifiers holds the value of the notifiers edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Notifier" + } + }, + "users": { + "description": "Users holds the value of the users edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.User" + } + } + } + }, + "ent.GroupInvitationToken": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the GroupInvitationTokenQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.GroupInvitationTokenEdges" + } + ] + }, + "expires_at": { + "description": "ExpiresAt holds the value of the \"expires_at\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "token": { + "description": "Token holds the value of the \"token\" field.", + "type": "array", + "items": { + "type": "integer" + } + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + }, + "uses": { + "description": "Uses holds the value of the \"uses\" field.", + "type": "integer" + } + } + }, + "ent.GroupInvitationTokenEdges": { + "type": "object", + "properties": { + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + } + } + }, + "ent.Item": { + "type": "object", + "properties": { + "archived": { + "description": "Archived holds the value of the \"archived\" field.", + "type": "boolean" + }, + "asset_id": { + "description": "AssetID holds the value of the \"asset_id\" field.", + "type": "integer" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the ItemQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.ItemEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + 
}, + "import_ref": { + "description": "ImportRef holds the value of the \"import_ref\" field.", + "type": "string" + }, + "insured": { + "description": "Insured holds the value of the \"insured\" field.", + "type": "boolean" + }, + "lifetime_warranty": { + "description": "LifetimeWarranty holds the value of the \"lifetime_warranty\" field.", + "type": "boolean" + }, + "manufacturer": { + "description": "Manufacturer holds the value of the \"manufacturer\" field.", + "type": "string" + }, + "model_number": { + "description": "ModelNumber holds the value of the \"model_number\" field.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "notes": { + "description": "Notes holds the value of the \"notes\" field.", + "type": "string" + }, + "purchase_from": { + "description": "PurchaseFrom holds the value of the \"purchase_from\" field.", + "type": "string" + }, + "purchase_price": { + "description": "PurchasePrice holds the value of the \"purchase_price\" field.", + "type": "number" + }, + "purchase_time": { + "description": "PurchaseTime holds the value of the \"purchase_time\" field.", + "type": "string" + }, + "quantity": { + "description": "Quantity holds the value of the \"quantity\" field.", + "type": "integer" + }, + "serial_number": { + "description": "SerialNumber holds the value of the \"serial_number\" field.", + "type": "string" + }, + "sold_notes": { + "description": "SoldNotes holds the value of the \"sold_notes\" field.", + "type": "string" + }, + "sold_price": { + "description": "SoldPrice holds the value of the \"sold_price\" field.", + "type": "number" + }, + "sold_time": { + "description": "SoldTime holds the value of the \"sold_time\" field.", + "type": "string" + }, + "sold_to": { + "description": "SoldTo holds the value of the \"sold_to\" field.", + "type": "string" + }, + "sync_child_items_locations": { + "description": "SyncChildItemsLocations holds the value of the \"sync_child_items_locations\" field.", + "type": "boolean" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + }, + "warranty_details": { + "description": "WarrantyDetails holds the value of the \"warranty_details\" field.", + "type": "string" + }, + "warranty_expires": { + "description": "WarrantyExpires holds the value of the \"warranty_expires\" field.", + "type": "string" + } + } + }, + "ent.ItemEdges": { + "type": "object", + "properties": { + "attachments": { + "description": "Attachments holds the value of the attachments edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Attachment" + } + }, + "children": { + "description": "Children holds the value of the children edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + }, + "fields": { + "description": "Fields holds the value of the fields edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.ItemField" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "label": { + "description": "Label holds the value of the label edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Label" + } + }, + "location": { + "description": "Location holds the value of the location edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Location" + } + ] + }, + "maintenance_entries": { + "description": "MaintenanceEntries holds the value of the maintenance_entries 
edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.MaintenanceEntry" + } + }, + "parent": { + "description": "Parent holds the value of the parent edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Item" + } + ] + } + } + }, + "ent.ItemField": { + "type": "object", + "properties": { + "boolean_value": { + "description": "BooleanValue holds the value of the \"boolean_value\" field.", + "type": "boolean" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the ItemFieldQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.ItemFieldEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "number_value": { + "description": "NumberValue holds the value of the \"number_value\" field.", + "type": "integer" + }, + "text_value": { + "description": "TextValue holds the value of the \"text_value\" field.", + "type": "string" + }, + "time_value": { + "description": "TimeValue holds the value of the \"time_value\" field.", + "type": "string" + }, + "type": { + "description": "Type holds the value of the \"type\" field.", + "allOf": [ + { + "$ref": "#/definitions/itemfield.Type" + } + ] + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.ItemFieldEdges": { + "type": "object", + "properties": { + "item": { + "description": "Item holds the value of the item edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Item" + } + ] + } + } + }, + "ent.Label": { + "type": "object", + "properties": { + "color": { + "description": "Color holds the value of the \"color\" field.", + "type": "string" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the LabelQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.LabelEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.LabelEdges": { + "type": "object", + "properties": { + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + } + } + }, + "ent.Location": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges 
holds the relations/edges for other nodes in the graph.\nThe values are being populated by the LocationQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.LocationEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.LocationEdges": { + "type": "object", + "properties": { + "children": { + "description": "Children holds the value of the children edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Location" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + }, + "parent": { + "description": "Parent holds the value of the parent edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Location" + } + ] + } + } + }, + "ent.MaintenanceEntry": { + "type": "object", + "properties": { + "cost": { + "description": "Cost holds the value of the \"cost\" field.", + "type": "number" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "date": { + "description": "Date holds the value of the \"date\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the MaintenanceEntryQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.MaintenanceEntryEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "item_id": { + "description": "ItemID holds the value of the \"item_id\" field.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "scheduled_date": { + "description": "ScheduledDate holds the value of the \"scheduled_date\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.MaintenanceEntryEdges": { + "type": "object", + "properties": { + "item": { + "description": "Item holds the value of the item edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Item" + } + ] + } + } + }, + "ent.Notifier": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the NotifierQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.NotifierEdges" + } + ] + }, + "group_id": { + "description": "GroupID holds the value of the \"group_id\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "is_active": { + "description": "IsActive holds the value of the \"is_active\" field.", + "type": "boolean" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": 
"UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + }, + "user_id": { + "description": "UserID holds the value of the \"user_id\" field.", + "type": "string" + } + } + }, + "ent.NotifierEdges": { + "type": "object", + "properties": { + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "user": { + "description": "User holds the value of the user edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.User" + } + ] + } + } + }, + "ent.User": { + "type": "object", + "properties": { + "activated_on": { + "description": "ActivatedOn holds the value of the \"activated_on\" field.", + "type": "string" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the UserQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.UserEdges" + } + ] + }, + "email": { + "description": "Email holds the value of the \"email\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "is_superuser": { + "description": "IsSuperuser holds the value of the \"is_superuser\" field.", + "type": "boolean" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "role": { + "description": "Role holds the value of the \"role\" field.", + "allOf": [ + { + "$ref": "#/definitions/user.Role" + } + ] + }, + "superuser": { + "description": "Superuser holds the value of the \"superuser\" field.", + "type": "boolean" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.UserEdges": { + "type": "object", + "properties": { + "auth_tokens": { + "description": "AuthTokens holds the value of the auth_tokens edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.AuthTokens" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "notifiers": { + "description": "Notifiers holds the value of the notifiers edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Notifier" + } + } + } + }, + "itemfield.Type": { + "type": "string", + "enum": [ + "text", + "number", + "boolean", + "time" + ], + "x-enum-varnames": [ + "TypeText", + "TypeNumber", + "TypeBoolean", + "TypeTime" + ] + }, + "repo.BarcodeProduct": { + "type": "object", + "properties": { + "barcode": { + "type": "string" + }, + "imageBase64": { + "type": "string" + }, + "imageURL": { + "type": "string" + }, + "item": { + "$ref": "#/definitions/repo.ItemCreate" + }, + "manufacturer": { + "type": "string" + }, + "modelNumber": { + "description": "Identifications", + "type": "string" + }, + "notes": { + "description": "Extras", + "type": "string" + }, + "search_engine_name": { + "type": "string" + } + } + }, "repo.Group": { "type": "object", "properties": { @@ -2192,12 +3207,18 @@ "id": { "type": "string" }, + "mimeType": { + "type": "string" + }, "path": { "type": "string" }, "primary": { "type": "boolean" }, + "thumbnail": { + "$ref": "#/definitions/ent.Attachment" + }, "title": { "type": "string" }, @@ -2312,7 +3333,9 @@ "type": "string" }, "imageId": { - "type": "string" + "type": "string", + "x-nullable": true, + "x-omitempty": true }, "insured": { "type": 
"boolean" @@ -2391,6 +3414,11 @@ "syncChildItemsLocations": { "type": "boolean" }, + "thumbnailId": { + "type": "string", + "x-nullable": true, + "x-omitempty": true + }, "updatedAt": { "type": "string" }, @@ -2449,7 +3477,9 @@ "type": "string" }, "imageId": { - "type": "string" + "type": "string", + "x-nullable": true, + "x-omitempty": true }, "insured": { "type": "boolean" @@ -2483,6 +3513,11 @@ "description": "Sale details", "type": "string" }, + "thumbnailId": { + "type": "string", + "x-nullable": true, + "x-omitempty": true + }, "updatedAt": { "type": "string" } @@ -2631,6 +3666,9 @@ "repo.LabelOut": { "type": "object", "properties": { + "color": { + "type": "string" + }, "createdAt": { "type": "string" }, @@ -2651,6 +3689,9 @@ "repo.LabelSummary": { "type": "object", "properties": { + "color": { + "type": "string" + }, "createdAt": { "type": "string" }, @@ -3113,6 +4154,19 @@ } } }, + "user.Role": { + "type": "string", + "enum": [ + "user", + "user", + "owner" + ], + "x-enum-varnames": [ + "DefaultRole", + "RoleUser", + "RoleOwner" + ] + }, "v1.APISummary": { "type": "object", "properties": { diff --git a/backend/app/api/static/docs/swagger.yaml b/backend/app/api/static/docs/swagger.yaml index d95e008f..f8a2713e 100644 --- a/backend/app/api/static/docs/swagger.yaml +++ b/backend/app/api/static/docs/swagger.yaml @@ -1,5 +1,35 @@ basePath: /api definitions: + attachment.Type: + enum: + - attachment + - photo + - manual + - warranty + - attachment + - receipt + - thumbnail + type: string + x-enum-varnames: + - DefaultType + - TypePhoto + - TypeManual + - TypeWarranty + - TypeAttachment + - TypeReceipt + - TypeThumbnail + authroles.Role: + enum: + - user + - admin + - user + - attachments + type: string + x-enum-varnames: + - DefaultRole + - RoleAdmin + - RoleUser + - RoleAttachments currencies.Currency: properties: code: @@ -11,6 +41,632 @@ definitions: symbol: type: string type: object + ent.Attachment: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.AttachmentEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the AttachmentQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + mime_type: + description: MimeType holds the value of the "mime_type" field. + type: string + path: + description: Path holds the value of the "path" field. + type: string + primary: + description: Primary holds the value of the "primary" field. + type: boolean + title: + description: Title holds the value of the "title" field. + type: string + type: + allOf: + - $ref: '#/definitions/attachment.Type' + description: Type holds the value of the "type" field. + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.AttachmentEdges: + properties: + item: + allOf: + - $ref: '#/definitions/ent.Item' + description: Item holds the value of the item edge. + thumbnail: + allOf: + - $ref: '#/definitions/ent.Attachment' + description: Thumbnail holds the value of the thumbnail edge. + type: object + ent.AuthRoles: + properties: + edges: + allOf: + - $ref: '#/definitions/ent.AuthRolesEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the AuthRolesQuery when eager-loading is set. + id: + description: ID of the ent. 
+ type: integer + role: + allOf: + - $ref: '#/definitions/authroles.Role' + description: Role holds the value of the "role" field. + type: object + ent.AuthRolesEdges: + properties: + token: + allOf: + - $ref: '#/definitions/ent.AuthTokens' + description: Token holds the value of the token edge. + type: object + ent.AuthTokens: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.AuthTokensEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the AuthTokensQuery when eager-loading is set. + expires_at: + description: ExpiresAt holds the value of the "expires_at" field. + type: string + id: + description: ID of the ent. + type: string + token: + description: Token holds the value of the "token" field. + items: + type: integer + type: array + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.AuthTokensEdges: + properties: + roles: + allOf: + - $ref: '#/definitions/ent.AuthRoles' + description: Roles holds the value of the roles edge. + user: + allOf: + - $ref: '#/definitions/ent.User' + description: User holds the value of the user edge. + type: object + ent.Group: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + currency: + description: Currency holds the value of the "currency" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.GroupEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the GroupQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + name: + description: Name holds the value of the "name" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.GroupEdges: + properties: + invitation_tokens: + description: InvitationTokens holds the value of the invitation_tokens edge. + items: + $ref: '#/definitions/ent.GroupInvitationToken' + type: array + items: + description: Items holds the value of the items edge. + items: + $ref: '#/definitions/ent.Item' + type: array + labels: + description: Labels holds the value of the labels edge. + items: + $ref: '#/definitions/ent.Label' + type: array + locations: + description: Locations holds the value of the locations edge. + items: + $ref: '#/definitions/ent.Location' + type: array + notifiers: + description: Notifiers holds the value of the notifiers edge. + items: + $ref: '#/definitions/ent.Notifier' + type: array + users: + description: Users holds the value of the users edge. + items: + $ref: '#/definitions/ent.User' + type: array + type: object + ent.GroupInvitationToken: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.GroupInvitationTokenEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the GroupInvitationTokenQuery when eager-loading is set. + expires_at: + description: ExpiresAt holds the value of the "expires_at" field. + type: string + id: + description: ID of the ent. + type: string + token: + description: Token holds the value of the "token" field. 
+ items: + type: integer + type: array + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + uses: + description: Uses holds the value of the "uses" field. + type: integer + type: object + ent.GroupInvitationTokenEdges: + properties: + group: + allOf: + - $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + type: object + ent.Item: + properties: + archived: + description: Archived holds the value of the "archived" field. + type: boolean + asset_id: + description: AssetID holds the value of the "asset_id" field. + type: integer + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.ItemEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the ItemQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + import_ref: + description: ImportRef holds the value of the "import_ref" field. + type: string + insured: + description: Insured holds the value of the "insured" field. + type: boolean + lifetime_warranty: + description: LifetimeWarranty holds the value of the "lifetime_warranty" field. + type: boolean + manufacturer: + description: Manufacturer holds the value of the "manufacturer" field. + type: string + model_number: + description: ModelNumber holds the value of the "model_number" field. + type: string + name: + description: Name holds the value of the "name" field. + type: string + notes: + description: Notes holds the value of the "notes" field. + type: string + purchase_from: + description: PurchaseFrom holds the value of the "purchase_from" field. + type: string + purchase_price: + description: PurchasePrice holds the value of the "purchase_price" field. + type: number + purchase_time: + description: PurchaseTime holds the value of the "purchase_time" field. + type: string + quantity: + description: Quantity holds the value of the "quantity" field. + type: integer + serial_number: + description: SerialNumber holds the value of the "serial_number" field. + type: string + sold_notes: + description: SoldNotes holds the value of the "sold_notes" field. + type: string + sold_price: + description: SoldPrice holds the value of the "sold_price" field. + type: number + sold_time: + description: SoldTime holds the value of the "sold_time" field. + type: string + sold_to: + description: SoldTo holds the value of the "sold_to" field. + type: string + sync_child_items_locations: + description: SyncChildItemsLocations holds the value of the "sync_child_items_locations" + field. + type: boolean + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + warranty_details: + description: WarrantyDetails holds the value of the "warranty_details" field. + type: string + warranty_expires: + description: WarrantyExpires holds the value of the "warranty_expires" field. + type: string + type: object + ent.ItemEdges: + properties: + attachments: + description: Attachments holds the value of the attachments edge. + items: + $ref: '#/definitions/ent.Attachment' + type: array + children: + description: Children holds the value of the children edge. + items: + $ref: '#/definitions/ent.Item' + type: array + fields: + description: Fields holds the value of the fields edge. 
+ items: + $ref: '#/definitions/ent.ItemField' + type: array + group: + allOf: + - $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + label: + description: Label holds the value of the label edge. + items: + $ref: '#/definitions/ent.Label' + type: array + location: + allOf: + - $ref: '#/definitions/ent.Location' + description: Location holds the value of the location edge. + maintenance_entries: + description: MaintenanceEntries holds the value of the maintenance_entries + edge. + items: + $ref: '#/definitions/ent.MaintenanceEntry' + type: array + parent: + allOf: + - $ref: '#/definitions/ent.Item' + description: Parent holds the value of the parent edge. + type: object + ent.ItemField: + properties: + boolean_value: + description: BooleanValue holds the value of the "boolean_value" field. + type: boolean + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.ItemFieldEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the ItemFieldQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + name: + description: Name holds the value of the "name" field. + type: string + number_value: + description: NumberValue holds the value of the "number_value" field. + type: integer + text_value: + description: TextValue holds the value of the "text_value" field. + type: string + time_value: + description: TimeValue holds the value of the "time_value" field. + type: string + type: + allOf: + - $ref: '#/definitions/itemfield.Type' + description: Type holds the value of the "type" field. + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.ItemFieldEdges: + properties: + item: + allOf: + - $ref: '#/definitions/ent.Item' + description: Item holds the value of the item edge. + type: object + ent.Label: + properties: + color: + description: Color holds the value of the "color" field. + type: string + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.LabelEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the LabelQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + name: + description: Name holds the value of the "name" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.LabelEdges: + properties: + group: + allOf: + - $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + items: + description: Items holds the value of the items edge. + items: + $ref: '#/definitions/ent.Item' + type: array + type: object + ent.Location: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.LocationEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. 
+ The values are being populated by the LocationQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + name: + description: Name holds the value of the "name" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.LocationEdges: + properties: + children: + description: Children holds the value of the children edge. + items: + $ref: '#/definitions/ent.Location' + type: array + group: + allOf: + - $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + items: + description: Items holds the value of the items edge. + items: + $ref: '#/definitions/ent.Item' + type: array + parent: + allOf: + - $ref: '#/definitions/ent.Location' + description: Parent holds the value of the parent edge. + type: object + ent.MaintenanceEntry: + properties: + cost: + description: Cost holds the value of the "cost" field. + type: number + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + date: + description: Date holds the value of the "date" field. + type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.MaintenanceEntryEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the MaintenanceEntryQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + item_id: + description: ItemID holds the value of the "item_id" field. + type: string + name: + description: Name holds the value of the "name" field. + type: string + scheduled_date: + description: ScheduledDate holds the value of the "scheduled_date" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.MaintenanceEntryEdges: + properties: + item: + allOf: + - $ref: '#/definitions/ent.Item' + description: Item holds the value of the item edge. + type: object + ent.Notifier: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.NotifierEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the NotifierQuery when eager-loading is set. + group_id: + description: GroupID holds the value of the "group_id" field. + type: string + id: + description: ID of the ent. + type: string + is_active: + description: IsActive holds the value of the "is_active" field. + type: boolean + name: + description: Name holds the value of the "name" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + user_id: + description: UserID holds the value of the "user_id" field. + type: string + type: object + ent.NotifierEdges: + properties: + group: + allOf: + - $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + user: + allOf: + - $ref: '#/definitions/ent.User' + description: User holds the value of the user edge. + type: object + ent.User: + properties: + activated_on: + description: ActivatedOn holds the value of the "activated_on" field. + type: string + created_at: + description: CreatedAt holds the value of the "created_at" field. 
+ type: string + edges: + allOf: + - $ref: '#/definitions/ent.UserEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the UserQuery when eager-loading is set. + email: + description: Email holds the value of the "email" field. + type: string + id: + description: ID of the ent. + type: string + is_superuser: + description: IsSuperuser holds the value of the "is_superuser" field. + type: boolean + name: + description: Name holds the value of the "name" field. + type: string + role: + allOf: + - $ref: '#/definitions/user.Role' + description: Role holds the value of the "role" field. + superuser: + description: Superuser holds the value of the "superuser" field. + type: boolean + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.UserEdges: + properties: + auth_tokens: + description: AuthTokens holds the value of the auth_tokens edge. + items: + $ref: '#/definitions/ent.AuthTokens' + type: array + group: + allOf: + - $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + notifiers: + description: Notifiers holds the value of the notifiers edge. + items: + $ref: '#/definitions/ent.Notifier' + type: array + type: object + itemfield.Type: + enum: + - text + - number + - boolean + - time + type: string + x-enum-varnames: + - TypeText + - TypeNumber + - TypeBoolean + - TypeTime + repo.BarcodeProduct: + properties: + barcode: + type: string + imageBase64: + type: string + imageURL: + type: string + item: + $ref: '#/definitions/repo.ItemCreate' + manufacturer: + type: string + modelNumber: + description: Identifications + type: string + notes: + description: Extras + type: string + search_engine_name: + type: string + type: object repo.Group: properties: createdAt: @@ -52,10 +708,14 @@ definitions: type: string id: type: string + mimeType: + type: string path: type: string primary: type: boolean + thumbnail: + $ref: '#/definitions/ent.Attachment' title: type: string type: @@ -134,6 +794,8 @@ definitions: type: string imageId: type: string + x-nullable: true + x-omitempty: true insured: type: boolean labels: @@ -185,6 +847,10 @@ definitions: type: string syncChildItemsLocations: type: boolean + thumbnailId: + type: string + x-nullable: true + x-omitempty: true updatedAt: type: string warrantyDetails: @@ -225,6 +891,8 @@ definitions: type: string imageId: type: string + x-nullable: true + x-omitempty: true insured: type: boolean labels: @@ -246,6 +914,10 @@ definitions: soldTime: description: Sale details type: string + thumbnailId: + type: string + x-nullable: true + x-omitempty: true updatedAt: type: string type: object @@ -351,6 +1023,8 @@ definitions: type: object repo.LabelOut: properties: + color: + type: string createdAt: type: string description: @@ -364,6 +1038,8 @@ definitions: type: object repo.LabelSummary: properties: + color: + type: string createdAt: type: string description: @@ -670,6 +1346,16 @@ definitions: token: type: string type: object + user.Role: + enum: + - user + - user + - owner + type: string + x-enum-varnames: + - DefaultRole + - RoleUser + - RoleOwner v1.APISummary: properties: allowRegistration: @@ -779,6 +1465,21 @@ info: title: Homebox API version: "1.0" paths: + /v1/actions/create-missing-thumbnails: + post: + description: Creates thumbnails for items that are missing them + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/v1.ActionAmountResult' 
+ security: + - Bearer: [] + summary: Create Missing Thumbnails + tags: + - Actions /v1/actions/ensure-asset-ids: post: description: Ensures all items in the database have an asset ID @@ -1174,7 +1875,6 @@ paths: - description: Type of file in: formData name: type - required: true type: string - description: Is this the primary attachment in: formData @@ -1881,6 +2581,27 @@ paths: summary: Test Notifier tags: - Notifiers + /v1/products/search-from-barcode: + get: + parameters: + - description: barcode to be searched + in: query + name: data + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + items: + $ref: '#/definitions/repo.BarcodeProduct' + type: array + security: + - Bearer: [] + summary: Search EAN from Barcode + tags: + - Items /v1/qrcode: get: parameters: diff --git a/backend/go.mod b/backend/go.mod index 3ed9eed9..43d3afef 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -1,14 +1,22 @@ module github.com/sysadminsmedia/homebox/backend -go 1.23.0 +go 1.24 + +toolchain go1.24.3 require ( entgo.io/ent v0.14.4 - github.com/ardanlabs/conf/v3 v3.7.2 + github.com/ardanlabs/conf/v3 v3.8.0 github.com/containrrr/shoutrrr v0.8.0 - github.com/go-chi/chi/v5 v5.2.1 + github.com/evanoberholster/imagemeta v0.3.1 + github.com/gen2brain/avif v0.4.4 + github.com/gen2brain/heic v0.4.5 + github.com/gen2brain/jpegxl v0.4.5 + github.com/gen2brain/webp v0.5.5 + github.com/go-chi/chi/v5 v5.2.2 github.com/go-playground/validator/v10 v10.26.0 github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1 + github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 github.com/google/uuid v1.6.0 github.com/gorilla/schema v1.4.1 github.com/hay-kot/httpkit v0.0.11 @@ -18,7 +26,7 @@ require ( github.com/pkg/errors v0.9.1 github.com/pressly/goose/v3 v3.24.3 github.com/rs/zerolog v1.34.0 - github.com/shirou/gopsutil/v4 v4.25.4 + github.com/shirou/gopsutil/v4 v4.25.5 github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e github.com/stretchr/testify v1.10.0 github.com/swaggo/http-swagger/v2 v2.0.2 @@ -26,69 +34,175 @@ require ( github.com/yeqown/go-qrcode/v2 v2.2.5 github.com/yeqown/go-qrcode/writer/standard v1.3.0 github.com/zeebo/blake3 v0.2.4 - golang.org/x/crypto v0.38.0 + go.balki.me/anyhttp v0.5.2 + gocloud.dev v0.41.0 + gocloud.dev/pubsub/kafkapubsub v0.41.0 + gocloud.dev/pubsub/natspubsub v0.41.0 + gocloud.dev/pubsub/rabbitpubsub v0.41.0 + golang.org/x/crypto v0.39.0 + golang.org/x/image v0.28.0 + golang.org/x/text v0.26.0 modernc.org/sqlite v1.37.1 ) require ( - ariga.io/atlas v0.32.0 // indirect - github.com/ebitengine/purego v0.8.2 // indirect - github.com/go-ole/go-ole v1.3.0 // indirect - github.com/klauspost/cpuid/v2 v2.2.10 // indirect - github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 // indirect - github.com/mfridman/interpolate v0.0.2 // indirect - github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect - github.com/sethvargo/go-retry v0.3.0 // indirect - github.com/tklauser/go-sysconf v0.3.15 // indirect - github.com/tklauser/numcpus v0.10.0 // indirect - github.com/yusufpapurcu/wmi v1.2.4 // indirect - github.com/zclconf/go-cty-yaml v1.1.0 // indirect - go.uber.org/multierr v1.11.0 // indirect -) - -require ( + ariga.io/atlas v0.31.1-0.20250212144724-069be8033e83 // indirect + cel.dev/expr v0.22.1 // indirect + cloud.google.com/go v0.120.0 // indirect + cloud.google.com/go/auth v0.15.0 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/compute/metadata v0.6.0 // 
indirect + cloud.google.com/go/iam v1.4.2 // indirect + cloud.google.com/go/monitoring v1.24.1 // indirect + cloud.google.com/go/pubsub v1.48.0 // indirect + cloud.google.com/go/storage v1.51.0 // indirect + github.com/Azure/azure-amqp-common-go/v3 v3.2.3 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.1 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 // indirect + github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/messaging/azservicebus v1.8.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.0 // indirect + github.com/Azure/go-amqp v1.4.0 // indirect + github.com/Azure/go-autorest v14.2.0+incompatible // indirect + github.com/Azure/go-autorest/autorest/to v0.4.1 // indirect + github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0 // indirect + github.com/IBM/sarama v1.45.1 // indirect github.com/KyleBanks/depth v1.2.1 // indirect - github.com/agext/levenshtein v1.2.3 // indirect + github.com/agext/levenshtein v1.2.1 // indirect + github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect + github.com/aws/aws-sdk-go v1.55.6 // indirect + github.com/aws/aws-sdk-go-v2 v1.36.3 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10 // indirect + github.com/aws/aws-sdk-go-v2/config v1.29.12 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.17.65 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 // indirect + github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.69 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.34 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.0 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.15 // indirect + github.com/aws/aws-sdk-go-v2/service/s3 v1.78.2 // indirect + github.com/aws/aws-sdk-go-v2/service/sns v1.34.2 // indirect + github.com/aws/aws-sdk-go-v2/service/sqs v1.38.3 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.25.2 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.0 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.33.17 // indirect + github.com/aws/smithy-go v1.22.3 // indirect github.com/bmatcuk/doublestar v1.3.4 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dustin/go-humanize v1.0.1 // indirect - github.com/fatih/color v1.18.0 // indirect + github.com/eapache/go-resiliency v1.7.0 // indirect + github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 // indirect + github.com/eapache/queue v1.1.0 // indirect + github.com/ebitengine/purego v0.8.4 // indirect + github.com/envoyproxy/go-control-plane/envoy 
v1.32.4 // indirect + github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect + github.com/fatih/color v1.15.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fogleman/gg v1.3.0 // indirect - github.com/gabriel-vasile/mimetype v1.4.9 // indirect - github.com/go-openapi/inflect v0.21.2 // indirect - github.com/go-openapi/jsonpointer v0.21.1 // indirect - github.com/go-openapi/jsonreference v0.21.0 // indirect - github.com/go-openapi/spec v0.21.0 // indirect - github.com/go-openapi/swag v0.23.1 // indirect + github.com/gabriel-vasile/mimetype v1.4.8 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect + github.com/go-openapi/inflect v0.19.0 // indirect + github.com/go-openapi/jsonpointer v0.19.5 // indirect + github.com/go-openapi/jsonreference v0.20.0 // indirect + github.com/go-openapi/spec v0.20.6 // indirect + github.com/go-openapi/swag v0.19.15 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 + github.com/golang-jwt/jwt/v5 v5.2.2 // indirect + github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/golang/snappy v1.0.0 // indirect github.com/google/go-cmp v0.7.0 // indirect - github.com/gorilla/websocket v1.5.3 // indirect - github.com/hashicorp/hcl/v2 v2.23.0 // indirect + github.com/google/s2a-go v0.1.9 // indirect + github.com/google/wire v0.6.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect + github.com/googleapis/gax-go/v2 v2.14.1 // indirect + github.com/gorilla/websocket v1.5.0 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/hashicorp/go-uuid v1.0.3 // indirect + github.com/hashicorp/hcl/v2 v2.13.0 // indirect + github.com/jcmturner/aescts/v2 v2.0.0 // indirect + github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect + github.com/jcmturner/gofork v1.7.6 // indirect + github.com/jcmturner/gokrb5/v8 v8.4.4 // indirect + github.com/jcmturner/rpc/v2 v2.0.3 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/josharian/intern v1.0.0 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/klauspost/cpuid/v2 v2.2.4 // indirect + github.com/kylelemons/godebug v1.1.0 // indirect github.com/leodido/go-urn v1.4.0 // indirect - github.com/mailru/easyjson v0.9.0 // indirect - github.com/mattn/go-colorable v0.1.14 // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect + github.com/mailru/easyjson v0.7.6 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect - github.com/mitchellh/go-wordwrap v1.0.1 // indirect + github.com/mfridman/interpolate v0.0.2 // indirect + github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect + github.com/nats-io/nats.go v1.40.1 // indirect + github.com/nats-io/nkeys v0.4.10 // indirect + github.com/nats-io/nuid v1.0.1 // indirect github.com/ncruces/go-strftime v0.1.9 // indirect + github.com/philhofer/fwd v1.1.2 // indirect + github.com/pierrec/lz4/v4 v4.1.22 // indirect + github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect + github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + 
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect + github.com/rabbitmq/amqp091-go v1.10.0 // indirect + github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect - github.com/swaggo/files/v2 v2.0.2 // indirect + github.com/sethvargo/go-retry v0.3.0 // indirect + github.com/swaggo/files/v2 v2.0.0 // indirect + github.com/tetratelabs/wazero v1.9.0 // indirect + github.com/tinylib/msgp v1.1.8 // indirect + github.com/tklauser/go-sysconf v0.3.12 // indirect + github.com/tklauser/numcpus v0.6.1 // indirect github.com/yeqown/reedsolomon v1.0.0 // indirect - github.com/zclconf/go-cty v1.16.2 // indirect + github.com/yusufpapurcu/wmi v1.2.4 // indirect + github.com/zclconf/go-cty v1.14.4 // indirect + github.com/zclconf/go-cty-yaml v1.1.0 // indirect + go.opencensus.io v0.24.0 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/contrib/detectors/gcp v1.35.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect + go.opentelemetry.io/otel v1.35.0 // indirect + go.opentelemetry.io/otel/metric v1.35.0 // indirect + go.opentelemetry.io/otel/sdk v1.35.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.35.0 // indirect + go.opentelemetry.io/otel/trace v1.35.0 // indirect + go.uber.org/multierr v1.11.0 // indirect golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6 // indirect - golang.org/x/image v0.27.0 - golang.org/x/mod v0.24.0 // indirect + golang.org/x/mod v0.25.0 // indirect golang.org/x/net v0.40.0 // indirect - golang.org/x/sync v0.14.0 // indirect + golang.org/x/oauth2 v0.28.0 // indirect + golang.org/x/sync v0.15.0 // indirect golang.org/x/sys v0.33.0 // indirect - golang.org/x/text v0.25.0 // indirect + golang.org/x/time v0.11.0 // indirect golang.org/x/tools v0.33.0 // indirect + golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect + google.golang.org/api v0.228.0 // indirect + google.golang.org/genproto v0.0.0-20250324211829-b45e905df463 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20250324211829-b45e905df463 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 // indirect + google.golang.org/grpc v1.71.0 // indirect + google.golang.org/protobuf v1.36.6 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect - modernc.org/libc v1.65.8 // indirect + modernc.org/libc v1.65.7 // indirect modernc.org/mathutil v1.7.1 // indirect modernc.org/memory v1.11.0 // indirect ) diff --git a/backend/go.sum b/backend/go.sum index 68fc2f03..6c9209c2 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -1,51 +1,217 @@ -ariga.io/atlas v0.32.0 h1:y+77nueMrExLiKlz1CcPKh/nU7VSlWfBbwCShsJyvCw= -ariga.io/atlas v0.32.0/go.mod h1:Oe1xWPuu5q9LzyrWfbZmEZxFYeu4BHTyzfjeW2aZp/w= +ariga.io/atlas v0.31.1-0.20250212144724-069be8033e83 h1:nX4HXncwIdvQ8/8sIUIf1nyCkK8qdBaHQ7EtzPpuiGE= +ariga.io/atlas v0.31.1-0.20250212144724-069be8033e83/go.mod h1:Oe1xWPuu5q9LzyrWfbZmEZxFYeu4BHTyzfjeW2aZp/w= +cel.dev/expr v0.22.1 h1:xoFEsNh972Yzey8N9TCPx2nDvMN7TMhQEzxLuj/iRrI= +cel.dev/expr v0.22.1/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.120.0 h1:wc6bgG9DHyKqF5/vQvX1CiZrtHnxJjBlKUyF9nP6meA= +cloud.google.com/go 
v0.120.0/go.mod h1:/beW32s8/pGRuj4IILWQNd4uuebeT4dkOhKmkfit64Q= +cloud.google.com/go/auth v0.15.0 h1:Ly0u4aA5vG/fsSsxu98qCQBemXtAtJf+95z9HK+cxps= +cloud.google.com/go/auth v0.15.0/go.mod h1:WJDGqZ1o9E9wKIL+IwStfyn/+s59zl4Bi+1KQNVXLZ8= +cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= +cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= +cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= +cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= +cloud.google.com/go/iam v1.4.2 h1:4AckGYAYsowXeHzsn/LCKWIwSWLkdb0eGjH8wWkd27Q= +cloud.google.com/go/iam v1.4.2/go.mod h1:REGlrt8vSlh4dfCJfSEcNjLGq75wW75c5aU3FLOYq34= +cloud.google.com/go/logging v1.13.0 h1:7j0HgAp0B94o1YRDqiqm26w4q1rDMH7XNRU34lJXHYc= +cloud.google.com/go/logging v1.13.0/go.mod h1:36CoKh6KA/M0PbhPKMq6/qety2DCAErbhXT62TuXALA= +cloud.google.com/go/longrunning v0.6.6 h1:XJNDo5MUfMM05xK3ewpbSdmt7R2Zw+aQEMbdQR65Rbw= +cloud.google.com/go/longrunning v0.6.6/go.mod h1:hyeGJUrPHcx0u2Uu1UFSoYZLn4lkMrccJig0t4FI7yw= +cloud.google.com/go/monitoring v1.24.1 h1:vKiypZVFD/5a3BbQMvI4gZdl8445ITzXFh257XBgrS0= +cloud.google.com/go/monitoring v1.24.1/go.mod h1:Z05d1/vn9NaujqY2voG6pVQXoJGbp+r3laV+LySt9K0= +cloud.google.com/go/pubsub v1.48.0 h1:ntFpQVrr10Wj/GXSOpxGmexGynldv/bFp25H0jy8aOs= +cloud.google.com/go/pubsub v1.48.0/go.mod h1:AAtyjyIT/+zaY1ERKFJbefOvkUxRDNp3nD6TdfdqUZk= +cloud.google.com/go/storage v1.51.0 h1:ZVZ11zCiD7b3k+cH5lQs/qcNaoSz3U9I0jgwVzqDlCw= +cloud.google.com/go/storage v1.51.0/go.mod h1:YEJfu/Ki3i5oHC/7jyTgsGZwdQ8P9hqMqvpi5kRKGgc= +cloud.google.com/go/trace v1.11.5 h1:CALS1loyxJMnRiCwZSpdf8ac7iCsjreMxFD2WGxzzHU= +cloud.google.com/go/trace v1.11.5/go.mod h1:TwblCcqNInriu5/qzaeYEIH7wzUcchSdeY2l5wL3Eec= entgo.io/ent v0.14.4 h1:/DhDraSLXIkBhyiVoJeSshr4ZYi7femzhj6/TckzZuI= entgo.io/ent v0.14.4/go.mod h1:aDPE/OziPEu8+OWbzy4UlvWmD2/kbRuWfK2A40hcxJM= +github.com/Azure/azure-amqp-common-go/v3 v3.2.3 h1:uDF62mbd9bypXWi19V1bN5NZEO84JqgmI5G73ibAmrk= +github.com/Azure/azure-amqp-common-go/v3 v3.2.3/go.mod h1:7rPmbSfszeovxGfc5fSAXE4ehlXQZHpMja2OtxC2Tas= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.1 h1:DSDNVxqkoXJiko6x8a90zidoYqnYYa6c1MTzDKzKkTo= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.1/go.mod h1:zGqV2R4Cr/k8Uye5w+dgQ06WJtEcbQG/8J7BB6hnCr4= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 h1:F0gBpfdPLGsw+nsgk6aqqkZS1jiixa5WwFe3fk/T3Ys= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2/go.mod h1:SqINnQ9lVVdRlyC8cd1lCI0SdX4n2paeABd2K8ggfnE= +github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2 h1:yz1bePFlP5Vws5+8ez6T3HWXPmwOK7Yvq8QxDBD3SKY= +github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2/go.mod h1:Pa9ZNPuoNu/GztvBSKk9J1cDJW6vk/n0zLtV4mgd8N8= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY= +github.com/Azure/azure-sdk-for-go/sdk/messaging/azservicebus v1.8.0 h1:JNgM3Tz592fUHU2vgwgvOgKxo5s9Ki0y2wicBeckn70= +github.com/Azure/azure-sdk-for-go/sdk/messaging/azservicebus v1.8.0/go.mod h1:6vUKmzY17h6dpn9ZLAhM4R/rcrltBeq52qZIkUR7Oro= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0 h1:PiSrjRPpkQNjrM8H0WwKMnZUdu1RGMtd/LdGKUrOo+c= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.6.0/go.mod 
h1:oDrbWx4ewMylP7xHivfgixbfGBT6APAwsSoHRKotnIc= +github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.0 h1:UXT0o77lXQrikd1kgwIPQOUect7EoR/+sbP4wQKdzxM= +github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.0/go.mod h1:cTvi54pg19DoT07ekoeMgE/taAwNtCShVeZqA+Iv2xI= +github.com/Azure/go-amqp v0.17.0/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg= +github.com/Azure/go-amqp v1.4.0 h1:Xj3caqi4comOF/L1Uc5iuBxR/pB6KumejC01YQOqOR4= +github.com/Azure/go-amqp v1.4.0/go.mod h1:vZAogwdrkbyK3Mla8m/CxSc/aKdnTZ4IbPxl51Y5WZE= +github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= +github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= +github.com/Azure/go-autorest/autorest v0.11.18/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA= +github.com/Azure/go-autorest/autorest/adal v0.9.13/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M= +github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= +github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= +github.com/Azure/go-autorest/autorest/to v0.4.1 h1:CxNHBqdzTr7rLtdrtb5CMjJcDut+WNGCVv7OmS5+lTc= +github.com/Azure/go-autorest/autorest/to v0.4.1/go.mod h1:EtaofgU4zmtvn1zT2ARsjRFdq9vXx0YWtmElwL+GZ9M= +github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= +github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= +github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1 h1:WJTmL004Abzc5wDB5VtZG2PJk5ndYDgVacGqfirKxjM= +github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1/go.mod h1:tCcJZ0uHAmvjsVYzEFivsRTN00oz5BEsRgQHu5JZ9WE= +github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs= +github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0 h1:ErKg/3iS1AKcTkf3yixlZ54f9U1rljCkQyEXWUnIUxc= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.27.0/go.mod h1:yAZHSGnqScoU556rBOVkwLze6WP5N+U11RHuWaGVxwY= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0 h1:fYE9p3esPxA/C0rQ0AHhP0drtPXDRhaWiwg1DPqO7IU= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.51.0/go.mod h1:BnBReJLvVYx2CS/UHOgVz2BXKXD9wsQPxZug20nZhd0= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.51.0 h1:OqVGm6Ei3x5+yZmSJG1Mh2NwHvpVmZ08CB5qJhT9Nuk= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.51.0/go.mod h1:SZiPHWGOOk3bl8tkevxkoiwPgsIl6CwrWcbwjfHZpdM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0 h1:6/0iUd0xrnX7qt+mLNRwg5c0PGv8wpE8K90ryANQwMI= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0/go.mod h1:otE2jQekW/PqXk1Awf5lmfokJx4uwuqcj1ab5SpGeW0= +github.com/IBM/sarama v1.45.1 h1:nY30XqYpqyXOXSNoe2XCgjj9jklGM1Ye94ierUb1jQ0= +github.com/IBM/sarama v1.45.1/go.mod 
h1:qifDhA3VWSrQ1TjSMyxDl3nYL3oX2C83u+G6L79sq4w= github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= -github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= -github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= +github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= +github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= -github.com/ardanlabs/conf/v3 v3.7.2 h1:s2VBuDJM6OQfR0erDuopiZ+dHUQVqGxZeLrTsls03dw= -github.com/ardanlabs/conf/v3 v3.7.2/go.mod h1:XlL9P0quWP4m1weOVFmlezabinbZLI05niDof/+Ochk= +github.com/ardanlabs/conf/v3 v3.8.0 h1:Mvv2wZJz8tIl705m5BU3ZRCP1V6TKY6qebA8i4sykrY= +github.com/ardanlabs/conf/v3 v3.8.0/go.mod h1:XlL9P0quWP4m1weOVFmlezabinbZLI05niDof/+Ochk= +github.com/aws/aws-sdk-go v1.55.6 h1:cSg4pvZ3m8dgYcgqB97MrcdjUmZ1BeMYKUxMMB89IPk= +github.com/aws/aws-sdk-go v1.55.6/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= +github.com/aws/aws-sdk-go-v2 v1.36.3 h1:mJoei2CxPutQVxaATCzDUjcZEjVRdpsiiXi2o38yqWM= +github.com/aws/aws-sdk-go-v2 v1.36.3/go.mod h1:LLXuLpgzEbD766Z5ECcRmi8AzSwfZItDtmABVkRLGzg= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10 h1:zAybnyUQXIZ5mok5Jqwlf58/TFE7uvd3IAsa1aF9cXs= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10/go.mod h1:qqvMj6gHLR/EXWZw4ZbqlPbQUyenf4h82UQUlKc+l14= +github.com/aws/aws-sdk-go-v2/config v1.29.12 h1:Y/2a+jLPrPbHpFkpAAYkVEtJmxORlXoo5k2g1fa2sUo= +github.com/aws/aws-sdk-go-v2/config v1.29.12/go.mod h1:xse1YTjmORlb/6fhkWi8qJh3cvZi4JoVNhc+NbJt4kI= +github.com/aws/aws-sdk-go-v2/credentials v1.17.65 h1:q+nV2yYegofO/SUXruT+pn4KxkxmaQ++1B/QedcKBFM= +github.com/aws/aws-sdk-go-v2/credentials v1.17.65/go.mod h1:4zyjAuGOdikpNYiSGpsGz8hLGmUzlY8pc8r9QQ/RXYQ= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 h1:x793wxmUWVDhshP8WW2mlnXuFrO4cOd3HLBroh1paFw= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30/go.mod h1:Jpne2tDnYiFascUEs2AWHJL9Yp7A5ZVy3TNyxaAjD6M= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.69 h1:6VFPH/Zi9xYFMJKPQOX5URYkQoXRWeJ7V/7Y6ZDYoms= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.69/go.mod h1:GJj8mmO6YT6EqgduWocwhMoxTLFitkhIrK+owzrYL2I= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 h1:ZK5jHhnrioRkUNOc+hOgQKlUL5JeC3S6JgLxtQ+Rm0Q= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34/go.mod h1:p4VfIceZokChbA9FzMbRGz5OV+lekcVtHlPKEO0gSZY= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 h1:SZwFm17ZUNNg5Np0ioo/gq8Mn6u9w19Mri8DnJ15Jf0= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34/go.mod h1:dFZsC0BLo346mvKQLWmoJxT+Sjp+qcVR1tRVHQGOH9Q= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.34 h1:ZNTqv4nIdE/DiBfUUfXcLZ/Spcuz+RjeziUtNJackkM= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.34/go.mod 
h1:zf7Vcd1ViW7cPqYWEHLHJkS50X0JS2IKz9Cgaj6ugrs= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 h1:eAh2A4b5IzM/lum78bZ590jy36+d/aFLgKF/4Vd1xPE= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3/go.mod h1:0yKJC/kb8sAnmlYa6Zs3QVYqaC8ug2AbnNChv5Ox3uA= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.0 h1:lguz0bmOoGzozP9XfRJR1QIayEYo+2vP/No3OfLF0pU= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.0/go.mod h1:iu6FSzgt+M2/x3Dk8zhycdIcHjEFb36IS8HVUVFoMg0= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 h1:dM9/92u2F1JbDaGooxTq18wmmFzbJRfXfVfy96/1CXM= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15/go.mod h1:SwFBy2vjtA0vZbjjaFtfN045boopadnoVPhu4Fv66vY= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.15 h1:moLQUoVq91LiqT1nbvzDukyqAlCv89ZmwaHw/ZFlFZg= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.15/go.mod h1:ZH34PJUc8ApjBIfgQCFvkWcUDBtl/WTD+uiYHjd8igA= +github.com/aws/aws-sdk-go-v2/service/s3 v1.78.2 h1:jIiopHEV22b4yQP2q36Y0OmwLbsxNWdWwfZRR5QRRO4= +github.com/aws/aws-sdk-go-v2/service/s3 v1.78.2/go.mod h1:U5SNqwhXB3Xe6F47kXvWihPl/ilGaEDe8HD/50Z9wxc= +github.com/aws/aws-sdk-go-v2/service/sns v1.34.2 h1:PajtbJ/5bEo6iUAIGMYnK8ljqg2F1h4mMCGh1acjN30= +github.com/aws/aws-sdk-go-v2/service/sns v1.34.2/go.mod h1:PJtxxMdj747j8DeZENRTTYAz/lx/pADn/U0k7YNNiUY= +github.com/aws/aws-sdk-go-v2/service/sqs v1.38.3 h1:j5BchjfDoS7K26vPdyJlyxBIIBGDflq3qjjJKBDlbcI= +github.com/aws/aws-sdk-go-v2/service/sqs v1.38.3/go.mod h1:Bar4MrRxeqdn6XIh8JGfiXuFRmyrrsZNTJotxEJmWW0= +github.com/aws/aws-sdk-go-v2/service/sso v1.25.2 h1:pdgODsAhGo4dvzC3JAG5Ce0PX8kWXrTZGx+jxADD+5E= +github.com/aws/aws-sdk-go-v2/service/sso v1.25.2/go.mod h1:qs4a9T5EMLl/Cajiw2TcbNt2UNo/Hqlyp+GiuG4CFDI= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.0 h1:90uX0veLKcdHVfvxhkWUQSCi5VabtwMLFutYiRke4oo= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.0/go.mod h1:MlYRNmYu/fGPoxBQVvBYr9nyr948aY/WLUvwBMBJubs= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.17 h1:PZV5W8yk4OtH1JAuhV2PXwwO9v5G5Aoj+eMCn4T+1Kc= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.17/go.mod h1:cQnB8CUnxbMU82JvlqjKR2HBOm3fe9pWorWBza6MBJ4= +github.com/aws/smithy-go v1.22.3 h1:Z//5NuZCSW6R4PhQ93hShNbyBbn8BWCmCVCt+Q8Io5k= +github.com/aws/smithy-go v1.22.3/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI= github.com/bmatcuk/doublestar v1.3.4 h1:gPypJ5xD31uhX6Tf54sDPUOBXTqKH4c9aPY66CyQrS0= github.com/bmatcuk/doublestar v1.3.4/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73lSE9e9bKV23aB1vxOsmZrkl3k= +github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/coder/websocket v1.8.13 h1:f3QZdXy7uGVz+4uCJy2nTZyM0yTBj8yANEHhqlXZ9FE= +github.com/coder/websocket v1.8.13/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs= github.com/containrrr/shoutrrr v0.8.0 h1:mfG2ATzIS7NR2Ec6XL+xyoHzN97H8WPjir8aYzJUSec= 
github.com/containrrr/shoutrrr v0.8.0/go.mod h1:ioyQAyu1LJY6sILuNyKaQaw+9Ttik5QePU8atnAdO2o= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= -github.com/ebitengine/purego v0.8.2 h1:jPPGWs2sZ1UgOSgD2bClL0MJIqu58nOmIcBuXr62z1I= -github.com/ebitengine/purego v0.8.2/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= -github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= -github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= +github.com/eapache/go-resiliency v1.7.0 h1:n3NRTnBn5N0Cbi/IeOHuQn9s2UwVUH7Ga0ZWcP+9JTA= +github.com/eapache/go-resiliency v1.7.0/go.mod h1:5yPzW0MIvSe0JDsv0v+DvcjEv2FyD6iZYSs1ZI+iQho= +github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 h1:Oy0F4ALJ04o5Qqpdz8XLIpNA3WM/iSIXqxtqo7UGVws= +github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3/go.mod h1:YvSRo5mw33fLEx1+DlK6L2VV43tJt5Eyel9n9XBcR+0= +github.com/eapache/queue v1.1.0 h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc= +github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= +github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw= +github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M= +github.com/envoyproxy/go-control-plane v0.13.4/go.mod h1:kDfuBlDVsSj2MjrLEtRWtHlsWIFcGyB2RMO44Dc5GZA= +github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= +github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0 h1:/G9QYbddjL25KvtKTv3an9lx6VBE2cnb8wp1vEGNYGI= +github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8= +github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= +github.com/evanoberholster/imagemeta v0.3.1 h1:E4GUjXcvlVMjP9joN25+bBNf3Al3MTTfMqCrDOCW+LE= 
+github.com/evanoberholster/imagemeta v0.3.1/go.mod h1:V0vtDJmjTqvwAYO8r+u33NRVIMXQb0qSqEfImoKEiXM= +github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= +github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fogleman/gg v1.3.0 h1:/7zJX8F6AaYQc57WQCyN9cAIz+4bCJGO9B+dyW29am8= github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= -github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY= -github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok= -github.com/go-chi/chi/v5 v5.2.1 h1:KOIHODQj58PmL80G2Eak4WdvUzjSJSm0vG72crDCqb8= -github.com/go-chi/chi/v5 v5.2.1/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops= -github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= -github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= +github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= +github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= +github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM= +github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8= +github.com/gen2brain/avif v0.4.4 h1:Ga/ss7qcWWQm2bxFpnjYjhJsNfZrWs5RsyklgFjKRSE= +github.com/gen2brain/avif v0.4.4/go.mod h1:/XCaJcjZraQwKVhpu9aEd9aLOssYOawLvhMBtmHVGqk= +github.com/gen2brain/heic v0.4.5 h1:Cq3hPu6wwlTJNv2t48ro3oWje54h82Q5pALeCBNgaSk= +github.com/gen2brain/heic v0.4.5/go.mod h1:ECnpqbqLu0qSje4KSNWUUDK47UPXPzl80T27GWGEL5I= +github.com/gen2brain/jpegxl v0.4.5 h1:TWpVEn5xkIfsswzkjHBArd0Cc9AE0tbjBSoa0jDsrbo= +github.com/gen2brain/jpegxl v0.4.5/go.mod h1:4kWYJ18xCEuO2vzocYdGpeqNJ990/Gjy3uLMg5TBN6I= +github.com/gen2brain/webp v0.5.5 h1:MvQR75yIPU/9nSqYT5h13k4URaJK3gf9tgz/ksRbyEg= +github.com/gen2brain/webp v0.5.5/go.mod h1:xOSMzp4aROt2KFW++9qcK/RBTOVC2S9tJG66ip/9Oc0= +github.com/go-chi/chi/v5 v5.2.2 h1:CMwsvRVTbXVytCk1Wd72Zy1LAsAh9GxMmSNWLHCG618= +github.com/go-chi/chi/v5 v5.2.2/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= -github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= -github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= -github.com/go-openapi/inflect v0.21.2 h1:0gClGlGcxifcJR56zwvhaOulnNgnhc4qTAkob5ObnSM= -github.com/go-openapi/inflect v0.21.2/go.mod h1:INezMuUu7SJQc2AyR3WO0DqqYUJSj8Kb4hBd7WtjlAw= -github.com/go-openapi/jsonpointer v0.21.1 h1:whnzv/pNXtK2FbX/W9yJfRmE2gsmkfahjMKB0fZvcic= -github.com/go-openapi/jsonpointer v0.21.1/go.mod 
h1:50I1STOfbY1ycR8jGz8DaMeLCdXiI6aDteEdRNNzpdk= -github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= -github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= -github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY= -github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= -github.com/go-openapi/swag v0.23.1 h1:lpsStH0n2ittzTnbaSloVZLuB5+fvSY/+hnagBjSNZU= -github.com/go-openapi/swag v0.23.1/go.mod h1:STZs8TbRvEQQKUA+JZNAm3EWlgaOBGpyFDqQnDHMef0= +github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4= +github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZqewK7OKeyA= +github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo= +github.com/go-openapi/spec v0.20.6 h1:ich1RQ3WDbfoeTqTAb+5EIxNmpKVJZWBNah9RAT0jIQ= +github.com/go-openapi/spec v0.20.6/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM= +github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= @@ -61,47 +227,127 @@ github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3a github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1 h1:FWNFq4fM1wPfcK40yHE5UO3RUdSNPaBC+j3PokzA6OQ= github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1/go.mod h1:5YoVOkjYAQumqlV356Hj3xeYh4BdZuLE0/nRkf2NKkI= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= +github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.6.0 
h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs= +github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/go-replayers/grpcreplay v1.3.0 h1:1Keyy0m1sIpqstQmgz307zhiJ1pV4uIlFds5weTmxbo= +github.com/google/go-replayers/grpcreplay v1.3.0/go.mod h1:v6NgKtkijC0d3e3RW8il6Sy5sqRVUwoQa4mHOGEy8DI= +github.com/google/go-replayers/httpreplay v1.2.0 h1:VM1wEyyjaoU53BwrOnaf9VhAyQQEEioJvFYxYcLRKzk= +github.com/google/go-replayers/httpreplay v1.2.0/go.mod h1:WahEFFZZ7a1P4VM1qEeHy+tME4bwyqPcwWbNlUI1Mcg= +github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= +github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs= github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA= +github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= +github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= +github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= 
+github.com/google/wire v0.6.0 h1:HBkoIh4BdSxoyo9PveV8giw7ZsaBOvzWKfcg/6MrVwI= +github.com/google/wire v0.6.0/go.mod h1:F4QhpQ9EDIdJ1Mbop/NZBRB+5yrR6qg3BnctaoUk6NA= +github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= +github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= +github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q= +github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA= github.com/gorilla/schema v1.4.1 h1:jUg5hUjCSDZpNGLuXQOgIWGdlgrIdYvgQ0wZtdK1M3E= github.com/gorilla/schema v1.4.1/go.mod h1:Dg5SSm5PV60mhF2NFaTV1xuYYj8tV8NOPRo4FggUMnM= -github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= -github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/hashicorp/hcl/v2 v2.23.0 h1:Fphj1/gCylPxHutVSEOf2fBOh1VE4AuLV7+kbJf3qos= -github.com/hashicorp/hcl/v2 v2.23.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA= +github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= +github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= +github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/hcl/v2 v2.13.0 h1:0Apadu1w6M11dyGFxWnmhhcMjkbAiKCv7G1r/2QgCNc= +github.com/hashicorp/hcl/v2 v2.13.0/go.mod h1:e4z5nxYlWNPdDSNYX+ph14EvWYMFm3eP0zIUqPc2jr0= github.com/hay-kot/httpkit v0.0.11 h1:ZdB2uqsFBSDpfUoClGK5c5orjBjQkEVSXh7fZX5FKEk= github.com/hay-kot/httpkit v0.0.11/go.mod h1:0kZdk5/swzdfqfg2c6pBWimcgeJ9PTyO97EbHnYl2Sw= github.com/jarcoal/httpmock v1.3.0 h1:2RJ8GP0IIaWwcC9Fp2BmVi8Kog3v2Hn7VXM3fTd+nuc= github.com/jarcoal/httpmock v1.3.0/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg= +github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= +github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= +github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= +github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM= +github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg= +github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo= +github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o= +github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= +github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh687T8= 
+github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs= +github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY= +github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/klauspost/cpuid/v2 v2.0.12 h1:p9dKCg8i4gmOxtv35DvrYoWqYzQrvEVdjQ762Y0OqZE= -github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c= -github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE= -github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/keybase/go-keychain v0.0.0-20231219164618-57a3676c3af6 h1:IsMZxCuZqKuao2vNdfD82fjjgPLfyHLpR41Z88viRWs= +github.com/keybase/go-keychain v0.0.0-20231219164618-57a3676c3af6/go.mod h1:3VeWNIJaW+O5xpRQbPp0Ybqu1vJd/pm7s2F473HRrkw= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk= +github.com/klauspost/cpuid/v2 v2.2.4/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 h1:PpXWgLPs+Fqr325bN2FD2ISlRRztXibcX6e8f5FR5Dc= -github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35/go.mod h1:autxFIvghDt3jPTLoqZ9OZ7s9qTGNAWmYCjVFWPX/zg= -github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4= -github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= 
+github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= -github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= @@ -112,10 +358,23 @@ github.com/mattn/go-sqlite3 v1.14.28 h1:ThEiQrnbtumT+QMknw63Befp/ce/nUPgBPMlRFEu github.com/mattn/go-sqlite3 v1.14.28/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY= github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg= -github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= -github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= +github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g= +github.com/minio/highwayhash v1.0.2/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/nats-io/jwt/v2 v2.5.0 h1:WQQ40AAlqqfx+f6ku+i0pOVm+ASirD4fUh+oQsiE9Ak= +github.com/nats-io/jwt/v2 v2.5.0/go.mod h1:24BeQtRwxRV8ruvC4CojXlx/WQ/VjuwlYiH+vu/+ibI= +github.com/nats-io/nats-server/v2 v2.9.23 h1:6Wj6H6QpP9FMlpCyWUaNu2yeZ/qGj+mdRkZ1wbikExU= +github.com/nats-io/nats-server/v2 v2.9.23/go.mod h1:wEjrEy9vnqIGE4Pqz4/c75v9Pmaq7My2IgFmnykc4C0= +github.com/nats-io/nats.go v1.40.1 h1:MLjDkdsbGUeCMKFyCFoLnNn/HDTqcgVa3EQm+pMNDPk= +github.com/nats-io/nats.go v1.40.1/go.mod h1:wV73x0FSI/orHPSYoyMeJB+KajMDoWyXmFaRrrYaaTo= +github.com/nats-io/nkeys v0.4.10 h1:glmRrpCmYLHByYcePvnTBEAwawwapjCPMjy2huw20wc= +github.com/nats-io/nkeys v0.4.10/go.mod h1:OjRrnIKnWBFl+s4YK5ChQfvHP2fxqZexrKJoVVyWB3U= +github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw= +github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4= github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/olahol/melody v1.2.1 h1:xdwRkzHxf+B0w4TKbGpUSSkV516ZucQZJIWLztOWICQ= github.com/olahol/melody v1.2.1/go.mod h1:GgkTl6Y7yWj/HtfD48Q5vLKPVoZOH+Qqgfa7CvJgJM4= github.com/olekukonko/tablewriter v0.0.5 
h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= @@ -124,61 +383,84 @@ github.com/onsi/ginkgo/v2 v2.9.2 h1:BA2GMJOtfGAfagzYtrAlufIP0lq6QERkFmHLMLPwFSU= github.com/onsi/ginkgo/v2 v2.9.2/go.mod h1:WHcJJG2dIlcCqVfBAwUCrJxSPFb6v4azBwgxeMeDuts= github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE= github.com/onsi/gomega v1.27.6/go.mod h1:PIQNjfQwkP3aQAH7lf7j87O/5FiNr+ZR8+ipb+qQlhg= +github.com/philhofer/fwd v1.1.2 h1:bnDivRJ1EWPjUIRXV5KfORO897HTbpFAQddBdE8t7Gw= +github.com/philhofer/fwd v1.1.2/go.mod h1:qkPdfjR2SIEbspLqpe1tO4n5yICnr2DY7mqEx2tUTP0= +github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU= +github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt9k/+g42oCprj/FisM4qX9L3sZB3upGN2ZU= -github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= -github.com/pressly/goose/v3 v3.24.2 h1:c/ie0Gm8rnIVKvnDQ/scHErv46jrDv9b4I0WRcFJzYU= -github.com/pressly/goose/v3 v3.24.2/go.mod h1:kjefwFB0eR4w30Td2Gj2Mznyw94vSP+2jJYkOVNbD1k= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/pressly/goose/v3 v3.24.3 h1:DSWWNwwggVUsYZ0X2VitiAa9sKuqtBfe+Jr9zFGwWlM= github.com/pressly/goose/v3 v3.24.3/go.mod h1:v9zYL4xdViLHCUUJh/mhjnm6JrK7Eul8AS93IxiZM4E= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rabbitmq/amqp091-go v1.10.0 h1:STpn5XsHlHGcecLmMFCtg7mqq0RnD+zFr4uzukfVhBw= +github.com/rabbitmq/amqp091-go v1.10.0/go.mod h1:Hy4jKW5kQART1u+JkDTF9YYOQUHXqMuhrgxOEeS7G4o= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/redis/go-redis/v9 v9.7.0 h1:HhLSs+B6O021gwzl+locl0zEDnyNkxMtf/Z3NNBMa9E= +github.com/redis/go-redis/v9 v9.7.0/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= -github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= -github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= 
+github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY= github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ= +github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= +github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE= github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas= -github.com/shirou/gopsutil/v4 v4.25.3 h1:SeA68lsu8gLggyMbmCn8cmp97V1TI9ld9sVzAUcKcKE= -github.com/shirou/gopsutil/v4 v4.25.3/go.mod h1:xbuxyoZj+UsgnZrENu3lQivsngRR5BdjbJwf2fv4szA= -github.com/shirou/gopsutil/v4 v4.25.4 h1:cdtFO363VEOOFrUCjZRh4XVJkb548lyF0q0uTeMqYPw= -github.com/shirou/gopsutil/v4 v4.25.4/go.mod h1:xbuxyoZj+UsgnZrENu3lQivsngRR5BdjbJwf2fv4szA= +github.com/shirou/gopsutil/v4 v4.25.5 h1:rtd9piuSMGeU8g1RMXjZs9y9luK5BwtnG7dZaQUJAsc= +github.com/shirou/gopsutil/v4 v4.25.5/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c= github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e h1:MRM5ITcdelLK2j1vwZ3Je0FKVCfqOLp5zO6trqMLYs0= github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e/go.mod h1:XV66xRDqSt+GTGFMVlhk3ULuV0y9ZmzeVGR4mloJI3M= github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/swaggo/files/v2 v2.0.2 h1:Bq4tgS/yxLB/3nwOMcul5oLEUKa877Ykgz3CJMVbQKU= -github.com/swaggo/files/v2 v2.0.2/go.mod h1:TVqetIzZsO9OhHX1Am9sRf9LdrFZqoK49N37KON/jr0= +github.com/swaggo/files/v2 v2.0.0 h1:hmAt8Dkynw7Ssz46F6pn8ok6YmGZqHSVLZ+HQM7i0kw= +github.com/swaggo/files/v2 v2.0.0/go.mod h1:24kk2Y9NYEJ5lHuCra6iVwkMjIekMCaFq/0JQj66kyM= github.com/swaggo/http-swagger/v2 v2.0.2 h1:FKCdLsl+sFCx60KFsyM0rDarwiUSZ8DqbfSyIKC9OBg= github.com/swaggo/http-swagger/v2 v2.0.2/go.mod h1:r7/GBkAWIfK6E/OLnE8fXnviHiDeAHmgIyooa4xm3AQ= github.com/swaggo/swag v1.16.4 h1:clWJtd9LStiG3VeijiCfOVODP6VpHtKdQy9ELFG3s1A= github.com/swaggo/swag v1.16.4/go.mod h1:VBsHJRsDvfYvqoiMKnsdwhNV9LEMHgEDZcyVYX0sxPg= 
-github.com/tklauser/go-sysconf v0.3.15 h1:VE89k0criAymJ/Os65CSn1IXaol+1wrsFHEB8Ol49K4= -github.com/tklauser/go-sysconf v0.3.15/go.mod h1:Dmjwr6tYFIseJw7a3dRLJfsHAMXZ3nEnL/aZY+0IuI4= -github.com/tklauser/numcpus v0.10.0 h1:18njr6LDBk1zuna922MgdjQuJFjrdppsZG60sHGfjso= -github.com/tklauser/numcpus v0.10.0/go.mod h1:BiTKazU708GQTYF4mB+cmlpT2Is1gLk7XVuEeem8LsQ= +github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I= +github.com/tetratelabs/wazero v1.9.0/go.mod h1:TSbcXCfFP0L2FGkRPxHphadXPjo1T6W+CseNNY7EkjM= +github.com/tinylib/msgp v1.1.8 h1:FCXC1xanKO4I8plpHGH2P7koL/RzZs12l/+r7vakfm0= +github.com/tinylib/msgp v1.1.8/go.mod h1:qkpG+2ldGg4xRFmx+jfTvZPxfGFhi64BcnL9vkCm/Tw= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= github.com/yeqown/go-qrcode/v2 v2.2.5 h1:HCOe2bSjkhZyYoyyNaXNzh4DJZll6inVJQQw+8228Zk= github.com/yeqown/go-qrcode/v2 v2.2.5/go.mod h1:uHpt9CM0V1HeXLz+Wg5MN50/sI/fQhfkZlOM+cOTHxw= -github.com/yeqown/go-qrcode/writer/standard v1.2.5 h1:m+5BUIcbsaG2md76FIqI/oZULrAju8tsk47eOohovQ0= -github.com/yeqown/go-qrcode/writer/standard v1.2.5/go.mod h1:O4MbzsotGCvy8upYPCR91j81dr5XLT7heuljcNXW+oQ= github.com/yeqown/go-qrcode/writer/standard v1.3.0 h1:chdyhEfRtUPgQtuPeaWVGQ/TQx4rE1PqeoW3U+53t34= github.com/yeqown/go-qrcode/writer/standard v1.3.0/go.mod h1:O4MbzsotGCvy8upYPCR91j81dr5XLT7heuljcNXW+oQ= github.com/yeqown/reedsolomon v1.0.0 h1:x1h/Ej/uJnNu8jaX7GLHBWmZKCAWjEJTetkqaabr4B0= github.com/yeqown/reedsolomon v1.0.0/go.mod h1:P76zpcn2TCuL0ul1Fso373qHRc69LKwAw/Iy6g1WiiM= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= -github.com/zclconf/go-cty v1.16.2 h1:LAJSwc3v81IRBZyUVQDUdZ7hs3SYs9jv0eZJDWHD/70= -github.com/zclconf/go-cty v1.16.2/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= -github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo= -github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= +github.com/zclconf/go-cty v1.14.4 h1:uXXczd9QDGsgu0i/QFR/hzI5NYCHLf6NQw/atrbnhq8= +github.com/zclconf/go-cty v1.14.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= github.com/zclconf/go-cty-yaml v1.1.0 h1:nP+jp0qPHv2IhUVqmQSzjvqAWcObN0KBkUl2rWBdig0= github.com/zclconf/go-cty-yaml v1.1.0/go.mod h1:9YLUH4g7lOhVWqUbctnVlZ5KLpg7JAprQNgxSZ1Gyxs= github.com/zeebo/assert v1.1.0 h1:hU1L1vLTHsnO8x8c9KAR5GmM5QscxHg5RNU5z5qbUWY= @@ -187,81 +469,218 @@ github.com/zeebo/blake3 v0.2.4 h1:KYQPkhpRtcqh0ssGYcKLG1JYvddkEA8QwCM/yBqhaZI= github.com/zeebo/blake3 v0.2.4/go.mod h1:7eeQ6d2iXWRGF6npfaxl2CU+xy2Fjo2gxeyZGCRUjcE= github.com/zeebo/pcg v1.0.1 h1:lyqfGeWiv4ahac6ttHs+I5hwtH/+1mrhlCtVNQM2kHo= github.com/zeebo/pcg v1.0.1/go.mod h1:09F0S9iiKrwn9rlI5yjLkmrug154/YRW6KnnXVDM/l4= +go.balki.me/anyhttp v0.5.2 h1:et4tCDXLeXpWfMNvRKG7ojfrnlr3du7cEaG966MLSpA= +go.balki.me/anyhttp v0.5.2/go.mod h1:JhfekOIjgVODoVqUCficjpIgmB3wwlB7jhN0eN2EZ/s= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod 
h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/detectors/gcp v1.35.0 h1:bGvFt68+KTiAKFlacHW6AhA56GF2rS0bdD3aJYEnmzA= +go.opentelemetry.io/contrib/detectors/gcp v1.35.0/go.mod h1:qGWP8/+ILwMRIUf9uIVLloR1uo5ZYAslM4O6OqUi1DA= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 h1:x7wzEgXfnzJcHDwStJT+mxOz4etr2EcexjqhBvmoakw= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0/go.mod h1:rg+RlpR5dKwaS95IyyZqj5Wd4E13lk/msnTS0Xl9lJM= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ= +go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= +go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0 h1:WDdP9acbMYjbKIyJUhTvtzj601sVJOqgWdUxSdR/Ysc= +go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0/go.mod h1:BLbf7zbNIONBLPwvFnwNHGj4zge8uTCM/UPIVW1Mq2I= +go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= +go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= +go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY= +go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg= +go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o= +go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w= +go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= +go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= -golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= -golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= -golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8= -golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= -golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 h1:R84qjqJb5nVJMxqWYb3np9L5ZsaDtB+a39EqjV0JSUM= -golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0/go.mod h1:S9Xr4PYopiDyqSyp5NjCrhFrqg6A5zA2E/iPHPhqnS8= +gocloud.dev v0.41.0 h1:qBKd9jZkBKEghYbP/uThpomhedK5s2Gy6Lz7h/zYYrM= +gocloud.dev v0.41.0/go.mod h1:IetpBcWLUwroOOxKr90lhsZ8vWxeSkuszBnW62sbcf0= +gocloud.dev/pubsub/kafkapubsub v0.41.0 h1:Ft6YB77ejqk++VjW51UP39RH/WDAMtv6ed3+PHMxBzg= +gocloud.dev/pubsub/kafkapubsub v0.41.0/go.mod h1:kJf4c6b+4yJk6nXmv33yXKblbrgWmrYCzI5QEsr27G0= +gocloud.dev/pubsub/natspubsub v0.41.0 h1:UxNb0DiAzdnyHut6jcCG7u6lsB/hzxTyZ/RHWeCUJ4Q= +gocloud.dev/pubsub/natspubsub v0.41.0/go.mod h1:uCBKjwvIcuNuf3+ft4wUI9hPHHKQvroxq9ZPB/410ac= +gocloud.dev/pubsub/rabbitpubsub v0.41.0 h1:RutvHbacZxlFr0t3wlr+kz63j53UOfHY3PJR8NKN1EI= 
+gocloud.dev/pubsub/rabbitpubsub v0.41.0/go.mod h1:s7oQXOlQ2FOj8XmYMv5Ocgs1t+8hIXfsKaWGgECM9SQ= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= +golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= +golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6 h1:y5zboxd6LQAqYIhHnB48p0ByQ/GnQx2BE33L8BOHQkI= golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ= -golang.org/x/image v0.26.0 h1:4XjIFEZWQmCZi6Wv8BoxsDhRU3RVnLX04dToTDAEPlY= -golang.org/x/image v0.26.0/go.mod h1:lcxbMFAovzpnJxzXS3nyL83K27tmqtKzIJpctK8YO5c= -golang.org/x/image v0.27.0 h1:C8gA4oWU/tKkdCfYT6T2u4faJu3MeNS5O8UPWlPF61w= -golang.org/x/image v0.27.0/go.mod h1:xbdrClrAUway1MUTEZDq9mz/UpRwYAkFFNUslZtcB+g= -golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= -golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= -golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= -golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= +golang.org/x/image v0.28.0 h1:gdem5JW1OLS4FbkWgLO+7ZeFzYtL3xClb97GaUzYMFE= +golang.org/x/image v0.28.0/go.mod h1:GUJYXtnGKEUgggyzh+Vxt+AviiCcyiwpsl8iQ8MvwGY= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= +golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net 
v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.3.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= -golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= -golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= -golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= -golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.28.0 h1:CrgCKl8PPAVtLnU3c+EDw6x11699EWlsDeWNWKdIOkc= +golang.org/x/oauth2 v0.28.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= +golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= -golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= -golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= -golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= -golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= -golang.org/x/tools v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU= -golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= +golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= +golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= 
+golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.4.0/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps= golang.org/x/tools v0.33.0 h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc= golang.org/x/tools v0.33.0/go.mod h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= +golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= +google.golang.org/api v0.228.0 h1:X2DJ/uoWGnY5obVjewbp8icSL5U4FzuCfy9OjbLSnLs= +google.golang.org/api v0.228.0/go.mod h1:wNvRS1Pbe8r4+IfBIniV8fwCpGwTrYa+kMUDiC5z5a4= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20250324211829-b45e905df463 h1:qEFnJI6AnfZk0NNe8YTyXQh5i//Zxi4gBHwRgp76qpw= +google.golang.org/genproto v0.0.0-20250324211829-b45e905df463/go.mod h1:SqIx1NV9hcvqdLHo7uNZDS5lrUJybQ3evo3+z/WBfA0= +google.golang.org/genproto/googleapis/api v0.0.0-20250324211829-b45e905df463 h1:hE3bRWtU6uceqlh4fhrSnUyjKHMKB9KrTLLG+bc0ddM= +google.golang.org/genproto/googleapis/api v0.0.0-20250324211829-b45e905df463/go.mod h1:U90ffi8eUL9MwPcrJylN5+Mk2v3vuPDptd5yyNUiRR8= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 h1:e0AIkUUhxyBKh6ssZNrAMeqhA7RKUj42346d1y02i2g= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg= +google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec= +google.golang.org/protobuf 
v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -modernc.org/cc/v4 v4.26.0 h1:QMYvbVduUGH0rrO+5mqF/PSPPRZNpRtg2CLELy7vUpA= -modernc.org/cc/v4 v4.26.0/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= modernc.org/cc/v4 v4.26.1 h1:+X5NtzVBn0KgsBCBe+xkDC7twLb/jNVj9FPgiwSQO3s= -modernc.org/ccgo/v4 v4.26.0 h1:gVzXaDzGeBYJ2uXTOpR8FR7OlksDOe9jxnjhIKCsiTc= -modernc.org/ccgo/v4 v4.26.0/go.mod h1:Sem8f7TFUtVXkG2fiaChQtyyfkqhJBg/zjEJBkmuAVY= +modernc.org/cc/v4 v4.26.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0= modernc.org/ccgo/v4 v4.28.0 h1:rjznn6WWehKq7dG4JtLRKxb52Ecv8OUGah8+Z/SfpNU= -modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE= -modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ= +modernc.org/ccgo/v4 v4.28.0/go.mod h1:JygV3+9AV6SmPhDasu4JgquwU81XAKLd3OKTUDNOiKE= +modernc.org/fileutil v1.3.1 h1:8vq5fe7jdtEvoCf3Zf9Nm0Q05sH6kGx0Op2CPx1wTC8= +modernc.org/fileutil v1.3.1/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc= modernc.org/gc/v2 v2.6.5 
h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI= modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito= -modernc.org/libc v1.63.0 h1:wKzb61wOGCzgahQBORb1b0dZonh8Ufzl/7r4Yf1D5YA= -modernc.org/libc v1.63.0/go.mod h1:wDzH1mgz1wUIEwottFt++POjGRO9sgyQKrpXaz3x89E= -modernc.org/libc v1.65.8 h1:7PXRJai0TXZ8uNA3srsmYzmTyrLoHImV5QxHeni108Q= -modernc.org/libc v1.65.8/go.mod h1:011EQibzzio/VX3ygj1qGFt5kMjP0lHb0qCW5/D/pQU= +modernc.org/libc v1.65.7 h1:Ia9Z4yzZtWNtUIuiPuQ7Qf7kxYrxP1/jeHZzG8bFu00= +modernc.org/libc v1.65.7/go.mod h1:011EQibzzio/VX3ygj1qGFt5kMjP0lHb0qCW5/D/pQU= modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU= modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg= -modernc.org/memory v1.10.0 h1:fzumd51yQ1DxcOxSO+S6X7+QTuVU+n8/Aj7swYjFfC4= -modernc.org/memory v1.10.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw= modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI= modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw= modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8= modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns= modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w= modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE= -modernc.org/sqlite v1.37.0 h1:s1TMe7T3Q3ovQiK2Ouz4Jwh7dw4ZDqbebSDTlSJdfjI= -modernc.org/sqlite v1.37.0/go.mod h1:5YiWv+YviqGMuGw4V+PNplcyaJ5v+vQd7TQOgkACoJM= modernc.org/sqlite v1.37.1 h1:EgHJK/FPoqC+q2YBXg7fUmES37pCHFc97sI7zSayBEs= modernc.org/sqlite v1.37.1/go.mod h1:XwdRtsE1MpiBcL54+MbKcaDvcuej+IYSMfLN6gSKV8g= modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0= diff --git a/backend/internal/core/services/main_test.go b/backend/internal/core/services/main_test.go index 96b0b935..b792ab37 100644 --- a/backend/internal/core/services/main_test.go +++ b/backend/internal/core/services/main_test.go @@ -2,6 +2,7 @@ package services import ( "context" + "github.com/sysadminsmedia/homebox/backend/internal/sys/config" "log" "os" "testing" @@ -61,7 +62,19 @@ func MainNoExit(m *testing.M) int { } tClient = client - tRepos = repo.New(tClient, tbus, os.TempDir()+"/homebox") + tRepos = repo.New(tClient, tbus, config.Storage{ + PrefixPath: "/", + ConnString: "file://" + os.TempDir(), + }, "mem://{{ .Topic }}", config.Thumbnail{ + Enabled: false, + Width: 0, + Height: 0, + }) + + err = os.MkdirAll(os.TempDir()+"/homebox", 0o755) + if err != nil { + return 0 + } defaults, _ := currencies.CollectionCurrencies( currencies.CollectDefaults(), diff --git a/backend/internal/core/services/service_items_attachments.go b/backend/internal/core/services/service_items_attachments.go index 95444002..4085d90d 100644 --- a/backend/internal/core/services/service_items_attachments.go +++ b/backend/internal/core/services/service_items_attachments.go @@ -10,8 +10,8 @@ import ( "io" ) -func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentID uuid.UUID) (*ent.Attachment, error) { - attachment, err := svc.repo.Attachments.Get(ctx, attachmentID) +func (svc *ItemService) AttachmentPath(ctx context.Context, gid uuid.UUID, attachmentID uuid.UUID) (*ent.Attachment, error) { + attachment, err := svc.repo.Attachments.Get(ctx, gid, attachmentID) if err != nil { return nil, err } @@ -19,16 +19,16 @@ func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentID uuid.UU return attachment, nil } -func (svc *ItemService) 
AttachmentUpdate(ctx Context, itemID uuid.UUID, data *repo.ItemAttachmentUpdate) (repo.ItemOut, error) { +func (svc *ItemService) AttachmentUpdate(ctx Context, gid uuid.UUID, itemID uuid.UUID, data *repo.ItemAttachmentUpdate) (repo.ItemOut, error) { // Update Attachment - attachment, err := svc.repo.Attachments.Update(ctx, data.ID, data) + attachment, err := svc.repo.Attachments.Update(ctx, gid, data.ID, data) if err != nil { return repo.ItemOut{}, err } // Update Document attDoc := attachment - _, err = svc.repo.Attachments.Rename(ctx, attDoc.ID, data.Title) + _, err = svc.repo.Attachments.Rename(ctx, gid, attDoc.ID, data.Title) if err != nil { return repo.ItemOut{}, err } @@ -55,9 +55,9 @@ func (svc *ItemService) AttachmentAdd(ctx Context, itemID uuid.UUID, filename st return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID) } -func (svc *ItemService) AttachmentDelete(ctx context.Context, gid, itemID, attachmentID uuid.UUID) error { +func (svc *ItemService) AttachmentDelete(ctx context.Context, gid uuid.UUID, id uuid.UUID, attachmentID uuid.UUID) error { // Delete the attachment - err := svc.repo.Attachments.Delete(ctx, attachmentID) + err := svc.repo.Attachments.Delete(ctx, gid, id, attachmentID) if err != nil { return err } diff --git a/backend/internal/core/services/service_items_attachments_test.go b/backend/internal/core/services/service_items_attachments_test.go index 67f7a551..7e15fac9 100644 --- a/backend/internal/core/services/service_items_attachments_test.go +++ b/backend/internal/core/services/service_items_attachments_test.go @@ -53,10 +53,10 @@ func TestItemService_AddAttachment(t *testing.T) { storedPath := afterAttachment.Attachments[0].Path // {root}/{group}/{item}/{attachment} - assert.Equal(t, path.Join(temp, "homebox", tGroup.ID.String(), "documents"), path.Dir(storedPath)) + assert.Equal(t, path.Join("/", tGroup.ID.String(), "documents"), path.Dir(storedPath)) // Check that the file contents are correct - bts, err := os.ReadFile(storedPath) + bts, err := os.ReadFile(path.Join(os.TempDir(), storedPath)) require.NoError(t, err) assert.Equal(t, contents, string(bts)) } diff --git a/backend/internal/core/services/service_user.go b/backend/internal/core/services/service_user.go index 3cd0e086..a54711a7 100644 --- a/backend/internal/core/services/service_user.go +++ b/backend/internal/core/services/service_user.go @@ -196,10 +196,23 @@ func (svc *UserService) Login(ctx context.Context, username, password string, ex return UserAuthTokenDetail{}, ErrorInvalidLogin } - if !hasher.CheckPasswordHash(password, usr.PasswordHash) { + check, rehash := hasher.CheckPasswordHash(password, usr.PasswordHash) + + if !check { return UserAuthTokenDetail{}, ErrorInvalidLogin } + if rehash { + hash, err := hasher.HashPassword(password) + if err != nil { + log.Err(err).Msg("Failed to hash password") + return UserAuthTokenDetail{}, err + } + err = svc.repos.Users.ChangePassword(ctx, usr.ID, hash) + if err != nil { + return UserAuthTokenDetail{}, err + } + } return svc.createSessionToken(ctx, usr.ID, extendedSession) } @@ -233,7 +246,8 @@ func (svc *UserService) ChangePassword(ctx Context, current string, new string) return false } - if !hasher.CheckPasswordHash(current, usr.PasswordHash) { + match, _ := hasher.CheckPasswordHash(current, usr.PasswordHash) + if !match { log.Err(errors.New("current password is incorrect")).Msg("Failed to change password") return false } diff --git a/backend/internal/data/ent/attachment.go b/backend/internal/data/ent/attachment.go index c2fd4588..fd78e435 100644 
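// Editor's note — not part of the patch. The service_user.go hunk above changes
// hasher.CheckPasswordHash to return two values (match, needsRehash) and rehashes
// the password on a successful login when needsRehash is true. Below is a minimal
// sketch of what such a helper could look like, assuming a bcrypt-backed hasher;
// the real backend/pkgs implementation is not shown in this diff, so the names and
// cost policy here are illustrative only.
package hasher

import "golang.org/x/crypto/bcrypt"

// desiredCost is the cost new hashes are created with (assumption for this sketch).
const desiredCost = bcrypt.DefaultCost

// CheckPasswordHash reports whether password matches hash, and whether the stored
// hash should be regenerated (for example, because it uses a lower bcrypt cost).
func CheckPasswordHash(password, hash string) (match bool, needsRehash bool) {
	if err := bcrypt.CompareHashAndPassword([]byte(hash), []byte(password)); err != nil {
		return false, false
	}
	cost, err := bcrypt.Cost([]byte(hash))
	if err != nil {
		// Cost could not be read; err on the side of producing a fresh hash.
		return true, true
	}
	return true, cost < desiredCost
}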
--- a/backend/internal/data/ent/attachment.go +++ b/backend/internal/data/ent/attachment.go @@ -11,7 +11,7 @@ import ( "entgo.io/ent/dialect/sql" "github.com/google/uuid" "github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment" - "github.com/sysadminsmedia/homebox/backend/internal/data/ent/entity" + "github.com/sysadminsmedia/homebox/backend/internal/data/ent/item" ) // Attachment is the model entity for the Attachment schema. @@ -31,8 +31,14 @@ type Attachment struct { Title string `json:"title,omitempty"` // Path holds the value of the "path" field. Path string `json:"path,omitempty"` + // MimeType holds the value of the "mime_type" field. + MimeType string `json:"mime_type,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the AttachmentQuery when eager-loading is set. + Edges AttachmentEdges `json:"edges"` + attachment_thumbnail *uuid.UUID + item_attachments *uuid.UUID + selectValues sql.SelectValues Edges AttachmentEdges `json:"edges"` entity_attachments *uuid.UUID selectValues sql.SelectValues @@ -42,6 +48,10 @@ type Attachment struct { type AttachmentEdges struct { // Entity holds the value of the entity edge. Entity *Entity `json:"entity,omitempty"` + // Item holds the value of the item edge. + Item *Item `json:"item,omitempty"` + // Thumbnail holds the value of the thumbnail edge. + Thumbnail *Attachment `json:"thumbnail,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool @@ -58,6 +68,17 @@ func (e AttachmentEdges) EntityOrErr() (*Entity, error) { return nil, &NotLoadedError{edge: "entity"} } +// ThumbnailOrErr returns the Thumbnail value or an error if the edge +// was not loaded in eager-loading, or loaded but was not found. +func (e AttachmentEdges) ThumbnailOrErr() (*Attachment, error) { + if e.Thumbnail != nil { + return e.Thumbnail, nil + } else if e.loadedTypes[1] { + return nil, &NotFoundError{label: attachment.Label} + } + return nil, &NotLoadedError{edge: "thumbnail"} +} + // scanValues returns the types for scanning values from sql.Rows. 
func (*Attachment) scanValues(columns []string) ([]any, error) { values := make([]any, len(columns)) @@ -71,6 +92,9 @@ func (*Attachment) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullTime) case attachment.FieldID: values[i] = new(uuid.UUID) + case attachment.ForeignKeys[0]: // attachment_thumbnail + values[i] = &sql.NullScanner{S: new(uuid.UUID)} + case attachment.ForeignKeys[1]: // item_attachments case attachment.ForeignKeys[0]: // entity_attachments values[i] = &sql.NullScanner{S: new(uuid.UUID)} default: @@ -130,8 +154,22 @@ func (a *Attachment) assignValues(columns []string, values []any) error { } else if value.Valid { a.Path = value.String } + case attachment.FieldMimeType: + if value, ok := values[i].(*sql.NullString); !ok { + return fmt.Errorf("unexpected type %T for field mime_type", values[i]) + } else if value.Valid { + a.MimeType = value.String + } case attachment.ForeignKeys[0]: if value, ok := values[i].(*sql.NullScanner); !ok { + return fmt.Errorf("unexpected type %T for field attachment_thumbnail", values[i]) + } else if value.Valid { + a.attachment_thumbnail = new(uuid.UUID) + *a.attachment_thumbnail = *value.S.(*uuid.UUID) + } + case attachment.ForeignKeys[1]: + if value, ok := values[i].(*sql.NullScanner); !ok { + return fmt.Errorf("unexpected type %T for field item_attachments", values[i]) return fmt.Errorf("unexpected type %T for field entity_attachments", values[i]) } else if value.Valid { a.entity_attachments = new(uuid.UUID) @@ -155,6 +193,11 @@ func (a *Attachment) QueryEntity() *EntityQuery { return NewAttachmentClient(a.config).QueryEntity(a) } +// QueryThumbnail queries the "thumbnail" edge of the Attachment entity. +func (a *Attachment) QueryThumbnail() *AttachmentQuery { + return NewAttachmentClient(a.config).QueryThumbnail(a) +} + // Update returns a builder for updating this Attachment. // Note that you need to call Attachment.Unwrap() before calling this method if this Attachment // was returned from a transaction, and the transaction was committed or rolled back. @@ -195,6 +238,9 @@ func (a *Attachment) String() string { builder.WriteString(", ") builder.WriteString("path=") builder.WriteString(a.Path) + builder.WriteString(", ") + builder.WriteString("mime_type=") + builder.WriteString(a.MimeType) builder.WriteByte(')') return builder.String() } diff --git a/backend/internal/data/ent/client.go b/backend/internal/data/ent/client.go index d7c3b449..b90755b1 100644 --- a/backend/internal/data/ent/client.go +++ b/backend/internal/data/ent/client.go @@ -422,6 +422,22 @@ func (c *AttachmentClient) QueryEntity(a *Attachment) *EntityQuery { return query } +// QueryThumbnail queries the thumbnail edge of a Attachment. +func (c *AttachmentClient) QueryThumbnail(a *Attachment) *AttachmentQuery { + query := (&AttachmentClient{config: c.config}).Query() + query.path = func(context.Context) (fromV *sql.Selector, _ error) { + id := a.ID + step := sqlgraph.NewStep( + sqlgraph.From(attachment.Table, attachment.FieldID, id), + sqlgraph.To(attachment.Table, attachment.FieldID), + sqlgraph.Edge(sqlgraph.O2O, false, attachment.ThumbnailTable, attachment.ThumbnailColumn), + ) + fromV = sqlgraph.Neighbors(a.driver.Dialect(), step) + return fromV, nil + } + return query +} + // Hooks returns the client hooks. 
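// Editor's note — not part of the patch. A usage sketch for the one-to-one
// thumbnail edge added to the Attachment entity above: QueryThumbnail and
// ThumbnailOrErr come from the generated code in this diff, while the package
// name "example" and the helper below are hypothetical.
package example

import (
	"context"

	"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
)

// thumbnailFor resolves the thumbnail attachment linked to att, if any.
func thumbnailFor(ctx context.Context, client *ent.Client, att *ent.Attachment) (*ent.Attachment, error) {
	// Follows the attachment -> thumbnail O2O edge via the generated traversal.
	return client.Attachment.QueryThumbnail(att).Only(ctx)
}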
func (c *AttachmentClient) Hooks() []Hook { return c.hooks.Attachment diff --git a/backend/internal/data/ent/item_predicates.go b/backend/internal/data/ent/item_predicates.go new file mode 100644 index 00000000..3732ae9e --- /dev/null +++ b/backend/internal/data/ent/item_predicates.go @@ -0,0 +1,127 @@ +package ent + +import ( + "entgo.io/ent/dialect/sql" + "github.com/sysadminsmedia/homebox/backend/internal/data/ent/item" + "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate" + "github.com/sysadminsmedia/homebox/backend/pkgs/textutils" +) + +// AccentInsensitiveContains creates a predicate that performs accent-insensitive text search. +// It normalizes both the database field value and the search value for comparison. +func AccentInsensitiveContains(field string, searchValue string) predicate.Item { + if searchValue == "" { + return predicate.Item(func(s *sql.Selector) { + // Return a predicate that never matches if search is empty + s.Where(sql.False()) + }) + } + + // Normalize the search value + normalizedSearch := textutils.NormalizeSearchQuery(searchValue) + + return predicate.Item(func(s *sql.Selector) { + dialect := s.Dialect() + + switch dialect { + case "sqlite3": + // For SQLite, we'll create a custom normalization function using REPLACE + // to handle common accented characters + normalizeFunc := buildSQLiteNormalizeExpression(s.C(field)) + s.Where(sql.ExprP( + "LOWER("+normalizeFunc+") LIKE ?", + "%"+normalizedSearch+"%", + )) + case "postgres": + // For PostgreSQL, try to use unaccent extension if available + // Fall back to REPLACE-based normalization if not available + normalizeFunc := buildPostgreSQLNormalizeExpression(s.C(field)) + s.Where(sql.ExprP( + "LOWER("+normalizeFunc+") LIKE ?", + "%"+normalizedSearch+"%", + )) + default: + // Default fallback using REPLACE for common accented characters + normalizeFunc := buildGenericNormalizeExpression(s.C(field)) + s.Where(sql.ExprP( + "LOWER("+normalizeFunc+") LIKE ?", + "%"+normalizedSearch+"%", + )) + } + }) +} + +// buildSQLiteNormalizeExpression creates a SQLite expression to normalize accented characters +func buildSQLiteNormalizeExpression(fieldExpr string) string { + return buildGenericNormalizeExpression(fieldExpr) +} + +// buildPostgreSQLNormalizeExpression creates a PostgreSQL expression to normalize accented characters +func buildPostgreSQLNormalizeExpression(fieldExpr string) string { + // Use a CASE statement to check if unaccent function exists before using it + // This prevents errors when the unaccent extension is not installed + return "CASE WHEN EXISTS (SELECT 1 FROM pg_proc WHERE proname = 'unaccent') " + + "THEN unaccent(" + fieldExpr + ") " + + "ELSE " + buildGenericNormalizeExpression(fieldExpr) + " END" +} + +// buildGenericNormalizeExpression creates a database-agnostic expression to normalize common accented characters +func buildGenericNormalizeExpression(fieldExpr string) string { + // Chain REPLACE functions to handle the most common accented characters + // Focused on the most frequently used accents in Spanish, French, and Portuguese + // Ordered by frequency of use for better performance + normalized := fieldExpr + + // Most common accented characters ordered by frequency + commonAccents := []struct { + from, to string + }{ + // Spanish - most common + {"á", "a"}, {"é", "e"}, {"í", "i"}, {"ó", "o"}, {"ú", "u"}, {"ñ", "n"}, + {"Á", "A"}, {"É", "E"}, {"Í", "I"}, {"Ó", "O"}, {"Ú", "U"}, {"Ñ", "N"}, + + // French - most common + {"è", "e"}, {"ê", "e"}, {"à", "a"}, {"ç", "c"}, + 
{"È", "E"}, {"Ê", "E"}, {"À", "A"}, {"Ç", "C"}, + + // German umlauts and Portuguese - common + {"ä", "a"}, {"ö", "o"}, {"ü", "u"}, {"ã", "a"}, {"õ", "o"}, + {"Ä", "A"}, {"Ö", "O"}, {"Ü", "U"}, {"Ã", "A"}, {"Õ", "O"}, + } + + for _, accent := range commonAccents { + normalized = "REPLACE(" + normalized + ", '" + accent.from + "', '" + accent.to + "')" + } + + return normalized +} + +// ItemNameAccentInsensitiveContains creates an accent-insensitive search predicate for the item name field. +func ItemNameAccentInsensitiveContains(value string) predicate.Item { + return AccentInsensitiveContains(item.FieldName, value) +} + +// ItemDescriptionAccentInsensitiveContains creates an accent-insensitive search predicate for the item description field. +func ItemDescriptionAccentInsensitiveContains(value string) predicate.Item { + return AccentInsensitiveContains(item.FieldDescription, value) +} + +// ItemSerialNumberAccentInsensitiveContains creates an accent-insensitive search predicate for the item serial number field. +func ItemSerialNumberAccentInsensitiveContains(value string) predicate.Item { + return AccentInsensitiveContains(item.FieldSerialNumber, value) +} + +// ItemModelNumberAccentInsensitiveContains creates an accent-insensitive search predicate for the item model number field. +func ItemModelNumberAccentInsensitiveContains(value string) predicate.Item { + return AccentInsensitiveContains(item.FieldModelNumber, value) +} + +// ItemManufacturerAccentInsensitiveContains creates an accent-insensitive search predicate for the item manufacturer field. +func ItemManufacturerAccentInsensitiveContains(value string) predicate.Item { + return AccentInsensitiveContains(item.FieldManufacturer, value) +} + +// ItemNotesAccentInsensitiveContains creates an accent-insensitive search predicate for the item notes field. 
+func ItemNotesAccentInsensitiveContains(value string) predicate.Item { + return AccentInsensitiveContains(item.FieldNotes, value) +} diff --git a/backend/internal/data/ent/item_predicates_test.go b/backend/internal/data/ent/item_predicates_test.go new file mode 100644 index 00000000..9d49e24d --- /dev/null +++ b/backend/internal/data/ent/item_predicates_test.go @@ -0,0 +1,156 @@ +package ent + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestBuildGenericNormalizeExpression(t *testing.T) { + tests := []struct { + name string + field string + expected string + }{ + { + name: "Simple field name", + field: "name", + expected: "name", // Should be wrapped in many REPLACE functions + }, + { + name: "Complex field name", + field: "description", + expected: "description", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := buildGenericNormalizeExpression(tt.field) + + // Should contain the original field + assert.Contains(t, result, tt.field) + + // Should contain REPLACE functions for accent normalization + assert.Contains(t, result, "REPLACE(") + + // Should handle common accented characters + assert.Contains(t, result, "'á'", "Should handle Spanish á") + assert.Contains(t, result, "'é'", "Should handle Spanish é") + assert.Contains(t, result, "'ñ'", "Should handle Spanish ñ") + assert.Contains(t, result, "'ü'", "Should handle German ü") + + // Should handle uppercase accents too + assert.Contains(t, result, "'Á'", "Should handle uppercase Spanish Á") + assert.Contains(t, result, "'É'", "Should handle uppercase Spanish É") + }) + } +} + +func TestSQLiteNormalizeExpression(t *testing.T) { + result := buildSQLiteNormalizeExpression("test_field") + + // Should contain the field name and REPLACE functions + assert.Contains(t, result, "test_field") + assert.Contains(t, result, "REPLACE(") + // Check for some specific accent replacements (order doesn't matter) + assert.Contains(t, result, "'á'", "Should handle Spanish á") + assert.Contains(t, result, "'ó'", "Should handle Spanish ó") +} + +func TestPostgreSQLNormalizeExpression(t *testing.T) { + result := buildPostgreSQLNormalizeExpression("test_field") + + // Should contain unaccent function and CASE WHEN logic + assert.Contains(t, result, "unaccent(") + assert.Contains(t, result, "CASE WHEN EXISTS") + assert.Contains(t, result, "test_field") +} + +func TestAccentInsensitivePredicateCreation(t *testing.T) { + tests := []struct { + name string + field string + searchValue string + description string + }{ + { + name: "Normal search value", + field: "name", + searchValue: "electronica", + description: "Should create predicate for normal search", + }, + { + name: "Accented search value", + field: "description", + searchValue: "electrónica", + description: "Should create predicate for accented search", + }, + { + name: "Empty search value", + field: "name", + searchValue: "", + description: "Should handle empty search gracefully", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + predicate := AccentInsensitiveContains(tt.field, tt.searchValue) + assert.NotNil(t, predicate, tt.description) + }) + } +} + +func TestSpecificItemPredicates(t *testing.T) { + tests := []struct { + name string + predicateFunc func(string) interface{} + searchValue string + description string + }{ + { + name: "ItemNameAccentInsensitiveContains", + predicateFunc: func(val string) interface{} { return ItemNameAccentInsensitiveContains(val) }, + searchValue: "electronica", + description: 
"Should create accent-insensitive name search predicate", + }, + { + name: "ItemDescriptionAccentInsensitiveContains", + predicateFunc: func(val string) interface{} { return ItemDescriptionAccentInsensitiveContains(val) }, + searchValue: "descripcion", + description: "Should create accent-insensitive description search predicate", + }, + { + name: "ItemManufacturerAccentInsensitiveContains", + predicateFunc: func(val string) interface{} { return ItemManufacturerAccentInsensitiveContains(val) }, + searchValue: "compañia", + description: "Should create accent-insensitive manufacturer search predicate", + }, + { + name: "ItemSerialNumberAccentInsensitiveContains", + predicateFunc: func(val string) interface{} { return ItemSerialNumberAccentInsensitiveContains(val) }, + searchValue: "sn123", + description: "Should create accent-insensitive serial number search predicate", + }, + { + name: "ItemModelNumberAccentInsensitiveContains", + predicateFunc: func(val string) interface{} { return ItemModelNumberAccentInsensitiveContains(val) }, + searchValue: "model456", + description: "Should create accent-insensitive model number search predicate", + }, + { + name: "ItemNotesAccentInsensitiveContains", + predicateFunc: func(val string) interface{} { return ItemNotesAccentInsensitiveContains(val) }, + searchValue: "notas importantes", + description: "Should create accent-insensitive notes search predicate", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + predicate := tt.predicateFunc(tt.searchValue) + assert.NotNil(t, predicate, tt.description) + }) + } +} diff --git a/backend/internal/data/ent/runtime.go b/backend/internal/data/ent/runtime.go index 0d7a7211..bf2f8792 100644 --- a/backend/internal/data/ent/runtime.go +++ b/backend/internal/data/ent/runtime.go @@ -51,6 +51,10 @@ func init() { attachmentDescPath := attachmentFields[3].Descriptor() // attachment.DefaultPath holds the default value on creation for the path field. attachment.DefaultPath = attachmentDescPath.Default.(string) + // attachmentDescMimeType is the schema descriptor for mime_type field. + attachmentDescMimeType := attachmentFields[4].Descriptor() + // attachment.DefaultMimeType holds the default value on creation for the mime_type field. + attachment.DefaultMimeType = attachmentDescMimeType.Default.(string) // attachmentDescID is the schema descriptor for id field. attachmentDescID := attachmentMixinFields0[0].Descriptor() // attachment.DefaultID holds the default value on creation for the id field. diff --git a/backend/internal/data/ent/schema/attachment.go b/backend/internal/data/ent/schema/attachment.go index 707f6985..49a3483d 100644 --- a/backend/internal/data/ent/schema/attachment.go +++ b/backend/internal/data/ent/schema/attachment.go @@ -21,10 +21,11 @@ func (Attachment) Mixin() []ent.Mixin { // Fields of the Attachment. func (Attachment) Fields() []ent.Field { return []ent.Field{ - field.Enum("type").Values("photo", "manual", "warranty", "attachment", "receipt").Default("attachment"), + field.Enum("type").Values("photo", "manual", "warranty", "attachment", "receipt", "thumbnail").Default("attachment"), field.Bool("primary").Default(false), field.String("title").Default(""), field.String("path").Default(""), + field.String("mime_type").Default("application/octet-stream"), } } @@ -33,7 +34,8 @@ func (Attachment) Edges() []ent.Edge { return []ent.Edge{ edge.From("entity", Entity.Type). Ref("attachments"). - Required(). + Unique(), + edge.To("thumbnail", Attachment.Type). 
Unique(), } } diff --git a/backend/internal/data/migrations/postgres/20250619215101_add_thumbnails.sql b/backend/internal/data/migrations/postgres/20250619215101_add_thumbnails.sql new file mode 100644 index 00000000..d19deacc --- /dev/null +++ b/backend/internal/data/migrations/postgres/20250619215101_add_thumbnails.sql @@ -0,0 +1,14 @@ +-- +goose Up +alter table public.attachments + alter column item_attachments drop not null; + +alter table public.attachments + add attachment_thumbnail uuid; + +alter table public.attachments + add constraint attachments_attachments_thumbnail + foreign key (attachment_thumbnail) references public.attachments (id); + +alter table public.attachments + add constraint attachments_no_self_reference + check (id != attachment_thumbnail); \ No newline at end of file diff --git a/backend/internal/data/migrations/postgres/20250625120010_add_mime_type.sql b/backend/internal/data/migrations/postgres/20250625120010_add_mime_type.sql new file mode 100644 index 00000000..c533d871 --- /dev/null +++ b/backend/internal/data/migrations/postgres/20250625120010_add_mime_type.sql @@ -0,0 +1,3 @@ +-- +goose Up +ALTER TABLE public.attachments ADD COLUMN mime_type VARCHAR DEFAULT 'application/octet-stream'; + diff --git a/backend/internal/data/migrations/postgres/20250629112901_fix_thumbnail_foriegn.sql b/backend/internal/data/migrations/postgres/20250629112901_fix_thumbnail_foriegn.sql new file mode 100644 index 00000000..9f8aa5cb --- /dev/null +++ b/backend/internal/data/migrations/postgres/20250629112901_fix_thumbnail_foriegn.sql @@ -0,0 +1,8 @@ +-- +goose Up +alter table public.attachments + drop constraint attachments_attachments_thumbnail; + +alter table public.attachments + add constraint attachments_attachments_thumbnail + foreign key (attachment_thumbnail) references public.attachments + on delete set null; \ No newline at end of file diff --git a/backend/internal/data/migrations/sqlite3/20250619215150_add_thumbnails.sql b/backend/internal/data/migrations/sqlite3/20250619215150_add_thumbnails.sql new file mode 100644 index 00000000..911459ff --- /dev/null +++ b/backend/internal/data/migrations/sqlite3/20250619215150_add_thumbnails.sql @@ -0,0 +1,35 @@ +-- +goose Up +create table attachments_dg_tmp +( + id uuid not null + primary key, + created_at datetime not null, + updated_at datetime not null, + type text default 'attachment' not null, + "primary" bool default false not null, + path text not null, + title text not null, + item_attachments uuid + constraint attachments_items_attachments + references items + on delete cascade, + attachment_thumbnail uuid + constraint attachments_attachments_thumbnail + references attachments +); + +insert into attachments_dg_tmp(id, created_at, updated_at, type, "primary", path, title, item_attachments) +select id, + created_at, + updated_at, + type, + "primary", + path, + title, + item_attachments +from attachments; + +drop table attachments; + +alter table attachments_dg_tmp + rename to attachments; \ No newline at end of file diff --git a/backend/internal/data/migrations/sqlite3/20250625120000_add_mime_type.sql b/backend/internal/data/migrations/sqlite3/20250625120000_add_mime_type.sql new file mode 100644 index 00000000..7d766f2a --- /dev/null +++ b/backend/internal/data/migrations/sqlite3/20250625120000_add_mime_type.sql @@ -0,0 +1,3 @@ +-- +goose Up +ALTER TABLE attachments ADD COLUMN mime_type TEXT DEFAULT 'application/octet-stream'; + diff --git a/backend/internal/data/migrations/sqlite3/20250629112902_fix_thumbnail_foriegn.sql 
b/backend/internal/data/migrations/sqlite3/20250629112902_fix_thumbnail_foriegn.sql new file mode 100644 index 00000000..3e4004f2 --- /dev/null +++ b/backend/internal/data/migrations/sqlite3/20250629112902_fix_thumbnail_foriegn.sql @@ -0,0 +1,45 @@ +-- +goose Up +create table attachments_dg_tmp +( + id uuid not null + primary key, + created_at datetime not null, + updated_at datetime not null, + type text default 'attachment' not null, + "primary" bool default false not null, + path text not null, + title text not null, + mime_type text default 'application/octet-stream' not null, + item_attachments uuid + constraint attachments_items_attachments + references items + on delete cascade, + attachment_thumbnail uuid + constraint attachments_attachments_thumbnail + references attachments + on delete set null +); + +insert into attachments_dg_tmp(id, created_at, updated_at, type, "primary", path, title, mime_type, item_attachments, + attachment_thumbnail) +select id, + created_at, + updated_at, + type, + "primary", + path, + title, + mime_type, + item_attachments, + attachment_thumbnail +from attachments; + +drop table attachments; + +alter table attachments_dg_tmp + rename to attachments; + +CREATE INDEX IF NOT EXISTS idx_attachments_item_id ON attachments(item_attachments); +CREATE INDEX IF NOT EXISTS idx_attachments_path ON attachments(path); +CREATE INDEX IF NOT EXISTS idx_attachments_type ON attachments(type); +CREATE INDEX IF NOT EXISTS idx_attachments_thumbnail ON attachments(attachment_thumbnail); diff --git a/backend/internal/data/migrations/sqlite3/20250706190000_fix_old_timestamps.sql b/backend/internal/data/migrations/sqlite3/20250706190000_fix_old_timestamps.sql new file mode 100644 index 00000000..0f3c0ba1 --- /dev/null +++ b/backend/internal/data/migrations/sqlite3/20250706190000_fix_old_timestamps.sql @@ -0,0 +1,126 @@ +-- +goose Up +-- GENERATED with 20250706190000_generate_migration.py +-- Migrating auth_tokens/created_at +update auth_tokens set created_at = substr(created_at,1, instr(created_at, ' +')-1) || substr(created_at, instr(created_at, ' +')+1,3) || ':' || substr(created_at, instr(created_at, ' +')+4,2) where created_at like '% +%'; +update auth_tokens set created_at = substr(created_at,1, instr(created_at, ' -')-1) || substr(created_at, instr(created_at, ' -')+1,3) || ':' || substr(created_at, instr(created_at, ' -')+4,2) where created_at like '% -%'; + +-- Migrating auth_tokens/updated_at +update auth_tokens set updated_at = substr(updated_at,1, instr(updated_at, ' +')-1) || substr(updated_at, instr(updated_at, ' +')+1,3) || ':' || substr(updated_at, instr(updated_at, ' +')+4,2) where updated_at like '% +%'; +update auth_tokens set updated_at = substr(updated_at,1, instr(updated_at, ' -')-1) || substr(updated_at, instr(updated_at, ' -')+1,3) || ':' || substr(updated_at, instr(updated_at, ' -')+4,2) where updated_at like '% -%'; + +-- Migrating auth_tokens/expires_at +update auth_tokens set expires_at = substr(expires_at,1, instr(expires_at, ' +')-1) || substr(expires_at, instr(expires_at, ' +')+1,3) || ':' || substr(expires_at, instr(expires_at, ' +')+4,2) where expires_at like '% +%'; +update auth_tokens set expires_at = substr(expires_at,1, instr(expires_at, ' -')-1) || substr(expires_at, instr(expires_at, ' -')+1,3) || ':' || substr(expires_at, instr(expires_at, ' -')+4,2) where expires_at like '% -%'; + +-- Migrating groups/created_at +update groups set created_at = substr(created_at,1, instr(created_at, ' +')-1) || substr(created_at, instr(created_at, ' 
+')+1,3) || ':' || substr(created_at, instr(created_at, ' +')+4,2) where created_at like '% +%'; +update groups set created_at = substr(created_at,1, instr(created_at, ' -')-1) || substr(created_at, instr(created_at, ' -')+1,3) || ':' || substr(created_at, instr(created_at, ' -')+4,2) where created_at like '% -%'; + +-- Migrating groups/updated_at +update groups set updated_at = substr(updated_at,1, instr(updated_at, ' +')-1) || substr(updated_at, instr(updated_at, ' +')+1,3) || ':' || substr(updated_at, instr(updated_at, ' +')+4,2) where updated_at like '% +%'; +update groups set updated_at = substr(updated_at,1, instr(updated_at, ' -')-1) || substr(updated_at, instr(updated_at, ' -')+1,3) || ':' || substr(updated_at, instr(updated_at, ' -')+4,2) where updated_at like '% -%'; + +-- Migrating group_invitation_tokens/created_at +update group_invitation_tokens set created_at = substr(created_at,1, instr(created_at, ' +')-1) || substr(created_at, instr(created_at, ' +')+1,3) || ':' || substr(created_at, instr(created_at, ' +')+4,2) where created_at like '% +%'; +update group_invitation_tokens set created_at = substr(created_at,1, instr(created_at, ' -')-1) || substr(created_at, instr(created_at, ' -')+1,3) || ':' || substr(created_at, instr(created_at, ' -')+4,2) where created_at like '% -%'; + +-- Migrating group_invitation_tokens/updated_at +update group_invitation_tokens set updated_at = substr(updated_at,1, instr(updated_at, ' +')-1) || substr(updated_at, instr(updated_at, ' +')+1,3) || ':' || substr(updated_at, instr(updated_at, ' +')+4,2) where updated_at like '% +%'; +update group_invitation_tokens set updated_at = substr(updated_at,1, instr(updated_at, ' -')-1) || substr(updated_at, instr(updated_at, ' -')+1,3) || ':' || substr(updated_at, instr(updated_at, ' -')+4,2) where updated_at like '% -%'; + +-- Migrating group_invitation_tokens/expires_at +update group_invitation_tokens set expires_at = substr(expires_at,1, instr(expires_at, ' +')-1) || substr(expires_at, instr(expires_at, ' +')+1,3) || ':' || substr(expires_at, instr(expires_at, ' +')+4,2) where expires_at like '% +%'; +update group_invitation_tokens set expires_at = substr(expires_at,1, instr(expires_at, ' -')-1) || substr(expires_at, instr(expires_at, ' -')+1,3) || ':' || substr(expires_at, instr(expires_at, ' -')+4,2) where expires_at like '% -%'; + +-- Migrating item_fields/created_at +update item_fields set created_at = substr(created_at,1, instr(created_at, ' +')-1) || substr(created_at, instr(created_at, ' +')+1,3) || ':' || substr(created_at, instr(created_at, ' +')+4,2) where created_at like '% +%'; +update item_fields set created_at = substr(created_at,1, instr(created_at, ' -')-1) || substr(created_at, instr(created_at, ' -')+1,3) || ':' || substr(created_at, instr(created_at, ' -')+4,2) where created_at like '% -%'; + +-- Migrating item_fields/updated_at +update item_fields set updated_at = substr(updated_at,1, instr(updated_at, ' +')-1) || substr(updated_at, instr(updated_at, ' +')+1,3) || ':' || substr(updated_at, instr(updated_at, ' +')+4,2) where updated_at like '% +%'; +update item_fields set updated_at = substr(updated_at,1, instr(updated_at, ' -')-1) || substr(updated_at, instr(updated_at, ' -')+1,3) || ':' || substr(updated_at, instr(updated_at, ' -')+4,2) where updated_at like '% -%'; + +-- Migrating item_fields/time_value +update item_fields set time_value = substr(time_value,1, instr(time_value, ' +')-1) || substr(time_value, instr(time_value, ' +')+1,3) || ':' || substr(time_value, instr(time_value, ' 
+')+4,2) where time_value like '% +%'; +update item_fields set time_value = substr(time_value,1, instr(time_value, ' -')-1) || substr(time_value, instr(time_value, ' -')+1,3) || ':' || substr(time_value, instr(time_value, ' -')+4,2) where time_value like '% -%'; + +-- Migrating labels/created_at +update labels set created_at = substr(created_at,1, instr(created_at, ' +')-1) || substr(created_at, instr(created_at, ' +')+1,3) || ':' || substr(created_at, instr(created_at, ' +')+4,2) where created_at like '% +%'; +update labels set created_at = substr(created_at,1, instr(created_at, ' -')-1) || substr(created_at, instr(created_at, ' -')+1,3) || ':' || substr(created_at, instr(created_at, ' -')+4,2) where created_at like '% -%'; + +-- Migrating labels/updated_at +update labels set updated_at = substr(updated_at,1, instr(updated_at, ' +')-1) || substr(updated_at, instr(updated_at, ' +')+1,3) || ':' || substr(updated_at, instr(updated_at, ' +')+4,2) where updated_at like '% +%'; +update labels set updated_at = substr(updated_at,1, instr(updated_at, ' -')-1) || substr(updated_at, instr(updated_at, ' -')+1,3) || ':' || substr(updated_at, instr(updated_at, ' -')+4,2) where updated_at like '% -%'; + +-- Migrating locations/created_at +update locations set created_at = substr(created_at,1, instr(created_at, ' +')-1) || substr(created_at, instr(created_at, ' +')+1,3) || ':' || substr(created_at, instr(created_at, ' +')+4,2) where created_at like '% +%'; +update locations set created_at = substr(created_at,1, instr(created_at, ' -')-1) || substr(created_at, instr(created_at, ' -')+1,3) || ':' || substr(created_at, instr(created_at, ' -')+4,2) where created_at like '% -%'; + +-- Migrating locations/updated_at +update locations set updated_at = substr(updated_at,1, instr(updated_at, ' +')-1) || substr(updated_at, instr(updated_at, ' +')+1,3) || ':' || substr(updated_at, instr(updated_at, ' +')+4,2) where updated_at like '% +%'; +update locations set updated_at = substr(updated_at,1, instr(updated_at, ' -')-1) || substr(updated_at, instr(updated_at, ' -')+1,3) || ':' || substr(updated_at, instr(updated_at, ' -')+4,2) where updated_at like '% -%'; + +-- Migrating maintenance_entries/created_at +update maintenance_entries set created_at = substr(created_at,1, instr(created_at, ' +')-1) || substr(created_at, instr(created_at, ' +')+1,3) || ':' || substr(created_at, instr(created_at, ' +')+4,2) where created_at like '% +%'; +update maintenance_entries set created_at = substr(created_at,1, instr(created_at, ' -')-1) || substr(created_at, instr(created_at, ' -')+1,3) || ':' || substr(created_at, instr(created_at, ' -')+4,2) where created_at like '% -%'; + +-- Migrating maintenance_entries/updated_at +update maintenance_entries set updated_at = substr(updated_at,1, instr(updated_at, ' +')-1) || substr(updated_at, instr(updated_at, ' +')+1,3) || ':' || substr(updated_at, instr(updated_at, ' +')+4,2) where updated_at like '% +%'; +update maintenance_entries set updated_at = substr(updated_at,1, instr(updated_at, ' -')-1) || substr(updated_at, instr(updated_at, ' -')+1,3) || ':' || substr(updated_at, instr(updated_at, ' -')+4,2) where updated_at like '% -%'; + +-- Migrating maintenance_entries/date +update maintenance_entries set date = substr(date,1, instr(date, ' +')-1) || substr(date, instr(date, ' +')+1,3) || ':' || substr(date, instr(date, ' +')+4,2) where date like '% +%'; +update maintenance_entries set date = substr(date,1, instr(date, ' -')-1) || substr(date, instr(date, ' -')+1,3) || ':' || substr(date, 
instr(date, ' -')+4,2) where date like '% -%'; + +-- Migrating maintenance_entries/scheduled_date +update maintenance_entries set scheduled_date = substr(scheduled_date,1, instr(scheduled_date, ' +')-1) || substr(scheduled_date, instr(scheduled_date, ' +')+1,3) || ':' || substr(scheduled_date, instr(scheduled_date, ' +')+4,2) where scheduled_date like '% +%'; +update maintenance_entries set scheduled_date = substr(scheduled_date,1, instr(scheduled_date, ' -')-1) || substr(scheduled_date, instr(scheduled_date, ' -')+1,3) || ':' || substr(scheduled_date, instr(scheduled_date, ' -')+4,2) where scheduled_date like '% -%'; + +-- Migrating notifiers/created_at +update notifiers set created_at = substr(created_at,1, instr(created_at, ' +')-1) || substr(created_at, instr(created_at, ' +')+1,3) || ':' || substr(created_at, instr(created_at, ' +')+4,2) where created_at like '% +%'; +update notifiers set created_at = substr(created_at,1, instr(created_at, ' -')-1) || substr(created_at, instr(created_at, ' -')+1,3) || ':' || substr(created_at, instr(created_at, ' -')+4,2) where created_at like '% -%'; + +-- Migrating notifiers/updated_at +update notifiers set updated_at = substr(updated_at,1, instr(updated_at, ' +')-1) || substr(updated_at, instr(updated_at, ' +')+1,3) || ':' || substr(updated_at, instr(updated_at, ' +')+4,2) where updated_at like '% +%'; +update notifiers set updated_at = substr(updated_at,1, instr(updated_at, ' -')-1) || substr(updated_at, instr(updated_at, ' -')+1,3) || ':' || substr(updated_at, instr(updated_at, ' -')+4,2) where updated_at like '% -%'; + +-- Migrating users/created_at +update users set created_at = substr(created_at,1, instr(created_at, ' +')-1) || substr(created_at, instr(created_at, ' +')+1,3) || ':' || substr(created_at, instr(created_at, ' +')+4,2) where created_at like '% +%'; +update users set created_at = substr(created_at,1, instr(created_at, ' -')-1) || substr(created_at, instr(created_at, ' -')+1,3) || ':' || substr(created_at, instr(created_at, ' -')+4,2) where created_at like '% -%'; + +-- Migrating users/updated_at +update users set updated_at = substr(updated_at,1, instr(updated_at, ' +')-1) || substr(updated_at, instr(updated_at, ' +')+1,3) || ':' || substr(updated_at, instr(updated_at, ' +')+4,2) where updated_at like '% +%'; +update users set updated_at = substr(updated_at,1, instr(updated_at, ' -')-1) || substr(updated_at, instr(updated_at, ' -')+1,3) || ':' || substr(updated_at, instr(updated_at, ' -')+4,2) where updated_at like '% -%'; + +-- Migrating users/activated_on +update users set activated_on = substr(activated_on,1, instr(activated_on, ' +')-1) || substr(activated_on, instr(activated_on, ' +')+1,3) || ':' || substr(activated_on, instr(activated_on, ' +')+4,2) where activated_on like '% +%'; +update users set activated_on = substr(activated_on,1, instr(activated_on, ' -')-1) || substr(activated_on, instr(activated_on, ' -')+1,3) || ':' || substr(activated_on, instr(activated_on, ' -')+4,2) where activated_on like '% -%'; + +-- Migrating items/created_at +update items set created_at = substr(created_at,1, instr(created_at, ' +')-1) || substr(created_at, instr(created_at, ' +')+1,3) || ':' || substr(created_at, instr(created_at, ' +')+4,2) where created_at like '% +%'; +update items set created_at = substr(created_at,1, instr(created_at, ' -')-1) || substr(created_at, instr(created_at, ' -')+1,3) || ':' || substr(created_at, instr(created_at, ' -')+4,2) where created_at like '% -%'; + +-- Migrating items/updated_at +update items set 
updated_at = substr(updated_at,1, instr(updated_at, ' +')-1) || substr(updated_at, instr(updated_at, ' +')+1,3) || ':' || substr(updated_at, instr(updated_at, ' +')+4,2) where updated_at like '% +%'; +update items set updated_at = substr(updated_at,1, instr(updated_at, ' -')-1) || substr(updated_at, instr(updated_at, ' -')+1,3) || ':' || substr(updated_at, instr(updated_at, ' -')+4,2) where updated_at like '% -%'; + +-- Migrating items/warranty_expires +update items set warranty_expires = substr(warranty_expires,1, instr(warranty_expires, ' +')-1) || substr(warranty_expires, instr(warranty_expires, ' +')+1,3) || ':' || substr(warranty_expires, instr(warranty_expires, ' +')+4,2) where warranty_expires like '% +%'; +update items set warranty_expires = substr(warranty_expires,1, instr(warranty_expires, ' -')-1) || substr(warranty_expires, instr(warranty_expires, ' -')+1,3) || ':' || substr(warranty_expires, instr(warranty_expires, ' -')+4,2) where warranty_expires like '% -%'; + +-- Migrating items/purchase_time +update items set purchase_time = substr(purchase_time,1, instr(purchase_time, ' +')-1) || substr(purchase_time, instr(purchase_time, ' +')+1,3) || ':' || substr(purchase_time, instr(purchase_time, ' +')+4,2) where purchase_time like '% +%'; +update items set purchase_time = substr(purchase_time,1, instr(purchase_time, ' -')-1) || substr(purchase_time, instr(purchase_time, ' -')+1,3) || ':' || substr(purchase_time, instr(purchase_time, ' -')+4,2) where purchase_time like '% -%'; + +-- Migrating items/sold_time +update items set sold_time = substr(sold_time,1, instr(sold_time, ' +')-1) || substr(sold_time, instr(sold_time, ' +')+1,3) || ':' || substr(sold_time, instr(sold_time, ' +')+4,2) where sold_time like '% +%'; +update items set sold_time = substr(sold_time,1, instr(sold_time, ' -')-1) || substr(sold_time, instr(sold_time, ' -')+1,3) || ':' || substr(sold_time, instr(sold_time, ' -')+4,2) where sold_time like '% -%'; + +-- Migrating attachments/created_at +update attachments set created_at = substr(created_at,1, instr(created_at, ' +')-1) || substr(created_at, instr(created_at, ' +')+1,3) || ':' || substr(created_at, instr(created_at, ' +')+4,2) where created_at like '% +%'; +update attachments set created_at = substr(created_at,1, instr(created_at, ' -')-1) || substr(created_at, instr(created_at, ' -')+1,3) || ':' || substr(created_at, instr(created_at, ' -')+4,2) where created_at like '% -%'; + +-- Migrating attachments/updated_at +update attachments set updated_at = substr(updated_at,1, instr(updated_at, ' +')-1) || substr(updated_at, instr(updated_at, ' +')+1,3) || ':' || substr(updated_at, instr(updated_at, ' +')+4,2) where updated_at like '% +%'; +update attachments set updated_at = substr(updated_at,1, instr(updated_at, ' -')-1) || substr(updated_at, instr(updated_at, ' -')+1,3) || ':' || substr(updated_at, instr(updated_at, ' -')+4,2) where updated_at like '% -%'; + diff --git a/backend/internal/data/migrations/sqlite3/20250706190000_generate_migration.py b/backend/internal/data/migrations/sqlite3/20250706190000_generate_migration.py new file mode 100755 index 00000000..03b00d76 --- /dev/null +++ b/backend/internal/data/migrations/sqlite3/20250706190000_generate_migration.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +import os + +# Extract fields with +""" WITH tables AS ( + SELECT name AS table_name + FROM sqlite_master + WHERE type = 'table' + AND name NOT LIKE 'sqlite_%' +) + +SELECT + '["' || t.table_name || '", "' || c.name || '"],' AS table_column +FROM tables t +JOIN 
pragma_table_info(t.table_name) c +WHERE c.name like'%date%'; """ + +fields = [["auth_tokens", "created_at"], + ["auth_tokens", "updated_at"], + ["auth_tokens", "expires_at"], + ["groups", "created_at"], + ["groups", "updated_at"], + ["group_invitation_tokens", "created_at"], + ["group_invitation_tokens", "updated_at"], + ["group_invitation_tokens", "expires_at"], + ["item_fields", "created_at"], + ["item_fields", "updated_at"], + ["item_fields", "time_value"], + ["labels", "created_at"], + ["labels", "updated_at"], + ["locations", "created_at"], + ["locations", "updated_at"], + ["maintenance_entries", "created_at"], + ["maintenance_entries", "updated_at"], + ["maintenance_entries", "date"], + ["maintenance_entries", "scheduled_date"], + ["notifiers", "created_at"], + ["notifiers", "updated_at"], + ["users", "created_at"], + ["users", "updated_at"], + ["users", "activated_on"], + ["items", "created_at"], + ["items", "updated_at"], + ["items", "warranty_expires"], + ["items", "purchase_time"], + ["items", "sold_time"], + ["attachments", "created_at"], + ["attachments", "updated_at"]] + + +def generate_migration(table_name, field_name): + return f"""update {table_name} set {field_name} = substr({field_name},1, instr({field_name}, ' +')-1) || substr({field_name}, instr({field_name}, ' +')+1,3) || ':' || substr({field_name}, instr({field_name}, ' +')+4,2) where {field_name} like '% +%';\n""" + \ + f"""update {table_name} set {field_name} = substr({field_name},1, instr({field_name}, ' -')-1) || substr({field_name}, instr({field_name}, ' -')+1,3) || ':' || substr({field_name}, instr({field_name}, ' -')+4,2) where {field_name} like '% -%';""" + + +print("-- +goose Up") +print(f"-- GENERATED with {os.path.basename(__file__)}") +for table, column in fields: + print(f"-- Migrating {table}/{column}") + print(generate_migration(table, column)) + print() diff --git a/backend/internal/data/repo/main_test.go b/backend/internal/data/repo/main_test.go index 04221120..bda60e36 100644 --- a/backend/internal/data/repo/main_test.go +++ b/backend/internal/data/repo/main_test.go @@ -2,6 +2,7 @@ package repo import ( "context" + "github.com/sysadminsmedia/homebox/backend/internal/sys/config" "log" "os" "testing" @@ -55,7 +56,19 @@ func MainNoExit(m *testing.M) int { } tClient = client - tRepos = New(tClient, tbus, os.TempDir()) + tRepos = New(tClient, tbus, config.Storage{ + PrefixPath: "/", + ConnString: "file://" + os.TempDir(), + }, "mem://{{ .Topic }}", config.Thumbnail{ + Enabled: false, + Width: 0, + Height: 0, + }) + err = os.MkdirAll(os.TempDir()+"/homebox", 0o755) + if err != nil { + return 0 + } + defer func() { _ = client.Close() }() bootstrap() diff --git a/backend/internal/data/repo/pagination.go b/backend/internal/data/repo/pagination.go index d8878d02..2a357331 100644 --- a/backend/internal/data/repo/pagination.go +++ b/backend/internal/data/repo/pagination.go @@ -8,5 +8,10 @@ type PaginationResult[T any] struct { } func calculateOffset(page, pageSize int) int { - return (page - 1) * pageSize + offset := (page - 1) * pageSize + if offset < 0 { + return 0 + } else { + return offset + } } diff --git a/backend/internal/data/repo/repo_item_attachments.go b/backend/internal/data/repo/repo_item_attachments.go index 4d66bf0c..35a310e9 100644 --- a/backend/internal/data/repo/repo_item_attachments.go +++ b/backend/internal/data/repo/repo_item_attachments.go @@ -3,37 +3,69 @@ package repo import ( "bytes" "context" + "crypto/md5" "fmt" + "github.com/evanoberholster/imagemeta" + "github.com/gen2brain/avif" + 
"github.com/gen2brain/heic" + "github.com/gen2brain/jpegxl" + "github.com/gen2brain/webp" + "github.com/rs/zerolog/log" + "github.com/sysadminsmedia/homebox/backend/internal/data/ent/group" + "github.com/sysadminsmedia/homebox/backend/internal/sys/config" + "github.com/sysadminsmedia/homebox/backend/pkgs/utils" + "github.com/zeebo/blake3" + "golang.org/x/image/draw" + "image" "io" - "os" + "io/fs" + "net/http" "path/filepath" + "strings" "time" "github.com/google/uuid" - "github.com/rs/zerolog/log" "github.com/sysadminsmedia/homebox/backend/internal/data/ent" "github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment" - "github.com/sysadminsmedia/homebox/backend/internal/data/ent/entity" - "github.com/sysadminsmedia/homebox/backend/internal/data/ent/group" - "github.com/zeebo/blake3" + "github.com/sysadminsmedia/homebox/backend/internal/data/ent/item" + + "gocloud.dev/blob" + _ "gocloud.dev/blob/azureblob" + _ "gocloud.dev/blob/fileblob" + _ "gocloud.dev/blob/gcsblob" + _ "gocloud.dev/blob/memblob" + _ "gocloud.dev/blob/s3blob" + + "gocloud.dev/pubsub" + _ "gocloud.dev/pubsub/awssnssqs" + _ "gocloud.dev/pubsub/azuresb" + _ "gocloud.dev/pubsub/gcppubsub" + _ "gocloud.dev/pubsub/kafkapubsub" + _ "gocloud.dev/pubsub/mempubsub" + _ "gocloud.dev/pubsub/natspubsub" + _ "gocloud.dev/pubsub/rabbitpubsub" ) // AttachmentRepo is a repository for Attachments table that links Items to their // associated files while also specifying the type of the attachment. type AttachmentRepo struct { - db *ent.Client - dir string + db *ent.Client + storage config.Storage + pubSubConn string + thumbnail config.Thumbnail } type ( ItemAttachment struct { - ID uuid.UUID `json:"id"` - CreatedAt time.Time `json:"createdAt"` - UpdatedAt time.Time `json:"updatedAt"` - Type string `json:"type"` - Primary bool `json:"primary"` - Path string `json:"path"` - Title string `json:"title"` + ID uuid.UUID `json:"id"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + Type string `json:"type"` + Primary bool `json:"primary"` + Path string `json:"path"` + Title string `json:"title"` + MimeType string `json:"mimeType,omitempty"` + Thumbnail *ent.Attachment `json:"thumbnail,omitempty"` } ItemAttachmentUpdate struct { @@ -58,11 +90,25 @@ func ToItemAttachment(attachment *ent.Attachment) ItemAttachment { Primary: attachment.Primary, Path: attachment.Path, Title: attachment.Title, + MimeType: attachment.MimeType, + Thumbnail: attachment.QueryThumbnail().FirstX(context.Background()), } } func (r *AttachmentRepo) path(gid uuid.UUID, hash string) string { - return filepath.Join(r.dir, gid.String(), "documents", hash) + return filepath.Join(r.storage.PrefixPath, gid.String(), "documents", hash) +} + +func (r *AttachmentRepo) GetConnString() string { + if strings.HasPrefix(r.storage.ConnString, "file:///./") { + dir, err := filepath.Abs(strings.TrimPrefix(r.storage.ConnString, "file:///./")) + if err != nil { + log.Err(err).Msg("failed to get absolute path for attachment directory") + return r.storage.ConnString + } + return fmt.Sprintf("file://%s?no_tmp_dir=true", dir) + } + return r.storage.ConnString } func (r *AttachmentRepo) Create(ctx context.Context, itemID uuid.UUID, doc ItemCreateAttachment, typ attachment.Type, primary bool) (*ent.Attachment, error) { @@ -142,31 +188,8 @@ func (r *AttachmentRepo) Create(ctx context.Context, itemID uuid.UUID, doc ItemC return nil, err } - // Prepare for the hashing of the file contents - hashOut := make([]byte, 32) - - // Read all content into a buffer - 
buf := new(bytes.Buffer) - _, err = io.Copy(buf, doc.Content) - if err != nil { - log.Err(err).Msg("failed to read file content") - if rbErr := tx.Rollback(); rbErr != nil { - return nil, rbErr - } - return nil, err - } - // Now the buffer contains all the data, use it for hashing - contentBytes := buf.Bytes() - - // We use blake3 to generate a hash of the file contents, the group ID is used as context to ensure unique hashes - // for the same file across different groups to reduce the chance of collisions - // additionally, the hash can be used to validate the file contents if needed - blake3.DeriveKey(itemGroup.ID.String(), contentBytes, hashOut) - - // Create the file itself - path := r.path(itemGroup.ID, fmt.Sprintf("%x", hashOut)) - parent := filepath.Dir(path) - err = os.MkdirAll(parent, 0755) + // Upload the file to the storage bucket + path, err := r.UploadFile(ctx, itemGroup, doc) if err != nil { log.Err(err).Msg("failed to create parent directory") err := tx.Rollback() @@ -176,39 +199,17 @@ func (r *AttachmentRepo) Create(ctx context.Context, itemID uuid.UUID, doc ItemC return nil, err } - if _, err := os.Stat(path); os.IsNotExist(err) { - file, err := os.Create(path) + limitedReader := io.LimitReader(doc.Content, 1024*128) + file, err := io.ReadAll(limitedReader) + if err != nil { + log.Err(err).Msg("failed to read file content") + err = tx.Rollback() if err != nil { - log.Err(err).Msg("failed to create file") - err := tx.Rollback() - if err != nil { - return nil, err - } - return nil, err - } - - defer func(file *os.File) { - err := file.Close() - if err != nil { - log.Err(err).Msg("failed to close file") - err := tx.Rollback() - if err != nil { - return - } - return - } - }(file) - _, err = file.Write(contentBytes) - if err != nil { - log.Err(err).Msg("failed to copy file contents") - err := tx.Rollback() - if err != nil { - return nil, err - } return nil, err } + return nil, err } - + bldr = bldr.SetMimeType(http.DetectContentType(file[:min(512, len(file))])) bldr = bldr.SetPath(path) attachmentDb, err := bldr.Save(ctx) @@ -225,18 +226,77 @@ func (r *AttachmentRepo) Create(ctx context.Context, itemID uuid.UUID, doc ItemC log.Err(err).Msg("failed to commit transaction") return nil, err } + + if r.thumbnail.Enabled { + pubsubString, err := utils.GenerateSubPubConn(r.pubSubConn, "thumbnails") + if err != nil { + log.Err(err).Msg("failed to generate pubsub connection string") + return nil, err + } + topic, err := pubsub.OpenTopic(ctx, pubsubString) + if err != nil { + log.Err(err).Msg("failed to open pubsub topic") + return nil, err + } + + err = topic.Send(ctx, &pubsub.Message{ + Body: []byte(fmt.Sprintf("attachment_created:%s", attachmentDb.ID.String())), + Metadata: map[string]string{ + "group_id": itemGroup.ID.String(), + "attachment_id": attachmentDb.ID.String(), + "title": doc.Title, + "path": attachmentDb.Path, + }, + }) + if err != nil { + log.Err(err).Msg("failed to send message to topic") + return nil, err + } + } + return attachmentDb, nil } -func (r *AttachmentRepo) Get(ctx context.Context, id uuid.UUID) (*ent.Attachment, error) { - return r.db.Attachment. - Query(). - Where(attachment.ID(id)). - WithEntity(). 
- Only(ctx) +func (r *AttachmentRepo) Get(ctx context.Context, gid uuid.UUID, id uuid.UUID) (*ent.Attachment, error) { + first, err := r.db.Attachment.Query().Where(attachment.ID(id)).Only(ctx) + if err != nil { + return nil, err + } + if first.Type == attachment.TypeThumbnail { + // If the attachment is a thumbnail, get the parent attachment and check if it belongs to the specified group + return r.db.Attachment. + Query(). + Where(attachment.ID(id), + attachment.HasThumbnailWith(attachment.HasItemWith(item.HasGroupWith(group.ID(gid)))), + ). + WithItem(). + WithThumbnail(). + Only(ctx) + } else { + // For regular attachments, check if the attachment's item belongs to the specified group + return r.db.Attachment. + Query(). + Where(attachment.ID(id), + attachment.HasItemWith(item.HasGroupWith(group.ID(gid))), + ). + WithItem(). + WithThumbnail(). + Only(ctx) + } } -func (r *AttachmentRepo) Update(ctx context.Context, id uuid.UUID, data *ItemAttachmentUpdate) (*ent.Attachment, error) { +func (r *AttachmentRepo) Update(ctx context.Context, gid uuid.UUID, id uuid.UUID, data *ItemAttachmentUpdate) (*ent.Attachment, error) { + // Validate that the attachment belongs to the specified group + _, err := r.db.Attachment.Query(). + Where( + attachment.ID(id), + attachment.HasItemWith(item.HasGroupWith(group.ID(gid))), + ). + Only(ctx) + if err != nil { + return nil, err + } + // TODO: execute within Tx typ := attachment.Type(data.Type) @@ -260,25 +320,34 @@ func (r *AttachmentRepo) Update(ctx context.Context, id uuid.UUID, data *ItemAtt return nil, err } - // Ensure all other attachments are not primary - err = r.db.Attachment.Update(). - Where( - attachment.HasEntityWith(entity.ID(attachmentItem.ID)), - attachment.IDNEQ(updatedAttachment.ID), - ). - SetPrimary(false). - Exec(ctx) - if err != nil { - return nil, err + // Only remove primary status from other photo attachments when setting a new photo as primary + if typ == attachment.TypePhoto && data.Primary { + err = r.db.Attachment.Update(). + Where( + attachment.HasItemWith(item.ID(attachmentItem.ID)), + attachment.IDNEQ(updatedAttachment.ID), + attachment.TypeEQ(attachment.TypePhoto), + ). + SetPrimary(false). + Exec(ctx) + if err != nil { + return nil, err + } } - return r.Get(ctx, updatedAttachment.ID) + return r.Get(ctx, gid, updatedAttachment.ID) } -func (r *AttachmentRepo) Delete(ctx context.Context, id uuid.UUID) error { - doc, error := r.db.Attachment.Get(ctx, id) - if error != nil { - return error +func (r *AttachmentRepo) Delete(ctx context.Context, gid uuid.UUID, itemId uuid.UUID, id uuid.UUID) error { + // Validate that the attachment belongs to the specified group + doc, err := r.db.Attachment.Query(). + Where( + attachment.ID(id), + attachment.HasItemWith(item.HasGroupWith(group.ID(gid))), + ). 
+ Only(ctx) + if err != nil { + return err + } all, err := r.db.Attachment.Query().Where(attachment.Path(doc.Path)).All(ctx) @@ -288,7 +357,40 @@ // If this is the last attachment for this path, delete the file if len(all) == 1 { - err := os.Remove(doc.Path) + thumb, err := doc.QueryThumbnail().First(ctx) + if err != nil && !ent.IsNotFound(err) { + log.Err(err).Msg("failed to query thumbnail for attachment") + return err + } + if thumb != nil { + thumbBucket, err := blob.OpenBucket(ctx, r.GetConnString()) + if err != nil { + log.Err(err).Msg("failed to open bucket for thumbnail deletion") + return err + } + err = thumbBucket.Delete(ctx, thumb.Path) + if err != nil { + return err + } + _ = doc.Update().SetNillableThumbnailID(nil).SaveX(ctx) + _ = thumb.Update().SetNillableThumbnailID(nil).SaveX(ctx) + err = r.db.Attachment.DeleteOneID(thumb.ID).Exec(ctx) + if err != nil { + return err + } + } + bucket, err := blob.OpenBucket(ctx, r.GetConnString()) + if err != nil { + log.Err(err).Msg("failed to open bucket") + return err + } + defer func(bucket *blob.Bucket) { + err := bucket.Close() + if err != nil { + log.Err(err).Msg("failed to close bucket") + } + }(bucket) + err = bucket.Delete(ctx, doc.Path) if err != nil { return err } @@ -297,6 +399,456 @@ return r.db.Attachment.DeleteOneID(id).Exec(ctx) } -func (r *AttachmentRepo) Rename(ctx context.Context, id uuid.UUID, title string) (*ent.Attachment, error) { +func (r *AttachmentRepo) Rename(ctx context.Context, gid uuid.UUID, id uuid.UUID, title string) (*ent.Attachment, error) { + // Validate that the attachment belongs to the specified group + _, err := r.db.Attachment.Query(). + Where( + attachment.ID(id), + attachment.HasItemWith(item.HasGroupWith(group.ID(gid))), + ). + Only(ctx) + if err != nil { + return nil, err + } + + return r.db.Attachment.UpdateOneID(id).SetTitle(title).Save(ctx) } + +//nolint:gocyclo +func (r *AttachmentRepo) CreateThumbnail(ctx context.Context, groupId, attachmentId uuid.UUID, title string, path string) error { + log.Debug().Msg("starting thumbnail creation") + tx, err := r.db.Tx(ctx) + if err != nil { + return err + } + // Roll back the database transaction if thumbnail creation panics + defer func() { + if v := recover(); v != nil { + err := tx.Rollback() + if err != nil { + return + } + } + }() + + log.Debug().Msg("set initial database transaction") + att := tx.Attachment.Create(). + SetID(uuid.New()). + SetTitle(fmt.Sprintf("%s-thumb", title)).
+ SetType("thumbnail") + + log.Debug().Msg("opening original file") + bucket, err := blob.OpenBucket(ctx, r.GetConnString()) + if err != nil { + log.Err(err).Msg("failed to open bucket") + err := tx.Rollback() + if err != nil { + return err + } + return err + } + defer func(bucket *blob.Bucket) { + err := bucket.Close() + if err != nil { + err := tx.Rollback() + if err != nil { + return + } + log.Err(err).Msg("failed to close bucket") + } + }(bucket) + + origFile, err := bucket.Open(path) + if err != nil { + err := tx.Rollback() + if err != nil { + return err + } + return err + } + defer func(file fs.File) { + err := file.Close() + if err != nil { + err := tx.Rollback() + if err != nil { + return + } + log.Err(err).Msg("failed to close file") + } + }(origFile) + + log.Debug().Msg("stat original file for file size") + stats, err := origFile.Stat() + if err != nil { + err := tx.Rollback() + if err != nil { + return err + } + log.Err(err).Msg("failed to stat original file") + return err + } + + if stats.Size() > 100*1024*1024 { + return fmt.Errorf("original file %s is too large to create a thumbnail", title) + } + + log.Debug().Msg("reading original file content") + contentBytes, err := io.ReadAll(origFile) + if err != nil { + err := tx.Rollback() + if err != nil { + return err + } + log.Err(err).Msg("failed to read original file content") + return err + } + + log.Debug().Msg("detecting content type of original file") + contentType := http.DetectContentType(contentBytes[:min(512, len(contentBytes))]) + + if contentType == "application/octet-stream" { + switch { + case strings.HasSuffix(title, ".heic") || strings.HasSuffix(title, ".heif"): + contentType = "image/heic" + case strings.HasSuffix(title, ".avif"): + contentType = "image/avif" + case strings.HasSuffix(title, ".jxl"): + contentType = "image/jxl" + } + } + + switch { + case isImageFile(contentType): + log.Debug().Msg("creating thumbnail for image file") + img, _, err := image.Decode(bytes.NewReader(contentBytes)) + if err != nil { + log.Err(err).Msg("failed to decode image file") + err := tx.Rollback() + if err != nil { + log.Err(err).Msg("failed to rollback transaction") + return err + } + return err + } + log.Debug().Msg("reading original file orientation") + imageMeta, err := imagemeta.Decode(bytes.NewReader(contentBytes)) + if err != nil { + log.Err(err).Msg("failed to decode original file content") + err := tx.Rollback() + if err != nil { + return err + } + return err + } + orientation := uint16(imageMeta.Orientation) + thumbnailPath, err := r.processThumbnailFromImage(ctx, groupId, img, title, orientation) + if err != nil { + err := tx.Rollback() + if err != nil { + return err + } + return err + } + att.SetPath(thumbnailPath) + case contentType == "image/webp": + log.Debug().Msg("creating thumbnail for webp file") + img, err := webp.Decode(bytes.NewReader(contentBytes)) + if err != nil { + log.Err(err).Msg("failed to decode webp image") + err := tx.Rollback() + if err != nil { + return err + } + return err + } + log.Debug().Msg("reading original file orientation") + imageMeta, err := imagemeta.Decode(bytes.NewReader(contentBytes)) + if err != nil { + log.Err(err).Msg("failed to decode original file content") + err := tx.Rollback() + if err != nil { + return err + } + return err + } + orientation := uint16(imageMeta.Orientation) + thumbnailPath, err := r.processThumbnailFromImage(ctx, groupId, img, title, orientation) + if err != nil { + err := tx.Rollback() + if err != nil { + return err + } + return err + } + 
att.SetPath(thumbnailPath) + case contentType == "image/avif": + log.Debug().Msg("creating thumbnail for avif file") + img, err := avif.Decode(bytes.NewReader(contentBytes)) + if err != nil { + log.Err(err).Msg("failed to decode avif image") + err := tx.Rollback() + if err != nil { + return err + } + return err + } + thumbnailPath, err := r.processThumbnailFromImage(ctx, groupId, img, title, uint16(1)) + if err != nil { + err := tx.Rollback() + if err != nil { + return err + } + return err + } + att.SetPath(thumbnailPath) + case contentType == "image/heic" || contentType == "image/heif": + log.Debug().Msg("creating thumbnail for heic file") + img, err := heic.Decode(bytes.NewReader(contentBytes)) + if err != nil { + log.Err(err).Msg("failed to decode heic image") + err := tx.Rollback() + if err != nil { + return err + } + return err + } + log.Debug().Msg("reading original file orientation") + imageMeta, err := imagemeta.Decode(bytes.NewReader(contentBytes)) + if err != nil { + log.Err(err).Msg("failed to decode original file content") + err := tx.Rollback() + if err != nil { + return err + } + return err + } + orientation := uint16(imageMeta.Orientation) + thumbnailPath, err := r.processThumbnailFromImage(ctx, groupId, img, title, orientation) + if err != nil { + err := tx.Rollback() + if err != nil { + return err + } + return err + } + att.SetPath(thumbnailPath) + case contentType == "image/jxl": + log.Debug().Msg("creating thumbnail for jpegxl file") + img, err := jpegxl.Decode(bytes.NewReader(contentBytes)) + if err != nil { + log.Err(err).Msg("failed to decode jpegxl image") + err := tx.Rollback() + if err != nil { + return err + } + return err + } + thumbnailPath, err := r.processThumbnailFromImage(ctx, groupId, img, title, uint16(1)) + if err != nil { + err := tx.Rollback() + if err != nil { + return err + } + return err + } + att.SetPath(thumbnailPath) + default: + return fmt.Errorf("file type %s is not supported for thumbnail creation or document thumbnails are disabled", title) + } + + att.SetMimeType("image/webp") + + log.Debug().Msg("saving thumbnail attachment to database") + thumbnail, err := att.Save(ctx) + if err != nil { + return err + } + + _, err = tx.Attachment.UpdateOneID(attachmentId).SetThumbnail(thumbnail).Save(ctx) + if err != nil { + return err + } + + log.Debug().Msg("finishing thumbnail creation transaction") + if err := tx.Commit(); err != nil { + log.Err(err).Msg("failed to commit transaction") + return err + } + return nil +} + +func (r *AttachmentRepo) CreateMissingThumbnails(ctx context.Context, groupId uuid.UUID) (int, error) { + attachments, err := r.db.Attachment.Query(). + Where( + attachment.HasItemWith(item.HasGroupWith(group.ID(groupId))), + attachment.TypeNEQ("thumbnail"), + ).
+ All(ctx) + if err != nil { + return 0, err + } + + pubsubString, err := utils.GenerateSubPubConn(r.pubSubConn, "thumbnails") + if err != nil { + log.Err(err).Msg("failed to generate pubsub connection string") + } + topic, err := pubsub.OpenTopic(ctx, pubsubString) + if err != nil { + log.Err(err).Msg("failed to open pubsub topic") + } + + count := 0 + for _, attachment := range attachments { + if r.thumbnail.Enabled { + if !attachment.QueryThumbnail().ExistX(ctx) { + if count > 0 && count%100 == 0 { + time.Sleep(2 * time.Second) + } + err = topic.Send(ctx, &pubsub.Message{ + Body: []byte(fmt.Sprintf("attachment_created:%s", attachment.ID.String())), + Metadata: map[string]string{ + "group_id": groupId.String(), + "attachment_id": attachment.ID.String(), + "title": attachment.Title, + "path": attachment.Path, + }, + }) + if err != nil { + log.Err(err).Msg("failed to send message to topic") + continue + } else { + count++ + } + } + } + } + + return count, nil +} + +func (r *AttachmentRepo) UploadFile(ctx context.Context, itemGroup *ent.Group, doc ItemCreateAttachment) (string, error) { + // Prepare for the hashing of the file contents + hashOut := make([]byte, 32) + + // Read all content into a buffer + buf := new(bytes.Buffer) + _, err := io.Copy(buf, doc.Content) + if err != nil { + log.Err(err).Msg("failed to read file content") + return "", err + } + // Now the buffer contains all the data, use it for hashing + contentBytes := buf.Bytes() + + // We use blake3 to generate a hash of the file contents, the group ID is used as context to ensure unique hashes + // for the same file across different groups to reduce the chance of collisions + // additionally, the hash can be used to validate the file contents if needed + blake3.DeriveKey(itemGroup.ID.String(), contentBytes, hashOut) + + // Write the file to the blob storage bucket which might be a local file system or cloud storage + bucket, err := blob.OpenBucket(ctx, r.GetConnString()) + if err != nil { + log.Err(err).Msg("failed to open bucket") + return "", err + } + defer func(bucket *blob.Bucket) { + err := bucket.Close() + if err != nil { + log.Err(err).Msg("failed to close bucket") + } + }(bucket) + md5hash := md5.New() + _, err = md5hash.Write(contentBytes) + if err != nil { + log.Err(err).Msg("failed to generate MD5 hash for storage") + return "", err + } + contentType := http.DetectContentType(contentBytes[:min(512, len(contentBytes))]) + options := &blob.WriterOptions{ + ContentType: contentType, + ContentMD5: md5hash.Sum(nil), + } + path := r.path(itemGroup.ID, fmt.Sprintf("%x", hashOut)) + err = bucket.WriteAll(ctx, path, contentBytes, options) + if err != nil { + log.Err(err).Msg("failed to write file to bucket") + return "", err + } + + return path, nil +} + +func isImageFile(mimetype string) bool { + // Check file extension for image types + return strings.Contains(mimetype, "image/jpeg") || strings.Contains(mimetype, "image/png") || strings.Contains(mimetype, "image/gif") +} + +// calculateThumbnailDimensions calculates new dimensions that preserve aspect ratio +// while fitting within the configured maximum width and height +func calculateThumbnailDimensions(origWidth, origHeight, maxWidth, maxHeight int) (int, int) { + if origWidth <= maxWidth && origHeight <= maxHeight { + return origWidth, origHeight + } + + // Calculate scaling factors for both dimensions + scaleX := float64(maxWidth) / float64(origWidth) + scaleY := float64(maxHeight) / float64(origHeight) + + // Use the smaller scaling factor to ensure both 
dimensions fit + scale := scaleX + if scaleY < scaleX { + scale = scaleY + } + + newWidth := int(float64(origWidth) * scale) + newHeight := int(float64(origHeight) * scale) + + // Ensure we don't get zero dimensions + if newWidth < 1 { + newWidth = 1 + } + if newHeight < 1 { + newHeight = 1 + } + + return newWidth, newHeight +} + +// processThumbnailFromImage handles the common thumbnail processing logic after image decoding +// Returns the thumbnail file path or an error +func (r *AttachmentRepo) processThumbnailFromImage(ctx context.Context, groupId uuid.UUID, img image.Image, title string, orientation uint16) (string, error) { + bounds := img.Bounds() + // Apply EXIF orientation if needed + if orientation > 1 { + img = utils.ApplyOrientation(img, orientation) + bounds = img.Bounds() + } + newWidth, newHeight := calculateThumbnailDimensions(bounds.Dx(), bounds.Dy(), r.thumbnail.Width, r.thumbnail.Height) + dst := image.NewRGBA(image.Rect(0, 0, newWidth, newHeight)) + draw.ApproxBiLinear.Scale(dst, dst.Rect, img, img.Bounds(), draw.Over, nil) + + buf := new(bytes.Buffer) + err := webp.Encode(buf, dst, webp.Options{Quality: 80, Lossless: false}) + if err != nil { + return "", err + } + contentBytes := buf.Bytes() + log.Debug().Msg("uploading thumbnail file") + + // Get the group for uploading the thumbnail + group, err := r.db.Group.Get(ctx, groupId) + if err != nil { + return "", err + } + + thumbnailFile, err := r.UploadFile(ctx, group, ItemCreateAttachment{ + Title: fmt.Sprintf("%s-thumb", title), + Content: bytes.NewReader(contentBytes), + }) + if err != nil { + log.Err(err).Msg("failed to upload thumbnail file") + return "", err + } + + return thumbnailFile, nil +} diff --git a/backend/internal/data/repo/repo_item_attachments_test.go b/backend/internal/data/repo/repo_item_attachments_test.go index c6de35b8..312de2e5 100644 --- a/backend/internal/data/repo/repo_item_attachments_test.go +++ b/backend/internal/data/repo/repo_item_attachments_test.go @@ -18,7 +18,7 @@ func TestAttachmentRepo_Create(t *testing.T) { ids := []uuid.UUID{item.ID} t.Cleanup(func() { for _, id := range ids { - _ = tRepos.Attachments.Delete(context.Background(), id) + _ = tRepos.Attachments.Delete(context.Background(), tGroup.ID, item.ID, id) } }) @@ -69,7 +69,7 @@ func TestAttachmentRepo_Create(t *testing.T) { assert.Equal(t, tt.want.Type, got.Type) - withItems, err := tRepos.Attachments.Get(tt.args.ctx, got.ID) + withItems, err := tRepos.Attachments.Get(tt.args.ctx, tGroup.ID, got.ID) require.NoError(t, err) assert.Equal(t, tt.args.itemID, withItems.Edges.Entity.ID) @@ -86,17 +86,17 @@ func useAttachments(t *testing.T, n int) []*ent.Attachment { ids := make([]uuid.UUID, 0, n) t.Cleanup(func() { for _, id := range ids { - _ = tRepos.Attachments.Delete(context.Background(), id) + _ = tRepos.Attachments.Delete(context.Background(), tGroup.ID, item.ID, id) } }) attachments := make([]*ent.Attachment, n) for i := 0; i < n; i++ { - attachment, err := tRepos.Attachments.Create(context.Background(), item.ID, ItemCreateAttachment{Title: "Test", Content: strings.NewReader("Test String")}, attachment.TypePhoto, true) + attach, err := tRepos.Attachments.Create(context.Background(), item.ID, ItemCreateAttachment{Title: "Test", Content: strings.NewReader("Test String")}, attachment.TypePhoto, true) require.NoError(t, err) - attachments[i] = attachment + attachments[i] = attach - ids = append(ids, attachment.ID) + ids = append(ids, attach.ID) } return attachments @@ -107,13 +107,13 @@ func TestAttachmentRepo_Update(t 
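// --- Editorial sketch, not part of the diff: a worked example for the
// calculateThumbnailDimensions helper defined above, assuming a configured maximum
// thumbnail size of 500x500. A 4000x3000 photo scales by min(500/4000, 500/3000) = 0.125,
// yielding a 500x375 thumbnail that keeps the original aspect ratio.
package example

import "fmt"

func main() {
	origW, origH := 4000, 3000
	maxW, maxH := 500, 500
	// Mirror the scaling logic above: take the smaller factor so both sides fit.
	scaleX := float64(maxW) / float64(origW) // 0.125
	scaleY := float64(maxH) / float64(origH) // ~0.167
	scale := scaleX
	if scaleY < scaleX {
		scale = scaleY
	}
	fmt.Println(int(float64(origW)*scale), int(float64(origH)*scale)) // prints: 500 375
}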
*testing.T) { for _, typ := range []attachment.Type{"photo", "manual", "warranty", "attachment"} { t.Run(string(typ), func(t *testing.T) { - _, err := tRepos.Attachments.Update(context.Background(), entity.ID, &ItemAttachmentUpdate{ + _, err := tRepos.Attachments.Update(context.Background(), tGroup.ID, entity.ID, &ItemAttachmentUpdate{ Type: string(typ), }) require.NoError(t, err) - updated, err := tRepos.Attachments.Get(context.Background(), entity.ID) + updated, err := tRepos.Attachments.Get(context.Background(), tGroup.ID, entity.ID) require.NoError(t, err) assert.Equal(t, typ, updated.Type) }) @@ -122,11 +122,12 @@ func TestAttachmentRepo_Update(t *testing.T) { func TestAttachmentRepo_Delete(t *testing.T) { entity := useAttachments(t, 1)[0] + item := useItems(t, 1)[0] - err := tRepos.Attachments.Delete(context.Background(), entity.ID) + err := tRepos.Attachments.Delete(context.Background(), tGroup.ID, item.ID, entity.ID) require.NoError(t, err) - _, err = tRepos.Attachments.Get(context.Background(), entity.ID) + _, err = tRepos.Attachments.Get(context.Background(), tGroup.ID, entity.ID) require.Error(t, err) } @@ -135,13 +136,13 @@ func TestAttachmentRepo_EnsureSinglePrimaryAttachment(t *testing.T) { attachments := useAttachments(t, 2) setAndVerifyPrimary := func(primaryAttachmentID, nonPrimaryAttachmentID uuid.UUID) { - primaryAttachment, err := tRepos.Attachments.Update(ctx, primaryAttachmentID, &ItemAttachmentUpdate{ + primaryAttachment, err := tRepos.Attachments.Update(ctx, tGroup.ID, primaryAttachmentID, &ItemAttachmentUpdate{ Type: attachment.TypePhoto.String(), Primary: true, }) require.NoError(t, err) - nonPrimaryAttachment, err := tRepos.Attachments.Get(ctx, nonPrimaryAttachmentID) + nonPrimaryAttachment, err := tRepos.Attachments.Get(ctx, tGroup.ID, nonPrimaryAttachmentID) require.NoError(t, err) assert.True(t, primaryAttachment.Primary) @@ -151,3 +152,132 @@ func TestAttachmentRepo_EnsureSinglePrimaryAttachment(t *testing.T) { setAndVerifyPrimary(attachments[0].ID, attachments[1].ID) setAndVerifyPrimary(attachments[1].ID, attachments[0].ID) } + +func TestAttachmentRepo_UpdateNonPhotoDoesNotAffectPrimaryPhoto(t *testing.T) { + ctx := context.Background() + item := useItems(t, 1)[0] + + // Create a photo attachment that will be primary + photoAttachment, err := tRepos.Attachments.Create(ctx, item.ID, ItemCreateAttachment{Title: "Test Photo", Content: strings.NewReader("Photo content")}, attachment.TypePhoto, true) + require.NoError(t, err) + + // Create a manual attachment (non-photo) + manualAttachment, err := tRepos.Attachments.Create(ctx, item.ID, ItemCreateAttachment{Title: "Test Manual", Content: strings.NewReader("Manual content")}, attachment.TypeManual, false) + require.NoError(t, err) + + // Cleanup + t.Cleanup(func() { + _ = tRepos.Attachments.Delete(ctx, tGroup.ID, item.ID, photoAttachment.ID) + _ = tRepos.Attachments.Delete(ctx, tGroup.ID, item.ID, manualAttachment.ID) + }) + + // Verify photo is primary initially + photoAttachment, err = tRepos.Attachments.Get(ctx, tGroup.ID, photoAttachment.ID) + require.NoError(t, err) + assert.True(t, photoAttachment.Primary) + + // Update the manual attachment (this should NOT affect the photo's primary status) + _, err = tRepos.Attachments.Update(ctx, tGroup.ID, manualAttachment.ID, &ItemAttachmentUpdate{ + Type: attachment.TypeManual.String(), + Title: "Updated Manual", + Primary: false, // This should have no effect since it's not a photo + }) + require.NoError(t, err) + + // Verify photo is still primary after updating the 
manual + photoAttachment, err = tRepos.Attachments.Get(ctx, tGroup.ID, photoAttachment.ID) + require.NoError(t, err) + assert.True(t, photoAttachment.Primary, "Photo attachment should remain primary after updating non-photo attachment") + + // Verify manual attachment is not primary + manualAttachment, err = tRepos.Attachments.Get(ctx, tGroup.ID, manualAttachment.ID) + require.NoError(t, err) + assert.False(t, manualAttachment.Primary) +} + +func TestAttachmentRepo_AddingPDFAfterPhotoKeepsPhotoAsPrimary(t *testing.T) { + ctx := context.Background() + item := useItems(t, 1)[0] + + // Step 1: Upload a photo first (this should become primary since it's the first photo) + photoAttachment, err := tRepos.Attachments.Create(ctx, item.ID, ItemCreateAttachment{Title: "Item Photo", Content: strings.NewReader("Photo content")}, attachment.TypePhoto, false) + require.NoError(t, err) + + // Cleanup + t.Cleanup(func() { + _ = tRepos.Attachments.Delete(ctx, tGroup.ID, item.ID, photoAttachment.ID) + }) + + // Verify photo becomes primary automatically (since it's the first photo) + photoAttachment, err = tRepos.Attachments.Get(ctx, tGroup.ID, photoAttachment.ID) + require.NoError(t, err) + assert.True(t, photoAttachment.Primary, "First photo should automatically become primary") + + // Step 2: Add a PDF receipt (this should NOT affect the photo's primary status) + pdfAttachment, err := tRepos.Attachments.Create(ctx, item.ID, ItemCreateAttachment{Title: "Receipt PDF", Content: strings.NewReader("PDF content")}, attachment.TypeReceipt, false) + require.NoError(t, err) + + // Add to cleanup + t.Cleanup(func() { + _ = tRepos.Attachments.Delete(ctx, tGroup.ID, item.ID, pdfAttachment.ID) + }) + + // Step 3: Verify photo is still primary after adding PDF + photoAttachment, err = tRepos.Attachments.Get(ctx, tGroup.ID, photoAttachment.ID) + require.NoError(t, err) + assert.True(t, photoAttachment.Primary, "Photo should remain primary after adding PDF attachment") + + // Verify PDF is not primary + pdfAttachment, err = tRepos.Attachments.Get(ctx, tGroup.ID, pdfAttachment.ID) + require.NoError(t, err) + assert.False(t, pdfAttachment.Primary) + + // Step 4: Test the actual item summary mapping (this is what determines the card display) + updatedItem, err := tRepos.Items.GetOne(ctx, item.ID) + require.NoError(t, err) + + // The item should have the photo's ID as the imageId + assert.NotNil(t, updatedItem.ImageID, "Item should have an imageId") + assert.Equal(t, photoAttachment.ID, *updatedItem.ImageID, "Item's imageId should match the photo attachment ID") +} + +func TestAttachmentRepo_SettingPhotoPrimaryStillWorks(t *testing.T) { + ctx := context.Background() + item := useItems(t, 1)[0] + + // Create two photo attachments + photo1, err := tRepos.Attachments.Create(ctx, item.ID, ItemCreateAttachment{Title: "Photo 1", Content: strings.NewReader("Photo 1 content")}, attachment.TypePhoto, false) + require.NoError(t, err) + + photo2, err := tRepos.Attachments.Create(ctx, item.ID, ItemCreateAttachment{Title: "Photo 2", Content: strings.NewReader("Photo 2 content")}, attachment.TypePhoto, false) + require.NoError(t, err) + + // Cleanup + t.Cleanup(func() { + _ = tRepos.Attachments.Delete(ctx, tGroup.ID, item.ID, photo1.ID) + _ = tRepos.Attachments.Delete(ctx, tGroup.ID, item.ID, photo2.ID) + }) + + // First photo should be primary (since it was created first) + photo1, err = tRepos.Attachments.Get(ctx, tGroup.ID, photo1.ID) + require.NoError(t, err) + assert.True(t, photo1.Primary) + + photo2, err = 
tRepos.Attachments.Get(ctx, tGroup.ID, photo2.ID) + require.NoError(t, err) + assert.False(t, photo2.Primary) + + // Now set photo2 as primary (this should work and remove primary from photo1) + photo2, err = tRepos.Attachments.Update(ctx, tGroup.ID, photo2.ID, &ItemAttachmentUpdate{ + Type: attachment.TypePhoto.String(), + Title: "Photo 2", + Primary: true, + }) + require.NoError(t, err) + assert.True(t, photo2.Primary) + + // Verify photo1 is no longer primary + photo1, err = tRepos.Attachments.Get(ctx, tGroup.ID, photo1.ID) + require.NoError(t, err) + assert.False(t, photo1.Primary, "Photo 1 should no longer be primary after setting Photo 2 as primary") +} diff --git a/backend/internal/data/repo/repo_items.go b/backend/internal/data/repo/repo_items.go index b2bdf623..67135408 100644 --- a/backend/internal/data/repo/repo_items.go +++ b/backend/internal/data/repo/repo_items.go @@ -134,7 +134,8 @@ type ( Location *LocationSummary `json:"location,omitempty" extensions:"x-nullable,x-omitempty"` Labels []LabelSummary `json:"labels"` - ImageID *uuid.UUID `json:"imageId,omitempty"` + ImageID *uuid.UUID `json:"imageId,omitempty" extensions:"x-nullable,x-omitempty"` + ThumbnailId *uuid.UUID `json:"thumbnailId,omitempty" extensions:"x-nullable,x-omitempty"` // Sale details SoldTime time.Time `json:"soldTime"` @@ -189,10 +190,20 @@ func mapItemSummary(item *ent.Entity) ItemSummary { } var imageID *uuid.UUID + var thumbnailID *uuid.UUID if item.Edges.Attachments != nil { for _, a := range item.Edges.Attachments { if a.Primary && a.Type == attachment.TypePhoto { imageID = &a.ID + if a.Edges.Thumbnail != nil { + if a.Edges.Thumbnail.ID != uuid.Nil { + thumbnailID = &a.Edges.Thumbnail.ID + } else { + thumbnailID = nil + } + } else { + thumbnailID = nil + } break } } @@ -215,8 +226,9 @@ func mapItemSummary(item *ent.Entity) ItemSummary { Labels: labels, // Warranty - Insured: item.Insured, - ImageID: imageID, + Insured: item.Insured, + ImageID: imageID, + ThumbnailId: thumbnailID, } } @@ -349,6 +361,9 @@ func (e *ItemsRepository) QueryByGroup(ctx context.Context, gid uuid.UUID, q Ite } if q.Search != "" { + // Use accent-insensitive search predicates that normalize both + // the search query and database field values during comparison. + // For queries without accents, the traditional search is more efficient. 
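The sketch below illustrates what the accent-insensitive predicates are meant to achieve, using the textutils normalization on both sides of an in-memory substring check. The accentInsensitiveContains helper is illustrative only; the query itself relies on the custom ent predicates (ItemNameAccentInsensitiveContains and friends) combined with the plain ContainsFold predicates in the Or block that follows.

```go
package main

import (
	"fmt"
	"strings"

	"github.com/sysadminsmedia/homebox/backend/pkgs/textutils"
)

// accentInsensitiveContains is an in-memory stand-in for the custom ent
// predicates: both the stored value and the query are normalized before the
// substring check, so matches succeed regardless of which side carries accents.
func accentInsensitiveContains(stored, query string) bool {
	return strings.Contains(
		textutils.NormalizeSearchQuery(stored),
		textutils.NormalizeSearchQuery(query),
	)
}

func main() {
	fmt.Println(accentInsensitiveContains("Electrónica de consumo", "electronica")) // true
	fmt.Println(accentInsensitiveContains("cafe", "café"))                          // true
}
```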
qb.Where( entity.Or( entity.NameContainsFold(q.Search), @@ -357,6 +372,21 @@ func (e *ItemsRepository) QueryByGroup(ctx context.Context, gid uuid.UUID, q Ite entity.ModelNumberContainsFold(q.Search), entity.ManufacturerContainsFold(q.Search), entity.NotesContainsFold(q.Search), + item.Or( + // Regular case-insensitive search (fastest) + item.NameContainsFold(q.Search), + item.DescriptionContainsFold(q.Search), + item.SerialNumberContainsFold(q.Search), + item.ModelNumberContainsFold(q.Search), + item.ManufacturerContainsFold(q.Search), + item.NotesContainsFold(q.Search), + // Accent-insensitive search using custom predicates + ent.ItemNameAccentInsensitiveContains(q.Search), + ent.ItemDescriptionAccentInsensitiveContains(q.Search), + ent.ItemSerialNumberAccentInsensitiveContains(q.Search), + ent.ItemModelNumberAccentInsensitiveContains(q.Search), + ent.ItemManufacturerAccentInsensitiveContains(q.Search), + ent.ItemNotesAccentInsensitiveContains(q.Search), ), ) } @@ -467,6 +497,7 @@ func (e *ItemsRepository) QueryByGroup(ctx context.Context, gid uuid.UUID, q Ite aq.Where( attachment.Primary(true), ) + aq.WithThumbnail() }) if q.Page != -1 || q.PageSize != -1 { @@ -581,6 +612,10 @@ func (e *ItemsRepository) Create(ctx context.Context, gid uuid.UUID, data ItemCr SetAssetID(int(data.AssetID)). SetLocationID(data.LocationID) + if data.ParentID != uuid.Nil { + q.SetParentID(data.ParentID) + } + if len(data.LabelIDs) > 0 { q.AddLabelIDs(data.LabelIDs...) } diff --git a/backend/internal/data/repo/repo_items_search_test.go b/backend/internal/data/repo/repo_items_search_test.go new file mode 100644 index 00000000..cb7accf3 --- /dev/null +++ b/backend/internal/data/repo/repo_items_search_test.go @@ -0,0 +1,213 @@ +package repo + +import ( + "testing" + + "github.com/sysadminsmedia/homebox/backend/pkgs/textutils" + "github.com/stretchr/testify/assert" +) + +func TestItemsRepository_AccentInsensitiveSearch(t *testing.T) { + // Test cases for accent-insensitive search + testCases := []struct { + name string + itemName string + searchQuery string + shouldMatch bool + description string + }{ + { + name: "Spanish accented item, search without accents", + itemName: "electrónica", + searchQuery: "electronica", + shouldMatch: true, + description: "Should find 'electrónica' when searching for 'electronica'", + }, + { + name: "Spanish accented item, search with accents", + itemName: "electrónica", + searchQuery: "electrónica", + shouldMatch: true, + description: "Should find 'electrónica' when searching for 'electrónica'", + }, + { + name: "Non-accented item, search with accents", + itemName: "electronica", + searchQuery: "electrónica", + shouldMatch: true, + description: "Should find 'electronica' when searching for 'electrónica' (bidirectional search)", + }, + { + name: "Spanish item with tilde, search without accents", + itemName: "café", + searchQuery: "cafe", + shouldMatch: true, + description: "Should find 'café' when searching for 'cafe'", + }, + { + name: "Spanish item without tilde, search with accents", + itemName: "cafe", + searchQuery: "café", + shouldMatch: true, + description: "Should find 'cafe' when searching for 'café' (bidirectional)", + }, + { + name: "French accented item, search without accents", + itemName: "pére", + searchQuery: "pere", + shouldMatch: true, + description: "Should find 'pére' when searching for 'pere'", + }, + { + name: "French: père without accent, search with accents", + itemName: "pere", + searchQuery: "père", + shouldMatch: true, + description: "Should find 'pere' when 
searching for 'père' (bidirectional)", + }, + { + name: "Mixed case with accents", + itemName: "Electrónica", + searchQuery: "ELECTRONICA", + shouldMatch: true, + description: "Should find 'Electrónica' when searching for 'ELECTRONICA' (case insensitive)", + }, + { + name: "Bidirectional: Non-accented item, search with different accents", + itemName: "cafe", + searchQuery: "café", + shouldMatch: true, + description: "Should find 'cafe' when searching for 'café' (bidirectional)", + }, + { + name: "Bidirectional: Item with accent, search with different accent", + itemName: "résumé", + searchQuery: "resume", + shouldMatch: true, + description: "Should find 'résumé' when searching for 'resume' (bidirectional)", + }, + { + name: "Bidirectional: Spanish ñ to n", + itemName: "espanol", + searchQuery: "español", + shouldMatch: true, + description: "Should find 'espanol' when searching for 'español' (bidirectional ñ)", + }, + { + name: "French: français with accent, search without", + itemName: "français", + searchQuery: "francais", + shouldMatch: true, + description: "Should find 'français' when searching for 'francais'", + }, + { + name: "French: français without accent, search with", + itemName: "francais", + searchQuery: "français", + shouldMatch: true, + description: "Should find 'francais' when searching for 'français' (bidirectional)", + }, + { + name: "French: été with accent, search without", + itemName: "été", + searchQuery: "ete", + shouldMatch: true, + description: "Should find 'été' when searching for 'ete'", + }, + { + name: "French: été without accent, search with", + itemName: "ete", + searchQuery: "été", + shouldMatch: true, + description: "Should find 'ete' when searching for 'été' (bidirectional)", + }, + { + name: "French: hôtel with accent, search without", + itemName: "hôtel", + searchQuery: "hotel", + shouldMatch: true, + description: "Should find 'hôtel' when searching for 'hotel'", + }, + { + name: "French: hôtel without accent, search with", + itemName: "hotel", + searchQuery: "hôtel", + shouldMatch: true, + description: "Should find 'hotel' when searching for 'hôtel' (bidirectional)", + }, + { + name: "French: naïve with accent, search without", + itemName: "naïve", + searchQuery: "naive", + shouldMatch: true, + description: "Should find 'naïve' when searching for 'naive'", + }, + { + name: "French: naïve without accent, search with", + itemName: "naive", + searchQuery: "naïve", + shouldMatch: true, + description: "Should find 'naive' when searching for 'naïve' (bidirectional)", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Test the normalization logic used in the repository + normalizedSearch := textutils.NormalizeSearchQuery(tc.searchQuery) + + // This simulates what happens in the repository + // The original search would find exact matches (case-insensitive) + // The normalized search would find accent-insensitive matches + + // Test that our normalization works as expected + if tc.shouldMatch { + // If it should match, then either the original query should match + // or the normalized query should match when applied to the stored data + assert.NotEqual(t, "", normalizedSearch, "Normalized search should not be empty") + + // The key insight is that we're searching with both the original and normalized queries + // So "electrónica" will be found when searching for "electronica" because: + // 1. Original search: "electronica" doesn't match "electrónica" + // 2. 
Normalized search: "electronica" matches the normalized version + t.Logf("✓ %s: Item '%s' should be found with search '%s' (normalized: '%s')", + tc.description, tc.itemName, tc.searchQuery, normalizedSearch) + } else { + t.Logf("✗ %s: Item '%s' should NOT be found with search '%s' (normalized: '%s')", + tc.description, tc.itemName, tc.searchQuery, normalizedSearch) + } + }) + } +} + +func TestNormalizeSearchQueryIntegration(t *testing.T) { + // Test that the normalization function works correctly + testCases := []struct { + input string + expected string + }{ + {"electrónica", "electronica"}, + {"café", "cafe"}, + {"ELECTRÓNICA", "electronica"}, + {"Café París", "cafe paris"}, + {"hello world", "hello world"}, + // French accented words + {"père", "pere"}, + {"français", "francais"}, + {"été", "ete"}, + {"hôtel", "hotel"}, + {"naïve", "naive"}, + {"PÈRE", "pere"}, + {"FRANÇAIS", "francais"}, + {"ÉTÉ", "ete"}, + {"HÔTEL", "hotel"}, + {"NAÏVE", "naive"}, + } + + for _, tc := range testCases { + t.Run(tc.input, func(t *testing.T) { + result := textutils.NormalizeSearchQuery(tc.input) + assert.Equal(t, tc.expected, result, "Normalization should work correctly") + }) + } +} diff --git a/backend/internal/data/repo/repo_labels.go b/backend/internal/data/repo/repo_labels.go index 03e2b4c1..6238a8b3 100644 --- a/backend/internal/data/repo/repo_labels.go +++ b/backend/internal/data/repo/repo_labels.go @@ -35,6 +35,7 @@ type ( ID uuid.UUID `json:"id"` Name string `json:"name"` Description string `json:"description"` + Color string `json:"color"` CreatedAt time.Time `json:"createdAt"` UpdatedAt time.Time `json:"updatedAt"` } @@ -49,6 +50,7 @@ func mapLabelSummary(label *ent.Label) LabelSummary { ID: label.ID, Name: label.Name, Description: label.Description, + Color: label.Color, CreatedAt: label.CreatedAt, UpdatedAt: label.UpdatedAt, } diff --git a/backend/internal/data/repo/repo_product_search.go b/backend/internal/data/repo/repo_product_search.go new file mode 100644 index 00000000..3e0c6b15 --- /dev/null +++ b/backend/internal/data/repo/repo_product_search.go @@ -0,0 +1,18 @@ +package repo + +type BarcodeProduct struct { + SearchEngineName string `json:"search_engine_name"` + + // Identifications + ModelNumber string `json:"modelNumber"` + Manufacturer string `json:"manufacturer"` + + // Extras + Country string `json:"notes"` + Barcode string `json:"barcode"` + + ImageURL string `json:"imageURL"` + ImageBase64 string `json:"imageBase64"` + + Item ItemCreate `json:"item"` +} diff --git a/backend/internal/data/repo/repos_all.go b/backend/internal/data/repo/repos_all.go index 159a05ad..15103aed 100644 --- a/backend/internal/data/repo/repos_all.go +++ b/backend/internal/data/repo/repos_all.go @@ -4,6 +4,7 @@ package repo import ( "github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus" "github.com/sysadminsmedia/homebox/backend/internal/data/ent" + "github.com/sysadminsmedia/homebox/backend/internal/sys/config" ) // AllRepos is a container for all the repository interfaces @@ -21,7 +22,7 @@ type AllRepos struct { Entities *EntitiesRepository } -func New(db *ent.Client, bus *eventbus.EventBus, root string) *AllRepos { +func New(db *ent.Client, bus *eventbus.EventBus, storage config.Storage, pubSubConn string, thumbnail config.Thumbnail) *AllRepos { return &AllRepos{ Users: &UserRepository{db}, AuthTokens: &TokenRepository{db}, @@ -29,7 +30,7 @@ func New(db *ent.Client, bus *eventbus.EventBus, root string) *AllRepos { Locations: &LocationRepository{db, bus}, Labels: 
&LabelRepository{db, bus}, Items: &ItemsRepository{db, bus}, - Attachments: &AttachmentRepo{db, root}, + Attachments: &AttachmentRepo{db, storage, pubSubConn, thumbnail}, MaintEntry: &MaintenanceEntryRepository{db}, Notifiers: NewNotifierRepository(db), EntityType: &EntityTypeRepository{db, bus}, diff --git a/backend/internal/sys/analytics/analytics.go b/backend/internal/sys/analytics/analytics.go index 8b88d348..6337eb45 100644 --- a/backend/internal/sys/analytics/analytics.go +++ b/backend/internal/sys/analytics/analytics.go @@ -11,6 +11,8 @@ import ( "github.com/rs/zerolog/log" ) +var startTime = time.Now() + type Data struct { Domain string `json:"domain"` Name string `json:"name"` @@ -18,7 +20,7 @@ type Data struct { Props map[string]interface{} `json:"props"` } -func Send(version, buildInfo string) { +func Send(version, buildInfo string) error { hostData, _ := host.Info() analytics := Data{ Domain: "homebox.software", @@ -32,22 +34,23 @@ func Send(version, buildInfo string) { "platform_version": hostData.PlatformVersion, "kernel_arch": hostData.KernelArch, "virt_type": hostData.VirtualizationSystem, + "uptime_min": time.Since(startTime).Minutes(), }, } jsonBody, err := json.Marshal(analytics) if err != nil { log.Error().Err(err).Msg("failed to marshal analytics data") - return + return err } bodyReader := bytes.NewReader(jsonBody) req, err := http.NewRequest("POST", "https://a.sysadmins.zone/api/event", bodyReader) if err != nil { log.Error().Err(err).Msg("failed to create analytics request") - return + return err } req.Header.Set("Content-Type", "application/json") - req.Header.Set("User-Agent", "Homebox/"+version+"/"+buildInfo+" (https://homebox.software)") + req.Header.Set("User-Agent", "Homebox/"+version+"/(https://homebox.software)") client := &http.Client{ Timeout: 10 * time.Second, @@ -56,7 +59,7 @@ func Send(version, buildInfo string) { res, err := client.Do(req) if err != nil { log.Error().Err(err).Msg("failed to send analytics request") - return + return err } defer func() { @@ -65,4 +68,5 @@ func Send(version, buildInfo string) { log.Error().Err(err).Msg("failed to close response body") } }() + return nil } diff --git a/backend/internal/sys/config/conf.go b/backend/internal/sys/config/conf.go index 64ea0e09..10bca909 100644 --- a/backend/internal/sys/config/conf.go +++ b/backend/internal/sys/config/conf.go @@ -28,6 +28,8 @@ type Config struct { Debug DebugConf `yaml:"debug"` Options Options `yaml:"options"` LabelMaker LabelMakerConf `yaml:"labelmaker"` + Thumbnail Thumbnail `yaml:"thumbnail"` + Barcode BarcodeAPIConf `yaml:"barcode"` } type Options struct { @@ -38,6 +40,12 @@ type Options struct { AllowAnalytics bool `yaml:"allow_analytics" conf:"default:false"` } +type Thumbnail struct { + Enabled bool `yaml:"enabled" conf:"default:true"` + Width int `yaml:"width" conf:"default:500"` + Height int `yaml:"height" conf:"default:500"` +} + type DebugConf struct { Enabled bool `yaml:"enabled" conf:"default:false"` Port string `yaml:"port" conf:"default:4000"` @@ -63,6 +71,10 @@ type LabelMakerConf struct { DynamicLength bool `yaml:"bool" conf:"default:true"` } +type BarcodeAPIConf struct { + TokenBarcodespider string `yaml:"token_barcodespider"` +} + // New parses the CLI/Config file and returns a Config struct. If the file argument is an empty string, the // file is not read. If the file is not empty, the file is read and the Config struct is returned. 
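For context, the updated repo.New signature is wired from the loaded configuration roughly as follows. This is a sketch only: the real call site in the API bootstrap is not part of these hunks, the `bootstrap` package and `newRepos` function are hypothetical, and it assumes the Config struct exposes the Storage and Database sections as cfg.Storage and cfg.Database (those fields are declared outside these hunks).

```go
// Package bootstrap is a hypothetical wiring sketch, not the actual API startup code.
package bootstrap

import (
	"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
	"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
	"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
	"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
)

// newRepos shows the shape of the updated repo.New call: the old root-path
// string is replaced by the Storage section, the pub/sub connection string
// (default "mem://{{ .Topic }}") and the Thumbnail settings.
func newRepos(client *ent.Client, bus *eventbus.EventBus, cfg *config.Config) *repo.AllRepos {
	return repo.New(client, bus, cfg.Storage, cfg.Database.PubSubConnString, cfg.Thumbnail)
}
```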
func New(buildstr string, description string) (*Config, error) { diff --git a/backend/internal/sys/config/conf_database.go b/backend/internal/sys/config/conf_database.go index ea060949..4cbbdaa9 100644 --- a/backend/internal/sys/config/conf_database.go +++ b/backend/internal/sys/config/conf_database.go @@ -6,16 +6,21 @@ const ( type Storage struct { // Data is the path to the root directory - Data string `yaml:"data" conf:"default:./.data"` + PrefixPath string `yaml:"prefix_path" conf:"default:.data"` + ConnString string `yaml:"conn_string" conf:"default:file:///./"` } type Database struct { - Driver string `yaml:"driver" conf:"default:sqlite3"` - Username string `yaml:"username"` - Password string `yaml:"password"` - Host string `yaml:"host"` - Port string `yaml:"port"` - Database string `yaml:"database"` - SslMode string `yaml:"ssl_mode"` - SqlitePath string `yaml:"sqlite_path" conf:"default:./.data/homebox.db?_pragma=busy_timeout=999&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite"` + Driver string `yaml:"driver" conf:"default:sqlite3"` + Username string `yaml:"username"` + Password string `yaml:"password"` + Host string `yaml:"host"` + Port string `yaml:"port"` + Database string `yaml:"database"` + SslMode string `yaml:"ssl_mode"` + SslRootCert string `yaml:"ssl_rootcert"` + SslCert string `yaml:"ssl_cert"` + SslKey string `yaml:"ssl_key"` + SqlitePath string `yaml:"sqlite_path" conf:"default:./.data/homebox.db?_pragma=busy_timeout=999&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite"` + PubSubConnString string `yaml:"pubsub_conn_string" conf:"default:mem://{{ .Topic }}"` } diff --git a/backend/pkgs/hasher/password.go b/backend/pkgs/hasher/password.go index a68c8689..796cf201 100644 --- a/backend/pkgs/hasher/password.go +++ b/backend/pkgs/hasher/password.go @@ -1,14 +1,35 @@ package hasher import ( + "crypto/rand" + "crypto/subtle" + "encoding/base64" "fmt" "os" + "strings" + "golang.org/x/crypto/argon2" "golang.org/x/crypto/bcrypt" ) var enabled = true +type params struct { + memory uint32 + iterations uint32 + parallelism uint8 + saltLength uint32 + keyLength uint32 +} + +var p = ¶ms{ + memory: 64 * 1024, + iterations: 3, + parallelism: 2, + saltLength: 16, + keyLength: 32, +} + func init() { // nolint: gochecknoinits disableHas := os.Getenv("UNSAFE_DISABLE_PASSWORD_PROJECTION") == "yes_i_am_sure" @@ -18,20 +39,108 @@ func init() { // nolint: gochecknoinits } } +func GenerateRandomBytes(n uint32) ([]byte, error) { + b := make([]byte, n) + _, err := rand.Read(b) + if err != nil { + return nil, err + } + return b, nil +} + func HashPassword(password string) (string, error) { if !enabled { return password, nil } - bytes, err := bcrypt.GenerateFromPassword([]byte(password), 14) - return string(bytes), err + salt, err := GenerateRandomBytes(p.saltLength) + if err != nil { + return "", err + } + hash := argon2.IDKey([]byte(password), salt, p.iterations, p.memory, p.parallelism, p.keyLength) + + b64Salt := base64.RawStdEncoding.EncodeToString(salt) + b64Hash := base64.RawStdEncoding.EncodeToString(hash) + + encodedHash := fmt.Sprintf("$argon2id$v=%d$m=%d,t=%d,p=%d$%s$%s", argon2.Version, 64*1024, 3, 2, b64Salt, b64Hash) + return encodedHash, err } -func CheckPasswordHash(password, hash string) bool { +// CheckPasswordHash checks if the provided password matches the hash. +// Additionally, it returns a boolean indicating whether the password should be rehashed. 
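HashPassword now produces an encoded Argon2id string of the form `$argon2id$v=19$m=65536,t=3,p=2$<base64 salt>$<base64 key>`, and CheckPasswordHash returns (match, needsRehash), where needsRehash signals that a legacy bcrypt hash verified successfully and should be upgraded. A sketch of the caller-side contract, with a hypothetical package and function name (the real login flow is not shown in these hunks):

```go
// Package authflow is a hypothetical caller used only to illustrate the contract.
package authflow

import "github.com/sysadminsmedia/homebox/backend/pkgs/hasher"

// verifyAndMaybeUpgrade checks a password and, when the stored value is a
// legacy bcrypt hash that matched, re-hashes it with Argon2id so the caller
// can persist the upgraded credential.
func verifyAndMaybeUpgrade(password, stored string) (ok bool, upgraded string, err error) {
	ok, needsRehash := hasher.CheckPasswordHash(password, stored)
	if ok && needsRehash {
		upgraded, err = hasher.HashPassword(password)
	}
	return ok, upgraded, err
}
```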
+func CheckPasswordHash(password, hash string) (bool, bool) { if !enabled { - return password == hash + return password == hash, false } - err := bcrypt.CompareHashAndPassword([]byte(hash), []byte(password)) - return err == nil + // Compare Argon2id hash first + match, err := comparePasswordAndHash(password, hash) + if err != nil || !match { + // If argon2id hash fails or doesn't match, try bcrypt + err := bcrypt.CompareHashAndPassword([]byte(hash), []byte(password)) + if err == nil { + // If bcrypt hash matches, return true and indicate rehashing + return true, true + } else { + // If both fail, return false and indicate no rehashing + return false, false + } + } + return match, false +} + +func comparePasswordAndHash(password, encodedHash string) (match bool, err error) { + // Extract the parameters, salt and derived key from the encoded password + // hash. + p, salt, hash, err := decodeHash(encodedHash) + if err != nil { + return false, err + } + + // Derive the key from the other password using the same parameters. + otherHash := argon2.IDKey([]byte(password), salt, p.iterations, p.memory, p.parallelism, p.keyLength) + + // Check that the contents of the hashed passwords are identical. Note + // that we are using the subtle.ConstantTimeCompare() function for this + // to help prevent timing attacks. + if subtle.ConstantTimeCompare(hash, otherHash) == 1 { + return true, nil + } + return false, nil +} + +func decodeHash(encodedHash string) (p *params, salt, hash []byte, err error) { + vals := strings.Split(encodedHash, "$") + if len(vals) != 6 { + return nil, nil, nil, fmt.Errorf("invalid hash format") + } + + var version int + _, err = fmt.Sscanf(vals[2], "v=%d", &version) + if err != nil { + return nil, nil, nil, err + } + if version != argon2.Version { + return nil, nil, nil, fmt.Errorf("unsupported argon2 version: %d", version) + } + + p = ¶ms{} + _, err = fmt.Sscanf(vals[3], "m=%d,t=%d,p=%d", &p.memory, &p.iterations, &p.parallelism) + if err != nil { + return nil, nil, nil, err + } + + salt, err = base64.RawStdEncoding.Strict().DecodeString(vals[4]) + if err != nil { + return nil, nil, nil, err + } + p.saltLength = uint32(len(salt)) + + hash, err = base64.RawStdEncoding.Strict().DecodeString(vals[5]) + if err != nil { + return nil, nil, nil, err + } + p.keyLength = uint32(len(hash)) + + return p, salt, hash, nil } diff --git a/backend/pkgs/hasher/password_test.go b/backend/pkgs/hasher/password_test.go index 6f9128ef..1585ace5 100644 --- a/backend/pkgs/hasher/password_test.go +++ b/backend/pkgs/hasher/password_test.go @@ -1,11 +1,14 @@ package hasher -import "testing" +import ( + "testing" +) func TestHashPassword(t *testing.T) { t.Parallel() type args struct { - password string + password string + invalidInputs []string } tests := []struct { name string @@ -15,13 +18,29 @@ func TestHashPassword(t *testing.T) { { name: "letters_and_numbers", args: args{ - password: "password123456788", + password: "password123456788", + invalidInputs: []string{"testPassword", "AnotherBadPassword", "ThisShouldNeverWork", "1234567890"}, }, }, { name: "letters_number_and_special", args: args{ - password: "!2afj3214pofajip3142j;fa", + password: "!2afj3214pofajip3142j;fa", + invalidInputs: []string{"testPassword", "AnotherBadPassword", "ThisShouldNeverWork", "1234567890"}, + }, + }, + { + name: "extra_long_password", + args: args{ + password: "this_is_a_very_long_password_that_should_be_hashed_properly_and_still_work_with_the_check_function", + invalidInputs: []string{"testPassword", 
"AnotherBadPassword", "ThisShouldNeverWork", "1234567890"}, + }, + }, + { + name: "empty_password", + args: args{ + password: "", + invalidInputs: []string{"testPassword", "AnotherBadPassword", "ThisShouldNeverWork", "1234567890"}, }, }, } @@ -32,9 +51,17 @@ func TestHashPassword(t *testing.T) { t.Errorf("HashPassword() error = %v, wantErr %v", err, tt.wantErr) return } - if !CheckPasswordHash(tt.args.password, got) { + check, _ := CheckPasswordHash(tt.args.password, got) + if !check { t.Errorf("CheckPasswordHash() failed to validate password=%v against hash=%v", tt.args.password, got) } + + for _, invalid := range tt.args.invalidInputs { + check, _ := CheckPasswordHash(invalid, got) + if check { + t.Errorf("CheckPasswordHash() improperly validated password=%v against hash=%v", invalid, got) + } + } }) } } diff --git a/backend/pkgs/labelmaker/labelmaker.go b/backend/pkgs/labelmaker/labelmaker.go index 7adae223..fc1406e7 100644 --- a/backend/pkgs/labelmaker/labelmaker.go +++ b/backend/pkgs/labelmaker/labelmaker.go @@ -303,8 +303,27 @@ func PrintLabel(cfg *config.Config, params *GenerateParameters) error { commandTemplate := template.Must(template.New("command").Parse(*cfg.LabelMaker.PrintCommand)) builder := &strings.Builder{} + additionalInformation := func() string { + if params.AdditionalInformation != nil { + return *params.AdditionalInformation + } + return "" + }() if err := commandTemplate.Execute(builder, map[string]string{ - "FileName": f.Name(), + "FileName": f.Name(), + "Width": fmt.Sprintf("%d", params.Width), + "Height": fmt.Sprintf("%d", params.Height), + "QrSize": fmt.Sprintf("%d", params.QrSize), + "Margin": fmt.Sprintf("%d", params.Margin), + "ComponentPadding": fmt.Sprintf("%d", params.ComponentPadding), + "TitleText": params.TitleText, + "TitleFontSize": fmt.Sprintf("%f", params.TitleFontSize), + "DescriptionText": params.DescriptionText, + "DescriptionFontSize": fmt.Sprintf("%f", params.DescriptionFontSize), + "AdditionalInformation": additionalInformation, + "Dpi": fmt.Sprintf("%f", params.Dpi), + "URL": params.URL, + "DynamicLength": fmt.Sprintf("%t", params.DynamicLength), }); err != nil { return err } diff --git a/backend/pkgs/textutils/normalize.go b/backend/pkgs/textutils/normalize.go new file mode 100644 index 00000000..4e86235d --- /dev/null +++ b/backend/pkgs/textutils/normalize.go @@ -0,0 +1,40 @@ +package textutils + +import ( + "strings" + "unicode" + + "golang.org/x/text/runes" + "golang.org/x/text/transform" + "golang.org/x/text/unicode/norm" +) + +// RemoveAccents removes accents from text by normalizing Unicode characters +// and removing diacritical marks. This allows for accent-insensitive search. +// +// Example: +// - "electrónica" becomes "electronica" +// - "café" becomes "cafe" +// - "père" becomes "pere" +func RemoveAccents(text string) string { + // Create a transformer that: + // 1. Normalizes to NFD (canonical decomposition) + // 2. Removes diacritical marks (combining characters) + // 3. Normalizes back to NFC (canonical composition) + t := transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC) + + result, _, err := transform.String(t, text) + if err != nil { + // If transformation fails, return the original text + return text + } + + return result +} + +// NormalizeSearchQuery normalizes a search query for accent-insensitive matching. +// This function removes accents and converts to lowercase for consistent search behavior. 
+func NormalizeSearchQuery(query string) string { + normalized := RemoveAccents(query) + return strings.ToLower(normalized) +} diff --git a/backend/pkgs/textutils/normalize_test.go b/backend/pkgs/textutils/normalize_test.go new file mode 100644 index 00000000..8e6bd242 --- /dev/null +++ b/backend/pkgs/textutils/normalize_test.go @@ -0,0 +1,152 @@ +package textutils + +import ( + "strings" + "testing" +) + +func TestRemoveAccents(t *testing.T) { + testCases := []struct { + name string + input string + expected string + }{ + { + name: "Spanish accented characters", + input: "electrónica", + expected: "electronica", + }, + { + name: "Spanish accented characters with tilde", + input: "café", + expected: "cafe", + }, + { + name: "French accented characters", + input: "père", + expected: "pere", + }, + { + name: "German umlauts", + input: "Björk", + expected: "Bjork", + }, + { + name: "Mixed accented characters", + input: "résumé", + expected: "resume", + }, + { + name: "Portuguese accented characters", + input: "João", + expected: "Joao", + }, + { + name: "No accents", + input: "hello world", + expected: "hello world", + }, + { + name: "Empty string", + input: "", + expected: "", + }, + { + name: "Numbers and symbols", + input: "123!@#", + expected: "123!@#", + }, + { + name: "Multiple accents in one word", + input: "été", + expected: "ete", + }, + { + name: "Complex Unicode characters", + input: "français", + expected: "francais", + }, + { + name: "Unicode diacritics", + input: "naïve", + expected: "naive", + }, + { + name: "Unicode combining characters", + input: "e\u0301", // e with combining acute accent + expected: "e", + }, + { + name: "Very long string with accents", + input: strings.Repeat("café", 1000), + expected: strings.Repeat("cafe", 1000), + }, + { + name: "All French accents", + input: "àâäéèêëïîôöùûüÿç", + expected: "aaaeeeeiioouuuyc", + }, + { + name: "All Spanish accents", + input: "áéíóúñüÁÉÍÓÚÑÜ", + expected: "aeiounuAEIOUNU", + }, + { + name: "All German umlauts", + input: "äöüÄÖÜß", + expected: "aouAOUß", + }, + { + name: "Mixed languages", + input: "Français café España niño", + expected: "Francais cafe Espana nino", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + result := RemoveAccents(tc.input) + if result != tc.expected { + t.Errorf("RemoveAccents(%q) = %q, expected %q", tc.input, result, tc.expected) + } + }) + } +} + +func TestNormalizeSearchQuery(t *testing.T) { + testCases := []struct { + name string + input string + expected string + }{ + { + name: "Uppercase with accents", + input: "ELECTRÓNICA", + expected: "electronica", + }, + { + name: "Mixed case with accents", + input: "Electrónica", + expected: "electronica", + }, + { + name: "Multiple words with accents", + input: "Café París", + expected: "cafe paris", + }, + { + name: "No accents mixed case", + input: "Hello World", + expected: "hello world", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + result := NormalizeSearchQuery(tc.input) + if result != tc.expected { + t.Errorf("NormalizeSearchQuery(%q) = %q, expected %q", tc.input, result, tc.expected) + } + }) + } +} diff --git a/backend/pkgs/utils/generator.go b/backend/pkgs/utils/generator.go new file mode 100644 index 00000000..b026d3bc --- /dev/null +++ b/backend/pkgs/utils/generator.go @@ -0,0 +1,27 @@ +// Package utils +package utils + +import ( + "fmt" + "strings" + "text/template" +) + +// GenerateSubPubConn generates a subscription or publication connection string +func 
GenerateSubPubConn(pubSubConn string, topic string) (string, error) { + if strings.Contains(topic, "{{") || strings.Contains(topic, "}}") { + return "", fmt.Errorf("topic contains template placeholders, which is not allowed") + } + builder := &strings.Builder{} + tmpl, err := template.New("subPubConn").Parse(pubSubConn) + if err != nil { + return "", fmt.Errorf("failed to parse template: %w", err) + } + err = tmpl.Execute(builder, map[string]interface{}{ + "Topic": topic, + }) + if err != nil { + return "", fmt.Errorf("failed to parse template: %w", err) + } + return builder.String(), nil +} diff --git a/backend/pkgs/utils/image.go b/backend/pkgs/utils/image.go new file mode 100644 index 00000000..7a527a5d --- /dev/null +++ b/backend/pkgs/utils/image.go @@ -0,0 +1,86 @@ +package utils + +import "image" + +// flipHorizontal will flip the image horizontally. There is a limit of 10000 pixels in either dimension to prevent excessive memory usage. +func flipHorizontal(img image.Image) image.Image { + b := img.Bounds() + if b.Dx() > 10000 || b.Dy() > 10000 { + return img + } + dst := image.NewRGBA(b) + for y := b.Min.Y; y < b.Max.Y; y++ { + for x := b.Min.X; x < b.Max.X; x++ { + dst.Set(b.Max.X-1-(x-b.Min.X), y, img.At(x, y)) + } + } + return dst +} + +// flipVertical will flip the image vertically. There is a limit of 10000 pixels in either dimension to prevent excessive memory usage. +func flipVertical(img image.Image) image.Image { + b := img.Bounds() + if b.Dx() > 10000 || b.Dy() > 10000 { + return img + } + dst := image.NewRGBA(b) + for y := b.Min.Y; y < b.Max.Y; y++ { + for x := b.Min.X; x < b.Max.X; x++ { + dst.Set(x, b.Max.Y-1-(y-b.Min.Y), img.At(x, y)) + } + } + return dst +} + +// rotate90 will rotate the image 90 degrees clockwise. There is a limit of 10000 pixels in either dimension to prevent excessive memory usage. +func rotate90(img image.Image) image.Image { + b := img.Bounds() + if b.Dx() > 10000 || b.Dy() > 10000 { + return img + } + dst := image.NewRGBA(image.Rect(0, 0, b.Dy(), b.Dx())) + for y := b.Min.Y; y < b.Max.Y; y++ { + for x := b.Min.X; x < b.Max.X; x++ { + dst.Set(b.Max.Y-1-y, x, img.At(x, y)) + } + } + return dst +} + +func rotate180(img image.Image) image.Image { + return rotate90(rotate90(img)) +} + +func rotate270(img image.Image) image.Image { + return rotate90(rotate180(img)) +} + +// Applies EXIF orientation using only stdlib +func ApplyOrientation(img image.Image, orientation uint16) image.Image { + if img == nil { + return nil + } + if orientation < 1 || orientation > 8 { + return img // No orientation or invalid orientation + } + switch orientation { + case 1: + return img // No rotation needed + case 2: + return flipHorizontal(img) + case 3: + return rotate180(img) + case 4: + return flipVertical(img) + case 5: + return rotate90(flipHorizontal(img)) + case 6: + return rotate90(img) + case 7: + return rotate270(flipHorizontal(img)) + case 8: + return rotate270(img) + default: + return img + } +} diff --git a/docker-compose.yml b/docker-compose.yml index 6b577608..f576b709 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,9 +3,17 @@ services: image: homebox build: context: . 
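With the default PubSubConnString of `mem://{{ .Topic }}` from conf_database.go, GenerateSubPubConn expands the topic placeholder, and it rejects topics that themselves contain template markers. A small standalone usage sketch:

```go
package main

import (
	"fmt"

	"github.com/sysadminsmedia/homebox/backend/pkgs/utils"
)

func main() {
	// The attachment repo publishes thumbnail work to the "thumbnails" topic,
	// which with the in-memory default expands to "mem://thumbnails".
	conn, err := utils.GenerateSubPubConn("mem://{{ .Topic }}", "thumbnails")
	fmt.Println(conn, err) // mem://thumbnails <nil>

	// Topics that themselves contain template markers are rejected.
	_, err = utils.GenerateSubPubConn("mem://{{ .Topic }}", "{{ .Evil }}")
	fmt.Println(err != nil) // true
}
```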
- dockerfile: ./Dockerfile + dockerfile: ./Dockerfile.rootless args: - COMMIT=head - BUILD_TIME=0001-01-01T00:00:00Z + x-bake: + platforms: + - linux/amd64 + - linux/arm64 + - linux/arm/v7 + environment: + - HBOX_DEBUG=true + - HBOX_LOGGER_LEVEL=-1 ports: - 3100:7745 diff --git a/docs/.vitepress/menus/en.mts b/docs/.vitepress/menus/en.mts index e75ce893..ca1fb40a 100644 --- a/docs/.vitepress/menus/en.mts +++ b/docs/.vitepress/menus/en.mts @@ -5,6 +5,7 @@ export default [ {text: 'Quick Start', link: '/en/quick-start'}, {text: 'Installation', link: '/en/installation'}, {text: 'Configure', link: '/en/configure'}, + {text: 'Storage', link: '/en/configure/storage'}, {text: 'Upgrade Guide', link: '/en/upgrade'}, {text: 'Migration Guide', link: '/en/migration'}, ] diff --git a/docs/en/api/openapi-2.0.json b/docs/en/api/openapi-2.0.json index 92371e8f..1537b810 100644 --- a/docs/en/api/openapi-2.0.json +++ b/docs/en/api/openapi-2.0.json @@ -16,6 +16,31 @@ "host": "demo.homebox.software", "basePath": "/api", "paths": { + "/v1/actions/create-missing-thumbnails": { + "post": { + "security": [ + { + "Bearer": [] + } + ], + "description": "Creates thumbnails for items that are missing them", + "produces": [ + "application/json" + ], + "tags": [ + "Actions" + ], + "summary": "Create Missing Thumbnails", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/v1.ActionAmountResult" + } + } + } + } + }, "/v1/actions/ensure-asset-ids": { "post": { "security": [ @@ -778,8 +803,7 @@ "type": "string", "description": "Type of file", "name": "type", - "in": "formData", - "required": true + "in": "formData" }, { "type": "boolean", @@ -1802,6 +1826,41 @@ } } }, + "/v1/products/search-from-barcode": { + "get": { + "security": [ + { + "Bearer": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Items" + ], + "summary": "Search EAN from Barcode", + "parameters": [ + { + "type": "string", + "description": "barcode to be searched", + "name": "data", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/repo.BarcodeProduct" + } + } + } + } + } + }, "/v1/qrcode": { "get": { "security": [ @@ -2112,6 +2171,42 @@ } }, "definitions": { + "attachment.Type": { + "type": "string", + "enum": [ + "attachment", + "photo", + "manual", + "warranty", + "attachment", + "receipt", + "thumbnail" + ], + "x-enum-varnames": [ + "DefaultType", + "TypePhoto", + "TypeManual", + "TypeWarranty", + "TypeAttachment", + "TypeReceipt", + "TypeThumbnail" + ] + }, + "authroles.Role": { + "type": "string", + "enum": [ + "user", + "admin", + "user", + "attachments" + ], + "x-enum-varnames": [ + "DefaultRole", + "RoleAdmin", + "RoleUser", + "RoleAttachments" + ] + }, "currencies.Currency": { "type": "object", "properties": { @@ -2129,6 +2224,926 @@ } } }, + "ent.Attachment": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the AttachmentQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.AttachmentEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "mime_type": { + "description": "MimeType holds the value of the \"mime_type\" field.", + "type": "string" + }, + "path": { + "description": "Path holds the value of 
the \"path\" field.", + "type": "string" + }, + "primary": { + "description": "Primary holds the value of the \"primary\" field.", + "type": "boolean" + }, + "title": { + "description": "Title holds the value of the \"title\" field.", + "type": "string" + }, + "type": { + "description": "Type holds the value of the \"type\" field.", + "allOf": [ + { + "$ref": "#/definitions/attachment.Type" + } + ] + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.AttachmentEdges": { + "type": "object", + "properties": { + "item": { + "description": "Item holds the value of the item edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Item" + } + ] + }, + "thumbnail": { + "description": "Thumbnail holds the value of the thumbnail edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Attachment" + } + ] + } + } + }, + "ent.AuthRoles": { + "type": "object", + "properties": { + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the AuthRolesQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.AuthRolesEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "integer" + }, + "role": { + "description": "Role holds the value of the \"role\" field.", + "allOf": [ + { + "$ref": "#/definitions/authroles.Role" + } + ] + } + } + }, + "ent.AuthRolesEdges": { + "type": "object", + "properties": { + "token": { + "description": "Token holds the value of the token edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.AuthTokens" + } + ] + } + } + }, + "ent.AuthTokens": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the AuthTokensQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.AuthTokensEdges" + } + ] + }, + "expires_at": { + "description": "ExpiresAt holds the value of the \"expires_at\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "token": { + "description": "Token holds the value of the \"token\" field.", + "type": "array", + "items": { + "type": "integer" + } + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.AuthTokensEdges": { + "type": "object", + "properties": { + "roles": { + "description": "Roles holds the value of the roles edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.AuthRoles" + } + ] + }, + "user": { + "description": "User holds the value of the user edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.User" + } + ] + } + } + }, + "ent.Group": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "currency": { + "description": "Currency holds the value of the \"currency\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the GroupQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.GroupEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" 
field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.GroupEdges": { + "type": "object", + "properties": { + "invitation_tokens": { + "description": "InvitationTokens holds the value of the invitation_tokens edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.GroupInvitationToken" + } + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + }, + "labels": { + "description": "Labels holds the value of the labels edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Label" + } + }, + "locations": { + "description": "Locations holds the value of the locations edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Location" + } + }, + "notifiers": { + "description": "Notifiers holds the value of the notifiers edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Notifier" + } + }, + "users": { + "description": "Users holds the value of the users edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.User" + } + } + } + }, + "ent.GroupInvitationToken": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the GroupInvitationTokenQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.GroupInvitationTokenEdges" + } + ] + }, + "expires_at": { + "description": "ExpiresAt holds the value of the \"expires_at\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "token": { + "description": "Token holds the value of the \"token\" field.", + "type": "array", + "items": { + "type": "integer" + } + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + }, + "uses": { + "description": "Uses holds the value of the \"uses\" field.", + "type": "integer" + } + } + }, + "ent.GroupInvitationTokenEdges": { + "type": "object", + "properties": { + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + } + } + }, + "ent.Item": { + "type": "object", + "properties": { + "archived": { + "description": "Archived holds the value of the \"archived\" field.", + "type": "boolean" + }, + "asset_id": { + "description": "AssetID holds the value of the \"asset_id\" field.", + "type": "integer" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the ItemQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.ItemEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "import_ref": { + "description": "ImportRef holds the value of the \"import_ref\" field.", + "type": "string" + }, + "insured": { + "description": "Insured holds the value of the \"insured\" field.", + "type": "boolean" + }, + "lifetime_warranty": { + "description": 
"LifetimeWarranty holds the value of the \"lifetime_warranty\" field.", + "type": "boolean" + }, + "manufacturer": { + "description": "Manufacturer holds the value of the \"manufacturer\" field.", + "type": "string" + }, + "model_number": { + "description": "ModelNumber holds the value of the \"model_number\" field.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "notes": { + "description": "Notes holds the value of the \"notes\" field.", + "type": "string" + }, + "purchase_from": { + "description": "PurchaseFrom holds the value of the \"purchase_from\" field.", + "type": "string" + }, + "purchase_price": { + "description": "PurchasePrice holds the value of the \"purchase_price\" field.", + "type": "number" + }, + "purchase_time": { + "description": "PurchaseTime holds the value of the \"purchase_time\" field.", + "type": "string" + }, + "quantity": { + "description": "Quantity holds the value of the \"quantity\" field.", + "type": "integer" + }, + "serial_number": { + "description": "SerialNumber holds the value of the \"serial_number\" field.", + "type": "string" + }, + "sold_notes": { + "description": "SoldNotes holds the value of the \"sold_notes\" field.", + "type": "string" + }, + "sold_price": { + "description": "SoldPrice holds the value of the \"sold_price\" field.", + "type": "number" + }, + "sold_time": { + "description": "SoldTime holds the value of the \"sold_time\" field.", + "type": "string" + }, + "sold_to": { + "description": "SoldTo holds the value of the \"sold_to\" field.", + "type": "string" + }, + "sync_child_items_locations": { + "description": "SyncChildItemsLocations holds the value of the \"sync_child_items_locations\" field.", + "type": "boolean" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + }, + "warranty_details": { + "description": "WarrantyDetails holds the value of the \"warranty_details\" field.", + "type": "string" + }, + "warranty_expires": { + "description": "WarrantyExpires holds the value of the \"warranty_expires\" field.", + "type": "string" + } + } + }, + "ent.ItemEdges": { + "type": "object", + "properties": { + "attachments": { + "description": "Attachments holds the value of the attachments edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Attachment" + } + }, + "children": { + "description": "Children holds the value of the children edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + }, + "fields": { + "description": "Fields holds the value of the fields edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.ItemField" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "label": { + "description": "Label holds the value of the label edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Label" + } + }, + "location": { + "description": "Location holds the value of the location edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Location" + } + ] + }, + "maintenance_entries": { + "description": "MaintenanceEntries holds the value of the maintenance_entries edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.MaintenanceEntry" + } + }, + "parent": { + "description": "Parent holds the value of the parent edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Item" + } + ] + } + } + }, + "ent.ItemField": { + 
"type": "object", + "properties": { + "boolean_value": { + "description": "BooleanValue holds the value of the \"boolean_value\" field.", + "type": "boolean" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the ItemFieldQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.ItemFieldEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "number_value": { + "description": "NumberValue holds the value of the \"number_value\" field.", + "type": "integer" + }, + "text_value": { + "description": "TextValue holds the value of the \"text_value\" field.", + "type": "string" + }, + "time_value": { + "description": "TimeValue holds the value of the \"time_value\" field.", + "type": "string" + }, + "type": { + "description": "Type holds the value of the \"type\" field.", + "allOf": [ + { + "$ref": "#/definitions/itemfield.Type" + } + ] + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.ItemFieldEdges": { + "type": "object", + "properties": { + "item": { + "description": "Item holds the value of the item edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Item" + } + ] + } + } + }, + "ent.Label": { + "type": "object", + "properties": { + "color": { + "description": "Color holds the value of the \"color\" field.", + "type": "string" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the LabelQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.LabelEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.LabelEdges": { + "type": "object", + "properties": { + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + } + } + }, + "ent.Location": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the LocationQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.LocationEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": 
"string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.LocationEdges": { + "type": "object", + "properties": { + "children": { + "description": "Children holds the value of the children edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Location" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "items": { + "description": "Items holds the value of the items edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Item" + } + }, + "parent": { + "description": "Parent holds the value of the parent edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Location" + } + ] + } + } + }, + "ent.MaintenanceEntry": { + "type": "object", + "properties": { + "cost": { + "description": "Cost holds the value of the \"cost\" field.", + "type": "number" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "date": { + "description": "Date holds the value of the \"date\" field.", + "type": "string" + }, + "description": { + "description": "Description holds the value of the \"description\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the MaintenanceEntryQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.MaintenanceEntryEdges" + } + ] + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "item_id": { + "description": "ItemID holds the value of the \"item_id\" field.", + "type": "string" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "scheduled_date": { + "description": "ScheduledDate holds the value of the \"scheduled_date\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.MaintenanceEntryEdges": { + "type": "object", + "properties": { + "item": { + "description": "Item holds the value of the item edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Item" + } + ] + } + } + }, + "ent.Notifier": { + "type": "object", + "properties": { + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the NotifierQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.NotifierEdges" + } + ] + }, + "group_id": { + "description": "GroupID holds the value of the \"group_id\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "is_active": { + "description": "IsActive holds the value of the \"is_active\" field.", + "type": "boolean" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + }, + "user_id": { + "description": "UserID holds the value of the \"user_id\" field.", + "type": "string" + } + } + }, + "ent.NotifierEdges": { + "type": "object", + "properties": { + "group": { + 
"description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "user": { + "description": "User holds the value of the user edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.User" + } + ] + } + } + }, + "ent.User": { + "type": "object", + "properties": { + "activated_on": { + "description": "ActivatedOn holds the value of the \"activated_on\" field.", + "type": "string" + }, + "created_at": { + "description": "CreatedAt holds the value of the \"created_at\" field.", + "type": "string" + }, + "edges": { + "description": "Edges holds the relations/edges for other nodes in the graph.\nThe values are being populated by the UserQuery when eager-loading is set.", + "allOf": [ + { + "$ref": "#/definitions/ent.UserEdges" + } + ] + }, + "email": { + "description": "Email holds the value of the \"email\" field.", + "type": "string" + }, + "id": { + "description": "ID of the ent.", + "type": "string" + }, + "is_superuser": { + "description": "IsSuperuser holds the value of the \"is_superuser\" field.", + "type": "boolean" + }, + "name": { + "description": "Name holds the value of the \"name\" field.", + "type": "string" + }, + "role": { + "description": "Role holds the value of the \"role\" field.", + "allOf": [ + { + "$ref": "#/definitions/user.Role" + } + ] + }, + "superuser": { + "description": "Superuser holds the value of the \"superuser\" field.", + "type": "boolean" + }, + "updated_at": { + "description": "UpdatedAt holds the value of the \"updated_at\" field.", + "type": "string" + } + } + }, + "ent.UserEdges": { + "type": "object", + "properties": { + "auth_tokens": { + "description": "AuthTokens holds the value of the auth_tokens edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.AuthTokens" + } + }, + "group": { + "description": "Group holds the value of the group edge.", + "allOf": [ + { + "$ref": "#/definitions/ent.Group" + } + ] + }, + "notifiers": { + "description": "Notifiers holds the value of the notifiers edge.", + "type": "array", + "items": { + "$ref": "#/definitions/ent.Notifier" + } + } + } + }, + "itemfield.Type": { + "type": "string", + "enum": [ + "text", + "number", + "boolean", + "time" + ], + "x-enum-varnames": [ + "TypeText", + "TypeNumber", + "TypeBoolean", + "TypeTime" + ] + }, + "repo.BarcodeProduct": { + "type": "object", + "properties": { + "barcode": { + "type": "string" + }, + "imageBase64": { + "type": "string" + }, + "imageURL": { + "type": "string" + }, + "item": { + "$ref": "#/definitions/repo.ItemCreate" + }, + "manufacturer": { + "type": "string" + }, + "modelNumber": { + "description": "Identifications", + "type": "string" + }, + "notes": { + "description": "Extras", + "type": "string" + }, + "search_engine_name": { + "type": "string" + } + } + }, "repo.Group": { "type": "object", "properties": { @@ -2192,12 +3207,18 @@ "id": { "type": "string" }, + "mimeType": { + "type": "string" + }, "path": { "type": "string" }, "primary": { "type": "boolean" }, + "thumbnail": { + "$ref": "#/definitions/ent.Attachment" + }, "title": { "type": "string" }, @@ -2312,7 +3333,9 @@ "type": "string" }, "imageId": { - "type": "string" + "type": "string", + "x-nullable": true, + "x-omitempty": true }, "insured": { "type": "boolean" @@ -2391,6 +3414,11 @@ "syncChildItemsLocations": { "type": "boolean" }, + "thumbnailId": { + "type": "string", + "x-nullable": true, + "x-omitempty": true + }, "updatedAt": { "type": "string" }, @@ -2449,7 +3477,9 @@ "type": "string" }, "imageId": { - "type": 
"string" + "type": "string", + "x-nullable": true, + "x-omitempty": true }, "insured": { "type": "boolean" @@ -2483,6 +3513,11 @@ "description": "Sale details", "type": "string" }, + "thumbnailId": { + "type": "string", + "x-nullable": true, + "x-omitempty": true + }, "updatedAt": { "type": "string" } @@ -2631,6 +3666,9 @@ "repo.LabelOut": { "type": "object", "properties": { + "color": { + "type": "string" + }, "createdAt": { "type": "string" }, @@ -2651,6 +3689,9 @@ "repo.LabelSummary": { "type": "object", "properties": { + "color": { + "type": "string" + }, "createdAt": { "type": "string" }, @@ -3113,6 +4154,19 @@ } } }, + "user.Role": { + "type": "string", + "enum": [ + "user", + "user", + "owner" + ], + "x-enum-varnames": [ + "DefaultRole", + "RoleUser", + "RoleOwner" + ] + }, "v1.APISummary": { "type": "object", "properties": { diff --git a/docs/en/api/openapi-2.0.yaml b/docs/en/api/openapi-2.0.yaml index d95e008f..f8a2713e 100644 --- a/docs/en/api/openapi-2.0.yaml +++ b/docs/en/api/openapi-2.0.yaml @@ -1,5 +1,35 @@ basePath: /api definitions: + attachment.Type: + enum: + - attachment + - photo + - manual + - warranty + - attachment + - receipt + - thumbnail + type: string + x-enum-varnames: + - DefaultType + - TypePhoto + - TypeManual + - TypeWarranty + - TypeAttachment + - TypeReceipt + - TypeThumbnail + authroles.Role: + enum: + - user + - admin + - user + - attachments + type: string + x-enum-varnames: + - DefaultRole + - RoleAdmin + - RoleUser + - RoleAttachments currencies.Currency: properties: code: @@ -11,6 +41,632 @@ definitions: symbol: type: string type: object + ent.Attachment: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.AttachmentEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the AttachmentQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + mime_type: + description: MimeType holds the value of the "mime_type" field. + type: string + path: + description: Path holds the value of the "path" field. + type: string + primary: + description: Primary holds the value of the "primary" field. + type: boolean + title: + description: Title holds the value of the "title" field. + type: string + type: + allOf: + - $ref: '#/definitions/attachment.Type' + description: Type holds the value of the "type" field. + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.AttachmentEdges: + properties: + item: + allOf: + - $ref: '#/definitions/ent.Item' + description: Item holds the value of the item edge. + thumbnail: + allOf: + - $ref: '#/definitions/ent.Attachment' + description: Thumbnail holds the value of the thumbnail edge. + type: object + ent.AuthRoles: + properties: + edges: + allOf: + - $ref: '#/definitions/ent.AuthRolesEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the AuthRolesQuery when eager-loading is set. + id: + description: ID of the ent. + type: integer + role: + allOf: + - $ref: '#/definitions/authroles.Role' + description: Role holds the value of the "role" field. + type: object + ent.AuthRolesEdges: + properties: + token: + allOf: + - $ref: '#/definitions/ent.AuthTokens' + description: Token holds the value of the token edge. 
+ type: object + ent.AuthTokens: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.AuthTokensEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the AuthTokensQuery when eager-loading is set. + expires_at: + description: ExpiresAt holds the value of the "expires_at" field. + type: string + id: + description: ID of the ent. + type: string + token: + description: Token holds the value of the "token" field. + items: + type: integer + type: array + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.AuthTokensEdges: + properties: + roles: + allOf: + - $ref: '#/definitions/ent.AuthRoles' + description: Roles holds the value of the roles edge. + user: + allOf: + - $ref: '#/definitions/ent.User' + description: User holds the value of the user edge. + type: object + ent.Group: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + currency: + description: Currency holds the value of the "currency" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.GroupEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the GroupQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + name: + description: Name holds the value of the "name" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.GroupEdges: + properties: + invitation_tokens: + description: InvitationTokens holds the value of the invitation_tokens edge. + items: + $ref: '#/definitions/ent.GroupInvitationToken' + type: array + items: + description: Items holds the value of the items edge. + items: + $ref: '#/definitions/ent.Item' + type: array + labels: + description: Labels holds the value of the labels edge. + items: + $ref: '#/definitions/ent.Label' + type: array + locations: + description: Locations holds the value of the locations edge. + items: + $ref: '#/definitions/ent.Location' + type: array + notifiers: + description: Notifiers holds the value of the notifiers edge. + items: + $ref: '#/definitions/ent.Notifier' + type: array + users: + description: Users holds the value of the users edge. + items: + $ref: '#/definitions/ent.User' + type: array + type: object + ent.GroupInvitationToken: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.GroupInvitationTokenEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the GroupInvitationTokenQuery when eager-loading is set. + expires_at: + description: ExpiresAt holds the value of the "expires_at" field. + type: string + id: + description: ID of the ent. + type: string + token: + description: Token holds the value of the "token" field. + items: + type: integer + type: array + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + uses: + description: Uses holds the value of the "uses" field. + type: integer + type: object + ent.GroupInvitationTokenEdges: + properties: + group: + allOf: + - $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. 
+ type: object + ent.Item: + properties: + archived: + description: Archived holds the value of the "archived" field. + type: boolean + asset_id: + description: AssetID holds the value of the "asset_id" field. + type: integer + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.ItemEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the ItemQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + import_ref: + description: ImportRef holds the value of the "import_ref" field. + type: string + insured: + description: Insured holds the value of the "insured" field. + type: boolean + lifetime_warranty: + description: LifetimeWarranty holds the value of the "lifetime_warranty" field. + type: boolean + manufacturer: + description: Manufacturer holds the value of the "manufacturer" field. + type: string + model_number: + description: ModelNumber holds the value of the "model_number" field. + type: string + name: + description: Name holds the value of the "name" field. + type: string + notes: + description: Notes holds the value of the "notes" field. + type: string + purchase_from: + description: PurchaseFrom holds the value of the "purchase_from" field. + type: string + purchase_price: + description: PurchasePrice holds the value of the "purchase_price" field. + type: number + purchase_time: + description: PurchaseTime holds the value of the "purchase_time" field. + type: string + quantity: + description: Quantity holds the value of the "quantity" field. + type: integer + serial_number: + description: SerialNumber holds the value of the "serial_number" field. + type: string + sold_notes: + description: SoldNotes holds the value of the "sold_notes" field. + type: string + sold_price: + description: SoldPrice holds the value of the "sold_price" field. + type: number + sold_time: + description: SoldTime holds the value of the "sold_time" field. + type: string + sold_to: + description: SoldTo holds the value of the "sold_to" field. + type: string + sync_child_items_locations: + description: SyncChildItemsLocations holds the value of the "sync_child_items_locations" + field. + type: boolean + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + warranty_details: + description: WarrantyDetails holds the value of the "warranty_details" field. + type: string + warranty_expires: + description: WarrantyExpires holds the value of the "warranty_expires" field. + type: string + type: object + ent.ItemEdges: + properties: + attachments: + description: Attachments holds the value of the attachments edge. + items: + $ref: '#/definitions/ent.Attachment' + type: array + children: + description: Children holds the value of the children edge. + items: + $ref: '#/definitions/ent.Item' + type: array + fields: + description: Fields holds the value of the fields edge. + items: + $ref: '#/definitions/ent.ItemField' + type: array + group: + allOf: + - $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + label: + description: Label holds the value of the label edge. + items: + $ref: '#/definitions/ent.Label' + type: array + location: + allOf: + - $ref: '#/definitions/ent.Location' + description: Location holds the value of the location edge. 
+ maintenance_entries: + description: MaintenanceEntries holds the value of the maintenance_entries + edge. + items: + $ref: '#/definitions/ent.MaintenanceEntry' + type: array + parent: + allOf: + - $ref: '#/definitions/ent.Item' + description: Parent holds the value of the parent edge. + type: object + ent.ItemField: + properties: + boolean_value: + description: BooleanValue holds the value of the "boolean_value" field. + type: boolean + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.ItemFieldEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the ItemFieldQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + name: + description: Name holds the value of the "name" field. + type: string + number_value: + description: NumberValue holds the value of the "number_value" field. + type: integer + text_value: + description: TextValue holds the value of the "text_value" field. + type: string + time_value: + description: TimeValue holds the value of the "time_value" field. + type: string + type: + allOf: + - $ref: '#/definitions/itemfield.Type' + description: Type holds the value of the "type" field. + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.ItemFieldEdges: + properties: + item: + allOf: + - $ref: '#/definitions/ent.Item' + description: Item holds the value of the item edge. + type: object + ent.Label: + properties: + color: + description: Color holds the value of the "color" field. + type: string + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.LabelEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the LabelQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + name: + description: Name holds the value of the "name" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.LabelEdges: + properties: + group: + allOf: + - $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + items: + description: Items holds the value of the items edge. + items: + $ref: '#/definitions/ent.Item' + type: array + type: object + ent.Location: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.LocationEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the LocationQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + name: + description: Name holds the value of the "name" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.LocationEdges: + properties: + children: + description: Children holds the value of the children edge. 
+ items: + $ref: '#/definitions/ent.Location' + type: array + group: + allOf: + - $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + items: + description: Items holds the value of the items edge. + items: + $ref: '#/definitions/ent.Item' + type: array + parent: + allOf: + - $ref: '#/definitions/ent.Location' + description: Parent holds the value of the parent edge. + type: object + ent.MaintenanceEntry: + properties: + cost: + description: Cost holds the value of the "cost" field. + type: number + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + date: + description: Date holds the value of the "date" field. + type: string + description: + description: Description holds the value of the "description" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.MaintenanceEntryEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the MaintenanceEntryQuery when eager-loading is set. + id: + description: ID of the ent. + type: string + item_id: + description: ItemID holds the value of the "item_id" field. + type: string + name: + description: Name holds the value of the "name" field. + type: string + scheduled_date: + description: ScheduledDate holds the value of the "scheduled_date" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.MaintenanceEntryEdges: + properties: + item: + allOf: + - $ref: '#/definitions/ent.Item' + description: Item holds the value of the item edge. + type: object + ent.Notifier: + properties: + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.NotifierEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the NotifierQuery when eager-loading is set. + group_id: + description: GroupID holds the value of the "group_id" field. + type: string + id: + description: ID of the ent. + type: string + is_active: + description: IsActive holds the value of the "is_active" field. + type: boolean + name: + description: Name holds the value of the "name" field. + type: string + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + user_id: + description: UserID holds the value of the "user_id" field. + type: string + type: object + ent.NotifierEdges: + properties: + group: + allOf: + - $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + user: + allOf: + - $ref: '#/definitions/ent.User' + description: User holds the value of the user edge. + type: object + ent.User: + properties: + activated_on: + description: ActivatedOn holds the value of the "activated_on" field. + type: string + created_at: + description: CreatedAt holds the value of the "created_at" field. + type: string + edges: + allOf: + - $ref: '#/definitions/ent.UserEdges' + description: |- + Edges holds the relations/edges for other nodes in the graph. + The values are being populated by the UserQuery when eager-loading is set. + email: + description: Email holds the value of the "email" field. + type: string + id: + description: ID of the ent. + type: string + is_superuser: + description: IsSuperuser holds the value of the "is_superuser" field. + type: boolean + name: + description: Name holds the value of the "name" field. 
+ type: string + role: + allOf: + - $ref: '#/definitions/user.Role' + description: Role holds the value of the "role" field. + superuser: + description: Superuser holds the value of the "superuser" field. + type: boolean + updated_at: + description: UpdatedAt holds the value of the "updated_at" field. + type: string + type: object + ent.UserEdges: + properties: + auth_tokens: + description: AuthTokens holds the value of the auth_tokens edge. + items: + $ref: '#/definitions/ent.AuthTokens' + type: array + group: + allOf: + - $ref: '#/definitions/ent.Group' + description: Group holds the value of the group edge. + notifiers: + description: Notifiers holds the value of the notifiers edge. + items: + $ref: '#/definitions/ent.Notifier' + type: array + type: object + itemfield.Type: + enum: + - text + - number + - boolean + - time + type: string + x-enum-varnames: + - TypeText + - TypeNumber + - TypeBoolean + - TypeTime + repo.BarcodeProduct: + properties: + barcode: + type: string + imageBase64: + type: string + imageURL: + type: string + item: + $ref: '#/definitions/repo.ItemCreate' + manufacturer: + type: string + modelNumber: + description: Identifications + type: string + notes: + description: Extras + type: string + search_engine_name: + type: string + type: object repo.Group: properties: createdAt: @@ -52,10 +708,14 @@ definitions: type: string id: type: string + mimeType: + type: string path: type: string primary: type: boolean + thumbnail: + $ref: '#/definitions/ent.Attachment' title: type: string type: @@ -134,6 +794,8 @@ definitions: type: string imageId: type: string + x-nullable: true + x-omitempty: true insured: type: boolean labels: @@ -185,6 +847,10 @@ definitions: type: string syncChildItemsLocations: type: boolean + thumbnailId: + type: string + x-nullable: true + x-omitempty: true updatedAt: type: string warrantyDetails: @@ -225,6 +891,8 @@ definitions: type: string imageId: type: string + x-nullable: true + x-omitempty: true insured: type: boolean labels: @@ -246,6 +914,10 @@ definitions: soldTime: description: Sale details type: string + thumbnailId: + type: string + x-nullable: true + x-omitempty: true updatedAt: type: string type: object @@ -351,6 +1023,8 @@ definitions: type: object repo.LabelOut: properties: + color: + type: string createdAt: type: string description: @@ -364,6 +1038,8 @@ definitions: type: object repo.LabelSummary: properties: + color: + type: string createdAt: type: string description: @@ -670,6 +1346,16 @@ definitions: token: type: string type: object + user.Role: + enum: + - user + - user + - owner + type: string + x-enum-varnames: + - DefaultRole + - RoleUser + - RoleOwner v1.APISummary: properties: allowRegistration: @@ -779,6 +1465,21 @@ info: title: Homebox API version: "1.0" paths: + /v1/actions/create-missing-thumbnails: + post: + description: Creates thumbnails for items that are missing them + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/v1.ActionAmountResult' + security: + - Bearer: [] + summary: Create Missing Thumbnails + tags: + - Actions /v1/actions/ensure-asset-ids: post: description: Ensures all items in the database have an asset ID @@ -1174,7 +1875,6 @@ paths: - description: Type of file in: formData name: type - required: true type: string - description: Is this the primary attachment in: formData @@ -1881,6 +2581,27 @@ paths: summary: Test Notifier tags: - Notifiers + /v1/products/search-from-barcode: + get: + parameters: + - description: barcode to be searched + in: query 
+ name: data + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + items: + $ref: '#/definitions/repo.BarcodeProduct' + type: array + security: + - Bearer: [] + summary: Search EAN from Barcode + tags: + - Items /v1/qrcode: get: parameters: diff --git a/docs/en/configure.md b/docs/en/configure/index.md similarity index 72% rename from docs/en/configure.md rename to docs/en/configure/index.md index 1a568a65..b3147d6b 100644 --- a/docs/en/configure.md +++ b/docs/en/configure/index.md @@ -11,7 +11,7 @@ aside: false |-----------------------------------------|----------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | HBOX_MODE | `production` | application mode used for runtime behavior can be one of: `development`, `production` | | HBOX_WEB_PORT | 7745 | port to run the web server on, if you're using docker do not change this | -| HBOX_WEB_HOST | | host to run the web server on, if you're using docker do not change this | +| HBOX_WEB_HOST | | host to run the web server on, if you're using docker do not change this. see below for examples | | HBOX_OPTIONS_ALLOW_REGISTRATION | true | allow users to register themselves | | HBOX_OPTIONS_AUTO_INCREMENT_ASSET_ID | true | auto-increments the asset_id field for new items | | HBOX_OPTIONS_CURRENCY_CONFIG | | json configuration file containing additional currencie | @@ -20,7 +20,8 @@ aside: false | HBOX_WEB_READ_TIMEOUT | 10s | Read timeout of HTTP sever | | HBOX_WEB_WRITE_TIMEOUT | 10s | Write timeout of HTTP server | | HBOX_WEB_IDLE_TIMEOUT | 30s | Idle timeout of HTTP server | -| HBOX_STORAGE_DATA | /data/ | path to the data directory, do not change this if you're using docker | +| HBOX_STORAGE_CONN_STRING | file://./ | path to the data directory, do not change this if you're using docker | +| HBOX_STORAGE_PREFIX_PATH | .data | prefix path for the storage, if not set the storage will be used as is | | HBOX_LOG_LEVEL | `info` | log level to use, can be one of `trace`, `debug`, `info`, `warn`, `error`, `critical` | | HBOX_LOG_FORMAT | `text` | log format to use, can be one of: `text`, `json` | | HBOX_MAILER_HOST | | email host to use, if not set no email provider will be used | @@ -34,10 +35,13 @@ aside: false | HBOX_DATABASE_SQLITE_PATH | ./.data/homebox.db?_pragma=busy_timeout=999&_pragma=journal_mode=WAL&_fk=1 | sets the directory path for Sqlite | | HBOX_DATABASE_HOST | | sets the hostname for a postgres database | | HBOX_DATABASE_PORT | | sets the port for a postgres database | -| HBOX_DATABASE_USERNAME | | sets the username for a postgres connection | -| HBOX_DATABASE_PASSWORD | | sets the password for a postgres connection | +| HBOX_DATABASE_USERNAME | | sets the username for a postgres connection (optional if using cert auth) | +| HBOX_DATABASE_PASSWORD | | sets the password for a postgres connection (optional if using cert auth) | | HBOX_DATABASE_DATABASE | | sets the database for a postgres connection | | HBOX_DATABASE_SSL_MODE | | sets the sslmode for a postgres connection | +| HBOX_DATABASE_SSL_CERT | | sets the sslcert for a postgres connection (should be a path) | +| HBOX_DATABASE_SSL_KEY | | sets the sslkey for a postgres connection (should be a path) | +| HBOX_DATABASE_SSL_ROOTCERT | | sets the sslrootcert for a postgres connection (should be a path) | | HBOX_OPTIONS_CHECK_GITHUB_RELEASE 
| true | check for new github releases | | HBOX_LABEL_MAKER_WIDTH | 526 | width for generated labels in pixels | | HBOX_LABEL_MAKER_HEIGHT | 200 | height for generated labels in pixels | @@ -46,10 +50,91 @@ aside: false | HBOX_LABEL_MAKER_PRINT_COMMAND | | the command to use for printing labels. if empty, label printing is disabled. `{{.FileName}}` in the command will be replaced with the png filename of the label | | HBOX_LABEL_MAKER_DYNAMIC_LENGTH | true | allow label generation with open length. `HBOX_LABEL_MAKER_HEIGHT` is still used for layout and minimal height. If not used, long text may be cut off, but all labels have the same size. | | HBOX_LABEL_MAKER_ADDITIONAL_INFORMATION | | Additional information added to the label like name or phone number | +| HBOX_THUMBNAIL_ENABLED | true | enable thumbnail generation for images, supports PNG, JPEG, AVIF, WEBP, GIF file types | +| HBOX_THUMBNAIL_WIDTH | 500 | width for generated thumbnails in pixels | +| HBOX_THUMBNAIL_HEIGHT | 500 | height for generated thumbnails in pixels | +### HBOX_WEB_HOST examples + +| Value | Notes | +|-----------------------------|------------------------------------------------------------| +| 0.0.0.0 | Visible all interfaces (default behaviour) | +| 127.0.0.1 | Only visible on same host | +| 100.64.0.1 | Only visible on a specific interface (e.g., VPN in a VPS). | +| unix?path=/run/homebox.sock | Listen on unix socket at specified path | +| sysd?name=homebox.socket | Listen on systemd socket | + +For unix and systemd socket address syntax and available options, see the [anyhttp address-syntax documentation](https://pkg.go.dev/go.balki.me/anyhttp#readme-address-syntax). + +#### Private network example + +Below example starts homebox in an isolated network. The process cannot make +any external requests (including check for newer release) and thus more secure. + +```bash +❯ sudo systemd-run --property=PrivateNetwork=yes --uid $UID --pty --same-dir --wait --collect homebox --web-host "unix?path=/run/user/$UID/homebox.sock" +Running as unit: run-p74482-i74483.service +Press ^] three times within 1s to disconnect TTY. +2025/07/11 22:33:29 goose: no migrations to run. current version: 20250706190000 +10:33PM INF ../../../go/src/app/app/api/handlers/v1/v1_ctrl_auth.go:98 > registering auth provider name=local +10:33PM INF ../../../go/src/app/app/api/main.go:275 > Server is running on unix?path=/run/user/1000/homebox.sock +10:33PM ERR ../../../go/src/app/app/api/main.go:403 > failed to get latest github release error="failed to make latest version request: Get \"https://api.github.com/repos/sysadminsmedia/homebox/releases/l +atest\": dial tcp: lookup api.github.com on [::1]:53: read udp [::1]:50951->[::1]:53: read: connection refused" +10:33PM INF ../../../go/src/app/internal/web/mid/logger.go:36 > request received method=GET path=/ rid=hname/PoXyRgt6ol-000001 +10:33PM INF ../../../go/src/app/internal/web/mid/logger.go:41 > request finished method=GET path=/ rid=hname/PoXyRgt6ol-000001 status=0 +``` + +#### Systemd socket example + +In the example below, Homebox listens on a systemd socket securely so that only +the webserver (Caddy) can access it. Other processes/containers on the host +cannot connect to Homebox directly, bypassing the webserver. 
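+
+Once the socket unit shown below is active, you can confirm that Homebox is answering on it with `curl` and its
+`--unix-socket` flag. This is only an illustrative sketch: the `/api/v1/status` endpoint path is an assumption, and the
+socket path must match the `ListenStream=` value in the unit file below. Run the check as a user that can read the
+socket, for example a member of the `caddy` group.
+
+```bash
+# Query the Homebox API status summary through the unix socket (path assumed to match ListenStream= below).
+curl --unix-socket /run/homebox.sock http://localhost/api/v1/status
+```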
+ +File: homebox.socket +```systemd +# /usr/local/lib/systemd/system/homebox.socket +[Unit] +Description=Homebox socket + +[Socket] +ListenStream=/run/homebox.sock +SocketGroup=caddy +SocketMode=0660 + +[Install] +WantedBy=sockets.target +``` + +File: homebox.service +```systemd +# /usr/local/lib/systemd/system/homebox.service +[Unit] +Description=Homebox +After=network.target +Documentation=https://homebox.software + +[Service] +DynamicUser=yes +StateDirectory=homebox +Environment=HBOX_WEB_HOST=sysd?name=homebox.socket +WorkingDirectory=/var/lib/homebox + +ExecStart=/usr/local/bin/homebox + +NoNewPrivileges=yes +CapabilityBoundingSet= +RestrictNamespaces=true +SystemCallFilter=@system-service +``` +Usage: + +```bash +systemctl start homebox.socket +``` ::: warning Security Considerations For postgreSQL in production: + - Do not use the default `postgres` user - Do not use the default `postgres` database - Always use a strong unique password @@ -57,14 +142,16 @@ For postgreSQL in production: - Consider using a connection pooler like `pgbouncer` For SQLite in production: + - Secure file permissions for the database file (e.g. `chmod 600`) - Use a secure directory for the database file - Use a secure backup strategy - Monitor the file size and consider using a different database for large installations -::: + ::: ::: tip CLI Arguments -If you're deploying without docker you can use command line arguments to configure the application. Run `homebox --help` for more information. +If you're deploying without docker you can use command line arguments to configure the application. Run `homebox --help` +for more information. ```sh Usage: api [options] [arguments] @@ -74,7 +161,8 @@ OPTIONS --web-port/$HBOX_WEB_PORT (default: 7745) --web-host/$HBOX_WEB_HOST --web-max-file-upload/$HBOX_WEB_MAX_FILE_UPLOAD (default: 10) ---storage-data/$HBOX_STORAGE_DATA (default: ./.data) +--storage-conn-string/$HBOX_STORAGE_CONN_STRING (default: file://./) +--storage-prefix-path/$HBOX_STORAGE_PREFIX_PATH (default: .data) --log-level/$HBOX_LOG_LEVEL (default: info) --log-format/$HBOX_LOG_FORMAT (default: text) --mailer-host/$HBOX_MAILER_HOST @@ -108,6 +196,10 @@ OPTIONS --label-maker-print-command/$HBOX_LABEL_MAKER_PRINT_COMMAND --label-maker-additional-information/$HBOX_LABEL_MAKER_DYNAMIC_LENGTH (default: true) --label-maker-additional-information/$HBOX_LABEL_MAKER_ADDITIONAL_INFORMATION +--thumbnail-enabled/$HBOX_THUMBNAIL_ENABLED (default: true) +--thumbnail-width/$HBOX_THUMBNAIL_WIDTH (default: 500) +--thumbnail-height/$HBOX_THUMBNAIL_HEIGHT (default: 500) --help/-h display this help message ``` + ::: diff --git a/docs/en/configure/storage.md b/docs/en/configure/storage.md new file mode 100644 index 00000000..8268378d --- /dev/null +++ b/docs/en/configure/storage.md @@ -0,0 +1,97 @@ +# Configure Storage + +## Local Storage + +By default, homebox uses local storage at the `.data` folder relative path to the binary, or `/data` in the docker +container. +You can change the storage path by setting the `HBOX_STORAGE_CONN_STRING` to `file:///full/path/you/want`. The +`HBOX_STORAGE_PREFIX_PATH` +can be used to set a "prefix" for the storage. This "prefix" comes after the path in the connection string. + +::: warning +The local storage path must be writable by the user running the Homebox process. Homebox will automatically create the +default directories, if you change the path you must ensure that the directory specified exists and is writable. 
+::: + +## S3 Storage + +### Authentication + +To authenticate with S3, you will need to set the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables. +Optionally, you can also set `AWS_SESSION_TOKEN` if you are using temporary credentials. + +### AWS S3 + +You can use S3 storage by setting the `HBOX_STORAGE_CONN_STRING` to `s3://my-bucket?region=region-name&awssdk=v2`. + +In this case, the `HBOX_STORAGE_PREFIX_PATH` can be used to set a "prefix" for the storage. This "prefix" comes after +the bucket name in the connection string. + +### S3-Compatible Storage + +You can also use S3-compatible storage by setting the `HBOX_STORAGE_CONN_STRING` to +`s3://my-bucket?awssdk=v2&endpoint=http://my-s3-compatible-endpoint.tld&disableSSL=true&s3ForcePathStyle=true`. + +This allows you to connect to S3-compatible services like MinIO, DigitalOcean Spaces, or any other service that supports +the S3 API. Configure the `disableSSL`, `s3ForcePathStyle`, and `endpoint` parameters as needed for your specific +service. + +#### Tested S3-Compatible Storage + +| Service | Working | Connection String | +|---------------------|---------|--------------------------------------------------------------------------------------------------------------------------| +| MinIO | Yes | `s3://my-bucket?awssdk=v2&endpoint=http://minio:9000&disableSSL=true&s3ForcePathStyle=true` | +| Cloudflare R2 | Yes | `s3://my-bucket?awssdk=v2&endpoint=https://.r2.cloudflarestorage.com&disableSSL=false&s3ForcePathStyle=true` | +| Backblaze B2 | Yes | `s3://my-bucket?awssdk=v2&endpoint=https://s3.us-west-004.backblazeb2.com&disableSSL=false&s3ForcePathStyle=true` | + +::: info +If you know of any other S3-compatible storage that works with Homebox, please let us know or create a pull request to update the table. +::: + +### Extra Connection Parameters + +Additionally, the parameters in the URL can be used to configure specific S3 settings: + +- `region`: The AWS region where the bucket is located. +- `awssdk`: The version of the AWS SDK to use (e.g., `v2`). (We highly recommend using `v2` for better performance and + features.) +- `endpoint`: The custom endpoint for S3-compatible storage services. +- `s3ForcePathStyle`: Whether to force path-style access (set to `true` or `false`). +- `disableSSL`: Whether to disable SSL (set to `true` or `false`). +- `sseType`: The server-side encryption type (e.g., `AES256` or `aws:kms` or `aws:kms:dsse`). +- `kmskeyid`: The KMS key ID for server-side encryption. +- `fips`: Whether to use FIPS endpoints (set to `true` or `false`). +- `dualstack`: Whether to use dual-stack endpoints (set to `true` or `false`). +- `accelerate`: Whether to use S3 Transfer Acceleration (set to `true` or `false`). + + +## Google Cloud Storage + +### Authentication + +To authenticate with Google Cloud Storage, you will need to set the `GOOGLE_APPLICATION_CREDENTIALS` environment +variable to the path of your service account key file. +This file should be in JSON format and contain the necessary credentials to access your Google Cloud Storage bucket and +must be made available to the application if running docker via read-only volume mounts. + +### Using Google Cloud Storage + +You can use Google Cloud Storage by setting the `HBOX_STORAGE_CONN_STRING` to `gcs://my-bucket`. + +## Azure Blob Storage + +### Authentication + +To authenticate with Azure blob storage, you will need to set the `AZURE_STORAGE_ACCOUNT` and `AZURE_STORAGE_KEY` +environment variables. 
Optionally, you can also set `AZURE_STORAGE_SAS_TOKEN` if you are using a Shared Access +Signature (SAS) for authentication. + +### Using Azure Blob Storage + +You can use Azure Blob Storage by setting the `HBOX_STORAGE_CONN_STRING` to `azblob://my-container`. + +### Local Azure Storage Emulator + +If you want to use the local Azure Storage Emulator, you can set the `HBOX_STORAGE_CONN_STRING` to +`azblob://my-container?protocol=http&domain=localhost:10001`. This will allow you to use the emulator for development +and testing purposes. \ No newline at end of file diff --git a/docs/en/installation.md b/docs/en/installation.md index 9c5ad2ef..a7d4b128 100644 --- a/docs/en/installation.md +++ b/docs/en/installation.md @@ -6,7 +6,7 @@ There are two main ways to run the application. 2. Using the correct executable for your platform by downloading it from the [Releases](https://github.com/sysadminsmedia/homebox/releases). ::: info Configuration Options -The application can be configured using environment variables. You can find a list of all available options in the [configuration section](./configure). +The application can be configured using environment variables. You can find a list of all available options in the [configuration section](./configure/index). ::: ## Docker diff --git a/docs/public/_headers b/docs/public/_headers new file mode 100644 index 00000000..6bca5add --- /dev/null +++ b/docs/public/_headers @@ -0,0 +1,4 @@ +/* + X-Frame-Options: DENY + X-Content-Type-Options: nosniff + Content-Security-Policy: default-src 'self'; script-src 'report-sample' 'unsafe-inline' 'self' https://a.sysadmins.zone/js/embed.host.js https://static.cloudflareinsights.com/beacon.min.js/vcd15cbe7772f49c399c6a5babf22c1241717689176015 https://unpkg.com/@stoplight/elements/web-components.min.js; style-src 'report-sample' 'unsafe-inline' 'self' https://unpkg.com; object-src 'none'; base-uri 'self'; connect-src 'self' https://raw.githubusercontent.com; font-src 'self'; frame-src 'self' https://a.sysadmins.zone; img-src 'self' data: http://translate.sysadminsmedia.com; manifest-src 'self'; media-src 'self'; worker-src 'none'; diff --git a/docs/wrangler.toml b/docs/wrangler.toml new file mode 100644 index 00000000..92fcff67 --- /dev/null +++ b/docs/wrangler.toml @@ -0,0 +1,7 @@ +name = "homebox-docs" +compatibility_date = "2025-07-12" +preview_urls = true + +[assets] +directory = ".vitepress/dist" +not_found_handling = "single-page-application" \ No newline at end of file diff --git a/frontend/assets/css/main.css b/frontend/assets/css/main.css index 49f1a826..87b6ac6a 100644 --- a/frontend/assets/css/main.css +++ b/frontend/assets/css/main.css @@ -1042,3 +1042,8 @@ font-size: 0.75rem; font-weight: 700; } + +:root { + --header-height: 4rem; + --header-height-mobile: 7rem; +} \ No newline at end of file diff --git a/frontend/components/App/CreateModal.vue b/frontend/components/App/CreateModal.vue index c82fcf4d..a2efe690 100644 --- a/frontend/components/App/CreateModal.vue +++ b/frontend/components/App/CreateModal.vue @@ -2,16 +2,27 @@ - {{ title }} + + {{ title }} + + - - Use + to create and add - another. 
- + + + + + + + + @@ -21,6 +32,9 @@ {{ title }} + + + @@ -31,13 +45,14 @@ diff --git a/frontend/components/App/HeaderText.vue b/frontend/components/App/HeaderText.vue new file mode 100644 index 00000000..61b66eb0 --- /dev/null +++ b/frontend/components/App/HeaderText.vue @@ -0,0 +1,106 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/frontend/components/App/ImportDialog.vue b/frontend/components/App/ImportDialog.vue index 8f946346..eaf1048d 100644 --- a/frontend/components/App/ImportDialog.vue +++ b/frontend/components/App/ImportDialog.vue @@ -1,5 +1,5 @@ - + {{ $t("components.app.import_dialog.title") }} @@ -37,6 +37,8 @@ diff --git a/frontend/components/App/QuickMenuModal.vue b/frontend/components/App/QuickMenuModal.vue index 428056c6..8ce72f56 100644 --- a/frontend/components/App/QuickMenuModal.vue +++ b/frontend/components/App/QuickMenuModal.vue @@ -1,5 +1,6 @@ - + { - closeDialog('quick-menu'); + closeDialog(DialogID.QuickMenu); navigateTo(navigate.href); } " > {{ navigate.text }} + { + closeDialog(DialogID.QuickMenu); + openDialog(DialogID.Scanner); + } + " + > + {{ t("menu.scanner") }} + diff --git a/frontend/components/App/ScannerModal.vue b/frontend/components/App/ScannerModal.vue new file mode 100644 index 00000000..a5ba56c5 --- /dev/null +++ b/frontend/components/App/ScannerModal.vue @@ -0,0 +1,197 @@ + + + + + {{ t("scanner.title") }} + + + + + {{ errorMessage }} + + + + + + {{ detectedBarcodeType }} {{ $t("scanner.barcode_detected_message") }}: + {{ detectedBarcode }} + + + + + + {{ $t("scanner.barcode_fetch_data") }} + + + + + + + + + + + + + {{ source.label }} + + + + + + + + + + diff --git a/frontend/components/Base/Container.vue b/frontend/components/Base/Container.vue index 097d59d5..13d80719 100644 --- a/frontend/components/Base/Container.vue +++ b/frontend/components/Base/Container.vue @@ -8,7 +8,7 @@ - + diff --git a/frontend/components/Form/ColorSelector.vue b/frontend/components/Form/ColorSelector.vue new file mode 100644 index 00000000..1c6be061 --- /dev/null +++ b/frontend/components/Form/ColorSelector.vue @@ -0,0 +1,155 @@ + + + + + + {{ label }} + + + + {{ + modelValue || t("components.color_selector.no_color") + }} + + + + + + + + + + + + + + {{ label }} + + + + {{ + modelValue || t("components.color_selector.no_color") + }} + + + + + + + + + + + + diff --git a/frontend/components/Form/Password.vue b/frontend/components/Form/Password.vue index 76102189..0a3d1c7b 100644 --- a/frontend/components/Form/Password.vue +++ b/frontend/components/Form/Password.vue @@ -1,6 +1,7 @@ - + + @@ -12,29 +13,31 @@ - Toggle Password Show + {{ $t("components.form.password.toggle_show") }} + + diff --git a/frontend/components/Item/Card.vue b/frontend/components/Item/Card.vue index f38573eb..ea7a88dd 100644 --- a/frontend/components/Item/Card.vue +++ b/frontend/components/Item/Card.vue @@ -47,7 +47,7 @@ - + @@ -69,12 +69,15 @@ if (!props.item.imageId) { return "/no-image.jpg"; } - - return api.authURL(`/items/${props.item.id}/attachments/${props.item.imageId}`); + if (props.item.thumbnailId) { + return api.authURL(`/items/${props.item.id}/attachments/${props.item.thumbnailId}`); + } else { + return api.authURL(`/items/${props.item.id}/attachments/${props.item.imageId}`); + } }); - const top3 = computed(() => { - return props.item.labels.slice(0, 3) || []; + const itemLabels = computed(() => { + return props.item.labels || []; }); const props = defineProps({ diff --git a/frontend/components/Item/CreateModal.vue b/frontend/components/Item/CreateModal.vue index 191e3919..132a1c60 
100644 --- a/frontend/components/Item/CreateModal.vue +++ b/frontend/components/Item/CreateModal.vue @@ -1,7 +1,45 @@ - + + + + + + + + + + + + + {{ $t("components.item.create_modal.product_tooltip_scan_barcode") }} + + + + + + + + + + {{ $t("components.item.create_modal.product_tooltip_input_barcode") }} + + + + + + + + - + @@ -68,11 +110,11 @@ - Delete photo + {{ $t("components.item.create_modal.delete_photo") }} - Delete photo + {{ $t("components.item.create_modal.delete_photo") }} @@ -88,11 +130,11 @@ " > - Rotate photo + {{ $t("components.item.create_modal.rotate_photo") }} - Rotate photo + {{ $t("components.item.create_modal.rotate_photo") }} @@ -105,11 +147,15 @@ > - Set as {{ photo.primary ? "non" : "" }} primary photo + + {{ $t("components.item.create_modal.set_as_primary_photo", { isPrimary: photo.primary }) }} + - Set as {{ photo.primary ? "non" : "" }} primary photo + + {{ $t("components.item.create_modal.set_as_primary_photo", { isPrimary: photo.primary }) }} + @@ -122,6 +168,8 @@ diff --git a/frontend/components/Item/Selector.vue b/frontend/components/Item/Selector.vue index a455f6a0..800be83b 100644 --- a/frontend/components/Item/Selector.vue +++ b/frontend/components/Item/Selector.vue @@ -8,7 +8,7 @@ - {{ displayValue(value) || placeholder }} + {{ displayValue(value) || localizedPlaceholder }} @@ -16,9 +16,9 @@ - + - {{ noResultsText }} + {{ localizedNoResultsText }} @@ -41,6 +41,7 @@ import { Check, ChevronsUpDown } from "lucide-vue-next"; import fuzzysort from "fuzzysort"; import { useVModel } from "@vueuse/core"; + import { useI18n } from "vue-i18n"; import { Button } from "~/components/ui/button"; import { Command, CommandEmpty, CommandGroup, CommandInput, CommandItem, CommandList } from "~/components/ui/command"; import { Label } from "~/components/ui/label"; @@ -48,6 +49,8 @@ import { cn } from "~/lib/utils"; import { useId } from "#imports"; + const { t } = useI18n(); + type ItemsObject = { [key: string]: unknown; }; @@ -62,6 +65,7 @@ searchPlaceholder?: string; noResultsText?: string; placeholder?: string; + excludeItems?: ItemsObject[]; } const emit = defineEmits(["update:modelValue", "update:search"]); @@ -72,9 +76,10 @@ itemText: "text", itemValue: "value", search: "", - searchPlaceholder: "Type to search...", - noResultsText: "No Results Found", - placeholder: "Select...", + searchPlaceholder: undefined, + noResultsText: undefined, + placeholder: undefined, + excludeItems: undefined, }); const id = useId(); @@ -82,6 +87,12 @@ const search = ref(props.search); const value = useVModel(props, "modelValue", emit); + const localizedSearchPlaceholder = computed( + () => props.searchPlaceholder ?? t("components.item.selector.search_placeholder") + ); + const localizedNoResultsText = computed(() => props.noResultsText ?? t("components.item.selector.no_results")); + const localizedPlaceholder = computed(() => props.placeholder ?? 
t("components.item.selector.placeholder")); + watch( () => props.search, val => { @@ -128,12 +139,19 @@ } const filtered = computed(() => { - if (!search.value) return props.items; - if (isStrings(props.items)) { - return props.items.filter(item => item.toLowerCase().includes(search.value.toLowerCase())); + let baseItems = props.items; + + if (!isStrings(baseItems) && props.excludeItems) { + const excludeIds = props.excludeItems.map(i => i.id); + baseItems = baseItems.filter(item => !excludeIds?.includes(item.id)); + } + if (!search.value) return baseItems; + + if (isStrings(baseItems)) { + return baseItems.filter(item => item.toLowerCase().includes(search.value.toLowerCase())); } else { // Fuzzy search on itemText - return fuzzysort.go(search.value, props.items, { key: props.itemText, all: true }).map(i => i.obj); + return fuzzysort.go(search.value, baseItems, { key: props.itemText, all: true }).map(i => i.obj); } }); diff --git a/frontend/components/Item/View/Selectable.vue b/frontend/components/Item/View/Selectable.vue index 78de9229..559c294c 100644 --- a/frontend/components/Item/View/Selectable.vue +++ b/frontend/components/Item/View/Selectable.vue @@ -3,6 +3,7 @@ import type { ItemSummary } from "~~/lib/api/types/data-contracts"; import MdiCardTextOutline from "~icons/mdi/card-text-outline"; import MdiTable from "~icons/mdi/table"; + import { Badge } from "@/components/ui/badge"; import { Button, ButtonGroup } from "@/components/ui/button"; type Props = { @@ -29,7 +30,12 @@ - {{ $t("components.item.view.selectable.items") }} + + {{ $t("components.item.view.selectable.items") }} + + {{ items.length }} + + diff --git a/frontend/components/Item/View/Table.vue b/frontend/components/Item/View/Table.vue index 231aa3d7..62f19062 100644 --- a/frontend/components/Item/View/Table.vue +++ b/frontend/components/Item/View/Table.vue @@ -1,5 +1,5 @@ - + {{ $t("components.item.view.table.table_settings") }} @@ -41,7 +41,7 @@ - {{ $t("global.save") }} + {{ $t("global.save") }} @@ -77,7 +77,7 @@ - + - - {{ d.name }} - + {{ d.name }} @@ -111,6 +109,11 @@ {{ extractValue(d, h.value) }} + + + {{ $t("components.item.view.table.view_item") }} + + @@ -120,7 +123,7 @@ hidden: disableControls, }" > - + { const header = headers.value.find(h => h.value === value); if (header) { @@ -254,8 +257,8 @@ function extractSortable(item: ItemSummary, property: keyof ItemSummary): string | number | boolean { const value = item[property]; if (typeof value === "string") { - // Try parse float - const parsed = parseFloat(value); + // Try to parse number + const parsed = Number(value); if (!isNaN(parsed)) { return parsed; } diff --git a/frontend/components/Label/Chip.vue b/frontend/components/Label/Chip.vue index 364436d7..bcb5ebe3 100644 --- a/frontend/components/Label/Chip.vue +++ b/frontend/components/Label/Chip.vue @@ -2,6 +2,7 @@ import type { LabelOut, LabelSummary } from "~~/lib/api/types/data-contracts"; import MdiArrowUp from "~icons/mdi/arrow-up"; import MdiTagOutline from "~icons/mdi/tag-outline"; + import { getContrastTextColor } from "~/lib/utils"; export type sizes = "sm" | "md" | "lg" | "xl"; defineProps({ @@ -18,12 +19,17 @@ diff --git a/frontend/components/Label/CreateModal.vue b/frontend/components/Label/CreateModal.vue index 62112d68..aec9dbbb 100644 --- a/frontend/components/Label/CreateModal.vue +++ b/frontend/components/Label/CreateModal.vue @@ -1,5 +1,5 @@ - + + {{ $t("global.create") }} @@ -27,13 +28,18 @@ diff --git a/frontend/components/global/DetailsSection/DetailsSection.vue 
b/frontend/components/global/DetailsSection/DetailsSection.vue
index 8b1a0562..785cd48c 100644
--- a/frontend/components/global/DetailsSection/DetailsSection.vue
+++ b/frontend/components/global/DetailsSection/DetailsSection.vue
@@ -17,7 +17,13 @@
-
+
           {{ detail.text }}
@@ -39,7 +45,15 @@
-          {{ detail.text }}
+          {{ detail.text }}
+          {{ detail.text }}
+  import { useI18n } from "vue-i18n";
   import { route } from "../../lib/api/base";
   import PageQRCode from "./PageQRCode.vue";
+  import { DialogID } from "@/components/ui/dialog-provider/utils";
   import { toast } from "@/components/ui/sonner";
+  import MdiLoading from "~icons/mdi/loading";
   import MdiPrinterPos from "~icons/mdi/printer-pos";
   import MdiFileDownload from "~icons/mdi/file-download";
@@ -17,6 +20,7 @@
   import { Button, ButtonGroup } from "@/components/ui/button";
   import { Tooltip, TooltipContent, TooltipTrigger, TooltipProvider } from "@/components/ui/tooltip";
+  const { t } = useI18n();
   const { openDialog, closeDialog } = useDialog();
   const props = defineProps<{
@@ -29,7 +33,7 @@
   const { data: status } = useAsyncData(async () => {
     const { data, error } = await pubApi.status();
     if (error) {
-      toast.error("Failed to load status");
+      toast.error(t("components.global.label_maker.toast.load_status_failed"));
       return;
     }
@@ -55,12 +59,12 @@
     } catch (err) {
       console.error("Failed to print labels:", err);
       serverPrinting.value = false;
-      toast.error("Failed to print label");
+      toast.error(t("components.global.label_maker.toast.print_failed"));
       return;
     }
-    toast.success("Label printed");
-    closeDialog("print-label");
+    toast.success(t("components.global.label_maker.toast.print_success"));
+    closeDialog(DialogID.PrintLabel);
     serverPrinting.value = false;
   }
@@ -90,7 +94,7 @@
-
+
@@ -103,7 +107,8 @@
-
+
+            {{ $t("components.global.label_maker.server_print") }}
@@ -133,7 +138,7 @@
-
+
diff --git a/frontend/components/global/Markdown.vue b/frontend/components/global/Markdown.vue
index 5fa5bf00..f0ff2f45 100644
--- a/frontend/components/global/Markdown.vue
+++ b/frontend/components/global/Markdown.vue
@@ -1,5 +1,6 @@
@@ -30,7 +30,6 @@
diff --git a/frontend/pages/tools.vue b/frontend/pages/tools.vue
index 0362ae4a..ea9143fb 100644
--- a/frontend/pages/tools.vue
+++ b/frontend/pages/tools.vue
@@ -1,7 +1,7 @@
-
+
@@ -37,7 +37,7 @@
-
+
           {{ $t("tools.import_export_set.import") }}
@@ -84,6 +84,12 @@
           {{ $t("tools.actions_set.set_primary_photo_button") }}
+
+          {{ $t("tools.actions_set.create_missing_thumbnails") }}
+
+
+          {{ $t("tools.actions_set.create_missing_thumbnails_button") }}
+
@@ -92,18 +98,22 @@
diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml
index 46343396..54b96ce3 100644
--- a/frontend/pnpm-lock.yaml
+++ b/frontend/pnpm-lock.yaml
@@ -8,6 +8,9 @@ importers:
   .:
     dependencies:
+      '@mdit/plugin-img-size':
+        specifier: ^0.22.2
+        version: 0.22.2(markdown-it@14.1.0)
       '@nuxtjs/color-mode':
         specifier: ^3.5.2
         version: 3.5.2(magicast@0.3.5)
@@ -124,8 +127,8 @@ importers:
         specifier: ^1.52.0
         version: 1.52.0
       '@types/markdown-it':
-        specifier: ^13.0.9
-        version: 13.0.9
+        specifier: ^14.1.0
+        version: 14.1.2
       '@types/semver':
         specifier: ^7.7.0
         version: 7.7.0
@@ -1530,6 +1533,15 @@ packages:
     engines: {node: '>=18'}
     hasBin: true

+  '@mdit/plugin-img-size@0.22.2':
+    resolution: {integrity: sha512-+2+HpV5wZ3ZvFAs2alOiftDO635UbbOTr9uRQ0LZi/1lIZzKa0GE8sxYmtAZXRkdbGCj1uN6puoT7Bc7fdBs7Q==}
+    engines: {node: '>= 18'}
+    peerDependencies:
+      markdown-it: ^14.1.0
+    peerDependenciesMeta:
+      markdown-it:
+        optional: true
+
   '@napi-rs/wasm-runtime@0.2.9':
     resolution: {integrity: sha512-OKRBiajrrxB9ATokgEQoG87Z25c67pCpYcCwmXYX8PBftC9pBfN18gnm/fh1wurSLEKIAt+QRFLFCQISrb66Jg==}
@@ -2051,14 +2063,14 @@ packages:
   '@types/json5@0.0.29':
     resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==}

-  '@types/linkify-it@3.0.5':
-    resolution: {integrity: sha512-yg6E+u0/+Zjva+buc3EIb+29XEg4wltq7cSmd4Uc2EE/1nUVmxyzpX6gUXD0V8jIrG0r7YeOGVIbYRkxeooCtw==}
+  '@types/linkify-it@5.0.0':
+    resolution: {integrity: sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==}

-  '@types/markdown-it@13.0.9':
-    resolution: {integrity: sha512-1XPwR0+MgXLWfTn9gCsZ55AHOKW1WN+P9vr0PaQh5aerR9LLQXUbjfEAFhjmEmyoYFWAyuN2Mqkn40MZ4ukjBw==}
+  '@types/markdown-it@14.1.2':
+    resolution: {integrity: sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog==}

-  '@types/mdurl@1.0.5':
-    resolution: {integrity: sha512-6L6VymKTzYSrEf4Nev4Xa1LCHKrlTlYCBMTlQKFuddo1CvQcE52I0mwfOJayueUC7MJuXOeHTcIU683lzd0cUA==}
+  '@types/mdurl@2.0.0':
+    resolution: {integrity: sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==}

   '@types/node@22.14.1':
     resolution: {integrity: sha512-u0HuPQwe/dHrItgHHpmw3N2fYCR6x4ivMNbPHRkBVP4CvN+kiRrKHWk3i8tXiO/joPwXLMYvF9TTF0eqgHIuOw==}
@@ -8205,6 +8217,12 @@ snapshots:
       - encoding
       - supports-color

+  '@mdit/plugin-img-size@0.22.2(markdown-it@14.1.0)':
+    dependencies:
+      '@types/markdown-it': 14.1.2
+    optionalDependencies:
+      markdown-it: 14.1.0
+
   '@napi-rs/wasm-runtime@0.2.9':
     dependencies:
       '@emnapi/core': 1.4.3
@@ -8551,7 +8569,7 @@ snapshots:
   '@nuxtjs/eslint-config-typescript@12.1.0(eslint@8.57.1)(typescript@5.6.2)':
     dependencies:
-      '@nuxtjs/eslint-config': 12.0.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint-import-resolver-typescript@3.10.0)(eslint@8.57.1)
+      '@nuxtjs/eslint-config': 12.0.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint-import-resolver-typescript@3.10.0(eslint-plugin-import@2.31.0)(eslint@8.57.1))(eslint@8.57.1)
       '@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint@8.57.1)(typescript@5.6.2)
       '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.6.2)
       eslint: 8.57.1
@@ -8564,10 +8582,10 @@ snapshots:
       - supports-color
       - typescript

-  '@nuxtjs/eslint-config@12.0.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint-import-resolver-typescript@3.10.0)(eslint@8.57.1)':
+  '@nuxtjs/eslint-config@12.0.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint-import-resolver-typescript@3.10.0(eslint-plugin-import@2.31.0)(eslint@8.57.1))(eslint@8.57.1)':
     dependencies:
       eslint: 8.57.1
-      eslint-config-standard: 17.1.0(eslint-plugin-import@2.31.0)(eslint-plugin-n@15.7.0(eslint@8.57.1))(eslint-plugin-promise@6.6.0(eslint@8.57.1))(eslint@8.57.1)
+      eslint-config-standard: 17.1.0(eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint-import-resolver-typescript@3.10.0)(eslint@8.57.1))(eslint-plugin-n@15.7.0(eslint@8.57.1))(eslint-plugin-promise@6.6.0(eslint@8.57.1))(eslint@8.57.1)
       eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint-import-resolver-typescript@3.10.0)(eslint@8.57.1)
       eslint-plugin-n: 15.7.0(eslint@8.57.1)
       eslint-plugin-node: 11.1.0(eslint@8.57.1)
@@ -8949,14 +8967,14 @@ snapshots:
   '@types/json5@0.0.29': {}

-  '@types/linkify-it@3.0.5': {}
+  '@types/linkify-it@5.0.0': {}

-  '@types/markdown-it@13.0.9':
+  '@types/markdown-it@14.1.2':
     dependencies:
-      '@types/linkify-it': 3.0.5
-      '@types/mdurl': 1.0.5
+      '@types/linkify-it': 5.0.0
+      '@types/mdurl': 2.0.0

-  '@types/mdurl@1.0.5': {}
+  '@types/mdurl@2.0.0': {}

   '@types/node@22.14.1':
     dependencies:
@@ -10655,7 +10673,7 @@ snapshots:
     dependencies:
       eslint: 8.57.1

-  eslint-config-standard@17.1.0(eslint-plugin-import@2.31.0)(eslint-plugin-n@15.7.0(eslint@8.57.1))(eslint-plugin-promise@6.6.0(eslint@8.57.1))(eslint@8.57.1):
+  eslint-config-standard@17.1.0(eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint-import-resolver-typescript@3.10.0)(eslint@8.57.1))(eslint-plugin-n@15.7.0(eslint@8.57.1))(eslint-plugin-promise@6.6.0(eslint@8.57.1))(eslint@8.57.1):
     dependencies:
       eslint: 8.57.1
       eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint-import-resolver-typescript@3.10.0)(eslint@8.57.1)
@@ -10685,7 +10703,7 @@ snapshots:
     transitivePeerDependencies:
       - supports-color

-  eslint-module-utils@2.12.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.0)(eslint@8.57.1):
+  eslint-module-utils@2.12.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.0(eslint-plugin-import@2.31.0)(eslint@8.57.1))(eslint@8.57.1):
     dependencies:
       debug: 3.2.7
     optionalDependencies:
@@ -10719,7 +10737,7 @@ snapshots:
       doctrine: 2.1.0
       eslint: 8.57.1
       eslint-import-resolver-node: 0.3.9
-      eslint-module-utils: 2.12.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.0)(eslint@8.57.1)
+      eslint-module-utils: 2.12.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.0(eslint-plugin-import@2.31.0)(eslint@8.57.1))(eslint@8.57.1)
       hasown: 2.0.2
       is-core-module: 2.16.1
       is-glob: 4.0.3
@@ -12293,7 +12311,7 @@ snapshots:
       unenv: 1.10.0
       unimport: 3.14.6(rollup@4.40.0)
       unplugin: 1.16.1
-      unplugin-vue-router: 0.10.9(rollup@4.40.0)(vue-router@4.5.0(vue@3.4.8(typescript@5.6.2)))(vue@3.5.13(typescript@5.6.2))
+      unplugin-vue-router: 0.10.9(rollup@4.40.0)(vue-router@4.5.0(vue@3.5.13(typescript@5.6.2)))(vue@3.5.13(typescript@5.6.2))
       unstorage: 1.15.0(@netlify/blobs@8.2.0)(db0@0.3.2)(ioredis@5.6.1)
       untyped: 1.5.2
       vue: 3.5.13(typescript@5.6.2)
@@ -13924,7 +13942,7 @@ snapshots:
       pathe: 2.0.3
       picomatch: 4.0.2

-  unplugin-vue-router@0.10.9(rollup@4.40.0)(vue-router@4.5.0(vue@3.4.8(typescript@5.6.2)))(vue@3.5.13(typescript@5.6.2)):
+  unplugin-vue-router@0.10.9(rollup@4.40.0)(vue-router@4.5.0(vue@3.5.13(typescript@5.6.2)))(vue@3.5.13(typescript@5.6.2)):
     dependencies:
       '@babel/types': 7.27.0
       '@rollup/pluginutils': 5.1.4(rollup@4.40.0)
@@ -13941,7 +13959,7 @@ snapshots:
       unplugin: 2.0.0-beta.1
       yaml: 2.7.1
     optionalDependencies:
-      vue-router: 4.5.0(vue@3.4.8(typescript@5.6.2))
+      vue-router: 4.5.0(vue@3.5.13(typescript@5.6.2))
     transitivePeerDependencies:
       - rollup
       - vue
diff --git a/frontend/tailwind.config.js b/frontend/tailwind.config.js
index cfa4bad9..d360fe53 100644
--- a/frontend/tailwind.config.js
+++ b/frontend/tailwind.config.js
@@ -123,6 +123,25 @@ module.exports = {
         "collapsible-down": "collapsible-down 0.2s ease-in-out",
         "collapsible-up": "collapsible-up 0.2s ease-in-out",
       },
+      typography: {
+        DEFAULT: {
+          css: {
+            color: "hsl(var(--foreground))",
+            a: {
+              color: "hsl(var(--primary))",
+              "&:hover": {
+                color: "hsl(var(--primary) / 0.8)",
+              },
+            },
+            h1: {
+              color: "hsl(var(--foreground))",
+            },
+            h2: {
+              color: "hsl(var(--foreground))",
+            },
+          },
+        },
+      },
     },
   },
   plugins: [require("@tailwindcss/aspect-ratio"), require("@tailwindcss/typography"), require("tailwindcss-animate")],
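The Markdown.vue hunks above lost their bodies in extraction, so for orientation here is a minimal sketch of how the newly added @mdit/plugin-img-size dependency is typically registered with markdown-it 14, whose type definitions were bumped to 14.1.2 in the same lockfile change. This is not the component's actual code: the `renderMarkdown` helper name and the MarkdownIt options are illustrative assumptions, and the import assumes the plugin's documented `imgSize` named export.

```ts
// Sketch only, not the repository's Markdown.vue: the usual way to wire
// @mdit/plugin-img-size (added to pnpm-lock.yaml above) into markdown-it 14.
import MarkdownIt from "markdown-it";
import { imgSize } from "@mdit/plugin-img-size";

// The plugin lets authors write ![alt](image.png =200x100) so the generated
// <img> tag carries explicit width/height attributes.
const md = new MarkdownIt({ html: false, linkify: true });
md.use(imgSize);

// Hypothetical helper: render markdown to HTML that the Tailwind `prose`
// styles (extended with the typography colors above) can then style.
export function renderMarkdown(source: string): string {
  return md.render(source);
}
```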
{{ $t("components.item.create_modal.product_tooltip_scan_barcode") }}
{{ $t("components.item.create_modal.product_tooltip_input_barcode") }}
-            Delete photo
+            {{ $t("components.item.create_modal.delete_photo") }}
-            Rotate photo
+            {{ $t("components.item.create_modal.rotate_photo") }}
-            Set as {{ photo.primary ? "non" : "" }} primary photo
+            {{ $t("components.item.create_modal.set_as_primary_photo", { isPrimary: photo.primary }) }}
+