mirror of https://github.com/sysadminsmedia/homebox.git
synced 2025-12-23 22:18:22 +01:00

Compare commits: v0.11.1 ... fix/web-so (22 commits)

| SHA1 |
|---|
| 2cd107b8bd |
| a3cce59a2a |
| 9fa17bec90 |
| b5987f2e8d |
| 2cbcc8bb1d |
| cceec06148 |
| 2e2eed143d |
| 272cc5a370 |
| 275e106d72 |
| 3f0e65a2ad |
| 22bbaae08f |
| 8c7d91ea52 |
| 5a219f6a9c |
| 895017b28e |
| 02ce52dbe3 |
| c5ae6b17f9 |
| 371fc0a6af |
| 016780920d |
| 06eb6c1f91 |
| 27dad0e118 |
| dc9446516a |
| a042496c71 |
@@ -35,6 +35,6 @@
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "node",
"features": {
"ghcr.io/devcontainers/features/go:1": "1.21"
"golang": "1.20"
}
}
2 .github/FUNDING.yml vendored
@@ -1 +1 @@
github: [tankerkiller125,katosdev]
github: [hay-kot]
1 .github/ISSUE_TEMPLATE/bug_report.yml vendored
@@ -2,7 +2,6 @@
name: "Bug Report"
description: "Submit a bug report for the current release"
labels: ["bug"]
projects: ["sysadminsmedia/2"]
body:
- type: checkboxes
id: checks
3 .github/ISSUE_TEMPLATE/feature_request.yml vendored
@@ -1,8 +1,7 @@
---
name: "Feature Request"
description: "Submit a feature request for the current release"
labels: ["enhancement"]
projects: ["sysadminsmedia/2"]
labels: ["feature-request"]
body:
- type: textarea
id: problem-statement
47
.github/workflows/binaries-publish.yaml
vendored
47
.github/workflows/binaries-publish.yaml
vendored
@@ -1,47 +0,0 @@
|
||||
name: Publish Release Binaries
|
||||
|
||||
on:
|
||||
push:
|
||||
tags: [ 'v*.*.*' ]
|
||||
|
||||
jobs:
|
||||
backend-tests:
|
||||
name: "Backend Server Tests"
|
||||
uses: sysadminsmedia/homebox/.github/workflows/partial-backend.yaml@main
|
||||
|
||||
frontend-tests:
|
||||
name: "Frontend and End-to-End Tests"
|
||||
uses: sysadminsmedia/homebox/.github/workflows/partial-frontend.yaml@main
|
||||
|
||||
goreleaser:
|
||||
name: goreleaser
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
|
||||
- uses: pnpm/action-setup@v2
|
||||
with:
|
||||
version: 7.30.1
|
||||
|
||||
- name: Build Frontend and Copy to Backend
|
||||
working-directory: frontend
|
||||
run: |
|
||||
pnpm install --shamefully-hoist
|
||||
pnpm run build
|
||||
cp -r ./.output/public ../backend/app/api/static/
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v5
|
||||
with:
|
||||
workdir: "backend"
|
||||
distribution: goreleaser
|
||||
version: latest
|
||||
args: release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
100
.github/workflows/docker-publish-rootless.yaml
vendored
100
.github/workflows/docker-publish-rootless.yaml
vendored
@@ -1,100 +0,0 @@
|
||||
name: Docker publish rootless
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '00 6 * * *'
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
# Publish semver tags as releases.
|
||||
tags: [ 'v*.*.*' ]
|
||||
pull_request:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
|
||||
|
||||
env:
|
||||
# Use docker.io for Docker Hub if empty
|
||||
REGISTRY: ghcr.io
|
||||
# github.repository as <account>/<repo>
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
|
||||
jobs:
|
||||
build-rootless:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
# This is used to complete the identity challenge
|
||||
# with sigstore/fulcio when running outside of PRs.
|
||||
attestations: write
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Set up BuildKit Docker container builder to be able to build
|
||||
# multi-platform images and export cache
|
||||
# https://github.com/docker/setup-buildx-action
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0 # v3.0.0
|
||||
|
||||
# Login against a Docker registry except on PR
|
||||
# https://github.com/docker/login-action
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3.0.0 # v3.0.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Extract metadata (tags, labels) for Docker
|
||||
# https://github.com/docker/metadata-action
|
||||
- name: Extract Docker metadata
|
||||
id: metadata
|
||||
uses: docker/metadata-action@v5.0.0 # v5.0.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=schedule,pattern=nightly
|
||||
flavor: |
|
||||
suffix=-rootless,onlatest=true
|
||||
|
||||
# Build and push Docker image with Buildx (don't push on PR)
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: docker/build-push-action@v5.0.0 # v5.0.0
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.metadata.outputs.tags }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
build-args: |
|
||||
VERSION=${{ github.ref_name }}
|
||||
COMMIT=${{ github.sha }}
|
||||
|
||||
- name: Attest
|
||||
uses: actions/attest-build-provenance@v1
|
||||
id: attest
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||
push-to-registry: true
|
||||
97
.github/workflows/docker-publish.yaml
vendored
97
.github/workflows/docker-publish.yaml
vendored
@@ -1,97 +0,0 @@
|
||||
name: Docker publish
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '00 6 * * *'
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
# Publish semver tags as releases.
|
||||
tags: [ 'v*.*.*' ]
|
||||
pull_request:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
|
||||
env:
|
||||
# Use docker.io for Docker Hub if empty
|
||||
REGISTRY: ghcr.io
|
||||
# github.repository as <account>/<repo>
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
# This is used to complete the identity challenge
|
||||
# with sigstore/fulcio when running outside of PRs.
|
||||
attestations: write
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Set up BuildKit Docker container builder to be able to build
|
||||
# multi-platform images and export cache
|
||||
# https://github.com/docker/setup-buildx-action
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0 # v3.0.0
|
||||
|
||||
# Login against a Docker registry except on PR
|
||||
# https://github.com/docker/login-action
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3.0.0 # v3.0.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Extract metadata (tags, labels) for Docker
|
||||
# https://github.com/docker/metadata-action
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5.0.0 # v5.0.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=schedule,pattern=nightly
|
||||
|
||||
# Build and push Docker image with Buildx (don't push on PR)
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: docker/build-push-action@v5.0.0 # v5.0.0
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
build-args: |
|
||||
VERSION=${{ github.ref_name }}
|
||||
COMMIT=${{ github.sha }}
|
||||
|
||||
- name: Attest
|
||||
uses: actions/attest-build-provenance@v1
|
||||
id: attest
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||
push-to-registry: true
|
||||
8 .github/workflows/partial-backend.yaml vendored
@@ -7,12 +7,12 @@ jobs:
Go:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3

- name: Set up Go
uses: actions/setup-go@v5
uses: actions/setup-go@v4
with:
go-version: "1.21"
go-version: "1.20"

- name: Install Task
uses: arduino/setup-task@v1
@@ -20,7 +20,7 @@ jobs:
repo-token: ${{ secrets.GITHUB_TOKEN }}

- name: golangci-lint
uses: golangci/golangci-lint-action@v4
uses: golangci/golangci-lint-action@v3
with:
# Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
version: latest
14
.github/workflows/partial-frontend.yaml
vendored
14
.github/workflows/partial-frontend.yaml
vendored
@@ -9,11 +9,11 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: pnpm/action-setup@v3.0.0
|
||||
- uses: pnpm/action-setup@v2.2.4
|
||||
with:
|
||||
version: 6.0.2
|
||||
|
||||
@@ -34,7 +34,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -44,15 +44,15 @@ jobs:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: "1.21"
|
||||
go-version: "1.20"
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 18
|
||||
|
||||
- uses: pnpm/action-setup@v3.0.0
|
||||
- uses: pnpm/action-setup@v2.2.4
|
||||
with:
|
||||
version: 6.0.2
|
||||
|
||||
|
||||
89
.github/workflows/partial-publish.yaml
vendored
Normal file
89
.github/workflows/partial-publish.yaml
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
name: Frontend / E2E
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
tag:
|
||||
required: true
|
||||
type: string
|
||||
release:
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
secrets:
|
||||
GH_TOKEN:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: "Publish Homebox"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: "1.20"
|
||||
|
||||
- name: Set up QEMU
|
||||
id: qemu
|
||||
uses: docker/setup-qemu-action@v2
|
||||
with:
|
||||
image: tonistiigi/binfmt:latest
|
||||
platforms: all
|
||||
|
||||
- name: install buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
install: true
|
||||
|
||||
- name: login to container registry
|
||||
run: docker login ghcr.io --username hay-kot --password $CR_PAT
|
||||
env:
|
||||
CR_PAT: ${{ secrets.GH_TOKEN }}
|
||||
|
||||
- name: build nightly image
|
||||
if: ${{ inputs.release == false }}
|
||||
run: |
|
||||
docker build --push --no-cache \
|
||||
--tag=ghcr.io/hay-kot/homebox:${{ inputs.tag }} \
|
||||
--build-arg=COMMIT=$(git rev-parse HEAD) \
|
||||
--build-arg=BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
|
||||
--platform=linux/amd64,linux/arm64,linux/arm/v7 .
|
||||
|
||||
- name: build nightly-rootless image
|
||||
if: ${{ inputs.release == false }}
|
||||
run: |
|
||||
docker build --push --no-cache \
|
||||
--tag=ghcr.io/hay-kot/homebox:${{ inputs.tag }}-rootless \
|
||||
--build-arg=COMMIT=$(git rev-parse HEAD) \
|
||||
--build-arg=BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
|
||||
--file Dockerfile.rootless \
|
||||
--platform=linux/amd64,linux/arm64,linux/arm/v7 .
|
||||
|
||||
- name: build release tagged the image
|
||||
if: ${{ inputs.release == true }}
|
||||
run: |
|
||||
docker build --push --no-cache \
|
||||
--tag ghcr.io/hay-kot/homebox:nightly \
|
||||
--tag ghcr.io/hay-kot/homebox:latest \
|
||||
--tag ghcr.io/hay-kot/homebox:${{ inputs.tag }} \
|
||||
--build-arg VERSION=${{ inputs.tag }} \
|
||||
--build-arg COMMIT=$(git rev-parse HEAD) \
|
||||
--build-arg BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
|
||||
--platform linux/amd64,linux/arm64,linux/arm/v7 .
|
||||
|
||||
- name: build release tagged the rootless image
|
||||
if: ${{ inputs.release == true }}
|
||||
run: |
|
||||
docker build --push --no-cache \
|
||||
--tag ghcr.io/hay-kot/homebox:nightly-rootless \
|
||||
--tag ghcr.io/hay-kot/homebox:latest-rootless \
|
||||
--tag ghcr.io/hay-kot/homebox:${{ inputs.tag }}-rootless \
|
||||
--build-arg VERSION=${{ inputs.tag }} \
|
||||
--build-arg COMMIT=$(git rev-parse HEAD) \
|
||||
--build-arg BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
|
||||
--platform linux/amd64,linux/arm64,linux/arm/v7 \
|
||||
--file Dockerfile.rootless .
|
||||
29
.github/workflows/publish.yaml
vendored
Normal file
29
.github/workflows/publish.yaml
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
name: Publish Dockers
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
env:
|
||||
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: "Deploy Nightly to Fly.io"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: superfly/flyctl-actions/setup-flyctl@master
|
||||
- run: flyctl deploy --remote-only
|
||||
|
||||
publish-nightly:
|
||||
name: "Publish Nightly"
|
||||
if: github.event_name != 'release'
|
||||
uses: hay-kot/homebox/.github/workflows/partial-publish.yaml@main
|
||||
with:
|
||||
tag: nightly
|
||||
secrets:
|
||||
GH_TOKEN: ${{ secrets.CR_PAT }}
|
||||
|
||||
|
||||
4 .github/workflows/pull-requests.yaml vendored
@@ -5,10 +5,6 @@ on:
branches:
- main

paths:
- 'backend/**'
- 'frontend/**'

jobs:
backend-tests:
name: "Backend Server Tests"
77
.github/workflows/tag.yaml
vendored
Normal file
77
.github/workflows/tag.yaml
vendored
Normal file
@@ -0,0 +1,77 @@
|
||||
name: Publish Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
|
||||
env:
|
||||
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
|
||||
|
||||
jobs:
|
||||
backend-tests:
|
||||
name: "Backend Server Tests"
|
||||
uses: hay-kot/homebox/.github/workflows/partial-backend.yaml@main
|
||||
|
||||
frontend-tests:
|
||||
name: "Frontend and End-to-End Tests"
|
||||
uses: hay-kot/homebox/.github/workflows/partial-frontend.yaml@main
|
||||
|
||||
goreleaser:
|
||||
name: goreleaser
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v4
|
||||
|
||||
- uses: pnpm/action-setup@v2
|
||||
with:
|
||||
version: 7.30.1
|
||||
|
||||
- name: Build Frontend and Copy to Backend
|
||||
working-directory: frontend
|
||||
run: |
|
||||
pnpm install --shamefully-hoist
|
||||
pnpm run build
|
||||
cp -r ./.output/public ../backend/app/api/static/
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v4
|
||||
with:
|
||||
workdir: "backend"
|
||||
distribution: goreleaser
|
||||
version: latest
|
||||
args: release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
publish-tag:
|
||||
name: "Publish Tag"
|
||||
uses: hay-kot/homebox/.github/workflows/partial-publish.yaml@main
|
||||
with:
|
||||
release: true
|
||||
tag: ${{ github.ref_name }}
|
||||
secrets:
|
||||
GH_TOKEN: ${{ secrets.CR_PAT }}
|
||||
|
||||
deploy-docs:
|
||||
name: Deploy docs
|
||||
needs:
|
||||
- publish-tag
|
||||
- goreleaser
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout main
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Deploy docs
|
||||
uses: mhausenblas/mkdocs-deploy-gh-pages@master
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CONFIG_FILE: docs/mkdocs.yml
|
||||
EXTRA_PACKAGES: build-base
|
||||
5 .gitignore vendored
@@ -53,7 +53,4 @@ dist/
# Nuxt Publish Dir
backend/app/api/static/public/*
!backend/app/api/static/public/.gitkeep
backend/api

docs/.vitepress/cache/
!backend/app/api/static/public/.gitkeep
@@ -3,7 +3,7 @@ package schema
import (
"entgo.io/ent"

"github.com/sysadminsmedia/homebox/backend/internal/data/ent/schema/mixins"
"github.com/hay-kot/homebox/backend/internal/data/ent/schema/mixins"
)

type {{ .Scaffold.model }} struct {
2 .vscode/settings.json vendored
@@ -16,7 +16,7 @@
"editor.formatOnSave": false,
"editor.defaultFormatter": "dbaeumer.vscode-eslint",
"editor.codeActionsOnSave": {
"source.fixAll.eslint": "explicit"
"source.fixAll.eslint": true
},
"[typescript]": {
"editor.defaultFormatter": "dbaeumer.vscode-eslint"
@@ -1,128 +0,0 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
We as members, contributors, and leaders pledge to make participation in our
|
||||
community a harassment-free experience for everyone, regardless of age, body
|
||||
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||
identity and expression, level of experience, education, socio-economic status,
|
||||
nationality, personal appearance, race, religion, or sexual identity
|
||||
and orientation.
|
||||
|
||||
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||
diverse, inclusive, and healthy community.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to a positive environment for our
|
||||
community include:
|
||||
|
||||
* Demonstrating empathy and kindness toward other people
|
||||
* Being respectful of differing opinions, viewpoints, and experiences
|
||||
* Giving and gracefully accepting constructive feedback
|
||||
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
and learning from the experience
|
||||
* Focusing on what is best not just for us as individuals, but for the
|
||||
overall community
|
||||
|
||||
Examples of unacceptable behavior include:
|
||||
|
||||
* The use of sexualized language or imagery, and sexual attention or
|
||||
advances of any kind
|
||||
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or email
|
||||
address, without their explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Enforcement Responsibilities
|
||||
|
||||
Community leaders are responsible for clarifying and enforcing our standards of
|
||||
acceptable behavior and will take appropriate and fair corrective action in
|
||||
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||
or harmful.
|
||||
|
||||
Community leaders have the right and responsibility to remove, edit, or reject
|
||||
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||
decisions when appropriate.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies within all community spaces, and also applies when
|
||||
an individual is officially representing the community in public spaces.
|
||||
Examples of representing our community include using an official e-mail address,
|
||||
posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at
|
||||
[support@sysadminemedia.com](mailto:support@sysadminemedia.com).
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
reporter of any incident.
|
||||
|
||||
## Enforcement Guidelines
|
||||
|
||||
Community leaders will follow these Community Impact Guidelines in determining
|
||||
the consequences for any action they deem in violation of this Code of Conduct:
|
||||
|
||||
### 1. Correction
|
||||
|
||||
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||
unprofessional or unwelcome in the community.
|
||||
|
||||
**Consequence**: A private, written warning from community leaders, providing
|
||||
clarity around the nature of the violation and an explanation of why the
|
||||
behavior was inappropriate. A public apology may be requested.
|
||||
|
||||
### 2. Warning
|
||||
|
||||
**Community Impact**: A violation through a single incident or series
|
||||
of actions.
|
||||
|
||||
**Consequence**: A warning with consequences for continued behavior. No
|
||||
interaction with the people involved, including unsolicited interaction with
|
||||
those enforcing the Code of Conduct, for a specified period of time. This
|
||||
includes avoiding interactions in community spaces as well as external channels
|
||||
like social media. Violating these terms may lead to a temporary or
|
||||
permanent ban.
|
||||
|
||||
### 3. Temporary Ban
|
||||
|
||||
**Community Impact**: A serious violation of community standards, including
|
||||
sustained inappropriate behavior.
|
||||
|
||||
**Consequence**: A temporary ban from any sort of interaction or public
|
||||
communication with the community for a specified period of time. No public or
|
||||
private interaction with the people involved, including unsolicited interaction
|
||||
with those enforcing the Code of Conduct, is allowed during this period.
|
||||
Violating these terms may lead to a permanent ban.
|
||||
|
||||
### 4. Permanent Ban
|
||||
|
||||
**Community Impact**: Demonstrating a pattern of violation of community
|
||||
standards, including sustained inappropriate behavior, harassment of an
|
||||
individual, or aggression toward or disparagement of classes of individuals.
|
||||
|
||||
**Consequence**: A permanent ban from any sort of public interaction within
|
||||
the community.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||
version 2.0, available at
|
||||
[Contributor Covenant Code of Conduct](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html).
|
||||
|
||||
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||
enforcement ladder](https://github.com/mozilla/diversity).
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
[FAQ](https://www.contributor-covenant.org/faq). Translations are available at
|
||||
[Translations](https://www.contributor-covenant.org/translations).
|
||||
@@ -1,16 +1,16 @@
# Contributing

## We Develop with GitHub
## We Develop with Github

We use GitHub to host code, to track issues and feature requests, as well as accept pull requests.
We use github to host code, to track issues and feature requests, as well as accept pull requests.

## Branch Flow

We use the `main` branch as the development branch. All PRs should be made to the `main` branch from a feature branch. To create a pull request, you can use the following steps:
We use the `main` branch as the development branch. All PRs should be made to the `main` branch from a feature branch. To create a pull request you can use the following steps:

1. Fork the repository and create a new branch from `main`.
2. If you've added code that should be tested, add tests.
3. If you've changed APIs, update the documentation.
3. If you've changed API's, update the documentation.
4. Ensure that the test suite and linters pass
5. Issue your pull request

@@ -18,7 +18,7 @@ We use the `main` branch as the development branch. All PRs should be made to th

### Prerequisites

There is a devcontainer available for this project. If you are using VSCode, you can use the devcontainer to get started. If you are not using VSCode, you need to ensure that you have the following tools installed:
There is a devcontainer available for this project. If you are using VSCode, you can use the devcontainer to get started. If you are not using VSCode, you can need to ensure that you have the following tools installed:

- [Go 1.19+](https://golang.org/doc/install)
- [Swaggo](https://github.com/swaggo/swag)

@@ -31,27 +31,27 @@ If you're using `taskfile` you can run `task --list-all` for a list of all comma

### Setup

If you're using the taskfile, you can use the `task setup` command to run the required setup commands. Otherwise, you can review the commands required in the `Taskfile.yml` file.
If you're using the taskfile you can use the `task setup` command to run the required setup commands. Otherwise you can review the commands required in the `Taskfile.yml` file.

Note that when installing dependencies with pnpm you must use the `--shamefully-hoist` flag. If you don't use this flag, you will get an error when running the frontend server.
Note that when installing dependencies with pnpm you must use the `--shamefully-hoist` flag. If you don't use this flag you will get an error when running the the frontend server.

### API Development Notes

start command `task go:run`

1. API Server does not auto reload. You'll need to restart the server after making changes.
2. Unit tests should be written in Go, however, end-to-end or user story tests should be written in TypeScript using the client library in the frontend directory.
2. Unit tests should be written in Go, however end-to-end or user story tests should be written in TypeScript using the client library in the frontend directory.

### Frontend Development Notes

start command `task: ui:dev`

1. The frontend is a Vue 3 app with Nuxt.js that uses Tailwind and DaisyUI for styling.
2. We're using Vitest for our automated testing. You can run these with `task ui:watch`.
3. Tests require the API server to be running, and in some cases the first run will fail due to a race condition. If this happens, just run the tests again and they should pass.
2. We're using Vitest for our automated testing. you can run these with `task ui:watch`.
3. Tests require the API server to be running and in some cases the first run will fail due to a race condition. If this happens just run the tests again and they should pass.

## Publishing Release

Create a new tag in GitHub with the version number vX.X.X. This will trigger a new release to be created.
Create a new tag in github with the version number vX.X.X. This will trigger a new release to be created.

Test -> Goreleaser -> Publish Release -> Trigger Docker Builds -> Deploy Docs + Fly.io Demo
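As a concrete illustration of the release step above, tagging and pushing from a local clone would look roughly like this; a minimal sketch, where the version number is a placeholder and maintainers may equally create the tag through the GitHub UI:

```bash
# Hypothetical example of cutting a release tag; v0.12.0 is a placeholder version.
git checkout main
git pull origin main
git tag v0.12.0
git push origin v0.12.0   # pushing a v*.*.* tag triggers the release workflow
```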
@@ -32,7 +32,7 @@ FROM alpine:latest

ENV HBOX_MODE=production
ENV HBOX_STORAGE_DATA=/data/
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_fk=1

RUN apk --no-cache add ca-certificates
RUN mkdir /app
@@ -41,7 +41,7 @@ COPY --from=builder /go/bin/api /app
RUN chmod +x /app/api

LABEL Name=homebox Version=0.0.1
LABEL org.opencontainers.image.source="https://github.com/sysadminsmedia/homebox"
LABEL org.opencontainers.image.source="https://github.com/hay-kot/homebox"
EXPOSE 7745
WORKDIR /app
VOLUME [ "/data" ]
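The ENV and LABEL lines above belong to the same Dockerfile that the CI workflows earlier on this page build with VERSION, COMMIT, and BUILD_TIME build args; a minimal local-build sketch, where the image tag and VERSION value are illustrative assumptions:

```bash
# Hypothetical local build; the tag "homebox:local" and VERSION value are placeholders.
docker build \
  --build-arg VERSION=v0.11.1 \
  --build-arg COMMIT=$(git rev-parse HEAD) \
  --build-arg BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
  --tag homebox:local .
```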
@@ -1,6 +1,6 @@

# Build Nuxt
FROM node:18-alpine as frontend-builder
FROM node:17-alpine as frontend-builder
WORKDIR /app
RUN npm install -g pnpm
COPY frontend/package.json frontend/pnpm-lock.yaml ./
@@ -43,7 +43,7 @@ COPY --from=builder --chown=nonroot /go/bin/api /app
COPY --from=builder --chown=nonroot /data /data

LABEL Name=homebox Version=0.0.1
LABEL org.opencontainers.image.source="https://github.com/sysadminsmedia/homebox"
LABEL org.opencontainers.image.source="https://github.com/hay-kot/homebox"
EXPOSE 7745
VOLUME [ "/data" ]
30 README.md
@@ -1,28 +1,19 @@
<div align="center">
<img src="/docs/public/lilbox.svg" height="200"/>
<img src="/docs/docs/assets/img/lilbox.svg" height="200"/>
</div>

<h1 align="center" style="margin-top: -10px"> HomeBox </h1>
<p align="center" style="width: 100;">
<a href="https://homebox.sysadminsmedia.com">Docs</a>
<a href="https://hay-kot.github.io/homebox/">Docs</a>
|
<a href="https://homebox.fly.dev">Demo</a>
|
<a href="https://discord.gg/aY4DCkpNA9">Discord</a>
<a href="https://discord.gg/tuncmNrE4z">Discord</a>
</p>

## What is HomeBox

Homebox is the inventory and organization system built for the Home User! With a focus on simplicity and ease of use, Homebox is the perfect solution for your home inventory, organization, and management needs. While developing this project, I've tried to keep the following principles in mind:

- _Simple_ - Homebox is designed to be simple and easy to use. No complicated setup or configuration required. Use either a single docker container, or deploy yourself by compiling the binary for your platform of choice.
- _Blazingly Fast_ - Homebox is written in Go, which makes it extremely fast and requires minimal resources to deploy. In general idle memory usage is less than 50MB for the whole container.
- _Portable_ - Homebox is designed to be portable and run on anywhere. We use SQLite and an embedded Web UI to make it easy to deploy, use, and backup.

## Quick Start

[Configuration & Docker Compose](https://homebox.sysadminsmedia.com/quick-start.html)
[Configuration & Docker Compose](https://hay-kot.github.io/homebox/quick-start)

```bash
# If using the rootless image, ensure data
@@ -35,19 +26,10 @@ docker run -d \
--publish 3100:7745 \
--env TZ=Europe/Bucharest \
--volume /path/to/data/folder/:/data \
ghcr.io/sysadminsmedia/homebox:latest
# ghcr.io/sysadminsmedia/homebox:latest-rootless
ghcr.io/hay-kot/homebox:latest
# ghcr.io/hay-kot/homebox:latest-rootless
```
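If you use the rootless tag shown above, the container runs as an unprivileged user, so the host data directory needs to exist and be writable by it. A hedged sketch of one way to do that, assuming the image follows the common distroless `nonroot` convention (UID/GID 65532 is an assumption, not something this page states):

```bash
# Hypothetical preparation for the rootless image; the 65532 UID/GID is an
# assumption based on the distroless "nonroot" convention, not on this page.
mkdir -p /path/to/data/folder
sudo chown -R 65532:65532 /path/to/data/folder
docker run -d \
  --publish 3100:7745 \
  --env TZ=Europe/Bucharest \
  --volume /path/to/data/folder/:/data \
  ghcr.io/sysadminsmedia/homebox:latest-rootless
```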
<!-- CONTRIBUTING -->

## Contributing

Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.

If you are not a coder, you can still contribute financially. Financial contributions help me prioritize working on this project over others and helps me know that there is a real demand for project development.

## Credits

- Original project by [@hay-kot](https://github.com/hay-kot)
- Logo by [@lakotelman](https://github.com/lakotelman)
45
Taskfile.yml
45
Taskfile.yml
@@ -1,7 +1,6 @@
|
||||
version: "3"
|
||||
|
||||
env:
|
||||
HBOX_LOG_LEVEL: debug
|
||||
HBOX_STORAGE_SQLITE_URL: .data/homebox.db?_pragma=busy_timeout=1000&_pragma=journal_mode=WAL&_fk=1
|
||||
HBOX_OPTIONS_ALLOW_REGISTRATION: true
|
||||
UNSAFE_DISABLE_PASSWORD_PROJECTION: "yes_i_am_sure"
|
||||
@@ -13,25 +12,15 @@ tasks:
|
||||
- cd backend && go mod tidy
|
||||
- cd frontend && pnpm install --shamefully-hoist
|
||||
|
||||
swag:
|
||||
desc: Generate swagger docs
|
||||
dir: backend/app/api/static/
|
||||
vars:
|
||||
API: "../"
|
||||
INTERNAL: "../../../internal"
|
||||
PKGS: "../../../pkgs"
|
||||
cmds:
|
||||
- swag fmt --dir={{ .API }}
|
||||
- swag init --dir={{ .API }},{{ .INTERNAL }}/core/services,{{ .INTERNAL }}/data/repo --parseDependency
|
||||
sources:
|
||||
- "./backend/app/api/**/*"
|
||||
- "./backend/internal/data/**"
|
||||
- "./backend/internal/core/services/**/*"
|
||||
- "./backend/app/tools/typegen/main.go"
|
||||
|
||||
typescript-types:
|
||||
desc: Generates typescript types from swagger definition
|
||||
generate:
|
||||
desc: |
|
||||
Generates collateral files from the backend project
|
||||
including swagger docs and typescripts type for the frontend
|
||||
deps:
|
||||
- db:generate
|
||||
cmds:
|
||||
- cd backend/app/api/static && swag fmt --dir=../
|
||||
- cd backend/app/api/static && swag init --dir=../,../../../internal,../../../pkgs
|
||||
- |
|
||||
npx swagger-typescript-api \
|
||||
--no-client \
|
||||
@@ -39,17 +28,12 @@ tasks:
|
||||
--path ./backend/app/api/static/docs/swagger.json \
|
||||
--output ./frontend/lib/api/types
|
||||
- go run ./backend/app/tools/typegen/main.go ./frontend/lib/api/types/data-contracts.ts
|
||||
sources:
|
||||
- ./backend/app/tools/typegen/main.go
|
||||
- ./backend/app/api/static/docs/swagger.json
|
||||
|
||||
generate:
|
||||
deps:
|
||||
- db:generate
|
||||
cmds:
|
||||
- task: swag
|
||||
- task: typescript-types
|
||||
- cp ./backend/app/api/static/docs/swagger.json docs/docs/api/openapi-2.0.json
|
||||
sources:
|
||||
- "./backend/app/api/**/*"
|
||||
- "./backend/internal/data/**"
|
||||
- "./backend/internal/core/services/**/*"
|
||||
- "./backend/app/tools/typegen/main.go"
|
||||
|
||||
go:run:
|
||||
desc: Starts the backend api server (depends on generate task)
|
||||
@@ -103,7 +87,8 @@ tasks:
|
||||
dir: backend/internal/
|
||||
cmds:
|
||||
- |
|
||||
go generate ./...
|
||||
go generate ./... \
|
||||
--template=./data/ent/schema/templates/has_id.tmpl
|
||||
sources:
|
||||
- "./backend/internal/data/ent/schema/**/*"
|
||||
|
||||
|
||||
@@ -1,74 +0,0 @@
|
||||
run:
|
||||
timeout: 10m
|
||||
skip-dirs:
|
||||
- internal/data/ent.*
|
||||
linters-settings:
|
||||
goconst:
|
||||
min-len: 5
|
||||
min-occurrences: 5
|
||||
exhaustive:
|
||||
default-signifies-exhaustive: true
|
||||
revive:
|
||||
ignore-generated-header: false
|
||||
severity: warning
|
||||
confidence: 3
|
||||
depguard:
|
||||
rules:
|
||||
main:
|
||||
deny:
|
||||
- pkg: io/util
|
||||
desc: |
|
||||
Deprecated: As of Go 1.16, the same functionality is now provided by
|
||||
package io or package os, and those implementations should be
|
||||
preferred in new code. See the specific function documentation for
|
||||
details.
|
||||
gocritic:
|
||||
enabled-checks:
|
||||
- ruleguard
|
||||
testifylint:
|
||||
enable-all: true
|
||||
tagalign:
|
||||
order:
|
||||
- json
|
||||
- schema
|
||||
- yaml
|
||||
- yml
|
||||
- toml
|
||||
- validate
|
||||
linters:
|
||||
disable-all: true
|
||||
enable:
|
||||
- asciicheck
|
||||
- bodyclose
|
||||
- depguard
|
||||
- dogsled
|
||||
- errcheck
|
||||
- errorlint
|
||||
- exhaustive
|
||||
- exportloopref
|
||||
- gochecknoinits
|
||||
- goconst
|
||||
- gocritic
|
||||
- gocyclo
|
||||
- gofmt
|
||||
- goprintffuncname
|
||||
- gosimple
|
||||
- govet
|
||||
- ineffassign
|
||||
- misspell
|
||||
- nakedret
|
||||
- revive
|
||||
- staticcheck
|
||||
- stylecheck
|
||||
- tagalign
|
||||
- testifylint
|
||||
- typecheck
|
||||
- typecheck
|
||||
- unconvert
|
||||
- unused
|
||||
- whitespace
|
||||
- zerologlint
|
||||
- sqlclosecheck
|
||||
issues:
|
||||
exclude-use-default: false
|
||||
fix: true
|
||||
BIN backend/api Executable file
Binary file not shown.
@@ -1,18 +1,22 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
|
||||
"github.com/sysadminsmedia/homebox/backend/pkgs/mailer"
|
||||
"time"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/sys/config"
|
||||
"github.com/hay-kot/homebox/backend/pkgs/mailer"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
)
|
||||
|
||||
type app struct {
|
||||
conf *config.Config
|
||||
mailer mailer.Mailer
|
||||
db *ent.Client
|
||||
server *server.Server
|
||||
repos *repo.AllRepos
|
||||
services *services.AllServices
|
||||
bus *eventbus.EventBus
|
||||
@@ -33,3 +37,13 @@ func new(conf *config.Config) *app {
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
func (a *app) startBgTask(t time.Duration, fn func()) {
|
||||
timer := time.NewTimer(t)
|
||||
|
||||
for {
|
||||
timer.Reset(t)
|
||||
a.server.Background(fn)
|
||||
<-timer.C
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
)
|
||||
|
||||
type BackgroundTask struct {
|
||||
name string
|
||||
Interval time.Duration
|
||||
Fn func(context.Context)
|
||||
}
|
||||
|
||||
func (tsk *BackgroundTask) Name() string {
|
||||
return tsk.name
|
||||
}
|
||||
|
||||
func NewTask(name string, interval time.Duration, fn func(context.Context)) *BackgroundTask {
|
||||
return &BackgroundTask{
|
||||
Interval: interval,
|
||||
Fn: fn,
|
||||
}
|
||||
}
|
||||
|
||||
func (tsk *BackgroundTask) Start(ctx context.Context) error {
|
||||
timer := time.NewTimer(tsk.Interval)
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return nil
|
||||
case <-timer.C:
|
||||
timer.Reset(tsk.Interval)
|
||||
tsk.Fn(ctx)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,10 +3,9 @@ package main
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
)
|
||||
|
||||
func (a *app) SetupDemo() {
|
||||
@@ -16,12 +15,9 @@ func (a *app) SetupDemo() {
|
||||
,Office,IOT;Home Assistant; Z-Wave,1,Zooz 110v Power Switch,"Zooz Z-Wave Plus Power Switch ZEN15 for 110V AC Units, Sump Pumps, Humidifiers, and More",,,ZEN15,Zooz,,Amazon,39.95,10/13/2021,,,,,,,
|
||||
,Downstairs,IOT;Home Assistant; Z-Wave,1,Ecolink Z-Wave PIR Motion Sensor,"Ecolink Z-Wave PIR Motion Detector Pet Immune, White (PIRZWAVE2.5-ECO)",,,PIRZWAVE2.5-ECO,Ecolink,,Amazon,35.58,10/21/2020,,,,,,,
|
||||
,Entry,IOT;Home Assistant; Z-Wave,1,Yale Security Touchscreen Deadbolt,"Yale Security YRD226-ZW2-619 YRD226ZW2619 Touchscreen Deadbolt, Satin Nickel",,,YRD226ZW2619,Yale,,Amazon,120.39,10/14/2020,,,,,,,
|
||||
,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,"UltraPro Z-Wave Smart Rocker Light Dimmer with QuickFit and SimpleWire, 3-Way Ready, Compatible with Alexa, Google Assistant, ZWave Hub Required, Repeater/Range Extender, White Paddle Only, 39351",,,39351,Honeywell,,Amazon,65.98,09/30/0202,,,,,,,
|
||||
,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,"UltraPro Z-Wave Smart Rocker Light Dimmer with QuickFit and SimpleWire, 3-Way Ready, Compatible with Alexa, Google Assistant, ZWave Hub Required, Repeater/Range Extender, White Paddle Only, 39351",,,39351,Honeywell,,Amazon,65.98,09/30/0202,,,,,,,
|
||||
`
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
registration := services.UserRegistration{
|
||||
Email: "demo@example.com",
|
||||
Name: "Demo",
|
||||
@@ -29,34 +25,21 @@ func (a *app) SetupDemo() {
|
||||
}
|
||||
|
||||
// First check if we've already setup a demo user and skip if so
|
||||
log.Debug().Msg("Checking if demo user already exists")
|
||||
_, err := a.services.User.Login(ctx, registration.Email, registration.Password, false)
|
||||
_, err := a.services.User.Login(context.Background(), registration.Email, registration.Password, false)
|
||||
if err == nil {
|
||||
log.Info().Msg("Demo user already exists, skipping setup")
|
||||
return
|
||||
}
|
||||
|
||||
log.Debug().Msg("Demo user does not exist, setting up demo")
|
||||
_, err = a.services.User.RegisterUser(ctx, registration)
|
||||
_, err = a.services.User.RegisterUser(context.Background(), registration)
|
||||
if err != nil {
|
||||
log.Err(err).Msg("Failed to register demo user")
|
||||
log.Fatal().Msg("Failed to setup demo")
|
||||
}
|
||||
|
||||
token, err := a.services.User.Login(ctx, registration.Email, registration.Password, false)
|
||||
if err != nil {
|
||||
log.Err(err).Msg("Failed to login demo user")
|
||||
log.Fatal().Msg("Failed to setup demo")
|
||||
return
|
||||
}
|
||||
self, err := a.services.User.GetSelf(ctx, token.Raw)
|
||||
if err != nil {
|
||||
log.Err(err).Msg("Failed to get self")
|
||||
log.Fatal().Msg("Failed to setup demo")
|
||||
return
|
||||
}
|
||||
token, _ := a.services.User.Login(context.Background(), registration.Email, registration.Password, false)
|
||||
self, _ := a.services.User.GetSelf(context.Background(), token.Raw)
|
||||
|
||||
_, err = a.services.Items.CsvImport(ctx, self.GroupID, strings.NewReader(csvText))
|
||||
_, err = a.services.Items.CsvImport(context.Background(), self.GroupID, strings.NewReader(csvText))
|
||||
if err != nil {
|
||||
log.Err(err).Msg("Failed to import CSV")
|
||||
log.Fatal().Msg("Failed to setup demo")
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
// Package debughandlers provides handlers for debugging.
|
||||
package debughandlers
|
||||
|
||||
import (
|
||||
|
||||
@@ -1,18 +1,16 @@
|
||||
// Package v1 provides the API handlers for version 1 of the API.
|
||||
package v1
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
|
||||
"github.com/olahol/melody"
|
||||
)
|
||||
@@ -51,14 +49,7 @@ func WithRegistration(allowRegistration bool) func(*V1Controller) {
|
||||
}
|
||||
}
|
||||
|
||||
func WithSecureCookies(secure bool) func(*V1Controller) {
|
||||
return func(ctrl *V1Controller) {
|
||||
ctrl.cookieSecure = secure
|
||||
}
|
||||
}
|
||||
|
||||
type V1Controller struct {
|
||||
cookieSecure bool
|
||||
repo *repo.AllRepos
|
||||
svc *services.AllServices
|
||||
maxUploadSize int64
|
||||
@@ -76,7 +67,7 @@ type (
|
||||
BuildTime string `json:"buildTime"`
|
||||
}
|
||||
|
||||
APISummary struct {
|
||||
ApiSummary struct {
|
||||
Healthy bool `json:"health"`
|
||||
Versions []string `json:"versions"`
|
||||
Title string `json:"title"`
|
||||
@@ -87,7 +78,7 @@ type (
|
||||
}
|
||||
)
|
||||
|
||||
func BaseURLFunc(prefix string) func(s string) string {
|
||||
func BaseUrlFunc(prefix string) func(s string) string {
|
||||
return func(s string) string {
|
||||
return prefix + "/v1" + s
|
||||
}
|
||||
@@ -113,11 +104,11 @@ func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, bus *event
|
||||
// @Summary Application Info
|
||||
// @Tags Base
|
||||
// @Produce json
|
||||
// @Success 200 {object} APISummary
|
||||
// @Success 200 {object} ApiSummary
|
||||
// @Router /v1/status [GET]
|
||||
func (ctrl *V1Controller) HandleBase(ready ReadyFunc, build Build) errchain.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) error {
|
||||
return server.JSON(w, http.StatusOK, APISummary{
|
||||
return server.JSON(w, http.StatusOK, ApiSummary{
|
||||
Healthy: ready(),
|
||||
Title: "Homebox",
|
||||
Message: "Track, Manage, and Organize your Things",
|
||||
@@ -128,27 +119,7 @@ func (ctrl *V1Controller) HandleBase(ready ReadyFunc, build Build) errchain.Hand
|
||||
}
|
||||
}
|
||||
|
||||
// HandleCurrency godoc
|
||||
//
|
||||
// @Summary Currency
|
||||
// @Tags Base
|
||||
// @Produce json
|
||||
// @Success 200 {object} currencies.Currency
|
||||
// @Router /v1/currency [GET]
|
||||
func (ctrl *V1Controller) HandleCurrency() errchain.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) error {
|
||||
// Set Cache for 10 Minutes
|
||||
w.Header().Set("Cache-Control", "max-age=600")
|
||||
|
||||
return server.JSON(w, http.StatusOK, ctrl.svc.Currencies.Slice())
|
||||
}
|
||||
}
|
||||
|
||||
func (ctrl *V1Controller) HandleCacheWS() errchain.HandlerFunc {
|
||||
type eventMsg struct {
|
||||
Event string `json:"event"`
|
||||
}
|
||||
|
||||
m := melody.New()
|
||||
|
||||
m.HandleConnect(func(s *melody.Session) {
|
||||
@@ -164,15 +135,9 @@ func (ctrl *V1Controller) HandleCacheWS() errchain.HandlerFunc {
|
||||
return
|
||||
}
|
||||
|
||||
msg := &eventMsg{Event: e}
|
||||
jsonStr := fmt.Sprintf(`{"event": "%s"}`, e)
|
||||
|
||||
jsonBytes, err := json.Marshal(msg)
|
||||
if err != nil {
|
||||
log.Log().Msgf("error marshling event data %v: %v", data, err)
|
||||
return
|
||||
}
|
||||
|
||||
_ = m.BroadcastFilter(jsonBytes, func(s *melody.Session) bool {
|
||||
_ = m.BroadcastFilter([]byte(jsonStr), func(s *melody.Session) bool {
|
||||
groupIDStr, ok := s.Get("gid")
|
||||
if !ok {
|
||||
return false
|
||||
@@ -188,25 +153,6 @@ func (ctrl *V1Controller) HandleCacheWS() errchain.HandlerFunc {
|
||||
ctrl.bus.Subscribe(eventbus.EventLocationMutation, factory("location.mutation"))
|
||||
ctrl.bus.Subscribe(eventbus.EventItemMutation, factory("item.mutation"))
|
||||
|
||||
// Persistent asynchronous ticker that keeps all websocket connections alive with periodic pings.
|
||||
go func() {
|
||||
const interval = 10 * time.Second
|
||||
|
||||
ping := time.NewTicker(interval)
|
||||
defer ping.Stop()
|
||||
|
||||
for range ping.C {
|
||||
msg := &eventMsg{Event: "ping"}
|
||||
|
||||
pingBytes, err := json.Marshal(msg)
|
||||
if err != nil {
|
||||
log.Log().Msgf("error marshaling ping: %v", err)
|
||||
} else {
|
||||
_ = m.Broadcast(pingBytes)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
return func(w http.ResponseWriter, r *http.Request) error {
|
||||
return m.HandleRequest(w, r)
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ import (
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/google/uuid"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
|
||||
"github.com/hay-kot/homebox/backend/internal/sys/validate"
|
||||
)
|
||||
|
||||
// routeID extracts the ID from the request URL. If the ID is not in a valid
|
||||
|
||||
@@ -5,11 +5,11 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/sys/validate"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
|
||||
)
|
||||
|
||||
type ActionAmountResult struct {
|
||||
@@ -68,16 +68,3 @@ func (ctrl *V1Controller) HandleEnsureImportRefs() errchain.HandlerFunc {
|
||||
func (ctrl *V1Controller) HandleItemDateZeroOut() errchain.HandlerFunc {
|
||||
return actionHandlerFactory("zero out date time", ctrl.repo.Items.ZeroOutTimeFields)
|
||||
}
|
||||
|
||||
// HandleSetPrimaryPhotos godoc
|
||||
//
|
||||
// @Summary Set Primary Photos
|
||||
// @Description Sets the first photo of each item as the primary photo
|
||||
// @Tags Actions
|
||||
// @Produce json
|
||||
// @Success 200 {object} ActionAmountResult
|
||||
// @Router /v1/actions/set-primary-photos [Post]
|
||||
// @Security Bearer
|
||||
func (ctrl *V1Controller) HandleSetPrimaryPhotos() errchain.HandlerFunc {
|
||||
return actionHandlerFactory("ensure asset IDs", ctrl.repo.Items.SetPrimaryPhotos)
|
||||
}
|
||||
|
||||
@@ -6,11 +6,11 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/sys/validate"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
|
||||
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
@@ -27,17 +27,17 @@ import (
|
||||
func (ctrl *V1Controller) HandleAssetGet() errchain.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) error {
|
||||
ctx := services.NewContext(r.Context())
|
||||
assetIDParam := chi.URLParam(r, "id")
|
||||
assetIDParam = strings.ReplaceAll(assetIDParam, "-", "") // Remove dashes
|
||||
assetIdParam := chi.URLParam(r, "id")
|
||||
assetIdParam = strings.ReplaceAll(assetIdParam, "-", "") // Remove dashes
|
||||
// Convert the asset ID to an int64
|
||||
assetID, err := strconv.ParseInt(assetIDParam, 10, 64)
|
||||
assetId, err := strconv.ParseInt(assetIdParam, 10, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
pageParam := r.URL.Query().Get("page")
|
||||
var page int64 = -1
|
||||
if pageParam != "" {
|
||||
page, err = strconv.ParseInt(pageParam, 10, 32)
|
||||
page, err = strconv.ParseInt(pageParam, 10, 64)
|
||||
if err != nil {
|
||||
return server.JSON(w, http.StatusBadRequest, "Invalid page number")
|
||||
}
|
||||
@@ -46,13 +46,13 @@ func (ctrl *V1Controller) HandleAssetGet() errchain.HandlerFunc {
|
||||
pageSizeParam := r.URL.Query().Get("pageSize")
|
||||
var pageSize int64 = -1
|
||||
if pageSizeParam != "" {
|
||||
pageSize, err = strconv.ParseInt(pageSizeParam, 10, 32)
|
||||
pageSize, err = strconv.ParseInt(pageSizeParam, 10, 64)
|
||||
if err != nil {
|
||||
return server.JSON(w, http.StatusBadRequest, "Invalid page size")
|
||||
}
|
||||
}
|
||||
|
||||
items, err := ctrl.repo.Items.QueryByAssetID(r.Context(), ctx.GID, repo.AssetID(assetID), int(page), int(pageSize))
|
||||
items, err := ctrl.repo.Items.QueryByAssetID(r.Context(), ctx.GID, repo.AssetID(assetId), int(page), int(pageSize))
|
||||
if err != nil {
|
||||
log.Err(err).Msg("failed to get item")
|
||||
return validate.NewRequestError(err, http.StatusInternalServerError)
|
||||
|
||||
@@ -3,21 +3,14 @@ package v1
|
||||
import (
|
||||
"errors"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/sys/validate"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
|
||||
)
|
||||
|
||||
const (
|
||||
cookieNameToken = "hb.auth.token"
|
||||
cookieNameRemember = "hb.auth.remember"
|
||||
cookieNameSession = "hb.auth.session"
|
||||
)
|
||||
|
||||
type (
|
||||
@@ -34,49 +27,6 @@ type (
|
||||
}
|
||||
)
|
||||
|
||||
type CookieContents struct {
|
||||
Token string
|
||||
ExpiresAt time.Time
|
||||
Remember bool
|
||||
}
|
||||
|
||||
func GetCookies(r *http.Request) (*CookieContents, error) {
|
||||
cookie, err := r.Cookie(cookieNameToken)
|
||||
if err != nil {
|
||||
return nil, errors.New("authorization cookie is required")
|
||||
}
|
||||
|
||||
rememberCookie, err := r.Cookie(cookieNameRemember)
|
||||
if err != nil {
|
||||
return nil, errors.New("remember cookie is required")
|
||||
}
|
||||
|
||||
return &CookieContents{
|
||||
Token: cookie.Value,
|
||||
ExpiresAt: cookie.Expires,
|
||||
Remember: rememberCookie.Value == "true",
|
||||
}, nil
|
||||
}
|
||||
|
||||
// AuthProvider is an interface that can be implemented by any authentication provider.
|
||||
// to extend authentication methods for the API.
|
||||
type AuthProvider interface {
|
||||
// Name returns the name of the authentication provider. This should be a unique name.
|
||||
// that is URL friendly.
|
||||
//
|
||||
// Example: "local", "ldap"
|
||||
Name() string
|
||||
// Authenticate is called when a user attempts to login to the API. The implementation
|
||||
// should return an error if the user cannot be authenticated. If an error is returned
|
||||
// the API controller will return a vague error message to the user.
|
||||
//
|
||||
// Authenticate should do the following:
|
||||
//
|
||||
// 1. Ensure that the user exists within the database (either create, or get)
|
||||
// 2. On successful authentication, they must set the user cookies.
|
||||
Authenticate(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error)
|
||||
}
|
||||

// HandleAuthLogin godoc
//
// @Summary User Login
@@ -85,42 +35,52 @@ type AuthProvider interface {
// @Accept application/json
// @Param username formData string false "string" example(admin@admin.com)
// @Param password formData string false "string" example(admin)
// @Param payload body LoginForm true "Login Data"
// @Param provider query string false "auth provider"
// @Param payload body LoginForm true "Login Data"
// @Produce json
// @Success 200 {object} TokenResponse
// @Router /v1/users/login [POST]
func (ctrl *V1Controller) HandleAuthLogin(ps ...AuthProvider) errchain.HandlerFunc {
if len(ps) == 0 {
panic("no auth providers provided")
}

providers := make(map[string]AuthProvider)
for _, p := range ps {
log.Info().Str("name", p.Name()).Msg("registering auth provider")
providers[p.Name()] = p
}

func (ctrl *V1Controller) HandleAuthLogin() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
// Extract provider query
provider := r.URL.Query().Get("provider")
if provider == "" {
provider = "local"
loginForm := &LoginForm{}

switch r.Header.Get("Content-Type") {
case "application/x-www-form-urlencoded":
err := r.ParseForm()
if err != nil {
return errors.New("failed to parse form")
}

loginForm.Username = r.PostFormValue("username")
loginForm.Password = r.PostFormValue("password")
loginForm.StayLoggedIn = r.PostFormValue("stayLoggedIn") == "true"
case "application/json":
err := server.Decode(r, loginForm)
if err != nil {
log.Err(err).Msg("failed to decode login form")
return errors.New("failed to decode login form")
}
default:
return server.JSON(w, http.StatusBadRequest, errors.New("invalid content type"))
}

// Get the provider
p, ok := providers[provider]
if !ok {
return validate.NewRequestError(errors.New("invalid auth provider"), http.StatusBadRequest)
if loginForm.Username == "" || loginForm.Password == "" {
return validate.NewFieldErrors(
validate.FieldError{
Field: "username",
Error: "username or password is empty",
},
validate.FieldError{
Field: "password",
Error: "username or password is empty",
},
)
}

newToken, err := p.Authenticate(w, r)
newToken, err := ctrl.svc.User.Login(r.Context(), strings.ToLower(loginForm.Username), loginForm.Password, loginForm.StayLoggedIn)
if err != nil {
log.Err(err).Msg("failed to authenticate")
return server.JSON(w, http.StatusInternalServerError, err.Error())
return validate.NewRequestError(errors.New("authentication failed"), http.StatusInternalServerError)
}

ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, true)
return server.JSON(w, http.StatusOK, TokenResponse{
Token: "Bearer " + newToken.Raw,
ExpiresAt: newToken.ExpiresAt,
@@ -148,12 +108,11 @@ func (ctrl *V1Controller) HandleAuthLogout() errchain.HandlerFunc {
return validate.NewRequestError(err, http.StatusInternalServerError)
}

ctrl.unsetCookies(w, noPort(r.Host))
return server.JSON(w, http.StatusNoContent, nil)
}
}

// HandleAuthRefresh godoc
// HandleAuthLogout godoc
//
// @Summary User Token Refresh
// @Description handleAuthRefresh returns a handler that will issue a new token from an existing token.
@@ -174,78 +133,6 @@ func (ctrl *V1Controller) HandleAuthRefresh() errchain.HandlerFunc {
return validate.NewUnauthorizedError()
}

ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, false)
return server.JSON(w, http.StatusOK, newToken)
}
}

func noPort(host string) string {
return strings.Split(host, ":")[0]
}

func (ctrl *V1Controller) setCookies(w http.ResponseWriter, domain, token string, expires time.Time, remember bool) {
http.SetCookie(w, &http.Cookie{
Name: cookieNameRemember,
Value: strconv.FormatBool(remember),
Expires: expires,
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: true,
Path: "/",
})

// Set HTTP only cookie
http.SetCookie(w, &http.Cookie{
Name: cookieNameToken,
Value: token,
Expires: expires,
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: true,
Path: "/",
})

// Set Fake Session cookie
http.SetCookie(w, &http.Cookie{
Name: cookieNameSession,
Value: "true",
Expires: expires,
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: false,
Path: "/",
})
}

func (ctrl *V1Controller) unsetCookies(w http.ResponseWriter, domain string) {
http.SetCookie(w, &http.Cookie{
Name: cookieNameToken,
Value: "",
Expires: time.Unix(0, 0),
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: true,
Path: "/",
})

http.SetCookie(w, &http.Cookie{
Name: cookieNameRemember,
Value: "false",
Expires: time.Unix(0, 0),
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: true,
Path: "/",
})

// Set Fake Session cookie
http.SetCookie(w, &http.Cookie{
Name: cookieNameSession,
Value: "false",
Expires: time.Unix(0, 0),
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: false,
Path: "/",
})
}
@@ -4,16 +4,15 @@ import (
"net/http"
"time"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

type (
GroupInvitationCreate struct {
Uses int `json:"uses" validate:"required,min=1,max=100"`
Uses int `json:"uses" validate:"required,min=1,max=100"`
ExpiresAt time.Time `json:"expiresAt"`
}

@@ -53,14 +52,6 @@ func (ctrl *V1Controller) HandleGroupGet() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleGroupUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, body repo.GroupUpdate) (repo.Group, error) {
auth := services.NewContext(r.Context())

ok := ctrl.svc.Currencies.IsSupported(body.Currency)
if !ok {
return repo.Group{}, validate.NewFieldErrors(
validate.NewFieldError("currency", "currency '"+body.Currency+"' is not supported"),
)
}

return ctrl.svc.Group.UpdateGroup(auth, body)
}

@@ -8,13 +8,13 @@ import (
"strings"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleItemsGetAll godoc
@@ -27,7 +27,6 @@ import (
// @Param pageSize query int false "items per page"
// @Param labels query []string false "label Ids" collectionFormat(multi)
// @Param locations query []string false "location Ids" collectionFormat(multi)
// @Param parentIds query []string false "parent Ids" collectionFormat(multi)
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/items [GET]
// @Security Bearer
@@ -57,7 +56,6 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
Search: params.Get("q"),
LocationIDs: queryUUIDList(params, "locations"),
LabelIDs: queryUUIDList(params, "labels"),
ParentItemIDs: queryUUIDList(params, "parentIds"),
IncludeArchived: queryBool(params.Get("includeArchived")),
Fields: filterFieldItems(params["fields"]),
OrderBy: params.Get("orderBy"),
@@ -93,48 +91,6 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
}
}

// HandleItemFullPath godoc
//
// @Summary Get the full path of an item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} []repo.ItemPath
// @Router /v1/items/{id}/path [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemFullPath() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) ([]repo.ItemPath, error) {
auth := services.NewContext(r.Context())
item, err := ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
if err != nil {
return nil, err
}

paths, err := ctrl.repo.Locations.PathForLoc(auth, auth.GID, item.Location.ID)
if err != nil {
return nil, err
}

if item.Parent != nil {
paths = append(paths, repo.ItemPath{
Type: repo.ItemTypeItem,
ID: item.Parent.ID,
Name: item.Parent.Name,
})
}

paths = append(paths, repo.ItemPath{
Type: repo.ItemTypeItem,
ID: item.ID,
Name: item.Name,
})

return paths, nil
}

return adapters.CommandID("id", fn, http.StatusOK)
}

// HandleItemsCreate godoc
//
// @Summary Create Item
@@ -211,6 +167,7 @@ func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc {
return adapters.ActionID("id", fn, http.StatusOK)
}


// HandleItemPatch godocs
//
// @Summary Update Item
@@ -226,12 +183,12 @@ func (ctrl *V1Controller) HandleItemPatch() errchain.HandlerFunc {
auth := services.NewContext(r.Context())

body.ID = ID
err := ctrl.repo.Items.Patch(auth, auth.GID, ID, body)
if err != nil {
return repo.ItemOut{}, err
}
err := ctrl.repo.Items.Patch(auth, auth.GID, ID, body)
if err != nil {
return repo.ItemOut{}, err
}

return ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
return ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
}

return adapters.ActionID("id", fn, http.StatusOK)
@@ -274,7 +231,8 @@ func (ctrl *V1Controller) HandleGetAllCustomFieldValues() errchain.HandlerFunc {
return ctrl.repo.Items.GetAllCustomFieldValues(auth, auth.GID, q.Field)
}

return adapters.Query(fn, http.StatusOK)
return adapters.Action(fn, http.StatusOK)

}

// HandleItemsImport godocs
@@ -323,17 +281,17 @@ func (ctrl *V1Controller) HandleItemsExport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())

csvData, err := ctrl.svc.Items.ExportCSV(r.Context(), ctx.GID)
csvData, err := ctrl.svc.Items.ExportTSV(r.Context(), ctx.GID)
if err != nil {
log.Err(err).Msg("failed to export items")
return validate.NewRequestError(err, http.StatusInternalServerError)
}

w.Header().Set("Content-Type", "text/csv")
w.Header().Set("Content-Disposition", "attachment;filename=homebox-items.csv")
w.Header().Set("Content-Type", "text/tsv")
w.Header().Set("Content-Disposition", "attachment;filename=homebox-items.tsv")

writer := csv.NewWriter(w)
writer.Comma = ','
writer.Comma = '\t'
return writer.WriteAll(csvData)
}
}

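The export hunk above swaps the CSV writer's delimiter for a tab and relabels the download as .tsv. As a standalone illustration of the same standard-library idiom (the header and row values here are made up, not Homebox's export schema):

package main

import (
	"encoding/csv"
	"os"
)

func main() {
	// encoding/csv emits tab-separated values once Comma is set to '\t'.
	w := csv.NewWriter(os.Stdout)
	w.Comma = '\t'

	rows := [][]string{
		{"name", "quantity", "location"},
		{"Desk Lamp", "1", "Office"},
	}

	// WriteAll writes every row, flushes the writer, and returns any error.
	if err := w.WriteAll(rows); err != nil {
		panic(err)
	}
}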
@@ -3,16 +3,14 @@ package v1
import (
"errors"
"net/http"
"path/filepath"
"strings"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

type (
@@ -40,6 +38,7 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
if err != nil {
log.Err(err).Msg("failed to parse multipart form")
return validate.NewRequestError(errors.New("failed to parse multipart form"), http.StatusBadRequest)

}

errs := validate.NewFieldErrors()
@@ -68,15 +67,7 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {

attachmentType := r.FormValue("type")
if attachmentType == "" {
// Attempt to auto-detect the type of the file
ext := filepath.Ext(attachmentName)

switch strings.ToLower(ext) {
case ".jpg", ".jpeg", ".png", ".webp", ".gif", ".bmp", ".tiff":
attachmentType = attachment.TypePhoto.String()
default:
attachmentType = attachment.TypeAttachment.String()
}
attachmentType = attachment.TypeAttachment.String()
}

id, err := ctrl.routeID(r)

@@ -4,10 +4,10 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/web/adapters"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
|
||||
)
|
||||
|
||||
// HandleLabelsGetAll godoc
|
||||
|
||||
@@ -4,13 +4,13 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/web/adapters"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
|
||||
)
|
||||
|
||||
// HandleLocationTreeQuery godoc
|
||||
// HandleLocationTreeQuery
|
||||
//
|
||||
// @Summary Get Locations Tree
|
||||
// @Tags Locations
|
||||
@@ -28,7 +28,7 @@ func (ctrl *V1Controller) HandleLocationTreeQuery() errchain.HandlerFunc {
|
||||
return adapters.Query(fn, http.StatusOK)
|
||||
}
|
||||
|
||||
// HandleLocationGetAll godoc
|
||||
// HandleLocationGetAll
|
||||
//
|
||||
// @Summary Get All Locations
|
||||
// @Tags Locations
|
||||
@@ -46,7 +46,7 @@ func (ctrl *V1Controller) HandleLocationGetAll() errchain.HandlerFunc {
|
||||
return adapters.Query(fn, http.StatusOK)
|
||||
}
|
||||
|
||||
// HandleLocationCreate godoc
|
||||
// HandleLocationCreate
|
||||
//
|
||||
// @Summary Create Location
|
||||
// @Tags Locations
|
||||
@@ -64,7 +64,7 @@ func (ctrl *V1Controller) HandleLocationCreate() errchain.HandlerFunc {
|
||||
return adapters.Action(fn, http.StatusCreated)
|
||||
}
|
||||
|
||||
// HandleLocationDelete godoc
|
||||
// HandleLocationDelete
|
||||
//
|
||||
// @Summary Delete Location
|
||||
// @Tags Locations
|
||||
@@ -83,7 +83,7 @@ func (ctrl *V1Controller) HandleLocationDelete() errchain.HandlerFunc {
|
||||
return adapters.CommandID("id", fn, http.StatusNoContent)
|
||||
}
|
||||
|
||||
// HandleLocationGet godoc
|
||||
// HandleLocationGet
|
||||
//
|
||||
// @Summary Get Location
|
||||
// @Tags Locations
|
||||
@@ -101,7 +101,7 @@ func (ctrl *V1Controller) HandleLocationGet() errchain.HandlerFunc {
|
||||
return adapters.CommandID("id", fn, http.StatusOK)
|
||||
}
|
||||
|
||||
// HandleLocationUpdate godoc
|
||||
// HandleLocationUpdate
|
||||
//
|
||||
// @Summary Update Location
|
||||
// @Tags Locations
|
||||
|
||||
@@ -4,13 +4,13 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/web/adapters"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
|
||||
)
|
||||
|
||||
// HandleMaintenanceLogGet godoc
|
||||
// HandleMaintenanceGetLog godoc
|
||||
//
|
||||
// @Summary Get Maintenance Log
|
||||
// @Tags Maintenance
|
||||
|
||||
@@ -5,10 +5,10 @@ import (
|
||||
|
||||
"github.com/containrrr/shoutrrr"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/web/adapters"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
|
||||
)
|
||||
|
||||
// HandleGetUserNotifiers godoc
|
||||
|
||||
@@ -5,10 +5,9 @@ import (
"image/png"
"io"
"net/http"
"net/url"

"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
"github.com/yeqown/go-qrcode/v2"
"github.com/yeqown/go-qrcode/writer/standard"

@@ -44,12 +43,7 @@ func (ctrl *V1Controller) HandleGenerateQRCode() errchain.HandlerFunc {
panic(err)
}

decodedStr, err := url.QueryUnescape(q.Data)
if err != nil {
return err
}

qrc, err := qrcode.New(decodedStr)
qrc, err := qrcode.New(q.Data)
if err != nil {
return err
}

@@ -1,9 +1,10 @@
package v1

import (
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"net/http"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/httpkit/errchain"
)

// HandleBillOfMaterialsExport godoc
@@ -18,13 +19,13 @@ func (ctrl *V1Controller) HandleBillOfMaterialsExport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
actor := services.UseUserCtx(r.Context())

csv, err := ctrl.svc.Items.ExportBillOfMaterialsCSV(r.Context(), actor.GroupID)
csv, err := ctrl.svc.Items.ExportBillOfMaterialsTSV(r.Context(), actor.GroupID)
if err != nil {
return err
}

w.Header().Set("Content-Type", "text/csv")
w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.csv")
w.Header().Set("Content-Type", "text/tsv")
w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.tsv")
_, err = w.Write(csv)
return err
}

@@ -4,15 +4,15 @@ import (
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/sys/validate"
|
||||
"github.com/hay-kot/homebox/backend/internal/web/adapters"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
|
||||
)
|
||||
|
||||
// HandleGroupStatisticsLocations godoc
|
||||
// HandleGroupGet godoc
|
||||
//
|
||||
// @Summary Get Location Statistics
|
||||
// @Tags Statistics
|
||||
|
||||
@@ -5,12 +5,12 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/sys/validate"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
|
||||
)
|
||||
|
||||
// HandleUserRegistration godoc
|
||||
|
||||
@@ -2,10 +2,11 @@ package main

import (
"os"
"strings"

"github.com/hay-kot/homebox/backend/internal/sys/config"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
)

// setupLogger initializes the zerolog config
@@ -17,8 +18,24 @@ func (a *app) setupLogger() {
log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr}).With().Caller().Logger()
}

level, err := zerolog.ParseLevel(a.conf.Log.Level)
if err == nil {
zerolog.SetGlobalLevel(level)
log.Level(getLevel(a.conf.Log.Level))
}

func getLevel(l string) zerolog.Level {
switch strings.ToLower(l) {
case "debug":
return zerolog.DebugLevel
case "info":
return zerolog.InfoLevel
case "warn":
return zerolog.WarnLevel
case "error":
return zerolog.ErrorLevel
case "fatal":
return zerolog.FatalLevel
case "panic":
return zerolog.PanicLevel
default:
return zerolog.InfoLevel
}
}

@@ -1,7 +1,6 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
@@ -14,21 +13,20 @@ import (
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/chi/v5/middleware"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/migrations"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/sys/config"
|
||||
"github.com/hay-kot/homebox/backend/internal/web/mid"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/hay-kot/httpkit/graceful"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/rs/zerolog"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/rs/zerolog/pkgerrors"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/migrations"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/web/mid"
|
||||
|
||||
_ "github.com/sysadminsmedia/homebox/backend/pkgs/cgofreesqlite"
|
||||
_ "github.com/hay-kot/homebox/backend/pkgs/cgofreesqlite"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -37,15 +35,6 @@ var (
|
||||
buildTime = "now"
|
||||
)
|
||||
|
||||
func build() string {
|
||||
short := commit
|
||||
if len(short) > 7 {
|
||||
short = short[:7]
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s, commit %s, built at %s", version, short, buildTime)
|
||||
}
|
||||
|
||||
// @title Homebox API
|
||||
// @version 1.0
|
||||
// @description Track, Manage, and Organize your Things.
|
||||
@@ -58,7 +47,7 @@ func build() string {
|
||||
func main() {
|
||||
zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack
|
||||
|
||||
cfg, err := config.New(build(), "Homebox inventory management system")
|
||||
cfg, err := config.New()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
@@ -80,12 +69,12 @@ func run(cfg *config.Config) error {
|
||||
log.Fatal().Err(err).Msg("failed to create data directory")
|
||||
}
|
||||
|
||||
c, err := ent.Open("sqlite3", cfg.Storage.SqliteURL)
|
||||
c, err := ent.Open("sqlite3", cfg.Storage.SqliteUrl)
|
||||
if err != nil {
|
||||
log.Fatal().
|
||||
Err(err).
|
||||
Str("driver", "sqlite").
|
||||
Str("url", cfg.Storage.SqliteURL).
|
||||
Str("url", cfg.Storage.SqliteUrl).
|
||||
Msg("failed opening connection to sqlite")
|
||||
}
|
||||
defer func(c *ent.Client) {
|
||||
@@ -118,7 +107,7 @@ func run(cfg *config.Config) error {
|
||||
log.Fatal().
|
||||
Err(err).
|
||||
Str("driver", "sqlite").
|
||||
Str("url", cfg.Storage.SqliteURL).
|
||||
Str("url", cfg.Storage.SqliteUrl).
|
||||
Msg("failed creating schema resources")
|
||||
}
|
||||
|
||||
@@ -128,40 +117,12 @@ func run(cfg *config.Config) error {
return err
}

collectFuncs := []currencies.CollectorFunc{
currencies.CollectDefaults(),
}

if cfg.Options.CurrencyConfig != "" {
log.Info().
Str("path", cfg.Options.CurrencyConfig).
Msg("loading currency config file")

content, err := os.ReadFile(cfg.Options.CurrencyConfig)
if err != nil {
log.Fatal().
Err(err).
Str("path", cfg.Options.CurrencyConfig).
Msg("failed to read currency config file")
}

collectFuncs = append(collectFuncs, currencies.CollectJSON(bytes.NewReader(content)))
}

currencies, err := currencies.CollectionCurrencies(collectFuncs...)
if err != nil {
log.Fatal().
Err(err).
Msg("failed to collect currencies")
}

app.bus = eventbus.New()
app.db = c
app.repos = repo.New(c, app.bus, cfg.Storage.Data)
app.services = services.New(
app.repos,
services.WithAutoIncrementAssetID(cfg.Options.AutoIncrementAssetID),
services.WithCurrencies(currencies),
)

// =========================================================================
@@ -178,63 +139,38 @@ func run(cfg *config.Config) error {
middleware.StripSlashes,
)

chain := errchain.New(mid.Errors(logger))
chain := errchain.New(mid.Errors(app.server, logger))

app.mountRoutes(router, chain, app.repos)

runner := graceful.NewRunner()

runner.AddFunc("server", func(ctx context.Context) error {
httpserver := http.Server{
Addr: fmt.Sprintf("%s:%s", cfg.Web.Host, cfg.Web.Port),
Handler: router,
ReadTimeout: cfg.Web.ReadTimeout,
WriteTimeout: cfg.Web.WriteTimeout,
IdleTimeout: cfg.Web.IdleTimeout,
}

go func() {
<-ctx.Done()
_ = httpserver.Shutdown(context.Background())
}()

log.Info().Msgf("Server is running on %s:%s", cfg.Web.Host, cfg.Web.Port)
return httpserver.ListenAndServe()
})
app.server = server.NewServer(
server.WithHost(app.conf.Web.Host),
server.WithPort(app.conf.Web.Port),
)
log.Info().Msgf("Starting HTTP Server on %s:%s", app.server.Host, app.server.Port)

// =========================================================================
// Start Reoccurring Tasks

runner.AddFunc("eventbus", app.bus.Run)
go app.bus.Run()

runner.AddFunc("seed_database", func(ctx context.Context) error {
// TODO: Remove through external API that does setup
if cfg.Demo {
log.Info().Msg("Running in demo mode, creating demo data")
app.SetupDemo()
}
return nil
})

runner.AddPlugin(NewTask("purge-tokens", time.Duration(24)*time.Hour, func(ctx context.Context) {
_, err := app.repos.AuthTokens.PurgeExpiredTokens(ctx)
go app.startBgTask(time.Duration(24)*time.Hour, func() {
_, err := app.repos.AuthTokens.PurgeExpiredTokens(context.Background())
if err != nil {
log.Error().
Err(err).
Msg("failed to purge expired tokens")
}
}))

runner.AddPlugin(NewTask("purge-invitations", time.Duration(24)*time.Hour, func(ctx context.Context) {
_, err := app.repos.Groups.InvitationPurge(ctx)
})
go app.startBgTask(time.Duration(24)*time.Hour, func() {
_, err := app.repos.Groups.InvitationPurge(context.Background())
if err != nil {
log.Error().
Err(err).
Msg("failed to purge expired invitations")
}
}))

runner.AddPlugin(NewTask("send-notifications", time.Duration(1)*time.Hour, func(ctx context.Context) {
})
go app.startBgTask(time.Duration(1)*time.Hour, func() {
now := time.Now()

if now.Hour() == 8 {
@@ -246,27 +182,22 @@ func run(cfg *config.Config) error {
Msg("failed to send notifiers")
}
}
}))
})

if cfg.Debug.Enabled {
runner.AddFunc("debug", func(ctx context.Context) error {
debugserver := http.Server{
Addr: fmt.Sprintf("%s:%s", cfg.Web.Host, cfg.Debug.Port),
Handler: app.debugRouter(),
ReadTimeout: cfg.Web.ReadTimeout,
WriteTimeout: cfg.Web.WriteTimeout,
IdleTimeout: cfg.Web.IdleTimeout,
}

go func() {
<-ctx.Done()
_ = debugserver.Shutdown(context.Background())
}()

log.Info().Msgf("Debug server is running on %s:%s", cfg.Web.Host, cfg.Debug.Port)
return debugserver.ListenAndServe()
})
// TODO: Remove through external API that does setup
if cfg.Demo {
log.Info().Msg("Running in demo mode, creating demo data")
app.SetupDemo()
}

return runner.Start(context.Background())
if cfg.Debug.Enabled {
debugrouter := app.debugRouter()
go func() {
if err := http.ListenAndServe(":"+cfg.Debug.Port, debugrouter); err != nil {
log.Fatal().Err(err).Msg("failed to start debug server")
}
}()
}

return app.server.Start(router)
}

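The hunks above trade ad-hoc go app.startBgTask(...) goroutines for named tasks registered on a graceful runner from github.com/hay-kot/httpkit/graceful. The bodies of startBgTask and NewTask are not part of this compare, so the snippet below is only a generic standard-library sketch of the periodic-task pattern both approaches implement, not the project's code:

package main

import (
	"context"
	"time"
)

// runEvery is an illustrative stand-in for a periodic background task:
// it invokes fn on the given interval until the context is cancelled.
func runEvery(ctx context.Context, interval time.Duration, fn func(context.Context)) {
	ticker := time.NewTicker(interval)
	defer ticker.Stop()

	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			fn(ctx)
		}
	}
}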
@@ -7,11 +7,9 @@ import (
"net/url"
"strings"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain"
v1 "github.com/sysadminsmedia/homebox/backend/app/api/handlers/v1"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

type tokenHasKey struct {
@@ -96,6 +94,20 @@ func getQuery(r *http.Request) (string, error) {
return token, nil
}

func getCookie(r *http.Request) (string, error) {
cookie, err := r.Cookie("hb.auth.token")
if err != nil {
return "", errors.New("access_token cookie is required")
}

token, err := url.QueryUnescape(cookie.Value)
if err != nil {
return "", errors.New("access_token cookie is required")
}

return token, nil
}

// mwAuthToken is a middleware that will check the database for a stateful token
// and attach it's user to the request context, or return an appropriate error.
// Authorization support is by token via Headers or Query Parameter
@@ -103,35 +115,26 @@ func getQuery(r *http.Request) (string, error) {
// Example:
// - header = "Bearer 1234567890"
// - query = "?access_token=1234567890"
// - cookie = hb.auth.token = 1234567890
func (a *app) mwAuthToken(next errchain.Handler) errchain.Handler {
return errchain.HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
keyFuncs := [...]KeyFunc{
getBearer,
getCookie,
getQuery,
}

var requestToken string

// We ignore the error to allow the next strategy to be attempted
{
cookies, _ := v1.GetCookies(r)
if cookies != nil {
requestToken = cookies.Token
for _, keyFunc := range keyFuncs {
token, err := keyFunc(r)
if err == nil {
requestToken = token
break
}
}

if requestToken == "" {
keyFuncs := [...]KeyFunc{
getBearer,
getQuery,
}

for _, keyFunc := range keyFuncs {
token, err := keyFunc(r)
if err == nil {
requestToken = token
break
}
}
}

if requestToken == "" {
return validate.NewRequestError(errors.New("authorization header or query is required"), http.StatusUnauthorized)
return validate.NewRequestError(errors.New("Authorization header or query is required"), http.StatusUnauthorized)
}

requestToken = strings.TrimPrefix(requestToken, "Bearer ")
@@ -141,11 +144,7 @@ func (a *app) mwAuthToken(next errchain.Handler) errchain.Handler {
usr, err := a.services.User.GetSelf(r.Context(), requestToken)
// Check the database for the token
if err != nil {
if ent.IsNotFound(err) {
return validate.NewRequestError(errors.New("valid authorization token is required"), http.StatusUnauthorized)
}

return err
return validate.NewRequestError(errors.New("valid authorization header is required"), http.StatusUnauthorized)
}

r = r.WithContext(services.SetUserCtx(r.Context(), &usr, requestToken))

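The middleware comments above list the three token transports it will accept. As an illustrative client-side sketch (host, port, and token value are placeholders, not taken from this compare), each transport could be exercised like this:

package main

import "net/http"

func main() {
	token := "1234567890" // placeholder, not a real credential
	base := "http://localhost:7745/api/v1/items"

	// 1. Authorization header.
	withHeader, _ := http.NewRequest(http.MethodGet, base, nil)
	withHeader.Header.Set("Authorization", "Bearer "+token)

	// 2. access_token query parameter.
	withQuery, _ := http.NewRequest(http.MethodGet, base+"?access_token="+token, nil)

	// 3. hb.auth.token cookie, matching the name checked by getCookie above.
	withCookie, _ := http.NewRequest(http.MethodGet, base, nil)
	withCookie.AddCookie(&http.Cookie{Name: "hb.auth.token", Value: token})

	client := &http.Client{}
	for _, req := range []*http.Request{withHeader, withQuery, withCookie} {
		if resp, err := client.Do(req); err == nil {
			resp.Body.Close()
		}
	}
}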
@@ -1,2 +0,0 @@
|
||||
// Package providers provides a authentication abstraction for the backend.
|
||||
package providers
|
||||
@@ -1,55 +0,0 @@
|
||||
package providers
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net/http"
|
||||
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
|
||||
)
|
||||
|
||||
type LoginForm struct {
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
StayLoggedIn bool `json:"stayLoggedIn"`
|
||||
}
|
||||
|
||||
func getLoginForm(r *http.Request) (LoginForm, error) {
|
||||
loginForm := LoginForm{}
|
||||
|
||||
switch r.Header.Get("Content-Type") {
|
||||
case "application/x-www-form-urlencoded":
|
||||
err := r.ParseForm()
|
||||
if err != nil {
|
||||
return loginForm, errors.New("failed to parse form")
|
||||
}
|
||||
|
||||
loginForm.Username = r.PostFormValue("username")
|
||||
loginForm.Password = r.PostFormValue("password")
|
||||
loginForm.StayLoggedIn = r.PostFormValue("stayLoggedIn") == "true"
|
||||
case "application/json":
|
||||
err := server.Decode(r, &loginForm)
|
||||
if err != nil {
|
||||
log.Err(err).Msg("failed to decode login form")
|
||||
return loginForm, errors.New("failed to decode login form")
|
||||
}
|
||||
default:
|
||||
return loginForm, errors.New("invalid content type")
|
||||
}
|
||||
|
||||
if loginForm.Username == "" || loginForm.Password == "" {
|
||||
return loginForm, validate.NewFieldErrors(
|
||||
validate.FieldError{
|
||||
Field: "username",
|
||||
Error: "username or password is empty",
|
||||
},
|
||||
validate.FieldError{
|
||||
Field: "password",
|
||||
Error: "username or password is empty",
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
return loginForm, nil
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
package providers
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
)
|
||||
|
||||
type LocalProvider struct {
|
||||
service *services.UserService
|
||||
}
|
||||
|
||||
func NewLocalProvider(service *services.UserService) *LocalProvider {
|
||||
return &LocalProvider{
|
||||
service: service,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *LocalProvider) Name() string {
|
||||
return "local"
|
||||
}
|
||||
|
||||
func (p *LocalProvider) Authenticate(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error) {
|
||||
loginForm, err := getLoginForm(r)
|
||||
if err != nil {
|
||||
return services.UserAuthTokenDetail{}, err
|
||||
}
|
||||
|
||||
return p.service.Login(r.Context(), loginForm.Username, loginForm.Password, loginForm.StayLoggedIn)
|
||||
}
|
||||
@@ -10,14 +10,13 @@ import (
|
||||
"path/filepath"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/hay-kot/homebox/backend/app/api/handlers/debughandlers"
|
||||
v1 "github.com/hay-kot/homebox/backend/app/api/handlers/v1"
|
||||
_ "github.com/hay-kot/homebox/backend/app/api/static/docs"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
httpSwagger "github.com/swaggo/http-swagger/v2" // http-swagger middleware
|
||||
"github.com/sysadminsmedia/homebox/backend/app/api/handlers/debughandlers"
|
||||
v1 "github.com/sysadminsmedia/homebox/backend/app/api/handlers/v1"
|
||||
"github.com/sysadminsmedia/homebox/backend/app/api/providers"
|
||||
_ "github.com/sysadminsmedia/homebox/backend/app/api/static/docs"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
httpSwagger "github.com/swaggo/http-swagger" // http-swagger middleware
|
||||
)
|
||||
|
||||
const prefix = "/api"
|
||||
@@ -47,12 +46,12 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
|
||||
// =========================================================================
|
||||
// API Version 1
|
||||
|
||||
v1Base := v1.BaseURLFunc(prefix)
|
||||
v1Base := v1.BaseUrlFunc(prefix)
|
||||
|
||||
v1Ctrl := v1.NewControllerV1(
|
||||
a.services,
|
||||
a.repos,
|
||||
a.bus,
|
||||
a.bus,
|
||||
v1.WithMaxUploadSize(a.conf.Web.MaxUploadSize),
|
||||
v1.WithRegistration(a.conf.Options.AllowRegistration),
|
||||
v1.WithDemoStatus(a.conf.Demo), // Disable Password Change in Demo Mode
|
||||
@@ -64,14 +63,8 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
|
||||
BuildTime: buildTime,
|
||||
})))
|
||||
|
||||
r.Get(v1Base("/currencies"), chain.ToHandlerFunc(v1Ctrl.HandleCurrency()))
|
||||
|
||||
providers := []v1.AuthProvider{
|
||||
providers.NewLocalProvider(a.services.User),
|
||||
}
|
||||
|
||||
r.Post(v1Base("/users/register"), chain.ToHandlerFunc(v1Ctrl.HandleUserRegistration()))
|
||||
r.Post(v1Base("/users/login"), chain.ToHandlerFunc(v1Ctrl.HandleAuthLogin(providers...)))
|
||||
r.Post(v1Base("/users/login"), chain.ToHandlerFunc(v1Ctrl.HandleAuthLogin()))
|
||||
|
||||
userMW := []errchain.Middleware{
|
||||
a.mwAuthToken,
|
||||
@@ -99,7 +92,6 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
|
||||
r.Post(v1Base("/actions/ensure-asset-ids"), chain.ToHandlerFunc(v1Ctrl.HandleEnsureAssetID(), userMW...))
|
||||
r.Post(v1Base("/actions/zero-item-time-fields"), chain.ToHandlerFunc(v1Ctrl.HandleItemDateZeroOut(), userMW...))
|
||||
r.Post(v1Base("/actions/ensure-import-refs"), chain.ToHandlerFunc(v1Ctrl.HandleEnsureImportRefs(), userMW...))
|
||||
r.Post(v1Base("/actions/set-primary-photos"), chain.ToHandlerFunc(v1Ctrl.HandleSetPrimaryPhotos(), userMW...))
|
||||
|
||||
r.Get(v1Base("/locations"), chain.ToHandlerFunc(v1Ctrl.HandleLocationGetAll(), userMW...))
|
||||
r.Post(v1Base("/locations"), chain.ToHandlerFunc(v1Ctrl.HandleLocationCreate(), userMW...))
|
||||
@@ -122,7 +114,6 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
|
||||
r.Get(v1Base("/items/fields/values"), chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldValues(), userMW...))
|
||||
|
||||
r.Get(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemGet(), userMW...))
|
||||
r.Get(v1Base("/items/{id}/path"), chain.ToHandlerFunc(v1Ctrl.HandleItemFullPath(), userMW...))
|
||||
r.Put(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemUpdate(), userMW...))
|
||||
r.Patch(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemPatch(), userMW...))
|
||||
r.Delete(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemDelete(), userMW...))
|
||||
@@ -186,7 +177,7 @@ func notFoundHandler() errchain.HandlerFunc {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() { _ = f.Close() }()
|
||||
defer f.Close()
|
||||
|
||||
stat, _ := f.Stat()
|
||||
if stat.IsDir() {
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
// Package docs Code generated by swaggo/swag. DO NOT EDIT
|
||||
// Package docs GENERATED BY SWAG; DO NOT EDIT
|
||||
// This file was generated by swaggo/swag
|
||||
package docs
|
||||
|
||||
import "github.com/swaggo/swag"
|
||||
@@ -67,31 +68,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/set-primary-photos": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"description": "Sets the first photo of each item as the primary photo",
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Actions"
|
||||
],
|
||||
"summary": "Set Primary Photos",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.ActionAmountResult"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/zero-item-time-fields": {
|
||||
"post": {
|
||||
"security": [
|
||||
@@ -150,25 +126,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/currency": {
|
||||
"get": {
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Base"
|
||||
],
|
||||
"summary": "Currency",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/currencies.Currency"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/groups": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -429,16 +386,6 @@ const docTemplate = `{
|
||||
"description": "location Ids",
|
||||
"name": "locations",
|
||||
"in": "query"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"collectionFormat": "multi",
|
||||
"description": "parent Ids",
|
||||
"name": "parentIds",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@@ -1017,42 +964,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/items/{id}/path": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Items"
|
||||
],
|
||||
"summary": "Get the full path of an item",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Item ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemPath"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/labels": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -1639,7 +1550,7 @@ const docTemplate = `{
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.APISummary"
|
||||
"$ref": "#/definitions/v1.ApiSummary"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1710,12 +1621,6 @@ const docTemplate = `{
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.LoginForm"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "auth provider",
|
||||
"name": "provider",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@@ -1894,23 +1799,6 @@ const docTemplate = `{
|
||||
}
|
||||
},
|
||||
"definitions": {
|
||||
"currencies.Currency": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"code": {
|
||||
"type": "string"
|
||||
},
|
||||
"local": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"symbol": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.DocumentOut": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -1991,9 +1879,6 @@ const docTemplate = `{
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"type": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2005,9 +1890,6 @@ const docTemplate = `{
|
||||
"repo.ItemAttachmentUpdate": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2086,6 +1968,12 @@ const docTemplate = `{
|
||||
"$ref": "#/definitions/repo.ItemAttachment"
|
||||
}
|
||||
},
|
||||
"children": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"createdAt": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2101,9 +1989,6 @@ const docTemplate = `{
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -2119,13 +2004,9 @@ const docTemplate = `{
|
||||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
},
|
||||
"manufacturer": {
|
||||
"type": "string"
|
||||
@@ -2141,13 +2022,9 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"parent": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
},
|
||||
"purchaseFrom": {
|
||||
"type": "string"
|
||||
@@ -2204,20 +2081,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemPath": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"$ref": "#/definitions/repo.ItemType"
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemSummary": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -2233,9 +2096,6 @@ const docTemplate = `{
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -2247,13 +2107,9 @@ const docTemplate = `{
|
||||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
@@ -2270,17 +2126,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemType": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"location",
|
||||
"item"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"ItemTypeLocation",
|
||||
"ItemTypeItem"
|
||||
]
|
||||
},
|
||||
"repo.ItemUpdate": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -2373,6 +2218,7 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"warrantyExpires": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2409,6 +2255,12 @@ const docTemplate = `{
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2470,6 +2322,12 @@ const docTemplate = `{
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2546,6 +2404,7 @@ const docTemplate = `{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
@@ -2562,6 +2421,7 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2573,6 +2433,7 @@ const docTemplate = `{
|
||||
],
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
@@ -2586,6 +2447,7 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2594,6 +2456,7 @@ const docTemplate = `{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
@@ -2607,6 +2470,7 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2842,7 +2706,15 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.APISummary": {
|
||||
"v1.ActionAmountResult": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completed": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.ApiSummary": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"allowRegistration": {
|
||||
@@ -2871,14 +2743,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.ActionAmountResult": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completed": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.Build": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -3008,8 +2872,6 @@ var SwaggerInfo = &swag.Spec{
|
||||
Description: "Track, Manage, and Organize your Things.",
|
||||
InfoInstanceName: "swagger",
|
||||
SwaggerTemplate: docTemplate,
|
||||
LeftDelim: "{{",
|
||||
RightDelim: "}}",
|
||||
}
|
||||
|
||||
func init() {
|
||||
|
||||
@@ -60,31 +60,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/set-primary-photos": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"description": "Sets the first photo of each item as the primary photo",
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Actions"
|
||||
],
|
||||
"summary": "Set Primary Photos",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.ActionAmountResult"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/zero-item-time-fields": {
|
||||
"post": {
|
||||
"security": [
|
||||
@@ -143,25 +118,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/currency": {
|
||||
"get": {
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Base"
|
||||
],
|
||||
"summary": "Currency",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/currencies.Currency"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/groups": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -422,16 +378,6 @@
|
||||
"description": "location Ids",
|
||||
"name": "locations",
|
||||
"in": "query"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"collectionFormat": "multi",
|
||||
"description": "parent Ids",
|
||||
"name": "parentIds",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@@ -1010,42 +956,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/items/{id}/path": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Items"
|
||||
],
|
||||
"summary": "Get the full path of an item",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Item ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemPath"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/labels": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -1632,7 +1542,7 @@
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.APISummary"
|
||||
"$ref": "#/definitions/v1.ApiSummary"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1703,12 +1613,6 @@
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.LoginForm"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "auth provider",
|
||||
"name": "provider",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@@ -1887,23 +1791,6 @@
|
||||
}
|
||||
},
|
||||
"definitions": {
|
||||
"currencies.Currency": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"code": {
|
||||
"type": "string"
|
||||
},
|
||||
"local": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"symbol": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.DocumentOut": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -1984,9 +1871,6 @@
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"type": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -1998,9 +1882,6 @@
|
||||
"repo.ItemAttachmentUpdate": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2079,6 +1960,12 @@
|
||||
"$ref": "#/definitions/repo.ItemAttachment"
|
||||
}
|
||||
},
|
||||
"children": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"createdAt": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2094,9 +1981,6 @@
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -2112,13 +1996,9 @@
|
||||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
},
|
||||
"manufacturer": {
|
||||
"type": "string"
|
||||
@@ -2134,13 +2014,9 @@
|
||||
"type": "string"
|
||||
},
|
||||
"parent": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
},
|
||||
"purchaseFrom": {
|
||||
"type": "string"
|
||||
@@ -2197,20 +2073,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemPath": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"$ref": "#/definitions/repo.ItemType"
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemSummary": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -2226,9 +2088,6 @@
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -2240,13 +2099,9 @@
|
||||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
@@ -2263,17 +2118,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemType": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"location",
|
||||
"item"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"ItemTypeLocation",
|
||||
"ItemTypeItem"
|
||||
]
|
||||
},
|
||||
"repo.ItemUpdate": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -2366,6 +2210,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"warrantyExpires": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2402,6 +2247,12 @@
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2463,6 +2314,12 @@
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2539,6 +2396,7 @@
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
@@ -2555,6 +2413,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2566,6 +2425,7 @@
|
||||
],
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
@@ -2579,6 +2439,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2587,6 +2448,7 @@
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
@@ -2600,6 +2462,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2835,7 +2698,15 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.APISummary": {
|
||||
"v1.ActionAmountResult": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completed": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.ApiSummary": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"allowRegistration": {
|
||||
@@ -2864,14 +2735,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.ActionAmountResult": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completed": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.Build": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
basePath: /api
|
||||
definitions:
|
||||
currencies.Currency:
|
||||
properties:
|
||||
code:
|
||||
type: string
|
||||
local:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
symbol:
|
||||
type: string
|
||||
type: object
|
||||
repo.DocumentOut:
|
||||
properties:
|
||||
id:
|
||||
@@ -63,8 +52,6 @@ definitions:
|
||||
$ref: '#/definitions/repo.DocumentOut'
|
||||
id:
|
||||
type: string
|
||||
primary:
|
||||
type: boolean
|
||||
type:
|
||||
type: string
|
||||
updatedAt:
|
||||
@@ -72,8 +59,6 @@ definitions:
|
||||
type: object
|
||||
repo.ItemAttachmentUpdate:
|
||||
properties:
|
||||
primary:
|
||||
type: boolean
|
||||
title:
|
||||
type: string
|
||||
type:
|
||||
@@ -127,6 +112,10 @@ definitions:
|
||||
items:
|
||||
$ref: '#/definitions/repo.ItemAttachment'
|
||||
type: array
|
||||
children:
|
||||
items:
|
||||
$ref: '#/definitions/repo.ItemSummary'
|
||||
type: array
|
||||
createdAt:
|
||||
type: string
|
||||
description:
|
||||
@@ -137,8 +126,6 @@ definitions:
|
||||
type: array
|
||||
id:
|
||||
type: string
|
||||
imageId:
|
||||
type: string
|
||||
insured:
|
||||
type: boolean
|
||||
labels:
|
||||
@@ -149,8 +136,7 @@ definitions:
|
||||
description: Warranty
|
||||
type: boolean
|
||||
location:
|
||||
allOf:
|
||||
- $ref: '#/definitions/repo.LocationSummary'
|
||||
$ref: '#/definitions/repo.LocationSummary'
|
||||
description: Edges
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
@@ -164,8 +150,7 @@ definitions:
|
||||
description: Extras
|
||||
type: string
|
||||
parent:
|
||||
allOf:
|
||||
- $ref: '#/definitions/repo.ItemSummary'
|
||||
$ref: '#/definitions/repo.ItemSummary'
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
purchaseFrom:
|
||||
@@ -206,15 +191,6 @@ definitions:
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
type: object
|
||||
repo.ItemPath:
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
type:
|
||||
$ref: '#/definitions/repo.ItemType'
|
||||
type: object
|
||||
repo.ItemSummary:
|
||||
properties:
|
||||
archived:
|
||||
@@ -225,8 +201,6 @@ definitions:
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
imageId:
|
||||
type: string
|
||||
insured:
|
||||
type: boolean
|
||||
labels:
|
||||
@@ -234,8 +208,7 @@ definitions:
|
||||
$ref: '#/definitions/repo.LabelSummary'
|
||||
type: array
|
||||
location:
|
||||
allOf:
|
||||
- $ref: '#/definitions/repo.LocationSummary'
|
||||
$ref: '#/definitions/repo.LocationSummary'
|
||||
description: Edges
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
@@ -249,14 +222,6 @@ definitions:
|
||||
updatedAt:
|
||||
type: string
|
||||
type: object
|
||||
repo.ItemType:
|
||||
enum:
|
||||
- location
|
||||
- item
|
||||
type: string
|
||||
x-enum-varnames:
|
||||
- ItemTypeLocation
|
||||
- ItemTypeItem
|
||||
repo.ItemUpdate:
|
||||
properties:
|
||||
archived:
|
||||
@@ -322,6 +287,7 @@ definitions:
|
||||
warrantyDetails:
|
||||
type: string
|
||||
warrantyExpires:
|
||||
description: Sold
|
||||
type: string
|
||||
type: object
|
||||
repo.LabelCreate:
|
||||
@@ -346,6 +312,10 @@ definitions:
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
items:
|
||||
items:
|
||||
$ref: '#/definitions/repo.ItemSummary'
|
||||
type: array
|
||||
name:
|
||||
type: string
|
||||
updatedAt:
|
||||
@@ -386,6 +356,10 @@ definitions:
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
items:
|
||||
items:
|
||||
$ref: '#/definitions/repo.ItemSummary'
|
||||
type: array
|
||||
name:
|
||||
type: string
|
||||
parent:
|
||||
@@ -436,6 +410,7 @@ definitions:
|
||||
repo.MaintenanceEntry:
|
||||
properties:
|
||||
completedDate:
|
||||
description: Sold
|
||||
type: string
|
||||
cost:
|
||||
example: "0"
|
||||
@@ -447,11 +422,13 @@ definitions:
|
||||
name:
|
||||
type: string
|
||||
scheduledDate:
|
||||
description: Sold
|
||||
type: string
|
||||
type: object
|
||||
repo.MaintenanceEntryCreate:
|
||||
properties:
|
||||
completedDate:
|
||||
description: Sold
|
||||
type: string
|
||||
cost:
|
||||
example: "0"
|
||||
@@ -461,6 +438,7 @@ definitions:
|
||||
name:
|
||||
type: string
|
||||
scheduledDate:
|
||||
description: Sold
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
@@ -468,6 +446,7 @@ definitions:
|
||||
repo.MaintenanceEntryUpdate:
|
||||
properties:
|
||||
completedDate:
|
||||
description: Sold
|
||||
type: string
|
||||
cost:
|
||||
example: "0"
|
||||
@@ -477,6 +456,7 @@ definitions:
|
||||
name:
|
||||
type: string
|
||||
scheduledDate:
|
||||
description: Sold
|
||||
type: string
|
||||
type: object
|
||||
repo.MaintenanceLog:
|
||||
@@ -631,7 +611,12 @@ definitions:
|
||||
token:
|
||||
type: string
|
||||
type: object
|
||||
v1.APISummary:
|
||||
v1.ActionAmountResult:
|
||||
properties:
|
||||
completed:
|
||||
type: integer
|
||||
type: object
|
||||
v1.ApiSummary:
|
||||
properties:
|
||||
allowRegistration:
|
||||
type: boolean
|
||||
@@ -650,11 +635,6 @@ definitions:
|
||||
type: string
|
||||
type: array
|
||||
type: object
|
||||
v1.ActionAmountResult:
|
||||
properties:
|
||||
completed:
|
||||
type: integer
|
||||
type: object
|
||||
v1.Build:
|
||||
properties:
|
||||
buildTime:
|
||||
@@ -762,21 +742,6 @@ paths:
|
||||
summary: Ensures Import Refs
|
||||
tags:
|
||||
- Actions
|
||||
/v1/actions/set-primary-photos:
|
||||
post:
|
||||
description: Sets the first photo of each item as the primary photo
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/v1.ActionAmountResult'
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Set Primary Photos
|
||||
tags:
|
||||
- Actions
|
||||
/v1/actions/zero-item-time-fields:
|
||||
post:
|
||||
description: Resets all item date fields to the beginning of the day
|
||||
@@ -812,18 +777,6 @@ paths:
|
||||
summary: Get Item by Asset ID
|
||||
tags:
|
||||
- Items
|
||||
/v1/currency:
|
||||
get:
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/currencies.Currency'
|
||||
summary: Currency
|
||||
tags:
|
||||
- Base
|
||||
/v1/groups:
|
||||
get:
|
||||
produces:
|
||||
@@ -977,13 +930,6 @@ paths:
|
||||
type: string
|
||||
name: locations
|
||||
type: array
|
||||
- collectionFormat: multi
|
||||
description: parent Ids
|
||||
in: query
|
||||
items:
|
||||
type: string
|
||||
name: parentIds
|
||||
type: array
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
@@ -1281,28 +1227,6 @@ paths:
|
||||
summary: Update Maintenance Entry
|
||||
tags:
|
||||
- Maintenance
|
||||
/v1/items/{id}/path:
|
||||
get:
|
||||
parameters:
|
||||
- description: Item ID
|
||||
in: path
|
||||
name: id
|
||||
required: true
|
||||
type: string
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
items:
|
||||
$ref: '#/definitions/repo.ItemPath'
|
||||
type: array
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Get the full path of an item
|
||||
tags:
|
||||
- Items
|
||||
/v1/items/export:
|
||||
get:
|
||||
responses:
|
||||
@@ -1720,7 +1644,7 @@ paths:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/v1.APISummary'
|
||||
$ref: '#/definitions/v1.ApiSummary'
|
||||
summary: Application Info
|
||||
tags:
|
||||
- Base
|
||||
@@ -1763,10 +1687,6 @@ paths:
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/definitions/v1.LoginForm'
|
||||
- description: auth provider
|
||||
in: query
|
||||
name: provider
|
||||
type: string
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
|
||||
@@ -6,7 +6,7 @@ import (
"log"
"os"

"github.com/sysadminsmedia/homebox/backend/internal/data/ent/migrate"
"github.com/hay-kot/homebox/backend/internal/data/ent/migrate"

atlas "ariga.io/atlas/sql/migrate"
_ "ariga.io/atlas/sql/sqlite"

@@ -1,42 +1,40 @@
module github.com/sysadminsmedia/homebox/backend
module github.com/hay-kot/homebox/backend

go 1.22

toolchain go1.22.0
go 1.20

require (
    ariga.io/atlas v0.19.1
    entgo.io/ent v0.12.5
    github.com/ardanlabs/conf/v3 v3.1.7
    github.com/containrrr/shoutrrr v0.8.0
    github.com/go-chi/chi/v5 v5.0.12
    github.com/go-playground/validator/v10 v10.18.0
    github.com/gocarina/gocsv v0.0.0-20231116093920-b87c2d0e983a
    github.com/google/uuid v1.6.0
    github.com/gorilla/schema v1.2.1
    github.com/hay-kot/httpkit v0.0.9
    github.com/mattn/go-sqlite3 v1.14.22
    ariga.io/atlas v0.12.0
    entgo.io/ent v0.12.3
    github.com/ardanlabs/conf/v3 v3.1.6
    github.com/containrrr/shoutrrr v0.7.1
    github.com/go-chi/chi/v5 v5.0.10
    github.com/go-playground/validator/v10 v10.14.1
    github.com/gocarina/gocsv v0.0.0-20230616125104-99d496ca653d
    github.com/google/uuid v1.3.0
    github.com/gorilla/schema v1.2.0
    github.com/hay-kot/httpkit v0.0.3
    github.com/mattn/go-sqlite3 v1.14.17
    github.com/olahol/melody v1.1.4
    github.com/pkg/errors v0.9.1
    github.com/rs/zerolog v1.32.0
    github.com/rs/zerolog v1.29.1
    github.com/stretchr/testify v1.8.4
    github.com/swaggo/http-swagger/v2 v2.0.2
    github.com/swaggo/swag v1.16.3
    github.com/swaggo/http-swagger v1.3.4
    github.com/swaggo/swag v1.16.1
    github.com/yeqown/go-qrcode/v2 v2.2.2
    github.com/yeqown/go-qrcode/writer/standard v1.2.2
    golang.org/x/crypto v0.23.0
    modernc.org/sqlite v1.29.2
    github.com/yeqown/go-qrcode/writer/standard v1.2.1
    golang.org/x/crypto v0.11.0
    modernc.org/sqlite v1.24.0
)

require (
    github.com/KyleBanks/depth v1.2.1 // indirect
    github.com/agext/levenshtein v1.2.3 // indirect
    github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
    github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
    github.com/davecgh/go-spew v1.1.1 // indirect
    github.com/dustin/go-humanize v1.0.1 // indirect
    github.com/fatih/color v1.16.0 // indirect
    github.com/fatih/color v1.15.0 // indirect
    github.com/fogleman/gg v1.3.0 // indirect
    github.com/gabriel-vasile/mimetype v1.4.3 // indirect
    github.com/gabriel-vasile/mimetype v1.4.2 // indirect
    github.com/go-openapi/inflect v0.19.0 // indirect
    github.com/go-openapi/jsonpointer v0.20.0 // indirect
    github.com/go-openapi/jsonreference v0.20.2 // indirect
@@ -45,34 +43,35 @@ require (
    github.com/go-playground/locales v0.14.1 // indirect
    github.com/go-playground/universal-translator v0.18.1 // indirect
    github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
    github.com/google/go-cmp v0.6.0 // indirect
    github.com/gorilla/websocket v1.5.1 // indirect
    github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
    github.com/hashicorp/hcl/v2 v2.19.1 // indirect
    github.com/google/go-cmp v0.5.9 // indirect
    github.com/gorilla/websocket v1.5.0 // indirect
    github.com/hashicorp/hcl/v2 v2.17.0 // indirect
    github.com/josharian/intern v1.0.0 // indirect
    github.com/leodido/go-urn v1.4.0 // indirect
    github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
    github.com/leodido/go-urn v1.2.4 // indirect
    github.com/mailru/easyjson v0.7.7 // indirect
    github.com/mattn/go-colorable v0.1.13 // indirect
    github.com/mattn/go-isatty v0.0.20 // indirect
    github.com/mattn/go-isatty v0.0.19 // indirect
    github.com/mitchellh/go-wordwrap v1.0.1 // indirect
    github.com/ncruces/go-strftime v0.1.9 // indirect
    github.com/pmezard/go-difflib v1.0.0 // indirect
    github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
    github.com/rogpeppe/go-internal v1.11.0 // indirect
    github.com/swaggo/files/v2 v2.0.0 // indirect
    github.com/swaggo/files v1.0.1 // indirect
    github.com/yeqown/reedsolomon v1.0.0 // indirect
    github.com/zclconf/go-cty v1.14.1 // indirect
    golang.org/x/image v0.18.0 // indirect
    golang.org/x/mod v0.17.0 // indirect
    golang.org/x/net v0.25.0 // indirect
    golang.org/x/sys v0.20.0 // indirect
    golang.org/x/text v0.16.0 // indirect
    golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect
    github.com/zclconf/go-cty v1.13.2 // indirect
    golang.org/x/image v0.9.0 // indirect
    golang.org/x/mod v0.12.0 // indirect
    golang.org/x/net v0.12.0 // indirect
    golang.org/x/sys v0.10.0 // indirect
    golang.org/x/text v0.11.0 // indirect
    golang.org/x/tools v0.11.0 // indirect
    gopkg.in/yaml.v3 v3.0.1 // indirect
    modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6 // indirect
    modernc.org/libc v1.41.0 // indirect
    lukechampine.com/uint128 v1.3.0 // indirect
    modernc.org/cc/v3 v3.41.0 // indirect
    modernc.org/ccgo/v3 v3.16.14 // indirect
    modernc.org/libc v1.24.1 // indirect
    modernc.org/mathutil v1.6.0 // indirect
    modernc.org/memory v1.7.2 // indirect
    modernc.org/strutil v1.2.0 // indirect
    modernc.org/memory v1.6.0 // indirect
    modernc.org/opt v0.1.3 // indirect
    modernc.org/strutil v1.1.3 // indirect
    modernc.org/token v1.1.0 // indirect
)

backend/go.sum (1288 changes)
File diff suppressed because it is too large
@@ -1,104 +0,0 @@
// Package currencies provides a shared definition of currencies. This uses a global
// variable to hold the currencies.
package currencies

import (
    "bytes"
    _ "embed"
    "encoding/json"
    "io"
    "slices"
    "strings"
    "sync"
)

//go:embed currencies.json
var defaults []byte

type CollectorFunc func() ([]Currency, error)

func CollectJSON(reader io.Reader) CollectorFunc {
    return func() ([]Currency, error) {
        var currencies []Currency
        err := json.NewDecoder(reader).Decode(&currencies)
        if err != nil {
            return nil, err
        }

        return currencies, nil
    }
}

func CollectDefaults() CollectorFunc {
    return CollectJSON(bytes.NewReader(defaults))
}

func CollectionCurrencies(collectors ...CollectorFunc) ([]Currency, error) {
    out := make([]Currency, 0, len(collectors))
    for i := range collectors {
        c, err := collectors[i]()
        if err != nil {
            return nil, err
        }

        out = append(out, c...)
    }

    return out, nil
}

type Currency struct {
    Name   string `json:"name"`
    Code   string `json:"code"`
    Local  string `json:"local"`
    Symbol string `json:"symbol"`
}

type CurrencyRegistry struct {
    mu       sync.RWMutex
    registry map[string]Currency
}

func NewCurrencyService(currencies []Currency) *CurrencyRegistry {
    registry := make(map[string]Currency, len(currencies))
    for i := range currencies {
        registry[currencies[i].Code] = currencies[i]
    }

    return &CurrencyRegistry{
        registry: registry,
    }
}

func (cs *CurrencyRegistry) Slice() []Currency {
    cs.mu.RLock()
    defer cs.mu.RUnlock()

    out := make([]Currency, 0, len(cs.registry))
    for key := range cs.registry {
        out = append(out, cs.registry[key])
    }

    slices.SortFunc(out, func(a, b Currency) int {
        if a.Name < b.Name {
            return -1
        }

        if a.Name > b.Name {
            return 1
        }

        return 0
    })

    return out
}

func (cs *CurrencyRegistry) IsSupported(code string) bool {
    upper := strings.ToUpper(code)

    cs.mu.RLock()
    defer cs.mu.RUnlock()
    _, ok := cs.registry[upper]
    return ok
}
@@ -1,638 +0,0 @@
|
||||
[
|
||||
{
|
||||
"code": "USD",
|
||||
"local": "United States",
|
||||
"symbol": "$",
|
||||
"name": "United States Dollar"
|
||||
},
|
||||
{
|
||||
"code": "AED",
|
||||
"local": "United Arab Emirates",
|
||||
"symbol": "د.إ",
|
||||
"name": "United Arab Emirates Dirham"
|
||||
},
|
||||
{
|
||||
"code": "AFN",
|
||||
"local": "Afghanistan",
|
||||
"symbol": "؋",
|
||||
"name": "Afghan Afghani"
|
||||
},
|
||||
{
|
||||
"code": "ALL",
|
||||
"local": "Albania",
|
||||
"symbol": "L",
|
||||
"name": "Albanian Lek"
|
||||
},
|
||||
{
|
||||
"code": "AMD",
|
||||
"local": "Armenia",
|
||||
"symbol": "֏",
|
||||
"name": "Armenian Dram"
|
||||
},
|
||||
{
|
||||
"code": "ANG",
|
||||
"local": "Netherlands Antilles",
|
||||
"symbol": "ƒ",
|
||||
"name": "Netherlands Antillean Guilder"
|
||||
},
|
||||
{
|
||||
"code": "AOA",
|
||||
"local": "Angola",
|
||||
"symbol": "Kz",
|
||||
"name": "Angolan Kwanza"
|
||||
},
|
||||
{
|
||||
"code": "ARS",
|
||||
"local": "Argentina",
|
||||
"symbol": "$",
|
||||
"name": "Argentine Peso"
|
||||
},
|
||||
{
|
||||
"code": "AUD",
|
||||
"local": "Australia",
|
||||
"symbol": "A$",
|
||||
"name": "Australian Dollar"
|
||||
},
|
||||
{
|
||||
"code": "AWG",
|
||||
"local": "Aruba",
|
||||
"symbol": "ƒ",
|
||||
"name": "Aruban Florin"
|
||||
},
|
||||
{
|
||||
"code": "AZN",
|
||||
"local": "Azerbaijan",
|
||||
"symbol": "₼",
|
||||
"name": "Azerbaijani Manat"
|
||||
},
|
||||
{
|
||||
"code": "BAM",
|
||||
"local": "Bosnia and Herzegovina",
|
||||
"symbol": "KM",
|
||||
"name": "Bosnia and Herzegovina Convertible Mark"
|
||||
},
|
||||
{
|
||||
"code": "BBD",
|
||||
"local": "Barbados",
|
||||
"symbol": "Bds$",
|
||||
"name": "Barbadian Dollar"
|
||||
},
|
||||
{
|
||||
"code": "BDT",
|
||||
"local": "Bangladesh",
|
||||
"symbol": "৳",
|
||||
"name": "Bangladeshi Taka"
|
||||
},
|
||||
{
|
||||
"code": "BGN",
|
||||
"local": "Bulgaria",
|
||||
"symbol": "лв",
|
||||
"name": "Bulgarian lev"
|
||||
},
|
||||
{
|
||||
"code": "BHD",
|
||||
"local": "Bahrain",
|
||||
"symbol": "ب.د",
|
||||
"name": "Bahraini Dinar"
|
||||
},
|
||||
{
|
||||
"code": "BIF",
|
||||
"local": "Burundi",
|
||||
"symbol": "FBu",
|
||||
"name": "Burundian Franc"
|
||||
},
|
||||
{
|
||||
"code": "BMD",
|
||||
"local": "Bermuda",
|
||||
"symbol": "BD$",
|
||||
"name": "Bermudian Dollar"
|
||||
},
|
||||
{
|
||||
"code": "BND",
|
||||
"local": "Brunei",
|
||||
"symbol": "B$",
|
||||
"name": "Brunei Dollar"
|
||||
},
|
||||
{
|
||||
"code": "BOB",
|
||||
"local": "Bolivia",
|
||||
"symbol": "Bs.",
|
||||
"name": "Bolivian Boliviano"
|
||||
},
|
||||
{
|
||||
"code": "BRL",
|
||||
"local": "Brazil",
|
||||
"symbol": "R$",
|
||||
"name": "Brazilian Real"
|
||||
},
|
||||
{
|
||||
"code": "BSD",
|
||||
"local": "Bahamas",
|
||||
"symbol": "B$",
|
||||
"name": "Bahamian Dollar"
|
||||
},
|
||||
{
|
||||
"code": "BTN",
|
||||
"local": "Bhutan",
|
||||
"symbol": "Nu.",
|
||||
"name": "Bhutanese Ngultrum"
|
||||
},
|
||||
{
|
||||
"code": "BWP",
|
||||
"local": "Botswana",
|
||||
"symbol": "P",
|
||||
"name": "Botswana Pula"
|
||||
},
|
||||
{
|
||||
"code": "BYN",
|
||||
"local": "Belarus",
|
||||
"symbol": "Br",
|
||||
"name": "Belarusian Ruble"
|
||||
},
|
||||
{
|
||||
"code": "BZD",
|
||||
"local": "Belize",
|
||||
"symbol": "BZ$",
|
||||
"name": "Belize Dollar"
|
||||
},
|
||||
{
|
||||
"code": "CAD",
|
||||
"local": "Canada",
|
||||
"symbol": "C$",
|
||||
"name": "Canadian Dollar"
|
||||
},
|
||||
{
|
||||
"code": "CDF",
|
||||
"local": "Democratic Republic of the Congo",
|
||||
"symbol": "FC",
|
||||
"name": "Congolese Franc"
|
||||
},
|
||||
{
|
||||
"code": "CHF",
|
||||
"local": "Switzerland",
|
||||
"symbol": "CHF",
|
||||
"name": "Swiss Franc"
|
||||
},
|
||||
{
|
||||
"code": "CLP",
|
||||
"local": "Chile",
|
||||
"symbol": "CL$",
|
||||
"name": "Chilean Peso"
|
||||
},
|
||||
{
|
||||
"code": "CNY",
|
||||
"local": "China",
|
||||
"symbol": "¥",
|
||||
"name": "Chinese Yuan"
|
||||
},
|
||||
{
|
||||
"code": "COP",
|
||||
"local": "Colombia",
|
||||
"symbol": "COL$",
|
||||
"name": "Colombian Peso"
|
||||
},
|
||||
{
|
||||
"code": "CRC",
|
||||
"local": "Costa Rica",
|
||||
"symbol": "₡",
|
||||
"name": "Costa Rican Colón"
|
||||
},
|
||||
{
|
||||
"code": "CUP",
|
||||
"local": "Cuba",
|
||||
"symbol": "₱",
|
||||
"name": "Cuban Peso"
|
||||
},
|
||||
{
|
||||
"code": "CVE",
|
||||
"local": "Cape Verde",
|
||||
"symbol": "$",
|
||||
"name": "Cape Verdean Escudo"
|
||||
},
|
||||
{
|
||||
"code": "CZK",
|
||||
"local": "Czech Republic",
|
||||
"symbol": "Kč",
|
||||
"name": "Czech Koruna"
|
||||
},
|
||||
{
|
||||
"code": "DJF",
|
||||
"local": "Djibouti",
|
||||
"symbol": "Fdj",
|
||||
"name": "Djiboutian Franc"
|
||||
},
|
||||
{
|
||||
"code": "DKK",
|
||||
"local": "Denmark",
|
||||
"symbol": "kr",
|
||||
"name": "Danish Krone"
|
||||
},
|
||||
{
|
||||
"code": "DOP",
|
||||
"local": "Dominican Republic",
|
||||
"symbol": "RD$",
|
||||
"name": "Dominican Peso"
|
||||
},
|
||||
{
|
||||
"code": "DZD",
|
||||
"local": "Algeria",
|
||||
"symbol": "د.ج",
|
||||
"name": "Algerian Dinar"
|
||||
},
|
||||
{
|
||||
"code": "EGP",
|
||||
"local": "Egypt",
|
||||
"symbol": "£",
|
||||
"name": "Egyptian Pound"
|
||||
},
|
||||
{
|
||||
"code": "ERN",
|
||||
"local": "Eritrea",
|
||||
"symbol": "Nfk",
|
||||
"name": "Eritrean Nakfa"
|
||||
},
|
||||
{
|
||||
"code": "ETB",
|
||||
"local": "Ethiopia",
|
||||
"symbol": "Br",
|
||||
"name": "Ethiopian Birr"
|
||||
},
|
||||
{
|
||||
"code": "EUR",
|
||||
"local": "Eurozone",
|
||||
"symbol": "€",
|
||||
"name": "Euro"
|
||||
},
|
||||
{
|
||||
"code": "FJD",
|
||||
"local": "Fiji",
|
||||
"symbol": "FJ$",
|
||||
"name": "Fijian Dollar"
|
||||
},
|
||||
{
|
||||
"code": "FKP",
|
||||
"local": "Falkland Islands",
|
||||
"symbol": "£",
|
||||
"name": "Falkland Islands Pound"
|
||||
},
|
||||
{
|
||||
"code": "FOK",
|
||||
"local": "Faroe Islands",
|
||||
"symbol": "kr",
|
||||
"name": "Faroese Króna"
|
||||
},
|
||||
{
|
||||
"code": "GBP",
|
||||
"local": "United Kingdom",
|
||||
"symbol": "£",
|
||||
"name": "British Pound Sterling"
|
||||
},
|
||||
{
|
||||
"code": "GEL",
|
||||
"local": "Georgia",
|
||||
"symbol": "₾",
|
||||
"name": "Georgian Lari"
|
||||
},
|
||||
{
|
||||
"code": "GGP",
|
||||
"local": "Guernsey",
|
||||
"symbol": "£",
|
||||
"name": "Guernsey Pound"
|
||||
},
|
||||
{
|
||||
"code": "GHS",
|
||||
"local": "Ghana",
|
||||
"symbol": "GH₵",
|
||||
"name": "Ghanaian Cedi"
|
||||
},
|
||||
{
|
||||
"code": "GIP",
|
||||
"local": "Gibraltar",
|
||||
"symbol": "£",
|
||||
"name": "Gibraltar Pound"
|
||||
},
|
||||
{
|
||||
"code": "GMD",
|
||||
"local": "Gambia",
|
||||
"symbol": "D",
|
||||
"name": "Gambian Dalasi"
|
||||
},
|
||||
{
|
||||
"code": "GNF",
|
||||
"local": "Guinea",
|
||||
"symbol": "FG",
|
||||
"name": "Guinean Franc"
|
||||
},
|
||||
{
|
||||
"code": "GTQ",
|
||||
"local": "Guatemala",
|
||||
"symbol": "Q",
|
||||
"name": "Guatemalan Quetzal"
|
||||
},
|
||||
{
|
||||
"code": "GYD",
|
||||
"local": "Guyana",
|
||||
"symbol": "GY$",
|
||||
"name": "Guyanese Dollar"
|
||||
},
|
||||
{
|
||||
"code": "HKD",
|
||||
"local": "Hong Kong",
|
||||
"symbol": "HK$",
|
||||
"name": "Hong Kong Dollar"
|
||||
},
|
||||
{
|
||||
"code": "HNL",
|
||||
"local": "Honduras",
|
||||
"symbol": "L",
|
||||
"name": "Honduran Lempira"
|
||||
},
|
||||
{
|
||||
"code": "HRK",
|
||||
"local": "Croatia",
|
||||
"symbol": "kn",
|
||||
"name": "Croatian Kuna"
|
||||
},
|
||||
{
|
||||
"code": "HTG",
|
||||
"local": "Haiti",
|
||||
"symbol": "G",
|
||||
"name": "Haitian Gourde"
|
||||
},
|
||||
{
|
||||
"code": "HUF",
|
||||
"local": "Hungary",
|
||||
"symbol": "Ft",
|
||||
"name": "Hungarian Forint"
|
||||
},
|
||||
{
|
||||
"code": "IDR",
|
||||
"local": "Indonesia",
|
||||
"symbol": "Rp",
|
||||
"name": "Indonesian Rupiah"
|
||||
},
|
||||
{
|
||||
"code": "ILS",
|
||||
"local": "Israel",
|
||||
"symbol": "₪",
|
||||
"name": "Israeli New Shekel"
|
||||
},
|
||||
{
|
||||
"code": "IMP",
|
||||
"local": "Isle of Man",
|
||||
"symbol": "£",
|
||||
"name": "Manx Pound"
|
||||
},
|
||||
{
|
||||
"code": "INR",
|
||||
"local": "India",
|
||||
"symbol": "₹",
|
||||
"name": "Indian Rupee"
|
||||
},
|
||||
{
|
||||
"code": "IQD",
|
||||
"local": "Iraq",
|
||||
"symbol": "ع.د",
|
||||
"name": "Iraqi Dinar"
|
||||
},
|
||||
{
|
||||
"code": "IRR",
|
||||
"local": "Iran",
|
||||
"symbol": "﷼",
|
||||
"name": "Iranian Rial"
|
||||
},
|
||||
{
|
||||
"code": "ISK",
|
||||
"local": "Iceland",
|
||||
"symbol": "kr",
|
||||
"name": "Icelandic Króna"
|
||||
},
|
||||
{
|
||||
"code": "JEP",
|
||||
"local": "Jersey",
|
||||
"symbol": "£",
|
||||
"name": "Jersey Pound"
|
||||
},
|
||||
{
|
||||
"code": "JMD",
|
||||
"local": "Jamaica",
|
||||
"symbol": "J$",
|
||||
"name": "Jamaican Dollar"
|
||||
},
|
||||
{
|
||||
"code": "JOD",
|
||||
"local": "Jordan",
|
||||
"symbol": "د.ا",
|
||||
"name": "Jordanian Dinar"
|
||||
},
|
||||
{
|
||||
"code": "JPY",
|
||||
"local": "Japan",
|
||||
"symbol": "¥",
|
||||
"name": "Japanese Yen"
|
||||
},
|
||||
{
|
||||
"code": "KES",
|
||||
"local": "Kenya",
|
||||
"symbol": "KSh",
|
||||
"name": "Kenyan Shilling"
|
||||
},
|
||||
{
|
||||
"code": "KGS",
|
||||
"local": "Kyrgyzstan",
|
||||
"symbol": "с",
|
||||
"name": "Kyrgyzstani Som"
|
||||
},
|
||||
{
|
||||
"code": "KHR",
|
||||
"local": "Cambodia",
|
||||
"symbol": "៛",
|
||||
"name": "Cambodian Riel"
|
||||
},
|
||||
{
|
||||
"code": "KID",
|
||||
"local": "Kiribati",
|
||||
"symbol": "$",
|
||||
"name": "Kiribati Dollar"
|
||||
},
|
||||
{
|
||||
"code": "KMF",
|
||||
"local": "Comoros",
|
||||
"symbol": "CF",
|
||||
"name": "Comorian Franc"
|
||||
},
|
||||
{
|
||||
"code": "KRW",
|
||||
"local": "South Korea",
|
||||
"symbol": "₩",
|
||||
"name": "South Korean Won"
|
||||
},
|
||||
{
|
||||
"code": "KWD",
|
||||
"local": "Kuwait",
|
||||
"symbol": "د.ك",
|
||||
"name": "Kuwaiti Dinar"
|
||||
},
|
||||
{
|
||||
"code": "KYD",
|
||||
"local": "Cayman Islands",
|
||||
"symbol": "CI$",
|
||||
"name": "Cayman Islands Dollar"
|
||||
},
|
||||
{
|
||||
"code": "KZT",
|
||||
"local": "Kazakhstan",
|
||||
"symbol": "₸",
|
||||
"name": "Kazakhstani Tenge"
|
||||
},
|
||||
{
|
||||
"code": "LAK",
|
||||
"local": "Laos",
|
||||
"symbol": "₭",
|
||||
"name": "Lao Kip"
|
||||
},
|
||||
{
|
||||
"code": "LBP",
|
||||
"local": "Lebanon",
|
||||
"symbol": "ل.ل",
|
||||
"name": "Lebanese Pound"
|
||||
},
|
||||
{
|
||||
"code": "LKR",
|
||||
"local": "Sri Lanka",
|
||||
"symbol": "₨",
|
||||
"name": "Sri Lankan Rupee"
|
||||
},
|
||||
{
|
||||
"code": "LRD",
|
||||
"local": "Liberia",
|
||||
"symbol": "L$",
|
||||
"name": "Liberian Dollar"
|
||||
},
|
||||
{
|
||||
"code": "LSL",
|
||||
"local": "Lesotho",
|
||||
"symbol": "M",
|
||||
"name": "Lesotho Loti"
|
||||
},
|
||||
{
|
||||
"code": "LYD",
|
||||
"local": "Libya",
|
||||
"symbol": "ل.د",
|
||||
"name": "Libyan Dinar"
|
||||
},
|
||||
{
|
||||
"code": "MAD",
|
||||
"local": "Morocco",
|
||||
"symbol": "د.م.",
|
||||
"name": "Moroccan Dirham"
|
||||
},
|
||||
{
|
||||
"code": "MDL",
|
||||
"local": "Moldova",
|
||||
"symbol": "lei",
|
||||
"name": "Moldovan Leu"
|
||||
},
|
||||
{
|
||||
"code": "MGA",
|
||||
"local": "Madagascar",
|
||||
"symbol": "Ar",
|
||||
"name": "Malagasy Ariary"
|
||||
},
|
||||
{
|
||||
"code": "MKD",
|
||||
"local": "North Macedonia",
|
||||
"symbol": "ден",
|
||||
"name": "Macedonian Denar"
|
||||
},
|
||||
{
|
||||
"code": "MMK",
|
||||
"local": "Myanmar",
|
||||
"symbol": "K",
|
||||
"name": "Myanmar Kyat"
|
||||
},
|
||||
{
|
||||
"code": "MNT",
|
||||
"local": "Mongolia",
|
||||
"symbol": "₮",
|
||||
"name": "Mongolian Tugrik"
|
||||
},
|
||||
{
|
||||
"code": "MOP",
|
||||
"local": "Macau",
|
||||
"symbol": "MOP$",
|
||||
"name": "Macanese Pataca"
|
||||
},
|
||||
{
|
||||
"code": "MRU",
|
||||
"local": "Mauritania",
|
||||
"symbol": "UM",
|
||||
"name": "Mauritanian Ouguiya"
|
||||
},
|
||||
{
|
||||
"code": "MUR",
|
||||
"local": "Mauritius",
|
||||
"symbol": "₨",
|
||||
"name": "Mauritian Rupee"
|
||||
},
|
||||
{
|
||||
"code": "MVR",
|
||||
"local": "Maldives",
|
||||
"symbol": "Rf",
|
||||
"name": "Maldivian Rufiyaa"
|
||||
},
|
||||
{
|
||||
"code": "MWK",
|
||||
"local": "Malawi",
|
||||
"symbol": "MK",
|
||||
"name": "Malawian Kwacha"
|
||||
},
|
||||
{
|
||||
"code": "MXN",
|
||||
"local": "Mexico",
|
||||
"symbol": "Mex$",
|
||||
"name": "Mexican Peso"
|
||||
},
|
||||
{
|
||||
"code": "MYR",
|
||||
"local": "Malaysia",
|
||||
"symbol": "RM",
|
||||
"name": "Malaysian Ringgit"
|
||||
},
|
||||
{
|
||||
"code": "MZN",
|
||||
"local": "Mozambique",
|
||||
"symbol": "MT",
|
||||
"name": "Mozambican Metical"
|
||||
},
|
||||
{
|
||||
"code": "NAD",
|
||||
"local": "Namibia",
|
||||
"symbol": "N$",
|
||||
"name": "Namibian Dollar"
|
||||
},
|
||||
{
|
||||
"code": "NGN",
|
||||
"local": "Nigeria",
|
||||
"symbol": "₦",
|
||||
"name": "Nigerian Naira"
|
||||
},
|
||||
{
|
||||
"code": "NIO",
|
||||
"local": "Nicaragua",
|
||||
"symbol": "C$",
|
||||
"name": "Nicaraguan Córdoba"
|
||||
},
|
||||
{
|
||||
"code": "NOK",
|
||||
"local": "Norway",
|
||||
"symbol": "kr",
|
||||
"name": "Norwegian Krone"
|
||||
},
|
||||
{
|
||||
"code": "UAH",
|
||||
"local": "Ukraine",
|
||||
"symbol": "₴",
|
||||
"name": "Ukrainian Hryvnia"
|
||||
}
|
||||
]
|
||||
@@ -1,9 +1,7 @@
// Package services provides the core business logic for the application.
package services

import (
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/repo"
)

type AllServices struct {
@@ -11,14 +9,12 @@ type AllServices struct {
Group *GroupService
Items *ItemService
BackgroundService *BackgroundService
Currencies *currencies.CurrencyRegistry
}

type OptionsFunc func(*options)

type options struct {
autoIncrementAssetID bool
currencies []currencies.Currency
}

func WithAutoIncrementAssetID(v bool) func(*options) {
@@ -27,27 +23,13 @@ func WithAutoIncrementAssetID(v bool) func(*options) {
}
}

func WithCurrencies(v []currencies.Currency) func(*options) {
return func(o *options) {
o.currencies = v
}
}

func New(repos *repo.AllRepos, opts ...OptionsFunc) *AllServices {
if repos == nil {
panic("repos cannot be nil")
}

defaultCurrencies, err := currencies.CollectionCurrencies(
currencies.CollectDefaults(),
)
if err != nil {
panic("failed to collect default currencies")
}

options := &options{
autoIncrementAssetID: true,
currencies: defaultCurrencies,
}

for _, opt := range opts {
@@ -62,6 +44,5 @@ func New(repos *repo.AllRepos, opts ...OptionsFunc) *AllServices {
autoIncrementAssetID: options.autoIncrementAssetID,
},
BackgroundService: &BackgroundService{repos},
Currencies: currencies.NewCurrencyService(options.currencies),
}
}

@@ -4,7 +4,7 @@ import (
"context"

"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/repo"
)

type contextKeys struct {

@@ -5,8 +5,8 @@ import (
"testing"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/stretchr/testify/assert"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
)

func Test_SetAuthContext(t *testing.T) {

@@ -6,12 +6,11 @@ import (
"os"
"testing"

"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/pkgs/faker"
_ "github.com/mattn/go-sqlite3"
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/pkgs/faker"
)

var (
@@ -62,13 +61,8 @@ func TestMain(m *testing.M) {

tClient = client
tRepos = repo.New(tClient, tbus, os.TempDir()+"/homebox")

defaults, _ := currencies.CollectionCurrencies(
currencies.CollectDefaults(),
)

tSvc = New(tRepos, WithCurrencies(defaults))
defer func() { _ = client.Close() }()
tSvc = New(tRepos)
defer client.Close()

bootstrap()
tCtx = Context{

@@ -2,8 +2,8 @@ package reporting

import (
"github.com/gocarina/gocsv"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/types"
)

// =================================================================================================
@@ -20,9 +20,9 @@ type BillOfMaterialsEntry struct {
TotalPrice float64 `csv:"Total Price"`
}

// BillOfMaterialsCSV returns a byte slice of the Bill of Materials for a given GID in CSV format
// BillOfMaterialsTSV returns a byte slice of the Bill of Materials for a given GID in TSV format
// See BillOfMaterialsEntry for the format of the output
func BillOfMaterialsCSV(entities []repo.ItemOut) ([]byte, error) {
func BillOfMaterialsTSV(entities []repo.ItemOut) ([]byte, error) {
bomEntries := make([]BillOfMaterialsEntry, len(entities))
for i, entity := range entities {
bomEntries[i] = BillOfMaterialsEntry{

@@ -1,8 +1,7 @@
// Package eventbus provides an interface for event bus.
// / Package eventbus provides an interface for event bus.
package eventbus

import (
"context"
"sync"

"github.com/google/uuid"
@@ -35,38 +34,33 @@ type EventBus struct {

func New() *EventBus {
return &EventBus{
ch: make(chan eventData, 100),
ch: make(chan eventData, 10),
subscribers: map[Event][]func(any){
EventLabelMutation: {},
EventLocationMutation: {},
EventItemMutation: {},
EventItemMutation: {},
},
}
}

func (e *EventBus) Run(ctx context.Context) error {
func (e *EventBus) Run() {
if e.started {
panic("event bus already started")
}

e.started = true

for {
select {
case <-ctx.Done():
return nil
case event := <-e.ch:
e.mu.RLock()
arr, ok := e.subscribers[event.event]
e.mu.RUnlock()
for event := range e.ch {
e.mu.RLock()
arr, ok := e.subscribers[event.event]
e.mu.RUnlock()

if !ok {
continue
}
if !ok {
continue
}

for _, fn := range arr {
fn(event.data)
}
for _, fn := range arr {
fn(event.data)
}
}
}

@@ -1,4 +1,3 @@
// Package reporting provides a way to import CSV files into the database.
package reporting

import (

@@ -3,8 +3,8 @@ package reporting
import (
"strings"

"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/types"
)

type ExportItemFields struct {
@@ -12,7 +12,7 @@ type ExportItemFields struct {
Value string
}

type ExportCSVRow struct {
type ExportTSVRow struct {
ImportRef string `csv:"HB.import_ref"`
Location LocationString `csv:"HB.location"`
LabelStr LabelString `csv:"HB.labels"`
@@ -84,7 +84,7 @@ func (csf LocationString) String() string {
return strings.Join(csf, " / ")
}

func fromPathSlice(s []repo.ItemPath) LocationString {
func fromPathSlice(s []repo.LocationPath) LocationString {
v := make(LocationString, len(s))

for i := range s {

@@ -10,21 +10,21 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/types"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
|
||||
)
|
||||
|
||||
// IOSheet is the representation of a CSV/TSV sheet that is used for importing/exporting
|
||||
// items from homebox. It is used to read/write the data from/to a CSV/TSV file given
|
||||
// the standard format of the file.
|
||||
//
|
||||
// See ExportCSVRow for the format of the data in the sheet.
|
||||
// See ExportTSVRow for the format of the data in the sheet.
|
||||
type IOSheet struct {
|
||||
headers []string
|
||||
custom []int
|
||||
index map[string]int
|
||||
Rows []ExportCSVRow
|
||||
Rows []ExportTSVRow
|
||||
}
|
||||
|
||||
func (s *IOSheet) indexHeaders() {
|
||||
@@ -70,16 +70,16 @@ func (s *IOSheet) Read(data io.Reader) error {
|
||||
}
|
||||
|
||||
s.headers = sheet[0]
|
||||
s.Rows = make([]ExportCSVRow, len(sheet)-1)
|
||||
s.Rows = make([]ExportTSVRow, len(sheet)-1)
|
||||
|
||||
for i, row := range sheet[1:] {
|
||||
if len(row) != len(s.headers) {
|
||||
return fmt.Errorf("row has %d columns, expected %d", len(row), len(s.headers))
|
||||
}
|
||||
|
||||
rowData := ExportCSVRow{}
|
||||
rowData := ExportTSVRow{}
|
||||
|
||||
st := reflect.TypeOf(ExportCSVRow{})
|
||||
st := reflect.TypeOf(ExportTSVRow{})
|
||||
|
||||
for i := 0; i < st.NumField(); i++ {
|
||||
field := st.Field(i)
|
||||
@@ -152,9 +152,9 @@ func (s *IOSheet) Read(data io.Reader) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// ReadItems writes the sheet to a writer.
|
||||
// Write writes the sheet to a writer.
|
||||
func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.UUID, repos *repo.AllRepos) error {
|
||||
s.Rows = make([]ExportCSVRow, len(items))
|
||||
s.Rows = make([]ExportTSVRow, len(items))
|
||||
|
||||
extraHeaders := map[string]struct{}{}
|
||||
|
||||
@@ -162,9 +162,9 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
|
||||
item := items[i]
|
||||
|
||||
// TODO: Support fetching nested locations
|
||||
locID := item.Location.ID
|
||||
locId := item.Location.ID
|
||||
|
||||
locPaths, err := repos.Locations.PathForLoc(context.Background(), GID, locID)
|
||||
locPaths, err := repos.Locations.PathForLoc(context.Background(), GID, locId)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("could not get location path")
|
||||
return err
|
||||
@@ -189,7 +189,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
|
||||
}
|
||||
}
|
||||
|
||||
s.Rows[i] = ExportCSVRow{
|
||||
s.Rows[i] = ExportTSVRow{
|
||||
// fill struct
|
||||
Location: locString,
|
||||
LabelStr: labelString,
|
||||
@@ -232,7 +232,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
|
||||
|
||||
sort.Strings(customHeaders)
|
||||
|
||||
st := reflect.TypeOf(ExportCSVRow{})
|
||||
st := reflect.TypeOf(ExportTSVRow{})
|
||||
|
||||
// Write headers
|
||||
for i := 0; i < st.NumField(); i++ {
|
||||
@@ -252,8 +252,8 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
|
||||
return nil
|
||||
}
|
||||
|
||||
// CSV writes the current sheet to a 2d array, for compatibility with TSV/CSV files.
|
||||
func (s *IOSheet) CSV() ([][]string, error) {
|
||||
// Writes the current sheet to a writer in TSV format.
|
||||
func (s *IOSheet) TSV() ([][]string, error) {
|
||||
memcsv := make([][]string, len(s.Rows)+1)
|
||||
|
||||
memcsv[0] = s.headers
|
||||
|
||||
@@ -7,9 +7,8 @@ import (
|
||||
|
||||
_ "embed"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -27,13 +26,13 @@ func TestSheet_Read(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
data []byte
|
||||
want []ExportCSVRow
|
||||
want []ExportTSVRow
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "minimal import",
|
||||
data: minimalImportCSV,
|
||||
want: []ExportCSVRow{
|
||||
want: []ExportTSVRow{
|
||||
{Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1"},
|
||||
{Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2"},
|
||||
{Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3"},
|
||||
@@ -42,7 +41,7 @@ func TestSheet_Read(t *testing.T) {
|
||||
{
|
||||
name: "custom field import",
|
||||
data: customFieldImportCSV,
|
||||
want: []ExportCSVRow{
|
||||
want: []ExportTSVRow{
|
||||
{
|
||||
Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1",
|
||||
Fields: []ExportItemFields{
|
||||
@@ -72,7 +71,7 @@ func TestSheet_Read(t *testing.T) {
|
||||
{
|
||||
name: "custom types import",
|
||||
data: customTypesImportCSV,
|
||||
want: []ExportCSVRow{
|
||||
want: []ExportTSVRow{
|
||||
{
|
||||
Name: "Item 1",
|
||||
AssetID: repo.AssetID(1),
|
||||
@@ -104,9 +103,9 @@ func TestSheet_Read(t *testing.T) {
|
||||
|
||||
switch {
|
||||
case tt.wantErr:
|
||||
require.Error(t, err)
|
||||
assert.Error(t, err)
|
||||
default:
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
assert.ElementsMatch(t, tt.want, sheet.Rows)
|
||||
}
|
||||
})
|
||||
|
||||
@@ -6,9 +6,9 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/containrrr/shoutrrr"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/types"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
|
||||
)
|
||||
|
||||
type BackgroundService struct {
|
||||
|
||||
@@ -4,8 +4,8 @@ import (
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/pkgs/hasher"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/pkgs/hasher"
|
||||
)
|
||||
|
||||
type GroupService struct {
|
||||
|
||||
@@ -8,8 +8,8 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services/reporting"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -32,7 +32,7 @@ func (svc *ItemService) Create(ctx Context, item repo.ItemCreate) (repo.ItemOut,
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
|
||||
item.AssetID = highest + 1
|
||||
item.AssetID = repo.AssetID(highest + 1)
|
||||
}
|
||||
|
||||
return svc.repo.Items.Create(ctx, ctx.GID, item)
|
||||
@@ -53,7 +53,7 @@ func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int,
|
||||
for _, item := range items {
|
||||
highest++
|
||||
|
||||
err = svc.repo.Items.SetAssetID(ctx, GID, item.ID, highest)
|
||||
err = svc.repo.Items.SetAssetID(ctx, GID, item.ID, repo.AssetID(highest))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -329,7 +329,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
|
||||
return finished, nil
|
||||
}
|
||||
|
||||
func (svc *ItemService) ExportCSV(ctx context.Context, GID uuid.UUID) ([][]string, error) {
|
||||
func (svc *ItemService) ExportTSV(ctx context.Context, GID uuid.UUID) ([][]string, error) {
|
||||
items, err := svc.repo.Items.GetAll(ctx, GID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -342,14 +342,14 @@ func (svc *ItemService) ExportCSV(ctx context.Context, GID uuid.UUID) ([][]strin
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return sheet.CSV()
|
||||
return sheet.TSV()
|
||||
}
|
||||
|
||||
func (svc *ItemService) ExportBillOfMaterialsCSV(ctx context.Context, GID uuid.UUID) ([]byte, error) {
|
||||
func (svc *ItemService) ExportBillOfMaterialsTSV(ctx context.Context, GID uuid.UUID) ([]byte, error) {
|
||||
items, err := svc.repo.Items.GetAll(ctx, GID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return reporting.BillOfMaterialsCSV(items)
|
||||
return reporting.BillOfMaterialsTSV(items)
|
||||
}
|
||||
|
||||
@@ -6,14 +6,14 @@ import (
|
||||
"os"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentID uuid.UUID) (*ent.Document, error) {
|
||||
attachment, err := svc.repo.Attachments.Get(ctx, attachmentID)
|
||||
func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentId uuid.UUID) (*ent.Document, error) {
|
||||
attachment, err := svc.repo.Attachments.Get(ctx, attachmentId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -21,9 +21,9 @@ func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentID uuid.UU
|
||||
return attachment.Edges.Document, nil
|
||||
}
|
||||
|
||||
func (svc *ItemService) AttachmentUpdate(ctx Context, itemID uuid.UUID, data *repo.ItemAttachmentUpdate) (repo.ItemOut, error) {
|
||||
func (svc *ItemService) AttachmentUpdate(ctx Context, itemId uuid.UUID, data *repo.ItemAttachmentUpdate) (repo.ItemOut, error) {
|
||||
// Update Attachment
|
||||
attachment, err := svc.repo.Attachments.Update(ctx, data.ID, data)
|
||||
attachment, err := svc.repo.Attachments.Update(ctx, data.ID, attachment.Type(data.Type))
|
||||
if err != nil {
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
@@ -35,15 +35,15 @@ func (svc *ItemService) AttachmentUpdate(ctx Context, itemID uuid.UUID, data *re
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
|
||||
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID)
|
||||
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
|
||||
}
|
||||
|
||||
// AttachmentAdd adds an attachment to an item by creating an entry in the Documents table and linking it to the Attachment
|
||||
// Table and Items table. The file provided via the reader is stored on the file system based on the provided
|
||||
// relative path during construction of the service.
|
||||
func (svc *ItemService) AttachmentAdd(ctx Context, itemID uuid.UUID, filename string, attachmentType attachment.Type, file io.Reader) (repo.ItemOut, error) {
|
||||
func (svc *ItemService) AttachmentAdd(ctx Context, itemId uuid.UUID, filename string, attachmentType attachment.Type, file io.Reader) (repo.ItemOut, error) {
|
||||
// Get the Item
|
||||
_, err := svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID)
|
||||
_, err := svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
|
||||
if err != nil {
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
@@ -56,29 +56,29 @@ func (svc *ItemService) AttachmentAdd(ctx Context, itemID uuid.UUID, filename st
|
||||
}
|
||||
|
||||
// Create the attachment
|
||||
_, err = svc.repo.Attachments.Create(ctx, itemID, doc.ID, attachmentType)
|
||||
_, err = svc.repo.Attachments.Create(ctx, itemId, doc.ID, attachmentType)
|
||||
if err != nil {
|
||||
log.Err(err).Msg("failed to create attachment")
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
|
||||
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID)
|
||||
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
|
||||
}
|
||||
|
||||
func (svc *ItemService) AttachmentDelete(ctx context.Context, gid, itemID, attachmentID uuid.UUID) error {
|
||||
func (svc *ItemService) AttachmentDelete(ctx context.Context, gid, itemId, attachmentId uuid.UUID) error {
|
||||
// Get the Item
|
||||
_, err := svc.repo.Items.GetOneByGroup(ctx, gid, itemID)
|
||||
_, err := svc.repo.Items.GetOneByGroup(ctx, gid, itemId)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
attachment, err := svc.repo.Attachments.Get(ctx, attachmentID)
|
||||
attachment, err := svc.repo.Attachments.Get(ctx, attachmentId)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Delete the attachment
|
||||
err = svc.repo.Attachments.Delete(ctx, attachmentID)
|
||||
err = svc.repo.Attachments.Delete(ctx, attachmentId)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -7,9 +7,8 @@ import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
func TestItemService_AddAttachment(t *testing.T) {
|
||||
@@ -24,7 +23,7 @@ func TestItemService_AddAttachment(t *testing.T) {
|
||||
Description: "test",
|
||||
Name: "test",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, loc)
|
||||
|
||||
itmC := repo.ItemCreate{
|
||||
@@ -34,11 +33,11 @@ func TestItemService_AddAttachment(t *testing.T) {
|
||||
}
|
||||
|
||||
itm, err := svc.repo.Items.Create(context.Background(), tGroup.ID, itmC)
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, itm)
|
||||
t.Cleanup(func() {
|
||||
err := svc.repo.Items.Delete(context.Background(), itm.ID)
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
|
||||
contents := fk.Str(1000)
|
||||
@@ -46,7 +45,7 @@ func TestItemService_AddAttachment(t *testing.T) {
|
||||
|
||||
// Setup
|
||||
afterAttachment, err := svc.AttachmentAdd(tCtx, itm.ID, "testfile.txt", "attachment", reader)
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, afterAttachment)
|
||||
|
||||
// Check that the file exists
|
||||
@@ -57,6 +56,6 @@ func TestItemService_AddAttachment(t *testing.T) {
|
||||
|
||||
// Check that the file contents are correct
|
||||
bts, err := os.ReadFile(storedPath)
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, contents, string(bts))
|
||||
}
|
||||
|
||||
@@ -6,17 +6,17 @@ import (
"time"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/pkgs/hasher"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/pkgs/hasher"
)

var (
oneWeek = time.Hour * 24 * 7
ErrorInvalidLogin = errors.New("invalid username or password")
ErrorInvalidToken = errors.New("invalid token")
ErrorTokenIDMismatch = errors.New("token id mismatch")
ErrorTokenIdMismatch = errors.New("token id mismatch")
)

type UserService struct {
@@ -92,11 +92,9 @@ func (svc *UserService) RegisterUser(ctx context.Context, data UserRegistration)
if err != nil {
return repo.UserOut{}, err
}
log.Debug().Msg("user created")

// Create the default labels and locations for the group.
if creatingGroup {
log.Debug().Msg("creating default labels")
for _, label := range defaultLabels() {
_, err := svc.repos.Labels.Create(ctx, usr.GroupID, label)
if err != nil {
@@ -104,7 +102,6 @@ func (svc *UserService) RegisterUser(ctx context.Context, data UserRegistration)
}
}

log.Debug().Msg("creating default locations")
for _, location := range defaultLocations() {
_, err := svc.repos.Locations.Create(ctx, usr.GroupID, location)
if err != nil {
@@ -115,7 +112,6 @@ func (svc *UserService) RegisterUser(ctx context.Context, data UserRegistration)

// Decrement the invitation token if it was used.
if token.ID != uuid.Nil {
log.Debug().Msg("decrementing invitation token")
err = svc.repos.Groups.InvitationUpdate(ctx, token.ID, token.Uses-1)
if err != nil {
log.Err(err).Msg("Failed to update invitation token")
@@ -138,13 +134,13 @@ func (svc *UserService) UpdateSelf(ctx context.Context, ID uuid.UUID, data repo.
return repo.UserOut{}, err
}

return svc.repos.Users.GetOneID(ctx, ID)
return svc.repos.Users.GetOneId(ctx, ID)
}

// ============================================================================
// User Authentication

func (svc *UserService) createSessionToken(ctx context.Context, userID uuid.UUID, extendedSession bool) (UserAuthTokenDetail, error) {
func (svc *UserService) createSessionToken(ctx context.Context, userId uuid.UUID, extendedSession bool) (UserAuthTokenDetail, error) {
attachmentToken := hasher.GenerateToken()

expiresAt := time.Now().Add(oneWeek)
@@ -153,7 +149,7 @@ func (svc *UserService) createSessionToken(ctx context.Context, userID uuid.UUID
}

attachmentData := repo.UserAuthTokenCreate{
UserID: userID,
UserID: userId,
TokenHash: attachmentToken.Hash,
ExpiresAt: expiresAt,
}
@@ -165,7 +161,7 @@ func (svc *UserService) createSessionToken(ctx context.Context, userID uuid.UUID

userToken := hasher.GenerateToken()
data := repo.UserAuthTokenCreate{
UserID: userID,
UserID: userId,
TokenHash: userToken.Hash,
ExpiresAt: expiresAt,
}
@@ -222,7 +218,7 @@ func (svc *UserService) DeleteSelf(ctx context.Context, ID uuid.UUID) error {
}

func (svc *UserService) ChangePassword(ctx Context, current string, new string) (ok bool) {
usr, err := svc.repos.Users.GetOneID(ctx, ctx.UID)
usr, err := svc.repos.Users.GetOneId(ctx, ctx.UID)
if err != nil {
return false
}

@@ -1,7 +1,7 @@
package services

import (
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/repo"
)

func defaultLocations() []repo.LocationCreate {

@@ -10,9 +10,9 @@ import (
"entgo.io/ent"
"entgo.io/ent/dialect/sql"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/ent/document"
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
)

// Attachment is the model entity for the Attachment schema.
@@ -26,8 +26,6 @@ type Attachment struct {
UpdatedAt time.Time `json:"updated_at,omitempty"`
// Type holds the value of the "type" field.
Type attachment.Type `json:"type,omitempty"`
// Primary holds the value of the "primary" field.
Primary bool `json:"primary,omitempty"`
// Edges holds the relations/edges for other nodes in the graph.
// The values are being populated by the AttachmentQuery when eager-loading is set.
Edges AttachmentEdges `json:"edges"`
@@ -78,8 +76,6 @@ func (*Attachment) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns))
for i := range columns {
switch columns[i] {
case attachment.FieldPrimary:
values[i] = new(sql.NullBool)
case attachment.FieldType:
values[i] = new(sql.NullString)
case attachment.FieldCreatedAt, attachment.FieldUpdatedAt:
@@ -129,12 +125,6 @@ func (a *Attachment) assignValues(columns []string, values []any) error {
} else if value.Valid {
a.Type = attachment.Type(value.String)
}
case attachment.FieldPrimary:
if value, ok := values[i].(*sql.NullBool); !ok {
return fmt.Errorf("unexpected type %T for field primary", values[i])
} else if value.Valid {
a.Primary = value.Bool
}
case attachment.ForeignKeys[0]:
if value, ok := values[i].(*sql.NullScanner); !ok {
return fmt.Errorf("unexpected type %T for field document_attachments", values[i])
@@ -203,9 +193,6 @@ func (a *Attachment) String() string {
builder.WriteString(", ")
builder.WriteString("type=")
builder.WriteString(fmt.Sprintf("%v", a.Type))
builder.WriteString(", ")
builder.WriteString("primary=")
builder.WriteString(fmt.Sprintf("%v", a.Primary))
builder.WriteByte(')')
return builder.String()
}

@@ -22,8 +22,6 @@ const (
FieldUpdatedAt = "updated_at"
// FieldType holds the string denoting the type field in the database.
FieldType = "type"
// FieldPrimary holds the string denoting the primary field in the database.
FieldPrimary = "primary"
// EdgeItem holds the string denoting the item edge name in mutations.
EdgeItem = "item"
// EdgeDocument holds the string denoting the document edge name in mutations.
@@ -52,7 +50,6 @@ var Columns = []string{
FieldCreatedAt,
FieldUpdatedAt,
FieldType,
FieldPrimary,
}

// ForeignKeys holds the SQL foreign-keys that are owned by the "attachments"
@@ -84,8 +81,6 @@ var (
DefaultUpdatedAt func() time.Time
// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
UpdateDefaultUpdatedAt func() time.Time
// DefaultPrimary holds the default value on creation for the "primary" field.
DefaultPrimary bool
// DefaultID holds the default value on creation for the "id" field.
DefaultID func() uuid.UUID
)
@@ -142,11 +137,6 @@ func ByType(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldType, opts...).ToFunc()
}

// ByPrimary orders the results by the primary field.
func ByPrimary(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldPrimary, opts...).ToFunc()
}

// ByItemField orders the results by item field.
func ByItemField(field string, opts ...sql.OrderTermOption) OrderOption {
return func(s *sql.Selector) {

@@ -8,7 +8,7 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)

// ID filters vertices based on their ID field.
@@ -66,11 +66,6 @@ func UpdatedAt(v time.Time) predicate.Attachment {
return predicate.Attachment(sql.FieldEQ(FieldUpdatedAt, v))
}

// Primary applies equality check predicate on the "primary" field. It's identical to PrimaryEQ.
func Primary(v bool) predicate.Attachment {
return predicate.Attachment(sql.FieldEQ(FieldPrimary, v))
}

// CreatedAtEQ applies the EQ predicate on the "created_at" field.
func CreatedAtEQ(v time.Time) predicate.Attachment {
return predicate.Attachment(sql.FieldEQ(FieldCreatedAt, v))
@@ -171,16 +166,6 @@ func TypeNotIn(vs ...Type) predicate.Attachment {
return predicate.Attachment(sql.FieldNotIn(FieldType, vs...))
}

// PrimaryEQ applies the EQ predicate on the "primary" field.
func PrimaryEQ(v bool) predicate.Attachment {
return predicate.Attachment(sql.FieldEQ(FieldPrimary, v))
}

// PrimaryNEQ applies the NEQ predicate on the "primary" field.
func PrimaryNEQ(v bool) predicate.Attachment {
return predicate.Attachment(sql.FieldNEQ(FieldPrimary, v))
}

// HasItem applies the HasEdge predicate on the "item" edge.
func HasItem() predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
@@ -229,15 +214,32 @@ func HasDocumentWith(preds ...predicate.Document) predicate.Attachment {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Attachment) predicate.Attachment {
return predicate.Attachment(sql.AndPredicates(predicates...))
return predicate.Attachment(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Attachment) predicate.Attachment {
return predicate.Attachment(sql.OrPredicates(predicates...))
return predicate.Attachment(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
}

// Not applies the not operator on the given predicate.
func Not(p predicate.Attachment) predicate.Attachment {
return predicate.Attachment(sql.NotPredicates(p))
return predicate.Attachment(func(s *sql.Selector) {
p(s.Not())
})
}

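The And/Or/Not helpers in the hunk above are the combinators callers use to fold several field predicates into a single WHERE clause. A minimal sketch of how they compose in a query, assuming an existing *ent.Client and the sysadminsmedia module path (swap the import prefix to match your checkout); the "photo" enum value and the function name are illustrative, not taken from this diff:

package example

import (
	"context"
	"time"

	"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
	"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
)

// recentNonPhotoAttachments combines field predicates with And/Not; the
// generated closures (or sql.AndPredicates helpers) above render this as SQL.
func recentNonPhotoAttachments(ctx context.Context, client *ent.Client, since time.Time) ([]*ent.Attachment, error) {
	return client.Attachment.Query().
		Where(
			attachment.And(
				attachment.CreatedAtGTE(since),
				// "photo" is an assumed enum value for illustration only.
				attachment.Not(attachment.TypeEQ(attachment.Type("photo"))),
			),
		).
		All(ctx)
}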
@@ -11,9 +11,9 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/ent/document"
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
)

// AttachmentCreate is the builder for creating a Attachment entity.
@@ -65,20 +65,6 @@ func (ac *AttachmentCreate) SetNillableType(a *attachment.Type) *AttachmentCreat
return ac
}

// SetPrimary sets the "primary" field.
func (ac *AttachmentCreate) SetPrimary(b bool) *AttachmentCreate {
ac.mutation.SetPrimary(b)
return ac
}

// SetNillablePrimary sets the "primary" field if the given value is not nil.
func (ac *AttachmentCreate) SetNillablePrimary(b *bool) *AttachmentCreate {
if b != nil {
ac.SetPrimary(*b)
}
return ac
}

// SetID sets the "id" field.
func (ac *AttachmentCreate) SetID(u uuid.UUID) *AttachmentCreate {
ac.mutation.SetID(u)
@@ -162,10 +148,6 @@ func (ac *AttachmentCreate) defaults() {
v := attachment.DefaultType
ac.mutation.SetType(v)
}
if _, ok := ac.mutation.Primary(); !ok {
v := attachment.DefaultPrimary
ac.mutation.SetPrimary(v)
}
if _, ok := ac.mutation.ID(); !ok {
v := attachment.DefaultID()
ac.mutation.SetID(v)
@@ -188,9 +170,6 @@ func (ac *AttachmentCreate) check() error {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
}
}
if _, ok := ac.mutation.Primary(); !ok {
return &ValidationError{Name: "primary", err: errors.New(`ent: missing required field "Attachment.primary"`)}
}
if _, ok := ac.mutation.ItemID(); !ok {
return &ValidationError{Name: "item", err: errors.New(`ent: missing required edge "Attachment.item"`)}
}
@@ -244,10 +223,6 @@ func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
_spec.SetField(attachment.FieldType, field.TypeEnum, value)
_node.Type = value
}
if value, ok := ac.mutation.Primary(); ok {
_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
_node.Primary = value
}
if nodes := ac.mutation.ItemIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
@@ -288,15 +263,11 @@ func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
// AttachmentCreateBulk is the builder for creating many Attachment entities in bulk.
type AttachmentCreateBulk struct {
config
err error
builders []*AttachmentCreate
}

// Save creates the Attachment entities in the database.
func (acb *AttachmentCreateBulk) Save(ctx context.Context) ([]*Attachment, error) {
if acb.err != nil {
return nil, acb.err
}
specs := make([]*sqlgraph.CreateSpec, len(acb.builders))
nodes := make([]*Attachment, len(acb.builders))
mutators := make([]Mutator, len(acb.builders))

@@ -8,8 +8,8 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)

// AttachmentDelete is the builder for deleting a Attachment entity.

@@ -11,10 +11,10 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/ent/document"
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)

// AttachmentQuery is the builder for querying Attachment entities.

@@ -12,10 +12,10 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/ent/document"
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)

// AttachmentUpdate is the builder for updating Attachment entities.
@@ -51,20 +51,6 @@ func (au *AttachmentUpdate) SetNillableType(a *attachment.Type) *AttachmentUpdat
return au
}

// SetPrimary sets the "primary" field.
func (au *AttachmentUpdate) SetPrimary(b bool) *AttachmentUpdate {
au.mutation.SetPrimary(b)
return au
}

// SetNillablePrimary sets the "primary" field if the given value is not nil.
func (au *AttachmentUpdate) SetNillablePrimary(b *bool) *AttachmentUpdate {
if b != nil {
au.SetPrimary(*b)
}
return au
}

// SetItemID sets the "item" edge to the Item entity by ID.
func (au *AttachmentUpdate) SetItemID(id uuid.UUID) *AttachmentUpdate {
au.mutation.SetItemID(id)
@@ -174,9 +160,6 @@ func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
if value, ok := au.mutation.GetType(); ok {
_spec.SetField(attachment.FieldType, field.TypeEnum, value)
}
if value, ok := au.mutation.Primary(); ok {
_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
}
if au.mutation.ItemCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
@@ -275,20 +258,6 @@ func (auo *AttachmentUpdateOne) SetNillableType(a *attachment.Type) *AttachmentU
return auo
}

// SetPrimary sets the "primary" field.
func (auo *AttachmentUpdateOne) SetPrimary(b bool) *AttachmentUpdateOne {
auo.mutation.SetPrimary(b)
return auo
}

// SetNillablePrimary sets the "primary" field if the given value is not nil.
func (auo *AttachmentUpdateOne) SetNillablePrimary(b *bool) *AttachmentUpdateOne {
if b != nil {
auo.SetPrimary(*b)
}
return auo
}

// SetItemID sets the "item" edge to the Item entity by ID.
func (auo *AttachmentUpdateOne) SetItemID(id uuid.UUID) *AttachmentUpdateOne {
auo.mutation.SetItemID(id)
@@ -428,9 +397,6 @@ func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment,
if value, ok := auo.mutation.GetType(); ok {
_spec.SetField(attachment.FieldType, field.TypeEnum, value)
}
if value, ok := auo.mutation.Primary(); ok {
_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
}
if auo.mutation.ItemCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,

@@ -9,8 +9,8 @@ import (
"entgo.io/ent"
"entgo.io/ent/dialect/sql"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
)

// AuthRoles is the model entity for the AuthRoles schema.

@@ -5,7 +5,7 @@ package authroles
import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)

// ID filters vertices based on their ID field.
@@ -98,15 +98,32 @@ func HasTokenWith(preds ...predicate.AuthTokens) predicate.AuthRoles {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.AuthRoles) predicate.AuthRoles {
return predicate.AuthRoles(sql.AndPredicates(predicates...))
return predicate.AuthRoles(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.AuthRoles) predicate.AuthRoles {
return predicate.AuthRoles(sql.OrPredicates(predicates...))
return predicate.AuthRoles(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
}

// Not applies the not operator on the given predicate.
func Not(p predicate.AuthRoles) predicate.AuthRoles {
return predicate.AuthRoles(sql.NotPredicates(p))
return predicate.AuthRoles(func(s *sql.Selector) {
p(s.Not())
})
}

@@ -10,8 +10,8 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
)

// AuthRolesCreate is the builder for creating a AuthRoles entity.
@@ -158,15 +158,11 @@ func (arc *AuthRolesCreate) createSpec() (*AuthRoles, *sqlgraph.CreateSpec) {
// AuthRolesCreateBulk is the builder for creating many AuthRoles entities in bulk.
type AuthRolesCreateBulk struct {
config
err error
builders []*AuthRolesCreate
}

// Save creates the AuthRoles entities in the database.
func (arcb *AuthRolesCreateBulk) Save(ctx context.Context) ([]*AuthRoles, error) {
if arcb.err != nil {
return nil, arcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(arcb.builders))
nodes := make([]*AuthRoles, len(arcb.builders))
mutators := make([]Mutator, len(arcb.builders))

@@ -8,8 +8,8 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)

// AuthRolesDelete is the builder for deleting a AuthRoles entity.

@@ -11,9 +11,9 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)

// AuthRolesQuery is the builder for querying AuthRoles entities.

@@ -11,9 +11,9 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)

// AuthRolesUpdate is the builder for updating AuthRoles entities.

@@ -10,9 +10,9 @@ import (
"entgo.io/ent"
"entgo.io/ent/dialect/sql"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/user"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)

// AuthTokens is the model entity for the AuthTokens schema.

@@ -8,7 +8,7 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)

// ID filters vertices based on their ID field.
@@ -284,15 +284,32 @@ func HasRolesWith(preds ...predicate.AuthRoles) predicate.AuthTokens {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.AuthTokens) predicate.AuthTokens {
return predicate.AuthTokens(sql.AndPredicates(predicates...))
return predicate.AuthTokens(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.AuthTokens) predicate.AuthTokens {
return predicate.AuthTokens(sql.OrPredicates(predicates...))
return predicate.AuthTokens(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
}

// Not applies the not operator on the given predicate.
func Not(p predicate.AuthTokens) predicate.AuthTokens {
return predicate.AuthTokens(sql.NotPredicates(p))
return predicate.AuthTokens(func(s *sql.Selector) {
p(s.Not())
})
}

@@ -11,9 +11,9 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/user"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)

// AuthTokensCreate is the builder for creating a AuthTokens entity.
@@ -280,15 +280,11 @@ func (atc *AuthTokensCreate) createSpec() (*AuthTokens, *sqlgraph.CreateSpec) {
// AuthTokensCreateBulk is the builder for creating many AuthTokens entities in bulk.
type AuthTokensCreateBulk struct {
config
err error
builders []*AuthTokensCreate
}

// Save creates the AuthTokens entities in the database.
func (atcb *AuthTokensCreateBulk) Save(ctx context.Context) ([]*AuthTokens, error) {
if atcb.err != nil {
return nil, atcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(atcb.builders))
nodes := make([]*AuthTokens, len(atcb.builders))
mutators := make([]Mutator, len(atcb.builders))

@@ -8,8 +8,8 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)

// AuthTokensDelete is the builder for deleting a AuthTokens entity.

@@ -12,10 +12,10 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/user"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)

// AuthTokensQuery is the builder for querying AuthTokens entities.

@@ -12,10 +12,10 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/user"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)

// AuthTokensUpdate is the builder for updating AuthTokens entities.

@@ -7,28 +7,27 @@ import (
"errors"
"fmt"
"log"
"reflect"

"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/migrate"
"github.com/hay-kot/homebox/backend/internal/data/ent/migrate"

"entgo.io/ent"
"entgo.io/ent/dialect"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/group"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/groupinvitationtoken"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/itemfield"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/label"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/location"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/maintenanceentry"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/notifier"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/user"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
"github.com/hay-kot/homebox/backend/internal/data/ent/document"
"github.com/hay-kot/homebox/backend/internal/data/ent/group"
"github.com/hay-kot/homebox/backend/internal/data/ent/groupinvitationtoken"
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
"github.com/hay-kot/homebox/backend/internal/data/ent/itemfield"
"github.com/hay-kot/homebox/backend/internal/data/ent/label"
"github.com/hay-kot/homebox/backend/internal/data/ent/location"
"github.com/hay-kot/homebox/backend/internal/data/ent/maintenanceentry"
"github.com/hay-kot/homebox/backend/internal/data/ent/notifier"
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)

// Client is the client that holds all ent builders.
@@ -66,7 +65,9 @@ type Client struct {

// NewClient creates a new client configured with the given options.
func NewClient(opts ...Option) *Client {
client := &Client{config: newConfig(opts...)}
cfg := config{log: log.Println, hooks: &hooks{}, inters: &inters{}}
cfg.options(opts...)
client := &Client{config: cfg}
client.init()
return client
}
@@ -106,13 +107,6 @@ type (
Option func(*config)
)

// newConfig creates a new config for the client.
func newConfig(opts ...Option) config {
cfg := config{log: log.Println, hooks: &hooks{}, inters: &inters{}}
cfg.options(opts...)
return cfg
}

// options applies the options on the config object.
func (c *config) options(opts ...Option) {
for _, opt := range opts {
@@ -160,14 +154,11 @@ func Open(driverName, dataSourceName string, options ...Option) (*Client, error)
}
}

// ErrTxStarted is returned when trying to start a new transaction from a transactional client.
var ErrTxStarted = errors.New("ent: cannot start a transaction within a transaction")

// Tx returns a new transactional client. The provided context
// is used until the transaction is committed or rolled back.
func (c *Client) Tx(ctx context.Context) (*Tx, error) {
if _, ok := c.driver.(*txDriver); ok {
return nil, ErrTxStarted
return nil, errors.New("ent: cannot start a transaction within a transaction")
}
tx, err := newTx(ctx, c.driver)
if err != nil {
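The Tx method documented in this hunk hands back a transactional client whose builders all run inside one database transaction until it is committed or rolled back. A hedged sketch of the usual wrapper pattern around it, assuming the sysadminsmedia module path; the helper name withTx is illustrative and not part of this diff:

package example

import (
	"context"
	"fmt"

	"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
)

// withTx runs fn inside a single transaction and rolls back on error,
// mirroring the Commit/Rollback contract described in the comment above.
func withTx(ctx context.Context, client *ent.Client, fn func(tx *ent.Tx) error) error {
	tx, err := client.Tx(ctx)
	if err != nil {
		return err
	}
	if err := fn(tx); err != nil {
		if rerr := tx.Rollback(); rerr != nil {
			err = fmt.Errorf("%w: rolling back transaction: %v", err, rerr)
		}
		return err
	}
	return tx.Commit()
}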
@@ -339,21 +330,6 @@ func (c *AttachmentClient) CreateBulk(builders ...*AttachmentCreate) *Attachment
return &AttachmentCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *AttachmentClient) MapCreateBulk(slice any, setFunc func(*AttachmentCreate, int)) *AttachmentCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &AttachmentCreateBulk{err: fmt.Errorf("calling to AttachmentClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*AttachmentCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &AttachmentCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Attachment.
func (c *AttachmentClient) Update() *AttachmentUpdate {
mutation := newAttachmentMutation(c.config, OpUpdate)
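MapCreateBulk, as the comment in this hunk says, builds one create builder per element of an arbitrary slice and hands each to setFunc before the bulk save. A hedged usage sketch against the attachment client, assuming the sysadminsmedia module path; the upload struct and its fields are purely illustrative. The same helper is generated for every other client below:

package example

import (
	"context"

	"github.com/google/uuid"

	"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
	"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
)

// upload is a stand-in for whatever domain data is being bulk-imported.
type upload struct {
	ItemID uuid.UUID
	Kind   string
}

// bulkAttach maps the slice into AttachmentCreate builders and saves them in one bulk insert.
func bulkAttach(ctx context.Context, client *ent.Client, uploads []upload) ([]*ent.Attachment, error) {
	return client.Attachment.
		MapCreateBulk(uploads, func(c *ent.AttachmentCreate, i int) {
			c.SetItemID(uploads[i].ItemID).
				SetType(attachment.Type(uploads[i].Kind)) // Kind is an assumed field for illustration
		}).
		Save(ctx)
}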
@@ -504,21 +480,6 @@ func (c *AuthRolesClient) CreateBulk(builders ...*AuthRolesCreate) *AuthRolesCre
return &AuthRolesCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *AuthRolesClient) MapCreateBulk(slice any, setFunc func(*AuthRolesCreate, int)) *AuthRolesCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &AuthRolesCreateBulk{err: fmt.Errorf("calling to AuthRolesClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*AuthRolesCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &AuthRolesCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for AuthRoles.
func (c *AuthRolesClient) Update() *AuthRolesUpdate {
mutation := newAuthRolesMutation(c.config, OpUpdate)
@@ -653,21 +614,6 @@ func (c *AuthTokensClient) CreateBulk(builders ...*AuthTokensCreate) *AuthTokens
return &AuthTokensCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *AuthTokensClient) MapCreateBulk(slice any, setFunc func(*AuthTokensCreate, int)) *AuthTokensCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &AuthTokensCreateBulk{err: fmt.Errorf("calling to AuthTokensClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*AuthTokensCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &AuthTokensCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for AuthTokens.
func (c *AuthTokensClient) Update() *AuthTokensUpdate {
mutation := newAuthTokensMutation(c.config, OpUpdate)
@@ -818,21 +764,6 @@ func (c *DocumentClient) CreateBulk(builders ...*DocumentCreate) *DocumentCreate
return &DocumentCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *DocumentClient) MapCreateBulk(slice any, setFunc func(*DocumentCreate, int)) *DocumentCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &DocumentCreateBulk{err: fmt.Errorf("calling to DocumentClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*DocumentCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &DocumentCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Document.
func (c *DocumentClient) Update() *DocumentUpdate {
mutation := newDocumentMutation(c.config, OpUpdate)
@@ -983,21 +914,6 @@ func (c *GroupClient) CreateBulk(builders ...*GroupCreate) *GroupCreateBulk {
return &GroupCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *GroupClient) MapCreateBulk(slice any, setFunc func(*GroupCreate, int)) *GroupCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &GroupCreateBulk{err: fmt.Errorf("calling to GroupClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*GroupCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &GroupCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Group.
func (c *GroupClient) Update() *GroupUpdate {
mutation := newGroupMutation(c.config, OpUpdate)
@@ -1228,21 +1144,6 @@ func (c *GroupInvitationTokenClient) CreateBulk(builders ...*GroupInvitationToke
return &GroupInvitationTokenCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *GroupInvitationTokenClient) MapCreateBulk(slice any, setFunc func(*GroupInvitationTokenCreate, int)) *GroupInvitationTokenCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &GroupInvitationTokenCreateBulk{err: fmt.Errorf("calling to GroupInvitationTokenClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*GroupInvitationTokenCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &GroupInvitationTokenCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for GroupInvitationToken.
func (c *GroupInvitationTokenClient) Update() *GroupInvitationTokenUpdate {
mutation := newGroupInvitationTokenMutation(c.config, OpUpdate)
@@ -1377,21 +1278,6 @@ func (c *ItemClient) CreateBulk(builders ...*ItemCreate) *ItemCreateBulk {
return &ItemCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *ItemClient) MapCreateBulk(slice any, setFunc func(*ItemCreate, int)) *ItemCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &ItemCreateBulk{err: fmt.Errorf("calling to ItemClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*ItemCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &ItemCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Item.
func (c *ItemClient) Update() *ItemUpdate {
mutation := newItemMutation(c.config, OpUpdate)
@@ -1638,21 +1524,6 @@ func (c *ItemFieldClient) CreateBulk(builders ...*ItemFieldCreate) *ItemFieldCre
return &ItemFieldCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *ItemFieldClient) MapCreateBulk(slice any, setFunc func(*ItemFieldCreate, int)) *ItemFieldCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &ItemFieldCreateBulk{err: fmt.Errorf("calling to ItemFieldClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*ItemFieldCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &ItemFieldCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for ItemField.
func (c *ItemFieldClient) Update() *ItemFieldUpdate {
mutation := newItemFieldMutation(c.config, OpUpdate)
@@ -1787,21 +1658,6 @@ func (c *LabelClient) CreateBulk(builders ...*LabelCreate) *LabelCreateBulk {
return &LabelCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *LabelClient) MapCreateBulk(slice any, setFunc func(*LabelCreate, int)) *LabelCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &LabelCreateBulk{err: fmt.Errorf("calling to LabelClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*LabelCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &LabelCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Label.
func (c *LabelClient) Update() *LabelUpdate {
mutation := newLabelMutation(c.config, OpUpdate)
@@ -1952,21 +1808,6 @@ func (c *LocationClient) CreateBulk(builders ...*LocationCreate) *LocationCreate
return &LocationCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *LocationClient) MapCreateBulk(slice any, setFunc func(*LocationCreate, int)) *LocationCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &LocationCreateBulk{err: fmt.Errorf("calling to LocationClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*LocationCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &LocationCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Location.
func (c *LocationClient) Update() *LocationUpdate {
mutation := newLocationMutation(c.config, OpUpdate)
@@ -2149,21 +1990,6 @@ func (c *MaintenanceEntryClient) CreateBulk(builders ...*MaintenanceEntryCreate)
return &MaintenanceEntryCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *MaintenanceEntryClient) MapCreateBulk(slice any, setFunc func(*MaintenanceEntryCreate, int)) *MaintenanceEntryCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &MaintenanceEntryCreateBulk{err: fmt.Errorf("calling to MaintenanceEntryClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*MaintenanceEntryCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &MaintenanceEntryCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for MaintenanceEntry.
func (c *MaintenanceEntryClient) Update() *MaintenanceEntryUpdate {
mutation := newMaintenanceEntryMutation(c.config, OpUpdate)
@@ -2298,21 +2124,6 @@ func (c *NotifierClient) CreateBulk(builders ...*NotifierCreate) *NotifierCreate
return &NotifierCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *NotifierClient) MapCreateBulk(slice any, setFunc func(*NotifierCreate, int)) *NotifierCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &NotifierCreateBulk{err: fmt.Errorf("calling to NotifierClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*NotifierCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &NotifierCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for Notifier.
func (c *NotifierClient) Update() *NotifierUpdate {
mutation := newNotifierMutation(c.config, OpUpdate)
@@ -2463,21 +2274,6 @@ func (c *UserClient) CreateBulk(builders ...*UserCreate) *UserCreateBulk {
return &UserCreateBulk{config: c.config, builders: builders}
}

// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
// a builder and applies setFunc on it.
func (c *UserClient) MapCreateBulk(slice any, setFunc func(*UserCreate, int)) *UserCreateBulk {
rv := reflect.ValueOf(slice)
if rv.Kind() != reflect.Slice {
return &UserCreateBulk{err: fmt.Errorf("calling to UserClient.MapCreateBulk with wrong type %T, need slice", slice)}
}
builders := make([]*UserCreate, rv.Len())
for i := 0; i < rv.Len(); i++ {
builders[i] = c.Create()
setFunc(builders[i], i)
}
return &UserCreateBulk{config: c.config, builders: builders}
}

// Update returns an update builder for User.
func (c *UserClient) Update() *UserUpdate {
mutation := newUserMutation(c.config, OpUpdate)

@@ -10,8 +10,8 @@ import (
"entgo.io/ent"
"entgo.io/ent/dialect/sql"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/group"
"github.com/hay-kot/homebox/backend/internal/data/ent/document"
"github.com/hay-kot/homebox/backend/internal/data/ent/group"
)

// Document is the model entity for the Document schema.

@@ -8,7 +8,7 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)

// ID filters vertices based on their ID field.
@@ -334,15 +334,32 @@ func HasAttachmentsWith(preds ...predicate.Attachment) predicate.Document {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Document) predicate.Document {
return predicate.Document(sql.AndPredicates(predicates...))
return predicate.Document(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Document) predicate.Document {
return predicate.Document(sql.OrPredicates(predicates...))
return predicate.Document(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
}

// Not applies the not operator on the given predicate.
func Not(p predicate.Document) predicate.Document {
return predicate.Document(sql.NotPredicates(p))
return predicate.Document(func(s *sql.Selector) {
p(s.Not())
})
}

@@ -11,9 +11,9 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/group"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/ent/document"
"github.com/hay-kot/homebox/backend/internal/data/ent/group"
)

// DocumentCreate is the builder for creating a Document entity.
@@ -269,15 +269,11 @@ func (dc *DocumentCreate) createSpec() (*Document, *sqlgraph.CreateSpec) {
// DocumentCreateBulk is the builder for creating many Document entities in bulk.
type DocumentCreateBulk struct {
config
err error
builders []*DocumentCreate
}

// Save creates the Document entities in the database.
func (dcb *DocumentCreateBulk) Save(ctx context.Context) ([]*Document, error) {
if dcb.err != nil {
return nil, dcb.err
}
specs := make([]*sqlgraph.CreateSpec, len(dcb.builders))
nodes := make([]*Document, len(dcb.builders))
mutators := make([]Mutator, len(dcb.builders))

@@ -8,8 +8,8 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
"github.com/hay-kot/homebox/backend/internal/data/ent/document"
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)

// DocumentDelete is the builder for deleting a Document entity.

Some files were not shown because too many files have changed in this diff.