Compare commits


22 Commits

Author SHA1 Message Date
Hayden
2cd107b8bd explicity dependency 2023-08-24 09:20:53 -05:00
Hayden
a3cce59a2a fix https connection 2023-08-23 12:27:34 -05:00
Hayden
9fa17bec90 update lock file 2023-08-09 21:49:32 -05:00
Cheng Gu
b5987f2e8d feat: set cookies' expires attribute and fix remember me (#530) 2023-08-09 18:48:39 -08:00
Hayden
2cbcc8bb1d feat: WebSocket based implementation of server sent events for cache busting (#527)
* rough implementation of WS based event system for server side notifications of mutation

* fix test construction

* fix deadlock on event bus

* disable linter error

* add item mutation events

* remove old event bus code

* refactor event system to use composables

* refresh items table when new item is added

* fix create form errors

* cleanup unnecessary calls

* fix importer erorrs + limit fn calls on import
2023-08-02 13:00:57 -08:00
Hayden
cceec06148 specify h3 dependency 2023-08-02 09:05:07 -05:00
Hayden
2e2eed143d try node 18 2023-08-02 09:01:47 -05:00
renovate[bot]
272cc5a370 chore(deps): update dependency vitest to ^0.34.0 (#529)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-08-02 05:59:31 -08:00
Hayden
275e106d72 build nightly rootless 2023-08-02 08:47:53 -05:00
Hayden
3f0e65a2ad include rootless dockerfile 2023-08-02 08:45:22 -05:00
Hayden
22bbaae08f feat: add support for create + add more for all create modals and support k… (#526)
* add support for create + add more for all create modals and support keyboard bindings

* listen for esc to close modals
2023-07-31 09:53:26 -08:00
Hayden
8c7d91ea52 fix: prevent resetting dialog state on error (#524) 2023-07-31 08:22:08 -08:00
Hayden
5a219f6a9c feat: support cmd+s / ctrl+s and rework button display on edit (#523) 2023-07-31 06:57:42 -08:00
Hayden
895017b28e fix: label prop not being passed to password input (#522) 2023-07-31 06:08:35 -08:00
Hayden
02ce52dbe3 fix: assert/asserts (#521) 2023-07-31 06:05:37 -08:00
Hayden
c5ae6b17f9 feat: more currency support (#520)
* add multiple new currencies

* add multiple new currencies

* remove duplicate yen
2023-07-31 05:59:36 -08:00
renovate[bot]
371fc0a6af chore(deps): update dependency mkdocs-material to v9.1.21 (#512)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-29 09:54:55 -08:00
Hayden
016780920d ui: rework location/labels pages (#475)
* formatting

* slimdown locations page

* update location/labels

* fix dependency issues

* fix type generator

* cleanup unused variables
2023-07-27 13:21:28 -08:00
db8200
06eb6c1f91 fix 3 places where API URLs were not constructed by function route (#451)
* Fixed 3 places where API URLs were not constructed by function route(path, params).

* autofix

---------

Co-authored-by: Hayden <64056131+hay-kot@users.noreply.github.com>
2023-07-22 20:11:29 -08:00
renovate[bot]
27dad0e118 fix(deps): update module github.com/swaggo/http-swagger to v2 (#508)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-22 20:10:52 -08:00
renovate[bot]
dc9446516a fix(deps): update module github.com/swaggo/http-swagger to v2 (#506)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-07-22 20:09:43 -08:00
Hayden
a042496c71 chore: bump all go deps (#507)
* bump all deps

* run code-gen
2023-07-22 19:57:51 -08:00
413 changed files with 14138 additions and 32192 deletions


@@ -35,6 +35,6 @@
 	// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
 	"remoteUser": "node",
 	"features": {
-		"ghcr.io/devcontainers/features/go:1": "1.21"
+		"golang": "1.20"
 	}
 }

.github/FUNDING.yml vendored

@@ -1 +1 @@
-github: [tankerkiller125,katosdev,tonyaellie]
+github: [hay-kot]


@@ -1,9 +1,7 @@
 ---
 name: "Bug Report"
 description: "Submit a bug report for the current release"
-labels: ["🕷️ bug"]
-projects: ["sysadminsmedia/2"]
-type: "Bug"
+labels: ["bug"]
 body:
   - type: checkboxes
     id: checks
@@ -21,8 +19,6 @@ body:
           required: true
         - label: I already read the docs and didn't find an answer.
           required: true
-        - label: I can replicate the issue inside the Demo install.
-          required: true
   - type: input
     id: homebox-version
     attributes:
@@ -58,18 +54,6 @@ body:
         - Other
       validations:
         required: true
-  - type: dropdown
-    id: arch
-    attributes:
-      label: OS Architechture
-      description: What type of processor are you running on.
-      multiple: true
-      options:
-        - x86_64 (AMD, Intel)
-        - ARM64
-        - ARM/v7
-    validations:
-      required: true
   - type: textarea
     id: os-details
     attributes:


@@ -1,9 +1,7 @@
 ---
 name: "Feature Request"
 description: "Submit a feature request for the current release"
-labels: ["⬆️ enhancement"]
-projects: ["sysadminsmedia/2"]
-type: "Enhancement"
+labels: ["feature-request"]
 body:
   - type: textarea
     id: problem-statement


@@ -55,4 +55,18 @@ _(fill-in or delete this section)_
 <!--
 Describe how you tested this change.
 -->
+## Release Notes
+_(REQUIRED)_
+<!--
+If this PR makes user facing changes, please describe them here. This
+description will be copied into the release notes/changelog, whenever the
+next version is released. Keep this section short, and focus on high level
+changes.
+Put your text between the block. To omit notes, use NONE within the block.
+-->
+```release-note
+```


@@ -1,64 +0,0 @@
import requests
import json
import os
def fetch_currencies():
try:
response = requests.get('https://restcountries.com/v3.1/all?fields=name,common,currencies')
response.raise_for_status()
except requests.exceptions.RequestException as e:
print(f"An error occurred while making the request: {e}")
return []
try:
countries = response.json()
except json.JSONDecodeError:
print("Failed to decode JSON from the response.")
return []
currencies_list = []
for country in countries:
country_name = country.get('name', {}).get('common')
country_currencies = country.get('currencies', {})
for currency_code, currency_info in country_currencies.items():
symbol = currency_info.get('symbol', '')
currencies_list.append({
'code': currency_code,
'local': country_name,
'symbol': symbol,
'name': currency_info.get('name')
})
return currencies_list
def save_currencies(currencies, file_path):
# Sort the list by the "local" field
sorted_currencies = sorted(currencies, key=lambda x: x['local'].lower() if x['local'] else "")
try:
os.makedirs(os.path.dirname(file_path), exist_ok=True)
with open(file_path, 'w', encoding='utf-8') as f:
json.dump(sorted_currencies, f, ensure_ascii=False, indent=4)
except IOError as e:
print(f"An error occurred while writing to the file: {e}")
def load_existing_currencies(file_path):
try:
with open(file_path, 'r', encoding='utf-8') as f:
return json.load(f)
except (IOError, json.JSONDecodeError):
return [] # Return an empty list if file doesn't exist or is invalid
def main():
save_path = 'backend/internal/core/currencies/currencies.json'
existing_currencies = load_existing_currencies(save_path)
new_currencies = fetch_currencies()
if new_currencies == existing_currencies:
print("Currencies up-to-date with API, skipping commit.")
else:
save_currencies(new_currencies, save_path)
print("Currencies updated and saved.")
if __name__ == "__main__":
main()


@@ -1,47 +0,0 @@
name: Publish Release Binaries
on:
push:
tags: [ 'v*.*.*' ]
jobs:
# backend-tests:
# name: "Backend Server Tests"
# uses: sysadminsmedia/homebox/.github/workflows/partial-backend.yaml@main
# frontend-tests:
# name: "Frontend and End-to-End Tests"
# uses: sysadminsmedia/homebox/.github/workflows/partial-frontend.yaml@main
goreleaser:
name: goreleaser
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@v5
- uses: pnpm/action-setup@v2
with:
version: 7.30.1
- name: Build Frontend and Copy to Backend
working-directory: frontend
run: |
pnpm install --shamefully-hoist
pnpm run build
cp -r ./.output/public ../backend/app/api/static/
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v5
with:
workdir: "backend"
distribution: goreleaser
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -1,25 +0,0 @@
name: Docker Cleanup
on:
schedule:
- cron: '00 0 * * *'
workflow_dispatch:
jobs:
delete-untagged-images:
name: Delete Untagged Images
runs-on: ubuntu-latest
permissions:
packages: write
steps:
- uses: dataaxiom/ghcr-cleanup-action@v1
with:
dry-run: false
package: homebox
delete-ghost-images: true
delete-orphaned-images: true
delete-partial-images: true
delete-untagged: true
# Make sure to update this to include the latest major tags
exclude-tags: main,vnext,latest,0.*,1.*
older-than: 3 months


@@ -1,184 +0,0 @@
name: Docker publish rootless
on:
schedule:
- cron: '00 0 * * *'
push:
branches: [ "main" ]
paths:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile'
- 'Dockerfile.rootless'
- '.dockerignore'
- '.github/workflows/**'
tags: [ 'v*.*.*' ]
pull_request:
branches: [ "main" ]
paths:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile'
- 'Dockerfile.rootless'
- '.dockerignore'
- '.github/workflows/**'
env:
DOCKERHUB_REPO: sysadminsmedia/homebox
GHCR_REPO: ghcr.io/sysadminsmedia/homebox
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
attestations: write
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
- linux/arm/v7
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
branch=${{ github.event.pull_request.number || github.ref_name }}
echo "BRANCH=${branch//\//-}" >> $GITHUB_ENV
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ env.DOCKERHUB_REPO }}
${{ env.GHCR_REPO }}
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GHCR
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: ghcr.io/amitie10g/binfmt:latest
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver-opts: |
image=ghcr.io/amitie10g/buildkit:master
- name: Build and push by digest
id: build
uses: docker/build-push-action@v6
with:
context: . # Explicitly specify the build context
file: ./Dockerfile.rootless # Explicitly specify the Dockerfile
platforms: ${{ matrix.platform }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,"name=${{ env.DOCKERHUB_REPO }},${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=true
cache-from: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR }}-${{ env.BRANCH }}-rootless
cache-to: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR }}-${{ env.BRANCH }}-rootless,mode=max
build-args: |
VERSION=${{ github.ref_name }}
COMMIT=${{ github.sha }}
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: digests-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
attestations: write
needs:
- build
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GHCR
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver-opts: |
image=ghcr.io/amitie10g/buildkit:master
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ env.DOCKERHUB_REPO }}
${{ env.GHCR_REPO }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=schedule,pattern=nightly
flavor: |
suffix=-rootless,onlatest=true
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.DOCKERHUB_REPO }}@sha256:%s ' *)
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.DOCKERHUB_REPO }}:${{ steps.meta.outputs.version }}
docker buildx imagetools inspect ${{ env.GHCR_REPO }}:${{ steps.meta.outputs.version }}


@@ -1,178 +0,0 @@
name: Docker publish
on:
schedule:
- cron: '00 0 * * *'
push:
branches: [ "main" ]
paths:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile'
- '.dockerignore'
- '.github/workflows/**'
tags: [ 'v*.*.*' ]
pull_request:
branches: [ "main" ]
paths:
- 'backend/**'
- 'frontend/**'
- 'Dockerfile'
- '.dockerignore'
- '.github/workflows/**'
env:
DOCKERHUB_REPO: sysadminsmedia/homebox
GHCR_REPO: ghcr.io/sysadminsmedia/homebox
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read # Allows access to repository contents (read-only)
packages: write # Allows pushing to GHCR
id-token: write # Allows identity token write access for authentication
attestations: write # Needed for signing and attestation (if required)
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
- linux/arm/v7
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
branch=${{ github.event.pull_request.number || github.ref_name }}
echo "BRANCH=${branch//\//-}" >> $GITHUB_ENV
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ env.DOCKERHUB_REPO }}
${{ env.GHCR_REPO }}
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GHCR
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: ghcr.io/amitie10g/binfmt:latest
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver-opts: |
image=ghcr.io/amitie10g/buildkit:master
- name: Build and push by digest
id: build
uses: docker/build-push-action@v6
with:
platforms: ${{ matrix.platform }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,"name=${{ env.DOCKERHUB_REPO }},${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=true
cache-from: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR }}-${{ env.BRANCH }}
cache-to: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR}}-${{ env.BRANCH }},mode=max
build-args: |
VERSION=${{ github.ref_name }}
COMMIT=${{ github.sha }}
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: digests-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
runs-on: ubuntu-latest
permissions:
contents: read # Allows access to repository contents (read-only)
packages: write # Allows pushing to GHCR
id-token: write # Allows identity token write access for authentication
attestations: write # Needed for signing and attestation (if required)
needs:
- build
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GHCR
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver-opts: |
image=ghcr.io/amitie10g/buildkit:master
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ env.DOCKERHUB_REPO }}
${{ env.GHCR_REPO }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=schedule,pattern=nightly
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.DOCKERHUB_REPO }}@sha256:%s ' *)
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.DOCKERHUB_REPO }}:${{ steps.meta.outputs.version }}
docker buildx imagetools inspect ${{ env.GHCR_REPO }}:${{ steps.meta.outputs.version }}


@@ -7,12 +7,12 @@ jobs:
   Go:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Set up Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@v4
         with:
-          go-version: "1.21"
+          go-version: "1.20"
       - name: Install Task
         uses: arduino/setup-task@v1
@@ -20,7 +20,7 @@ jobs:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
       - name: golangci-lint
-        uses: golangci/golangci-lint-action@v4
+        uses: golangci/golangci-lint-action@v3
         with:
           # Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
           version: latest
@@ -34,8 +34,3 @@ jobs:
       - name: Test
         run: task go:coverage
-      - name: Validate OpenAPI definition
-        uses: swaggerexpert/swagger-editor-validate@v1
-        with:
-          definition-file: backend/app/api/static/docs/swagger.json


@@ -9,13 +9,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
         with:
           fetch-depth: 0
-      - uses: pnpm/action-setup@v3.0.0
+      - uses: pnpm/action-setup@v2.2.4
         with:
-          version: 9.12.2
+          version: 6.0.2
       - name: Install dependencies
         run: pnpm install --shamefully-hoist
@@ -34,7 +34,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
         with:
           fetch-depth: 0
@@ -44,17 +44,17 @@ jobs:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
       - name: Set up Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@v4
         with:
-          go-version: "1.21"
+          go-version: "1.20"
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
         with:
           node-version: 18
-      - uses: pnpm/action-setup@v3.0.0
+      - uses: pnpm/action-setup@v2.2.4
         with:
-          version: 9.12.2
+          version: 6.0.2
       - name: Install dependencies
         run: pnpm install

.github/workflows/partial-publish.yaml vendored Normal file

@@ -0,0 +1,89 @@
name: Frontend / E2E
on:
workflow_call:
inputs:
tag:
required: true
type: string
release:
required: false
type: boolean
default: false
secrets:
GH_TOKEN:
required: true
jobs:
publish:
name: "Publish Homebox"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Go
uses: actions/setup-go@v4
with:
go-version: "1.20"
- name: Set up QEMU
id: qemu
uses: docker/setup-qemu-action@v2
with:
image: tonistiigi/binfmt:latest
platforms: all
- name: install buildx
id: buildx
uses: docker/setup-buildx-action@v2
with:
install: true
- name: login to container registry
run: docker login ghcr.io --username hay-kot --password $CR_PAT
env:
CR_PAT: ${{ secrets.GH_TOKEN }}
- name: build nightly image
if: ${{ inputs.release == false }}
run: |
docker build --push --no-cache \
--tag=ghcr.io/hay-kot/homebox:${{ inputs.tag }} \
--build-arg=COMMIT=$(git rev-parse HEAD) \
--build-arg=BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
--platform=linux/amd64,linux/arm64,linux/arm/v7 .
- name: build nightly-rootless image
if: ${{ inputs.release == false }}
run: |
docker build --push --no-cache \
--tag=ghcr.io/hay-kot/homebox:${{ inputs.tag }}-rootless \
--build-arg=COMMIT=$(git rev-parse HEAD) \
--build-arg=BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
--file Dockerfile.rootless \
--platform=linux/amd64,linux/arm64,linux/arm/v7 .
- name: build release tagged the image
if: ${{ inputs.release == true }}
run: |
docker build --push --no-cache \
--tag ghcr.io/hay-kot/homebox:nightly \
--tag ghcr.io/hay-kot/homebox:latest \
--tag ghcr.io/hay-kot/homebox:${{ inputs.tag }} \
--build-arg VERSION=${{ inputs.tag }} \
--build-arg COMMIT=$(git rev-parse HEAD) \
--build-arg BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
--platform linux/amd64,linux/arm64,linux/arm/v7 .
- name: build release tagged the rootless image
if: ${{ inputs.release == true }}
run: |
docker build --push --no-cache \
--tag ghcr.io/hay-kot/homebox:nightly-rootless \
--tag ghcr.io/hay-kot/homebox:latest-rootless \
--tag ghcr.io/hay-kot/homebox:${{ inputs.tag }}-rootless \
--build-arg VERSION=${{ inputs.tag }} \
--build-arg COMMIT=$(git rev-parse HEAD) \
--build-arg BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
--platform linux/amd64,linux/arm64,linux/arm/v7 \
--file Dockerfile.rootless .

.github/workflows/publish.yaml vendored Normal file

@@ -0,0 +1,29 @@
name: Publish Dockers
on:
push:
branches:
- main
env:
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
jobs:
deploy:
name: "Deploy Nightly to Fly.io"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: superfly/flyctl-actions/setup-flyctl@master
- run: flyctl deploy --remote-only
publish-nightly:
name: "Publish Nightly"
if: github.event_name != 'release'
uses: hay-kot/homebox/.github/workflows/partial-publish.yaml@main
with:
tag: nightly
secrets:
GH_TOKEN: ${{ secrets.CR_PAT }}


@@ -5,10 +5,6 @@ on:
     branches:
       - main
-    paths:
-      - 'backend/**'
-      - 'frontend/**'
 jobs:
   backend-tests:
     name: "Backend Server Tests"

.github/workflows/tag.yaml vendored Normal file

@@ -0,0 +1,77 @@
name: Publish Release
on:
push:
tags:
- v*
env:
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
jobs:
backend-tests:
name: "Backend Server Tests"
uses: hay-kot/homebox/.github/workflows/partial-backend.yaml@main
frontend-tests:
name: "Frontend and End-to-End Tests"
uses: hay-kot/homebox/.github/workflows/partial-frontend.yaml@main
goreleaser:
name: goreleaser
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Go
uses: actions/setup-go@v4
- uses: pnpm/action-setup@v2
with:
version: 7.30.1
- name: Build Frontend and Copy to Backend
working-directory: frontend
run: |
pnpm install --shamefully-hoist
pnpm run build
cp -r ./.output/public ../backend/app/api/static/
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v4
with:
workdir: "backend"
distribution: goreleaser
version: latest
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
publish-tag:
name: "Publish Tag"
uses: hay-kot/homebox/.github/workflows/partial-publish.yaml@main
with:
release: true
tag: ${{ github.ref_name }}
secrets:
GH_TOKEN: ${{ secrets.CR_PAT }}
deploy-docs:
name: Deploy docs
needs:
- publish-tag
- goreleaser
runs-on: ubuntu-latest
steps:
- name: Checkout main
uses: actions/checkout@v3
- name: Deploy docs
uses: mhausenblas/mkdocs-deploy-gh-pages@master
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CONFIG_FILE: docs/mkdocs.yml
EXTRA_PACKAGES: build-base


@@ -1,100 +0,0 @@
name: Update Currencies
on:
push:
branches:
- main
jobs:
update-currencies:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.8'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install requests
- name: Run currency fetch script
run: python .github/scripts/update_currencies.py
- name: Check for changes
id: check_changes
run: |
if [[ $(git status --porcelain) ]]; then
echo "Changes detected."
echo "changes=true" >> $GITHUB_ENV
else
echo "No changes detected."
echo "changes=false" >> $GITHUB_ENV
fi
- name: Delete existing update-currencies branch
run: |
if git show-ref --verify --quiet refs/heads/update-currencies; then
git branch -D update-currencies
echo "Deleted existing update-currencies branch."
else
echo "No existing update-currencies branch to delete."
fi
- name: Create new update-currencies branch
if: env.changes == 'true'
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
# Create a new branch
git checkout -b update-currencies
git add backend/internal/core/currencies/currencies.json
git commit -m "Update currencies.json"
# Fetch the latest changes from the remote
git fetch origin
# Attempt to rebase with the latest changes
if git show-ref --verify --quiet refs/remotes/origin/update-currencies; then
if ! git rebase origin/update-currencies; then
echo "Rebase conflicts occurred. Please resolve them manually."
echo "To resolve conflicts, check out the 'update-currencies' branch locally."
exit 1
fi
else
echo "No existing remote branch 'update-currencies'. Skipping rebase."
fi
# Push the new branch to the remote
if ! git push --set-upstream origin update-currencies; then
echo "Push failed, trying to fetch and rebase again."
git fetch origin
if git show-ref --verify --quiet refs/remotes/origin/update-currencies; then
if ! git rebase origin/update-currencies; then
echo "Second rebase failed. Please resolve manually."
exit 1
fi
else
echo "No existing remote branch 'update-currencies'. Skipping rebase."
fi
if ! git push --set-upstream origin update-currencies; then
echo "Second push failed. Please resolve manually."
exit 1
fi
fi
# Create a pull request
curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
-X POST \
-d '{"title": "Update currencies", "head": "update-currencies", "base": "main"}' \
https://api.github.com/repos/${{ github.repository }}/pulls
- name: Notify no changes
if: env.changes == 'false'
run: echo "Currencies up-to-date with API, skipping commit."

.gitignore vendored

@@ -33,7 +33,7 @@ go.work
 backend/.env
 build/*
-# Output Directory for Nuxt/Frontend during build steps
+# Output Directory for Nuxt/Frontend during build step
 backend/app/api/public/*
 !backend/app/api/public/.gitkeep
@@ -48,13 +48,9 @@ dist
 .pnpm-store
 backend/app/api/app
-backend/app/api/__debug_bin*
+backend/app/api/__debug_bin
 dist/
 # Nuxt Publish Dir
 backend/app/api/static/public/*
 !backend/app/api/static/public/.gitkeep
-backend/api
-docs/.vitepress/cache/
-/.data/


@@ -3,7 +3,7 @@ package schema
 import (
 	"entgo.io/ent"
-	"github.com/sysadminsmedia/homebox/backend/internal/data/ent/schema/mixins"
+	"github.com/hay-kot/homebox/backend/internal/data/ent/schema/mixins"
 )
 type {{ .Scaffold.model }} struct {

.vscode/launch.json vendored

@@ -23,9 +23,8 @@
"HBOX_LOG_LEVEL": "debug", "HBOX_LOG_LEVEL": "debug",
"HBOX_DEBUG_ENABLED": "true", "HBOX_DEBUG_ENABLED": "true",
"HBOX_STORAGE_DATA": "${workspaceRoot}/backend/.data", "HBOX_STORAGE_DATA": "${workspaceRoot}/backend/.data",
"HBOX_STORAGE_SQLITE_URL": "${workspaceRoot}/backend/.data/homebox.db?_fk=1&_time_format=sqlite" "HBOX_STORAGE_SQLITE_URL": "${workspaceRoot}/backend/.data/homebox.db?_fk=1"
}, },
"console": "integratedTerminal",
}, },
{ {
"name": "Launch Frontend", "name": "Launch Frontend",
@@ -39,11 +38,10 @@
"cwd": "${workspaceFolder}/frontend", "cwd": "${workspaceFolder}/frontend",
"serverReadyAction": { "serverReadyAction": {
"action": "debugWithChrome", "action": "debugWithChrome",
"pattern": "Local: +http://localhost:([0-9]+)", "pattern": "Local: http://localhost:([0-9]+)",
"uriFormat": "http://localhost:%s", "uriFormat": "http://localhost:%s",
"webRoot": "${workspaceFolder}/frontend" "webRoot": "${workspaceFolder}/frontend"
}, }
"console": "integratedTerminal",
} }
] ]
} }


@@ -16,7 +16,7 @@
"editor.formatOnSave": false, "editor.formatOnSave": false,
"editor.defaultFormatter": "dbaeumer.vscode-eslint", "editor.defaultFormatter": "dbaeumer.vscode-eslint",
"editor.codeActionsOnSave": { "editor.codeActionsOnSave": {
"source.fixAll.eslint": "explicit" "source.fixAll.eslint": true
}, },
"[typescript]": { "[typescript]": {
"editor.defaultFormatter": "dbaeumer.vscode-eslint" "editor.defaultFormatter": "dbaeumer.vscode-eslint"


@@ -1,128 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
[support@sysadminemedia.com](mailto:support@sysadminemedia.com).
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
[Contributor Covenant Code of Conduct](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html).
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
[FAQ](https://www.contributor-covenant.org/faq). Translations are available at
[Translations](https://www.contributor-covenant.org/translations).


@@ -1,16 +1,16 @@
 # Contributing
-## We Develop with GitHub
-We use GitHub to host code, to track issues and feature requests, as well as accept pull requests.
+## We Develop with Github
+We use github to host code, to track issues and feature requests, as well as accept pull requests.
 ## Branch Flow
-We use the `main` branch as the development branch. All PRs should be made to the `main` branch from a feature branch. To create a pull request, you can use the following steps:
+We use the `main` branch as the development branch. All PRs should be made to the `main` branch from a feature branch. To create a pull request you can use the following steps:
 1. Fork the repository and create a new branch from `main`.
 2. If you've added code that should be tested, add tests.
-3. If you've changed APIs, update the documentation.
+3. If you've changed API's, update the documentation.
 4. Ensure that the test suite and linters pass
 5. Issue your pull request
@@ -18,7 +18,7 @@ We use the `main` branch as the development branch. All PRs should be made to th
 ### Prerequisites
-There is a devcontainer available for this project. If you are using VSCode, you can use the devcontainer to get started. If you are not using VSCode, you need to ensure that you have the following tools installed:
+There is a devcontainer available for this project. If you are using VSCode, you can use the devcontainer to get started. If you are not using VSCode, you can need to ensure that you have the following tools installed:
 - [Go 1.19+](https://golang.org/doc/install)
 - [Swaggo](https://github.com/swaggo/swag)
@@ -31,27 +31,27 @@ If you're using `taskfile` you can run `task --list-all` for a list of all comma
 ### Setup
-If you're using the taskfile, you can use the `task setup` command to run the required setup commands. Otherwise, you can review the commands required in the `Taskfile.yml` file.
+If you're using the taskfile you can use the `task setup` command to run the required setup commands. Otherwise you can review the commands required in the `Taskfile.yml` file.
-Note that when installing dependencies with pnpm you must use the `--shamefully-hoist` flag. If you don't use this flag, you will get an error when running the frontend server.
+Note that when installing dependencies with pnpm you must use the `--shamefully-hoist` flag. If you don't use this flag you will get an error when running the the frontend server.
 ### API Development Notes
 start command `task go:run`
 1. API Server does not auto reload. You'll need to restart the server after making changes.
-2. Unit tests should be written in Go, however, end-to-end or user story tests should be written in TypeScript using the client library in the frontend directory.
+2. Unit tests should be written in Go, however end-to-end or user story tests should be written in TypeScript using the client library in the frontend directory.
 ### Frontend Development Notes
-start command `task ui:dev`
+start command `task: ui:dev`
 1. The frontend is a Vue 3 app with Nuxt.js that uses Tailwind and DaisyUI for styling.
-2. We're using Vitest for our automated testing. You can run these with `task ui:watch`.
+2. We're using Vitest for our automated testing. you can run these with `task ui:watch`.
-3. Tests require the API server to be running, and in some cases the first run will fail due to a race condition. If this happens, just run the tests again and they should pass.
+3. Tests require the API server to be running and in some cases the first run will fail due to a race condition. If this happens just run the tests again and they should pass.
 ## Publishing Release
-Create a new tag in GitHub with the version number vX.X.X. This will trigger a new release to be created.
+Create a new tag in github with the version number vX.X.X. This will trigger a new release to be created.
 Test -> Goreleaser -> Publish Release -> Trigger Docker Builds -> Deploy Docs + Fly.io Demo


@@ -1,91 +1,50 @@
# Node dependencies stage
FROM public.ecr.aws/docker/library/node:18-alpine AS frontend-dependencies
WORKDIR /app
# Install pnpm globally (caching layer) # Build Nuxt
FROM node:18-alpine as frontend-builder
WORKDIR /app
RUN npm install -g pnpm RUN npm install -g pnpm
# Copy package.json and lockfile to leverage caching
COPY frontend/package.json frontend/pnpm-lock.yaml ./ COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist RUN pnpm install --frozen-lockfile --shamefully-hoist
COPY frontend .
# Build Nuxt (frontend) stage
FROM public.ecr.aws/docker/library/node:18-alpine AS frontend-builder
WORKDIR /app
# Install pnpm globally again (it can reuse the cache if not changed)
RUN npm install -g pnpm
# Copy over source files and node_modules from dependencies stage
COPY frontend .
COPY --from=frontend-dependencies /app/node_modules ./node_modules
RUN pnpm build RUN pnpm build
# Go dependencies stage # Build API
FROM public.ecr.aws/docker/library/golang:alpine AS builder-dependencies FROM golang:alpine AS builder
WORKDIR /go/src/app
# Copy go.mod and go.sum for better caching
COPY ./backend/go.mod ./backend/go.sum ./
RUN go mod download
# Build API stage
FROM public.ecr.aws/docker/library/golang:alpine AS builder
ARG BUILD_TIME ARG BUILD_TIME
ARG COMMIT ARG COMMIT
ARG VERSION ARG VERSION
# Install necessary build tools
RUN apk update && \ RUN apk update && \
apk upgrade && \ apk upgrade && \
apk add --no-cache git build-base gcc g++ apk add --update git build-base gcc g++
WORKDIR /go/src/app WORKDIR /go/src/app
# Copy Go modules (from dependencies stage) and source code
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
COPY ./backend . COPY ./backend .
RUN go get -d -v ./...
# Clear old public files and copy new ones from frontend build
RUN rm -rf ./app/api/public RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public COPY --from=frontend-builder /app/.output/public ./app/api/static/public
RUN CGO_ENABLED=0 GOOS=linux go build \
# Use cache for Go build artifacts -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
RUN --mount=type=cache,target=/root/.cache/go-build \
CGO_ENABLED=0 GOOS=linux go build \
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
-o /go/bin/api \ -o /go/bin/api \
-v ./app/api/*.go -v ./app/api/*.go
# Production stage # Production Stage
FROM public.ecr.aws/docker/library/alpine:latest FROM alpine:latest
ENV HBOX_MODE=production ENV HBOX_MODE=production
ENV HBOX_STORAGE_DATA=/data/ ENV HBOX_STORAGE_DATA=/data/
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_fk=1
# Install necessary runtime dependencies RUN apk --no-cache add ca-certificates
RUN apk --no-cache add ca-certificates wget
# Create application directory and copy over built Go binary
RUN mkdir /app RUN mkdir /app
COPY --from=builder /go/bin/api /app COPY --from=builder /go/bin/api /app
RUN chmod +x /app/api RUN chmod +x /app/api
# Labels and configuration for the final image
LABEL Name=homebox Version=0.0.1 LABEL Name=homebox Version=0.0.1
LABEL org.opencontainers.image.source="https://github.com/sysadminsmedia/homebox" LABEL org.opencontainers.image.source="https://github.com/hay-kot/homebox"
# Expose necessary ports for Homebox
EXPOSE 7745 EXPOSE 7745
WORKDIR /app WORKDIR /app
# Healthcheck configuration
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
CMD [ "wget", "--no-verbose", "--tries=1", "-O", "-", "http://localhost:7745/api/v1/status" ]
# Persist volume
VOLUME [ "/data" ] VOLUME [ "/data" ]
# Entrypoint and CMD
ENTRYPOINT [ "/app/api" ] ENTRYPOINT [ "/app/api" ]
CMD [ "/data/config.yml" ] CMD [ "/data/config.yml" ]


@@ -1,96 +1,53 @@
# Node dependencies stage
FROM public.ecr.aws/docker/library/node:18-alpine AS frontend-dependencies
WORKDIR /app
# Install pnpm globally (caching layer) # Build Nuxt
FROM node:17-alpine as frontend-builder
WORKDIR /app
RUN npm install -g pnpm RUN npm install -g pnpm
# Copy package.json and lockfile to leverage caching
COPY frontend/package.json frontend/pnpm-lock.yaml ./ COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist RUN pnpm install --frozen-lockfile --shamefully-hoist
# Build Nuxt (frontend) stage
FROM public.ecr.aws/docker/library/node:18-alpine AS frontend-builder
WORKDIR /app
# Install pnpm globally again (it can reuse the cache if not changed)
RUN npm install -g pnpm
# Copy over source files and node_modules from dependencies stage
COPY frontend . COPY frontend .
COPY --from=frontend-dependencies /app/node_modules ./node_modules
RUN pnpm build RUN pnpm build
# Go dependencies stage # Build API
FROM public.ecr.aws/docker/library/golang:alpine AS builder-dependencies FROM golang:alpine AS builder
WORKDIR /go/src/app
# Copy go.mod and go.sum for better caching
COPY ./backend/go.mod ./backend/go.sum ./
RUN go mod download
# Build API stage
FROM public.ecr.aws/docker/library/golang:alpine AS builder
ARG BUILD_TIME ARG BUILD_TIME
ARG COMMIT ARG COMMIT
ARG VERSION ARG VERSION
# Install necessary build tools
RUN apk update && \ RUN apk update && \
apk upgrade && \ apk upgrade && \
apk add --no-cache git build-base gcc g++ apk add --update git build-base gcc g++
WORKDIR /go/src/app WORKDIR /go/src/app
# Copy Go modules (from dependencies stage) and source code
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
COPY ./backend . COPY ./backend .
RUN go get -d -v ./...
# Clear old public files and copy new ones from frontend build
RUN rm -rf ./app/api/public RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public COPY --from=frontend-builder /app/.output/public ./app/api/static/public
RUN CGO_ENABLED=0 GOOS=linux go build \
# Use cache for Go build artifacts -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
RUN --mount=type=cache,target=/root/.cache/go-build \
CGO_ENABLED=0 GOOS=linux go build \
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
-o /go/bin/api \ -o /go/bin/api \
-v ./app/api/*.go -v ./app/api/*.go && \
chmod +x /go/bin/api && \
# create a directory so that we can copy it in the next stage
mkdir /data
RUN mkdir /data # Production Stage
FROM gcr.io/distroless/static
# Production stage
FROM public.ecr.aws/docker/library/alpine:latest
ENV HBOX_MODE=production ENV HBOX_MODE=production
ENV HBOX_STORAGE_DATA=/data/ ENV HBOX_STORAGE_DATA=/data/
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_fk=1
# Install necessary runtime dependencies # Copy the binary and the (empty) /data dir and
RUN apk --no-cache add ca-certificates wget # change the ownership to the low-privileged user
RUN addgroup -S nonroot && adduser -S nonroot -G nonroot
# Create application directory and copy over built Go binary
RUN mkdir /app
COPY --from=builder --chown=nonroot /go/bin/api /app COPY --from=builder --chown=nonroot /go/bin/api /app
COPY --from=builder --chown=nonroot /data /data COPY --from=builder --chown=nonroot /data /data
RUN chmod +x /app/api
# Labels and configuration for the final image
LABEL Name=homebox Version=0.0.1 LABEL Name=homebox Version=0.0.1
LABEL org.opencontainers.image.source="https://github.com/sysadminsmedia/homebox" LABEL org.opencontainers.image.source="https://github.com/hay-kot/homebox"
# Expose necessary ports for Homebox
EXPOSE 7745 EXPOSE 7745
WORKDIR /app
# Healthcheck configuration
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
CMD [ "wget", "--no-verbose", "--tries=1", "-O", "-", "http://localhost:7745/api/v1/status" ]
# Persist volume
VOLUME [ "/data" ] VOLUME [ "/data" ]
# Entrypoint and CMD # Drop root and run as low-privileged user
USER nonroot USER nonroot
ENTRYPOINT [ "/app/api" ] ENTRYPOINT [ "/app" ]
CMD [ "/data/config.yml" ] CMD [ "/data/config.yml" ]


@@ -1,34 +1,19 @@
 <div align="center">
-  <img src="/docs/public/lilbox.svg" height="200"/>
+  <img src="/docs/docs/assets/img/lilbox.svg" height="200"/>
 </div>
 <h1 align="center" style="margin-top: -10px"> HomeBox </h1>
 <p align="center" style="width: 100;">
-  <a href="https://homebox.software/en/">Docs</a>
+  <a href="https://hay-kot.github.io/homebox/">Docs</a>
   |
-  <a href="https://demo.homebox.software">Demo</a>
+  <a href="https://homebox.fly.dev">Demo</a>
   |
-  <a href="https://discord.gg/aY4DCkpNA9">Discord</a>
+  <a href="https://discord.gg/tuncmNrE4z">Discord</a>
 </p>
-## What is HomeBox
-Homebox is the inventory and organization system built for the Home User! With a focus on simplicity and ease of use, Homebox is the perfect solution for your home inventory, organization, and management needs. While developing this project, I've tried to keep the following principles in mind:
-- _Simple_ - Homebox is designed to be simple and easy to use. No complicated setup or configuration required. Use either a single docker container, or deploy yourself by compiling the binary for your platform of choice.
-- _Blazingly Fast_ - Homebox is written in Go, which makes it extremely fast and requires minimal resources to deploy. In general, idle memory usage is less than 50MB for the whole container.
-- _Portable_ - Homebox is designed to be portable and run on anywhere. We use SQLite and an embedded Web UI to make it easy to deploy, use, and backup.
-# Screenshots
-Check out screenshots of the project [here](https://imgur.com/a/5gLWt2j).
-You can also try the demo instances of Homebox:
-- [Demo](https://demo.homebox.software)
-- [Nightly](https://nightly.homebox.software)
-- [VNext](https://vnext.homebox.software/)
 ## Quick Start
-[Configuration & Docker Compose](https://homebox.software/en/quick-start.html)
+[Configuration & Docker Compose](https://hay-kot.github.io/homebox/quick-start)
 ```bash
 # If using the rootless image, ensure data
@@ -41,24 +26,10 @@ docker run -d \
   --publish 3100:7745 \
   --env TZ=Europe/Bucharest \
   --volume /path/to/data/folder/:/data \
-  ghcr.io/sysadminsmedia/homebox:latest
+  ghcr.io/hay-kot/homebox:latest
-# ghcr.io/sysadminsmedia/homebox:latest-rootless
+# ghcr.io/hay-kot/homebox:latest-rootless
 ```
-<!-- CONTRIBUTING -->
-## Contributing
-Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.
-If you are not a coder, you can still contribute financially. Financial contributions help me prioritize working on this project over others and helps me know that there is a real demand for project development.
-## Help us Translate
-We want to make sure that Homebox is available in as many languages as possible. If you are interested in helping us translate Homebox, please help us via our [Weblate instance](https://translate.sysadminsmedia.com/projects/homebox/).
-[![Translation status](http://translate.sysadminsmedia.com/widget/homebox/multi-auto.svg)](http://translate.sysadminsmedia.com/engage/homebox/)
 ## Credits
-- Original project by [@hay-kot](https://github.com/hay-kot)
 - Logo by [@lakotelman](https://github.com/lakotelman)


@@ -6,6 +6,4 @@ Since this software is still considered beta/WIP support is always only given fo
 ## Reporting a Vulnerability
-Please open a normal public issue for minor security issues or general security inquires.
-For major or critical security issues, please open a private github security issue.
+Please open a normal public issue if you have any security related concerns.


@@ -1,8 +1,7 @@
version: "3" version: "3"
env: env:
HBOX_LOG_LEVEL: debug HBOX_STORAGE_SQLITE_URL: .data/homebox.db?_pragma=busy_timeout=1000&_pragma=journal_mode=WAL&_fk=1
HBOX_STORAGE_SQLITE_URL: .data/homebox.db?_pragma=busy_timeout=1000&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite
HBOX_OPTIONS_ALLOW_REGISTRATION: true HBOX_OPTIONS_ALLOW_REGISTRATION: true
UNSAFE_DISABLE_PASSWORD_PROJECTION: "yes_i_am_sure" UNSAFE_DISABLE_PASSWORD_PROJECTION: "yes_i_am_sure"
tasks: tasks:
@@ -13,24 +12,15 @@ tasks:
- cd backend && go mod tidy - cd backend && go mod tidy
- cd frontend && pnpm install --shamefully-hoist - cd frontend && pnpm install --shamefully-hoist
swag: generate:
desc: Generate swagger docs desc: |
dir: backend/app/api/static/ Generates collateral files from the backend project
vars: including swagger docs and typescripts type for the frontend
API: "../" deps:
INTERNAL: "../../../internal" - db:generate
PKGS: "../../../pkgs"
cmds:
- swag fmt --dir={{ .API }}
- swag init --dir={{ .API }},{{ .INTERNAL }}/core/services,{{ .INTERNAL }}/data/repo --parseDependency
sources:
- "./backend/app/api/**/*"
- "./backend/internal/data/**"
- "./backend/internal/core/services/**/*"
- "./backend/app/tools/typegen/main.go"
typescript-types:
desc: Generates typescript types from swagger definition
cmds: cmds:
- cd backend/app/api/static && swag fmt --dir=../
- cd backend/app/api/static && swag init --dir=../,../../../internal,../../../pkgs
- | - |
npx swagger-typescript-api \ npx swagger-typescript-api \
--no-client \ --no-client \
@@ -38,21 +28,14 @@ tasks:
--path ./backend/app/api/static/docs/swagger.json \ --path ./backend/app/api/static/docs/swagger.json \
--output ./frontend/lib/api/types --output ./frontend/lib/api/types
- go run ./backend/app/tools/typegen/main.go ./frontend/lib/api/types/data-contracts.ts - go run ./backend/app/tools/typegen/main.go ./frontend/lib/api/types/data-contracts.ts
sources:
- ./backend/app/tools/typegen/main.go
- ./backend/app/api/static/docs/swagger.json
generate:
deps:
- db:generate
cmds:
- task: swag
- task: typescript-types
- cp ./backend/app/api/static/docs/swagger.json docs/docs/api/openapi-2.0.json - cp ./backend/app/api/static/docs/swagger.json docs/docs/api/openapi-2.0.json
sources:
- "./backend/app/api/**/*"
- "./backend/internal/data/**"
- "./backend/internal/core/services/**/*"
- "./backend/app/tools/typegen/main.go"
go:run: go:run:
env:
HBOX_DEMO: true
desc: Starts the backend api server (depends on generate task) desc: Starts the backend api server (depends on generate task)
dir: backend dir: backend
deps: deps:
@@ -104,7 +87,8 @@ tasks:
dir: backend/internal/ dir: backend/internal/
cmds: cmds:
- | - |
go generate ./... go generate ./... \
--template=./data/ent/schema/templates/has_id.tmpl
sources: sources:
- "./backend/internal/data/ent/schema/**/*" - "./backend/internal/data/ent/schema/**/*"


@@ -1,74 +0,0 @@
run:
timeout: 10m
linters-settings:
goconst:
min-len: 5
min-occurrences: 5
exhaustive:
default-signifies-exhaustive: true
revive:
ignore-generated-header: false
severity: warning
confidence: 3
depguard:
rules:
main:
deny:
- pkg: io/util
desc: |
Deprecated: As of Go 1.16, the same functionality is now provided by
package io or package os, and those implementations should be
preferred in new code. See the specific function documentation for
details.
gocritic:
enabled-checks:
- ruleguard
testifylint:
enable-all: true
tagalign:
order:
- json
- schema
- yaml
- yml
- toml
- validate
linters:
disable-all: true
enable:
- asciicheck
- bodyclose
- depguard
- dogsled
- errcheck
- errorlint
- exhaustive
- copyloopvar
- gochecknoinits
- goconst
- gocritic
- gocyclo
- gofmt
- goprintffuncname
- gosimple
- govet
- ineffassign
- misspell
- nakedret
- revive
- staticcheck
- stylecheck
- tagalign
- testifylint
- typecheck
- typecheck
- unconvert
- unused
- whitespace
- zerologlint
- sqlclosecheck
issues:
exclude-use-default: false
exclude-dirs:
- internal/data/ent.*
fix: true

backend/api Executable file

Binary file not shown.


@@ -1,18 +1,22 @@
package main package main
import ( import (
"github.com/sysadminsmedia/homebox/backend/internal/core/services" "time"
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent" "github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo" "github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config" "github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/sysadminsmedia/homebox/backend/pkgs/mailer" "github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/config"
"github.com/hay-kot/homebox/backend/pkgs/mailer"
"github.com/hay-kot/httpkit/server"
) )
type app struct { type app struct {
conf *config.Config conf *config.Config
mailer mailer.Mailer mailer mailer.Mailer
db *ent.Client db *ent.Client
server *server.Server
repos *repo.AllRepos repos *repo.AllRepos
services *services.AllServices services *services.AllServices
bus *eventbus.EventBus bus *eventbus.EventBus
@@ -33,3 +37,13 @@ func new(conf *config.Config) *app {
return s return s
} }
func (a *app) startBgTask(t time.Duration, fn func()) {
timer := time.NewTimer(t)
for {
timer.Reset(t)
a.server.Background(fn)
<-timer.C
}
}
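A sketch of an assumed call site for the startBgTask helper added above (not part of this diff): it runs a function on a fixed interval inside a server-managed background goroutine.

	// Hypothetical usage from main(), where a is the *app built by new(conf);
	// the interval and the logging body are placeholders for real work.
	go a.startBgTask(time.Hour, func() {
		log.Debug().Msg("hourly background task fired")
	})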
View File
@@ -1,38 +0,0 @@
package main
import (
"context"
"time"
)
type BackgroundTask struct {
name string
Interval time.Duration
Fn func(context.Context)
}
func (tsk *BackgroundTask) Name() string {
return tsk.name
}
func NewTask(name string, interval time.Duration, fn func(context.Context)) *BackgroundTask {
return &BackgroundTask{
Interval: interval,
Fn: fn,
}
}
func (tsk *BackgroundTask) Start(ctx context.Context) error {
tsk.Fn(ctx)
timer := time.NewTimer(tsk.Interval)
for {
select {
case <-ctx.Done():
return nil
case <-timer.C:
timer.Reset(tsk.Interval)
tsk.Fn(ctx)
}
}
}
View File
@@ -2,27 +2,22 @@ package main
import ( import (
"context" "context"
"errors"
"strings" "strings"
"time"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
) )
func (a *app) SetupDemo() error { func (a *app) SetupDemo() {
csvText := `HB.import_ref,HB.location,HB.labels,HB.quantity,HB.name,HB.description,HB.insured,HB.serial_number,HB.model_number,HB.manufacturer,HB.notes,HB.purchase_from,HB.purchase_price,HB.purchase_time,HB.lifetime_warranty,HB.warranty_expires,HB.warranty_details,HB.sold_to,HB.sold_price,HB.sold_time,HB.sold_notes csvText := `HB.import_ref,HB.location,HB.labels,HB.quantity,HB.name,HB.description,HB.insured,HB.serial_number,HB.model_number,HB.manufacturer,HB.notes,HB.purchase_from,HB.purchase_price,HB.purchase_time,HB.lifetime_warranty,HB.warranty_expires,HB.warranty_details,HB.sold_to,HB.sold_price,HB.sold_time,HB.sold_notes
,Garage,IOT;Home Assistant; Z-Wave,1,Zooz Universal Relay ZEN17,"Zooz 700 Series Z-Wave Universal Relay ZEN17 for Awnings, Garage Doors, Sprinklers, and More | 2 NO-C-NC Relays (20A, 10A) | Signal Repeater | Hub Required (Compatible with SmartThings and Hubitat)",,,ZEN17,Zooz,,Amazon,39.95,10/13/2021,,,,,,, ,Garage,IOT;Home Assistant; Z-Wave,1,Zooz Universal Relay ZEN17,"Zooz 700 Series Z-Wave Universal Relay ZEN17 for Awnings, Garage Doors, Sprinklers, and More | 2 NO-C-NC Relays (20A, 10A) | Signal Repeater | Hub Required (Compatible with SmartThings and Hubitat)",,,ZEN17,Zooz,,Amazon,39.95,10/13/2021,,,,,,,
,Living Room,IOT;Home Assistant; Z-Wave,1,Zooz Motion Sensor,"Zooz Z-Wave Plus S2 Motion Sensor ZSE18 with Magnetic Mount, Works with Vera and SmartThings",,,ZSE18,Zooz,,Amazon,29.95,10/15/2021,,,,,,, ,Living Room,IOT;Home Assistant; Z-Wave,1,Zooz Motion Sensor,"Zooz Z-Wave Plus S2 Motion Sensor ZSE18 with Magnetic Mount, Works with Vera and SmartThings",,,ZSE18,Zooz,,Amazon,29.95,10/15/2021,,,,,,,
,Office,IOT;Home Assistant; Z-Wave,1,Zooz 110v Power Switch,"Zooz Z-Wave Plus Power Switch ZEN15 for 110V AC Units, Sump Pumps, Humidifiers, and More",,,ZEN15,Zooz,,Amazon,39.95,10/13/2021,,,,,,, ,Office,IOT;Home Assistant; Z-Wave,1,Zooz 110v Power Switch,"Zooz Z-Wave Plus Power Switch ZEN15 for 110V AC Units, Sump Pumps, Humidifiers, and More",,,ZEN15,Zooz,,Amazon,39.95,10/13/2021,,,,,,,
,Downstairs,IOT;Home Assistant; Z-Wave,1,Ecolink Z-Wave PIR Motion Sensor,"Ecolink Z-Wave PIR Motion Detector Pet Immune, White (PIRZWAVE2.5-ECO)",,,PIRZWAVE2.5-ECO,Ecolink,,Amazon,35.58,10/21/2020,,,,,,, ,Downstairs,IOT;Home Assistant; Z-Wave,1,Ecolink Z-Wave PIR Motion Sensor,"Ecolink Z-Wave PIR Motion Detector Pet Immune, White (PIRZWAVE2.5-ECO)",,,PIRZWAVE2.5-ECO,Ecolink,,Amazon,35.58,10/21/2020,,,,,,,
,Entry,IOT;Home Assistant; Z-Wave,1,Yale Security Touchscreen Deadbolt,"Yale Security YRD226-ZW2-619 YRD226ZW2619 Touchscreen Deadbolt, Satin Nickel",,,YRD226ZW2619,Yale,,Amazon,120.39,10/14/2020,,,,,,, ,Entry,IOT;Home Assistant; Z-Wave,1,Yale Security Touchscreen Deadbolt,"Yale Security YRD226-ZW2-619 YRD226ZW2619 Touchscreen Deadbolt, Satin Nickel",,,YRD226ZW2619,Yale,,Amazon,120.39,10/14/2020,,,,,,,
,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,"UltraPro Z-Wave Smart Rocker Light Dimmer with QuickFit and SimpleWire, 3-Way Ready, Compatible with Alexa, Google Assistant, ZWave Hub Required, Repeater/Range Extender, White Paddle Only, 39351",,,39351,Honeywell,,Amazon,65.98,09/30/0202,,,,,,, ,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,"UltraPro Z-Wave Smart Rocker Light Dimmer with QuickFit and SimpleWire, 3-Way Ready, Compatible with Alexa, Google Assistant, ZWave Hub Required, Repeater/Range Extender, White Paddle Only, 39351",,,39351,Honeywell,,Amazon,65.98,09/30/0202,,,,,,,
` `
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
registration := services.UserRegistration{ registration := services.UserRegistration{
Email: "demo@example.com", Email: "demo@example.com",
Name: "Demo", Name: "Demo",
@@ -30,38 +25,25 @@ func (a *app) SetupDemo() error {
} }
// First check if we've already setup a demo user and skip if so // First check if we've already setup a demo user and skip if so
log.Debug().Msg("Checking if demo user already exists") _, err := a.services.User.Login(context.Background(), registration.Email, registration.Password, false)
_, err := a.services.User.Login(ctx, registration.Email, registration.Password, false)
if err == nil { if err == nil {
log.Info().Msg("Demo user already exists, skipping setup") return
return nil
} }
log.Debug().Msg("Demo user does not exist, setting up demo") _, err = a.services.User.RegisterUser(context.Background(), registration)
_, err = a.services.User.RegisterUser(ctx, registration)
if err != nil { if err != nil {
log.Err(err).Msg("Failed to register demo user") log.Err(err).Msg("Failed to register demo user")
return errors.New("failed to setup demo") log.Fatal().Msg("Failed to setup demo")
} }
token, err := a.services.User.Login(ctx, registration.Email, registration.Password, false) token, _ := a.services.User.Login(context.Background(), registration.Email, registration.Password, false)
if err != nil { self, _ := a.services.User.GetSelf(context.Background(), token.Raw)
log.Err(err).Msg("Failed to login demo user")
return errors.New("failed to setup demo")
}
self, err := a.services.User.GetSelf(ctx, token.Raw)
if err != nil {
log.Err(err).Msg("Failed to get self")
return errors.New("failed to setup demo")
}
_, err = a.services.Items.CsvImport(ctx, self.GroupID, strings.NewReader(csvText)) _, err = a.services.Items.CsvImport(context.Background(), self.GroupID, strings.NewReader(csvText))
if err != nil { if err != nil {
log.Err(err).Msg("Failed to import CSV") log.Err(err).Msg("Failed to import CSV")
return errors.New("failed to setup demo") log.Fatal().Msg("Failed to setup demo")
} }
log.Info().Msg("Demo setup complete") log.Info().Msg("Demo setup complete")
return nil
} }
View File
@@ -1,4 +1,3 @@
// Package debughandlers provides handlers for debugging.
package debughandlers package debughandlers
import ( import (
View File
@@ -1,18 +1,16 @@
// Package v1 provides the API handlers for version 1 of the API.
package v1 package v1
import ( import (
"encoding/json" "fmt"
"net/http" "net/http"
"time"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server" "github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/olahol/melody" "github.com/olahol/melody"
) )
@@ -51,27 +49,13 @@ func WithRegistration(allowRegistration bool) func(*V1Controller) {
} }
} }
func WithSecureCookies(secure bool) func(*V1Controller) {
return func(ctrl *V1Controller) {
ctrl.cookieSecure = secure
}
}
func WithURL(url string) func(*V1Controller) {
return func(ctrl *V1Controller) {
ctrl.url = url
}
}
type V1Controller struct { type V1Controller struct {
cookieSecure bool
repo *repo.AllRepos repo *repo.AllRepos
svc *services.AllServices svc *services.AllServices
maxUploadSize int64 maxUploadSize int64
isDemo bool isDemo bool
allowRegistration bool allowRegistration bool
bus *eventbus.EventBus bus *eventbus.EventBus
url string
} }
type ( type (
@@ -83,18 +67,23 @@ type (
BuildTime string `json:"buildTime"` BuildTime string `json:"buildTime"`
} }
APISummary struct { ApiSummary struct {
Healthy bool `json:"health"` Healthy bool `json:"health"`
Versions []string `json:"versions"` Versions []string `json:"versions"`
Title string `json:"title"` Title string `json:"title"`
Message string `json:"message"` Message string `json:"message"`
Build Build `json:"build"` Build Build `json:"build"`
Latest services.Latest `json:"latest"` Demo bool `json:"demo"`
Demo bool `json:"demo"` AllowRegistration bool `json:"allowRegistration"`
AllowRegistration bool `json:"allowRegistration"`
} }
) )
func BaseUrlFunc(prefix string) func(s string) string {
return func(s string) string {
return prefix + "/v1" + s
}
}
func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, bus *eventbus.EventBus, options ...func(*V1Controller)) *V1Controller { func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, bus *eventbus.EventBus, options ...func(*V1Controller)) *V1Controller {
ctrl := &V1Controller{ ctrl := &V1Controller{
repo: repos, repo: repos,
@@ -112,46 +101,25 @@ func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, bus *event
// HandleBase godoc // HandleBase godoc
// //
// @Summary Application Info // @Summary Application Info
// @Tags Base // @Tags Base
// @Produce json // @Produce json
// @Success 200 {object} APISummary // @Success 200 {object} ApiSummary
// @Router /v1/status [GET] // @Router /v1/status [GET]
func (ctrl *V1Controller) HandleBase(ready ReadyFunc, build Build) errchain.HandlerFunc { func (ctrl *V1Controller) HandleBase(ready ReadyFunc, build Build) errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
return server.JSON(w, http.StatusOK, APISummary{ return server.JSON(w, http.StatusOK, ApiSummary{
Healthy: ready(), Healthy: ready(),
Title: "Homebox", Title: "Homebox",
Message: "Track, Manage, and Organize your Things", Message: "Track, Manage, and Organize your Things",
Build: build, Build: build,
Latest: ctrl.svc.BackgroundService.GetLatestVersion(),
Demo: ctrl.isDemo, Demo: ctrl.isDemo,
AllowRegistration: ctrl.allowRegistration, AllowRegistration: ctrl.allowRegistration,
}) })
} }
} }
// HandleCurrency godoc
//
// @Summary Currency
// @Tags Base
// @Produce json
// @Success 200 {object} currencies.Currency
// @Router /v1/currency [GET]
func (ctrl *V1Controller) HandleCurrency() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
// Set Cache for 10 Minutes
w.Header().Set("Cache-Control", "max-age=600")
return server.JSON(w, http.StatusOK, ctrl.svc.Currencies.Slice())
}
}
func (ctrl *V1Controller) HandleCacheWS() errchain.HandlerFunc { func (ctrl *V1Controller) HandleCacheWS() errchain.HandlerFunc {
type eventMsg struct {
Event string `json:"event"`
}
m := melody.New() m := melody.New()
m.HandleConnect(func(s *melody.Session) { m.HandleConnect(func(s *melody.Session) {
@@ -167,15 +135,9 @@ func (ctrl *V1Controller) HandleCacheWS() errchain.HandlerFunc {
return return
} }
msg := &eventMsg{Event: e} jsonStr := fmt.Sprintf(`{"event": "%s"}`, e)
jsonBytes, err := json.Marshal(msg) _ = m.BroadcastFilter([]byte(jsonStr), func(s *melody.Session) bool {
if err != nil {
log.Log().Msgf("error marshling event data %v: %v", data, err)
return
}
_ = m.BroadcastFilter(jsonBytes, func(s *melody.Session) bool {
groupIDStr, ok := s.Get("gid") groupIDStr, ok := s.Get("gid")
if !ok { if !ok {
return false return false
@@ -191,25 +153,6 @@ func (ctrl *V1Controller) HandleCacheWS() errchain.HandlerFunc {
ctrl.bus.Subscribe(eventbus.EventLocationMutation, factory("location.mutation")) ctrl.bus.Subscribe(eventbus.EventLocationMutation, factory("location.mutation"))
ctrl.bus.Subscribe(eventbus.EventItemMutation, factory("item.mutation")) ctrl.bus.Subscribe(eventbus.EventItemMutation, factory("item.mutation"))
// Persistent asynchronous ticker that keeps all websocket connections alive with periodic pings.
go func() {
const interval = 10 * time.Second
ping := time.NewTicker(interval)
defer ping.Stop()
for range ping.C {
msg := &eventMsg{Event: "ping"}
pingBytes, err := json.Marshal(msg)
if err != nil {
log.Log().Msgf("error marshaling ping: %v", err)
} else {
_ = m.Broadcast(pingBytes)
}
}
}()
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
return m.HandleRequest(w, r) return m.HandleRequest(w, r)
} }
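For context on the flow above: HandleCacheWS registers one callback per mutation event on the shared EventBus, and each callback broadcasts a small {"event": "..."} JSON payload to the websocket sessions selected by the gid-based filter. A minimal sketch of an extra subscriber, assuming the same callback shape used by factory(...) (the concrete handler signature is not visible in this diff):

	// Illustrative only; the func(data any) signature is an assumption.
	ctrl.bus.Subscribe(eventbus.EventItemMutation, func(data any) {
		log.Debug().Interface("data", data).Msg("item mutation observed")
	})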
View File
@@ -5,7 +5,7 @@ import (
"github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate" "github.com/hay-kot/homebox/backend/internal/sys/validate"
) )
// routeID extracts the ID from the request URL. If the ID is not in a valid // routeID extracts the ID from the request URL. If the ID is not in a valid
View File
@@ -5,11 +5,11 @@ import (
"net/http" "net/http"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server" "github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
) )
type ActionAmountResult struct { type ActionAmountResult struct {
@@ -32,52 +32,39 @@ func actionHandlerFactory(ref string, fn func(context.Context, uuid.UUID) (int,
// HandleEnsureAssetID godoc // HandleEnsureAssetID godoc
// //
// @Summary Ensure Asset IDs // @Summary Ensure Asset IDs
// @Description Ensures all items in the database have an asset ID // @Description Ensures all items in the database have an asset ID
// @Tags Actions // @Tags Actions
// @Produce json // @Produce json
// @Success 200 {object} ActionAmountResult // @Success 200 {object} ActionAmountResult
// @Router /v1/actions/ensure-asset-ids [Post] // @Router /v1/actions/ensure-asset-ids [Post]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleEnsureAssetID() errchain.HandlerFunc { func (ctrl *V1Controller) HandleEnsureAssetID() errchain.HandlerFunc {
return actionHandlerFactory("ensure asset IDs", ctrl.svc.Items.EnsureAssetID) return actionHandlerFactory("ensure asset IDs", ctrl.svc.Items.EnsureAssetID)
} }
// HandleEnsureImportRefs godoc // HandleEnsureImportRefs godoc
// //
// @Summary Ensures Import Refs // @Summary Ensures Import Refs
// @Description Ensures all items in the database have an import ref // @Description Ensures all items in the database have an import ref
// @Tags Actions // @Tags Actions
// @Produce json // @Produce json
// @Success 200 {object} ActionAmountResult // @Success 200 {object} ActionAmountResult
// @Router /v1/actions/ensure-import-refs [Post] // @Router /v1/actions/ensure-import-refs [Post]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleEnsureImportRefs() errchain.HandlerFunc { func (ctrl *V1Controller) HandleEnsureImportRefs() errchain.HandlerFunc {
return actionHandlerFactory("ensure import refs", ctrl.svc.Items.EnsureImportRef) return actionHandlerFactory("ensure import refs", ctrl.svc.Items.EnsureImportRef)
} }
// HandleItemDateZeroOut godoc // HandleItemDateZeroOut godoc
// //
// @Summary Zero Out Time Fields // @Summary Zero Out Time Fields
// @Description Resets all item date fields to the beginning of the day // @Description Resets all item date fields to the beginning of the day
// @Tags Actions // @Tags Actions
// @Produce json // @Produce json
// @Success 200 {object} ActionAmountResult // @Success 200 {object} ActionAmountResult
// @Router /v1/actions/zero-item-time-fields [Post] // @Router /v1/actions/zero-item-time-fields [Post]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleItemDateZeroOut() errchain.HandlerFunc { func (ctrl *V1Controller) HandleItemDateZeroOut() errchain.HandlerFunc {
return actionHandlerFactory("zero out date time", ctrl.repo.Items.ZeroOutTimeFields) return actionHandlerFactory("zero out date time", ctrl.repo.Items.ZeroOutTimeFields)
} }
// HandleSetPrimaryPhotos godoc
//
// @Summary Set Primary Photos
// @Description Sets the first photo of each item as the primary photo
// @Tags Actions
// @Produce json
// @Success 200 {object} ActionAmountResult
// @Router /v1/actions/set-primary-photos [Post]
// @Security Bearer
func (ctrl *V1Controller) HandleSetPrimaryPhotos() errchain.HandlerFunc {
return actionHandlerFactory("ensure asset IDs", ctrl.repo.Items.SetPrimaryPhotos)
}
View File
@@ -6,38 +6,38 @@ import (
"strings" "strings"
"github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server" "github.com/hay-kot/httpkit/server"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
) )
// HandleAssetGet godocs // HandleAssetGet godocs
// //
// @Summary Get Item by Asset ID // @Summary Get Item by Asset ID
// @Tags Items // @Tags Items
// @Produce json // @Produce json
// @Param id path string true "Asset ID" // @Param id path string true "Asset ID"
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{} // @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Router /v1/assets/{id} [GET] // @Router /v1/assets/{id} [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleAssetGet() errchain.HandlerFunc { func (ctrl *V1Controller) HandleAssetGet() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context()) ctx := services.NewContext(r.Context())
assetIDParam := chi.URLParam(r, "id") assetIdParam := chi.URLParam(r, "id")
assetIDParam = strings.ReplaceAll(assetIDParam, "-", "") // Remove dashes assetIdParam = strings.ReplaceAll(assetIdParam, "-", "") // Remove dashes
// Convert the asset ID to an int64 // Convert the asset ID to an int64
assetID, err := strconv.ParseInt(assetIDParam, 10, 64) assetId, err := strconv.ParseInt(assetIdParam, 10, 64)
if err != nil { if err != nil {
return err return err
} }
pageParam := r.URL.Query().Get("page") pageParam := r.URL.Query().Get("page")
var page int64 = -1 var page int64 = -1
if pageParam != "" { if pageParam != "" {
page, err = strconv.ParseInt(pageParam, 10, 32) page, err = strconv.ParseInt(pageParam, 10, 64)
if err != nil { if err != nil {
return server.JSON(w, http.StatusBadRequest, "Invalid page number") return server.JSON(w, http.StatusBadRequest, "Invalid page number")
} }
@@ -46,13 +46,13 @@ func (ctrl *V1Controller) HandleAssetGet() errchain.HandlerFunc {
pageSizeParam := r.URL.Query().Get("pageSize") pageSizeParam := r.URL.Query().Get("pageSize")
var pageSize int64 = -1 var pageSize int64 = -1
if pageSizeParam != "" { if pageSizeParam != "" {
pageSize, err = strconv.ParseInt(pageSizeParam, 10, 32) pageSize, err = strconv.ParseInt(pageSizeParam, 10, 64)
if err != nil { if err != nil {
return server.JSON(w, http.StatusBadRequest, "Invalid page size") return server.JSON(w, http.StatusBadRequest, "Invalid page size")
} }
} }
items, err := ctrl.repo.Items.QueryByAssetID(r.Context(), ctx.GID, repo.AssetID(assetID), int(page), int(pageSize)) items, err := ctrl.repo.Items.QueryByAssetID(r.Context(), ctx.GID, repo.AssetID(assetId), int(page), int(pageSize))
if err != nil { if err != nil {
log.Err(err).Msg("failed to get item") log.Err(err).Msg("failed to get item")
return validate.NewRequestError(err, http.StatusInternalServerError) return validate.NewRequestError(err, http.StatusInternalServerError)
View File
@@ -3,21 +3,14 @@ package v1
import ( import (
"errors" "errors"
"net/http" "net/http"
"strconv"
"strings" "strings"
"time" "time"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server" "github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)
const (
cookieNameToken = "hb.auth.token"
cookieNameRemember = "hb.auth.remember"
cookieNameSession = "hb.auth.session"
) )
type ( type (
@@ -28,97 +21,66 @@ type (
} }
LoginForm struct { LoginForm struct {
Username string `json:"username" example:"admin@admin.com"` Username string `json:"username"`
Password string `json:"password" example:"admin"` Password string `json:"password"`
StayLoggedIn bool `json:"stayLoggedIn"` StayLoggedIn bool `json:"stayLoggedIn"`
} }
) )
type CookieContents struct {
Token string
ExpiresAt time.Time
Remember bool
}
func GetCookies(r *http.Request) (*CookieContents, error) {
cookie, err := r.Cookie(cookieNameToken)
if err != nil {
return nil, errors.New("authorization cookie is required")
}
rememberCookie, err := r.Cookie(cookieNameRemember)
if err != nil {
return nil, errors.New("remember cookie is required")
}
return &CookieContents{
Token: cookie.Value,
ExpiresAt: cookie.Expires,
Remember: rememberCookie.Value == "true",
}, nil
}
// AuthProvider is an interface that can be implemented by any authentication provider.
// to extend authentication methods for the API.
type AuthProvider interface {
// Name returns the name of the authentication provider. This should be a unique name.
// that is URL friendly.
//
// Example: "local", "ldap"
Name() string
// Authenticate is called when a user attempts to login to the API. The implementation
// should return an error if the user cannot be authenticated. If an error is returned
// the API controller will return a vague error message to the user.
//
// Authenticate should do the following:
//
// 1. Ensure that the user exists within the database (either create, or get)
// 2. On successful authentication, they must set the user cookies.
Authenticate(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error)
}
// HandleAuthLogin godoc // HandleAuthLogin godoc
// //
// @Summary User Login // @Summary User Login
// @Tags Authentication // @Tags Authentication
// @Accept x-www-form-urlencoded // @Accept x-www-form-urlencoded
// @Accept application/json // @Accept application/json
// @Param payload body LoginForm true "Login Data" // @Param username formData string false "string" example(admin@admin.com)
// @Param provider query string false "auth provider" // @Param password formData string false "string" example(admin)
// @Param payload body LoginForm true "Login Data"
// @Produce json // @Produce json
// @Success 200 {object} TokenResponse // @Success 200 {object} TokenResponse
// @Router /v1/users/login [POST] // @Router /v1/users/login [POST]
func (ctrl *V1Controller) HandleAuthLogin(ps ...AuthProvider) errchain.HandlerFunc { func (ctrl *V1Controller) HandleAuthLogin() errchain.HandlerFunc {
if len(ps) == 0 {
panic("no auth providers provided")
}
providers := make(map[string]AuthProvider)
for _, p := range ps {
log.Info().Str("name", p.Name()).Msg("registering auth provider")
providers[p.Name()] = p
}
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
// Extract provider query loginForm := &LoginForm{}
provider := r.URL.Query().Get("provider")
if provider == "" { switch r.Header.Get("Content-Type") {
provider = "local" case "application/x-www-form-urlencoded":
err := r.ParseForm()
if err != nil {
return errors.New("failed to parse form")
}
loginForm.Username = r.PostFormValue("username")
loginForm.Password = r.PostFormValue("password")
loginForm.StayLoggedIn = r.PostFormValue("stayLoggedIn") == "true"
case "application/json":
err := server.Decode(r, loginForm)
if err != nil {
log.Err(err).Msg("failed to decode login form")
return errors.New("failed to decode login form")
}
default:
return server.JSON(w, http.StatusBadRequest, errors.New("invalid content type"))
} }
// Get the provider if loginForm.Username == "" || loginForm.Password == "" {
p, ok := providers[provider] return validate.NewFieldErrors(
if !ok { validate.FieldError{
return validate.NewRequestError(errors.New("invalid auth provider"), http.StatusBadRequest) Field: "username",
Error: "username or password is empty",
},
validate.FieldError{
Field: "password",
Error: "username or password is empty",
},
)
} }
newToken, err := p.Authenticate(w, r) newToken, err := ctrl.svc.User.Login(r.Context(), strings.ToLower(loginForm.Username), loginForm.Password, loginForm.StayLoggedIn)
if err != nil { if err != nil {
log.Err(err).Msg("failed to authenticate") return validate.NewRequestError(errors.New("authentication failed"), http.StatusInternalServerError)
return server.JSON(w, http.StatusInternalServerError, err.Error())
} }
ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, true)
return server.JSON(w, http.StatusOK, TokenResponse{ return server.JSON(w, http.StatusOK, TokenResponse{
Token: "Bearer " + newToken.Raw, Token: "Bearer " + newToken.Raw,
ExpiresAt: newToken.ExpiresAt, ExpiresAt: newToken.ExpiresAt,
@@ -129,11 +91,11 @@ func (ctrl *V1Controller) HandleAuthLogin(ps ...AuthProvider) errchain.HandlerFu
// HandleAuthLogout godoc // HandleAuthLogout godoc
// //
// @Summary User Logout // @Summary User Logout
// @Tags Authentication // @Tags Authentication
// @Success 204 // @Success 204
// @Router /v1/users/logout [POST] // @Router /v1/users/logout [POST]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleAuthLogout() errchain.HandlerFunc { func (ctrl *V1Controller) HandleAuthLogout() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
token := services.UseTokenCtx(r.Context()) token := services.UseTokenCtx(r.Context())
@@ -146,20 +108,19 @@ func (ctrl *V1Controller) HandleAuthLogout() errchain.HandlerFunc {
return validate.NewRequestError(err, http.StatusInternalServerError) return validate.NewRequestError(err, http.StatusInternalServerError)
} }
ctrl.unsetCookies(w, noPort(r.Host))
return server.JSON(w, http.StatusNoContent, nil) return server.JSON(w, http.StatusNoContent, nil)
} }
} }
// HandleAuthRefresh godoc // HandleAuthLogout godoc
// //
// @Summary User Token Refresh // @Summary User Token Refresh
// @Description handleAuthRefresh returns a handler that will issue a new token from an existing token. // @Description handleAuthRefresh returns a handler that will issue a new token from an existing token.
// @Description This does not validate that the user still exists within the database. // @Description This does not validate that the user still exists within the database.
// @Tags Authentication // @Tags Authentication
// @Success 200 // @Success 200
// @Router /v1/users/refresh [GET] // @Router /v1/users/refresh [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleAuthRefresh() errchain.HandlerFunc { func (ctrl *V1Controller) HandleAuthRefresh() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
requestToken := services.UseTokenCtx(r.Context()) requestToken := services.UseTokenCtx(r.Context())
@@ -172,78 +133,6 @@ func (ctrl *V1Controller) HandleAuthRefresh() errchain.HandlerFunc {
return validate.NewUnauthorizedError() return validate.NewUnauthorizedError()
} }
ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, false)
return server.JSON(w, http.StatusOK, newToken) return server.JSON(w, http.StatusOK, newToken)
} }
} }
func noPort(host string) string {
return strings.Split(host, ":")[0]
}
func (ctrl *V1Controller) setCookies(w http.ResponseWriter, domain, token string, expires time.Time, remember bool) {
http.SetCookie(w, &http.Cookie{
Name: cookieNameRemember,
Value: strconv.FormatBool(remember),
Expires: expires,
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: true,
Path: "/",
})
// Set HTTP only cookie
http.SetCookie(w, &http.Cookie{
Name: cookieNameToken,
Value: token,
Expires: expires,
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: true,
Path: "/",
})
// Set Fake Session cookie
http.SetCookie(w, &http.Cookie{
Name: cookieNameSession,
Value: "true",
Expires: expires,
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: false,
Path: "/",
})
}
func (ctrl *V1Controller) unsetCookies(w http.ResponseWriter, domain string) {
http.SetCookie(w, &http.Cookie{
Name: cookieNameToken,
Value: "",
Expires: time.Unix(0, 0),
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: true,
Path: "/",
})
http.SetCookie(w, &http.Cookie{
Name: cookieNameRemember,
Value: "false",
Expires: time.Unix(0, 0),
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: true,
Path: "/",
})
// Set Fake Session cookie
http.SetCookie(w, &http.Cookie{
Name: cookieNameSession,
Value: "false",
Expires: time.Unix(0, 0),
Domain: domain,
Secure: ctrl.cookieSecure,
HttpOnly: false,
Path: "/",
})
}
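To make the AuthProvider contract defined above concrete, here is a minimal sketch of a provider built on the same svc.User.Login call used elsewhere in this file. The type name and its field are hypothetical, it assumes services.UserAuthTokenDetail is the token type Login returns, and cookie handling is left to HandleAuthLogin, which calls setCookies once Authenticate succeeds.

	// Hypothetical provider, not part of this diff.
	type formAuthProvider struct {
		svc *services.AllServices // assumed to be injected at construction time
	}

	func (p *formAuthProvider) Name() string { return "local" }

	func (p *formAuthProvider) Authenticate(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error) {
		if err := r.ParseForm(); err != nil {
			return services.UserAuthTokenDetail{}, errors.New("failed to parse form")
		}

		username := strings.ToLower(r.PostFormValue("username"))
		password := r.PostFormValue("password")
		stayLoggedIn := r.PostFormValue("stayLoggedIn") == "true"

		// Same login call the JSON/form handler above relied on.
		return p.svc.User.Login(r.Context(), username, password, stayLoggedIn)
	}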
View File
@@ -4,16 +4,15 @@ import (
"net/http" "net/http"
"time" "time"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
) )
type ( type (
GroupInvitationCreate struct { GroupInvitationCreate struct {
Uses int `json:"uses" validate:"required,min=1,max=100"` Uses int `json:"uses" validate:"required,min=1,max=100"`
ExpiresAt time.Time `json:"expiresAt"` ExpiresAt time.Time `json:"expiresAt"`
} }
@@ -26,12 +25,12 @@ type (
// HandleGroupGet godoc // HandleGroupGet godoc
// //
// @Summary Get Group // @Summary Get Group
// @Tags Group // @Tags Group
// @Produce json // @Produce json
// @Success 200 {object} repo.Group // @Success 200 {object} repo.Group
// @Router /v1/groups [Get] // @Router /v1/groups [Get]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleGroupGet() errchain.HandlerFunc { func (ctrl *V1Controller) HandleGroupGet() errchain.HandlerFunc {
fn := func(r *http.Request) (repo.Group, error) { fn := func(r *http.Request) (repo.Group, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -43,24 +42,16 @@ func (ctrl *V1Controller) HandleGroupGet() errchain.HandlerFunc {
// HandleGroupUpdate godoc // HandleGroupUpdate godoc
// //
// @Summary Update Group // @Summary Update Group
// @Tags Group // @Tags Group
// @Produce json // @Produce json
// @Param payload body repo.GroupUpdate true "User Data" // @Param payload body repo.GroupUpdate true "User Data"
// @Success 200 {object} repo.Group // @Success 200 {object} repo.Group
// @Router /v1/groups [Put] // @Router /v1/groups [Put]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleGroupUpdate() errchain.HandlerFunc { func (ctrl *V1Controller) HandleGroupUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, body repo.GroupUpdate) (repo.Group, error) { fn := func(r *http.Request, body repo.GroupUpdate) (repo.Group, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
ok := ctrl.svc.Currencies.IsSupported(body.Currency)
if !ok {
return repo.Group{}, validate.NewFieldErrors(
validate.NewFieldError("currency", "currency '"+body.Currency+"' is not supported"),
)
}
return ctrl.svc.Group.UpdateGroup(auth, body) return ctrl.svc.Group.UpdateGroup(auth, body)
} }
@@ -69,13 +60,13 @@ func (ctrl *V1Controller) HandleGroupUpdate() errchain.HandlerFunc {
// HandleGroupInvitationsCreate godoc // HandleGroupInvitationsCreate godoc
// //
// @Summary Create Group Invitation // @Summary Create Group Invitation
// @Tags Group // @Tags Group
// @Produce json // @Produce json
// @Param payload body GroupInvitationCreate true "User Data" // @Param payload body GroupInvitationCreate true "User Data"
// @Success 200 {object} GroupInvitation // @Success 200 {object} GroupInvitation
// @Router /v1/groups/invitations [Post] // @Router /v1/groups/invitations [Post]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleGroupInvitationsCreate() errchain.HandlerFunc { func (ctrl *V1Controller) HandleGroupInvitationsCreate() errchain.HandlerFunc {
fn := func(r *http.Request, body GroupInvitationCreate) (GroupInvitation, error) { fn := func(r *http.Request, body GroupInvitationCreate) (GroupInvitation, error) {
if body.ExpiresAt.IsZero() { if body.ExpiresAt.IsZero() {
View File
@@ -4,37 +4,32 @@ import (
"database/sql" "database/sql"
"encoding/csv" "encoding/csv"
"errors" "errors"
"fmt"
"math/big"
"net/http" "net/http"
"net/url"
"strings" "strings"
"time"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server" "github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
) )
// HandleItemsGetAll godoc // HandleItemsGetAll godoc
// //
// @Summary Query All Items // @Summary Query All Items
// @Tags Items // @Tags Items
// @Produce json // @Produce json
// @Param q query string false "search string" // @Param q query string false "search string"
// @Param page query int false "page number" // @Param page query int false "page number"
// @Param pageSize query int false "items per page" // @Param pageSize query int false "items per page"
// @Param labels query []string false "label Ids" collectionFormat(multi) // @Param labels query []string false "label Ids" collectionFormat(multi)
// @Param locations query []string false "location Ids" collectionFormat(multi) // @Param locations query []string false "location Ids" collectionFormat(multi)
// @Param parentIds query []string false "parent Ids" collectionFormat(multi) // @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{} // @Router /v1/items [GET]
// @Router /v1/items [GET] // @Security Bearer
// @Security Bearer
func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc { func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
extractQuery := func(r *http.Request) repo.ItemQuery { extractQuery := func(r *http.Request) repo.ItemQuery {
params := r.URL.Query() params := r.URL.Query()
@@ -61,8 +56,6 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
Search: params.Get("q"), Search: params.Get("q"),
LocationIDs: queryUUIDList(params, "locations"), LocationIDs: queryUUIDList(params, "locations"),
LabelIDs: queryUUIDList(params, "labels"), LabelIDs: queryUUIDList(params, "labels"),
NegateLabels: queryBool(params.Get("negateLabels")),
ParentItemIDs: queryUUIDList(params, "parentIds"),
IncludeArchived: queryBool(params.Get("includeArchived")), IncludeArchived: queryBool(params.Get("includeArchived")),
Fields: filterFieldItems(params["fields"]), Fields: filterFieldItems(params["fields"]),
OrderBy: params.Get("orderBy"), OrderBy: params.Get("orderBy"),
@@ -85,14 +78,6 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
ctx := services.NewContext(r.Context()) ctx := services.NewContext(r.Context())
items, err := ctrl.repo.Items.QueryByGroup(ctx, ctx.GID, extractQuery(r)) items, err := ctrl.repo.Items.QueryByGroup(ctx, ctx.GID, extractQuery(r))
totalPrice := new(big.Int)
for _, item := range items.Items {
totalPrice.Add(totalPrice, big.NewInt(int64(item.PurchasePrice*100)))
}
totalPriceFloat := new(big.Float).SetInt(totalPrice)
totalPriceFloat.Quo(totalPriceFloat, big.NewFloat(100))
if err != nil { if err != nil {
if errors.Is(err, sql.ErrNoRows) { if errors.Is(err, sql.ErrNoRows) {
return server.JSON(w, http.StatusOK, repo.PaginationResult[repo.ItemSummary]{ return server.JSON(w, http.StatusOK, repo.PaginationResult[repo.ItemSummary]{
@@ -106,57 +91,15 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
} }
} }
// HandleItemFullPath godoc
//
// @Summary Get the full path of an item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} []repo.ItemPath
// @Router /v1/items/{id}/path [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemFullPath() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) ([]repo.ItemPath, error) {
auth := services.NewContext(r.Context())
item, err := ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
if err != nil {
return nil, err
}
paths, err := ctrl.repo.Locations.PathForLoc(auth, auth.GID, item.Location.ID)
if err != nil {
return nil, err
}
if item.Parent != nil {
paths = append(paths, repo.ItemPath{
Type: repo.ItemTypeItem,
ID: item.Parent.ID,
Name: item.Parent.Name,
})
}
paths = append(paths, repo.ItemPath{
Type: repo.ItemTypeItem,
ID: item.ID,
Name: item.Name,
})
return paths, nil
}
return adapters.CommandID("id", fn, http.StatusOK)
}
// HandleItemsCreate godoc // HandleItemsCreate godoc
// //
// @Summary Create Item // @Summary Create Item
// @Tags Items // @Tags Items
// @Produce json // @Produce json
// @Param payload body repo.ItemCreate true "Item Data" // @Param payload body repo.ItemCreate true "Item Data"
// @Success 201 {object} repo.ItemSummary // @Success 201 {object} repo.ItemSummary
// @Router /v1/items [POST] // @Router /v1/items [POST]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleItemsCreate() errchain.HandlerFunc { func (ctrl *V1Controller) HandleItemsCreate() errchain.HandlerFunc {
fn := func(r *http.Request, body repo.ItemCreate) (repo.ItemOut, error) { fn := func(r *http.Request, body repo.ItemCreate) (repo.ItemOut, error) {
return ctrl.svc.Items.Create(services.NewContext(r.Context()), body) return ctrl.svc.Items.Create(services.NewContext(r.Context()), body)
@@ -167,13 +110,13 @@ func (ctrl *V1Controller) HandleItemsCreate() errchain.HandlerFunc {
// HandleItemGet godocs // HandleItemGet godocs
// //
// @Summary Get Item // @Summary Get Item
// @Tags Items // @Tags Items
// @Produce json // @Produce json
// @Param id path string true "Item ID" // @Param id path string true "Item ID"
// @Success 200 {object} repo.ItemOut // @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [GET] // @Router /v1/items/{id} [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc { func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.ItemOut, error) { fn := func(r *http.Request, ID uuid.UUID) (repo.ItemOut, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -186,13 +129,13 @@ func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc {
// HandleItemDelete godocs // HandleItemDelete godocs
// //
// @Summary Delete Item // @Summary Delete Item
// @Tags Items // @Tags Items
// @Produce json // @Produce json
// @Param id path string true "Item ID" // @Param id path string true "Item ID"
// @Success 204 // @Success 204
// @Router /v1/items/{id} [DELETE] // @Router /v1/items/{id} [DELETE]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleItemDelete() errchain.HandlerFunc { func (ctrl *V1Controller) HandleItemDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) { fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -205,14 +148,14 @@ func (ctrl *V1Controller) HandleItemDelete() errchain.HandlerFunc {
// HandleItemUpdate godocs // HandleItemUpdate godocs
// //
// @Summary Update Item // @Summary Update Item
// @Tags Items // @Tags Items
// @Produce json // @Produce json
// @Param id path string true "Item ID" // @Param id path string true "Item ID"
// @Param payload body repo.ItemUpdate true "Item Data" // @Param payload body repo.ItemUpdate true "Item Data"
// @Success 200 {object} repo.ItemOut // @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [PUT] // @Router /v1/items/{id} [PUT]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc { func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.ItemUpdate) (repo.ItemOut, error) { fn := func(r *http.Request, ID uuid.UUID, body repo.ItemUpdate) (repo.ItemOut, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -224,27 +167,28 @@ func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc {
return adapters.ActionID("id", fn, http.StatusOK) return adapters.ActionID("id", fn, http.StatusOK)
} }
// HandleItemPatch godocs // HandleItemPatch godocs
// //
// @Summary Update Item // @Summary Update Item
// @Tags Items // @Tags Items
// @Produce json // @Produce json
// @Param id path string true "Item ID" // @Param id path string true "Item ID"
// @Param payload body repo.ItemPatch true "Item Data" // @Param payload body repo.ItemPatch true "Item Data"
// @Success 200 {object} repo.ItemOut // @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [Patch] // @Router /v1/items/{id} [Patch]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleItemPatch() errchain.HandlerFunc { func (ctrl *V1Controller) HandleItemPatch() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.ItemPatch) (repo.ItemOut, error) { fn := func(r *http.Request, ID uuid.UUID, body repo.ItemPatch) (repo.ItemOut, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
body.ID = ID body.ID = ID
err := ctrl.repo.Items.Patch(auth, auth.GID, ID, body) err := ctrl.repo.Items.Patch(auth, auth.GID, ID, body)
if err != nil { if err != nil {
return repo.ItemOut{}, err return repo.ItemOut{}, err
} }
return ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID) return ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
} }
return adapters.ActionID("id", fn, http.StatusOK) return adapters.ActionID("id", fn, http.StatusOK)
@@ -252,13 +196,13 @@ func (ctrl *V1Controller) HandleItemPatch() errchain.HandlerFunc {
// HandleGetAllCustomFieldNames godocs // HandleGetAllCustomFieldNames godocs
// //
// @Summary Get All Custom Field Names // @Summary Get All Custom Field Names
// @Tags Items // @Tags Items
// @Produce json // @Produce json
// @Success 200 // @Success 200
// @Router /v1/items/fields [GET] // @Router /v1/items/fields [GET]
// @Success 200 {object} []string // @Success 200 {object} []string
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleGetAllCustomFieldNames() errchain.HandlerFunc { func (ctrl *V1Controller) HandleGetAllCustomFieldNames() errchain.HandlerFunc {
fn := func(r *http.Request) ([]string, error) { fn := func(r *http.Request) ([]string, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -270,13 +214,13 @@ func (ctrl *V1Controller) HandleGetAllCustomFieldNames() errchain.HandlerFunc {
// HandleGetAllCustomFieldValues godocs // HandleGetAllCustomFieldValues godocs
// //
// @Summary Get All Custom Field Values // @Summary Get All Custom Field Values
// @Tags Items // @Tags Items
// @Produce json // @Produce json
// @Success 200 // @Success 200
// @Router /v1/items/fields/values [GET] // @Router /v1/items/fields/values [GET]
// @Success 200 {object} []string // @Success 200 {object} []string
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleGetAllCustomFieldValues() errchain.HandlerFunc { func (ctrl *V1Controller) HandleGetAllCustomFieldValues() errchain.HandlerFunc {
type query struct { type query struct {
Field string `schema:"field" validate:"required"` Field string `schema:"field" validate:"required"`
@@ -287,14 +231,14 @@ func (ctrl *V1Controller) HandleGetAllCustomFieldValues() errchain.HandlerFunc {
return ctrl.repo.Items.GetAllCustomFieldValues(auth, auth.GID, q.Field) return ctrl.repo.Items.GetAllCustomFieldValues(auth, auth.GID, q.Field)
} }
return adapters.Query(fn, http.StatusOK) return adapters.Action(fn, http.StatusOK)
} }
// HandleItemsImport godocs // HandleItemsImport godocs
// //
// @Summary Import Items // @Summary Import Items
// @Tags Items // @Tags Items
// @Accept multipart/form-data
// @Produce json // @Produce json
// @Success 204 // @Success 204
// @Param csv formData file true "Image to upload" // @Param csv formData file true "Image to upload"
@@ -328,52 +272,26 @@ func (ctrl *V1Controller) HandleItemsImport() errchain.HandlerFunc {
// HandleItemsExport godocs // HandleItemsExport godocs
// //
// @Summary Export Items // @Summary Export Items
// @Tags Items // @Tags Items
// @Success 200 {string} string "text/csv" // @Success 200 {string} string "text/csv"
// @Router /v1/items/export [GET] // @Router /v1/items/export [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleItemsExport() errchain.HandlerFunc { func (ctrl *V1Controller) HandleItemsExport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context()) ctx := services.NewContext(r.Context())
csvData, err := ctrl.svc.Items.ExportCSV(r.Context(), ctx.GID, getHBURL(r.Header.Get("Referer"), ctrl.url)) csvData, err := ctrl.svc.Items.ExportTSV(r.Context(), ctx.GID)
if err != nil { if err != nil {
log.Err(err).Msg("failed to export items") log.Err(err).Msg("failed to export items")
return validate.NewRequestError(err, http.StatusInternalServerError) return validate.NewRequestError(err, http.StatusInternalServerError)
} }
timestamp := time.Now().Format("2006-01-02_15-04-05") // YYYY-MM-DD_HH-MM-SS format w.Header().Set("Content-Type", "text/tsv")
filename := fmt.Sprintf("homebox-items_%s.csv", timestamp) // add timestamp to filename w.Header().Set("Content-Disposition", "attachment;filename=homebox-items.tsv")
w.Header().Set("Content-Type", "text/csv")
w.Header().Set("Content-Disposition", fmt.Sprintf("attachment;filename=%s", filename))
writer := csv.NewWriter(w) writer := csv.NewWriter(w)
writer.Comma = ',' writer.Comma = '\t'
return writer.WriteAll(csvData) return writer.WriteAll(csvData)
} }
} }
func getHBURL(refererHeader, fallback string) (hbURL string) {
hbURL = refererHeader
if hbURL == "" {
hbURL = fallback
}
return stripPathFromURL(hbURL)
}
// stripPathFromURL removes the path from a URL.
// ex. https://example.com/tools -> https://example.com
func stripPathFromURL(rawURL string) string {
parsedURL, err := url.Parse(rawURL)
if err != nil {
log.Err(err).Msg("failed to parse URL")
return ""
}
strippedURL := url.URL{Scheme: parsedURL.Scheme, Host: parsedURL.Host}
return strippedURL.String()
}
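A quick illustration of the two helpers above, matching the example in the stripPathFromURL comment (the URLs are illustrative values only):

	getHBURL("https://example.com/tools", "http://localhost:7745")
	// -> "https://example.com" (Referer present, so its path is stripped)

	getHBURL("", "http://localhost:7745")
	// -> "http://localhost:7745" (no Referer, so the configured fallback URL is used)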
View File
@@ -3,16 +3,14 @@ package v1
import ( import (
"errors" "errors"
"net/http" "net/http"
"path/filepath"
"strings"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server" "github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
) )
type ( type (
@@ -25,7 +23,6 @@ type (
// //
// @Summary Create Item Attachment // @Summary Create Item Attachment
// @Tags Items Attachments // @Tags Items Attachments
// @Accept multipart/form-data
// @Produce json // @Produce json
// @Param id path string true "Item ID" // @Param id path string true "Item ID"
// @Param file formData file true "File attachment" // @Param file formData file true "File attachment"
@@ -41,6 +38,7 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
if err != nil { if err != nil {
log.Err(err).Msg("failed to parse multipart form") log.Err(err).Msg("failed to parse multipart form")
return validate.NewRequestError(errors.New("failed to parse multipart form"), http.StatusBadRequest) return validate.NewRequestError(errors.New("failed to parse multipart form"), http.StatusBadRequest)
} }
errs := validate.NewFieldErrors() errs := validate.NewFieldErrors()
@@ -69,15 +67,7 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
attachmentType := r.FormValue("type") attachmentType := r.FormValue("type")
if attachmentType == "" { if attachmentType == "" {
// Attempt to auto-detect the type of the file attachmentType = attachment.TypeAttachment.String()
ext := filepath.Ext(attachmentName)
switch strings.ToLower(ext) {
case ".jpg", ".jpeg", ".png", ".webp", ".gif", ".bmp", ".tiff":
attachmentType = attachment.TypePhoto.String()
default:
attachmentType = attachment.TypeAttachment.String()
}
} }
id, err := ctrl.routeID(r) id, err := ctrl.routeID(r)
@@ -105,41 +95,41 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
// HandleItemAttachmentGet godocs // HandleItemAttachmentGet godocs
// //
// @Summary Get Item Attachment // @Summary Get Item Attachment
// @Tags Items Attachments // @Tags Items Attachments
// @Produce application/octet-stream // @Produce application/octet-stream
// @Param id path string true "Item ID" // @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID" // @Param attachment_id path string true "Attachment ID"
// @Success 200 {object} ItemAttachmentToken // @Success 200 {object} ItemAttachmentToken
// @Router /v1/items/{id}/attachments/{attachment_id} [GET] // @Router /v1/items/{id}/attachments/{attachment_id} [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentGet() errchain.HandlerFunc { func (ctrl *V1Controller) HandleItemAttachmentGet() errchain.HandlerFunc {
return ctrl.handleItemAttachmentsHandler return ctrl.handleItemAttachmentsHandler
} }
// HandleItemAttachmentDelete godocs // HandleItemAttachmentDelete godocs
// //
// @Summary Delete Item Attachment // @Summary Delete Item Attachment
// @Tags Items Attachments // @Tags Items Attachments
// @Param id path string true "Item ID" // @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID" // @Param attachment_id path string true "Attachment ID"
// @Success 204 // @Success 204
// @Router /v1/items/{id}/attachments/{attachment_id} [DELETE] // @Router /v1/items/{id}/attachments/{attachment_id} [DELETE]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentDelete() errchain.HandlerFunc { func (ctrl *V1Controller) HandleItemAttachmentDelete() errchain.HandlerFunc {
return ctrl.handleItemAttachmentsHandler return ctrl.handleItemAttachmentsHandler
} }
// HandleItemAttachmentUpdate godocs // HandleItemAttachmentUpdate godocs
// //
// @Summary Update Item Attachment // @Summary Update Item Attachment
// @Tags Items Attachments // @Tags Items Attachments
// @Param id path string true "Item ID" // @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID" // @Param attachment_id path string true "Attachment ID"
// @Param payload body repo.ItemAttachmentUpdate true "Attachment Update" // @Param payload body repo.ItemAttachmentUpdate true "Attachment Update"
// @Success 200 {object} repo.ItemOut // @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id}/attachments/{attachment_id} [PUT] // @Router /v1/items/{id}/attachments/{attachment_id} [PUT]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentUpdate() errchain.HandlerFunc { func (ctrl *V1Controller) HandleItemAttachmentUpdate() errchain.HandlerFunc {
return ctrl.handleItemAttachmentsHandler return ctrl.handleItemAttachmentsHandler
} }
View File
@@ -4,20 +4,20 @@ import (
"net/http" "net/http"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
) )
// HandleLabelsGetAll godoc // HandleLabelsGetAll godoc
// //
// @Summary Get All Labels // @Summary Get All Labels
// @Tags Labels // @Tags Labels
// @Produce json // @Produce json
// @Success 200 {object} []repo.LabelOut // @Success 200 {object} []repo.LabelOut
// @Router /v1/labels [GET] // @Router /v1/labels [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleLabelsGetAll() errchain.HandlerFunc { func (ctrl *V1Controller) HandleLabelsGetAll() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.LabelSummary, error) { fn := func(r *http.Request) ([]repo.LabelSummary, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -29,13 +29,13 @@ func (ctrl *V1Controller) HandleLabelsGetAll() errchain.HandlerFunc {
// HandleLabelsCreate godoc // HandleLabelsCreate godoc
// //
// @Summary Create Label // @Summary Create Label
// @Tags Labels // @Tags Labels
// @Produce json // @Produce json
// @Param payload body repo.LabelCreate true "Label Data" // @Param payload body repo.LabelCreate true "Label Data"
// @Success 200 {object} repo.LabelSummary // @Success 200 {object} repo.LabelSummary
// @Router /v1/labels [POST] // @Router /v1/labels [POST]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleLabelsCreate() errchain.HandlerFunc { func (ctrl *V1Controller) HandleLabelsCreate() errchain.HandlerFunc {
fn := func(r *http.Request, data repo.LabelCreate) (repo.LabelOut, error) { fn := func(r *http.Request, data repo.LabelCreate) (repo.LabelOut, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -47,13 +47,13 @@ func (ctrl *V1Controller) HandleLabelsCreate() errchain.HandlerFunc {
// HandleLabelDelete godocs // HandleLabelDelete godocs
// //
// @Summary Delete Label // @Summary Delete Label
// @Tags Labels // @Tags Labels
// @Produce json // @Produce json
// @Param id path string true "Label ID" // @Param id path string true "Label ID"
// @Success 204 // @Success 204
// @Router /v1/labels/{id} [DELETE] // @Router /v1/labels/{id} [DELETE]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleLabelDelete() errchain.HandlerFunc { func (ctrl *V1Controller) HandleLabelDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) { fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -66,13 +66,13 @@ func (ctrl *V1Controller) HandleLabelDelete() errchain.HandlerFunc {
// HandleLabelGet godocs // HandleLabelGet godocs
// //
// @Summary Get Label // @Summary Get Label
// @Tags Labels // @Tags Labels
// @Produce json // @Produce json
// @Param id path string true "Label ID" // @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut // @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [GET] // @Router /v1/labels/{id} [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleLabelGet() errchain.HandlerFunc { func (ctrl *V1Controller) HandleLabelGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.LabelOut, error) { fn := func(r *http.Request, ID uuid.UUID) (repo.LabelOut, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -84,13 +84,13 @@ func (ctrl *V1Controller) HandleLabelGet() errchain.HandlerFunc {
// HandleLabelUpdate godocs // HandleLabelUpdate godocs
// //
// @Summary Update Label // @Summary Update Label
// @Tags Labels // @Tags Labels
// @Produce json // @Produce json
// @Param id path string true "Label ID" // @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut // @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [PUT] // @Router /v1/labels/{id} [PUT]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleLabelUpdate() errchain.HandlerFunc { func (ctrl *V1Controller) HandleLabelUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, data repo.LabelUpdate) (repo.LabelOut, error) { fn := func(r *http.Request, ID uuid.UUID, data repo.LabelUpdate) (repo.LabelOut, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())

View File

@@ -1,26 +1,24 @@
package v1 package v1
import ( import (
"context"
"math/big"
"net/http" "net/http"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
) )
// HandleLocationTreeQuery godoc // HandleLocationTreeQuery
// //
// @Summary Get Locations Tree // @Summary Get Locations Tree
// @Tags Locations // @Tags Locations
// @Produce json // @Produce json
// @Param withItems query bool false "include items in response tree" // @Param withItems query bool false "include items in response tree"
// @Success 200 {object} []repo.TreeItem // @Success 200 {object} []repo.TreeItem
// @Router /v1/locations/tree [GET] // @Router /v1/locations/tree [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleLocationTreeQuery() errchain.HandlerFunc { func (ctrl *V1Controller) HandleLocationTreeQuery() errchain.HandlerFunc {
fn := func(r *http.Request, query repo.TreeQuery) ([]repo.TreeItem, error) { fn := func(r *http.Request, query repo.TreeQuery) ([]repo.TreeItem, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -30,15 +28,15 @@ func (ctrl *V1Controller) HandleLocationTreeQuery() errchain.HandlerFunc {
return adapters.Query(fn, http.StatusOK) return adapters.Query(fn, http.StatusOK)
} }
// HandleLocationGetAll godoc // HandleLocationGetAll
// //
// @Summary Get All Locations // @Summary Get All Locations
// @Tags Locations // @Tags Locations
// @Produce json // @Produce json
// @Param filterChildren query bool false "Filter locations with parents" // @Param filterChildren query bool false "Filter locations with parents"
// @Success 200 {object} []repo.LocationOutCount // @Success 200 {object} []repo.LocationOutCount
// @Router /v1/locations [GET] // @Router /v1/locations [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleLocationGetAll() errchain.HandlerFunc { func (ctrl *V1Controller) HandleLocationGetAll() errchain.HandlerFunc {
fn := func(r *http.Request, q repo.LocationQuery) ([]repo.LocationOutCount, error) { fn := func(r *http.Request, q repo.LocationQuery) ([]repo.LocationOutCount, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -48,15 +46,15 @@ func (ctrl *V1Controller) HandleLocationGetAll() errchain.HandlerFunc {
return adapters.Query(fn, http.StatusOK) return adapters.Query(fn, http.StatusOK)
} }
// HandleLocationCreate godoc // HandleLocationCreate
// //
// @Summary Create Location // @Summary Create Location
// @Tags Locations // @Tags Locations
// @Produce json // @Produce json
// @Param payload body repo.LocationCreate true "Location Data" // @Param payload body repo.LocationCreate true "Location Data"
// @Success 200 {object} repo.LocationSummary // @Success 200 {object} repo.LocationSummary
// @Router /v1/locations [POST] // @Router /v1/locations [POST]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleLocationCreate() errchain.HandlerFunc { func (ctrl *V1Controller) HandleLocationCreate() errchain.HandlerFunc {
fn := func(r *http.Request, createData repo.LocationCreate) (repo.LocationOut, error) { fn := func(r *http.Request, createData repo.LocationCreate) (repo.LocationOut, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -66,15 +64,15 @@ func (ctrl *V1Controller) HandleLocationCreate() errchain.HandlerFunc {
return adapters.Action(fn, http.StatusCreated) return adapters.Action(fn, http.StatusCreated)
} }
// HandleLocationDelete godoc // HandleLocationDelete
// //
// @Summary Delete Location // @Summary Delete Location
// @Tags Locations // @Tags Locations
// @Produce json // @Produce json
// @Param id path string true "Location ID" // @Param id path string true "Location ID"
// @Success 204 // @Success 204
// @Router /v1/locations/{id} [DELETE] // @Router /v1/locations/{id} [DELETE]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleLocationDelete() errchain.HandlerFunc { func (ctrl *V1Controller) HandleLocationDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) { fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -85,73 +83,34 @@ func (ctrl *V1Controller) HandleLocationDelete() errchain.HandlerFunc {
return adapters.CommandID("id", fn, http.StatusNoContent) return adapters.CommandID("id", fn, http.StatusNoContent)
} }
func (ctrl *V1Controller) GetLocationWithPrice(auth context.Context, gid uuid.UUID, id uuid.UUID) (repo.LocationOut, error) { // HandleLocationGet
var location, err = ctrl.repo.Locations.GetOneByGroup(auth, gid, id)
if err != nil {
return repo.LocationOut{}, err
}
// Add direct child items price
totalPrice := new(big.Int)
items, err := ctrl.repo.Items.QueryByGroup(auth, gid, repo.ItemQuery{LocationIDs: []uuid.UUID{id}})
if err != nil {
return repo.LocationOut{}, err
}
for _, item := range items.Items {
// Convert item.Quantity to float64 for multiplication
quantity := float64(item.Quantity)
itemTotal := big.NewInt(int64(item.PurchasePrice * quantity * 100))
totalPrice.Add(totalPrice, itemTotal)
}
totalPriceFloat := new(big.Float).SetInt(totalPrice)
totalPriceFloat.Quo(totalPriceFloat, big.NewFloat(100))
location.TotalPrice, _ = totalPriceFloat.Float64()
// Add price from child locations
for _, childLocation := range location.Children {
var childLocationWithPrice repo.LocationOut
childLocationWithPrice, err = ctrl.GetLocationWithPrice(auth, gid, childLocation.ID)
if err != nil {
return repo.LocationOut{}, err
}
location.TotalPrice += childLocationWithPrice.TotalPrice
}
return location, nil
}
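For reference, a minimal standalone sketch of the cents-based accumulation that GetLocationWithPrice performs above; the prices and quantities are made up for illustration.

package main

import (
    "fmt"
    "math/big"
)

// sumPrices mirrors the approach above: multiply each price by its quantity,
// convert to integer cents, accumulate with big.Int, then scale back down.
func sumPrices(prices []float64, quantities []int) float64 {
    total := new(big.Int)
    for i, p := range prices {
        cents := big.NewInt(int64(p * float64(quantities[i]) * 100))
        total.Add(total, cents)
    }
    f := new(big.Float).SetInt(total)
    f.Quo(f, big.NewFloat(100))
    out, _ := f.Float64()
    return out
}

func main() {
    fmt.Println(sumPrices([]float64{10.50, 2.25}, []int{2, 3})) // 27.75
}

Note that the int64 conversion truncates, so prices that do not round-trip exactly through float64 can lose a cent; the sketch keeps the same behavior as the handler code above.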
// HandleLocationGet godoc
// //
// @Summary Get Location // @Summary Get Location
// @Tags Locations // @Tags Locations
// @Produce json // @Produce json
// @Param id path string true "Location ID" // @Param id path string true "Location ID"
// @Success 200 {object} repo.LocationOut // @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [GET] // @Router /v1/locations/{id} [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleLocationGet() errchain.HandlerFunc { func (ctrl *V1Controller) HandleLocationGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.LocationOut, error) { fn := func(r *http.Request, ID uuid.UUID) (repo.LocationOut, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
var location, err = ctrl.GetLocationWithPrice(auth, auth.GID, ID) return ctrl.repo.Locations.GetOneByGroup(auth, auth.GID, ID)
return location, err
} }
return adapters.CommandID("id", fn, http.StatusOK) return adapters.CommandID("id", fn, http.StatusOK)
} }
// HandleLocationUpdate godoc // HandleLocationUpdate
// //
// @Summary Update Location // @Summary Update Location
// @Tags Locations // @Tags Locations
// @Produce json // @Produce json
// @Param id path string true "Location ID" // @Param id path string true "Location ID"
// @Param payload body repo.LocationUpdate true "Location Data" // @Param payload body repo.LocationUpdate true "Location Data"
// @Success 200 {object} repo.LocationOut // @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [PUT] // @Router /v1/locations/{id} [PUT]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleLocationUpdate() errchain.HandlerFunc { func (ctrl *V1Controller) HandleLocationUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.LocationUpdate) (repo.LocationOut, error) { fn := func(r *http.Request, ID uuid.UUID, body repo.LocationUpdate) (repo.LocationOut, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())

View File

@@ -4,26 +4,24 @@ import (
"net/http" "net/http"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
) )
// HandleMaintenanceLogGet godoc // HandleMaintenanceGetLog godoc
// //
// @Summary Get Maintenance Log // @Summary Get Maintenance Log
// @Tags Item Maintenance // @Tags Maintenance
// @Produce json // @Produce json
// @Param id path string true "Item ID" // @Success 200 {object} repo.MaintenanceLog
// @Param filters query repo.MaintenanceFilters false "which maintenance to retrieve"
// @Success 200 {array} repo.MaintenanceEntryWithDetails[]
// @Router /v1/items/{id}/maintenance [GET] // @Router /v1/items/{id}/maintenance [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceLogGet() errchain.HandlerFunc { func (ctrl *V1Controller) HandleMaintenanceLogGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, filters repo.MaintenanceFilters) ([]repo.MaintenanceEntryWithDetails, error) { fn := func(r *http.Request, ID uuid.UUID, q repo.MaintenanceLogQuery) (repo.MaintenanceLog, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.GetMaintenanceByItemID(auth, auth.GID, ID, filters) return ctrl.repo.MaintEntry.GetLog(auth, auth.GID, ID, q)
} }
return adapters.QueryID("id", fn, http.StatusOK) return adapters.QueryID("id", fn, http.StatusOK)
@@ -32,9 +30,8 @@ func (ctrl *V1Controller) HandleMaintenanceLogGet() errchain.HandlerFunc {
// HandleMaintenanceEntryCreate godoc // HandleMaintenanceEntryCreate godoc
// //
// @Summary Create Maintenance Entry // @Summary Create Maintenance Entry
// @Tags Item Maintenance // @Tags Maintenance
// @Produce json // @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.MaintenanceEntryCreate true "Entry Data" // @Param payload body repo.MaintenanceEntryCreate true "Entry Data"
// @Success 201 {object} repo.MaintenanceEntry // @Success 201 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance [POST] // @Router /v1/items/{id}/maintenance [POST]
@@ -47,3 +44,39 @@ func (ctrl *V1Controller) HandleMaintenanceEntryCreate() errchain.HandlerFunc {
return adapters.ActionID("id", fn, http.StatusCreated) return adapters.ActionID("id", fn, http.StatusCreated)
} }
// HandleMaintenanceEntryDelete godoc
//
// @Summary Delete Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Success 204
// @Router /v1/items/{id}/maintenance/{entry_id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryDelete() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.MaintEntry.Delete(auth, entryID)
return nil, err
}
return adapters.CommandID("entry_id", fn, http.StatusNoContent)
}
// HandleMaintenanceEntryUpdate godoc
//
// @Summary Update Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance/{entry_id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID, body repo.MaintenanceEntryUpdate) (repo.MaintenanceEntry, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.Update(auth, entryID, body)
}
return adapters.ActionID("entry_id", fn, http.StatusOK)
}
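The maintenance, label, and location handlers all share the same adapters pattern: a typed function plus an adapter that handles decoding and encoding. Below is a simplified, hypothetical sketch of the ActionID shape used above; the real implementation in internal/web/adapters may differ.

package adapters

import (
    "encoding/json"
    "net/http"

    "github.com/go-chi/chi/v5"
    "github.com/google/uuid"
    "github.com/hay-kot/httpkit/errchain"
)

// ActionID parses the named URL parameter as a UUID, decodes the JSON body,
// invokes fn, and writes the JSON-encoded result with the given status code.
func ActionID[B any, R any](param string, fn func(*http.Request, uuid.UUID, B) (R, error), status int) errchain.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) error {
        id, err := uuid.Parse(chi.URLParam(r, param))
        if err != nil {
            return err
        }

        var body B
        if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
            return err
        }

        out, err := fn(r, id, body)
        if err != nil {
            return err
        }

        w.Header().Set("Content-Type", "application/json")
        w.WriteHeader(status)
        return json.NewEncoder(w).Encode(out)
    }
}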

View File

@@ -1,67 +0,0 @@
package v1
import (
"net/http"
"github.com/google/uuid"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)
// HandleMaintenanceGetAll godoc
//
// @Summary Query All Maintenance
// @Tags Maintenance
// @Produce json
// @Param filters query repo.MaintenanceFilters false "which maintenance to retrieve"
// @Success 200 {array} repo.MaintenanceEntryWithDetails[]
// @Router /v1/maintenance [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceGetAll() errchain.HandlerFunc {
fn := func(r *http.Request, filters repo.MaintenanceFilters) ([]repo.MaintenanceEntryWithDetails, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.GetAllMaintenance(auth, auth.GID, filters)
}
return adapters.Query(fn, http.StatusOK)
}
// HandleMaintenanceEntryUpdate godoc
//
// @Summary Update Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param id path string true "Maintenance ID"
// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/maintenance/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID, body repo.MaintenanceEntryUpdate) (repo.MaintenanceEntry, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.Update(auth, entryID, body)
}
return adapters.ActionID("id", fn, http.StatusOK)
}
// HandleMaintenanceEntryDelete godoc
//
// @Summary Delete Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param id path string true "Maintenance ID"
// @Success 204
// @Router /v1/maintenance/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryDelete() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.MaintEntry.Delete(auth, entryID)
return nil, err
}
return adapters.CommandID("id", fn, http.StatusNoContent)
}

View File

@@ -5,20 +5,20 @@ import (
"github.com/containrrr/shoutrrr" "github.com/containrrr/shoutrrr"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
) )
// HandleGetUserNotifiers godoc // HandleGetUserNotifiers godoc
// //
// @Summary Get Notifiers // @Summary Get Notifiers
// @Tags Notifiers // @Tags Notifiers
// @Produce json // @Produce json
// @Success 200 {object} []repo.NotifierOut // @Success 200 {object} []repo.NotifierOut
// @Router /v1/notifiers [GET] // @Router /v1/notifiers [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleGetUserNotifiers() errchain.HandlerFunc { func (ctrl *V1Controller) HandleGetUserNotifiers() errchain.HandlerFunc {
fn := func(r *http.Request, _ struct{}) ([]repo.NotifierOut, error) { fn := func(r *http.Request, _ struct{}) ([]repo.NotifierOut, error) {
user := services.UseUserCtx(r.Context()) user := services.UseUserCtx(r.Context())
@@ -30,13 +30,13 @@ func (ctrl *V1Controller) HandleGetUserNotifiers() errchain.HandlerFunc {
// HandleCreateNotifier godoc // HandleCreateNotifier godoc
// //
// @Summary Create Notifier // @Summary Create Notifier
// @Tags Notifiers // @Tags Notifiers
// @Produce json // @Produce json
// @Param payload body repo.NotifierCreate true "Notifier Data" // @Param payload body repo.NotifierCreate true "Notifier Data"
// @Success 200 {object} repo.NotifierOut // @Success 200 {object} repo.NotifierOut
// @Router /v1/notifiers [POST] // @Router /v1/notifiers [POST]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleCreateNotifier() errchain.HandlerFunc { func (ctrl *V1Controller) HandleCreateNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, in repo.NotifierCreate) (repo.NotifierOut, error) { fn := func(r *http.Request, in repo.NotifierCreate) (repo.NotifierOut, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -48,12 +48,12 @@ func (ctrl *V1Controller) HandleCreateNotifier() errchain.HandlerFunc {
// HandleDeleteNotifier godocs // HandleDeleteNotifier godocs
// //
// @Summary Delete a Notifier // @Summary Delete a Notifier
// @Tags Notifiers // @Tags Notifiers
// @Param id path string true "Notifier ID" // @Param id path string true "Notifier ID"
// @Success 204 // @Success 204
// @Router /v1/notifiers/{id} [DELETE] // @Router /v1/notifiers/{id} [DELETE]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleDeleteNotifier() errchain.HandlerFunc { func (ctrl *V1Controller) HandleDeleteNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) { fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -65,13 +65,13 @@ func (ctrl *V1Controller) HandleDeleteNotifier() errchain.HandlerFunc {
// HandleUpdateNotifier godocs // HandleUpdateNotifier godocs
// //
// @Summary Update Notifier // @Summary Update Notifier
// @Tags Notifiers // @Tags Notifiers
// @Param id path string true "Notifier ID" // @Param id path string true "Notifier ID"
// @Param payload body repo.NotifierUpdate true "Notifier Data" // @Param payload body repo.NotifierUpdate true "Notifier Data"
// @Success 200 {object} repo.NotifierOut // @Success 200 {object} repo.NotifierOut
// @Router /v1/notifiers/{id} [PUT] // @Router /v1/notifiers/{id} [PUT]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleUpdateNotifier() errchain.HandlerFunc { func (ctrl *V1Controller) HandleUpdateNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, in repo.NotifierUpdate) (repo.NotifierOut, error) { fn := func(r *http.Request, ID uuid.UUID, in repo.NotifierUpdate) (repo.NotifierOut, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -86,6 +86,7 @@ func (ctrl *V1Controller) HandleUpdateNotifier() errchain.HandlerFunc {
// @Summary Test Notifier // @Summary Test Notifier
// @Tags Notifiers // @Tags Notifiers
// @Produce json // @Produce json
// @Param id path string true "Notifier ID"
// @Param url query string true "URL" // @Param url query string true "URL"
// @Success 204 // @Success 204
// @Router /v1/notifiers/test [POST] // @Router /v1/notifiers/test [POST]
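For context, the URL accepted by the test endpoint is a shoutrrr service URL. A minimal sketch of exercising such a URL directly follows; the Discord token and webhook ID are placeholders, and the top-level shoutrrr.Send helper is assumed from the containrrr/shoutrrr package imported above.

package main

import "github.com/containrrr/shoutrrr"

func main() {
    // Placeholder service URL; real values come from the notifier being tested.
    url := "discord://token@webhookid"
    if err := shoutrrr.Send(url, "Homebox test notification"); err != nil {
        panic(err)
    }
}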

View File

@@ -5,10 +5,9 @@ import (
"image/png" "image/png"
"io" "io"
"net/http" "net/http"
"net/url"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
"github.com/yeqown/go-qrcode/v2" "github.com/yeqown/go-qrcode/v2"
"github.com/yeqown/go-qrcode/writer/standard" "github.com/yeqown/go-qrcode/writer/standard"
@@ -20,13 +19,13 @@ var qrcodeLogo []byte
// HandleGenerateQRCode godoc // HandleGenerateQRCode godoc
// //
// @Summary Create QR Code // @Summary Create QR Code
// @Tags Items // @Tags Items
// @Produce json // @Produce json
// @Param data query string false "data to be encoded into qrcode" // @Param data query string false "data to be encoded into qrcode"
// @Success 200 {string} string "image/jpeg" // @Success 200 {string} string "image/jpeg"
// @Router /v1/qrcode [GET] // @Router /v1/qrcode [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleGenerateQRCode() errchain.HandlerFunc { func (ctrl *V1Controller) HandleGenerateQRCode() errchain.HandlerFunc {
type query struct { type query struct {
// 4,296 characters is the maximum length of a QR code // 4,296 characters is the maximum length of a QR code
@@ -44,12 +43,7 @@ func (ctrl *V1Controller) HandleGenerateQRCode() errchain.HandlerFunc {
panic(err) panic(err)
} }
decodedStr, err := url.QueryUnescape(q.Data) qrc, err := qrcode.New(q.Data)
if err != nil {
return err
}
qrc, err := qrcode.New(decodedStr)
if err != nil { if err != nil {
return err return err
} }
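As a reference, a hypothetical client-side sketch of building the request URL for this endpoint; the item link being encoded is a placeholder.

package main

import (
    "fmt"
    "net/url"
)

func main() {
    q := url.Values{}
    q.Set("data", "https://homebox.example.com/item/0a1b2c3d") // placeholder link to encode
    u := url.URL{Path: "/api/v1/qrcode", RawQuery: q.Encode()}
    fmt.Println(u.String()) // /api/v1/qrcode?data=https%3A%2F%2Fhomebox.example.com%2Fitem%2F0a1b2c3d
}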

View File

@@ -1,30 +1,31 @@
package v1 package v1
import ( import (
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"net/http" "net/http"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/httpkit/errchain"
) )
// HandleBillOfMaterialsExport godoc // HandleBillOfMaterialsExport godoc
// //
// @Summary Export Bill of Materials // @Summary Export Bill of Materials
// @Tags Reporting // @Tags Reporting
// @Produce json // @Produce json
// @Success 200 {string} string "text/csv" // @Success 200 {string} string "text/csv"
// @Router /v1/reporting/bill-of-materials [GET] // @Router /v1/reporting/bill-of-materials [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleBillOfMaterialsExport() errchain.HandlerFunc { func (ctrl *V1Controller) HandleBillOfMaterialsExport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
actor := services.UseUserCtx(r.Context()) actor := services.UseUserCtx(r.Context())
csv, err := ctrl.svc.Items.ExportBillOfMaterialsCSV(r.Context(), actor.GroupID) csv, err := ctrl.svc.Items.ExportBillOfMaterialsTSV(r.Context(), actor.GroupID)
if err != nil { if err != nil {
return err return err
} }
w.Header().Set("Content-Type", "text/csv") w.Header().Set("Content-Type", "text/tsv")
w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.csv") w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.tsv")
_, err = w.Write(csv) _, err = w.Write(csv)
return err return err
} }

View File

@@ -4,22 +4,22 @@ import (
"net/http" "net/http"
"time" "time"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server" "github.com/hay-kot/httpkit/server"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
) )
// HandleGroupStatisticsLocations godoc // HandleGroupGet godoc
// //
// @Summary Get Location Statistics // @Summary Get Location Statistics
// @Tags Statistics // @Tags Statistics
// @Produce json // @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer // @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/locations [GET] // @Router /v1/groups/statistics/locations [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsLocations() errchain.HandlerFunc { func (ctrl *V1Controller) HandleGroupStatisticsLocations() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.TotalsByOrganizer, error) { fn := func(r *http.Request) ([]repo.TotalsByOrganizer, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -31,12 +31,12 @@ func (ctrl *V1Controller) HandleGroupStatisticsLocations() errchain.HandlerFunc
// HandleGroupStatisticsLabels godoc // HandleGroupStatisticsLabels godoc
// //
// @Summary Get Label Statistics // @Summary Get Label Statistics
// @Tags Statistics // @Tags Statistics
// @Produce json // @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer // @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/labels [GET] // @Router /v1/groups/statistics/labels [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsLabels() errchain.HandlerFunc { func (ctrl *V1Controller) HandleGroupStatisticsLabels() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.TotalsByOrganizer, error) { fn := func(r *http.Request) ([]repo.TotalsByOrganizer, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -48,12 +48,12 @@ func (ctrl *V1Controller) HandleGroupStatisticsLabels() errchain.HandlerFunc {
// HandleGroupStatistics godoc // HandleGroupStatistics godoc
// //
// @Summary Get Group Statistics // @Summary Get Group Statistics
// @Tags Statistics // @Tags Statistics
// @Produce json // @Produce json
// @Success 200 {object} repo.GroupStatistics // @Success 200 {object} repo.GroupStatistics
// @Router /v1/groups/statistics [GET] // @Router /v1/groups/statistics [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleGroupStatistics() errchain.HandlerFunc { func (ctrl *V1Controller) HandleGroupStatistics() errchain.HandlerFunc {
fn := func(r *http.Request) (repo.GroupStatistics, error) { fn := func(r *http.Request) (repo.GroupStatistics, error) {
auth := services.NewContext(r.Context()) auth := services.NewContext(r.Context())
@@ -65,14 +65,14 @@ func (ctrl *V1Controller) HandleGroupStatistics() errchain.HandlerFunc {
// HandleGroupStatisticsPriceOverTime godoc // HandleGroupStatisticsPriceOverTime godoc
// //
// @Summary Get Purchase Price Statistics // @Summary Get Purchase Price Statistics
// @Tags Statistics // @Tags Statistics
// @Produce json // @Produce json
// @Success 200 {object} repo.ValueOverTime // @Success 200 {object} repo.ValueOverTime
// @Param start query string false "start date" // @Param start query string false "start date"
// @Param end query string false "end date" // @Param end query string false "end date"
// @Router /v1/groups/statistics/purchase-price [GET] // @Router /v1/groups/statistics/purchase-price [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsPriceOverTime() errchain.HandlerFunc { func (ctrl *V1Controller) HandleGroupStatisticsPriceOverTime() errchain.HandlerFunc {
parseDate := func(datestr string, defaultDate time.Time) (time.Time, error) { parseDate := func(datestr string, defaultDate time.Time) (time.Time, error) {
if datestr == "" { if datestr == "" {

View File

@@ -5,22 +5,22 @@ import (
"net/http" "net/http"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server" "github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
) )
// HandleUserRegistration godoc // HandleUserRegistration godoc
// //
// @Summary Register New User // @Summary Register New User
// @Tags User // @Tags User
// @Produce json // @Produce json
// @Param payload body services.UserRegistration true "User Data" // @Param payload body services.UserRegistration true "User Data"
// @Success 204 // @Success 204
// @Router /v1/users/register [Post] // @Router /v1/users/register [Post]
func (ctrl *V1Controller) HandleUserRegistration() errchain.HandlerFunc { func (ctrl *V1Controller) HandleUserRegistration() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
regData := services.UserRegistration{} regData := services.UserRegistration{}
@@ -46,12 +46,12 @@ func (ctrl *V1Controller) HandleUserRegistration() errchain.HandlerFunc {
// HandleUserSelf godoc // HandleUserSelf godoc
// //
// @Summary Get User Self // @Summary Get User Self
// @Tags User // @Tags User
// @Produce json // @Produce json
// @Success 200 {object} Wrapped{item=repo.UserOut} // @Success 200 {object} Wrapped{item=repo.UserOut}
// @Router /v1/users/self [GET] // @Router /v1/users/self [GET]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleUserSelf() errchain.HandlerFunc { func (ctrl *V1Controller) HandleUserSelf() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
token := services.UseTokenCtx(r.Context()) token := services.UseTokenCtx(r.Context())
@@ -67,13 +67,13 @@ func (ctrl *V1Controller) HandleUserSelf() errchain.HandlerFunc {
// HandleUserSelfUpdate godoc // HandleUserSelfUpdate godoc
// //
// @Summary Update Account // @Summary Update Account
// @Tags User // @Tags User
// @Produce json // @Produce json
// @Param payload body repo.UserUpdate true "User Data" // @Param payload body repo.UserUpdate true "User Data"
// @Success 200 {object} Wrapped{item=repo.UserUpdate} // @Success 200 {object} Wrapped{item=repo.UserUpdate}
// @Router /v1/users/self [PUT] // @Router /v1/users/self [PUT]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleUserSelfUpdate() errchain.HandlerFunc { func (ctrl *V1Controller) HandleUserSelfUpdate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
updateData := repo.UserUpdate{} updateData := repo.UserUpdate{}
@@ -94,12 +94,12 @@ func (ctrl *V1Controller) HandleUserSelfUpdate() errchain.HandlerFunc {
// HandleUserSelfDelete godoc // HandleUserSelfDelete godoc
// //
// @Summary Delete Account // @Summary Delete Account
// @Tags User // @Tags User
// @Produce json // @Produce json
// @Success 204 // @Success 204
// @Router /v1/users/self [DELETE] // @Router /v1/users/self [DELETE]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleUserSelfDelete() errchain.HandlerFunc { func (ctrl *V1Controller) HandleUserSelfDelete() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
if ctrl.isDemo { if ctrl.isDemo {
@@ -124,12 +124,12 @@ type (
// HandleUserSelfChangePassword godoc // HandleUserSelfChangePassword godoc
// //
// @Summary Change Password // @Summary Change Password
// @Tags User // @Tags User
// @Success 204 // @Success 204
// @Param payload body ChangePassword true "Password Payload" // @Param payload body ChangePassword true "Password Payload"
// @Router /v1/users/change-password [PUT] // @Router /v1/users/change-password [PUT]
// @Security Bearer // @Security Bearer
func (ctrl *V1Controller) HandleUserSelfChangePassword() errchain.HandlerFunc { func (ctrl *V1Controller) HandleUserSelfChangePassword() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error { return func(w http.ResponseWriter, r *http.Request) error {
if ctrl.isDemo { if ctrl.isDemo {

View File

@@ -2,10 +2,11 @@ package main
import ( import (
"os" "os"
"strings"
"github.com/hay-kot/homebox/backend/internal/sys/config"
"github.com/rs/zerolog" "github.com/rs/zerolog"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
) )
// setupLogger initializes the zerolog config // setupLogger initializes the zerolog config
@@ -17,8 +18,24 @@ func (a *app) setupLogger() {
log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr}).With().Caller().Logger() log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr}).With().Caller().Logger()
} }
level, err := zerolog.ParseLevel(a.conf.Log.Level) log.Level(getLevel(a.conf.Log.Level))
if err == nil { }
zerolog.SetGlobalLevel(level)
func getLevel(l string) zerolog.Level {
switch strings.ToLower(l) {
case "debug":
return zerolog.DebugLevel
case "info":
return zerolog.InfoLevel
case "warn":
return zerolog.WarnLevel
case "error":
return zerolog.ErrorLevel
case "fatal":
return zerolog.FatalLevel
case "panic":
return zerolog.PanicLevel
default:
return zerolog.InfoLevel
} }
} }
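A minimal sketch, assuming only the zerolog packages already imported above, of how the global level differs from a per-logger level that a helper like getLevel can feed into:

package main

import (
    "github.com/rs/zerolog"
    "github.com/rs/zerolog/log"
)

func main() {
    // The global level is a floor applied to every logger.
    zerolog.SetGlobalLevel(zerolog.InfoLevel)

    // A derived logger can raise its own minimum level without touching the global one.
    leveled := log.Logger.Level(zerolog.WarnLevel)
    leveled.Info().Msg("dropped: below the derived logger's level")
    log.Info().Msg("emitted: allowed by the global level")
}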

View File

@@ -1,7 +1,6 @@
package main package main
import ( import (
"bytes"
"context" "context"
"fmt" "fmt"
"net/http" "net/http"
@@ -14,21 +13,20 @@ import (
"github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware" "github.com/go-chi/chi/v5/middleware"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/migrations"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/config"
"github.com/hay-kot/homebox/backend/internal/web/mid"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/graceful" "github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog" "github.com/rs/zerolog"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/rs/zerolog/pkgerrors" "github.com/rs/zerolog/pkgerrors"
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
"github.com/sysadminsmedia/homebox/backend/internal/data/migrations"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
"github.com/sysadminsmedia/homebox/backend/internal/web/mid"
_ "github.com/sysadminsmedia/homebox/backend/pkgs/cgofreesqlite" _ "github.com/hay-kot/homebox/backend/pkgs/cgofreesqlite"
) )
var ( var (
@@ -37,28 +35,19 @@ var (
buildTime = "now" buildTime = "now"
) )
func build() string { // @title Homebox API
short := commit // @version 1.0
if len(short) > 7 { // @description Track, Manage, and Organize your Things.
short = short[:7] // @contact.name Don't
} // @BasePath /api
// @securityDefinitions.apikey Bearer
return fmt.Sprintf("%s, commit %s, built at %s", version, short, buildTime) // @in header
} // @name Authorization
// @description "Type 'Bearer TOKEN' to correctly set the API Key"
// @title Homebox API
// @version 1.0
// @description Track, Manage, and Organize your Things.
// @contact.name Don't
// @BasePath /api
// @securityDefinitions.apikey Bearer
// @in header
// @name Authorization
// @description "Type 'Bearer TOKEN' to correctly set the API Key"
func main() { func main() {
zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack
cfg, err := config.New(build(), "Homebox inventory management system") cfg, err := config.New()
if err != nil { if err != nil {
panic(err) panic(err)
} }
@@ -80,12 +69,12 @@ func run(cfg *config.Config) error {
log.Fatal().Err(err).Msg("failed to create data directory") log.Fatal().Err(err).Msg("failed to create data directory")
} }
c, err := ent.Open("sqlite3", cfg.Storage.SqliteURL) c, err := ent.Open("sqlite3", cfg.Storage.SqliteUrl)
if err != nil { if err != nil {
log.Fatal(). log.Fatal().
Err(err). Err(err).
Str("driver", "sqlite"). Str("driver", "sqlite").
Str("url", cfg.Storage.SqliteURL). Str("url", cfg.Storage.SqliteUrl).
Msg("failed opening connection to sqlite") Msg("failed opening connection to sqlite")
} }
defer func(c *ent.Client) { defer func(c *ent.Client) {
@@ -115,46 +104,16 @@ func run(cfg *config.Config) error {
err = c.Schema.Create(context.Background(), options...) err = c.Schema.Create(context.Background(), options...)
if err != nil { if err != nil {
log.Error(). log.Fatal().
Err(err). Err(err).
Str("driver", "sqlite"). Str("driver", "sqlite").
Str("url", cfg.Storage.SqliteURL). Str("url", cfg.Storage.SqliteUrl).
Msg("failed creating schema resources") Msg("failed creating schema resources")
return err
} }
err = os.RemoveAll(temp) err = os.RemoveAll(temp)
if err != nil { if err != nil {
log.Error().Err(err).Msg("failed to remove temporary directory for database migrations") log.Fatal().Err(err).Msg("failed to remove temporary directory for database migrations")
return err
}
collectFuncs := []currencies.CollectorFunc{
currencies.CollectDefaults(),
}
if cfg.Options.CurrencyConfig != "" {
log.Info().
Str("path", cfg.Options.CurrencyConfig).
Msg("loading currency config file")
content, err := os.ReadFile(cfg.Options.CurrencyConfig)
if err != nil {
log.Error().
Err(err).
Str("path", cfg.Options.CurrencyConfig).
Msg("failed to read currency config file")
return err
}
collectFuncs = append(collectFuncs, currencies.CollectJSON(bytes.NewReader(content)))
}
currencies, err := currencies.CollectionCurrencies(collectFuncs...)
if err != nil {
log.Error().
Err(err).
Msg("failed to collect currencies")
return err return err
} }
@@ -164,7 +123,6 @@ func run(cfg *config.Config) error {
app.services = services.New( app.services = services.New(
app.repos, app.repos,
services.WithAutoIncrementAssetID(cfg.Options.AutoIncrementAssetID), services.WithAutoIncrementAssetID(cfg.Options.AutoIncrementAssetID),
services.WithCurrencies(currencies),
) )
// ========================================================================= // =========================================================================
@@ -181,66 +139,38 @@ func run(cfg *config.Config) error {
middleware.StripSlashes, middleware.StripSlashes,
) )
chain := errchain.New(mid.Errors(logger)) chain := errchain.New(mid.Errors(app.server, logger))
app.mountRoutes(router, chain, app.repos) app.mountRoutes(router, chain, app.repos)
runner := graceful.NewRunner() app.server = server.NewServer(
server.WithHost(app.conf.Web.Host),
runner.AddFunc("server", func(ctx context.Context) error { server.WithPort(app.conf.Web.Port),
httpserver := http.Server{ )
Addr: fmt.Sprintf("%s:%s", cfg.Web.Host, cfg.Web.Port), log.Info().Msgf("Starting HTTP Server on %s:%s", app.server.Host, app.server.Port)
Handler: router,
ReadTimeout: cfg.Web.ReadTimeout,
WriteTimeout: cfg.Web.WriteTimeout,
IdleTimeout: cfg.Web.IdleTimeout,
}
go func() {
<-ctx.Done()
_ = httpserver.Shutdown(context.Background())
}()
log.Info().Msgf("Server is running on %s:%s", cfg.Web.Host, cfg.Web.Port)
return httpserver.ListenAndServe()
})
// ========================================================================= // =========================================================================
// Start Reoccurring Tasks // Start Reoccurring Tasks
runner.AddFunc("eventbus", app.bus.Run) go app.bus.Run()
runner.AddFunc("seed_database", func(ctx context.Context) error { go app.startBgTask(time.Duration(24)*time.Hour, func() {
// TODO: Remove through external API that does setup _, err := app.repos.AuthTokens.PurgeExpiredTokens(context.Background())
if cfg.Demo {
log.Info().Msg("Running in demo mode, creating demo data")
err := app.SetupDemo()
if err != nil {
log.Fatal().Msg(err.Error())
}
}
return nil
})
runner.AddPlugin(NewTask("purge-tokens", time.Duration(24)*time.Hour, func(ctx context.Context) {
_, err := app.repos.AuthTokens.PurgeExpiredTokens(ctx)
if err != nil { if err != nil {
log.Error(). log.Error().
Err(err). Err(err).
Msg("failed to purge expired tokens") Msg("failed to purge expired tokens")
} }
})) })
go app.startBgTask(time.Duration(24)*time.Hour, func() {
runner.AddPlugin(NewTask("purge-invitations", time.Duration(24)*time.Hour, func(ctx context.Context) { _, err := app.repos.Groups.InvitationPurge(context.Background())
_, err := app.repos.Groups.InvitationPurge(ctx)
if err != nil { if err != nil {
log.Error(). log.Error().
Err(err). Err(err).
Msg("failed to purge expired invitations") Msg("failed to purge expired invitations")
} }
})) })
go app.startBgTask(time.Duration(1)*time.Hour, func() {
runner.AddPlugin(NewTask("send-notifications", time.Duration(1)*time.Hour, func(ctx context.Context) {
now := time.Now() now := time.Now()
if now.Hour() == 8 { if now.Hour() == 8 {
@@ -252,39 +182,22 @@ func run(cfg *config.Config) error {
Msg("failed to send notifiers") Msg("failed to send notifiers")
} }
} }
})) })
if cfg.Options.GithubReleaseCheck { // TODO: Remove through external API that does setup
runner.AddPlugin(NewTask("get-latest-github-release", time.Hour, func(ctx context.Context) { if cfg.Demo {
log.Debug().Msg("running get latest github release") log.Info().Msg("Running in demo mode, creating demo data")
err := app.services.BackgroundService.GetLatestGithubRelease(context.Background()) app.SetupDemo()
if err != nil {
log.Error().
Err(err).
Msg("failed to get latest github release")
}
}))
} }
if cfg.Debug.Enabled { if cfg.Debug.Enabled {
runner.AddFunc("debug", func(ctx context.Context) error { debugrouter := app.debugRouter()
debugserver := http.Server{ go func() {
Addr: fmt.Sprintf("%s:%s", cfg.Web.Host, cfg.Debug.Port), if err := http.ListenAndServe(":"+cfg.Debug.Port, debugrouter); err != nil {
Handler: app.debugRouter(), log.Fatal().Err(err).Msg("failed to start debug server")
ReadTimeout: cfg.Web.ReadTimeout,
WriteTimeout: cfg.Web.WriteTimeout,
IdleTimeout: cfg.Web.IdleTimeout,
} }
}()
go func() {
<-ctx.Done()
_ = debugserver.Shutdown(context.Background())
}()
log.Info().Msgf("Debug server is running on %s:%s", cfg.Web.Host, cfg.Debug.Port)
return debugserver.ListenAndServe()
})
} }
return runner.Start(context.Background()) return app.server.Start(router)
} }
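The NewTask calls above imply a small interval-runner plugin. A hypothetical sketch of what such a helper could look like is below; the actual implementation in this repository may differ.

package main

import (
    "context"
    "time"
)

// Task runs a function on a fixed interval until its context is cancelled.
type Task struct {
    name     string
    interval time.Duration
    fn       func(context.Context)
}

func NewTask(name string, interval time.Duration, fn func(context.Context)) *Task {
    return &Task{name: name, interval: interval, fn: fn}
}

func (t *Task) Start(ctx context.Context) error {
    ticker := time.NewTicker(t.interval)
    defer ticker.Stop()
    for {
        select {
        case <-ctx.Done():
            return nil
        case <-ticker.C:
            t.fn(ctx)
        }
    }
}

func main() {
    ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
    defer cancel()
    _ = NewTask("example", time.Second, func(context.Context) { println("tick") }).Start(ctx)
}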

View File

@@ -7,11 +7,9 @@ import (
"net/url" "net/url"
"strings" "strings"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
v1 "github.com/sysadminsmedia/homebox/backend/app/api/handlers/v1"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
) )
type tokenHasKey struct { type tokenHasKey struct {
@@ -96,6 +94,20 @@ func getQuery(r *http.Request) (string, error) {
return token, nil return token, nil
} }
func getCookie(r *http.Request) (string, error) {
cookie, err := r.Cookie("hb.auth.token")
if err != nil {
return "", errors.New("access_token cookie is required")
}
token, err := url.QueryUnescape(cookie.Value)
if err != nil {
return "", errors.New("access_token cookie is required")
}
return token, nil
}
// mwAuthToken is a middleware that will check the database for a stateful token // mwAuthToken is a middleware that will check the database for a stateful token
// and attach it's user to the request context, or return an appropriate error. // and attach it's user to the request context, or return an appropriate error.
// Authorization support is by token via Headers or Query Parameter // Authorization support is by token via Headers or Query Parameter
@@ -103,35 +115,26 @@ func getQuery(r *http.Request) (string, error) {
// Example: // Example:
// - header = "Bearer 1234567890" // - header = "Bearer 1234567890"
// - query = "?access_token=1234567890" // - query = "?access_token=1234567890"
// - cookie = hb.auth.token = 1234567890
func (a *app) mwAuthToken(next errchain.Handler) errchain.Handler { func (a *app) mwAuthToken(next errchain.Handler) errchain.Handler {
return errchain.HandlerFunc(func(w http.ResponseWriter, r *http.Request) error { return errchain.HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
keyFuncs := [...]KeyFunc{
getBearer,
getCookie,
getQuery,
}
var requestToken string var requestToken string
for _, keyFunc := range keyFuncs {
// We ignore the error to allow the next strategy to be attempted token, err := keyFunc(r)
{ if err == nil {
cookies, _ := v1.GetCookies(r) requestToken = token
if cookies != nil { break
requestToken = cookies.Token
} }
} }
if requestToken == "" { if requestToken == "" {
keyFuncs := [...]KeyFunc{ return validate.NewRequestError(errors.New("Authorization header or query is required"), http.StatusUnauthorized)
getBearer,
getQuery,
}
for _, keyFunc := range keyFuncs {
token, err := keyFunc(r)
if err == nil {
requestToken = token
break
}
}
}
if requestToken == "" {
return validate.NewRequestError(errors.New("authorization header or query is required"), http.StatusUnauthorized)
} }
requestToken = strings.TrimPrefix(requestToken, "Bearer ") requestToken = strings.TrimPrefix(requestToken, "Bearer ")
@@ -141,11 +144,7 @@ func (a *app) mwAuthToken(next errchain.Handler) errchain.Handler {
usr, err := a.services.User.GetSelf(r.Context(), requestToken) usr, err := a.services.User.GetSelf(r.Context(), requestToken)
// Check the database for the token // Check the database for the token
if err != nil { if err != nil {
if ent.IsNotFound(err) { return validate.NewRequestError(errors.New("valid authorization header is required"), http.StatusUnauthorized)
return validate.NewRequestError(errors.New("valid authorization token is required"), http.StatusUnauthorized)
}
return err
} }
r = r.WithContext(services.SetUserCtx(r.Context(), &usr, requestToken)) r = r.WithContext(services.SetUserCtx(r.Context(), &usr, requestToken))
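For illustration, a hedged sketch of the three ways a client can present a token to this middleware; the host and token value are placeholders, while the cookie name matches the hb.auth.token cookie read above.

package main

import "net/http"

func main() {
    req, _ := http.NewRequest(http.MethodGet, "http://homebox.local/api/v1/users/self", nil)

    // 1. Authorization header.
    req.Header.Set("Authorization", "Bearer 1234567890")

    // 2. Query parameter, useful where headers are awkward (e.g. WebSocket upgrades).
    q := req.URL.Query()
    q.Set("access_token", "1234567890")
    req.URL.RawQuery = q.Encode()

    // 3. Cookie set at login.
    req.AddCookie(&http.Cookie{Name: "hb.auth.token", Value: "1234567890"})

    _ = req // hand off to an http.Client as needed
}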

View File

@@ -1,2 +0,0 @@
// Package providers provides an authentication abstraction for the backend.
package providers

View File

@@ -1,55 +0,0 @@
package providers
import (
"errors"
"net/http"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)
type LoginForm struct {
Username string `json:"username"`
Password string `json:"password"`
StayLoggedIn bool `json:"stayLoggedIn"`
}
func getLoginForm(r *http.Request) (LoginForm, error) {
loginForm := LoginForm{}
switch r.Header.Get("Content-Type") {
case "application/x-www-form-urlencoded":
err := r.ParseForm()
if err != nil {
return loginForm, errors.New("failed to parse form")
}
loginForm.Username = r.PostFormValue("username")
loginForm.Password = r.PostFormValue("password")
loginForm.StayLoggedIn = r.PostFormValue("stayLoggedIn") == "true"
case "application/json":
err := server.Decode(r, &loginForm)
if err != nil {
log.Err(err).Msg("failed to decode login form")
return loginForm, errors.New("failed to decode login form")
}
default:
return loginForm, errors.New("invalid content type")
}
if loginForm.Username == "" || loginForm.Password == "" {
return loginForm, validate.NewFieldErrors(
validate.FieldError{
Field: "username",
Error: "username or password is empty",
},
validate.FieldError{
Field: "password",
Error: "username or password is empty",
},
)
}
return loginForm, nil
}

View File

@@ -1,30 +0,0 @@
package providers
import (
"net/http"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
)
type LocalProvider struct {
service *services.UserService
}
func NewLocalProvider(service *services.UserService) *LocalProvider {
return &LocalProvider{
service: service,
}
}
func (p *LocalProvider) Name() string {
return "local"
}
func (p *LocalProvider) Authenticate(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error) {
loginForm, err := getLoginForm(r)
if err != nil {
return services.UserAuthTokenDetail{}, err
}
return p.service.Login(r.Context(), loginForm.Username, loginForm.Password, loginForm.StayLoggedIn)
}

View File

@@ -3,7 +3,6 @@ package main
import ( import (
"embed" "embed"
"errors" "errors"
"fmt"
"io" "io"
"mime" "mime"
"net/http" "net/http"
@@ -11,14 +10,13 @@ import (
"path/filepath" "path/filepath"
"github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5"
"github.com/hay-kot/homebox/backend/app/api/handlers/debughandlers"
v1 "github.com/hay-kot/homebox/backend/app/api/handlers/v1"
_ "github.com/hay-kot/homebox/backend/app/api/static/docs"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/httpkit/errchain" "github.com/hay-kot/httpkit/errchain"
httpSwagger "github.com/swaggo/http-swagger/v2" // http-swagger middleware httpSwagger "github.com/swaggo/http-swagger" // http-swagger middleware
"github.com/sysadminsmedia/homebox/backend/app/api/handlers/debughandlers"
v1 "github.com/sysadminsmedia/homebox/backend/app/api/handlers/v1"
"github.com/sysadminsmedia/homebox/backend/app/api/providers"
_ "github.com/sysadminsmedia/homebox/backend/app/api/static/docs"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
) )
const prefix = "/api" const prefix = "/api"
@@ -48,124 +46,113 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
// ========================================================================= // =========================================================================
// API Version 1 // API Version 1
v1Base := v1.BaseUrlFunc(prefix)
v1Ctrl := v1.NewControllerV1( v1Ctrl := v1.NewControllerV1(
a.services, a.services,
a.repos, a.repos,
a.bus, a.bus,
v1.WithMaxUploadSize(a.conf.Web.MaxUploadSize), v1.WithMaxUploadSize(a.conf.Web.MaxUploadSize),
v1.WithRegistration(a.conf.Options.AllowRegistration), v1.WithRegistration(a.conf.Options.AllowRegistration),
v1.WithDemoStatus(a.conf.Demo), // Disable Password Change in Demo Mode v1.WithDemoStatus(a.conf.Demo), // Disable Password Change in Demo Mode
v1.WithURL(fmt.Sprintf("%s:%s", a.conf.Web.Host, a.conf.Web.Port)),
) )
r.Route(prefix+"/v1", func(r chi.Router) { r.Get(v1Base("/status"), chain.ToHandlerFunc(v1Ctrl.HandleBase(func() bool { return true }, v1.Build{
r.Get("/status", chain.ToHandlerFunc(v1Ctrl.HandleBase(func() bool { return true }, v1.Build{ Version: version,
Version: version, Commit: commit,
Commit: commit, BuildTime: buildTime,
BuildTime: buildTime, })))
})))
r.Get("/currencies", chain.ToHandlerFunc(v1Ctrl.HandleCurrency())) r.Post(v1Base("/users/register"), chain.ToHandlerFunc(v1Ctrl.HandleUserRegistration()))
r.Post(v1Base("/users/login"), chain.ToHandlerFunc(v1Ctrl.HandleAuthLogin()))
providers := []v1.AuthProvider{ userMW := []errchain.Middleware{
providers.NewLocalProvider(a.services.User), a.mwAuthToken,
} a.mwRoles(RoleModeOr, authroles.RoleUser.String()),
}
r.Post("/users/register", chain.ToHandlerFunc(v1Ctrl.HandleUserRegistration())) r.Get(v1Base("/ws/events"), chain.ToHandlerFunc(v1Ctrl.HandleCacheWS(), userMW...))
r.Post("/users/login", chain.ToHandlerFunc(v1Ctrl.HandleAuthLogin(providers...))) r.Get(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelf(), userMW...))
r.Put(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfUpdate(), userMW...))
r.Delete(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfDelete(), userMW...))
r.Post(v1Base("/users/logout"), chain.ToHandlerFunc(v1Ctrl.HandleAuthLogout(), userMW...))
r.Get(v1Base("/users/refresh"), chain.ToHandlerFunc(v1Ctrl.HandleAuthRefresh(), userMW...))
r.Put(v1Base("/users/self/change-password"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfChangePassword(), userMW...))
userMW := []errchain.Middleware{ r.Post(v1Base("/groups/invitations"), chain.ToHandlerFunc(v1Ctrl.HandleGroupInvitationsCreate(), userMW...))
a.mwAuthToken, r.Get(v1Base("/groups/statistics"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatistics(), userMW...))
a.mwRoles(RoleModeOr, authroles.RoleUser.String()), r.Get(v1Base("/groups/statistics/purchase-price"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsPriceOverTime(), userMW...))
} r.Get(v1Base("/groups/statistics/locations"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLocations(), userMW...))
r.Get(v1Base("/groups/statistics/labels"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLabels(), userMW...))
r.Get("/ws/events", chain.ToHandlerFunc(v1Ctrl.HandleCacheWS(), userMW...)) // TODO: I don't like /groups being the URL for users
r.Get("/users/self", chain.ToHandlerFunc(v1Ctrl.HandleUserSelf(), userMW...)) r.Get(v1Base("/groups"), chain.ToHandlerFunc(v1Ctrl.HandleGroupGet(), userMW...))
r.Put("/users/self", chain.ToHandlerFunc(v1Ctrl.HandleUserSelfUpdate(), userMW...)) r.Put(v1Base("/groups"), chain.ToHandlerFunc(v1Ctrl.HandleGroupUpdate(), userMW...))
r.Delete("/users/self", chain.ToHandlerFunc(v1Ctrl.HandleUserSelfDelete(), userMW...))
r.Post("/users/logout", chain.ToHandlerFunc(v1Ctrl.HandleAuthLogout(), userMW...))
r.Get("/users/refresh", chain.ToHandlerFunc(v1Ctrl.HandleAuthRefresh(), userMW...))
r.Put("/users/self/change-password", chain.ToHandlerFunc(v1Ctrl.HandleUserSelfChangePassword(), userMW...))
r.Post("/groups/invitations", chain.ToHandlerFunc(v1Ctrl.HandleGroupInvitationsCreate(), userMW...)) r.Post(v1Base("/actions/ensure-asset-ids"), chain.ToHandlerFunc(v1Ctrl.HandleEnsureAssetID(), userMW...))
r.Get("/groups/statistics", chain.ToHandlerFunc(v1Ctrl.HandleGroupStatistics(), userMW...)) r.Post(v1Base("/actions/zero-item-time-fields"), chain.ToHandlerFunc(v1Ctrl.HandleItemDateZeroOut(), userMW...))
r.Get("/groups/statistics/purchase-price", chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsPriceOverTime(), userMW...)) r.Post(v1Base("/actions/ensure-import-refs"), chain.ToHandlerFunc(v1Ctrl.HandleEnsureImportRefs(), userMW...))
r.Get("/groups/statistics/locations", chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLocations(), userMW...))
r.Get("/groups/statistics/labels", chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLabels(), userMW...))
// TODO: I don't like /groups being the URL for users r.Get(v1Base("/locations"), chain.ToHandlerFunc(v1Ctrl.HandleLocationGetAll(), userMW...))
r.Get("/groups", chain.ToHandlerFunc(v1Ctrl.HandleGroupGet(), userMW...)) r.Post(v1Base("/locations"), chain.ToHandlerFunc(v1Ctrl.HandleLocationCreate(), userMW...))
r.Put("/groups", chain.ToHandlerFunc(v1Ctrl.HandleGroupUpdate(), userMW...)) r.Get(v1Base("/locations/tree"), chain.ToHandlerFunc(v1Ctrl.HandleLocationTreeQuery(), userMW...))
r.Get(v1Base("/locations/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLocationGet(), userMW...))
r.Put(v1Base("/locations/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLocationUpdate(), userMW...))
r.Delete(v1Base("/locations/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLocationDelete(), userMW...))
r.Post("/actions/ensure-asset-ids", chain.ToHandlerFunc(v1Ctrl.HandleEnsureAssetID(), userMW...)) r.Get(v1Base("/labels"), chain.ToHandlerFunc(v1Ctrl.HandleLabelsGetAll(), userMW...))
r.Post("/actions/zero-item-time-fields", chain.ToHandlerFunc(v1Ctrl.HandleItemDateZeroOut(), userMW...)) r.Post(v1Base("/labels"), chain.ToHandlerFunc(v1Ctrl.HandleLabelsCreate(), userMW...))
r.Post("/actions/ensure-import-refs", chain.ToHandlerFunc(v1Ctrl.HandleEnsureImportRefs(), userMW...)) r.Get(v1Base("/labels/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLabelGet(), userMW...))
r.Post("/actions/set-primary-photos", chain.ToHandlerFunc(v1Ctrl.HandleSetPrimaryPhotos(), userMW...)) r.Put(v1Base("/labels/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLabelUpdate(), userMW...))
r.Delete(v1Base("/labels/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLabelDelete(), userMW...))
r.Get("/locations", chain.ToHandlerFunc(v1Ctrl.HandleLocationGetAll(), userMW...)) r.Get(v1Base("/items"), chain.ToHandlerFunc(v1Ctrl.HandleItemsGetAll(), userMW...))
r.Post("/locations", chain.ToHandlerFunc(v1Ctrl.HandleLocationCreate(), userMW...)) r.Post(v1Base("/items"), chain.ToHandlerFunc(v1Ctrl.HandleItemsCreate(), userMW...))
r.Get("/locations/tree", chain.ToHandlerFunc(v1Ctrl.HandleLocationTreeQuery(), userMW...)) r.Post(v1Base("/items/import"), chain.ToHandlerFunc(v1Ctrl.HandleItemsImport(), userMW...))
r.Get("/locations/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLocationGet(), userMW...)) r.Get(v1Base("/items/export"), chain.ToHandlerFunc(v1Ctrl.HandleItemsExport(), userMW...))
r.Put("/locations/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLocationUpdate(), userMW...)) r.Get(v1Base("/items/fields"), chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldNames(), userMW...))
r.Delete("/locations/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLocationDelete(), userMW...)) r.Get(v1Base("/items/fields/values"), chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldValues(), userMW...))
r.Get("/labels", chain.ToHandlerFunc(v1Ctrl.HandleLabelsGetAll(), userMW...)) r.Get(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemGet(), userMW...))
r.Post("/labels", chain.ToHandlerFunc(v1Ctrl.HandleLabelsCreate(), userMW...)) r.Put(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemUpdate(), userMW...))
r.Get("/labels/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLabelGet(), userMW...)) r.Patch(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemPatch(), userMW...))
r.Put("/labels/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLabelUpdate(), userMW...)) r.Delete(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemDelete(), userMW...))
r.Delete("/labels/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLabelDelete(), userMW...))
r.Get("/items", chain.ToHandlerFunc(v1Ctrl.HandleItemsGetAll(), userMW...)) r.Post(v1Base("/items/{id}/attachments"), chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentCreate(), userMW...))
r.Post("/items", chain.ToHandlerFunc(v1Ctrl.HandleItemsCreate(), userMW...)) r.Put(v1Base("/items/{id}/attachments/{attachment_id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentUpdate(), userMW...))
r.Post("/items/import", chain.ToHandlerFunc(v1Ctrl.HandleItemsImport(), userMW...)) r.Delete(v1Base("/items/{id}/attachments/{attachment_id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentDelete(), userMW...))
r.Get("/items/export", chain.ToHandlerFunc(v1Ctrl.HandleItemsExport(), userMW...))
r.Get("/items/fields", chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldNames(), userMW...))
r.Get("/items/fields/values", chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldValues(), userMW...))
r.Get("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemGet(), userMW...)) r.Get(v1Base("/items/{id}/maintenance"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceLogGet(), userMW...))
r.Get("/items/{id}/path", chain.ToHandlerFunc(v1Ctrl.HandleItemFullPath(), userMW...)) r.Post(v1Base("/items/{id}/maintenance"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryCreate(), userMW...))
r.Put("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemUpdate(), userMW...)) r.Put(v1Base("/items/{id}/maintenance/{entry_id}"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryUpdate(), userMW...))
r.Patch("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemPatch(), userMW...)) r.Delete(v1Base("/items/{id}/maintenance/{entry_id}"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryDelete(), userMW...))
r.Delete("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemDelete(), userMW...))
r.Post("/items/{id}/attachments", chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentCreate(), userMW...)) r.Get(v1Base("/assets/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleAssetGet(), userMW...))
r.Put("/items/{id}/attachments/{attachment_id}", chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentUpdate(), userMW...))
r.Delete("/items/{id}/attachments/{attachment_id}", chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentDelete(), userMW...))
r.Get("/items/{id}/maintenance", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceLogGet(), userMW...)) // Notifiers
r.Post("/items/{id}/maintenance", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryCreate(), userMW...)) r.Get(v1Base("/notifiers"), chain.ToHandlerFunc(v1Ctrl.HandleGetUserNotifiers(), userMW...))
r.Post(v1Base("/notifiers"), chain.ToHandlerFunc(v1Ctrl.HandleCreateNotifier(), userMW...))
r.Put(v1Base("/notifiers/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleUpdateNotifier(), userMW...))
r.Delete(v1Base("/notifiers/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleDeleteNotifier(), userMW...))
r.Post(v1Base("/notifiers/test"), chain.ToHandlerFunc(v1Ctrl.HandlerNotifierTest(), userMW...))
r.Get("/assets/{id}", chain.ToHandlerFunc(v1Ctrl.HandleAssetGet(), userMW...)) // Asset-Like endpoints
assetMW := []errchain.Middleware{
a.mwAuthToken,
a.mwRoles(RoleModeOr, authroles.RoleUser.String(), authroles.RoleAttachments.String()),
}
// Maintenance r.Get(
r.Get("/maintenance", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceGetAll(), userMW...)) v1Base("/qrcode"),
r.Put("/maintenance/{id}", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryUpdate(), userMW...)) chain.ToHandlerFunc(v1Ctrl.HandleGenerateQRCode(), assetMW...),
r.Delete("/maintenance/{id}", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryDelete(), userMW...)) )
r.Get(
v1Base("/items/{id}/attachments/{attachment_id}"),
chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentGet(), assetMW...),
)
// Notifiers // Reporting Services
r.Get("/notifiers", chain.ToHandlerFunc(v1Ctrl.HandleGetUserNotifiers(), userMW...)) r.Get(v1Base("/reporting/bill-of-materials"), chain.ToHandlerFunc(v1Ctrl.HandleBillOfMaterialsExport(), userMW...))
r.Post("/notifiers", chain.ToHandlerFunc(v1Ctrl.HandleCreateNotifier(), userMW...))
r.Put("/notifiers/{id}", chain.ToHandlerFunc(v1Ctrl.HandleUpdateNotifier(), userMW...))
r.Delete("/notifiers/{id}", chain.ToHandlerFunc(v1Ctrl.HandleDeleteNotifier(), userMW...))
r.Post("/notifiers/test", chain.ToHandlerFunc(v1Ctrl.HandlerNotifierTest(), userMW...))
// Asset-Like endpoints
assetMW := []errchain.Middleware{
a.mwAuthToken,
a.mwRoles(RoleModeOr, authroles.RoleUser.String(), authroles.RoleAttachments.String()),
}
r.Get("/qrcode", chain.ToHandlerFunc(v1Ctrl.HandleGenerateQRCode(), assetMW...))
r.Get(
"/items/{id}/attachments/{attachment_id}",
chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentGet(), assetMW...),
)
// Reporting Services
r.Get("/reporting/bill-of-materials", chain.ToHandlerFunc(v1Ctrl.HandleBillOfMaterialsExport(), userMW...))
r.NotFound(http.NotFound)
})
r.NotFound(chain.ToHandlerFunc(notFoundHandler())) r.NotFound(chain.ToHandlerFunc(notFoundHandler()))
} }
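The hunk above is the whole v1 route table in two styles: the "+" side wraps every path in a v1Base(...) helper, while the "-" side registers bare paths (presumably on a sub-router already mounted under the versioned prefix) and also exposes extra endpoints such as /items/{id}/path, /actions/set-primary-photos, and the standalone /maintenance routes. The core pattern is the same on both sides: chain.ToHandlerFunc adapts an error-returning handler plus a slice of middleware into a plain http.HandlerFunc. The following is a minimal, self-contained sketch of that adapter idea only; HandlerFunc, Middleware, toHandlerFunc, and requireAuth are stand-ins, not the real errchain or Homebox APIs, and the standard-library mux replaces the chi router the project appears to use (note the {id}-style path parameters above).

```go
package main

import (
	"log"
	"net/http"
)

// HandlerFunc mirrors the error-returning handler style used in the diff above.
type HandlerFunc func(w http.ResponseWriter, r *http.Request) error

// Middleware wraps one HandlerFunc in another.
type Middleware func(HandlerFunc) HandlerFunc

// toHandlerFunc plays the role of chain.ToHandlerFunc: apply the middleware,
// then adapt the result into a standard http.HandlerFunc.
func toHandlerFunc(h HandlerFunc, mw ...Middleware) http.HandlerFunc {
	for i := len(mw) - 1; i >= 0; i-- {
		h = mw[i](h)
	}
	return func(w http.ResponseWriter, r *http.Request) {
		if err := h(w, r); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
		}
	}
}

func main() {
	// requireAuth stands in for the token/role middleware (a.mwAuthToken, a.mwRoles).
	requireAuth := func(next HandlerFunc) HandlerFunc {
		return func(w http.ResponseWriter, r *http.Request) error {
			if r.Header.Get("Authorization") == "" {
				http.Error(w, "unauthorized", http.StatusUnauthorized)
				return nil
			}
			return next(w, r)
		}
	}

	// listItems stands in for a controller handler such as HandleItemsGetAll.
	listItems := func(w http.ResponseWriter, r *http.Request) error {
		_, err := w.Write([]byte(`[]`))
		return err
	}

	mux := http.NewServeMux()
	mux.HandleFunc("/api/v1/items", toHandlerFunc(listItems, requireAuth))
	log.Fatal(http.ListenAndServe(":8080", mux))
}
```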
@@ -190,7 +177,7 @@ func notFoundHandler() errchain.HandlerFunc {
	if err != nil {
		return err
	}
-	defer func() { _ = f.Close() }()
+	defer f.Close()
	stat, _ := f.Stat()
	if stat.IsDir() {
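The only change in this hunk is the file-close idiom: wrapping Close in a closure and explicitly discarding its error is a common way to keep error-checking linters (for example errcheck) quiet, whereas the bare defer f.Close() simply drops the returned error. A small self-contained illustration of the wrapped form, with a placeholder file name:

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	f, err := os.Open("example.txt") // placeholder path, not from the repository
	if err != nil {
		fmt.Println("open failed:", err)
		return
	}
	// Discarding Close's error explicitly satisfies linters such as errcheck;
	// for a file opened read-only there is usually nothing useful to do with it.
	defer func() { _ = f.Close() }()

	stat, err := f.Stat()
	if err != nil {
		fmt.Println("stat failed:", err)
		return
	}
	fmt.Println("size:", stat.Size())
}
```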


@@ -1,4 +1,5 @@
-// Package docs Code generated by swaggo/swag. DO NOT EDIT
+// Package docs GENERATED BY SWAG; DO NOT EDIT
+// This file was generated by swaggo/swag
 package docs
 import "github.com/swaggo/swag"
@@ -67,31 +68,6 @@ const docTemplate = `{
} }
} }
}, },
"/v1/actions/set-primary-photos": {
"post": {
"security": [
{
"Bearer": []
}
],
"description": "Sets the first photo of each item as the primary photo",
"produces": [
"application/json"
],
"tags": [
"Actions"
],
"summary": "Set Primary Photos",
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/v1.ActionAmountResult"
}
}
}
}
},
"/v1/actions/zero-item-time-fields": { "/v1/actions/zero-item-time-fields": {
"post": { "post": {
"security": [ "security": [
@@ -150,25 +126,6 @@ const docTemplate = `{
} }
} }
}, },
"/v1/currency": {
"get": {
"produces": [
"application/json"
],
"tags": [
"Base"
],
"summary": "Currency",
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/currencies.Currency"
}
}
}
}
},
"/v1/groups": { "/v1/groups": {
"get": { "get": {
"security": [ "security": [
@@ -429,16 +386,6 @@ const docTemplate = `{
"description": "location Ids", "description": "location Ids",
"name": "locations", "name": "locations",
"in": "query" "in": "query"
},
{
"type": "array",
"items": {
"type": "string"
},
"collectionFormat": "multi",
"description": "parent Ids",
"name": "parentIds",
"in": "query"
} }
], ],
"responses": { "responses": {
@@ -566,9 +513,6 @@ const docTemplate = `{
"Bearer": [] "Bearer": []
} }
], ],
"consumes": [
"multipart/form-data"
],
"produces": [ "produces": [
"application/json" "application/json"
], ],
@@ -740,9 +684,6 @@ const docTemplate = `{
"Bearer": [] "Bearer": []
} }
], ],
"consumes": [
"multipart/form-data"
],
"produces": [ "produces": [
"application/json" "application/json"
], ],
@@ -923,41 +864,14 @@ const docTemplate = `{
"application/json" "application/json"
], ],
"tags": [ "tags": [
"Item Maintenance" "Maintenance"
], ],
"summary": "Get Maintenance Log", "summary": "Get Maintenance Log",
"parameters": [
{
"type": "string",
"description": "Item ID",
"name": "id",
"in": "path",
"required": true
},
{
"enum": [
"scheduled",
"completed",
"both"
],
"type": "string",
"x-enum-varnames": [
"MaintenanceFilterStatusScheduled",
"MaintenanceFilterStatusCompleted",
"MaintenanceFilterStatusBoth"
],
"name": "status",
"in": "query"
}
],
"responses": { "responses": {
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"type": "array", "$ref": "#/definitions/repo.MaintenanceLog"
"items": {
"$ref": "#/definitions/repo.MaintenanceEntryWithDetails"
}
} }
} }
} }
@@ -972,17 +886,10 @@ const docTemplate = `{
"application/json" "application/json"
], ],
"tags": [ "tags": [
"Item Maintenance" "Maintenance"
], ],
"summary": "Create Maintenance Entry", "summary": "Create Maintenance Entry",
"parameters": [ "parameters": [
{
"type": "string",
"description": "Item ID",
"name": "id",
"in": "path",
"required": true
},
{ {
"description": "Entry Data", "description": "Entry Data",
"name": "payload", "name": "payload",
@@ -1003,8 +910,8 @@ const docTemplate = `{
} }
} }
}, },
"/v1/items/{id}/path": { "/v1/items/{id}/maintenance/{entry_id}": {
"get": { "put": {
"security": [ "security": [
{ {
"Bearer": [] "Bearer": []
@@ -1014,29 +921,47 @@ const docTemplate = `{
"application/json" "application/json"
], ],
"tags": [ "tags": [
"Items" "Maintenance"
], ],
"summary": "Get the full path of an item", "summary": "Update Maintenance Entry",
"parameters": [ "parameters": [
{ {
"type": "string", "description": "Entry Data",
"description": "Item ID", "name": "payload",
"name": "id", "in": "body",
"in": "path", "required": true,
"required": true "schema": {
"$ref": "#/definitions/repo.MaintenanceEntryUpdate"
}
} }
], ],
"responses": { "responses": {
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"type": "array", "$ref": "#/definitions/repo.MaintenanceEntry"
"items": {
"$ref": "#/definitions/repo.ItemPath"
}
} }
} }
} }
},
"delete": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Maintenance"
],
"summary": "Delete Maintenance Entry",
"responses": {
"204": {
"description": "No Content"
}
}
} }
}, },
"/v1/labels": { "/v1/labels": {
@@ -1395,120 +1320,6 @@ const docTemplate = `{
} }
} }
}, },
"/v1/maintenance": {
"get": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Maintenance"
],
"summary": "Query All Maintenance",
"parameters": [
{
"enum": [
"scheduled",
"completed",
"both"
],
"type": "string",
"x-enum-varnames": [
"MaintenanceFilterStatusScheduled",
"MaintenanceFilterStatusCompleted",
"MaintenanceFilterStatusBoth"
],
"name": "status",
"in": "query"
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/repo.MaintenanceEntryWithDetails"
}
}
}
}
}
},
"/v1/maintenance/{id}": {
"put": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Maintenance"
],
"summary": "Update Maintenance Entry",
"parameters": [
{
"type": "string",
"description": "Maintenance ID",
"name": "id",
"in": "path",
"required": true
},
{
"description": "Entry Data",
"name": "payload",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/repo.MaintenanceEntryUpdate"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/repo.MaintenanceEntry"
}
}
}
},
"delete": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Maintenance"
],
"summary": "Delete Maintenance Entry",
"parameters": [
{
"type": "string",
"description": "Maintenance ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"204": {
"description": "No Content"
}
}
}
},
"/v1/notifiers": { "/v1/notifiers": {
"get": { "get": {
"security": [ "security": [
@@ -1584,6 +1395,13 @@ const docTemplate = `{
], ],
"summary": "Test Notifier", "summary": "Test Notifier",
"parameters": [ "parameters": [
{
"type": "string",
"description": "Notifier ID",
"name": "id",
"in": "path",
"required": true
},
{ {
"type": "string", "type": "string",
"description": "URL", "description": "URL",
@@ -1732,7 +1550,7 @@ const docTemplate = `{
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/v1.APISummary" "$ref": "#/definitions/v1.ApiSummary"
} }
} }
} }
@@ -1781,6 +1599,20 @@ const docTemplate = `{
], ],
"summary": "User Login", "summary": "User Login",
"parameters": [ "parameters": [
{
"type": "string",
"example": "admin@admin.com",
"description": "string",
"name": "username",
"in": "formData"
},
{
"type": "string",
"example": "admin",
"description": "string",
"name": "password",
"in": "formData"
},
{ {
"description": "Login Data", "description": "Login Data",
"name": "payload", "name": "payload",
@@ -1789,12 +1621,6 @@ const docTemplate = `{
"schema": { "schema": {
"$ref": "#/definitions/v1.LoginForm" "$ref": "#/definitions/v1.LoginForm"
} }
},
{
"type": "string",
"description": "auth provider",
"name": "provider",
"in": "query"
} }
], ],
"responses": { "responses": {
@@ -1973,23 +1799,6 @@ const docTemplate = `{
} }
}, },
"definitions": { "definitions": {
"currencies.Currency": {
"type": "object",
"properties": {
"code": {
"type": "string"
},
"local": {
"type": "string"
},
"name": {
"type": "string"
},
"symbol": {
"type": "string"
}
}
},
"repo.DocumentOut": { "repo.DocumentOut": {
"type": "object", "type": "object",
"properties": { "properties": {
@@ -2070,9 +1879,6 @@ const docTemplate = `{
"id": { "id": {
"type": "string" "type": "string"
}, },
"primary": {
"type": "boolean"
},
"type": { "type": {
"type": "string" "type": "string"
}, },
@@ -2084,9 +1890,6 @@ const docTemplate = `{
"repo.ItemAttachmentUpdate": { "repo.ItemAttachmentUpdate": {
"type": "object", "type": "object",
"properties": { "properties": {
"primary": {
"type": "boolean"
},
"title": { "title": {
"type": "string" "type": "string"
}, },
@@ -2165,6 +1968,12 @@ const docTemplate = `{
"$ref": "#/definitions/repo.ItemAttachment" "$ref": "#/definitions/repo.ItemAttachment"
} }
}, },
"children": {
"type": "array",
"items": {
"$ref": "#/definitions/repo.ItemSummary"
}
},
"createdAt": { "createdAt": {
"type": "string" "type": "string"
}, },
@@ -2180,9 +1989,6 @@ const docTemplate = `{
"id": { "id": {
"type": "string" "type": "string"
}, },
"imageId": {
"type": "string"
},
"insured": { "insured": {
"type": "boolean" "type": "boolean"
}, },
@@ -2198,13 +2004,9 @@ const docTemplate = `{
}, },
"location": { "location": {
"description": "Edges", "description": "Edges",
"allOf": [
{
"$ref": "#/definitions/repo.LocationSummary"
}
],
"x-nullable": true, "x-nullable": true,
"x-omitempty": true "x-omitempty": true,
"$ref": "#/definitions/repo.LocationSummary"
}, },
"manufacturer": { "manufacturer": {
"type": "string" "type": "string"
@@ -2220,19 +2022,16 @@ const docTemplate = `{
"type": "string" "type": "string"
}, },
"parent": { "parent": {
"allOf": [
{
"$ref": "#/definitions/repo.ItemSummary"
}
],
"x-nullable": true, "x-nullable": true,
"x-omitempty": true "x-omitempty": true,
"$ref": "#/definitions/repo.ItemSummary"
}, },
"purchaseFrom": { "purchaseFrom": {
"type": "string" "type": "string"
}, },
"purchasePrice": { "purchasePrice": {
"type": "number" "type": "string",
"example": "0"
}, },
"purchaseTime": { "purchaseTime": {
"description": "Purchase", "description": "Purchase",
@@ -2248,7 +2047,8 @@ const docTemplate = `{
"type": "string" "type": "string"
}, },
"soldPrice": { "soldPrice": {
"type": "number" "type": "string",
"example": "0"
}, },
"soldTime": { "soldTime": {
"description": "Sold", "description": "Sold",
@@ -2257,9 +2057,6 @@ const docTemplate = `{
"soldTo": { "soldTo": {
"type": "string" "type": "string"
}, },
"syncChildItemsLocations": {
"type": "boolean"
},
"updatedAt": { "updatedAt": {
"type": "string" "type": "string"
}, },
@@ -2284,30 +2081,12 @@ const docTemplate = `{
} }
} }
}, },
"repo.ItemPath": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"name": {
"type": "string"
},
"type": {
"$ref": "#/definitions/repo.ItemType"
}
}
},
"repo.ItemSummary": { "repo.ItemSummary": {
"type": "object", "type": "object",
"properties": { "properties": {
"archived": { "archived": {
"type": "boolean" "type": "boolean"
}, },
"assetId": {
"type": "string",
"example": "0"
},
"createdAt": { "createdAt": {
"type": "string" "type": "string"
}, },
@@ -2317,9 +2096,6 @@ const docTemplate = `{
"id": { "id": {
"type": "string" "type": "string"
}, },
"imageId": {
"type": "string"
},
"insured": { "insured": {
"type": "boolean" "type": "boolean"
}, },
@@ -2331,19 +2107,16 @@ const docTemplate = `{
}, },
"location": { "location": {
"description": "Edges", "description": "Edges",
"allOf": [
{
"$ref": "#/definitions/repo.LocationSummary"
}
],
"x-nullable": true, "x-nullable": true,
"x-omitempty": true "x-omitempty": true,
"$ref": "#/definitions/repo.LocationSummary"
}, },
"name": { "name": {
"type": "string" "type": "string"
}, },
"purchasePrice": { "purchasePrice": {
"type": "number" "type": "string",
"example": "0"
}, },
"quantity": { "quantity": {
"type": "integer" "type": "integer"
@@ -2353,22 +2126,8 @@ const docTemplate = `{
} }
} }
}, },
"repo.ItemType": {
"type": "string",
"enum": [
"location",
"item"
],
"x-enum-varnames": [
"ItemTypeLocation",
"ItemTypeItem"
]
},
"repo.ItemUpdate": { "repo.ItemUpdate": {
"type": "object", "type": "object",
"required": [
"name"
],
"properties": { "properties": {
"archived": { "archived": {
"type": "boolean" "type": "boolean"
@@ -2377,8 +2136,7 @@ const docTemplate = `{
"type": "string" "type": "string"
}, },
"description": { "description": {
"type": "string", "type": "string"
"maxLength": 1000
}, },
"fields": { "fields": {
"type": "array", "type": "array",
@@ -2413,9 +2171,7 @@ const docTemplate = `{
"type": "string" "type": "string"
}, },
"name": { "name": {
"type": "string", "type": "string"
"maxLength": 255,
"minLength": 1
}, },
"notes": { "notes": {
"description": "Extras", "description": "Extras",
@@ -2427,13 +2183,11 @@ const docTemplate = `{
"x-omitempty": true "x-omitempty": true
}, },
"purchaseFrom": { "purchaseFrom": {
"type": "string", "type": "string"
"maxLength": 255
}, },
"purchasePrice": { "purchasePrice": {
"type": "number", "type": "string",
"x-nullable": true, "example": "0"
"x-omitempty": true
}, },
"purchaseTime": { "purchaseTime": {
"description": "Purchase", "description": "Purchase",
@@ -2450,25 +2204,21 @@ const docTemplate = `{
"type": "string" "type": "string"
}, },
"soldPrice": { "soldPrice": {
"type": "number", "type": "string",
"x-nullable": true, "example": "0"
"x-omitempty": true
}, },
"soldTime": { "soldTime": {
"description": "Sold", "description": "Sold",
"type": "string" "type": "string"
}, },
"soldTo": { "soldTo": {
"type": "string", "type": "string"
"maxLength": 255
},
"syncChildItemsLocations": {
"type": "boolean"
}, },
"warrantyDetails": { "warrantyDetails": {
"type": "string" "type": "string"
}, },
"warrantyExpires": { "warrantyExpires": {
"description": "Sold",
"type": "string" "type": "string"
} }
} }
@@ -2505,6 +2255,12 @@ const docTemplate = `{
"id": { "id": {
"type": "string" "type": "string"
}, },
"items": {
"type": "array",
"items": {
"$ref": "#/definitions/repo.ItemSummary"
}
},
"name": { "name": {
"type": "string" "type": "string"
}, },
@@ -2566,15 +2322,18 @@ const docTemplate = `{
"id": { "id": {
"type": "string" "type": "string"
}, },
"items": {
"type": "array",
"items": {
"$ref": "#/definitions/repo.ItemSummary"
}
},
"name": { "name": {
"type": "string" "type": "string"
}, },
"parent": { "parent": {
"$ref": "#/definitions/repo.LocationSummary" "$ref": "#/definitions/repo.LocationSummary"
}, },
"totalPrice": {
"type": "number"
},
"updatedAt": { "updatedAt": {
"type": "string" "type": "string"
} }
@@ -2645,6 +2404,7 @@ const docTemplate = `{
"type": "object", "type": "object",
"properties": { "properties": {
"completedDate": { "completedDate": {
"description": "Sold",
"type": "string" "type": "string"
}, },
"cost": { "cost": {
@@ -2661,6 +2421,7 @@ const docTemplate = `{
"type": "string" "type": "string"
}, },
"scheduledDate": { "scheduledDate": {
"description": "Sold",
"type": "string" "type": "string"
} }
} }
@@ -2672,6 +2433,7 @@ const docTemplate = `{
], ],
"properties": { "properties": {
"completedDate": { "completedDate": {
"description": "Sold",
"type": "string" "type": "string"
}, },
"cost": { "cost": {
@@ -2685,6 +2447,7 @@ const docTemplate = `{
"type": "string" "type": "string"
}, },
"scheduledDate": { "scheduledDate": {
"description": "Sold",
"type": "string" "type": "string"
} }
} }
@@ -2693,6 +2456,7 @@ const docTemplate = `{
"type": "object", "type": "object",
"properties": { "properties": {
"completedDate": { "completedDate": {
"description": "Sold",
"type": "string" "type": "string"
}, },
"cost": { "cost": {
@@ -2706,53 +2470,31 @@ const docTemplate = `{
"type": "string" "type": "string"
}, },
"scheduledDate": { "scheduledDate": {
"description": "Sold",
"type": "string" "type": "string"
} }
} }
}, },
"repo.MaintenanceEntryWithDetails": { "repo.MaintenanceLog": {
"type": "object", "type": "object",
"properties": { "properties": {
"completedDate": { "costAverage": {
"type": "string" "type": "number"
}, },
"cost": { "costTotal": {
"type": "string", "type": "number"
"example": "0"
}, },
"description": { "entries": {
"type": "string" "type": "array",
"items": {
"$ref": "#/definitions/repo.MaintenanceEntry"
}
}, },
"id": { "itemId": {
"type": "string"
},
"itemID": {
"type": "string"
},
"itemName": {
"type": "string"
},
"name": {
"type": "string"
},
"scheduledDate": {
"type": "string" "type": "string"
} }
} }
}, },
"repo.MaintenanceFilterStatus": {
"type": "string",
"enum": [
"scheduled",
"completed",
"both"
],
"x-enum-varnames": [
"MaintenanceFilterStatusScheduled",
"MaintenanceFilterStatusCompleted",
"MaintenanceFilterStatusBoth"
]
},
"repo.NotifierCreate": { "repo.NotifierCreate": {
"type": "object", "type": "object",
"required": [ "required": [
@@ -2794,9 +2536,6 @@ const docTemplate = `{
"updatedAt": { "updatedAt": {
"type": "string" "type": "string"
}, },
"url": {
"type": "string"
},
"userId": { "userId": {
"type": "string" "type": "string"
} }
@@ -2950,17 +2689,6 @@ const docTemplate = `{
} }
} }
}, },
"services.Latest": {
"type": "object",
"properties": {
"date": {
"type": "string"
},
"version": {
"type": "string"
}
}
},
"services.UserRegistration": { "services.UserRegistration": {
"type": "object", "type": "object",
"properties": { "properties": {
@@ -2978,7 +2706,15 @@ const docTemplate = `{
} }
} }
}, },
"v1.APISummary": { "v1.ActionAmountResult": {
"type": "object",
"properties": {
"completed": {
"type": "integer"
}
}
},
"v1.ApiSummary": {
"type": "object", "type": "object",
"properties": { "properties": {
"allowRegistration": { "allowRegistration": {
@@ -2993,9 +2729,6 @@ const docTemplate = `{
"health": { "health": {
"type": "boolean" "type": "boolean"
}, },
"latest": {
"$ref": "#/definitions/services.Latest"
},
"message": { "message": {
"type": "string" "type": "string"
}, },
@@ -3010,14 +2743,6 @@ const docTemplate = `{
} }
} }
}, },
"v1.ActionAmountResult": {
"type": "object",
"properties": {
"completed": {
"type": "integer"
}
}
},
"v1.Build": { "v1.Build": {
"type": "object", "type": "object",
"properties": { "properties": {
@@ -3085,15 +2810,13 @@ const docTemplate = `{
"type": "object", "type": "object",
"properties": { "properties": {
"password": { "password": {
"type": "string", "type": "string"
"example": "admin"
}, },
"stayLoggedIn": { "stayLoggedIn": {
"type": "boolean" "type": "boolean"
}, },
"username": { "username": {
"type": "string", "type": "string"
"example": "admin@admin.com"
} }
} }
}, },
@@ -3149,8 +2872,6 @@ var SwaggerInfo = &swag.Spec{
	Description: "Track, Manage, and Organize your Things.",
	InfoInstanceName: "swagger",
	SwaggerTemplate: docTemplate,
-	LeftDelim: "{{",
-	RightDelim: "}}",
}
func init() {
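The generated docs.go ends here with the SwaggerInfo spec and the start of its init function. For orientation, a swaggo/swag-generated file has roughly the following shape; the literal values below are placeholders rather than the exact ones in this repository, and the Register call shown is the pattern used by recent swag versions, so treat it as an assumption rather than a copy of this file.

```go
// Sketch of a swaggo/swag generated docs.go; values are illustrative placeholders.
package docs

import "github.com/swaggo/swag"

// docTemplate holds the full Swagger 2.0 document as a Go text/template string.
const docTemplate = `{"swagger": "2.0", "info": {"title": "{{.Title}}"}, "basePath": "{{.BasePath}}"}`

// SwaggerInfo carries the values substituted into docTemplate at serve time.
var SwaggerInfo = &swag.Spec{
	Version:          "1.0",
	BasePath:         "/api",
	Title:            "Homebox API",
	Description:      "Track, Manage, and Organize your Things.",
	InfoInstanceName: "swagger",
	SwaggerTemplate:  docTemplate,
}

func init() {
	// Registers the spec so the swagger UI handler can look it up by instance name.
	swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo)
}
```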


@@ -60,31 +60,6 @@
} }
} }
}, },
"/v1/actions/set-primary-photos": {
"post": {
"security": [
{
"Bearer": []
}
],
"description": "Sets the first photo of each item as the primary photo",
"produces": [
"application/json"
],
"tags": [
"Actions"
],
"summary": "Set Primary Photos",
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/v1.ActionAmountResult"
}
}
}
}
},
"/v1/actions/zero-item-time-fields": { "/v1/actions/zero-item-time-fields": {
"post": { "post": {
"security": [ "security": [
@@ -143,25 +118,6 @@
} }
} }
}, },
"/v1/currency": {
"get": {
"produces": [
"application/json"
],
"tags": [
"Base"
],
"summary": "Currency",
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/currencies.Currency"
}
}
}
}
},
"/v1/groups": { "/v1/groups": {
"get": { "get": {
"security": [ "security": [
@@ -422,16 +378,6 @@
"description": "location Ids", "description": "location Ids",
"name": "locations", "name": "locations",
"in": "query" "in": "query"
},
{
"type": "array",
"items": {
"type": "string"
},
"collectionFormat": "multi",
"description": "parent Ids",
"name": "parentIds",
"in": "query"
} }
], ],
"responses": { "responses": {
@@ -559,9 +505,6 @@
"Bearer": [] "Bearer": []
} }
], ],
"consumes": [
"multipart/form-data"
],
"produces": [ "produces": [
"application/json" "application/json"
], ],
@@ -733,9 +676,6 @@
"Bearer": [] "Bearer": []
} }
], ],
"consumes": [
"multipart/form-data"
],
"produces": [ "produces": [
"application/json" "application/json"
], ],
@@ -916,41 +856,14 @@
"application/json" "application/json"
], ],
"tags": [ "tags": [
"Item Maintenance" "Maintenance"
], ],
"summary": "Get Maintenance Log", "summary": "Get Maintenance Log",
"parameters": [
{
"type": "string",
"description": "Item ID",
"name": "id",
"in": "path",
"required": true
},
{
"enum": [
"scheduled",
"completed",
"both"
],
"type": "string",
"x-enum-varnames": [
"MaintenanceFilterStatusScheduled",
"MaintenanceFilterStatusCompleted",
"MaintenanceFilterStatusBoth"
],
"name": "status",
"in": "query"
}
],
"responses": { "responses": {
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"type": "array", "$ref": "#/definitions/repo.MaintenanceLog"
"items": {
"$ref": "#/definitions/repo.MaintenanceEntryWithDetails"
}
} }
} }
} }
@@ -965,17 +878,10 @@
"application/json" "application/json"
], ],
"tags": [ "tags": [
"Item Maintenance" "Maintenance"
], ],
"summary": "Create Maintenance Entry", "summary": "Create Maintenance Entry",
"parameters": [ "parameters": [
{
"type": "string",
"description": "Item ID",
"name": "id",
"in": "path",
"required": true
},
{ {
"description": "Entry Data", "description": "Entry Data",
"name": "payload", "name": "payload",
@@ -996,8 +902,8 @@
} }
} }
}, },
"/v1/items/{id}/path": { "/v1/items/{id}/maintenance/{entry_id}": {
"get": { "put": {
"security": [ "security": [
{ {
"Bearer": [] "Bearer": []
@@ -1007,29 +913,47 @@
"application/json" "application/json"
], ],
"tags": [ "tags": [
"Items" "Maintenance"
], ],
"summary": "Get the full path of an item", "summary": "Update Maintenance Entry",
"parameters": [ "parameters": [
{ {
"type": "string", "description": "Entry Data",
"description": "Item ID", "name": "payload",
"name": "id", "in": "body",
"in": "path", "required": true,
"required": true "schema": {
"$ref": "#/definitions/repo.MaintenanceEntryUpdate"
}
} }
], ],
"responses": { "responses": {
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"type": "array", "$ref": "#/definitions/repo.MaintenanceEntry"
"items": {
"$ref": "#/definitions/repo.ItemPath"
}
} }
} }
} }
},
"delete": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Maintenance"
],
"summary": "Delete Maintenance Entry",
"responses": {
"204": {
"description": "No Content"
}
}
} }
}, },
"/v1/labels": { "/v1/labels": {
@@ -1388,120 +1312,6 @@
} }
} }
}, },
"/v1/maintenance": {
"get": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Maintenance"
],
"summary": "Query All Maintenance",
"parameters": [
{
"enum": [
"scheduled",
"completed",
"both"
],
"type": "string",
"x-enum-varnames": [
"MaintenanceFilterStatusScheduled",
"MaintenanceFilterStatusCompleted",
"MaintenanceFilterStatusBoth"
],
"name": "status",
"in": "query"
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/repo.MaintenanceEntryWithDetails"
}
}
}
}
}
},
"/v1/maintenance/{id}": {
"put": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Maintenance"
],
"summary": "Update Maintenance Entry",
"parameters": [
{
"type": "string",
"description": "Maintenance ID",
"name": "id",
"in": "path",
"required": true
},
{
"description": "Entry Data",
"name": "payload",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/repo.MaintenanceEntryUpdate"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/repo.MaintenanceEntry"
}
}
}
},
"delete": {
"security": [
{
"Bearer": []
}
],
"produces": [
"application/json"
],
"tags": [
"Maintenance"
],
"summary": "Delete Maintenance Entry",
"parameters": [
{
"type": "string",
"description": "Maintenance ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"204": {
"description": "No Content"
}
}
}
},
"/v1/notifiers": { "/v1/notifiers": {
"get": { "get": {
"security": [ "security": [
@@ -1577,6 +1387,13 @@
], ],
"summary": "Test Notifier", "summary": "Test Notifier",
"parameters": [ "parameters": [
{
"type": "string",
"description": "Notifier ID",
"name": "id",
"in": "path",
"required": true
},
{ {
"type": "string", "type": "string",
"description": "URL", "description": "URL",
@@ -1725,7 +1542,7 @@
"200": { "200": {
"description": "OK", "description": "OK",
"schema": { "schema": {
"$ref": "#/definitions/v1.APISummary" "$ref": "#/definitions/v1.ApiSummary"
} }
} }
} }
@@ -1774,6 +1591,20 @@
], ],
"summary": "User Login", "summary": "User Login",
"parameters": [ "parameters": [
{
"type": "string",
"example": "admin@admin.com",
"description": "string",
"name": "username",
"in": "formData"
},
{
"type": "string",
"example": "admin",
"description": "string",
"name": "password",
"in": "formData"
},
{ {
"description": "Login Data", "description": "Login Data",
"name": "payload", "name": "payload",
@@ -1782,12 +1613,6 @@
"schema": { "schema": {
"$ref": "#/definitions/v1.LoginForm" "$ref": "#/definitions/v1.LoginForm"
} }
},
{
"type": "string",
"description": "auth provider",
"name": "provider",
"in": "query"
} }
], ],
"responses": { "responses": {
@@ -1966,23 +1791,6 @@
} }
}, },
"definitions": { "definitions": {
"currencies.Currency": {
"type": "object",
"properties": {
"code": {
"type": "string"
},
"local": {
"type": "string"
},
"name": {
"type": "string"
},
"symbol": {
"type": "string"
}
}
},
"repo.DocumentOut": { "repo.DocumentOut": {
"type": "object", "type": "object",
"properties": { "properties": {
@@ -2063,9 +1871,6 @@
"id": { "id": {
"type": "string" "type": "string"
}, },
"primary": {
"type": "boolean"
},
"type": { "type": {
"type": "string" "type": "string"
}, },
@@ -2077,9 +1882,6 @@
"repo.ItemAttachmentUpdate": { "repo.ItemAttachmentUpdate": {
"type": "object", "type": "object",
"properties": { "properties": {
"primary": {
"type": "boolean"
},
"title": { "title": {
"type": "string" "type": "string"
}, },
@@ -2158,6 +1960,12 @@
"$ref": "#/definitions/repo.ItemAttachment" "$ref": "#/definitions/repo.ItemAttachment"
} }
}, },
"children": {
"type": "array",
"items": {
"$ref": "#/definitions/repo.ItemSummary"
}
},
"createdAt": { "createdAt": {
"type": "string" "type": "string"
}, },
@@ -2173,9 +1981,6 @@
"id": { "id": {
"type": "string" "type": "string"
}, },
"imageId": {
"type": "string"
},
"insured": { "insured": {
"type": "boolean" "type": "boolean"
}, },
@@ -2191,13 +1996,9 @@
}, },
"location": { "location": {
"description": "Edges", "description": "Edges",
"allOf": [
{
"$ref": "#/definitions/repo.LocationSummary"
}
],
"x-nullable": true, "x-nullable": true,
"x-omitempty": true "x-omitempty": true,
"$ref": "#/definitions/repo.LocationSummary"
}, },
"manufacturer": { "manufacturer": {
"type": "string" "type": "string"
@@ -2213,19 +2014,16 @@
"type": "string" "type": "string"
}, },
"parent": { "parent": {
"allOf": [
{
"$ref": "#/definitions/repo.ItemSummary"
}
],
"x-nullable": true, "x-nullable": true,
"x-omitempty": true "x-omitempty": true,
"$ref": "#/definitions/repo.ItemSummary"
}, },
"purchaseFrom": { "purchaseFrom": {
"type": "string" "type": "string"
}, },
"purchasePrice": { "purchasePrice": {
"type": "number" "type": "string",
"example": "0"
}, },
"purchaseTime": { "purchaseTime": {
"description": "Purchase", "description": "Purchase",
@@ -2241,7 +2039,8 @@
"type": "string" "type": "string"
}, },
"soldPrice": { "soldPrice": {
"type": "number" "type": "string",
"example": "0"
}, },
"soldTime": { "soldTime": {
"description": "Sold", "description": "Sold",
@@ -2250,9 +2049,6 @@
"soldTo": { "soldTo": {
"type": "string" "type": "string"
}, },
"syncChildItemsLocations": {
"type": "boolean"
},
"updatedAt": { "updatedAt": {
"type": "string" "type": "string"
}, },
@@ -2277,30 +2073,12 @@
} }
} }
}, },
"repo.ItemPath": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"name": {
"type": "string"
},
"type": {
"$ref": "#/definitions/repo.ItemType"
}
}
},
"repo.ItemSummary": { "repo.ItemSummary": {
"type": "object", "type": "object",
"properties": { "properties": {
"archived": { "archived": {
"type": "boolean" "type": "boolean"
}, },
"assetId": {
"type": "string",
"example": "0"
},
"createdAt": { "createdAt": {
"type": "string" "type": "string"
}, },
@@ -2310,9 +2088,6 @@
"id": { "id": {
"type": "string" "type": "string"
}, },
"imageId": {
"type": "string"
},
"insured": { "insured": {
"type": "boolean" "type": "boolean"
}, },
@@ -2324,19 +2099,16 @@
}, },
"location": { "location": {
"description": "Edges", "description": "Edges",
"allOf": [
{
"$ref": "#/definitions/repo.LocationSummary"
}
],
"x-nullable": true, "x-nullable": true,
"x-omitempty": true "x-omitempty": true,
"$ref": "#/definitions/repo.LocationSummary"
}, },
"name": { "name": {
"type": "string" "type": "string"
}, },
"purchasePrice": { "purchasePrice": {
"type": "number" "type": "string",
"example": "0"
}, },
"quantity": { "quantity": {
"type": "integer" "type": "integer"
@@ -2346,22 +2118,8 @@
} }
} }
}, },
"repo.ItemType": {
"type": "string",
"enum": [
"location",
"item"
],
"x-enum-varnames": [
"ItemTypeLocation",
"ItemTypeItem"
]
},
"repo.ItemUpdate": { "repo.ItemUpdate": {
"type": "object", "type": "object",
"required": [
"name"
],
"properties": { "properties": {
"archived": { "archived": {
"type": "boolean" "type": "boolean"
@@ -2370,8 +2128,7 @@
"type": "string" "type": "string"
}, },
"description": { "description": {
"type": "string", "type": "string"
"maxLength": 1000
}, },
"fields": { "fields": {
"type": "array", "type": "array",
@@ -2406,9 +2163,7 @@
"type": "string" "type": "string"
}, },
"name": { "name": {
"type": "string", "type": "string"
"maxLength": 255,
"minLength": 1
}, },
"notes": { "notes": {
"description": "Extras", "description": "Extras",
@@ -2420,13 +2175,11 @@
"x-omitempty": true "x-omitempty": true
}, },
"purchaseFrom": { "purchaseFrom": {
"type": "string", "type": "string"
"maxLength": 255
}, },
"purchasePrice": { "purchasePrice": {
"type": "number", "type": "string",
"x-nullable": true, "example": "0"
"x-omitempty": true
}, },
"purchaseTime": { "purchaseTime": {
"description": "Purchase", "description": "Purchase",
@@ -2443,25 +2196,21 @@
"type": "string" "type": "string"
}, },
"soldPrice": { "soldPrice": {
"type": "number", "type": "string",
"x-nullable": true, "example": "0"
"x-omitempty": true
}, },
"soldTime": { "soldTime": {
"description": "Sold", "description": "Sold",
"type": "string" "type": "string"
}, },
"soldTo": { "soldTo": {
"type": "string", "type": "string"
"maxLength": 255
},
"syncChildItemsLocations": {
"type": "boolean"
}, },
"warrantyDetails": { "warrantyDetails": {
"type": "string" "type": "string"
}, },
"warrantyExpires": { "warrantyExpires": {
"description": "Sold",
"type": "string" "type": "string"
} }
} }
@@ -2498,6 +2247,12 @@
"id": { "id": {
"type": "string" "type": "string"
}, },
"items": {
"type": "array",
"items": {
"$ref": "#/definitions/repo.ItemSummary"
}
},
"name": { "name": {
"type": "string" "type": "string"
}, },
@@ -2559,15 +2314,18 @@
"id": { "id": {
"type": "string" "type": "string"
}, },
"items": {
"type": "array",
"items": {
"$ref": "#/definitions/repo.ItemSummary"
}
},
"name": { "name": {
"type": "string" "type": "string"
}, },
"parent": { "parent": {
"$ref": "#/definitions/repo.LocationSummary" "$ref": "#/definitions/repo.LocationSummary"
}, },
"totalPrice": {
"type": "number"
},
"updatedAt": { "updatedAt": {
"type": "string" "type": "string"
} }
@@ -2638,6 +2396,7 @@
"type": "object", "type": "object",
"properties": { "properties": {
"completedDate": { "completedDate": {
"description": "Sold",
"type": "string" "type": "string"
}, },
"cost": { "cost": {
@@ -2654,6 +2413,7 @@
"type": "string" "type": "string"
}, },
"scheduledDate": { "scheduledDate": {
"description": "Sold",
"type": "string" "type": "string"
} }
} }
@@ -2665,6 +2425,7 @@
], ],
"properties": { "properties": {
"completedDate": { "completedDate": {
"description": "Sold",
"type": "string" "type": "string"
}, },
"cost": { "cost": {
@@ -2678,6 +2439,7 @@
"type": "string" "type": "string"
}, },
"scheduledDate": { "scheduledDate": {
"description": "Sold",
"type": "string" "type": "string"
} }
} }
@@ -2686,6 +2448,7 @@
"type": "object", "type": "object",
"properties": { "properties": {
"completedDate": { "completedDate": {
"description": "Sold",
"type": "string" "type": "string"
}, },
"cost": { "cost": {
@@ -2699,53 +2462,31 @@
"type": "string" "type": "string"
}, },
"scheduledDate": { "scheduledDate": {
"description": "Sold",
"type": "string" "type": "string"
} }
} }
}, },
"repo.MaintenanceEntryWithDetails": { "repo.MaintenanceLog": {
"type": "object", "type": "object",
"properties": { "properties": {
"completedDate": { "costAverage": {
"type": "string" "type": "number"
}, },
"cost": { "costTotal": {
"type": "string", "type": "number"
"example": "0"
}, },
"description": { "entries": {
"type": "string" "type": "array",
"items": {
"$ref": "#/definitions/repo.MaintenanceEntry"
}
}, },
"id": { "itemId": {
"type": "string"
},
"itemID": {
"type": "string"
},
"itemName": {
"type": "string"
},
"name": {
"type": "string"
},
"scheduledDate": {
"type": "string" "type": "string"
} }
} }
}, },
"repo.MaintenanceFilterStatus": {
"type": "string",
"enum": [
"scheduled",
"completed",
"both"
],
"x-enum-varnames": [
"MaintenanceFilterStatusScheduled",
"MaintenanceFilterStatusCompleted",
"MaintenanceFilterStatusBoth"
]
},
"repo.NotifierCreate": { "repo.NotifierCreate": {
"type": "object", "type": "object",
"required": [ "required": [
@@ -2787,9 +2528,6 @@
"updatedAt": { "updatedAt": {
"type": "string" "type": "string"
}, },
"url": {
"type": "string"
},
"userId": { "userId": {
"type": "string" "type": "string"
} }
@@ -2943,17 +2681,6 @@
} }
} }
}, },
"services.Latest": {
"type": "object",
"properties": {
"date": {
"type": "string"
},
"version": {
"type": "string"
}
}
},
"services.UserRegistration": { "services.UserRegistration": {
"type": "object", "type": "object",
"properties": { "properties": {
@@ -2971,7 +2698,15 @@
} }
} }
}, },
"v1.APISummary": { "v1.ActionAmountResult": {
"type": "object",
"properties": {
"completed": {
"type": "integer"
}
}
},
"v1.ApiSummary": {
"type": "object", "type": "object",
"properties": { "properties": {
"allowRegistration": { "allowRegistration": {
@@ -2986,9 +2721,6 @@
"health": { "health": {
"type": "boolean" "type": "boolean"
}, },
"latest": {
"$ref": "#/definitions/services.Latest"
},
"message": { "message": {
"type": "string" "type": "string"
}, },
@@ -3003,14 +2735,6 @@
} }
} }
}, },
"v1.ActionAmountResult": {
"type": "object",
"properties": {
"completed": {
"type": "integer"
}
}
},
"v1.Build": { "v1.Build": {
"type": "object", "type": "object",
"properties": { "properties": {
@@ -3078,15 +2802,13 @@
"type": "object", "type": "object",
"properties": { "properties": {
"password": { "password": {
"type": "string", "type": "string"
"example": "admin"
}, },
"stayLoggedIn": { "stayLoggedIn": {
"type": "boolean" "type": "boolean"
}, },
"username": { "username": {
"type": "string", "type": "string"
"example": "admin@admin.com"
} }
} }
}, },
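A change that repeats throughout the generated Swagger documents above is the wire type of purchasePrice and soldPrice: one side declares them as JSON strings with an example of "0", the other as plain numbers. The hypothetical client-side structs below illustrate why the distinction matters to API consumers; they are stand-ins written for this note, not the project's own repo.ItemOut or repo.ItemSummary types.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Matches the side of the diff where purchasePrice is a JSON number.
type itemNumericPrice struct {
	Name          string  `json:"name"`
	PurchasePrice float64 `json:"purchasePrice"`
}

// Matches the side where purchasePrice is a JSON string (example "0").
type itemStringPrice struct {
	Name          string `json:"name"`
	PurchasePrice string `json:"purchasePrice"`
}

func main() {
	var a itemNumericPrice
	var b itemStringPrice
	// Same field, two wire formats: a bare number versus a quoted string.
	// A client built against one schema cannot decode payloads from the other
	// without a custom unmarshaler.
	_ = json.Unmarshal([]byte(`{"name":"Drill","purchasePrice":19.99}`), &a)
	_ = json.Unmarshal([]byte(`{"name":"Drill","purchasePrice":"19.99"}`), &b)
	fmt.Println(a.PurchasePrice, b.PurchasePrice)
}
```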


@@ -1,16 +1,5 @@
basePath: /api basePath: /api
definitions: definitions:
currencies.Currency:
properties:
code:
type: string
local:
type: string
name:
type: string
symbol:
type: string
type: object
repo.DocumentOut: repo.DocumentOut:
properties: properties:
id: id:
@@ -63,8 +52,6 @@ definitions:
$ref: '#/definitions/repo.DocumentOut' $ref: '#/definitions/repo.DocumentOut'
id: id:
type: string type: string
primary:
type: boolean
type: type:
type: string type: string
updatedAt: updatedAt:
@@ -72,8 +59,6 @@ definitions:
type: object type: object
repo.ItemAttachmentUpdate: repo.ItemAttachmentUpdate:
properties: properties:
primary:
type: boolean
title: title:
type: string type: string
type: type:
@@ -127,6 +112,10 @@ definitions:
items: items:
$ref: '#/definitions/repo.ItemAttachment' $ref: '#/definitions/repo.ItemAttachment'
type: array type: array
children:
items:
$ref: '#/definitions/repo.ItemSummary'
type: array
createdAt: createdAt:
type: string type: string
description: description:
@@ -137,8 +126,6 @@ definitions:
type: array type: array
id: id:
type: string type: string
imageId:
type: string
insured: insured:
type: boolean type: boolean
labels: labels:
@@ -149,8 +136,7 @@ definitions:
description: Warranty description: Warranty
type: boolean type: boolean
location: location:
allOf: $ref: '#/definitions/repo.LocationSummary'
- $ref: '#/definitions/repo.LocationSummary'
description: Edges description: Edges
x-nullable: true x-nullable: true
x-omitempty: true x-omitempty: true
@@ -164,14 +150,14 @@ definitions:
description: Extras description: Extras
type: string type: string
parent: parent:
allOf: $ref: '#/definitions/repo.ItemSummary'
- $ref: '#/definitions/repo.ItemSummary'
x-nullable: true x-nullable: true
x-omitempty: true x-omitempty: true
purchaseFrom: purchaseFrom:
type: string type: string
purchasePrice: purchasePrice:
type: number example: "0"
type: string
purchaseTime: purchaseTime:
description: Purchase description: Purchase
type: string type: string
@@ -182,14 +168,13 @@ definitions:
soldNotes: soldNotes:
type: string type: string
soldPrice: soldPrice:
type: number example: "0"
type: string
soldTime: soldTime:
description: Sold description: Sold
type: string type: string
soldTo: soldTo:
type: string type: string
syncChildItemsLocations:
type: boolean
updatedAt: updatedAt:
type: string type: string
warrantyDetails: warrantyDetails:
@@ -206,30 +191,16 @@ definitions:
x-nullable: true x-nullable: true
x-omitempty: true x-omitempty: true
type: object type: object
repo.ItemPath:
properties:
id:
type: string
name:
type: string
type:
$ref: '#/definitions/repo.ItemType'
type: object
repo.ItemSummary: repo.ItemSummary:
properties: properties:
archived: archived:
type: boolean type: boolean
assetId:
example: "0"
type: string
createdAt: createdAt:
type: string type: string
description: description:
type: string type: string
id: id:
type: string type: string
imageId:
type: string
insured: insured:
type: boolean type: boolean
labels: labels:
@@ -237,28 +208,20 @@ definitions:
$ref: '#/definitions/repo.LabelSummary' $ref: '#/definitions/repo.LabelSummary'
type: array type: array
location: location:
allOf: $ref: '#/definitions/repo.LocationSummary'
- $ref: '#/definitions/repo.LocationSummary'
description: Edges description: Edges
x-nullable: true x-nullable: true
x-omitempty: true x-omitempty: true
name: name:
type: string type: string
purchasePrice: purchasePrice:
type: number example: "0"
type: string
quantity: quantity:
type: integer type: integer
updatedAt: updatedAt:
type: string type: string
type: object type: object
repo.ItemType:
enum:
- location
- item
type: string
x-enum-varnames:
- ItemTypeLocation
- ItemTypeItem
repo.ItemUpdate: repo.ItemUpdate:
properties: properties:
archived: archived:
@@ -266,7 +229,6 @@ definitions:
assetId: assetId:
type: string type: string
description: description:
maxLength: 1000
type: string type: string
fields: fields:
items: items:
@@ -291,8 +253,6 @@ definitions:
modelNumber: modelNumber:
type: string type: string
name: name:
maxLength: 255
minLength: 1
type: string type: string
notes: notes:
description: Extras description: Extras
@@ -302,12 +262,10 @@ definitions:
x-nullable: true x-nullable: true
x-omitempty: true x-omitempty: true
purchaseFrom: purchaseFrom:
maxLength: 255
type: string type: string
purchasePrice: purchasePrice:
type: number example: "0"
x-nullable: true type: string
x-omitempty: true
purchaseTime: purchaseTime:
description: Purchase description: Purchase
type: string type: string
@@ -319,23 +277,18 @@ definitions:
soldNotes: soldNotes:
type: string type: string
soldPrice: soldPrice:
type: number example: "0"
x-nullable: true type: string
x-omitempty: true
soldTime: soldTime:
description: Sold description: Sold
type: string type: string
soldTo: soldTo:
maxLength: 255
type: string type: string
syncChildItemsLocations:
type: boolean
warrantyDetails: warrantyDetails:
type: string type: string
warrantyExpires: warrantyExpires:
description: Sold
type: string type: string
required:
- name
type: object type: object
repo.LabelCreate: repo.LabelCreate:
properties: properties:
@@ -359,6 +312,10 @@ definitions:
type: string type: string
id: id:
type: string type: string
items:
items:
$ref: '#/definitions/repo.ItemSummary'
type: array
name: name:
type: string type: string
updatedAt: updatedAt:
@@ -399,12 +356,14 @@ definitions:
type: string type: string
id: id:
type: string type: string
items:
items:
$ref: '#/definitions/repo.ItemSummary'
type: array
name: name:
type: string type: string
parent: parent:
$ref: '#/definitions/repo.LocationSummary' $ref: '#/definitions/repo.LocationSummary'
totalPrice:
type: number
updatedAt: updatedAt:
type: string type: string
type: object type: object
@@ -451,6 +410,7 @@ definitions:
repo.MaintenanceEntry: repo.MaintenanceEntry:
properties: properties:
completedDate: completedDate:
description: Sold
type: string type: string
cost: cost:
example: "0" example: "0"
@@ -462,11 +422,13 @@ definitions:
name: name:
type: string type: string
scheduledDate: scheduledDate:
description: Sold
type: string type: string
type: object type: object
repo.MaintenanceEntryCreate: repo.MaintenanceEntryCreate:
properties: properties:
completedDate: completedDate:
description: Sold
type: string type: string
cost: cost:
example: "0" example: "0"
@@ -476,6 +438,7 @@ definitions:
name: name:
type: string type: string
scheduledDate: scheduledDate:
description: Sold
type: string type: string
required: required:
- name - name
@@ -483,6 +446,7 @@ definitions:
repo.MaintenanceEntryUpdate: repo.MaintenanceEntryUpdate:
properties: properties:
completedDate: completedDate:
description: Sold
type: string type: string
cost: cost:
example: "0" example: "0"
@@ -492,38 +456,22 @@ definitions:
name: name:
type: string type: string
scheduledDate: scheduledDate:
description: Sold
type: string type: string
type: object type: object
repo.MaintenanceEntryWithDetails: repo.MaintenanceLog:
properties: properties:
completedDate: costAverage:
type: string type: number
cost: costTotal:
example: "0" type: number
type: string entries:
description: items:
type: string $ref: '#/definitions/repo.MaintenanceEntry'
id: type: array
type: string itemId:
itemID:
type: string
itemName:
type: string
name:
type: string
scheduledDate:
type: string type: string
type: object type: object
repo.MaintenanceFilterStatus:
enum:
- scheduled
- completed
- both
type: string
x-enum-varnames:
- MaintenanceFilterStatusScheduled
- MaintenanceFilterStatusCompleted
- MaintenanceFilterStatusBoth
repo.NotifierCreate: repo.NotifierCreate:
properties: properties:
isActive: isActive:
@@ -552,8 +500,6 @@ definitions:
type: string type: string
updatedAt: updatedAt:
type: string type: string
url:
type: string
userId: userId:
type: string type: string
type: object type: object
@@ -654,13 +600,6 @@ definitions:
value: value:
type: number type: number
type: object type: object
services.Latest:
properties:
date:
type: string
version:
type: string
type: object
services.UserRegistration: services.UserRegistration:
properties: properties:
email: email:
@@ -672,7 +611,12 @@ definitions:
token: token:
type: string type: string
type: object type: object
v1.APISummary: v1.ActionAmountResult:
properties:
completed:
type: integer
type: object
v1.ApiSummary:
properties: properties:
allowRegistration: allowRegistration:
type: boolean type: boolean
@@ -682,8 +626,6 @@ definitions:
type: boolean type: boolean
health: health:
type: boolean type: boolean
latest:
$ref: '#/definitions/services.Latest'
message: message:
type: string type: string
title: title:
@@ -693,11 +635,6 @@ definitions:
type: string type: string
type: array type: array
type: object type: object
v1.ActionAmountResult:
properties:
completed:
type: integer
type: object
v1.Build: v1.Build:
properties: properties:
buildTime: buildTime:
@@ -742,12 +679,10 @@ definitions:
v1.LoginForm: v1.LoginForm:
properties: properties:
password: password:
example: admin
type: string type: string
stayLoggedIn: stayLoggedIn:
type: boolean type: boolean
username: username:
example: admin@admin.com
type: string type: string
type: object type: object
v1.TokenResponse: v1.TokenResponse:
@@ -807,21 +742,6 @@ paths:
summary: Ensures Import Refs summary: Ensures Import Refs
tags: tags:
- Actions - Actions
/v1/actions/set-primary-photos:
post:
description: Sets the first photo of each item as the primary photo
produces:
- application/json
responses:
"200":
description: OK
schema:
$ref: '#/definitions/v1.ActionAmountResult'
security:
- Bearer: []
summary: Set Primary Photos
tags:
- Actions
/v1/actions/zero-item-time-fields: /v1/actions/zero-item-time-fields:
post: post:
description: Resets all item date fields to the beginning of the day description: Resets all item date fields to the beginning of the day
@@ -857,18 +777,6 @@ paths:
summary: Get Item by Asset ID summary: Get Item by Asset ID
tags: tags:
- Items - Items
/v1/currency:
get:
produces:
- application/json
responses:
"200":
description: OK
schema:
$ref: '#/definitions/currencies.Currency'
summary: Currency
tags:
- Base
/v1/groups: /v1/groups:
get: get:
produces: produces:
@@ -1022,13 +930,6 @@ paths:
type: string type: string
name: locations name: locations
type: array type: array
- collectionFormat: multi
description: parent Ids
in: query
items:
type: string
name: parentIds
type: array
produces: produces:
- application/json - application/json
responses: responses:
@@ -1150,8 +1051,6 @@ paths:
- Items - Items
/v1/items/{id}/attachments: /v1/items/{id}/attachments:
post: post:
consumes:
- multipart/form-data
parameters: parameters:
- description: Item ID - description: Item ID
in: path in: path
@@ -1264,44 +1163,20 @@ paths:
- Items Attachments - Items Attachments
/v1/items/{id}/maintenance: /v1/items/{id}/maintenance:
get: get:
parameters:
- description: Item ID
in: path
name: id
required: true
type: string
- enum:
- scheduled
- completed
- both
in: query
name: status
type: string
x-enum-varnames:
- MaintenanceFilterStatusScheduled
- MaintenanceFilterStatusCompleted
- MaintenanceFilterStatusBoth
produces: produces:
- application/json - application/json
responses: responses:
"200": "200":
description: OK description: OK
schema: schema:
items: $ref: '#/definitions/repo.MaintenanceLog'
$ref: '#/definitions/repo.MaintenanceEntryWithDetails'
type: array
security: security:
- Bearer: [] - Bearer: []
summary: Get Maintenance Log summary: Get Maintenance Log
tags: tags:
- Item Maintenance - Maintenance
post: post:
parameters: parameters:
- description: Item ID
in: path
name: id
required: true
type: string
- description: Entry Data - description: Entry Data
in: body in: body
name: payload name: payload
@@ -1319,29 +1194,39 @@ paths:
- Bearer: [] - Bearer: []
summary: Create Maintenance Entry summary: Create Maintenance Entry
tags: tags:
- Item Maintenance - Maintenance
/v1/items/{id}/path: /v1/items/{id}/maintenance/{entry_id}:
get: delete:
produces:
- application/json
responses:
"204":
description: No Content
security:
- Bearer: []
summary: Delete Maintenance Entry
tags:
- Maintenance
put:
parameters: parameters:
- description: Item ID - description: Entry Data
in: path in: body
name: id name: payload
required: true required: true
type: string schema:
$ref: '#/definitions/repo.MaintenanceEntryUpdate'
produces: produces:
- application/json - application/json
responses: responses:
"200": "200":
description: OK description: OK
schema: schema:
items: $ref: '#/definitions/repo.MaintenanceEntry'
$ref: '#/definitions/repo.ItemPath'
type: array
security: security:
- Bearer: [] - Bearer: []
summary: Get the full path of an item summary: Update Maintenance Entry
tags: tags:
- Items - Maintenance
/v1/items/export: /v1/items/export:
get: get:
responses: responses:
@@ -1388,8 +1273,6 @@ paths:
- Items - Items
/v1/items/import: /v1/items/import:
post: post:
consumes:
- multipart/form-data
parameters: parameters:
- description: Image to upload - description: Image to upload
in: formData in: formData
@@ -1622,77 +1505,6 @@ paths:
summary: Get Locations Tree summary: Get Locations Tree
tags: tags:
- Locations - Locations
/v1/maintenance:
get:
parameters:
- enum:
- scheduled
- completed
- both
in: query
name: status
type: string
x-enum-varnames:
- MaintenanceFilterStatusScheduled
- MaintenanceFilterStatusCompleted
- MaintenanceFilterStatusBoth
produces:
- application/json
responses:
"200":
description: OK
schema:
items:
$ref: '#/definitions/repo.MaintenanceEntryWithDetails'
type: array
security:
- Bearer: []
summary: Query All Maintenance
tags:
- Maintenance
/v1/maintenance/{id}:
delete:
parameters:
- description: Maintenance ID
in: path
name: id
required: true
type: string
produces:
- application/json
responses:
"204":
description: No Content
security:
- Bearer: []
summary: Delete Maintenance Entry
tags:
- Maintenance
put:
parameters:
- description: Maintenance ID
in: path
name: id
required: true
type: string
- description: Entry Data
in: body
name: payload
required: true
schema:
$ref: '#/definitions/repo.MaintenanceEntryUpdate'
produces:
- application/json
responses:
"200":
description: OK
schema:
$ref: '#/definitions/repo.MaintenanceEntry'
security:
- Bearer: []
summary: Update Maintenance Entry
tags:
- Maintenance
/v1/notifiers: /v1/notifiers:
get: get:
produces: produces:
@@ -1771,6 +1583,11 @@ paths:
/v1/notifiers/test: /v1/notifiers/test:
post: post:
parameters: parameters:
- description: Notifier ID
in: path
name: id
required: true
type: string
- description: URL - description: URL
in: query in: query
name: url name: url
@@ -1827,7 +1644,7 @@ paths:
"200": "200":
description: OK description: OK
schema: schema:
$ref: '#/definitions/v1.APISummary' $ref: '#/definitions/v1.ApiSummary'
summary: Application Info summary: Application Info
tags: tags:
- Base - Base
@@ -1854,16 +1671,22 @@ paths:
- application/x-www-form-urlencoded - application/x-www-form-urlencoded
- application/json - application/json
parameters: parameters:
- description: string
example: admin@admin.com
in: formData
name: username
type: string
- description: string
example: admin
in: formData
name: password
type: string
- description: Login Data - description: Login Data
in: body in: body
name: payload name: payload
required: true required: true
schema: schema:
$ref: '#/definitions/v1.LoginForm' $ref: '#/definitions/v1.LoginForm'
- description: auth provider
in: query
name: provider
type: string
produces: produces:
- application/json - application/json
responses: responses:


@@ -6,7 +6,7 @@ import (
"log" "log"
"os" "os"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/migrate" "github.com/hay-kot/homebox/backend/internal/data/ent/migrate"
atlas "ariga.io/atlas/sql/migrate" atlas "ariga.io/atlas/sql/migrate"
_ "ariga.io/atlas/sql/sqlite" _ "ariga.io/atlas/sql/sqlite"
@@ -36,7 +36,7 @@ func main() {
} }
// Generate migrations using Atlas support for MySQL (note the Ent dialect option passed above). // Generate migrations using Atlas support for MySQL (note the Ent dialect option passed above).
err = migrate.NamedDiff(ctx, "sqlite://.data/homebox.migration.db?_fk=1&_time_format=sqlite", os.Args[1], opts...) err = migrate.NamedDiff(ctx, "sqlite://.data/homebox.migration.db?_fk=1", os.Args[1], opts...)
if err != nil { if err != nil {
log.Fatalf("failed generating migration file: %v", err) log.Fatalf("failed generating migration file: %v", err)
} }


@@ -1,79 +1,77 @@
module github.com/sysadminsmedia/homebox/backend module github.com/hay-kot/homebox/backend
go 1.23.0 go 1.20
require ( require (
ariga.io/atlas v0.29.1 ariga.io/atlas v0.12.0
entgo.io/ent v0.14.1 entgo.io/ent v0.12.3
github.com/ardanlabs/conf/v3 v3.2.0 github.com/ardanlabs/conf/v3 v3.1.6
github.com/containrrr/shoutrrr v0.8.0 github.com/containrrr/shoutrrr v0.7.1
github.com/go-chi/chi/v5 v5.2.0 github.com/go-chi/chi/v5 v5.0.10
github.com/go-playground/validator/v10 v10.23.0 github.com/go-playground/validator/v10 v10.14.1
github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1 github.com/gocarina/gocsv v0.0.0-20230616125104-99d496ca653d
github.com/google/uuid v1.6.0 github.com/google/uuid v1.3.0
github.com/gorilla/schema v1.4.1 github.com/gorilla/schema v1.2.0
github.com/hay-kot/httpkit v0.0.11 github.com/hay-kot/httpkit v0.0.3
github.com/mattn/go-sqlite3 v1.14.24 github.com/mattn/go-sqlite3 v1.14.17
github.com/olahol/melody v1.2.1 github.com/olahol/melody v1.1.4
github.com/pkg/errors v0.9.1 github.com/pkg/errors v0.9.1
github.com/rs/zerolog v1.33.0 github.com/rs/zerolog v1.29.1
github.com/stretchr/testify v1.10.0 github.com/stretchr/testify v1.8.4
github.com/swaggo/http-swagger/v2 v2.0.2 github.com/swaggo/http-swagger v1.3.4
github.com/swaggo/swag v1.16.4 github.com/swaggo/swag v1.16.1
github.com/yeqown/go-qrcode/v2 v2.2.4 github.com/yeqown/go-qrcode/v2 v2.2.2
github.com/yeqown/go-qrcode/writer/standard v1.2.4 github.com/yeqown/go-qrcode/writer/standard v1.2.1
golang.org/x/crypto v0.31.0 golang.org/x/crypto v0.11.0
modernc.org/sqlite v1.34.4 modernc.org/sqlite v1.24.0
) )
require ( require (
github.com/KyleBanks/depth v1.2.1 // indirect github.com/KyleBanks/depth v1.2.1 // indirect
github.com/agext/levenshtein v1.2.3 // indirect github.com/agext/levenshtein v1.2.3 // indirect
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
github.com/bmatcuk/doublestar v1.3.4 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect github.com/dustin/go-humanize v1.0.1 // indirect
github.com/fatih/color v1.18.0 // indirect github.com/fatih/color v1.15.0 // indirect
github.com/fogleman/gg v1.3.0 // indirect github.com/fogleman/gg v1.3.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.8 // indirect github.com/gabriel-vasile/mimetype v1.4.2 // indirect
github.com/go-openapi/inflect v0.21.0 // indirect github.com/go-openapi/inflect v0.19.0 // indirect
github.com/go-openapi/jsonpointer v0.21.0 // indirect github.com/go-openapi/jsonpointer v0.20.0 // indirect
github.com/go-openapi/jsonreference v0.21.0 // indirect github.com/go-openapi/jsonreference v0.20.2 // indirect
github.com/go-openapi/spec v0.21.0 // indirect github.com/go-openapi/spec v0.20.9 // indirect
github.com/go-openapi/swag v0.23.0 // indirect github.com/go-openapi/swag v0.22.4 // indirect
github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
github.com/google/go-cmp v0.6.0 // indirect github.com/google/go-cmp v0.5.9 // indirect
github.com/gorilla/websocket v1.5.3 // indirect github.com/gorilla/websocket v1.5.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/hashicorp/hcl/v2 v2.17.0 // indirect
github.com/hashicorp/hcl/v2 v2.23.0 // indirect
github.com/josharian/intern v1.0.0 // indirect github.com/josharian/intern v1.0.0 // indirect
github.com/leodido/go-urn v1.4.0 // indirect github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
github.com/mailru/easyjson v0.9.0 // indirect github.com/leodido/go-urn v1.2.4 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-isatty v0.0.19 // indirect
github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/rogpeppe/go-internal v1.11.0 // indirect github.com/swaggo/files v1.0.1 // indirect
github.com/swaggo/files/v2 v2.0.2 // indirect
github.com/yeqown/reedsolomon v1.0.0 // indirect github.com/yeqown/reedsolomon v1.0.0 // indirect
github.com/zclconf/go-cty v1.16.0 // indirect github.com/zclconf/go-cty v1.13.2 // indirect
golang.org/x/exp v0.0.0-20250103183323-7d7fa50e5329 // indirect golang.org/x/image v0.9.0 // indirect
golang.org/x/image v0.23.0 // indirect golang.org/x/mod v0.12.0 // indirect
golang.org/x/mod v0.22.0 // indirect golang.org/x/net v0.12.0 // indirect
golang.org/x/net v0.33.0 // indirect golang.org/x/sys v0.10.0 // indirect
golang.org/x/sync v0.10.0 // indirect golang.org/x/text v0.11.0 // indirect
golang.org/x/sys v0.29.0 // indirect golang.org/x/tools v0.11.0 // indirect
golang.org/x/text v0.21.0 // indirect
golang.org/x/tools v0.28.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect
modernc.org/gc/v3 v3.0.0-20241223112719-96e2e1e4408d // indirect lukechampine.com/uint128 v1.3.0 // indirect
modernc.org/libc v1.61.6 // indirect modernc.org/cc/v3 v3.41.0 // indirect
modernc.org/mathutil v1.7.1 // indirect modernc.org/ccgo/v3 v3.16.14 // indirect
modernc.org/memory v1.8.1 // indirect modernc.org/libc v1.24.1 // indirect
modernc.org/strutil v1.2.1 // indirect modernc.org/mathutil v1.6.0 // indirect
modernc.org/memory v1.6.0 // indirect
modernc.org/opt v0.1.3 // indirect
modernc.org/strutil v1.1.3 // indirect
modernc.org/token v1.1.0 // indirect modernc.org/token v1.1.0 // indirect
) )

File diff suppressed because it is too large


@@ -1,104 +0,0 @@
// Package currencies provides a shared definition of currencies. This uses a global
// variable to hold the currencies.
package currencies
import (
"bytes"
_ "embed"
"encoding/json"
"io"
"slices"
"strings"
"sync"
)
//go:embed currencies.json
var defaults []byte
type CollectorFunc func() ([]Currency, error)
func CollectJSON(reader io.Reader) CollectorFunc {
return func() ([]Currency, error) {
var currencies []Currency
err := json.NewDecoder(reader).Decode(&currencies)
if err != nil {
return nil, err
}
return currencies, nil
}
}
func CollectDefaults() CollectorFunc {
return CollectJSON(bytes.NewReader(defaults))
}
func CollectionCurrencies(collectors ...CollectorFunc) ([]Currency, error) {
out := make([]Currency, 0, len(collectors))
for i := range collectors {
c, err := collectors[i]()
if err != nil {
return nil, err
}
out = append(out, c...)
}
return out, nil
}
type Currency struct {
Name string `json:"name"`
Code string `json:"code"`
Local string `json:"local"`
Symbol string `json:"symbol"`
}
type CurrencyRegistry struct {
mu sync.RWMutex
registry map[string]Currency
}
func NewCurrencyService(currencies []Currency) *CurrencyRegistry {
registry := make(map[string]Currency, len(currencies))
for i := range currencies {
registry[currencies[i].Code] = currencies[i]
}
return &CurrencyRegistry{
registry: registry,
}
}
func (cs *CurrencyRegistry) Slice() []Currency {
cs.mu.RLock()
defer cs.mu.RUnlock()
out := make([]Currency, 0, len(cs.registry))
for key := range cs.registry {
out = append(out, cs.registry[key])
}
slices.SortFunc(out, func(a, b Currency) int {
if a.Name < b.Name {
return -1
}
if a.Name > b.Name {
return 1
}
return 0
})
return out
}
func (cs *CurrencyRegistry) IsSupported(code string) bool {
upper := strings.ToUpper(code)
cs.mu.RLock()
defer cs.mu.RUnlock()
_, ok := cs.registry[upper]
return ok
}
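
For orientation, a minimal usage sketch of the currencies package above, assuming only the exported names visible in this diff (CollectDefaults, CollectionCurrencies, NewCurrencyService, IsSupported, Slice); the program itself is illustrative and not part of the repository.

package main

import (
	"fmt"

	"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
)

func main() {
	// Load the embedded currencies.json defaults.
	defaults, err := currencies.CollectionCurrencies(currencies.CollectDefaults())
	if err != nil {
		panic(err)
	}

	registry := currencies.NewCurrencyService(defaults)

	// IsSupported upper-cases the code before checking the registry.
	fmt.Println(registry.IsSupported("usd"))

	// Slice returns the registered currencies sorted by name.
	for _, c := range registry.Slice() {
		fmt.Printf("%s\t%s\t%s\n", c.Code, c.Symbol, c.Name)
	}
}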

File diff suppressed because it is too large


@@ -1,9 +1,7 @@
// Package services provides the core business logic for the application.
package services package services
import ( import (
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies" "github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
) )
type AllServices struct { type AllServices struct {
@@ -11,14 +9,12 @@ type AllServices struct {
Group *GroupService Group *GroupService
Items *ItemService Items *ItemService
BackgroundService *BackgroundService BackgroundService *BackgroundService
Currencies *currencies.CurrencyRegistry
} }
type OptionsFunc func(*options) type OptionsFunc func(*options)
type options struct { type options struct {
autoIncrementAssetID bool autoIncrementAssetID bool
currencies []currencies.Currency
} }
func WithAutoIncrementAssetID(v bool) func(*options) { func WithAutoIncrementAssetID(v bool) func(*options) {
@@ -27,27 +23,13 @@ func WithAutoIncrementAssetID(v bool) func(*options) {
} }
} }
func WithCurrencies(v []currencies.Currency) func(*options) {
return func(o *options) {
o.currencies = v
}
}
func New(repos *repo.AllRepos, opts ...OptionsFunc) *AllServices { func New(repos *repo.AllRepos, opts ...OptionsFunc) *AllServices {
if repos == nil { if repos == nil {
panic("repos cannot be nil") panic("repos cannot be nil")
} }
defaultCurrencies, err := currencies.CollectionCurrencies(
currencies.CollectDefaults(),
)
if err != nil {
panic("failed to collect default currencies")
}
options := &options{ options := &options{
autoIncrementAssetID: true, autoIncrementAssetID: true,
currencies: defaultCurrencies,
} }
for _, opt := range opts { for _, opt := range opts {
@@ -61,7 +43,6 @@ func New(repos *repo.AllRepos, opts ...OptionsFunc) *AllServices {
repo: repos, repo: repos,
autoIncrementAssetID: options.autoIncrementAssetID, autoIncrementAssetID: options.autoIncrementAssetID,
}, },
BackgroundService: &BackgroundService{repos, Latest{}}, BackgroundService: &BackgroundService{repos},
Currencies: currencies.NewCurrencyService(options.currencies),
} }
} }
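
A short wiring sketch for the functional options above, assuming a *repo.AllRepos built elsewhere; buildServices and the optional extra reader are hypothetical, while WithCurrencies, WithAutoIncrementAssetID, and the currency collectors come from this diff.

import (
	"io"

	"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
	"github.com/sysadminsmedia/homebox/backend/internal/core/services"
	"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
)

// buildServices merges user-supplied currencies (extra may be nil) on top of the
// embedded defaults before constructing the service layer.
func buildServices(repos *repo.AllRepos, extra io.Reader) (*services.AllServices, error) {
	collectors := []currencies.CollectorFunc{currencies.CollectDefaults()}
	if extra != nil {
		collectors = append(collectors, currencies.CollectJSON(extra))
	}
	all, err := currencies.CollectionCurrencies(collectors...)
	if err != nil {
		return nil, err
	}
	return services.New(repos,
		services.WithAutoIncrementAssetID(true),
		services.WithCurrencies(all),
	), nil
}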


@@ -4,7 +4,7 @@ import (
"context" "context"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo" "github.com/hay-kot/homebox/backend/internal/data/repo"
) )
type contextKeys struct { type contextKeys struct {


@@ -5,8 +5,8 @@ import (
"testing" "testing"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
) )
func Test_SetAuthContext(t *testing.T) { func Test_SetAuthContext(t *testing.T) {


@@ -6,12 +6,11 @@ import (
"os" "os"
"testing" "testing"
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/pkgs/faker"
_ "github.com/mattn/go-sqlite3" _ "github.com/mattn/go-sqlite3"
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/pkgs/faker"
) )
var ( var (
@@ -49,8 +48,8 @@ func bootstrap() {
} }
} }
func MainNoExit(m *testing.M) int { func TestMain(m *testing.M) {
client, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1&_time_format=sqlite") client, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1")
if err != nil { if err != nil {
log.Fatalf("failed opening connection to sqlite: %v", err) log.Fatalf("failed opening connection to sqlite: %v", err)
} }
@@ -62,13 +61,8 @@ func MainNoExit(m *testing.M) int {
tClient = client tClient = client
tRepos = repo.New(tClient, tbus, os.TempDir()+"/homebox") tRepos = repo.New(tClient, tbus, os.TempDir()+"/homebox")
tSvc = New(tRepos)
defaults, _ := currencies.CollectionCurrencies( defer client.Close()
currencies.CollectDefaults(),
)
tSvc = New(tRepos, WithCurrencies(defaults))
defer func() { _ = client.Close() }()
bootstrap() bootstrap()
tCtx = Context{ tCtx = Context{
@@ -77,9 +71,5 @@ func MainNoExit(m *testing.M) int {
UID: tUser.ID, UID: tUser.ID,
} }
return m.Run() os.Exit(m.Run())
}
func TestMain(m *testing.M) {
os.Exit(MainNoExit(m))
} }


@@ -2,8 +2,8 @@ package reporting
import ( import (
"github.com/gocarina/gocsv" "github.com/gocarina/gocsv"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo" "github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/data/types" "github.com/hay-kot/homebox/backend/internal/data/types"
) )
// ================================================================================================= // =================================================================================================
@@ -20,9 +20,9 @@ type BillOfMaterialsEntry struct {
TotalPrice float64 `csv:"Total Price"` TotalPrice float64 `csv:"Total Price"`
} }
// BillOfMaterialsCSV returns a byte slice of the Bill of Materials for a given GID in CSV format // BillOfMaterialsTSV returns a byte slice of the Bill of Materials for a given GID in TSV format
// See BillOfMaterialsEntry for the format of the output // See BillOfMaterialsEntry for the format of the output
func BillOfMaterialsCSV(entities []repo.ItemOut) ([]byte, error) { func BillOfMaterialsTSV(entities []repo.ItemOut) ([]byte, error) {
bomEntries := make([]BillOfMaterialsEntry, len(entities)) bomEntries := make([]BillOfMaterialsEntry, len(entities))
for i, entity := range entities { for i, entity := range entities {
bomEntries[i] = BillOfMaterialsEntry{ bomEntries[i] = BillOfMaterialsEntry{


@@ -1,8 +1,7 @@
// Package eventbus provides an interface for event bus. // / Package eventbus provides an interface for event bus.
package eventbus package eventbus
import ( import (
"context"
"sync" "sync"
"github.com/google/uuid" "github.com/google/uuid"
@@ -35,38 +34,33 @@ type EventBus struct {
func New() *EventBus { func New() *EventBus {
return &EventBus{ return &EventBus{
ch: make(chan eventData, 100), ch: make(chan eventData, 10),
subscribers: map[Event][]func(any){ subscribers: map[Event][]func(any){
EventLabelMutation: {}, EventLabelMutation: {},
EventLocationMutation: {}, EventLocationMutation: {},
EventItemMutation: {}, EventItemMutation: {},
}, },
} }
} }
func (e *EventBus) Run(ctx context.Context) error { func (e *EventBus) Run() {
if e.started { if e.started {
panic("event bus already started") panic("event bus already started")
} }
e.started = true e.started = true
for { for event := range e.ch {
select { e.mu.RLock()
case <-ctx.Done(): arr, ok := e.subscribers[event.event]
return nil e.mu.RUnlock()
case event := <-e.ch:
e.mu.RLock()
arr, ok := e.subscribers[event.event]
e.mu.RUnlock()
if !ok { if !ok {
continue continue
} }
for _, fn := range arr { for _, fn := range arr {
fn(event.data) fn(event.data)
}
} }
} }
} }
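
The reworked Run above now takes a context and returns once it is cancelled; a minimal sketch of starting the bus under that contract (startBus is a hypothetical helper, the import path is the one used elsewhere in this diff).

import (
	"context"

	"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
)

// startBus runs the bus until ctx is cancelled; with the old signature the loop
// could only be stopped by ending the process.
func startBus(ctx context.Context) *eventbus.EventBus {
	bus := eventbus.New()
	go func() {
		_ = bus.Run(ctx) // returns nil once ctx.Done() fires
	}()
	return bus
}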


@@ -1,4 +1,3 @@
// Package reporting provides a way to import CSV files into the database.
package reporting package reporting
import ( import (


@@ -3,8 +3,8 @@ package reporting
import ( import (
"strings" "strings"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo" "github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/data/types" "github.com/hay-kot/homebox/backend/internal/data/types"
) )
type ExportItemFields struct { type ExportItemFields struct {
@@ -12,13 +12,12 @@ type ExportItemFields struct {
Value string Value string
} }
type ExportCSVRow struct { type ExportTSVRow struct {
ImportRef string `csv:"HB.import_ref"` ImportRef string `csv:"HB.import_ref"`
Location LocationString `csv:"HB.location"` Location LocationString `csv:"HB.location"`
LabelStr LabelString `csv:"HB.labels"` LabelStr LabelString `csv:"HB.labels"`
AssetID repo.AssetID `csv:"HB.asset_id"` AssetID repo.AssetID `csv:"HB.asset_id"`
Archived bool `csv:"HB.archived"` Archived bool `csv:"HB.archived"`
URL string `csv:"HB.url"`
Name string `csv:"HB.name"` Name string `csv:"HB.name"`
Quantity int `csv:"HB.quantity"` Quantity int `csv:"HB.quantity"`
@@ -85,7 +84,7 @@ func (csf LocationString) String() string {
return strings.Join(csf, " / ") return strings.Join(csf, " / ")
} }
func fromPathSlice(s []repo.ItemPath) LocationString { func fromPathSlice(s []repo.LocationPath) LocationString {
v := make(LocationString, len(s)) v := make(LocationString, len(s))
for i := range s { for i := range s {


@@ -10,21 +10,21 @@ import (
"strings" "strings"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/types"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
) )
// IOSheet is the representation of a CSV/TSV sheet that is used for importing/exporting // IOSheet is the representation of a CSV/TSV sheet that is used for importing/exporting
// items from homebox. It is used to read/write the data from/to a CSV/TSV file given // items from homebox. It is used to read/write the data from/to a CSV/TSV file given
// the standard format of the file. // the standard format of the file.
// //
// See ExportCSVRow for the format of the data in the sheet. // See ExportTSVRow for the format of the data in the sheet.
type IOSheet struct { type IOSheet struct {
headers []string headers []string
custom []int custom []int
index map[string]int index map[string]int
Rows []ExportCSVRow Rows []ExportTSVRow
} }
func (s *IOSheet) indexHeaders() { func (s *IOSheet) indexHeaders() {
@@ -70,16 +70,16 @@ func (s *IOSheet) Read(data io.Reader) error {
} }
s.headers = sheet[0] s.headers = sheet[0]
s.Rows = make([]ExportCSVRow, len(sheet)-1) s.Rows = make([]ExportTSVRow, len(sheet)-1)
for i, row := range sheet[1:] { for i, row := range sheet[1:] {
if len(row) != len(s.headers) { if len(row) != len(s.headers) {
return fmt.Errorf("row has %d columns, expected %d", len(row), len(s.headers)) return fmt.Errorf("row has %d columns, expected %d", len(row), len(s.headers))
} }
rowData := ExportCSVRow{} rowData := ExportTSVRow{}
st := reflect.TypeOf(ExportCSVRow{}) st := reflect.TypeOf(ExportTSVRow{})
for i := 0; i < st.NumField(); i++ { for i := 0; i < st.NumField(); i++ {
field := st.Field(i) field := st.Field(i)
@@ -152,9 +152,9 @@ func (s *IOSheet) Read(data io.Reader) error {
return nil return nil
} }
// ReadItems writes the sheet to a writer. // Write writes the sheet to a writer.
func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, gid uuid.UUID, repos *repo.AllRepos, hbURL string) error { func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.UUID, repos *repo.AllRepos) error {
s.Rows = make([]ExportCSVRow, len(items)) s.Rows = make([]ExportTSVRow, len(items))
extraHeaders := map[string]struct{}{} extraHeaders := map[string]struct{}{}
@@ -162,9 +162,9 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, gid uuid.
item := items[i] item := items[i]
// TODO: Support fetching nested locations // TODO: Support fetching nested locations
locID := item.Location.ID locId := item.Location.ID
locPaths, err := repos.Locations.PathForLoc(context.Background(), gid, locID) locPaths, err := repos.Locations.PathForLoc(context.Background(), GID, locId)
if err != nil { if err != nil {
log.Error().Err(err).Msg("could not get location path") log.Error().Err(err).Msg("could not get location path")
return err return err
@@ -178,8 +178,6 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, gid uuid.
labelString[i] = l.Name labelString[i] = l.Name
} }
url := generateItemURL(item, hbURL)
customFields := make([]ExportItemFields, len(item.Fields)) customFields := make([]ExportItemFields, len(item.Fields))
for i, f := range item.Fields { for i, f := range item.Fields {
@@ -191,7 +189,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, gid uuid.
} }
} }
s.Rows[i] = ExportCSVRow{ s.Rows[i] = ExportTSVRow{
// fill struct // fill struct
Location: locString, Location: locString,
LabelStr: labelString, LabelStr: labelString,
@@ -203,7 +201,6 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, gid uuid.
Description: item.Description, Description: item.Description,
Insured: item.Insured, Insured: item.Insured,
Archived: item.Archived, Archived: item.Archived,
URL: url,
PurchasePrice: item.PurchasePrice, PurchasePrice: item.PurchasePrice,
PurchaseFrom: item.PurchaseFrom, PurchaseFrom: item.PurchaseFrom,
@@ -222,7 +219,6 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, gid uuid.
SoldPrice: item.SoldPrice, SoldPrice: item.SoldPrice,
SoldNotes: item.SoldNotes, SoldNotes: item.SoldNotes,
Notes: item.Notes,
Fields: customFields, Fields: customFields,
} }
} }
@@ -236,7 +232,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, gid uuid.
sort.Strings(customHeaders) sort.Strings(customHeaders)
st := reflect.TypeOf(ExportCSVRow{}) st := reflect.TypeOf(ExportTSVRow{})
// Write headers // Write headers
for i := 0; i < st.NumField(); i++ { for i := 0; i < st.NumField(); i++ {
@@ -256,16 +252,8 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, gid uuid.
return nil return nil
} }
func generateItemURL(item repo.ItemOut, d string) string { // Writes the current sheet to a writer in TSV format.
url := "" func (s *IOSheet) TSV() ([][]string, error) {
if item.ID != uuid.Nil {
url = fmt.Sprintf("%s/item/%s", d, item.ID.String())
}
return url
}
// CSV writes the current sheet to a 2d array, for compatibility with TSV/CSV files.
func (s *IOSheet) CSV() ([][]string, error) {
memcsv := make([][]string, len(s.Rows)+1) memcsv := make([][]string, len(s.Rows)+1)
memcsv[0] = s.headers memcsv[0] = s.headers
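
A rough sketch of the round trip implied by the IOSheet methods in this diff: Read parses a CSV/TSV export into typed rows, and CSV (renamed from TSV) flattens a populated sheet back into records for encoding/csv. The helper names are hypothetical.

import (
	"bytes"
	"encoding/csv"
	"os"

	"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting"
)

// parseExport turns an exported file back into typed rows.
func parseExport(data []byte) ([]reporting.ExportCSVRow, error) {
	sheet := reporting.IOSheet{}
	if err := sheet.Read(bytes.NewReader(data)); err != nil {
		return nil, err
	}
	return sheet.Rows, nil
}

// writeExport flattens a sheet (e.g. after ReadItems) into CSV on stdout.
func writeExport(sheet *reporting.IOSheet) error {
	records, err := sheet.CSV()
	if err != nil {
		return err
	}
	return csv.NewWriter(os.Stdout).WriteAll(records)
}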


@@ -7,9 +7,8 @@ import (
_ "embed" _ "embed"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
) )
var ( var (
@@ -27,13 +26,13 @@ func TestSheet_Read(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
data []byte data []byte
want []ExportCSVRow want []ExportTSVRow
wantErr bool wantErr bool
}{ }{
{ {
name: "minimal import", name: "minimal import",
data: minimalImportCSV, data: minimalImportCSV,
want: []ExportCSVRow{ want: []ExportTSVRow{
{Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1"}, {Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1"},
{Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2"}, {Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2"},
{Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3"}, {Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3"},
@@ -42,7 +41,7 @@ func TestSheet_Read(t *testing.T) {
{ {
name: "custom field import", name: "custom field import",
data: customFieldImportCSV, data: customFieldImportCSV,
want: []ExportCSVRow{ want: []ExportTSVRow{
{ {
Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1", Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1",
Fields: []ExportItemFields{ Fields: []ExportItemFields{
@@ -72,7 +71,7 @@ func TestSheet_Read(t *testing.T) {
{ {
name: "custom types import", name: "custom types import",
data: customTypesImportCSV, data: customTypesImportCSV,
want: []ExportCSVRow{ want: []ExportTSVRow{
{ {
Name: "Item 1", Name: "Item 1",
AssetID: repo.AssetID(1), AssetID: repo.AssetID(1),
@@ -104,9 +103,9 @@ func TestSheet_Read(t *testing.T) {
switch { switch {
case tt.wantErr: case tt.wantErr:
require.Error(t, err) assert.Error(t, err)
default: default:
require.NoError(t, err) assert.NoError(t, err)
assert.ElementsMatch(t, tt.want, sheet.Rows) assert.ElementsMatch(t, tt.want, sheet.Rows)
} }
}) })


@@ -2,25 +2,17 @@ package services
import ( import (
"context" "context"
"encoding/json"
"fmt"
"net/http"
"strings" "strings"
"time" "time"
"github.com/containrrr/shoutrrr" "github.com/containrrr/shoutrrr"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/data/types"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
) )
type Latest struct {
Version string `json:"version"`
Date string `json:"date"`
}
type BackgroundService struct { type BackgroundService struct {
repos *repo.AllRepos repos *repo.AllRepos
latest Latest
} }
func (svc *BackgroundService) SendNotifiersToday(ctx context.Context) error { func (svc *BackgroundService) SendNotifiersToday(ctx context.Context) error {
@@ -87,52 +79,3 @@ func (svc *BackgroundService) SendNotifiersToday(ctx context.Context) error {
return nil return nil
} }
func (svc *BackgroundService) GetLatestGithubRelease(ctx context.Context) error {
url := "https://api.github.com/repos/sysadminsmedia/homebox/releases/latest"
req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
if err != nil {
return fmt.Errorf("failed to create latest version request: %w", err)
}
req.Header.Set("User-Agent", "Homebox-Version-Checker")
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return fmt.Errorf("failed to make latest version request: %w", err)
}
defer func() {
err := resp.Body.Close()
if err != nil {
log.Printf("error closing latest version response body: %v", err)
}
}()
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("latest version unexpected status code: %d", resp.StatusCode)
}
// ignoring fields that are not relevant
type Release struct {
ReleaseVersion string `json:"tag_name"`
PublishedAt time.Time `json:"published_at"`
}
var release Release
if err := json.NewDecoder(resp.Body).Decode(&release); err != nil {
return fmt.Errorf("failed to decode latest version response: %w", err)
}
svc.latest = Latest{
Version: release.ReleaseVersion,
Date: release.PublishedAt.String(),
}
return nil
}
func (svc *BackgroundService) GetLatestVersion() (Latest) {
return svc.latest
}
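
A hedged sketch of how the release check above might be driven; only GetLatestGithubRelease and GetLatestVersion come from this diff, while the helper, the polling interval, and the logging are assumptions.

import (
	"context"
	"time"

	"github.com/rs/zerolog/log"

	"github.com/sysadminsmedia/homebox/backend/internal/core/services"
)

// pollLatestRelease refreshes the cached Latest value until ctx is cancelled.
func pollLatestRelease(ctx context.Context, svc *services.BackgroundService) {
	ticker := time.NewTicker(6 * time.Hour) // interval is an assumption
	defer ticker.Stop()
	for {
		if err := svc.GetLatestGithubRelease(ctx); err != nil {
			log.Err(err).Msg("failed to fetch latest release")
		} else {
			log.Debug().Str("version", svc.GetLatestVersion().Version).Msg("latest release cached")
		}
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
		}
	}
}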


@@ -4,8 +4,8 @@ import (
"errors" "errors"
"time" "time"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo" "github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/pkgs/hasher" "github.com/hay-kot/homebox/backend/pkgs/hasher"
) )
type GroupService struct { type GroupService struct {


@@ -8,8 +8,8 @@ import (
"strings" "strings"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting" "github.com/hay-kot/homebox/backend/internal/core/services/reporting"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo" "github.com/hay-kot/homebox/backend/internal/data/repo"
) )
var ( var (
@@ -32,19 +32,19 @@ func (svc *ItemService) Create(ctx Context, item repo.ItemCreate) (repo.ItemOut,
return repo.ItemOut{}, err return repo.ItemOut{}, err
} }
item.AssetID = highest + 1 item.AssetID = repo.AssetID(highest + 1)
} }
return svc.repo.Items.Create(ctx, ctx.GID, item) return svc.repo.Items.Create(ctx, ctx.GID, item)
} }
func (svc *ItemService) EnsureAssetID(ctx context.Context, gid uuid.UUID) (int, error) { func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int, error) {
items, err := svc.repo.Items.GetAllZeroAssetID(ctx, gid) items, err := svc.repo.Items.GetAllZeroAssetID(ctx, GID)
if err != nil { if err != nil {
return 0, err return 0, err
} }
highest, err := svc.repo.Items.GetHighestAssetID(ctx, gid) highest, err := svc.repo.Items.GetHighestAssetID(ctx, GID)
if err != nil { if err != nil {
return 0, err return 0, err
} }
@@ -53,7 +53,7 @@ func (svc *ItemService) EnsureAssetID(ctx context.Context, gid uuid.UUID) (int,
for _, item := range items { for _, item := range items {
highest++ highest++
err = svc.repo.Items.SetAssetID(ctx, gid, item.ID, highest) err = svc.repo.Items.SetAssetID(ctx, GID, item.ID, repo.AssetID(highest))
if err != nil { if err != nil {
return 0, err return 0, err
} }
@@ -64,8 +64,8 @@ func (svc *ItemService) EnsureAssetID(ctx context.Context, gid uuid.UUID) (int,
return finished, nil return finished, nil
} }
func (svc *ItemService) EnsureImportRef(ctx context.Context, gid uuid.UUID) (int, error) { func (svc *ItemService) EnsureImportRef(ctx context.Context, GID uuid.UUID) (int, error) {
ids, err := svc.repo.Items.GetAllZeroImportRef(ctx, gid) ids, err := svc.repo.Items.GetAllZeroImportRef(ctx, GID)
if err != nil { if err != nil {
return 0, err return 0, err
} }
@@ -74,7 +74,7 @@ func (svc *ItemService) EnsureImportRef(ctx context.Context, gid uuid.UUID) (int
for _, itemID := range ids { for _, itemID := range ids {
ref := uuid.New().String()[0:8] ref := uuid.New().String()[0:8]
err = svc.repo.Items.Patch(ctx, gid, itemID, repo.ItemPatch{ImportRef: &ref}) err = svc.repo.Items.Patch(ctx, GID, itemID, repo.ItemPatch{ImportRef: &ref})
if err != nil { if err != nil {
return 0, err return 0, err
} }
@@ -96,7 +96,7 @@ func serializeLocation[T ~[]string](location T) string {
// 1. If the item does not exist, it is created. // 1. If the item does not exist, it is created.
// 2. If the item has a ImportRef and it exists it is skipped // 2. If the item has a ImportRef and it exists it is skipped
// 3. Locations and Labels are created if they do not exist. // 3. Locations and Labels are created if they do not exist.
func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data io.Reader) (int, error) { func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Reader) (int, error) {
sheet := reporting.IOSheet{} sheet := reporting.IOSheet{}
err := sheet.Read(data) err := sheet.Read(data)
@@ -109,7 +109,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data io.Re
labelMap := make(map[string]uuid.UUID) labelMap := make(map[string]uuid.UUID)
{ {
labels, err := svc.repo.Labels.GetAll(ctx, gid) labels, err := svc.repo.Labels.GetAll(ctx, GID)
if err != nil { if err != nil {
return 0, err return 0, err
} }
@@ -124,7 +124,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data io.Re
locationMap := make(map[string]uuid.UUID) locationMap := make(map[string]uuid.UUID)
{ {
locations, err := svc.repo.Locations.Tree(ctx, gid, repo.TreeQuery{WithItems: false}) locations, err := svc.repo.Locations.Tree(ctx, GID, repo.TreeQuery{WithItems: false})
if err != nil { if err != nil {
return 0, err return 0, err
} }
@@ -153,7 +153,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data io.Re
// Asset ID Pre-Check // Asset ID Pre-Check
highestAID := repo.AssetID(-1) highestAID := repo.AssetID(-1)
if svc.autoIncrementAssetID { if svc.autoIncrementAssetID {
highestAID, err = svc.repo.Items.GetHighestAssetID(ctx, gid) highestAID, err = svc.repo.Items.GetHighestAssetID(ctx, GID)
if err != nil { if err != nil {
return 0, err return 0, err
} }
@@ -169,7 +169,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data io.Re
// ======================================== // ========================================
// Preflight check for existing item // Preflight check for existing item
if row.ImportRef != "" { if row.ImportRef != "" {
exists, err := svc.repo.Items.CheckRef(ctx, gid, row.ImportRef) exists, err := svc.repo.Items.CheckRef(ctx, GID, row.ImportRef)
if err != nil { if err != nil {
return 0, fmt.Errorf("error checking for existing item with ref %q: %w", row.ImportRef, err) return 0, fmt.Errorf("error checking for existing item with ref %q: %w", row.ImportRef, err)
} }
@@ -188,7 +188,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data io.Re
id, ok := labelMap[label] id, ok := labelMap[label]
if !ok { if !ok {
newLabel, err := svc.repo.Labels.Create(ctx, gid, repo.LabelCreate{Name: label}) newLabel, err := svc.repo.Labels.Create(ctx, GID, repo.LabelCreate{Name: label})
if err != nil { if err != nil {
return 0, err return 0, err
} }
@@ -220,7 +220,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data io.Re
parentID = locationMap[parentPath] parentID = locationMap[parentPath]
} }
newLocation, err := svc.repo.Locations.Create(ctx, gid, repo.LocationCreate{ newLocation, err := svc.repo.Locations.Create(ctx, GID, repo.LocationCreate{
ParentID: parentID, ParentID: parentID,
Name: pathElement, Name: pathElement,
}) })
@@ -261,12 +261,12 @@ func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data io.Re
LabelIDs: labelIds, LabelIDs: labelIds,
} }
item, err = svc.repo.Items.Create(ctx, gid, newItem) item, err = svc.repo.Items.Create(ctx, GID, newItem)
if err != nil { if err != nil {
return 0, err return 0, err
} }
default: default:
item, err = svc.repo.Items.GetByRef(ctx, gid, row.ImportRef) item, err = svc.repo.Items.GetByRef(ctx, GID, row.ImportRef)
if err != nil { if err != nil {
return 0, err return 0, err
} }
@@ -318,7 +318,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data io.Re
Fields: fields, Fields: fields,
} }
item, err = svc.repo.Items.UpdateByGroup(ctx, gid, updateItem) item, err = svc.repo.Items.UpdateByGroup(ctx, GID, updateItem)
if err != nil { if err != nil {
return 0, err return 0, err
} }
@@ -329,27 +329,27 @@ func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data io.Re
return finished, nil return finished, nil
} }
func (svc *ItemService) ExportCSV(ctx context.Context, gid uuid.UUID, hbURL string) ([][]string, error) { func (svc *ItemService) ExportTSV(ctx context.Context, GID uuid.UUID) ([][]string, error) {
items, err := svc.repo.Items.GetAll(ctx, gid) items, err := svc.repo.Items.GetAll(ctx, GID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
sheet := reporting.IOSheet{} sheet := reporting.IOSheet{}
err = sheet.ReadItems(ctx, items, gid, svc.repo, hbURL) err = sheet.ReadItems(ctx, items, GID, svc.repo)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return sheet.CSV() return sheet.TSV()
} }
func (svc *ItemService) ExportBillOfMaterialsCSV(ctx context.Context, gid uuid.UUID) ([]byte, error) { func (svc *ItemService) ExportBillOfMaterialsTSV(ctx context.Context, GID uuid.UUID) ([]byte, error) {
items, err := svc.repo.Items.GetAll(ctx, gid) items, err := svc.repo.Items.GetAll(ctx, GID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return reporting.BillOfMaterialsCSV(items) return reporting.BillOfMaterialsTSV(items)
} }
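
A brief sketch of the import/export entry points as they now read (CsvImport, and ExportCSV with the new hbURL parameter that fills the HB.url column); the helper and the example base URL are hypothetical.

import (
	"context"
	"encoding/csv"
	"io"
	"os"

	"github.com/google/uuid"

	"github.com/sysadminsmedia/homebox/backend/internal/core/services"
)

// importThenExport ingests an uploaded CSV and writes the group's export to stdout.
func importThenExport(ctx context.Context, svc *services.AllServices, gid uuid.UUID, upload io.Reader) error {
	if _, err := svc.Items.CsvImport(ctx, gid, upload); err != nil {
		return err
	}
	rows, err := svc.Items.ExportCSV(ctx, gid, "https://homebox.example.com")
	if err != nil {
		return err
	}
	return csv.NewWriter(os.Stdout).WriteAll(rows)
}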


@@ -6,14 +6,14 @@ import (
"os" "os"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
) )
func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentID uuid.UUID) (*ent.Document, error) { func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentId uuid.UUID) (*ent.Document, error) {
attachment, err := svc.repo.Attachments.Get(ctx, attachmentID) attachment, err := svc.repo.Attachments.Get(ctx, attachmentId)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -21,9 +21,9 @@ func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentID uuid.UU
return attachment.Edges.Document, nil return attachment.Edges.Document, nil
} }
func (svc *ItemService) AttachmentUpdate(ctx Context, itemID uuid.UUID, data *repo.ItemAttachmentUpdate) (repo.ItemOut, error) { func (svc *ItemService) AttachmentUpdate(ctx Context, itemId uuid.UUID, data *repo.ItemAttachmentUpdate) (repo.ItemOut, error) {
// Update Attachment // Update Attachment
attachment, err := svc.repo.Attachments.Update(ctx, data.ID, data) attachment, err := svc.repo.Attachments.Update(ctx, data.ID, attachment.Type(data.Type))
if err != nil { if err != nil {
return repo.ItemOut{}, err return repo.ItemOut{}, err
} }
@@ -35,15 +35,15 @@ func (svc *ItemService) AttachmentUpdate(ctx Context, itemID uuid.UUID, data *re
return repo.ItemOut{}, err return repo.ItemOut{}, err
} }
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID) return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
} }
// AttachmentAdd adds an attachment to an item by creating an entry in the Documents table and linking it to the Attachment // AttachmentAdd adds an attachment to an item by creating an entry in the Documents table and linking it to the Attachment
// Table and Items table. The file provided via the reader is stored on the file system based on the provided // Table and Items table. The file provided via the reader is stored on the file system based on the provided
// relative path during construction of the service. // relative path during construction of the service.
func (svc *ItemService) AttachmentAdd(ctx Context, itemID uuid.UUID, filename string, attachmentType attachment.Type, file io.Reader) (repo.ItemOut, error) { func (svc *ItemService) AttachmentAdd(ctx Context, itemId uuid.UUID, filename string, attachmentType attachment.Type, file io.Reader) (repo.ItemOut, error) {
// Get the Item // Get the Item
_, err := svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID) _, err := svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
if err != nil { if err != nil {
return repo.ItemOut{}, err return repo.ItemOut{}, err
} }
@@ -56,29 +56,29 @@ func (svc *ItemService) AttachmentAdd(ctx Context, itemID uuid.UUID, filename st
} }
// Create the attachment // Create the attachment
_, err = svc.repo.Attachments.Create(ctx, itemID, doc.ID, attachmentType) _, err = svc.repo.Attachments.Create(ctx, itemId, doc.ID, attachmentType)
if err != nil { if err != nil {
log.Err(err).Msg("failed to create attachment") log.Err(err).Msg("failed to create attachment")
return repo.ItemOut{}, err return repo.ItemOut{}, err
} }
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID) return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
} }
func (svc *ItemService) AttachmentDelete(ctx context.Context, gid, itemID, attachmentID uuid.UUID) error { func (svc *ItemService) AttachmentDelete(ctx context.Context, gid, itemId, attachmentId uuid.UUID) error {
// Get the Item // Get the Item
_, err := svc.repo.Items.GetOneByGroup(ctx, gid, itemID) _, err := svc.repo.Items.GetOneByGroup(ctx, gid, itemId)
if err != nil { if err != nil {
return err return err
} }
attachment, err := svc.repo.Attachments.Get(ctx, attachmentID) attachment, err := svc.repo.Attachments.Get(ctx, attachmentId)
if err != nil { if err != nil {
return err return err
} }
// Delete the attachment // Delete the attachment
err = svc.repo.Attachments.Delete(ctx, attachmentID) err = svc.repo.Attachments.Delete(ctx, attachmentId)
if err != nil { if err != nil {
return err return err
} }
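
A small sketch of adding an attachment through the service method above; the helper is hypothetical, the services.Context value follows the pattern used in the service tests later in this diff, and the untyped "attachment" literal converts to attachment.Type the same way those tests do.

import (
	"os"
	"path/filepath"

	"github.com/google/uuid"

	"github.com/sysadminsmedia/homebox/backend/internal/core/services"
)

// attachFile stores the file as a Document and links it to the item.
func attachFile(ctx services.Context, svc *services.AllServices, itemID uuid.UUID, path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	_, err = svc.Items.AttachmentAdd(ctx, itemID, filepath.Base(path), "attachment", f)
	return err
}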


@@ -7,9 +7,8 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
) )
func TestItemService_AddAttachment(t *testing.T) { func TestItemService_AddAttachment(t *testing.T) {
@@ -24,7 +23,7 @@ func TestItemService_AddAttachment(t *testing.T) {
Description: "test", Description: "test",
Name: "test", Name: "test",
}) })
require.NoError(t, err) assert.NoError(t, err)
assert.NotNil(t, loc) assert.NotNil(t, loc)
itmC := repo.ItemCreate{ itmC := repo.ItemCreate{
@@ -34,11 +33,11 @@ func TestItemService_AddAttachment(t *testing.T) {
} }
itm, err := svc.repo.Items.Create(context.Background(), tGroup.ID, itmC) itm, err := svc.repo.Items.Create(context.Background(), tGroup.ID, itmC)
require.NoError(t, err) assert.NoError(t, err)
assert.NotNil(t, itm) assert.NotNil(t, itm)
t.Cleanup(func() { t.Cleanup(func() {
err := svc.repo.Items.Delete(context.Background(), itm.ID) err := svc.repo.Items.Delete(context.Background(), itm.ID)
require.NoError(t, err) assert.NoError(t, err)
}) })
contents := fk.Str(1000) contents := fk.Str(1000)
@@ -46,7 +45,7 @@ func TestItemService_AddAttachment(t *testing.T) {
// Setup // Setup
afterAttachment, err := svc.AttachmentAdd(tCtx, itm.ID, "testfile.txt", "attachment", reader) afterAttachment, err := svc.AttachmentAdd(tCtx, itm.ID, "testfile.txt", "attachment", reader)
require.NoError(t, err) assert.NoError(t, err)
assert.NotNil(t, afterAttachment) assert.NotNil(t, afterAttachment)
// Check that the file exists // Check that the file exists
@@ -57,6 +56,6 @@ func TestItemService_AddAttachment(t *testing.T) {
// Check that the file contents are correct // Check that the file contents are correct
bts, err := os.ReadFile(storedPath) bts, err := os.ReadFile(storedPath)
require.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, contents, string(bts)) assert.Equal(t, contents, string(bts))
} }


@@ -6,17 +6,17 @@ import (
"time" "time"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/pkgs/hasher"
"github.com/rs/zerolog/log" "github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/pkgs/hasher"
) )
var ( var (
oneWeek = time.Hour * 24 * 7 oneWeek = time.Hour * 24 * 7
ErrorInvalidLogin = errors.New("invalid username or password") ErrorInvalidLogin = errors.New("invalid username or password")
ErrorInvalidToken = errors.New("invalid token") ErrorInvalidToken = errors.New("invalid token")
ErrorTokenIDMismatch = errors.New("token id mismatch") ErrorTokenIdMismatch = errors.New("token id mismatch")
) )
type UserService struct { type UserService struct {
@@ -92,11 +92,9 @@ func (svc *UserService) RegisterUser(ctx context.Context, data UserRegistration)
if err != nil { if err != nil {
return repo.UserOut{}, err return repo.UserOut{}, err
} }
log.Debug().Msg("user created")
// Create the default labels and locations for the group. // Create the default labels and locations for the group.
if creatingGroup { if creatingGroup {
log.Debug().Msg("creating default labels")
for _, label := range defaultLabels() { for _, label := range defaultLabels() {
_, err := svc.repos.Labels.Create(ctx, usr.GroupID, label) _, err := svc.repos.Labels.Create(ctx, usr.GroupID, label)
if err != nil { if err != nil {
@@ -104,7 +102,6 @@ func (svc *UserService) RegisterUser(ctx context.Context, data UserRegistration)
} }
} }
log.Debug().Msg("creating default locations")
for _, location := range defaultLocations() { for _, location := range defaultLocations() {
_, err := svc.repos.Locations.Create(ctx, usr.GroupID, location) _, err := svc.repos.Locations.Create(ctx, usr.GroupID, location)
if err != nil { if err != nil {
@@ -115,7 +112,6 @@ func (svc *UserService) RegisterUser(ctx context.Context, data UserRegistration)
// Decrement the invitation token if it was used. // Decrement the invitation token if it was used.
if token.ID != uuid.Nil { if token.ID != uuid.Nil {
log.Debug().Msg("decrementing invitation token")
err = svc.repos.Groups.InvitationUpdate(ctx, token.ID, token.Uses-1) err = svc.repos.Groups.InvitationUpdate(ctx, token.ID, token.Uses-1)
if err != nil { if err != nil {
log.Err(err).Msg("Failed to update invitation token") log.Err(err).Msg("Failed to update invitation token")
@@ -132,19 +128,19 @@ func (svc *UserService) GetSelf(ctx context.Context, requestToken string) (repo.
return svc.repos.AuthTokens.GetUserFromToken(ctx, hash) return svc.repos.AuthTokens.GetUserFromToken(ctx, hash)
} }
func (svc *UserService) UpdateSelf(ctx context.Context, id uuid.UUID, data repo.UserUpdate) (repo.UserOut, error) { func (svc *UserService) UpdateSelf(ctx context.Context, ID uuid.UUID, data repo.UserUpdate) (repo.UserOut, error) {
err := svc.repos.Users.Update(ctx, id, data) err := svc.repos.Users.Update(ctx, ID, data)
if err != nil { if err != nil {
return repo.UserOut{}, err return repo.UserOut{}, err
} }
return svc.repos.Users.GetOneID(ctx, id) return svc.repos.Users.GetOneId(ctx, ID)
} }
// ============================================================================ // ============================================================================
// User Authentication // User Authentication
func (svc *UserService) createSessionToken(ctx context.Context, userID uuid.UUID, extendedSession bool) (UserAuthTokenDetail, error) { func (svc *UserService) createSessionToken(ctx context.Context, userId uuid.UUID, extendedSession bool) (UserAuthTokenDetail, error) {
attachmentToken := hasher.GenerateToken() attachmentToken := hasher.GenerateToken()
expiresAt := time.Now().Add(oneWeek) expiresAt := time.Now().Add(oneWeek)
@@ -153,7 +149,7 @@ func (svc *UserService) createSessionToken(ctx context.Context, userID uuid.UUID
} }
attachmentData := repo.UserAuthTokenCreate{ attachmentData := repo.UserAuthTokenCreate{
UserID: userID, UserID: userId,
TokenHash: attachmentToken.Hash, TokenHash: attachmentToken.Hash,
ExpiresAt: expiresAt, ExpiresAt: expiresAt,
} }
@@ -165,7 +161,7 @@ func (svc *UserService) createSessionToken(ctx context.Context, userID uuid.UUID
userToken := hasher.GenerateToken() userToken := hasher.GenerateToken()
data := repo.UserAuthTokenCreate{ data := repo.UserAuthTokenCreate{
UserID: userID, UserID: userId,
TokenHash: userToken.Hash, TokenHash: userToken.Hash,
ExpiresAt: expiresAt, ExpiresAt: expiresAt,
} }
@@ -217,12 +213,12 @@ func (svc *UserService) RenewToken(ctx context.Context, token string) (UserAuthT
// DeleteSelf deletes the user that is currently logged based of the provided UUID // DeleteSelf deletes the user that is currently logged based of the provided UUID
// There is _NO_ protection against deleting the wrong user, as such this should only // There is _NO_ protection against deleting the wrong user, as such this should only
// be used when the identify of the user has been confirmed. // be used when the identify of the user has been confirmed.
func (svc *UserService) DeleteSelf(ctx context.Context, id uuid.UUID) error { func (svc *UserService) DeleteSelf(ctx context.Context, ID uuid.UUID) error {
return svc.repos.Users.Delete(ctx, id) return svc.repos.Users.Delete(ctx, ID)
} }
func (svc *UserService) ChangePassword(ctx Context, current string, new string) (ok bool) { func (svc *UserService) ChangePassword(ctx Context, current string, new string) (ok bool) {
usr, err := svc.repos.Users.GetOneID(ctx, ctx.UID) usr, err := svc.repos.Users.GetOneId(ctx, ctx.UID)
if err != nil { if err != nil {
return false return false
} }


@@ -1,7 +1,7 @@
package services package services
import ( import (
"github.com/sysadminsmedia/homebox/backend/internal/data/repo" "github.com/hay-kot/homebox/backend/internal/data/repo"
) )
func defaultLocations() []repo.LocationCreate { func defaultLocations() []repo.LocationCreate {


@@ -10,9 +10,9 @@ import (
"entgo.io/ent" "entgo.io/ent"
"entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment" "github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document" "github.com/hay-kot/homebox/backend/internal/data/ent/document"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item" "github.com/hay-kot/homebox/backend/internal/data/ent/item"
) )
// Attachment is the model entity for the Attachment schema. // Attachment is the model entity for the Attachment schema.
@@ -26,8 +26,6 @@ type Attachment struct {
UpdatedAt time.Time `json:"updated_at,omitempty"` UpdatedAt time.Time `json:"updated_at,omitempty"`
// Type holds the value of the "type" field. // Type holds the value of the "type" field.
Type attachment.Type `json:"type,omitempty"` Type attachment.Type `json:"type,omitempty"`
// Primary holds the value of the "primary" field.
Primary bool `json:"primary,omitempty"`
// Edges holds the relations/edges for other nodes in the graph. // Edges holds the relations/edges for other nodes in the graph.
// The values are being populated by the AttachmentQuery when eager-loading is set. // The values are being populated by the AttachmentQuery when eager-loading is set.
Edges AttachmentEdges `json:"edges"` Edges AttachmentEdges `json:"edges"`
@@ -50,10 +48,12 @@ type AttachmentEdges struct {
// ItemOrErr returns the Item value or an error if the edge // ItemOrErr returns the Item value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found. // was not loaded in eager-loading, or loaded but was not found.
func (e AttachmentEdges) ItemOrErr() (*Item, error) { func (e AttachmentEdges) ItemOrErr() (*Item, error) {
if e.Item != nil { if e.loadedTypes[0] {
if e.Item == nil {
// Edge was loaded but was not found.
return nil, &NotFoundError{label: item.Label}
}
return e.Item, nil return e.Item, nil
} else if e.loadedTypes[0] {
return nil, &NotFoundError{label: item.Label}
} }
return nil, &NotLoadedError{edge: "item"} return nil, &NotLoadedError{edge: "item"}
} }
@@ -61,10 +61,12 @@ func (e AttachmentEdges) ItemOrErr() (*Item, error) {
// DocumentOrErr returns the Document value or an error if the edge // DocumentOrErr returns the Document value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found. // was not loaded in eager-loading, or loaded but was not found.
func (e AttachmentEdges) DocumentOrErr() (*Document, error) { func (e AttachmentEdges) DocumentOrErr() (*Document, error) {
if e.Document != nil { if e.loadedTypes[1] {
if e.Document == nil {
// Edge was loaded but was not found.
return nil, &NotFoundError{label: document.Label}
}
return e.Document, nil return e.Document, nil
} else if e.loadedTypes[1] {
return nil, &NotFoundError{label: document.Label}
} }
return nil, &NotLoadedError{edge: "document"} return nil, &NotLoadedError{edge: "document"}
} }
@@ -74,8 +76,6 @@ func (*Attachment) scanValues(columns []string) ([]any, error) {
values := make([]any, len(columns)) values := make([]any, len(columns))
for i := range columns { for i := range columns {
switch columns[i] { switch columns[i] {
case attachment.FieldPrimary:
values[i] = new(sql.NullBool)
case attachment.FieldType: case attachment.FieldType:
values[i] = new(sql.NullString) values[i] = new(sql.NullString)
case attachment.FieldCreatedAt, attachment.FieldUpdatedAt: case attachment.FieldCreatedAt, attachment.FieldUpdatedAt:
@@ -125,12 +125,6 @@ func (a *Attachment) assignValues(columns []string, values []any) error {
} else if value.Valid { } else if value.Valid {
a.Type = attachment.Type(value.String) a.Type = attachment.Type(value.String)
} }
case attachment.FieldPrimary:
if value, ok := values[i].(*sql.NullBool); !ok {
return fmt.Errorf("unexpected type %T for field primary", values[i])
} else if value.Valid {
a.Primary = value.Bool
}
case attachment.ForeignKeys[0]: case attachment.ForeignKeys[0]:
if value, ok := values[i].(*sql.NullScanner); !ok { if value, ok := values[i].(*sql.NullScanner); !ok {
return fmt.Errorf("unexpected type %T for field document_attachments", values[i]) return fmt.Errorf("unexpected type %T for field document_attachments", values[i])
@@ -199,9 +193,6 @@ func (a *Attachment) String() string {
builder.WriteString(", ") builder.WriteString(", ")
builder.WriteString("type=") builder.WriteString("type=")
builder.WriteString(fmt.Sprintf("%v", a.Type)) builder.WriteString(fmt.Sprintf("%v", a.Type))
builder.WriteString(", ")
builder.WriteString("primary=")
builder.WriteString(fmt.Sprintf("%v", a.Primary))
builder.WriteByte(')') builder.WriteByte(')')
return builder.String() return builder.String()
} }


@@ -22,8 +22,6 @@
FieldUpdatedAt = "updated_at"
// FieldType holds the string denoting the type field in the database.
FieldType = "type"
-// FieldPrimary holds the string denoting the primary field in the database.
-FieldPrimary = "primary"
// EdgeItem holds the string denoting the item edge name in mutations.
EdgeItem = "item"
// EdgeDocument holds the string denoting the document edge name in mutations.
@@ -52,7 +50,6 @@ var Columns = []string{
FieldCreatedAt,
FieldUpdatedAt,
FieldType,
-FieldPrimary,
}
// ForeignKeys holds the SQL foreign-keys that are owned by the "attachments"
@@ -84,8 +81,6 @@ var (
DefaultUpdatedAt func() time.Time
// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
UpdateDefaultUpdatedAt func() time.Time
-// DefaultPrimary holds the default value on creation for the "primary" field.
-DefaultPrimary bool
// DefaultID holds the default value on creation for the "id" field.
DefaultID func() uuid.UUID
)
@@ -142,11 +137,6 @@ func ByType(opts ...sql.OrderTermOption) OrderOption {
return sql.OrderByField(FieldType, opts...).ToFunc()
}
-// ByPrimary orders the results by the primary field.
-func ByPrimary(opts ...sql.OrderTermOption) OrderOption {
-return sql.OrderByField(FieldPrimary, opts...).ToFunc()
-}
// ByItemField orders the results by item field.
func ByItemField(field string, opts ...sql.OrderTermOption) OrderOption {
return func(s *sql.Selector) {


@@ -8,7 +8,7 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"github.com/google/uuid"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
+"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// ID filters vertices based on their ID field.
@@ -66,11 +66,6 @@ func UpdatedAt(v time.Time) predicate.Attachment {
return predicate.Attachment(sql.FieldEQ(FieldUpdatedAt, v))
}
-// Primary applies equality check predicate on the "primary" field. It's identical to PrimaryEQ.
-func Primary(v bool) predicate.Attachment {
-return predicate.Attachment(sql.FieldEQ(FieldPrimary, v))
-}
// CreatedAtEQ applies the EQ predicate on the "created_at" field.
func CreatedAtEQ(v time.Time) predicate.Attachment {
return predicate.Attachment(sql.FieldEQ(FieldCreatedAt, v))
@@ -171,16 +166,6 @@ func TypeNotIn(vs ...Type) predicate.Attachment {
return predicate.Attachment(sql.FieldNotIn(FieldType, vs...))
}
-// PrimaryEQ applies the EQ predicate on the "primary" field.
-func PrimaryEQ(v bool) predicate.Attachment {
-return predicate.Attachment(sql.FieldEQ(FieldPrimary, v))
-}
-// PrimaryNEQ applies the NEQ predicate on the "primary" field.
-func PrimaryNEQ(v bool) predicate.Attachment {
-return predicate.Attachment(sql.FieldNEQ(FieldPrimary, v))
-}
// HasItem applies the HasEdge predicate on the "item" edge.
func HasItem() predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
@@ -229,15 +214,32 @@ func HasDocumentWith(preds ...predicate.Document) predicate.Attachment {
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Attachment) predicate.Attachment {
-return predicate.Attachment(sql.AndPredicates(predicates...))
+return predicate.Attachment(func(s *sql.Selector) {
+s1 := s.Clone().SetP(nil)
+for _, p := range predicates {
+p(s1)
+}
+s.Where(s1.P())
+})
}
// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Attachment) predicate.Attachment {
-return predicate.Attachment(sql.OrPredicates(predicates...))
+return predicate.Attachment(func(s *sql.Selector) {
+s1 := s.Clone().SetP(nil)
+for i, p := range predicates {
+if i > 0 {
+s1.Or()
+}
+p(s1)
+}
+s.Where(s1.P())
+})
}
// Not applies the not operator on the given predicate.
func Not(p predicate.Attachment) predicate.Attachment {
-return predicate.Attachment(sql.NotPredicates(p))
+return predicate.Attachment(func(s *sql.Selector) {
+p(s.Not())
+})
}
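Note: the And/Or/Not hunk above swaps the one-line `sql.AndPredicates`/`sql.OrPredicates`/`sql.NotPredicates` helpers on the removed side for the older generated form on the added side, which builds the combined predicate by hand on a cloned selector. Both shapes collapse the sub-predicates into a single grouped WHERE condition. A rough, self-contained sketch of that composition idea, using a toy predicate type instead of the real `*sql.Selector` (all names below are illustrative, not the ent API):

```go
package main

import (
	"fmt"
	"strings"
)

// pred is a toy stand-in for an ent predicate: it contributes one SQL fragment.
type pred func() string

// and mirrors the hand-rolled composition in the diff: evaluate every
// sub-predicate, then wrap the results as one group joined with AND.
func and(ps ...pred) pred {
	return func() string {
		parts := make([]string, 0, len(ps))
		for _, p := range ps {
			parts = append(parts, p())
		}
		return "(" + strings.Join(parts, " AND ") + ")"
	}
}

// or does the same but joins the fragments with OR, like the generated loop
// that calls s1.Or() between predicates.
func or(ps ...pred) pred {
	return func() string {
		parts := make([]string, 0, len(ps))
		for _, p := range ps {
			parts = append(parts, p())
		}
		return "(" + strings.Join(parts, " OR ") + ")"
	}
}

// eq builds a single equality fragment.
func eq(col, val string) pred {
	return func() string { return fmt.Sprintf("%s = %q", col, val) }
}

func main() {
	p := and(eq("type", "photo"), or(eq("primary", "true"), eq("primary", "false")))
	fmt.Println(p()) // (type = "photo" AND (primary = "true" OR primary = "false"))
}
```

The same substitution repeats below for the AuthRoles and AuthTokens predicate packages.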


@@ -11,9 +11,9 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
+"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
+"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
+"github.com/hay-kot/homebox/backend/internal/data/ent/item"
)
// AttachmentCreate is the builder for creating a Attachment entity.
@@ -65,20 +65,6 @@ func (ac *AttachmentCreate) SetNillableType(a *attachment.Type) *AttachmentCreat
return ac
}
-// SetPrimary sets the "primary" field.
-func (ac *AttachmentCreate) SetPrimary(b bool) *AttachmentCreate {
-ac.mutation.SetPrimary(b)
-return ac
-}
-// SetNillablePrimary sets the "primary" field if the given value is not nil.
-func (ac *AttachmentCreate) SetNillablePrimary(b *bool) *AttachmentCreate {
-if b != nil {
-ac.SetPrimary(*b)
-}
-return ac
-}
// SetID sets the "id" field.
func (ac *AttachmentCreate) SetID(u uuid.UUID) *AttachmentCreate {
ac.mutation.SetID(u)
@@ -162,10 +148,6 @@ func (ac *AttachmentCreate) defaults() {
v := attachment.DefaultType
ac.mutation.SetType(v)
}
-if _, ok := ac.mutation.Primary(); !ok {
-v := attachment.DefaultPrimary
-ac.mutation.SetPrimary(v)
-}
if _, ok := ac.mutation.ID(); !ok {
v := attachment.DefaultID()
ac.mutation.SetID(v)
@@ -188,13 +170,10 @@ func (ac *AttachmentCreate) check() error {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
}
}
-if _, ok := ac.mutation.Primary(); !ok {
+if _, ok := ac.mutation.ItemID(); !ok {
-return &ValidationError{Name: "primary", err: errors.New(`ent: missing required field "Attachment.primary"`)}
-}
-if len(ac.mutation.ItemIDs()) == 0 {
return &ValidationError{Name: "item", err: errors.New(`ent: missing required edge "Attachment.item"`)}
}
-if len(ac.mutation.DocumentIDs()) == 0 {
+if _, ok := ac.mutation.DocumentID(); !ok {
return &ValidationError{Name: "document", err: errors.New(`ent: missing required edge "Attachment.document"`)}
}
return nil
@@ -244,10 +223,6 @@ func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
_spec.SetField(attachment.FieldType, field.TypeEnum, value)
_node.Type = value
}
-if value, ok := ac.mutation.Primary(); ok {
-_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
-_node.Primary = value
-}
if nodes := ac.mutation.ItemIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
@@ -288,15 +263,11 @@ func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
// AttachmentCreateBulk is the builder for creating many Attachment entities in bulk.
type AttachmentCreateBulk struct {
config
-err error
builders []*AttachmentCreate
}
// Save creates the Attachment entities in the database.
func (acb *AttachmentCreateBulk) Save(ctx context.Context) ([]*Attachment, error) {
-if acb.err != nil {
-return nil, acb.err
-}
specs := make([]*sqlgraph.CreateSpec, len(acb.builders))
nodes := make([]*Attachment, len(acb.builders))
mutators := make([]Mutator, len(acb.builders))


@@ -8,8 +8,8 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
+"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
+"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// AttachmentDelete is the builder for deleting a Attachment entity.


@@ -7,15 +7,14 @@ import (
"fmt"
"math"
-"entgo.io/ent"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
+"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
+"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
+"github.com/hay-kot/homebox/backend/internal/data/ent/item"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
+"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// AttachmentQuery is the builder for querying Attachment entities.
@@ -111,7 +110,7 @@ func (aq *AttachmentQuery) QueryDocument() *DocumentQuery {
// First returns the first Attachment entity from the query.
// Returns a *NotFoundError when no Attachment was found.
func (aq *AttachmentQuery) First(ctx context.Context) (*Attachment, error) {
-nodes, err := aq.Limit(1).All(setContextOp(ctx, aq.ctx, ent.OpQueryFirst))
+nodes, err := aq.Limit(1).All(setContextOp(ctx, aq.ctx, "First"))
if err != nil {
return nil, err
}
@@ -134,7 +133,7 @@ func (aq *AttachmentQuery) FirstX(ctx context.Context) *Attachment {
// Returns a *NotFoundError when no Attachment ID was found.
func (aq *AttachmentQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) {
var ids []uuid.UUID
-if ids, err = aq.Limit(1).IDs(setContextOp(ctx, aq.ctx, ent.OpQueryFirstID)); err != nil {
+if ids, err = aq.Limit(1).IDs(setContextOp(ctx, aq.ctx, "FirstID")); err != nil {
return
}
if len(ids) == 0 {
@@ -157,7 +156,7 @@ func (aq *AttachmentQuery) FirstIDX(ctx context.Context) uuid.UUID {
// Returns a *NotSingularError when more than one Attachment entity is found.
// Returns a *NotFoundError when no Attachment entities are found.
func (aq *AttachmentQuery) Only(ctx context.Context) (*Attachment, error) {
-nodes, err := aq.Limit(2).All(setContextOp(ctx, aq.ctx, ent.OpQueryOnly))
+nodes, err := aq.Limit(2).All(setContextOp(ctx, aq.ctx, "Only"))
if err != nil {
return nil, err
}
@@ -185,7 +184,7 @@ func (aq *AttachmentQuery) OnlyX(ctx context.Context) *Attachment {
// Returns a *NotFoundError when no entities are found.
func (aq *AttachmentQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) {
var ids []uuid.UUID
-if ids, err = aq.Limit(2).IDs(setContextOp(ctx, aq.ctx, ent.OpQueryOnlyID)); err != nil {
+if ids, err = aq.Limit(2).IDs(setContextOp(ctx, aq.ctx, "OnlyID")); err != nil {
return
}
switch len(ids) {
@@ -210,7 +209,7 @@ func (aq *AttachmentQuery) OnlyIDX(ctx context.Context) uuid.UUID {
// All executes the query and returns a list of Attachments.
func (aq *AttachmentQuery) All(ctx context.Context) ([]*Attachment, error) {
-ctx = setContextOp(ctx, aq.ctx, ent.OpQueryAll)
+ctx = setContextOp(ctx, aq.ctx, "All")
if err := aq.prepareQuery(ctx); err != nil {
return nil, err
}
@@ -232,7 +231,7 @@ func (aq *AttachmentQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error)
if aq.ctx.Unique == nil && aq.path != nil {
aq.Unique(true)
}
-ctx = setContextOp(ctx, aq.ctx, ent.OpQueryIDs)
+ctx = setContextOp(ctx, aq.ctx, "IDs")
if err = aq.Select(attachment.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
@@ -250,7 +249,7 @@ func (aq *AttachmentQuery) IDsX(ctx context.Context) []uuid.UUID {
// Count returns the count of the given query.
func (aq *AttachmentQuery) Count(ctx context.Context) (int, error) {
-ctx = setContextOp(ctx, aq.ctx, ent.OpQueryCount)
+ctx = setContextOp(ctx, aq.ctx, "Count")
if err := aq.prepareQuery(ctx); err != nil {
return 0, err
}
@@ -268,7 +267,7 @@ func (aq *AttachmentQuery) CountX(ctx context.Context) int {
// Exist returns true if the query has elements in the graph.
func (aq *AttachmentQuery) Exist(ctx context.Context) (bool, error) {
-ctx = setContextOp(ctx, aq.ctx, ent.OpQueryExist)
+ctx = setContextOp(ctx, aq.ctx, "Exist")
switch _, err := aq.FirstID(ctx); {
case IsNotFound(err):
return false, nil
@@ -613,7 +612,7 @@ func (agb *AttachmentGroupBy) Aggregate(fns ...AggregateFunc) *AttachmentGroupBy
// Scan applies the selector query and scans the result into the given value.
func (agb *AttachmentGroupBy) Scan(ctx context.Context, v any) error {
-ctx = setContextOp(ctx, agb.build.ctx, ent.OpQueryGroupBy)
+ctx = setContextOp(ctx, agb.build.ctx, "GroupBy")
if err := agb.build.prepareQuery(ctx); err != nil {
return err
}
@@ -661,7 +660,7 @@ func (as *AttachmentSelect) Aggregate(fns ...AggregateFunc) *AttachmentSelect {
// Scan applies the selector query and scans the result into the given value.
func (as *AttachmentSelect) Scan(ctx context.Context, v any) error {
-ctx = setContextOp(ctx, as.ctx, ent.OpQuerySelect)
+ctx = setContextOp(ctx, as.ctx, "Select")
if err := as.prepareQuery(ctx); err != nil {
return err
}


@@ -12,10 +12,10 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
+"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
+"github.com/hay-kot/homebox/backend/internal/data/ent/document"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
+"github.com/hay-kot/homebox/backend/internal/data/ent/item"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
+"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// AttachmentUpdate is the builder for updating Attachment entities.
@@ -51,20 +51,6 @@ func (au *AttachmentUpdate) SetNillableType(a *attachment.Type) *AttachmentUpdat
return au
}
-// SetPrimary sets the "primary" field.
-func (au *AttachmentUpdate) SetPrimary(b bool) *AttachmentUpdate {
-au.mutation.SetPrimary(b)
-return au
-}
-// SetNillablePrimary sets the "primary" field if the given value is not nil.
-func (au *AttachmentUpdate) SetNillablePrimary(b *bool) *AttachmentUpdate {
-if b != nil {
-au.SetPrimary(*b)
-}
-return au
-}
// SetItemID sets the "item" edge to the Item entity by ID.
func (au *AttachmentUpdate) SetItemID(id uuid.UUID) *AttachmentUpdate {
au.mutation.SetItemID(id)
@@ -147,10 +133,10 @@ func (au *AttachmentUpdate) check() error {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
}
}
-if au.mutation.ItemCleared() && len(au.mutation.ItemIDs()) > 0 {
+if _, ok := au.mutation.ItemID(); au.mutation.ItemCleared() && !ok {
return errors.New(`ent: clearing a required unique edge "Attachment.item"`)
}
-if au.mutation.DocumentCleared() && len(au.mutation.DocumentIDs()) > 0 {
+if _, ok := au.mutation.DocumentID(); au.mutation.DocumentCleared() && !ok {
return errors.New(`ent: clearing a required unique edge "Attachment.document"`)
}
return nil
@@ -174,9 +160,6 @@ func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
if value, ok := au.mutation.GetType(); ok {
_spec.SetField(attachment.FieldType, field.TypeEnum, value)
}
-if value, ok := au.mutation.Primary(); ok {
-_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
-}
if au.mutation.ItemCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
@@ -275,20 +258,6 @@ func (auo *AttachmentUpdateOne) SetNillableType(a *attachment.Type) *AttachmentU
return auo
}
-// SetPrimary sets the "primary" field.
-func (auo *AttachmentUpdateOne) SetPrimary(b bool) *AttachmentUpdateOne {
-auo.mutation.SetPrimary(b)
-return auo
-}
-// SetNillablePrimary sets the "primary" field if the given value is not nil.
-func (auo *AttachmentUpdateOne) SetNillablePrimary(b *bool) *AttachmentUpdateOne {
-if b != nil {
-auo.SetPrimary(*b)
-}
-return auo
-}
// SetItemID sets the "item" edge to the Item entity by ID.
func (auo *AttachmentUpdateOne) SetItemID(id uuid.UUID) *AttachmentUpdateOne {
auo.mutation.SetItemID(id)
@@ -384,10 +353,10 @@ func (auo *AttachmentUpdateOne) check() error {
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
}
}
-if auo.mutation.ItemCleared() && len(auo.mutation.ItemIDs()) > 0 {
+if _, ok := auo.mutation.ItemID(); auo.mutation.ItemCleared() && !ok {
return errors.New(`ent: clearing a required unique edge "Attachment.item"`)
}
-if auo.mutation.DocumentCleared() && len(auo.mutation.DocumentIDs()) > 0 {
+if _, ok := auo.mutation.DocumentID(); auo.mutation.DocumentCleared() && !ok {
return errors.New(`ent: clearing a required unique edge "Attachment.document"`)
}
return nil
@@ -428,9 +397,6 @@ func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment,
if value, ok := auo.mutation.GetType(); ok {
_spec.SetField(attachment.FieldType, field.TypeEnum, value)
}
-if value, ok := auo.mutation.Primary(); ok {
-_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
-}
if auo.mutation.ItemCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,


@@ -9,8 +9,8 @@ import (
"entgo.io/ent"
"entgo.io/ent/dialect/sql"
"github.com/google/uuid"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
)
// AuthRoles is the model entity for the AuthRoles schema.
@@ -39,10 +39,12 @@ type AuthRolesEdges struct {
// TokenOrErr returns the Token value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AuthRolesEdges) TokenOrErr() (*AuthTokens, error) {
-if e.Token != nil {
+if e.loadedTypes[0] {
+if e.Token == nil {
+// Edge was loaded but was not found.
+return nil, &NotFoundError{label: authtokens.Label}
+}
return e.Token, nil
-} else if e.loadedTypes[0] {
-return nil, &NotFoundError{label: authtokens.Label}
}
return nil, &NotLoadedError{edge: "token"}
}
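Note: this hunk (and the matching UserOrErr/RolesOrErr hunks further down) swaps one eager-loading accessor shape for another: the removed form checks the loaded value first and falls back to the load flag, while the added form checks the load flag first and then inspects the value. For the three states an eager-loaded edge can be in (not loaded, loaded but missing, loaded and present) the two shapes return the same results. A small self-contained comparison using toy types rather than the generated ones:

```go
package main

import (
	"errors"
	"fmt"
)

type token struct{ ID int }

var (
	errNotFound  = errors.New("auth_tokens not found")
	errNotLoaded = errors.New(`edge "token" was not eager-loaded`)
)

// valueFirst mirrors the removed accessor: look at the value, then the flag.
func valueFirst(tok *token, loaded bool) (*token, error) {
	if tok != nil {
		return tok, nil
	} else if loaded {
		return nil, errNotFound
	}
	return nil, errNotLoaded
}

// flagFirst mirrors the added accessor: look at the flag, then the value.
func flagFirst(tok *token, loaded bool) (*token, error) {
	if loaded {
		if tok == nil {
			// Edge was loaded but was not found.
			return nil, errNotFound
		}
		return tok, nil
	}
	return nil, errNotLoaded
}

func main() {
	cases := []struct {
		tok    *token
		loaded bool
	}{
		{nil, false},          // edge never loaded
		{nil, true},           // loaded, row missing
		{&token{ID: 1}, true}, // loaded and present
	}
	for _, c := range cases {
		a, errA := valueFirst(c.tok, c.loaded)
		b, errB := flagFirst(c.tok, c.loaded)
		fmt.Println(a == b, errA == errB) // true true for every case
	}
}
```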


@@ -5,7 +5,7 @@ package authroles
import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
+"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// ID filters vertices based on their ID field.
@@ -98,15 +98,32 @@ func HasTokenWith(preds ...predicate.AuthTokens) predicate.AuthRoles {
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.AuthRoles) predicate.AuthRoles {
-return predicate.AuthRoles(sql.AndPredicates(predicates...))
+return predicate.AuthRoles(func(s *sql.Selector) {
+s1 := s.Clone().SetP(nil)
+for _, p := range predicates {
+p(s1)
+}
+s.Where(s1.P())
+})
}
// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.AuthRoles) predicate.AuthRoles {
-return predicate.AuthRoles(sql.OrPredicates(predicates...))
+return predicate.AuthRoles(func(s *sql.Selector) {
+s1 := s.Clone().SetP(nil)
+for i, p := range predicates {
+if i > 0 {
+s1.Or()
+}
+p(s1)
+}
+s.Where(s1.P())
+})
}
// Not applies the not operator on the given predicate.
func Not(p predicate.AuthRoles) predicate.AuthRoles {
-return predicate.AuthRoles(sql.NotPredicates(p))
+return predicate.AuthRoles(func(s *sql.Selector) {
+p(s.Not())
+})
}


@@ -10,8 +10,8 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
)
// AuthRolesCreate is the builder for creating a AuthRoles entity.
@@ -158,15 +158,11 @@ func (arc *AuthRolesCreate) createSpec() (*AuthRoles, *sqlgraph.CreateSpec) {
// AuthRolesCreateBulk is the builder for creating many AuthRoles entities in bulk.
type AuthRolesCreateBulk struct {
config
-err error
builders []*AuthRolesCreate
}
// Save creates the AuthRoles entities in the database.
func (arcb *AuthRolesCreateBulk) Save(ctx context.Context) ([]*AuthRoles, error) {
-if arcb.err != nil {
-return nil, arcb.err
-}
specs := make([]*sqlgraph.CreateSpec, len(arcb.builders))
nodes := make([]*AuthRoles, len(arcb.builders))
mutators := make([]Mutator, len(arcb.builders))


@@ -8,8 +8,8 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
+"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// AuthRolesDelete is the builder for deleting a AuthRoles entity.


@@ -7,14 +7,13 @@ import (
"fmt"
"math"
-"entgo.io/ent"
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
+"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// AuthRolesQuery is the builder for querying AuthRoles entities.
@@ -87,7 +86,7 @@ func (arq *AuthRolesQuery) QueryToken() *AuthTokensQuery {
// First returns the first AuthRoles entity from the query.
// Returns a *NotFoundError when no AuthRoles was found.
func (arq *AuthRolesQuery) First(ctx context.Context) (*AuthRoles, error) {
-nodes, err := arq.Limit(1).All(setContextOp(ctx, arq.ctx, ent.OpQueryFirst))
+nodes, err := arq.Limit(1).All(setContextOp(ctx, arq.ctx, "First"))
if err != nil {
return nil, err
}
@@ -110,7 +109,7 @@ func (arq *AuthRolesQuery) FirstX(ctx context.Context) *AuthRoles {
// Returns a *NotFoundError when no AuthRoles ID was found.
func (arq *AuthRolesQuery) FirstID(ctx context.Context) (id int, err error) {
var ids []int
-if ids, err = arq.Limit(1).IDs(setContextOp(ctx, arq.ctx, ent.OpQueryFirstID)); err != nil {
+if ids, err = arq.Limit(1).IDs(setContextOp(ctx, arq.ctx, "FirstID")); err != nil {
return
}
if len(ids) == 0 {
@@ -133,7 +132,7 @@ func (arq *AuthRolesQuery) FirstIDX(ctx context.Context) int {
// Returns a *NotSingularError when more than one AuthRoles entity is found.
// Returns a *NotFoundError when no AuthRoles entities are found.
func (arq *AuthRolesQuery) Only(ctx context.Context) (*AuthRoles, error) {
-nodes, err := arq.Limit(2).All(setContextOp(ctx, arq.ctx, ent.OpQueryOnly))
+nodes, err := arq.Limit(2).All(setContextOp(ctx, arq.ctx, "Only"))
if err != nil {
return nil, err
}
@@ -161,7 +160,7 @@ func (arq *AuthRolesQuery) OnlyX(ctx context.Context) *AuthRoles {
// Returns a *NotFoundError when no entities are found.
func (arq *AuthRolesQuery) OnlyID(ctx context.Context) (id int, err error) {
var ids []int
-if ids, err = arq.Limit(2).IDs(setContextOp(ctx, arq.ctx, ent.OpQueryOnlyID)); err != nil {
+if ids, err = arq.Limit(2).IDs(setContextOp(ctx, arq.ctx, "OnlyID")); err != nil {
return
}
switch len(ids) {
@@ -186,7 +185,7 @@ func (arq *AuthRolesQuery) OnlyIDX(ctx context.Context) int {
// All executes the query and returns a list of AuthRolesSlice.
func (arq *AuthRolesQuery) All(ctx context.Context) ([]*AuthRoles, error) {
-ctx = setContextOp(ctx, arq.ctx, ent.OpQueryAll)
+ctx = setContextOp(ctx, arq.ctx, "All")
if err := arq.prepareQuery(ctx); err != nil {
return nil, err
}
@@ -208,7 +207,7 @@ func (arq *AuthRolesQuery) IDs(ctx context.Context) (ids []int, err error) {
if arq.ctx.Unique == nil && arq.path != nil {
arq.Unique(true)
}
-ctx = setContextOp(ctx, arq.ctx, ent.OpQueryIDs)
+ctx = setContextOp(ctx, arq.ctx, "IDs")
if err = arq.Select(authroles.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
@@ -226,7 +225,7 @@ func (arq *AuthRolesQuery) IDsX(ctx context.Context) []int {
// Count returns the count of the given query.
func (arq *AuthRolesQuery) Count(ctx context.Context) (int, error) {
-ctx = setContextOp(ctx, arq.ctx, ent.OpQueryCount)
+ctx = setContextOp(ctx, arq.ctx, "Count")
if err := arq.prepareQuery(ctx); err != nil {
return 0, err
}
@@ -244,7 +243,7 @@ func (arq *AuthRolesQuery) CountX(ctx context.Context) int {
// Exist returns true if the query has elements in the graph.
func (arq *AuthRolesQuery) Exist(ctx context.Context) (bool, error) {
-ctx = setContextOp(ctx, arq.ctx, ent.OpQueryExist)
+ctx = setContextOp(ctx, arq.ctx, "Exist")
switch _, err := arq.FirstID(ctx); {
case IsNotFound(err):
return false, nil
@@ -538,7 +537,7 @@ func (argb *AuthRolesGroupBy) Aggregate(fns ...AggregateFunc) *AuthRolesGroupBy
// Scan applies the selector query and scans the result into the given value.
func (argb *AuthRolesGroupBy) Scan(ctx context.Context, v any) error {
-ctx = setContextOp(ctx, argb.build.ctx, ent.OpQueryGroupBy)
+ctx = setContextOp(ctx, argb.build.ctx, "GroupBy")
if err := argb.build.prepareQuery(ctx); err != nil {
return err
}
@@ -586,7 +585,7 @@ func (ars *AuthRolesSelect) Aggregate(fns ...AggregateFunc) *AuthRolesSelect {
// Scan applies the selector query and scans the result into the given value.
func (ars *AuthRolesSelect) Scan(ctx context.Context, v any) error {
-ctx = setContextOp(ctx, ars.ctx, ent.OpQuerySelect)
+ctx = setContextOp(ctx, ars.ctx, "Select")
if err := ars.prepareQuery(ctx); err != nil {
return err
}


@@ -11,9 +11,9 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
+"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// AuthRolesUpdate is the builder for updating AuthRoles entities.


@@ -10,9 +10,9 @@ import (
"entgo.io/ent"
"entgo.io/ent/dialect/sql"
"github.com/google/uuid"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/user"
+"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)
// AuthTokens is the model entity for the AuthTokens schema.
@@ -49,10 +49,12 @@ type AuthTokensEdges struct {
// UserOrErr returns the User value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AuthTokensEdges) UserOrErr() (*User, error) {
-if e.User != nil {
+if e.loadedTypes[0] {
+if e.User == nil {
+// Edge was loaded but was not found.
+return nil, &NotFoundError{label: user.Label}
+}
return e.User, nil
-} else if e.loadedTypes[0] {
-return nil, &NotFoundError{label: user.Label}
}
return nil, &NotLoadedError{edge: "user"}
}
@@ -60,10 +62,12 @@ func (e AuthTokensEdges) UserOrErr() (*User, error) {
// RolesOrErr returns the Roles value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AuthTokensEdges) RolesOrErr() (*AuthRoles, error) {
-if e.Roles != nil {
+if e.loadedTypes[1] {
+if e.Roles == nil {
+// Edge was loaded but was not found.
+return nil, &NotFoundError{label: authroles.Label}
+}
return e.Roles, nil
-} else if e.loadedTypes[1] {
-return nil, &NotFoundError{label: authroles.Label}
}
return nil, &NotLoadedError{edge: "roles"}
}


@@ -8,7 +8,7 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"github.com/google/uuid"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
+"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// ID filters vertices based on their ID field.
@@ -284,15 +284,32 @@ func HasRolesWith(preds ...predicate.AuthRoles) predicate.AuthTokens {
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.AuthTokens) predicate.AuthTokens {
-return predicate.AuthTokens(sql.AndPredicates(predicates...))
+return predicate.AuthTokens(func(s *sql.Selector) {
+s1 := s.Clone().SetP(nil)
+for _, p := range predicates {
+p(s1)
+}
+s.Where(s1.P())
+})
}
// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.AuthTokens) predicate.AuthTokens {
-return predicate.AuthTokens(sql.OrPredicates(predicates...))
+return predicate.AuthTokens(func(s *sql.Selector) {
+s1 := s.Clone().SetP(nil)
+for i, p := range predicates {
+if i > 0 {
+s1.Or()
+}
+p(s1)
+}
+s.Where(s1.P())
+})
}
// Not applies the not operator on the given predicate.
func Not(p predicate.AuthTokens) predicate.AuthTokens {
-return predicate.AuthTokens(sql.NotPredicates(p))
+return predicate.AuthTokens(func(s *sql.Selector) {
+p(s.Not())
+})
}


@@ -11,9 +11,9 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
"github.com/google/uuid"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/user"
+"github.com/hay-kot/homebox/backend/internal/data/ent/user"
)
// AuthTokensCreate is the builder for creating a AuthTokens entity.
@@ -280,15 +280,11 @@ func (atc *AuthTokensCreate) createSpec() (*AuthTokens, *sqlgraph.CreateSpec) {
// AuthTokensCreateBulk is the builder for creating many AuthTokens entities in bulk.
type AuthTokensCreateBulk struct {
config
-err error
builders []*AuthTokensCreate
}
// Save creates the AuthTokens entities in the database.
func (atcb *AuthTokensCreateBulk) Save(ctx context.Context) ([]*AuthTokens, error) {
-if atcb.err != nil {
-return nil, atcb.err
-}
specs := make([]*sqlgraph.CreateSpec, len(atcb.builders))
nodes := make([]*AuthTokens, len(atcb.builders))
mutators := make([]Mutator, len(atcb.builders))


@@ -8,8 +8,8 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
+"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
-"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
+"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
)
// AuthTokensDelete is the builder for deleting a AuthTokens entity.

Some files were not shown because too many files have changed in this diff.