Mirror of https://github.com/sysadminsmedia/homebox.git (synced 2025-12-24 06:28:34 +01:00)

Compare commits: v0.13.0 ... katos/purc (4 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 809b0db5e5 | |
| | ae8f568bfa | |
| | 87725348be | |
| | da78f13513 | |
65  .github/scripts/update_currencies.py (vendored)
@@ -1,65 +0,0 @@
import requests
import json
import os

def fetch_currencies():
    try:
        response = requests.get('https://restcountries.com/v3.1/all')
        response.raise_for_status()
    except requests.exceptions.Timeout:
        print("Request to the API timed out.")
        return []
    except requests.exceptions.RequestException as e:
        print(f"An error occurred while making the request: {e}")
        return []

    try:
        countries = response.json()
    except json.JSONDecodeError:
        print("Failed to decode JSON from the response.")
        return []

    currencies_list = []
    for country in countries:
        country_name = country.get('name', {}).get('common')
        country_currencies = country.get('currencies', {})
        for currency_code, currency_info in country_currencies.items():
            symbol = currency_info.get('symbol', '')
            currencies_list.append({
                'code': currency_code,
                'local': country_name,
                'symbol': symbol,
                'name': currency_info.get('name')
            })

    return currencies_list

def save_currencies(currencies, file_path):
    try:
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(currencies, f, ensure_ascii=False, indent=4)
    except IOError as e:
        print(f"An error occurred while writing to the file: {e}")

def load_existing_currencies(file_path):
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            return json.load(f)
    except (IOError, json.JSONDecodeError):
        return []  # Return an empty list if file doesn't exist or is invalid

def main():
    save_path = 'backend/internal/core/currencies/currencies.json'

    existing_currencies = load_existing_currencies(save_path)
    new_currencies = fetch_currencies()

    if new_currencies == existing_currencies:
        print("Currencies up-to-date with API, skipping commit.")
    else:
        save_currencies(new_currencies, save_path)
        print("Currencies updated and saved.")

if __name__ == "__main__":
    main()
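Note: each record the deleted script wrote to backend/internal/core/currencies/currencies.json has four fields (code, local, symbol, name). A minimal Go sketch of decoding that file, assuming only the shape shown in the script above; the struct and loader names here are illustrative, not part of the Homebox codebase:

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// currency mirrors the objects emitted by the deleted update_currencies.py:
// {"code": ..., "local": ..., "symbol": ..., "name": ...}.
// Struct and function names are illustrative only.
type currency struct {
	Code   string `json:"code"`
	Local  string `json:"local"`
	Symbol string `json:"symbol"`
	Name   string `json:"name"`
}

func loadCurrencies(path string) ([]currency, error) {
	raw, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	var out []currency
	if err := json.Unmarshal(raw, &out); err != nil {
		return nil, err
	}
	return out, nil
}

func main() {
	cs, err := loadCurrencies("backend/internal/core/currencies/currencies.json")
	if err != nil {
		fmt.Println("load failed:", err)
		return
	}
	fmt.Println("loaded", len(cs), "currency entries")
}
```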
@@ -92,8 +92,8 @@ jobs:
          tags: ${{ steps.metadata.outputs.tags }}
          labels: ${{ steps.metadata.outputs.labels }}
          platforms: linux/amd64,linux/arm64,linux/arm/v7
          cache-from: type=gha
          cache-to: type=gha,mode=max
          # cache-from: type=gha
          # cache-to: type=gha,mode=max
          build-args: |
            VERSION=${{ github.ref_name }}
            COMMIT=${{ github.sha }}
4  .github/workflows/docker-publish.yaml (vendored)
@@ -89,8 +89,8 @@ jobs:
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: linux/amd64,linux/arm64,linux/arm/v7
          cache-from: type=gha
          cache-to: type=gha,mode=max
          # cache-from: type=gha
          # cache-to: type=gha,mode=max
          build-args: |
            VERSION=${{ github.ref_name }}
            COMMIT=${{ github.sha }}
100  .github/workflows/update-currencies.yml (vendored)
@@ -1,100 +0,0 @@
name: Update Currencies

on:
  push:
    branches:
      - main

jobs:
  update-currencies:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.8'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install requests

      - name: Run currency fetch script
        run: python .github/scripts/update_currencies.py

      - name: Check for changes
        id: check_changes
        run: |
          if [[ $(git status --porcelain) ]]; then
            echo "Changes detected."
            echo "changes=true" >> $GITHUB_ENV
          else
            echo "No changes detected."
            echo "changes=false" >> $GITHUB_ENV
          fi

      - name: Delete existing update-currencies branch
        run: |
          if git show-ref --verify --quiet refs/heads/update-currencies; then
            git branch -D update-currencies
            echo "Deleted existing update-currencies branch."
          else
            echo "No existing update-currencies branch to delete."
          fi

      - name: Create new update-currencies branch
        if: env.changes == 'true'
        run: |
          git config --global user.name "github-actions[bot]"
          git config --global user.email "github-actions[bot]@users.noreply.github.com"

          # Create a new branch
          git checkout -b update-currencies
          git add backend/internal/core/currencies/currencies.json
          git commit -m "Update currencies.json"

          # Fetch the latest changes from the remote
          git fetch origin

          # Attempt to rebase with the latest changes
          if git show-ref --verify --quiet refs/remotes/origin/update-currencies; then
            if ! git rebase origin/update-currencies; then
              echo "Rebase conflicts occurred. Please resolve them manually."
              echo "To resolve conflicts, check out the 'update-currencies' branch locally."
              exit 1
            fi
          else
            echo "No existing remote branch 'update-currencies'. Skipping rebase."
          fi

          # Push the new branch to the remote
          if ! git push --set-upstream origin update-currencies; then
            echo "Push failed, trying to fetch and rebase again."
            git fetch origin
            if git show-ref --verify --quiet refs/remotes/origin/update-currencies; then
              if ! git rebase origin/update-currencies; then
                echo "Second rebase failed. Please resolve manually."
                exit 1
              fi
            else
              echo "No existing remote branch 'update-currencies'. Skipping rebase."
            fi
            if ! git push --set-upstream origin update-currencies; then
              echo "Second push failed. Please resolve manually."
              exit 1
            fi
          fi

          # Create a pull request
          curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
            -X POST \
            -d '{"title": "Update currencies", "head": "update-currencies", "base": "main"}' \
            https://api.github.com/repos/${{ github.repository }}/pulls

      - name: Notify no changes
        if: env.changes == 'false'
        run: echo "Currencies up-to-date with API, skipping commit."
19  Dockerfile
@@ -1,23 +1,13 @@
# Node dependencies
FROM node:18-alpine AS frontend-dependencies
WORKDIR /app
RUN npm install -g pnpm
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist

# Build Nuxt
FROM node:18-alpine AS frontend-builder
WORKDIR /app
RUN npm install -g pnpm
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist
COPY frontend .
COPY --from=frontend-dependencies /app/node_modules ./node_modules
RUN pnpm build

FROM golang:alpine AS builder-dependencies
WORKDIR /go/src/app
COPY ./backend .
RUN go mod download

# Build API
FROM golang:alpine AS builder
ARG BUILD_TIME

@@ -29,11 +19,10 @@ RUN apk update && \

WORKDIR /go/src/app
COPY ./backend .
RUN go get -d -v ./...
RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
RUN --mount=type=cache,target=/root/.cache/go-build \
    CGO_ENABLED=0 GOOS=linux go build \
RUN CGO_ENABLED=0 GOOS=linux go build \
    -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
    -o /go/bin/api \
    -v ./app/api/*.go
@@ -1,42 +1,35 @@
# Node dependencies
FROM node:18-alpine AS frontend-dependencies
WORKDIR /app
RUN npm install -g pnpm
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist

# Build Nuxt
FROM node:18-alpine AS frontend-builder
WORKDIR /app
RUN npm install -g pnpm
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist
COPY frontend .
COPY --from=frontend-dependencies /app/node_modules ./node_modules
RUN pnpm build

FROM golang:alpine AS builder-dependencies
WORKDIR /go/src/app
COPY ./backend .
RUN go mod download

# Build API
FROM golang:alpine AS builder
ARG BUILD_TIME
ARG COMMIT
ARG VERSION
ARG BUSYBOX_VERSION=1.36.1-r31
RUN apk update && \
    apk upgrade && \
    apk add --update git build-base gcc g++

WORKDIR /go/src/app
COPY ./backend .
RUN go get -d -v ./...
RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
RUN --mount=type=cache,target=/root/.cache/go-build \
    CGO_ENABLED=0 GOOS=linux go build \
RUN CGO_ENABLED=0 GOOS=linux go build \
    -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
    -o /go/bin/api \
    -v ./app/api/*.go
    -v ./app/api/*.go && \
    chmod +x /go/bin/api && \
    # create a directory so that we can copy it in the next stage
    mkdir /data

FROM gcr.io/distroless/java:latest
@@ -57,12 +57,6 @@ func WithSecureCookies(secure bool) func(*V1Controller) {
	}
}

func WithURL(url string) func(*V1Controller) {
	return func(ctrl *V1Controller) {
		ctrl.url = url
	}
}

type V1Controller struct {
	cookieSecure bool
	repo         *repo.AllRepos

@@ -71,7 +65,6 @@ type V1Controller struct {
	isDemo            bool
	allowRegistration bool
	bus               *eventbus.EventBus
	url               string
}

type (
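Note: the removed WithURL option above follows the functional-options pattern used to configure V1Controller. A self-contained Go sketch of that pattern; only WithURL and the url field come from the diff above, the other names are illustrative:

```go
package main

import "fmt"

// controller stands in for V1Controller; only the url field is taken from
// the diff above, everything else here is illustrative.
type controller struct {
	url string
}

// option mirrors the func(*V1Controller) option shape used by the handlers.
type option func(*controller)

// withURL sketches the removed WithURL option: it stores the base URL on the
// controller when the option is applied.
func withURL(url string) option {
	return func(c *controller) {
		c.url = url
	}
}

func newController(opts ...option) *controller {
	c := &controller{}
	for _, opt := range opts {
		opt(c)
	}
	return c
}

func main() {
	// Illustrative value; the real option was fed from Web.Host and Web.Port.
	c := newController(withURL("http://localhost:7745"))
	fmt.Println("configured base URL:", c.url)
}
```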
@@ -6,7 +6,6 @@ import (
	"errors"
	"math/big"
	"net/http"
	"net/url"
	"strings"

	"github.com/google/uuid"

@@ -334,7 +333,7 @@ func (ctrl *V1Controller) HandleItemsExport() errchain.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) error {
		ctx := services.NewContext(r.Context())

		csvData, err := ctrl.svc.Items.ExportCSV(r.Context(), ctx.GID, getHBURL(r.Header.Get("Referer"), ctrl.url))
		csvData, err := ctrl.svc.Items.ExportCSV(r.Context(), ctx.GID)
		if err != nil {
			log.Err(err).Msg("failed to export items")
			return validate.NewRequestError(err, http.StatusInternalServerError)

@@ -348,26 +347,3 @@ func (ctrl *V1Controller) HandleItemsExport() errchain.HandlerFunc {
		return writer.WriteAll(csvData)
	}
}

func getHBURL(refererHeader, fallback string) (hbURL string) {
	hbURL = refererHeader
	if hbURL == "" {
		hbURL = fallback
	}

	return stripPathFromURL(hbURL)
}

// stripPathFromURL removes the path from a URL.
// ex. https://example.com/tools -> https://example.com
func stripPathFromURL(rawURL string) string {
	parsedURL, err := url.Parse(rawURL)
	if err != nil {
		log.Err(err).Msg("failed to parse URL")
		return ""
	}

	strippedURL := url.URL{Scheme: parsedURL.Scheme, Host: parsedURL.Host}

	return strippedURL.String()
}
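Note: the two helpers removed above (getHBURL and stripPathFromURL) derived the export's base URL from the Referer header with a configured fallback. A standalone sketch of the same behaviour, kept runnable outside the Homebox codebase (the plain log call and the example values are illustrative):

```go
package main

import (
	"fmt"
	"log"
	"net/url"
)

// stripPathFromURL keeps only the scheme and host of a URL, mirroring the
// removed helper: https://example.com/tools -> https://example.com.
func stripPathFromURL(rawURL string) string {
	parsedURL, err := url.Parse(rawURL)
	if err != nil {
		log.Printf("failed to parse URL: %v", err)
		return ""
	}
	stripped := url.URL{Scheme: parsedURL.Scheme, Host: parsedURL.Host}
	return stripped.String()
}

// getHBURL prefers the Referer header and falls back to the configured URL,
// as the removed handler helper did.
func getHBURL(refererHeader, fallback string) string {
	hbURL := refererHeader
	if hbURL == "" {
		hbURL = fallback
	}
	return stripPathFromURL(hbURL)
}

func main() {
	// Illustrative values only.
	fmt.Println(getHBURL("https://homebox.example.com/items?page=2", "http://localhost:7745"))
	fmt.Println(getHBURL("", "http://localhost:7745"))
}
```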
@@ -3,7 +3,6 @@ package main
import (
	"embed"
	"errors"
	"fmt"
	"io"
	"mime"
	"net/http"

@@ -55,7 +54,6 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
		v1.WithMaxUploadSize(a.conf.Web.MaxUploadSize),
		v1.WithRegistration(a.conf.Options.AllowRegistration),
		v1.WithDemoStatus(a.conf.Demo), // Disable Password Change in Demo Mode
		v1.WithURL(fmt.Sprintf("%s:%s", a.conf.Web.Host, a.conf.Web.Port)),
	)

	r.Route(prefix+"/v1", func(r chi.Router) {
@@ -2145,6 +2145,9 @@
        "purchaseFrom": {
          "type": "string"
        },
        "purchaseFrom": {
          "type": "string"
        },
        "purchasePrice": {
          "type": "string",
          "example": "0"
@@ -170,6 +170,8 @@ definitions:
      x-omitempty: true
    purchaseFrom:
      type: string
    purchaseMethod:
      type: string
    purchasePrice:
      example: "0"
      type: string
File diff suppressed because it is too large
@@ -18,7 +18,6 @@ type ExportCSVRow struct {
	LabelStr LabelString  `csv:"HB.labels"`
	AssetID  repo.AssetID `csv:"HB.asset_id"`
	Archived bool         `csv:"HB.archived"`
	URL      string       `csv:"HB.url"`

	Name     string `csv:"HB.name"`
	Quantity int    `csv:"HB.quantity"`

@@ -153,7 +153,7 @@ func (s *IOSheet) Read(data io.Reader) error {
}

// ReadItems writes the sheet to a writer.
func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.UUID, repos *repo.AllRepos, hbURL string) error {
func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.UUID, repos *repo.AllRepos) error {
	s.Rows = make([]ExportCSVRow, len(items))

	extraHeaders := map[string]struct{}{}

@@ -178,8 +178,6 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
			labelString[i] = l.Name
		}

		url := generateItemURL(item, hbURL)

		customFields := make([]ExportItemFields, len(item.Fields))

		for i, f := range item.Fields {

@@ -203,11 +201,11 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
			Description: item.Description,
			Insured:     item.Insured,
			Archived:    item.Archived,
			URL:         url,

			PurchasePrice: item.PurchasePrice,
			PurchaseFrom:  item.PurchaseFrom,
			PurchaseTime:  item.PurchaseTime,
			PurchasePrice:  item.PurchasePrice,
			PurchaseMethod: item.PurchaseMethod,
			PurchaseFrom:   item.PurchaseFrom,
			PurchaseTime:   item.PurchaseTime,

			Manufacturer: item.Manufacturer,
			ModelNumber:  item.ModelNumber,

@@ -255,14 +253,6 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
	return nil
}

func generateItemURL(item repo.ItemOut, d string) string {
	url := ""
	if item.ID != uuid.Nil {
		url = fmt.Sprintf("%s/item/%s", d, item.ID.String())
	}
	return url
}

// CSV writes the current sheet to a 2d array, for compatibility with TSV/CSV files.
func (s *IOSheet) CSV() ([][]string, error) {
	memcsv := make([][]string, len(s.Rows)+1)
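Note: the removed generateItemURL helper above built a per-item link by joining a base URL with /item/<id>. A standalone sketch of that behaviour, using only the standard library and google/uuid (the example base URL is illustrative):

```go
package main

import (
	"fmt"

	"github.com/google/uuid"
)

// generateItemURL mirrors the removed helper: it returns "<base>/item/<id>"
// for a real item ID and an empty string for the zero UUID.
func generateItemURL(base string, id uuid.UUID) string {
	if id == uuid.Nil {
		return ""
	}
	return fmt.Sprintf("%s/item/%s", base, id.String())
}

func main() {
	id := uuid.New()
	// Illustrative base URL; the real value came from the Referer header or config.
	fmt.Println(generateItemURL("https://homebox.example.com", id))
	fmt.Println(generateItemURL("https://homebox.example.com", uuid.Nil) == "")
}
```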
@@ -298,6 +298,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
			Archived: row.Archived,

			PurchasePrice: row.PurchasePrice,
			PurchaseFrom:  row.PurchaseMethod,
			PurchaseFrom:  row.PurchaseFrom,
			PurchaseTime:  row.PurchaseTime,

@@ -329,7 +330,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
	return finished, nil
}

func (svc *ItemService) ExportCSV(ctx context.Context, GID uuid.UUID, hbURL string) ([][]string, error) {
func (svc *ItemService) ExportCSV(ctx context.Context, GID uuid.UUID) ([][]string, error) {
	items, err := svc.repo.Items.GetAll(ctx, GID)
	if err != nil {
		return nil, err

@@ -337,7 +338,7 @@ func (svc *ItemService) ExportCSV(ctx context.Context, GID uuid.UUID, hbURL stri
	sheet := reporting.IOSheet{}

	err = sheet.ReadItems(ctx, items, GID, svc.repo, hbURL)
	err = sheet.ReadItems(ctx, items, GID, svc.repo)
	if err != nil {
		return nil, err
	}
@@ -76,6 +76,8 @@ func (Item) Fields() []ent.Field {

		// ------------------------------------
		// item purchase
		field.String("purchase_method").
			Optional(),
		field.Time("purchase_time").
			Optional(),
		field.String("purchase_from").
@@ -17,7 +17,7 @@ CREATE INDEX `documenttoken_token` ON `document_tokens` (`token`);
-- create "groups" table
CREATE TABLE `groups` (`id` uuid NOT NULL, `created_at` datetime NOT NULL, `updated_at` datetime NOT NULL, `name` text NOT NULL, `currency` text NOT NULL DEFAULT 'usd', PRIMARY KEY (`id`));
-- create "items" table
CREATE TABLE `items` (`id` uuid NOT NULL, `created_at` datetime NOT NULL, `updated_at` datetime NOT NULL, `name` text NOT NULL, `description` text NULL, `import_ref` text NULL, `notes` text NULL, `quantity` integer NOT NULL DEFAULT 1, `insured` bool NOT NULL DEFAULT false, `serial_number` text NULL, `model_number` text NULL, `manufacturer` text NULL, `lifetime_warranty` bool NOT NULL DEFAULT false, `warranty_expires` datetime NULL, `warranty_details` text NULL, `purchase_time` datetime NULL, `purchase_from` text NULL, `purchase_price` real NOT NULL DEFAULT 0, `sold_time` datetime NULL, `sold_to` text NULL, `sold_price` real NOT NULL DEFAULT 0, `sold_notes` text NULL, `group_items` uuid NOT NULL, `location_items` uuid NULL, PRIMARY KEY (`id`), CONSTRAINT `items_groups_items` FOREIGN KEY (`group_items`) REFERENCES `groups` (`id`) ON DELETE CASCADE, CONSTRAINT `items_locations_items` FOREIGN KEY (`location_items`) REFERENCES `locations` (`id`) ON DELETE CASCADE);
CREATE TABLE `items` (`id` uuid NOT NULL, `created_at` datetime NOT NULL, `updated_at` datetime NOT NULL, `name` text NOT NULL, `description` text NULL, `import_ref` text NULL, `notes` text NULL, `quantity` integer NOT NULL DEFAULT 1, `insured` bool NOT NULL DEFAULT false, `serial_number` text NULL, `model_number` text NULL, `manufacturer` text NULL, `lifetime_warranty` bool NOT NULL DEFAULT false, `warranty_expires` datetime NULL, `warranty_details` text NULL, `purchase_method` text NULL, `purchase_time` datetime NULL, `purchase_from` text NULL, `purchase_price` real NOT NULL DEFAULT 0, `sold_time` datetime NULL, `sold_to` text NULL, `sold_price` real NOT NULL DEFAULT 0, `sold_notes` text NULL, `group_items` uuid NOT NULL, `location_items` uuid NULL, PRIMARY KEY (`id`), CONSTRAINT `items_groups_items` FOREIGN KEY (`group_items`) REFERENCES `groups` (`id`) ON DELETE CASCADE, CONSTRAINT `items_locations_items` FOREIGN KEY (`location_items`) REFERENCES `locations` (`id`) ON DELETE CASCADE);
-- create index "item_name" to table: "items"
CREATE INDEX `item_name` ON `items` (`name`);
-- create index "item_manufacturer" to table: "items"
@@ -91,9 +91,10 @@ type (
		WarrantyDetails string `json:"warrantyDetails"`

		// Purchase
		PurchaseTime  types.Date `json:"purchaseTime"`
		PurchaseFrom  string     `json:"purchaseFrom"`
		PurchasePrice float64    `json:"purchasePrice,string"`
		PurchaseMethod string     `json:"purchaseMethod"`
		PurchaseTime   types.Date `json:"purchaseTime"`
		PurchaseFrom   string     `json:"purchaseFrom"`
		PurchasePrice  float64    `json:"purchasePrice,string"`

		// Sold
		SoldTime types.Date `json:"soldTime"`

@@ -147,8 +148,9 @@ type (
		WarrantyDetails string `json:"warrantyDetails"`

		// Purchase
		PurchaseTime types.Date `json:"purchaseTime"`
		PurchaseFrom string     `json:"purchaseFrom"`
		PurchaseMethod string     `json:"purchaseMethod"`
		PurchaseTime   types.Date `json:"purchaseTime"`
		PurchaseFrom   string     `json:"purchaseFrom"`

		// Sold
		SoldTime types.Date `json:"soldTime"`

@@ -261,8 +263,8 @@ func mapItemOut(item *ent.Item) ItemOut {
		Manufacturer: item.Manufacturer,

		// Purchase
		PurchaseTime: types.DateFromTime(item.PurchaseTime),
		PurchaseFrom: item.PurchaseFrom,
		PurchaseTime: types.DateFromTime(item.PurchaseTime),
		PurchaseFrom: item.PurchaseFrom,

		// Sold
		SoldTime: types.DateFromTime(item.SoldTime),
@@ -236,6 +236,7 @@ func TestItemsRepository_Update(t *testing.T) {
		ModelNumber:    fk.Str(10),
		Manufacturer:   fk.Str(10),
		PurchaseTime:   types.DateFromTime(time.Now()),
		PurchaseMethod: fk.Str(10),
		PurchaseFrom:   fk.Str(10),
		PurchasePrice:  300.99,
		SoldTime:       types.DateFromTime(time.Now()),

@@ -262,6 +263,7 @@ func TestItemsRepository_Update(t *testing.T) {
	assert.Equal(t, updateData.Manufacturer, got.Manufacturer)
	// assert.Equal(t, updateData.PurchaseTime, got.PurchaseTime)
	assert.Equal(t, updateData.PurchaseFrom, got.PurchaseFrom)
	assert.Equal(t, updateData.PurchaseMethod, got.PurchaseMethod)
	assert.InDelta(t, updateData.PurchasePrice, got.PurchasePrice, 0.01)
	// assert.Equal(t, updateData.SoldTime, got.SoldTime)
	assert.Equal(t, updateData.SoldTo, got.SoldTo)
@@ -1683,14 +1683,12 @@
        "parameters": [
          {
            "type": "string",
            "example": "admin@admin.com",
            "description": "string",
            "name": "username",
            "in": "formData"
          },
          {
            "type": "string",
            "example": "admin",
            "description": "string",
            "name": "password",
            "in": "formData"

@@ -2142,6 +2140,9 @@
        "x-nullable": true,
        "x-omitempty": true
      },
      "purchaseMethod": {
        "type": "string"
      },
      "purchaseFrom": {
        "type": "string"
      },
@@ -63,6 +63,7 @@ Specifying import refs also allows you to update existing items via the CSV impo
| HB.model_number | String | Model of the item |
| HB.manufacturer | String | Manufacturer of the item |
| HB.notes | String (1000) | General notes about the product |
| HB.purchase_method | String | Method of how the item was purchased |
| HB.purchase_from | String | Name of the place the item was purchased from |
| HB.purchase_price | Float64 | |
| HB.purchase_time | Date | Date the item was purchased |
@@ -1 +0,0 @@
shamefully-hoist=true
@@ -32,7 +32,7 @@ export function useRouteQuery(q: string, def: any): WritableComputedRef<any> {
    case "string":
      return computed({
        get: () => {
          const qv = first.value;
          const qv = route.query[q];
          if (Array.isArray(qv)) {
            return qv[0];
          }
@@ -16,6 +16,7 @@ type ImportObj = {
  [`HB.manufacturer`]: string;
  [`HB.notes`]: string;
  [`HB.purchase_price`]: number;
  [`HB.purchase_method`]: string;
  [`HB.purchase_from`]: string;
  [`HB.purchase_time`]: string;
  [`HB.lifetime_warranty`]: boolean;

@@ -62,6 +63,7 @@ function importFileGenerator(entries: number): ImportObj[] {
    [`HB.manufacturer`]: faker.string.alphanumeric(5),
    [`HB.notes`]: "",
    [`HB.purchase_from`]: faker.person.fullName(),
    [`HB.purchase_method`]: faker.string.alphanumeric(5),
    [`HB.purchase_price`]: faker.number.int(100),
    [`HB.purchase_time`]: faker.date.past().toDateString(),
    [`HB.lifetime_warranty`]: half > i,
@@ -40,7 +40,6 @@
  const includeArchived = useRouteQuery("archived", false);
  const fieldSelector = useRouteQuery("fieldSelector", false);
  const negateLabels = useRouteQuery("negateLabels", false);
  const orderBy = useRouteQuery("orderBy", "name");

  const totalPages = computed(() => Math.ceil(total.value / pageSize.value));
  const hasNext = computed(() => page.value * pageSize.value < total.value);

@@ -170,12 +169,6 @@
    }
  });

  watch(orderBy, (newV, oldV) => {
    if (newV !== oldV) {
      search();
    }
  });

  async function fetchValues(field: string): Promise<string[]> {
    if (fieldValuesCache.value[field]) {
      return fieldValuesCache.value[field];

@@ -208,7 +201,6 @@
        pageSize: pageSize.value,
        includeArchived: includeArchived.value ? "true" : "false",
        negateLabels: negateLabels.value ? "true" : "false",
        orderBy: orderBy.value,
      },
    });
  }

@@ -239,7 +231,6 @@
    includeArchived: includeArchived.value,
    page: page.value,
    pageSize: pageSize.value,
    orderBy: orderBy.value,
    fields,
  });

@@ -287,7 +278,6 @@
    archived: includeArchived.value ? "true" : "false",
    fieldSelector: fieldSelector.value ? "true" : "false",
    negateLabels: negateLabels.value ? "true" : "false",
    orderBy: orderBy.value,
    pageSize: pageSize.value,
    page: page.value,
    q: query.value,

@@ -321,7 +311,6 @@
    fieldSelector: "false",
    pageSize: 10,
    page: 1,
    orderBy: "name",
    q: "",
    loc: [],
    lab: [],

@@ -384,14 +373,6 @@
          <input v-model="negateLabels" type="checkbox" class="toggle toggle-sm toggle-primary" />
          <span class="label-text ml-4"> Negate selected labels </span>
        </label>
        <label class="label cursor-pointer mr-auto">
          <select v-model="orderBy" class="select select-bordered select-sm">
            <option value="name" selected>Name</option>
            <option value="createdAt">Created At</option>
            <option value="updatedAt">Updated At</option>
          </select>
          <span class="label-text ml-4"> Order By </span>
        </label>
        <hr class="my-2" />
        <BaseButton class="btn-block btn-sm" @click="reset"> Reset Search</BaseButton>
      </div>
12218  frontend/pnpm-lock.yaml (generated)
File diff suppressed because it is too large