Mirror of https://github.com/sysadminsmedia/homebox.git (synced 2025-12-24 06:28:34 +01:00)
Compare commits: 448 commits, feat/passw ... v0.16.0
| Author | SHA1 | Date |
|---|---|---|
(Commit list: 448 commits, 5e81e60106 through 421c57267b. Only the SHA1 column was captured; the Author, Date, and message cells are empty in this extraction.)
@@ -35,6 +35,6 @@
 // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
 "remoteUser": "node",
 "features": {
-    "golang": "1.21"
+    "ghcr.io/devcontainers/features/go:1": "1.21"
 }
 }
.github/FUNDING.yml (vendored, 2 changes)
@@ -1 +1 @@
-github: [hay-kot]
+github: [tankerkiller125,katosdev,tonyaellie]
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 18 changes)
@@ -1,7 +1,9 @@
 ---
 name: "Bug Report"
 description: "Submit a bug report for the current release"
-labels: ["bug"]
+labels: ["🕷️ bug"]
+projects: ["sysadminsmedia/2"]
+type: "Bug"
 body:
   - type: checkboxes
     id: checks
@@ -19,6 +21,8 @@ body:
       required: true
     - label: I already read the docs and didn't find an answer.
       required: true
+    - label: I can replicate the issue inside the Demo install.
+      required: true
   - type: input
     id: homebox-version
     attributes:
@@ -54,6 +58,18 @@ body:
       - Other
     validations:
       required: true
+  - type: dropdown
+    id: arch
+    attributes:
+      label: OS Architechture
+      description: What type of processor are you running on.
+      multiple: true
+      options:
+        - x86_64 (AMD, Intel)
+        - ARM64
+        - ARM/v7
+    validations:
+      required: true
   - type: textarea
     id: os-details
     attributes:
.github/ISSUE_TEMPLATE/feature_request.yml (vendored, 4 changes)
@@ -1,7 +1,9 @@
 ---
 name: "Feature Request"
 description: "Submit a feature request for the current release"
-labels: ["feature-request"]
+labels: ["⬆️ enhancement"]
+projects: ["sysadminsmedia/2"]
+type: "Enhancement"
 body:
   - type: textarea
     id: problem-statement
.github/pull_request_template.md (vendored, 16 changes)
@@ -55,18 +55,4 @@ _(fill-in or delete this section)_

<!--
Describe how you tested this change.
-->

## Release Notes

_(REQUIRED)_
<!--
If this PR makes user facing changes, please describe them here. This
description will be copied into the release notes/changelog, whenever the
next version is released. Keep this section short, and focus on high level
changes.
Put your text between the block. To omit notes, use NONE within the block.
-->

```release-note
```
-->
.github/scripts/update_currencies.py (vendored, new file, 65 lines)
@@ -0,0 +1,65 @@
import requests
import json
import os

def fetch_currencies():
    try:
        response = requests.get('https://restcountries.com/v3.1/all')
        response.raise_for_status()
    except requests.exceptions.Timeout:
        print("Request to the API timed out.")
        return []
    except requests.exceptions.RequestException as e:
        print(f"An error occurred while making the request: {e}")
        return []

    try:
        countries = response.json()
    except json.JSONDecodeError:
        print("Failed to decode JSON from the response.")
        return []

    currencies_list = []
    for country in countries:
        country_name = country.get('name', {}).get('common')
        country_currencies = country.get('currencies', {})
        for currency_code, currency_info in country_currencies.items():
            symbol = currency_info.get('symbol', '')
            currencies_list.append({
                'code': currency_code,
                'local': country_name,
                'symbol': symbol,
                'name': currency_info.get('name')
            })

    return currencies_list

def save_currencies(currencies, file_path):
    try:
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(currencies, f, ensure_ascii=False, indent=4)
    except IOError as e:
        print(f"An error occurred while writing to the file: {e}")

def load_existing_currencies(file_path):
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            return json.load(f)
    except (IOError, json.JSONDecodeError):
        return []  # Return an empty list if file doesn't exist or is invalid

def main():
    save_path = 'backend/internal/core/currencies/currencies.json'

    existing_currencies = load_existing_currencies(save_path)
    new_currencies = fetch_currencies()

    if new_currencies == existing_currencies:
        print("Currencies up-to-date with API, skipping commit.")
    else:
        save_currencies(new_currencies, save_path)
        print("Currencies updated and saved.")

if __name__ == "__main__":
    main()
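The update-currencies workflow later in this diff runs this script directly. As a rough illustration only (not part of the diff), a local dry run of its fetch/compare logic might look like the sketch below; it assumes you run from the repository root with `requests` installed, and manually adds `.github/scripts` to the import path:

```python
# Hypothetical local dry run of the updater above; assumes the repository root
# is the working directory and `requests` is installed.
import sys

sys.path.insert(0, ".github/scripts")

from update_currencies import fetch_currencies, load_existing_currencies

path = "backend/internal/core/currencies/currencies.json"
existing = load_existing_currencies(path)  # [] if the file is missing or invalid
fetched = fetch_currencies()               # [] on request or decode errors

print(f"existing entries: {len(existing)}, fetched entries: {len(fetched)}")
print("currencies are up to date" if fetched == existing else "update needed")
```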
.github/workflows/binaries-publish.yaml (vendored, new file, 47 lines)
@@ -0,0 +1,47 @@
name: Publish Release Binaries

on:
  push:
    tags: [ 'v*.*.*' ]

jobs:
  # backend-tests:
  #   name: "Backend Server Tests"
  #   uses: sysadminsmedia/homebox/.github/workflows/partial-backend.yaml@main

  # frontend-tests:
  #   name: "Frontend and End-to-End Tests"
  #   uses: sysadminsmedia/homebox/.github/workflows/partial-frontend.yaml@main

  goreleaser:
    name: goreleaser
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Go
        uses: actions/setup-go@v5

      - uses: pnpm/action-setup@v2
        with:
          version: 7.30.1

      - name: Build Frontend and Copy to Backend
        working-directory: frontend
        run: |
          pnpm install --shamefully-hoist
          pnpm run build
          cp -r ./.output/public ../backend/app/api/static/

      - name: Run GoReleaser
        uses: goreleaser/goreleaser-action@v5
        with:
          workdir: "backend"
          distribution: goreleaser
          version: latest
          args: release --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
105
.github/workflows/docker-publish-arm.yaml
vendored
Normal file
105
.github/workflows/docker-publish-arm.yaml
vendored
Normal file
@@ -0,0 +1,105 @@
|
||||
name: Docker publish ARM
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '00 0 * * *'
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
- 'Dockerfile'
|
||||
- 'Dockerfile.rootless'
|
||||
- '.dockerignore'
|
||||
- '.github/workflows'
|
||||
# Publish semver tags as releases.
|
||||
tags: [ 'v*.*.*' ]
|
||||
pull_request:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
- 'Dockerfile'
|
||||
- 'Dockerfile.rootless'
|
||||
- '.dockerignore'
|
||||
- '.github/workflows'
|
||||
|
||||
env:
|
||||
# Use docker.io for Docker Hub if empty
|
||||
REGISTRY: ghcr.io
|
||||
# github.repository as <account>/<repo>
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
attestations: write
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
# Step 1: Checkout repository
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Step 2: Set up Buildx without specifying driver
|
||||
# Let it use default settings to avoid the 'no remote endpoint' issue
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0
|
||||
with:
|
||||
install: true # Ensure Buildx is installed and set up properly
|
||||
use: true # Use Buildx instance directly for this job
|
||||
|
||||
# Step 3: Login against Docker registry except on PR
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3.0.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Step 4: Extract metadata for Docker images
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5.0.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=schedule,pattern=nightly
|
||||
flavor: |
|
||||
suffix=-arm,onlatest=true
|
||||
|
||||
# Step 5: Build and push the Docker image
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: docker/build-push-action@v5.0.0
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
platforms: linux/arm64,linux/arm/v7
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
build-args: |
|
||||
VERSION=${{ github.ref_name }}
|
||||
COMMIT=${{ github.sha }}
|
||||
|
||||
# Step 6: Attest built image to prove build provenance
|
||||
- name: Attest
|
||||
uses: actions/attest-build-provenance@v1
|
||||
id: attest
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||
push-to-registry: true
|
||||
104
.github/workflows/docker-publish-rootless-arm.yaml
vendored
Normal file
104
.github/workflows/docker-publish-rootless-arm.yaml
vendored
Normal file
@@ -0,0 +1,104 @@
|
||||
name: Docker publish rootless ARM
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '00 0 * * *'
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
- 'Dockerfile'
|
||||
- 'Dockerfile.rootless'
|
||||
- '.dockerignore'
|
||||
- '.github/workflows'
|
||||
# Publish semver tags as releases.
|
||||
tags: [ 'v*.*.*' ]
|
||||
pull_request:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
- 'Dockerfile'
|
||||
- 'Dockerfile.rootless'
|
||||
- '.dockerignore'
|
||||
- '.github/workflows'
|
||||
|
||||
env:
|
||||
# Use docker.io for Docker Hub if empty
|
||||
REGISTRY: ghcr.io
|
||||
# github.repository as <account>/<repo>
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
build-rootless:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
attestations: write
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
# Step 1: Checkout repository
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Step 2: Set up Buildx without specifying driver
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0
|
||||
with:
|
||||
install: true # Ensure Buildx is installed and set up properly
|
||||
use: true # Use Buildx instance directly for this job
|
||||
|
||||
# Step 3: Login to Docker registry except on PR
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3.0.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Step 4: Extract metadata for Docker images
|
||||
- name: Extract Docker metadata
|
||||
id: metadata
|
||||
uses: docker/metadata-action@v5.0.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=schedule,pattern=nightly
|
||||
flavor: |
|
||||
suffix=-rootless-arm,onlatest=true
|
||||
|
||||
# Step 5: Build and push the Docker image
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: docker/build-push-action@v5.0.0
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.metadata.outputs.tags }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
platforms: linux/arm64,linux/arm/v7
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
build-args: |
|
||||
VERSION=${{ github.ref_name }}
|
||||
COMMIT=${{ github.sha }}
|
||||
|
||||
# Step 6: Attest built image to prove build provenance
|
||||
- name: Attest
|
||||
uses: actions/attest-build-provenance@v1
|
||||
id: attest
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||
push-to-registry: true
|
||||
108
.github/workflows/docker-publish-rootless.yaml
vendored
Normal file
108
.github/workflows/docker-publish-rootless.yaml
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
name: Docker publish rootless
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '00 0 * * *'
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
- 'Dockerfile'
|
||||
- 'Dockerfile.rootless'
|
||||
- '.dockerignore'
|
||||
- '.github/workflows'
|
||||
# Publish semver tags as releases.
|
||||
tags: [ 'v*.*.*' ]
|
||||
pull_request:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
- 'Dockerfile'
|
||||
- 'Dockerfile.rootless'
|
||||
- '.dockerignore'
|
||||
- '.github/workflows'
|
||||
|
||||
|
||||
env:
|
||||
# Use docker.io for Docker Hub if empty
|
||||
REGISTRY: ghcr.io
|
||||
# github.repository as <account>/<repo>
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
|
||||
jobs:
|
||||
build-rootless:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
# This is used to complete the identity challenge
|
||||
# with sigstore/fulcio when running outside of PRs.
|
||||
attestations: write
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Set up BuildKit Docker container builder to be able to build
|
||||
# multi-platform images and export cache
|
||||
# https://github.com/docker/setup-buildx-action
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0 # v3.0.0
|
||||
|
||||
# Login against a Docker registry except on PR
|
||||
# https://github.com/docker/login-action
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3.0.0 # v3.0.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Extract metadata (tags, labels) for Docker
|
||||
# https://github.com/docker/metadata-action
|
||||
- name: Extract Docker metadata
|
||||
id: metadata
|
||||
uses: docker/metadata-action@v5.0.0 # v5.0.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=schedule,pattern=nightly
|
||||
flavor: |
|
||||
suffix=-rootless,onlatest=true
|
||||
|
||||
# Build and push Docker image with Buildx (don't push on PR)
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: docker/build-push-action@v5.0.0 # v5.0.0
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.metadata.outputs.tags }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
platforms: linux/amd64
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
build-args: |
|
||||
VERSION=${{ github.ref_name }}
|
||||
COMMIT=${{ github.sha }}
|
||||
|
||||
- name: Attest
|
||||
uses: actions/attest-build-provenance@v1
|
||||
id: attest
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||
push-to-registry: true
|
||||
105
.github/workflows/docker-publish.yaml
vendored
Normal file
105
.github/workflows/docker-publish.yaml
vendored
Normal file
@@ -0,0 +1,105 @@
|
||||
name: Docker publish
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '00 0 * * *'
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
- 'Dockerfile'
|
||||
- 'Dockerfile.rootless'
|
||||
- '.dockerignore'
|
||||
- '.github/workflows'
|
||||
# Publish semver tags as releases.
|
||||
tags: [ 'v*.*.*' ]
|
||||
pull_request:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'backend/**'
|
||||
- 'frontend/**'
|
||||
- 'Dockerfile'
|
||||
- 'Dockerfile.rootless'
|
||||
- '.dockerignore'
|
||||
- '.github/workflows'
|
||||
|
||||
env:
|
||||
# Use docker.io for Docker Hub if empty
|
||||
REGISTRY: ghcr.io
|
||||
# github.repository as <account>/<repo>
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
# This is used to complete the identity challenge
|
||||
# with sigstore/fulcio when running outside of PRs.
|
||||
attestations: write
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Set up BuildKit Docker container builder to be able to build
|
||||
# multi-platform images and export cache
|
||||
# https://github.com/docker/setup-buildx-action
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0 # v3.0.0
|
||||
|
||||
# Login against a Docker registry except on PR
|
||||
# https://github.com/docker/login-action
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3.0.0 # v3.0.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Extract metadata (tags, labels) for Docker
|
||||
# https://github.com/docker/metadata-action
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5.0.0 # v5.0.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=schedule,pattern=nightly
|
||||
|
||||
# Build and push Docker image with Buildx (don't push on PR)
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: docker/build-push-action@v5.0.0 # v5.0.0
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
platforms: linux/amd64
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
build-args: |
|
||||
VERSION=${{ github.ref_name }}
|
||||
COMMIT=${{ github.sha }}
|
||||
|
||||
- name: Attest
|
||||
uses: actions/attest-build-provenance@v1
|
||||
id: attest
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||
push-to-registry: true
|
||||
.github/workflows/partial-frontend.yaml (vendored, 14 changes)
@@ -13,13 +13,9 @@ jobs:
         with:
           fetch-depth: 0

-      - uses: actions/setup-node@v4
-        with:
-          node-version: 20
-
       - uses: pnpm/action-setup@v3.0.0
         with:
-          version: 9
+          version: 9.12.2

       - name: Install dependencies
         run: pnpm install --shamefully-hoist
@@ -50,18 +46,18 @@ jobs:
       - name: Set up Go
         uses: actions/setup-go@v5
         with:
-          go-version: "1.22"
+          go-version: "1.21"

       - uses: actions/setup-node@v4
         with:
-          node-version: 20
+          node-version: 18

       - uses: pnpm/action-setup@v3.0.0
         with:
-          version: 9
+          version: 9.12.2

       - name: Install dependencies
-        run: pnpm install --shamefully-hoist
+        run: pnpm install
         working-directory: frontend

       - name: Run Integration Tests
89
.github/workflows/partial-publish.yaml
vendored
89
.github/workflows/partial-publish.yaml
vendored
@@ -1,89 +0,0 @@
|
||||
name: Frontend / E2E
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
tag:
|
||||
required: true
|
||||
type: string
|
||||
release:
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
secrets:
|
||||
GH_TOKEN:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: "Publish Homebox"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.20"
|
||||
|
||||
- name: Set up QEMU
|
||||
id: qemu
|
||||
uses: docker/setup-qemu-action@v3
|
||||
with:
|
||||
image: tonistiigi/binfmt:latest
|
||||
platforms: all
|
||||
|
||||
- name: install buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
install: true
|
||||
|
||||
- name: login to container registry
|
||||
run: docker login ghcr.io --username hay-kot --password $CR_PAT
|
||||
env:
|
||||
CR_PAT: ${{ secrets.GH_TOKEN }}
|
||||
|
||||
- name: build nightly image
|
||||
if: ${{ inputs.release == false }}
|
||||
run: |
|
||||
docker build --push --no-cache \
|
||||
--tag=ghcr.io/hay-kot/homebox:${{ inputs.tag }} \
|
||||
--build-arg=COMMIT=$(git rev-parse HEAD) \
|
||||
--build-arg=BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
|
||||
--platform=linux/amd64,linux/arm64,linux/arm/v7 .
|
||||
|
||||
- name: build nightly-rootless image
|
||||
if: ${{ inputs.release == false }}
|
||||
run: |
|
||||
docker build --push --no-cache \
|
||||
--tag=ghcr.io/hay-kot/homebox:${{ inputs.tag }}-rootless \
|
||||
--build-arg=COMMIT=$(git rev-parse HEAD) \
|
||||
--build-arg=BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
|
||||
--file Dockerfile.rootless \
|
||||
--platform=linux/amd64,linux/arm64,linux/arm/v7 .
|
||||
|
||||
- name: build release tagged the image
|
||||
if: ${{ inputs.release == true }}
|
||||
run: |
|
||||
docker build --push --no-cache \
|
||||
--tag ghcr.io/hay-kot/homebox:nightly \
|
||||
--tag ghcr.io/hay-kot/homebox:latest \
|
||||
--tag ghcr.io/hay-kot/homebox:${{ inputs.tag }} \
|
||||
--build-arg VERSION=${{ inputs.tag }} \
|
||||
--build-arg COMMIT=$(git rev-parse HEAD) \
|
||||
--build-arg BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
|
||||
--platform linux/amd64,linux/arm64,linux/arm/v7 .
|
||||
|
||||
- name: build release tagged the rootless image
|
||||
if: ${{ inputs.release == true }}
|
||||
run: |
|
||||
docker build --push --no-cache \
|
||||
--tag ghcr.io/hay-kot/homebox:nightly-rootless \
|
||||
--tag ghcr.io/hay-kot/homebox:latest-rootless \
|
||||
--tag ghcr.io/hay-kot/homebox:${{ inputs.tag }}-rootless \
|
||||
--build-arg VERSION=${{ inputs.tag }} \
|
||||
--build-arg COMMIT=$(git rev-parse HEAD) \
|
||||
--build-arg BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
|
||||
--platform linux/amd64,linux/arm64,linux/arm/v7 \
|
||||
--file Dockerfile.rootless .
|
||||
29
.github/workflows/publish.yaml
vendored
29
.github/workflows/publish.yaml
vendored
@@ -1,29 +0,0 @@
|
||||
name: Publish Dockers
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
env:
|
||||
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: "Deploy Nightly to Fly.io"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: superfly/flyctl-actions/setup-flyctl@master
|
||||
- run: flyctl deploy --remote-only
|
||||
|
||||
publish-nightly:
|
||||
name: "Publish Nightly"
|
||||
if: github.event_name != 'release'
|
||||
uses: hay-kot/homebox/.github/workflows/partial-publish.yaml@main
|
||||
with:
|
||||
tag: nightly
|
||||
secrets:
|
||||
GH_TOKEN: ${{ secrets.CR_PAT }}
|
||||
|
||||
|
||||
.github/workflows/pull-requests.yaml (vendored, 4 changes)
@@ -5,6 +5,10 @@ on:
     branches:
       - main

+    paths:
+      - 'backend/**'
+      - 'frontend/**'
+
 jobs:
   backend-tests:
     name: "Backend Server Tests"
77
.github/workflows/tag.yaml
vendored
77
.github/workflows/tag.yaml
vendored
@@ -1,77 +0,0 @@
|
||||
name: Publish Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
|
||||
env:
|
||||
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
|
||||
|
||||
jobs:
|
||||
backend-tests:
|
||||
name: "Backend Server Tests"
|
||||
uses: hay-kot/homebox/.github/workflows/partial-backend.yaml@main
|
||||
|
||||
frontend-tests:
|
||||
name: "Frontend and End-to-End Tests"
|
||||
uses: hay-kot/homebox/.github/workflows/partial-frontend.yaml@main
|
||||
|
||||
goreleaser:
|
||||
name: goreleaser
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
|
||||
- uses: pnpm/action-setup@v2
|
||||
with:
|
||||
version: 7.30.1
|
||||
|
||||
- name: Build Frontend and Copy to Backend
|
||||
working-directory: frontend
|
||||
run: |
|
||||
pnpm install --shamefully-hoist
|
||||
pnpm run build
|
||||
cp -r ./.output/public ../backend/app/api/static/
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v5
|
||||
with:
|
||||
workdir: "backend"
|
||||
distribution: goreleaser
|
||||
version: latest
|
||||
args: release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
publish-tag:
|
||||
name: "Publish Tag"
|
||||
uses: hay-kot/homebox/.github/workflows/partial-publish.yaml@main
|
||||
with:
|
||||
release: true
|
||||
tag: ${{ github.ref_name }}
|
||||
secrets:
|
||||
GH_TOKEN: ${{ secrets.CR_PAT }}
|
||||
|
||||
deploy-docs:
|
||||
name: Deploy docs
|
||||
needs:
|
||||
- publish-tag
|
||||
- goreleaser
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout main
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Deploy docs
|
||||
uses: mhausenblas/mkdocs-deploy-gh-pages@master
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CONFIG_FILE: docs/mkdocs.yml
|
||||
EXTRA_PACKAGES: build-base
|
||||
100
.github/workflows/update-currencies.yml
vendored
Normal file
100
.github/workflows/update-currencies.yml
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
name: Update Currencies
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
update-currencies:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.8'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install requests
|
||||
|
||||
- name: Run currency fetch script
|
||||
run: python .github/scripts/update_currencies.py
|
||||
|
||||
- name: Check for changes
|
||||
id: check_changes
|
||||
run: |
|
||||
if [[ $(git status --porcelain) ]]; then
|
||||
echo "Changes detected."
|
||||
echo "changes=true" >> $GITHUB_ENV
|
||||
else
|
||||
echo "No changes detected."
|
||||
echo "changes=false" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Delete existing update-currencies branch
|
||||
run: |
|
||||
if git show-ref --verify --quiet refs/heads/update-currencies; then
|
||||
git branch -D update-currencies
|
||||
echo "Deleted existing update-currencies branch."
|
||||
else
|
||||
echo "No existing update-currencies branch to delete."
|
||||
fi
|
||||
|
||||
- name: Create new update-currencies branch
|
||||
if: env.changes == 'true'
|
||||
run: |
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
# Create a new branch
|
||||
git checkout -b update-currencies
|
||||
git add backend/internal/core/currencies/currencies.json
|
||||
git commit -m "Update currencies.json"
|
||||
|
||||
# Fetch the latest changes from the remote
|
||||
git fetch origin
|
||||
|
||||
# Attempt to rebase with the latest changes
|
||||
if git show-ref --verify --quiet refs/remotes/origin/update-currencies; then
|
||||
if ! git rebase origin/update-currencies; then
|
||||
echo "Rebase conflicts occurred. Please resolve them manually."
|
||||
echo "To resolve conflicts, check out the 'update-currencies' branch locally."
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "No existing remote branch 'update-currencies'. Skipping rebase."
|
||||
fi
|
||||
|
||||
# Push the new branch to the remote
|
||||
if ! git push --set-upstream origin update-currencies; then
|
||||
echo "Push failed, trying to fetch and rebase again."
|
||||
git fetch origin
|
||||
if git show-ref --verify --quiet refs/remotes/origin/update-currencies; then
|
||||
if ! git rebase origin/update-currencies; then
|
||||
echo "Second rebase failed. Please resolve manually."
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "No existing remote branch 'update-currencies'. Skipping rebase."
|
||||
fi
|
||||
if ! git push --set-upstream origin update-currencies; then
|
||||
echo "Second push failed. Please resolve manually."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Create a pull request
|
||||
curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
|
||||
-X POST \
|
||||
-d '{"title": "Update currencies", "head": "update-currencies", "base": "main"}' \
|
||||
https://api.github.com/repos/${{ github.repository }}/pulls
|
||||
|
||||
- name: Notify no changes
|
||||
if: env.changes == 'false'
|
||||
run: echo "Currencies up-to-date with API, skipping commit."
|
||||
.gitignore (vendored, 7 changes)
@@ -48,10 +48,13 @@ dist

.pnpm-store
backend/app/api/app
backend/app/api/__debug_bin
backend/app/api/__debug_bin*
dist/

# Nuxt Publish Dir
backend/app/api/static/public/*
!backend/app/api/static/public/.gitkeep
backend/api
backend/api

docs/.vitepress/cache/
/.data/
@@ -3,7 +3,7 @@ package schema
 import (
 	"entgo.io/ent"

-	"github.com/hay-kot/homebox/backend/internal/data/ent/schema/mixins"
+	"github.com/sysadminsmedia/homebox/backend/internal/data/ent/schema/mixins"
 )

 type {{ .Scaffold.model }} struct {
.vscode/launch.json (vendored, 6 changes)
@@ -25,6 +25,7 @@
  "HBOX_STORAGE_DATA": "${workspaceRoot}/backend/.data",
  "HBOX_STORAGE_SQLITE_URL": "${workspaceRoot}/backend/.data/homebox.db?_fk=1"
  },
  "console": "integratedTerminal",
},
{
  "name": "Launch Frontend",
@@ -38,10 +39,11 @@
  "cwd": "${workspaceFolder}/frontend",
  "serverReadyAction": {
    "action": "debugWithChrome",
    "pattern": "Local: http://localhost:([0-9]+)",
    "pattern": "Local: +http://localhost:([0-9]+)",
    "uriFormat": "http://localhost:%s",
    "webRoot": "${workspaceFolder}/frontend"
  }
},
  "console": "integratedTerminal",
}
]
}
128
CODE_OF_CONDUCT.md
Normal file
128
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,128 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
We as members, contributors, and leaders pledge to make participation in our
|
||||
community a harassment-free experience for everyone, regardless of age, body
|
||||
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||
identity and expression, level of experience, education, socio-economic status,
|
||||
nationality, personal appearance, race, religion, or sexual identity
|
||||
and orientation.
|
||||
|
||||
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||
diverse, inclusive, and healthy community.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to a positive environment for our
|
||||
community include:
|
||||
|
||||
* Demonstrating empathy and kindness toward other people
|
||||
* Being respectful of differing opinions, viewpoints, and experiences
|
||||
* Giving and gracefully accepting constructive feedback
|
||||
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
and learning from the experience
|
||||
* Focusing on what is best not just for us as individuals, but for the
|
||||
overall community
|
||||
|
||||
Examples of unacceptable behavior include:
|
||||
|
||||
* The use of sexualized language or imagery, and sexual attention or
|
||||
advances of any kind
|
||||
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or email
|
||||
address, without their explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Enforcement Responsibilities
|
||||
|
||||
Community leaders are responsible for clarifying and enforcing our standards of
|
||||
acceptable behavior and will take appropriate and fair corrective action in
|
||||
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||
or harmful.
|
||||
|
||||
Community leaders have the right and responsibility to remove, edit, or reject
|
||||
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||
decisions when appropriate.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies within all community spaces, and also applies when
|
||||
an individual is officially representing the community in public spaces.
|
||||
Examples of representing our community include using an official e-mail address,
|
||||
posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at
|
||||
[support@sysadminemedia.com](mailto:support@sysadminemedia.com).
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
reporter of any incident.
|
||||
|
||||
## Enforcement Guidelines
|
||||
|
||||
Community leaders will follow these Community Impact Guidelines in determining
|
||||
the consequences for any action they deem in violation of this Code of Conduct:
|
||||
|
||||
### 1. Correction
|
||||
|
||||
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||
unprofessional or unwelcome in the community.
|
||||
|
||||
**Consequence**: A private, written warning from community leaders, providing
|
||||
clarity around the nature of the violation and an explanation of why the
|
||||
behavior was inappropriate. A public apology may be requested.
|
||||
|
||||
### 2. Warning
|
||||
|
||||
**Community Impact**: A violation through a single incident or series
|
||||
of actions.
|
||||
|
||||
**Consequence**: A warning with consequences for continued behavior. No
|
||||
interaction with the people involved, including unsolicited interaction with
|
||||
those enforcing the Code of Conduct, for a specified period of time. This
|
||||
includes avoiding interactions in community spaces as well as external channels
|
||||
like social media. Violating these terms may lead to a temporary or
|
||||
permanent ban.
|
||||
|
||||
### 3. Temporary Ban
|
||||
|
||||
**Community Impact**: A serious violation of community standards, including
|
||||
sustained inappropriate behavior.
|
||||
|
||||
**Consequence**: A temporary ban from any sort of interaction or public
|
||||
communication with the community for a specified period of time. No public or
|
||||
private interaction with the people involved, including unsolicited interaction
|
||||
with those enforcing the Code of Conduct, is allowed during this period.
|
||||
Violating these terms may lead to a permanent ban.
|
||||
|
||||
### 4. Permanent Ban
|
||||
|
||||
**Community Impact**: Demonstrating a pattern of violation of community
|
||||
standards, including sustained inappropriate behavior, harassment of an
|
||||
individual, or aggression toward or disparagement of classes of individuals.
|
||||
|
||||
**Consequence**: A permanent ban from any sort of public interaction within
|
||||
the community.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||
version 2.0, available at
|
||||
[Contributor Covenant Code of Conduct](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html).
|
||||
|
||||
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||
enforcement ladder](https://github.com/mozilla/diversity).
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
[FAQ](https://www.contributor-covenant.org/faq). Translations are available at
|
||||
[Translations](https://www.contributor-covenant.org/translations).
|
||||
@@ -44,7 +44,7 @@ start command `task go:run`

 ### Frontend Development Notes

-start command `task: ui:dev`
+start command `task ui:dev`

 1. The frontend is a Vue 3 app with Nuxt.js that uses Tailwind and DaisyUI for styling.
 2. We're using Vitest for our automated testing. You can run these with `task ui:watch`.
@@ -54,4 +54,4 @@ start command `task: ui:dev`

 Create a new tag in GitHub with the version number vX.X.X. This will trigger a new release to be created.

 Test -> Goreleaser -> Publish Release -> Trigger Docker Builds -> Deploy Docs + Fly.io Demo
 Test -> Goreleaser -> Publish Release -> Trigger Docker Builds -> Deploy Docs + Fly.io Demo
73
Dockerfile
73
Dockerfile
@@ -1,50 +1,91 @@
|
||||
# Node dependencies stage
|
||||
FROM --platform=$TARGETPLATFORM node:18-alpine AS frontend-dependencies
|
||||
WORKDIR /app
|
||||
|
||||
# Build Nuxt
|
||||
FROM node:18-alpine as frontend-builder
|
||||
WORKDIR /app
|
||||
# Install pnpm globally (caching layer)
|
||||
RUN npm install -g pnpm
|
||||
|
||||
# Copy package.json and lockfile to leverage caching
|
||||
COPY frontend/package.json frontend/pnpm-lock.yaml ./
|
||||
RUN pnpm install --frozen-lockfile --shamefully-hoist
|
||||
COPY frontend .
|
||||
|
||||
# Build Nuxt (frontend) stage
|
||||
FROM --platform=$TARGETPLATFORM node:18-alpine AS frontend-builder
|
||||
WORKDIR /app
|
||||
|
||||
# Install pnpm globally again (it can reuse the cache if not changed)
|
||||
RUN npm install -g pnpm
|
||||
|
||||
# Copy over source files and node_modules from dependencies stage
|
||||
COPY frontend .
|
||||
COPY --from=frontend-dependencies /app/node_modules ./node_modules
|
||||
RUN pnpm build
|
||||
|
||||
# Build API
|
||||
FROM golang:alpine AS builder
|
||||
# Go dependencies stage
|
||||
FROM --platform=$TARGETPLATFORM golang:alpine AS builder-dependencies
|
||||
WORKDIR /go/src/app
|
||||
|
||||
# Copy go.mod and go.sum for better caching
|
||||
COPY ./backend/go.mod ./backend/go.sum ./
|
||||
RUN go mod download
|
||||
|
||||
# Build API stage
|
||||
FROM --platform=$TARGETPLATFORM golang:alpine AS builder
|
||||
ARG BUILD_TIME
|
||||
ARG COMMIT
|
||||
ARG VERSION
|
||||
|
||||
# Install necessary build tools
|
||||
RUN apk update && \
|
||||
apk upgrade && \
|
||||
apk add --update git build-base gcc g++
|
||||
apk add --no-cache git build-base gcc g++
|
||||
|
||||
WORKDIR /go/src/app
|
||||
|
||||
# Copy Go modules (from dependencies stage) and source code
|
||||
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
|
||||
COPY ./backend .
|
||||
RUN go get -d -v ./...
|
||||
|
||||
# Clear old public files and copy new ones from frontend build
|
||||
RUN rm -rf ./app/api/public
|
||||
COPY --from=frontend-builder /app/.output/public ./app/api/static/public
|
||||
RUN CGO_ENABLED=0 GOOS=linux go build \
|
||||
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
|
||||
|
||||
# Use cache for Go build artifacts
|
||||
RUN --mount=type=cache,target=/root/.cache/go-build \
|
||||
CGO_ENABLED=0 GOOS=linux go build \
|
||||
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
|
||||
-o /go/bin/api \
|
||||
-v ./app/api/*.go
|
||||
|
||||
# Production Stage
|
||||
FROM alpine:latest
|
||||
|
||||
# Production stage
|
||||
FROM --platform=$TARGETPLATFORM alpine:latest
|
||||
ENV HBOX_MODE=production
|
||||
ENV HBOX_STORAGE_DATA=/data/
|
||||
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1
|
||||
|
||||
RUN apk --no-cache add ca-certificates
|
||||
# Install necessary runtime dependencies
|
||||
RUN apk --no-cache add ca-certificates wget
|
||||
|
||||
# Create application directory and copy over built Go binary
|
||||
RUN mkdir /app
|
||||
COPY --from=builder /go/bin/api /app
|
||||
|
||||
RUN chmod +x /app/api
|
||||
|
||||
# Labels and configuration for the final image
|
||||
LABEL Name=homebox Version=0.0.1
|
||||
LABEL org.opencontainers.image.source="https://github.com/hay-kot/homebox"
|
||||
LABEL org.opencontainers.image.source="https://github.com/sysadminsmedia/homebox"
|
||||
|
||||
# Expose necessary ports
|
||||
EXPOSE 7745
|
||||
WORKDIR /app
|
||||
|
||||
# Healthcheck configuration
|
||||
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
|
||||
CMD [ "wget", "--no-verbose", "--tries=1", "-O", "-", "http://localhost:7745/api/v1/status" ]
|
||||
|
||||
# Persist volume
|
||||
VOLUME [ "/data" ]
|
||||
|
||||
# Entrypoint and CMD
|
||||
ENTRYPOINT [ "/app/api" ]
|
||||
CMD [ "/data/config.yml" ]
|
||||
|
||||
@@ -1,53 +1,73 @@
|
||||
|
||||
# Build Nuxt
|
||||
FROM node:17-alpine as frontend-builder
|
||||
WORKDIR /app
|
||||
# Node dependencies
|
||||
FROM node:18-alpine AS frontend-dependencies
|
||||
WORKDIR /app
|
||||
RUN npm install -g pnpm
|
||||
COPY frontend/package.json frontend/pnpm-lock.yaml ./
|
||||
RUN pnpm install --frozen-lockfile --shamefully-hoist
|
||||
COPY frontend .
|
||||
|
||||
# Build Nuxt
|
||||
FROM node:18-alpine AS frontend-builder
|
||||
WORKDIR /app
|
||||
COPY frontend ./
|
||||
COPY --from=frontend-dependencies /app/node_modules ./node_modules
|
||||
RUN pnpm build
|
||||
|
||||
# Build Go dependencies
|
||||
FROM golang:alpine AS builder-dependencies
|
||||
WORKDIR /go/src/app
|
||||
COPY ./backend/go.mod ./backend/go.sum ./
|
||||
RUN go mod download
|
||||
|
||||
# Build API
|
||||
FROM golang:alpine AS builder
|
||||
ARG BUILD_TIME
|
||||
ARG COMMIT
|
||||
ARG VERSION
|
||||
RUN apk update && \
|
||||
apk upgrade && \
|
||||
apk add --update git build-base gcc g++
|
||||
|
||||
RUN apk update && apk upgrade && apk add --no-cache git build-base gcc g++
|
||||
|
||||
WORKDIR /go/src/app
|
||||
COPY ./backend .
|
||||
RUN go get -d -v ./...
|
||||
RUN rm -rf ./app/api/public
|
||||
COPY --from=frontend-builder /app/.output/public ./app/api/static/public
|
||||
RUN CGO_ENABLED=0 GOOS=linux go build \
|
||||
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
|
||||
-o /go/bin/api \
|
||||
-v ./app/api/*.go && \
|
||||
chmod +x /go/bin/api && \
|
||||
# create a directory so that we can copy it in the next stage
|
||||
mkdir /data
|
||||
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
|
||||
|
||||
# Production Stage
|
||||
FROM gcr.io/distroless/static
|
||||
# Use cache for Go build
|
||||
RUN --mount=type=cache,target=/root/.cache/go-build \
|
||||
CGO_ENABLED=0 GOOS=linux go build \
|
||||
-ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
|
||||
-o /go/bin/api ./app/api/*.go
|
||||
|
||||
# Production stage with distroless
|
||||
FROM gcr.io/distroless/static:latest
|
||||
|
||||
ENV HBOX_MODE=production
|
||||
ENV HBOX_STORAGE_DATA=/data/
|
||||
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_fk=1
|
||||
|
||||
# Copy the binary and the (empty) /data dir and
|
||||
# change the ownership to the low-privileged user
|
||||
# Copy the binary and data directory, change ownership
|
||||
COPY --from=builder --chown=nonroot /go/bin/api /app
|
||||
COPY --from=builder --chown=nonroot /data /data
|
||||
|
||||
LABEL Name=homebox Version=0.0.1
|
||||
LABEL org.opencontainers.image.source="https://github.com/hay-kot/homebox"
|
||||
EXPOSE 7745
|
||||
VOLUME [ "/data" ]
|
||||
# Add wget to the image
|
||||
# Note: If using distroless, this may not be applicable
|
||||
# as distroless images do not include package managers.
|
||||
# This line may be omitted if you're relying on another way to handle healthchecks.
|
||||
COPY --from=alpine:latest /bin/wget /usr/bin/wget
|
||||
|
||||
# Drop root and run as low-privileged user
|
||||
LABEL Name=homebox Version=0.0.1
|
||||
LABEL org.opencontainers.image.source="https://github.com/sysadminsmedia/homebox"
|
||||
EXPOSE 7745
|
||||
|
||||
HEALTHCHECK --interval=30s \
|
||||
--timeout=5s \
|
||||
--start-period=5s \
|
||||
--retries=3 \
|
||||
CMD ["/usr/bin/wget", "--no-verbose", "--tries=1", "-O", "-", "http://localhost:7745/api/v1/status"]
|
||||
|
||||
VOLUME ["/data"]
|
||||
|
||||
# Drop root and run as a low-privileged user
|
||||
USER nonroot
|
||||
ENTRYPOINT [ "/app" ]
|
||||
CMD [ "/data/config.yml" ]
|
||||
ENTRYPOINT ["/app"]
|
||||
CMD ["/data/config.yml"]
|
||||
|
||||
README.md (33 changes)
@@ -1,19 +1,30 @@
 <div align="center">
-  <img src="/docs/docs/assets/img/lilbox.svg" height="200"/>
+  <img src="/docs/public/lilbox.svg" height="200"/>
 </div>

 <h1 align="center" style="margin-top: -10px"> HomeBox </h1>
 <p align="center" style="width: 100;">
-  <a href="https://hay-kot.github.io/homebox/">Docs</a>
+  <a href="https://homebox.software/en/">Docs</a>
   |
-  <a href="https://homebox.fly.dev">Demo</a>
+  <a href="https://demo.homebox.software">Demo</a>
   |
-  <a href="https://discord.gg/tuncmNrE4z">Discord</a>
+  <a href="https://discord.gg/aY4DCkpNA9">Discord</a>
 </p>

 ## What is HomeBox

 Homebox is the inventory and organization system built for the Home User! With a focus on simplicity and ease of use, Homebox is the perfect solution for your home inventory, organization, and management needs. While developing this project, I've tried to keep the following principles in mind:

 - _Simple_ - Homebox is designed to be simple and easy to use. No complicated setup or configuration required. Use either a single docker container, or deploy yourself by compiling the binary for your platform of choice.
 - _Blazingly Fast_ - Homebox is written in Go, which makes it extremely fast and requires minimal resources to deploy. In general, idle memory usage is less than 50MB for the whole container.
 - _Portable_ - Homebox is designed to be portable and run on anywhere. We use SQLite and an embedded Web UI to make it easy to deploy, use, and backup.

 # Screenshots
 Check out screenshots of the project [here](https://imgur.com/a/5gLWt2j).

 ## Quick Start

-[Configuration & Docker Compose](https://hay-kot.github.io/homebox/quick-start)
+[Configuration & Docker Compose](https://homebox.software/en/quick-start.html)

 ```bash
 # If using the rootless image, ensure data
@@ -26,18 +37,24 @@ docker run -d \
   --publish 3100:7745 \
   --env TZ=Europe/Bucharest \
   --volume /path/to/data/folder/:/data \
-  ghcr.io/hay-kot/homebox:latest
-  # ghcr.io/hay-kot/homebox:latest-rootless
+  ghcr.io/sysadminsmedia/homebox:latest
+  # ghcr.io/sysadminsmedia/homebox:latest-rootless
 ```

 <!-- CONTRIBUTING -->

 ## Contributing

 Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.

 If you are not a coder, you can still contribute financially. Financial contributions help me prioritize working on this project over others and helps me know that there is a real demand for project development.

 <a href="https://www.buymeacoffee.com/haykot" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/v2/default-green.png" alt="Buy Me A Coffee" style="height: 30px !important;width: 107px !important;" ></a>
 ## Help us Translate
 We want to make sure that Homebox is available in as many languages as possible. If you are interested in helping us translate Homebox, please help us via our [Weblate instance](https://translate.sysadminsmedia.com/projects/homebox/).

 [](http://translate.sysadminsmedia.com/engage/homebox/)

 ## Credits

 - Original project by [@hay-kot](https://github.com/hay-kot)
 - Logo by [@lakotelman](https://github.com/lakotelman)
@@ -6,4 +6,6 @@ Since this software is still considered beta/WIP support is always only given fo
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
Please open a normal public issue if you have any security related concerns.
|
||||
Please open a normal public issue for minor security issues or general security inquires.
|
||||
|
||||
For major or critical security issues, please open a private github security issue.
|
||||
13  Taskfile.yml
@@ -5,11 +5,6 @@ env:
HBOX_STORAGE_SQLITE_URL: .data/homebox.db?_pragma=busy_timeout=1000&_pragma=journal_mode=WAL&_fk=1
HBOX_OPTIONS_ALLOW_REGISTRATION: true
UNSAFE_DISABLE_PASSWORD_PROJECTION: "yes_i_am_sure"
HBOX_MAILER_HOST: 127.0.0.1
HBOX_MAILER_PORT: 1025
HBOX_MAILER_USERNAME: c836555d57d205
HBOX_MAILER_PASSWORD: 3ff2f9986f3cff
HBOX_MAILER_FROM: info@example.com
tasks:
setup:
desc: Install development dependencies
@@ -57,6 +52,8 @@ tasks:
- cp ./backend/app/api/static/docs/swagger.json docs/docs/api/openapi-2.0.json

go:run:
env:
HBOX_DEMO: true
desc: Starts the backend api server (depends on generate task)
dir: backend
deps:
@@ -84,12 +81,6 @@ tasks:
cmds:
- go mod tidy

go:fmt:
desc: Runs go fmt on the backend
dir: backend
cmds:
- gofumpt -w .

go:lint:
desc: Runs golangci-lint
dir: backend
@@ -1,9 +1,6 @@
run:
timeout: 10m
linters-settings:
errcheck:
exclude-functions:
- (net/http.ResponseWriter).Write
goconst:
min-len: 5
min-occurrences: 5
@@ -46,7 +43,7 @@ linters:
- errcheck
- errorlint
- exhaustive
- exportloopref
- copyloopvar
- gochecknoinits
- goconst
- gocritic
@@ -72,6 +69,6 @@ linters:
- sqlclosecheck
issues:
exclude-use-default: false
fix: false
exclude-dirs:
- internal/data/ent.*
fix: true
@@ -1,17 +1,17 @@
package main

import (
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/config"
"github.com/hay-kot/homebox/backend/pkgs/mailer"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
"github.com/sysadminsmedia/homebox/backend/pkgs/mailer"
)

type app struct {
conf *config.Config
mailer *mailer.Mailer
mailer mailer.Mailer
db *ent.Client
repos *repo.AllRepos
services *services.AllServices
@@ -23,7 +23,7 @@ func new(conf *config.Config) *app {
conf: conf,
}

s.mailer = &mailer.Mailer{
s.mailer = mailer.Mailer{
Host: s.conf.Mailer.Host,
Port: s.conf.Mailer.Port,
Username: s.conf.Mailer.Username,
@@ -2,14 +2,15 @@ package main

import (
"context"
"errors"
"strings"
"time"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
)

func (a *app) SetupDemo() {
func (a *app) SetupDemo() error {
csvText := `HB.import_ref,HB.location,HB.labels,HB.quantity,HB.name,HB.description,HB.insured,HB.serial_number,HB.model_number,HB.manufacturer,HB.notes,HB.purchase_from,HB.purchase_price,HB.purchase_time,HB.lifetime_warranty,HB.warranty_expires,HB.warranty_details,HB.sold_to,HB.sold_price,HB.sold_time,HB.sold_notes
,Garage,IOT;Home Assistant; Z-Wave,1,Zooz Universal Relay ZEN17,"Zooz 700 Series Z-Wave Universal Relay ZEN17 for Awnings, Garage Doors, Sprinklers, and More | 2 NO-C-NC Relays (20A, 10A) | Signal Repeater | Hub Required (Compatible with SmartThings and Hubitat)",,,ZEN17,Zooz,,Amazon,39.95,10/13/2021,,,,,,,
,Living Room,IOT;Home Assistant; Z-Wave,1,Zooz Motion Sensor,"Zooz Z-Wave Plus S2 Motion Sensor ZSE18 with Magnetic Mount, Works with Vera and SmartThings",,,ZSE18,Zooz,,Amazon,29.95,10/15/2021,,,,,,,
@@ -33,32 +34,34 @@ func (a *app) SetupDemo() {
_, err := a.services.User.Login(ctx, registration.Email, registration.Password, false)
if err == nil {
log.Info().Msg("Demo user already exists, skipping setup")
return
return nil
}

log.Debug().Msg("Demo user does not exist, setting up demo")
_, err = a.services.User.RegisterUser(ctx, registration)
if err != nil {
log.Err(err).Msg("Failed to register demo user")
log.Fatal().Msg("Failed to setup demo") // nolint
return errors.New("failed to setup demo")
}

token, err := a.services.User.Login(ctx, registration.Email, registration.Password, false)
if err != nil {
log.Err(err).Msg("Failed to login demo user")
log.Fatal().Msg("Failed to setup demo")
return errors.New("failed to setup demo")
}
self, err := a.services.User.GetSelf(ctx, token.Raw)
if err != nil {
log.Err(err).Msg("Failed to get self")
log.Fatal().Msg("Failed to setup demo")
return errors.New("failed to setup demo")
}

_, err = a.services.Items.CsvImport(ctx, self.GroupID, strings.NewReader(csvText))
if err != nil {
log.Err(err).Msg("Failed to import CSV")
log.Fatal().Msg("Failed to setup demo")
return errors.New("failed to setup demo")
}

log.Info().Msg("Demo setup complete")

return nil
}
@@ -7,12 +7,12 @@ import (
"time"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"

"github.com/olahol/melody"
)
@@ -57,6 +57,12 @@ func WithSecureCookies(secure bool) func(*V1Controller) {
}
}

func WithURL(url string) func(*V1Controller) {
return func(ctrl *V1Controller) {
ctrl.url = url
}
}

type V1Controller struct {
cookieSecure bool
repo *repo.AllRepos
@@ -65,6 +71,7 @@ type V1Controller struct {
isDemo bool
allowRegistration bool
bus *eventbus.EventBus
url string
}

type (
@@ -87,12 +94,6 @@ type (
}
)

func BaseURLFunc(prefix string) func(s string) string {
return func(s string) string {
return prefix + "/v1" + s
}
}

func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, bus *eventbus.EventBus, options ...func(*V1Controller)) *V1Controller {
ctrl := &V1Controller{
repo: repos,
@@ -153,7 +154,7 @@ func (ctrl *V1Controller) HandleCacheWS() errchain.HandlerFunc {

m.HandleConnect(func(s *melody.Session) {
auth := services.NewContext(s.Request.Context())
s.Set("gid", auth.GroupID)
s.Set("gid", auth.GID)
})

factory := func(e string) func(data any) {
@@ -5,7 +5,7 @@ import (

"github.com/go-chi/chi/v5"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

// routeID extracts the ID from the request URL. If the ID is not in a valid
@@ -5,11 +5,11 @@ import (
"net/http"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

type ActionAmountResult struct {
@@ -20,7 +20,7 @@ func actionHandlerFactory(ref string, fn func(context.Context, uuid.UUID) (int,
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())

totalCompleted, err := fn(ctx, ctx.GroupID)
totalCompleted, err := fn(ctx, ctx.GID)
if err != nil {
log.Err(err).Str("action_ref", ref).Msg("failed to run action")
return validate.NewRequestError(err, http.StatusInternalServerError)
@@ -6,11 +6,11 @@ import (
"strings"

"github.com/go-chi/chi/v5"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"

"github.com/rs/zerolog/log"
)
@@ -37,7 +37,7 @@ func (ctrl *V1Controller) HandleAssetGet() errchain.HandlerFunc {
pageParam := r.URL.Query().Get("page")
var page int64 = -1
if pageParam != "" {
page, err = strconv.ParseInt(pageParam, 10, 64)
page, err = strconv.ParseInt(pageParam, 10, 32)
if err != nil {
return server.JSON(w, http.StatusBadRequest, "Invalid page number")
}
@@ -46,13 +46,13 @@ func (ctrl *V1Controller) HandleAssetGet() errchain.HandlerFunc {
pageSizeParam := r.URL.Query().Get("pageSize")
var pageSize int64 = -1
if pageSizeParam != "" {
pageSize, err = strconv.ParseInt(pageSizeParam, 10, 64)
pageSize, err = strconv.ParseInt(pageSizeParam, 10, 32)
if err != nil {
return server.JSON(w, http.StatusBadRequest, "Invalid page size")
}
}

items, err := ctrl.repo.Items.QueryByAssetID(r.Context(), ctx.GroupID, repo.AssetID(assetID), int(page), int(pageSize))
items, err := ctrl.repo.Items.QueryByAssetID(r.Context(), ctx.GID, repo.AssetID(assetID), int(page), int(pageSize))
if err != nil {
log.Err(err).Msg("failed to get item")
return validate.NewRequestError(err, http.StatusInternalServerError)
@@ -7,11 +7,11 @@ import (
"strings"
"time"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

const (
@@ -4,11 +4,11 @@ import (
"net/http"
"time"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

type (
@@ -35,7 +35,7 @@ type (
func (ctrl *V1Controller) HandleGroupGet() errchain.HandlerFunc {
fn := func(r *http.Request) (repo.Group, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Groups.GroupByID(auth, auth.GroupID)
return ctrl.repo.Groups.GroupByID(auth, auth.GID)
}

return adapters.Command(fn, http.StatusOK)
@@ -4,17 +4,19 @@ import (
"database/sql"
"encoding/csv"
"errors"
"math/big"
"net/http"
"net/url"
"strings"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleItemsGetAll godoc
@@ -57,6 +59,7 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
Search: params.Get("q"),
LocationIDs: queryUUIDList(params, "locations"),
LabelIDs: queryUUIDList(params, "labels"),
NegateLabels: queryBool(params.Get("negateLabels")),
ParentItemIDs: queryUUIDList(params, "parentIds"),
IncludeArchived: queryBool(params.Get("includeArchived")),
Fields: filterFieldItems(params["fields"]),
@@ -79,7 +82,15 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())

items, err := ctrl.repo.Items.QueryByGroup(ctx, ctx.GroupID, extractQuery(r))
items, err := ctrl.repo.Items.QueryByGroup(ctx, ctx.GID, extractQuery(r))
totalPrice := new(big.Int)
for _, item := range items.Items {
totalPrice.Add(totalPrice, big.NewInt(int64(item.PurchasePrice*100)))
}

totalPriceFloat := new(big.Float).SetInt(totalPrice)
totalPriceFloat.Quo(totalPriceFloat, big.NewFloat(100))

if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return server.JSON(w, http.StatusOK, repo.PaginationResult[repo.ItemSummary]{
@@ -105,12 +116,12 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleItemFullPath() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) ([]repo.ItemPath, error) {
auth := services.NewContext(r.Context())
item, err := ctrl.repo.Items.GetOneByGroup(auth, auth.GroupID, ID)
item, err := ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
if err != nil {
return nil, err
}

paths, err := ctrl.repo.Locations.PathForLoc(auth, auth.GroupID, item.Location.ID)
paths, err := ctrl.repo.Locations.PathForLoc(auth, auth.GID, item.Location.ID)
if err != nil {
return nil, err
}
@@ -165,7 +176,7 @@ func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.ItemOut, error) {
auth := services.NewContext(r.Context())

return ctrl.repo.Items.GetOneByGroup(auth, auth.GroupID, ID)
return ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
}

return adapters.CommandID("id", fn, http.StatusOK)
@@ -183,7 +194,7 @@ func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleItemDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.Items.DeleteByGroup(auth, auth.GroupID, ID)
err := ctrl.repo.Items.DeleteByGroup(auth, auth.GID, ID)
return nil, err
}

@@ -205,7 +216,7 @@ func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc {
auth := services.NewContext(r.Context())

body.ID = ID
return ctrl.repo.Items.UpdateByGroup(auth, auth.GroupID, body)
return ctrl.repo.Items.UpdateByGroup(auth, auth.GID, body)
}

return adapters.ActionID("id", fn, http.StatusOK)
@@ -226,12 +237,12 @@ func (ctrl *V1Controller) HandleItemPatch() errchain.HandlerFunc {
auth := services.NewContext(r.Context())

body.ID = ID
err := ctrl.repo.Items.Patch(auth, auth.GroupID, ID, body)
err := ctrl.repo.Items.Patch(auth, auth.GID, ID, body)
if err != nil {
return repo.ItemOut{}, err
}

return ctrl.repo.Items.GetOneByGroup(auth, auth.GroupID, ID)
return ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
}

return adapters.ActionID("id", fn, http.StatusOK)
@@ -249,7 +260,7 @@ func (ctrl *V1Controller) HandleItemPatch() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleGetAllCustomFieldNames() errchain.HandlerFunc {
fn := func(r *http.Request) ([]string, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Items.GetAllCustomFieldNames(auth, auth.GroupID)
return ctrl.repo.Items.GetAllCustomFieldNames(auth, auth.GID)
}

return adapters.Command(fn, http.StatusOK)
@@ -271,7 +282,7 @@ func (ctrl *V1Controller) HandleGetAllCustomFieldValues() errchain.HandlerFunc {

fn := func(r *http.Request, q query) ([]string, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Items.GetAllCustomFieldValues(auth, auth.GroupID, q.Field)
return ctrl.repo.Items.GetAllCustomFieldValues(auth, auth.GID, q.Field)
}

return adapters.Query(fn, http.StatusOK)
@@ -323,17 +334,40 @@ func (ctrl *V1Controller) HandleItemsExport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())

csvData, err := ctrl.svc.Items.ExportTSV(r.Context(), ctx.GroupID)
csvData, err := ctrl.svc.Items.ExportCSV(r.Context(), ctx.GID, getHBURL(r.Header.Get("Referer"), ctrl.url))
if err != nil {
log.Err(err).Msg("failed to export items")
return validate.NewRequestError(err, http.StatusInternalServerError)
}

w.Header().Set("Content-Type", "text/tsv")
w.Header().Set("Content-Disposition", "attachment;filename=homebox-items.tsv")
w.Header().Set("Content-Type", "text/csv")
w.Header().Set("Content-Disposition", "attachment;filename=homebox-items.csv")

writer := csv.NewWriter(w)
writer.Comma = '\t'
writer.Comma = ','
return writer.WriteAll(csvData)
}
}

func getHBURL(refererHeader, fallback string) (hbURL string) {
hbURL = refererHeader
if hbURL == "" {
hbURL = fallback
}

return stripPathFromURL(hbURL)
}

// stripPathFromURL removes the path from a URL.
// ex. https://example.com/tools -> https://example.com
func stripPathFromURL(rawURL string) string {
parsedURL, err := url.Parse(rawURL)
if err != nil {
log.Err(err).Msg("failed to parse URL")
return ""
}

strippedURL := url.URL{Scheme: parsedURL.Scheme, Host: parsedURL.Host}

return strippedURL.String()
}
@@ -6,13 +6,13 @@ import (
"path/filepath"
"strings"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

type (
@@ -168,7 +168,7 @@ func (ctrl *V1Controller) handleItemAttachmentsHandler(w http.ResponseWriter, r

// Delete Attachment Handler
case http.MethodDelete:
err = ctrl.svc.Items.AttachmentDelete(r.Context(), ctx.GroupID, ID, attachmentID)
err = ctrl.svc.Items.AttachmentDelete(r.Context(), ctx.GID, ID, attachmentID)
if err != nil {
log.Err(err).Msg("failed to delete attachment")
return validate.NewRequestError(err, http.StatusInternalServerError)
@@ -4,10 +4,10 @@ import (
"net/http"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleLabelsGetAll godoc
@@ -21,7 +21,7 @@ import (
func (ctrl *V1Controller) HandleLabelsGetAll() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.LabelSummary, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Labels.GetAll(auth, auth.GroupID)
return ctrl.repo.Labels.GetAll(auth, auth.GID)
}

return adapters.Command(fn, http.StatusOK)
@@ -39,7 +39,7 @@ func (ctrl *V1Controller) HandleLabelsGetAll() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleLabelsCreate() errchain.HandlerFunc {
fn := func(r *http.Request, data repo.LabelCreate) (repo.LabelOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Labels.Create(auth, auth.GroupID, data)
return ctrl.repo.Labels.Create(auth, auth.GID, data)
}

return adapters.Action(fn, http.StatusCreated)
@@ -57,7 +57,7 @@ func (ctrl *V1Controller) HandleLabelsCreate() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleLabelDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.Labels.DeleteByGroup(auth, auth.GroupID, ID)
err := ctrl.repo.Labels.DeleteByGroup(auth, auth.GID, ID)
return nil, err
}

@@ -76,7 +76,7 @@ func (ctrl *V1Controller) HandleLabelDelete() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleLabelGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.LabelOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Labels.GetOneByGroup(auth, auth.GroupID, ID)
return ctrl.repo.Labels.GetOneByGroup(auth, auth.GID, ID)
}

return adapters.CommandID("id", fn, http.StatusOK)
@@ -95,7 +95,7 @@ func (ctrl *V1Controller) HandleLabelUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, data repo.LabelUpdate) (repo.LabelOut, error) {
auth := services.NewContext(r.Context())
data.ID = ID
return ctrl.repo.Labels.UpdateByGroup(auth, auth.GroupID, data)
return ctrl.repo.Labels.UpdateByGroup(auth, auth.GID, data)
}

return adapters.ActionID("id", fn, http.StatusOK)
@@ -1,13 +1,15 @@
package v1

import (
"context"
"math/big"
"net/http"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleLocationTreeQuery godoc
@@ -22,7 +24,7 @@ import (
func (ctrl *V1Controller) HandleLocationTreeQuery() errchain.HandlerFunc {
fn := func(r *http.Request, query repo.TreeQuery) ([]repo.TreeItem, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Locations.Tree(auth, auth.GroupID, query)
return ctrl.repo.Locations.Tree(auth, auth.GID, query)
}

return adapters.Query(fn, http.StatusOK)
@@ -40,7 +42,7 @@ func (ctrl *V1Controller) HandleLocationTreeQuery() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleLocationGetAll() errchain.HandlerFunc {
fn := func(r *http.Request, q repo.LocationQuery) ([]repo.LocationOutCount, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Locations.GetAll(auth, auth.GroupID, q)
return ctrl.repo.Locations.GetAll(auth, auth.GID, q)
}

return adapters.Query(fn, http.StatusOK)
@@ -58,7 +60,7 @@ func (ctrl *V1Controller) HandleLocationGetAll() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleLocationCreate() errchain.HandlerFunc {
fn := func(r *http.Request, createData repo.LocationCreate) (repo.LocationOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Locations.Create(auth, auth.GroupID, createData)
return ctrl.repo.Locations.Create(auth, auth.GID, createData)
}

return adapters.Action(fn, http.StatusCreated)
@@ -76,13 +78,50 @@ func (ctrl *V1Controller) HandleLocationCreate() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleLocationDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.Locations.DeleteByGroup(auth, auth.GroupID, ID)
err := ctrl.repo.Locations.DeleteByGroup(auth, auth.GID, ID)
return nil, err
}

return adapters.CommandID("id", fn, http.StatusNoContent)
}

func (ctrl *V1Controller) GetLocationWithPrice(auth context.Context, gid uuid.UUID, id uuid.UUID) (repo.LocationOut, error) {
var location, err = ctrl.repo.Locations.GetOneByGroup(auth, gid, id)
if err != nil {
return repo.LocationOut{}, err
}

// Add direct child items price
totalPrice := new(big.Int)
items, err := ctrl.repo.Items.QueryByGroup(auth, gid, repo.ItemQuery{LocationIDs: []uuid.UUID{id}})
if err != nil {
return repo.LocationOut{}, err
}

for _, item := range items.Items {
// Convert item.Quantity to float64 for multiplication
quantity := float64(item.Quantity)
itemTotal := big.NewInt(int64(item.PurchasePrice * quantity * 100))
totalPrice.Add(totalPrice, itemTotal)
}

totalPriceFloat := new(big.Float).SetInt(totalPrice)
totalPriceFloat.Quo(totalPriceFloat, big.NewFloat(100))
location.TotalPrice, _ = totalPriceFloat.Float64()

// Add price from child locations
for _, childLocation := range location.Children {
var childLocationWithPrice repo.LocationOut
childLocationWithPrice, err = ctrl.GetLocationWithPrice(auth, gid, childLocation.ID)
if err != nil {
return repo.LocationOut{}, err
}
location.TotalPrice += childLocationWithPrice.TotalPrice
}

return location, nil
}

// HandleLocationGet godoc
//
// @Summary Get Location
@@ -95,7 +134,9 @@ func (ctrl *V1Controller) HandleLocationDelete() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleLocationGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.LocationOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Locations.GetOneByGroup(auth, auth.GroupID, ID)
var location, err = ctrl.GetLocationWithPrice(auth, auth.GID, ID)

return location, err
}

return adapters.CommandID("id", fn, http.StatusOK)
@@ -115,7 +156,7 @@ func (ctrl *V1Controller) HandleLocationUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.LocationUpdate) (repo.LocationOut, error) {
auth := services.NewContext(r.Context())
body.ID = ID
return ctrl.repo.Locations.UpdateByGroup(auth, auth.GroupID, ID, body)
return ctrl.repo.Locations.UpdateByGroup(auth, auth.GID, ID, body)
}

return adapters.ActionID("id", fn, http.StatusOK)
@@ -4,24 +4,25 @@ import (
"net/http"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleMaintenanceLogGet godoc
//
// @Summary Get Maintenance Log
// @Tags Maintenance
// @Tags Item Maintenance
// @Produce json
// @Success 200 {object} repo.MaintenanceLog
// @Param filters query repo.MaintenanceFilters false "which maintenance to retrieve"
// @Success 200 {array} repo.MaintenanceEntryWithDetails[]
// @Router /v1/items/{id}/maintenance [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceLogGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, q repo.MaintenanceLogQuery) (repo.MaintenanceLog, error) {
fn := func(r *http.Request, ID uuid.UUID, filters repo.MaintenanceFilters) ([]repo.MaintenanceEntryWithDetails, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.GetLog(auth, auth.GroupID, ID, q)
return ctrl.repo.MaintEntry.GetMaintenanceByItemID(auth, auth.GID, ID, filters)
}

return adapters.QueryID("id", fn, http.StatusOK)
@@ -30,7 +31,7 @@ func (ctrl *V1Controller) HandleMaintenanceLogGet() errchain.HandlerFunc {
// HandleMaintenanceEntryCreate godoc
//
// @Summary Create Maintenance Entry
// @Tags Maintenance
// @Tags Item Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryCreate true "Entry Data"
// @Success 201 {object} repo.MaintenanceEntry
@@ -44,39 +45,3 @@ func (ctrl *V1Controller) HandleMaintenanceEntryCreate() errchain.HandlerFunc {

return adapters.ActionID("id", fn, http.StatusCreated)
}

// HandleMaintenanceEntryDelete godoc
//
// @Summary Delete Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Success 204
// @Router /v1/items/{id}/maintenance/{entry_id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryDelete() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.MaintEntry.Delete(auth, entryID)
return nil, err
}

return adapters.CommandID("entry_id", fn, http.StatusNoContent)
}

// HandleMaintenanceEntryUpdate godoc
//
// @Summary Update Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance/{entry_id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID, body repo.MaintenanceEntryUpdate) (repo.MaintenanceEntry, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.Update(auth, entryID, body)
}

return adapters.ActionID("entry_id", fn, http.StatusOK)
}
65  backend/app/api/handlers/v1/v1_ctrl_maintenance.go  Normal file
@@ -0,0 +1,65 @@
package v1

import (
"net/http"

"github.com/google/uuid"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleMaintenanceGetAll godoc
//
// @Summary Query All Maintenance
// @Tags Maintenance
// @Produce json
// @Param filters query repo.MaintenanceFilters false "which maintenance to retrieve"
// @Success 200 {array} repo.MaintenanceEntryWithDetails[]
// @Router /v1/maintenance [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceGetAll() errchain.HandlerFunc {
fn := func(r *http.Request, filters repo.MaintenanceFilters) ([]repo.MaintenanceEntryWithDetails, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.GetAllMaintenance(auth, auth.GID, filters)
}

return adapters.Query(fn, http.StatusOK)
}

// HandleMaintenanceEntryUpdate godoc
//
// @Summary Update Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/maintenance/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID, body repo.MaintenanceEntryUpdate) (repo.MaintenanceEntry, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.Update(auth, entryID, body)
}

return adapters.ActionID("id", fn, http.StatusOK)
}

// HandleMaintenanceEntryDelete godoc
//
// @Summary Delete Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Success 204
// @Router /v1/maintenance/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryDelete() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.MaintEntry.Delete(auth, entryID)
return nil, err
}

return adapters.CommandID("id", fn, http.StatusNoContent)
}
@@ -5,10 +5,10 @@ import (

"github.com/containrrr/shoutrrr"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleGetUserNotifiers godoc
@@ -40,7 +40,7 @@ func (ctrl *V1Controller) HandleGetUserNotifiers() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleCreateNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, in repo.NotifierCreate) (repo.NotifierOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Notifiers.Create(auth, auth.GroupID, auth.UserID, in)
return ctrl.repo.Notifiers.Create(auth, auth.GID, auth.UID, in)
}

return adapters.Action(fn, http.StatusCreated)
@@ -57,7 +57,7 @@ func (ctrl *V1Controller) HandleCreateNotifier() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleDeleteNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
return nil, ctrl.repo.Notifiers.Delete(auth, auth.UserID, ID)
return nil, ctrl.repo.Notifiers.Delete(auth, auth.UID, ID)
}

return adapters.CommandID("id", fn, http.StatusNoContent)
@@ -75,7 +75,7 @@ func (ctrl *V1Controller) HandleDeleteNotifier() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleUpdateNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, in repo.NotifierUpdate) (repo.NotifierOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Notifiers.Update(auth, auth.UserID, ID, in)
return ctrl.repo.Notifiers.Update(auth, auth.UID, ID, in)
}

return adapters.ActionID("id", fn, http.StatusOK)
@@ -7,8 +8,8 @@ import (
"net/http"
"net/url"

"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
"github.com/yeqown/go-qrcode/v2"
"github.com/yeqown/go-qrcode/writer/standard"
@@ -1,10 +1,9 @@
package v1

import (
"net/http"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"net/http"
)

// HandleBillOfMaterialsExport godoc
@@ -19,13 +18,13 @@ func (ctrl *V1Controller) HandleBillOfMaterialsExport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
actor := services.UseUserCtx(r.Context())

csv, err := ctrl.svc.Items.ExportBillOfMaterialsTSV(r.Context(), actor.GroupID)
csv, err := ctrl.svc.Items.ExportBillOfMaterialsCSV(r.Context(), actor.GroupID)
if err != nil {
return err
}

w.Header().Set("Content-Type", "text/tsv")
w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.tsv")
w.Header().Set("Content-Type", "text/csv")
w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.csv")
_, err = w.Write(csv)
return err
}
@@ -4,12 +4,12 @@ import (
"net/http"
"time"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleGroupStatisticsLocations godoc
@@ -23,7 +23,7 @@ import (
func (ctrl *V1Controller) HandleGroupStatisticsLocations() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.TotalsByOrganizer, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Groups.StatsLocationsByPurchasePrice(auth, auth.GroupID)
return ctrl.repo.Groups.StatsLocationsByPurchasePrice(auth, auth.GID)
}

return adapters.Command(fn, http.StatusOK)
@@ -40,7 +40,7 @@ func (ctrl *V1Controller) HandleGroupStatisticsLocations() errchain.HandlerFunc
func (ctrl *V1Controller) HandleGroupStatisticsLabels() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.TotalsByOrganizer, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Groups.StatsLabelsByPurchasePrice(auth, auth.GroupID)
return ctrl.repo.Groups.StatsLabelsByPurchasePrice(auth, auth.GID)
}

return adapters.Command(fn, http.StatusOK)
@@ -57,7 +57,7 @@ func (ctrl *V1Controller) HandleGroupStatisticsLabels() errchain.HandlerFunc {
func (ctrl *V1Controller) HandleGroupStatistics() errchain.HandlerFunc {
fn := func(r *http.Request) (repo.GroupStatistics, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Groups.StatsGroup(auth, auth.GroupID)
return ctrl.repo.Groups.StatsGroup(auth, auth.GID)
}

return adapters.Command(fn, http.StatusOK)
@@ -94,7 +94,7 @@ func (ctrl *V1Controller) HandleGroupStatisticsPriceOverTime() errchain.HandlerF
return validate.NewRequestError(err, http.StatusBadRequest)
}

stats, err := ctrl.repo.Groups.StatsPurchasePrice(ctx, ctx.GroupID, startDate, endDate)
stats, err := ctrl.repo.Groups.StatsPurchasePrice(ctx, ctx.GID, startDate, endDate)
if err != nil {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
@@ -1,18 +1,16 @@
package v1

import (
"context"
"fmt"
"net/http"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

// HandleUserRegistration godoc
@@ -117,10 +115,12 @@ func (ctrl *V1Controller) HandleUserSelfDelete() errchain.HandlerFunc {
}
}

type ChangePassword struct {
Current string `json:"current,omitempty"`
New string `json:"new,omitempty"`
}
type (
ChangePassword struct {
Current string `json:"current,omitempty"`
New string `json:"new,omitempty"`
}
)

// HandleUserSelfChangePassword godoc
//
@@ -144,7 +144,7 @@ func (ctrl *V1Controller) HandleUserSelfChangePassword() errchain.HandlerFunc {

ctx := services.NewContext(r.Context())

ok := ctrl.svc.User.PasswordChange(ctx, cp.Current, cp.New)
ok := ctrl.svc.User.ChangePassword(ctx, cp.Current, cp.New)
if !ok {
return validate.NewRequestError(err, http.StatusInternalServerError)
}
@@ -152,73 +152,3 @@ func (ctrl *V1Controller) HandleUserSelfChangePassword() errchain.HandlerFunc {
return server.JSON(w, http.StatusNoContent, nil)
}
}

// HandleUserSelfChangePasswordWithToken godoc
//
// @Summary Change Password
// @Tags User
// @Success 204
// @Param payload body ChangePassword true "Password Payload"
// @Router /v1/users/change-password-token [PUT]
func (ctrl *V1Controller) HandleUserSelfChangePasswordWithToken() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
tokenQueryParam := r.URL.Query().Get("token")
if tokenQueryParam == "" {
return validate.NewRequestError(fmt.Errorf("missing token query param"), http.StatusBadRequest)
}

if ctrl.isDemo {
return validate.NewRequestError(nil, http.StatusForbidden)
}

var cp ChangePassword
err := server.Decode(r, &cp)
if err != nil {
log.Err(err).Msg("user failed to change password")
}

ctx := services.NewContext(r.Context())

ok := ctrl.svc.User.PasswordChange(ctx, cp.Current, cp.New)
if !ok {
return validate.NewRequestError(err, http.StatusInternalServerError)
}

return server.JSON(w, http.StatusNoContent, nil)
}
}

// HandleUserRequestPasswordReset godoc
//
// @Summary Request Password Reset
// @Tags User
// @Produce json
// @Param payload body services.PasswordResetRequest true "User Data"
// @Success 204
// @Router /v1/users/request-password-reset [Post]
func (ctrl *V1Controller) HandleUserRequestPasswordReset() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
if ctrl.isDemo {
return validate.NewRequestError(nil, http.StatusForbidden)
}

v, err := adapters.DecodeBody[services.PasswordResetRequest](r)
if err != nil {
return err
}

go func() {
ctx := context.Background()

err = ctrl.svc.User.PasswordResetRequest(ctx, v)
if err != nil {
log.Warn().
Err(err).
Str("email", v.Email).
Msg("failed to request password reset")
}
}()

return server.JSON(w, http.StatusNoContent, nil)
}
}
@@ -3,9 +3,9 @@ package main
import (
"os"

"github.com/hay-kot/homebox/backend/internal/sys/config"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
)

// setupLogger initializes the zerolog config
@@ -14,21 +14,21 @@ import (
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"

"github.com/hay-kot/homebox/backend/internal/core/currencies"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/migrations"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/config"
"github.com/hay-kot/homebox/backend/internal/web/mid"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/graceful"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/rs/zerolog/pkgerrors"
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
"github.com/sysadminsmedia/homebox/backend/internal/data/migrations"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
"github.com/sysadminsmedia/homebox/backend/internal/web/mid"

_ "github.com/hay-kot/homebox/backend/pkgs/cgofreesqlite"
_ "github.com/sysadminsmedia/homebox/backend/pkgs/cgofreesqlite"
)

var (
@@ -115,16 +115,17 @@ func run(cfg *config.Config) error {

err = c.Schema.Create(context.Background(), options...)
if err != nil {
log.Fatal(). // nolint
Err(err).
Str("driver", "sqlite").
Str("url", cfg.Storage.SqliteURL).
Msg("failed creating schema resources")
log.Error().
Err(err).
Str("driver", "sqlite").
Str("url", cfg.Storage.SqliteURL).
Msg("failed creating schema resources")
return err
}

err = os.RemoveAll(temp)
if err != nil {
log.Fatal().Err(err).Msg("failed to remove temporary directory for database migrations")
log.Error().Err(err).Msg("failed to remove temporary directory for database migrations")
return err
}

@@ -139,10 +140,11 @@ func run(cfg *config.Config) error {

content, err := os.ReadFile(cfg.Options.CurrencyConfig)
if err != nil {
log.Fatal().
log.Error().
Err(err).
Str("path", cfg.Options.CurrencyConfig).
Msg("failed to read currency config file")
return err
}

collectFuncs = append(collectFuncs, currencies.CollectJSON(bytes.NewReader(content)))
@@ -150,9 +152,10 @@ func run(cfg *config.Config) error {

currencies, err := currencies.CollectionCurrencies(collectFuncs...)
if err != nil {
log.Fatal().
log.Error().
Err(err).
Msg("failed to collect currencies")
return err
}

app.bus = eventbus.New()
@@ -160,8 +163,6 @@ func run(cfg *config.Config) error {
app.repos = repo.New(c, app.bus, cfg.Storage.Data)
app.services = services.New(
app.repos,
app.conf.BaseURL,
app.mailer,
services.WithAutoIncrementAssetID(cfg.Options.AutoIncrementAssetID),
services.WithCurrencies(currencies),
)
@@ -182,7 +183,7 @@ func run(cfg *config.Config) error {

chain := errchain.New(mid.Errors(logger))

app.mountRoutes(router, chain)
app.mountRoutes(router, chain, app.repos)

runner := graceful.NewRunner()

@@ -213,7 +214,10 @@ func run(cfg *config.Config) error {
// TODO: Remove through external API that does setup
if cfg.Demo {
log.Info().Msg("Running in demo mode, creating demo data")
app.SetupDemo()
err := app.SetupDemo()
if err != nil {
log.Fatal().Msg(err.Error())
}
}
return nil
})
@@ -7,11 +7,11 @@ import (
"net/url"
"strings"

v1 "github.com/hay-kot/homebox/backend/app/api/handlers/v1"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain"
v1 "github.com/sysadminsmedia/homebox/backend/app/api/handlers/v1"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

type tokenHasKey struct {
@@ -4,9 +4,9 @@ import (
"errors"
"net/http"

"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

type LoginForm struct {
@@ -3,7 +3,7 @@ package providers
import (
"net/http"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
)

type LocalProvider struct {
@@ -3,6 +3,7 @@ package main
import (
"embed"
"errors"
"fmt"
"io"
"mime"
"net/http"
@@ -10,13 +11,14 @@ import (
"path/filepath"

"github.com/go-chi/chi/v5"
"github.com/hay-kot/homebox/backend/app/api/handlers/debughandlers"
v1 "github.com/hay-kot/homebox/backend/app/api/handlers/v1"
"github.com/hay-kot/homebox/backend/app/api/providers"
_ "github.com/hay-kot/homebox/backend/app/api/static/docs"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/httpkit/errchain"
httpSwagger "github.com/swaggo/http-swagger/v2" // http-swagger middleware
"github.com/sysadminsmedia/homebox/backend/app/api/handlers/debughandlers"
v1 "github.com/sysadminsmedia/homebox/backend/app/api/handlers/v1"
"github.com/sysadminsmedia/homebox/backend/app/api/providers"
_ "github.com/sysadminsmedia/homebox/backend/app/api/static/docs"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
)

const prefix = "/api"
@@ -36,7 +38,7 @@ func (a *app) debugRouter() *http.ServeMux {
}

// registerRoutes registers all the routes for the API
func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain) {
func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllRepos) {
registerMimes()

r.Get("/swagger/*", httpSwagger.Handler(
@@ -46,8 +48,6 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain) {
// =========================================================================
// API Version 1

v1Base := v1.BaseURLFunc(prefix)

v1Ctrl := v1.NewControllerV1(
a.services,
a.repos,
@@ -55,114 +55,117 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain) {
v1.WithMaxUploadSize(a.conf.Web.MaxUploadSize),
v1.WithRegistration(a.conf.Options.AllowRegistration),
v1.WithDemoStatus(a.conf.Demo), // Disable Password Change in Demo Mode
v1.WithURL(fmt.Sprintf("%s:%s", a.conf.Web.Host, a.conf.Web.Port)),
)
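services.New(...) above and v1.NewControllerV1(...) here are configured through functional options such as services.WithCurrencies(...) and v1.WithMaxUploadSize(...). A generic sketch of that pattern; the Server type and option names below are invented for illustration and are not Homebox APIs:

```go
package main

import "fmt"

// Server holds configuration that options mutate before use.
type Server struct {
	maxUploadSize     int64
	allowRegistration bool
	url               string
}

// Option is a function that tweaks one field of the Server.
type Option func(*Server)

func WithMaxUploadSize(mb int64) Option    { return func(s *Server) { s.maxUploadSize = mb } }
func WithRegistration(allowed bool) Option { return func(s *Server) { s.allowRegistration = allowed } }
func WithURL(url string) Option            { return func(s *Server) { s.url = url } }

// NewServer applies defaults first, then each option in order.
func NewServer(opts ...Option) *Server {
	s := &Server{maxUploadSize: 10, allowRegistration: true}
	for _, opt := range opts {
		opt(s)
	}
	return s
}

func main() {
	s := NewServer(WithMaxUploadSize(50), WithURL("127.0.0.1:7745")) // address is a placeholder
	fmt.Printf("%+v\n", s)
}
```

The benefit over a long positional constructor is that new settings (like the currencies list added here) can be introduced without breaking existing call sites.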
r.Get(v1Base("/status"), chain.ToHandlerFunc(v1Ctrl.HandleBase(func() bool { return true }, v1.Build{
|
||||
Version: version,
|
||||
Commit: commit,
|
||||
BuildTime: buildTime,
|
||||
})))
|
||||
r.Route(prefix+"/v1", func(r chi.Router) {
|
||||
r.Get("/status", chain.ToHandlerFunc(v1Ctrl.HandleBase(func() bool { return true }, v1.Build{
|
||||
Version: version,
|
||||
Commit: commit,
|
||||
BuildTime: buildTime,
|
||||
})))
|
||||
|
||||
r.Get(v1Base("/currencies"), chain.ToHandlerFunc(v1Ctrl.HandleCurrency()))
|
||||
r.Get("/currencies", chain.ToHandlerFunc(v1Ctrl.HandleCurrency()))
|
||||
|
||||
providers := []v1.AuthProvider{
|
||||
providers.NewLocalProvider(a.services.User),
|
||||
}
|
||||
providers := []v1.AuthProvider{
|
||||
providers.NewLocalProvider(a.services.User),
|
||||
}
|
||||
|
||||
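The login route below receives a slice of v1.AuthProvider values (currently only the local provider), so additional authentication backends can be plugged in without changing the handler. A sketch of that idea; the AuthProvider interface here is invented for illustration and may not match the real v1.AuthProvider:

```go
package main

import (
	"errors"
	"fmt"
	"net/http"
)

// AuthProvider is a hypothetical interface: each provider gets a chance
// to authenticate the incoming request.
type AuthProvider interface {
	Name() string
	Authenticate(r *http.Request) (userID string, err error)
}

// localProvider approves one hard-coded token, standing in for a real
// username/password provider.
type localProvider struct{}

func (localProvider) Name() string { return "local" }
func (localProvider) Authenticate(r *http.Request) (string, error) {
	if r.Header.Get("Authorization") == "Bearer demo-token" {
		return "user-1", nil
	}
	return "", errors.New("bad credentials")
}

// loginHandler tries each provider in order and fails only if all of them do.
func loginHandler(providers ...AuthProvider) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		for _, p := range providers {
			if id, err := p.Authenticate(r); err == nil {
				fmt.Fprintf(w, "authenticated %s via %s\n", id, p.Name())
				return
			}
		}
		http.Error(w, "unauthorized", http.StatusUnauthorized)
	}
}

func main() {
	http.Handle("/api/v1/users/login", loginHandler(localProvider{}))
	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}
```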
r.Post(v1Base("/users/register"), chain.ToHandlerFunc(v1Ctrl.HandleUserRegistration()))
|
||||
r.Post(v1Base("/users/login"), chain.ToHandlerFunc(v1Ctrl.HandleAuthLogin(providers...)))
|
||||
r.Post(v1Base("/users/request-password-reset"), chain.ToHandlerFunc(v1Ctrl.HandleUserRequestPasswordReset()))
|
||||
r.Post("/users/register", chain.ToHandlerFunc(v1Ctrl.HandleUserRegistration()))
|
||||
r.Post("/users/login", chain.ToHandlerFunc(v1Ctrl.HandleAuthLogin(providers...)))
|
||||
|
||||
userMW := []errchain.Middleware{
|
||||
a.mwAuthToken,
|
||||
a.mwRoles(RoleModeOr, authroles.RoleUser.String()),
|
||||
}
|
||||
userMW := []errchain.Middleware{
|
||||
a.mwAuthToken,
|
||||
a.mwRoles(RoleModeOr, authroles.RoleUser.String()),
|
||||
}
|
||||
|
||||
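userMW above stacks two errchain middlewares: a.mwAuthToken followed by a role check via a.mwRoles(RoleModeOr, ...). A plain net/http sketch of the same composition; requireToken and requireRole are hypothetical stand-ins, and Homebox's errchain.Middleware wraps error-returning handlers rather than http.Handler:

```go
package main

import "net/http"

// Middleware here is plain net/http; the composition idea is the same.
type Middleware func(http.Handler) http.Handler

// chain applies the middlewares so the first element of the slice runs first.
func chain(h http.Handler, mws ...Middleware) http.Handler {
	for i := len(mws) - 1; i >= 0; i-- {
		h = mws[i](h)
	}
	return h
}

// requireToken rejects requests without an Authorization header.
func requireToken(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.Header.Get("Authorization") == "" {
			http.Error(w, "missing token", http.StatusUnauthorized)
			return
		}
		next.ServeHTTP(w, r)
	})
}

// requireRole rejects requests whose (simplified) role header does not match.
func requireRole(role string) Middleware {
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			if r.Header.Get("X-Role") != role {
				http.Error(w, "forbidden", http.StatusForbidden)
				return
			}
			next.ServeHTTP(w, r)
		})
	}
}

func main() {
	self := http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.Write([]byte("ok")) })
	userMW := []Middleware{requireToken, requireRole("user")}
	http.Handle("/api/v1/users/self", chain(self, userMW...))
	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}
```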
r.Get(v1Base("/ws/events"), chain.ToHandlerFunc(v1Ctrl.HandleCacheWS(), userMW...))
|
||||
r.Get(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelf(), userMW...))
|
||||
r.Put(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfUpdate(), userMW...))
|
||||
r.Delete(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfDelete(), userMW...))
|
||||
r.Post(v1Base("/users/logout"), chain.ToHandlerFunc(v1Ctrl.HandleAuthLogout(), userMW...))
|
||||
r.Get(v1Base("/users/refresh"), chain.ToHandlerFunc(v1Ctrl.HandleAuthRefresh(), userMW...))
|
||||
r.Put(v1Base("/users/self/change-password"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfChangePassword(), userMW...))
|
||||
r.Put(v1Base("/users/self/change-password-token"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfChangePasswordWithToken()))
|
||||
r.Get("/ws/events", chain.ToHandlerFunc(v1Ctrl.HandleCacheWS(), userMW...))
|
||||
r.Get("/users/self", chain.ToHandlerFunc(v1Ctrl.HandleUserSelf(), userMW...))
|
||||
r.Put("/users/self", chain.ToHandlerFunc(v1Ctrl.HandleUserSelfUpdate(), userMW...))
|
||||
r.Delete("/users/self", chain.ToHandlerFunc(v1Ctrl.HandleUserSelfDelete(), userMW...))
|
||||
r.Post("/users/logout", chain.ToHandlerFunc(v1Ctrl.HandleAuthLogout(), userMW...))
|
||||
r.Get("/users/refresh", chain.ToHandlerFunc(v1Ctrl.HandleAuthRefresh(), userMW...))
|
||||
r.Put("/users/self/change-password", chain.ToHandlerFunc(v1Ctrl.HandleUserSelfChangePassword(), userMW...))
|
||||
|
||||
r.Post(v1Base("/groups/invitations"), chain.ToHandlerFunc(v1Ctrl.HandleGroupInvitationsCreate(), userMW...))
|
||||
r.Get(v1Base("/groups/statistics"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatistics(), userMW...))
|
||||
r.Get(v1Base("/groups/statistics/purchase-price"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsPriceOverTime(), userMW...))
|
||||
r.Get(v1Base("/groups/statistics/locations"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLocations(), userMW...))
|
||||
r.Get(v1Base("/groups/statistics/labels"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLabels(), userMW...))
|
||||
r.Post("/groups/invitations", chain.ToHandlerFunc(v1Ctrl.HandleGroupInvitationsCreate(), userMW...))
|
||||
r.Get("/groups/statistics", chain.ToHandlerFunc(v1Ctrl.HandleGroupStatistics(), userMW...))
|
||||
r.Get("/groups/statistics/purchase-price", chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsPriceOverTime(), userMW...))
|
||||
r.Get("/groups/statistics/locations", chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLocations(), userMW...))
|
||||
r.Get("/groups/statistics/labels", chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLabels(), userMW...))
|
||||
|
||||
// TODO: I don't like /groups being the URL for users
|
||||
r.Get(v1Base("/groups"), chain.ToHandlerFunc(v1Ctrl.HandleGroupGet(), userMW...))
|
||||
r.Put(v1Base("/groups"), chain.ToHandlerFunc(v1Ctrl.HandleGroupUpdate(), userMW...))
|
||||
// TODO: I don't like /groups being the URL for users
|
||||
r.Get("/groups", chain.ToHandlerFunc(v1Ctrl.HandleGroupGet(), userMW...))
|
||||
r.Put("/groups", chain.ToHandlerFunc(v1Ctrl.HandleGroupUpdate(), userMW...))
|
||||
|
||||
r.Post(v1Base("/actions/ensure-asset-ids"), chain.ToHandlerFunc(v1Ctrl.HandleEnsureAssetID(), userMW...))
|
||||
r.Post(v1Base("/actions/zero-item-time-fields"), chain.ToHandlerFunc(v1Ctrl.HandleItemDateZeroOut(), userMW...))
|
||||
r.Post(v1Base("/actions/ensure-import-refs"), chain.ToHandlerFunc(v1Ctrl.HandleEnsureImportRefs(), userMW...))
|
||||
r.Post(v1Base("/actions/set-primary-photos"), chain.ToHandlerFunc(v1Ctrl.HandleSetPrimaryPhotos(), userMW...))
|
||||
r.Post("/actions/ensure-asset-ids", chain.ToHandlerFunc(v1Ctrl.HandleEnsureAssetID(), userMW...))
|
||||
r.Post("/actions/zero-item-time-fields", chain.ToHandlerFunc(v1Ctrl.HandleItemDateZeroOut(), userMW...))
|
||||
r.Post("/actions/ensure-import-refs", chain.ToHandlerFunc(v1Ctrl.HandleEnsureImportRefs(), userMW...))
|
||||
r.Post("/actions/set-primary-photos", chain.ToHandlerFunc(v1Ctrl.HandleSetPrimaryPhotos(), userMW...))
|
||||
|
||||
r.Get(v1Base("/locations"), chain.ToHandlerFunc(v1Ctrl.HandleLocationGetAll(), userMW...))
|
||||
r.Post(v1Base("/locations"), chain.ToHandlerFunc(v1Ctrl.HandleLocationCreate(), userMW...))
|
||||
r.Get(v1Base("/locations/tree"), chain.ToHandlerFunc(v1Ctrl.HandleLocationTreeQuery(), userMW...))
|
||||
r.Get(v1Base("/locations/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLocationGet(), userMW...))
|
||||
r.Put(v1Base("/locations/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLocationUpdate(), userMW...))
|
||||
r.Delete(v1Base("/locations/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLocationDelete(), userMW...))
|
||||
r.Get("/locations", chain.ToHandlerFunc(v1Ctrl.HandleLocationGetAll(), userMW...))
|
||||
r.Post("/locations", chain.ToHandlerFunc(v1Ctrl.HandleLocationCreate(), userMW...))
|
||||
r.Get("/locations/tree", chain.ToHandlerFunc(v1Ctrl.HandleLocationTreeQuery(), userMW...))
|
||||
r.Get("/locations/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLocationGet(), userMW...))
|
||||
r.Put("/locations/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLocationUpdate(), userMW...))
|
||||
r.Delete("/locations/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLocationDelete(), userMW...))
|
||||
|
||||
r.Get(v1Base("/labels"), chain.ToHandlerFunc(v1Ctrl.HandleLabelsGetAll(), userMW...))
|
||||
r.Post(v1Base("/labels"), chain.ToHandlerFunc(v1Ctrl.HandleLabelsCreate(), userMW...))
|
||||
r.Get(v1Base("/labels/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLabelGet(), userMW...))
|
||||
r.Put(v1Base("/labels/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLabelUpdate(), userMW...))
|
||||
r.Delete(v1Base("/labels/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLabelDelete(), userMW...))
|
||||
r.Get("/labels", chain.ToHandlerFunc(v1Ctrl.HandleLabelsGetAll(), userMW...))
|
||||
r.Post("/labels", chain.ToHandlerFunc(v1Ctrl.HandleLabelsCreate(), userMW...))
|
||||
r.Get("/labels/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLabelGet(), userMW...))
|
||||
r.Put("/labels/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLabelUpdate(), userMW...))
|
||||
r.Delete("/labels/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLabelDelete(), userMW...))
|
||||
|
||||
r.Get(v1Base("/items"), chain.ToHandlerFunc(v1Ctrl.HandleItemsGetAll(), userMW...))
|
||||
r.Post(v1Base("/items"), chain.ToHandlerFunc(v1Ctrl.HandleItemsCreate(), userMW...))
|
||||
r.Post(v1Base("/items/import"), chain.ToHandlerFunc(v1Ctrl.HandleItemsImport(), userMW...))
|
||||
r.Get(v1Base("/items/export"), chain.ToHandlerFunc(v1Ctrl.HandleItemsExport(), userMW...))
|
||||
r.Get(v1Base("/items/fields"), chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldNames(), userMW...))
|
||||
r.Get(v1Base("/items/fields/values"), chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldValues(), userMW...))
|
||||
r.Get("/items", chain.ToHandlerFunc(v1Ctrl.HandleItemsGetAll(), userMW...))
|
||||
r.Post("/items", chain.ToHandlerFunc(v1Ctrl.HandleItemsCreate(), userMW...))
|
||||
r.Post("/items/import", chain.ToHandlerFunc(v1Ctrl.HandleItemsImport(), userMW...))
|
||||
r.Get("/items/export", chain.ToHandlerFunc(v1Ctrl.HandleItemsExport(), userMW...))
|
||||
r.Get("/items/fields", chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldNames(), userMW...))
|
||||
r.Get("/items/fields/values", chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldValues(), userMW...))
|
||||
|
||||
r.Get(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemGet(), userMW...))
|
||||
r.Get(v1Base("/items/{id}/path"), chain.ToHandlerFunc(v1Ctrl.HandleItemFullPath(), userMW...))
|
||||
r.Put(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemUpdate(), userMW...))
|
||||
r.Patch(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemPatch(), userMW...))
|
||||
r.Delete(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemDelete(), userMW...))
|
||||
r.Get("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemGet(), userMW...))
|
||||
r.Get("/items/{id}/path", chain.ToHandlerFunc(v1Ctrl.HandleItemFullPath(), userMW...))
|
||||
r.Put("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemUpdate(), userMW...))
|
||||
r.Patch("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemPatch(), userMW...))
|
||||
r.Delete("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemDelete(), userMW...))
|
||||
|
||||
r.Post(v1Base("/items/{id}/attachments"), chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentCreate(), userMW...))
|
||||
r.Put(v1Base("/items/{id}/attachments/{attachment_id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentUpdate(), userMW...))
|
||||
r.Delete(v1Base("/items/{id}/attachments/{attachment_id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentDelete(), userMW...))
|
||||
r.Post("/items/{id}/attachments", chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentCreate(), userMW...))
|
||||
r.Put("/items/{id}/attachments/{attachment_id}", chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentUpdate(), userMW...))
|
||||
r.Delete("/items/{id}/attachments/{attachment_id}", chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentDelete(), userMW...))
|
||||
|
||||
r.Get(v1Base("/items/{id}/maintenance"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceLogGet(), userMW...))
|
||||
r.Post(v1Base("/items/{id}/maintenance"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryCreate(), userMW...))
|
||||
r.Put(v1Base("/items/{id}/maintenance/{entry_id}"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryUpdate(), userMW...))
|
||||
r.Delete(v1Base("/items/{id}/maintenance/{entry_id}"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryDelete(), userMW...))
|
||||
r.Get("/items/{id}/maintenance", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceLogGet(), userMW...))
|
||||
r.Post("/items/{id}/maintenance", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryCreate(), userMW...))
|
||||
|
||||
r.Get(v1Base("/assets/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleAssetGet(), userMW...))
|
||||
r.Get("/assets/{id}", chain.ToHandlerFunc(v1Ctrl.HandleAssetGet(), userMW...))
|
||||
|
||||
// Notifiers
|
||||
r.Get(v1Base("/notifiers"), chain.ToHandlerFunc(v1Ctrl.HandleGetUserNotifiers(), userMW...))
|
||||
r.Post(v1Base("/notifiers"), chain.ToHandlerFunc(v1Ctrl.HandleCreateNotifier(), userMW...))
|
||||
r.Put(v1Base("/notifiers/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleUpdateNotifier(), userMW...))
|
||||
r.Delete(v1Base("/notifiers/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleDeleteNotifier(), userMW...))
|
||||
r.Post(v1Base("/notifiers/test"), chain.ToHandlerFunc(v1Ctrl.HandlerNotifierTest(), userMW...))
|
||||
// Maintenance
|
||||
r.Get("/maintenance", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceGetAll(), userMW...))
|
||||
r.Put("/maintenance/{id}", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryUpdate(), userMW...))
|
||||
r.Delete("/maintenance/{id}", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryDelete(), userMW...))
|
||||
|
||||
// Asset-Like endpoints
|
||||
assetMW := []errchain.Middleware{
|
||||
a.mwAuthToken,
|
||||
a.mwRoles(RoleModeOr, authroles.RoleUser.String(), authroles.RoleAttachments.String()),
|
||||
}
|
||||
// Notifiers
|
||||
r.Get("/notifiers", chain.ToHandlerFunc(v1Ctrl.HandleGetUserNotifiers(), userMW...))
|
||||
r.Post("/notifiers", chain.ToHandlerFunc(v1Ctrl.HandleCreateNotifier(), userMW...))
|
||||
r.Put("/notifiers/{id}", chain.ToHandlerFunc(v1Ctrl.HandleUpdateNotifier(), userMW...))
|
||||
r.Delete("/notifiers/{id}", chain.ToHandlerFunc(v1Ctrl.HandleDeleteNotifier(), userMW...))
|
||||
r.Post("/notifiers/test", chain.ToHandlerFunc(v1Ctrl.HandlerNotifierTest(), userMW...))
|
||||
|
||||
r.Get(
|
||||
v1Base("/qrcode"),
|
||||
chain.ToHandlerFunc(v1Ctrl.HandleGenerateQRCode(), assetMW...),
|
||||
)
|
||||
r.Get(
|
||||
v1Base("/items/{id}/attachments/{attachment_id}"),
|
||||
chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentGet(), assetMW...),
|
||||
)
|
||||
// Asset-Like endpoints
|
||||
assetMW := []errchain.Middleware{
|
||||
a.mwAuthToken,
|
||||
a.mwRoles(RoleModeOr, authroles.RoleUser.String(), authroles.RoleAttachments.String()),
|
||||
}
|
||||
|
||||
// Reporting Services
|
||||
r.Get(v1Base("/reporting/bill-of-materials"), chain.ToHandlerFunc(v1Ctrl.HandleBillOfMaterialsExport(), userMW...))
|
||||
r.Get("/qrcode", chain.ToHandlerFunc(v1Ctrl.HandleGenerateQRCode(), assetMW...))
|
||||
r.Get(
|
||||
"/items/{id}/attachments/{attachment_id}",
|
||||
chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentGet(), assetMW...),
|
||||
)
|
||||
|
||||
// Reporting Services
|
||||
r.Get("/reporting/bill-of-materials", chain.ToHandlerFunc(v1Ctrl.HandleBillOfMaterialsExport(), userMW...))
|
||||
|
||||
r.NotFound(http.NotFound)
|
||||
})
|
||||
|
||||
r.NotFound(chain.ToHandlerFunc(notFoundHandler()))
|
||||
}
|
||||
|
||||
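The overall refactor in mountRoutes replaces per-route v1Base("/path") prefixes with a single r.Route(prefix+"/v1", ...) group that registers relative paths, and moves the maintenance update/delete routes to a group-wide /maintenance resource. A minimal chi sketch of that structure, with placeholder handlers rather than the Homebox controllers:

```go
package main

import (
	"net/http"

	"github.com/go-chi/chi/v5"
)

func main() {
	r := chi.NewRouter()

	// ok stands in for real handlers.
	ok := func(w http.ResponseWriter, _ *http.Request) { w.Write([]byte("ok")) }

	// Every path inside the group is relative, so the "/api/v1" prefix is
	// written exactly once instead of via a helper on every registration.
	r.Route("/api/v1", func(r chi.Router) {
		r.Get("/status", ok)
		r.Get("/currencies", ok)
		r.Get("/items/{id}", ok)     // chi URL params keep working inside the group
		r.Get("/maintenance", ok)    // group-wide maintenance listing
		r.Put("/maintenance/{id}", ok)
		r.NotFound(http.NotFound)    // 404s inside the group stay plain
	})

	_ = http.ListenAndServe("127.0.0.1:8080", r)
}
```

Grouping also scopes the NotFound handler: API 404s use http.NotFound, while everything outside the group still falls through to notFoundHandler().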
@@ -917,14 +917,34 @@ const docTemplate = `{
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
"Item Maintenance"
|
||||
],
|
||||
"summary": "Get Maintenance Log",
|
||||
"parameters": [
|
||||
{
|
||||
"enum": [
|
||||
"scheduled",
|
||||
"completed",
|
||||
"both"
|
||||
],
|
||||
"type": "string",
|
||||
"x-enum-varnames": [
|
||||
"MaintenanceFilterStatusScheduled",
|
||||
"MaintenanceFilterStatusCompleted",
|
||||
"MaintenanceFilterStatusBoth"
|
||||
],
|
||||
"name": "status",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceLog"
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryWithDetails"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -939,7 +959,7 @@ const docTemplate = `{
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
"Item Maintenance"
|
||||
],
|
||||
"summary": "Create Maintenance Entry",
|
||||
"parameters": [
|
||||
@@ -963,60 +983,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/items/{id}/maintenance/{entry_id}": {
|
||||
"put": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Update Maintenance Entry",
|
||||
"parameters": [
|
||||
{
|
||||
"description": "Entry Data",
|
||||
"name": "payload",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryUpdate"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntry"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Delete Maintenance Entry",
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "No Content"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/items/{id}/path": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -1409,6 +1375,104 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/maintenance": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Query All Maintenance",
|
||||
"parameters": [
|
||||
{
|
||||
"enum": [
|
||||
"scheduled",
|
||||
"completed",
|
||||
"both"
|
||||
],
|
||||
"type": "string",
|
||||
"x-enum-varnames": [
|
||||
"MaintenanceFilterStatusScheduled",
|
||||
"MaintenanceFilterStatusCompleted",
|
||||
"MaintenanceFilterStatusBoth"
|
||||
],
|
||||
"name": "status",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryWithDetails"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/maintenance/{id}": {
|
||||
"put": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Update Maintenance Entry",
|
||||
"parameters": [
|
||||
{
|
||||
"description": "Entry Data",
|
||||
"name": "payload",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryUpdate"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntry"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Delete Maintenance Entry",
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "No Content"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/notifiers": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -1792,33 +1856,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/users/request-password-reset": {
|
||||
"post": {
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"User"
|
||||
],
|
||||
"summary": "Request Password Reset",
|
||||
"parameters": [
|
||||
{
|
||||
"description": "User Data",
|
||||
"name": "payload",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/services.PasswordResetRequest"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "No Content"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/users/self": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -2180,8 +2217,7 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"purchasePrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number"
|
||||
},
|
||||
"purchaseTime": {
|
||||
"description": "Purchase",
|
||||
@@ -2197,8 +2233,7 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"soldPrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number"
|
||||
},
|
||||
"soldTime": {
|
||||
"description": "Sold",
|
||||
@@ -2251,6 +2286,10 @@ const docTemplate = `{
|
||||
"archived": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"assetId": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
},
|
||||
"createdAt": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2286,8 +2325,7 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"purchasePrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number"
|
||||
},
|
||||
"quantity": {
|
||||
"type": "integer"
|
||||
@@ -2310,6 +2348,9 @@ const docTemplate = `{
|
||||
},
|
||||
"repo.ItemUpdate": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name"
|
||||
],
|
||||
"properties": {
|
||||
"archived": {
|
||||
"type": "boolean"
|
||||
@@ -2318,7 +2359,8 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 1000
|
||||
},
|
||||
"fields": {
|
||||
"type": "array",
|
||||
@@ -2353,7 +2395,9 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"minLength": 1
|
||||
},
|
||||
"notes": {
|
||||
"description": "Extras",
|
||||
@@ -2365,11 +2409,13 @@ const docTemplate = `{
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseFrom": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
"purchasePrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number",
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseTime": {
|
||||
"description": "Purchase",
|
||||
@@ -2386,15 +2432,17 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"soldPrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number",
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
},
|
||||
"soldTime": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"soldTo": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
"warrantyDetails": {
|
||||
"type": "string"
|
||||
@@ -2503,6 +2551,9 @@ const docTemplate = `{
|
||||
"parent": {
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
},
|
||||
"totalPrice": {
|
||||
"type": "number"
|
||||
},
|
||||
"updatedAt": {
|
||||
"type": "string"
|
||||
}
|
||||
@@ -2638,26 +2689,49 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.MaintenanceLog": {
|
||||
"repo.MaintenanceEntryWithDetails": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"costAverage": {
|
||||
"type": "number"
|
||||
"completedDate": {
|
||||
"type": "string"
|
||||
},
|
||||
"costTotal": {
|
||||
"type": "number"
|
||||
"cost": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
},
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntry"
|
||||
}
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"itemId": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"itemID": {
|
||||
"type": "string"
|
||||
},
|
||||
"itemName": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.MaintenanceFilterStatus": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"scheduled",
|
||||
"completed",
|
||||
"both"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"MaintenanceFilterStatusScheduled",
|
||||
"MaintenanceFilterStatusCompleted",
|
||||
"MaintenanceFilterStatusBoth"
|
||||
]
|
||||
},
|
||||
"repo.NotifierCreate": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
@@ -2699,6 +2773,9 @@ const docTemplate = `{
|
||||
"updatedAt": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string"
|
||||
},
|
||||
"userId": {
|
||||
"type": "string"
|
||||
}
|
||||
@@ -2852,14 +2929,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"services.PasswordResetRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"email": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services.UserRegistration": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
|
||||
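The schema changes above (repeated next for the generated swagger.json and swagger.yaml) replace the repo.MaintenanceLog aggregate with a flat list of repo.MaintenanceEntryWithDetails and document a new group-wide GET /v1/maintenance endpoint that takes a status filter (scheduled, completed, or both). A hedged client sketch against that documented shape; the base URL, port, and token are placeholders, and the struct fields are copied from the schema with JSON tags assumed to match the property names:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// MaintenanceEntryWithDetails mirrors the repo.MaintenanceEntryWithDetails
// schema above; every field is a string there, and cost is documented with
// the example "0".
type MaintenanceEntryWithDetails struct {
	ID            string `json:"id"`
	ItemID        string `json:"itemID"`
	ItemName      string `json:"itemName"`
	Name          string `json:"name"`
	Description   string `json:"description"`
	Cost          string `json:"cost"`
	CompletedDate string `json:"completedDate"`
	ScheduledDate string `json:"scheduledDate"`
}

func main() {
	// Placeholder instance URL and bearer token.
	req, err := http.NewRequest(http.MethodGet,
		"http://localhost:7745/api/v1/maintenance?status=scheduled", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer <token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var entries []MaintenanceEntryWithDetails
	if err := json.NewDecoder(resp.Body).Decode(&entries); err != nil {
		panic(err)
	}
	for _, e := range entries {
		fmt.Printf("%s (%s) scheduled for %s\n", e.Name, e.ItemName, e.ScheduledDate)
	}
}
```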
@@ -910,14 +910,34 @@
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
"Item Maintenance"
|
||||
],
|
||||
"summary": "Get Maintenance Log",
|
||||
"parameters": [
|
||||
{
|
||||
"enum": [
|
||||
"scheduled",
|
||||
"completed",
|
||||
"both"
|
||||
],
|
||||
"type": "string",
|
||||
"x-enum-varnames": [
|
||||
"MaintenanceFilterStatusScheduled",
|
||||
"MaintenanceFilterStatusCompleted",
|
||||
"MaintenanceFilterStatusBoth"
|
||||
],
|
||||
"name": "status",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceLog"
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryWithDetails"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -932,7 +952,7 @@
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
"Item Maintenance"
|
||||
],
|
||||
"summary": "Create Maintenance Entry",
|
||||
"parameters": [
|
||||
@@ -956,60 +976,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/items/{id}/maintenance/{entry_id}": {
|
||||
"put": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Update Maintenance Entry",
|
||||
"parameters": [
|
||||
{
|
||||
"description": "Entry Data",
|
||||
"name": "payload",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryUpdate"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntry"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Delete Maintenance Entry",
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "No Content"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/items/{id}/path": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -1402,6 +1368,104 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/maintenance": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Query All Maintenance",
|
||||
"parameters": [
|
||||
{
|
||||
"enum": [
|
||||
"scheduled",
|
||||
"completed",
|
||||
"both"
|
||||
],
|
||||
"type": "string",
|
||||
"x-enum-varnames": [
|
||||
"MaintenanceFilterStatusScheduled",
|
||||
"MaintenanceFilterStatusCompleted",
|
||||
"MaintenanceFilterStatusBoth"
|
||||
],
|
||||
"name": "status",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryWithDetails"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/maintenance/{id}": {
|
||||
"put": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Update Maintenance Entry",
|
||||
"parameters": [
|
||||
{
|
||||
"description": "Entry Data",
|
||||
"name": "payload",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryUpdate"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntry"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Delete Maintenance Entry",
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "No Content"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/notifiers": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -1785,33 +1849,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/users/request-password-reset": {
|
||||
"post": {
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"User"
|
||||
],
|
||||
"summary": "Request Password Reset",
|
||||
"parameters": [
|
||||
{
|
||||
"description": "User Data",
|
||||
"name": "payload",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/services.PasswordResetRequest"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "No Content"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/users/self": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -2173,8 +2210,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"purchasePrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number"
|
||||
},
|
||||
"purchaseTime": {
|
||||
"description": "Purchase",
|
||||
@@ -2190,8 +2226,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"soldPrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number"
|
||||
},
|
||||
"soldTime": {
|
||||
"description": "Sold",
|
||||
@@ -2244,6 +2279,10 @@
|
||||
"archived": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"assetId": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
},
|
||||
"createdAt": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2279,8 +2318,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"purchasePrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number"
|
||||
},
|
||||
"quantity": {
|
||||
"type": "integer"
|
||||
@@ -2303,6 +2341,9 @@
|
||||
},
|
||||
"repo.ItemUpdate": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name"
|
||||
],
|
||||
"properties": {
|
||||
"archived": {
|
||||
"type": "boolean"
|
||||
@@ -2311,7 +2352,8 @@
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 1000
|
||||
},
|
||||
"fields": {
|
||||
"type": "array",
|
||||
@@ -2346,7 +2388,9 @@
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"minLength": 1
|
||||
},
|
||||
"notes": {
|
||||
"description": "Extras",
|
||||
@@ -2358,11 +2402,13 @@
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseFrom": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
"purchasePrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number",
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseTime": {
|
||||
"description": "Purchase",
|
||||
@@ -2379,15 +2425,17 @@
|
||||
"type": "string"
|
||||
},
|
||||
"soldPrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number",
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
},
|
||||
"soldTime": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"soldTo": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
"warrantyDetails": {
|
||||
"type": "string"
|
||||
@@ -2496,6 +2544,9 @@
|
||||
"parent": {
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
},
|
||||
"totalPrice": {
|
||||
"type": "number"
|
||||
},
|
||||
"updatedAt": {
|
||||
"type": "string"
|
||||
}
|
||||
@@ -2631,26 +2682,49 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.MaintenanceLog": {
|
||||
"repo.MaintenanceEntryWithDetails": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"costAverage": {
|
||||
"type": "number"
|
||||
"completedDate": {
|
||||
"type": "string"
|
||||
},
|
||||
"costTotal": {
|
||||
"type": "number"
|
||||
"cost": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
},
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntry"
|
||||
}
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"itemId": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"itemID": {
|
||||
"type": "string"
|
||||
},
|
||||
"itemName": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.MaintenanceFilterStatus": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"scheduled",
|
||||
"completed",
|
||||
"both"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"MaintenanceFilterStatusScheduled",
|
||||
"MaintenanceFilterStatusCompleted",
|
||||
"MaintenanceFilterStatusBoth"
|
||||
]
|
||||
},
|
||||
"repo.NotifierCreate": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
@@ -2692,6 +2766,9 @@
|
||||
"updatedAt": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string"
|
||||
},
|
||||
"userId": {
|
||||
"type": "string"
|
||||
}
|
||||
@@ -2845,14 +2922,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"services.PasswordResetRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"email": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services.UserRegistration": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
|
||||
@@ -171,8 +171,7 @@ definitions:
|
||||
purchaseFrom:
|
||||
type: string
|
||||
purchasePrice:
|
||||
example: "0"
|
||||
type: string
|
||||
type: number
|
||||
purchaseTime:
|
||||
description: Purchase
|
||||
type: string
|
||||
@@ -183,8 +182,7 @@ definitions:
|
||||
soldNotes:
|
||||
type: string
|
||||
soldPrice:
|
||||
example: "0"
|
||||
type: string
|
||||
type: number
|
||||
soldTime:
|
||||
description: Sold
|
||||
type: string
|
||||
@@ -219,6 +217,9 @@ definitions:
|
||||
properties:
|
||||
archived:
|
||||
type: boolean
|
||||
assetId:
|
||||
example: "0"
|
||||
type: string
|
||||
createdAt:
|
||||
type: string
|
||||
description:
|
||||
@@ -242,8 +243,7 @@ definitions:
|
||||
name:
|
||||
type: string
|
||||
purchasePrice:
|
||||
example: "0"
|
||||
type: string
|
||||
type: number
|
||||
quantity:
|
||||
type: integer
|
||||
updatedAt:
|
||||
@@ -264,6 +264,7 @@ definitions:
|
||||
assetId:
|
||||
type: string
|
||||
description:
|
||||
maxLength: 1000
|
||||
type: string
|
||||
fields:
|
||||
items:
|
||||
@@ -288,6 +289,8 @@ definitions:
|
||||
modelNumber:
|
||||
type: string
|
||||
name:
|
||||
maxLength: 255
|
||||
minLength: 1
|
||||
type: string
|
||||
notes:
|
||||
description: Extras
|
||||
@@ -297,10 +300,12 @@ definitions:
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
purchaseFrom:
|
||||
maxLength: 255
|
||||
type: string
|
||||
purchasePrice:
|
||||
example: "0"
|
||||
type: string
|
||||
type: number
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
purchaseTime:
|
||||
description: Purchase
|
||||
type: string
|
||||
@@ -312,17 +317,21 @@ definitions:
|
||||
soldNotes:
|
||||
type: string
|
||||
soldPrice:
|
||||
example: "0"
|
||||
type: string
|
||||
type: number
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
soldTime:
|
||||
description: Sold
|
||||
type: string
|
||||
soldTo:
|
||||
maxLength: 255
|
||||
type: string
|
||||
warrantyDetails:
|
||||
type: string
|
||||
warrantyExpires:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
type: object
|
||||
repo.LabelCreate:
|
||||
properties:
|
||||
@@ -390,6 +399,8 @@ definitions:
|
||||
type: string
|
||||
parent:
|
||||
$ref: '#/definitions/repo.LocationSummary'
|
||||
totalPrice:
|
||||
type: number
|
||||
updatedAt:
|
||||
type: string
|
||||
type: object
|
||||
@@ -479,19 +490,36 @@ definitions:
|
||||
scheduledDate:
|
||||
type: string
|
||||
type: object
|
||||
repo.MaintenanceLog:
|
||||
repo.MaintenanceEntryWithDetails:
|
||||
properties:
|
||||
costAverage:
|
||||
type: number
|
||||
costTotal:
|
||||
type: number
|
||||
entries:
|
||||
items:
|
||||
$ref: '#/definitions/repo.MaintenanceEntry'
|
||||
type: array
|
||||
itemId:
|
||||
completedDate:
|
||||
type: string
|
||||
cost:
|
||||
example: "0"
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
itemID:
|
||||
type: string
|
||||
itemName:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
scheduledDate:
|
||||
type: string
|
||||
type: object
|
||||
repo.MaintenanceFilterStatus:
|
||||
enum:
|
||||
- scheduled
|
||||
- completed
|
||||
- both
|
||||
type: string
|
||||
x-enum-varnames:
|
||||
- MaintenanceFilterStatusScheduled
|
||||
- MaintenanceFilterStatusCompleted
|
||||
- MaintenanceFilterStatusBoth
|
||||
repo.NotifierCreate:
|
||||
properties:
|
||||
isActive:
|
||||
@@ -520,6 +548,8 @@ definitions:
|
||||
type: string
|
||||
updatedAt:
|
||||
type: string
|
||||
url:
|
||||
type: string
|
||||
userId:
|
||||
type: string
|
||||
type: object
|
||||
@@ -620,11 +650,6 @@ definitions:
|
||||
value:
|
||||
type: number
|
||||
type: object
|
||||
services.PasswordResetRequest:
|
||||
properties:
|
||||
email:
|
||||
type: string
|
||||
type: object
|
||||
services.UserRegistration:
|
||||
properties:
|
||||
email:
|
||||
@@ -1222,18 +1247,32 @@ paths:
|
||||
- Items Attachments
|
||||
/v1/items/{id}/maintenance:
|
||||
get:
|
||||
parameters:
|
||||
- enum:
|
||||
- scheduled
|
||||
- completed
|
||||
- both
|
||||
in: query
|
||||
name: status
|
||||
type: string
|
||||
x-enum-varnames:
|
||||
- MaintenanceFilterStatusScheduled
|
||||
- MaintenanceFilterStatusCompleted
|
||||
- MaintenanceFilterStatusBoth
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/repo.MaintenanceLog'
|
||||
items:
|
||||
$ref: '#/definitions/repo.MaintenanceEntryWithDetails'
|
||||
type: array
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Get Maintenance Log
|
||||
tags:
|
||||
- Maintenance
|
||||
- Item Maintenance
|
||||
post:
|
||||
parameters:
|
||||
- description: Entry Data
|
||||
@@ -1253,39 +1292,7 @@ paths:
|
||||
- Bearer: []
|
||||
summary: Create Maintenance Entry
|
||||
tags:
|
||||
- Maintenance
|
||||
/v1/items/{id}/maintenance/{entry_id}:
|
||||
delete:
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Delete Maintenance Entry
|
||||
tags:
|
||||
- Maintenance
|
||||
put:
|
||||
parameters:
|
||||
- description: Entry Data
|
||||
in: body
|
||||
name: payload
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/definitions/repo.MaintenanceEntryUpdate'
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/repo.MaintenanceEntry'
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Update Maintenance Entry
|
||||
tags:
|
||||
- Maintenance
|
||||
- Item Maintenance
|
||||
/v1/items/{id}/path:
|
||||
get:
|
||||
parameters:
|
||||
@@ -1586,6 +1593,66 @@ paths:
|
||||
summary: Get Locations Tree
|
||||
tags:
|
||||
- Locations
|
||||
/v1/maintenance:
|
||||
get:
|
||||
parameters:
|
||||
- enum:
|
||||
- scheduled
|
||||
- completed
|
||||
- both
|
||||
in: query
|
||||
name: status
|
||||
type: string
|
||||
x-enum-varnames:
|
||||
- MaintenanceFilterStatusScheduled
|
||||
- MaintenanceFilterStatusCompleted
|
||||
- MaintenanceFilterStatusBoth
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
items:
|
||||
$ref: '#/definitions/repo.MaintenanceEntryWithDetails'
|
||||
type: array
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Query All Maintenance
|
||||
tags:
|
||||
- Maintenance
|
||||
/v1/maintenance/{id}:
|
||||
delete:
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Delete Maintenance Entry
|
||||
tags:
|
||||
- Maintenance
|
||||
put:
|
||||
parameters:
|
||||
- description: Entry Data
|
||||
in: body
|
||||
name: payload
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/definitions/repo.MaintenanceEntryUpdate'
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/repo.MaintenanceEntry'
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Update Maintenance Entry
|
||||
tags:
|
||||
- Maintenance
|
||||
/v1/notifiers:
|
||||
get:
|
||||
produces:
|
||||
@@ -1822,23 +1889,6 @@ paths:
|
||||
summary: Register New User
|
||||
tags:
|
||||
- User
|
||||
/v1/users/request-password-reset:
|
||||
post:
|
||||
parameters:
|
||||
- description: User Data
|
||||
in: body
|
||||
name: payload
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/definitions/services.PasswordResetRequest'
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
summary: Request Password Reset
|
||||
tags:
|
||||
- User
|
||||
/v1/users/self:
|
||||
delete:
|
||||
produces:
|
||||
|
||||
@@ -6,7 +6,7 @@ import (
"log"
"os"

"github.com/hay-kot/homebox/backend/internal/data/ent/migrate"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/migrate"

atlas "ariga.io/atlas/sql/migrate"
_ "ariga.io/atlas/sql/sqlite"
@@ -71,7 +71,7 @@ func main() {
text = replace.Regex.ReplaceAllString(text, replace.Text)
}

err = os.WriteFile(path, []byte(text), 0o644)
err = os.WriteFile(path, []byte(text), 0644)
if err != nil {
fmt.Println(err)
os.Exit(1)
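The only change in this hunk is the file-mode literal passed to os.WriteFile: 0o644 and 0644 are the same octal value (0o is the explicit octal prefix added in Go 1.13), so the write behaves identically either way. A trivial check:

```go
package main

import "fmt"

func main() {
	fmt.Println(0o644 == 0644) // true: both are octal 644 (rw-r--r--)
}
```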
@@ -1,32 +1,29 @@
module github.com/hay-kot/homebox/backend
module github.com/sysadminsmedia/homebox/backend

go 1.22

toolchain go1.22.0
go 1.23.0

require (
ariga.io/atlas v0.19.1
entgo.io/ent v0.12.5
github.com/ardanlabs/conf/v3 v3.1.7
entgo.io/ent v0.14.1
github.com/ardanlabs/conf/v3 v3.1.8
github.com/containrrr/shoutrrr v0.8.0
github.com/go-chi/chi/v5 v5.0.12
github.com/go-playground/validator/v10 v10.18.0
github.com/gocarina/gocsv v0.0.0-20231116093920-b87c2d0e983a
github.com/go-chi/chi/v5 v5.1.0
github.com/go-playground/validator/v10 v10.22.1
github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1
github.com/google/uuid v1.6.0
github.com/gorilla/schema v1.2.1
github.com/hay-kot/easyemails v0.0.0-20240206011027-25232fb79aa3
github.com/hay-kot/httpkit v0.0.9
github.com/mattn/go-sqlite3 v1.14.22
github.com/olahol/melody v1.1.4
github.com/gorilla/schema v1.4.1
github.com/hay-kot/httpkit v0.0.11
github.com/mattn/go-sqlite3 v1.14.24
github.com/olahol/melody v1.2.1
github.com/pkg/errors v0.9.1
github.com/rs/zerolog v1.32.0
github.com/stretchr/testify v1.8.4
github.com/rs/zerolog v1.33.0
github.com/stretchr/testify v1.9.0
github.com/swaggo/http-swagger/v2 v2.0.2
github.com/swaggo/swag v1.16.3
github.com/yeqown/go-qrcode/v2 v2.2.2
github.com/yeqown/go-qrcode/writer/standard v1.2.2
golang.org/x/crypto v0.19.0
modernc.org/sqlite v1.29.2
github.com/yeqown/go-qrcode/v2 v2.2.4
github.com/yeqown/go-qrcode/writer/standard v1.2.4
golang.org/x/crypto v0.28.0
modernc.org/sqlite v1.33.1
)

require (
@@ -63,17 +60,17 @@ require (
github.com/swaggo/files/v2 v2.0.0 // indirect
github.com/yeqown/reedsolomon v1.0.0 // indirect
github.com/zclconf/go-cty v1.14.1 // indirect
golang.org/x/image v0.14.0 // indirect
golang.org/x/mod v0.15.0 // indirect
golang.org/x/net v0.21.0 // indirect
golang.org/x/sys v0.17.0 // indirect
golang.org/x/text v0.14.0 // indirect
golang.org/x/tools v0.17.0 // indirect
golang.org/x/image v0.18.0 // indirect
golang.org/x/mod v0.20.0 // indirect
golang.org/x/net v0.28.0 // indirect
golang.org/x/sys v0.26.0 // indirect
golang.org/x/text v0.19.0 // indirect
golang.org/x/tools v0.24.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6 // indirect
modernc.org/libc v1.41.0 // indirect
modernc.org/libc v1.55.3 // indirect
modernc.org/mathutil v1.6.0 // indirect
modernc.org/memory v1.7.2 // indirect
modernc.org/memory v1.8.0 // indirect
modernc.org/strutil v1.2.0 // indirect
modernc.org/token v1.1.0 // indirect
)
backend/go.sum (133 changed lines)
@@ -1,7 +1,11 @@
|
||||
ariga.io/atlas v0.19.1 h1:QzBHkakwzEhmPWOzNhw8Yr/Bbicj6Iq5hwEoNI/Jr9A=
|
||||
ariga.io/atlas v0.19.1/go.mod h1:VPlcXdd4w2KqKnH54yEZcry79UAhpaWaxEsmn5JRNoE=
|
||||
ariga.io/atlas v0.28.0 h1:qmn9tUyJypJkIw+X3ECUwDtkMTiFupgstHbjRN4xGH0=
|
||||
ariga.io/atlas v0.28.0/go.mod h1:LOOp18LCL9r+VifvVlJqgYJwYl271rrXD9/wIyzJ8sw=
|
||||
entgo.io/ent v0.12.5 h1:KREM5E4CSoej4zeGa88Ou/gfturAnpUv0mzAjch1sj4=
|
||||
entgo.io/ent v0.12.5/go.mod h1:Y3JVAjtlIk8xVZYSn3t3mf8xlZIn5SAOXZQxD6kKI+Q=
|
||||
entgo.io/ent v0.14.1 h1:fUERL506Pqr92EPHJqr8EYxbPioflJo6PudkrEA8a/s=
|
||||
entgo.io/ent v0.14.1/go.mod h1:MH6XLG0KXpkcDQhKiHfANZSzR55TJyPL5IGNpI8wpco=
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
|
||||
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
|
||||
@@ -10,10 +14,8 @@ github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7l
|
||||
github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
|
||||
github.com/ardanlabs/conf/v3 v3.1.7 h1:p232cF68TafoA5U9ZlbxUIhGJtGNdKHBXF80Fdqb5t0=
|
||||
github.com/ardanlabs/conf/v3 v3.1.7/go.mod h1:zclexWKe0NVj6LHQ8NgDDZ7bQ1spE0KeKPFficdtAjU=
|
||||
github.com/bradleyjkemp/cupaloy v2.3.0+incompatible h1:UafIjBvWQmS9i/xRg+CamMrnLTKNzo+bdmT/oH34c2Y=
|
||||
github.com/bradleyjkemp/cupaloy v2.3.0+incompatible/go.mod h1:Au1Xw1sgaJ5iSFktEhYsS0dbQiS1B0/XMXl+42y9Ilk=
|
||||
github.com/ardanlabs/conf/v3 v3.1.8 h1:r0KUV9/Hni5XdeWR2+A1BiedIDnry5CjezoqgJ0rnFQ=
|
||||
github.com/ardanlabs/conf/v3 v3.1.8/go.mod h1:OIi6NK95fj8jKFPdZ/UmcPlY37JBg99hdP9o5XmNK9c=
|
||||
github.com/containrrr/shoutrrr v0.8.0 h1:mfG2ATzIS7NR2Ec6XL+xyoHzN97H8WPjir8aYzJUSec=
|
||||
github.com/containrrr/shoutrrr v0.8.0/go.mod h1:ioyQAyu1LJY6sILuNyKaQaw+9Ttik5QePU8atnAdO2o=
|
||||
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
|
||||
@@ -29,8 +31,8 @@ github.com/fogleman/gg v1.3.0 h1:/7zJX8F6AaYQc57WQCyN9cAIz+4bCJGO9B+dyW29am8=
|
||||
github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
|
||||
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
|
||||
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
|
||||
github.com/go-chi/chi/v5 v5.0.12 h1:9euLV5sTrTNTRUU9POmDUvfxyj6LAABLUcEWO+JJb4s=
|
||||
github.com/go-chi/chi/v5 v5.0.12/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
|
||||
github.com/go-chi/chi/v5 v5.1.0 h1:acVI1TYaD+hhedDJ3r54HyA6sExp3HfXq7QWEEY/xMw=
|
||||
github.com/go-chi/chi/v5 v5.1.0/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
|
||||
github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0=
|
||||
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4=
|
||||
@@ -56,38 +58,37 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o
|
||||
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
||||
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.18.0 h1:BvolUXjp4zuvkZ5YN5t7ebzbhlUtPsPm2S9NAZ5nl9U=
|
||||
github.com/go-playground/validator/v10 v10.18.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
|
||||
github.com/go-playground/validator/v10 v10.22.0 h1:k6HsTZ0sTnROkhS//R0O+55JgM8C4Bx7ia+JlgcnOao=
|
||||
github.com/go-playground/validator/v10 v10.22.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
|
||||
github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA=
|
||||
github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
|
||||
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
|
||||
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
|
||||
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
|
||||
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
|
||||
github.com/gocarina/gocsv v0.0.0-20231116093920-b87c2d0e983a h1:RYfmiM0zluBJOiPDJseKLEN4BapJ42uSi9SZBQ2YyiA=
|
||||
github.com/gocarina/gocsv v0.0.0-20231116093920-b87c2d0e983a/go.mod h1:5YoVOkjYAQumqlV356Hj3xeYh4BdZuLE0/nRkf2NKkI=
|
||||
github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1 h1:FWNFq4fM1wPfcK40yHE5UO3RUdSNPaBC+j3PokzA6OQ=
|
||||
github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1/go.mod h1:5YoVOkjYAQumqlV356Hj3xeYh4BdZuLE0/nRkf2NKkI=
|
||||
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g=
|
||||
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
|
||||
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
|
||||
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
|
||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ=
|
||||
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo=
|
||||
github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd h1:gbpYu9NMq8jhDVbvlGkMFWCjLFlqqEZjEmObmhUy6Vo=
|
||||
github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/gorilla/schema v1.2.1 h1:tjDxcmdb+siIqkTNoV+qRH2mjYdr2hHe5MKXbp61ziM=
|
||||
github.com/gorilla/schema v1.2.1/go.mod h1:Dg5SSm5PV60mhF2NFaTV1xuYYj8tV8NOPRo4FggUMnM=
|
||||
github.com/gorilla/schema v1.4.1 h1:jUg5hUjCSDZpNGLuXQOgIWGdlgrIdYvgQ0wZtdK1M3E=
|
||||
github.com/gorilla/schema v1.4.1/go.mod h1:Dg5SSm5PV60mhF2NFaTV1xuYYj8tV8NOPRo4FggUMnM=
|
||||
github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
|
||||
github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
||||
github.com/hashicorp/hcl/v2 v2.19.1 h1://i05Jqznmb2EXqa39Nsvyan2o5XyMowW5fnCKW5RPI=
|
||||
github.com/hashicorp/hcl/v2 v2.19.1/go.mod h1:ThLC89FV4p9MPW804KVbe/cEXoQ8NZEh+JtMeeGErHE=
|
||||
github.com/hay-kot/easyemails v0.0.0-20240206011027-25232fb79aa3 h1:EljujAOvukSS+sh8e883j4vhEiYG4EvJFAhl+XvUYe8=
|
||||
github.com/hay-kot/easyemails v0.0.0-20240206011027-25232fb79aa3/go.mod h1:SZdhYMO2ZmEuTTDE7pHn0WanXhwteniOCYsQ3B5IT/o=
|
||||
github.com/hay-kot/httpkit v0.0.9 h1:hu2TPY9awmIYWXxWGubaXl2U61pPvaVsm9YwboBRGu0=
|
||||
github.com/hay-kot/httpkit v0.0.9/go.mod h1:AD22YluZrvBDxmtB3Pw2SOyp3A2PZqcmBZa0+COrhoU=
|
||||
github.com/hay-kot/httpkit v0.0.11 h1:ZdB2uqsFBSDpfUoClGK5c5orjBjQkEVSXh7fZX5FKEk=
|
||||
github.com/hay-kot/httpkit v0.0.11/go.mod h1:0kZdk5/swzdfqfg2c6pBWimcgeJ9PTyO97EbHnYl2Sw=
|
||||
github.com/jarcoal/httpmock v1.3.0 h1:2RJ8GP0IIaWwcC9Fp2BmVi8Kog3v2Hn7VXM3fTd+nuc=
|
||||
github.com/jarcoal/httpmock v1.3.0/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg=
|
||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||
@@ -115,15 +116,21 @@ github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/
|
||||
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
||||
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
|
||||
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||
github.com/mattn/go-sqlite3 v1.14.23 h1:gbShiuAP1W5j9UOksQ06aiiqPMxYecovVGwmTxWtuw0=
|
||||
github.com/mattn/go-sqlite3 v1.14.23/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
|
||||
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
|
||||
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
|
||||
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
|
||||
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/olahol/melody v1.1.4 h1:RQHfKZkQmDxI0+SLZRNBCn4LiXdqxLKRGSkT8Dyoe/E=
|
||||
github.com/olahol/melody v1.1.4/go.mod h1:GgkTl6Y7yWj/HtfD48Q5vLKPVoZOH+Qqgfa7CvJgJM4=
|
||||
github.com/olahol/melody v1.2.1 h1:xdwRkzHxf+B0w4TKbGpUSSkV516ZucQZJIWLztOWICQ=
|
||||
github.com/olahol/melody v1.2.1/go.mod h1:GgkTl6Y7yWj/HtfD48Q5vLKPVoZOH+Qqgfa7CvJgJM4=
|
||||
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
|
||||
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
|
||||
github.com/onsi/ginkgo/v2 v2.9.2 h1:BA2GMJOtfGAfagzYtrAlufIP0lq6QERkFmHLMLPwFSU=
|
||||
github.com/onsi/ginkgo/v2 v2.9.2/go.mod h1:WHcJJG2dIlcCqVfBAwUCrJxSPFb6v4azBwgxeMeDuts=
|
||||
github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE=
|
||||
@@ -137,10 +144,14 @@ github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qq
|
||||
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
|
||||
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
|
||||
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
|
||||
github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0=
|
||||
github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
|
||||
github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8=
|
||||
github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
|
||||
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
|
||||
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
||||
github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
|
||||
github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
|
||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
@@ -149,39 +160,55 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/swaggo/files/v2 v2.0.0 h1:hmAt8Dkynw7Ssz46F6pn8ok6YmGZqHSVLZ+HQM7i0kw=
|
||||
github.com/swaggo/files/v2 v2.0.0/go.mod h1:24kk2Y9NYEJ5lHuCra6iVwkMjIekMCaFq/0JQj66kyM=
|
||||
github.com/swaggo/http-swagger/v2 v2.0.2 h1:FKCdLsl+sFCx60KFsyM0rDarwiUSZ8DqbfSyIKC9OBg=
|
||||
github.com/swaggo/http-swagger/v2 v2.0.2/go.mod h1:r7/GBkAWIfK6E/OLnE8fXnviHiDeAHmgIyooa4xm3AQ=
|
||||
github.com/swaggo/swag v1.16.3 h1:PnCYjPCah8FK4I26l2F/KQ4yz3sILcVUN3cTlBFA9Pg=
|
||||
github.com/swaggo/swag v1.16.3/go.mod h1:DImHIuOFXKpMFAQjcC7FG4m3Dg4+QuUgUzJmKjI/gRk=
|
||||
github.com/yeqown/go-qrcode/v2 v2.2.2 h1:0comk6jEwi0oWNhKEmzx4JI+Q7XIneAApmFSMKWmSVc=
|
||||
github.com/yeqown/go-qrcode/v2 v2.2.2/go.mod h1:2Qsk2APUCPne0TsRo40DIkI5MYnbzYKCnKGEFWrxd24=
|
||||
github.com/yeqown/go-qrcode/writer/standard v1.2.2 h1:gyzunKXgC0ZUpKqQFUImbAEwewAiwNCkxFEKZV80Kt4=
|
||||
github.com/yeqown/go-qrcode/writer/standard v1.2.2/go.mod h1:bbVRiBJSRPj4UBZP/biLG7JSd9kHqXjErk1eakAMnRA=
|
||||
github.com/yeqown/go-qrcode/v2 v2.2.4 h1:cXdYlrhzHzVAnJHiwr/T6lAUmS9MtEStjEZBjArrvnc=
|
||||
github.com/yeqown/go-qrcode/v2 v2.2.4/go.mod h1:uHpt9CM0V1HeXLz+Wg5MN50/sI/fQhfkZlOM+cOTHxw=
|
||||
github.com/yeqown/go-qrcode/writer/standard v1.2.4 h1:41e/aLr1AMVWlug6oUMkDg2r0+dv5ofB7UaTkekKZBc=
|
||||
github.com/yeqown/go-qrcode/writer/standard v1.2.4/go.mod h1:H8nLSGYUWBpNyBPjDcJzAanMzYBBYMFtrU2lwoSRn+k=
|
||||
github.com/yeqown/reedsolomon v1.0.0 h1:x1h/Ej/uJnNu8jaX7GLHBWmZKCAWjEJTetkqaabr4B0=
|
||||
github.com/yeqown/reedsolomon v1.0.0/go.mod h1:P76zpcn2TCuL0ul1Fso373qHRc69LKwAw/Iy6g1WiiM=
|
||||
github.com/zclconf/go-cty v1.14.1 h1:t9fyA35fwjjUMcmL5hLER+e/rEPqrbCK1/OSE4SI9KA=
|
||||
github.com/zclconf/go-cty v1.14.1/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE=
|
||||
golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo=
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
golang.org/x/image v0.14.0 h1:tNgSxAFe3jC4uYqvZdTr84SZoM1KfwdC9SKIFrLjFn4=
|
||||
golang.org/x/image v0.14.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE=
|
||||
golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8=
|
||||
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
|
||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||
github.com/zclconf/go-cty v1.14.4 h1:uXXczd9QDGsgu0i/QFR/hzI5NYCHLf6NQw/atrbnhq8=
|
||||
github.com/zclconf/go-cty v1.14.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE=
|
||||
golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A=
|
||||
golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70=
|
||||
golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw=
|
||||
golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U=
|
||||
golang.org/x/image v0.18.0 h1:jGzIakQa/ZXI1I0Fxvaa9W7yP25TqT6cHIHn+6CqvSQ=
|
||||
golang.org/x/image v0.18.0/go.mod h1:4yyo5vMFQjVjUcVk4jEQcU9MGy/rulF5WvUILseCM2E=
|
||||
golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0=
|
||||
golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE=
|
||||
golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
|
||||
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
|
||||
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc=
|
||||
golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps=
|
||||
golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
|
||||
golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
|
||||
golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
|
||||
golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
|
||||
golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
|
||||
golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24=
|
||||
golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ=
|
||||
google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
|
||||
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
@@ -196,16 +223,20 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C
|
||||
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE=
|
||||
modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ=
|
||||
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6 h1:5D53IMaUuA5InSeMu9eJtlQXS2NxAhyWQvkKEgXZhHI=
|
||||
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6/go.mod h1:Qz0X07sNOR1jWYCrJMEnbW/X55x206Q7Vt4mz6/wHp4=
|
||||
modernc.org/libc v1.41.0 h1:g9YAc6BkKlgORsUWj+JwqoB1wU3o4DE3bM3yvA3k+Gk=
|
||||
modernc.org/libc v1.41.0/go.mod h1:w0eszPsiXoOnoMJgrXjglgLuDy/bt5RR4y3QzUUeodY=
|
||||
modernc.org/libc v1.55.3 h1:AzcW1mhlPNrRtjS5sS+eW2ISCgSOLLNyFzRh/V3Qj/U=
|
||||
modernc.org/libc v1.55.3/go.mod h1:qFXepLhz+JjFThQ4kzwzOjA/y/artDeg+pcYnY+Q83w=
|
||||
modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4=
|
||||
modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo=
|
||||
modernc.org/memory v1.7.2 h1:Klh90S215mmH8c9gO98QxQFsY+W451E8AnzjoE2ee1E=
|
||||
modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E=
|
||||
modernc.org/sqlite v1.29.2 h1:xgBSyA3gemwgP31PWFfFjtBorQNYpeypGdoSDjXhrgI=
|
||||
modernc.org/sqlite v1.29.2/go.mod h1:hG41jCYxOAOoO6BRK66AdRlmOcDzXf7qnwlwjUIOqa0=
|
||||
modernc.org/memory v1.8.0 h1:IqGTL6eFMaDZZhEWwcREgeMXYwmW83LYW8cROZYkg+E=
|
||||
modernc.org/memory v1.8.0/go.mod h1:XPZ936zp5OMKGWPqbD3JShgd/ZoQ7899TUuQqxY+peU=
|
||||
modernc.org/sqlite v1.33.0 h1:WWkA/T2G17okiLGgKAj4/RMIvgyMT19yQ038160IeYk=
|
||||
modernc.org/sqlite v1.33.0/go.mod h1:9uQ9hF/pCZoYZK73D/ud5Z7cIRIILSZI8NdIemVMTX8=
|
||||
modernc.org/sqlite v1.33.1 h1:trb6Z3YYoeM9eDL1O8do81kP+0ejv+YzgyFo+Gwy0nM=
|
||||
modernc.org/sqlite v1.33.1/go.mod h1:pXV2xHxhzXZsgT/RtTFAPY6JJDEvOTcTdwADQCCWD4k=
|
||||
modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA=
|
||||
modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0=
|
||||
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
|
||||
|
||||
File diff suppressed because it is too large
@@ -2,9 +2,8 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"github.com/hay-kot/homebox/backend/internal/core/currencies"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/pkgs/mailer"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
type AllServices struct {
|
||||
@@ -34,7 +33,7 @@ func WithCurrencies(v []currencies.Currency) func(*options) {
|
||||
}
|
||||
}
|
||||
|
||||
func New(repos *repo.AllRepos, baseurl string, sender *mailer.Mailer, opts ...OptionsFunc) *AllServices {
func New(repos *repo.AllRepos, opts ...OptionsFunc) *AllServices {
if repos == nil {
panic("repos cannot be nil")
}
@@ -56,11 +55,7 @@ func New(repos *repo.AllRepos, baseurl string, sender *mailer.Mailer, opts ...Op
}

return &AllServices{
User: &UserService{
repos: repos,
mailer: sender,
baseurl: baseurl,
},
User: &UserService{repos},
Group: &GroupService{repos},
Items: &ItemService{
repo: repos,
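For reference, a minimal sketch of wiring up the service layer after this constructor change, based on the call shown in the test bootstrap further down (New(tRepos, WithCurrencies(defaults))). The buildServices helper is hypothetical; repos and defaults are assumed to come from the caller's existing startup code.

import (
	"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
	"github.com/sysadminsmedia/homebox/backend/internal/core/services"
	"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
)

// buildServices is a hypothetical helper; repos and defaults are assumed to be
// constructed by the caller (e.g. during application startup).
func buildServices(repos *repo.AllRepos, defaults []currencies.Currency) *services.AllServices {
	// The base URL and mailer are no longer constructor arguments after this change.
	return services.New(repos, services.WithCurrencies(defaults))
}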
@@ -4,7 +4,7 @@ import (
|
||||
"context"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
type contextKeys struct {
|
||||
@@ -19,11 +19,11 @@ var (
|
||||
type Context struct {
|
||||
context.Context
|
||||
|
||||
// UserID is a unique identifier for the acting user.
|
||||
UserID uuid.UUID
|
||||
// UID is a unique identifier for the acting user.
|
||||
UID uuid.UUID
|
||||
|
||||
// GroupID is a unique identifier for the acting user's group.
GroupID uuid.UUID
// GID is a unique identifier for the acting user's group.
GID uuid.UUID
|
||||
|
||||
// User is the acting user.
|
||||
User *repo.UserOut
|
||||
@@ -35,8 +35,8 @@ func NewContext(ctx context.Context) Context {
|
||||
user := UseUserCtx(ctx)
|
||||
return Context{
|
||||
Context: ctx,
|
||||
UserID: user.ID,
|
||||
GroupID: user.GroupID,
|
||||
UID: user.ID,
|
||||
GID: user.GroupID,
|
||||
User: user,
|
||||
}
|
||||
}
|
||||
|
||||
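A small sketch of how the renamed fields read at a call site. The nextAssetID helper is illustrative and assumes the auth middleware has already attached a user to ctx; only the field names UID/GID change relative to the old UserID/GroupID.

// nextAssetID is an illustrative helper; it assumes an authenticated user has
// already been attached to ctx by the auth middleware.
func nextAssetID(ctx context.Context, repos *repo.AllRepos) (repo.AssetID, error) {
	c := services.NewContext(ctx) // populates c.UID and c.GID from the user on ctx
	// Call sites now read c.GID / c.UID instead of the old c.GroupID / c.UserID.
	return repos.Items.GetHighestAssetID(c, c.GID)
}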
@@ -5,8 +5,8 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
func Test_SetAuthContext(t *testing.T) {
|
||||
|
||||
@@ -6,13 +6,12 @@ import (
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/core/currencies"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/pkgs/faker"
|
||||
"github.com/hay-kot/homebox/backend/pkgs/mailer"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/pkgs/faker"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -50,7 +49,7 @@ func bootstrap() {
|
||||
}
|
||||
}
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
func MainNoExit(m *testing.M) int {
|
||||
client, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1")
|
||||
if err != nil {
|
||||
log.Fatalf("failed opening connection to sqlite: %v", err)
|
||||
@@ -68,16 +67,19 @@ func TestMain(m *testing.M) {
|
||||
currencies.CollectDefaults(),
|
||||
)
|
||||
|
||||
tSvc = New(tRepos, "", &mailer.Mailer{}, WithCurrencies(defaults))
|
||||
tSvc = New(tRepos, WithCurrencies(defaults))
|
||||
defer func() { _ = client.Close() }()
|
||||
|
||||
bootstrap()
|
||||
tCtx = Context{
|
||||
Context: context.Background(),
|
||||
GroupID: tGroup.ID,
|
||||
UserID: tUser.ID,
|
||||
GID: tGroup.ID,
|
||||
UID: tUser.ID,
|
||||
}
|
||||
|
||||
exit := m.Run()
|
||||
_ = client.Close()
|
||||
os.Exit(exit)
|
||||
return m.Run()
|
||||
}
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
os.Exit(MainNoExit(m))
|
||||
}
|
||||
|
||||
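The wrapper exists because os.Exit skips deferred calls; returning the exit code from a helper lets the deferred client.Close above run before the process exits. A generic sketch of the pattern, with purely illustrative setup names:

func runTests(m *testing.M) int {
	db, err := openTestDB() // hypothetical setup that needs closing
	if err != nil {
		log.Fatalf("setup failed: %v", err)
	}
	defer db.Close() // runs when runTests returns, before os.Exit is reached
	return m.Run()
}

func TestMain(m *testing.M) {
	os.Exit(runTests(m))
}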
@@ -2,8 +2,8 @@ package reporting
|
||||
|
||||
import (
|
||||
"github.com/gocarina/gocsv"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/types"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
|
||||
)
|
||||
|
||||
// =================================================================================================
|
||||
@@ -20,9 +20,9 @@ type BillOfMaterialsEntry struct {
|
||||
TotalPrice float64 `csv:"Total Price"`
|
||||
}
|
||||
|
||||
// BillOfMaterialsTSV returns a byte slice of the Bill of Materials for a given GID in TSV format
|
||||
// BillOfMaterialsCSV returns a byte slice of the Bill of Materials for a given GID in CSV format
|
||||
// See BillOfMaterialsEntry for the format of the output
|
||||
func BillOfMaterialsTSV(entities []repo.ItemOut) ([]byte, error) {
|
||||
func BillOfMaterialsCSV(entities []repo.ItemOut) ([]byte, error) {
|
||||
bomEntries := make([]BillOfMaterialsEntry, len(entities))
|
||||
for i, entity := range entities {
|
||||
bomEntries[i] = BillOfMaterialsEntry{
|
||||
|
||||
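A hedged usage sketch for the renamed export helper; the caller, items slice, and output path are assumptions.

// writeBOM is an illustrative caller; items would normally come from
// repos.Items.GetAll, and the output path is an assumption.
func writeBOM(items []repo.ItemOut) error {
	data, err := reporting.BillOfMaterialsCSV(items) // renamed from BillOfMaterialsTSV
	if err != nil {
		return err
	}
	return os.WriteFile("bill-of-materials.csv", data, 0o644)
}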
@@ -3,8 +3,8 @@ package reporting
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/types"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
|
||||
)
|
||||
|
||||
type ExportItemFields struct {
|
||||
@@ -12,12 +12,13 @@ type ExportItemFields struct {
|
||||
Value string
|
||||
}
|
||||
|
||||
type ExportTSVRow struct {
|
||||
type ExportCSVRow struct {
|
||||
ImportRef string `csv:"HB.import_ref"`
|
||||
Location LocationString `csv:"HB.location"`
|
||||
LabelStr LabelString `csv:"HB.labels"`
|
||||
AssetID repo.AssetID `csv:"HB.asset_id"`
|
||||
Archived bool `csv:"HB.archived"`
|
||||
URL string `csv:"HB.url"`
|
||||
|
||||
Name string `csv:"HB.name"`
|
||||
Quantity int `csv:"HB.quantity"`
|
||||
|
||||
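For orientation, the csv struct tags above translate into header names roughly like the following; a real export also carries the purchase/sold columns and any HB.field.* custom columns, so this header row is only a partial illustration.

HB.import_ref,HB.location,HB.labels,HB.asset_id,HB.archived,HB.url,HB.name,HB.quantity,...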
@@ -10,21 +10,21 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/types"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
|
||||
)
|
||||
|
||||
// IOSheet is the representation of a CSV/TSV sheet that is used for importing/exporting
|
||||
// items from homebox. It is used to read/write the data from/to a CSV/TSV file given
|
||||
// the standard format of the file.
|
||||
//
|
||||
// See ExportTSVRow for the format of the data in the sheet.
|
||||
// See ExportCSVRow for the format of the data in the sheet.
|
||||
type IOSheet struct {
|
||||
headers []string
|
||||
custom []int
|
||||
index map[string]int
|
||||
Rows []ExportTSVRow
|
||||
Rows []ExportCSVRow
|
||||
}
|
||||
|
||||
func (s *IOSheet) indexHeaders() {
|
||||
@@ -70,16 +70,16 @@ func (s *IOSheet) Read(data io.Reader) error {
|
||||
}
|
||||
|
||||
s.headers = sheet[0]
|
||||
s.Rows = make([]ExportTSVRow, len(sheet)-1)
|
||||
s.Rows = make([]ExportCSVRow, len(sheet)-1)
|
||||
|
||||
for i, row := range sheet[1:] {
|
||||
if len(row) != len(s.headers) {
|
||||
return fmt.Errorf("row has %d columns, expected %d", len(row), len(s.headers))
|
||||
}
|
||||
|
||||
rowData := ExportTSVRow{}
|
||||
rowData := ExportCSVRow{}
|
||||
|
||||
st := reflect.TypeOf(ExportTSVRow{})
|
||||
st := reflect.TypeOf(ExportCSVRow{})
|
||||
|
||||
for i := 0; i < st.NumField(); i++ {
|
||||
field := st.Field(i)
|
||||
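A brief sketch of reading an import file into an IOSheet; the readSheet helper is hypothetical and csvData stands in for an uploaded CSV or TSV file.

// readSheet is a sketch; csvData stands in for an uploaded CSV or TSV file.
func readSheet(csvData []byte) (*reporting.IOSheet, error) {
	sheet := &reporting.IOSheet{}
	// Read parses the header row and fills sheet.Rows with ExportCSVRow values.
	if err := sheet.Read(bytes.NewReader(csvData)); err != nil {
		return nil, err
	}
	return sheet, nil
}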
@@ -153,8 +153,8 @@ func (s *IOSheet) Read(data io.Reader) error {
|
||||
}
|
||||
|
||||
// ReadItems writes the given items into the sheet's rows and headers.
|
||||
func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, groupID uuid.UUID, repos *repo.AllRepos) error {
|
||||
s.Rows = make([]ExportTSVRow, len(items))
|
||||
func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, gid uuid.UUID, repos *repo.AllRepos, hbURL string) error {
|
||||
s.Rows = make([]ExportCSVRow, len(items))
|
||||
|
||||
extraHeaders := map[string]struct{}{}
|
||||
|
||||
@@ -164,7 +164,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, groupID u
|
||||
// TODO: Support fetching nested locations
|
||||
locID := item.Location.ID
|
||||
|
||||
locPaths, err := repos.Locations.PathForLoc(context.Background(), groupID, locID)
|
||||
locPaths, err := repos.Locations.PathForLoc(context.Background(), gid, locID)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("could not get location path")
|
||||
return err
|
||||
@@ -178,6 +178,8 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, groupID u
|
||||
labelString[i] = l.Name
|
||||
}
|
||||
|
||||
url := generateItemURL(item, hbURL)
|
||||
|
||||
customFields := make([]ExportItemFields, len(item.Fields))
|
||||
|
||||
for i, f := range item.Fields {
|
||||
@@ -189,7 +191,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, groupID u
|
||||
}
|
||||
}
|
||||
|
||||
s.Rows[i] = ExportTSVRow{
|
||||
s.Rows[i] = ExportCSVRow{
|
||||
// fill struct
|
||||
Location: locString,
|
||||
LabelStr: labelString,
|
||||
@@ -201,6 +203,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, groupID u
|
||||
Description: item.Description,
|
||||
Insured: item.Insured,
|
||||
Archived: item.Archived,
|
||||
URL: url,
|
||||
|
||||
PurchasePrice: item.PurchasePrice,
|
||||
PurchaseFrom: item.PurchaseFrom,
|
||||
@@ -219,6 +222,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, groupID u
|
||||
SoldPrice: item.SoldPrice,
|
||||
SoldNotes: item.SoldNotes,
|
||||
|
||||
Notes: item.Notes,
|
||||
Fields: customFields,
|
||||
}
|
||||
}
|
||||
@@ -232,7 +236,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, groupID u
|
||||
|
||||
sort.Strings(customHeaders)
|
||||
|
||||
st := reflect.TypeOf(ExportTSVRow{})
|
||||
st := reflect.TypeOf(ExportCSVRow{})
|
||||
|
||||
// Write headers
|
||||
for i := 0; i < st.NumField(); i++ {
|
||||
@@ -252,8 +256,16 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, groupID u
|
||||
return nil
|
||||
}
|
||||
|
||||
// TSV writes the current sheet to a writer in TSV format.
|
||||
func (s *IOSheet) TSV() ([][]string, error) {
|
||||
func generateItemURL(item repo.ItemOut, d string) string {
|
||||
url := ""
|
||||
if item.ID != uuid.Nil {
|
||||
url = fmt.Sprintf("%s/item/%s", d, item.ID.String())
|
||||
}
|
||||
return url
|
||||
}
|
||||
|
||||
// CSV writes the current sheet to a 2d array, for compatibility with TSV/CSV files.
|
||||
func (s *IOSheet) CSV() ([][]string, error) {
|
||||
memcsv := make([][]string, len(s.Rows)+1)
|
||||
|
||||
memcsv[0] = s.headers
|
||||
|
||||
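Given the generateItemURL helper added above, the new HB.url column ends up holding values of the following shape; the host shown is an assumption for illustration.

// generateItemURL(item, "https://homebox.example.com")
// -> "https://homebox.example.com/item/0f8fad5b-d9cb-469f-a165-70867728950e"
// An item with a nil UUID yields an empty string, so the column stays blank.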
@@ -7,9 +7,9 @@ import (
|
||||
|
||||
_ "embed"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -27,13 +27,13 @@ func TestSheet_Read(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
data []byte
|
||||
want []ExportTSVRow
|
||||
want []ExportCSVRow
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "minimal import",
|
||||
data: minimalImportCSV,
|
||||
want: []ExportTSVRow{
|
||||
want: []ExportCSVRow{
|
||||
{Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1"},
|
||||
{Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2"},
|
||||
{Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3"},
|
||||
@@ -42,7 +42,7 @@ func TestSheet_Read(t *testing.T) {
|
||||
{
|
||||
name: "custom field import",
|
||||
data: customFieldImportCSV,
|
||||
want: []ExportTSVRow{
|
||||
want: []ExportCSVRow{
|
||||
{
|
||||
Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1",
|
||||
Fields: []ExportItemFields{
|
||||
@@ -72,7 +72,7 @@ func TestSheet_Read(t *testing.T) {
|
||||
{
|
||||
name: "custom types import",
|
||||
data: customTypesImportCSV,
|
||||
want: []ExportTSVRow{
|
||||
want: []ExportCSVRow{
|
||||
{
|
||||
Name: "Item 1",
|
||||
AssetID: repo.AssetID(1),
|
||||
|
||||
@@ -6,9 +6,9 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/containrrr/shoutrrr"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/types"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
|
||||
)
|
||||
|
||||
type BackgroundService struct {
|
||||
@@ -66,6 +66,7 @@ func (svc *BackgroundService) SendNotifiersToday(ctx context.Context) error {
|
||||
var sendErrs []error
|
||||
for i := range urls {
|
||||
err := shoutrrr.Send(urls[i], bldr.String())
|
||||
|
||||
if err != nil {
|
||||
sendErrs = append(sendErrs, err)
|
||||
}
|
||||
|
||||
@@ -4,8 +4,8 @@ import (
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/pkgs/hasher"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/pkgs/hasher"
|
||||
)
|
||||
|
||||
type GroupService struct {
|
||||
@@ -21,13 +21,13 @@ func (svc *GroupService) UpdateGroup(ctx Context, data repo.GroupUpdate) (repo.G
|
||||
return repo.Group{}, errors.New("currency cannot be empty")
|
||||
}
|
||||
|
||||
return svc.repos.Groups.GroupUpdate(ctx.Context, ctx.GroupID, data)
|
||||
return svc.repos.Groups.GroupUpdate(ctx.Context, ctx.GID, data)
|
||||
}
|
||||
|
||||
func (svc *GroupService) NewInvitation(ctx Context, uses int, expiresAt time.Time) (string, error) {
|
||||
token := hasher.GenerateToken()
|
||||
|
||||
_, err := svc.repos.Groups.InvitationCreate(ctx, ctx.GroupID, repo.GroupInvitationCreate{
|
||||
_, err := svc.repos.Groups.InvitationCreate(ctx, ctx.GID, repo.GroupInvitationCreate{
|
||||
Token: token.Hash,
|
||||
Uses: uses,
|
||||
ExpiresAt: expiresAt,
|
||||
|
||||
@@ -8,8 +8,8 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services/reporting"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -27,7 +27,7 @@ type ItemService struct {
|
||||
|
||||
func (svc *ItemService) Create(ctx Context, item repo.ItemCreate) (repo.ItemOut, error) {
|
||||
if svc.autoIncrementAssetID {
|
||||
highest, err := svc.repo.Items.GetHighestAssetID(ctx, ctx.GroupID)
|
||||
highest, err := svc.repo.Items.GetHighestAssetID(ctx, ctx.GID)
|
||||
if err != nil {
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
@@ -35,16 +35,16 @@ func (svc *ItemService) Create(ctx Context, item repo.ItemCreate) (repo.ItemOut,
|
||||
item.AssetID = highest + 1
|
||||
}
|
||||
|
||||
return svc.repo.Items.Create(ctx, ctx.GroupID, item)
|
||||
return svc.repo.Items.Create(ctx, ctx.GID, item)
|
||||
}
|
||||
|
||||
func (svc *ItemService) EnsureAssetID(ctx context.Context, groupID uuid.UUID) (int, error) {
|
||||
items, err := svc.repo.Items.GetAllZeroAssetID(ctx, groupID)
|
||||
func (svc *ItemService) EnsureAssetID(ctx context.Context, gid uuid.UUID) (int, error) {
|
||||
items, err := svc.repo.Items.GetAllZeroAssetID(ctx, gid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
highest, err := svc.repo.Items.GetHighestAssetID(ctx, groupID)
|
||||
highest, err := svc.repo.Items.GetHighestAssetID(ctx, gid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -53,7 +53,7 @@ func (svc *ItemService) EnsureAssetID(ctx context.Context, groupID uuid.UUID) (i
|
||||
for _, item := range items {
|
||||
highest++
|
||||
|
||||
err = svc.repo.Items.SetAssetID(ctx, groupID, item.ID, highest)
|
||||
err = svc.repo.Items.SetAssetID(ctx, gid, item.ID, highest)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -64,8 +64,8 @@ func (svc *ItemService) EnsureAssetID(ctx context.Context, groupID uuid.UUID) (i
|
||||
return finished, nil
|
||||
}
|
||||
|
||||
func (svc *ItemService) EnsureImportRef(ctx context.Context, groupID uuid.UUID) (int, error) {
|
||||
ids, err := svc.repo.Items.GetAllZeroImportRef(ctx, groupID)
|
||||
func (svc *ItemService) EnsureImportRef(ctx context.Context, gid uuid.UUID) (int, error) {
|
||||
ids, err := svc.repo.Items.GetAllZeroImportRef(ctx, gid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -74,7 +74,7 @@ func (svc *ItemService) EnsureImportRef(ctx context.Context, groupID uuid.UUID)
|
||||
for _, itemID := range ids {
|
||||
ref := uuid.New().String()[0:8]
|
||||
|
||||
err = svc.repo.Items.Patch(ctx, groupID, itemID, repo.ItemPatch{ImportRef: &ref})
|
||||
err = svc.repo.Items.Patch(ctx, gid, itemID, repo.ItemPatch{ImportRef: &ref})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -96,7 +96,7 @@ func serializeLocation[T ~[]string](location T) string {
// 1. If the item does not exist, it is created.
// 2. If the item has an ImportRef and it already exists, it is skipped.
// 3. Locations and Labels are created if they do not exist.
func (svc *ItemService) CsvImport(ctx context.Context, groupID uuid.UUID, data io.Reader) (int, error) {
func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data io.Reader) (int, error) {
sheet := reporting.IOSheet{}

err := sheet.Read(data)
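A hedged sketch of driving the importer from a caller; the importFile helper, file path, and group ID are assumptions.

// importFile is an illustrative caller; the path and group ID are assumptions.
func importFile(ctx context.Context, svc *services.AllServices, gid uuid.UUID, path string) (int, error) {
	f, err := os.Open(path)
	if err != nil {
		return 0, err
	}
	defer f.Close()
	// The returned count is the number of rows processed from the sheet.
	return svc.Items.CsvImport(ctx, gid, f)
}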
@@ -109,7 +109,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, groupID uuid.UUID, data i
|
||||
|
||||
labelMap := make(map[string]uuid.UUID)
|
||||
{
|
||||
labels, err := svc.repo.Labels.GetAll(ctx, groupID)
|
||||
labels, err := svc.repo.Labels.GetAll(ctx, gid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -124,7 +124,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, groupID uuid.UUID, data i
|
||||
|
||||
locationMap := make(map[string]uuid.UUID)
|
||||
{
|
||||
locations, err := svc.repo.Locations.Tree(ctx, groupID, repo.TreeQuery{WithItems: false})
|
||||
locations, err := svc.repo.Locations.Tree(ctx, gid, repo.TreeQuery{WithItems: false})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -153,7 +153,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, groupID uuid.UUID, data i
|
||||
// Asset ID Pre-Check
|
||||
highestAID := repo.AssetID(-1)
|
||||
if svc.autoIncrementAssetID {
|
||||
highestAID, err = svc.repo.Items.GetHighestAssetID(ctx, groupID)
|
||||
highestAID, err = svc.repo.Items.GetHighestAssetID(ctx, gid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -169,7 +169,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, groupID uuid.UUID, data i
|
||||
// ========================================
|
||||
// Preflight check for existing item
|
||||
if row.ImportRef != "" {
|
||||
exists, err := svc.repo.Items.CheckRef(ctx, groupID, row.ImportRef)
|
||||
exists, err := svc.repo.Items.CheckRef(ctx, gid, row.ImportRef)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("error checking for existing item with ref %q: %w", row.ImportRef, err)
|
||||
}
|
||||
@@ -188,7 +188,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, groupID uuid.UUID, data i
|
||||
|
||||
id, ok := labelMap[label]
|
||||
if !ok {
|
||||
newLabel, err := svc.repo.Labels.Create(ctx, groupID, repo.LabelCreate{Name: label})
|
||||
newLabel, err := svc.repo.Labels.Create(ctx, gid, repo.LabelCreate{Name: label})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -220,7 +220,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, groupID uuid.UUID, data i
|
||||
parentID = locationMap[parentPath]
|
||||
}
|
||||
|
||||
newLocation, err := svc.repo.Locations.Create(ctx, groupID, repo.LocationCreate{
|
||||
newLocation, err := svc.repo.Locations.Create(ctx, gid, repo.LocationCreate{
|
||||
ParentID: parentID,
|
||||
Name: pathElement,
|
||||
})
|
||||
@@ -261,12 +261,12 @@ func (svc *ItemService) CsvImport(ctx context.Context, groupID uuid.UUID, data i
|
||||
LabelIDs: labelIds,
|
||||
}
|
||||
|
||||
item, err = svc.repo.Items.Create(ctx, groupID, newItem)
|
||||
item, err = svc.repo.Items.Create(ctx, gid, newItem)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
default:
|
||||
item, err = svc.repo.Items.GetByRef(ctx, groupID, row.ImportRef)
|
||||
item, err = svc.repo.Items.GetByRef(ctx, gid, row.ImportRef)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -318,7 +318,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, groupID uuid.UUID, data i
|
||||
Fields: fields,
|
||||
}
|
||||
|
||||
item, err = svc.repo.Items.UpdateByGroup(ctx, groupID, updateItem)
|
||||
item, err = svc.repo.Items.UpdateByGroup(ctx, gid, updateItem)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -329,27 +329,27 @@ func (svc *ItemService) CsvImport(ctx context.Context, groupID uuid.UUID, data i
|
||||
return finished, nil
|
||||
}
|
||||
|
||||
func (svc *ItemService) ExportTSV(ctx context.Context, groupID uuid.UUID) ([][]string, error) {
|
||||
items, err := svc.repo.Items.GetAll(ctx, groupID)
|
||||
func (svc *ItemService) ExportCSV(ctx context.Context, gid uuid.UUID, hbURL string) ([][]string, error) {
|
||||
items, err := svc.repo.Items.GetAll(ctx, gid)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sheet := reporting.IOSheet{}
|
||||
|
||||
err = sheet.ReadItems(ctx, items, groupID, svc.repo)
|
||||
err = sheet.ReadItems(ctx, items, gid, svc.repo, hbURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return sheet.TSV()
|
||||
return sheet.CSV()
|
||||
}
|
||||
|
||||
func (svc *ItemService) ExportBillOfMaterialsTSV(ctx context.Context, groupID uuid.UUID) ([]byte, error) {
|
||||
items, err := svc.repo.Items.GetAll(ctx, groupID)
|
||||
func (svc *ItemService) ExportBillOfMaterialsCSV(ctx context.Context, gid uuid.UUID) ([]byte, error) {
|
||||
items, err := svc.repo.Items.GetAll(ctx, gid)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return reporting.BillOfMaterialsTSV(items)
|
||||
return reporting.BillOfMaterialsCSV(items)
|
||||
}
|
||||
|
||||
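A sketch of turning the exported rows into an actual CSV stream; the exportItems helper, output writer, and base URL are assumptions (the hbURL parameter feeds the generateItemURL helper shown earlier).

// exportItems is a sketch; out and hbURL are assumptions.
func exportItems(ctx context.Context, svc *services.AllServices, gid uuid.UUID, out io.Writer, hbURL string) error {
	rows, err := svc.Items.ExportCSV(ctx, gid, hbURL)
	if err != nil {
		return err
	}
	// encoding/csv handles quoting; WriteAll also flushes the writer.
	return csv.NewWriter(out).WriteAll(rows)
}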
@@ -6,10 +6,10 @@ import (
|
||||
"os"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentID uuid.UUID) (*ent.Document, error) {
|
||||
@@ -35,7 +35,7 @@ func (svc *ItemService) AttachmentUpdate(ctx Context, itemID uuid.UUID, data *re
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
|
||||
return svc.repo.Items.GetOneByGroup(ctx, ctx.GroupID, itemID)
|
||||
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID)
|
||||
}
|
||||
|
||||
// AttachmentAdd adds an attachment to an item by creating an entry in the Documents table and linking it to the Attachment
|
||||
@@ -43,13 +43,13 @@ func (svc *ItemService) AttachmentUpdate(ctx Context, itemID uuid.UUID, data *re
|
||||
// relative path during construction of the service.
|
||||
func (svc *ItemService) AttachmentAdd(ctx Context, itemID uuid.UUID, filename string, attachmentType attachment.Type, file io.Reader) (repo.ItemOut, error) {
|
||||
// Get the Item
|
||||
_, err := svc.repo.Items.GetOneByGroup(ctx, ctx.GroupID, itemID)
|
||||
_, err := svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID)
|
||||
if err != nil {
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
|
||||
// Create the document
|
||||
doc, err := svc.repo.Docs.Create(ctx, ctx.GroupID, repo.DocumentCreate{Title: filename, Content: file})
|
||||
doc, err := svc.repo.Docs.Create(ctx, ctx.GID, repo.DocumentCreate{Title: filename, Content: file})
|
||||
if err != nil {
|
||||
log.Err(err).Msg("failed to create document")
|
||||
return repo.ItemOut{}, err
|
||||
@@ -62,7 +62,7 @@ func (svc *ItemService) AttachmentAdd(ctx Context, itemID uuid.UUID, filename st
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
|
||||
return svc.repo.Items.GetOneByGroup(ctx, ctx.GroupID, itemID)
|
||||
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID)
|
||||
}
|
||||
|
||||
func (svc *ItemService) AttachmentDelete(ctx context.Context, gid, itemID, attachmentID uuid.UUID) error {
|
||||
|
||||
@@ -7,9 +7,9 @@ import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
func TestItemService_AddAttachment(t *testing.T) {
|
||||
|
||||
@@ -3,16 +3,13 @@ package services
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net/url"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/easyemails"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/pkgs/hasher"
|
||||
"github.com/hay-kot/homebox/backend/pkgs/mailer"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/pkgs/hasher"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -22,15 +19,8 @@ var (
|
||||
ErrorTokenIDMismatch = errors.New("token id mismatch")
|
||||
)
|
||||
|
||||
func init() { // nolint: gochecknoinits
|
||||
easyemails.ImageLogoHeader = "https://raw.githubusercontent.com/hay-kot/homebox/af9aa239af66df17478f5ed9283e303daf7c6775/docs/docs/assets/img/homebox-email-banner.jpg"
|
||||
easyemails.ColorPrimary = "#5D7F67"
|
||||
}
|
||||
|
||||
type UserService struct {
|
||||
repos *repo.AllRepos
|
||||
mailer *mailer.Mailer
|
||||
baseurl string
|
||||
repos *repo.AllRepos
|
||||
}
|
||||
|
||||
type (
|
||||
@@ -49,9 +39,6 @@ type (
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
}
|
||||
PasswordResetRequest struct {
|
||||
Email string `json:"email"`
|
||||
}
|
||||
)
|
||||
|
||||
// RegisterUser creates a new user and group in the data with the provided data. It also bootstraps the user's group
|
||||
@@ -145,13 +132,13 @@ func (svc *UserService) GetSelf(ctx context.Context, requestToken string) (repo.
|
||||
return svc.repos.AuthTokens.GetUserFromToken(ctx, hash)
|
||||
}
|
||||
|
||||
func (svc *UserService) UpdateSelf(ctx context.Context, userID uuid.UUID, data repo.UserUpdate) (repo.UserOut, error) {
|
||||
err := svc.repos.Users.Update(ctx, userID, data)
|
||||
func (svc *UserService) UpdateSelf(ctx context.Context, id uuid.UUID, data repo.UserUpdate) (repo.UserOut, error) {
|
||||
err := svc.repos.Users.Update(ctx, id, data)
|
||||
if err != nil {
|
||||
return repo.UserOut{}, err
|
||||
}
|
||||
|
||||
return svc.repos.Users.GetOneID(ctx, userID)
|
||||
return svc.repos.Users.GetOneID(ctx, id)
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
@@ -230,28 +217,28 @@ func (svc *UserService) RenewToken(ctx context.Context, token string) (UserAuthT
|
||||
// DeleteSelf deletes the user that is currently logged in, based on the provided UUID.
// There is _NO_ protection against deleting the wrong user; as such, this should only
// be used when the identity of the user has been confirmed.
|
||||
func (svc *UserService) DeleteSelf(ctx context.Context, userID uuid.UUID) error {
|
||||
return svc.repos.Users.Delete(ctx, userID)
|
||||
func (svc *UserService) DeleteSelf(ctx context.Context, id uuid.UUID) error {
|
||||
return svc.repos.Users.Delete(ctx, id)
|
||||
}
|
||||
|
||||
func (svc *UserService) PasswordChange(ctx Context, currentPassword, newPassword string) (ok bool) {
|
||||
usr, err := svc.repos.Users.GetOneID(ctx, ctx.UserID)
|
||||
func (svc *UserService) ChangePassword(ctx Context, current string, new string) (ok bool) {
|
||||
usr, err := svc.repos.Users.GetOneID(ctx, ctx.UID)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if !hasher.CheckPasswordHash(currentPassword, usr.PasswordHash) {
|
||||
if !hasher.CheckPasswordHash(current, usr.PasswordHash) {
|
||||
log.Err(errors.New("current password is incorrect")).Msg("Failed to change password")
|
||||
return false
|
||||
}
|
||||
|
||||
hashed, err := hasher.HashPassword(newPassword)
|
||||
hashed, err := hasher.HashPassword(new)
|
||||
if err != nil {
|
||||
log.Err(err).Msg("Failed to hash password")
|
||||
return false
|
||||
}
|
||||
|
||||
err = svc.repos.Users.ChangePassword(ctx.Context, ctx.UserID, hashed)
|
||||
err = svc.repos.Users.ChangePassword(ctx.Context, ctx.UID, hashed)
|
||||
if err != nil {
|
||||
log.Err(err).Msg("Failed to change password")
|
||||
return false
|
||||
@@ -259,80 +246,3 @@ func (svc *UserService) PasswordChange(ctx Context, currentPassword, newPassword
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
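Call-site sketch for the renamed method; the credential values are placeholders, and ctx is a services.Context carrying the acting user's UID. Note that ok is false both when the current password is wrong and when an internal error occurs.

// Fragment only: currentPassword and newPassword are placeholders.
if ok := svc.User.ChangePassword(ctx, currentPassword, newPassword); !ok {
	return errors.New("password change failed")
}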
func (svc *UserService) PasswordChangeWithToken(ctx Context, token, newPassword string) error {
|
||||
hashed, err := hasher.HashPassword(newPassword)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tokenHash := hasher.HashToken(token)
|
||||
|
||||
resetToken, err := svc.repos.Users.PasswordResetGet(ctx.Context, tokenHash)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if resetToken.UserID != ctx.UserID {
|
||||
return ErrorTokenIDMismatch
|
||||
}
|
||||
|
||||
err = svc.repos.Users.ChangePassword(ctx.Context, ctx.UserID, hashed)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = svc.repos.Users.PasswordResetDelete(ctx.Context, tokenHash)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (svc *UserService) PasswordResetRequest(ctx context.Context, req PasswordResetRequest) error {
|
||||
usr, err := svc.repos.Users.GetOneEmail(ctx, req.Email)
|
||||
if err != nil {
|
||||
log.Warn().Err(err).Msg("failed to get user for email reset")
|
||||
return err
|
||||
}
|
||||
|
||||
token := hasher.GenerateToken()
|
||||
err = svc.repos.Users.PasswordResetCreate(ctx, usr.ID, token.Hash)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
resetURL, err := url.JoinPath(svc.baseurl, "reset-password/")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
resetURL = resetURL + "?token=" + token.Raw
|
||||
|
||||
bldr := easyemails.NewBuilder().Add(
|
||||
easyemails.WithParagraph(
|
||||
easyemails.WithText("You have requested a password reset. Please click the link below to reset your password."),
|
||||
),
|
||||
easyemails.WithButton("Reset Password", resetURL),
|
||||
easyemails.WithParagraph(
|
||||
easyemails.WithText("[Github](https://github.com/hay-kot/homebox) · [Docs](https://hay-kot.github.io/homebox/)").
|
||||
Centered(),
|
||||
).
|
||||
FontSize(12),
|
||||
)
|
||||
|
||||
msg := mailer.NewMessageBuilder().
|
||||
SetBody(bldr.Render()).
|
||||
SetSubject("Password Reset").
|
||||
SetTo(usr.Name, usr.Email).
|
||||
Build()
|
||||
|
||||
err = svc.mailer.Send(msg)
|
||||
if err != nil {
|
||||
log.Err(err).Msg("Failed to send password reset email")
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
func defaultLocations() []repo.LocationCreate {
|
||||
|
||||
@@ -1,184 +0,0 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"entgo.io/ent"
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/actiontoken"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
|
||||
)
|
||||
|
||||
// ActionToken is the model entity for the ActionToken schema.
|
||||
type ActionToken struct {
|
||||
config `json:"-"`
|
||||
// ID of the ent.
|
||||
ID uuid.UUID `json:"id,omitempty"`
|
||||
// UserID holds the value of the "user_id" field.
|
||||
UserID uuid.UUID `json:"user_id,omitempty"`
|
||||
// CreatedAt holds the value of the "created_at" field.
|
||||
CreatedAt time.Time `json:"created_at,omitempty"`
|
||||
// UpdatedAt holds the value of the "updated_at" field.
|
||||
UpdatedAt time.Time `json:"updated_at,omitempty"`
|
||||
// Action holds the value of the "action" field.
|
||||
Action actiontoken.Action `json:"action,omitempty"`
|
||||
// Token holds the value of the "token" field.
|
||||
Token []byte `json:"token,omitempty"`
|
||||
// Edges holds the relations/edges for other nodes in the graph.
|
||||
// The values are being populated by the ActionTokenQuery when eager-loading is set.
|
||||
Edges ActionTokenEdges `json:"edges"`
|
||||
selectValues sql.SelectValues
|
||||
}
|
||||
|
||||
// ActionTokenEdges holds the relations/edges for other nodes in the graph.
|
||||
type ActionTokenEdges struct {
|
||||
// User holds the value of the user edge.
|
||||
User *User `json:"user,omitempty"`
|
||||
// loadedTypes holds the information for reporting if a
|
||||
// type was loaded (or requested) in eager-loading or not.
|
||||
loadedTypes [1]bool
|
||||
}
|
||||
|
||||
// UserOrErr returns the User value or an error if the edge
|
||||
// was not loaded in eager-loading, or loaded but was not found.
|
||||
func (e ActionTokenEdges) UserOrErr() (*User, error) {
|
||||
if e.loadedTypes[0] {
|
||||
if e.User == nil {
|
||||
// Edge was loaded but was not found.
|
||||
return nil, &NotFoundError{label: user.Label}
|
||||
}
|
||||
return e.User, nil
|
||||
}
|
||||
return nil, &NotLoadedError{edge: "user"}
|
||||
}
|
||||
|
||||
// scanValues returns the types for scanning values from sql.Rows.
|
||||
func (*ActionToken) scanValues(columns []string) ([]any, error) {
|
||||
values := make([]any, len(columns))
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case actiontoken.FieldToken:
|
||||
values[i] = new([]byte)
|
||||
case actiontoken.FieldAction:
|
||||
values[i] = new(sql.NullString)
|
||||
case actiontoken.FieldCreatedAt, actiontoken.FieldUpdatedAt:
|
||||
values[i] = new(sql.NullTime)
|
||||
case actiontoken.FieldID, actiontoken.FieldUserID:
|
||||
values[i] = new(uuid.UUID)
|
||||
default:
|
||||
values[i] = new(sql.UnknownType)
|
||||
}
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
// assignValues assigns the values that were returned from sql.Rows (after scanning)
|
||||
// to the ActionToken fields.
|
||||
func (at *ActionToken) assignValues(columns []string, values []any) error {
|
||||
if m, n := len(values), len(columns); m < n {
|
||||
return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
|
||||
}
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case actiontoken.FieldID:
|
||||
if value, ok := values[i].(*uuid.UUID); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field id", values[i])
|
||||
} else if value != nil {
|
||||
at.ID = *value
|
||||
}
|
||||
case actiontoken.FieldUserID:
|
||||
if value, ok := values[i].(*uuid.UUID); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field user_id", values[i])
|
||||
} else if value != nil {
|
||||
at.UserID = *value
|
||||
}
|
||||
case actiontoken.FieldCreatedAt:
|
||||
if value, ok := values[i].(*sql.NullTime); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field created_at", values[i])
|
||||
} else if value.Valid {
|
||||
at.CreatedAt = value.Time
|
||||
}
|
||||
case actiontoken.FieldUpdatedAt:
|
||||
if value, ok := values[i].(*sql.NullTime); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field updated_at", values[i])
|
||||
} else if value.Valid {
|
||||
at.UpdatedAt = value.Time
|
||||
}
|
||||
case actiontoken.FieldAction:
|
||||
if value, ok := values[i].(*sql.NullString); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field action", values[i])
|
||||
} else if value.Valid {
|
||||
at.Action = actiontoken.Action(value.String)
|
||||
}
|
||||
case actiontoken.FieldToken:
|
||||
if value, ok := values[i].(*[]byte); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field token", values[i])
|
||||
} else if value != nil {
|
||||
at.Token = *value
|
||||
}
|
||||
default:
|
||||
at.selectValues.Set(columns[i], values[i])
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Value returns the ent.Value that was dynamically selected and assigned to the ActionToken.
|
||||
// This includes values selected through modifiers, order, etc.
|
||||
func (at *ActionToken) Value(name string) (ent.Value, error) {
|
||||
return at.selectValues.Get(name)
|
||||
}
|
||||
|
||||
// QueryUser queries the "user" edge of the ActionToken entity.
|
||||
func (at *ActionToken) QueryUser() *UserQuery {
|
||||
return NewActionTokenClient(at.config).QueryUser(at)
|
||||
}
|
||||
|
||||
// Update returns a builder for updating this ActionToken.
|
||||
// Note that you need to call ActionToken.Unwrap() before calling this method if this ActionToken
|
||||
// was returned from a transaction, and the transaction was committed or rolled back.
|
||||
func (at *ActionToken) Update() *ActionTokenUpdateOne {
|
||||
return NewActionTokenClient(at.config).UpdateOne(at)
|
||||
}
|
||||
|
||||
// Unwrap unwraps the ActionToken entity that was returned from a transaction after it was closed,
|
||||
// so that all future queries will be executed through the driver which created the transaction.
|
||||
func (at *ActionToken) Unwrap() *ActionToken {
|
||||
_tx, ok := at.config.driver.(*txDriver)
|
||||
if !ok {
|
||||
panic("ent: ActionToken is not a transactional entity")
|
||||
}
|
||||
at.config.driver = _tx.drv
|
||||
return at
|
||||
}
|
||||
|
||||
// String implements the fmt.Stringer.
|
||||
func (at *ActionToken) String() string {
|
||||
var builder strings.Builder
|
||||
builder.WriteString("ActionToken(")
|
||||
builder.WriteString(fmt.Sprintf("id=%v, ", at.ID))
|
||||
builder.WriteString("user_id=")
|
||||
builder.WriteString(fmt.Sprintf("%v", at.UserID))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("created_at=")
|
||||
builder.WriteString(at.CreatedAt.Format(time.ANSIC))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("updated_at=")
|
||||
builder.WriteString(at.UpdatedAt.Format(time.ANSIC))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("action=")
|
||||
builder.WriteString(fmt.Sprintf("%v", at.Action))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("token=")
|
||||
builder.WriteString(fmt.Sprintf("%v", at.Token))
|
||||
builder.WriteByte(')')
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
// ActionTokens is a parsable slice of ActionToken.
|
||||
type ActionTokens []*ActionToken
|
||||
@@ -1,138 +0,0 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package actiontoken
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
const (
|
||||
// Label holds the string label denoting the actiontoken type in the database.
|
||||
Label = "action_token"
|
||||
// FieldID holds the string denoting the id field in the database.
|
||||
FieldID = "id"
|
||||
// FieldUserID holds the string denoting the user_id field in the database.
|
||||
FieldUserID = "user_id"
|
||||
// FieldCreatedAt holds the string denoting the created_at field in the database.
|
||||
FieldCreatedAt = "created_at"
|
||||
// FieldUpdatedAt holds the string denoting the updated_at field in the database.
|
||||
FieldUpdatedAt = "updated_at"
|
||||
// FieldAction holds the string denoting the action field in the database.
|
||||
FieldAction = "action"
|
||||
// FieldToken holds the string denoting the token field in the database.
|
||||
FieldToken = "token"
|
||||
// EdgeUser holds the string denoting the user edge name in mutations.
|
||||
EdgeUser = "user"
|
||||
// Table holds the table name of the actiontoken in the database.
|
||||
Table = "action_tokens"
|
||||
// UserTable is the table that holds the user relation/edge.
|
||||
UserTable = "action_tokens"
|
||||
// UserInverseTable is the table name for the User entity.
|
||||
// It exists in this package in order to avoid circular dependency with the "user" package.
|
||||
UserInverseTable = "users"
|
||||
// UserColumn is the table column denoting the user relation/edge.
|
||||
UserColumn = "user_id"
|
||||
)
|
||||
|
||||
// Columns holds all SQL columns for actiontoken fields.
|
||||
var Columns = []string{
|
||||
FieldID,
|
||||
FieldUserID,
|
||||
FieldCreatedAt,
|
||||
FieldUpdatedAt,
|
||||
FieldAction,
|
||||
FieldToken,
|
||||
}
|
||||
|
||||
// ValidColumn reports if the column name is valid (part of the table columns).
|
||||
func ValidColumn(column string) bool {
|
||||
for i := range Columns {
|
||||
if column == Columns[i] {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
var (
|
||||
// DefaultCreatedAt holds the default value on creation for the "created_at" field.
|
||||
DefaultCreatedAt func() time.Time
|
||||
// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
|
||||
DefaultUpdatedAt func() time.Time
|
||||
// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
|
||||
UpdateDefaultUpdatedAt func() time.Time
|
||||
// DefaultID holds the default value on creation for the "id" field.
|
||||
DefaultID func() uuid.UUID
|
||||
)
|
||||
|
||||
// Action defines the type for the "action" enum field.
|
||||
type Action string
|
||||
|
||||
// ActionResetPassword is the default value of the Action enum.
|
||||
const DefaultAction = ActionResetPassword
|
||||
|
||||
// Action values.
|
||||
const (
|
||||
ActionResetPassword Action = "reset_password"
|
||||
)
|
||||
|
||||
func (a Action) String() string {
|
||||
return string(a)
|
||||
}
|
||||
|
||||
// ActionValidator is a validator for the "action" field enum values. It is called by the builders before save.
|
||||
func ActionValidator(a Action) error {
|
||||
switch a {
|
||||
case ActionResetPassword:
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("actiontoken: invalid enum value for action field: %q", a)
|
||||
}
|
||||
}
|
||||
|
||||
// OrderOption defines the ordering options for the ActionToken queries.
|
||||
type OrderOption func(*sql.Selector)
|
||||
|
||||
// ByID orders the results by the id field.
|
||||
func ByID(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldID, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByUserID orders the results by the user_id field.
|
||||
func ByUserID(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldUserID, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByCreatedAt orders the results by the created_at field.
|
||||
func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldCreatedAt, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByUpdatedAt orders the results by the updated_at field.
|
||||
func ByUpdatedAt(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldUpdatedAt, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByAction orders the results by the action field.
|
||||
func ByAction(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldAction, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByUserField orders the results by user field.
|
||||
func ByUserField(field string, opts ...sql.OrderTermOption) OrderOption {
|
||||
return func(s *sql.Selector) {
|
||||
sqlgraph.OrderByNeighborTerms(s, newUserStep(), sql.OrderByField(field, opts...))
|
||||
}
|
||||
}
|
||||
func newUserStep() *sqlgraph.Step {
|
||||
return sqlgraph.NewStep(
|
||||
sqlgraph.From(Table, FieldID),
|
||||
sqlgraph.To(UserInverseTable, FieldID),
|
||||
sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn),
|
||||
)
|
||||
}
|
||||
@@ -1,275 +0,0 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package actiontoken
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
|
||||
)
|
||||
|
||||
// ID filters vertices based on their ID field.
|
||||
func ID(id uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldEQ(FieldID, id))
|
||||
}
|
||||
|
||||
// IDEQ applies the EQ predicate on the ID field.
|
||||
func IDEQ(id uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldEQ(FieldID, id))
|
||||
}
|
||||
|
||||
// IDNEQ applies the NEQ predicate on the ID field.
|
||||
func IDNEQ(id uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldNEQ(FieldID, id))
|
||||
}
|
||||
|
||||
// IDIn applies the In predicate on the ID field.
|
||||
func IDIn(ids ...uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldIn(FieldID, ids...))
|
||||
}
|
||||
|
||||
// IDNotIn applies the NotIn predicate on the ID field.
|
||||
func IDNotIn(ids ...uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldNotIn(FieldID, ids...))
|
||||
}
|
||||
|
||||
// IDGT applies the GT predicate on the ID field.
|
||||
func IDGT(id uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldGT(FieldID, id))
|
||||
}
|
||||
|
||||
// IDGTE applies the GTE predicate on the ID field.
|
||||
func IDGTE(id uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldGTE(FieldID, id))
|
||||
}
|
||||
|
||||
// IDLT applies the LT predicate on the ID field.
|
||||
func IDLT(id uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldLT(FieldID, id))
|
||||
}
|
||||
|
||||
// IDLTE applies the LTE predicate on the ID field.
|
||||
func IDLTE(id uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldLTE(FieldID, id))
|
||||
}
|
||||
|
||||
// UserID applies equality check predicate on the "user_id" field. It's identical to UserIDEQ.
|
||||
func UserID(v uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldEQ(FieldUserID, v))
|
||||
}
|
||||
|
||||
// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
|
||||
func CreatedAt(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldEQ(FieldCreatedAt, v))
|
||||
}
|
||||
|
||||
// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ.
|
||||
func UpdatedAt(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldEQ(FieldUpdatedAt, v))
|
||||
}
|
||||
|
||||
// Token applies equality check predicate on the "token" field. It's identical to TokenEQ.
|
||||
func Token(v []byte) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldEQ(FieldToken, v))
|
||||
}
|
||||
|
||||
// UserIDEQ applies the EQ predicate on the "user_id" field.
|
||||
func UserIDEQ(v uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldEQ(FieldUserID, v))
|
||||
}
|
||||
|
||||
// UserIDNEQ applies the NEQ predicate on the "user_id" field.
|
||||
func UserIDNEQ(v uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldNEQ(FieldUserID, v))
|
||||
}
|
||||
|
||||
// UserIDIn applies the In predicate on the "user_id" field.
|
||||
func UserIDIn(vs ...uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldIn(FieldUserID, vs...))
|
||||
}
|
||||
|
||||
// UserIDNotIn applies the NotIn predicate on the "user_id" field.
|
||||
func UserIDNotIn(vs ...uuid.UUID) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldNotIn(FieldUserID, vs...))
|
||||
}
|
||||
|
||||
// CreatedAtEQ applies the EQ predicate on the "created_at" field.
|
||||
func CreatedAtEQ(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldEQ(FieldCreatedAt, v))
|
||||
}
|
||||
|
||||
// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
|
||||
func CreatedAtNEQ(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldNEQ(FieldCreatedAt, v))
|
||||
}
|
||||
|
||||
// CreatedAtIn applies the In predicate on the "created_at" field.
|
||||
func CreatedAtIn(vs ...time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldIn(FieldCreatedAt, vs...))
|
||||
}
|
||||
|
||||
// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
|
||||
func CreatedAtNotIn(vs ...time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldNotIn(FieldCreatedAt, vs...))
|
||||
}
|
||||
|
||||
// CreatedAtGT applies the GT predicate on the "created_at" field.
|
||||
func CreatedAtGT(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldGT(FieldCreatedAt, v))
|
||||
}
|
||||
|
||||
// CreatedAtGTE applies the GTE predicate on the "created_at" field.
|
||||
func CreatedAtGTE(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldGTE(FieldCreatedAt, v))
|
||||
}
|
||||
|
||||
// CreatedAtLT applies the LT predicate on the "created_at" field.
|
||||
func CreatedAtLT(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldLT(FieldCreatedAt, v))
|
||||
}
|
||||
|
||||
// CreatedAtLTE applies the LTE predicate on the "created_at" field.
|
||||
func CreatedAtLTE(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldLTE(FieldCreatedAt, v))
|
||||
}
|
||||
|
||||
// UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
|
||||
func UpdatedAtEQ(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldEQ(FieldUpdatedAt, v))
|
||||
}
|
||||
|
||||
// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field.
|
||||
func UpdatedAtNEQ(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldNEQ(FieldUpdatedAt, v))
|
||||
}
|
||||
|
||||
// UpdatedAtIn applies the In predicate on the "updated_at" field.
|
||||
func UpdatedAtIn(vs ...time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldIn(FieldUpdatedAt, vs...))
|
||||
}
|
||||
|
||||
// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field.
|
||||
func UpdatedAtNotIn(vs ...time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldNotIn(FieldUpdatedAt, vs...))
|
||||
}
|
||||
|
||||
// UpdatedAtGT applies the GT predicate on the "updated_at" field.
|
||||
func UpdatedAtGT(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldGT(FieldUpdatedAt, v))
|
||||
}
|
||||
|
||||
// UpdatedAtGTE applies the GTE predicate on the "updated_at" field.
|
||||
func UpdatedAtGTE(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldGTE(FieldUpdatedAt, v))
|
||||
}
|
||||
|
||||
// UpdatedAtLT applies the LT predicate on the "updated_at" field.
|
||||
func UpdatedAtLT(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldLT(FieldUpdatedAt, v))
|
||||
}
|
||||
|
||||
// UpdatedAtLTE applies the LTE predicate on the "updated_at" field.
|
||||
func UpdatedAtLTE(v time.Time) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldLTE(FieldUpdatedAt, v))
|
||||
}
|
||||
|
||||
// ActionEQ applies the EQ predicate on the "action" field.
|
||||
func ActionEQ(v Action) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldEQ(FieldAction, v))
|
||||
}
|
||||
|
||||
// ActionNEQ applies the NEQ predicate on the "action" field.
|
||||
func ActionNEQ(v Action) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldNEQ(FieldAction, v))
|
||||
}
|
||||
|
||||
// ActionIn applies the In predicate on the "action" field.
|
||||
func ActionIn(vs ...Action) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldIn(FieldAction, vs...))
|
||||
}
|
||||
|
||||
// ActionNotIn applies the NotIn predicate on the "action" field.
|
||||
func ActionNotIn(vs ...Action) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldNotIn(FieldAction, vs...))
|
||||
}
|
||||
|
||||
// TokenEQ applies the EQ predicate on the "token" field.
|
||||
func TokenEQ(v []byte) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldEQ(FieldToken, v))
|
||||
}
|
||||
|
||||
// TokenNEQ applies the NEQ predicate on the "token" field.
|
||||
func TokenNEQ(v []byte) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldNEQ(FieldToken, v))
|
||||
}
|
||||
|
||||
// TokenIn applies the In predicate on the "token" field.
|
||||
func TokenIn(vs ...[]byte) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldIn(FieldToken, vs...))
|
||||
}
|
||||
|
||||
// TokenNotIn applies the NotIn predicate on the "token" field.
|
||||
func TokenNotIn(vs ...[]byte) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldNotIn(FieldToken, vs...))
|
||||
}
|
||||
|
||||
// TokenGT applies the GT predicate on the "token" field.
|
||||
func TokenGT(v []byte) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldGT(FieldToken, v))
|
||||
}
|
||||
|
||||
// TokenGTE applies the GTE predicate on the "token" field.
|
||||
func TokenGTE(v []byte) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldGTE(FieldToken, v))
|
||||
}
|
||||
|
||||
// TokenLT applies the LT predicate on the "token" field.
|
||||
func TokenLT(v []byte) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldLT(FieldToken, v))
|
||||
}
|
||||
|
||||
// TokenLTE applies the LTE predicate on the "token" field.
|
||||
func TokenLTE(v []byte) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.FieldLTE(FieldToken, v))
|
||||
}
|
||||
|
||||
// HasUser applies the HasEdge predicate on the "user" edge.
|
||||
func HasUser() predicate.ActionToken {
|
||||
return predicate.ActionToken(func(s *sql.Selector) {
|
||||
step := sqlgraph.NewStep(
|
||||
sqlgraph.From(Table, FieldID),
|
||||
sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn),
|
||||
)
|
||||
sqlgraph.HasNeighbors(s, step)
|
||||
})
|
||||
}
|
||||
|
||||
// HasUserWith applies the HasEdge predicate on the "user" edge with a given conditions (other predicates).
|
||||
func HasUserWith(preds ...predicate.User) predicate.ActionToken {
|
||||
return predicate.ActionToken(func(s *sql.Selector) {
|
||||
step := newUserStep()
|
||||
sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
|
||||
for _, p := range preds {
|
||||
p(s)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// And groups predicates with the AND operator between them.
|
||||
func And(predicates ...predicate.ActionToken) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.AndPredicates(predicates...))
|
||||
}
|
||||
|
||||
// Or groups predicates with the OR operator between them.
|
||||
func Or(predicates ...predicate.ActionToken) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.OrPredicates(predicates...))
|
||||
}
|
||||
|
||||
// Not applies the not operator on the given predicate.
|
||||
func Not(p predicate.ActionToken) predicate.ActionToken {
|
||||
return predicate.ActionToken(sql.NotPredicates(p))
|
||||
}
|
||||
@@ -1,329 +0,0 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/actiontoken"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
|
||||
)
|
||||
|
||||
// ActionTokenCreate is the builder for creating a ActionToken entity.
|
||||
type ActionTokenCreate struct {
|
||||
config
|
||||
mutation *ActionTokenMutation
|
||||
hooks []Hook
|
||||
}
|
||||
|
||||
// SetUserID sets the "user_id" field.
|
||||
func (atc *ActionTokenCreate) SetUserID(u uuid.UUID) *ActionTokenCreate {
|
||||
atc.mutation.SetUserID(u)
|
||||
return atc
|
||||
}
|
||||
|
||||
// SetCreatedAt sets the "created_at" field.
|
||||
func (atc *ActionTokenCreate) SetCreatedAt(t time.Time) *ActionTokenCreate {
|
||||
atc.mutation.SetCreatedAt(t)
|
||||
return atc
|
||||
}
|
||||
|
||||
// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
|
||||
func (atc *ActionTokenCreate) SetNillableCreatedAt(t *time.Time) *ActionTokenCreate {
|
||||
if t != nil {
|
||||
atc.SetCreatedAt(*t)
|
||||
}
|
||||
return atc
|
||||
}
|
||||
|
||||
// SetUpdatedAt sets the "updated_at" field.
|
||||
func (atc *ActionTokenCreate) SetUpdatedAt(t time.Time) *ActionTokenCreate {
|
||||
atc.mutation.SetUpdatedAt(t)
|
||||
return atc
|
||||
}
|
||||
|
||||
// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil.
|
||||
func (atc *ActionTokenCreate) SetNillableUpdatedAt(t *time.Time) *ActionTokenCreate {
|
||||
if t != nil {
|
||||
atc.SetUpdatedAt(*t)
|
||||
}
|
||||
return atc
|
||||
}
|
||||
|
||||
// SetAction sets the "action" field.
|
||||
func (atc *ActionTokenCreate) SetAction(a actiontoken.Action) *ActionTokenCreate {
|
||||
atc.mutation.SetAction(a)
|
||||
return atc
|
||||
}
|
||||
|
||||
// SetNillableAction sets the "action" field if the given value is not nil.
|
||||
func (atc *ActionTokenCreate) SetNillableAction(a *actiontoken.Action) *ActionTokenCreate {
|
||||
if a != nil {
|
||||
atc.SetAction(*a)
|
||||
}
|
||||
return atc
|
||||
}
|
||||
|
||||
// SetToken sets the "token" field.
|
||||
func (atc *ActionTokenCreate) SetToken(b []byte) *ActionTokenCreate {
|
||||
atc.mutation.SetToken(b)
|
||||
return atc
|
||||
}
|
||||
|
||||
// SetID sets the "id" field.
|
||||
func (atc *ActionTokenCreate) SetID(u uuid.UUID) *ActionTokenCreate {
|
||||
atc.mutation.SetID(u)
|
||||
return atc
|
||||
}
|
||||
|
||||
// SetNillableID sets the "id" field if the given value is not nil.
|
||||
func (atc *ActionTokenCreate) SetNillableID(u *uuid.UUID) *ActionTokenCreate {
|
||||
if u != nil {
|
||||
atc.SetID(*u)
|
||||
}
|
||||
return atc
|
||||
}
|
||||
|
||||
// SetUser sets the "user" edge to the User entity.
|
||||
func (atc *ActionTokenCreate) SetUser(u *User) *ActionTokenCreate {
|
||||
return atc.SetUserID(u.ID)
|
||||
}
|
||||
|
||||
// Mutation returns the ActionTokenMutation object of the builder.
|
||||
func (atc *ActionTokenCreate) Mutation() *ActionTokenMutation {
|
||||
return atc.mutation
|
||||
}
|
||||
|
||||
// Save creates the ActionToken in the database.
|
||||
func (atc *ActionTokenCreate) Save(ctx context.Context) (*ActionToken, error) {
|
||||
atc.defaults()
|
||||
return withHooks(ctx, atc.sqlSave, atc.mutation, atc.hooks)
|
||||
}
|
||||
|
||||
// SaveX calls Save and panics if Save returns an error.
|
||||
func (atc *ActionTokenCreate) SaveX(ctx context.Context) *ActionToken {
|
||||
v, err := atc.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// Exec executes the query.
|
||||
func (atc *ActionTokenCreate) Exec(ctx context.Context) error {
|
||||
_, err := atc.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (atc *ActionTokenCreate) ExecX(ctx context.Context) {
|
||||
if err := atc.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// defaults sets the default values of the builder before save.
|
||||
func (atc *ActionTokenCreate) defaults() {
|
||||
if _, ok := atc.mutation.CreatedAt(); !ok {
|
||||
v := actiontoken.DefaultCreatedAt()
|
||||
atc.mutation.SetCreatedAt(v)
|
||||
}
|
||||
if _, ok := atc.mutation.UpdatedAt(); !ok {
|
||||
v := actiontoken.DefaultUpdatedAt()
|
||||
atc.mutation.SetUpdatedAt(v)
|
||||
}
|
||||
if _, ok := atc.mutation.Action(); !ok {
|
||||
v := actiontoken.DefaultAction
|
||||
atc.mutation.SetAction(v)
|
||||
}
|
||||
if _, ok := atc.mutation.ID(); !ok {
|
||||
v := actiontoken.DefaultID()
|
||||
atc.mutation.SetID(v)
|
||||
}
|
||||
}
|
||||
|
||||
// check runs all checks and user-defined validators on the builder.
|
||||
func (atc *ActionTokenCreate) check() error {
|
||||
if _, ok := atc.mutation.UserID(); !ok {
|
||||
return &ValidationError{Name: "user_id", err: errors.New(`ent: missing required field "ActionToken.user_id"`)}
|
||||
}
|
||||
if _, ok := atc.mutation.CreatedAt(); !ok {
|
||||
return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "ActionToken.created_at"`)}
|
||||
}
|
||||
if _, ok := atc.mutation.UpdatedAt(); !ok {
|
||||
return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "ActionToken.updated_at"`)}
|
||||
}
|
||||
if _, ok := atc.mutation.Action(); !ok {
|
||||
return &ValidationError{Name: "action", err: errors.New(`ent: missing required field "ActionToken.action"`)}
|
||||
}
|
||||
if v, ok := atc.mutation.Action(); ok {
|
||||
if err := actiontoken.ActionValidator(v); err != nil {
|
||||
return &ValidationError{Name: "action", err: fmt.Errorf(`ent: validator failed for field "ActionToken.action": %w`, err)}
|
||||
}
|
||||
}
|
||||
if _, ok := atc.mutation.Token(); !ok {
|
||||
return &ValidationError{Name: "token", err: errors.New(`ent: missing required field "ActionToken.token"`)}
|
||||
}
|
||||
if _, ok := atc.mutation.UserID(); !ok {
|
||||
return &ValidationError{Name: "user", err: errors.New(`ent: missing required edge "ActionToken.user"`)}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (atc *ActionTokenCreate) sqlSave(ctx context.Context) (*ActionToken, error) {
|
||||
if err := atc.check(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_node, _spec := atc.createSpec()
|
||||
if err := sqlgraph.CreateNode(ctx, atc.driver, _spec); err != nil {
|
||||
if sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
if _spec.ID.Value != nil {
|
||||
if id, ok := _spec.ID.Value.(*uuid.UUID); ok {
|
||||
_node.ID = *id
|
||||
} else if err := _node.ID.Scan(_spec.ID.Value); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
atc.mutation.id = &_node.ID
|
||||
atc.mutation.done = true
|
||||
return _node, nil
|
||||
}
|
||||
|
||||
func (atc *ActionTokenCreate) createSpec() (*ActionToken, *sqlgraph.CreateSpec) {
|
||||
var (
|
||||
_node = &ActionToken{config: atc.config}
|
||||
_spec = sqlgraph.NewCreateSpec(actiontoken.Table, sqlgraph.NewFieldSpec(actiontoken.FieldID, field.TypeUUID))
|
||||
)
|
||||
if id, ok := atc.mutation.ID(); ok {
|
||||
_node.ID = id
|
||||
_spec.ID.Value = &id
|
||||
}
|
||||
if value, ok := atc.mutation.CreatedAt(); ok {
|
||||
_spec.SetField(actiontoken.FieldCreatedAt, field.TypeTime, value)
|
||||
_node.CreatedAt = value
|
||||
}
|
||||
if value, ok := atc.mutation.UpdatedAt(); ok {
|
||||
_spec.SetField(actiontoken.FieldUpdatedAt, field.TypeTime, value)
|
||||
_node.UpdatedAt = value
|
||||
}
|
||||
if value, ok := atc.mutation.Action(); ok {
|
||||
_spec.SetField(actiontoken.FieldAction, field.TypeEnum, value)
|
||||
_node.Action = value
|
||||
}
|
||||
if value, ok := atc.mutation.Token(); ok {
|
||||
_spec.SetField(actiontoken.FieldToken, field.TypeBytes, value)
|
||||
_node.Token = value
|
||||
}
|
||||
if nodes := atc.mutation.UserIDs(); len(nodes) > 0 {
|
||||
edge := &sqlgraph.EdgeSpec{
|
||||
Rel: sqlgraph.M2O,
|
||||
Inverse: true,
|
||||
Table: actiontoken.UserTable,
|
||||
Columns: []string{actiontoken.UserColumn},
|
||||
Bidi: false,
|
||||
Target: &sqlgraph.EdgeTarget{
|
||||
IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeUUID),
|
||||
},
|
||||
}
|
||||
for _, k := range nodes {
|
||||
edge.Target.Nodes = append(edge.Target.Nodes, k)
|
||||
}
|
||||
_node.UserID = nodes[0]
|
||||
_spec.Edges = append(_spec.Edges, edge)
|
||||
}
|
||||
return _node, _spec
|
||||
}
|
||||
|
||||
// ActionTokenCreateBulk is the builder for creating many ActionToken entities in bulk.
|
||||
type ActionTokenCreateBulk struct {
|
||||
config
|
||||
err error
|
||||
builders []*ActionTokenCreate
|
||||
}
|
||||
|
||||
// Save creates the ActionToken entities in the database.
|
||||
func (atcb *ActionTokenCreateBulk) Save(ctx context.Context) ([]*ActionToken, error) {
|
||||
if atcb.err != nil {
|
||||
return nil, atcb.err
|
||||
}
|
||||
specs := make([]*sqlgraph.CreateSpec, len(atcb.builders))
|
||||
nodes := make([]*ActionToken, len(atcb.builders))
|
||||
mutators := make([]Mutator, len(atcb.builders))
|
||||
for i := range atcb.builders {
|
||||
func(i int, root context.Context) {
|
||||
builder := atcb.builders[i]
|
||||
builder.defaults()
|
||||
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
|
||||
mutation, ok := m.(*ActionTokenMutation)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected mutation type %T", m)
|
||||
}
|
||||
if err := builder.check(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
builder.mutation = mutation
|
||||
var err error
|
||||
nodes[i], specs[i] = builder.createSpec()
|
||||
if i < len(mutators)-1 {
|
||||
_, err = mutators[i+1].Mutate(root, atcb.builders[i+1].mutation)
|
||||
} else {
|
||||
spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
|
||||
// Invoke the actual operation on the latest mutation in the chain.
|
||||
if err = sqlgraph.BatchCreate(ctx, atcb.driver, spec); err != nil {
|
||||
if sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
mutation.id = &nodes[i].ID
|
||||
mutation.done = true
|
||||
return nodes[i], nil
|
||||
})
|
||||
for i := len(builder.hooks) - 1; i >= 0; i-- {
|
||||
mut = builder.hooks[i](mut)
|
||||
}
|
||||
mutators[i] = mut
|
||||
}(i, ctx)
|
||||
}
|
||||
if len(mutators) > 0 {
|
||||
if _, err := mutators[0].Mutate(ctx, atcb.builders[0].mutation); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return nodes, nil
|
||||
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
func (atcb *ActionTokenCreateBulk) SaveX(ctx context.Context) []*ActionToken {
|
||||
v, err := atcb.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// Exec executes the query.
|
||||
func (atcb *ActionTokenCreateBulk) Exec(ctx context.Context) error {
|
||||
_, err := atcb.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (atcb *ActionTokenCreateBulk) ExecX(ctx context.Context) {
|
||||
if err := atcb.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
@@ -1,88 +0,0 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/actiontoken"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
|
||||
)
|
||||
|
||||
// ActionTokenDelete is the builder for deleting a ActionToken entity.
|
||||
type ActionTokenDelete struct {
|
||||
config
|
||||
hooks []Hook
|
||||
mutation *ActionTokenMutation
|
||||
}
|
||||
|
||||
// Where appends a list predicates to the ActionTokenDelete builder.
|
||||
func (atd *ActionTokenDelete) Where(ps ...predicate.ActionToken) *ActionTokenDelete {
|
||||
atd.mutation.Where(ps...)
|
||||
return atd
|
||||
}
|
||||
|
||||
// Exec executes the deletion query and returns how many vertices were deleted.
|
||||
func (atd *ActionTokenDelete) Exec(ctx context.Context) (int, error) {
|
||||
return withHooks(ctx, atd.sqlExec, atd.mutation, atd.hooks)
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (atd *ActionTokenDelete) ExecX(ctx context.Context) int {
|
||||
n, err := atd.Exec(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func (atd *ActionTokenDelete) sqlExec(ctx context.Context) (int, error) {
|
||||
_spec := sqlgraph.NewDeleteSpec(actiontoken.Table, sqlgraph.NewFieldSpec(actiontoken.FieldID, field.TypeUUID))
|
||||
if ps := atd.mutation.predicates; len(ps) > 0 {
|
||||
_spec.Predicate = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
affected, err := sqlgraph.DeleteNodes(ctx, atd.driver, _spec)
|
||||
if err != nil && sqlgraph.IsConstraintError(err) {
|
||||
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||
}
|
||||
atd.mutation.done = true
|
||||
return affected, err
|
||||
}
|
||||
|
||||
// ActionTokenDeleteOne is the builder for deleting a single ActionToken entity.
|
||||
type ActionTokenDeleteOne struct {
|
||||
atd *ActionTokenDelete
|
||||
}
|
||||
|
||||
// Where appends a list predicates to the ActionTokenDelete builder.
|
||||
func (atdo *ActionTokenDeleteOne) Where(ps ...predicate.ActionToken) *ActionTokenDeleteOne {
|
||||
atdo.atd.mutation.Where(ps...)
|
||||
return atdo
|
||||
}
|
||||
|
||||
// Exec executes the deletion query.
|
||||
func (atdo *ActionTokenDeleteOne) Exec(ctx context.Context) error {
|
||||
n, err := atdo.atd.Exec(ctx)
|
||||
switch {
|
||||
case err != nil:
|
||||
return err
|
||||
case n == 0:
|
||||
return &NotFoundError{actiontoken.Label}
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (atdo *ActionTokenDeleteOne) ExecX(ctx context.Context) {
|
||||
if err := atdo.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
@@ -1,606 +0,0 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"math"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/actiontoken"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
|
||||
)
|
||||
|
||||
// ActionTokenQuery is the builder for querying ActionToken entities.
|
||||
type ActionTokenQuery struct {
|
||||
config
|
||||
ctx *QueryContext
|
||||
order []actiontoken.OrderOption
|
||||
inters []Interceptor
|
||||
predicates []predicate.ActionToken
|
||||
withUser *UserQuery
|
||||
// intermediate query (i.e. traversal path).
|
||||
sql *sql.Selector
|
||||
path func(context.Context) (*sql.Selector, error)
|
||||
}
|
||||
|
||||
// Where adds a new predicate for the ActionTokenQuery builder.
|
||||
func (atq *ActionTokenQuery) Where(ps ...predicate.ActionToken) *ActionTokenQuery {
|
||||
atq.predicates = append(atq.predicates, ps...)
|
||||
return atq
|
||||
}
|
||||
|
||||
// Limit the number of records to be returned by this query.
|
||||
func (atq *ActionTokenQuery) Limit(limit int) *ActionTokenQuery {
|
||||
atq.ctx.Limit = &limit
|
||||
return atq
|
||||
}
|
||||
|
||||
// Offset to start from.
|
||||
func (atq *ActionTokenQuery) Offset(offset int) *ActionTokenQuery {
|
||||
atq.ctx.Offset = &offset
|
||||
return atq
|
||||
}
|
||||
|
||||
// Unique configures the query builder to filter duplicate records on query.
|
||||
// By default, unique is set to true, and can be disabled using this method.
|
||||
func (atq *ActionTokenQuery) Unique(unique bool) *ActionTokenQuery {
|
||||
atq.ctx.Unique = &unique
|
||||
return atq
|
||||
}
|
||||
|
||||
// Order specifies how the records should be ordered.
|
||||
func (atq *ActionTokenQuery) Order(o ...actiontoken.OrderOption) *ActionTokenQuery {
|
||||
atq.order = append(atq.order, o...)
|
||||
return atq
|
||||
}
|
||||
|
||||
// QueryUser chains the current query on the "user" edge.
|
||||
func (atq *ActionTokenQuery) QueryUser() *UserQuery {
|
||||
query := (&UserClient{config: atq.config}).Query()
|
||||
query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
|
||||
if err := atq.prepareQuery(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
selector := atq.sqlQuery(ctx)
|
||||
if err := selector.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
step := sqlgraph.NewStep(
|
||||
sqlgraph.From(actiontoken.Table, actiontoken.FieldID, selector),
|
||||
sqlgraph.To(user.Table, user.FieldID),
|
||||
sqlgraph.Edge(sqlgraph.M2O, true, actiontoken.UserTable, actiontoken.UserColumn),
|
||||
)
|
||||
fromU = sqlgraph.SetNeighbors(atq.driver.Dialect(), step)
|
||||
return fromU, nil
|
||||
}
|
||||
return query
|
||||
}
|
||||
|
||||
// First returns the first ActionToken entity from the query.
|
||||
// Returns a *NotFoundError when no ActionToken was found.
|
||||
func (atq *ActionTokenQuery) First(ctx context.Context) (*ActionToken, error) {
|
||||
nodes, err := atq.Limit(1).All(setContextOp(ctx, atq.ctx, "First"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(nodes) == 0 {
|
||||
return nil, &NotFoundError{actiontoken.Label}
|
||||
}
|
||||
return nodes[0], nil
|
||||
}
|
||||
|
||||
// FirstX is like First, but panics if an error occurs.
|
||||
func (atq *ActionTokenQuery) FirstX(ctx context.Context) *ActionToken {
|
||||
node, err := atq.First(ctx)
|
||||
if err != nil && !IsNotFound(err) {
|
||||
panic(err)
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// FirstID returns the first ActionToken ID from the query.
|
||||
// Returns a *NotFoundError when no ActionToken ID was found.
|
||||
func (atq *ActionTokenQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) {
|
||||
var ids []uuid.UUID
|
||||
if ids, err = atq.Limit(1).IDs(setContextOp(ctx, atq.ctx, "FirstID")); err != nil {
|
||||
return
|
||||
}
|
||||
if len(ids) == 0 {
|
||||
err = &NotFoundError{actiontoken.Label}
|
||||
return
|
||||
}
|
||||
return ids[0], nil
|
||||
}
|
||||
|
||||
// FirstIDX is like FirstID, but panics if an error occurs.
|
||||
func (atq *ActionTokenQuery) FirstIDX(ctx context.Context) uuid.UUID {
|
||||
id, err := atq.FirstID(ctx)
|
||||
if err != nil && !IsNotFound(err) {
|
||||
panic(err)
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
// Only returns a single ActionToken entity found by the query, ensuring it only returns one.
|
||||
// Returns a *NotSingularError when more than one ActionToken entity is found.
|
||||
// Returns a *NotFoundError when no ActionToken entities are found.
|
||||
func (atq *ActionTokenQuery) Only(ctx context.Context) (*ActionToken, error) {
|
||||
nodes, err := atq.Limit(2).All(setContextOp(ctx, atq.ctx, "Only"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch len(nodes) {
|
||||
case 1:
|
||||
return nodes[0], nil
|
||||
case 0:
|
||||
return nil, &NotFoundError{actiontoken.Label}
|
||||
default:
|
||||
return nil, &NotSingularError{actiontoken.Label}
|
||||
}
|
||||
}
|
||||
|
||||
// OnlyX is like Only, but panics if an error occurs.
|
||||
func (atq *ActionTokenQuery) OnlyX(ctx context.Context) *ActionToken {
|
||||
node, err := atq.Only(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// OnlyID is like Only, but returns the only ActionToken ID in the query.
|
||||
// Returns a *NotSingularError when more than one ActionToken ID is found.
|
||||
// Returns a *NotFoundError when no entities are found.
|
||||
func (atq *ActionTokenQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) {
|
||||
var ids []uuid.UUID
|
||||
if ids, err = atq.Limit(2).IDs(setContextOp(ctx, atq.ctx, "OnlyID")); err != nil {
|
||||
return
|
||||
}
|
||||
switch len(ids) {
|
||||
case 1:
|
||||
id = ids[0]
|
||||
case 0:
|
||||
err = &NotFoundError{actiontoken.Label}
|
||||
default:
|
||||
err = &NotSingularError{actiontoken.Label}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// OnlyIDX is like OnlyID, but panics if an error occurs.
|
||||
func (atq *ActionTokenQuery) OnlyIDX(ctx context.Context) uuid.UUID {
|
||||
id, err := atq.OnlyID(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
// All executes the query and returns a list of ActionTokens.
|
||||
func (atq *ActionTokenQuery) All(ctx context.Context) ([]*ActionToken, error) {
|
||||
ctx = setContextOp(ctx, atq.ctx, "All")
|
||||
if err := atq.prepareQuery(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
qr := querierAll[[]*ActionToken, *ActionTokenQuery]()
|
||||
return withInterceptors[[]*ActionToken](ctx, atq, qr, atq.inters)
|
||||
}
|
||||
|
||||
// AllX is like All, but panics if an error occurs.
|
||||
func (atq *ActionTokenQuery) AllX(ctx context.Context) []*ActionToken {
|
||||
nodes, err := atq.All(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return nodes
|
||||
}
|
||||
|
||||
// IDs executes the query and returns a list of ActionToken IDs.
|
||||
func (atq *ActionTokenQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error) {
|
||||
if atq.ctx.Unique == nil && atq.path != nil {
|
||||
atq.Unique(true)
|
||||
}
|
||||
ctx = setContextOp(ctx, atq.ctx, "IDs")
|
||||
if err = atq.Select(actiontoken.FieldID).Scan(ctx, &ids); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ids, nil
|
||||
}
|
||||
|
||||
// IDsX is like IDs, but panics if an error occurs.
|
||||
func (atq *ActionTokenQuery) IDsX(ctx context.Context) []uuid.UUID {
|
||||
ids, err := atq.IDs(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return ids
|
||||
}
|
||||
|
||||
// Count returns the count of the given query.
|
||||
func (atq *ActionTokenQuery) Count(ctx context.Context) (int, error) {
|
||||
ctx = setContextOp(ctx, atq.ctx, "Count")
|
||||
if err := atq.prepareQuery(ctx); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return withInterceptors[int](ctx, atq, querierCount[*ActionTokenQuery](), atq.inters)
|
||||
}
|
||||
|
||||
// CountX is like Count, but panics if an error occurs.
|
||||
func (atq *ActionTokenQuery) CountX(ctx context.Context) int {
|
||||
count, err := atq.Count(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
// Exist returns true if the query has elements in the graph.
|
||||
func (atq *ActionTokenQuery) Exist(ctx context.Context) (bool, error) {
|
||||
ctx = setContextOp(ctx, atq.ctx, "Exist")
|
||||
switch _, err := atq.FirstID(ctx); {
|
||||
case IsNotFound(err):
|
||||
return false, nil
|
||||
case err != nil:
|
||||
return false, fmt.Errorf("ent: check existence: %w", err)
|
||||
default:
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
|
||||
// ExistX is like Exist, but panics if an error occurs.
|
||||
func (atq *ActionTokenQuery) ExistX(ctx context.Context) bool {
|
||||
exist, err := atq.Exist(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return exist
|
||||
}
|
||||
|
||||
// Clone returns a duplicate of the ActionTokenQuery builder, including all associated steps. It can be
|
||||
// used to prepare common query builders and use them differently after the clone is made.
|
||||
func (atq *ActionTokenQuery) Clone() *ActionTokenQuery {
|
||||
if atq == nil {
|
||||
return nil
|
||||
}
|
||||
return &ActionTokenQuery{
|
||||
config: atq.config,
|
||||
ctx: atq.ctx.Clone(),
|
||||
order: append([]actiontoken.OrderOption{}, atq.order...),
|
||||
inters: append([]Interceptor{}, atq.inters...),
|
||||
predicates: append([]predicate.ActionToken{}, atq.predicates...),
|
||||
withUser: atq.withUser.Clone(),
|
||||
// clone intermediate query.
|
||||
sql: atq.sql.Clone(),
|
||||
path: atq.path,
|
||||
}
|
||||
}
|
||||
|
||||
// WithUser tells the query-builder to eager-load the nodes that are connected to
|
||||
// the "user" edge. The optional arguments are used to configure the query builder of the edge.
|
||||
func (atq *ActionTokenQuery) WithUser(opts ...func(*UserQuery)) *ActionTokenQuery {
|
||||
query := (&UserClient{config: atq.config}).Query()
|
||||
for _, opt := range opts {
|
||||
opt(query)
|
||||
}
|
||||
atq.withUser = query
|
||||
return atq
|
||||
}
|
||||
|
||||
// GroupBy is used to group vertices by one or more fields/columns.
|
||||
// It is often used with aggregate functions, like: count, max, mean, min, sum.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// var v []struct {
|
||||
// UserID uuid.UUID `json:"user_id,omitempty"`
|
||||
// Count int `json:"count,omitempty"`
|
||||
// }
|
||||
//
|
||||
// client.ActionToken.Query().
|
||||
// GroupBy(actiontoken.FieldUserID).
|
||||
// Aggregate(ent.Count()).
|
||||
// Scan(ctx, &v)
|
||||
func (atq *ActionTokenQuery) GroupBy(field string, fields ...string) *ActionTokenGroupBy {
|
||||
atq.ctx.Fields = append([]string{field}, fields...)
|
||||
grbuild := &ActionTokenGroupBy{build: atq}
|
||||
grbuild.flds = &atq.ctx.Fields
|
||||
grbuild.label = actiontoken.Label
|
||||
grbuild.scan = grbuild.Scan
|
||||
return grbuild
|
||||
}
|
||||
|
||||
// Select allows the selection one or more fields/columns for the given query,
|
||||
// instead of selecting all fields in the entity.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// var v []struct {
|
||||
// UserID uuid.UUID `json:"user_id,omitempty"`
|
||||
// }
|
||||
//
|
||||
// client.ActionToken.Query().
|
||||
// Select(actiontoken.FieldUserID).
|
||||
// Scan(ctx, &v)
|
||||
func (atq *ActionTokenQuery) Select(fields ...string) *ActionTokenSelect {
|
||||
atq.ctx.Fields = append(atq.ctx.Fields, fields...)
|
||||
sbuild := &ActionTokenSelect{ActionTokenQuery: atq}
|
||||
sbuild.label = actiontoken.Label
|
||||
sbuild.flds, sbuild.scan = &atq.ctx.Fields, sbuild.Scan
|
||||
return sbuild
|
||||
}
|
||||
|
||||
// Aggregate returns a ActionTokenSelect configured with the given aggregations.
|
||||
func (atq *ActionTokenQuery) Aggregate(fns ...AggregateFunc) *ActionTokenSelect {
|
||||
return atq.Select().Aggregate(fns...)
|
||||
}
|
||||
|
||||
func (atq *ActionTokenQuery) prepareQuery(ctx context.Context) error {
|
||||
for _, inter := range atq.inters {
|
||||
if inter == nil {
|
||||
return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
|
||||
}
|
||||
if trv, ok := inter.(Traverser); ok {
|
||||
if err := trv.Traverse(ctx, atq); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, f := range atq.ctx.Fields {
|
||||
if !actiontoken.ValidColumn(f) {
|
||||
return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
|
||||
}
|
||||
}
|
||||
if atq.path != nil {
|
||||
prev, err := atq.path(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
atq.sql = prev
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (atq *ActionTokenQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*ActionToken, error) {
|
||||
var (
|
||||
nodes = []*ActionToken{}
|
||||
_spec = atq.querySpec()
|
||||
loadedTypes = [1]bool{
|
||||
atq.withUser != nil,
|
||||
}
|
||||
)
|
||||
_spec.ScanValues = func(columns []string) ([]any, error) {
|
||||
return (*ActionToken).scanValues(nil, columns)
|
||||
}
|
||||
_spec.Assign = func(columns []string, values []any) error {
|
||||
node := &ActionToken{config: atq.config}
|
||||
nodes = append(nodes, node)
|
||||
node.Edges.loadedTypes = loadedTypes
|
||||
return node.assignValues(columns, values)
|
||||
}
|
||||
for i := range hooks {
|
||||
hooks[i](ctx, _spec)
|
||||
}
|
||||
if err := sqlgraph.QueryNodes(ctx, atq.driver, _spec); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(nodes) == 0 {
|
||||
return nodes, nil
|
||||
}
|
||||
if query := atq.withUser; query != nil {
|
||||
if err := atq.loadUser(ctx, query, nodes, nil,
|
||||
func(n *ActionToken, e *User) { n.Edges.User = e }); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return nodes, nil
|
||||
}
|
||||
|
||||
func (atq *ActionTokenQuery) loadUser(ctx context.Context, query *UserQuery, nodes []*ActionToken, init func(*ActionToken), assign func(*ActionToken, *User)) error {
|
||||
ids := make([]uuid.UUID, 0, len(nodes))
|
||||
nodeids := make(map[uuid.UUID][]*ActionToken)
|
||||
for i := range nodes {
|
||||
fk := nodes[i].UserID
|
||||
if _, ok := nodeids[fk]; !ok {
|
||||
ids = append(ids, fk)
|
||||
}
|
||||
nodeids[fk] = append(nodeids[fk], nodes[i])
|
||||
}
|
||||
if len(ids) == 0 {
|
||||
return nil
|
||||
}
|
||||
query.Where(user.IDIn(ids...))
|
||||
neighbors, err := query.All(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, n := range neighbors {
|
||||
nodes, ok := nodeids[n.ID]
|
||||
if !ok {
|
||||
return fmt.Errorf(`unexpected foreign-key "user_id" returned %v`, n.ID)
|
||||
}
|
||||
for i := range nodes {
|
||||
assign(nodes[i], n)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (atq *ActionTokenQuery) sqlCount(ctx context.Context) (int, error) {
|
||||
_spec := atq.querySpec()
|
||||
_spec.Node.Columns = atq.ctx.Fields
|
||||
if len(atq.ctx.Fields) > 0 {
|
||||
_spec.Unique = atq.ctx.Unique != nil && *atq.ctx.Unique
|
||||
}
|
||||
return sqlgraph.CountNodes(ctx, atq.driver, _spec)
|
||||
}
|
||||
|
||||
func (atq *ActionTokenQuery) querySpec() *sqlgraph.QuerySpec {
|
||||
_spec := sqlgraph.NewQuerySpec(actiontoken.Table, actiontoken.Columns, sqlgraph.NewFieldSpec(actiontoken.FieldID, field.TypeUUID))
|
||||
_spec.From = atq.sql
|
||||
if unique := atq.ctx.Unique; unique != nil {
|
||||
_spec.Unique = *unique
|
||||
} else if atq.path != nil {
|
||||
_spec.Unique = true
|
||||
}
|
||||
if fields := atq.ctx.Fields; len(fields) > 0 {
|
||||
_spec.Node.Columns = make([]string, 0, len(fields))
|
||||
_spec.Node.Columns = append(_spec.Node.Columns, actiontoken.FieldID)
|
||||
for i := range fields {
|
||||
if fields[i] != actiontoken.FieldID {
|
||||
_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
|
||||
}
|
||||
}
|
||||
if atq.withUser != nil {
|
||||
_spec.Node.AddColumnOnce(actiontoken.FieldUserID)
|
||||
}
|
||||
}
|
||||
if ps := atq.predicates; len(ps) > 0 {
|
||||
_spec.Predicate = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
if limit := atq.ctx.Limit; limit != nil {
|
||||
_spec.Limit = *limit
|
||||
}
|
||||
if offset := atq.ctx.Offset; offset != nil {
|
||||
_spec.Offset = *offset
|
||||
}
|
||||
if ps := atq.order; len(ps) > 0 {
|
||||
_spec.Order = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
return _spec
|
||||
}
|
||||
|
||||
func (atq *ActionTokenQuery) sqlQuery(ctx context.Context) *sql.Selector {
|
||||
builder := sql.Dialect(atq.driver.Dialect())
|
||||
t1 := builder.Table(actiontoken.Table)
|
||||
columns := atq.ctx.Fields
|
||||
if len(columns) == 0 {
|
||||
columns = actiontoken.Columns
|
||||
}
|
||||
selector := builder.Select(t1.Columns(columns...)...).From(t1)
|
||||
if atq.sql != nil {
|
||||
selector = atq.sql
|
||||
selector.Select(selector.Columns(columns...)...)
|
||||
}
|
||||
if atq.ctx.Unique != nil && *atq.ctx.Unique {
|
||||
selector.Distinct()
|
||||
}
|
||||
for _, p := range atq.predicates {
|
||||
p(selector)
|
||||
}
|
||||
for _, p := range atq.order {
|
||||
p(selector)
|
||||
}
|
||||
if offset := atq.ctx.Offset; offset != nil {
|
||||
// limit is mandatory for offset clause. We start
|
||||
// with default value, and override it below if needed.
|
||||
selector.Offset(*offset).Limit(math.MaxInt32)
|
||||
}
|
||||
if limit := atq.ctx.Limit; limit != nil {
|
||||
selector.Limit(*limit)
|
||||
}
|
||||
return selector
|
||||
}
|
||||
|
||||
// ActionTokenGroupBy is the group-by builder for ActionToken entities.
|
||||
type ActionTokenGroupBy struct {
|
||||
selector
|
||||
build *ActionTokenQuery
|
||||
}
|
||||
|
||||
// Aggregate adds the given aggregation functions to the group-by query.
|
||||
func (atgb *ActionTokenGroupBy) Aggregate(fns ...AggregateFunc) *ActionTokenGroupBy {
|
||||
atgb.fns = append(atgb.fns, fns...)
|
||||
return atgb
|
||||
}
|
||||
|
||||
// Scan applies the selector query and scans the result into the given value.
|
||||
func (atgb *ActionTokenGroupBy) Scan(ctx context.Context, v any) error {
|
||||
ctx = setContextOp(ctx, atgb.build.ctx, "GroupBy")
|
||||
if err := atgb.build.prepareQuery(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
return scanWithInterceptors[*ActionTokenQuery, *ActionTokenGroupBy](ctx, atgb.build, atgb, atgb.build.inters, v)
|
||||
}
|
||||
|
||||
func (atgb *ActionTokenGroupBy) sqlScan(ctx context.Context, root *ActionTokenQuery, v any) error {
|
||||
selector := root.sqlQuery(ctx).Select()
|
||||
aggregation := make([]string, 0, len(atgb.fns))
|
||||
for _, fn := range atgb.fns {
|
||||
aggregation = append(aggregation, fn(selector))
|
||||
}
|
||||
if len(selector.SelectedColumns()) == 0 {
|
||||
columns := make([]string, 0, len(*atgb.flds)+len(atgb.fns))
|
||||
for _, f := range *atgb.flds {
|
||||
columns = append(columns, selector.C(f))
|
||||
}
|
||||
columns = append(columns, aggregation...)
|
||||
selector.Select(columns...)
|
||||
}
|
||||
selector.GroupBy(selector.Columns(*atgb.flds...)...)
|
||||
if err := selector.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
rows := &sql.Rows{}
|
||||
query, args := selector.Query()
|
||||
if err := atgb.build.driver.Query(ctx, query, args, rows); err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
return sql.ScanSlice(rows, v)
|
||||
}
|
||||
|
||||
// ActionTokenSelect is the builder for selecting fields of ActionToken entities.
|
||||
type ActionTokenSelect struct {
|
||||
*ActionTokenQuery
|
||||
selector
|
||||
}
|
||||
|
||||
// Aggregate adds the given aggregation functions to the selector query.
|
||||
func (ats *ActionTokenSelect) Aggregate(fns ...AggregateFunc) *ActionTokenSelect {
|
||||
ats.fns = append(ats.fns, fns...)
|
||||
return ats
|
||||
}
|
||||
|
||||
// Scan applies the selector query and scans the result into the given value.
|
||||
func (ats *ActionTokenSelect) Scan(ctx context.Context, v any) error {
|
||||
ctx = setContextOp(ctx, ats.ctx, "Select")
|
||||
if err := ats.prepareQuery(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
return scanWithInterceptors[*ActionTokenQuery, *ActionTokenSelect](ctx, ats.ActionTokenQuery, ats, ats.inters, v)
|
||||
}
|
||||
|
||||
func (ats *ActionTokenSelect) sqlScan(ctx context.Context, root *ActionTokenQuery, v any) error {
|
||||
selector := root.sqlQuery(ctx)
|
||||
aggregation := make([]string, 0, len(ats.fns))
|
||||
for _, fn := range ats.fns {
|
||||
aggregation = append(aggregation, fn(selector))
|
||||
}
|
||||
switch n := len(*ats.selector.flds); {
|
||||
case n == 0 && len(aggregation) > 0:
|
||||
selector.Select(aggregation...)
|
||||
case n != 0 && len(aggregation) > 0:
|
||||
selector.AppendSelect(aggregation...)
|
||||
}
|
||||
rows := &sql.Rows{}
|
||||
query, args := selector.Query()
|
||||
if err := ats.driver.Query(ctx, query, args, rows); err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
return sql.ScanSlice(rows, v)
|
||||
}
|
||||
@@ -1,406 +0,0 @@
|
||||
// Code generated by ent, DO NOT EDIT.
|
||||
|
||||
package ent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/actiontoken"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/user"
|
||||
)
|
||||
|
||||
// ActionTokenUpdate is the builder for updating ActionToken entities.
|
||||
type ActionTokenUpdate struct {
|
||||
config
|
||||
hooks []Hook
|
||||
mutation *ActionTokenMutation
|
||||
}
|
||||
|
||||
// Where appends a list predicates to the ActionTokenUpdate builder.
|
||||
func (atu *ActionTokenUpdate) Where(ps ...predicate.ActionToken) *ActionTokenUpdate {
|
||||
atu.mutation.Where(ps...)
|
||||
return atu
|
||||
}
|
||||
|
||||
// SetUserID sets the "user_id" field.
|
||||
func (atu *ActionTokenUpdate) SetUserID(u uuid.UUID) *ActionTokenUpdate {
|
||||
atu.mutation.SetUserID(u)
|
||||
return atu
|
||||
}
|
||||
|
||||
// SetNillableUserID sets the "user_id" field if the given value is not nil.
|
||||
func (atu *ActionTokenUpdate) SetNillableUserID(u *uuid.UUID) *ActionTokenUpdate {
|
||||
if u != nil {
|
||||
atu.SetUserID(*u)
|
||||
}
|
||||
return atu
|
||||
}
|
||||
|
||||
// SetUpdatedAt sets the "updated_at" field.
|
||||
func (atu *ActionTokenUpdate) SetUpdatedAt(t time.Time) *ActionTokenUpdate {
|
||||
atu.mutation.SetUpdatedAt(t)
|
||||
return atu
|
||||
}
|
||||
|
||||
// SetAction sets the "action" field.
|
||||
func (atu *ActionTokenUpdate) SetAction(a actiontoken.Action) *ActionTokenUpdate {
|
||||
atu.mutation.SetAction(a)
|
||||
return atu
|
||||
}
|
||||
|
||||
// SetNillableAction sets the "action" field if the given value is not nil.
|
||||
func (atu *ActionTokenUpdate) SetNillableAction(a *actiontoken.Action) *ActionTokenUpdate {
|
||||
if a != nil {
|
||||
atu.SetAction(*a)
|
||||
}
|
||||
return atu
|
||||
}
|
||||
|
||||
// SetToken sets the "token" field.
|
||||
func (atu *ActionTokenUpdate) SetToken(b []byte) *ActionTokenUpdate {
|
||||
atu.mutation.SetToken(b)
|
||||
return atu
|
||||
}
|
||||
|
||||
// SetUser sets the "user" edge to the User entity.
|
||||
func (atu *ActionTokenUpdate) SetUser(u *User) *ActionTokenUpdate {
|
||||
return atu.SetUserID(u.ID)
|
||||
}
|
||||
|
||||
// Mutation returns the ActionTokenMutation object of the builder.
|
||||
func (atu *ActionTokenUpdate) Mutation() *ActionTokenMutation {
|
||||
return atu.mutation
|
||||
}
|
||||
|
||||
// ClearUser clears the "user" edge to the User entity.
|
||||
func (atu *ActionTokenUpdate) ClearUser() *ActionTokenUpdate {
|
||||
atu.mutation.ClearUser()
|
||||
return atu
|
||||
}
|
||||
|
||||
// Save executes the query and returns the number of nodes affected by the update operation.
|
||||
func (atu *ActionTokenUpdate) Save(ctx context.Context) (int, error) {
|
||||
atu.defaults()
|
||||
return withHooks(ctx, atu.sqlSave, atu.mutation, atu.hooks)
|
||||
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
func (atu *ActionTokenUpdate) SaveX(ctx context.Context) int {
|
||||
affected, err := atu.Save(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return affected
|
||||
}
|
||||
|
||||
// Exec executes the query.
|
||||
func (atu *ActionTokenUpdate) Exec(ctx context.Context) error {
|
||||
_, err := atu.Save(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
func (atu *ActionTokenUpdate) ExecX(ctx context.Context) {
|
||||
if err := atu.Exec(ctx); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// defaults sets the default values of the builder before save.
|
||||
func (atu *ActionTokenUpdate) defaults() {
|
||||
if _, ok := atu.mutation.UpdatedAt(); !ok {
|
||||
v := actiontoken.UpdateDefaultUpdatedAt()
|
||||
atu.mutation.SetUpdatedAt(v)
|
||||
}
|
||||
}
|
||||
|
||||
// check runs all checks and user-defined validators on the builder.
|
||||
func (atu *ActionTokenUpdate) check() error {
|
||||
if v, ok := atu.mutation.Action(); ok {
|
||||
if err := actiontoken.ActionValidator(v); err != nil {
|
||||
return &ValidationError{Name: "action", err: fmt.Errorf(`ent: validator failed for field "ActionToken.action": %w`, err)}
|
||||
}
|
||||
}
|
||||
if _, ok := atu.mutation.UserID(); atu.mutation.UserCleared() && !ok {
|
||||
return errors.New(`ent: clearing a required unique edge "ActionToken.user"`)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (atu *ActionTokenUpdate) sqlSave(ctx context.Context) (n int, err error) {
|
||||
if err := atu.check(); err != nil {
|
||||
return n, err
|
||||
}
|
||||
_spec := sqlgraph.NewUpdateSpec(actiontoken.Table, actiontoken.Columns, sqlgraph.NewFieldSpec(actiontoken.FieldID, field.TypeUUID))
|
||||
if ps := atu.mutation.predicates; len(ps) > 0 {
|
||||
_spec.Predicate = func(selector *sql.Selector) {
|
||||
for i := range ps {
|
||||
ps[i](selector)
|
||||
}
|
||||
}
|
||||
}
|
||||
if value, ok := atu.mutation.UpdatedAt(); ok {
|
||||
_spec.SetField(actiontoken.FieldUpdatedAt, field.TypeTime, value)
|
||||
}
|
||||
if value, ok := atu.mutation.Action(); ok {
|
||||
_spec.SetField(actiontoken.FieldAction, field.TypeEnum, value)
|
||||
}
|
||||
if value, ok := atu.mutation.Token(); ok {
|
||||
_spec.SetField(actiontoken.FieldToken, field.TypeBytes, value)
|
||||
}
|
||||
if atu.mutation.UserCleared() {
|
||||
edge := &sqlgraph.EdgeSpec{
|
||||
Rel: sqlgraph.M2O,
|
||||
Inverse: true,
|
||||
Table: actiontoken.UserTable,
|
||||
Columns: []string{actiontoken.UserColumn},
|
||||
Bidi: false,
|
||||
Target: &sqlgraph.EdgeTarget{
|
||||
IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeUUID),
|
||||
},
|
||||
}
|
||||
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
|
||||
}
|
||||
if nodes := atu.mutation.UserIDs(); len(nodes) > 0 {
|
||||
edge := &sqlgraph.EdgeSpec{
|
||||
Rel: sqlgraph.M2O,
|
||||
Inverse: true,
|
||||
Table: actiontoken.UserTable,
|
||||
Columns: []string{actiontoken.UserColumn},
|
||||
Bidi: false,
|
||||
Target: &sqlgraph.EdgeTarget{
|
||||
IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeUUID),
|
            },
        }
        for _, k := range nodes {
            edge.Target.Nodes = append(edge.Target.Nodes, k)
        }
        _spec.Edges.Add = append(_spec.Edges.Add, edge)
    }
    if n, err = sqlgraph.UpdateNodes(ctx, atu.driver, _spec); err != nil {
        if _, ok := err.(*sqlgraph.NotFoundError); ok {
            err = &NotFoundError{actiontoken.Label}
        } else if sqlgraph.IsConstraintError(err) {
            err = &ConstraintError{msg: err.Error(), wrap: err}
        }
        return 0, err
    }
    atu.mutation.done = true
    return n, nil
}

// ActionTokenUpdateOne is the builder for updating a single ActionToken entity.
type ActionTokenUpdateOne struct {
    config
    fields   []string
    hooks    []Hook
    mutation *ActionTokenMutation
}

// SetUserID sets the "user_id" field.
func (atuo *ActionTokenUpdateOne) SetUserID(u uuid.UUID) *ActionTokenUpdateOne {
    atuo.mutation.SetUserID(u)
    return atuo
}

// SetNillableUserID sets the "user_id" field if the given value is not nil.
func (atuo *ActionTokenUpdateOne) SetNillableUserID(u *uuid.UUID) *ActionTokenUpdateOne {
    if u != nil {
        atuo.SetUserID(*u)
    }
    return atuo
}

// SetUpdatedAt sets the "updated_at" field.
func (atuo *ActionTokenUpdateOne) SetUpdatedAt(t time.Time) *ActionTokenUpdateOne {
    atuo.mutation.SetUpdatedAt(t)
    return atuo
}

// SetAction sets the "action" field.
func (atuo *ActionTokenUpdateOne) SetAction(a actiontoken.Action) *ActionTokenUpdateOne {
    atuo.mutation.SetAction(a)
    return atuo
}

// SetNillableAction sets the "action" field if the given value is not nil.
func (atuo *ActionTokenUpdateOne) SetNillableAction(a *actiontoken.Action) *ActionTokenUpdateOne {
    if a != nil {
        atuo.SetAction(*a)
    }
    return atuo
}

// SetToken sets the "token" field.
func (atuo *ActionTokenUpdateOne) SetToken(b []byte) *ActionTokenUpdateOne {
    atuo.mutation.SetToken(b)
    return atuo
}

// SetUser sets the "user" edge to the User entity.
func (atuo *ActionTokenUpdateOne) SetUser(u *User) *ActionTokenUpdateOne {
    return atuo.SetUserID(u.ID)
}

// Mutation returns the ActionTokenMutation object of the builder.
func (atuo *ActionTokenUpdateOne) Mutation() *ActionTokenMutation {
    return atuo.mutation
}

// ClearUser clears the "user" edge to the User entity.
func (atuo *ActionTokenUpdateOne) ClearUser() *ActionTokenUpdateOne {
    atuo.mutation.ClearUser()
    return atuo
}

// Where appends a list predicates to the ActionTokenUpdate builder.
func (atuo *ActionTokenUpdateOne) Where(ps ...predicate.ActionToken) *ActionTokenUpdateOne {
    atuo.mutation.Where(ps...)
    return atuo
}

// Select allows selecting one or more fields (columns) of the returned entity.
// The default is selecting all fields defined in the entity schema.
func (atuo *ActionTokenUpdateOne) Select(field string, fields ...string) *ActionTokenUpdateOne {
    atuo.fields = append([]string{field}, fields...)
    return atuo
}

// Save executes the query and returns the updated ActionToken entity.
func (atuo *ActionTokenUpdateOne) Save(ctx context.Context) (*ActionToken, error) {
    atuo.defaults()
    return withHooks(ctx, atuo.sqlSave, atuo.mutation, atuo.hooks)
}

// SaveX is like Save, but panics if an error occurs.
func (atuo *ActionTokenUpdateOne) SaveX(ctx context.Context) *ActionToken {
    node, err := atuo.Save(ctx)
    if err != nil {
        panic(err)
    }
    return node
}

// Exec executes the query on the entity.
func (atuo *ActionTokenUpdateOne) Exec(ctx context.Context) error {
    _, err := atuo.Save(ctx)
    return err
}

// ExecX is like Exec, but panics if an error occurs.
func (atuo *ActionTokenUpdateOne) ExecX(ctx context.Context) {
    if err := atuo.Exec(ctx); err != nil {
        panic(err)
    }
}

// defaults sets the default values of the builder before save.
func (atuo *ActionTokenUpdateOne) defaults() {
    if _, ok := atuo.mutation.UpdatedAt(); !ok {
        v := actiontoken.UpdateDefaultUpdatedAt()
        atuo.mutation.SetUpdatedAt(v)
    }
}

// check runs all checks and user-defined validators on the builder.
func (atuo *ActionTokenUpdateOne) check() error {
    if v, ok := atuo.mutation.Action(); ok {
        if err := actiontoken.ActionValidator(v); err != nil {
            return &ValidationError{Name: "action", err: fmt.Errorf(`ent: validator failed for field "ActionToken.action": %w`, err)}
        }
    }
    if _, ok := atuo.mutation.UserID(); atuo.mutation.UserCleared() && !ok {
        return errors.New(`ent: clearing a required unique edge "ActionToken.user"`)
    }
    return nil
}

func (atuo *ActionTokenUpdateOne) sqlSave(ctx context.Context) (_node *ActionToken, err error) {
    if err := atuo.check(); err != nil {
        return _node, err
    }
    _spec := sqlgraph.NewUpdateSpec(actiontoken.Table, actiontoken.Columns, sqlgraph.NewFieldSpec(actiontoken.FieldID, field.TypeUUID))
    id, ok := atuo.mutation.ID()
    if !ok {
        return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "ActionToken.id" for update`)}
    }
    _spec.Node.ID.Value = id
    if fields := atuo.fields; len(fields) > 0 {
        _spec.Node.Columns = make([]string, 0, len(fields))
        _spec.Node.Columns = append(_spec.Node.Columns, actiontoken.FieldID)
        for _, f := range fields {
            if !actiontoken.ValidColumn(f) {
                return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
            }
            if f != actiontoken.FieldID {
                _spec.Node.Columns = append(_spec.Node.Columns, f)
            }
        }
    }
    if ps := atuo.mutation.predicates; len(ps) > 0 {
        _spec.Predicate = func(selector *sql.Selector) {
            for i := range ps {
                ps[i](selector)
            }
        }
    }
    if value, ok := atuo.mutation.UpdatedAt(); ok {
        _spec.SetField(actiontoken.FieldUpdatedAt, field.TypeTime, value)
    }
    if value, ok := atuo.mutation.Action(); ok {
        _spec.SetField(actiontoken.FieldAction, field.TypeEnum, value)
    }
    if value, ok := atuo.mutation.Token(); ok {
        _spec.SetField(actiontoken.FieldToken, field.TypeBytes, value)
    }
    if atuo.mutation.UserCleared() {
        edge := &sqlgraph.EdgeSpec{
            Rel: sqlgraph.M2O,
            Inverse: true,
            Table: actiontoken.UserTable,
            Columns: []string{actiontoken.UserColumn},
            Bidi: false,
            Target: &sqlgraph.EdgeTarget{
                IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeUUID),
            },
        }
        _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
    }
    if nodes := atuo.mutation.UserIDs(); len(nodes) > 0 {
        edge := &sqlgraph.EdgeSpec{
            Rel: sqlgraph.M2O,
            Inverse: true,
            Table: actiontoken.UserTable,
            Columns: []string{actiontoken.UserColumn},
            Bidi: false,
            Target: &sqlgraph.EdgeTarget{
                IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeUUID),
            },
        }
        for _, k := range nodes {
            edge.Target.Nodes = append(edge.Target.Nodes, k)
        }
        _spec.Edges.Add = append(_spec.Edges.Add, edge)
    }
    _node = &ActionToken{config: atuo.config}
    _spec.Assign = _node.assignValues
    _spec.ScanValues = _node.scanValues
    if err = sqlgraph.UpdateNode(ctx, atuo.driver, _spec); err != nil {
        if _, ok := err.(*sqlgraph.NotFoundError); ok {
            err = &NotFoundError{actiontoken.Label}
        } else if sqlgraph.IsConstraintError(err) {
            err = &ConstraintError{msg: err.Error(), wrap: err}
        }
        return nil, err
    }
    atuo.mutation.done = true
    return _node, nil
}
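Editor's note: for orientation, the generated builder above is normally driven through the package's Client. The following is a minimal usage sketch and is not part of this diff; the client variable, the context, and the new token bytes are assumptions, and the IsNotFound/IsConstraintError helpers are the ones ent generates alongside the types shown here.

// Sketch only -- assumes an opened *ent.Client (generated package under
// backend/internal/data/ent) and the UUID of an existing ActionToken.
func rotateActionToken(ctx context.Context, client *ent.Client, id uuid.UUID, token []byte) error {
    // UpdateOneID yields the ActionTokenUpdateOne builder defined above;
    // Save stamps updated_at via defaults() and validates via check().
    _, err := client.ActionToken.
        UpdateOneID(id).
        SetToken(token).
        Save(ctx)
    switch {
    case ent.IsNotFound(err):
        return fmt.Errorf("action token %s not found: %w", id, err)
    case ent.IsConstraintError(err):
        return fmt.Errorf("constraint violation: %w", err)
    }
    return err
}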
@@ -10,9 +10,9 @@ import (
    "entgo.io/ent"
    "entgo.io/ent/dialect/sql"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
    "github.com/hay-kot/homebox/backend/internal/data/ent/document"
    "github.com/hay-kot/homebox/backend/internal/data/ent/item"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
)

// Attachment is the model entity for the Attachment schema.
@@ -50,12 +50,10 @@ type AttachmentEdges struct {
// ItemOrErr returns the Item value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AttachmentEdges) ItemOrErr() (*Item, error) {
    if e.loadedTypes[0] {
        if e.Item == nil {
            // Edge was loaded but was not found.
            return nil, &NotFoundError{label: item.Label}
        }
    if e.Item != nil {
        return e.Item, nil
    } else if e.loadedTypes[0] {
        return nil, &NotFoundError{label: item.Label}
    }
    return nil, &NotLoadedError{edge: "item"}
}
@@ -63,12 +61,10 @@ func (e AttachmentEdges) ItemOrErr() (*Item, error) {
// DocumentOrErr returns the Document value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AttachmentEdges) DocumentOrErr() (*Document, error) {
    if e.loadedTypes[1] {
        if e.Document == nil {
            // Edge was loaded but was not found.
            return nil, &NotFoundError{label: document.Label}
        }
    if e.Document != nil {
        return e.Document, nil
    } else if e.loadedTypes[1] {
        return nil, &NotFoundError{label: document.Label}
    }
    return nil, &NotLoadedError{edge: "document"}
}

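Editor's note: the rewritten OrErr accessors above check the edge pointer before the loadedTypes flag, but the caller-facing contract is unchanged: nil error when the edge is present, *NotFoundError when it was eager-loaded yet missing, *NotLoadedError when it was never loaded. A small consumer sketch, not from this diff; the attachment value is assumed to come from a query that may or may not have used WithItem:

// Sketch only; ent is the generated package under backend/internal/data/ent,
// and a is an *ent.Attachment obtained from some query.
func attachmentItemID(a *ent.Attachment) (uuid.UUID, error) {
    item, err := a.Edges.ItemOrErr()
    switch {
    case err == nil:
        return item.ID, nil // edge was eager-loaded and the item exists
    case ent.IsNotFound(err):
        return uuid.Nil, fmt.Errorf("attachment %s points at a missing item", a.ID)
    default:
        // *ent.NotLoadedError: the query did not eager-load the "item" edge
        return uuid.Nil, err
    }
}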
@@ -8,7 +8,7 @@ import (
    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// ID filters vertices based on their ID field.

@@ -11,9 +11,9 @@ import (
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
    "github.com/hay-kot/homebox/backend/internal/data/ent/document"
    "github.com/hay-kot/homebox/backend/internal/data/ent/item"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
)

// AttachmentCreate is the builder for creating a Attachment entity.
@@ -191,10 +191,10 @@ func (ac *AttachmentCreate) check() error {
    if _, ok := ac.mutation.Primary(); !ok {
        return &ValidationError{Name: "primary", err: errors.New(`ent: missing required field "Attachment.primary"`)}
    }
    if _, ok := ac.mutation.ItemID(); !ok {
    if len(ac.mutation.ItemIDs()) == 0 {
        return &ValidationError{Name: "item", err: errors.New(`ent: missing required edge "Attachment.item"`)}
    }
    if _, ok := ac.mutation.DocumentID(); !ok {
    if len(ac.mutation.DocumentIDs()) == 0 {
        return &ValidationError{Name: "document", err: errors.New(`ent: missing required edge "Attachment.document"`)}
    }
    return nil

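Editor's note: the create-side check() above now inspects the set of edge IDs rather than the single-ID accessor, so the required item/document edges are recognized however they were set on the builder. A hedged sketch of driving it, not from this diff; client, itemID, documentID, and the IsValidationError helper are assumptions about the surrounding generated package, and fields not set here are assumed to have schema defaults:

// Sketch only; attaches an existing document to an existing item.
func attachDocument(ctx context.Context, client *ent.Client, itemID, documentID uuid.UUID) (*ent.Attachment, error) {
    att, err := client.Attachment.
        Create().
        SetItemID(itemID).         // satisfies the required "item" edge in check()
        SetDocumentID(documentID). // satisfies the required "document" edge
        SetPrimary(false).
        Save(ctx)
    if ent.IsValidationError(err) {
        return nil, fmt.Errorf("attachment failed validation: %w", err)
    }
    return att, err
}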
@@ -8,8 +8,8 @@ import (
    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// AttachmentDelete is the builder for deleting a Attachment entity.

@@ -7,14 +7,15 @@ import (
    "fmt"
    "math"

    "entgo.io/ent"
    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
    "github.com/hay-kot/homebox/backend/internal/data/ent/document"
    "github.com/hay-kot/homebox/backend/internal/data/ent/item"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// AttachmentQuery is the builder for querying Attachment entities.
@@ -110,7 +111,7 @@ func (aq *AttachmentQuery) QueryDocument() *DocumentQuery {
// First returns the first Attachment entity from the query.
// Returns a *NotFoundError when no Attachment was found.
func (aq *AttachmentQuery) First(ctx context.Context) (*Attachment, error) {
    nodes, err := aq.Limit(1).All(setContextOp(ctx, aq.ctx, "First"))
    nodes, err := aq.Limit(1).All(setContextOp(ctx, aq.ctx, ent.OpQueryFirst))
    if err != nil {
        return nil, err
    }
@@ -133,7 +134,7 @@ func (aq *AttachmentQuery) FirstX(ctx context.Context) *Attachment {
// Returns a *NotFoundError when no Attachment ID was found.
func (aq *AttachmentQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) {
    var ids []uuid.UUID
    if ids, err = aq.Limit(1).IDs(setContextOp(ctx, aq.ctx, "FirstID")); err != nil {
    if ids, err = aq.Limit(1).IDs(setContextOp(ctx, aq.ctx, ent.OpQueryFirstID)); err != nil {
        return
    }
    if len(ids) == 0 {
@@ -156,7 +157,7 @@ func (aq *AttachmentQuery) FirstIDX(ctx context.Context) uuid.UUID {
// Returns a *NotSingularError when more than one Attachment entity is found.
// Returns a *NotFoundError when no Attachment entities are found.
func (aq *AttachmentQuery) Only(ctx context.Context) (*Attachment, error) {
    nodes, err := aq.Limit(2).All(setContextOp(ctx, aq.ctx, "Only"))
    nodes, err := aq.Limit(2).All(setContextOp(ctx, aq.ctx, ent.OpQueryOnly))
    if err != nil {
        return nil, err
    }
@@ -184,7 +185,7 @@ func (aq *AttachmentQuery) OnlyX(ctx context.Context) *Attachment {
// Returns a *NotFoundError when no entities are found.
func (aq *AttachmentQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) {
    var ids []uuid.UUID
    if ids, err = aq.Limit(2).IDs(setContextOp(ctx, aq.ctx, "OnlyID")); err != nil {
    if ids, err = aq.Limit(2).IDs(setContextOp(ctx, aq.ctx, ent.OpQueryOnlyID)); err != nil {
        return
    }
    switch len(ids) {
@@ -209,7 +210,7 @@ func (aq *AttachmentQuery) OnlyIDX(ctx context.Context) uuid.UUID {

// All executes the query and returns a list of Attachments.
func (aq *AttachmentQuery) All(ctx context.Context) ([]*Attachment, error) {
    ctx = setContextOp(ctx, aq.ctx, "All")
    ctx = setContextOp(ctx, aq.ctx, ent.OpQueryAll)
    if err := aq.prepareQuery(ctx); err != nil {
        return nil, err
    }
@@ -231,7 +232,7 @@ func (aq *AttachmentQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error)
    if aq.ctx.Unique == nil && aq.path != nil {
        aq.Unique(true)
    }
    ctx = setContextOp(ctx, aq.ctx, "IDs")
    ctx = setContextOp(ctx, aq.ctx, ent.OpQueryIDs)
    if err = aq.Select(attachment.FieldID).Scan(ctx, &ids); err != nil {
        return nil, err
    }
@@ -249,7 +250,7 @@ func (aq *AttachmentQuery) IDsX(ctx context.Context) []uuid.UUID {

// Count returns the count of the given query.
func (aq *AttachmentQuery) Count(ctx context.Context) (int, error) {
    ctx = setContextOp(ctx, aq.ctx, "Count")
    ctx = setContextOp(ctx, aq.ctx, ent.OpQueryCount)
    if err := aq.prepareQuery(ctx); err != nil {
        return 0, err
    }
@@ -267,7 +268,7 @@ func (aq *AttachmentQuery) CountX(ctx context.Context) int {

// Exist returns true if the query has elements in the graph.
func (aq *AttachmentQuery) Exist(ctx context.Context) (bool, error) {
    ctx = setContextOp(ctx, aq.ctx, "Exist")
    ctx = setContextOp(ctx, aq.ctx, ent.OpQueryExist)
    switch _, err := aq.FirstID(ctx); {
    case IsNotFound(err):
        return false, nil
@@ -612,7 +613,7 @@ func (agb *AttachmentGroupBy) Aggregate(fns ...AggregateFunc) *AttachmentGroupBy

// Scan applies the selector query and scans the result into the given value.
func (agb *AttachmentGroupBy) Scan(ctx context.Context, v any) error {
    ctx = setContextOp(ctx, agb.build.ctx, "GroupBy")
    ctx = setContextOp(ctx, agb.build.ctx, ent.OpQueryGroupBy)
    if err := agb.build.prepareQuery(ctx); err != nil {
        return err
    }
@@ -660,7 +661,7 @@ func (as *AttachmentSelect) Aggregate(fns ...AggregateFunc) *AttachmentSelect {

// Scan applies the selector query and scans the result into the given value.
func (as *AttachmentSelect) Scan(ctx context.Context, v any) error {
    ctx = setContextOp(ctx, as.ctx, "Select")
    ctx = setContextOp(ctx, as.ctx, ent.OpQuerySelect)
    if err := as.prepareQuery(ctx); err != nil {
        return err
    }

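Editor's note: the swap from string labels ("First", "Only", "Count", ...) to the ent.OpQuery* constants in this file is an internal detail of how the query context is labelled for interceptors; callers keep using First/Only/All exactly as before. A usage sketch, not from this diff; the client value, the item ID, and the predicate helpers are assumptions based on the generated attachment and item packages imported above:

// Sketch only; returns the single primary attachment of an item, if any.
func primaryAttachment(ctx context.Context, client *ent.Client, itemID uuid.UUID) (*ent.Attachment, error) {
    att, err := client.Attachment.Query().
        Where(
            attachment.Primary(true),
            attachment.HasItemWith(item.ID(itemID)),
        ).
        Only(ctx)
    switch {
    case ent.IsNotFound(err):
        return nil, nil // no primary attachment for this item
    case ent.IsNotSingular(err):
        return nil, fmt.Errorf("more than one primary attachment: %w", err)
    }
    return att, err
}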
@@ -12,10 +12,10 @@ import (
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
    "github.com/hay-kot/homebox/backend/internal/data/ent/document"
    "github.com/hay-kot/homebox/backend/internal/data/ent/item"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// AttachmentUpdate is the builder for updating Attachment entities.
@@ -147,10 +147,10 @@ func (au *AttachmentUpdate) check() error {
            return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
        }
    }
    if _, ok := au.mutation.ItemID(); au.mutation.ItemCleared() && !ok {
    if au.mutation.ItemCleared() && len(au.mutation.ItemIDs()) > 0 {
        return errors.New(`ent: clearing a required unique edge "Attachment.item"`)
    }
    if _, ok := au.mutation.DocumentID(); au.mutation.DocumentCleared() && !ok {
    if au.mutation.DocumentCleared() && len(au.mutation.DocumentIDs()) > 0 {
        return errors.New(`ent: clearing a required unique edge "Attachment.document"`)
    }
    return nil
@@ -384,10 +384,10 @@ func (auo *AttachmentUpdateOne) check() error {
            return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
        }
    }
    if _, ok := auo.mutation.ItemID(); auo.mutation.ItemCleared() && !ok {
    if auo.mutation.ItemCleared() && len(auo.mutation.ItemIDs()) > 0 {
        return errors.New(`ent: clearing a required unique edge "Attachment.item"`)
    }
    if _, ok := auo.mutation.DocumentID(); auo.mutation.DocumentCleared() && !ok {
    if auo.mutation.DocumentCleared() && len(auo.mutation.DocumentIDs()) > 0 {
        return errors.New(`ent: clearing a required unique edge "Attachment.document"`)
    }
    return nil

@@ -9,8 +9,8 @@ import (
    "entgo.io/ent"
    "entgo.io/ent/dialect/sql"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
)

// AuthRoles is the model entity for the AuthRoles schema.
@@ -39,12 +39,10 @@ type AuthRolesEdges struct {
// TokenOrErr returns the Token value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AuthRolesEdges) TokenOrErr() (*AuthTokens, error) {
    if e.loadedTypes[0] {
        if e.Token == nil {
            // Edge was loaded but was not found.
            return nil, &NotFoundError{label: authtokens.Label}
        }
    if e.Token != nil {
        return e.Token, nil
    } else if e.loadedTypes[0] {
        return nil, &NotFoundError{label: authtokens.Label}
    }
    return nil, &NotLoadedError{edge: "token"}
}

@@ -5,7 +5,7 @@ package authroles
import (
    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// ID filters vertices based on their ID field.

@@ -10,8 +10,8 @@ import (
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
)

// AuthRolesCreate is the builder for creating a AuthRoles entity.

@@ -8,8 +8,8 @@ import (
    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// AuthRolesDelete is the builder for deleting a AuthRoles entity.

@@ -7,13 +7,14 @@ import (
    "fmt"
    "math"

    "entgo.io/ent"
    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// AuthRolesQuery is the builder for querying AuthRoles entities.
@@ -86,7 +87,7 @@ func (arq *AuthRolesQuery) QueryToken() *AuthTokensQuery {
// First returns the first AuthRoles entity from the query.
// Returns a *NotFoundError when no AuthRoles was found.
func (arq *AuthRolesQuery) First(ctx context.Context) (*AuthRoles, error) {
    nodes, err := arq.Limit(1).All(setContextOp(ctx, arq.ctx, "First"))
    nodes, err := arq.Limit(1).All(setContextOp(ctx, arq.ctx, ent.OpQueryFirst))
    if err != nil {
        return nil, err
    }
@@ -109,7 +110,7 @@ func (arq *AuthRolesQuery) FirstX(ctx context.Context) *AuthRoles {
// Returns a *NotFoundError when no AuthRoles ID was found.
func (arq *AuthRolesQuery) FirstID(ctx context.Context) (id int, err error) {
    var ids []int
    if ids, err = arq.Limit(1).IDs(setContextOp(ctx, arq.ctx, "FirstID")); err != nil {
    if ids, err = arq.Limit(1).IDs(setContextOp(ctx, arq.ctx, ent.OpQueryFirstID)); err != nil {
        return
    }
    if len(ids) == 0 {
@@ -132,7 +133,7 @@ func (arq *AuthRolesQuery) FirstIDX(ctx context.Context) int {
// Returns a *NotSingularError when more than one AuthRoles entity is found.
// Returns a *NotFoundError when no AuthRoles entities are found.
func (arq *AuthRolesQuery) Only(ctx context.Context) (*AuthRoles, error) {
    nodes, err := arq.Limit(2).All(setContextOp(ctx, arq.ctx, "Only"))
    nodes, err := arq.Limit(2).All(setContextOp(ctx, arq.ctx, ent.OpQueryOnly))
    if err != nil {
        return nil, err
    }
@@ -160,7 +161,7 @@ func (arq *AuthRolesQuery) OnlyX(ctx context.Context) *AuthRoles {
// Returns a *NotFoundError when no entities are found.
func (arq *AuthRolesQuery) OnlyID(ctx context.Context) (id int, err error) {
    var ids []int
    if ids, err = arq.Limit(2).IDs(setContextOp(ctx, arq.ctx, "OnlyID")); err != nil {
    if ids, err = arq.Limit(2).IDs(setContextOp(ctx, arq.ctx, ent.OpQueryOnlyID)); err != nil {
        return
    }
    switch len(ids) {
@@ -185,7 +186,7 @@ func (arq *AuthRolesQuery) OnlyIDX(ctx context.Context) int {

// All executes the query and returns a list of AuthRolesSlice.
func (arq *AuthRolesQuery) All(ctx context.Context) ([]*AuthRoles, error) {
    ctx = setContextOp(ctx, arq.ctx, "All")
    ctx = setContextOp(ctx, arq.ctx, ent.OpQueryAll)
    if err := arq.prepareQuery(ctx); err != nil {
        return nil, err
    }
@@ -207,7 +208,7 @@ func (arq *AuthRolesQuery) IDs(ctx context.Context) (ids []int, err error) {
    if arq.ctx.Unique == nil && arq.path != nil {
        arq.Unique(true)
    }
    ctx = setContextOp(ctx, arq.ctx, "IDs")
    ctx = setContextOp(ctx, arq.ctx, ent.OpQueryIDs)
    if err = arq.Select(authroles.FieldID).Scan(ctx, &ids); err != nil {
        return nil, err
    }
@@ -225,7 +226,7 @@ func (arq *AuthRolesQuery) IDsX(ctx context.Context) []int {

// Count returns the count of the given query.
func (arq *AuthRolesQuery) Count(ctx context.Context) (int, error) {
    ctx = setContextOp(ctx, arq.ctx, "Count")
    ctx = setContextOp(ctx, arq.ctx, ent.OpQueryCount)
    if err := arq.prepareQuery(ctx); err != nil {
        return 0, err
    }
@@ -243,7 +244,7 @@ func (arq *AuthRolesQuery) CountX(ctx context.Context) int {

// Exist returns true if the query has elements in the graph.
func (arq *AuthRolesQuery) Exist(ctx context.Context) (bool, error) {
    ctx = setContextOp(ctx, arq.ctx, "Exist")
    ctx = setContextOp(ctx, arq.ctx, ent.OpQueryExist)
    switch _, err := arq.FirstID(ctx); {
    case IsNotFound(err):
        return false, nil
@@ -537,7 +538,7 @@ func (argb *AuthRolesGroupBy) Aggregate(fns ...AggregateFunc) *AuthRolesGroupBy

// Scan applies the selector query and scans the result into the given value.
func (argb *AuthRolesGroupBy) Scan(ctx context.Context, v any) error {
    ctx = setContextOp(ctx, argb.build.ctx, "GroupBy")
    ctx = setContextOp(ctx, argb.build.ctx, ent.OpQueryGroupBy)
    if err := argb.build.prepareQuery(ctx); err != nil {
        return err
    }
@@ -585,7 +586,7 @@ func (ars *AuthRolesSelect) Aggregate(fns ...AggregateFunc) *AuthRolesSelect {

// Scan applies the selector query and scans the result into the given value.
func (ars *AuthRolesSelect) Scan(ctx context.Context, v any) error {
    ctx = setContextOp(ctx, ars.ctx, "Select")
    ctx = setContextOp(ctx, ars.ctx, ent.OpQuerySelect)
    if err := ars.prepareQuery(ctx); err != nil {
        return err
    }

@@ -11,9 +11,9 @@ import (
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// AuthRolesUpdate is the builder for updating AuthRoles entities.

@@ -10,9 +10,9 @@ import (
    "entgo.io/ent"
    "entgo.io/ent/dialect/sql"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
    "github.com/hay-kot/homebox/backend/internal/data/ent/user"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/user"
)

// AuthTokens is the model entity for the AuthTokens schema.
@@ -49,12 +49,10 @@ type AuthTokensEdges struct {
// UserOrErr returns the User value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AuthTokensEdges) UserOrErr() (*User, error) {
    if e.loadedTypes[0] {
        if e.User == nil {
            // Edge was loaded but was not found.
            return nil, &NotFoundError{label: user.Label}
        }
    if e.User != nil {
        return e.User, nil
    } else if e.loadedTypes[0] {
        return nil, &NotFoundError{label: user.Label}
    }
    return nil, &NotLoadedError{edge: "user"}
}
@@ -62,12 +60,10 @@ func (e AuthTokensEdges) UserOrErr() (*User, error) {
// RolesOrErr returns the Roles value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AuthTokensEdges) RolesOrErr() (*AuthRoles, error) {
    if e.loadedTypes[1] {
        if e.Roles == nil {
            // Edge was loaded but was not found.
            return nil, &NotFoundError{label: authroles.Label}
        }
    if e.Roles != nil {
        return e.Roles, nil
    } else if e.loadedTypes[1] {
        return nil, &NotFoundError{label: authroles.Label}
    }
    return nil, &NotLoadedError{edge: "roles"}
}

@@ -8,7 +8,7 @@ import (
    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// ID filters vertices based on their ID field.

@@ -11,9 +11,9 @@ import (
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
    "github.com/hay-kot/homebox/backend/internal/data/ent/user"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/user"
)

// AuthTokensCreate is the builder for creating a AuthTokens entity.

@@ -8,8 +8,8 @@ import (
    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// AuthTokensDelete is the builder for deleting a AuthTokens entity.

@@ -8,14 +8,15 @@ import (
    "fmt"
    "math"

    "entgo.io/ent"
    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/hay-kot/homebox/backend/internal/data/ent/user"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/user"
)

// AuthTokensQuery is the builder for querying AuthTokens entities.
@@ -111,7 +112,7 @@ func (atq *AuthTokensQuery) QueryRoles() *AuthRolesQuery {
// First returns the first AuthTokens entity from the query.
// Returns a *NotFoundError when no AuthTokens was found.
func (atq *AuthTokensQuery) First(ctx context.Context) (*AuthTokens, error) {
    nodes, err := atq.Limit(1).All(setContextOp(ctx, atq.ctx, "First"))
    nodes, err := atq.Limit(1).All(setContextOp(ctx, atq.ctx, ent.OpQueryFirst))
    if err != nil {
        return nil, err
    }
@@ -134,7 +135,7 @@ func (atq *AuthTokensQuery) FirstX(ctx context.Context) *AuthTokens {
// Returns a *NotFoundError when no AuthTokens ID was found.
func (atq *AuthTokensQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) {
    var ids []uuid.UUID
    if ids, err = atq.Limit(1).IDs(setContextOp(ctx, atq.ctx, "FirstID")); err != nil {
    if ids, err = atq.Limit(1).IDs(setContextOp(ctx, atq.ctx, ent.OpQueryFirstID)); err != nil {
        return
    }
    if len(ids) == 0 {
@@ -157,7 +158,7 @@ func (atq *AuthTokensQuery) FirstIDX(ctx context.Context) uuid.UUID {
// Returns a *NotSingularError when more than one AuthTokens entity is found.
// Returns a *NotFoundError when no AuthTokens entities are found.
func (atq *AuthTokensQuery) Only(ctx context.Context) (*AuthTokens, error) {
    nodes, err := atq.Limit(2).All(setContextOp(ctx, atq.ctx, "Only"))
    nodes, err := atq.Limit(2).All(setContextOp(ctx, atq.ctx, ent.OpQueryOnly))
    if err != nil {
        return nil, err
    }
@@ -185,7 +186,7 @@ func (atq *AuthTokensQuery) OnlyX(ctx context.Context) *AuthTokens {
// Returns a *NotFoundError when no entities are found.
func (atq *AuthTokensQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) {
    var ids []uuid.UUID
    if ids, err = atq.Limit(2).IDs(setContextOp(ctx, atq.ctx, "OnlyID")); err != nil {
    if ids, err = atq.Limit(2).IDs(setContextOp(ctx, atq.ctx, ent.OpQueryOnlyID)); err != nil {
        return
    }
    switch len(ids) {
@@ -210,7 +211,7 @@ func (atq *AuthTokensQuery) OnlyIDX(ctx context.Context) uuid.UUID {

// All executes the query and returns a list of AuthTokensSlice.
func (atq *AuthTokensQuery) All(ctx context.Context) ([]*AuthTokens, error) {
    ctx = setContextOp(ctx, atq.ctx, "All")
    ctx = setContextOp(ctx, atq.ctx, ent.OpQueryAll)
    if err := atq.prepareQuery(ctx); err != nil {
        return nil, err
    }
@@ -232,7 +233,7 @@ func (atq *AuthTokensQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error
    if atq.ctx.Unique == nil && atq.path != nil {
        atq.Unique(true)
    }
    ctx = setContextOp(ctx, atq.ctx, "IDs")
    ctx = setContextOp(ctx, atq.ctx, ent.OpQueryIDs)
    if err = atq.Select(authtokens.FieldID).Scan(ctx, &ids); err != nil {
        return nil, err
    }
@@ -250,7 +251,7 @@ func (atq *AuthTokensQuery) IDsX(ctx context.Context) []uuid.UUID {

// Count returns the count of the given query.
func (atq *AuthTokensQuery) Count(ctx context.Context) (int, error) {
    ctx = setContextOp(ctx, atq.ctx, "Count")
    ctx = setContextOp(ctx, atq.ctx, ent.OpQueryCount)
    if err := atq.prepareQuery(ctx); err != nil {
        return 0, err
    }
@@ -268,7 +269,7 @@ func (atq *AuthTokensQuery) CountX(ctx context.Context) int {

// Exist returns true if the query has elements in the graph.
func (atq *AuthTokensQuery) Exist(ctx context.Context) (bool, error) {
    ctx = setContextOp(ctx, atq.ctx, "Exist")
    ctx = setContextOp(ctx, atq.ctx, ent.OpQueryExist)
    switch _, err := atq.FirstID(ctx); {
    case IsNotFound(err):
        return false, nil
@@ -609,7 +610,7 @@ func (atgb *AuthTokensGroupBy) Aggregate(fns ...AggregateFunc) *AuthTokensGroupB

// Scan applies the selector query and scans the result into the given value.
func (atgb *AuthTokensGroupBy) Scan(ctx context.Context, v any) error {
    ctx = setContextOp(ctx, atgb.build.ctx, "GroupBy")
    ctx = setContextOp(ctx, atgb.build.ctx, ent.OpQueryGroupBy)
    if err := atgb.build.prepareQuery(ctx); err != nil {
        return err
    }
@@ -657,7 +658,7 @@ func (ats *AuthTokensSelect) Aggregate(fns ...AggregateFunc) *AuthTokensSelect {

// Scan applies the selector query and scans the result into the given value.
func (ats *AuthTokensSelect) Scan(ctx context.Context, v any) error {
    ctx = setContextOp(ctx, ats.ctx, "Select")
    ctx = setContextOp(ctx, ats.ctx, ent.OpQuerySelect)
    if err := ats.prepareQuery(ctx); err != nil {
        return err
    }

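Editor's note: the AuthTokens edge accessors above follow the same pattern as the Attachment ones and are usually paired with eager loading in application code. A brief sketch, not part of this diff; the client value is an assumption, and WithUser/WithRoles are the standard eager-loading helpers ent generates for the user and roles edges shown here:

// Sketch only; assumes an opened *ent.Client from the generated package.
func listTokensWithRoles(ctx context.Context, client *ent.Client) error {
    toks, err := client.AuthTokens.Query().
        WithUser().  // populates Edges.User so UserOrErr succeeds
        WithRoles(). // populates Edges.Roles so RolesOrErr succeeds
        All(ctx)
    if err != nil {
        return err
    }
    for _, t := range toks {
        if roles, err := t.Edges.RolesOrErr(); err == nil {
            _ = roles // inspect the token's role settings here
        }
    }
    return nil
}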
Some files were not shown because too many files have changed in this diff.