Mirror of https://github.com/sysadminsmedia/homebox.git (synced 2025-12-21 21:33:02 +01:00)
Compare commits
748 Commits
c839e82b93 (newest) … 85f2af4bc3 (oldest)
@@ -35,6 +35,6 @@
 // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
 "remoteUser": "node",
 "features": {
-  "golang": "1.20"
+  "ghcr.io/devcontainers/features/go:1": "1.21"
 }
 }
@@ -22,3 +22,4 @@
 **/secrets.dev.yaml
 **/values.dev.yaml
 README.md
+!Dockerfile.rootless
.github/FUNDING.yml (2 changes)
@@ -1 +1 @@
-github: [hay-kot]
+github: [tankerkiller125,katosdev,tonyaellie]
.github/ISSUE_TEMPLATE/bug_report.yml (18 changes)
@@ -1,7 +1,9 @@
 ---
 name: "Bug Report"
 description: "Submit a bug report for the current release"
-labels: ["bug"]
+labels: ["🕷️ bug"]
+projects: ["sysadminsmedia/2"]
+type: "Bug"
 body:
   - type: checkboxes
     id: checks
@@ -19,6 +21,8 @@ body:
         required: true
       - label: I already read the docs and didn't find an answer.
         required: true
+      - label: I can replicate the issue inside the Demo install.
+        required: true
   - type: input
     id: homebox-version
     attributes:
@@ -54,6 +58,18 @@ body:
         - Other
     validations:
       required: true
+  - type: dropdown
+    id: arch
+    attributes:
+      label: OS Architechture
+      description: What type of processor are you running on.
+      multiple: true
+      options:
+        - x86_64 (AMD, Intel)
+        - ARM64
+        - ARM/v7
+    validations:
+      required: true
   - type: textarea
     id: os-details
     attributes:
.github/ISSUE_TEMPLATE/feature_request.yml (4 changes)
@@ -1,7 +1,9 @@
 ---
 name: "Feature Request"
 description: "Submit a feature request for the current release"
-labels: ["feature-request"]
+labels: ["⬆️ enhancement"]
+projects: ["sysadminsmedia/2"]
+type: "Enhancement"
 body:
   - type: textarea
     id: problem-statement
.github/pull_request_template.md (16 changes)
@@ -55,18 +55,4 @@ _(fill-in or delete this section)_

<!--
Describe how you tested this change.
-->

## Release Notes

_(REQUIRED)_
<!--
If this PR makes user facing changes, please describe them here. This
description will be copied into the release notes/changelog, whenever the
next version is released. Keep this section short, and focus on high level
changes.
Put your text between the block. To omit notes, use NONE within the block.
-->

```release-note
```
-->
.github/scripts/update_currencies.py (new file, 64 lines)
@@ -0,0 +1,64 @@
import requests
import json
import os

def fetch_currencies():
    try:
        response = requests.get('https://restcountries.com/v3.1/all?fields=name,common,currencies')
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        print(f"An error occurred while making the request: {e}")
        return []

    try:
        countries = response.json()
    except json.JSONDecodeError:
        print("Failed to decode JSON from the response.")
        return []

    currencies_list = []
    for country in countries:
        country_name = country.get('name', {}).get('common')
        country_currencies = country.get('currencies', {})
        for currency_code, currency_info in country_currencies.items():
            symbol = currency_info.get('symbol', '')
            currencies_list.append({
                'code': currency_code,
                'local': country_name,
                'symbol': symbol,
                'name': currency_info.get('name')
            })

    return currencies_list

def save_currencies(currencies, file_path):
    # Sort the list by the "local" field
    sorted_currencies = sorted(currencies, key=lambda x: x['local'].lower() if x['local'] else "")
    try:
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(sorted_currencies, f, ensure_ascii=False, indent=4)
    except IOError as e:
        print(f"An error occurred while writing to the file: {e}")

def load_existing_currencies(file_path):
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            return json.load(f)
    except (IOError, json.JSONDecodeError):
        return []  # Return an empty list if file doesn't exist or is invalid

def main():
    save_path = 'backend/internal/core/currencies/currencies.json'

    existing_currencies = load_existing_currencies(save_path)
    new_currencies = fetch_currencies()

    if new_currencies == existing_currencies:
        print("Currencies up-to-date with API, skipping commit.")
    else:
        save_currencies(new_currencies, save_path)
        print("Currencies updated and saved.")

if __name__ == "__main__":
    main()
.github/workflows/binaries-publish.yaml (new file, 47 lines)
@@ -0,0 +1,47 @@
name: Publish Release Binaries

on:
  push:
    tags: [ 'v*.*.*' ]

jobs:
  # backend-tests:
  #   name: "Backend Server Tests"
  #   uses: sysadminsmedia/homebox/.github/workflows/partial-backend.yaml@main

  # frontend-tests:
  #   name: "Frontend and End-to-End Tests"
  #   uses: sysadminsmedia/homebox/.github/workflows/partial-frontend.yaml@main

  goreleaser:
    name: goreleaser
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Go
        uses: actions/setup-go@v5

      - uses: pnpm/action-setup@v2
        with:
          version: 7.30.1

      - name: Build Frontend and Copy to Backend
        working-directory: frontend
        run: |
          pnpm install --shamefully-hoist
          pnpm run build
          cp -r ./.output/public ../backend/app/api/static/

      - name: Run GoReleaser
        uses: goreleaser/goreleaser-action@v5
        with:
          workdir: "backend"
          distribution: goreleaser
          version: latest
          args: release --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/clear-stale-docker-images.yml (new file, 25 lines)
@@ -0,0 +1,25 @@
name: Docker Cleanup

on:
  schedule:
    - cron: '00 0 * * *'
  workflow_dispatch:

jobs:
  delete-untagged-images:
    name: Delete Untagged Images
    runs-on: ubuntu-latest
    permissions:
      packages: write
    steps:
      - uses: dataaxiom/ghcr-cleanup-action@v1
        with:
          dry-run: false
          package: homebox
          delete-ghost-images: true
          delete-orphaned-images: true
          delete-partial-images: true
          delete-untagged: true
          # Make sure to update this to include the latest major tags
          exclude-tags: main,vnext,latest,0.*,1.*
          older-than: 3 months
.github/workflows/docker-publish-rootless.yaml (new file, 184 lines)
@@ -0,0 +1,184 @@
name: Docker publish rootless

on:
  schedule:
    - cron: '00 0 * * *'
  push:
    branches: [ "main" ]
    paths:
      - 'backend/**'
      - 'frontend/**'
      - 'Dockerfile'
      - 'Dockerfile.rootless'
      - '.dockerignore'
      - '.github/workflows/**'
    tags: [ 'v*.*.*' ]
  pull_request:
    branches: [ "main" ]
    paths:
      - 'backend/**'
      - 'frontend/**'
      - 'Dockerfile'
      - 'Dockerfile.rootless'
      - '.dockerignore'
      - '.github/workflows/**'

env:
  DOCKERHUB_REPO: sysadminsmedia/homebox
  GHCR_REPO: ghcr.io/sysadminsmedia/homebox

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      id-token: write
      attestations: write

    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
          - linux/arm/v7

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
          branch=${{ github.event.pull_request.number || github.ref_name }}
          echo "BRANCH=${branch//\//-}" >> $GITHUB_ENV

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ env.DOCKERHUB_REPO }}
            ${{ env.GHCR_REPO }}

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          image: ghcr.io/amitie10g/binfmt:latest

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver-opts: |
            image=ghcr.io/amitie10g/buildkit:master

      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@v6
        with:
          context: . # Explicitly specify the build context
          file: ./Dockerfile.rootless # Explicitly specify the Dockerfile
          platforms: ${{ matrix.platform }}
          labels: ${{ steps.meta.outputs.labels }}
          outputs: type=image,"name=${{ env.DOCKERHUB_REPO }},${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=true
          cache-from: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR }}-${{ env.BRANCH }}-rootless
          cache-to: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR }}-${{ env.BRANCH }}-rootless,mode=max
          build-args: |
            VERSION=${{ github.ref_name }}
            COMMIT=${{ github.sha }}

      - name: Export digest
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1

  merge:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      id-token: write
      attestations: write
    needs:
      - build

    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver-opts: |
            image=ghcr.io/amitie10g/buildkit:master

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ env.DOCKERHUB_REPO }}
            ${{ env.GHCR_REPO }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}
            type=schedule,pattern=nightly
          flavor: |
            suffix=-rootless,onlatest=true

      - name: Create manifest list and push
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.DOCKERHUB_REPO }}@sha256:%s ' *)
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)

      - name: Inspect image
        run: |
          docker buildx imagetools inspect ${{ env.DOCKERHUB_REPO }}:${{ steps.meta.outputs.version }}
          docker buildx imagetools inspect ${{ env.GHCR_REPO }}:${{ steps.meta.outputs.version }}
.github/workflows/docker-publish.yaml (new file, 178 lines)
@@ -0,0 +1,178 @@
name: Docker publish

on:
  schedule:
    - cron: '00 0 * * *'
  push:
    branches: [ "main" ]
    paths:
      - 'backend/**'
      - 'frontend/**'
      - 'Dockerfile'
      - '.dockerignore'
      - '.github/workflows/**'
    tags: [ 'v*.*.*' ]
  pull_request:
    branches: [ "main" ]
    paths:
      - 'backend/**'
      - 'frontend/**'
      - 'Dockerfile'
      - '.dockerignore'
      - '.github/workflows/**'

env:
  DOCKERHUB_REPO: sysadminsmedia/homebox
  GHCR_REPO: ghcr.io/sysadminsmedia/homebox

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: read # Allows access to repository contents (read-only)
      packages: write # Allows pushing to GHCR
      id-token: write # Allows identity token write access for authentication
      attestations: write # Needed for signing and attestation (if required)

    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
          - linux/arm/v7

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
          branch=${{ github.event.pull_request.number || github.ref_name }}
          echo "BRANCH=${branch//\//-}" >> $GITHUB_ENV

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ env.DOCKERHUB_REPO }}
            ${{ env.GHCR_REPO }}

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          image: ghcr.io/amitie10g/binfmt:latest

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver-opts: |
            image=ghcr.io/amitie10g/buildkit:master

      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@v6
        with:
          platforms: ${{ matrix.platform }}
          labels: ${{ steps.meta.outputs.labels }}
          outputs: type=image,"name=${{ env.DOCKERHUB_REPO }},${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=true
          cache-from: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR }}-${{ env.BRANCH }}
          cache-to: type=registry,ref=ghcr.io/sysadminsmedia/devcache:${{ env.PLATFORM_PAIR}}-${{ env.BRANCH }},mode=max
          build-args: |
            VERSION=${{ github.ref_name }}
            COMMIT=${{ github.sha }}

      - name: Export digest
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1

  merge:
    runs-on: ubuntu-latest
    permissions:
      contents: read # Allows access to repository contents (read-only)
      packages: write # Allows pushing to GHCR
      id-token: write # Allows identity token write access for authentication
      attestations: write # Needed for signing and attestation (if required)
    needs:
      - build

    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver-opts: |
            image=ghcr.io/amitie10g/buildkit:master

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ env.DOCKERHUB_REPO }}
            ${{ env.GHCR_REPO }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}
            type=schedule,pattern=nightly

      - name: Create manifest list and push
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.DOCKERHUB_REPO }}@sha256:%s ' *)
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)

      - name: Inspect image
        run: |
          docker buildx imagetools inspect ${{ env.DOCKERHUB_REPO }}:${{ steps.meta.outputs.version }}
          docker buildx imagetools inspect ${{ env.GHCR_REPO }}:${{ steps.meta.outputs.version }}
.github/workflows/partial-backend.yaml (13 changes)
@@ -7,12 +7,12 @@ jobs:
   Go:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Set up Go
-        uses: actions/setup-go@v4
+        uses: actions/setup-go@v5
         with:
-          go-version: "1.20"
+          go-version: "1.21"

       - name: Install Task
         uses: arduino/setup-task@v1
@@ -20,7 +20,7 @@ jobs:
           repo-token: ${{ secrets.GITHUB_TOKEN }}

       - name: golangci-lint
-        uses: golangci/golangci-lint-action@v3
+        uses: golangci/golangci-lint-action@v4
         with:
           # Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
           version: latest
@@ -34,3 +34,8 @@ jobs:

       - name: Test
         run: task go:coverage
+
+      - name: Validate OpenAPI definition
+        uses: swaggerexpert/swagger-editor-validate@v1
+        with:
+          definition-file: backend/app/api/static/docs/swagger.json
.github/workflows/partial-frontend.yaml (18 changes)
@@ -9,13 +9,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           fetch-depth: 0

-      - uses: pnpm/action-setup@v2.2.4
+      - uses: pnpm/action-setup@v3.0.0
         with:
-          version: 6.0.2
+          version: 9.12.2

       - name: Install dependencies
         run: pnpm install --shamefully-hoist
@@ -34,7 +34,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           fetch-depth: 0

@@ -44,17 +44,17 @@ jobs:
           repo-token: ${{ secrets.GITHUB_TOKEN }}

       - name: Set up Go
-        uses: actions/setup-go@v4
+        uses: actions/setup-go@v5
         with:
-          go-version: "1.20"
+          go-version: "1.21"

-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v4
         with:
           node-version: 18

-      - uses: pnpm/action-setup@v2.2.4
+      - uses: pnpm/action-setup@v3.0.0
         with:
-          version: 6.0.2
+          version: 9.12.2

       - name: Install dependencies
         run: pnpm install
.github/workflows/partial-publish.yaml (deleted, 66 lines)
@@ -1,66 +0,0 @@
name: Frontend / E2E

on:
  workflow_call:
    inputs:
      tag:
        required: true
        type: string
      release:
        required: false
        type: boolean
        default: false

    secrets:
      GH_TOKEN:
        required: true

jobs:
  publish:
    name: "Publish Homebox"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Go
        uses: actions/setup-go@v4
        with:
          go-version: "1.20"

      - name: Set up QEMU
        id: qemu
        uses: docker/setup-qemu-action@v2
        with:
          image: tonistiigi/binfmt:latest
          platforms: all

      - name: install buildx
        id: buildx
        uses: docker/setup-buildx-action@v2
        with:
          install: true

      - name: login to container registry
        run: docker login ghcr.io --username hay-kot --password $CR_PAT
        env:
          CR_PAT: ${{ secrets.GH_TOKEN }}

      - name: build nightly the image
        if: ${{ inputs.release == false }}
        run: |
          docker build --push --no-cache \
            --tag=ghcr.io/hay-kot/homebox:${{ inputs.tag }} \
            --build-arg=COMMIT=$(git rev-parse HEAD) \
            --build-arg=BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
            --platform=linux/amd64,linux/arm64,linux/arm/v7 .

      - name: build release tagged the image
        if: ${{ inputs.release == true }}
        run: |
          docker build --push --no-cache \
            --tag ghcr.io/hay-kot/homebox:nightly \
            --tag ghcr.io/hay-kot/homebox:latest \
            --tag ghcr.io/hay-kot/homebox:${{ inputs.tag }} \
            --build-arg VERSION=${{ inputs.tag }} \
            --build-arg COMMIT=$(git rev-parse HEAD) \
            --build-arg BUILD_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
            --platform linux/amd64,linux/arm64,linux/arm/v7 .
.github/workflows/publish.yaml (deleted, 29 lines)
@@ -1,29 +0,0 @@
name: Publish Dockers

on:
  push:
    branches:
      - main

env:
  FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}

jobs:
  deploy:
    name: "Deploy Nightly to Fly.io"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: superfly/flyctl-actions/setup-flyctl@master
      - run: flyctl deploy --remote-only

  publish-nightly:
    name: "Publish Nightly"
    if: github.event_name != 'release'
    uses: hay-kot/homebox/.github/workflows/partial-publish.yaml@main
    with:
      tag: nightly
    secrets:
      GH_TOKEN: ${{ secrets.CR_PAT }}
.github/workflows/pull-requests.yaml (4 changes)
@@ -5,6 +5,10 @@ on:
     branches:
       - main

+    paths:
+      - 'backend/**'
+      - 'frontend/**'
+
 jobs:
   backend-tests:
     name: "Backend Server Tests"
.github/workflows/tag.yaml
vendored
77
.github/workflows/tag.yaml
vendored
@@ -1,77 +0,0 @@
|
||||
name: Publish Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
|
||||
env:
|
||||
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
|
||||
|
||||
jobs:
|
||||
backend-tests:
|
||||
name: "Backend Server Tests"
|
||||
uses: hay-kot/homebox/.github/workflows/partial-backend.yaml@main
|
||||
|
||||
frontend-tests:
|
||||
name: "Frontend and End-to-End Tests"
|
||||
uses: hay-kot/homebox/.github/workflows/partial-frontend.yaml@main
|
||||
|
||||
goreleaser:
|
||||
name: goreleaser
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v3
|
||||
|
||||
- uses: pnpm/action-setup@v2
|
||||
with:
|
||||
version: 7.30.1
|
||||
|
||||
- name: Build Frontend and Copy to Backend
|
||||
working-directory: frontend
|
||||
run: |
|
||||
pnpm install --shamefully-hoist
|
||||
pnpm run build
|
||||
cp -r ./.output/public ../backend/app/api/static/
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v4
|
||||
with:
|
||||
workdir: "backend"
|
||||
distribution: goreleaser
|
||||
version: latest
|
||||
args: release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
publish-tag:
|
||||
name: "Publish Tag"
|
||||
uses: hay-kot/homebox/.github/workflows/partial-publish.yaml@main
|
||||
with:
|
||||
release: true
|
||||
tag: ${{ github.ref_name }}
|
||||
secrets:
|
||||
GH_TOKEN: ${{ secrets.CR_PAT }}
|
||||
|
||||
deploy-docs:
|
||||
name: Deploy docs
|
||||
needs:
|
||||
- publish-tag
|
||||
- goreleaser
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout main
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Deploy docs
|
||||
uses: mhausenblas/mkdocs-deploy-gh-pages@master
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CONFIG_FILE: docs/mkdocs.yml
|
||||
EXTRA_PACKAGES: build-base
|
||||
.github/workflows/update-currencies.yml (new file, 100 lines)
@@ -0,0 +1,100 @@
name: Update Currencies

on:
  push:
    branches:
      - main

jobs:
  update-currencies:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.8'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install requests

      - name: Run currency fetch script
        run: python .github/scripts/update_currencies.py

      - name: Check for changes
        id: check_changes
        run: |
          if [[ $(git status --porcelain) ]]; then
            echo "Changes detected."
            echo "changes=true" >> $GITHUB_ENV
          else
            echo "No changes detected."
            echo "changes=false" >> $GITHUB_ENV
          fi

      - name: Delete existing update-currencies branch
        run: |
          if git show-ref --verify --quiet refs/heads/update-currencies; then
            git branch -D update-currencies
            echo "Deleted existing update-currencies branch."
          else
            echo "No existing update-currencies branch to delete."
          fi

      - name: Create new update-currencies branch
        if: env.changes == 'true'
        run: |
          git config --global user.name "github-actions[bot]"
          git config --global user.email "github-actions[bot]@users.noreply.github.com"

          # Create a new branch
          git checkout -b update-currencies
          git add backend/internal/core/currencies/currencies.json
          git commit -m "Update currencies.json"

          # Fetch the latest changes from the remote
          git fetch origin

          # Attempt to rebase with the latest changes
          if git show-ref --verify --quiet refs/remotes/origin/update-currencies; then
            if ! git rebase origin/update-currencies; then
              echo "Rebase conflicts occurred. Please resolve them manually."
              echo "To resolve conflicts, check out the 'update-currencies' branch locally."
              exit 1
            fi
          else
            echo "No existing remote branch 'update-currencies'. Skipping rebase."
          fi

          # Push the new branch to the remote
          if ! git push --set-upstream origin update-currencies; then
            echo "Push failed, trying to fetch and rebase again."
            git fetch origin
            if git show-ref --verify --quiet refs/remotes/origin/update-currencies; then
              if ! git rebase origin/update-currencies; then
                echo "Second rebase failed. Please resolve manually."
                exit 1
              fi
            else
              echo "No existing remote branch 'update-currencies'. Skipping rebase."
            fi
            if ! git push --set-upstream origin update-currencies; then
              echo "Second push failed. Please resolve manually."
              exit 1
            fi
          fi

          # Create a pull request
          curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
            -X POST \
            -d '{"title": "Update currencies", "head": "update-currencies", "base": "main"}' \
            https://api.github.com/repos/${{ github.repository }}/pulls

      - name: Notify no changes
        if: env.changes == 'false'
        run: echo "Currencies up-to-date with API, skipping commit."
.gitignore (10 changes)
@@ -33,7 +33,7 @@ go.work
 backend/.env
 build/*

-# Output Directory for Nuxt/Frontend during build step
+# Output Directory for Nuxt/Frontend during build steps
 backend/app/api/public/*
 !backend/app/api/public/.gitkeep

@@ -48,9 +48,13 @@ dist

 .pnpm-store
 backend/app/api/app
-backend/app/api/__debug_bin
+backend/app/api/__debug_bin*
+dist/

 # Nuxt Publish Dir
 backend/app/api/static/public/*
-!backend/app/api/static/public/.gitkeep
+!backend/app/api/static/public/.gitkeep
+backend/api

+docs/.vitepress/cache/
+/.data/
@@ -3,7 +3,7 @@ package schema

 import (
 	"entgo.io/ent"

-	"github.com/hay-kot/homebox/backend/internal/data/ent/schema/mixins"
+	"github.com/sysadminsmedia/homebox/backend/internal/data/ent/schema/mixins"
 )

 type {{ .Scaffold.model }} struct {
.vscode/launch.json (8 changes)
@@ -23,8 +23,9 @@
         "HBOX_LOG_LEVEL": "debug",
         "HBOX_DEBUG_ENABLED": "true",
+        "HBOX_STORAGE_DATA": "${workspaceRoot}/backend/.data",
-        "HBOX_STORAGE_SQLITE_URL": "${workspaceRoot}/backend/.data/homebox.db?_fk=1"
+        "HBOX_STORAGE_SQLITE_URL": "${workspaceRoot}/backend/.data/homebox.db?_fk=1&_time_format=sqlite"
       },
       "console": "integratedTerminal",
     },
     {
       "name": "Launch Frontend",
@@ -38,10 +39,11 @@
       "cwd": "${workspaceFolder}/frontend",
       "serverReadyAction": {
         "action": "debugWithChrome",
-        "pattern": "Local: http://localhost:([0-9]+)",
+        "pattern": "Local: +http://localhost:([0-9]+)",
         "uriFormat": "http://localhost:%s",
         "webRoot": "${workspaceFolder}/frontend"
       }
     },
     "console": "integratedTerminal",
     }
   ]
 }
.vscode/settings.json (2 changes)
@@ -16,7 +16,7 @@
   "editor.formatOnSave": false,
   "editor.defaultFormatter": "dbaeumer.vscode-eslint",
   "editor.codeActionsOnSave": {
-    "source.fixAll.eslint": true
+    "source.fixAll.eslint": "explicit"
   },
   "[typescript]": {
     "editor.defaultFormatter": "dbaeumer.vscode-eslint"
CODE_OF_CONDUCT.md (new file, 128 lines)
@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or
  advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
  address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
[support@sysadminemedia.com](mailto:support@sysadminemedia.com).
All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series
of actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within
the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
[Contributor Covenant Code of Conduct](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html).

Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at
[FAQ](https://www.contributor-covenant.org/faq). Translations are available at
[Translations](https://www.contributor-covenant.org/translations).
@@ -1,16 +1,16 @@
|
||||
# Contributing
|
||||
|
||||
## We Develop with Github
|
||||
## We Develop with GitHub
|
||||
|
||||
We use github to host code, to track issues and feature requests, as well as accept pull requests.
|
||||
We use GitHub to host code, to track issues and feature requests, as well as accept pull requests.
|
||||
|
||||
## Branch Flow
|
||||
|
||||
We use the `main` branch as the development branch. All PRs should be made to the `main` branch from a feature branch. To create a pull request you can use the following steps:
|
||||
We use the `main` branch as the development branch. All PRs should be made to the `main` branch from a feature branch. To create a pull request, you can use the following steps:
|
||||
|
||||
1. Fork the repository and create a new branch from `main`.
|
||||
2. If you've added code that should be tested, add tests.
|
||||
3. If you've changed API's, update the documentation.
|
||||
3. If you've changed APIs, update the documentation.
|
||||
4. Ensure that the test suite and linters pass
|
||||
5. Issue your pull request
|
||||
|
||||
@@ -18,7 +18,7 @@ We use the `main` branch as the development branch. All PRs should be made to th
|
||||
|
||||
### Prerequisites
|
||||
|
||||
There is a devcontainer available for this project. If you are using VSCode, you can use the devcontainer to get started. If you are not using VSCode, you can need to ensure that you have the following tools installed:
|
||||
There is a devcontainer available for this project. If you are using VSCode, you can use the devcontainer to get started. If you are not using VSCode, you need to ensure that you have the following tools installed:
|
||||
|
||||
- [Go 1.19+](https://golang.org/doc/install)
|
||||
- [Swaggo](https://github.com/swaggo/swag)
|
||||
@@ -31,27 +31,27 @@ If you're using `taskfile` you can run `task --list-all` for a list of all comma
|
||||
|
||||
### Setup
|
||||
|
||||
If you're using the taskfile you can use the `task setup` command to run the required setup commands. Otherwise you can review the commands required in the `Taskfile.yml` file.
|
||||
If you're using the taskfile, you can use the `task setup` command to run the required setup commands. Otherwise, you can review the commands required in the `Taskfile.yml` file.
|
||||
|
||||
Note that when installing dependencies with pnpm you must use the `--shamefully-hoist` flag. If you don't use this flag you will get an error when running the the frontend server.
|
||||
Note that when installing dependencies with pnpm you must use the `--shamefully-hoist` flag. If you don't use this flag, you will get an error when running the frontend server.
|
||||
|
||||
### API Development Notes

start command `task go:run`

1. API Server does not auto reload. You'll need to restart the server after making changes.
2. Unit tests should be written in Go, however end-to-end or user story tests should be written in TypeScript using the client library in the frontend directory.
2. Unit tests should be written in Go, however, end-to-end or user story tests should be written in TypeScript using the client library in the frontend directory.
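A minimal backend loop might look like the sketch below; port 7745 is the one used by the Docker healthcheck elsewhere in this change set, so treat it as an assumption for local runs:

```bash
# Start the API server (it does not auto reload; restart after changes)
task go:run

# In a second shell, confirm the server is up via the status endpoint
curl http://localhost:7745/api/v1/status
```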
### Frontend Development Notes

start command `task: ui:dev`
start command `task ui:dev`

1. The frontend is a Vue 3 app with Nuxt.js that uses Tailwind and DaisyUI for styling.
2. We're using Vitest for our automated testing. you can run these with `task ui:watch`.
3. Tests require the API server to be running and in some cases the first run will fail due to a race condition. If this happens just run the tests again and they should pass.
2. We're using Vitest for our automated testing. You can run these with `task ui:watch`.
3. Tests require the API server to be running, and in some cases the first run will fail due to a race condition. If this happens, just run the tests again and they should pass.
## Publishing Release

Create a new tag in github with the version number vX.X.X. This will trigger a new release to be created.
Create a new tag in GitHub with the version number vX.X.X. This will trigger a new release to be created.

Test -> Goreleaser -> Publish Release -> Trigger Docker Builds -> Deploy Docs + Fly.io Demo
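Creating that tag from the command line is a one-liner; `vX.X.X` is the placeholder version from the text above:

```bash
# Tag the release commit and push the tag to trigger the release workflow
git tag -a vX.X.X -m "Release vX.X.X"
git push origin vX.X.X
```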
75 Dockerfile

@@ -1,50 +1,91 @@

# Node dependencies stage
FROM public.ecr.aws/docker/library/node:18-alpine AS frontend-dependencies
WORKDIR /app

# Build Nuxt
FROM node:17-alpine as frontend-builder
WORKDIR /app
# Install pnpm globally (caching layer)
RUN npm install -g pnpm

# Copy package.json and lockfile to leverage caching
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist
COPY frontend .

# Build Nuxt (frontend) stage
FROM public.ecr.aws/docker/library/node:18-alpine AS frontend-builder
WORKDIR /app

# Install pnpm globally again (it can reuse the cache if not changed)
RUN npm install -g pnpm

# Copy over source files and node_modules from dependencies stage
COPY frontend .
COPY --from=frontend-dependencies /app/node_modules ./node_modules
RUN pnpm build

# Build API
FROM golang:alpine AS builder
# Go dependencies stage
FROM public.ecr.aws/docker/library/golang:alpine AS builder-dependencies
WORKDIR /go/src/app

# Copy go.mod and go.sum for better caching
COPY ./backend/go.mod ./backend/go.sum ./
RUN go mod download

# Build API stage
FROM public.ecr.aws/docker/library/golang:alpine AS builder
ARG BUILD_TIME
ARG COMMIT
ARG VERSION

# Install necessary build tools
RUN apk update && \
    apk upgrade && \
    apk add --update git build-base gcc g++
    apk add --no-cache git build-base gcc g++

WORKDIR /go/src/app

# Copy Go modules (from dependencies stage) and source code
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
COPY ./backend .
RUN go get -d -v ./...

# Clear old public files and copy new ones from frontend build
RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public
RUN CGO_ENABLED=0 GOOS=linux go build \
    -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \

# Use cache for Go build artifacts
RUN --mount=type=cache,target=/root/.cache/go-build \
    CGO_ENABLED=0 GOOS=linux go build \
    -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
    -o /go/bin/api \
    -v ./app/api/*.go

# Production Stage
FROM alpine:latest

# Production stage
FROM public.ecr.aws/docker/library/alpine:latest
ENV HBOX_MODE=production
ENV HBOX_STORAGE_DATA=/data/
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_fk=1
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite

RUN apk --no-cache add ca-certificates
# Install necessary runtime dependencies
RUN apk --no-cache add ca-certificates wget

# Create application directory and copy over built Go binary
RUN mkdir /app
COPY --from=builder /go/bin/api /app

RUN chmod +x /app/api

# Labels and configuration for the final image
LABEL Name=homebox Version=0.0.1
LABEL org.opencontainers.image.source="https://github.com/hay-kot/homebox"
LABEL org.opencontainers.image.source="https://github.com/sysadminsmedia/homebox"

# Expose necessary ports for Homebox
EXPOSE 7745
WORKDIR /app

# Healthcheck configuration
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
    CMD [ "wget", "--no-verbose", "--tries=1", "-O", "-", "http://localhost:7745/api/v1/status" ]

# Persist volume
VOLUME [ "/data" ]

# Entrypoint and CMD
ENTRYPOINT [ "/app/api" ]
CMD [ "/data/config.yml" ]
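As a hedged sketch, the image above could be built and its healthcheck inspected locally roughly like this; the build-arg values, image tag, and container name are illustrative, not the values used by CI:

```bash
# Build from the repository root with placeholder metadata
docker build \
  --build-arg VERSION=dev \
  --build-arg COMMIT="$(git rev-parse HEAD)" \
  --build-arg BUILD_TIME="$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
  -t homebox:local .

# Run it and ask Docker for the healthcheck result
docker run -d --name homebox-local -p 3100:7745 -v "$PWD/.data:/data" homebox:local
docker inspect --format '{{.State.Health.Status}}' homebox-local
```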
96 Dockerfile.rootless Normal file

@@ -0,0 +1,96 @@

# Node dependencies stage
FROM public.ecr.aws/docker/library/node:18-alpine AS frontend-dependencies
WORKDIR /app

# Install pnpm globally (caching layer)
RUN npm install -g pnpm

# Copy package.json and lockfile to leverage caching
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist

# Build Nuxt (frontend) stage
FROM public.ecr.aws/docker/library/node:18-alpine AS frontend-builder
WORKDIR /app

# Install pnpm globally again (it can reuse the cache if not changed)
RUN npm install -g pnpm

# Copy over source files and node_modules from dependencies stage
COPY frontend .
COPY --from=frontend-dependencies /app/node_modules ./node_modules
RUN pnpm build

# Go dependencies stage
FROM public.ecr.aws/docker/library/golang:alpine AS builder-dependencies
WORKDIR /go/src/app

# Copy go.mod and go.sum for better caching
COPY ./backend/go.mod ./backend/go.sum ./
RUN go mod download

# Build API stage
FROM public.ecr.aws/docker/library/golang:alpine AS builder
ARG BUILD_TIME
ARG COMMIT
ARG VERSION

# Install necessary build tools
RUN apk update && \
    apk upgrade && \
    apk add --no-cache git build-base gcc g++

WORKDIR /go/src/app

# Copy Go modules (from dependencies stage) and source code
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
COPY ./backend .

# Clear old public files and copy new ones from frontend build
RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public

# Use cache for Go build artifacts
RUN --mount=type=cache,target=/root/.cache/go-build \
    CGO_ENABLED=0 GOOS=linux go build \
    -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
    -o /go/bin/api \
    -v ./app/api/*.go

RUN mkdir /data

# Production stage
FROM public.ecr.aws/docker/library/alpine:latest
ENV HBOX_MODE=production
ENV HBOX_STORAGE_DATA=/data/
ENV HBOX_STORAGE_SQLITE_URL=/data/homebox.db?_pragma=busy_timeout=2000&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite

# Install necessary runtime dependencies
RUN apk --no-cache add ca-certificates wget
RUN addgroup -S nonroot && adduser -S nonroot -G nonroot

# Create application directory and copy over built Go binary
RUN mkdir /app
COPY --from=builder --chown=nonroot /go/bin/api /app
COPY --from=builder --chown=nonroot /data /data
RUN chmod +x /app/api

# Labels and configuration for the final image
LABEL Name=homebox Version=0.0.1
LABEL org.opencontainers.image.source="https://github.com/sysadminsmedia/homebox"

# Expose necessary ports for Homebox
EXPOSE 7745
WORKDIR /app

# Healthcheck configuration
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
    CMD [ "wget", "--no-verbose", "--tries=1", "-O", "-", "http://localhost:7745/api/v1/status" ]

# Persist volume
VOLUME [ "/data" ]

# Entrypoint and CMD
USER nonroot
ENTRYPOINT [ "/app/api" ]
CMD [ "/data/config.yml" ]
46 README.md

@@ -1,30 +1,64 @@

<div align="center">
  <img src="/docs/docs/assets/img/lilbox.svg" height="200"/>
  <img src="/docs/public/lilbox.svg" height="200"/>
</div>

<h1 align="center" style="margin-top: -10px"> HomeBox </h1>
<p align="center" style="width: 100;">
  <a href="https://hay-kot.github.io/homebox/">Docs</a>
  <a href="https://homebox.software/en/">Docs</a>
  |
  <a href="https://homebox.fly.dev">Demo</a>
  <a href="https://demo.homebox.software">Demo</a>
  |
  <a href="https://discord.gg/tuncmNrE4z">Discord</a>
  <a href="https://discord.gg/aY4DCkpNA9">Discord</a>
</p>

## What is HomeBox

Homebox is the inventory and organization system built for the Home User! With a focus on simplicity and ease of use, Homebox is the perfect solution for your home inventory, organization, and management needs. While developing this project, I've tried to keep the following principles in mind:

- _Simple_ - Homebox is designed to be simple and easy to use. No complicated setup or configuration required. Use either a single docker container, or deploy yourself by compiling the binary for your platform of choice.
- _Blazingly Fast_ - Homebox is written in Go, which makes it extremely fast and requires minimal resources to deploy. In general, idle memory usage is less than 50MB for the whole container.
- _Portable_ - Homebox is designed to be portable and run on anywhere. We use SQLite and an embedded Web UI to make it easy to deploy, use, and backup.

# Screenshots
Check out screenshots of the project [here](https://imgur.com/a/5gLWt2j).
You can also try the demo instances of Homebox:
- [Demo](https://demo.homebox.software)
- [Nightly](https://nightly.homebox.software)
- [VNext](https://vnext.homebox.software/)

## Quick Start

[Configuration & Docker Compose](https://hay-kot.github.io/homebox/quick-start)
[Configuration & Docker Compose](https://homebox.software/en/quick-start.html)

```bash
# If using the rootless image, ensure data
# folder has correct permissions
mkdir -p /path/to/data/folder
chown 65532:65532 -R /path/to/data/folder
docker run -d \
  --name homebox \
  --restart unless-stopped \
  --publish 3100:7745 \
  --env TZ=Europe/Bucharest \
  --volume /path/to/data/folder/:/data \
  ghcr.io/hay-kot/homebox:latest
  ghcr.io/sysadminsmedia/homebox:latest
  # ghcr.io/sysadminsmedia/homebox:latest-rootless
```
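Once the container is up, a quick sanity check might look like the sketch below (3100 is the host port published in the command above; the container name is the one from that command):

```bash
# Follow the container logs
docker logs -f homebox

# Hit the status endpoint through the published port
curl http://localhost:3100/api/v1/status
```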
<!-- CONTRIBUTING -->

## Contributing

Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.

If you are not a coder, you can still contribute financially. Financial contributions help me prioritize working on this project over others and helps me know that there is a real demand for project development.

## Help us Translate
We want to make sure that Homebox is available in as many languages as possible. If you are interested in helping us translate Homebox, please help us via our [Weblate instance](https://translate.sysadminsmedia.com/projects/homebox/).

[](http://translate.sysadminsmedia.com/engage/homebox/)

## Credits

- Original project by [@hay-kot](https://github.com/hay-kot)
- Logo by [@lakotelman](https://github.com/lakotelman)
@@ -6,4 +6,6 @@ Since this software is still considered beta/WIP support is always only given fo

## Reporting a Vulnerability

Please open a normal public issue if you have any security related concerns.
Please open a normal public issue for minor security issues or general security inquires.

For major or critical security issues, please open a private github security issue.
50 Taskfile.yml

@@ -1,7 +1,8 @@

version: "3"

env:
  HBOX_STORAGE_SQLITE_URL: .data/homebox.db?_pragma=busy_timeout=1000&_pragma=journal_mode=WAL&_fk=1
  HBOX_LOG_LEVEL: debug
  HBOX_STORAGE_SQLITE_URL: .data/homebox.db?_pragma=busy_timeout=1000&_pragma=journal_mode=WAL&_fk=1&_time_format=sqlite
  HBOX_OPTIONS_ALLOW_REGISTRATION: true
  UNSAFE_DISABLE_PASSWORD_PROJECTION: "yes_i_am_sure"
tasks:

@@ -12,15 +13,24 @@ tasks:

      - cd backend && go mod tidy
      - cd frontend && pnpm install --shamefully-hoist

  generate:
    desc: |
      Generates collateral files from the backend project
      including swagger docs and typescripts type for the frontend
    deps:
      - db:generate
  swag:
    desc: Generate swagger docs
    dir: backend/app/api/static/
    vars:
      API: "../"
      INTERNAL: "../../../internal"
      PKGS: "../../../pkgs"
    cmds:
      - swag fmt --dir={{ .API }}
      - swag init --dir={{ .API }},{{ .INTERNAL }}/core/services,{{ .INTERNAL }}/data/repo --parseDependency
    sources:
      - "./backend/app/api/**/*"
      - "./backend/internal/data/**"
      - "./backend/internal/core/services/**/*"
      - "./backend/app/tools/typegen/main.go"
  typescript-types:
    desc: Generates typescript types from swagger definition
    cmds:
      - cd backend/app/api/static && swag fmt --dir=../
      - cd backend/app/api/static && swag init --dir=../,../../../internal,../../../pkgs
      - |
        npx swagger-typescript-api \
          --no-client \

@@ -28,14 +38,21 @@ tasks:

          --path ./backend/app/api/static/docs/swagger.json \
          --output ./frontend/lib/api/types
      - go run ./backend/app/tools/typegen/main.go ./frontend/lib/api/types/data-contracts.ts
      - cp ./backend/app/api/static/docs/swagger.json docs/docs/api/openapi-2.0.json
    sources:
      - "./backend/app/api/**/*"
      - "./backend/internal/data/**"
      - "./backend/internal/core/services/**/*"
      - "./backend/app/tools/typegen/main.go"
      - ./backend/app/tools/typegen/main.go
      - ./backend/app/api/static/docs/swagger.json

  generate:
    deps:
      - db:generate
    cmds:
      - task: swag
      - task: typescript-types
      - cp ./backend/app/api/static/docs/swagger.json docs/docs/api/openapi-2.0.json

  go:run:
    env:
      HBOX_DEMO: true
    desc: Starts the backend api server (depends on generate task)
    dir: backend
    deps:

@@ -87,8 +104,7 @@ tasks:

    dir: backend/internal/
    cmds:
      - |
        go generate ./... \
          --template=./data/ent/schema/templates/has_id.tmpl
      go generate ./...
    sources:
      - "./backend/internal/data/ent/schema/**/*"

@@ -139,4 +155,4 @@ tasks:

      - task: go:all
      - task: ui:check
      - task: ui:fix
      - task: test:ci
      - task: test:ci
74 backend/.golangci.yml Normal file

@@ -0,0 +1,74 @@

run:
  timeout: 10m
linters-settings:
  goconst:
    min-len: 5
    min-occurrences: 5
  exhaustive:
    default-signifies-exhaustive: true
  revive:
    ignore-generated-header: false
    severity: warning
    confidence: 3
  depguard:
    rules:
      main:
        deny:
          - pkg: io/util
            desc: |
              Deprecated: As of Go 1.16, the same functionality is now provided by
              package io or package os, and those implementations should be
              preferred in new code. See the specific function documentation for
              details.
  gocritic:
    enabled-checks:
      - ruleguard
  testifylint:
    enable-all: true
  tagalign:
    order:
      - json
      - schema
      - yaml
      - yml
      - toml
      - validate
linters:
  disable-all: true
  enable:
    - asciicheck
    - bodyclose
    - depguard
    - dogsled
    - errcheck
    - errorlint
    - exhaustive
    - copyloopvar
    - gochecknoinits
    - goconst
    - gocritic
    - gocyclo
    - gofmt
    - goprintffuncname
    - gosimple
    - govet
    - ineffassign
    - misspell
    - nakedret
    - revive
    - staticcheck
    - stylecheck
    - tagalign
    - testifylint
    - typecheck
    - typecheck
    - unconvert
    - unused
    - whitespace
    - zerologlint
    - sqlclosecheck
issues:
  exclude-use-default: false
  exclude-dirs:
    - internal/data/ent.*
  fix: true
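With this configuration in place, the linters would typically be run from the backend module; a minimal sketch, assuming `golangci-lint` is installed locally:

```bash
# Run the configured linters against the backend module
cd backend
golangci-lint run ./...
```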
@@ -1,23 +1,21 @@

package main

import (
    "time"

    "github.com/hay-kot/homebox/backend/internal/core/services"
    "github.com/hay-kot/homebox/backend/internal/data/ent"
    "github.com/hay-kot/homebox/backend/internal/data/repo"
    "github.com/hay-kot/homebox/backend/internal/sys/config"
    "github.com/hay-kot/homebox/backend/pkgs/mailer"
    "github.com/hay-kot/httpkit/server"
    "github.com/sysadminsmedia/homebox/backend/internal/core/services"
    "github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent"
    "github.com/sysadminsmedia/homebox/backend/internal/data/repo"
    "github.com/sysadminsmedia/homebox/backend/internal/sys/config"
    "github.com/sysadminsmedia/homebox/backend/pkgs/mailer"
)

type app struct {
    conf     *config.Config
    mailer   mailer.Mailer
    db       *ent.Client
    server   *server.Server
    repos    *repo.AllRepos
    services *services.AllServices
    bus      *eventbus.EventBus
}

func new(conf *config.Config) *app {

@@ -35,13 +33,3 @@ func new(conf *config.Config) *app {

    return s
}

func (a *app) startBgTask(t time.Duration, fn func()) {
    timer := time.NewTimer(t)

    for {
        timer.Reset(t)
        a.server.Background(fn)
        <-timer.C
    }
}
38 backend/app/api/bgrunner.go Normal file

@@ -0,0 +1,38 @@

package main

import (
    "context"
    "time"
)

type BackgroundTask struct {
    name     string
    Interval time.Duration
    Fn       func(context.Context)
}

func (tsk *BackgroundTask) Name() string {
    return tsk.name
}

func NewTask(name string, interval time.Duration, fn func(context.Context)) *BackgroundTask {
    return &BackgroundTask{
        Interval: interval,
        Fn:       fn,
    }
}

func (tsk *BackgroundTask) Start(ctx context.Context) error {
    tsk.Fn(ctx)

    timer := time.NewTimer(tsk.Interval)
    for {
        select {
        case <-ctx.Done():
            return nil
        case <-timer.C:
            timer.Reset(tsk.Interval)
            tsk.Fn(ctx)
        }
    }
}
@@ -2,22 +2,27 @@ package main

import (
    "context"
    "errors"
    "strings"
    "time"

    "github.com/hay-kot/homebox/backend/internal/core/services"
    "github.com/rs/zerolog/log"
    "github.com/sysadminsmedia/homebox/backend/internal/core/services"
)

func (a *app) SetupDemo() {
func (a *app) SetupDemo() error {
    csvText := `HB.import_ref,HB.location,HB.labels,HB.quantity,HB.name,HB.description,HB.insured,HB.serial_number,HB.model_number,HB.manufacturer,HB.notes,HB.purchase_from,HB.purchase_price,HB.purchase_time,HB.lifetime_warranty,HB.warranty_expires,HB.warranty_details,HB.sold_to,HB.sold_price,HB.sold_time,HB.sold_notes
,Garage,IOT;Home Assistant; Z-Wave,1,Zooz Universal Relay ZEN17,"Zooz 700 Series Z-Wave Universal Relay ZEN17 for Awnings, Garage Doors, Sprinklers, and More | 2 NO-C-NC Relays (20A, 10A) | Signal Repeater | Hub Required (Compatible with SmartThings and Hubitat)",,,ZEN17,Zooz,,Amazon,39.95,10/13/2021,,,,,,,
,Living Room,IOT;Home Assistant; Z-Wave,1,Zooz Motion Sensor,"Zooz Z-Wave Plus S2 Motion Sensor ZSE18 with Magnetic Mount, Works with Vera and SmartThings",,,ZSE18,Zooz,,Amazon,29.95,10/15/2021,,,,,,,
,Office,IOT;Home Assistant; Z-Wave,1,Zooz 110v Power Switch,"Zooz Z-Wave Plus Power Switch ZEN15 for 110V AC Units, Sump Pumps, Humidifiers, and More",,,ZEN15,Zooz,,Amazon,39.95,10/13/2021,,,,,,,
,Downstairs,IOT;Home Assistant; Z-Wave,1,Ecolink Z-Wave PIR Motion Sensor,"Ecolink Z-Wave PIR Motion Detector Pet Immune, White (PIRZWAVE2.5-ECO)",,,PIRZWAVE2.5-ECO,Ecolink,,Amazon,35.58,10/21/2020,,,,,,,
,Entry,IOT;Home Assistant; Z-Wave,1,Yale Security Touchscreen Deadbolt,"Yale Security YRD226-ZW2-619 YRD226ZW2619 Touchscreen Deadbolt, Satin Nickel",,,YRD226ZW2619,Yale,,Amazon,120.39,10/14/2020,,,,,,,
,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,"UltraPro Z-Wave Smart Rocker Light Dimmer with QuickFit and SimpleWire, 3-Way Ready, Compatible with Alexa, Google Assistant, ZWave Hub Required, Repeater/Range Extender, White Paddle Only, 39351",,,39351,Honeywell,,Amazon,65.98,09/30/0202,,,,,,,
,Kitchen,IOT;Home Assistant; Z-Wave,1,Smart Rocker Light Dimmer,"UltraPro Z-Wave Smart Rocker Light Dimmer with QuickFit and SimpleWire, 3-Way Ready, Compatible with Alexa, Google Assistant, ZWave Hub Required, Repeater/Range Extender, White Paddle Only, 39351",,,39351,Honeywell,,Amazon,65.98,09/30/0202,,,,,,,
`

    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()

    registration := services.UserRegistration{
        Email: "demo@example.com",
        Name:  "Demo",

@@ -25,25 +30,38 @@ func (a *app) SetupDemo() {

    }

    // First check if we've already setup a demo user and skip if so
    _, err := a.services.User.Login(context.Background(), registration.Email, registration.Password, false)
    log.Debug().Msg("Checking if demo user already exists")
    _, err := a.services.User.Login(ctx, registration.Email, registration.Password, false)
    if err == nil {
        return
        log.Info().Msg("Demo user already exists, skipping setup")
        return nil
    }

    _, err = a.services.User.RegisterUser(context.Background(), registration)
    log.Debug().Msg("Demo user does not exist, setting up demo")
    _, err = a.services.User.RegisterUser(ctx, registration)
    if err != nil {
        log.Err(err).Msg("Failed to register demo user")
        log.Fatal().Msg("Failed to setup demo")
        return errors.New("failed to setup demo")
    }

    token, _ := a.services.User.Login(context.Background(), registration.Email, registration.Password, false)
    self, _ := a.services.User.GetSelf(context.Background(), token.Raw)
    token, err := a.services.User.Login(ctx, registration.Email, registration.Password, false)
    if err != nil {
        log.Err(err).Msg("Failed to login demo user")
        return errors.New("failed to setup demo")
    }
    self, err := a.services.User.GetSelf(ctx, token.Raw)
    if err != nil {
        log.Err(err).Msg("Failed to get self")
        return errors.New("failed to setup demo")
    }

    _, err = a.services.Items.CsvImport(context.Background(), self.GroupID, strings.NewReader(csvText))
    _, err = a.services.Items.CsvImport(ctx, self.GroupID, strings.NewReader(csvText))
    if err != nil {
        log.Err(err).Msg("Failed to import CSV")
        log.Fatal().Msg("Failed to setup demo")
        return errors.New("failed to setup demo")
    }

    log.Info().Msg("Demo setup complete")

    return nil
}
@@ -1,3 +1,4 @@

// Package debughandlers provides handlers for debugging.
package debughandlers

import (
@@ -1,12 +1,20 @@
|
||||
// Package v1 provides the API handlers for version 1 of the API.
|
||||
package v1
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
|
||||
"github.com/olahol/melody"
|
||||
)
|
||||
|
||||
type Results[T any] struct {
|
||||
@@ -43,12 +51,27 @@ func WithRegistration(allowRegistration bool) func(*V1Controller) {
|
||||
}
|
||||
}
|
||||
|
||||
func WithSecureCookies(secure bool) func(*V1Controller) {
|
||||
return func(ctrl *V1Controller) {
|
||||
ctrl.cookieSecure = secure
|
||||
}
|
||||
}
|
||||
|
||||
func WithURL(url string) func(*V1Controller) {
|
||||
return func(ctrl *V1Controller) {
|
||||
ctrl.url = url
|
||||
}
|
||||
}
|
||||
|
||||
type V1Controller struct {
|
||||
cookieSecure bool
|
||||
repo *repo.AllRepos
|
||||
svc *services.AllServices
|
||||
maxUploadSize int64
|
||||
isDemo bool
|
||||
allowRegistration bool
|
||||
bus *eventbus.EventBus
|
||||
url string
|
||||
}
|
||||
|
||||
type (
|
||||
@@ -60,28 +83,24 @@ type (
|
||||
BuildTime string `json:"buildTime"`
|
||||
}
|
||||
|
||||
ApiSummary struct {
|
||||
Healthy bool `json:"health"`
|
||||
Versions []string `json:"versions"`
|
||||
Title string `json:"title"`
|
||||
Message string `json:"message"`
|
||||
Build Build `json:"build"`
|
||||
Demo bool `json:"demo"`
|
||||
AllowRegistration bool `json:"allowRegistration"`
|
||||
APISummary struct {
|
||||
Healthy bool `json:"health"`
|
||||
Versions []string `json:"versions"`
|
||||
Title string `json:"title"`
|
||||
Message string `json:"message"`
|
||||
Build Build `json:"build"`
|
||||
Latest services.Latest `json:"latest"`
|
||||
Demo bool `json:"demo"`
|
||||
AllowRegistration bool `json:"allowRegistration"`
|
||||
}
|
||||
)
|
||||
|
||||
func BaseUrlFunc(prefix string) func(s string) string {
|
||||
return func(s string) string {
|
||||
return prefix + "/v1" + s
|
||||
}
|
||||
}
|
||||
|
||||
func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, options ...func(*V1Controller)) *V1Controller {
|
||||
func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, bus *eventbus.EventBus, options ...func(*V1Controller)) *V1Controller {
|
||||
ctrl := &V1Controller{
|
||||
repo: repos,
|
||||
svc: svc,
|
||||
allowRegistration: true,
|
||||
bus: bus,
|
||||
}
|
||||
|
||||
for _, opt := range options {
|
||||
@@ -93,20 +112,105 @@ func NewControllerV1(svc *services.AllServices, repos *repo.AllRepos, options ..
|
||||
|
||||
// HandleBase godoc
|
||||
//
|
||||
// @Summary Application Info
|
||||
// @Tags Base
|
||||
// @Produce json
|
||||
// @Success 200 {object} ApiSummary
|
||||
// @Router /v1/status [GET]
|
||||
// @Summary Application Info
|
||||
// @Tags Base
|
||||
// @Produce json
|
||||
// @Success 200 {object} APISummary
|
||||
// @Router /v1/status [GET]
|
||||
func (ctrl *V1Controller) HandleBase(ready ReadyFunc, build Build) errchain.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) error {
|
||||
return server.JSON(w, http.StatusOK, ApiSummary{
|
||||
return server.JSON(w, http.StatusOK, APISummary{
|
||||
Healthy: ready(),
|
||||
Title: "Homebox",
|
||||
Message: "Track, Manage, and Organize your Things",
|
||||
Build: build,
|
||||
Latest: ctrl.svc.BackgroundService.GetLatestVersion(),
|
||||
Demo: ctrl.isDemo,
|
||||
AllowRegistration: ctrl.allowRegistration,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// HandleCurrency godoc
|
||||
//
|
||||
// @Summary Currency
|
||||
// @Tags Base
|
||||
// @Produce json
|
||||
// @Success 200 {object} currencies.Currency
|
||||
// @Router /v1/currency [GET]
|
||||
func (ctrl *V1Controller) HandleCurrency() errchain.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) error {
|
||||
// Set Cache for 10 Minutes
|
||||
w.Header().Set("Cache-Control", "max-age=600")
|
||||
|
||||
return server.JSON(w, http.StatusOK, ctrl.svc.Currencies.Slice())
|
||||
}
|
||||
}
|
||||
|
||||
func (ctrl *V1Controller) HandleCacheWS() errchain.HandlerFunc {
|
||||
type eventMsg struct {
|
||||
Event string `json:"event"`
|
||||
}
|
||||
|
||||
m := melody.New()
|
||||
|
||||
m.HandleConnect(func(s *melody.Session) {
|
||||
auth := services.NewContext(s.Request.Context())
|
||||
s.Set("gid", auth.GID)
|
||||
})
|
||||
|
||||
factory := func(e string) func(data any) {
|
||||
return func(data any) {
|
||||
eventData, ok := data.(eventbus.GroupMutationEvent)
|
||||
if !ok {
|
||||
log.Log().Msgf("invalid event data: %v", data)
|
||||
return
|
||||
}
|
||||
|
||||
msg := &eventMsg{Event: e}
|
||||
|
||||
jsonBytes, err := json.Marshal(msg)
|
||||
if err != nil {
|
||||
log.Log().Msgf("error marshling event data %v: %v", data, err)
|
||||
return
|
||||
}
|
||||
|
||||
_ = m.BroadcastFilter(jsonBytes, func(s *melody.Session) bool {
|
||||
groupIDStr, ok := s.Get("gid")
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
GID := groupIDStr.(uuid.UUID)
|
||||
return GID == eventData.GID
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
ctrl.bus.Subscribe(eventbus.EventLabelMutation, factory("label.mutation"))
|
||||
ctrl.bus.Subscribe(eventbus.EventLocationMutation, factory("location.mutation"))
|
||||
ctrl.bus.Subscribe(eventbus.EventItemMutation, factory("item.mutation"))
|
||||
|
||||
// Persistent asynchronous ticker that keeps all websocket connections alive with periodic pings.
|
||||
go func() {
|
||||
const interval = 10 * time.Second
|
||||
|
||||
ping := time.NewTicker(interval)
|
||||
defer ping.Stop()
|
||||
|
||||
for range ping.C {
|
||||
msg := &eventMsg{Event: "ping"}
|
||||
|
||||
pingBytes, err := json.Marshal(msg)
|
||||
if err != nil {
|
||||
log.Log().Msgf("error marshaling ping: %v", err)
|
||||
} else {
|
||||
_ = m.Broadcast(pingBytes)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
return func(w http.ResponseWriter, r *http.Request) error {
|
||||
return m.HandleRequest(w, r)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ import (

    "github.com/go-chi/chi/v5"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/sys/validate"
    "github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

// routeID extracts the ID from the request URL. If the ID is not in a valid
@@ -5,11 +5,11 @@ import (
|
||||
"net/http"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/sys/validate"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
|
||||
)
|
||||
|
||||
type ActionAmountResult struct {
|
||||
@@ -32,39 +32,52 @@ func actionHandlerFactory(ref string, fn func(context.Context, uuid.UUID) (int,
|
||||
|
||||
// HandleEnsureAssetID godoc
|
||||
//
|
||||
// @Summary Ensure Asset IDs
|
||||
// @Description Ensures all items in the database have an asset ID
|
||||
// @Tags Actions
|
||||
// @Produce json
|
||||
// @Success 200 {object} ActionAmountResult
|
||||
// @Router /v1/actions/ensure-asset-ids [Post]
|
||||
// @Security Bearer
|
||||
// @Summary Ensure Asset IDs
|
||||
// @Description Ensures all items in the database have an asset ID
|
||||
// @Tags Actions
|
||||
// @Produce json
|
||||
// @Success 200 {object} ActionAmountResult
|
||||
// @Router /v1/actions/ensure-asset-ids [Post]
|
||||
// @Security Bearer
|
||||
func (ctrl *V1Controller) HandleEnsureAssetID() errchain.HandlerFunc {
|
||||
return actionHandlerFactory("ensure asset IDs", ctrl.svc.Items.EnsureAssetID)
|
||||
}
|
||||
|
||||
// HandleEnsureImportRefs godoc
|
||||
//
|
||||
// @Summary Ensures Import Refs
|
||||
// @Description Ensures all items in the database have an import ref
|
||||
// @Tags Actions
|
||||
// @Produce json
|
||||
// @Success 200 {object} ActionAmountResult
|
||||
// @Router /v1/actions/ensure-import-refs [Post]
|
||||
// @Security Bearer
|
||||
// @Summary Ensures Import Refs
|
||||
// @Description Ensures all items in the database have an import ref
|
||||
// @Tags Actions
|
||||
// @Produce json
|
||||
// @Success 200 {object} ActionAmountResult
|
||||
// @Router /v1/actions/ensure-import-refs [Post]
|
||||
// @Security Bearer
|
||||
func (ctrl *V1Controller) HandleEnsureImportRefs() errchain.HandlerFunc {
|
||||
return actionHandlerFactory("ensure import refs", ctrl.svc.Items.EnsureImportRef)
|
||||
}
|
||||
|
||||
// HandleItemDateZeroOut godoc
|
||||
//
|
||||
// @Summary Zero Out Time Fields
|
||||
// @Description Resets all item date fields to the beginning of the day
|
||||
// @Tags Actions
|
||||
// @Produce json
|
||||
// @Success 200 {object} ActionAmountResult
|
||||
// @Router /v1/actions/zero-item-time-fields [Post]
|
||||
// @Security Bearer
|
||||
// @Summary Zero Out Time Fields
|
||||
// @Description Resets all item date fields to the beginning of the day
|
||||
// @Tags Actions
|
||||
// @Produce json
|
||||
// @Success 200 {object} ActionAmountResult
|
||||
// @Router /v1/actions/zero-item-time-fields [Post]
|
||||
// @Security Bearer
|
||||
func (ctrl *V1Controller) HandleItemDateZeroOut() errchain.HandlerFunc {
|
||||
return actionHandlerFactory("zero out date time", ctrl.repo.Items.ZeroOutTimeFields)
|
||||
}
|
||||
|
||||
// HandleSetPrimaryPhotos godoc
|
||||
//
|
||||
// @Summary Set Primary Photos
|
||||
// @Description Sets the first photo of each item as the primary photo
|
||||
// @Tags Actions
|
||||
// @Produce json
|
||||
// @Success 200 {object} ActionAmountResult
|
||||
// @Router /v1/actions/set-primary-photos [Post]
|
||||
// @Security Bearer
|
||||
func (ctrl *V1Controller) HandleSetPrimaryPhotos() errchain.HandlerFunc {
|
||||
return actionHandlerFactory("ensure asset IDs", ctrl.repo.Items.SetPrimaryPhotos)
|
||||
}
|
||||
|
||||
@@ -6,38 +6,38 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/sys/validate"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
|
||||
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// HandleAssetGet godocs
|
||||
//
|
||||
// @Summary Get Item by Asset ID
|
||||
// @Tags Items
|
||||
// @Produce json
|
||||
// @Param id path string true "Asset ID"
|
||||
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
|
||||
// @Router /v1/assets/{id} [GET]
|
||||
// @Security Bearer
|
||||
// @Summary Get Item by Asset ID
|
||||
// @Tags Items
|
||||
// @Produce json
|
||||
// @Param id path string true "Asset ID"
|
||||
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
|
||||
// @Router /v1/assets/{id} [GET]
|
||||
// @Security Bearer
|
||||
func (ctrl *V1Controller) HandleAssetGet() errchain.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) error {
|
||||
ctx := services.NewContext(r.Context())
|
||||
assetIdParam := chi.URLParam(r, "id")
|
||||
assetIdParam = strings.ReplaceAll(assetIdParam, "-", "") // Remove dashes
|
||||
assetIDParam := chi.URLParam(r, "id")
|
||||
assetIDParam = strings.ReplaceAll(assetIDParam, "-", "") // Remove dashes
|
||||
// Convert the asset ID to an int64
|
||||
assetId, err := strconv.ParseInt(assetIdParam, 10, 64)
|
||||
assetID, err := strconv.ParseInt(assetIDParam, 10, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
pageParam := r.URL.Query().Get("page")
|
||||
var page int64 = -1
|
||||
if pageParam != "" {
|
||||
page, err = strconv.ParseInt(pageParam, 10, 64)
|
||||
page, err = strconv.ParseInt(pageParam, 10, 32)
|
||||
if err != nil {
|
||||
return server.JSON(w, http.StatusBadRequest, "Invalid page number")
|
||||
}
|
||||
@@ -46,13 +46,13 @@ func (ctrl *V1Controller) HandleAssetGet() errchain.HandlerFunc {
|
||||
pageSizeParam := r.URL.Query().Get("pageSize")
|
||||
var pageSize int64 = -1
|
||||
if pageSizeParam != "" {
|
||||
pageSize, err = strconv.ParseInt(pageSizeParam, 10, 64)
|
||||
pageSize, err = strconv.ParseInt(pageSizeParam, 10, 32)
|
||||
if err != nil {
|
||||
return server.JSON(w, http.StatusBadRequest, "Invalid page size")
|
||||
}
|
||||
}
|
||||
|
||||
items, err := ctrl.repo.Items.QueryByAssetID(r.Context(), ctx.GID, repo.AssetID(assetId), int(page), int(pageSize))
|
||||
items, err := ctrl.repo.Items.QueryByAssetID(r.Context(), ctx.GID, repo.AssetID(assetID), int(page), int(pageSize))
|
||||
if err != nil {
|
||||
log.Err(err).Msg("failed to get item")
|
||||
return validate.NewRequestError(err, http.StatusInternalServerError)
|
||||
|
||||
@@ -3,14 +3,21 @@ package v1
|
||||
import (
|
||||
"errors"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/sys/validate"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
|
||||
)
|
||||
|
||||
const (
|
||||
cookieNameToken = "hb.auth.token"
|
||||
cookieNameRemember = "hb.auth.remember"
|
||||
cookieNameSession = "hb.auth.session"
|
||||
)
|
||||
|
||||
type (
|
||||
@@ -21,66 +28,97 @@ type (
|
||||
}
|
||||
|
||||
LoginForm struct {
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
Username string `json:"username" example:"admin@admin.com"`
|
||||
Password string `json:"password" example:"admin"`
|
||||
StayLoggedIn bool `json:"stayLoggedIn"`
|
||||
}
|
||||
)
|
||||
|
||||
type CookieContents struct {
|
||||
Token string
|
||||
ExpiresAt time.Time
|
||||
Remember bool
|
||||
}
|
||||
|
||||
func GetCookies(r *http.Request) (*CookieContents, error) {
|
||||
cookie, err := r.Cookie(cookieNameToken)
|
||||
if err != nil {
|
||||
return nil, errors.New("authorization cookie is required")
|
||||
}
|
||||
|
||||
rememberCookie, err := r.Cookie(cookieNameRemember)
|
||||
if err != nil {
|
||||
return nil, errors.New("remember cookie is required")
|
||||
}
|
||||
|
||||
return &CookieContents{
|
||||
Token: cookie.Value,
|
||||
ExpiresAt: cookie.Expires,
|
||||
Remember: rememberCookie.Value == "true",
|
||||
}, nil
|
||||
}
|
||||
|
||||
// AuthProvider is an interface that can be implemented by any authentication provider.
|
||||
// to extend authentication methods for the API.
|
||||
type AuthProvider interface {
|
||||
// Name returns the name of the authentication provider. This should be a unique name.
|
||||
// that is URL friendly.
|
||||
//
|
||||
// Example: "local", "ldap"
|
||||
Name() string
|
||||
// Authenticate is called when a user attempts to login to the API. The implementation
|
||||
// should return an error if the user cannot be authenticated. If an error is returned
|
||||
// the API controller will return a vague error message to the user.
|
||||
//
|
||||
// Authenticate should do the following:
|
||||
//
|
||||
// 1. Ensure that the user exists within the database (either create, or get)
|
||||
// 2. On successful authentication, they must set the user cookies.
|
||||
Authenticate(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error)
|
||||
}
|
||||
|
||||
// HandleAuthLogin godoc
|
||||
//
|
||||
// @Summary User Login
|
||||
// @Tags Authentication
|
||||
// @Accept x-www-form-urlencoded
|
||||
// @Accept application/json
|
||||
// @Param username formData string false "string" example(admin@admin.com)
|
||||
// @Param password formData string false "string" example(admin)
|
||||
// @Param payload body LoginForm true "Login Data"
|
||||
// @Param payload body LoginForm true "Login Data"
|
||||
// @Param provider query string false "auth provider"
|
||||
// @Produce json
|
||||
// @Success 200 {object} TokenResponse
|
||||
// @Router /v1/users/login [POST]
|
||||
func (ctrl *V1Controller) HandleAuthLogin() errchain.HandlerFunc {
|
||||
func (ctrl *V1Controller) HandleAuthLogin(ps ...AuthProvider) errchain.HandlerFunc {
|
||||
if len(ps) == 0 {
|
||||
panic("no auth providers provided")
|
||||
}
|
||||
|
||||
providers := make(map[string]AuthProvider)
|
||||
for _, p := range ps {
|
||||
log.Info().Str("name", p.Name()).Msg("registering auth provider")
|
||||
providers[p.Name()] = p
|
||||
}
|
||||
|
||||
return func(w http.ResponseWriter, r *http.Request) error {
|
||||
loginForm := &LoginForm{}
|
||||
|
||||
switch r.Header.Get("Content-Type") {
|
||||
case "application/x-www-form-urlencoded":
|
||||
err := r.ParseForm()
|
||||
if err != nil {
|
||||
return errors.New("failed to parse form")
|
||||
}
|
||||
|
||||
loginForm.Username = r.PostFormValue("username")
|
||||
loginForm.Password = r.PostFormValue("password")
|
||||
loginForm.StayLoggedIn = r.PostFormValue("stayLoggedIn") == "true"
|
||||
case "application/json":
|
||||
err := server.Decode(r, loginForm)
|
||||
if err != nil {
|
||||
log.Err(err).Msg("failed to decode login form")
|
||||
return errors.New("failed to decode login form")
|
||||
}
|
||||
default:
|
||||
return server.JSON(w, http.StatusBadRequest, errors.New("invalid content type"))
|
||||
// Extract provider query
|
||||
provider := r.URL.Query().Get("provider")
|
||||
if provider == "" {
|
||||
provider = "local"
|
||||
}
|
||||
|
||||
if loginForm.Username == "" || loginForm.Password == "" {
|
||||
return validate.NewFieldErrors(
|
||||
validate.FieldError{
|
||||
Field: "username",
|
||||
Error: "username or password is empty",
|
||||
},
|
||||
validate.FieldError{
|
||||
Field: "password",
|
||||
Error: "username or password is empty",
|
||||
},
|
||||
)
|
||||
// Get the provider
|
||||
p, ok := providers[provider]
|
||||
if !ok {
|
||||
return validate.NewRequestError(errors.New("invalid auth provider"), http.StatusBadRequest)
|
||||
}
|
||||
|
||||
newToken, err := ctrl.svc.User.Login(r.Context(), strings.ToLower(loginForm.Username), loginForm.Password, loginForm.StayLoggedIn)
|
||||
newToken, err := p.Authenticate(w, r)
|
||||
if err != nil {
|
||||
return validate.NewRequestError(errors.New("authentication failed"), http.StatusInternalServerError)
|
||||
log.Err(err).Msg("failed to authenticate")
|
||||
return server.JSON(w, http.StatusInternalServerError, err.Error())
|
||||
}
|
||||
|
||||
ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, true)
|
||||
return server.JSON(w, http.StatusOK, TokenResponse{
|
||||
Token: "Bearer " + newToken.Raw,
|
||||
ExpiresAt: newToken.ExpiresAt,
|
||||
@@ -91,11 +129,11 @@ func (ctrl *V1Controller) HandleAuthLogin() errchain.HandlerFunc {
|
||||
|
||||
// HandleAuthLogout godoc
|
||||
//
|
||||
// @Summary User Logout
|
||||
// @Tags Authentication
|
||||
// @Success 204
|
||||
// @Router /v1/users/logout [POST]
|
||||
// @Security Bearer
|
||||
// @Summary User Logout
|
||||
// @Tags Authentication
|
||||
// @Success 204
|
||||
// @Router /v1/users/logout [POST]
|
||||
// @Security Bearer
|
||||
func (ctrl *V1Controller) HandleAuthLogout() errchain.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) error {
|
||||
token := services.UseTokenCtx(r.Context())
|
||||
@@ -108,19 +146,20 @@ func (ctrl *V1Controller) HandleAuthLogout() errchain.HandlerFunc {
|
||||
return validate.NewRequestError(err, http.StatusInternalServerError)
|
||||
}
|
||||
|
||||
ctrl.unsetCookies(w, noPort(r.Host))
|
||||
return server.JSON(w, http.StatusNoContent, nil)
|
||||
}
|
||||
}
|
||||
|
||||
// HandleAuthLogout godoc
|
||||
// HandleAuthRefresh godoc
|
||||
//
|
||||
// @Summary User Token Refresh
|
||||
// @Description handleAuthRefresh returns a handler that will issue a new token from an existing token.
|
||||
// @Description This does not validate that the user still exists within the database.
|
||||
// @Tags Authentication
|
||||
// @Success 200
|
||||
// @Router /v1/users/refresh [GET]
|
||||
// @Security Bearer
|
||||
// @Summary User Token Refresh
|
||||
// @Description handleAuthRefresh returns a handler that will issue a new token from an existing token.
|
||||
// @Description This does not validate that the user still exists within the database.
|
||||
// @Tags Authentication
|
||||
// @Success 200
|
||||
// @Router /v1/users/refresh [GET]
|
||||
// @Security Bearer
|
||||
func (ctrl *V1Controller) HandleAuthRefresh() errchain.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) error {
|
||||
requestToken := services.UseTokenCtx(r.Context())
|
||||
@@ -133,6 +172,78 @@ func (ctrl *V1Controller) HandleAuthRefresh() errchain.HandlerFunc {
|
||||
return validate.NewUnauthorizedError()
|
||||
}
|
||||
|
||||
ctrl.setCookies(w, noPort(r.Host), newToken.Raw, newToken.ExpiresAt, false)
|
||||
return server.JSON(w, http.StatusOK, newToken)
|
||||
}
|
||||
}
|
||||
|
||||
func noPort(host string) string {
|
||||
return strings.Split(host, ":")[0]
|
||||
}
|
||||
|
||||
func (ctrl *V1Controller) setCookies(w http.ResponseWriter, domain, token string, expires time.Time, remember bool) {
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
Name: cookieNameRemember,
|
||||
Value: strconv.FormatBool(remember),
|
||||
Expires: expires,
|
||||
Domain: domain,
|
||||
Secure: ctrl.cookieSecure,
|
||||
HttpOnly: true,
|
||||
Path: "/",
|
||||
})
|
||||
|
||||
// Set HTTP only cookie
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
Name: cookieNameToken,
|
||||
Value: token,
|
||||
Expires: expires,
|
||||
Domain: domain,
|
||||
Secure: ctrl.cookieSecure,
|
||||
HttpOnly: true,
|
||||
Path: "/",
|
||||
})
|
||||
|
||||
// Set Fake Session cookie
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
Name: cookieNameSession,
|
||||
Value: "true",
|
||||
Expires: expires,
|
||||
Domain: domain,
|
||||
Secure: ctrl.cookieSecure,
|
||||
HttpOnly: false,
|
||||
Path: "/",
|
||||
})
|
||||
}
|
||||
|
||||
func (ctrl *V1Controller) unsetCookies(w http.ResponseWriter, domain string) {
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
Name: cookieNameToken,
|
||||
Value: "",
|
||||
Expires: time.Unix(0, 0),
|
||||
Domain: domain,
|
||||
Secure: ctrl.cookieSecure,
|
||||
HttpOnly: true,
|
||||
Path: "/",
|
||||
})
|
||||
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
Name: cookieNameRemember,
|
||||
Value: "false",
|
||||
Expires: time.Unix(0, 0),
|
||||
Domain: domain,
|
||||
Secure: ctrl.cookieSecure,
|
||||
HttpOnly: true,
|
||||
Path: "/",
|
||||
})
|
||||
|
||||
// Set Fake Session cookie
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
Name: cookieNameSession,
|
||||
Value: "false",
|
||||
Expires: time.Unix(0, 0),
|
||||
Domain: domain,
|
||||
Secure: ctrl.cookieSecure,
|
||||
HttpOnly: false,
|
||||
Path: "/",
|
||||
})
|
||||
}
|
||||
|
||||
@@ -4,15 +4,16 @@ import (
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/web/adapters"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
|
||||
)
|
||||
|
||||
type (
|
||||
GroupInvitationCreate struct {
|
||||
Uses int `json:"uses" validate:"required,min=1,max=100"`
|
||||
Uses int `json:"uses" validate:"required,min=1,max=100"`
|
||||
ExpiresAt time.Time `json:"expiresAt"`
|
||||
}
|
||||
|
||||
@@ -25,12 +26,12 @@ type (
|
||||
|
||||
// HandleGroupGet godoc
|
||||
//
|
||||
// @Summary Get Group
|
||||
// @Tags Group
|
||||
// @Produce json
|
||||
// @Success 200 {object} repo.Group
|
||||
// @Router /v1/groups [Get]
|
||||
// @Security Bearer
|
||||
// @Summary Get Group
|
||||
// @Tags Group
|
||||
// @Produce json
|
||||
// @Success 200 {object} repo.Group
|
||||
// @Router /v1/groups [Get]
|
||||
// @Security Bearer
|
||||
func (ctrl *V1Controller) HandleGroupGet() errchain.HandlerFunc {
|
||||
fn := func(r *http.Request) (repo.Group, error) {
|
||||
auth := services.NewContext(r.Context())
|
||||
@@ -42,16 +43,24 @@ func (ctrl *V1Controller) HandleGroupGet() errchain.HandlerFunc {
|
||||
|
||||
// HandleGroupUpdate godoc
|
||||
//
|
||||
// @Summary Update Group
|
||||
// @Tags Group
|
||||
// @Produce json
|
||||
// @Param payload body repo.GroupUpdate true "User Data"
|
||||
// @Success 200 {object} repo.Group
|
||||
// @Router /v1/groups [Put]
|
||||
// @Security Bearer
|
||||
// @Summary Update Group
|
||||
// @Tags Group
|
||||
// @Produce json
|
||||
// @Param payload body repo.GroupUpdate true "User Data"
|
||||
// @Success 200 {object} repo.Group
|
||||
// @Router /v1/groups [Put]
|
||||
// @Security Bearer
|
||||
func (ctrl *V1Controller) HandleGroupUpdate() errchain.HandlerFunc {
|
||||
fn := func(r *http.Request, body repo.GroupUpdate) (repo.Group, error) {
|
||||
auth := services.NewContext(r.Context())
|
||||
|
||||
ok := ctrl.svc.Currencies.IsSupported(body.Currency)
|
||||
if !ok {
|
||||
return repo.Group{}, validate.NewFieldErrors(
|
||||
validate.NewFieldError("currency", "currency '"+body.Currency+"' is not supported"),
|
||||
)
|
||||
}
|
||||
|
||||
return ctrl.svc.Group.UpdateGroup(auth, body)
|
||||
}
|
||||
|
||||
@@ -60,13 +69,13 @@ func (ctrl *V1Controller) HandleGroupUpdate() errchain.HandlerFunc {
|
||||
|
||||
// HandleGroupInvitationsCreate godoc
|
||||
//
|
||||
// @Summary Create Group Invitation
|
||||
// @Tags Group
|
||||
// @Produce json
|
||||
// @Param payload body GroupInvitationCreate true "User Data"
|
||||
// @Success 200 {object} GroupInvitation
|
||||
// @Router /v1/groups/invitations [Post]
|
||||
// @Security Bearer
|
||||
// @Summary Create Group Invitation
|
||||
// @Tags Group
|
||||
// @Produce json
|
||||
// @Param payload body GroupInvitationCreate true "User Data"
|
||||
// @Success 200 {object} GroupInvitation
|
||||
// @Router /v1/groups/invitations [Post]
|
||||
// @Security Bearer
|
||||
func (ctrl *V1Controller) HandleGroupInvitationsCreate() errchain.HandlerFunc {
|
||||
fn := func(r *http.Request, body GroupInvitationCreate) (GroupInvitation, error) {
|
||||
if body.ExpiresAt.IsZero() {
|
||||
|
||||
@@ -4,32 +4,37 @@ import (
|
||||
"database/sql"
|
||||
"encoding/csv"
|
||||
"errors"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/sys/validate"
|
||||
"github.com/hay-kot/homebox/backend/internal/web/adapters"
|
||||
"github.com/hay-kot/httpkit/errchain"
|
||||
"github.com/hay-kot/httpkit/server"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
|
||||
)
|
||||
|
||||
// HandleItemsGetAll godoc
|
||||
//
|
||||
// @Summary Query All Items
|
||||
// @Tags Items
|
||||
// @Produce json
|
||||
// @Param q query string false "search string"
|
||||
// @Param page query int false "page number"
|
||||
// @Param pageSize query int false "items per page"
|
||||
// @Param labels query []string false "label Ids" collectionFormat(multi)
|
||||
// @Param locations query []string false "location Ids" collectionFormat(multi)
|
||||
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
|
||||
// @Router /v1/items [GET]
|
||||
// @Security Bearer
|
||||
// @Summary Query All Items
|
||||
// @Tags Items
|
||||
// @Produce json
|
||||
// @Param q query string false "search string"
|
||||
// @Param page query int false "page number"
|
||||
// @Param pageSize query int false "items per page"
|
||||
// @Param labels query []string false "label Ids" collectionFormat(multi)
|
||||
// @Param locations query []string false "location Ids" collectionFormat(multi)
|
||||
// @Param parentIds query []string false "parent Ids" collectionFormat(multi)
|
||||
// @Success 200 {object} repo.PaginationResult[repo.ItemSummary]{}
|
||||
// @Router /v1/items [GET]
|
||||
// @Security Bearer
|
||||
func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
|
||||
extractQuery := func(r *http.Request) repo.ItemQuery {
|
||||
params := r.URL.Query()
|
||||
@@ -56,6 +61,8 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
|
||||
Search: params.Get("q"),
|
||||
LocationIDs: queryUUIDList(params, "locations"),
|
||||
LabelIDs: queryUUIDList(params, "labels"),
|
||||
NegateLabels: queryBool(params.Get("negateLabels")),
|
||||
ParentItemIDs: queryUUIDList(params, "parentIds"),
|
||||
IncludeArchived: queryBool(params.Get("includeArchived")),
|
||||
Fields: filterFieldItems(params["fields"]),
|
||||
OrderBy: params.Get("orderBy"),
|
||||
@@ -78,6 +85,14 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
|
||||
ctx := services.NewContext(r.Context())
|
||||
|
||||
items, err := ctrl.repo.Items.QueryByGroup(ctx, ctx.GID, extractQuery(r))
|
||||
totalPrice := new(big.Int)
|
||||
for _, item := range items.Items {
|
||||
totalPrice.Add(totalPrice, big.NewInt(int64(item.PurchasePrice*100)))
|
||||
}
|
||||
|
||||
totalPriceFloat := new(big.Float).SetInt(totalPrice)
|
||||
totalPriceFloat.Quo(totalPriceFloat, big.NewFloat(100))
|
||||
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return server.JSON(w, http.StatusOK, repo.PaginationResult[repo.ItemSummary]{
|
||||
@@ -91,15 +106,57 @@ func (ctrl *V1Controller) HandleItemsGetAll() errchain.HandlerFunc {
|
||||
}
|
||||
}
|
||||
|
||||
// HandleItemFullPath godoc
//
// @Summary Get the full path of an item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} []repo.ItemPath
// @Router /v1/items/{id}/path [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemFullPath() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) ([]repo.ItemPath, error) {
auth := services.NewContext(r.Context())
item, err := ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
if err != nil {
return nil, err
}

paths, err := ctrl.repo.Locations.PathForLoc(auth, auth.GID, item.Location.ID)
if err != nil {
return nil, err
}

if item.Parent != nil {
paths = append(paths, repo.ItemPath{
Type: repo.ItemTypeItem,
ID: item.Parent.ID,
Name: item.Parent.Name,
})
}

paths = append(paths, repo.ItemPath{
Type: repo.ItemTypeItem,
ID: item.ID,
Name: item.Name,
})

return paths, nil
}

return adapters.CommandID("id", fn, http.StatusOK)
}
// HandleItemsCreate godoc
//
// @Summary Create Item
// @Tags Items
// @Produce json
// @Param payload body repo.ItemCreate true "Item Data"
// @Success 201 {object} repo.ItemSummary
// @Router /v1/items [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsCreate() errchain.HandlerFunc {
fn := func(r *http.Request, body repo.ItemCreate) (repo.ItemOut, error) {
return ctrl.svc.Items.Create(services.NewContext(r.Context()), body)

@@ -110,13 +167,13 @@ func (ctrl *V1Controller) HandleItemsCreate() errchain.HandlerFunc {

// HandleItemGet godocs
//
// @Summary Get Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.ItemOut, error) {
auth := services.NewContext(r.Context())

@@ -129,13 +186,13 @@ func (ctrl *V1Controller) HandleItemGet() errchain.HandlerFunc {

// HandleItemDelete godocs
//
// @Summary Delete Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Success 204
// @Router /v1/items/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())

@@ -148,14 +205,14 @@ func (ctrl *V1Controller) HandleItemDelete() errchain.HandlerFunc {

// HandleItemUpdate godocs
//
// @Summary Update Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.ItemUpdate true "Item Data"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.ItemUpdate) (repo.ItemOut, error) {
auth := services.NewContext(r.Context())

@@ -167,15 +224,41 @@ func (ctrl *V1Controller) HandleItemUpdate() errchain.HandlerFunc {
return adapters.ActionID("id", fn, http.StatusOK)
}
// HandleItemPatch godocs
//
// @Summary Update Item
// @Tags Items
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.ItemPatch true "Item Data"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id} [Patch]
// @Security Bearer
func (ctrl *V1Controller) HandleItemPatch() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.ItemPatch) (repo.ItemOut, error) {
auth := services.NewContext(r.Context())

body.ID = ID
err := ctrl.repo.Items.Patch(auth, auth.GID, ID, body)
if err != nil {
return repo.ItemOut{}, err
}

return ctrl.repo.Items.GetOneByGroup(auth, auth.GID, ID)
}

return adapters.ActionID("id", fn, http.StatusOK)
}
// HandleGetAllCustomFieldNames godocs
//
// @Summary Get All Custom Field Names
// @Tags Items
// @Produce json
// @Success 200
// @Router /v1/items/fields [GET]
// @Success 200 {object} []string
// @Security Bearer
func (ctrl *V1Controller) HandleGetAllCustomFieldNames() errchain.HandlerFunc {
fn := func(r *http.Request) ([]string, error) {
auth := services.NewContext(r.Context())

@@ -187,13 +270,13 @@ func (ctrl *V1Controller) HandleGetAllCustomFieldNames() errchain.HandlerFunc {

// HandleGetAllCustomFieldValues godocs
//
// @Summary Get All Custom Field Values
// @Tags Items
// @Produce json
// @Success 200
// @Router /v1/items/fields/values [GET]
// @Success 200 {object} []string
// @Security Bearer
func (ctrl *V1Controller) HandleGetAllCustomFieldValues() errchain.HandlerFunc {
type query struct {
Field string `schema:"field" validate:"required"`

@@ -204,14 +287,14 @@ func (ctrl *V1Controller) HandleGetAllCustomFieldValues() errchain.HandlerFunc {
return ctrl.repo.Items.GetAllCustomFieldValues(auth, auth.GID, q.Field)
}

return adapters.Action(fn, http.StatusOK)
return adapters.Query(fn, http.StatusOK)
}
// HandleItemsImport godocs
//
// @Summary Import Items
// @Tags Items
// @Accept multipart/form-data
// @Produce json
// @Success 204
// @Param csv formData file true "Image to upload"

@@ -245,26 +328,52 @@ func (ctrl *V1Controller) HandleItemsImport() errchain.HandlerFunc {

// HandleItemsExport godocs
//
// @Summary Export Items
// @Tags Items
// @Success 200 {string} string "text/csv"
// @Router /v1/items/export [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemsExport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
ctx := services.NewContext(r.Context())

csvData, err := ctrl.svc.Items.ExportTSV(r.Context(), ctx.GID)
csvData, err := ctrl.svc.Items.ExportCSV(r.Context(), ctx.GID, getHBURL(r.Header.Get("Referer"), ctrl.url))
if err != nil {
log.Err(err).Msg("failed to export items")
return validate.NewRequestError(err, http.StatusInternalServerError)
}

w.Header().Set("Content-Type", "text/tsv")
w.Header().Set("Content-Disposition", "attachment;filename=homebox-items.tsv")
timestamp := time.Now().Format("2006-01-02_15-04-05") // YYYY-MM-DD_HH-MM-SS format
filename := fmt.Sprintf("homebox-items_%s.csv", timestamp) // add timestamp to filename

w.Header().Set("Content-Type", "text/csv")
w.Header().Set("Content-Disposition", fmt.Sprintf("attachment;filename=%s", filename))

writer := csv.NewWriter(w)
writer.Comma = '\t'
writer.Comma = ','
return writer.WriteAll(csvData)
}
}
func getHBURL(refererHeader, fallback string) (hbURL string) {
hbURL = refererHeader
if hbURL == "" {
hbURL = fallback
}

return stripPathFromURL(hbURL)
}

// stripPathFromURL removes the path from a URL.
// ex. https://example.com/tools -> https://example.com
func stripPathFromURL(rawURL string) string {
parsedURL, err := url.Parse(rawURL)
if err != nil {
log.Err(err).Msg("failed to parse URL")
return ""
}

strippedURL := url.URL{Scheme: parsedURL.Scheme, Host: parsedURL.Host}

return strippedURL.String()
}
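A short usage sketch of the two helpers above, assuming only the standard library: the Referer header wins when present, the configured fallback is used otherwise, and everything after scheme://host is dropped. The URLs are illustrative.

package main

import (
	"fmt"
	"net/url"
)

// baseURL mirrors getHBURL + stripPathFromURL: prefer the referer, fall back
// to a configured URL, and keep only scheme and host.
func baseURL(referer, fallback string) string {
	raw := referer
	if raw == "" {
		raw = fallback
	}
	u, err := url.Parse(raw)
	if err != nil {
		return ""
	}
	return (&url.URL{Scheme: u.Scheme, Host: u.Host}).String()
}

func main() {
	fmt.Println(baseURL("https://box.example.com/items/42", "http://localhost:7745")) // https://box.example.com
	fmt.Println(baseURL("", "http://localhost:7745/api"))                             // http://localhost:7745
}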
@@ -3,14 +3,16 @@ package v1
import (
"errors"
"net/http"
"path/filepath"
"strings"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)
type (

@@ -23,13 +25,14 @@ type (
//
// @Summary Create Item Attachment
// @Tags Items Attachments
// @Accept multipart/form-data
// @Produce json
// @Param id path string true "Item ID"
// @Param file formData file true "File attachment"
// @Param type formData string true "Type of file"
// @Param name formData string true "name of the file including extension"
// @Success 200 {object} repo.ItemOut
// @Failure 422 {object} mid.ErrorResponse
// @Failure 422 {object} validate.ErrorResponse
// @Router /v1/items/{id}/attachments [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {

@@ -38,7 +41,6 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
if err != nil {
log.Err(err).Msg("failed to parse multipart form")
return validate.NewRequestError(errors.New("failed to parse multipart form"), http.StatusBadRequest)
}

errs := validate.NewFieldErrors()

@@ -67,7 +69,15 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {
attachmentType := r.FormValue("type")
if attachmentType == "" {
attachmentType = attachment.TypeAttachment.String()
// Attempt to auto-detect the type of the file
ext := filepath.Ext(attachmentName)

switch strings.ToLower(ext) {
case ".jpg", ".jpeg", ".png", ".webp", ".gif", ".bmp", ".tiff":
attachmentType = attachment.TypePhoto.String()
default:
attachmentType = attachment.TypeAttachment.String()
}
}

id, err := ctrl.routeID(r)
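A standalone sketch of the default-type logic added above: when no type is posted, the file extension decides between a photo and a generic attachment. The plain strings stand in for the generated attachment.TypePhoto / attachment.TypeAttachment constants.

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// detectType maps common image extensions to "photo" and everything else to
// a generic "attachment", matching the switch in the hunk above.
func detectType(name string) string {
	switch strings.ToLower(filepath.Ext(name)) {
	case ".jpg", ".jpeg", ".png", ".webp", ".gif", ".bmp", ".tiff":
		return "photo"
	default:
		return "attachment"
	}
}

func main() {
	fmt.Println(detectType("receipt.PNG")) // photo
	fmt.Println(detectType("manual.pdf"))  // attachment
}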
@@ -95,41 +105,41 @@ func (ctrl *V1Controller) HandleItemAttachmentCreate() errchain.HandlerFunc {

// HandleItemAttachmentGet godocs
//
// @Summary Get Item Attachment
// @Tags Items Attachments
// @Produce application/octet-stream
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 200 {object} ItemAttachmentToken
// @Router /v1/items/{id}/attachments/{attachment_id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentGet() errchain.HandlerFunc {
return ctrl.handleItemAttachmentsHandler
}

// HandleItemAttachmentDelete godocs
//
// @Summary Delete Item Attachment
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Success 204
// @Router /v1/items/{id}/attachments/{attachment_id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentDelete() errchain.HandlerFunc {
return ctrl.handleItemAttachmentsHandler
}

// HandleItemAttachmentUpdate godocs
//
// @Summary Update Item Attachment
// @Tags Items Attachments
// @Param id path string true "Item ID"
// @Param attachment_id path string true "Attachment ID"
// @Param payload body repo.ItemAttachmentUpdate true "Attachment Update"
// @Success 200 {object} repo.ItemOut
// @Router /v1/items/{id}/attachments/{attachment_id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleItemAttachmentUpdate() errchain.HandlerFunc {
return ctrl.handleItemAttachmentsHandler
}
@@ -4,20 +4,20 @@ import (
"net/http"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleLabelsGetAll godoc
//
// @Summary Get All Labels
// @Tags Labels
// @Produce json
// @Success 200 {object} []repo.LabelOut
// @Router /v1/labels [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelsGetAll() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.LabelSummary, error) {
auth := services.NewContext(r.Context())

@@ -29,13 +29,13 @@ func (ctrl *V1Controller) HandleLabelsGetAll() errchain.HandlerFunc {

// HandleLabelsCreate godoc
//
// @Summary Create Label
// @Tags Labels
// @Produce json
// @Param payload body repo.LabelCreate true "Label Data"
// @Success 200 {object} repo.LabelSummary
// @Router /v1/labels [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelsCreate() errchain.HandlerFunc {
fn := func(r *http.Request, data repo.LabelCreate) (repo.LabelOut, error) {
auth := services.NewContext(r.Context())

@@ -47,13 +47,13 @@ func (ctrl *V1Controller) HandleLabelsCreate() errchain.HandlerFunc {

// HandleLabelDelete godocs
//
// @Summary Delete Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 204
// @Router /v1/labels/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())

@@ -66,13 +66,13 @@ func (ctrl *V1Controller) HandleLabelDelete() errchain.HandlerFunc {

// HandleLabelGet godocs
//
// @Summary Get Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.LabelOut, error) {
auth := services.NewContext(r.Context())

@@ -84,13 +84,13 @@ func (ctrl *V1Controller) HandleLabelGet() errchain.HandlerFunc {

// HandleLabelUpdate godocs
//
// @Summary Update Label
// @Tags Labels
// @Produce json
// @Param id path string true "Label ID"
// @Success 200 {object} repo.LabelOut
// @Router /v1/labels/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleLabelUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, data repo.LabelUpdate) (repo.LabelOut, error) {
auth := services.NewContext(r.Context())
@@ -1,24 +1,26 @@
package v1

import (
"context"
"math/big"
"net/http"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleLocationTreeQuery
// HandleLocationTreeQuery godoc
//
// @Summary Get Locations Tree
// @Tags Locations
// @Produce json
// @Param withItems query bool false "include items in response tree"
// @Success 200 {object} []repo.TreeItem
// @Router /v1/locations/tree [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationTreeQuery() errchain.HandlerFunc {
fn := func(r *http.Request, query repo.TreeQuery) ([]repo.TreeItem, error) {
auth := services.NewContext(r.Context())

@@ -28,15 +30,15 @@ func (ctrl *V1Controller) HandleLocationTreeQuery() errchain.HandlerFunc {
return adapters.Query(fn, http.StatusOK)
}

// HandleLocationGetAll
// HandleLocationGetAll godoc
//
// @Summary Get All Locations
// @Tags Locations
// @Produce json
// @Param filterChildren query bool false "Filter locations with parents"
// @Success 200 {object} []repo.LocationOutCount
// @Router /v1/locations [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationGetAll() errchain.HandlerFunc {
fn := func(r *http.Request, q repo.LocationQuery) ([]repo.LocationOutCount, error) {
auth := services.NewContext(r.Context())

@@ -46,15 +48,15 @@ func (ctrl *V1Controller) HandleLocationGetAll() errchain.HandlerFunc {
return adapters.Query(fn, http.StatusOK)
}

// HandleLocationCreate
// HandleLocationCreate godoc
//
// @Summary Create Location
// @Tags Locations
// @Produce json
// @Param payload body repo.LocationCreate true "Location Data"
// @Success 200 {object} repo.LocationSummary
// @Router /v1/locations [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationCreate() errchain.HandlerFunc {
fn := func(r *http.Request, createData repo.LocationCreate) (repo.LocationOut, error) {
auth := services.NewContext(r.Context())

@@ -64,15 +66,15 @@ func (ctrl *V1Controller) HandleLocationCreate() errchain.HandlerFunc {
return adapters.Action(fn, http.StatusCreated)
}

// HandleLocationDelete
// HandleLocationDelete godoc
//
// @Summary Delete Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 204
// @Router /v1/locations/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationDelete() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())

@@ -83,34 +85,73 @@ func (ctrl *V1Controller) HandleLocationDelete() errchain.HandlerFunc {
return adapters.CommandID("id", fn, http.StatusNoContent)
}

// HandleLocationGet
func (ctrl *V1Controller) GetLocationWithPrice(auth context.Context, gid uuid.UUID, id uuid.UUID) (repo.LocationOut, error) {
var location, err = ctrl.repo.Locations.GetOneByGroup(auth, gid, id)
if err != nil {
return repo.LocationOut{}, err
}

// Add direct child items price
totalPrice := new(big.Int)
items, err := ctrl.repo.Items.QueryByGroup(auth, gid, repo.ItemQuery{LocationIDs: []uuid.UUID{id}})
if err != nil {
return repo.LocationOut{}, err
}

for _, item := range items.Items {
// Convert item.Quantity to float64 for multiplication
quantity := float64(item.Quantity)
itemTotal := big.NewInt(int64(item.PurchasePrice * quantity * 100))
totalPrice.Add(totalPrice, itemTotal)
}

totalPriceFloat := new(big.Float).SetInt(totalPrice)
totalPriceFloat.Quo(totalPriceFloat, big.NewFloat(100))
location.TotalPrice, _ = totalPriceFloat.Float64()

// Add price from child locations
for _, childLocation := range location.Children {
var childLocationWithPrice repo.LocationOut
childLocationWithPrice, err = ctrl.GetLocationWithPrice(auth, gid, childLocation.ID)
if err != nil {
return repo.LocationOut{}, err
}
location.TotalPrice += childLocationWithPrice.TotalPrice
}

return location, nil
}
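GetLocationWithPrice above rolls prices up recursively: each location sums quantity times unit price for its own items in cents, then adds the already-computed totals of its child locations. A simplified sketch of that roll-up with stand-in types, not the repo.LocationOut shape:

package main

import (
	"fmt"
	"math/big"
)

// Illustrative stand-ins for the repository types used above.
type item struct {
	Price float64 // unit purchase price
	Qty   int
}

type location struct {
	Items    []item
	Children []location
}

// total sums a location's own items in cents, then adds child totals,
// mirroring the recursion in GetLocationWithPrice.
func total(l location) float64 {
	cents := new(big.Int)
	for _, it := range l.Items {
		cents.Add(cents, big.NewInt(int64(it.Price*float64(it.Qty)*100)))
	}
	f := new(big.Float).SetInt(cents)
	f.Quo(f, big.NewFloat(100))
	sum, _ := f.Float64()
	for _, c := range l.Children {
		sum += total(c) // child totals are added as float64, as in the handler
	}
	return sum
}

func main() {
	garage := location{Items: []item{{Price: 10, Qty: 2}}}
	house := location{Items: []item{{Price: 99.5, Qty: 1}}, Children: []location{garage}}
	fmt.Printf("%.2f\n", total(house)) // 119.50
}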
// HandleLocationGet godoc
//
// @Summary Get Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (repo.LocationOut, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.Locations.GetOneByGroup(auth, auth.GID, ID)
var location, err = ctrl.GetLocationWithPrice(auth, auth.GID, ID)

return location, err
}

return adapters.CommandID("id", fn, http.StatusOK)
}

// HandleLocationUpdate
// HandleLocationUpdate godoc
//
// @Summary Update Location
// @Tags Locations
// @Produce json
// @Param id path string true "Location ID"
// @Param payload body repo.LocationUpdate true "Location Data"
// @Success 200 {object} repo.LocationOut
// @Router /v1/locations/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleLocationUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, body repo.LocationUpdate) (repo.LocationOut, error) {
auth := services.NewContext(r.Context())
@@ -4,24 +4,26 @@ import (
"net/http"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleMaintenanceGetLog godoc
// HandleMaintenanceLogGet godoc
//
// @Summary Get Maintenance Log
// @Tags Maintenance
// @Tags Item Maintenance
// @Produce json
// @Success 200 {object} repo.MaintenanceLog
// @Param id path string true "Item ID"
// @Param filters query repo.MaintenanceFilters false "which maintenance to retrieve"
// @Success 200 {array} repo.MaintenanceEntryWithDetails[]
// @Router /v1/items/{id}/maintenance [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceLogGet() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, q repo.MaintenanceLogQuery) (repo.MaintenanceLog, error) {
fn := func(r *http.Request, ID uuid.UUID, filters repo.MaintenanceFilters) ([]repo.MaintenanceEntryWithDetails, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.GetLog(auth, auth.GID, ID, q)
return ctrl.repo.MaintEntry.GetMaintenanceByItemID(auth, auth.GID, ID, filters)
}

return adapters.QueryID("id", fn, http.StatusOK)

@@ -30,8 +32,9 @@ func (ctrl *V1Controller) HandleMaintenanceLogGet() errchain.HandlerFunc {
// HandleMaintenanceEntryCreate godoc
//
// @Summary Create Maintenance Entry
// @Tags Maintenance
// @Tags Item Maintenance
// @Produce json
// @Param id path string true "Item ID"
// @Param payload body repo.MaintenanceEntryCreate true "Entry Data"
// @Success 201 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance [POST]

@@ -44,39 +47,3 @@ func (ctrl *V1Controller) HandleMaintenanceEntryCreate() errchain.HandlerFunc {

return adapters.ActionID("id", fn, http.StatusCreated)
}

// HandleMaintenanceEntryDelete godoc
//
// @Summary Delete Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Success 204
// @Router /v1/items/{id}/maintenance/{entry_id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryDelete() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.MaintEntry.Delete(auth, entryID)
return nil, err
}

return adapters.CommandID("entry_id", fn, http.StatusNoContent)
}

// HandleMaintenanceEntryUpdate godoc
//
// @Summary Update Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/items/{id}/maintenance/{entry_id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID, body repo.MaintenanceEntryUpdate) (repo.MaintenanceEntry, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.Update(auth, entryID, body)
}

return adapters.ActionID("entry_id", fn, http.StatusOK)
}
backend/app/api/handlers/v1/v1_ctrl_maintenance.go (new file, 67 lines)
@@ -0,0 +1,67 @@
package v1

import (
"net/http"

"github.com/google/uuid"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleMaintenanceGetAll godoc
//
// @Summary Query All Maintenance
// @Tags Maintenance
// @Produce json
// @Param filters query repo.MaintenanceFilters false "which maintenance to retrieve"
// @Success 200 {array} repo.MaintenanceEntryWithDetails[]
// @Router /v1/maintenance [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceGetAll() errchain.HandlerFunc {
fn := func(r *http.Request, filters repo.MaintenanceFilters) ([]repo.MaintenanceEntryWithDetails, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.GetAllMaintenance(auth, auth.GID, filters)
}

return adapters.Query(fn, http.StatusOK)
}

// HandleMaintenanceEntryUpdate godoc
//
// @Summary Update Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param id path string true "Maintenance ID"
// @Param payload body repo.MaintenanceEntryUpdate true "Entry Data"
// @Success 200 {object} repo.MaintenanceEntry
// @Router /v1/maintenance/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryUpdate() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID, body repo.MaintenanceEntryUpdate) (repo.MaintenanceEntry, error) {
auth := services.NewContext(r.Context())
return ctrl.repo.MaintEntry.Update(auth, entryID, body)
}

return adapters.ActionID("id", fn, http.StatusOK)
}

// HandleMaintenanceEntryDelete godoc
//
// @Summary Delete Maintenance Entry
// @Tags Maintenance
// @Produce json
// @Param id path string true "Maintenance ID"
// @Success 204
// @Router /v1/maintenance/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleMaintenanceEntryDelete() errchain.HandlerFunc {
fn := func(r *http.Request, entryID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())
err := ctrl.repo.MaintEntry.Delete(auth, entryID)
return nil, err
}

return adapters.CommandID("id", fn, http.StatusNoContent)
}
@@ -5,20 +5,20 @@ import (

"github.com/containrrr/shoutrrr"
"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleGetUserNotifiers godoc
//
// @Summary Get Notifiers
// @Tags Notifiers
// @Produce json
// @Success 200 {object} []repo.NotifierOut
// @Router /v1/notifiers [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGetUserNotifiers() errchain.HandlerFunc {
fn := func(r *http.Request, _ struct{}) ([]repo.NotifierOut, error) {
user := services.UseUserCtx(r.Context())

@@ -30,13 +30,13 @@ func (ctrl *V1Controller) HandleGetUserNotifiers() errchain.HandlerFunc {

// HandleCreateNotifier godoc
//
// @Summary Create Notifier
// @Tags Notifiers
// @Produce json
// @Param payload body repo.NotifierCreate true "Notifier Data"
// @Success 200 {object} repo.NotifierOut
// @Router /v1/notifiers [POST]
// @Security Bearer
func (ctrl *V1Controller) HandleCreateNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, in repo.NotifierCreate) (repo.NotifierOut, error) {
auth := services.NewContext(r.Context())

@@ -48,12 +48,12 @@ func (ctrl *V1Controller) HandleCreateNotifier() errchain.HandlerFunc {

// HandleDeleteNotifier godocs
//
// @Summary Delete a Notifier
// @Tags Notifiers
// @Param id path string true "Notifier ID"
// @Success 204
// @Router /v1/notifiers/{id} [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleDeleteNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID) (any, error) {
auth := services.NewContext(r.Context())

@@ -65,13 +65,13 @@ func (ctrl *V1Controller) HandleDeleteNotifier() errchain.HandlerFunc {

// HandleUpdateNotifier godocs
//
// @Summary Update Notifier
// @Tags Notifiers
// @Param id path string true "Notifier ID"
// @Param payload body repo.NotifierUpdate true "Notifier Data"
// @Success 200 {object} repo.NotifierOut
// @Router /v1/notifiers/{id} [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUpdateNotifier() errchain.HandlerFunc {
fn := func(r *http.Request, ID uuid.UUID, in repo.NotifierUpdate) (repo.NotifierOut, error) {
auth := services.NewContext(r.Context())

@@ -86,7 +86,6 @@ func (ctrl *V1Controller) HandleUpdateNotifier() errchain.HandlerFunc {
// @Summary Test Notifier
// @Tags Notifiers
// @Produce json
// @Param id path string true "Notifier ID"
// @Param url query string true "URL"
// @Success 204
// @Router /v1/notifiers/test [POST]
@@ -5,9 +5,10 @@ import (
"image/png"
"io"
"net/http"
"net/url"

"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
"github.com/yeqown/go-qrcode/v2"
"github.com/yeqown/go-qrcode/writer/standard"

@@ -19,13 +20,13 @@ var qrcodeLogo []byte

// HandleGenerateQRCode godoc
//
// @Summary Create QR Code
// @Tags Items
// @Produce json
// @Param data query string false "data to be encoded into qrcode"
// @Success 200 {string} string "image/jpeg"
// @Router /v1/qrcode [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGenerateQRCode() errchain.HandlerFunc {
type query struct {
// 4,296 characters is the maximum length of a QR code

@@ -43,7 +44,12 @@ func (ctrl *V1Controller) HandleGenerateQRCode() errchain.HandlerFunc {
panic(err)
}

qrc, err := qrcode.New(q.Data)
decodedStr, err := url.QueryUnescape(q.Data)
if err != nil {
return err
}

qrc, err := qrcode.New(decodedStr)
if err != nil {
return err
}
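The new decode step above runs the query value through url.QueryUnescape before it reaches the QR encoder, so percent-encoded URLs come out as the original text. A tiny illustration with a made-up Homebox URL:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// QueryUnescape reverses percent-encoding of a query parameter value.
	decoded, err := url.QueryUnescape("https%3A%2F%2Fhomebox.example.com%2Fitem%2F42")
	if err != nil {
		panic(err)
	}
	fmt.Println(decoded) // https://homebox.example.com/item/42
}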
@@ -1,31 +1,30 @@
package v1

import (
"net/http"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/httpkit/errchain"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"net/http"
)

// HandleBillOfMaterialsExport godoc
//
// @Summary Export Bill of Materials
// @Tags Reporting
// @Produce json
// @Success 200 {string} string "text/csv"
// @Router /v1/reporting/bill-of-materials [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleBillOfMaterialsExport() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
actor := services.UseUserCtx(r.Context())

csv, err := ctrl.svc.Items.ExportBillOfMaterialsTSV(r.Context(), actor.GroupID)
csv, err := ctrl.svc.Items.ExportBillOfMaterialsCSV(r.Context(), actor.GroupID)
if err != nil {
return err
}

w.Header().Set("Content-Type", "text/tsv")
w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.tsv")
w.Header().Set("Content-Type", "text/csv")
w.Header().Set("Content-Disposition", "attachment; filename=bill-of-materials.csv")
_, err = w.Write(csv)
return err
}
@@ -4,22 +4,22 @@ import (
"net/http"
"time"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/homebox/backend/internal/web/adapters"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
"github.com/sysadminsmedia/homebox/backend/internal/web/adapters"
)

// HandleGroupGet godoc
// HandleGroupStatisticsLocations godoc
//
// @Summary Get Location Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/locations [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsLocations() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.TotalsByOrganizer, error) {
auth := services.NewContext(r.Context())

@@ -31,12 +31,12 @@ func (ctrl *V1Controller) HandleGroupStatisticsLocations() errchain.HandlerFunc

// HandleGroupStatisticsLabels godoc
//
// @Summary Get Label Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} []repo.TotalsByOrganizer
// @Router /v1/groups/statistics/labels [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsLabels() errchain.HandlerFunc {
fn := func(r *http.Request) ([]repo.TotalsByOrganizer, error) {
auth := services.NewContext(r.Context())

@@ -48,12 +48,12 @@ func (ctrl *V1Controller) HandleGroupStatisticsLabels() errchain.HandlerFunc {

// HandleGroupStatistics godoc
//
// @Summary Get Group Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} repo.GroupStatistics
// @Router /v1/groups/statistics [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatistics() errchain.HandlerFunc {
fn := func(r *http.Request) (repo.GroupStatistics, error) {
auth := services.NewContext(r.Context())

@@ -65,14 +65,14 @@ func (ctrl *V1Controller) HandleGroupStatistics() errchain.HandlerFunc {

// HandleGroupStatisticsPriceOverTime godoc
//
// @Summary Get Purchase Price Statistics
// @Tags Statistics
// @Produce json
// @Success 200 {object} repo.ValueOverTime
// @Param start query string false "start date"
// @Param end query string false "end date"
// @Router /v1/groups/statistics/purchase-price [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleGroupStatisticsPriceOverTime() errchain.HandlerFunc {
parseDate := func(datestr string, defaultDate time.Time) (time.Time, error) {
if datestr == "" {
@@ -5,22 +5,22 @@ import (
"net/http"

"github.com/google/uuid"
"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

// HandleUserRegistration godoc
//
// @Summary Register New User
// @Tags User
// @Produce json
// @Param payload body services.UserRegistration true "User Data"
// @Success 204
// @Router /v1/users/register [Post]
func (ctrl *V1Controller) HandleUserRegistration() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
regData := services.UserRegistration{}

@@ -46,12 +46,12 @@ func (ctrl *V1Controller) HandleUserRegistration() errchain.HandlerFunc {

// HandleUserSelf godoc
//
// @Summary Get User Self
// @Tags User
// @Produce json
// @Success 200 {object} Wrapped{item=repo.UserOut}
// @Router /v1/users/self [GET]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelf() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
token := services.UseTokenCtx(r.Context())

@@ -67,13 +67,13 @@ func (ctrl *V1Controller) HandleUserSelf() errchain.HandlerFunc {

// HandleUserSelfUpdate godoc
//
// @Summary Update Account
// @Tags User
// @Produce json
// @Param payload body repo.UserUpdate true "User Data"
// @Success 200 {object} Wrapped{item=repo.UserUpdate}
// @Router /v1/users/self [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfUpdate() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
updateData := repo.UserUpdate{}

@@ -94,12 +94,12 @@ func (ctrl *V1Controller) HandleUserSelfUpdate() errchain.HandlerFunc {

// HandleUserSelfDelete godoc
//
// @Summary Delete Account
// @Tags User
// @Produce json
// @Success 204
// @Router /v1/users/self [DELETE]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfDelete() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
if ctrl.isDemo {

@@ -124,12 +124,12 @@ type (

// HandleUserSelfChangePassword godoc
//
// @Summary Change Password
// @Tags User
// @Success 204
// @Param payload body ChangePassword true "Password Payload"
// @Router /v1/users/change-password [PUT]
// @Security Bearer
func (ctrl *V1Controller) HandleUserSelfChangePassword() errchain.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) error {
if ctrl.isDemo {
@@ -2,11 +2,10 @@ package main

import (
"os"
"strings"

"github.com/hay-kot/homebox/backend/internal/sys/config"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
)

// setupLogger initializes the zerolog config

@@ -18,24 +17,8 @@ func (a *app) setupLogger() {
log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr}).With().Caller().Logger()
}

log.Level(getLevel(a.conf.Log.Level))
}

func getLevel(l string) zerolog.Level {
switch strings.ToLower(l) {
case "debug":
return zerolog.DebugLevel
case "info":
return zerolog.InfoLevel
case "warn":
return zerolog.WarnLevel
case "error":
return zerolog.ErrorLevel
case "fatal":
return zerolog.FatalLevel
case "panic":
return zerolog.PanicLevel
default:
return zerolog.InfoLevel
level, err := zerolog.ParseLevel(a.conf.Log.Level)
if err == nil {
zerolog.SetGlobalLevel(level)
}
}
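The rewrite above swaps the hand-written level switch for zerolog's own parser. A small sketch of the same idea, with an explicit fallback to info for unparseable values (the diff instead leaves the global level untouched on error):

package main

import (
	"os"

	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"
)

// configureLevel parses the configured level string with zerolog.ParseLevel,
// which understands the same names the old switch handled ("debug", "info", ...).
func configureLevel(raw string) {
	level, err := zerolog.ParseLevel(raw)
	if err != nil {
		level = zerolog.InfoLevel // keep the old default on bad input
	}
	zerolog.SetGlobalLevel(level)
}

func main() {
	log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})
	configureLevel("warn")
	log.Info().Msg("filtered out at warn level")
	log.Warn().Msg("still visible")
}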
@@ -1,6 +1,7 @@
package main

import (
"bytes"
"context"
"fmt"
"net/http"

@@ -13,19 +14,21 @@ import (
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/data/ent"
"github.com/hay-kot/homebox/backend/internal/data/migrations"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/homebox/backend/internal/sys/config"
"github.com/hay-kot/homebox/backend/internal/web/mid"
"github.com/hay-kot/httpkit/errchain"
"github.com/hay-kot/httpkit/server"
"github.com/hay-kot/httpkit/graceful"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/rs/zerolog/pkgerrors"
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
"github.com/sysadminsmedia/homebox/backend/internal/data/migrations"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
"github.com/sysadminsmedia/homebox/backend/internal/sys/config"
"github.com/sysadminsmedia/homebox/backend/internal/web/mid"

_ "github.com/hay-kot/homebox/backend/pkgs/cgofreesqlite"
_ "github.com/sysadminsmedia/homebox/backend/pkgs/cgofreesqlite"
)

var (

@@ -34,19 +37,28 @@ var (
buildTime = "now"
)

func build() string {
short := commit
if len(short) > 7 {
short = short[:7]
}

return fmt.Sprintf("%s, commit %s, built at %s", version, short, buildTime)
}

// @title Homebox API
// @version 1.0
// @description Track, Manage, and Organize your Things.
// @contact.name Don't
// @BasePath /api
// @securityDefinitions.apikey Bearer
// @in header
// @name Authorization
// @description "Type 'Bearer TOKEN' to correctly set the API Key"
func main() {
zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack

cfg, err := config.New()
cfg, err := config.New(build(), "Homebox inventory management system")
if err != nil {
panic(err)
}

@@ -68,12 +80,12 @@ func run(cfg *config.Config) error {
log.Fatal().Err(err).Msg("failed to create data directory")
}

c, err := ent.Open("sqlite3", cfg.Storage.SqliteUrl)
c, err := ent.Open("sqlite3", cfg.Storage.SqliteURL)
if err != nil {
log.Fatal().
Err(err).
Str("driver", "sqlite").
Str("url", cfg.Storage.SqliteUrl).
Str("url", cfg.Storage.SqliteURL).
Msg("failed opening connection to sqlite")
}
defer func(c *ent.Client) {
@@ -103,24 +115,56 @@ func run(cfg *config.Config) error {

err = c.Schema.Create(context.Background(), options...)
if err != nil {
log.Fatal().
log.Error().
Err(err).
Str("driver", "sqlite").
Str("url", cfg.Storage.SqliteUrl).
Str("url", cfg.Storage.SqliteURL).
Msg("failed creating schema resources")
return err
}

err = os.RemoveAll(temp)
if err != nil {
log.Fatal().Err(err).Msg("failed to remove temporary directory for database migrations")
log.Error().Err(err).Msg("failed to remove temporary directory for database migrations")
return err
}

collectFuncs := []currencies.CollectorFunc{
currencies.CollectDefaults(),
}

if cfg.Options.CurrencyConfig != "" {
log.Info().
Str("path", cfg.Options.CurrencyConfig).
Msg("loading currency config file")

content, err := os.ReadFile(cfg.Options.CurrencyConfig)
if err != nil {
log.Error().
Err(err).
Str("path", cfg.Options.CurrencyConfig).
Msg("failed to read currency config file")
return err
}

collectFuncs = append(collectFuncs, currencies.CollectJSON(bytes.NewReader(content)))
}

currencies, err := currencies.CollectionCurrencies(collectFuncs...)
if err != nil {
log.Error().
Err(err).
Msg("failed to collect currencies")
return err
}

app.bus = eventbus.New()
app.db = c
app.repos = repo.New(c, cfg.Storage.Data)
app.repos = repo.New(c, app.bus, cfg.Storage.Data)
app.services = services.New(
app.repos,
services.WithAutoIncrementAssetID(cfg.Options.AutoIncrementAssetID),
services.WithCurrencies(currencies),
)

// =========================================================================
@@ -137,36 +181,66 @@ func run(cfg *config.Config) error {
|
||||
middleware.StripSlashes,
|
||||
)
|
||||
|
||||
chain := errchain.New(mid.Errors(app.server, logger))
|
||||
chain := errchain.New(mid.Errors(logger))
|
||||
|
||||
app.mountRoutes(router, chain, app.repos)
|
||||
|
||||
app.server = server.NewServer(
|
||||
server.WithHost(app.conf.Web.Host),
|
||||
server.WithPort(app.conf.Web.Port),
|
||||
)
|
||||
log.Info().Msgf("Starting HTTP Server on %s:%s", app.server.Host, app.server.Port)
|
||||
runner := graceful.NewRunner()
|
||||
|
||||
runner.AddFunc("server", func(ctx context.Context) error {
|
||||
httpserver := http.Server{
|
||||
Addr: fmt.Sprintf("%s:%s", cfg.Web.Host, cfg.Web.Port),
|
||||
Handler: router,
|
||||
ReadTimeout: cfg.Web.ReadTimeout,
|
||||
WriteTimeout: cfg.Web.WriteTimeout,
|
||||
IdleTimeout: cfg.Web.IdleTimeout,
|
||||
}
|
||||
|
||||
go func() {
|
||||
<-ctx.Done()
|
||||
_ = httpserver.Shutdown(context.Background())
|
||||
}()
|
||||
|
||||
log.Info().Msgf("Server is running on %s:%s", cfg.Web.Host, cfg.Web.Port)
|
||||
return httpserver.ListenAndServe()
|
||||
})
|
||||
|
||||
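The old code built the server via server.NewServer and started it separately; the new code registers the HTTP server as a function on a graceful runner and shuts it down when the runner's context is cancelled. A standalone sketch of that shutdown pattern using only the standard library (the httpkit graceful API itself is not reproduced here; address and timeouts are placeholders):

```go
package main

import (
	"context"
	"errors"
	"log"
	"net/http"
	"os/signal"
	"syscall"
	"time"
)

func main() {
	// Cancelled on SIGINT/SIGTERM, standing in for the context the
	// graceful runner passes to each registered function.
	ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)
	defer stop()

	srv := http.Server{
		Addr:         "127.0.0.1:7745",
		Handler:      http.NewServeMux(),
		ReadTimeout:  10 * time.Second,
		WriteTimeout: 10 * time.Second,
		IdleTimeout:  30 * time.Second,
	}

	// Same shape as the run() snippet above: a goroutine waits for the
	// context and shuts the server down, which unblocks ListenAndServe.
	go func() {
		<-ctx.Done()
		shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
		defer cancel()
		_ = srv.Shutdown(shutdownCtx)
	}()

	if err := srv.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {
		log.Fatal(err)
	}
}
```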
// =========================================================================
// Start Reoccurring Tasks

go app.startBgTask(time.Duration(24)*time.Hour, func() {
_, err := app.repos.AuthTokens.PurgeExpiredTokens(context.Background())
runner.AddFunc("eventbus", app.bus.Run)

runner.AddFunc("seed_database", func(ctx context.Context) error {
// TODO: Remove through external API that does setup
if cfg.Demo {
log.Info().Msg("Running in demo mode, creating demo data")
err := app.SetupDemo()
if err != nil {
log.Fatal().Msg(err.Error())
}
}
return nil
})

runner.AddPlugin(NewTask("purge-tokens", time.Duration(24)*time.Hour, func(ctx context.Context) {
_, err := app.repos.AuthTokens.PurgeExpiredTokens(ctx)
if err != nil {
log.Error().
Err(err).
Msg("failed to purge expired tokens")
}
})
go app.startBgTask(time.Duration(24)*time.Hour, func() {
_, err := app.repos.Groups.InvitationPurge(context.Background())
}))

runner.AddPlugin(NewTask("purge-invitations", time.Duration(24)*time.Hour, func(ctx context.Context) {
_, err := app.repos.Groups.InvitationPurge(ctx)
if err != nil {
log.Error().
Err(err).
Msg("failed to purge expired invitations")
}
})
go app.startBgTask(time.Duration(1)*time.Hour, func() {
}))

runner.AddPlugin(NewTask("send-notifications", time.Duration(1)*time.Hour, func(ctx context.Context) {
now := time.Now()

if now.Hour() == 8 {
@@ -178,22 +252,39 @@ func run(cfg *config.Config) error {
Msg("failed to send notifiers")
}
}
})
}))

// TODO: Remove through external API that does setup
if cfg.Demo {
log.Info().Msg("Running in demo mode, creating demo data")
app.SetupDemo()
if cfg.Options.GithubReleaseCheck {
runner.AddPlugin(NewTask("get-latest-github-release", time.Hour, func(ctx context.Context) {
log.Debug().Msg("running get latest github release")
err := app.services.BackgroundService.GetLatestGithubRelease(context.Background())
if err != nil {
log.Error().
Err(err).
Msg("failed to get latest github release")
}
}))
}

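The goroutine-based app.startBgTask calls are replaced by NewTask plugins registered on the runner. NewTask's implementation is not part of this compare; the sketch below is only an assumption about what such a plugin does, namely running its function on a fixed interval until the runner's context is cancelled.

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// task is a hypothetical stand-in for the NewTask plugin used above: it runs
// fn on a fixed interval until the context is cancelled. The real NewTask
// implementation is not shown in this compare.
type task struct {
	name     string
	interval time.Duration
	fn       func(context.Context)
}

func (t task) Start(ctx context.Context) error {
	ticker := time.NewTicker(t.interval)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			return nil
		case <-ticker.C:
			t.fn(ctx)
		}
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 350*time.Millisecond)
	defer cancel()

	t := task{name: "purge-tokens", interval: 100 * time.Millisecond, fn: func(context.Context) {
		fmt.Println("purging expired tokens")
	}}
	_ = t.Start(ctx) // blocks until the context expires
}
```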
if cfg.Debug.Enabled {
debugrouter := app.debugRouter()
go func() {
if err := http.ListenAndServe(":"+cfg.Debug.Port, debugrouter); err != nil {
log.Fatal().Err(err).Msg("failed to start debug server")
runner.AddFunc("debug", func(ctx context.Context) error {
debugserver := http.Server{
Addr: fmt.Sprintf("%s:%s", cfg.Web.Host, cfg.Debug.Port),
Handler: app.debugRouter(),
ReadTimeout: cfg.Web.ReadTimeout,
WriteTimeout: cfg.Web.WriteTimeout,
IdleTimeout: cfg.Web.IdleTimeout,
}
}()

go func() {
<-ctx.Done()
_ = debugserver.Shutdown(context.Background())
}()

log.Info().Msgf("Debug server is running on %s:%s", cfg.Web.Host, cfg.Debug.Port)
return debugserver.ListenAndServe()
})
}

return app.server.Start(router)
return runner.Start(context.Background())
}

@@ -7,9 +7,11 @@ import (
"net/url"
"strings"

"github.com/hay-kot/homebox/backend/internal/core/services"
"github.com/hay-kot/homebox/backend/internal/sys/validate"
"github.com/hay-kot/httpkit/errchain"
v1 "github.com/sysadminsmedia/homebox/backend/app/api/handlers/v1"
"github.com/sysadminsmedia/homebox/backend/internal/core/services"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

type tokenHasKey struct {
@@ -94,20 +96,6 @@ func getQuery(r *http.Request) (string, error) {
return token, nil
}

func getCookie(r *http.Request) (string, error) {
cookie, err := r.Cookie("hb.auth.token")
if err != nil {
return "", errors.New("access_token cookie is required")
}

token, err := url.QueryUnescape(cookie.Value)
if err != nil {
return "", errors.New("access_token cookie is required")
}

return token, nil
}

// mwAuthToken is a middleware that will check the database for a stateful token
// and attach it's user to the request context, or return an appropriate error.
// Authorization support is by token via Headers or Query Parameter
@@ -115,26 +103,35 @@ func getCookie(r *http.Request) (string, error) {
// Example:
// - header = "Bearer 1234567890"
// - query = "?access_token=1234567890"
// - cookie = hb.auth.token = 1234567890
func (a *app) mwAuthToken(next errchain.Handler) errchain.Handler {
return errchain.HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
keyFuncs := [...]KeyFunc{
getBearer,
getCookie,
getQuery,
}

var requestToken string
for _, keyFunc := range keyFuncs {
token, err := keyFunc(r)
if err == nil {
requestToken = token
break

// We ignore the error to allow the next strategy to be attempted
{
cookies, _ := v1.GetCookies(r)
if cookies != nil {
requestToken = cookies.Token
}
}

if requestToken == "" {
return validate.NewRequestError(errors.New("Authorization header or query is required"), http.StatusUnauthorized)
keyFuncs := [...]KeyFunc{
getBearer,
getQuery,
}

for _, keyFunc := range keyFuncs {
token, err := keyFunc(r)
if err == nil {
requestToken = token
break
}
}
}

if requestToken == "" {
return validate.NewRequestError(errors.New("authorization header or query is required"), http.StatusUnauthorized)
}

requestToken = strings.TrimPrefix(requestToken, "Bearer ")
@@ -144,7 +141,11 @@ func (a *app) mwAuthToken(next errchain.Handler) errchain.Handler {
usr, err := a.services.User.GetSelf(r.Context(), requestToken)
// Check the database for the token
if err != nil {
return validate.NewRequestError(errors.New("valid authorization header is required"), http.StatusUnauthorized)
if ent.IsNotFound(err) {
return validate.NewRequestError(errors.New("valid authorization token is required"), http.StatusUnauthorized)
}

return err
}

r = r.WithContext(services.SetUserCtx(r.Context(), &usr, requestToken))

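The reworked middleware first reads the token from the v1.GetCookies helper and only falls back to the bearer-header and query extractors when no cookie token is present, ignoring extractor errors so the next strategy can be tried. A standalone sketch of that strategy-list pattern (the real KeyFunc type and getBearer/getQuery live in this middleware file):

```go
package main

import (
	"errors"
	"fmt"
	"net/http"
	"net/http/httptest"
	"strings"
)

// keyFunc mirrors the KeyFunc strategy list in mwAuthToken: each extractor
// either returns a token or an error, and the caller tries them in order.
type keyFunc func(*http.Request) (string, error)

func getBearer(r *http.Request) (string, error) {
	h := r.Header.Get("Authorization")
	if h == "" {
		return "", errors.New("authorization header is required")
	}
	return strings.TrimPrefix(h, "Bearer "), nil
}

func getQuery(r *http.Request) (string, error) {
	token := r.URL.Query().Get("access_token")
	if token == "" {
		return "", errors.New("access_token query is required")
	}
	return token, nil
}

func extractToken(r *http.Request) (string, error) {
	for _, fn := range []keyFunc{getBearer, getQuery} {
		// Errors are ignored so the next strategy can be attempted,
		// matching the comment in the middleware above.
		if token, err := fn(r); err == nil {
			return token, nil
		}
	}
	return "", errors.New("authorization header or query is required")
}

func main() {
	r := httptest.NewRequest(http.MethodGet, "/api/v1/users/self?access_token=1234567890", nil)
	token, err := extractToken(r)
	fmt.Println(token, err) // 1234567890 <nil>
}
```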
backend/app/api/providers/doc.go (new file, 2 lines)
@@ -0,0 +1,2 @@
// Package providers provides a authentication abstraction for the backend.
package providers

backend/app/api/providers/extractors.go (new file, 55 lines)
@@ -0,0 +1,55 @@
package providers

import (
"errors"
"net/http"

"github.com/hay-kot/httpkit/server"
"github.com/rs/zerolog/log"
"github.com/sysadminsmedia/homebox/backend/internal/sys/validate"
)

type LoginForm struct {
Username string `json:"username"`
Password string `json:"password"`
StayLoggedIn bool `json:"stayLoggedIn"`
}

func getLoginForm(r *http.Request) (LoginForm, error) {
loginForm := LoginForm{}

switch r.Header.Get("Content-Type") {
case "application/x-www-form-urlencoded":
err := r.ParseForm()
if err != nil {
return loginForm, errors.New("failed to parse form")
}

loginForm.Username = r.PostFormValue("username")
loginForm.Password = r.PostFormValue("password")
loginForm.StayLoggedIn = r.PostFormValue("stayLoggedIn") == "true"
case "application/json":
err := server.Decode(r, &loginForm)
if err != nil {
log.Err(err).Msg("failed to decode login form")
return loginForm, errors.New("failed to decode login form")
}
default:
return loginForm, errors.New("invalid content type")
}

if loginForm.Username == "" || loginForm.Password == "" {
return loginForm, validate.NewFieldErrors(
validate.FieldError{
Field: "username",
Error: "username or password is empty",
},
validate.FieldError{
Field: "password",
Error: "username or password is empty",
},
)
}

return loginForm, nil
}
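getLoginForm accepts either a URL-encoded form or a JSON body, keyed off the Content-Type header. A client-side sketch of both request shapes against a stand-in handler; the credentials match the admin@admin.com / admin examples in the swagger definitions below, and the real endpoint is POST /api/v1/users/login.

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"net/url"
	"strings"
)

// fakeLogin stands in for the login endpoint only to show the two content
// types getLoginForm accepts; the real handler delegates to an AuthProvider.
func fakeLogin(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintf(w, "content-type=%s\n", r.Header.Get("Content-Type"))
}

func main() {
	srv := httptest.NewServer(http.HandlerFunc(fakeLogin))
	defer srv.Close()

	// JSON body, handled by the "application/json" branch.
	jsonBody := `{"username":"admin@admin.com","password":"admin","stayLoggedIn":true}`
	resp, err := http.Post(srv.URL+"/api/v1/users/login", "application/json", strings.NewReader(jsonBody))
	if err != nil {
		panic(err)
	}
	resp.Body.Close()

	// Form body, handled by the "application/x-www-form-urlencoded" branch.
	form := url.Values{"username": {"admin@admin.com"}, "password": {"admin"}, "stayLoggedIn": {"true"}}
	resp, err = http.PostForm(srv.URL+"/api/v1/users/login", form)
	if err != nil {
		panic(err)
	}
	resp.Body.Close()
}
```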
backend/app/api/providers/local.go (new file, 30 lines)
@@ -0,0 +1,30 @@
package providers

import (
"net/http"

"github.com/sysadminsmedia/homebox/backend/internal/core/services"
)

type LocalProvider struct {
service *services.UserService
}

func NewLocalProvider(service *services.UserService) *LocalProvider {
return &LocalProvider{
service: service,
}
}

func (p *LocalProvider) Name() string {
return "local"
}

func (p *LocalProvider) Authenticate(w http.ResponseWriter, r *http.Request) (services.UserAuthTokenDetail, error) {
loginForm, err := getLoginForm(r)
if err != nil {
return services.UserAuthTokenDetail{}, err
}

return p.service.Login(r.Context(), loginForm.Username, loginForm.Password, loginForm.StayLoggedIn)
}
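LocalProvider is passed to the login handler as a v1.AuthProvider in routes.go below. That interface is not shown in this compare; the sketch below is an assumed contract inferred from the two methods LocalProvider implements, with placeholder types standing in for the homebox packages.

```go
package main

import (
	"fmt"
	"net/http"
)

// UserAuthTokenDetail is a placeholder for services.UserAuthTokenDetail.
type UserAuthTokenDetail struct{ Raw string }

// AuthProvider is an assumption about the v1.AuthProvider contract, inferred
// from the two methods LocalProvider implements above; the real interface
// lives in handlers/v1 and is not part of this compare.
type AuthProvider interface {
	Name() string
	Authenticate(w http.ResponseWriter, r *http.Request) (UserAuthTokenDetail, error)
}

type staticProvider struct{ name string }

func (p staticProvider) Name() string { return p.name }
func (p staticProvider) Authenticate(http.ResponseWriter, *http.Request) (UserAuthTokenDetail, error) {
	return UserAuthTokenDetail{Raw: "example-token"}, nil
}

// pickProvider mirrors how a login handler could select a provider by the
// ?provider= query value documented on /v1/users/login, defaulting to "local".
func pickProvider(name string, providers []AuthProvider) AuthProvider {
	if name == "" {
		name = "local"
	}
	for _, p := range providers {
		if p.Name() == name {
			return p
		}
	}
	return nil
}

func main() {
	p := pickProvider("local", []AuthProvider{staticProvider{name: "local"}})
	fmt.Println(p.Name())
}
```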
@@ -3,6 +3,7 @@ package main
import (
"embed"
"errors"
"fmt"
"io"
"mime"
"net/http"
@@ -10,13 +11,14 @@ import (
"path/filepath"

"github.com/go-chi/chi/v5"
"github.com/hay-kot/homebox/backend/app/api/handlers/debughandlers"
v1 "github.com/hay-kot/homebox/backend/app/api/handlers/v1"
_ "github.com/hay-kot/homebox/backend/app/api/static/docs"
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
"github.com/hay-kot/homebox/backend/internal/data/repo"
"github.com/hay-kot/httpkit/errchain"
httpSwagger "github.com/swaggo/http-swagger" // http-swagger middleware
httpSwagger "github.com/swaggo/http-swagger/v2" // http-swagger middleware
"github.com/sysadminsmedia/homebox/backend/app/api/handlers/debughandlers"
v1 "github.com/sysadminsmedia/homebox/backend/app/api/handlers/v1"
"github.com/sysadminsmedia/homebox/backend/app/api/providers"
_ "github.com/sysadminsmedia/homebox/backend/app/api/static/docs"
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
)

const prefix = "/api"
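The import swap to swaggo/http-swagger/v2 pairs with the blank import of the generated static/docs package, which registers the spec with swag at init time. A hedged sketch of how such a docs route is typically mounted on a chi router; the exact path and port Homebox uses are not visible in this hunk.

```go
package main

import (
	"net/http"

	"github.com/go-chi/chi/v5"
	httpSwagger "github.com/swaggo/http-swagger/v2"
)

func main() {
	// Minimal sketch: serve the swagger UI from the spec registered by the
	// blank-imported static/docs package. Path and address are assumptions.
	r := chi.NewRouter()
	r.Get("/swagger/*", httpSwagger.Handler(
		httpSwagger.URL("/swagger/doc.json"), // spec served by the middleware itself
	))
	_ = http.ListenAndServe(":7745", r)
}
```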
@@ -46,113 +48,126 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllR
|
||||
// =========================================================================
|
||||
// API Version 1
|
||||
|
||||
v1Base := v1.BaseUrlFunc(prefix)
|
||||
|
||||
v1Ctrl := v1.NewControllerV1(
|
||||
a.services,
|
||||
a.repos,
|
||||
a.bus,
|
||||
v1.WithMaxUploadSize(a.conf.Web.MaxUploadSize),
|
||||
v1.WithRegistration(a.conf.Options.AllowRegistration),
|
||||
v1.WithDemoStatus(a.conf.Demo), // Disable Password Change in Demo Mode
|
||||
v1.WithURL(fmt.Sprintf("%s:%s", a.conf.Web.Host, a.conf.Web.Port)),
|
||||
)
|
||||
|
||||
r.Get(v1Base("/status"), chain.ToHandlerFunc(v1Ctrl.HandleBase(func() bool { return true }, v1.Build{
|
||||
Version: version,
|
||||
Commit: commit,
|
||||
BuildTime: buildTime,
|
||||
})))
|
||||
r.Route(prefix+"/v1", func(r chi.Router) {
|
||||
r.Get("/status", chain.ToHandlerFunc(v1Ctrl.HandleBase(func() bool { return true }, v1.Build{
|
||||
Version: version,
|
||||
Commit: commit,
|
||||
BuildTime: buildTime,
|
||||
})))
|
||||
|
||||
r.Post(v1Base("/users/register"), chain.ToHandlerFunc(v1Ctrl.HandleUserRegistration()))
|
||||
r.Post(v1Base("/users/login"), chain.ToHandlerFunc(v1Ctrl.HandleAuthLogin()))
|
||||
r.Get("/currencies", chain.ToHandlerFunc(v1Ctrl.HandleCurrency()))
|
||||
|
||||
userMW := []errchain.Middleware{
|
||||
a.mwAuthToken,
|
||||
a.mwRoles(RoleModeOr, authroles.RoleUser.String()),
|
||||
}
|
||||
providers := []v1.AuthProvider{
|
||||
providers.NewLocalProvider(a.services.User),
|
||||
}
|
||||
|
||||
r.Get(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelf(), userMW...))
|
||||
r.Put(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfUpdate(), userMW...))
|
||||
r.Delete(v1Base("/users/self"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfDelete(), userMW...))
|
||||
r.Post(v1Base("/users/logout"), chain.ToHandlerFunc(v1Ctrl.HandleAuthLogout(), userMW...))
|
||||
r.Get(v1Base("/users/refresh"), chain.ToHandlerFunc(v1Ctrl.HandleAuthRefresh(), userMW...))
|
||||
r.Put(v1Base("/users/self/change-password"), chain.ToHandlerFunc(v1Ctrl.HandleUserSelfChangePassword(), userMW...))
|
||||
r.Post("/users/register", chain.ToHandlerFunc(v1Ctrl.HandleUserRegistration()))
|
||||
r.Post("/users/login", chain.ToHandlerFunc(v1Ctrl.HandleAuthLogin(providers...)))
|
||||
|
||||
r.Post(v1Base("/groups/invitations"), chain.ToHandlerFunc(v1Ctrl.HandleGroupInvitationsCreate(), userMW...))
|
||||
r.Get(v1Base("/groups/statistics"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatistics(), userMW...))
|
||||
r.Get(v1Base("/groups/statistics/purchase-price"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsPriceOverTime(), userMW...))
|
||||
r.Get(v1Base("/groups/statistics/locations"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLocations(), userMW...))
|
||||
r.Get(v1Base("/groups/statistics/labels"), chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLabels(), userMW...))
|
||||
userMW := []errchain.Middleware{
|
||||
a.mwAuthToken,
|
||||
a.mwRoles(RoleModeOr, authroles.RoleUser.String()),
|
||||
}
|
||||
|
||||
// TODO: I don't like /groups being the URL for users
|
||||
r.Get(v1Base("/groups"), chain.ToHandlerFunc(v1Ctrl.HandleGroupGet(), userMW...))
|
||||
r.Put(v1Base("/groups"), chain.ToHandlerFunc(v1Ctrl.HandleGroupUpdate(), userMW...))
|
||||
r.Get("/ws/events", chain.ToHandlerFunc(v1Ctrl.HandleCacheWS(), userMW...))
|
||||
r.Get("/users/self", chain.ToHandlerFunc(v1Ctrl.HandleUserSelf(), userMW...))
|
||||
r.Put("/users/self", chain.ToHandlerFunc(v1Ctrl.HandleUserSelfUpdate(), userMW...))
|
||||
r.Delete("/users/self", chain.ToHandlerFunc(v1Ctrl.HandleUserSelfDelete(), userMW...))
|
||||
r.Post("/users/logout", chain.ToHandlerFunc(v1Ctrl.HandleAuthLogout(), userMW...))
|
||||
r.Get("/users/refresh", chain.ToHandlerFunc(v1Ctrl.HandleAuthRefresh(), userMW...))
|
||||
r.Put("/users/self/change-password", chain.ToHandlerFunc(v1Ctrl.HandleUserSelfChangePassword(), userMW...))
|
||||
|
||||
r.Post(v1Base("/actions/ensure-asset-ids"), chain.ToHandlerFunc(v1Ctrl.HandleEnsureAssetID(), userMW...))
|
||||
r.Post(v1Base("/actions/zero-item-time-fields"), chain.ToHandlerFunc(v1Ctrl.HandleItemDateZeroOut(), userMW...))
|
||||
r.Post(v1Base("/actions/ensure-import-refs"), chain.ToHandlerFunc(v1Ctrl.HandleEnsureImportRefs(), userMW...))
|
||||
r.Post("/groups/invitations", chain.ToHandlerFunc(v1Ctrl.HandleGroupInvitationsCreate(), userMW...))
|
||||
r.Get("/groups/statistics", chain.ToHandlerFunc(v1Ctrl.HandleGroupStatistics(), userMW...))
|
||||
r.Get("/groups/statistics/purchase-price", chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsPriceOverTime(), userMW...))
|
||||
r.Get("/groups/statistics/locations", chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLocations(), userMW...))
|
||||
r.Get("/groups/statistics/labels", chain.ToHandlerFunc(v1Ctrl.HandleGroupStatisticsLabels(), userMW...))
|
||||
|
||||
r.Get(v1Base("/locations"), chain.ToHandlerFunc(v1Ctrl.HandleLocationGetAll(), userMW...))
|
||||
r.Post(v1Base("/locations"), chain.ToHandlerFunc(v1Ctrl.HandleLocationCreate(), userMW...))
|
||||
r.Get(v1Base("/locations/tree"), chain.ToHandlerFunc(v1Ctrl.HandleLocationTreeQuery(), userMW...))
|
||||
r.Get(v1Base("/locations/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLocationGet(), userMW...))
|
||||
r.Put(v1Base("/locations/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLocationUpdate(), userMW...))
|
||||
r.Delete(v1Base("/locations/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLocationDelete(), userMW...))
|
||||
// TODO: I don't like /groups being the URL for users
|
||||
r.Get("/groups", chain.ToHandlerFunc(v1Ctrl.HandleGroupGet(), userMW...))
|
||||
r.Put("/groups", chain.ToHandlerFunc(v1Ctrl.HandleGroupUpdate(), userMW...))
|
||||
|
||||
r.Get(v1Base("/labels"), chain.ToHandlerFunc(v1Ctrl.HandleLabelsGetAll(), userMW...))
|
||||
r.Post(v1Base("/labels"), chain.ToHandlerFunc(v1Ctrl.HandleLabelsCreate(), userMW...))
|
||||
r.Get(v1Base("/labels/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLabelGet(), userMW...))
|
||||
r.Put(v1Base("/labels/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLabelUpdate(), userMW...))
|
||||
r.Delete(v1Base("/labels/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleLabelDelete(), userMW...))
|
||||
r.Post("/actions/ensure-asset-ids", chain.ToHandlerFunc(v1Ctrl.HandleEnsureAssetID(), userMW...))
|
||||
r.Post("/actions/zero-item-time-fields", chain.ToHandlerFunc(v1Ctrl.HandleItemDateZeroOut(), userMW...))
|
||||
r.Post("/actions/ensure-import-refs", chain.ToHandlerFunc(v1Ctrl.HandleEnsureImportRefs(), userMW...))
|
||||
r.Post("/actions/set-primary-photos", chain.ToHandlerFunc(v1Ctrl.HandleSetPrimaryPhotos(), userMW...))
|
||||
|
||||
r.Get(v1Base("/items"), chain.ToHandlerFunc(v1Ctrl.HandleItemsGetAll(), userMW...))
|
||||
r.Post(v1Base("/items"), chain.ToHandlerFunc(v1Ctrl.HandleItemsCreate(), userMW...))
|
||||
r.Post(v1Base("/items/import"), chain.ToHandlerFunc(v1Ctrl.HandleItemsImport(), userMW...))
|
||||
r.Get(v1Base("/items/export"), chain.ToHandlerFunc(v1Ctrl.HandleItemsExport(), userMW...))
|
||||
r.Get(v1Base("/items/fields"), chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldNames(), userMW...))
|
||||
r.Get(v1Base("/items/fields/values"), chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldValues(), userMW...))
|
||||
r.Get("/locations", chain.ToHandlerFunc(v1Ctrl.HandleLocationGetAll(), userMW...))
|
||||
r.Post("/locations", chain.ToHandlerFunc(v1Ctrl.HandleLocationCreate(), userMW...))
|
||||
r.Get("/locations/tree", chain.ToHandlerFunc(v1Ctrl.HandleLocationTreeQuery(), userMW...))
|
||||
r.Get("/locations/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLocationGet(), userMW...))
|
||||
r.Put("/locations/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLocationUpdate(), userMW...))
|
||||
r.Delete("/locations/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLocationDelete(), userMW...))
|
||||
|
||||
r.Get(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemGet(), userMW...))
|
||||
r.Put(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemUpdate(), userMW...))
|
||||
r.Delete(v1Base("/items/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemDelete(), userMW...))
|
||||
r.Get("/labels", chain.ToHandlerFunc(v1Ctrl.HandleLabelsGetAll(), userMW...))
|
||||
r.Post("/labels", chain.ToHandlerFunc(v1Ctrl.HandleLabelsCreate(), userMW...))
|
||||
r.Get("/labels/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLabelGet(), userMW...))
|
||||
r.Put("/labels/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLabelUpdate(), userMW...))
|
||||
r.Delete("/labels/{id}", chain.ToHandlerFunc(v1Ctrl.HandleLabelDelete(), userMW...))
|
||||
|
||||
r.Post(v1Base("/items/{id}/attachments"), chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentCreate(), userMW...))
|
||||
r.Put(v1Base("/items/{id}/attachments/{attachment_id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentUpdate(), userMW...))
|
||||
r.Delete(v1Base("/items/{id}/attachments/{attachment_id}"), chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentDelete(), userMW...))
|
||||
r.Get("/items", chain.ToHandlerFunc(v1Ctrl.HandleItemsGetAll(), userMW...))
|
||||
r.Post("/items", chain.ToHandlerFunc(v1Ctrl.HandleItemsCreate(), userMW...))
|
||||
r.Post("/items/import", chain.ToHandlerFunc(v1Ctrl.HandleItemsImport(), userMW...))
|
||||
r.Get("/items/export", chain.ToHandlerFunc(v1Ctrl.HandleItemsExport(), userMW...))
|
||||
r.Get("/items/fields", chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldNames(), userMW...))
|
||||
r.Get("/items/fields/values", chain.ToHandlerFunc(v1Ctrl.HandleGetAllCustomFieldValues(), userMW...))
|
||||
|
||||
r.Get(v1Base("/items/{id}/maintenance"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceLogGet(), userMW...))
|
||||
r.Post(v1Base("/items/{id}/maintenance"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryCreate(), userMW...))
|
||||
r.Put(v1Base("/items/{id}/maintenance/{entry_id}"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryUpdate(), userMW...))
|
||||
r.Delete(v1Base("/items/{id}/maintenance/{entry_id}"), chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryDelete(), userMW...))
|
||||
r.Get("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemGet(), userMW...))
|
||||
r.Get("/items/{id}/path", chain.ToHandlerFunc(v1Ctrl.HandleItemFullPath(), userMW...))
|
||||
r.Put("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemUpdate(), userMW...))
|
||||
r.Patch("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemPatch(), userMW...))
|
||||
r.Delete("/items/{id}", chain.ToHandlerFunc(v1Ctrl.HandleItemDelete(), userMW...))
|
||||
|
||||
r.Get(v1Base("/asset/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleAssetGet(), userMW...))
|
||||
r.Post("/items/{id}/attachments", chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentCreate(), userMW...))
|
||||
r.Put("/items/{id}/attachments/{attachment_id}", chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentUpdate(), userMW...))
|
||||
r.Delete("/items/{id}/attachments/{attachment_id}", chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentDelete(), userMW...))
|
||||
|
||||
// Notifiers
|
||||
r.Get(v1Base("/notifiers"), chain.ToHandlerFunc(v1Ctrl.HandleGetUserNotifiers(), userMW...))
|
||||
r.Post(v1Base("/notifiers"), chain.ToHandlerFunc(v1Ctrl.HandleCreateNotifier(), userMW...))
|
||||
r.Put(v1Base("/notifiers/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleUpdateNotifier(), userMW...))
|
||||
r.Delete(v1Base("/notifiers/{id}"), chain.ToHandlerFunc(v1Ctrl.HandleDeleteNotifier(), userMW...))
|
||||
r.Post(v1Base("/notifiers/test"), chain.ToHandlerFunc(v1Ctrl.HandlerNotifierTest(), userMW...))
|
||||
r.Get("/items/{id}/maintenance", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceLogGet(), userMW...))
|
||||
r.Post("/items/{id}/maintenance", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryCreate(), userMW...))
|
||||
|
||||
// Asset-Like endpoints
|
||||
assetMW := []errchain.Middleware{
|
||||
a.mwAuthToken,
|
||||
a.mwRoles(RoleModeOr, authroles.RoleUser.String(), authroles.RoleAttachments.String()),
|
||||
}
|
||||
r.Get("/assets/{id}", chain.ToHandlerFunc(v1Ctrl.HandleAssetGet(), userMW...))
|
||||
|
||||
r.Get(
|
||||
v1Base("/qrcode"),
|
||||
chain.ToHandlerFunc(v1Ctrl.HandleGenerateQRCode(), assetMW...),
|
||||
)
|
||||
r.Get(
|
||||
v1Base("/items/{id}/attachments/{attachment_id}"),
|
||||
chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentGet(), assetMW...),
|
||||
)
|
||||
// Maintenance
|
||||
r.Get("/maintenance", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceGetAll(), userMW...))
|
||||
r.Put("/maintenance/{id}", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryUpdate(), userMW...))
|
||||
r.Delete("/maintenance/{id}", chain.ToHandlerFunc(v1Ctrl.HandleMaintenanceEntryDelete(), userMW...))
|
||||
|
||||
// Reporting Services
|
||||
r.Get(v1Base("/reporting/bill-of-materials"), chain.ToHandlerFunc(v1Ctrl.HandleBillOfMaterialsExport(), userMW...))
|
||||
// Notifiers
|
||||
r.Get("/notifiers", chain.ToHandlerFunc(v1Ctrl.HandleGetUserNotifiers(), userMW...))
|
||||
r.Post("/notifiers", chain.ToHandlerFunc(v1Ctrl.HandleCreateNotifier(), userMW...))
|
||||
r.Put("/notifiers/{id}", chain.ToHandlerFunc(v1Ctrl.HandleUpdateNotifier(), userMW...))
|
||||
r.Delete("/notifiers/{id}", chain.ToHandlerFunc(v1Ctrl.HandleDeleteNotifier(), userMW...))
|
||||
r.Post("/notifiers/test", chain.ToHandlerFunc(v1Ctrl.HandlerNotifierTest(), userMW...))
|
||||
|
||||
// Asset-Like endpoints
|
||||
assetMW := []errchain.Middleware{
|
||||
a.mwAuthToken,
|
||||
a.mwRoles(RoleModeOr, authroles.RoleUser.String(), authroles.RoleAttachments.String()),
|
||||
}
|
||||
|
||||
r.Get("/qrcode", chain.ToHandlerFunc(v1Ctrl.HandleGenerateQRCode(), assetMW...))
|
||||
r.Get(
|
||||
"/items/{id}/attachments/{attachment_id}",
|
||||
chain.ToHandlerFunc(v1Ctrl.HandleItemAttachmentGet(), assetMW...),
|
||||
)
|
||||
|
||||
// Reporting Services
|
||||
r.Get("/reporting/bill-of-materials", chain.ToHandlerFunc(v1Ctrl.HandleBillOfMaterialsExport(), userMW...))
|
||||
|
||||
r.NotFound(http.NotFound)
|
||||
})
|
||||
|
||||
r.NotFound(chain.ToHandlerFunc(notFoundHandler()))
|
||||
|
||||
}
|
||||
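The route registrations above move from per-path v1Base(...) prefixes to a single chi sub-router mounted at /api/v1, with the userMW and assetMW middleware chains applied per handler. A stripped-down sketch of that sub-router pattern, with errchain and the auth middleware omitted; handlers and address are placeholders.

```go
package main

import (
	"net/http"

	"github.com/go-chi/chi/v5"
)

func main() {
	const prefix = "/api"

	r := chi.NewRouter()
	// Everything under /api/v1 is registered once on a sub-router instead of
	// prefixing each path with v1Base(...), mirroring the refactor above.
	r.Route(prefix+"/v1", func(r chi.Router) {
		r.Get("/status", func(w http.ResponseWriter, _ *http.Request) {
			w.Write([]byte(`{"health":true}`))
		})
		r.Get("/items/{id}", func(w http.ResponseWriter, r *http.Request) {
			id := chi.URLParam(r, "id") // path parameter, as in HandleItemGet
			w.Write([]byte(id))
		})
		r.NotFound(http.NotFound)
	})

	_ = http.ListenAndServe(":7745", r)
}
```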
|
||||
func registerMimes() {
|
||||
@@ -175,7 +190,7 @@ func notFoundHandler() errchain.HandlerFunc {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
defer func() { _ = f.Close() }()
|
||||
|
||||
stat, _ := f.Stat()
|
||||
if stat.IsDir() {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
// Package docs GENERATED BY SWAG; DO NOT EDIT
|
||||
// This file was generated by swaggo/swag
|
||||
// Package docs Code generated by swaggo/swag. DO NOT EDIT
|
||||
package docs
|
||||
|
||||
import "github.com/swaggo/swag"
|
||||
@@ -68,6 +67,31 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/set-primary-photos": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"description": "Sets the first photo of each item as the primary photo",
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Actions"
|
||||
],
|
||||
"summary": "Set Primary Photos",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.ActionAmountResult"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/zero-item-time-fields": {
|
||||
"post": {
|
||||
"security": [
|
||||
@@ -126,6 +150,25 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/currency": {
|
||||
"get": {
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Base"
|
||||
],
|
||||
"summary": "Currency",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/currencies.Currency"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/groups": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -386,6 +429,16 @@ const docTemplate = `{
|
||||
"description": "location Ids",
|
||||
"name": "locations",
|
||||
"in": "query"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"collectionFormat": "multi",
|
||||
"description": "parent Ids",
|
||||
"name": "parentIds",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@@ -513,6 +566,9 @@ const docTemplate = `{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"consumes": [
|
||||
"multipart/form-data"
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
@@ -635,6 +691,46 @@ const docTemplate = `{
|
||||
"description": "No Content"
|
||||
}
|
||||
}
|
||||
},
|
||||
"patch": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Items"
|
||||
],
|
||||
"summary": "Update Item",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Item ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Item Data",
|
||||
"name": "payload",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.ItemPatch"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.ItemOut"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
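The new PATCH operation above accepts a repo.ItemPatch body and returns repo.ItemOut. A client-side sketch of the call; the host, item ID, and token are placeholders.

```go
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

// The payload mirrors the repo.ItemPatch definition further down (an id plus
// a nullable quantity).
func main() {
	body := []byte(`{"id":"00000000-0000-0000-0000-000000000000","quantity":3}`)

	req, err := http.NewRequest(http.MethodPatch,
		"http://localhost:7745/api/v1/items/00000000-0000-0000-0000-000000000000",
		bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer TOKEN") // per the Bearer securityDefinition

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // 200 OK with a repo.ItemOut body on success
}
```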
"/v1/items/{id}/attachments": {
|
||||
@@ -644,6 +740,9 @@ const docTemplate = `{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"consumes": [
|
||||
"multipart/form-data"
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
@@ -691,7 +790,7 @@ const docTemplate = `{
|
||||
"422": {
|
||||
"description": "Unprocessable Entity",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/mid.ErrorResponse"
|
||||
"$ref": "#/definitions/validate.ErrorResponse"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -824,14 +923,41 @@ const docTemplate = `{
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
"Item Maintenance"
|
||||
],
|
||||
"summary": "Get Maintenance Log",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Item ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"scheduled",
|
||||
"completed",
|
||||
"both"
|
||||
],
|
||||
"type": "string",
|
||||
"x-enum-varnames": [
|
||||
"MaintenanceFilterStatusScheduled",
|
||||
"MaintenanceFilterStatusCompleted",
|
||||
"MaintenanceFilterStatusBoth"
|
||||
],
|
||||
"name": "status",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceLog"
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryWithDetails"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -846,10 +972,17 @@ const docTemplate = `{
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
"Item Maintenance"
|
||||
],
|
||||
"summary": "Create Maintenance Entry",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Item ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Entry Data",
|
||||
"name": "payload",
|
||||
@@ -870,8 +1003,8 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/items/{id}/maintenance/{entry_id}": {
|
||||
"put": {
|
||||
"/v1/items/{id}/path": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
@@ -881,47 +1014,29 @@ const docTemplate = `{
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
"Items"
|
||||
],
|
||||
"summary": "Update Maintenance Entry",
|
||||
"summary": "Get the full path of an item",
|
||||
"parameters": [
|
||||
{
|
||||
"description": "Entry Data",
|
||||
"name": "payload",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryUpdate"
|
||||
}
|
||||
"type": "string",
|
||||
"description": "Item ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntry"
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemPath"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Delete Maintenance Entry",
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "No Content"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/labels": {
|
||||
@@ -1280,6 +1395,120 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/maintenance": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Query All Maintenance",
|
||||
"parameters": [
|
||||
{
|
||||
"enum": [
|
||||
"scheduled",
|
||||
"completed",
|
||||
"both"
|
||||
],
|
||||
"type": "string",
|
||||
"x-enum-varnames": [
|
||||
"MaintenanceFilterStatusScheduled",
|
||||
"MaintenanceFilterStatusCompleted",
|
||||
"MaintenanceFilterStatusBoth"
|
||||
],
|
||||
"name": "status",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryWithDetails"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
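The new /v1/maintenance endpoint above queries maintenance entries across the whole group, filtered by status. A client-side sketch that decodes a subset of the repo.MaintenanceEntryWithDetails fields; host and token are placeholders.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

// MaintenanceEntryWithDetails mirrors part of the swagger definition of the
// same name further down (itemID, itemName, name, cost, dates).
type MaintenanceEntryWithDetails struct {
	ID            string `json:"id"`
	ItemID        string `json:"itemID"`
	ItemName      string `json:"itemName"`
	Name          string `json:"name"`
	Cost          string `json:"cost"`
	ScheduledDate string `json:"scheduledDate"`
	CompletedDate string `json:"completedDate"`
}

func main() {
	u := url.URL{
		Scheme:   "http",
		Host:     "localhost:7745",
		Path:     "/api/v1/maintenance",
		RawQuery: url.Values{"status": {"scheduled"}}.Encode(),
	}

	req, err := http.NewRequest(http.MethodGet, u.String(), nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer TOKEN")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var entries []MaintenanceEntryWithDetails
	if err := json.NewDecoder(resp.Body).Decode(&entries); err != nil {
		panic(err)
	}
	fmt.Printf("%d scheduled maintenance entries\n", len(entries))
}
```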
"/v1/maintenance/{id}": {
|
||||
"put": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Update Maintenance Entry",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Maintenance ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Entry Data",
|
||||
"name": "payload",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryUpdate"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntry"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Delete Maintenance Entry",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Maintenance ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "No Content"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/notifiers": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -1355,13 +1584,6 @@ const docTemplate = `{
|
||||
],
|
||||
"summary": "Test Notifier",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Notifier ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "URL",
|
||||
@@ -1510,7 +1732,7 @@ const docTemplate = `{
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.ApiSummary"
|
||||
"$ref": "#/definitions/v1.APISummary"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1559,20 +1781,6 @@ const docTemplate = `{
|
||||
],
|
||||
"summary": "User Login",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"example": "admin@admin.com",
|
||||
"description": "string",
|
||||
"name": "username",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"example": "admin",
|
||||
"description": "string",
|
||||
"name": "password",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"description": "Login Data",
|
||||
"name": "payload",
|
||||
@@ -1581,6 +1789,12 @@ const docTemplate = `{
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.LoginForm"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "auth provider",
|
||||
"name": "provider",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@@ -1759,17 +1973,20 @@ const docTemplate = `{
|
||||
}
|
||||
},
|
||||
"definitions": {
|
||||
"mid.ErrorResponse": {
|
||||
"currencies.Currency": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"error": {
|
||||
"code": {
|
||||
"type": "string"
|
||||
},
|
||||
"fields": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
}
|
||||
"local": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"symbol": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -1853,6 +2070,9 @@ const docTemplate = `{
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"type": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -1864,6 +2084,9 @@ const docTemplate = `{
|
||||
"repo.ItemAttachmentUpdate": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -1942,12 +2165,6 @@ const docTemplate = `{
|
||||
"$ref": "#/definitions/repo.ItemAttachment"
|
||||
}
|
||||
},
|
||||
"children": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"createdAt": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -1963,6 +2180,9 @@ const docTemplate = `{
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -1978,9 +2198,13 @@ const docTemplate = `{
|
||||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"manufacturer": {
|
||||
"type": "string"
|
||||
@@ -1996,16 +2220,19 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"parent": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseFrom": {
|
||||
"type": "string"
|
||||
},
|
||||
"purchasePrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number"
|
||||
},
|
||||
"purchaseTime": {
|
||||
"description": "Purchase",
|
||||
@@ -2021,8 +2248,7 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"soldPrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number"
|
||||
},
|
||||
"soldTime": {
|
||||
"description": "Sold",
|
||||
@@ -2031,6 +2257,9 @@ const docTemplate = `{
|
||||
"soldTo": {
|
||||
"type": "string"
|
||||
},
|
||||
"syncChildItemsLocations": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"updatedAt": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2042,12 +2271,43 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemPatch": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"quantity": {
|
||||
"type": "integer",
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemPath": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"$ref": "#/definitions/repo.ItemType"
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemSummary": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"archived": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"assetId": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
},
|
||||
"createdAt": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2057,6 +2317,9 @@ const docTemplate = `{
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -2068,16 +2331,19 @@ const docTemplate = `{
|
||||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"purchasePrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number"
|
||||
},
|
||||
"quantity": {
|
||||
"type": "integer"
|
||||
@@ -2087,8 +2353,22 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemType": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"location",
|
||||
"item"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"ItemTypeLocation",
|
||||
"ItemTypeItem"
|
||||
]
|
||||
},
|
||||
"repo.ItemUpdate": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name"
|
||||
],
|
||||
"properties": {
|
||||
"archived": {
|
||||
"type": "boolean"
|
||||
@@ -2097,7 +2377,8 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 1000
|
||||
},
|
||||
"fields": {
|
||||
"type": "array",
|
||||
@@ -2132,7 +2413,9 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"minLength": 1
|
||||
},
|
||||
"notes": {
|
||||
"description": "Extras",
|
||||
@@ -2144,11 +2427,13 @@ const docTemplate = `{
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseFrom": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
"purchasePrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number",
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseTime": {
|
||||
"description": "Purchase",
|
||||
@@ -2165,21 +2450,25 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"soldPrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number",
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
},
|
||||
"soldTime": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"soldTo": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
"syncChildItemsLocations": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"warrantyDetails": {
|
||||
"type": "string"
|
||||
},
|
||||
"warrantyExpires": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2216,12 +2505,6 @@ const docTemplate = `{
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2283,18 +2566,15 @@ const docTemplate = `{
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"parent": {
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
},
|
||||
"totalPrice": {
|
||||
"type": "number"
|
||||
},
|
||||
"updatedAt": {
|
||||
"type": "string"
|
||||
}
|
||||
@@ -2365,7 +2645,6 @@ const docTemplate = `{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
@@ -2382,7 +2661,6 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2394,7 +2672,6 @@ const docTemplate = `{
|
||||
],
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
@@ -2408,7 +2685,6 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2417,7 +2693,6 @@ const docTemplate = `{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
@@ -2431,31 +2706,53 @@ const docTemplate = `{
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.MaintenanceLog": {
|
||||
"repo.MaintenanceEntryWithDetails": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"costAverage": {
|
||||
"type": "number"
|
||||
"completedDate": {
|
||||
"type": "string"
|
||||
},
|
||||
"costTotal": {
|
||||
"type": "number"
|
||||
"cost": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
},
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntry"
|
||||
}
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"itemId": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"itemID": {
|
||||
"type": "string"
|
||||
},
|
||||
"itemName": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.MaintenanceFilterStatus": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"scheduled",
|
||||
"completed",
|
||||
"both"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"MaintenanceFilterStatusScheduled",
|
||||
"MaintenanceFilterStatusCompleted",
|
||||
"MaintenanceFilterStatusBoth"
|
||||
]
|
||||
},
|
||||
"repo.NotifierCreate": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
@@ -2497,6 +2794,9 @@ const docTemplate = `{
|
||||
"updatedAt": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string"
|
||||
},
|
||||
"userId": {
|
||||
"type": "string"
|
||||
}
|
||||
@@ -2650,6 +2950,17 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"services.Latest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"date": {
|
||||
"type": "string"
|
||||
},
|
||||
"version": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services.UserRegistration": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -2667,15 +2978,7 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.ActionAmountResult": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completed": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.ApiSummary": {
|
||||
"v1.APISummary": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"allowRegistration": {
|
||||
@@ -2690,6 +2993,9 @@ const docTemplate = `{
|
||||
"health": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"latest": {
|
||||
"$ref": "#/definitions/services.Latest"
|
||||
},
|
||||
"message": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2704,6 +3010,14 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.ActionAmountResult": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completed": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.Build": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -2771,13 +3085,15 @@ const docTemplate = `{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"password": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"example": "admin"
|
||||
},
|
||||
"stayLoggedIn": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"username": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"example": "admin@admin.com"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -2800,6 +3116,17 @@ const docTemplate = `{
|
||||
"properties": {
|
||||
"item": {}
|
||||
}
|
||||
},
|
||||
"validate.ErrorResponse": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"error": {
|
||||
"type": "string"
|
||||
},
|
||||
"fields": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"securityDefinitions": {
|
||||
@@ -2822,6 +3149,8 @@ var SwaggerInfo = &swag.Spec{
|
||||
Description: "Track, Manage, and Organize your Things.",
|
||||
InfoInstanceName: "swagger",
|
||||
SwaggerTemplate: docTemplate,
|
||||
LeftDelim: "{{",
|
||||
RightDelim: "}}",
|
||||
}
|
||||
|
||||
func init() {
|
||||
|
||||
@@ -60,6 +60,31 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/set-primary-photos": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"description": "Sets the first photo of each item as the primary photo",
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Actions"
|
||||
],
|
||||
"summary": "Set Primary Photos",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.ActionAmountResult"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/actions/zero-item-time-fields": {
|
||||
"post": {
|
||||
"security": [
|
||||
@@ -118,6 +143,25 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/currency": {
|
||||
"get": {
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Base"
|
||||
],
|
||||
"summary": "Currency",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/currencies.Currency"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/groups": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -378,6 +422,16 @@
|
||||
"description": "location Ids",
|
||||
"name": "locations",
|
||||
"in": "query"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"collectionFormat": "multi",
|
||||
"description": "parent Ids",
|
||||
"name": "parentIds",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@@ -505,6 +559,9 @@
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"consumes": [
|
||||
"multipart/form-data"
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
@@ -627,6 +684,46 @@
|
||||
"description": "No Content"
|
||||
}
|
||||
}
|
||||
},
|
||||
"patch": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Items"
|
||||
],
|
||||
"summary": "Update Item",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Item ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Item Data",
|
||||
"name": "payload",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.ItemPatch"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.ItemOut"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/items/{id}/attachments": {
|
||||
@@ -636,6 +733,9 @@
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"consumes": [
|
||||
"multipart/form-data"
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
@@ -683,7 +783,7 @@
|
||||
"422": {
|
||||
"description": "Unprocessable Entity",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/mid.ErrorResponse"
|
||||
"$ref": "#/definitions/validate.ErrorResponse"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -816,14 +916,41 @@
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
"Item Maintenance"
|
||||
],
|
||||
"summary": "Get Maintenance Log",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Item ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"scheduled",
|
||||
"completed",
|
||||
"both"
|
||||
],
|
||||
"type": "string",
|
||||
"x-enum-varnames": [
|
||||
"MaintenanceFilterStatusScheduled",
|
||||
"MaintenanceFilterStatusCompleted",
|
||||
"MaintenanceFilterStatusBoth"
|
||||
],
|
||||
"name": "status",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceLog"
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryWithDetails"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -838,10 +965,17 @@
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
"Item Maintenance"
|
||||
],
|
||||
"summary": "Create Maintenance Entry",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Item ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Entry Data",
|
||||
"name": "payload",
|
||||
@@ -862,8 +996,8 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/items/{id}/maintenance/{entry_id}": {
|
||||
"put": {
|
||||
"/v1/items/{id}/path": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
@@ -873,47 +1007,29 @@
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
"Items"
|
||||
],
|
||||
"summary": "Update Maintenance Entry",
|
||||
"summary": "Get the full path of an item",
|
||||
"parameters": [
|
||||
{
|
||||
"description": "Entry Data",
|
||||
"name": "payload",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryUpdate"
|
||||
}
|
||||
"type": "string",
|
||||
"description": "Item ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntry"
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemPath"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Delete Maintenance Entry",
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "No Content"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/labels": {
|
||||
@@ -1272,6 +1388,120 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/maintenance": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Query All Maintenance",
|
||||
"parameters": [
|
||||
{
|
||||
"enum": [
|
||||
"scheduled",
|
||||
"completed",
|
||||
"both"
|
||||
],
|
||||
"type": "string",
|
||||
"x-enum-varnames": [
|
||||
"MaintenanceFilterStatusScheduled",
|
||||
"MaintenanceFilterStatusCompleted",
|
||||
"MaintenanceFilterStatusBoth"
|
||||
],
|
||||
"name": "status",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryWithDetails"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/maintenance/{id}": {
|
||||
"put": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Update Maintenance Entry",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Maintenance ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Entry Data",
|
||||
"name": "payload",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntryUpdate"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntry"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"security": [
|
||||
{
|
||||
"Bearer": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Maintenance"
|
||||
],
|
||||
"summary": "Delete Maintenance Entry",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Maintenance ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "No Content"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/v1/notifiers": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -1347,13 +1577,6 @@
|
||||
],
|
||||
"summary": "Test Notifier",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Notifier ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "URL",
|
||||
@@ -1502,7 +1725,7 @@
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.ApiSummary"
|
||||
"$ref": "#/definitions/v1.APISummary"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1551,20 +1774,6 @@
|
||||
],
|
||||
"summary": "User Login",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"example": "admin@admin.com",
|
||||
"description": "string",
|
||||
"name": "username",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"example": "admin",
|
||||
"description": "string",
|
||||
"name": "password",
|
||||
"in": "formData"
|
||||
},
|
||||
{
|
||||
"description": "Login Data",
|
||||
"name": "payload",
|
||||
@@ -1573,6 +1782,12 @@
|
||||
"schema": {
|
||||
"$ref": "#/definitions/v1.LoginForm"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "auth provider",
|
||||
"name": "provider",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@@ -1751,17 +1966,20 @@
|
||||
}
|
||||
},
|
||||
"definitions": {
|
||||
"mid.ErrorResponse": {
|
||||
"currencies.Currency": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"error": {
|
||||
"code": {
|
||||
"type": "string"
|
||||
},
|
||||
"fields": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
}
|
||||
"local": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"symbol": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -1845,6 +2063,9 @@
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"type": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -1856,6 +2077,9 @@
|
||||
"repo.ItemAttachmentUpdate": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"primary": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -1934,12 +2158,6 @@
|
||||
"$ref": "#/definitions/repo.ItemAttachment"
|
||||
}
|
||||
},
|
||||
"children": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"createdAt": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -1955,6 +2173,9 @@
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -1970,9 +2191,13 @@
|
||||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"manufacturer": {
|
||||
"type": "string"
|
||||
@@ -1988,16 +2213,19 @@
|
||||
"type": "string"
|
||||
},
|
||||
"parent": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseFrom": {
|
||||
"type": "string"
|
||||
},
|
||||
"purchasePrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number"
|
||||
},
|
||||
"purchaseTime": {
|
||||
"description": "Purchase",
|
||||
@@ -2013,8 +2241,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"soldPrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number"
|
||||
},
|
||||
"soldTime": {
|
||||
"description": "Sold",
|
||||
@@ -2023,6 +2250,9 @@
|
||||
"soldTo": {
|
||||
"type": "string"
|
||||
},
|
||||
"syncChildItemsLocations": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"updatedAt": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2034,12 +2264,43 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemPatch": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"quantity": {
|
||||
"type": "integer",
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemPath": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"$ref": "#/definitions/repo.ItemType"
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemSummary": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"archived": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"assetId": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
},
|
||||
"createdAt": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2049,6 +2310,9 @@
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"imageId": {
|
||||
"type": "string"
|
||||
},
|
||||
"insured": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -2060,16 +2324,19 @@
|
||||
},
|
||||
"location": {
|
||||
"description": "Edges",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
}
|
||||
],
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true,
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
"x-omitempty": true
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"purchasePrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number"
|
||||
},
|
||||
"quantity": {
|
||||
"type": "integer"
|
||||
@@ -2079,8 +2346,22 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.ItemType": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"location",
|
||||
"item"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"ItemTypeLocation",
|
||||
"ItemTypeItem"
|
||||
]
|
||||
},
|
||||
"repo.ItemUpdate": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name"
|
||||
],
|
||||
"properties": {
|
||||
"archived": {
|
||||
"type": "boolean"
|
||||
@@ -2089,7 +2370,8 @@
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 1000
|
||||
},
|
||||
"fields": {
|
||||
"type": "array",
|
||||
@@ -2124,7 +2406,9 @@
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"minLength": 1
|
||||
},
|
||||
"notes": {
|
||||
"description": "Extras",
|
||||
@@ -2136,11 +2420,13 @@
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseFrom": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
"purchasePrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number",
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
},
|
||||
"purchaseTime": {
|
||||
"description": "Purchase",
|
||||
@@ -2157,21 +2443,25 @@
|
||||
"type": "string"
|
||||
},
|
||||
"soldPrice": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
"type": "number",
|
||||
"x-nullable": true,
|
||||
"x-omitempty": true
|
||||
},
|
||||
"soldTime": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"soldTo": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
"syncChildItemsLocations": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"warrantyDetails": {
|
||||
"type": "string"
|
||||
},
|
||||
"warrantyExpires": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2208,12 +2498,6 @@
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2275,18 +2559,15 @@
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.ItemSummary"
|
||||
}
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"parent": {
|
||||
"$ref": "#/definitions/repo.LocationSummary"
|
||||
},
|
||||
"totalPrice": {
|
||||
"type": "number"
|
||||
},
|
||||
"updatedAt": {
|
||||
"type": "string"
|
||||
}
|
||||
@@ -2357,7 +2638,6 @@
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
@@ -2374,7 +2654,6 @@
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2386,7 +2665,6 @@
|
||||
],
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
@@ -2400,7 +2678,6 @@
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
@@ -2409,7 +2686,6 @@
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completedDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
},
|
||||
"cost": {
|
||||
@@ -2423,31 +2699,53 @@
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"description": "Sold",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.MaintenanceLog": {
|
||||
"repo.MaintenanceEntryWithDetails": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"costAverage": {
|
||||
"type": "number"
|
||||
"completedDate": {
|
||||
"type": "string"
|
||||
},
|
||||
"costTotal": {
|
||||
"type": "number"
|
||||
"cost": {
|
||||
"type": "string",
|
||||
"example": "0"
|
||||
},
|
||||
"entries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/repo.MaintenanceEntry"
|
||||
}
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"itemId": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"itemID": {
|
||||
"type": "string"
|
||||
},
|
||||
"itemName": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"scheduledDate": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"repo.MaintenanceFilterStatus": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"scheduled",
|
||||
"completed",
|
||||
"both"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"MaintenanceFilterStatusScheduled",
|
||||
"MaintenanceFilterStatusCompleted",
|
||||
"MaintenanceFilterStatusBoth"
|
||||
]
|
||||
},
|
||||
"repo.NotifierCreate": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
@@ -2489,6 +2787,9 @@
|
||||
"updatedAt": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string"
|
||||
},
|
||||
"userId": {
|
||||
"type": "string"
|
||||
}
|
||||
@@ -2642,6 +2943,17 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"services.Latest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"date": {
|
||||
"type": "string"
|
||||
},
|
||||
"version": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services.UserRegistration": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -2659,15 +2971,7 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.ActionAmountResult": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completed": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.ApiSummary": {
|
||||
"v1.APISummary": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"allowRegistration": {
|
||||
@@ -2682,6 +2986,9 @@
|
||||
"health": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"latest": {
|
||||
"$ref": "#/definitions/services.Latest"
|
||||
},
|
||||
"message": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -2696,6 +3003,14 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.ActionAmountResult": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completed": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"v1.Build": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -2763,13 +3078,15 @@
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"password": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"example": "admin"
|
||||
},
|
||||
"stayLoggedIn": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"username": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"example": "admin@admin.com"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -2792,6 +3109,17 @@
|
||||
"properties": {
|
||||
"item": {}
|
||||
}
|
||||
},
|
||||
"validate.ErrorResponse": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"error": {
|
||||
"type": "string"
|
||||
},
|
||||
"fields": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"securityDefinitions": {
|
||||
|
||||
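The swagger.json changes above add a group-wide `/v1/maintenance` query with a `status` filter, an item path lookup, and the renamed `v1.APISummary`. As a reference sketch only, here is how a client might call the new maintenance query in Go; the host/port and token are assumptions, while the `/api` prefix and the JSON keys come from the spec itself.

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

// Trimmed-down view of repo.MaintenanceEntryWithDetails, using the JSON keys from the spec above.
type maintenanceEntry struct {
	ID            string `json:"id"`
	ItemName      string `json:"itemName"`
	Name          string `json:"name"`
	ScheduledDate string `json:"scheduledDate"`
}

func main() {
	// Base URL and token are placeholders; the /api prefix comes from basePath in the spec.
	req, err := http.NewRequest(http.MethodGet,
		"http://localhost:7745/api/v1/maintenance?status=scheduled", nil)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Authorization", "Bearer <token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var entries []maintenanceEntry
	if err := json.NewDecoder(resp.Body).Decode(&entries); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%d scheduled maintenance entries\n", len(entries))
}
```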
@@ -1,13 +1,15 @@
|
||||
basePath: /api
|
||||
definitions:
|
||||
mid.ErrorResponse:
|
||||
currencies.Currency:
|
||||
properties:
|
||||
error:
|
||||
code:
|
||||
type: string
|
||||
local:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
symbol:
|
||||
type: string
|
||||
fields:
|
||||
additionalProperties:
|
||||
type: string
|
||||
type: object
|
||||
type: object
|
||||
repo.DocumentOut:
|
||||
properties:
|
||||
@@ -61,6 +63,8 @@ definitions:
|
||||
$ref: '#/definitions/repo.DocumentOut'
|
||||
id:
|
||||
type: string
|
||||
primary:
|
||||
type: boolean
|
||||
type:
|
||||
type: string
|
||||
updatedAt:
|
||||
@@ -68,6 +72,8 @@ definitions:
|
||||
type: object
|
||||
repo.ItemAttachmentUpdate:
|
||||
properties:
|
||||
primary:
|
||||
type: boolean
|
||||
title:
|
||||
type: string
|
||||
type:
|
||||
@@ -121,10 +127,6 @@ definitions:
|
||||
items:
|
||||
$ref: '#/definitions/repo.ItemAttachment'
|
||||
type: array
|
||||
children:
|
||||
items:
|
||||
$ref: '#/definitions/repo.ItemSummary'
|
||||
type: array
|
||||
createdAt:
|
||||
type: string
|
||||
description:
|
||||
@@ -135,6 +137,8 @@ definitions:
|
||||
type: array
|
||||
id:
|
||||
type: string
|
||||
imageId:
|
||||
type: string
|
||||
insured:
|
||||
type: boolean
|
||||
labels:
|
||||
@@ -145,7 +149,8 @@ definitions:
|
||||
description: Warranty
|
||||
type: boolean
|
||||
location:
|
||||
$ref: '#/definitions/repo.LocationSummary'
|
||||
allOf:
|
||||
- $ref: '#/definitions/repo.LocationSummary'
|
||||
description: Edges
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
@@ -159,14 +164,14 @@ definitions:
|
||||
description: Extras
|
||||
type: string
|
||||
parent:
|
||||
$ref: '#/definitions/repo.ItemSummary'
|
||||
allOf:
|
||||
- $ref: '#/definitions/repo.ItemSummary'
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
purchaseFrom:
|
||||
type: string
|
||||
purchasePrice:
|
||||
example: "0"
|
||||
type: string
|
||||
type: number
|
||||
purchaseTime:
|
||||
description: Purchase
|
||||
type: string
|
||||
@@ -177,13 +182,14 @@ definitions:
|
||||
soldNotes:
|
||||
type: string
|
||||
soldPrice:
|
||||
example: "0"
|
||||
type: string
|
||||
type: number
|
||||
soldTime:
|
||||
description: Sold
|
||||
type: string
|
||||
soldTo:
|
||||
type: string
|
||||
syncChildItemsLocations:
|
||||
type: boolean
|
||||
updatedAt:
|
||||
type: string
|
||||
warrantyDetails:
|
||||
@@ -191,16 +197,39 @@ definitions:
|
||||
warrantyExpires:
|
||||
type: string
|
||||
type: object
|
||||
repo.ItemPatch:
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
quantity:
|
||||
type: integer
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
type: object
|
||||
repo.ItemPath:
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
type:
|
||||
$ref: '#/definitions/repo.ItemType'
|
||||
type: object
|
||||
repo.ItemSummary:
|
||||
properties:
|
||||
archived:
|
||||
type: boolean
|
||||
assetId:
|
||||
example: "0"
|
||||
type: string
|
||||
createdAt:
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
imageId:
|
||||
type: string
|
||||
insured:
|
||||
type: boolean
|
||||
labels:
|
||||
@@ -208,20 +237,28 @@ definitions:
|
||||
$ref: '#/definitions/repo.LabelSummary'
|
||||
type: array
|
||||
location:
|
||||
$ref: '#/definitions/repo.LocationSummary'
|
||||
allOf:
|
||||
- $ref: '#/definitions/repo.LocationSummary'
|
||||
description: Edges
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
name:
|
||||
type: string
|
||||
purchasePrice:
|
||||
example: "0"
|
||||
type: string
|
||||
type: number
|
||||
quantity:
|
||||
type: integer
|
||||
updatedAt:
|
||||
type: string
|
||||
type: object
|
||||
repo.ItemType:
|
||||
enum:
|
||||
- location
|
||||
- item
|
||||
type: string
|
||||
x-enum-varnames:
|
||||
- ItemTypeLocation
|
||||
- ItemTypeItem
|
||||
repo.ItemUpdate:
|
||||
properties:
|
||||
archived:
|
||||
@@ -229,6 +266,7 @@ definitions:
|
||||
assetId:
|
||||
type: string
|
||||
description:
|
||||
maxLength: 1000
|
||||
type: string
|
||||
fields:
|
||||
items:
|
||||
@@ -253,6 +291,8 @@ definitions:
|
||||
modelNumber:
|
||||
type: string
|
||||
name:
|
||||
maxLength: 255
|
||||
minLength: 1
|
||||
type: string
|
||||
notes:
|
||||
description: Extras
|
||||
@@ -262,10 +302,12 @@ definitions:
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
purchaseFrom:
|
||||
maxLength: 255
|
||||
type: string
|
||||
purchasePrice:
|
||||
example: "0"
|
||||
type: string
|
||||
type: number
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
purchaseTime:
|
||||
description: Purchase
|
||||
type: string
|
||||
@@ -277,18 +319,23 @@ definitions:
|
||||
soldNotes:
|
||||
type: string
|
||||
soldPrice:
|
||||
example: "0"
|
||||
type: string
|
||||
type: number
|
||||
x-nullable: true
|
||||
x-omitempty: true
|
||||
soldTime:
|
||||
description: Sold
|
||||
type: string
|
||||
soldTo:
|
||||
maxLength: 255
|
||||
type: string
|
||||
syncChildItemsLocations:
|
||||
type: boolean
|
||||
warrantyDetails:
|
||||
type: string
|
||||
warrantyExpires:
|
||||
description: Sold
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
type: object
|
||||
repo.LabelCreate:
|
||||
properties:
|
||||
@@ -312,10 +359,6 @@ definitions:
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
items:
|
||||
items:
|
||||
$ref: '#/definitions/repo.ItemSummary'
|
||||
type: array
|
||||
name:
|
||||
type: string
|
||||
updatedAt:
|
||||
@@ -356,14 +399,12 @@ definitions:
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
items:
|
||||
items:
|
||||
$ref: '#/definitions/repo.ItemSummary'
|
||||
type: array
|
||||
name:
|
||||
type: string
|
||||
parent:
|
||||
$ref: '#/definitions/repo.LocationSummary'
|
||||
totalPrice:
|
||||
type: number
|
||||
updatedAt:
|
||||
type: string
|
||||
type: object
|
||||
@@ -410,7 +451,6 @@ definitions:
|
||||
repo.MaintenanceEntry:
|
||||
properties:
|
||||
completedDate:
|
||||
description: Sold
|
||||
type: string
|
||||
cost:
|
||||
example: "0"
|
||||
@@ -422,13 +462,11 @@ definitions:
|
||||
name:
|
||||
type: string
|
||||
scheduledDate:
|
||||
description: Sold
|
||||
type: string
|
||||
type: object
|
||||
repo.MaintenanceEntryCreate:
|
||||
properties:
|
||||
completedDate:
|
||||
description: Sold
|
||||
type: string
|
||||
cost:
|
||||
example: "0"
|
||||
@@ -438,7 +476,6 @@ definitions:
|
||||
name:
|
||||
type: string
|
||||
scheduledDate:
|
||||
description: Sold
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
@@ -446,7 +483,6 @@ definitions:
|
||||
repo.MaintenanceEntryUpdate:
|
||||
properties:
|
||||
completedDate:
|
||||
description: Sold
|
||||
type: string
|
||||
cost:
|
||||
example: "0"
|
||||
@@ -456,22 +492,38 @@ definitions:
|
||||
name:
|
||||
type: string
|
||||
scheduledDate:
|
||||
description: Sold
|
||||
type: string
|
||||
type: object
|
||||
repo.MaintenanceLog:
|
||||
repo.MaintenanceEntryWithDetails:
|
||||
properties:
|
||||
costAverage:
|
||||
type: number
|
||||
costTotal:
|
||||
type: number
|
||||
entries:
|
||||
items:
|
||||
$ref: '#/definitions/repo.MaintenanceEntry'
|
||||
type: array
|
||||
itemId:
|
||||
completedDate:
|
||||
type: string
|
||||
cost:
|
||||
example: "0"
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
itemID:
|
||||
type: string
|
||||
itemName:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
scheduledDate:
|
||||
type: string
|
||||
type: object
|
||||
repo.MaintenanceFilterStatus:
|
||||
enum:
|
||||
- scheduled
|
||||
- completed
|
||||
- both
|
||||
type: string
|
||||
x-enum-varnames:
|
||||
- MaintenanceFilterStatusScheduled
|
||||
- MaintenanceFilterStatusCompleted
|
||||
- MaintenanceFilterStatusBoth
|
||||
repo.NotifierCreate:
|
||||
properties:
|
||||
isActive:
|
||||
@@ -500,6 +552,8 @@ definitions:
|
||||
type: string
|
||||
updatedAt:
|
||||
type: string
|
||||
url:
|
||||
type: string
|
||||
userId:
|
||||
type: string
|
||||
type: object
|
||||
@@ -600,6 +654,13 @@ definitions:
|
||||
value:
|
||||
type: number
|
||||
type: object
|
||||
services.Latest:
|
||||
properties:
|
||||
date:
|
||||
type: string
|
||||
version:
|
||||
type: string
|
||||
type: object
|
||||
services.UserRegistration:
|
||||
properties:
|
||||
email:
|
||||
@@ -611,12 +672,7 @@ definitions:
|
||||
token:
|
||||
type: string
|
||||
type: object
|
||||
v1.ActionAmountResult:
|
||||
properties:
|
||||
completed:
|
||||
type: integer
|
||||
type: object
|
||||
v1.ApiSummary:
|
||||
v1.APISummary:
|
||||
properties:
|
||||
allowRegistration:
|
||||
type: boolean
|
||||
@@ -626,6 +682,8 @@ definitions:
|
||||
type: boolean
|
||||
health:
|
||||
type: boolean
|
||||
latest:
|
||||
$ref: '#/definitions/services.Latest'
|
||||
message:
|
||||
type: string
|
||||
title:
|
||||
@@ -635,6 +693,11 @@ definitions:
|
||||
type: string
|
||||
type: array
|
||||
type: object
|
||||
v1.ActionAmountResult:
|
||||
properties:
|
||||
completed:
|
||||
type: integer
|
||||
type: object
|
||||
v1.Build:
|
||||
properties:
|
||||
buildTime:
|
||||
@@ -679,10 +742,12 @@ definitions:
|
||||
v1.LoginForm:
|
||||
properties:
|
||||
password:
|
||||
example: admin
|
||||
type: string
|
||||
stayLoggedIn:
|
||||
type: boolean
|
||||
username:
|
||||
example: admin@admin.com
|
||||
type: string
|
||||
type: object
|
||||
v1.TokenResponse:
|
||||
@@ -698,6 +763,13 @@ definitions:
|
||||
properties:
|
||||
item: {}
|
||||
type: object
|
||||
validate.ErrorResponse:
|
||||
properties:
|
||||
error:
|
||||
type: string
|
||||
fields:
|
||||
type: string
|
||||
type: object
|
||||
info:
|
||||
contact:
|
||||
name: Don't
|
||||
@@ -735,6 +807,21 @@ paths:
|
||||
summary: Ensures Import Refs
|
||||
tags:
|
||||
- Actions
|
||||
/v1/actions/set-primary-photos:
|
||||
post:
|
||||
description: Sets the first photo of each item as the primary photo
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/v1.ActionAmountResult'
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Set Primary Photos
|
||||
tags:
|
||||
- Actions
|
||||
/v1/actions/zero-item-time-fields:
|
||||
post:
|
||||
description: Resets all item date fields to the beginning of the day
|
||||
@@ -770,6 +857,18 @@ paths:
|
||||
summary: Get Item by Asset ID
|
||||
tags:
|
||||
- Items
|
||||
/v1/currency:
|
||||
get:
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/currencies.Currency'
|
||||
summary: Currency
|
||||
tags:
|
||||
- Base
|
||||
/v1/groups:
|
||||
get:
|
||||
produces:
|
||||
@@ -923,6 +1022,13 @@ paths:
|
||||
type: string
|
||||
name: locations
|
||||
type: array
|
||||
- collectionFormat: multi
|
||||
description: parent Ids
|
||||
in: query
|
||||
items:
|
||||
type: string
|
||||
name: parentIds
|
||||
type: array
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
@@ -992,6 +1098,31 @@ paths:
|
||||
summary: Get Item
|
||||
tags:
|
||||
- Items
|
||||
patch:
|
||||
parameters:
|
||||
- description: Item ID
|
||||
in: path
|
||||
name: id
|
||||
required: true
|
||||
type: string
|
||||
- description: Item Data
|
||||
in: body
|
||||
name: payload
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/definitions/repo.ItemPatch'
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/repo.ItemOut'
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Update Item
|
||||
tags:
|
||||
- Items
|
||||
put:
|
||||
parameters:
|
||||
- description: Item ID
|
||||
@@ -1019,6 +1150,8 @@ paths:
|
||||
- Items
|
||||
/v1/items/{id}/attachments:
|
||||
post:
|
||||
consumes:
|
||||
- multipart/form-data
|
||||
parameters:
|
||||
- description: Item ID
|
||||
in: path
|
||||
@@ -1050,7 +1183,7 @@ paths:
|
||||
"422":
|
||||
description: Unprocessable Entity
|
||||
schema:
|
||||
$ref: '#/definitions/mid.ErrorResponse'
|
||||
$ref: '#/definitions/validate.ErrorResponse'
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Create Item Attachment
|
||||
@@ -1131,20 +1264,44 @@ paths:
|
||||
- Items Attachments
|
||||
/v1/items/{id}/maintenance:
|
||||
get:
|
||||
parameters:
|
||||
- description: Item ID
|
||||
in: path
|
||||
name: id
|
||||
required: true
|
||||
type: string
|
||||
- enum:
|
||||
- scheduled
|
||||
- completed
|
||||
- both
|
||||
in: query
|
||||
name: status
|
||||
type: string
|
||||
x-enum-varnames:
|
||||
- MaintenanceFilterStatusScheduled
|
||||
- MaintenanceFilterStatusCompleted
|
||||
- MaintenanceFilterStatusBoth
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/repo.MaintenanceLog'
|
||||
items:
|
||||
$ref: '#/definitions/repo.MaintenanceEntryWithDetails'
|
||||
type: array
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Get Maintenance Log
|
||||
tags:
|
||||
- Maintenance
|
||||
- Item Maintenance
|
||||
post:
|
||||
parameters:
|
||||
- description: Item ID
|
||||
in: path
|
||||
name: id
|
||||
required: true
|
||||
type: string
|
||||
- description: Entry Data
|
||||
in: body
|
||||
name: payload
|
||||
@@ -1162,39 +1319,29 @@ paths:
|
||||
- Bearer: []
|
||||
summary: Create Maintenance Entry
|
||||
tags:
|
||||
- Maintenance
|
||||
/v1/items/{id}/maintenance/{entry_id}:
|
||||
delete:
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Delete Maintenance Entry
|
||||
tags:
|
||||
- Maintenance
|
||||
put:
|
||||
- Item Maintenance
|
||||
/v1/items/{id}/path:
|
||||
get:
|
||||
parameters:
|
||||
- description: Entry Data
|
||||
in: body
|
||||
name: payload
|
||||
- description: Item ID
|
||||
in: path
|
||||
name: id
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/definitions/repo.MaintenanceEntryUpdate'
|
||||
type: string
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/repo.MaintenanceEntry'
|
||||
items:
|
||||
$ref: '#/definitions/repo.ItemPath'
|
||||
type: array
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Update Maintenance Entry
|
||||
summary: Get the full path of an item
|
||||
tags:
|
||||
- Maintenance
|
||||
- Items
|
||||
/v1/items/export:
|
||||
get:
|
||||
responses:
|
||||
@@ -1241,6 +1388,8 @@ paths:
|
||||
- Items
|
||||
/v1/items/import:
|
||||
post:
|
||||
consumes:
|
||||
- multipart/form-data
|
||||
parameters:
|
||||
- description: Image to upload
|
||||
in: formData
|
||||
@@ -1473,6 +1622,77 @@ paths:
|
||||
summary: Get Locations Tree
|
||||
tags:
|
||||
- Locations
|
||||
/v1/maintenance:
|
||||
get:
|
||||
parameters:
|
||||
- enum:
|
||||
- scheduled
|
||||
- completed
|
||||
- both
|
||||
in: query
|
||||
name: status
|
||||
type: string
|
||||
x-enum-varnames:
|
||||
- MaintenanceFilterStatusScheduled
|
||||
- MaintenanceFilterStatusCompleted
|
||||
- MaintenanceFilterStatusBoth
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
items:
|
||||
$ref: '#/definitions/repo.MaintenanceEntryWithDetails'
|
||||
type: array
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Query All Maintenance
|
||||
tags:
|
||||
- Maintenance
|
||||
/v1/maintenance/{id}:
|
||||
delete:
|
||||
parameters:
|
||||
- description: Maintenance ID
|
||||
in: path
|
||||
name: id
|
||||
required: true
|
||||
type: string
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Delete Maintenance Entry
|
||||
tags:
|
||||
- Maintenance
|
||||
put:
|
||||
parameters:
|
||||
- description: Maintenance ID
|
||||
in: path
|
||||
name: id
|
||||
required: true
|
||||
type: string
|
||||
- description: Entry Data
|
||||
in: body
|
||||
name: payload
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/definitions/repo.MaintenanceEntryUpdate'
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/repo.MaintenanceEntry'
|
||||
security:
|
||||
- Bearer: []
|
||||
summary: Update Maintenance Entry
|
||||
tags:
|
||||
- Maintenance
|
||||
/v1/notifiers:
|
||||
get:
|
||||
produces:
|
||||
@@ -1551,11 +1771,6 @@ paths:
|
||||
/v1/notifiers/test:
|
||||
post:
|
||||
parameters:
|
||||
- description: Notifier ID
|
||||
in: path
|
||||
name: id
|
||||
required: true
|
||||
type: string
|
||||
- description: URL
|
||||
in: query
|
||||
name: url
|
||||
@@ -1612,7 +1827,7 @@ paths:
|
||||
"200":
|
||||
description: OK
|
||||
schema:
|
||||
$ref: '#/definitions/v1.ApiSummary'
|
||||
$ref: '#/definitions/v1.APISummary'
|
||||
summary: Application Info
|
||||
tags:
|
||||
- Base
|
||||
@@ -1639,22 +1854,16 @@ paths:
|
||||
- application/x-www-form-urlencoded
|
||||
- application/json
|
||||
parameters:
|
||||
- description: string
|
||||
example: admin@admin.com
|
||||
in: formData
|
||||
name: username
|
||||
type: string
|
||||
- description: string
|
||||
example: admin
|
||||
in: formData
|
||||
name: password
|
||||
type: string
|
||||
- description: Login Data
|
||||
in: body
|
||||
name: payload
|
||||
required: true
|
||||
schema:
|
||||
$ref: '#/definitions/v1.LoginForm'
|
||||
- description: auth provider
|
||||
in: query
|
||||
name: provider
|
||||
type: string
|
||||
produces:
|
||||
- application/json
|
||||
responses:
|
||||
|
||||
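The swagger.yaml diff above mirrors the same API changes in YAML form; among them is the new `PATCH /v1/items/{id}` operation taking a `repo.ItemPatch` body. A minimal sketch of such a request follows; the server address, token, and item ID are placeholders, not values from the diff.

```go
package main

import (
	"bytes"
	"encoding/json"
	"log"
	"net/http"
)

func main() {
	itemID := "00000000-0000-0000-0000-000000000000" // placeholder item ID

	// Body follows repo.ItemPatch from the definitions: an id plus a nullable quantity.
	body, err := json.Marshal(map[string]any{"id": itemID, "quantity": 3})
	if err != nil {
		log.Fatal(err)
	}

	req, err := http.NewRequest(http.MethodPatch,
		"http://localhost:7745/api/v1/items/"+itemID, bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Authorization", "Bearer <token>")
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close() // a 200 response carries the updated repo.ItemOut
}
```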
@@ -6,7 +6,7 @@ import (
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/migrate"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/migrate"
|
||||
|
||||
atlas "ariga.io/atlas/sql/migrate"
|
||||
_ "ariga.io/atlas/sql/sqlite"
|
||||
@@ -36,7 +36,7 @@ func main() {
|
||||
}
|
||||
|
||||
// Generate migrations using Atlas support for MySQL (note the Ent dialect option passed above).
|
||||
err = migrate.NamedDiff(ctx, "sqlite://.data/homebox.migration.db?_fk=1", os.Args[1], opts...)
|
||||
err = migrate.NamedDiff(ctx, "sqlite://.data/homebox.migration.db?_fk=1&_time_format=sqlite", os.Args[1], opts...)
|
||||
if err != nil {
|
||||
log.Fatalf("failed generating migration file: %v", err)
|
||||
}
|
||||
|
||||
@@ -54,6 +54,7 @@ func main() {
|
||||
NewReReplace(` Services`, " "),
|
||||
NewReReplace(` V1`, " "),
|
||||
NewReReplace(`\?:`, ":"),
|
||||
NewReReplace(`(\w+):\s(.*null.*)`, "$1?: $2"), // make null union types optional
|
||||
NewReDate("createdAt"),
|
||||
NewReDate("updatedAt"),
|
||||
NewReDate("soldTime"),
|
||||
|
||||
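Both the Atlas migration URL above and the test bootstrap later in this diff append `_time_format=sqlite` to the SQLite DSN. A small sketch of opening an ent client with the same connection options; the database path is a placeholder.

```go
package main

import (
	"log"

	_ "github.com/mattn/go-sqlite3"
	"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
)

func main() {
	// DSN options mirrored from the diff: foreign keys on plus _time_format=sqlite.
	// The database path is a placeholder.
	client, err := ent.Open("sqlite3", "file:.data/homebox.db?_fk=1&_time_format=sqlite")
	if err != nil {
		log.Fatalf("failed opening sqlite connection: %v", err)
	}
	defer func() { _ = client.Close() }()
}
```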
109 backend/go.mod
@@ -1,74 +1,79 @@
|
||||
module github.com/hay-kot/homebox/backend
|
||||
module github.com/sysadminsmedia/homebox/backend
|
||||
|
||||
go 1.20
|
||||
go 1.23.0
|
||||
|
||||
require (
|
||||
ariga.io/atlas v0.10.1
|
||||
entgo.io/ent v0.11.10
|
||||
github.com/ardanlabs/conf/v3 v3.1.5
|
||||
github.com/containrrr/shoutrrr v0.7.1
|
||||
github.com/go-chi/chi/v5 v5.0.8
|
||||
github.com/go-playground/validator/v10 v10.12.0
|
||||
github.com/gocarina/gocsv v0.0.0-20230406101422-6445c2b15027
|
||||
github.com/google/uuid v1.3.0
|
||||
github.com/gorilla/schema v1.2.0
|
||||
github.com/hay-kot/httpkit v0.0.3
|
||||
github.com/mattn/go-sqlite3 v1.14.16
|
||||
ariga.io/atlas v0.29.1
|
||||
entgo.io/ent v0.14.1
|
||||
github.com/ardanlabs/conf/v3 v3.2.0
|
||||
github.com/containrrr/shoutrrr v0.8.0
|
||||
github.com/go-chi/chi/v5 v5.2.0
|
||||
github.com/go-playground/validator/v10 v10.23.0
|
||||
github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/gorilla/schema v1.4.1
|
||||
github.com/hay-kot/httpkit v0.0.11
|
||||
github.com/mattn/go-sqlite3 v1.14.24
|
||||
github.com/olahol/melody v1.2.1
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/rs/zerolog v1.29.1
|
||||
github.com/stretchr/testify v1.8.2
|
||||
github.com/swaggo/http-swagger v1.3.4
|
||||
github.com/swaggo/swag v1.8.12
|
||||
github.com/yeqown/go-qrcode/v2 v2.2.1
|
||||
github.com/yeqown/go-qrcode/writer/standard v1.2.1
|
||||
golang.org/x/crypto v0.8.0
|
||||
modernc.org/sqlite v1.21.1
|
||||
github.com/rs/zerolog v1.33.0
|
||||
github.com/stretchr/testify v1.10.0
|
||||
github.com/swaggo/http-swagger/v2 v2.0.2
|
||||
github.com/swaggo/swag v1.16.4
|
||||
github.com/yeqown/go-qrcode/v2 v2.2.4
|
||||
github.com/yeqown/go-qrcode/writer/standard v1.2.4
|
||||
golang.org/x/crypto v0.31.0
|
||||
modernc.org/sqlite v1.34.4
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/KyleBanks/depth v1.2.1 // indirect
|
||||
github.com/agext/levenshtein v1.2.3 // indirect
|
||||
github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
||||
github.com/bmatcuk/doublestar v1.3.4 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/dustin/go-humanize v1.0.0 // indirect
|
||||
github.com/fatih/color v1.13.0 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/fatih/color v1.18.0 // indirect
|
||||
github.com/fogleman/gg v1.3.0 // indirect
|
||||
github.com/go-openapi/inflect v0.19.0 // indirect
|
||||
github.com/go-openapi/jsonpointer v0.19.5 // indirect
|
||||
github.com/go-openapi/jsonreference v0.20.0 // indirect
|
||||
github.com/go-openapi/spec v0.20.7 // indirect
|
||||
github.com/go-openapi/swag v0.22.3 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
|
||||
github.com/go-openapi/inflect v0.21.0 // indirect
|
||||
github.com/go-openapi/jsonpointer v0.21.0 // indirect
|
||||
github.com/go-openapi/jsonreference v0.21.0 // indirect
|
||||
github.com/go-openapi/spec v0.21.0 // indirect
|
||||
github.com/go-openapi/swag v0.23.0 // indirect
|
||||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
|
||||
github.com/google/go-cmp v0.5.9 // indirect
|
||||
github.com/hashicorp/hcl/v2 v2.15.0 // indirect
|
||||
github.com/google/go-cmp v0.6.0 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
||||
github.com/hashicorp/hcl/v2 v2.23.0 // indirect
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
|
||||
github.com/leodido/go-urn v1.2.2 // indirect
|
||||
github.com/mailru/easyjson v0.7.7 // indirect
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/mailru/easyjson v0.9.0 // indirect
|
||||
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||
github.com/mattn/go-isatty v0.0.17 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/swaggo/files v1.0.0 // indirect
|
||||
github.com/rogpeppe/go-internal v1.11.0 // indirect
|
||||
github.com/swaggo/files/v2 v2.0.2 // indirect
|
||||
github.com/yeqown/reedsolomon v1.0.0 // indirect
|
||||
github.com/zclconf/go-cty v1.12.1 // indirect
|
||||
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5 // indirect
|
||||
golang.org/x/mod v0.9.0 // indirect
|
||||
golang.org/x/net v0.9.0 // indirect
|
||||
golang.org/x/sys v0.7.0 // indirect
|
||||
golang.org/x/text v0.9.0 // indirect
|
||||
golang.org/x/tools v0.7.0 // indirect
|
||||
github.com/zclconf/go-cty v1.16.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20250103183323-7d7fa50e5329 // indirect
|
||||
golang.org/x/image v0.23.0 // indirect
|
||||
golang.org/x/mod v0.22.0 // indirect
|
||||
golang.org/x/net v0.33.0 // indirect
|
||||
golang.org/x/sync v0.10.0 // indirect
|
||||
golang.org/x/sys v0.29.0 // indirect
|
||||
golang.org/x/text v0.21.0 // indirect
|
||||
golang.org/x/tools v0.28.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
lukechampine.com/uint128 v1.2.0 // indirect
|
||||
modernc.org/cc/v3 v3.40.0 // indirect
|
||||
modernc.org/ccgo/v3 v3.16.13 // indirect
|
||||
modernc.org/libc v1.22.3 // indirect
|
||||
modernc.org/mathutil v1.5.0 // indirect
|
||||
modernc.org/memory v1.5.0 // indirect
|
||||
modernc.org/opt v0.1.3 // indirect
|
||||
modernc.org/strutil v1.1.3 // indirect
|
||||
modernc.org/token v1.0.1 // indirect
|
||||
modernc.org/gc/v3 v3.0.0-20241223112719-96e2e1e4408d // indirect
|
||||
modernc.org/libc v1.61.6 // indirect
|
||||
modernc.org/mathutil v1.7.1 // indirect
|
||||
modernc.org/memory v1.8.1 // indirect
|
||||
modernc.org/strutil v1.2.1 // indirect
|
||||
modernc.org/token v1.1.0 // indirect
|
||||
)
|
||||
|
||||
1413 backend/go.sum (file diff suppressed because it is too large)
104 backend/internal/core/currencies/currencies.go (new file)
@@ -0,0 +1,104 @@
|
||||
// Package currencies provides a shared definition of currencies. This uses a global
|
||||
// variable to hold the currencies.
|
||||
package currencies
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
_ "embed"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"slices"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
//go:embed currencies.json
|
||||
var defaults []byte
|
||||
|
||||
type CollectorFunc func() ([]Currency, error)
|
||||
|
||||
func CollectJSON(reader io.Reader) CollectorFunc {
|
||||
return func() ([]Currency, error) {
|
||||
var currencies []Currency
|
||||
err := json.NewDecoder(reader).Decode(¤cies)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return currencies, nil
|
||||
}
|
||||
}
|
||||
|
||||
func CollectDefaults() CollectorFunc {
|
||||
return CollectJSON(bytes.NewReader(defaults))
|
||||
}
|
||||
|
||||
func CollectionCurrencies(collectors ...CollectorFunc) ([]Currency, error) {
|
||||
out := make([]Currency, 0, len(collectors))
|
||||
for i := range collectors {
|
||||
c, err := collectors[i]()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
out = append(out, c...)
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
type Currency struct {
|
||||
Name string `json:"name"`
|
||||
Code string `json:"code"`
|
||||
Local string `json:"local"`
|
||||
Symbol string `json:"symbol"`
|
||||
}
|
||||
|
||||
type CurrencyRegistry struct {
|
||||
mu sync.RWMutex
|
||||
registry map[string]Currency
|
||||
}
|
||||
|
||||
func NewCurrencyService(currencies []Currency) *CurrencyRegistry {
|
||||
registry := make(map[string]Currency, len(currencies))
|
||||
for i := range currencies {
|
||||
registry[currencies[i].Code] = currencies[i]
|
||||
}
|
||||
|
||||
return &CurrencyRegistry{
|
||||
registry: registry,
|
||||
}
|
||||
}
|
||||
|
||||
func (cs *CurrencyRegistry) Slice() []Currency {
|
||||
cs.mu.RLock()
|
||||
defer cs.mu.RUnlock()
|
||||
|
||||
out := make([]Currency, 0, len(cs.registry))
|
||||
for key := range cs.registry {
|
||||
out = append(out, cs.registry[key])
|
||||
}
|
||||
|
||||
slices.SortFunc(out, func(a, b Currency) int {
|
||||
if a.Name < b.Name {
|
||||
return -1
|
||||
}
|
||||
|
||||
if a.Name > b.Name {
|
||||
return 1
|
||||
}
|
||||
|
||||
return 0
|
||||
})
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
func (cs *CurrencyRegistry) IsSupported(code string) bool {
|
||||
upper := strings.ToUpper(code)
|
||||
|
||||
cs.mu.RLock()
|
||||
defer cs.mu.RUnlock()
|
||||
_, ok := cs.registry[upper]
|
||||
return ok
|
||||
}
|
||||
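A short usage sketch of the new currencies package shown above, combining the embedded defaults with an extra JSON source; the extra file path is a placeholder.

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
)

func main() {
	// Extra currencies from a user-supplied JSON file (placeholder path).
	extra, err := os.Open("/etc/homebox/extra-currencies.json")
	if err != nil {
		log.Fatal(err)
	}
	defer extra.Close()

	all, err := currencies.CollectionCurrencies(
		currencies.CollectDefaults(), // embedded currencies.json
		currencies.CollectJSON(extra),
	)
	if err != nil {
		log.Fatal(err)
	}

	registry := currencies.NewCurrencyService(all)
	fmt.Println(registry.IsSupported("usd"))      // lookup uppercases the code first
	fmt.Println(len(registry.Slice()), "entries") // Slice() returns them sorted by name
}
```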
1610 backend/internal/core/currencies/currencies.json (new file, diff suppressed because it is too large)
@@ -1,7 +1,9 @@
|
||||
// Package services provides the core business logic for the application.
|
||||
package services
|
||||
|
||||
import (
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
type AllServices struct {
|
||||
@@ -9,12 +11,14 @@ type AllServices struct {
|
||||
Group *GroupService
|
||||
Items *ItemService
|
||||
BackgroundService *BackgroundService
|
||||
Currencies *currencies.CurrencyRegistry
|
||||
}
|
||||
|
||||
type OptionsFunc func(*options)
|
||||
|
||||
type options struct {
|
||||
autoIncrementAssetID bool
|
||||
currencies []currencies.Currency
|
||||
}
|
||||
|
||||
func WithAutoIncrementAssetID(v bool) func(*options) {
|
||||
@@ -23,13 +27,27 @@ func WithAutoIncrementAssetID(v bool) func(*options) {
|
||||
}
|
||||
}
|
||||
|
||||
func WithCurrencies(v []currencies.Currency) func(*options) {
|
||||
return func(o *options) {
|
||||
o.currencies = v
|
||||
}
|
||||
}
|
||||
|
||||
func New(repos *repo.AllRepos, opts ...OptionsFunc) *AllServices {
|
||||
if repos == nil {
|
||||
panic("repos cannot be nil")
|
||||
}
|
||||
|
||||
defaultCurrencies, err := currencies.CollectionCurrencies(
|
||||
currencies.CollectDefaults(),
|
||||
)
|
||||
if err != nil {
|
||||
panic("failed to collect default currencies")
|
||||
}
|
||||
|
||||
options := &options{
|
||||
autoIncrementAssetID: true,
|
||||
currencies: defaultCurrencies,
|
||||
}
|
||||
|
||||
for _, opt := range opts {
|
||||
@@ -43,6 +61,7 @@ func New(repos *repo.AllRepos, opts ...OptionsFunc) *AllServices {
|
||||
repo: repos,
|
||||
autoIncrementAssetID: options.autoIncrementAssetID,
|
||||
},
|
||||
BackgroundService: &BackgroundService{repos},
|
||||
BackgroundService: &BackgroundService{repos, Latest{}},
|
||||
Currencies: currencies.NewCurrencyService(options.currencies),
|
||||
}
|
||||
}
|
||||
|
||||
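Putting the previous two pieces together, a sketch of wiring the service layer with the new currency option; the `*repo.AllRepos` value is assumed to be constructed elsewhere.

```go
package main

import (
	"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
	"github.com/sysadminsmedia/homebox/backend/internal/core/services"
	"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
)

// buildServices shows the option wiring; the caller supplies an already-built *repo.AllRepos.
func buildServices(repos *repo.AllRepos) (*services.AllServices, error) {
	defaults, err := currencies.CollectionCurrencies(currencies.CollectDefaults())
	if err != nil {
		return nil, err
	}

	return services.New(
		repos,
		services.WithAutoIncrementAssetID(true),
		services.WithCurrencies(defaults),
	), nil
}
```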
@@ -4,7 +4,7 @@ import (
|
||||
"context"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
type contextKeys struct {
|
||||
|
||||
@@ -5,8 +5,8 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
func Test_SetAuthContext(t *testing.T) {
|
||||
|
||||
@@ -6,14 +6,17 @@ import (
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/pkgs/faker"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/currencies"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/pkgs/faker"
|
||||
)
|
||||
|
||||
var (
|
||||
fk = faker.NewFaker()
|
||||
fk = faker.NewFaker()
|
||||
tbus = eventbus.New()
|
||||
|
||||
tCtx = Context{}
|
||||
tClient *ent.Client
|
||||
@@ -46,8 +49,8 @@ func bootstrap() {
|
||||
}
|
||||
}
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
client, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1")
|
||||
func MainNoExit(m *testing.M) int {
|
||||
client, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1&_time_format=sqlite")
|
||||
if err != nil {
|
||||
log.Fatalf("failed opening connection to sqlite: %v", err)
|
||||
}
|
||||
@@ -58,9 +61,14 @@ func TestMain(m *testing.M) {
|
||||
}
|
||||
|
||||
tClient = client
|
||||
tRepos = repo.New(tClient, os.TempDir()+"/homebox")
|
||||
tSvc = New(tRepos)
|
||||
defer client.Close()
|
||||
tRepos = repo.New(tClient, tbus, os.TempDir()+"/homebox")
|
||||
|
||||
defaults, _ := currencies.CollectionCurrencies(
|
||||
currencies.CollectDefaults(),
|
||||
)
|
||||
|
||||
tSvc = New(tRepos, WithCurrencies(defaults))
|
||||
defer func() { _ = client.Close() }()
|
||||
|
||||
bootstrap()
|
||||
tCtx = Context{
|
||||
@@ -69,5 +77,9 @@ func TestMain(m *testing.M) {
|
||||
UID: tUser.ID,
|
||||
}
|
||||
|
||||
os.Exit(m.Run())
|
||||
return m.Run()
|
||||
}
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
os.Exit(MainNoExit(m))
|
||||
}
|
||||
|
||||
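The `TestMain`/`MainNoExit` split above exists because `os.Exit` bypasses deferred calls; returning the exit code from a helper lets defers such as `client.Close()` run first. A generic sketch of the pattern:

```go
package example_test

import (
	"os"
	"testing"
)

// mainNoExit returns the exit code instead of calling os.Exit directly,
// so deferred cleanup (like closing the test database client) actually runs.
func mainNoExit(m *testing.M) int {
	cleanup := func() {} // stand-in for client.Close and friends
	defer cleanup()
	return m.Run()
}

func TestMain(m *testing.M) {
	os.Exit(mainNoExit(m))
}
```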
@@ -2,8 +2,8 @@ package reporting
|
||||
|
||||
import (
|
||||
"github.com/gocarina/gocsv"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/types"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
|
||||
)
|
||||
|
||||
// =================================================================================================
|
||||
@@ -20,9 +20,9 @@ type BillOfMaterialsEntry struct {
|
||||
TotalPrice float64 `csv:"Total Price"`
|
||||
}
|
||||
|
||||
// BillOfMaterialsTSV returns a byte slice of the Bill of Materials for a given GID in TSV format
|
||||
// BillOfMaterialsCSV returns a byte slice of the Bill of Materials for a given GID in CSV format
|
||||
// See BillOfMaterialsEntry for the format of the output
|
||||
func BillOfMaterialsTSV(entities []repo.ItemOut) ([]byte, error) {
|
||||
func BillOfMaterialsCSV(entities []repo.ItemOut) ([]byte, error) {
|
||||
bomEntries := make([]BillOfMaterialsEntry, len(entities))
|
||||
for i, entity := range entities {
|
||||
bomEntries[i] = BillOfMaterialsEntry{
|
||||
|
||||
@@ -0,0 +1,91 @@
|
||||
// Package eventbus provides an interface for event bus.
|
||||
package eventbus
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync"
|
||||
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
type Event string
|
||||
|
||||
const (
|
||||
EventLabelMutation Event = "label.mutation"
|
||||
EventLocationMutation Event = "location.mutation"
|
||||
EventItemMutation Event = "item.mutation"
|
||||
)
|
||||
|
||||
type GroupMutationEvent struct {
|
||||
GID uuid.UUID
|
||||
}
|
||||
|
||||
type eventData struct {
|
||||
event Event
|
||||
data any
|
||||
}
|
||||
|
||||
type EventBus struct {
|
||||
started bool
|
||||
ch chan eventData
|
||||
|
||||
mu sync.RWMutex
|
||||
subscribers map[Event][]func(any)
|
||||
}
|
||||
|
||||
func New() *EventBus {
|
||||
return &EventBus{
|
||||
ch: make(chan eventData, 100),
|
||||
subscribers: map[Event][]func(any){
|
||||
EventLabelMutation: {},
|
||||
EventLocationMutation: {},
|
||||
EventItemMutation: {},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (e *EventBus) Run(ctx context.Context) error {
|
||||
if e.started {
|
||||
panic("event bus already started")
|
||||
}
|
||||
|
||||
e.started = true
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return nil
|
||||
case event := <-e.ch:
|
||||
e.mu.RLock()
|
||||
arr, ok := e.subscribers[event.event]
|
||||
e.mu.RUnlock()
|
||||
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, fn := range arr {
|
||||
fn(event.data)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (e *EventBus) Publish(event Event, data any) {
|
||||
e.ch <- eventData{
|
||||
event: event,
|
||||
data: data,
|
||||
}
|
||||
}
|
||||
|
||||
func (e *EventBus) Subscribe(event Event, fn func(any)) {
|
||||
e.mu.Lock()
|
||||
defer e.mu.Unlock()
|
||||
|
||||
arr, ok := e.subscribers[event]
|
||||
if !ok {
|
||||
panic("event not found")
|
||||
}
|
||||
|
||||
e.subscribers[event] = append(arr, fn)
|
||||
}
|
||||
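A minimal sketch of the new event bus in use; the subscriber, publisher, and payload type follow the code above, and the sleep is only there so the example exits after the handler fires.

```go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/google/uuid"
	"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting/eventbus"
)

func main() {
	bus := eventbus.New()

	bus.Subscribe(eventbus.EventItemMutation, func(data any) {
		if evt, ok := data.(eventbus.GroupMutationEvent); ok {
			fmt.Println("item mutated in group", evt.GID)
		}
	})

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	go func() { _ = bus.Run(ctx) }() // Run blocks until the context is cancelled

	bus.Publish(eventbus.EventItemMutation, eventbus.GroupMutationEvent{GID: uuid.New()})
	time.Sleep(100 * time.Millisecond) // crude wait so the handler fires before exit
}
```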
@@ -1,3 +1,4 @@
|
||||
// Package reporting provides a way to import CSV files into the database.
|
||||
package reporting
|
||||
|
||||
import (
|
||||
|
||||
@@ -3,8 +3,8 @@ package reporting
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/types"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
|
||||
)
|
||||
|
||||
type ExportItemFields struct {
|
||||
@@ -12,12 +12,13 @@ type ExportItemFields struct {
|
||||
Value string
|
||||
}
|
||||
|
||||
type ExportTSVRow struct {
|
||||
type ExportCSVRow struct {
|
||||
ImportRef string `csv:"HB.import_ref"`
|
||||
Location LocationString `csv:"HB.location"`
|
||||
LabelStr LabelString `csv:"HB.labels"`
|
||||
AssetID repo.AssetID `csv:"HB.asset_id"`
|
||||
Archived bool `csv:"HB.archived"`
|
||||
URL string `csv:"HB.url"`
|
||||
|
||||
Name string `csv:"HB.name"`
|
||||
Quantity int `csv:"HB.quantity"`
|
||||
@@ -84,7 +85,7 @@ func (csf LocationString) String() string {
|
||||
return strings.Join(csf, " / ")
|
||||
}
|
||||
|
||||
func fromPathSlice(s []repo.LocationPath) LocationString {
|
||||
func fromPathSlice(s []repo.ItemPath) LocationString {
|
||||
v := make(LocationString, len(s))
|
||||
|
||||
for i := range s {
|
||||
|
||||
@@ -10,21 +10,21 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/types"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
|
||||
)
|
||||
|
||||
// IOSheet is the representation of a CSV/TSV sheet that is used for importing/exporting
|
||||
// items from homebox. It is used to read/write the data from/to a CSV/TSV file given
|
||||
// the standard format of the file.
|
||||
//
|
||||
// See ExportTSVRow for the format of the data in the sheet.
|
||||
// See ExportCSVRow for the format of the data in the sheet.
|
||||
type IOSheet struct {
|
||||
headers []string
|
||||
custom []int
|
||||
index map[string]int
|
||||
Rows []ExportTSVRow
|
||||
Rows []ExportCSVRow
|
||||
}
|
||||
|
||||
func (s *IOSheet) indexHeaders() {
|
||||
@@ -70,16 +70,16 @@ func (s *IOSheet) Read(data io.Reader) error {
|
||||
}
|
||||
|
||||
s.headers = sheet[0]
|
||||
s.Rows = make([]ExportTSVRow, len(sheet)-1)
|
||||
s.Rows = make([]ExportCSVRow, len(sheet)-1)
|
||||
|
||||
for i, row := range sheet[1:] {
|
||||
if len(row) != len(s.headers) {
|
||||
return fmt.Errorf("row has %d columns, expected %d", len(row), len(s.headers))
|
||||
}
|
||||
|
||||
rowData := ExportTSVRow{}
|
||||
rowData := ExportCSVRow{}
|
||||
|
||||
st := reflect.TypeOf(ExportTSVRow{})
|
||||
st := reflect.TypeOf(ExportCSVRow{})
|
||||
|
||||
for i := 0; i < st.NumField(); i++ {
|
||||
field := st.Field(i)
|
||||
@@ -152,9 +152,9 @@ func (s *IOSheet) Read(data io.Reader) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Write writes the sheet to a writer.
|
||||
func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.UUID, repos *repo.AllRepos) error {
|
||||
s.Rows = make([]ExportTSVRow, len(items))
|
||||
// ReadItems writes the sheet to a writer.
|
||||
func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, gid uuid.UUID, repos *repo.AllRepos, hbURL string) error {
|
||||
s.Rows = make([]ExportCSVRow, len(items))
|
||||
|
||||
extraHeaders := map[string]struct{}{}
|
||||
|
||||
@@ -162,9 +162,9 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
|
||||
item := items[i]
|
||||
|
||||
// TODO: Support fetching nested locations
|
||||
locId := item.Location.ID
|
||||
locID := item.Location.ID
|
||||
|
||||
locPaths, err := repos.Locations.PathForLoc(context.Background(), GID, locId)
|
||||
locPaths, err := repos.Locations.PathForLoc(context.Background(), gid, locID)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("could not get location path")
|
||||
return err
|
||||
@@ -178,6 +178,8 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
|
||||
labelString[i] = l.Name
|
||||
}
|
||||
|
||||
url := generateItemURL(item, hbURL)
|
||||
|
||||
customFields := make([]ExportItemFields, len(item.Fields))
|
||||
|
||||
for i, f := range item.Fields {
|
||||
@@ -189,7 +191,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
|
||||
}
|
||||
}
|
||||
|
||||
s.Rows[i] = ExportTSVRow{
|
||||
s.Rows[i] = ExportCSVRow{
|
||||
// fill struct
|
||||
Location: locString,
|
||||
LabelStr: labelString,
|
||||
@@ -201,6 +203,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
|
||||
Description: item.Description,
|
||||
Insured: item.Insured,
|
||||
Archived: item.Archived,
|
||||
URL: url,
|
||||
|
||||
PurchasePrice: item.PurchasePrice,
|
||||
PurchaseFrom: item.PurchaseFrom,
|
||||
@@ -219,6 +222,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
|
||||
SoldPrice: item.SoldPrice,
|
||||
SoldNotes: item.SoldNotes,
|
||||
|
||||
Notes: item.Notes,
|
||||
Fields: customFields,
|
||||
}
|
||||
}
|
||||
@@ -232,7 +236,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
|
||||
|
||||
sort.Strings(customHeaders)
|
||||
|
||||
st := reflect.TypeOf(ExportTSVRow{})
|
||||
st := reflect.TypeOf(ExportCSVRow{})
|
||||
|
||||
// Write headers
|
||||
for i := 0; i < st.NumField(); i++ {
|
||||
@@ -252,8 +256,16 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.
|
||||
return nil
|
||||
}
|
||||
|
||||
// Writes the current sheet to a writer in TSV format.
|
||||
func (s *IOSheet) TSV() ([][]string, error) {
|
||||
func generateItemURL(item repo.ItemOut, d string) string {
|
||||
url := ""
|
||||
if item.ID != uuid.Nil {
|
||||
url = fmt.Sprintf("%s/item/%s", d, item.ID.String())
|
||||
}
|
||||
return url
|
||||
}
|
||||
|
||||
// CSV writes the current sheet to a 2d array, for compatibility with TSV/CSV files.
|
||||
func (s *IOSheet) CSV() ([][]string, error) {
|
||||
memcsv := make([][]string, len(s.Rows)+1)
|
||||
|
||||
memcsv[0] = s.headers
|
||||
|
||||
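With the TSV naming replaced by CSV and `ReadItems` now taking the instance URL, an export call looks roughly like the sketch below; the items, group ID, repositories, and URL are assumed inputs.

```go
package export

import (
	"context"

	"github.com/google/uuid"
	"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting"
	"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
)

// exportItems is a sketch of the updated export flow; items, gid and repos
// are assumed to come from the service layer, and the URL is a placeholder.
func exportItems(ctx context.Context, items []repo.ItemOut, gid uuid.UUID, repos *repo.AllRepos) ([][]string, error) {
	sheet := reporting.IOSheet{}

	if err := sheet.ReadItems(ctx, items, gid, repos, "https://homebox.example.com"); err != nil {
		return nil, err
	}

	return sheet.CSV() // 2D slice, one header row plus one row per item
}
```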
@@ -7,8 +7,9 @@ import (
|
||||
|
||||
_ "embed"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -26,13 +27,13 @@ func TestSheet_Read(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
data []byte
|
||||
want []ExportTSVRow
|
||||
want []ExportCSVRow
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "minimal import",
|
||||
data: minimalImportCSV,
|
||||
want: []ExportTSVRow{
|
||||
want: []ExportCSVRow{
|
||||
{Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1"},
|
||||
{Location: LocationString{"loc"}, Name: "Item 2", Quantity: 2, Description: "Description 2"},
|
||||
{Location: LocationString{"loc"}, Name: "Item 3", Quantity: 3, Description: "Description 3"},
|
||||
@@ -41,7 +42,7 @@ func TestSheet_Read(t *testing.T) {
|
||||
{
|
||||
name: "custom field import",
|
||||
data: customFieldImportCSV,
|
||||
want: []ExportTSVRow{
|
||||
want: []ExportCSVRow{
|
||||
{
|
||||
Location: LocationString{"loc"}, Name: "Item 1", Quantity: 1, Description: "Description 1",
|
||||
Fields: []ExportItemFields{
|
||||
@@ -71,7 +72,7 @@ func TestSheet_Read(t *testing.T) {
|
||||
{
|
||||
name: "custom types import",
|
||||
data: customTypesImportCSV,
|
||||
want: []ExportTSVRow{
|
||||
want: []ExportCSVRow{
|
||||
{
|
||||
Name: "Item 1",
|
||||
AssetID: repo.AssetID(1),
|
||||
@@ -103,9 +104,9 @@ func TestSheet_Read(t *testing.T) {
|
||||
|
||||
switch {
|
||||
case tt.wantErr:
|
||||
assert.Error(t, err)
|
||||
require.Error(t, err)
|
||||
default:
|
||||
assert.NoError(t, err)
|
||||
require.NoError(t, err)
|
||||
assert.ElementsMatch(t, tt.want, sheet.Rows)
|
||||
}
|
||||
})
|
||||
|
||||
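The test updates above swap `assert` for `require` where a failure should abort immediately: `require` calls `t.FailNow`, while `assert` records the failure and continues. A tiny illustration:

```go
package example_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestExample(t *testing.T) {
	var err error // stand-in for sheet.Read(...)

	require.NoError(t, err) // aborts the test right here on failure
	assert.Equal(t, 1, 1)   // assert failures are recorded but the test keeps running
}
```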
@@ -2,17 +2,25 @@ package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/containrrr/shoutrrr"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/types"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/types"
|
||||
)
|
||||
|
||||
type Latest struct {
|
||||
Version string `json:"version"`
|
||||
Date string `json:"date"`
|
||||
}
|
||||
type BackgroundService struct {
|
||||
repos *repo.AllRepos
|
||||
latest Latest
|
||||
}
|
||||
|
||||
func (svc *BackgroundService) SendNotifiersToday(ctx context.Context) error {
|
||||
@@ -79,3 +87,52 @@ func (svc *BackgroundService) SendNotifiersToday(ctx context.Context) error {
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (svc *BackgroundService) GetLatestGithubRelease(ctx context.Context) error {
url := "https://api.github.com/repos/sysadminsmedia/homebox/releases/latest"

req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
if err != nil {
return fmt.Errorf("failed to create latest version request: %w", err)
}

req.Header.Set("User-Agent", "Homebox-Version-Checker")

client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return fmt.Errorf("failed to make latest version request: %w", err)
}
defer func() {
err := resp.Body.Close()
if err != nil {
log.Printf("error closing latest version response body: %v", err)
}
}()

if resp.StatusCode != http.StatusOK {
return fmt.Errorf("latest version unexpected status code: %d", resp.StatusCode)
}

// ignoring fields that are not relevant
type Release struct {
ReleaseVersion string `json:"tag_name"`
PublishedAt time.Time `json:"published_at"`
}

var release Release
if err := json.NewDecoder(resp.Body).Decode(&release); err != nil {
return fmt.Errorf("failed to decode latest version response: %w", err)
}

svc.latest = Latest{
Version: release.ReleaseVersion,
Date: release.PublishedAt.String(),
}

return nil
}

func (svc *BackgroundService) GetLatestVersion() (Latest) {
return svc.latest
}
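
A hedged sketch of how a caller might drive this check; BackgroundService, GetLatestGithubRelease, and GetLatestVersion are the methods above, while watchForUpdates, currentVersion, and the one-hour interval are illustrative assumptions (context, time, and zerolog's log package are already imported in this file per the diff):

// watchForUpdates polls the GitHub release check on a fixed interval and logs
// when the reported tag differs from the running version. Sketch only.
func watchForUpdates(ctx context.Context, svc *BackgroundService, currentVersion string) {
	ticker := time.NewTicker(time.Hour)
	defer ticker.Stop()

	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			if err := svc.GetLatestGithubRelease(ctx); err != nil {
				log.Err(err).Msg("latest version check failed")
				continue
			}
			latest := svc.GetLatestVersion()
			if latest.Version != "" && latest.Version != currentVersion {
				log.Info().Str("latest", latest.Version).Msg("a newer Homebox release is available")
			}
		}
	}
}
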
@@ -4,8 +4,8 @@ import (
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/pkgs/hasher"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/pkgs/hasher"
|
||||
)
|
||||
|
||||
type GroupService struct {
|
||||
|
||||
@@ -8,8 +8,8 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/core/services/reporting"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/core/services/reporting"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -32,19 +32,19 @@ func (svc *ItemService) Create(ctx Context, item repo.ItemCreate) (repo.ItemOut,
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
|
||||
item.AssetID = repo.AssetID(highest + 1)
|
||||
item.AssetID = highest + 1
|
||||
}
|
||||
|
||||
return svc.repo.Items.Create(ctx, ctx.GID, item)
|
||||
}
|
||||
|
||||
func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int, error) {
|
||||
items, err := svc.repo.Items.GetAllZeroAssetID(ctx, GID)
|
||||
func (svc *ItemService) EnsureAssetID(ctx context.Context, gid uuid.UUID) (int, error) {
|
||||
items, err := svc.repo.Items.GetAllZeroAssetID(ctx, gid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
highest, err := svc.repo.Items.GetHighestAssetID(ctx, GID)
|
||||
highest, err := svc.repo.Items.GetHighestAssetID(ctx, gid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -53,7 +53,7 @@ func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int,
|
||||
for _, item := range items {
|
||||
highest++
|
||||
|
||||
err = svc.repo.Items.SetAssetID(ctx, GID, item.ID, repo.AssetID(highest))
|
||||
err = svc.repo.Items.SetAssetID(ctx, gid, item.ID, highest)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -64,8 +64,8 @@ func (svc *ItemService) EnsureAssetID(ctx context.Context, GID uuid.UUID) (int,
|
||||
return finished, nil
|
||||
}
|
||||
|
||||
func (svc *ItemService) EnsureImportRef(ctx context.Context, GID uuid.UUID) (int, error) {
|
||||
ids, err := svc.repo.Items.GetAllZeroImportRef(ctx, GID)
|
||||
func (svc *ItemService) EnsureImportRef(ctx context.Context, gid uuid.UUID) (int, error) {
|
||||
ids, err := svc.repo.Items.GetAllZeroImportRef(ctx, gid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -74,7 +74,7 @@ func (svc *ItemService) EnsureImportRef(ctx context.Context, GID uuid.UUID) (int
|
||||
for _, itemID := range ids {
|
||||
ref := uuid.New().String()[0:8]
|
||||
|
||||
err = svc.repo.Items.Patch(ctx, GID, itemID, repo.ItemPatch{ImportRef: &ref})
|
||||
err = svc.repo.Items.Patch(ctx, gid, itemID, repo.ItemPatch{ImportRef: &ref})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
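For orientation, EnsureAssetID and EnsureImportRef are backfill passes: the first assigns the next sequential asset ID to every item whose asset ID is still zero, the second patches an 8-character ref derived from a fresh UUID onto items with no import ref. A hedged sketch of invoking both for a group, assuming zerolog is available at the call site (runGroupMaintenance itself is illustrative, not part of the diff):

// runGroupMaintenance backfills asset IDs and import refs for one group.
// Sketch only: the function name and the log line are assumptions.
func runGroupMaintenance(ctx context.Context, svc *ItemService, gid uuid.UUID) error {
	fixedIDs, err := svc.EnsureAssetID(ctx, gid)
	if err != nil {
		return fmt.Errorf("ensure asset IDs: %w", err)
	}

	fixedRefs, err := svc.EnsureImportRef(ctx, gid)
	if err != nil {
		return fmt.Errorf("ensure import refs: %w", err)
	}

	log.Debug().Int("asset_ids", fixedIDs).Int("import_refs", fixedRefs).Msg("group maintenance complete")
	return nil
}
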
@@ -96,7 +96,7 @@ func serializeLocation[T ~[]string](location T) string {
|
||||
// 1. If the item does not exist, it is created.
|
||||
// 2. If the item has a ImportRef and it exists it is skipped
|
||||
// 3. Locations and Labels are created if they do not exist.
|
||||
func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Reader) (int, error) {
|
||||
func (svc *ItemService) CsvImport(ctx context.Context, gid uuid.UUID, data io.Reader) (int, error) {
|
||||
sheet := reporting.IOSheet{}
|
||||
|
||||
err := sheet.Read(data)
|
||||
@@ -109,7 +109,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
|
||||
|
||||
labelMap := make(map[string]uuid.UUID)
|
||||
{
|
||||
labels, err := svc.repo.Labels.GetAll(ctx, GID)
|
||||
labels, err := svc.repo.Labels.GetAll(ctx, gid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -124,7 +124,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
|
||||
|
||||
locationMap := make(map[string]uuid.UUID)
|
||||
{
|
||||
locations, err := svc.repo.Locations.Tree(ctx, GID, repo.TreeQuery{WithItems: false})
|
||||
locations, err := svc.repo.Locations.Tree(ctx, gid, repo.TreeQuery{WithItems: false})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -153,7 +153,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
|
||||
// Asset ID Pre-Check
|
||||
highestAID := repo.AssetID(-1)
|
||||
if svc.autoIncrementAssetID {
|
||||
highestAID, err = svc.repo.Items.GetHighestAssetID(ctx, GID)
|
||||
highestAID, err = svc.repo.Items.GetHighestAssetID(ctx, gid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -169,7 +169,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
|
||||
// ========================================
|
||||
// Preflight check for existing item
|
||||
if row.ImportRef != "" {
|
||||
exists, err := svc.repo.Items.CheckRef(ctx, GID, row.ImportRef)
|
||||
exists, err := svc.repo.Items.CheckRef(ctx, gid, row.ImportRef)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("error checking for existing item with ref %q: %w", row.ImportRef, err)
|
||||
}
|
||||
@@ -188,7 +188,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
|
||||
|
||||
id, ok := labelMap[label]
|
||||
if !ok {
|
||||
newLabel, err := svc.repo.Labels.Create(ctx, GID, repo.LabelCreate{Name: label})
|
||||
newLabel, err := svc.repo.Labels.Create(ctx, gid, repo.LabelCreate{Name: label})
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -220,7 +220,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
|
||||
parentID = locationMap[parentPath]
|
||||
}
|
||||
|
||||
newLocation, err := svc.repo.Locations.Create(ctx, GID, repo.LocationCreate{
|
||||
newLocation, err := svc.repo.Locations.Create(ctx, gid, repo.LocationCreate{
|
||||
ParentID: parentID,
|
||||
Name: pathElement,
|
||||
})
|
||||
@@ -261,12 +261,12 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
|
||||
LabelIDs: labelIds,
|
||||
}
|
||||
|
||||
item, err = svc.repo.Items.Create(ctx, GID, newItem)
|
||||
item, err = svc.repo.Items.Create(ctx, gid, newItem)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
default:
|
||||
item, err = svc.repo.Items.GetByRef(ctx, GID, row.ImportRef)
|
||||
item, err = svc.repo.Items.GetByRef(ctx, gid, row.ImportRef)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -318,7 +318,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
|
||||
Fields: fields,
|
||||
}
|
||||
|
||||
item, err = svc.repo.Items.UpdateByGroup(ctx, GID, updateItem)
|
||||
item, err = svc.repo.Items.UpdateByGroup(ctx, gid, updateItem)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -329,27 +329,27 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Re
|
||||
return finished, nil
|
||||
}
|
||||
|
||||
func (svc *ItemService) ExportTSV(ctx context.Context, GID uuid.UUID) ([][]string, error) {
items, err := svc.repo.Items.GetAll(ctx, GID)
func (svc *ItemService) ExportCSV(ctx context.Context, gid uuid.UUID, hbURL string) ([][]string, error) {
items, err := svc.repo.Items.GetAll(ctx, gid)
if err != nil {
return nil, err
}

sheet := reporting.IOSheet{}

err = sheet.ReadItems(ctx, items, GID, svc.repo)
err = sheet.ReadItems(ctx, items, gid, svc.repo, hbURL)
if err != nil {
return nil, err
}

return sheet.TSV()
return sheet.CSV()
}

func (svc *ItemService) ExportBillOfMaterialsTSV(ctx context.Context, GID uuid.UUID) ([]byte, error) {
items, err := svc.repo.Items.GetAll(ctx, GID)
func (svc *ItemService) ExportBillOfMaterialsCSV(ctx context.Context, gid uuid.UUID) ([]byte, error) {
items, err := svc.repo.Items.GetAll(ctx, gid)
if err != nil {
return nil, err
}

return reporting.BillOfMaterialsTSV(items)
return reporting.BillOfMaterialsCSV(items)
}

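ExportCSV now returns the sheet as [][]string. A sketch of turning that into a CSV download with the standard library's encoding/csv, assuming a *services.ItemService and a hypothetical groupIDFromContext helper (the handler itself is illustrative, not Homebox's real route):

func handleItemsExport(svc *services.ItemService) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		gid := groupIDFromContext(r.Context()) // assumed helper that yields the caller's group ID
		rows, err := svc.ExportCSV(r.Context(), gid, "https://homebox.example.com")
		if err != nil {
			http.Error(w, "export failed", http.StatusInternalServerError)
			return
		}

		w.Header().Set("Content-Type", "text/csv")
		w.Header().Set("Content-Disposition", `attachment; filename="homebox-items.csv"`)
		// WriteAll writes every row and flushes before returning.
		if err := csv.NewWriter(w).WriteAll(rows); err != nil {
			http.Error(w, "write failed", http.StatusInternalServerError)
		}
	}
}
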
@@ -6,14 +6,14 @@ import (
|
||||
"os"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentId uuid.UUID) (*ent.Document, error) {
|
||||
attachment, err := svc.repo.Attachments.Get(ctx, attachmentId)
|
||||
func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentID uuid.UUID) (*ent.Document, error) {
|
||||
attachment, err := svc.repo.Attachments.Get(ctx, attachmentID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -21,9 +21,9 @@ func (svc *ItemService) AttachmentPath(ctx context.Context, attachmentId uuid.UU
|
||||
return attachment.Edges.Document, nil
|
||||
}
|
||||
|
||||
func (svc *ItemService) AttachmentUpdate(ctx Context, itemId uuid.UUID, data *repo.ItemAttachmentUpdate) (repo.ItemOut, error) {
|
||||
func (svc *ItemService) AttachmentUpdate(ctx Context, itemID uuid.UUID, data *repo.ItemAttachmentUpdate) (repo.ItemOut, error) {
|
||||
// Update Attachment
|
||||
attachment, err := svc.repo.Attachments.Update(ctx, data.ID, attachment.Type(data.Type))
|
||||
attachment, err := svc.repo.Attachments.Update(ctx, data.ID, data)
|
||||
if err != nil {
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
@@ -35,15 +35,15 @@ func (svc *ItemService) AttachmentUpdate(ctx Context, itemId uuid.UUID, data *re
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
|
||||
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
|
||||
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID)
|
||||
}
|
||||
|
||||
// AttachmentAdd adds an attachment to an item by creating an entry in the Documents table and linking it to the Attachment
|
||||
// Table and Items table. The file provided via the reader is stored on the file system based on the provided
|
||||
// relative path during construction of the service.
|
||||
func (svc *ItemService) AttachmentAdd(ctx Context, itemId uuid.UUID, filename string, attachmentType attachment.Type, file io.Reader) (repo.ItemOut, error) {
|
||||
func (svc *ItemService) AttachmentAdd(ctx Context, itemID uuid.UUID, filename string, attachmentType attachment.Type, file io.Reader) (repo.ItemOut, error) {
|
||||
// Get the Item
|
||||
_, err := svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
|
||||
_, err := svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID)
|
||||
if err != nil {
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
@@ -56,29 +56,29 @@ func (svc *ItemService) AttachmentAdd(ctx Context, itemId uuid.UUID, filename st
|
||||
}
|
||||
|
||||
// Create the attachment
|
||||
_, err = svc.repo.Attachments.Create(ctx, itemId, doc.ID, attachmentType)
|
||||
_, err = svc.repo.Attachments.Create(ctx, itemID, doc.ID, attachmentType)
|
||||
if err != nil {
|
||||
log.Err(err).Msg("failed to create attachment")
|
||||
return repo.ItemOut{}, err
|
||||
}
|
||||
|
||||
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemId)
|
||||
return svc.repo.Items.GetOneByGroup(ctx, ctx.GID, itemID)
|
||||
}
|
||||
|
||||
func (svc *ItemService) AttachmentDelete(ctx context.Context, gid, itemId, attachmentId uuid.UUID) error {
|
||||
func (svc *ItemService) AttachmentDelete(ctx context.Context, gid, itemID, attachmentID uuid.UUID) error {
|
||||
// Get the Item
|
||||
_, err := svc.repo.Items.GetOneByGroup(ctx, gid, itemId)
|
||||
_, err := svc.repo.Items.GetOneByGroup(ctx, gid, itemID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
attachment, err := svc.repo.Attachments.Get(ctx, attachmentId)
|
||||
attachment, err := svc.repo.Attachments.Get(ctx, attachmentID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Delete the attachment
|
||||
err = svc.repo.Attachments.Delete(ctx, attachmentId)
|
||||
err = svc.repo.Attachments.Delete(ctx, attachmentID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -7,8 +7,9 @@ import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
func TestItemService_AddAttachment(t *testing.T) {
|
||||
@@ -23,7 +24,7 @@ func TestItemService_AddAttachment(t *testing.T) {
|
||||
Description: "test",
|
||||
Name: "test",
|
||||
})
|
||||
assert.NoError(t, err)
|
||||
require.NoError(t, err)
|
||||
assert.NotNil(t, loc)
|
||||
|
||||
itmC := repo.ItemCreate{
|
||||
@@ -33,11 +34,11 @@ func TestItemService_AddAttachment(t *testing.T) {
|
||||
}
|
||||
|
||||
itm, err := svc.repo.Items.Create(context.Background(), tGroup.ID, itmC)
|
||||
assert.NoError(t, err)
|
||||
require.NoError(t, err)
|
||||
assert.NotNil(t, itm)
|
||||
t.Cleanup(func() {
|
||||
err := svc.repo.Items.Delete(context.Background(), itm.ID)
|
||||
assert.NoError(t, err)
|
||||
require.NoError(t, err)
|
||||
})
|
||||
|
||||
contents := fk.Str(1000)
|
||||
@@ -45,7 +46,7 @@ func TestItemService_AddAttachment(t *testing.T) {
|
||||
|
||||
// Setup
|
||||
afterAttachment, err := svc.AttachmentAdd(tCtx, itm.ID, "testfile.txt", "attachment", reader)
|
||||
assert.NoError(t, err)
|
||||
require.NoError(t, err)
|
||||
assert.NotNil(t, afterAttachment)
|
||||
|
||||
// Check that the file exists
|
||||
@@ -56,6 +57,6 @@ func TestItemService_AddAttachment(t *testing.T) {
|
||||
|
||||
// Check that the file contents are correct
|
||||
bts, err := os.ReadFile(storedPath)
|
||||
assert.NoError(t, err)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, contents, string(bts))
|
||||
}
|
||||
|
||||
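The setup assertions in this test now use require instead of assert. In testify, require.NoError fails the test and returns immediately, while assert.NoError records the failure and lets the test continue, so require is the safer choice for preconditions whose failure would make later steps panic. A tiny illustrative sketch (setupItem is a hypothetical helper, not part of this diff):

func TestSketch(t *testing.T) {
	itm, err := setupItem(t)     // hypothetical setup helper
	require.NoError(t, err)      // abort now; itm is not safe to use on error
	require.NotNil(t, itm)

	assert.Equal(t, 1, itm.Quantity) // result checks can keep collecting failures
}
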
@@ -6,17 +6,17 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/hay-kot/homebox/backend/pkgs/hasher"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/pkgs/hasher"
|
||||
)
|
||||
|
||||
var (
|
||||
oneWeek = time.Hour * 24 * 7
|
||||
ErrorInvalidLogin = errors.New("invalid username or password")
|
||||
ErrorInvalidToken = errors.New("invalid token")
|
||||
ErrorTokenIdMismatch = errors.New("token id mismatch")
|
||||
ErrorTokenIDMismatch = errors.New("token id mismatch")
|
||||
)
|
||||
|
||||
type UserService struct {
|
||||
@@ -92,9 +92,11 @@ func (svc *UserService) RegisterUser(ctx context.Context, data UserRegistration)
|
||||
if err != nil {
|
||||
return repo.UserOut{}, err
|
||||
}
|
||||
log.Debug().Msg("user created")
|
||||
|
||||
// Create the default labels and locations for the group.
|
||||
if creatingGroup {
|
||||
log.Debug().Msg("creating default labels")
|
||||
for _, label := range defaultLabels() {
|
||||
_, err := svc.repos.Labels.Create(ctx, usr.GroupID, label)
|
||||
if err != nil {
|
||||
@@ -102,6 +104,7 @@ func (svc *UserService) RegisterUser(ctx context.Context, data UserRegistration)
|
||||
}
|
||||
}
|
||||
|
||||
log.Debug().Msg("creating default locations")
|
||||
for _, location := range defaultLocations() {
|
||||
_, err := svc.repos.Locations.Create(ctx, usr.GroupID, location)
|
||||
if err != nil {
|
||||
@@ -112,6 +115,7 @@ func (svc *UserService) RegisterUser(ctx context.Context, data UserRegistration)
|
||||
|
||||
// Decrement the invitation token if it was used.
|
||||
if token.ID != uuid.Nil {
|
||||
log.Debug().Msg("decrementing invitation token")
|
||||
err = svc.repos.Groups.InvitationUpdate(ctx, token.ID, token.Uses-1)
|
||||
if err != nil {
|
||||
log.Err(err).Msg("Failed to update invitation token")
|
||||
@@ -128,19 +132,19 @@ func (svc *UserService) GetSelf(ctx context.Context, requestToken string) (repo.
|
||||
return svc.repos.AuthTokens.GetUserFromToken(ctx, hash)
|
||||
}
|
||||
|
||||
func (svc *UserService) UpdateSelf(ctx context.Context, ID uuid.UUID, data repo.UserUpdate) (repo.UserOut, error) {
|
||||
err := svc.repos.Users.Update(ctx, ID, data)
|
||||
func (svc *UserService) UpdateSelf(ctx context.Context, id uuid.UUID, data repo.UserUpdate) (repo.UserOut, error) {
|
||||
err := svc.repos.Users.Update(ctx, id, data)
|
||||
if err != nil {
|
||||
return repo.UserOut{}, err
|
||||
}
|
||||
|
||||
return svc.repos.Users.GetOneId(ctx, ID)
|
||||
return svc.repos.Users.GetOneID(ctx, id)
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// User Authentication
|
||||
|
||||
func (svc *UserService) createSessionToken(ctx context.Context, userId uuid.UUID, extendedSession bool) (UserAuthTokenDetail, error) {
|
||||
func (svc *UserService) createSessionToken(ctx context.Context, userID uuid.UUID, extendedSession bool) (UserAuthTokenDetail, error) {
|
||||
attachmentToken := hasher.GenerateToken()
|
||||
|
||||
expiresAt := time.Now().Add(oneWeek)
|
||||
@@ -149,7 +153,7 @@ func (svc *UserService) createSessionToken(ctx context.Context, userId uuid.UUID
|
||||
}
|
||||
|
||||
attachmentData := repo.UserAuthTokenCreate{
|
||||
UserID: userId,
|
||||
UserID: userID,
|
||||
TokenHash: attachmentToken.Hash,
|
||||
ExpiresAt: expiresAt,
|
||||
}
|
||||
@@ -161,7 +165,7 @@ func (svc *UserService) createSessionToken(ctx context.Context, userId uuid.UUID
|
||||
|
||||
userToken := hasher.GenerateToken()
|
||||
data := repo.UserAuthTokenCreate{
|
||||
UserID: userId,
|
||||
UserID: userID,
|
||||
TokenHash: userToken.Hash,
|
||||
ExpiresAt: expiresAt,
|
||||
}
|
||||
@@ -213,12 +217,12 @@ func (svc *UserService) RenewToken(ctx context.Context, token string) (UserAuthT
|
||||
// DeleteSelf deletes the user that is currently logged based of the provided UUID
|
||||
// There is _NO_ protection against deleting the wrong user, as such this should only
|
||||
// be used when the identify of the user has been confirmed.
|
||||
func (svc *UserService) DeleteSelf(ctx context.Context, ID uuid.UUID) error {
|
||||
return svc.repos.Users.Delete(ctx, ID)
|
||||
func (svc *UserService) DeleteSelf(ctx context.Context, id uuid.UUID) error {
|
||||
return svc.repos.Users.Delete(ctx, id)
|
||||
}
|
||||
|
||||
func (svc *UserService) ChangePassword(ctx Context, current string, new string) (ok bool) {
|
||||
usr, err := svc.repos.Users.GetOneId(ctx, ctx.UID)
|
||||
usr, err := svc.repos.Users.GetOneID(ctx, ctx.UID)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"github.com/hay-kot/homebox/backend/internal/data/repo"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/repo"
|
||||
)
|
||||
|
||||
func defaultLocations() []repo.LocationCreate {
|
||||
|
||||
@@ -7,11 +7,12 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"entgo.io/ent"
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/document"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
|
||||
)
|
||||
|
||||
// Attachment is the model entity for the Attachment schema.
|
||||
@@ -25,11 +26,14 @@ type Attachment struct {
|
||||
UpdatedAt time.Time `json:"updated_at,omitempty"`
|
||||
// Type holds the value of the "type" field.
|
||||
Type attachment.Type `json:"type,omitempty"`
|
||||
// Primary holds the value of the "primary" field.
|
||||
Primary bool `json:"primary,omitempty"`
|
||||
// Edges holds the relations/edges for other nodes in the graph.
|
||||
// The values are being populated by the AttachmentQuery when eager-loading is set.
|
||||
Edges AttachmentEdges `json:"edges"`
|
||||
document_attachments *uuid.UUID
|
||||
item_attachments *uuid.UUID
|
||||
selectValues sql.SelectValues
|
||||
}
|
||||
|
||||
// AttachmentEdges holds the relations/edges for other nodes in the graph.
|
||||
@@ -46,12 +50,10 @@ type AttachmentEdges struct {
|
||||
// ItemOrErr returns the Item value or an error if the edge
|
||||
// was not loaded in eager-loading, or loaded but was not found.
|
||||
func (e AttachmentEdges) ItemOrErr() (*Item, error) {
|
||||
if e.loadedTypes[0] {
|
||||
if e.Item == nil {
|
||||
// Edge was loaded but was not found.
|
||||
return nil, &NotFoundError{label: item.Label}
|
||||
}
|
||||
if e.Item != nil {
|
||||
return e.Item, nil
|
||||
} else if e.loadedTypes[0] {
|
||||
return nil, &NotFoundError{label: item.Label}
|
||||
}
|
||||
return nil, &NotLoadedError{edge: "item"}
|
||||
}
|
||||
@@ -59,12 +61,10 @@ func (e AttachmentEdges) ItemOrErr() (*Item, error) {
|
||||
// DocumentOrErr returns the Document value or an error if the edge
|
||||
// was not loaded in eager-loading, or loaded but was not found.
|
||||
func (e AttachmentEdges) DocumentOrErr() (*Document, error) {
|
||||
if e.loadedTypes[1] {
|
||||
if e.Document == nil {
|
||||
// Edge was loaded but was not found.
|
||||
return nil, &NotFoundError{label: document.Label}
|
||||
}
|
||||
if e.Document != nil {
|
||||
return e.Document, nil
|
||||
} else if e.loadedTypes[1] {
|
||||
return nil, &NotFoundError{label: document.Label}
|
||||
}
|
||||
return nil, &NotLoadedError{edge: "document"}
|
||||
}
|
||||
@@ -74,6 +74,8 @@ func (*Attachment) scanValues(columns []string) ([]any, error) {
|
||||
values := make([]any, len(columns))
|
||||
for i := range columns {
|
||||
switch columns[i] {
|
||||
case attachment.FieldPrimary:
|
||||
values[i] = new(sql.NullBool)
|
||||
case attachment.FieldType:
|
||||
values[i] = new(sql.NullString)
|
||||
case attachment.FieldCreatedAt, attachment.FieldUpdatedAt:
|
||||
@@ -85,7 +87,7 @@ func (*Attachment) scanValues(columns []string) ([]any, error) {
|
||||
case attachment.ForeignKeys[1]: // item_attachments
|
||||
values[i] = &sql.NullScanner{S: new(uuid.UUID)}
|
||||
default:
|
||||
return nil, fmt.Errorf("unexpected column %q for type Attachment", columns[i])
|
||||
values[i] = new(sql.UnknownType)
|
||||
}
|
||||
}
|
||||
return values, nil
|
||||
@@ -123,6 +125,12 @@ func (a *Attachment) assignValues(columns []string, values []any) error {
|
||||
} else if value.Valid {
|
||||
a.Type = attachment.Type(value.String)
|
||||
}
|
||||
case attachment.FieldPrimary:
|
||||
if value, ok := values[i].(*sql.NullBool); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field primary", values[i])
|
||||
} else if value.Valid {
|
||||
a.Primary = value.Bool
|
||||
}
|
||||
case attachment.ForeignKeys[0]:
|
||||
if value, ok := values[i].(*sql.NullScanner); !ok {
|
||||
return fmt.Errorf("unexpected type %T for field document_attachments", values[i])
|
||||
@@ -137,11 +145,19 @@ func (a *Attachment) assignValues(columns []string, values []any) error {
|
||||
a.item_attachments = new(uuid.UUID)
|
||||
*a.item_attachments = *value.S.(*uuid.UUID)
|
||||
}
|
||||
default:
|
||||
a.selectValues.Set(columns[i], values[i])
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Value returns the ent.Value that was dynamically selected and assigned to the Attachment.
|
||||
// This includes values selected through modifiers, order, etc.
|
||||
func (a *Attachment) Value(name string) (ent.Value, error) {
|
||||
return a.selectValues.Get(name)
|
||||
}
|
||||
|
||||
// QueryItem queries the "item" edge of the Attachment entity.
|
||||
func (a *Attachment) QueryItem() *ItemQuery {
|
||||
return NewAttachmentClient(a.config).QueryItem(a)
|
||||
@@ -183,6 +199,9 @@ func (a *Attachment) String() string {
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("type=")
|
||||
builder.WriteString(fmt.Sprintf("%v", a.Type))
|
||||
builder.WriteString(", ")
|
||||
builder.WriteString("primary=")
|
||||
builder.WriteString(fmt.Sprintf("%v", a.Primary))
|
||||
builder.WriteByte(')')
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
@@ -6,6 +6,8 @@ import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
@@ -20,6 +22,8 @@ const (
|
||||
FieldUpdatedAt = "updated_at"
|
||||
// FieldType holds the string denoting the type field in the database.
|
||||
FieldType = "type"
|
||||
// FieldPrimary holds the string denoting the primary field in the database.
|
||||
FieldPrimary = "primary"
|
||||
// EdgeItem holds the string denoting the item edge name in mutations.
|
||||
EdgeItem = "item"
|
||||
// EdgeDocument holds the string denoting the document edge name in mutations.
|
||||
@@ -48,6 +52,7 @@ var Columns = []string{
|
||||
FieldCreatedAt,
|
||||
FieldUpdatedAt,
|
||||
FieldType,
|
||||
FieldPrimary,
|
||||
}
|
||||
|
||||
// ForeignKeys holds the SQL foreign-keys that are owned by the "attachments"
|
||||
@@ -79,6 +84,8 @@ var (
|
||||
DefaultUpdatedAt func() time.Time
|
||||
// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
|
||||
UpdateDefaultUpdatedAt func() time.Time
|
||||
// DefaultPrimary holds the default value on creation for the "primary" field.
|
||||
DefaultPrimary bool
|
||||
// DefaultID holds the default value on creation for the "id" field.
|
||||
DefaultID func() uuid.UUID
|
||||
)
|
||||
@@ -111,3 +118,59 @@ func TypeValidator(_type Type) error {
|
||||
return fmt.Errorf("attachment: invalid enum value for type field: %q", _type)
|
||||
}
|
||||
}
|
||||
|
||||
// OrderOption defines the ordering options for the Attachment queries.
|
||||
type OrderOption func(*sql.Selector)
|
||||
|
||||
// ByID orders the results by the id field.
|
||||
func ByID(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldID, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByCreatedAt orders the results by the created_at field.
|
||||
func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldCreatedAt, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByUpdatedAt orders the results by the updated_at field.
|
||||
func ByUpdatedAt(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldUpdatedAt, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByType orders the results by the type field.
|
||||
func ByType(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldType, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByPrimary orders the results by the primary field.
|
||||
func ByPrimary(opts ...sql.OrderTermOption) OrderOption {
|
||||
return sql.OrderByField(FieldPrimary, opts...).ToFunc()
|
||||
}
|
||||
|
||||
// ByItemField orders the results by item field.
|
||||
func ByItemField(field string, opts ...sql.OrderTermOption) OrderOption {
|
||||
return func(s *sql.Selector) {
|
||||
sqlgraph.OrderByNeighborTerms(s, newItemStep(), sql.OrderByField(field, opts...))
|
||||
}
|
||||
}
|
||||
|
||||
// ByDocumentField orders the results by document field.
|
||||
func ByDocumentField(field string, opts ...sql.OrderTermOption) OrderOption {
|
||||
return func(s *sql.Selector) {
|
||||
sqlgraph.OrderByNeighborTerms(s, newDocumentStep(), sql.OrderByField(field, opts...))
|
||||
}
|
||||
}
|
||||
func newItemStep() *sqlgraph.Step {
|
||||
return sqlgraph.NewStep(
|
||||
sqlgraph.From(Table, FieldID),
|
||||
sqlgraph.To(ItemInverseTable, FieldID),
|
||||
sqlgraph.Edge(sqlgraph.M2O, true, ItemTable, ItemColumn),
|
||||
)
|
||||
}
|
||||
func newDocumentStep() *sqlgraph.Step {
|
||||
return sqlgraph.NewStep(
|
||||
sqlgraph.From(Table, FieldID),
|
||||
sqlgraph.To(DocumentInverseTable, FieldID),
|
||||
sqlgraph.Edge(sqlgraph.M2O, true, DocumentTable, DocumentColumn),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -8,7 +8,7 @@ import (
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
|
||||
)
|
||||
|
||||
// ID filters vertices based on their ID field.
|
||||
@@ -66,6 +66,11 @@ func UpdatedAt(v time.Time) predicate.Attachment {
|
||||
return predicate.Attachment(sql.FieldEQ(FieldUpdatedAt, v))
|
||||
}
|
||||
|
||||
// Primary applies equality check predicate on the "primary" field. It's identical to PrimaryEQ.
|
||||
func Primary(v bool) predicate.Attachment {
|
||||
return predicate.Attachment(sql.FieldEQ(FieldPrimary, v))
|
||||
}
|
||||
|
||||
// CreatedAtEQ applies the EQ predicate on the "created_at" field.
|
||||
func CreatedAtEQ(v time.Time) predicate.Attachment {
|
||||
return predicate.Attachment(sql.FieldEQ(FieldCreatedAt, v))
|
||||
@@ -166,6 +171,16 @@ func TypeNotIn(vs ...Type) predicate.Attachment {
|
||||
return predicate.Attachment(sql.FieldNotIn(FieldType, vs...))
|
||||
}
|
||||
|
||||
// PrimaryEQ applies the EQ predicate on the "primary" field.
|
||||
func PrimaryEQ(v bool) predicate.Attachment {
|
||||
return predicate.Attachment(sql.FieldEQ(FieldPrimary, v))
|
||||
}
|
||||
|
||||
// PrimaryNEQ applies the NEQ predicate on the "primary" field.
|
||||
func PrimaryNEQ(v bool) predicate.Attachment {
|
||||
return predicate.Attachment(sql.FieldNEQ(FieldPrimary, v))
|
||||
}
|
||||
|
||||
// HasItem applies the HasEdge predicate on the "item" edge.
|
||||
func HasItem() predicate.Attachment {
|
||||
return predicate.Attachment(func(s *sql.Selector) {
|
||||
@@ -180,11 +195,7 @@ func HasItem() predicate.Attachment {
|
||||
// HasItemWith applies the HasEdge predicate on the "item" edge with a given conditions (other predicates).
|
||||
func HasItemWith(preds ...predicate.Item) predicate.Attachment {
|
||||
return predicate.Attachment(func(s *sql.Selector) {
|
||||
step := sqlgraph.NewStep(
|
||||
sqlgraph.From(Table, FieldID),
|
||||
sqlgraph.To(ItemInverseTable, FieldID),
|
||||
sqlgraph.Edge(sqlgraph.M2O, true, ItemTable, ItemColumn),
|
||||
)
|
||||
step := newItemStep()
|
||||
sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
|
||||
for _, p := range preds {
|
||||
p(s)
|
||||
@@ -207,11 +218,7 @@ func HasDocument() predicate.Attachment {
|
||||
// HasDocumentWith applies the HasEdge predicate on the "document" edge with a given conditions (other predicates).
|
||||
func HasDocumentWith(preds ...predicate.Document) predicate.Attachment {
|
||||
return predicate.Attachment(func(s *sql.Selector) {
|
||||
step := sqlgraph.NewStep(
|
||||
sqlgraph.From(Table, FieldID),
|
||||
sqlgraph.To(DocumentInverseTable, FieldID),
|
||||
sqlgraph.Edge(sqlgraph.M2O, true, DocumentTable, DocumentColumn),
|
||||
)
|
||||
step := newDocumentStep()
|
||||
sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
|
||||
for _, p := range preds {
|
||||
p(s)
|
||||
@@ -222,32 +229,15 @@ func HasDocumentWith(preds ...predicate.Document) predicate.Attachment {
|
||||
|
||||
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Attachment) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
return predicate.Attachment(sql.AndPredicates(predicates...))
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Attachment) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
return predicate.Attachment(sql.OrPredicates(predicates...))
}

// Not applies the not operator on the given predicate.
func Not(p predicate.Attachment) predicate.Attachment {
return predicate.Attachment(func(s *sql.Selector) {
p(s.Not())
})
return predicate.Attachment(sql.NotPredicates(p))
}
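
The regenerated And, Or, and Not helpers now delegate to ent's sql.AndPredicates, sql.OrPredicates, and sql.NotPredicates instead of cloning selectors by hand; call sites compose them exactly as before. A sketch under the assumption of a generated *ent.Client named client and the attachment package from this diff:

// primaryOrOrphaned returns primary attachments that still have an item edge,
// plus anything whose document edge is missing. Composition goes through the
// regenerated And/Or/Not helpers shown above; client is an assumed *ent.Client.
func primaryOrOrphaned(ctx context.Context, client *ent.Client) ([]*ent.Attachment, error) {
	return client.Attachment.Query().
		Where(
			attachment.Or(
				attachment.And(attachment.Primary(true), attachment.HasItem()),
				attachment.Not(attachment.HasDocument()),
			),
		).
		All(ctx)
}
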
|
||||
@@ -11,9 +11,9 @@ import (
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/document"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
|
||||
)
|
||||
|
||||
// AttachmentCreate is the builder for creating a Attachment entity.
|
||||
@@ -65,6 +65,20 @@ func (ac *AttachmentCreate) SetNillableType(a *attachment.Type) *AttachmentCreat
|
||||
return ac
|
||||
}
|
||||
|
||||
// SetPrimary sets the "primary" field.
|
||||
func (ac *AttachmentCreate) SetPrimary(b bool) *AttachmentCreate {
|
||||
ac.mutation.SetPrimary(b)
|
||||
return ac
|
||||
}
|
||||
|
||||
// SetNillablePrimary sets the "primary" field if the given value is not nil.
|
||||
func (ac *AttachmentCreate) SetNillablePrimary(b *bool) *AttachmentCreate {
|
||||
if b != nil {
|
||||
ac.SetPrimary(*b)
|
||||
}
|
||||
return ac
|
||||
}
|
||||
|
||||
// SetID sets the "id" field.
|
||||
func (ac *AttachmentCreate) SetID(u uuid.UUID) *AttachmentCreate {
|
||||
ac.mutation.SetID(u)
|
||||
@@ -109,7 +123,7 @@ func (ac *AttachmentCreate) Mutation() *AttachmentMutation {
|
||||
// Save creates the Attachment in the database.
|
||||
func (ac *AttachmentCreate) Save(ctx context.Context) (*Attachment, error) {
|
||||
ac.defaults()
|
||||
return withHooks[*Attachment, AttachmentMutation](ctx, ac.sqlSave, ac.mutation, ac.hooks)
|
||||
return withHooks(ctx, ac.sqlSave, ac.mutation, ac.hooks)
|
||||
}
|
||||
|
||||
// SaveX calls Save and panics if Save returns an error.
|
||||
@@ -148,6 +162,10 @@ func (ac *AttachmentCreate) defaults() {
|
||||
v := attachment.DefaultType
|
||||
ac.mutation.SetType(v)
|
||||
}
|
||||
if _, ok := ac.mutation.Primary(); !ok {
|
||||
v := attachment.DefaultPrimary
|
||||
ac.mutation.SetPrimary(v)
|
||||
}
|
||||
if _, ok := ac.mutation.ID(); !ok {
|
||||
v := attachment.DefaultID()
|
||||
ac.mutation.SetID(v)
|
||||
@@ -170,10 +188,13 @@ func (ac *AttachmentCreate) check() error {
|
||||
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
|
||||
}
|
||||
}
|
||||
if _, ok := ac.mutation.ItemID(); !ok {
|
||||
if _, ok := ac.mutation.Primary(); !ok {
|
||||
return &ValidationError{Name: "primary", err: errors.New(`ent: missing required field "Attachment.primary"`)}
|
||||
}
|
||||
if len(ac.mutation.ItemIDs()) == 0 {
|
||||
return &ValidationError{Name: "item", err: errors.New(`ent: missing required edge "Attachment.item"`)}
|
||||
}
|
||||
if _, ok := ac.mutation.DocumentID(); !ok {
|
||||
if len(ac.mutation.DocumentIDs()) == 0 {
|
||||
return &ValidationError{Name: "document", err: errors.New(`ent: missing required edge "Attachment.document"`)}
|
||||
}
|
||||
return nil
|
||||
@@ -223,6 +244,10 @@ func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
|
||||
_spec.SetField(attachment.FieldType, field.TypeEnum, value)
|
||||
_node.Type = value
|
||||
}
|
||||
if value, ok := ac.mutation.Primary(); ok {
|
||||
_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
|
||||
_node.Primary = value
|
||||
}
|
||||
if nodes := ac.mutation.ItemIDs(); len(nodes) > 0 {
|
||||
edge := &sqlgraph.EdgeSpec{
|
||||
Rel: sqlgraph.M2O,
|
||||
@@ -263,11 +288,15 @@ func (ac *AttachmentCreate) createSpec() (*Attachment, *sqlgraph.CreateSpec) {
|
||||
// AttachmentCreateBulk is the builder for creating many Attachment entities in bulk.
|
||||
type AttachmentCreateBulk struct {
|
||||
config
|
||||
err error
|
||||
builders []*AttachmentCreate
|
||||
}
|
||||
|
||||
// Save creates the Attachment entities in the database.
|
||||
func (acb *AttachmentCreateBulk) Save(ctx context.Context) ([]*Attachment, error) {
|
||||
if acb.err != nil {
|
||||
return nil, acb.err
|
||||
}
|
||||
specs := make([]*sqlgraph.CreateSpec, len(acb.builders))
|
||||
nodes := make([]*Attachment, len(acb.builders))
|
||||
mutators := make([]Mutator, len(acb.builders))
|
||||
@@ -284,8 +313,8 @@ func (acb *AttachmentCreateBulk) Save(ctx context.Context) ([]*Attachment, error
|
||||
return nil, err
|
||||
}
|
||||
builder.mutation = mutation
|
||||
nodes[i], specs[i] = builder.createSpec()
|
||||
var err error
|
||||
nodes[i], specs[i] = builder.createSpec()
|
||||
if i < len(mutators)-1 {
|
||||
_, err = mutators[i+1].Mutate(root, acb.builders[i+1].mutation)
|
||||
} else {
|
||||
|
||||
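
With the new primary field, the create builder gains SetPrimary and SetNillablePrimary, and defaults() falls back to attachment.DefaultPrimary when the flag is unset. A sketch of using the builder, assuming a generated *ent.Client; TypePhoto, SetItemID, and SetDocumentID follow ent's usual codegen but are not shown in this diff, so treat them as assumptions:

// addPrimaryPhoto creates an attachment flagged as primary for an existing
// item/document pair. Sketch only.
func addPrimaryPhoto(ctx context.Context, client *ent.Client, itemID, docID uuid.UUID) (*ent.Attachment, error) {
	return client.Attachment.Create().
		SetType(attachment.TypePhoto). // assumed enum constant; any valid attachment.Type works
		SetPrimary(true).              // the new field from this change
		SetItemID(itemID).             // edge setters assumed from ent's usual generated API
		SetDocumentID(docID).
		Save(ctx)
}
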
@@ -8,8 +8,8 @@ import (
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
|
||||
)
|
||||
|
||||
// AttachmentDelete is the builder for deleting a Attachment entity.
|
||||
@@ -27,7 +27,7 @@ func (ad *AttachmentDelete) Where(ps ...predicate.Attachment) *AttachmentDelete
|
||||
|
||||
// Exec executes the deletion query and returns how many vertices were deleted.
|
||||
func (ad *AttachmentDelete) Exec(ctx context.Context) (int, error) {
|
||||
return withHooks[int, AttachmentMutation](ctx, ad.sqlExec, ad.mutation, ad.hooks)
|
||||
return withHooks(ctx, ad.sqlExec, ad.mutation, ad.hooks)
|
||||
}
|
||||
|
||||
// ExecX is like Exec, but panics if an error occurs.
|
||||
|
||||
@@ -7,21 +7,22 @@ import (
|
||||
"fmt"
|
||||
"math"
|
||||
|
||||
"entgo.io/ent"
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/document"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
|
||||
)
|
||||
|
||||
// AttachmentQuery is the builder for querying Attachment entities.
|
||||
type AttachmentQuery struct {
|
||||
config
|
||||
ctx *QueryContext
|
||||
order []OrderFunc
|
||||
order []attachment.OrderOption
|
||||
inters []Interceptor
|
||||
predicates []predicate.Attachment
|
||||
withItem *ItemQuery
|
||||
@@ -58,7 +59,7 @@ func (aq *AttachmentQuery) Unique(unique bool) *AttachmentQuery {
|
||||
}
|
||||
|
||||
// Order specifies how the records should be ordered.
|
||||
func (aq *AttachmentQuery) Order(o ...OrderFunc) *AttachmentQuery {
|
||||
func (aq *AttachmentQuery) Order(o ...attachment.OrderOption) *AttachmentQuery {
|
||||
aq.order = append(aq.order, o...)
|
||||
return aq
|
||||
}
|
||||
@@ -110,7 +111,7 @@ func (aq *AttachmentQuery) QueryDocument() *DocumentQuery {
|
||||
// First returns the first Attachment entity from the query.
|
||||
// Returns a *NotFoundError when no Attachment was found.
|
||||
func (aq *AttachmentQuery) First(ctx context.Context) (*Attachment, error) {
|
||||
nodes, err := aq.Limit(1).All(setContextOp(ctx, aq.ctx, "First"))
|
||||
nodes, err := aq.Limit(1).All(setContextOp(ctx, aq.ctx, ent.OpQueryFirst))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -133,7 +134,7 @@ func (aq *AttachmentQuery) FirstX(ctx context.Context) *Attachment {
|
||||
// Returns a *NotFoundError when no Attachment ID was found.
|
||||
func (aq *AttachmentQuery) FirstID(ctx context.Context) (id uuid.UUID, err error) {
|
||||
var ids []uuid.UUID
|
||||
if ids, err = aq.Limit(1).IDs(setContextOp(ctx, aq.ctx, "FirstID")); err != nil {
|
||||
if ids, err = aq.Limit(1).IDs(setContextOp(ctx, aq.ctx, ent.OpQueryFirstID)); err != nil {
|
||||
return
|
||||
}
|
||||
if len(ids) == 0 {
|
||||
@@ -156,7 +157,7 @@ func (aq *AttachmentQuery) FirstIDX(ctx context.Context) uuid.UUID {
|
||||
// Returns a *NotSingularError when more than one Attachment entity is found.
|
||||
// Returns a *NotFoundError when no Attachment entities are found.
|
||||
func (aq *AttachmentQuery) Only(ctx context.Context) (*Attachment, error) {
|
||||
nodes, err := aq.Limit(2).All(setContextOp(ctx, aq.ctx, "Only"))
|
||||
nodes, err := aq.Limit(2).All(setContextOp(ctx, aq.ctx, ent.OpQueryOnly))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -184,7 +185,7 @@ func (aq *AttachmentQuery) OnlyX(ctx context.Context) *Attachment {
|
||||
// Returns a *NotFoundError when no entities are found.
|
||||
func (aq *AttachmentQuery) OnlyID(ctx context.Context) (id uuid.UUID, err error) {
|
||||
var ids []uuid.UUID
|
||||
if ids, err = aq.Limit(2).IDs(setContextOp(ctx, aq.ctx, "OnlyID")); err != nil {
|
||||
if ids, err = aq.Limit(2).IDs(setContextOp(ctx, aq.ctx, ent.OpQueryOnlyID)); err != nil {
|
||||
return
|
||||
}
|
||||
switch len(ids) {
|
||||
@@ -209,7 +210,7 @@ func (aq *AttachmentQuery) OnlyIDX(ctx context.Context) uuid.UUID {
|
||||
|
||||
// All executes the query and returns a list of Attachments.
|
||||
func (aq *AttachmentQuery) All(ctx context.Context) ([]*Attachment, error) {
|
||||
ctx = setContextOp(ctx, aq.ctx, "All")
|
||||
ctx = setContextOp(ctx, aq.ctx, ent.OpQueryAll)
|
||||
if err := aq.prepareQuery(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -231,7 +232,7 @@ func (aq *AttachmentQuery) IDs(ctx context.Context) (ids []uuid.UUID, err error)
|
||||
if aq.ctx.Unique == nil && aq.path != nil {
|
||||
aq.Unique(true)
|
||||
}
|
||||
ctx = setContextOp(ctx, aq.ctx, "IDs")
|
||||
ctx = setContextOp(ctx, aq.ctx, ent.OpQueryIDs)
|
||||
if err = aq.Select(attachment.FieldID).Scan(ctx, &ids); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -249,7 +250,7 @@ func (aq *AttachmentQuery) IDsX(ctx context.Context) []uuid.UUID {
|
||||
|
||||
// Count returns the count of the given query.
|
||||
func (aq *AttachmentQuery) Count(ctx context.Context) (int, error) {
|
||||
ctx = setContextOp(ctx, aq.ctx, "Count")
|
||||
ctx = setContextOp(ctx, aq.ctx, ent.OpQueryCount)
|
||||
if err := aq.prepareQuery(ctx); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
@@ -267,7 +268,7 @@ func (aq *AttachmentQuery) CountX(ctx context.Context) int {
|
||||
|
||||
// Exist returns true if the query has elements in the graph.
|
||||
func (aq *AttachmentQuery) Exist(ctx context.Context) (bool, error) {
|
||||
ctx = setContextOp(ctx, aq.ctx, "Exist")
|
||||
ctx = setContextOp(ctx, aq.ctx, ent.OpQueryExist)
|
||||
switch _, err := aq.FirstID(ctx); {
|
||||
case IsNotFound(err):
|
||||
return false, nil
|
||||
@@ -296,7 +297,7 @@ func (aq *AttachmentQuery) Clone() *AttachmentQuery {
|
||||
return &AttachmentQuery{
|
||||
config: aq.config,
|
||||
ctx: aq.ctx.Clone(),
|
||||
order: append([]OrderFunc{}, aq.order...),
|
||||
order: append([]attachment.OrderOption{}, aq.order...),
|
||||
inters: append([]Interceptor{}, aq.inters...),
|
||||
predicates: append([]predicate.Attachment{}, aq.predicates...),
|
||||
withItem: aq.withItem.Clone(),
|
||||
@@ -612,7 +613,7 @@ func (agb *AttachmentGroupBy) Aggregate(fns ...AggregateFunc) *AttachmentGroupBy
|
||||
|
||||
// Scan applies the selector query and scans the result into the given value.
|
||||
func (agb *AttachmentGroupBy) Scan(ctx context.Context, v any) error {
|
||||
ctx = setContextOp(ctx, agb.build.ctx, "GroupBy")
|
||||
ctx = setContextOp(ctx, agb.build.ctx, ent.OpQueryGroupBy)
|
||||
if err := agb.build.prepareQuery(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -660,7 +661,7 @@ func (as *AttachmentSelect) Aggregate(fns ...AggregateFunc) *AttachmentSelect {
|
||||
|
||||
// Scan applies the selector query and scans the result into the given value.
|
||||
func (as *AttachmentSelect) Scan(ctx context.Context, v any) error {
|
||||
ctx = setContextOp(ctx, as.ctx, "Select")
|
||||
ctx = setContextOp(ctx, as.ctx, ent.OpQuerySelect)
|
||||
if err := as.prepareQuery(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -12,10 +12,10 @@ import (
|
||||
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||
"entgo.io/ent/schema/field"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/document"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/item"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/attachment"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/document"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/item"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
|
||||
)
|
||||
|
||||
// AttachmentUpdate is the builder for updating Attachment entities.
|
||||
@@ -51,6 +51,20 @@ func (au *AttachmentUpdate) SetNillableType(a *attachment.Type) *AttachmentUpdat
|
||||
return au
|
||||
}
|
||||
|
||||
// SetPrimary sets the "primary" field.
|
||||
func (au *AttachmentUpdate) SetPrimary(b bool) *AttachmentUpdate {
|
||||
au.mutation.SetPrimary(b)
|
||||
return au
|
||||
}
|
||||
|
||||
// SetNillablePrimary sets the "primary" field if the given value is not nil.
|
||||
func (au *AttachmentUpdate) SetNillablePrimary(b *bool) *AttachmentUpdate {
|
||||
if b != nil {
|
||||
au.SetPrimary(*b)
|
||||
}
|
||||
return au
|
||||
}
|
||||
|
||||
// SetItemID sets the "item" edge to the Item entity by ID.
|
||||
func (au *AttachmentUpdate) SetItemID(id uuid.UUID) *AttachmentUpdate {
|
||||
au.mutation.SetItemID(id)
|
||||
@@ -93,7 +107,7 @@ func (au *AttachmentUpdate) ClearDocument() *AttachmentUpdate {
|
||||
// Save executes the query and returns the number of nodes affected by the update operation.
|
||||
func (au *AttachmentUpdate) Save(ctx context.Context) (int, error) {
|
||||
au.defaults()
|
||||
return withHooks[int, AttachmentMutation](ctx, au.sqlSave, au.mutation, au.hooks)
|
||||
return withHooks(ctx, au.sqlSave, au.mutation, au.hooks)
|
||||
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
@@ -133,10 +147,10 @@ func (au *AttachmentUpdate) check() error {
|
||||
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
|
||||
}
|
||||
}
|
||||
if _, ok := au.mutation.ItemID(); au.mutation.ItemCleared() && !ok {
|
||||
if au.mutation.ItemCleared() && len(au.mutation.ItemIDs()) > 0 {
|
||||
return errors.New(`ent: clearing a required unique edge "Attachment.item"`)
|
||||
}
|
||||
if _, ok := au.mutation.DocumentID(); au.mutation.DocumentCleared() && !ok {
|
||||
if au.mutation.DocumentCleared() && len(au.mutation.DocumentIDs()) > 0 {
|
||||
return errors.New(`ent: clearing a required unique edge "Attachment.document"`)
|
||||
}
|
||||
return nil
|
||||
@@ -160,6 +174,9 @@ func (au *AttachmentUpdate) sqlSave(ctx context.Context) (n int, err error) {
|
||||
if value, ok := au.mutation.GetType(); ok {
|
||||
_spec.SetField(attachment.FieldType, field.TypeEnum, value)
|
||||
}
|
||||
if value, ok := au.mutation.Primary(); ok {
|
||||
_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
|
||||
}
|
||||
if au.mutation.ItemCleared() {
|
||||
edge := &sqlgraph.EdgeSpec{
|
||||
Rel: sqlgraph.M2O,
|
||||
@@ -258,6 +275,20 @@ func (auo *AttachmentUpdateOne) SetNillableType(a *attachment.Type) *AttachmentU
|
||||
return auo
|
||||
}
|
||||
|
||||
// SetPrimary sets the "primary" field.
|
||||
func (auo *AttachmentUpdateOne) SetPrimary(b bool) *AttachmentUpdateOne {
|
||||
auo.mutation.SetPrimary(b)
|
||||
return auo
|
||||
}
|
||||
|
||||
// SetNillablePrimary sets the "primary" field if the given value is not nil.
|
||||
func (auo *AttachmentUpdateOne) SetNillablePrimary(b *bool) *AttachmentUpdateOne {
|
||||
if b != nil {
|
||||
auo.SetPrimary(*b)
|
||||
}
|
||||
return auo
|
||||
}
|
||||
|
||||
// SetItemID sets the "item" edge to the Item entity by ID.
|
||||
func (auo *AttachmentUpdateOne) SetItemID(id uuid.UUID) *AttachmentUpdateOne {
|
||||
auo.mutation.SetItemID(id)
|
||||
@@ -313,7 +344,7 @@ func (auo *AttachmentUpdateOne) Select(field string, fields ...string) *Attachme
|
||||
// Save executes the query and returns the updated Attachment entity.
|
||||
func (auo *AttachmentUpdateOne) Save(ctx context.Context) (*Attachment, error) {
|
||||
auo.defaults()
|
||||
return withHooks[*Attachment, AttachmentMutation](ctx, auo.sqlSave, auo.mutation, auo.hooks)
|
||||
return withHooks(ctx, auo.sqlSave, auo.mutation, auo.hooks)
|
||||
}
|
||||
|
||||
// SaveX is like Save, but panics if an error occurs.
|
||||
@@ -353,10 +384,10 @@ func (auo *AttachmentUpdateOne) check() error {
|
||||
return &ValidationError{Name: "type", err: fmt.Errorf(`ent: validator failed for field "Attachment.type": %w`, err)}
|
||||
}
|
||||
}
|
||||
if _, ok := auo.mutation.ItemID(); auo.mutation.ItemCleared() && !ok {
|
||||
if auo.mutation.ItemCleared() && len(auo.mutation.ItemIDs()) > 0 {
|
||||
return errors.New(`ent: clearing a required unique edge "Attachment.item"`)
|
||||
}
|
||||
if _, ok := auo.mutation.DocumentID(); auo.mutation.DocumentCleared() && !ok {
|
||||
if auo.mutation.DocumentCleared() && len(auo.mutation.DocumentIDs()) > 0 {
|
||||
return errors.New(`ent: clearing a required unique edge "Attachment.document"`)
|
||||
}
|
||||
return nil
|
||||
@@ -397,6 +428,9 @@ func (auo *AttachmentUpdateOne) sqlSave(ctx context.Context) (_node *Attachment,
|
||||
if value, ok := auo.mutation.GetType(); ok {
|
||||
_spec.SetField(attachment.FieldType, field.TypeEnum, value)
|
||||
}
|
||||
if value, ok := auo.mutation.Primary(); ok {
|
||||
_spec.SetField(attachment.FieldPrimary, field.TypeBool, value)
|
||||
}
|
||||
if auo.mutation.ItemCleared() {
|
||||
edge := &sqlgraph.EdgeSpec{
|
||||
Rel: sqlgraph.M2O,
|
||||
|
||||
@@ -6,10 +6,11 @@ import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"entgo.io/ent"
|
||||
"entgo.io/ent/dialect/sql"
|
||||
"github.com/google/uuid"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
|
||||
"github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
|
||||
"github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
|
||||
)
|
||||
|
||||
// AuthRoles is the model entity for the AuthRoles schema.
|
||||
@@ -23,6 +24,7 @@ type AuthRoles struct {
|
||||
// The values are being populated by the AuthRolesQuery when eager-loading is set.
|
||||
Edges AuthRolesEdges `json:"edges"`
|
||||
auth_tokens_roles *uuid.UUID
|
||||
selectValues sql.SelectValues
|
||||
}
|
||||
|
||||
// AuthRolesEdges holds the relations/edges for other nodes in the graph.
|
||||
@@ -37,12 +39,10 @@ type AuthRolesEdges struct {
|
||||
// TokenOrErr returns the Token value or an error if the edge
|
||||
// was not loaded in eager-loading, or loaded but was not found.
|
||||
func (e AuthRolesEdges) TokenOrErr() (*AuthTokens, error) {
|
||||
if e.loadedTypes[0] {
|
||||
if e.Token == nil {
|
||||
// Edge was loaded but was not found.
|
||||
return nil, &NotFoundError{label: authtokens.Label}
|
||||
}
|
||||
if e.Token != nil {
|
||||
return e.Token, nil
|
||||
} else if e.loadedTypes[0] {
|
||||
return nil, &NotFoundError{label: authtokens.Label}
|
||||
}
|
||||
return nil, &NotLoadedError{edge: "token"}
|
||||
}
|
||||
@@ -59,7 +59,7 @@ func (*AuthRoles) scanValues(columns []string) ([]any, error) {
|
||||
case authroles.ForeignKeys[0]: // auth_tokens_roles
|
||||
values[i] = &sql.NullScanner{S: new(uuid.UUID)}
|
||||
default:
|
||||
return nil, fmt.Errorf("unexpected column %q for type AuthRoles", columns[i])
|
||||
values[i] = new(sql.UnknownType)
|
||||
}
|
||||
}
|
||||
return values, nil
|
||||
@@ -92,11 +92,19 @@ func (ar *AuthRoles) assignValues(columns []string, values []any) error {
|
||||
ar.auth_tokens_roles = new(uuid.UUID)
|
||||
*ar.auth_tokens_roles = *value.S.(*uuid.UUID)
|
||||
}
|
||||
default:
|
||||
ar.selectValues.Set(columns[i], values[i])
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Value returns the ent.Value that was dynamically selected and assigned to the AuthRoles.
|
||||
// This includes values selected through modifiers, order, etc.
|
||||
func (ar *AuthRoles) Value(name string) (ent.Value, error) {
|
||||
return ar.selectValues.Get(name)
|
||||
}
|
||||
|
||||
// QueryToken queries the "token" edge of the AuthRoles entity.
|
||||
func (ar *AuthRoles) QueryToken() *AuthTokensQuery {
|
||||
return NewAuthRolesClient(ar.config).QueryToken(ar)
|
||||
|
||||
@@ -4,6 +4,9 @@ package authroles

import (
    "fmt"

    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
)

const (
@@ -79,3 +82,30 @@ func RoleValidator(r Role) error {
        return fmt.Errorf("authroles: invalid enum value for role field: %q", r)
    }
}

// OrderOption defines the ordering options for the AuthRoles queries.
type OrderOption func(*sql.Selector)

// ByID orders the results by the id field.
func ByID(opts ...sql.OrderTermOption) OrderOption {
    return sql.OrderByField(FieldID, opts...).ToFunc()
}

// ByRole orders the results by the role field.
func ByRole(opts ...sql.OrderTermOption) OrderOption {
    return sql.OrderByField(FieldRole, opts...).ToFunc()
}

// ByTokenField orders the results by token field.
func ByTokenField(field string, opts ...sql.OrderTermOption) OrderOption {
    return func(s *sql.Selector) {
        sqlgraph.OrderByNeighborTerms(s, newTokenStep(), sql.OrderByField(field, opts...))
    }
}
func newTokenStep() *sqlgraph.Step {
    return sqlgraph.NewStep(
        sqlgraph.From(Table, FieldID),
        sqlgraph.To(TokenInverseTable, FieldID),
        sqlgraph.Edge(sqlgraph.O2O, true, TokenTable, TokenColumn),
    )
}

@@ -5,7 +5,7 @@ package authroles
import (
    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// ID filters vertices based on their ID field.
@@ -87,11 +87,7 @@ func HasToken() predicate.AuthRoles {
// HasTokenWith applies the HasEdge predicate on the "token" edge with a given conditions (other predicates).
func HasTokenWith(preds ...predicate.AuthTokens) predicate.AuthRoles {
    return predicate.AuthRoles(func(s *sql.Selector) {
        step := sqlgraph.NewStep(
            sqlgraph.From(Table, FieldID),
            sqlgraph.To(TokenInverseTable, FieldID),
            sqlgraph.Edge(sqlgraph.O2O, true, TokenTable, TokenColumn),
        )
        step := newTokenStep()
        sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
            for _, p := range preds {
                p(s)
@@ -102,32 +98,15 @@ func HasTokenWith(preds ...predicate.AuthTokens) predicate.AuthRoles {

// And groups predicates with the AND operator between them.
func And(predicates ...predicate.AuthRoles) predicate.AuthRoles {
    return predicate.AuthRoles(func(s *sql.Selector) {
        s1 := s.Clone().SetP(nil)
        for _, p := range predicates {
            p(s1)
        }
        s.Where(s1.P())
    })
    return predicate.AuthRoles(sql.AndPredicates(predicates...))
}

// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.AuthRoles) predicate.AuthRoles {
    return predicate.AuthRoles(func(s *sql.Selector) {
        s1 := s.Clone().SetP(nil)
        for i, p := range predicates {
            if i > 0 {
                s1.Or()
            }
            p(s1)
        }
        s.Where(s1.P())
    })
    return predicate.AuthRoles(sql.OrPredicates(predicates...))
}

// Not applies the not operator on the given predicate.
func Not(p predicate.AuthRoles) predicate.AuthRoles {
    return predicate.AuthRoles(func(s *sql.Selector) {
        p(s.Not())
    })
    return predicate.AuthRoles(sql.NotPredicates(p))
}

@@ -10,8 +10,8 @@ import (
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
)

// AuthRolesCreate is the builder for creating a AuthRoles entity.
@@ -62,7 +62,7 @@ func (arc *AuthRolesCreate) Mutation() *AuthRolesMutation {
// Save creates the AuthRoles in the database.
func (arc *AuthRolesCreate) Save(ctx context.Context) (*AuthRoles, error) {
    arc.defaults()
    return withHooks[*AuthRoles, AuthRolesMutation](ctx, arc.sqlSave, arc.mutation, arc.hooks)
    return withHooks(ctx, arc.sqlSave, arc.mutation, arc.hooks)
}

// SaveX calls Save and panics if Save returns an error.
@@ -158,11 +158,15 @@ func (arc *AuthRolesCreate) createSpec() (*AuthRoles, *sqlgraph.CreateSpec) {
// AuthRolesCreateBulk is the builder for creating many AuthRoles entities in bulk.
type AuthRolesCreateBulk struct {
    config
    err error
    builders []*AuthRolesCreate
}

// Save creates the AuthRoles entities in the database.
func (arcb *AuthRolesCreateBulk) Save(ctx context.Context) ([]*AuthRoles, error) {
    if arcb.err != nil {
        return nil, arcb.err
    }
    specs := make([]*sqlgraph.CreateSpec, len(arcb.builders))
    nodes := make([]*AuthRoles, len(arcb.builders))
    mutators := make([]Mutator, len(arcb.builders))
@@ -179,8 +183,8 @@ func (arcb *AuthRolesCreateBulk) Save(ctx context.Context) ([]*AuthRoles, error)
                return nil, err
            }
            builder.mutation = mutation
            nodes[i], specs[i] = builder.createSpec()
            var err error
            nodes[i], specs[i] = builder.createSpec()
            if i < len(mutators)-1 {
                _, err = mutators[i+1].Mutate(root, arcb.builders[i+1].mutation)
            } else {

@@ -8,8 +8,8 @@ import (
    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// AuthRolesDelete is the builder for deleting a AuthRoles entity.
@@ -27,7 +27,7 @@ func (ard *AuthRolesDelete) Where(ps ...predicate.AuthRoles) *AuthRolesDelete {

// Exec executes the deletion query and returns how many vertices were deleted.
func (ard *AuthRolesDelete) Exec(ctx context.Context) (int, error) {
    return withHooks[int, AuthRolesMutation](ctx, ard.sqlExec, ard.mutation, ard.hooks)
    return withHooks(ctx, ard.sqlExec, ard.mutation, ard.hooks)
}

// ExecX is like Exec, but panics if an error occurs.

@@ -7,20 +7,21 @@ import (
    "fmt"
    "math"

    "entgo.io/ent"
    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// AuthRolesQuery is the builder for querying AuthRoles entities.
type AuthRolesQuery struct {
    config
    ctx *QueryContext
    order []OrderFunc
    order []authroles.OrderOption
    inters []Interceptor
    predicates []predicate.AuthRoles
    withToken *AuthTokensQuery
@@ -56,7 +57,7 @@ func (arq *AuthRolesQuery) Unique(unique bool) *AuthRolesQuery {
}

// Order specifies how the records should be ordered.
func (arq *AuthRolesQuery) Order(o ...OrderFunc) *AuthRolesQuery {
func (arq *AuthRolesQuery) Order(o ...authroles.OrderOption) *AuthRolesQuery {
    arq.order = append(arq.order, o...)
    return arq
}
@@ -86,7 +87,7 @@ func (arq *AuthRolesQuery) QueryToken() *AuthTokensQuery {
// First returns the first AuthRoles entity from the query.
// Returns a *NotFoundError when no AuthRoles was found.
func (arq *AuthRolesQuery) First(ctx context.Context) (*AuthRoles, error) {
    nodes, err := arq.Limit(1).All(setContextOp(ctx, arq.ctx, "First"))
    nodes, err := arq.Limit(1).All(setContextOp(ctx, arq.ctx, ent.OpQueryFirst))
    if err != nil {
        return nil, err
    }
@@ -109,7 +110,7 @@ func (arq *AuthRolesQuery) FirstX(ctx context.Context) *AuthRoles {
// Returns a *NotFoundError when no AuthRoles ID was found.
func (arq *AuthRolesQuery) FirstID(ctx context.Context) (id int, err error) {
    var ids []int
    if ids, err = arq.Limit(1).IDs(setContextOp(ctx, arq.ctx, "FirstID")); err != nil {
    if ids, err = arq.Limit(1).IDs(setContextOp(ctx, arq.ctx, ent.OpQueryFirstID)); err != nil {
        return
    }
    if len(ids) == 0 {
@@ -132,7 +133,7 @@ func (arq *AuthRolesQuery) FirstIDX(ctx context.Context) int {
// Returns a *NotSingularError when more than one AuthRoles entity is found.
// Returns a *NotFoundError when no AuthRoles entities are found.
func (arq *AuthRolesQuery) Only(ctx context.Context) (*AuthRoles, error) {
    nodes, err := arq.Limit(2).All(setContextOp(ctx, arq.ctx, "Only"))
    nodes, err := arq.Limit(2).All(setContextOp(ctx, arq.ctx, ent.OpQueryOnly))
    if err != nil {
        return nil, err
    }
@@ -160,7 +161,7 @@ func (arq *AuthRolesQuery) OnlyX(ctx context.Context) *AuthRoles {
// Returns a *NotFoundError when no entities are found.
func (arq *AuthRolesQuery) OnlyID(ctx context.Context) (id int, err error) {
    var ids []int
    if ids, err = arq.Limit(2).IDs(setContextOp(ctx, arq.ctx, "OnlyID")); err != nil {
    if ids, err = arq.Limit(2).IDs(setContextOp(ctx, arq.ctx, ent.OpQueryOnlyID)); err != nil {
        return
    }
    switch len(ids) {
@@ -185,7 +186,7 @@ func (arq *AuthRolesQuery) OnlyIDX(ctx context.Context) int {

// All executes the query and returns a list of AuthRolesSlice.
func (arq *AuthRolesQuery) All(ctx context.Context) ([]*AuthRoles, error) {
    ctx = setContextOp(ctx, arq.ctx, "All")
    ctx = setContextOp(ctx, arq.ctx, ent.OpQueryAll)
    if err := arq.prepareQuery(ctx); err != nil {
        return nil, err
    }
@@ -207,7 +208,7 @@ func (arq *AuthRolesQuery) IDs(ctx context.Context) (ids []int, err error) {
    if arq.ctx.Unique == nil && arq.path != nil {
        arq.Unique(true)
    }
    ctx = setContextOp(ctx, arq.ctx, "IDs")
    ctx = setContextOp(ctx, arq.ctx, ent.OpQueryIDs)
    if err = arq.Select(authroles.FieldID).Scan(ctx, &ids); err != nil {
        return nil, err
    }
@@ -225,7 +226,7 @@ func (arq *AuthRolesQuery) IDsX(ctx context.Context) []int {

// Count returns the count of the given query.
func (arq *AuthRolesQuery) Count(ctx context.Context) (int, error) {
    ctx = setContextOp(ctx, arq.ctx, "Count")
    ctx = setContextOp(ctx, arq.ctx, ent.OpQueryCount)
    if err := arq.prepareQuery(ctx); err != nil {
        return 0, err
    }
@@ -243,7 +244,7 @@ func (arq *AuthRolesQuery) CountX(ctx context.Context) int {

// Exist returns true if the query has elements in the graph.
func (arq *AuthRolesQuery) Exist(ctx context.Context) (bool, error) {
    ctx = setContextOp(ctx, arq.ctx, "Exist")
    ctx = setContextOp(ctx, arq.ctx, ent.OpQueryExist)
    switch _, err := arq.FirstID(ctx); {
    case IsNotFound(err):
        return false, nil
@@ -272,7 +273,7 @@ func (arq *AuthRolesQuery) Clone() *AuthRolesQuery {
    return &AuthRolesQuery{
        config: arq.config,
        ctx: arq.ctx.Clone(),
        order: append([]OrderFunc{}, arq.order...),
        order: append([]authroles.OrderOption{}, arq.order...),
        inters: append([]Interceptor{}, arq.inters...),
        predicates: append([]predicate.AuthRoles{}, arq.predicates...),
        withToken: arq.withToken.Clone(),
@@ -537,7 +538,7 @@ func (argb *AuthRolesGroupBy) Aggregate(fns ...AggregateFunc) *AuthRolesGroupBy

// Scan applies the selector query and scans the result into the given value.
func (argb *AuthRolesGroupBy) Scan(ctx context.Context, v any) error {
    ctx = setContextOp(ctx, argb.build.ctx, "GroupBy")
    ctx = setContextOp(ctx, argb.build.ctx, ent.OpQueryGroupBy)
    if err := argb.build.prepareQuery(ctx); err != nil {
        return err
    }
@@ -585,7 +586,7 @@ func (ars *AuthRolesSelect) Aggregate(fns ...AggregateFunc) *AuthRolesSelect {

// Scan applies the selector query and scans the result into the given value.
func (ars *AuthRolesSelect) Scan(ctx context.Context, v any) error {
    ctx = setContextOp(ctx, ars.ctx, "Select")
    ctx = setContextOp(ctx, ars.ctx, ent.OpQuerySelect)
    if err := ars.prepareQuery(ctx); err != nil {
        return err
    }

@@ -11,9 +11,9 @@ import (
    "entgo.io/ent/dialect/sql/sqlgraph"
    "entgo.io/ent/schema/field"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
    "github.com/hay-kot/homebox/backend/internal/data/ent/predicate"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/predicate"
)

// AuthRolesUpdate is the builder for updating AuthRoles entities.
@@ -75,7 +75,7 @@ func (aru *AuthRolesUpdate) ClearToken() *AuthRolesUpdate {

// Save executes the query and returns the number of nodes affected by the update operation.
func (aru *AuthRolesUpdate) Save(ctx context.Context) (int, error) {
    return withHooks[int, AuthRolesMutation](ctx, aru.sqlSave, aru.mutation, aru.hooks)
    return withHooks(ctx, aru.sqlSave, aru.mutation, aru.hooks)
}

// SaveX is like Save, but panics if an error occurs.
@@ -233,7 +233,7 @@ func (aruo *AuthRolesUpdateOne) Select(field string, fields ...string) *AuthRole

// Save executes the query and returns the updated AuthRoles entity.
func (aruo *AuthRolesUpdateOne) Save(ctx context.Context) (*AuthRoles, error) {
    return withHooks[*AuthRoles, AuthRolesMutation](ctx, aruo.sqlSave, aruo.mutation, aruo.hooks)
    return withHooks(ctx, aruo.sqlSave, aruo.mutation, aruo.hooks)
}

// SaveX is like Save, but panics if an error occurs.

@@ -7,11 +7,12 @@ import (
    "strings"
    "time"

    "entgo.io/ent"
    "entgo.io/ent/dialect/sql"
    "github.com/google/uuid"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authroles"
    "github.com/hay-kot/homebox/backend/internal/data/ent/authtokens"
    "github.com/hay-kot/homebox/backend/internal/data/ent/user"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authroles"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/authtokens"
    "github.com/sysadminsmedia/homebox/backend/internal/data/ent/user"
)

// AuthTokens is the model entity for the AuthTokens schema.
@@ -31,6 +32,7 @@ type AuthTokens struct {
    // The values are being populated by the AuthTokensQuery when eager-loading is set.
    Edges AuthTokensEdges `json:"edges"`
    user_auth_tokens *uuid.UUID
    selectValues sql.SelectValues
}

// AuthTokensEdges holds the relations/edges for other nodes in the graph.
@@ -47,12 +49,10 @@ type AuthTokensEdges struct {
// UserOrErr returns the User value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AuthTokensEdges) UserOrErr() (*User, error) {
    if e.loadedTypes[0] {
        if e.User == nil {
            // Edge was loaded but was not found.
            return nil, &NotFoundError{label: user.Label}
        }
    if e.User != nil {
        return e.User, nil
    } else if e.loadedTypes[0] {
        return nil, &NotFoundError{label: user.Label}
    }
    return nil, &NotLoadedError{edge: "user"}
}
@@ -60,12 +60,10 @@ func (e AuthTokensEdges) UserOrErr() (*User, error) {
// RolesOrErr returns the Roles value or an error if the edge
// was not loaded in eager-loading, or loaded but was not found.
func (e AuthTokensEdges) RolesOrErr() (*AuthRoles, error) {
    if e.loadedTypes[1] {
        if e.Roles == nil {
            // Edge was loaded but was not found.
            return nil, &NotFoundError{label: authroles.Label}
        }
    if e.Roles != nil {
        return e.Roles, nil
    } else if e.loadedTypes[1] {
        return nil, &NotFoundError{label: authroles.Label}
    }
    return nil, &NotLoadedError{edge: "roles"}
}
@@ -84,7 +82,7 @@ func (*AuthTokens) scanValues(columns []string) ([]any, error) {
        case authtokens.ForeignKeys[0]: // user_auth_tokens
            values[i] = &sql.NullScanner{S: new(uuid.UUID)}
        default:
            return nil, fmt.Errorf("unexpected column %q for type AuthTokens", columns[i])
            values[i] = new(sql.UnknownType)
        }
    }
    return values, nil
@@ -135,11 +133,19 @@ func (at *AuthTokens) assignValues(columns []string, values []any) error {
                at.user_auth_tokens = new(uuid.UUID)
                *at.user_auth_tokens = *value.S.(*uuid.UUID)
            }
        default:
            at.selectValues.Set(columns[i], values[i])
        }
    }
    return nil
}

// Value returns the ent.Value that was dynamically selected and assigned to the AuthTokens.
// This includes values selected through modifiers, order, etc.
func (at *AuthTokens) Value(name string) (ent.Value, error) {
    return at.selectValues.Get(name)
}

// QueryUser queries the "user" edge of the AuthTokens entity.
func (at *AuthTokens) QueryUser() *UserQuery {
    return NewAuthTokensClient(at.config).QueryUser(at)

@@ -5,6 +5,8 @@ package authtokens
import (
    "time"

    "entgo.io/ent/dialect/sql"
    "entgo.io/ent/dialect/sql/sqlgraph"
    "github.com/google/uuid"
)

@@ -85,3 +87,54 @@ var (
    // DefaultID holds the default value on creation for the "id" field.
    DefaultID func() uuid.UUID
)

// OrderOption defines the ordering options for the AuthTokens queries.
type OrderOption func(*sql.Selector)

// ByID orders the results by the id field.
func ByID(opts ...sql.OrderTermOption) OrderOption {
    return sql.OrderByField(FieldID, opts...).ToFunc()
}

// ByCreatedAt orders the results by the created_at field.
func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption {
    return sql.OrderByField(FieldCreatedAt, opts...).ToFunc()
}

// ByUpdatedAt orders the results by the updated_at field.
func ByUpdatedAt(opts ...sql.OrderTermOption) OrderOption {
    return sql.OrderByField(FieldUpdatedAt, opts...).ToFunc()
}

// ByExpiresAt orders the results by the expires_at field.
func ByExpiresAt(opts ...sql.OrderTermOption) OrderOption {
    return sql.OrderByField(FieldExpiresAt, opts...).ToFunc()
}

// ByUserField orders the results by user field.
func ByUserField(field string, opts ...sql.OrderTermOption) OrderOption {
    return func(s *sql.Selector) {
        sqlgraph.OrderByNeighborTerms(s, newUserStep(), sql.OrderByField(field, opts...))
    }
}

// ByRolesField orders the results by roles field.
func ByRolesField(field string, opts ...sql.OrderTermOption) OrderOption {
    return func(s *sql.Selector) {
        sqlgraph.OrderByNeighborTerms(s, newRolesStep(), sql.OrderByField(field, opts...))
    }
}
func newUserStep() *sqlgraph.Step {
    return sqlgraph.NewStep(
        sqlgraph.From(Table, FieldID),
        sqlgraph.To(UserInverseTable, FieldID),
        sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn),
    )
}
func newRolesStep() *sqlgraph.Step {
    return sqlgraph.NewStep(
        sqlgraph.From(Table, FieldID),
        sqlgraph.To(RolesInverseTable, FieldID),
        sqlgraph.Edge(sqlgraph.O2O, false, RolesTable, RolesColumn),
    )
}

Some files were not shown because too many files have changed in this diff.