Mirror of https://github.com/sysadminsmedia/homebox.git
Synced 2025-12-21 13:23:14 +01:00

Compare commits
20 Commits: katos/purc...v0.13.0
| Author | SHA1 | Date |
|---|---|---|
|  | e051352070 |  |
|  | 3bf1e50620 |  |
|  | 42f3c88396 |  |
|  | c9f31ef934 |  |
|  | 01f54aeb52 |  |
|  | 2d1016d362 |  |
|  | b48c961ac1 |  |
|  | 4d47567995 |  |
|  | 6b2e3accf7 |  |
|  | c1f8520c4f |  |
|  | 41eb99ec40 |  |
|  | 97a74127fb |  |
|  | a9396167bf |  |
|  | 3385e5684e |  |
|  | bb9672214c |  |
|  | 1b93672417 |  |
|  | 8b1cedd4a8 |  |
|  | 2c34047b6d |  |
|  | f0942f0714 |  |
|  | 967e574ea8 |  |
65  .github/scripts/update_currencies.py  (vendored, new file)
@@ -0,0 +1,65 @@
import requests
import json
import os

def fetch_currencies():
    try:
        response = requests.get('https://restcountries.com/v3.1/all')
        response.raise_for_status()
    except requests.exceptions.Timeout:
        print("Request to the API timed out.")
        return []
    except requests.exceptions.RequestException as e:
        print(f"An error occurred while making the request: {e}")
        return []

    try:
        countries = response.json()
    except json.JSONDecodeError:
        print("Failed to decode JSON from the response.")
        return []

    currencies_list = []
    for country in countries:
        country_name = country.get('name', {}).get('common')
        country_currencies = country.get('currencies', {})
        for currency_code, currency_info in country_currencies.items():
            symbol = currency_info.get('symbol', '')
            currencies_list.append({
                'code': currency_code,
                'local': country_name,
                'symbol': symbol,
                'name': currency_info.get('name')
            })

    return currencies_list

def save_currencies(currencies, file_path):
    try:
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(currencies, f, ensure_ascii=False, indent=4)
    except IOError as e:
        print(f"An error occurred while writing to the file: {e}")

def load_existing_currencies(file_path):
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            return json.load(f)
    except (IOError, json.JSONDecodeError):
        return []  # Return an empty list if file doesn't exist or is invalid

def main():
    save_path = 'backend/internal/core/currencies/currencies.json'

    existing_currencies = load_existing_currencies(save_path)
    new_currencies = fetch_currencies()

    if new_currencies == existing_currencies:
        print("Currencies up-to-date with API, skipping commit.")
    else:
        save_currencies(new_currencies, save_path)
        print("Currencies updated and saved.")

if __name__ == "__main__":
    main()
@@ -92,8 +92,8 @@ jobs:
           tags: ${{ steps.metadata.outputs.tags }}
           labels: ${{ steps.metadata.outputs.labels }}
           platforms: linux/amd64,linux/arm64,linux/arm/v7
-          # cache-from: type=gha
-          # cache-to: type=gha,mode=max
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
           build-args: |
             VERSION=${{ github.ref_name }}
             COMMIT=${{ github.sha }}
4  .github/workflows/docker-publish.yaml  (vendored)
@@ -89,8 +89,8 @@ jobs:
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
           platforms: linux/amd64,linux/arm64,linux/arm/v7
-          # cache-from: type=gha
-          # cache-to: type=gha,mode=max
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
           build-args: |
             VERSION=${{ github.ref_name }}
             COMMIT=${{ github.sha }}
100  .github/workflows/update-currencies.yml  (vendored, new file)
@@ -0,0 +1,100 @@
name: Update Currencies

on:
  push:
    branches:
      - main

jobs:
  update-currencies:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.8'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install requests

      - name: Run currency fetch script
        run: python .github/scripts/update_currencies.py

      - name: Check for changes
        id: check_changes
        run: |
          if [[ $(git status --porcelain) ]]; then
            echo "Changes detected."
            echo "changes=true" >> $GITHUB_ENV
          else
            echo "No changes detected."
            echo "changes=false" >> $GITHUB_ENV
          fi

      - name: Delete existing update-currencies branch
        run: |
          if git show-ref --verify --quiet refs/heads/update-currencies; then
            git branch -D update-currencies
            echo "Deleted existing update-currencies branch."
          else
            echo "No existing update-currencies branch to delete."
          fi

      - name: Create new update-currencies branch
        if: env.changes == 'true'
        run: |
          git config --global user.name "github-actions[bot]"
          git config --global user.email "github-actions[bot]@users.noreply.github.com"

          # Create a new branch
          git checkout -b update-currencies
          git add backend/internal/core/currencies/currencies.json
          git commit -m "Update currencies.json"

          # Fetch the latest changes from the remote
          git fetch origin

          # Attempt to rebase with the latest changes
          if git show-ref --verify --quiet refs/remotes/origin/update-currencies; then
            if ! git rebase origin/update-currencies; then
              echo "Rebase conflicts occurred. Please resolve them manually."
              echo "To resolve conflicts, check out the 'update-currencies' branch locally."
              exit 1
            fi
          else
            echo "No existing remote branch 'update-currencies'. Skipping rebase."
          fi

          # Push the new branch to the remote
          if ! git push --set-upstream origin update-currencies; then
            echo "Push failed, trying to fetch and rebase again."
            git fetch origin
            if git show-ref --verify --quiet refs/remotes/origin/update-currencies; then
              if ! git rebase origin/update-currencies; then
                echo "Second rebase failed. Please resolve manually."
                exit 1
              fi
            else
              echo "No existing remote branch 'update-currencies'. Skipping rebase."
            fi
            if ! git push --set-upstream origin update-currencies; then
              echo "Second push failed. Please resolve manually."
              exit 1
            fi
          fi

          # Create a pull request
          curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
            -X POST \
            -d '{"title": "Update currencies", "head": "update-currencies", "base": "main"}' \
            https://api.github.com/repos/${{ github.repository }}/pulls

      - name: Notify no changes
        if: env.changes == 'false'
        run: echo "Currencies up-to-date with API, skipping commit."
19  Dockerfile
@@ -1,13 +1,23 @@
# Node dependencies
FROM node:18-alpine AS frontend-dependencies
WORKDIR /app
RUN npm install -g pnpm
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist

# Build Nuxt
FROM node:18-alpine AS frontend-builder
WORKDIR /app
RUN npm install -g pnpm
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist
COPY frontend .
COPY --from=frontend-dependencies /app/node_modules ./node_modules
RUN pnpm build

FROM golang:alpine AS builder-dependencies
WORKDIR /go/src/app
COPY ./backend .
RUN go mod download

# Build API
FROM golang:alpine AS builder
ARG BUILD_TIME
@@ -19,10 +29,11 @@ RUN apk update && \

WORKDIR /go/src/app
COPY ./backend .
RUN go get -d -v ./...
RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public
RUN CGO_ENABLED=0 GOOS=linux go build \
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
RUN --mount=type=cache,target=/root/.cache/go-build \
    CGO_ENABLED=0 GOOS=linux go build \
    -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
    -o /go/bin/api \
    -v ./app/api/*.go
@@ -1,35 +1,42 @@
# Node dependencies
FROM node:18-alpine AS frontend-dependencies
WORKDIR /app
RUN npm install -g pnpm
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist

# Build Nuxt
FROM node:18-alpine AS frontend-builder
WORKDIR /app
RUN npm install -g pnpm
COPY frontend/package.json frontend/pnpm-lock.yaml ./
RUN pnpm install --frozen-lockfile --shamefully-hoist
COPY frontend .
COPY --from=frontend-dependencies /app/node_modules ./node_modules
RUN pnpm build

FROM golang:alpine AS builder-dependencies
WORKDIR /go/src/app
COPY ./backend .
RUN go mod download

# Build API
FROM golang:alpine AS builder
ARG BUILD_TIME
ARG COMMIT
ARG VERSION
ARG BUSYBOX_VERSION=1.36.1-r31
RUN apk update && \
    apk upgrade && \
    apk add --update git build-base gcc g++

WORKDIR /go/src/app
COPY ./backend .
RUN go get -d -v ./...
RUN rm -rf ./app/api/public
COPY --from=frontend-builder /app/.output/public ./app/api/static/public
RUN CGO_ENABLED=0 GOOS=linux go build \
COPY --from=builder-dependencies /go/pkg/mod /go/pkg/mod
RUN --mount=type=cache,target=/root/.cache/go-build \
    CGO_ENABLED=0 GOOS=linux go build \
    -ldflags "-s -w -X main.commit=$COMMIT -X main.buildTime=$BUILD_TIME -X main.version=$VERSION" \
    -o /go/bin/api \
    -v ./app/api/*.go && \
    chmod +x /go/bin/api && \
    # create a directory so that we can copy it in the next stage
    mkdir /data
    -v ./app/api/*.go

FROM gcr.io/distroless/java:latest
@@ -57,6 +57,12 @@ func WithSecureCookies(secure bool) func(*V1Controller) {
     }
 }

+func WithURL(url string) func(*V1Controller) {
+    return func(ctrl *V1Controller) {
+        ctrl.url = url
+    }
+}
+
 type V1Controller struct {
     cookieSecure bool
     repo *repo.AllRepos
@@ -65,6 +71,7 @@ type V1Controller struct {
     isDemo bool
     allowRegistration bool
     bus *eventbus.EventBus
+    url string
 }

 type (
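The new WithURL option follows the same functional-options pattern as the existing WithSecureCookies and WithDemoStatus helpers: each option is a func(*V1Controller) that sets one field on the controller. A generic, self-contained sketch of how such options are typically composed — the lowercase type and constructor names below are illustrative, not homebox's actual API:

```go
package main

import "fmt"

// controller stands in for V1Controller; url is the field the new WithURL option sets.
type controller struct {
	cookieSecure bool
	url          string
}

// option mirrors the func(*V1Controller) shape used by the homebox helpers.
type option func(*controller)

func withSecureCookies(secure bool) option {
	return func(c *controller) { c.cookieSecure = secure }
}

func withURL(url string) option {
	return func(c *controller) { c.url = url }
}

// newController applies each option in order to a zero-value controller.
func newController(opts ...option) *controller {
	c := &controller{}
	for _, opt := range opts {
		opt(c)
	}
	return c
}

func main() {
	c := newController(withSecureCookies(true), withURL("https://example.com"))
	fmt.Printf("%+v\n", c) // &{cookieSecure:true url:https://example.com}
}
```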
@@ -6,6 +6,7 @@ import (
     "errors"
     "math/big"
     "net/http"
+    "net/url"
     "strings"

     "github.com/google/uuid"
@@ -333,7 +334,7 @@ func (ctrl *V1Controller) HandleItemsExport() errchain.HandlerFunc {
     return func(w http.ResponseWriter, r *http.Request) error {
         ctx := services.NewContext(r.Context())

-        csvData, err := ctrl.svc.Items.ExportCSV(r.Context(), ctx.GID)
+        csvData, err := ctrl.svc.Items.ExportCSV(r.Context(), ctx.GID, getHBURL(r.Header.Get("Referer"), ctrl.url))
         if err != nil {
             log.Err(err).Msg("failed to export items")
             return validate.NewRequestError(err, http.StatusInternalServerError)
@@ -347,3 +348,26 @@ func (ctrl *V1Controller) HandleItemsExport() errchain.HandlerFunc {
         return writer.WriteAll(csvData)
     }
 }
+
+func getHBURL(refererHeader, fallback string) (hbURL string) {
+    hbURL = refererHeader
+    if hbURL == "" {
+        hbURL = fallback
+    }
+
+    return stripPathFromURL(hbURL)
+}
+
+// stripPathFromURL removes the path from a URL.
+// ex. https://example.com/tools -> https://example.com
+func stripPathFromURL(rawURL string) string {
+    parsedURL, err := url.Parse(rawURL)
+    if err != nil {
+        log.Err(err).Msg("failed to parse URL")
+        return ""
+    }
+
+    strippedURL := url.URL{Scheme: parsedURL.Scheme, Host: parsedURL.Host}
+
+    return strippedURL.String()
+}
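For context on the two helpers added above: getHBURL prefers the request's Referer header, falls back to the configured URL, and stripPathFromURL then reduces the result to scheme and host only. A minimal standalone sketch of that behavior — the package layout, function names, and sample URLs are illustrative, not the commit's code:

```go
package main

import (
	"fmt"
	"net/url"
)

// stripPath mirrors stripPathFromURL above: keep only scheme and host.
// e.g. https://example.com/tools -> https://example.com
func stripPath(rawURL string) string {
	parsed, err := url.Parse(rawURL)
	if err != nil {
		return ""
	}
	return (&url.URL{Scheme: parsed.Scheme, Host: parsed.Host}).String()
}

// hbURL mirrors getHBURL above: prefer the Referer, fall back to the configured URL.
func hbURL(referer, fallback string) string {
	if referer == "" {
		referer = fallback
	}
	return stripPath(referer)
}

func main() {
	// Referer present: its path is stripped.
	fmt.Println(hbURL("https://example.com/tools", "https://fallback.example")) // https://example.com
	// No Referer: the fallback is used and stripped the same way.
	fmt.Println(hbURL("", "https://fallback.example/some/path")) // https://fallback.example
}
```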
@@ -3,6 +3,7 @@ package main
 import (
     "embed"
     "errors"
+    "fmt"
     "io"
     "mime"
     "net/http"
@@ -54,6 +55,7 @@ func (a *app) mountRoutes(r *chi.Mux, chain *errchain.ErrChain, repos *repo.AllRepos
         v1.WithMaxUploadSize(a.conf.Web.MaxUploadSize),
         v1.WithRegistration(a.conf.Options.AllowRegistration),
         v1.WithDemoStatus(a.conf.Demo), // Disable Password Change in Demo Mode
+        v1.WithURL(fmt.Sprintf("%s:%s", a.conf.Web.Host, a.conf.Web.Port)),
     )

     r.Route(prefix+"/v1", func(r chi.Router) {
File diff suppressed because it is too large
@@ -18,6 +18,7 @@ type ExportCSVRow struct {
     LabelStr LabelString `csv:"HB.labels"`
     AssetID repo.AssetID `csv:"HB.asset_id"`
     Archived bool `csv:"HB.archived"`
+    URL string `csv:"HB.url"`

     Name string `csv:"HB.name"`
     Quantity int `csv:"HB.quantity"`
@@ -153,7 +153,7 @@ func (s *IOSheet) Read(data io.Reader) error {
 }

 // ReadItems writes the sheet to a writer.
-func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.UUID, repos *repo.AllRepos) error {
+func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.UUID, repos *repo.AllRepos, hbURL string) error {
     s.Rows = make([]ExportCSVRow, len(items))

     extraHeaders := map[string]struct{}{}
@@ -178,6 +178,8 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.UUID
             labelString[i] = l.Name
         }

+        url := generateItemURL(item, hbURL)
+
         customFields := make([]ExportItemFields, len(item.Fields))

         for i, f := range item.Fields {
@@ -201,6 +203,7 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.UUID
             Description: item.Description,
             Insured: item.Insured,
             Archived: item.Archived,
+            URL: url,

             PurchasePrice: item.PurchasePrice,
             PurchaseFrom: item.PurchaseFrom,
@@ -252,6 +255,14 @@ func (s *IOSheet) ReadItems(ctx context.Context, items []repo.ItemOut, GID uuid.UUID
     return nil
 }

+func generateItemURL(item repo.ItemOut, d string) string {
+    url := ""
+    if item.ID != uuid.Nil {
+        url = fmt.Sprintf("%s/item/%s", d, item.ID.String())
+    }
+    return url
+}
+
 // CSV writes the current sheet to a 2d array, for compatibility with TSV/CSV files.
 func (s *IOSheet) CSV() ([][]string, error) {
     memcsv := make([][]string, len(s.Rows)+1)
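generateItemURL joins the base URL handed down from the export handler with /item/<id>, and the uuid.Nil guard leaves the new HB.url column empty for rows without a real ID. A standalone sketch of that composition, using the same github.com/google/uuid package the diff imports — the sample UUID and base URL are made up for illustration:

```go
package main

import (
	"fmt"

	"github.com/google/uuid"
)

// itemURL mirrors generateItemURL above: empty string for a nil ID,
// otherwise "<base>/item/<uuid>".
func itemURL(id uuid.UUID, base string) string {
	if id == uuid.Nil {
		return ""
	}
	return fmt.Sprintf("%s/item/%s", base, id.String())
}

func main() {
	id := uuid.MustParse("2a5f6f1e-8e25-4bfb-8e69-1d1a2f0f6c3a")
	fmt.Println(itemURL(id, "https://example.com"))       // https://example.com/item/2a5f6f1e-...
	fmt.Println(itemURL(uuid.Nil, "https://example.com")) // "" -> the HB.url cell stays empty
}
```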
@@ -329,7 +329,7 @@ func (svc *ItemService) CsvImport(ctx context.Context, GID uuid.UUID, data io.Reader
     return finished, nil
 }

-func (svc *ItemService) ExportCSV(ctx context.Context, GID uuid.UUID) ([][]string, error) {
+func (svc *ItemService) ExportCSV(ctx context.Context, GID uuid.UUID, hbURL string) ([][]string, error) {
     items, err := svc.repo.Items.GetAll(ctx, GID)
     if err != nil {
         return nil, err
@@ -337,7 +337,7 @@ func (svc *ItemService) ExportCSV(ctx context.Context, GID uuid.UUID) ([][]string
     sheet := reporting.IOSheet{}

-    err = sheet.ReadItems(ctx, items, GID, svc.repo)
+    err = sheet.ReadItems(ctx, items, GID, svc.repo, hbURL)
     if err != nil {
         return nil, err
     }
@@ -1683,12 +1683,14 @@
                 "parameters": [
                     {
                         "type": "string",
+                        "example": "admin@admin.com",
                         "description": "string",
                         "name": "username",
                         "in": "formData"
                     },
                     {
                         "type": "string",
+                        "example": "admin",
                         "description": "string",
                         "name": "password",
                         "in": "formData"
1  frontend/.npmrc  (new file)
@@ -0,0 +1 @@
shamefully-hoist=true
@@ -32,7 +32,7 @@ export function useRouteQuery(q: string, def: any): WritableComputedRef<any> {
     case "string":
       return computed({
         get: () => {
-          const qv = route.query[q];
+          const qv = first.value;
           if (Array.isArray(qv)) {
             return qv[0];
           }
@@ -40,6 +40,7 @@
 const includeArchived = useRouteQuery("archived", false);
 const fieldSelector = useRouteQuery("fieldSelector", false);
 const negateLabels = useRouteQuery("negateLabels", false);
+const orderBy = useRouteQuery("orderBy", "name");

 const totalPages = computed(() => Math.ceil(total.value / pageSize.value));
 const hasNext = computed(() => page.value * pageSize.value < total.value);
@@ -169,6 +170,12 @@
   }
 });

+watch(orderBy, (newV, oldV) => {
+  if (newV !== oldV) {
+    search();
+  }
+});
+
 async function fetchValues(field: string): Promise<string[]> {
   if (fieldValuesCache.value[field]) {
     return fieldValuesCache.value[field];
@@ -201,6 +208,7 @@
       pageSize: pageSize.value,
       includeArchived: includeArchived.value ? "true" : "false",
       negateLabels: negateLabels.value ? "true" : "false",
+      orderBy: orderBy.value,
     },
   });
 }
@@ -231,6 +239,7 @@
     includeArchived: includeArchived.value,
     page: page.value,
     pageSize: pageSize.value,
+    orderBy: orderBy.value,
     fields,
   });

@@ -278,6 +287,7 @@
       archived: includeArchived.value ? "true" : "false",
       fieldSelector: fieldSelector.value ? "true" : "false",
       negateLabels: negateLabels.value ? "true" : "false",
+      orderBy: orderBy.value,
       pageSize: pageSize.value,
       page: page.value,
       q: query.value,
@@ -311,6 +321,7 @@
     fieldSelector: "false",
     pageSize: 10,
     page: 1,
+    orderBy: "name",
     q: "",
     loc: [],
     lab: [],
@@ -373,6 +384,14 @@
             <input v-model="negateLabels" type="checkbox" class="toggle toggle-sm toggle-primary" />
             <span class="label-text ml-4"> Negate selected labels </span>
           </label>
+          <label class="label cursor-pointer mr-auto">
+            <select v-model="orderBy" class="select select-bordered select-sm">
+              <option value="name" selected>Name</option>
+              <option value="createdAt">Created At</option>
+              <option value="updatedAt">Updated At</option>
+            </select>
+            <span class="label-text ml-4"> Order By </span>
+          </label>
           <hr class="my-2" />
           <BaseButton class="btn-block btn-sm" @click="reset"> Reset Search</BaseButton>
         </div>