Mirror of https://github.com/amir20/dozzle.git, synced 2026-01-02 11:07:26 +01:00

feat: more improvements to analytics and gzips logs for better performance (#3814)

Author: Amir Raminfar
Date: 2025-04-17 14:09:06 -07:00 (committed by GitHub)
Parent: 061cd0c445
Commit: 1cf178e37d
16 changed files with 53 additions and 22 deletions

View File

@@ -35,7 +35,7 @@ pre_cmd = []
rerun = false
rerun_delay = 500
send_interrupt = true
- stop_on_error = false
+ stop_on_error = true
[color]
app = ""

View File

@@ -13,11 +13,12 @@
class="textarea textarea-primary w-full font-mono text-lg"
:class="{ 'textarea-error': error }"
></textarea>
<div class="label mt-2 overflow-y-auto">
<span v-if="!isReady">{{ $t("analytics.creating_table") }}</span>
<div class="mt-2">
<span class="text-error" v-if="state === 'error'">{{ error }}</span>
<span v-else-if="state === 'initializing'">{{ $t("analytics.creating_table") }}</span>
<span v-else-if="state === 'downloading'">{{ $t("analytics.downloading") }}</span>
<span v-else-if="evaluating">{{ $t("analytics.evaluating_query") }}</span>
<span class="label-text-alt text-error" v-else-if="error">{{ error }}</span>
<span class="label-text-alt" v-else>
<span v-else>
{{ $t("analytics.total_records", { count: results.numRows.toLocaleString() }) }}
<template v-if="results.numRows > pageLimit">{{
$t("analytics.showing_first", { count: page.numRows.toLocaleString() })
@@ -26,7 +27,7 @@
</div>
</label>
</section>
<SQLTable :table="page" :loading="evaluating || !isReady" />
<SQLTable :table="page" :loading="evaluating || state !== 'ready'" />
</div>
</aside>
</template>
@@ -34,12 +35,14 @@
<script setup lang="ts">
import { Container } from "@/models/Container";
import { type Table } from "@apache-arrow/esnext-esm";
const { container } = defineProps<{ container: Container }>();
const query = ref("SELECT * FROM logs LIMIT 100");
const error = ref<string | null>(null);
const debouncedQuery = debouncedRef(query, 500);
const evaluating = ref(false);
const pageLimit = 1000;
+ const state = ref<"downloading" | "error" | "ready" | "initializing">("downloading");
const url = withBase(
`/api/hosts/${container.host}/containers/${container.id}/logs?stdout=1&stderr=1&everything&jsonOnly`,
@@ -55,27 +58,29 @@ if (!response.ok) {
const { db, conn } = await useDuckDB();
const empty = await conn.query<Record<string, any>>(`SELECT 1 LIMIT 0`);
- const { isReady } = useAsyncState(
- async () => {
+ onMounted(async () => {
+ try {
+ state.value = "downloading";
await db.registerFileBuffer("logs.json", new Uint8Array(await response.arrayBuffer()));
state.value = "initializing";
await conn.query(
`CREATE TABLE logs AS SELECT unnest(m) FROM read_json('logs.json', ignore_errors = true, format = 'newline_delimited')`,
);
- },
- undefined,
- {
- onError: (e) => {
- console.error(e);
- if (e instanceof Error) {
- error.value = e.message;
- }
- },
- },
- );
+ state.value = "ready";
+ } catch (e) {
+ console.error(e);
+ state.value = "error";
+ if (e instanceof Error) {
+ error.value = e.message;
+ }
+ }
+ });
const results = computedAsync(
async () => {
- if (isReady.value) {
+ if (state.value === "ready") {
return await conn.query<Record<string, any>>(debouncedQuery.value);
} else {
return empty;
@@ -85,6 +90,7 @@ const results = computedAsync(
{
onError: (e) => {
console.error(e);
state.value = "error";
if (e instanceof Error) {
error.value = e.message;
}
@@ -93,7 +99,10 @@ const results = computedAsync(
},
);
- whenever(evaluating, () => (error.value = null));
+ whenever(evaluating, () => {
+ error.value = null;
+ state.value = "ready";
+ });
const page = computed(() =>
results.value.numRows > pageLimit ? results.value.slice(0, pageLimit) : results.value,
) as unknown as ComputedRef<Table<Record<string, any>>>;
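
Note: the component change above replaces useAsyncState with an explicit load state ("downloading" → "initializing" → "ready", or "error") so the UI can report each phase. The sketch below restates that pipeline outside Vue; it is illustrative only, assuming `db` and `conn` expose the same `registerFileBuffer`/`query` calls the component obtains from `useDuckDB()`, and `logsUrl`/`onState` are stand-in names, not Dozzle identifiers.

```ts
// Minimal sketch only — not Dozzle's code. Assumes db/conn behave like the
// DuckDB-WASM handles the component gets from its useDuckDB() composable.
type LoadState = "downloading" | "initializing" | "ready" | "error";

interface DuckDBLike {
  registerFileBuffer(name: string, data: Uint8Array): Promise<void>;
}
interface ConnectionLike {
  query(sql: string): Promise<unknown>;
}

async function loadLogsIntoDuckDB(
  db: DuckDBLike,
  conn: ConnectionLike,
  logsUrl: string,
  onState: (state: LoadState) => void,
): Promise<void> {
  try {
    // Phase 1: download the NDJSON log dump. If the server gzip-encodes the
    // response (see the Go handler change later in this commit), fetch()
    // decompresses it transparently before arrayBuffer() is read.
    onState("downloading");
    const response = await fetch(logsUrl);
    if (!response.ok) throw new Error(`failed to fetch logs: ${response.status}`);
    await db.registerFileBuffer("logs.json", new Uint8Array(await response.arrayBuffer()));

    // Phase 2: materialize the newline-delimited JSON into a queryable table.
    onState("initializing");
    await conn.query(
      `CREATE TABLE logs AS SELECT unnest(m) FROM read_json('logs.json', ignore_errors = true, format = 'newline_delimited')`,
    );

    // Phase 3: SQL against `logs` is now allowed; a failure in any phase
    // falls through to "error".
    onState("ready");
  } catch (e) {
    console.error(e);
    onState("error");
  }
}
```

In the component, the `onState` callback corresponds to writing `state.value`, and the `computedAsync` query only runs once the state reaches "ready".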

View File

@@ -1,6 +1,7 @@
package web
import (
"compress/gzip"
"context"
"errors"
"regexp"
@@ -111,7 +112,15 @@ func (h *handler) fetchLogsBetweenDates(w http.ResponseWriter, r *http.Request)
lastSeenId = uint32(num)
}
- encoder := json.NewEncoder(w)
+ var encoder *json.Encoder
+ if strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
+ w.Header().Set("Content-Encoding", "gzip")
+ writer := gzip.NewWriter(w)
+ defer writer.Close()
+ encoder = json.NewEncoder(writer)
+ } else {
+ encoder = json.NewEncoder(w)
+ }
for {
if buffer.Len() > minimum {
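
The handler change above negotiates compression from the request's Accept-Encoding header: when the client advertises gzip, the response is wrapped in a gzip writer (closed via defer so the final frame is flushed) and the JSON encoder writes into it; otherwise it encodes straight to the ResponseWriter. Below is a standalone sketch of the same negotiation pattern in Node/TypeScript — the route, payload, and port are illustrative, not Dozzle's API.

```ts
// Sketch of Accept-Encoding negotiation for an NDJSON endpoint — illustrative only.
import { createServer } from "node:http";
import { createGzip } from "node:zlib";

const server = createServer((req, res) => {
  const rows = [{ ts: "2025-04-17T21:09:06Z", m: "container started" }];

  // Compress only when the client says it can handle gzip.
  const acceptsGzip = String(req.headers["accept-encoding"] ?? "").includes("gzip");
  res.setHeader("Content-Type", "application/x-ndjson");

  if (acceptsGzip) {
    res.setHeader("Content-Encoding", "gzip");
    const gzip = createGzip();
    gzip.pipe(res); // compressed bytes stream to the socket
    for (const row of rows) gzip.write(JSON.stringify(row) + "\n");
    gzip.end(); // flushes the trailing gzip frame — the analogue of `defer writer.Close()`
  } else {
    for (const row of rows) res.write(JSON.stringify(row) + "\n");
    res.end();
  }
});

server.listen(8080);
```

On the browser side nothing needs to change: fetch() advertises gzip support automatically and hands the component the already-decompressed body, so the existing arrayBuffer() call in the analytics drawer keeps working.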

View File

@@ -116,6 +116,7 @@ toasts:
message: Log kopieret til clipboard
analytics:
creating_table: Opretter midlertidig tabel...
+ downloading: Henter containerlogfiler...
evaluating_query: Evaluerer forespørgsel...
total_records: I alt {count} poster.
showing_first: Viser de første {count}.

View File

@@ -116,6 +116,7 @@ toasts:
message: Log in Zwischenablage kopiert
analytics:
creating_table: Temporäre Tabelle wird erstellt...
+ downloading: Container-Logs werden abgerufen...
evaluating_query: Abfrage wird ausgewertet...
total_records: Insgesamt {count} Datensätze.
showing_first: Zeige die ersten {count}.

View File

@@ -121,6 +121,7 @@ toasts:
message: Log copied to clipboard
analytics:
creating_table: Creating temporary table...
+ downloading: Fetching containers logs...
evaluating_query: Evaluating query...
total_records: Total {count} records.
showing_first: Showing first {count}.

View File

@@ -116,6 +116,7 @@ toasts:
message: Registro copiado al portapapeles
analytics:
creating_table: Creando tabla temporal...
+ downloading: Obteniendo registros de contenedores...
evaluating_query: Evaluando consulta...
total_records: Total {count} registros.
showing_first: Mostrando los primeros {count}.

View File

@@ -116,6 +116,7 @@ toasts:
message: Journal copié dans le presse-papiers
analytics:
creating_table: Création d'une table temporaire...
+ downloading: Récupération des journaux des conteneurs...
evaluating_query: Évaluation de la requête...
total_records: Total de {count} enregistrements.
showing_first: Affichage des {count} premiers.

View File

@@ -116,6 +116,7 @@ toasts:
message: Log copiato nella clipboard
analytics:
creating_table: Creazione tabella temporanea...
+ downloading: Recupero dei log dei container...
evaluating_query: Valutazione della query...
total_records: Totale {count} record.
showing_first: Mostrati i primi {count}.

View File

@@ -116,6 +116,7 @@ toasts:
message: Log skopiowany do schowka
analytics:
creating_table: Tworzenie tymczasowej tabeli...
+ downloading: Pobieranie logów kontenerów...
evaluating_query: Przetwarzanie zapytania...
total_records: Razem {count} rekordów.
showing_first: Pokazywanie pierwszych {count}.

View File

@@ -112,6 +112,7 @@ settings:
analytics:
creating_table: A criar tabela temporária...
+ downloading: A obter registos dos contentores...
evaluating_query: A avaliar consulta...
total_records: Total de {count} registos.
showing_first: A mostrar os primeiros {count}.

View File

@@ -116,6 +116,7 @@ toasts:
message: Log copiado para a área de transferência
analytics:
creating_table: Criando tabela temporária...
+ downloading: Buscando logs dos containers...
evaluating_query: Avaliando consulta...
total_records: Total de {count} registros.
showing_first: Mostrando os primeiros {count}.

View File

@@ -116,6 +116,7 @@ toasts:
message: Log copied to clipboard
analytics:
creating_table: Создание временной таблицы...
+ downloading: Получение логов контейнеров...
evaluating_query: Выполнение запроса...
total_records: Всего {count} записей.
showing_first: Показаны первые {count}.

View File

@@ -109,6 +109,7 @@ toasts:
message: Günlük panoya kopyalandı
analytics:
creating_table: Geçici tablo oluşturuluyor...
+ downloading: Konteyner günlükleri alınıyor...
evaluating_query: Sorgu değerlendiriliyor...
total_records: Toplam {count} kayıt.
showing_first: İlk {count} gösteriliyor.

View File

@@ -119,6 +119,7 @@ toasts:
message: 日誌已複製到剪貼簿
analytics:
creating_table: 正在建立臨時資料表...
+ downloading: 正在取得容器日誌...
evaluating_query: 正在評估查詢...
total_records: 共計 {count} 筆記錄。
showing_first: 顯示前 {count} 筆。

View File

@@ -116,6 +116,7 @@ toasts:
message: 日志已复制到剪贴板
analytics:
creating_table: 正在创建临时表...
+ downloading: 正在获取容器日志...
evaluating_query: 正在评估查询...
total_records: 总共 {count} 条记录。
showing_first: 显示前 {count} 条。