⚡️ Decrease row space drastically
This commit is contained in:
parent 5b71f9b7ab
commit e231e4d6ae
@@ -5,18 +5,18 @@ meta {
 }

 get {
-  url: http://localhost:8090/api/metrics/POD-2/WORKLOAD?bucket-unit=TOTAL
+  url: http://localhost:4000/api/metrics/POD-2/WORKLOAD?bucket-unit=DAILY
   body: none
   auth: basic
 }

 params:query {
-  bucket-unit: TOTAL
+  bucket-unit: DAILY
 }

 auth:basic {
-  username: test
-  password: test
+  username: kubooboo
+  password: password
 }

 settings {
@@ -5,7 +5,7 @@ meta {
 }

 post {
-  url: http://localhost:4000/api/metrics/POD-2/WORKLOAD
+  url: http://localhost:4000/api/metrics/analytics-backend-deployment-8695d9f87f-v8n7r/WORKLOAD
   body: json
   auth: basic
 }
@@ -23,9 +23,10 @@ body:json {
   {
     "owner": "kubooboo",
     "values": {
-      "x": "20",
-      "y": "23",
-      "z": "56"
+      "RELATIVE_CPU": "20",
+      "RELATIVE_MEMORY": "23",
+      "RELATIVE_DISK_USAGE": "56",
+      "TOTAL_DISK_SPACE": "73483209348"
     }
   }
 }
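For orientation: a hedged sketch of how a request body like the one above could reach the entity that changes below. The controller and service wiring are not part of this diff, so the handler shape, the constructor arguments, and the timestamp values here are all made-up placeholders.

// Hypothetical wiring sketch - not the project's actual controller code.
// Each entry of the request's "values" map becomes one Bucket.add(key, value) call.
Bucket bucket = new Bucket("analytics-backend-deployment-8695d9f87f-v8n7r",
        "2024-06-01", "2024-06-01T00:00:00Z", 1717200000L, "kubooboo", BucketUnit.DAILY);
bucket.add("RELATIVE_CPU", 20);
bucket.add("RELATIVE_MEMORY", 23);
bucket.add("RELATIVE_DISK_USAGE", 56);
bucket.add("TOTAL_DISK_SPACE", 73483209348d);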
@@ -1,63 +1,50 @@
 package dev.dinauer.metrics.service.model;

-import java.time.ZonedDateTime;
 import java.util.Map;
-import java.util.UUID;
+import java.util.stream.Collectors;

 import jakarta.persistence.*;

-import org.hibernate.annotations.CreationTimestamp;
-import org.hibernate.annotations.UpdateTimestamp;
-
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;

+import io.quarkus.hibernate.orm.panache.PanacheEntity;
+
 @Entity
 @Table(name = "bucket")
-public class Bucket
+public class Bucket extends PanacheEntity
 {
     private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

-    @Id
-    private String id;
-
     @Column(nullable = false)
     private String resource;

     @Column(name = "bucket_name", nullable = false)
     private String name;

-    @Column(nullable = false)
+    @Column(name = "formatted_timestamp", nullable = false)
     private String timestamp;

     @Column(name = "unix_timestamp", nullable = false)
     private long unixTimestamp;

-    @Enumerated(EnumType.STRING)
+    @Enumerated(EnumType.ORDINAL)
     @Column(name = "bucket_unit", nullable = false)
     private BucketUnit bucketUnit;

     @Column(columnDefinition = "text", nullable = false)
     private String metrics;

+    @Column(name = "bucket_owner")
     private String owner;

-    @CreationTimestamp
-    @Column(name = "created_at", updatable = false)
-    private ZonedDateTime createdAt;
-
-    @UpdateTimestamp
-    @Column(name = "updated_at")
-    private ZonedDateTime updatedAt;
-
     public Bucket()
     {
     }

     public Bucket(String resource, String name, String timestamp, long unixTimestamp, String owner, BucketUnit bucketUnit)
     {
-        this.id = UUID.randomUUID().toString();
         this.resource = resource;
         this.name = name;
         this.timestamp = timestamp;
@@ -67,11 +54,6 @@ public class Bucket
         this.metrics = "{}";
     }

-    public String getId()
-    {
-        return id;
-    }
-
     public void add(String key, double value)
     {
         Map<String, Metric> metrics = getMetrics();
@@ -82,13 +64,12 @@ public class Bucket
         }
         else
         {
-            Metric newMetric = new Metric();
-            newMetric.add(value);
-            metrics.put(key, newMetric);
+            metrics.put(key, Metric.init(value));
         }
         try
         {
-            this.metrics = OBJECT_MAPPER.writeValueAsString(metrics);
+            Map<String, String> serializedMetrics = metrics.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> Metric.serialize(entry.getValue())));
+            this.metrics = OBJECT_MAPPER.writeValueAsString(serializedMetrics);
         }
         catch (JsonProcessingException e)
         {
@@ -129,9 +110,10 @@ public class Bucket
     {
         try
         {
-            return OBJECT_MAPPER.readValue(metrics, new TypeReference<Map<String, Metric>>()
+            Map<String, String> raw = OBJECT_MAPPER.readValue(metrics, new TypeReference<Map<String, String>>()
             {
             });
+            return raw.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> Metric.deserialize(entry.getValue())));
         }
         catch (JsonProcessingException e)
         {
@@ -144,28 +126,6 @@ public class Bucket
         return unixTimestamp;
     }

-    public ZonedDateTime getCreatedAt()
-    {
-        return createdAt;
-    }
-
-    public Bucket setCreatedAt(ZonedDateTime createdAt)
-    {
-        this.createdAt = createdAt;
-        return this;
-    }
-
-    public ZonedDateTime getUpdatedAt()
-    {
-        return updatedAt;
-    }
-
-    public Bucket setUpdatedAt(ZonedDateTime updatedAt)
-    {
-        this.updatedAt = updatedAt;
-        return this;
-    }
-
     public String getOwner()
     {
         return owner;
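The commit title is about this change: Bucket no longer stores each Metric as a nested JSON object in its metrics column but as a compact "count,sum,average,min,max" string inside the JSON map. A minimal sketch of the size difference, assuming Jackson serialization and a hypothetical RELATIVE_CPU entry (the old object's exact field order may have differed):

import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical demo, not part of the commit: compares the old nested-object
// payload of the "metrics" text column with the new flattened CSV payload.
public class RowSpaceDemo
{
    public static void main(String[] args) throws Exception
    {
        ObjectMapper mapper = new ObjectMapper();

        // Old layout: Map<String, Metric> written as nested JSON objects.
        String before = mapper.writeValueAsString(Map.of("RELATIVE_CPU",
                Map.of("count", 3, "sum", 99.0, "average", 33.0, "min", 20.0, "max", 56.0)));

        // New layout: each Metric is flattened to "count,sum,average,min,max" first.
        String after = mapper.writeValueAsString(Map.of("RELATIVE_CPU", "3,99.0,33.0,20.0,56.0"));

        // The flattened form is roughly half the size for this single entry.
        System.out.println(before.length() + " -> " + after.length());
    }
}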
@@ -1,37 +1,41 @@
 package dev.dinauer.metrics.service.model;

-import java.util.Objects;
-
 public class Metric
 {
     private int count;
     private double sum;
     private double average;
-    private Double min;
-    private Double max;
+    private double min;
+    private double max;

-    public Metric()
+    public static Metric init(double value)
     {
-        this.count = 0;
-        this.sum = 0;
-        this.average = 0.0F;
-        this.min = null;
-        this.max = null;
+        return new Metric(0, 0, 0.0F, value, value).add(value);
     }

-    public void add(double value)
+    public Metric(int count, double sum, double average, double min, double max)
+    {
+        this.count = count;
+        this.sum = sum;
+        this.average = average;
+        this.min = min;
+        this.max = max;
+    }
+
+    public Metric add(double value)
     {
         count = count + 1;
         sum = sum + value;
         average = calculateAverage(sum, count);
-        if (Objects.isNull(min) || value < min)
+        if (value < min)
         {
             min = value;
         }
-        if (Objects.isNull(max) || value > max)
+        if (value > max)
         {
             max = value;
         }
+        return this;
     }

     private double calculateAverage(double sum, int count)
@@ -67,4 +71,27 @@ public class Metric
     {
         return max;
     }
+
+    public static Metric deserialize(String input)
+    {
+        if (input != null && !input.isBlank())
+        {
+            String[] sections = input.split(",");
+            if (sections.length == 5)
+            {
+                int count = Integer.parseInt(sections[0]);
+                double sum = Double.parseDouble(sections[1]);
+                double average = Double.parseDouble(sections[2]);
+                double min = Double.parseDouble(sections[3]);
+                double max = Double.parseDouble(sections[4]);
+                return new Metric(count, sum, average, min, max);
+            }
+        }
+        throw new IllegalArgumentException();
+    }
+
+    public static String serialize(Metric input)
+    {
+        return String.format("%s,%s,%s,%s,%s", input.getCount(), input.getSum(), input.getAverage(), input.getMin(), input.getMax());
+    }
 }
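A short usage sketch of the new encoding round-trip, assuming the Metric class above is on the classpath (the demo class and sample numbers are illustration only, not part of the commit):

// Hypothetical demo, not part of the commit.
public class MetricRoundTrip
{
    public static void main(String[] args)
    {
        // init(20.0) seeds min and max with the first sample before folding it in,
        // so after two more samples: count=3, sum=99.0, average=33.0, min=20.0, max=56.0.
        Metric metric = Metric.init(20.0).add(23.0).add(56.0);

        // serialize() emits the compact "count,sum,average,min,max" form
        // that Bucket now persists: "3,99.0,33.0,20.0,56.0".
        String compact = Metric.serialize(metric);

        // deserialize() expects exactly five comma-separated sections and
        // throws IllegalArgumentException for anything else.
        Metric restored = Metric.deserialize(compact);
        System.out.println(compact + " -> average " + restored.getAverage());
    }
}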
@@ -19,8 +19,11 @@ quarkus.datasource.db-kind = postgresql
 %prod.quarkus.datasource.password=${DB_PASSWORD}
 %prod.quarkus.datasource.jdbc.url=jdbc:postgresql://${DB_HOST}:${DB_PORT}/${DB_DATABASE}?currentSchema=${DB_SCHEMA}



 quarkus.banner.path=banner.txt

+# Flyway
+quarkus.hibernate-orm.schema-management.strategy=none
+%test,dev.quarkus.flyway.clean-at-start=true
 quarkus.flyway.migrate-at-start=true
@@ -1,14 +1,20 @@
+CREATE SEQUENCE public.bucket_seq
+    INCREMENT BY 50
+    MINVALUE 1
+    MAXVALUE 9223372036854775807
+    START 1
+    CACHE 1
+    NO CYCLE;
+
 CREATE TABLE public.bucket (
-    created_at timestamptz(6) NULL,
-    unix_timestamp int8 NOT NULL,
-    updated_at timestamptz(6) NULL,
-    bucket_name varchar(255) NOT NULL,
-    bucket_unit varchar(255) NOT NULL,
-    id varchar(255) NOT NULL,
+    bucket_unit INT2 NOT NULL,
+    id INT8 NOT NULL,
+    unix_timestamp INT8 NOT NULL,
+    bucket_name VARCHAR(255) NOT NULL,
+    bucket_owner VARCHAR(255) NULL,
+    formatted_timestamp VARCHAR(255) NOT NULL,
     metrics text NOT NULL,
-    "owner" varchar(255) NULL,
-    resource varchar(255) NOT NULL,
-    "timestamp" varchar(255) NOT NULL,
-    CONSTRAINT bucket_bucket_unit_check CHECK (((bucket_unit)::text = ANY ((ARRAY['RAW'::character varying, 'HOURLY'::character varying, 'DAILY'::character varying, 'WEEKLY'::character varying, 'MONTHLY'::character varying, 'YEARLY'::character varying, 'TOTAL'::character varying])::text[]))),
+    resource VARCHAR(255) NOT NULL,
+    CONSTRAINT bucket_bucket_unit_check CHECK (((bucket_unit >= 0) AND (bucket_unit <= 6))),
     CONSTRAINT bucket_pkey PRIMARY KEY (id)
 );
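Switching @Enumerated(EnumType.STRING) to EnumType.ORDINAL is what lets bucket_unit shrink from varchar(255) to INT2. A sketch of the BucketUnit constants follows; the enum source is not in this diff, so the order is inferred from the old CHECK constraint, and note that reordering constants would silently corrupt already-stored ordinals:

// Inferred from the old varchar CHECK constraint - not the actual source file.
// With EnumType.ORDINAL, Hibernate persists the constant's position, which the
// new CHECK (bucket_unit >= 0 AND bucket_unit <= 6) range guards.
public enum BucketUnit
{
    RAW,     // 0
    HOURLY,  // 1
    DAILY,   // 2
    WEEKLY,  // 3
    MONTHLY, // 4
    YEARLY,  // 5
    TOTAL    // 6
}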