added loki, prometheus, promtail, statist

This commit is contained in:
2021-03-14 20:57:44 +01:00
parent 89b7fd0747
commit 1d8d30a8ab
18 changed files with 509 additions and 0 deletions

7
loki/Dockerfile Normal file
View File

@@ -0,0 +1,7 @@
FROM centos:7
# unzip is needed only to unpack the Loki release archive.
RUN yum update -y && yum install -y unzip && yum clean all
# Download the pinned Loki v2.1.0 release and install it under /loki.
# (chmod by absolute path instead of `RUN ... cd ...` — hadolint DL3003.)
RUN curl -O -L "https://github.com/grafana/loki/releases/download/v2.1.0/loki-linux-amd64.zip" \
    && unzip loki-linux-amd64.zip -d /loki \
    && rm loki-linux-amd64.zip \
    && chmod a+x /loki/loki-linux-amd64
COPY config.yaml /loki
COPY docker-entrypoint.sh /
RUN chmod +x /docker-entrypoint.sh
# Loki HTTP API (documentation only; does not publish the port).
EXPOSE 3100
ENTRYPOINT ["/docker-entrypoint.sh"]

37
loki/config.yaml Normal file
View File

@@ -0,0 +1,37 @@
# Loki v2.1 single-process configuration (local boltdb + filesystem storage).
# NOTE(review): the committed file had all keys flattened to column 0, which is
# not valid YAML (duplicate `directory` keys); structure restored to match the
# upstream loki-local-config example.
auth_enabled: false

server:
  http_listen_port: 3100

ingester:
  lifecycler:
    address: 127.0.0.1
    ring:
      kvstore:
        store: inmemory
      replication_factor: 1
    final_sleep: 0s
  # Flush chunks that receive no traffic for 5m; keep flushed chunks in
  # memory for 30s so late queries can still hit them.
  chunk_idle_period: 5m
  chunk_retain_period: 30s

schema_config:
  configs:
    - from: 2020-05-15
      store: boltdb
      object_store: filesystem
      schema: v11
      index:
        prefix: index_
        period: 168h

storage_config:
  boltdb:
    directory: /tmp/loki/index
  filesystem:
    directory: /tmp/loki/chunks

limits_config:
  enforce_metric_name: false
  # Drop samples older than 7 days.
  reject_old_samples: true
  reject_old_samples_max_age: 168h

32
loki/docker-entrypoint.sh Normal file
View File

@@ -0,0 +1,32 @@
#!/bin/bash
# Entrypoint for the Loki container.
# Optionally creates a dedicated system user (LOKI_DAEMON_USER, with optional
# LOKI_DAEMON_USER_UID / LOKI_DAEMON_USER_GID), chowns the data directories,
# then starts Loki as that user (or as the current user when unset).
if [[ -n "${LOKI_DAEMON_USER}" && "${LOKI_DAEMON_USER}" != "root" ]]; then
    # -r: system account; /bin/false: no interactive login.
    useradd -r -s /bin/false "${LOKI_DAEMON_USER}"
    if [[ -n "${LOKI_DAEMON_USER_UID}" ]]; then
        usermod -u "${LOKI_DAEMON_USER_UID}" "${LOKI_DAEMON_USER}"
    fi
    if [[ -n "${LOKI_DAEMON_USER_GID}" ]]; then
        groupmod -g "${LOKI_DAEMON_USER_GID}" "${LOKI_DAEMON_USER}"
    fi
fi
# Ensure the data mount point exists (mkdir -p is a no-op if it does).
mkdir -p /lokidata
echo "Chowning Data"
if [[ -n "${LOKI_DAEMON_USER}" ]]; then
    chown -R "$(id -u "${LOKI_DAEMON_USER}")":"$(id -g "${LOKI_DAEMON_USER}")" /loki
    chown -R "$(id -u "${LOKI_DAEMON_USER}")":"$(id -g "${LOKI_DAEMON_USER}")" /lokidata
else
    chown -R "$(id -u)":"$(id -g)" /loki
    chown -R "$(id -u)":"$(id -g)" /lokidata
fi
# exec so Loki replaces this shell as PID 1 and receives docker stop signals.
if [[ -n "${LOKI_DAEMON_USER}" ]]; then
    exec runuser -u "${LOKI_DAEMON_USER}" -- /loki/loki-linux-amd64 -config.file=/loki/config.yaml
else
    exec /loki/loki-linux-amd64 -config.file=/loki/config.yaml
fi

8
prometheus/Dockerfile Normal file
View File

@@ -0,0 +1,8 @@
FROM centos:7
RUN yum update -y && yum clean all
# Download the pinned Prometheus v2.25.0 release and install it under /prometheus.
# (chmod by absolute path instead of `RUN ... cd ...` — hadolint DL3003.)
RUN curl -O -L "https://github.com/prometheus/prometheus/releases/download/v2.25.0/prometheus-2.25.0.linux-amd64.tar.gz" \
    && tar -xf prometheus-2.25.0.linux-amd64.tar.gz -C / \
    && mv /prometheus-2.25.0.linux-amd64 /prometheus \
    && rm prometheus-2.25.0.linux-amd64.tar.gz \
    && chmod a+x /prometheus/prometheus
COPY config.yaml /prometheus
COPY docker-entrypoint.sh /
RUN chmod +x /docker-entrypoint.sh
# Prometheus web UI / API (documentation only; does not publish the port).
EXPOSE 9090
# Entrypoint handles user setup/chown, then execs CMD.
ENTRYPOINT ["/docker-entrypoint.sh"]
CMD ["/prometheus/prometheus","--config.file=/prometheus/config.yaml"]

19
prometheus/config.yaml Normal file
View File

@@ -0,0 +1,19 @@
# Prometheus scrape configuration (matches the upstream "getting started" example).
# NOTE(review): the committed file had all keys flattened to column 0 (invalid
# structure); indentation restored.
global:
  scrape_interval: 15s # By default, scrape targets every 15 seconds.
  # Attach these labels to any time series or alerts when communicating with
  # external systems (federation, remote storage, Alertmanager).
  external_labels:
    monitor: 'codelab-monitor'

# A scrape configuration containing exactly one endpoint to scrape:
# Here it's Prometheus itself.
scrape_configs:
  # The job name is added as a label `job=<job_name>` to any timeseries scraped from this config.
  - job_name: 'prometheus'
    # Override the global default and scrape targets from this job every 5 seconds.
    scrape_interval: 5s
    static_configs:
      - targets: ['localhost:9090']

View File

@@ -0,0 +1,26 @@
#!/bin/bash
# Entrypoint for the Prometheus container.
# Optionally creates a dedicated system user (PROMETHEUS_DAEMON_USER, with
# optional PROMETHEUS_DAEMON_USER_UID / PROMETHEUS_DAEMON_USER_GID), chowns
# /prometheus, then execs the container command (CMD) as that user.
if [[ -n "${PROMETHEUS_DAEMON_USER}" && "${PROMETHEUS_DAEMON_USER}" != "root" ]]; then
    # -r: system account; /bin/false: no interactive login.
    useradd -r -s /bin/false "${PROMETHEUS_DAEMON_USER}"
    if [[ -n "${PROMETHEUS_DAEMON_USER_UID}" ]]; then
        usermod -u "${PROMETHEUS_DAEMON_USER_UID}" "${PROMETHEUS_DAEMON_USER}"
    fi
    if [[ -n "${PROMETHEUS_DAEMON_USER_GID}" ]]; then
        groupmod -g "${PROMETHEUS_DAEMON_USER_GID}" "${PROMETHEUS_DAEMON_USER}"
    fi
fi
echo "Chowning Data"
if [[ -n "${PROMETHEUS_DAEMON_USER}" ]]; then
    chown -R "$(id -u "${PROMETHEUS_DAEMON_USER}")":"$(id -g "${PROMETHEUS_DAEMON_USER}")" /prometheus
else
    chown -R "$(id -u)":"$(id -g)" /prometheus
fi
# "$@" (quoted) preserves each CMD argument as-is; the original unquoted $@
# would word-split arguments containing spaces. exec makes the command PID 1
# so it receives docker stop signals.
if [[ -n "${PROMETHEUS_DAEMON_USER}" ]]; then
    exec runuser -u "${PROMETHEUS_DAEMON_USER}" -- "$@"
else
    exec "$@"
fi

7
promtail/Dockerfile Normal file
View File

@@ -0,0 +1,7 @@
FROM centos:7
# unzip is needed only to unpack the Promtail release archive.
RUN yum update -y && yum install -y unzip && yum clean all
# Download the pinned Promtail v2.1.0 release and install it under /promtail.
# (chmod by absolute path instead of `RUN ... cd ...` — hadolint DL3003.)
RUN curl -O -L "https://github.com/grafana/loki/releases/download/v2.1.0/promtail-linux-amd64.zip" \
    && unzip promtail-linux-amd64.zip -d /promtail \
    && rm promtail-linux-amd64.zip \
    && chmod a+x /promtail/promtail-linux-amd64
COPY config.yaml /promtail
COPY docker-entrypoint.sh /
RUN chmod +x /docker-entrypoint.sh
# Promtail HTTP server (documentation only; does not publish the port).
EXPOSE 9080
ENTRYPOINT ["/docker-entrypoint.sh"]

18
promtail/config.yaml Normal file
View File

@@ -0,0 +1,18 @@
# Promtail configuration: tail /var/log/*log and push to the local Loki.
# NOTE(review): the committed file had all keys flattened to column 0 (invalid
# structure); indentation restored to match the upstream promtail example.
server:
  http_listen_port: 9080
  grpc_listen_port: 0

# Where promtail records how far it has read into each file.
positions:
  filename: /tmp/positions.yaml

clients:
  - url: http://localhost:3100/loki/api/v1/push

scrape_configs:
  - job_name: system
    static_configs:
      - targets:
          - localhost
        labels:
          job: varlogs
          __path__: /var/log/*log

View File

@@ -0,0 +1,26 @@
#!/bin/bash
# Entrypoint for the Promtail container.
# Optionally creates a dedicated system user (PROMTAIL_DAEMON_USER, with
# optional PROMTAIL_DAEMON_USER_UID / PROMTAIL_DAEMON_USER_GID), chowns
# /promtail, then starts Promtail as that user (or the current user if unset).
if [[ -n "${PROMTAIL_DAEMON_USER}" && "${PROMTAIL_DAEMON_USER}" != "root" ]]; then
    # -r: system account; /bin/false: no interactive login.
    useradd -r -s /bin/false "${PROMTAIL_DAEMON_USER}"
    if [[ -n "${PROMTAIL_DAEMON_USER_UID}" ]]; then
        usermod -u "${PROMTAIL_DAEMON_USER_UID}" "${PROMTAIL_DAEMON_USER}"
    fi
    if [[ -n "${PROMTAIL_DAEMON_USER_GID}" ]]; then
        groupmod -g "${PROMTAIL_DAEMON_USER_GID}" "${PROMTAIL_DAEMON_USER}"
    fi
fi
echo "Chowning Data"
if [[ -n "${PROMTAIL_DAEMON_USER}" ]]; then
    chown -R "$(id -u "${PROMTAIL_DAEMON_USER}")":"$(id -g "${PROMTAIL_DAEMON_USER}")" /promtail
else
    chown -R "$(id -u)":"$(id -g)" /promtail
fi
# exec so Promtail replaces this shell as PID 1 and receives stop signals.
if [[ -n "${PROMTAIL_DAEMON_USER}" ]]; then
    exec runuser -u "${PROMTAIL_DAEMON_USER}" -- /promtail/promtail-linux-amd64 -config.file=/promtail/config.yaml
else
    exec /promtail/promtail-linux-amd64 -config.file=/promtail/config.yaml
fi

8
statist/Dockerfile Normal file
View File

@@ -0,0 +1,8 @@
FROM centos:7
RUN yum update -y && yum clean all
# SECURITY NOTE(review): piping a remote script straight into bash executes
# unverified code at build time. Consider downloading the script, verifying
# its checksum, and running it in a separate step.
RUN curl -sL https://rpm.nodesource.com/setup_14.x | bash - && yum install -y nodejs
COPY statist /statist
COPY docker-entrypoint.sh /
# Make the entrypoint executable regardless of build-context permissions
# (the sibling loki/prometheus/promtail images all do this; it was missing here).
RUN chmod +x /docker-entrypoint.sh
WORKDIR /statist
# Entrypoint handles user setup/chown, then execs CMD.
ENTRYPOINT ["/docker-entrypoint.sh"]
CMD ["node","index.js"]

View File

@@ -0,0 +1,26 @@
#!/bin/bash
# Entrypoint for the statist container.
# Optionally creates a dedicated system user (NODE_DAEMON_USER, with optional
# NODE_DAEMON_USER_UID / NODE_DAEMON_USER_GID), chowns /statist, then execs
# the container command (CMD) as that user (or the current user if unset).
if [[ -n "${NODE_DAEMON_USER}" && "${NODE_DAEMON_USER}" != "root" ]]; then
    # -r: system account; /bin/false: no interactive login.
    useradd -r -s /bin/false "${NODE_DAEMON_USER}"
    if [[ -n "${NODE_DAEMON_USER_UID}" ]]; then
        usermod -u "${NODE_DAEMON_USER_UID}" "${NODE_DAEMON_USER}"
    fi
    if [[ -n "${NODE_DAEMON_USER_GID}" ]]; then
        groupmod -g "${NODE_DAEMON_USER_GID}" "${NODE_DAEMON_USER}"
    fi
fi
echo "Chowning Data"
if [[ -n "${NODE_DAEMON_USER}" ]]; then
    chown -R "$(id -u "${NODE_DAEMON_USER}")":"$(id -g "${NODE_DAEMON_USER}")" /statist
else
    chown -R "$(id -u)":"$(id -g)" /statist
fi
if [[ -n "${NODE_DAEMON_USER}" ]]; then
    # Allow the unprivileged user to bind port 80 (index.js listens on 80).
    setcap 'cap_net_bind_service=+ep' /usr/bin/node
    # "$@" (quoted) preserves each CMD argument as-is; the original unquoted
    # $@ would word-split arguments containing spaces. exec makes node PID 1
    # so it receives docker stop signals.
    exec runuser -u "${NODE_DAEMON_USER}" -- "$@"
else
    exec "$@"
fi

View File

View File

@@ -0,0 +1,81 @@
module.exports = {
init: function(data){
/* counter sample
data.name = {}
data.name.help = "help";
data.name.type = "counter";
data.name.value = 0;
*/
/* gauge sample
data.name = {}
data.name.help = "help";
data.name.type = "gauge";
data.name.value= 0;
*/
/* histogram sample
data.name = {}
data.name.help = "help";
data.name.type = "histogram";
data.name.bucket = {};
data.name.sum = 0;
data.name.count = 0;
*/
/* summary sample
data.name = {}
data.name.help = "help";
data.name.type = "summary";
data.name.quantile = {};
data.name.sum = 0;
data.name.count = 0;
*/
/* global label sample
data.__labels = {}
data.__labels.test1="aa";
data.__labels.test2="bb";
*/
/* per metric label sample
data.name.labels = {}
data.name.labels.test3="aa";
data.name.labels.test4="bb";
*/
return data;
},
extractdatafromline: function(data,line){
/* counter sample
data.name.value++;
*/
/* gauge sample
data.name.value= 10;
*/
/* histogram sample
data.name.bucket["0.1"] = 3;
data.name.bucket["0.5"] = 5;
data.name.bucket["+Inf"] = 5;
data.name.sum = 1.3;
data.name.count = 5;
*/
/* summary sample
data.name.quantile["0.95"] = 3;
data.name.sum = 1.3;
data.name.count = 5;
*/
return data;
}
};

View File

101
statist/statist/fromlog.js Normal file
View File

@@ -0,0 +1,101 @@
// Tails "fromlog.file", feeding each complete line to the user-supplied data
// definition, and persists the byte offset already read in "fromlog.counter".
var datadefinition = require("./fromlog.datadefinition" );
// Metrics object owned by the datadefinition init/extract hooks.
var data = {};
var fs = require('fs'),
// Number of bytes requested per fs.read() call.
bite_size = 4096,
// Interval (ms) between offset checkpoints; also the retry delay after I/O errors.
save_interval_or_retry = 10000,
// Byte offset into fromlog.file that has already been processed.
filereadbytes,
// Trailing partial line carried over between reads.
tempdata = "",
// File descriptor of the currently open fromlog.file.
fd;
// Restore the persisted read offset (if any), start a timer that checkpoints
// the offset every save_interval_or_retry ms, then begin the read loop.
function init(){
fs.readFile('fromlog.counter',function(err, content){
if (err){
// No counter file yet — start from the beginning of the log.
filereadbytes = 0;
}else{
filereadbytes = parseInt(content);
if (isNaN(filereadbytes)){
// Corrupt/empty counter file — fall back to offset 0.
filereadbytes = 0;
}
}
var oldfilereadbytes = filereadbytes;
setInterval(function(){
// Only rewrite the counter file when the offset actually advanced.
if (filereadbytes != oldfilereadbytes){
// Best-effort write: errors are deliberately ignored (retried next tick).
fs.writeFile("fromlog.counter", filereadbytes.toString(),function(err){});
oldfilereadbytes = filereadbytes;
}
}, save_interval_or_retry);
openandreadsome();
});
}
// Open fromlog.file and stat it; on any failure, close (if open) and retry
// after save_interval_or_retry ms. On success hand the stats to readsome().
function openandreadsome(){
fs.open('fromlog.file', 'r',function(err, file){
if (err){
// File missing/unreadable — try again later.
return setTimeout(openandreadsome, save_interval_or_retry);
}
// Stash the descriptor in module state so readsome() can use it.
fd = file;
fs.fstat(file, function(err, stats){
if (err){
return fs.close(file,function(err){
setTimeout(openandreadsome, save_interval_or_retry);
});
}
readsome(stats);
});
});
}
// Read the file in bite_size chunks from the saved offset until we reach the
// size captured in `stats`, then close and poll again later. Recursion is
// sequential: the next read is issued only from the previous read's callback.
function readsome(stats) {
if(stats.size == filereadbytes) {
// Caught up with the file (as of this stat) — close and re-poll later.
return fs.close(fd,function(err){
setTimeout(openandreadsome, save_interval_or_retry);
});
}else {
if (stats.size < filereadbytes){
// File shrank (rotated/truncated) — restart from the beginning.
filereadbytes =0;
}
fs.read(fd, Buffer.alloc(bite_size), 0, bite_size, filereadbytes, function(err, actualbytesread, buffer){
if (err){
return fs.close(fd,function(err){
setTimeout(openandreadsome, save_interval_or_retry);
});
}
// processsome advances filereadbytes before the recursive call.
processsome(actualbytesread,buffer);
readsome(stats);
});
}
}
// Decode a freshly-read chunk, hand every complete line (CRLF/CR/LF
// terminated) to the data definition, and keep the trailing partial line
// in module state for the next chunk.
function processsome(actualbytesread, buff) {
    var chunk = buff.toString('utf-8', 0, actualbytesread);
    var pieces = (tempdata + chunk).split(/\r\n|\r|\n/);
    // The final element is an incomplete line (or "") — carry it over.
    tempdata = pieces.pop();
    pieces.forEach(function (line) {
        data = datadefinition.extractdatafromline(data, line);
    });
    filereadbytes += actualbytesread;
}
// Public interface consumed by index.js: init() starts the tailer and lets
// the data definition register its metrics; data() returns the live metrics
// object for each scrape.
module.exports = {
init: function () {
init();
data= datadefinition.init(data);
},
data: function () {
return data;
}
};

62
statist/statist/index.js Normal file
View File

@@ -0,0 +1,62 @@
// Minimal Prometheus exporter: loads a "modifier" module named by the
// MODIFIER env var, lets it collect metrics, and serves them on port 80 in
// the Prometheus text exposition format.
var http = require('http');
var modifier = process.env.MODIFIER;
if (modifier){
// e.g. MODIFIER=fromlog loads ./fromlog.js; the module must expose
// init() and data().
modifier = require("./" + modifier );
modifier.init();
http.createServer(function(req,res){
res.writeHead(200,{"Content-Type": "text/plain"});
var data = modifier.data();
var body = "";
// Labels under data.__labels are attached to every exported series.
// NOTE(review): the built label string always ends with a trailing comma
// inside the braces; the classic Prometheus text parser tolerates this,
// but strict OpenMetrics parsers may not — confirm against the scraper.
var globallabelstring = "";
if (data.__labels){
for (var label in data.__labels) {
if (data.__labels.hasOwnProperty(label)){
globallabelstring+=label + "=\"" + data.__labels[label] + "\",";
}
}
}
// Every other top-level key is one metric definition (help/type/value...).
for (var prop in data) {
if (prop != "__labels" && data.hasOwnProperty(prop)) {
// Per-metric labels, appended after the global ones.
var labelstring = "";
if (data[prop].labels){
for (var label in data[prop].labels) {
if (data[prop].labels.hasOwnProperty(label)){
labelstring+=label + "=\"" + data[prop].labels[label] + "\",";
}
}
}
body+="# HELP " + prop + " " + data[prop].help + "\n";
if (data[prop].type == "counter"){
body+="# TYPE " + prop + " counter" + "\n";
body+=prop + "{" + globallabelstring + labelstring + "}" + " " + data[prop].value.toString() + "\n";
}else if (data[prop].type == "gauge"){
body+="# TYPE " + prop + " gauge" + "\n";
body+=prop + "{" + globallabelstring + labelstring + "}" + " " + data[prop].value.toString() + "\n";
}else if (data[prop].type == "histogram"){
body+="# TYPE " + prop + " histogram" + "\n";
// One _bucket series per upper bound (le), then _count and _sum.
for (var le in data[prop].bucket) {
if (data[prop].bucket.hasOwnProperty(le)){
body+=prop + "_bucket{le=\""+ le + "\"," + globallabelstring + labelstring + "} " + data[prop].bucket[le].toString() + "\n";
}
}
body+=prop + "_count{" + globallabelstring + labelstring + "} " + data[prop].count.toString()+ "\n";
body+=prop + "_sum{" + globallabelstring + labelstring + "} " + data[prop].sum.toString()+ "\n";
}else if (data[prop].type == "summary"){
body+="# TYPE " + prop + " summary" + "\n";
// One series per quantile, then _count and _sum.
for (var q in data[prop].quantile) {
if (data[prop].quantile.hasOwnProperty(q)){
body+=prop + "{quantile=\""+ q + "\"," + globallabelstring + labelstring + "} " + data[prop].quantile[q].toString() + "\n";
}
}
body+=prop + "_count{" + globallabelstring + labelstring + "} " + data[prop].count.toString()+ "\n";
body+=prop + "_sum{" + globallabelstring + labelstring + "} " + data[prop].sum.toString()+ "\n";
}
}
}
res.end(body);
// Port 80 needs root or cap_net_bind_service (granted in the entrypoint).
}).listen(80);
} else {
console.log("No modifier selected");
}

38
statist/statist/package-lock.json generated Normal file
View File

@@ -0,0 +1,38 @@
{
"name": "statist",
"version": "1.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"http": "^0.0.1-security",
"line-reader": "^0.4.0"
}
},
"node_modules/http": {
"version": "0.0.1-security",
"resolved": "https://registry.npmjs.org/http/-/http-0.0.1-security.tgz",
"integrity": "sha512-RnDvP10Ty9FxqOtPZuxtebw1j4L/WiqNMDtuc1YMH1XQm5TgDRaR1G9u8upL6KD1bXHSp9eSXo/ED+8Q7FAr+g=="
},
"node_modules/line-reader": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/line-reader/-/line-reader-0.4.0.tgz",
"integrity": "sha1-F+RIGNoKwzVnW6MAlU+U72cOZv0="
}
},
"dependencies": {
"http": {
"version": "0.0.1-security",
"resolved": "https://registry.npmjs.org/http/-/http-0.0.1-security.tgz",
"integrity": "sha512-RnDvP10Ty9FxqOtPZuxtebw1j4L/WiqNMDtuc1YMH1XQm5TgDRaR1G9u8upL6KD1bXHSp9eSXo/ED+8Q7FAr+g=="
},
"line-reader": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/line-reader/-/line-reader-0.4.0.tgz",
"integrity": "sha1-F+RIGNoKwzVnW6MAlU+U72cOZv0="
}
}
}

View File

@@ -0,0 +1,13 @@
{
"name": "statist",
"version": "1.0.0",
"description": "collect data for prometheus",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "xgiovio",
"license": "ISC",
"dependencies": {
}
}