Skip to content
This repository has been archived by the owner on Nov 11, 2021. It is now read-only.

Commit

Permalink
Merge branch 'develop'
Browse files Browse the repository at this point in the history
  • Loading branch information
ThomasK33 committed Dec 26, 2020
2 parents 88bfbea + 7a40110 commit 870f969
Show file tree
Hide file tree
Showing 61 changed files with 3,427 additions and 813 deletions.
1 change: 1 addition & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
"Deserialization",
"Dota",
"Envs",
"Fluentd",
"GHCR",
"Persistor",
"Scaleway",
Expand Down
2 changes: 1 addition & 1 deletion architecture.drawio

Large diffs are not rendered by default.

163 changes: 2 additions & 161 deletions devops/kubernetes/src/charts/clusterClean.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,6 @@ import { Construct } from "constructs";
import {
KubeNamespace,
KubeSecret,
KubeServiceAccount,
KubeClusterRole,
KubeClusterRoleBinding,
KubeDaemonSet,
KubeStatefulSet,
KubeService,
ObjectMeta,
Expand All @@ -32,6 +28,7 @@ import { Kibana } from "../../imports/kibana.k8s.elastic.co/kibana";
import { Certificate } from "../../imports/cert-manager.io/certificate";
import { ClusterIngressTraefik } from "../cluster/ingressTraefik";
import { Middleware } from "../../imports/traefik.containo.us/middleware";
import { FluentdConstruct } from "./fluentd/fluentd";

const {
DOMAIN = "fortify.gg",
Expand Down Expand Up @@ -433,163 +430,7 @@ export class ClusterSetupClean extends Chart {

// --- Fluentd setup ---

const fluentdSA = new KubeServiceAccount(
this,
"fluentd-service-account",
{
metadata: {
name: "fluentd",
namespace: logsNS.name,
},
}
);

new KubeClusterRole(this, "fluentd-cluster-role", {
metadata: {
name: "fluentd",
namespace: logsNS.name,
},
rules: [
{
apiGroups: [""],
resources: ["pods", "namespaces"],
verbs: ["get", "list", "watch"],
},
],
});

new KubeClusterRoleBinding(this, "fluentd-cluster-role-binding", {
metadata: {
name: "fluentd",
},
roleRef: {
kind: "ClusterRole",
name: "fluentd",
apiGroup: "rbac.authorization.k8s.io",
},
subjects: [
{
kind: "ServiceAccount",
name: "fluentd",
namespace: logsNS.name,
},
],
});

const fluentDsLabels = {
"k8s-app": "fluentd-logging",
version: "v1",
};

new KubeDaemonSet(this, "fluentd-ds", {
metadata: {
name: "fluentd",
namespace: logsNS.name,
labels: fluentDsLabels,
},
spec: {
selector: {
matchLabels: fluentDsLabels,
},
template: {
metadata: {
labels: fluentDsLabels,
},
spec: {
serviceAccount: fluentdSA.name,
serviceAccountName: fluentdSA.name,
tolerations: [
{
key: "node-role.kubernetes.io/master",
effect: "NoSchedule",
},
],
containers: [
{
name: "fluentd",
image:
"fluent/fluentd-kubernetes-daemonset:v1-debian-elasticsearch",
env: [
{
name: "FLUENT_ELASTICSEARCH_HOST",
value: "elasticsearch-es-http",
},
{
name: "FLUENT_ELASTICSEARCH_PORT",
value: "9200",
},
{
name: "FLUENT_ELASTICSEARCH_SCHEME",
value: "http",
},
// Option to configure elasticsearch plugin with self signed certs
{
name: "FLUENT_ELASTICSEARCH_SSL_VERIFY",
value: "true",
},
// Option to configure elasticsearch plugin with tls
{
name:
"FLUENT_ELASTICSEARCH_SSL_VERSION",
value: "TLSv1_2",
},
// X-Pack Authentication
{
name: "FLUENT_ELASTICSEARCH_USER",
value: "elastic",
},
{
name: "FLUENT_ELASTICSEARCH_PASSWORD",
valueFrom: {
secretKeyRef: {
key: "elastic",
name:
"elasticsearch-es-elastic-user",
},
},
},
],
resources: {
limits: {
memory: "200Mi",
},
requests: {
cpu: "100m",
memory: "200Mi",
},
},
volumeMounts: [
{
name: "varlog",
mountPath: "/var/log",
},
{
name: "varlibdockercontainers",
mountPath: "/var/lib/docker/containers",
readOnly: true,
},
],
},
],
terminationGracePeriodSeconds: 30,
volumes: [
{
name: "varlog",
hostPath: {
path: "/var/log",
},
},
{
name: "varlibdockercontainers",
hostPath: {
path: "/var/lib/docker/containers",
},
},
],
},
},
},
});
new FluentdConstruct(this, "fluentd");

// --- InfluxDB setup ---

Expand Down
48 changes: 48 additions & 0 deletions devops/kubernetes/src/charts/fluentd/configs/fluent.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# Main Fluentd pipeline: pull in the input/source configs, then ship every
# remaining record to Elasticsearch. Nearly all output settings can be
# overridden at deploy time via FLUENT_ELASTICSEARCH_* environment variables;
# the "#{ENV[...] || ...}" expressions below fall back to the shown defaults.
@include "#{ENV['FLUENTD_SYSTEMD_CONF'] || 'systemd'}.conf"
@include "#{ENV['FLUENTD_PROMETHEUS_CONF'] || 'prometheus'}.conf"
@include kubernetes.conf
@include conf.d/*.conf

# Catch-all output: any tag not matched earlier is sent to Elasticsearch.
<match **>
@type elasticsearch
@id out_es
@log_level info
include_tag_key true
# Connection target and TLS settings (host/port are expected to be injected
# by the environment; no defaults are given for them here).
host "#{ENV['FLUENT_ELASTICSEARCH_HOST']}"
port "#{ENV['FLUENT_ELASTICSEARCH_PORT']}"
path "#{ENV['FLUENT_ELASTICSEARCH_PATH']}"
scheme "#{ENV['FLUENT_ELASTICSEARCH_SCHEME'] || 'http'}"
ssl_verify "#{ENV['FLUENT_ELASTICSEARCH_SSL_VERIFY'] || 'true'}"
ssl_version "#{ENV['FLUENT_ELASTICSEARCH_SSL_VERSION'] || 'TLSv1_2'}"
# Credentials; 'use_default' appears to be the plugin's sentinel for
# "fall back to the built-in default" — NOTE(review): confirm against the
# fluent-plugin-elasticsearch docs for the deployed image version.
user "#{ENV['FLUENT_ELASTICSEARCH_USER'] || use_default}"
password "#{ENV['FLUENT_ELASTICSEARCH_PASSWORD'] || use_default}"
# Connection resilience: reconnect on errors rather than reloading the
# connection list on a schedule.
reload_connections "#{ENV['FLUENT_ELASTICSEARCH_RELOAD_CONNECTIONS'] || 'false'}"
reconnect_on_error "#{ENV['FLUENT_ELASTICSEARCH_RECONNECT_ON_ERROR'] || 'true'}"
reload_on_failure "#{ENV['FLUENT_ELASTICSEARCH_RELOAD_ON_FAILURE'] || 'true'}"
log_es_400_reason "#{ENV['FLUENT_ELASTICSEARCH_LOG_ES_400_REASON'] || 'false'}"
# Index naming: logstash-style time-partitioned indices by default
# (e.g. logstash-YYYY.MM.DD).
logstash_prefix "#{ENV['FLUENT_ELASTICSEARCH_LOGSTASH_PREFIX'] || 'logstash'}"
logstash_dateformat "#{ENV['FLUENT_ELASTICSEARCH_LOGSTASH_DATEFORMAT'] || '%Y.%m.%d'}"
logstash_format "#{ENV['FLUENT_ELASTICSEARCH_LOGSTASH_FORMAT'] || 'true'}"
index_name "#{ENV['FLUENT_ELASTICSEARCH_LOGSTASH_INDEX_NAME'] || 'logstash'}"
target_index_key "#{ENV['FLUENT_ELASTICSEARCH_TARGET_INDEX_KEY'] || use_nil}"
type_name "#{ENV['FLUENT_ELASTICSEARCH_LOGSTASH_TYPE_NAME'] || 'fluentd'}"
include_timestamp "#{ENV['FLUENT_ELASTICSEARCH_INCLUDE_TIMESTAMP'] || 'false'}"
# Index template management — disabled unless explicitly configured.
template_name "#{ENV['FLUENT_ELASTICSEARCH_TEMPLATE_NAME'] || use_nil}"
template_file "#{ENV['FLUENT_ELASTICSEARCH_TEMPLATE_FILE'] || use_nil}"
template_overwrite "#{ENV['FLUENT_ELASTICSEARCH_TEMPLATE_OVERWRITE'] || use_default}"
# Simple sniffer avoids re-discovering cluster nodes, which is required when
# ES is reached through a Kubernetes service — TODO confirm this is why the
# default sniffer is overridden here.
sniffer_class_name "#{ENV['FLUENT_SNIFFER_CLASS_NAME'] || 'Fluent::Plugin::ElasticsearchSimpleSniffer'}"
request_timeout "#{ENV['FLUENT_ELASTICSEARCH_REQUEST_TIMEOUT'] || '5s'}"
# suppress_type_name 'true' is needed for Elasticsearch 7+, which rejects
# mapping types — NOTE(review): verify against the target ES version.
suppress_type_name "#{ENV['FLUENT_ELASTICSEARCH_SUPPRESS_TYPE_NAME'] || 'true'}"
# Index Lifecycle Management — off by default.
enable_ilm "#{ENV['FLUENT_ELASTICSEARCH_ENABLE_ILM'] || 'false'}"
ilm_policy_id "#{ENV['FLUENT_ELASTICSEARCH_ILM_POLICY_ID'] || use_default}"
ilm_policy "#{ENV['FLUENT_ELASTICSEARCH_ILM_POLICY'] || use_default}"
ilm_policy_overwrite "#{ENV['FLUENT_ELASTICSEARCH_ILM_POLICY_OVERWRITE'] || 'false'}"
# Buffering and retry behavior for the ES output. retry_forever means
# chunks are never dropped on persistent ES outages — they queue up to
# queue_limit_length, after which back-pressure applies.
<buffer>
flush_thread_count "#{ENV['FLUENT_ELASTICSEARCH_BUFFER_FLUSH_THREAD_COUNT'] || '8'}"
flush_interval "#{ENV['FLUENT_ELASTICSEARCH_BUFFER_FLUSH_INTERVAL'] || '5s'}"
chunk_limit_size "#{ENV['FLUENT_ELASTICSEARCH_BUFFER_CHUNK_LIMIT_SIZE'] || '2M'}"
queue_limit_length "#{ENV['FLUENT_ELASTICSEARCH_BUFFER_QUEUE_LIMIT_LENGTH'] || '32'}"
retry_max_interval "#{ENV['FLUENT_ELASTICSEARCH_BUFFER_RETRY_MAX_INTERVAL'] || '30'}"
retry_forever true
</buffer>
</match>
Loading

0 comments on commit 870f969

Please sign in to comment.