Assorted infrastructure cleanup. (#14223)

* Remove old documentation check related config files.

These are no longer used by any of our CI as far as I can tell.

* Purge LGTM configuration and references.

It has been fully shut down, so none of this works anymore.

* Purge Travis CI config, scripts, and references.

We are no longer using Travis CI, so all of this is useless.

* Purge config for other CI tools we are no longer using.

* Remove old packaging related test scripts.

These haven’t been used in years, and are potentially confusing for new
contributors.

* Restore MLC configuration.

It is, in fact, still in use.

* Fix bogus CI config in dist files.

* Fix botched merge in CODEOWNERS.
This commit is contained in:
Austin S. Hemmelgarn 2023-01-25 11:10:37 -05:00 committed by GitHub
parent 0541c97e53
commit 597f08dee6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
34 changed files with 3 additions and 1275 deletions

View File

@ -1,2 +0,0 @@
--exclude-exts=.min.css
--ignore=adjoining-classes,box-model,ids,order-alphabetical,unqualified-attributes

2
.github/CODEOWNERS vendored
View File

@ -5,7 +5,6 @@
* @Ferroin
# Ownership by directory structure
.travis/ @Ferroin @tkatsoulas
.github/ @Ferroin @tkatsoulas
aclk/ @stelfrag @underhood
build/ @Ferroin @tkatsoulas
@ -41,7 +40,6 @@ Dockerfile* @Ferroin @tkatsoulas
# Ownership of specific files
.gitignore @Ferroin @tkatsoulas @vkalintiris
.travis.yml @Ferroin @tkatsoulas
.eslintrc @Ferroin @tkatsoulas
.eslintignore @Ferroin @tkatsoulas
.csslintrc @Ferroin @tkatsoulas

2
.github/labeler.yml vendored
View File

@ -34,8 +34,6 @@ area/build:
- "**/Makefile.am"
area/ci:
- .travis/*
- .travis/**/*
- .github/*
- .github/**/*

View File

@ -1 +0,0 @@
CHANGELOG.md

View File

@ -1,111 +0,0 @@
// Source: https://github.com/codacy/codacy-remark-lint/raw/master/.remarkrc.js
// remark configuration for the project's Markdown documentation:
// `settings` controls how Markdown is serialized, `plugins` is the lint rule set.
exports.settings = {
gfm: true,
commonmark: true,
looseTable: false,
spacedTable: false,
paddedTable: false,
fences: true,
rule: "-",
ruleRepetition: 3,
emphasis: "*",
strong: "*",
bullet: "-",
listItemIndent: "1",
incrementListMarker: true
};
// The markdown-style-guide preset minus remark-lint-no-duplicate-headings,
// which is deliberately filtered out before the preset is added to `plugins`.
const remarkPresetLintMarkdownStyleGuide = {
plugins: require("remark-preset-lint-markdown-style-guide").plugins.filter(
function(elem) {
return elem != require("remark-lint-no-duplicate-headings");
}
)
};
// Lint plugin list. Entries of the form [plugin, option] pass that option to
// the rule (e.g. a 120-character line length limit).
exports.plugins = [
require("remark-preset-lint-consistent"),
require("remark-preset-lint-recommended"),
remarkPresetLintMarkdownStyleGuide,
[require("remark-lint-no-dead-urls"), { skipOffline: true }],
require("remark-lint-heading-whitespace"),
[require("remark-lint-maximum-line-length"), 120],
[require("remark-lint-maximum-heading-length"), 120],
[require("remark-lint-list-item-indent"), "tab-size"],
[require("remark-lint-list-item-spacing"), false],
[require("remark-lint-strong-marker"), "*"],
[require("remark-lint-emphasis-marker"), "_"],
[require("remark-lint-unordered-list-marker-style"), "-"],
[require("remark-lint-ordered-list-marker-style"), "."],
[require("remark-lint-ordered-list-marker-value"), "ordered"],
// NOTE(review): the two disabled sections below reference `personalDictionary`,
// which is not defined anywhere in this file; it would have to be declared
// before re-enabling them.
/*[
require("remark-lint-write-good"),
[
"warn",
{
passive: false,
illusion: true,
so: true,
thereIs: true,
weasel: true,
adverb: true,
tooWordy: true,
cliches: true,
eprime: false
}
]
],*/
require("remark-validate-links"),
require("remark-frontmatter"),
/*[
require("remark-retext"),
require("unified")().use({
plugins: [
require("retext-english"),
require("retext-syntax-urls"),
[
require("retext-spell"),
{
ignoreLiteral: true,
dictionary: require("dictionary-en-us"),
...personalDictionary
}
],
[
require("retext-sentence-spacing"),
{
preferred: 1
}
],
require("retext-repeated-words"),
require("retext-usage"),
require("retext-indefinite-article"),
require("retext-redundant-acronyms"),
[
require("retext-contractions"),
{
straight: true,
allowLiteral: true
}
],
require("retext-diacritics"),
[
require("retext-quotes"),
{
preferred: "straight"
}
],
require("retext-equality"),
require("retext-passive"),
require("retext-profanities"),
[
require("retext-readability"),
{
age: 20
}
]
]
})
]*/
];

View File

@ -1,7 +0,0 @@
# Zeet deployment configuration for the netdata Docker image.
deployments:
netdata:
# Redeploy automatically whenever new commits land.
auto_deploy_on_commits: true
filename: ./packaging/docker/Dockerfile
context_path: ./
# Expose the netdata dashboard (internal port 19999) on external port 80.
port_forwarding: 80:19999
# Host mounts and capabilities needed for netdata to monitor the host system.
run_options: -v /proc:/host/proc:ro -v /sys:/host/sys:ro -v /var/run/docker.sock:/var/run/docker.sock:ro --cap-add SYS_PTRACE --security-opt apparmor=unconfined

View File

@ -1,68 +0,0 @@
---
# Travis CI configuration for netdata: Ubuntu 20.04 ("focal"), C toolchain.
dist: focal
language: c
addons:
apt:
packages: ['moreutils']
env:
global:
- RELEASE_CHANNEL=nightly
before_install:
# Prefix every output line with an elapsed timestamp (ts comes from moreutils).
- exec > >(ts -s '%H:%M:%.S ') 2>&1
- source .travis/utils.sh
# Install dependencies for all, once
#
install:
- sudo apt-get install -y libuv1-dev liblz4-dev libcap2-bin zlib1g-dev uuid-dev fakeroot libipmimonitoring-dev libmnl-dev libnetfilter-acct-dev gnupg python3-pip
- sudo pip3 install git-semver==0.3.2 # 11/Sep/2019: git-semver tip was broken, so we had to force last good run of it
- source tests/installer/slack.sh
# Notifications from forks/PRs go to the beta channel; main repo uses "automation".
- export NOTIF_CHANNEL="automation-beta"
- if [ "${TRAVIS_REPO_SLUG}" = "netdata/netdata" ]; then export NOTIF_CHANNEL="automation"; fi;
- export BUILD_VERSION="$(cat packaging/version | cut -d'-' -f1)"
- export LATEST_RELEASE_VERSION="$(cat packaging/version | cut -d'-' -f1)"
- export LATEST_RELEASE_DATE="$(git log -1 --format=%aD "${LATEST_RELEASE_VERSION}" | cat)"
# "[Build latest]" in the commit message switches to a nightly-style version and the edge repo.
- if [[ "${TRAVIS_COMMIT_MESSAGE}" = *"[Build latest]"* ]]; then export BUILD_VERSION="$(cat packaging/version | cut -d'-' -f1,2 | sed -e 's/-/./g').latest"; fi;
- export DEPLOY_REPO="netdata" # Default production packaging repository
- export PACKAGING_USER="netdata" # Standard package cloud account
- if [[ "${TRAVIS_COMMIT_MESSAGE}" = *"[Build latest]"* ]]; then export DEPLOY_REPO="netdata-edge"; fi;
- export PACKAGE_CLOUD_RETENTION_DAYS=30
# Forks deploy to the devel repository instead of production.
- if [ ! "${TRAVIS_REPO_SLUG}" = "netdata/netdata" ]; then export DEPLOY_REPO="netdata-devel"; fi;
# These are release-related artifacts and have to be evaluated before we start doing conditional checks inside stages
- source ".travis/tagger.sh"
- export GIT_TAG="$(git tag --points-at)"
- git submodule update --init --recursive
# Setup notification system
#
notifications:
webhooks:
urls:
- https://app.fossa.io/hooks/travisci
# Define the stage sequence and conditionals
#
stages:
# Mandatory runs, we always want these executed
- name: Build process
# Skip this stage for commits that only trigger package builds ("[Package ARCH TYPE]").
if: commit_message =~ /^((?!\[Package (amd64|arm64|i386) (DEB|RPM)( .*)?\]).)*$/
# Define stage implementation details
#
jobs:
# This is a hook to help us introduce "soft" errors on our process
allow_failures:
- env: ALLOW_SOFT_FAILURE_HERE=true
include:
# Ensure netdata code builds successfully
- stage: Build process
name: Standard netdata build
script: fakeroot ./netdata-installer.sh --install-prefix $HOME --dont-wait --dont-start-it --enable-plugin-nfacct --enable-plugin-freeipmi --disable-lto
env: CFLAGS='-O1 -Wall -Wextra -Wformat-signedness -fstack-protector-all -fno-common -DNETDATA_INTERNAL_CHECKS=1 -D_FORTIFY_SOURCE=2 -DNETDATA_VERIFY_LOCKS=1'
after_failure: post_message "TRAVIS_MESSAGE" "<!here> standard netdata build is failing (Still dont know which one, will improve soon)"

View File

@ -1,149 +0,0 @@
<!--
---
title: "Description of CI build configuration"
custom_edit_url: https://github.com/netdata/netdata/edit/master/.travis/README.md
---
-->
# Description of CI build configuration
## Variables needed by travis
- GITHUB_TOKEN - GitHub token with push access to repository
- DOCKER_USERNAME - Username (netdatabot) with write access to docker hub repository
- DOCKER_PWD - Password to docker hub
- encrypted_8daf19481253_key - key needed by openssl to decrypt GCS credentials file
- encrypted_8daf19481253_iv - IV needed by openssl to decrypt GCS credentials file
- COVERITY_SCAN_TOKEN - Token to allow coverity test analysis uploads
- SLACK_USERNAME - This is required for the slack notifications triggered by travis pipeline
- SLACK_CHANNEL - This is the channel that Travis will be posting messages
- SLACK_NOTIFY_WEBHOOK_URL - This is the incoming URL webhook as provided by slack integration. Visit Apps integration in slack to generate the required hook
- SLACK_BOT_NAME - This is the name your bot will appear with on slack
## CI workflow details
Our CI pipeline is designed to help us identify and mitigate risks at all stages of implementation.
To accommodate this need, we used [Travis CI](http://www.travis-ci.com) as our CI/CD tool.
Our main areas of concern are:
1) Only push code that is working. That means fail fast so that we can improve before we reach the public
2) Reduce the time to market to minimum, by streamlining the release process.
That means a lot of testing, a lot of consistency checks, a lot of validations
3) Generated artifacts consistency. We should not allow broken software to reach the public.
When this happens, it's embarrassing and we struggle to eliminate it.
4) We are an innovative company, so we love to automate :)
Having said that, here's a brief introduction to Netdata's improved CI/CD pipeline with Travis.
Our CI/CD lifecycle contains three different execution entry points:
1) A user opens a pull request to netdata/master: Travis will run a pipeline on the branch under that PR
2) A merge or commit happens on netdata/master. This will trigger travis to run, but we have two distinct cases in this scenario:
a) A user merges a pull request to netdata/master: Travis will run on master, after the merge.
b) A user runs a commit/merge with a special keyword (mentioned later).
This triggers a release for either minor, major or release candidate versions, depending on the keyword
3) A scheduled job runs on master once per day: Travis will run on master at the scheduled interval
To accommodate all three entry points our CI/CD workflow has a set of steps that run on all three entry points.
Once all these steps are successful, then our pipeline executes another subset of steps for entry points 2 and 3.
In travis terms the "steps" are "Stages" and within each stage we execute a set of activities called "jobs" in travis.
### Always run: Stages that run on all three execution entry points
## Code quality, linting, syntax, code style
At this early stage we iterate through a set of basic quality control checks:
- Shell checking: Run linters for our various BASH scripts
- Checksum validators: Run validators to ensure our installers and documentation are in sync
- Dashboard validator: We provide a pre-generated dashboard.js script file that we need to make sure is up to date. We validate that.
## Build process
At this stage, basically, we build :-)
We do a baseline check of our build artifacts to guarantee they are not broken
Briefly our activities include:
- Verify docker builds successfully
- Run the standard Netdata installer, to make sure we build & run properly
- Do the same through 'make dist', as this is our stable channel for our kickstart files
## Artifacts validation
At this point we know our software is building, we need to go through a set of checks, to guarantee
that our product meets certain expectations. At the current stage, we are focusing on basic capabilities
like installing in different distributions, running the full lifecycle of install-run-update-install and so on.
We are still working on enriching this with more and more use cases, to get us closer to achieving full stability of our software.
Briefly we currently evaluate the following activities:
- Basic software unit testing (only run when changes happen that require it)
- Non containerized build and install on ubuntu 14.04
- Non containerized build and install on ubuntu 18.04
- Running the full Netdata lifecycle (install, update, uninstall) on ubuntu 18.04
- Build and install on CentOS 7
(More to come)
### Nightly operations: Stages that run daily under cronjob
The nightly stages are related to the daily nightly activities, that produce our daily latest releases.
We also maintain a couple of cronjobs that run during the night to provide us with deeper insights,
like for example coverity scanning or extended kickstart checksum checks
## Nightly operations
At this stage we run scheduled jobs and execute the nightly changelog generator, coverity scans,
labeler for our issues and extended kickstart files checksum validations.
## Nightly release
During this stage we are building and publishing latest docker images, prepare the nightly artifacts
and deploy them (the artifacts) to our google cloud service provider.
### Publishing
Publishing is responsible for executing the major/minor/patch releases and is separated
in two stages: packaging preparation process and publishing.
## Packaging for release
During packaging we are preparing the release changelog information and run the labeler.
## Publish for release
The publishing stage is the most complex part in publishing. This is the stage where we generate and publish docker images,
prepare the release artifacts and get ready with the release draft.
### Package Management workflows
As part of our goal to provide the best support to our customers, we have created a set of CI workflows to automatically produce
DEB and RPM for multiple distributions. These workflows are implemented under the templated stages '_DEB_TEMPLATE' and '_RPM_TEMPLATE'.
We currently plan to actively support the following Operating Systems, with a plan to further expand this list following our users needs.
### Operating systems supported
The following distributions are supported
- Debian versions
- Buster (TBD - not released yet, check [debian releases](https://www.debian.org/releases/) for details)
- Stretch
- Jessie
- Wheezy
- Ubuntu versions
- Disco
- Cosmic
- Bionic
- artful
- Enterprise Linux versions (Covers Red Hat, CentOS, and Amazon Linux with version 6)
- Version 8 (TBD)
- Version 7
- Version 6
- Fedora versions
- Version 31 (TBD)
- Version 30
- Version 29
- Version 28
- openSUSE versions
- 15.1
- 15.0
- Gentoo distributions
- TBD
### Architectures supported
We plan to support amd64, x86 and arm64 architectures. As of June 2019 only amd64 and x86 will become available, as we are still working on solving issues with the architecture.
The Package deployment can be triggered manually by executing an empty commit with the following message pattern: `[Package PACKAGE_TYPE PACKAGE_ARCH] DESCRIBE_THE_REASONING_HERE`.
Travis Yaml configuration allows the user to combine package type and architecture as necessary to regenerate the current stable release (For example tag v1.15.0 as of 4th of May 2019)
Sample patterns to trigger building of packages for all amd64 supported architecture:
- '[Package amd64 RPM]': Build & publish all amd64 available RPM packages
- '[Package amd64 DEB]': Build & publish all amd64 available DEB packages

View File

@ -1,38 +0,0 @@
#!/usr/bin/env bash
#
# This scriptlet validates nightlies age and notifies us if it gets too old:
# if CHANGELOG.md has not been regenerated within two days, the nightly
# changelog job is probably broken, so we post a Slack alert.
#
# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
#
# Author : Pavlos Emm. Katsoulakis (paul@netdata.cloud)
set -e
# If we are not in netdata git repo, at the top level directory, fail
TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
CWD=$(git rev-parse --show-cdup || echo "")
if [ -n "${CWD}" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
echo "Run as .travis/$(basename "$0") from top level directory of netdata git repository"
echo "Changelog age checker exited abnormally"
exit 1
fi
# slack.sh provides post_message; continue even if it cannot be loaded (best effort).
source tests/installer/slack.sh || echo "I could not load slack library"
# Seconds-since-epoch of the last commit that touched CHANGELOG.md.
LAST_MODIFICATION="$(git log -1 --pretty="format:%at" CHANGELOG.md)"
CURRENT_TIME="$(date +"%s")"
TWO_DAYS_IN_SECONDS=172800
DIFF=$((CURRENT_TIME - LAST_MODIFICATION))
echo "Checking CHANGELOG.md last modification time on GIT.."
echo "CHANGELOG.md timestamp: ${LAST_MODIFICATION}"
echo "Current timestamp: ${CURRENT_TIME}"
echo "Diff: ${DIFF}"
if [ ${DIFF} -gt ${TWO_DAYS_IN_SECONDS} ]; then
echo "CHANGELOG.md is more than two days old!"
# NOTIF_CHANNEL is expected to be exported by the CI environment.
post_message "TRAVIS_MESSAGE" "Hi <!here>, CHANGELOG.md was found more than two days old (Diff: ${DIFF} seconds)" "${NOTIF_CHANNEL}"
else
echo "CHANGELOG.md is less than two days old, fine"
fi

View File

@ -1,77 +0,0 @@
#!/usr/bin/env bash
#
# Artifacts creation script.
# This script generates two things:
# 1) The static binary that can run on all linux distros (built-in dependencies etc)
# 2) The distribution source tarball
#
# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
#
# Author: Paul Emm. Katsoulakis <paul@netdata.cloud>
#
# shellcheck disable=SC2230
set -e
# If we are not in netdata git repo, at the top level directory, fail
TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
CWD=$(git rev-parse --show-cdup || echo "")
if [ -n "${CWD}" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
echo "Run as .travis/$(basename "$0") from top level directory of netdata git repository"
exit 1
fi
# Only the main repository produces artifacts; forks exit cleanly.
if [ ! "${TRAVIS_REPO_SLUG}" == "netdata/netdata" ]; then
echo "Beta mode on ${TRAVIS_REPO_SLUG}, not running anything here"
exit 0
fi
echo "--- Initialize git configuration ---"
git checkout "${1-master}"
git pull
# For stable releases, flip the default release channel baked into the installers.
if [ "${RELEASE_CHANNEL}" == stable ]; then
echo "--- Set default release channel to stable ---"
sed -i 's/^RELEASE_CHANNEL="nightly" *#/RELEASE_CHANNEL="stable" #/' \
netdata-installer.sh \
packaging/makeself/install-or-update.sh
fi
# Everything from this directory will be uploaded to GCS
mkdir -p artifacts
BASENAME="netdata-$(git describe)"
# Make sure stdout is in blocking mode. If we don't, then conda create will barf during downloads.
# See https://github.com/travis-ci/travis-ci/issues/4704#issuecomment-348435959 for details.
python -c 'import os,sys,fcntl; flags = fcntl.fcntl(sys.stdout, fcntl.F_GETFL); fcntl.fcntl(sys.stdout, fcntl.F_SETFL, flags&~os.O_NONBLOCK);'
echo "--- Create tarball ---"
command -v git > /dev/null && [ -d .git ] && git clean -d -f
autoreconf -ivf
./configure --prefix=/usr --sysconfdir=/etc --localstatedir=/var --libexecdir=/usr/libexec --with-zlib --with-math --with-user=netdata CFLAGS=-O2
make dist
mv "${BASENAME}.tar.gz" artifacts/
echo "--- Create self-extractor ---"
sxarches="x86_64 armv7l aarch64"
for arch in ${sxarches}; do
git clean -d -f
# Fix: this path was previously misspelled "packating", so the stale makeself
# build directory was never actually removed between architectures.
rm -rf packaging/makeself/tmp
./packaging/makeself/build-static.sh ${arch}
done
# Needed for GCS
echo "--- Copy artifacts to separate directory ---"
#shellcheck disable=SC2164
cp packaging/version artifacts/latest-version.txt
cd artifacts
ln -s "${BASENAME}.tar.gz" netdata-latest.tar.gz
for arch in ${sxarches}; do
ln -s "netdata-${arch}-$(git describe).gz.run" netdata-${arch}-latest.gz.run
done
ln -s "${BASENAME}.gz.run" netdata-latest.gz.run
sha256sum -b ./* > "sha256sums.txt"
echo "checksums:"
cat sha256sums.txt

View File

@ -1,47 +0,0 @@
#!/usr/bin/env bash
#
# Changelog generation script: regenerates CHANGELOG.md by running
# github-changelog-generator inside Docker against the netdata repository.
#
# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
#
# Author : Pavlos Emm. Katsoulakis (paul@netdata.cloud)
set -e
# If we are not in netdata git repo, at the top level directory, fail
TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
CWD=$(git rev-parse --show-cdup || echo "")
if [ -n "$CWD" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
echo "Run as .travis/$(basename "$0") from top level directory of netdata git repository"
echo "Changelog creation aborted"
exit 1
fi
# Split the Travis "owner/repo" slug into its two components.
ORGANIZATION=$(echo "$TRAVIS_REPO_SLUG" | awk -F '/' '{print $1}')
PROJECT=$(echo "$TRAVIS_REPO_SLUG" | awk -F '/' '{print $2}')
GIT_MAIL=${GIT_MAIL:-"bot@netdata.cloud"}
GIT_USER=${GIT_USER:-"netdatabot"}
# If a release tag was produced, pass it as the "future release" so unreleased
# changes are attributed to it in the generated changelog.
if [ -z ${GIT_TAG+x} ]; then
OPTS=""
else
OPTS="--future-release ${GIT_TAG}"
fi
# Forks never regenerate the changelog.
if [ ! "${TRAVIS_REPO_SLUG}" == "netdata/netdata" ]; then
echo "Beta mode on ${TRAVIS_REPO_SLUG}, nothing else to do here"
exit 0
fi
echo "--- Creating changelog ---"
git checkout master
git pull
# NOTE(review): docker login here is presumably to avoid anonymous pull rate
# limits when fetching the generator image — confirm against CI secrets setup.
docker login -u "${DOCKER_USERNAME}" -p "${DOCKER_PWD}"
docker run -it -v "$(pwd)":/project markmandel/github-changelog-generator:latest \
--user "${ORGANIZATION}" \
--project "${PROJECT}" \
--token "${GITHUB_TOKEN}" \
--since-tag "v1.10.0" \
--unreleased-label "**Next release**" \
--no-issues \
--exclude-labels "stale,duplicate,question,invalid,wontfix,discussion,no changelog" \
--max-issues 500 \
--bug-labels IGNOREBUGS ${OPTS}

View File

@ -1 +0,0 @@
changes-#18220

View File

@ -1,65 +0,0 @@
#!/bin/bash
#
# Draft release generator.
# This utility is responsible for submitting a draft release to github repo
# It is agnostic of other processes, when executed it will draft a release,
# based on the most recent reachable tag.
#
# Requirements:
# - GITHUB_TOKEN variable set with GitHub token. Access level: repo.public_repo
# - artifacts directory in place
# - The directory is created by create_artifacts.sh mechanism
# - The artifacts need to be created with the same tag, obviously
#
# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
#
# Author: Pavlos Emm. Katsoulakis <paul@netdata.cloud>
set -e
# Cheap sanity check that we run from the repository root.
if [ ! -f .gitignore ]; then
echo "Run as ./travis/$(basename "$0") from top level directory of git repository"
exit 1
fi
echo "--- Initialize git configuration ---"
git checkout master
git pull
# Release candidates never get a draft release.
if [[ $(git describe) =~ -rc* ]]; then
echo "This is a release candidate tag, we do not generate a release draft"
exit 0
fi
# Load the tag, if any
GIT_TAG=$(git describe)
# Forks only emulate the run.
if [ ! "${TRAVIS_REPO_SLUG}" == "netdata/netdata" ]; then
echo "Beta mode on ${TRAVIS_REPO_SLUG}, i was about to run for release (${GIT_TAG}), but i am emulating, so bye"
exit 0
fi;
echo "---- CREATING RELEASE DRAFT WITH ASSETS -----"
# Download hub
HUB_VERSION=${HUB_VERSION:-"2.5.1"}
wget "https://github.com/github/hub/releases/download/v${HUB_VERSION}/hub-linux-amd64-${HUB_VERSION}.tgz" -O "/tmp/hub-linux-amd64-${HUB_VERSION}.tgz"
tar -C /tmp -xvf "/tmp/hub-linux-amd64-${HUB_VERSION}.tgz"
export PATH=$PATH:"/tmp/hub-linux-amd64-${HUB_VERSION}/bin"
# Create a release draft
if [ -z ${GIT_TAG+x} ]; then
echo "Variable GIT_TAG is not set. Something went terribly wrong! Exiting."
exit 1
fi
# Refuse to draft a release unless HEAD is exactly the tagged commit.
if [ "${GIT_TAG}" != "$(git tag --points-at)" ]; then
echo "ERROR! Current commit is not tagged. Stopping release creation."
exit 1
fi
# Retry until hub succeeds — presumably to ride out transient GitHub API
# failures while uploading assets; TODO confirm.
until hub release create --draft \
-a "artifacts/netdata-${GIT_TAG}.tar.gz" \
-a "artifacts/netdata-${GIT_TAG}.gz.run" \
-a "artifacts/sha256sums.txt" \
-m "${GIT_TAG}" "${GIT_TAG}"; do
sleep 5
done

Binary file not shown.

View File

@ -1,64 +0,0 @@
#!/bin/bash
#
# Script to automatically do a couple of things:
# - generate a new tag according to semver (https://semver.org/)
# - generate CHANGELOG.md by using https://github.com/skywinder/github-changelog-generator
#
# Tags are generated by searching for a keyword in last commit message. Keywords are:
# - [patch] or [fix] to bump patch number
# - [minor], [feature] or [feat] to bump minor number
# - [major] or [breaking change] to bump major number
# All keywords MUST be surrounded with square braces.
#
# Script uses git mechanisms for locking, so it can be used in parallel builds
#
# Requirements:
# - GITHUB_TOKEN variable set with GitHub token. Access level: repo.public_repo
# - docker
#
# This is a modified version of:
# https://github.com/paulfantom/travis-helper/blob/master/releasing/releaser.sh
#
# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
#
# Author: Pavlos Emm. Katsoulakis <paul@netdata.cloud>
# Author: Pawel Krupa (@paulfantom)
set -e
# Cheap sanity check that we run from the repository root.
if [ ! -f .gitignore ]; then
echo "Run as ./travis/$(basename "$0") from top level directory of git repository"
exit 1
fi
echo "--- Changelog generator script starting ---"
# GIT_TAG is expected to have been produced earlier in the pipeline
# (see tagger.sh); without it there is nothing valid to release, so fail fast.
if [ -z "${GIT_TAG}" ]; then
echo "GIT_TAG is empty, that is not suppose to happen (Value: $GIT_TAG)"
exit 1
fi
# Forks never tag or push releases.
if [ ! "${TRAVIS_REPO_SLUG}" == "netdata/netdata" ]; then
echo "Beta mode on ${TRAVIS_REPO_SLUG}, nothing to do on the changelog generator and tagging script for (${GIT_TAG}), bye"
exit 0
fi
echo "--- Initialize git configuration ---"
# Used as the commit author below.
export GIT_MAIL="bot@netdata.cloud"
export GIT_USER="netdatabot"
git checkout master
git pull
echo "---- UPDATE VERSION FILE ----"
echo "$GIT_TAG" >packaging/version
git add packaging/version
echo "---- Create CHANGELOG -----"
./.travis/create_changelog.sh
git add CHANGELOG.md
echo "---- COMMIT AND PUSH CHANGES ----"
# "[ci skip]" keeps this release commit from triggering another CI build.
git commit -m "[ci skip] release $GIT_TAG" --author "${GIT_USER} <${GIT_MAIL}>"
git tag "$GIT_TAG" -a -m "Automatic tag generation for travis build no. $TRAVIS_BUILD_NUMBER"
# Authenticate the push by embedding the token into the remote URL
# (first the commit, then the tag).
git push "https://${GITHUB_TOKEN}:@$(git config --get remote.origin.url | sed -e 's/^https:\/\///')"
git push "https://${GITHUB_TOKEN}:@$(git config --get remote.origin.url | sed -e 's/^https:\/\///')" --tags
# After those operations output of command `git describe` should be identical with a value of GIT_TAG

View File

@ -1,50 +0,0 @@
#!/usr/bin/env bash
#
# Changelog generation scriptlet, for nightlies
#
# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
#
# Author : Pawel Krupa (paulfantom)
# Author : Pavlos Emm. Katsoulakis (paul@netdata.cloud)
set -e
# If we are not in netdata git repo, at the top level directory, fail
TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
CWD=$(git rev-parse --show-cdup || echo "")
if [ -n "$CWD" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
echo "Run as .travis/$(basename "$0") from top level directory of netdata git repository"
echo "Changelog generation process aborted"
exit 1
fi
# $1: most recent release tag, $2: number of commits since that tag.
LAST_TAG="$1"
COMMITS_SINCE_RELEASE="$2"
# Nightly versions are formatted "<tag>-<count+1>-nightly".
NEW_VERSION="${LAST_TAG}-$((COMMITS_SINCE_RELEASE + 1))-nightly"
GIT_MAIL=${GIT_MAIL:-"bot@netdata.cloud"}
GIT_USER=${GIT_USER:-"netdatabot"}
PUSH_URL=$(git config --get remote.origin.url | sed -e 's/^https:\/\///')
FAIL=0
# Forks never push changelog updates.
if [ ! "${TRAVIS_REPO_SLUG}" == "netdata/netdata" ]; then
echo "Beta mode on ${TRAVIS_REPO_SLUG}, nothing else to do here"
exit 0
fi
echo "Running changelog creation mechanism"
.travis/create_changelog.sh
echo "Changelog created! Adding packaging/version(${NEW_VERSION}) and CHANGELOG.md to the repository"
echo "${NEW_VERSION}" > packaging/version
git add packaging/version && echo "1) Added packaging/version to repository" || FAIL=1
git add CHANGELOG.md && echo "2) Added changelog file to repository" || FAIL=1
# "[ci skip]" prevents this push from triggering another CI run.
git commit -m '[ci skip] create nightly packages and update changelog' --author "${GIT_USER} <${GIT_MAIL}>" && echo "3) Committed changes to repository" || FAIL=1
git push "https://${GITHUB_TOKEN}:@${PUSH_URL}" && echo "4) Pushed changes to remote ${PUSH_URL}" || FAIL=1
# In case of a failure, wrap it up and bail out cleanly
if [ $FAIL -eq 1 ]; then
git clean -xfd
echo "Changelog generation failed during github UPDATE!"
exit 1
fi
echo "Changelog generation completed successfully!"

View File

@ -1,51 +0,0 @@
#!/usr/bin/env bash
#
# This is the nightly changelog generation script
# It is responsible for two major activities:
# 1) Update packaging/version with the current nightly version
# 2) Generate the changelog for the mentioned version
#
# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
#
# Author : Pawel Krupa (paulfantom)
# Author : Pavlos Emm. Katsoulakis (paul@netdata.cloud)
set -e
FAIL=0
# slack.sh provides post_message; continue even if it cannot be loaded (best effort).
source tests/installer/slack.sh || echo "Could not load slack library"
# If we are not in netdata git repo, at the top level directory, fail
TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
CWD=$(git rev-parse --show-cdup || echo "")
if [ -n "${CWD}" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
echo "Run as .travis/$(basename "$0") from top level directory of netdata git repository"
echo "Changelog generation process aborted"
exit 1
fi
LAST_TAG=$(git describe --abbrev=0 --tags)
COMMITS_SINCE_RELEASE=$(git rev-list "${LAST_TAG}"..HEAD --count)
# packaging/version is formatted "<tag>-<count>-nightly"; extract the <count>
# component (second dash-separated field from the end) via rev/cut/rev.
PREVIOUS_NIGHTLY_COUNT="$(rev <packaging/version | cut -d- -f 2 | rev)"
# If no commits since release, just stop
if [ "${COMMITS_SINCE_RELEASE}" == "${PREVIOUS_NIGHTLY_COUNT}" ]; then
echo "No changes since last nightly release, nothing else to do"
exit 0
fi
# Forks never publish nightlies.
if [ ! "${TRAVIS_REPO_SLUG}" == "netdata/netdata" ]; then
echo "Beta mode -- nothing to do for ${TRAVIS_REPO_SLUG} on the nightlies script, bye"
exit 0
fi
echo "--- Running Changelog generation ---"
echo "We got $COMMITS_SINCE_RELEASE changes since $LAST_TAG, re-generating changelog"
NIGHTLIES_CHANGELOG_FAILED=0
.travis/generate_changelog_for_nightlies.sh "${LAST_TAG}" "${COMMITS_SINCE_RELEASE}" || NIGHTLIES_CHANGELOG_FAILED=1
# A changelog failure is "soft": notify Slack but still exit with FAIL (0).
if [ ${NIGHTLIES_CHANGELOG_FAILED} -eq 1 ]; then
echo "Changelog generation has failed, this is a soft error, process continues"
post_message "TRAVIS_MESSAGE" "Changelog generation job for nightlies failed, possibly due to github issues" "${NOTIF_CHANNEL}" || echo "Slack notification failed"
fi
exit "${FAIL}"

View File

@ -1,61 +0,0 @@
# #BASH library
#
# Tags are generated by searching for a keyword in last commit message. Keywords are:
# - [patch] or [fix] to bump patch number
# - [minor], [feature] or [feat] to bump minor number
# - [major] or [breaking change] to bump major number
# All keywords MUST be surrounded with square braces.
#
# Requirements:
# - GITHUB_TOKEN variable set with GitHub token. Access level: repo.public_repo
# - git-semver python package (pip install git-semver)
#
# Original script is available at https://github.com/paulfantom/travis-helper/blob/master/releasing/releaser.sh
#
# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
#
# Author : Pawel Krupa (paulfantom)
# Author : Pavlos Emm. Katsoulakis (paul@netdata.cloud)
# Figure out what will be new release candidate tag based only on previous ones.
# This assumes that RELEASES are in format of "v0.1.2" and prereleases (RCs) are using "v0.1.2-rc0"
# Sets the global GIT_TAG to "v<version>-rc<n>".
function set_tag_release_candidate() {
LAST_TAG=$(git semver)
echo "${0}: Last tag found is: ${LAST_TAG}"
# If the last tag is already an RC, keep its version and bump only the RC number.
if [[ $LAST_TAG =~ -rc* ]]; then
VERSION=$(echo "$LAST_TAG" | cut -d'-' -f 1)
# Everything after the 'c' of "-rcN" is the RC number (assumes no other 'c' in the tag).
LAST_RC=$(echo "$LAST_TAG" | cut -d'c' -f 2)
RC=$((LAST_RC + 1))
else
# Otherwise start a new RC series at rc0 on the next minor version.
VERSION="$(git semver --next-minor)"
RC=0
echo "${0}: Warning: Will set version to ${VERSION} (Last tag: ${LAST_TAG}) while tagged for release candidate generation"
fi
GIT_TAG="v${VERSION}-rc${RC}"
echo "${0}: Generated a new tag, set to: (${GIT_TAG})"
}
# Set the global GIT_TAG from the release keyword found in the last commit
# message (see header). Leaves GIT_TAG untouched when it is already set, and
# unset when no keyword matches.
function set_tag_for_release() {
echo "${0}: Checking for tag existence"
if [ -z "${GIT_TAG}" ]; then
echo "${0}: No tag was found, generating a new tag"
git semver
echo "${0}: Last commit message: ${TRAVIS_COMMIT_MESSAGE}"
# Figure out next tag based on commit message
case "${TRAVIS_COMMIT_MESSAGE}" in
*"[netdata patch release]"*) GIT_TAG="v$(git semver --next-patch)" ;;
*"[netdata minor release]"*) GIT_TAG="v$(git semver --next-minor)" ;;
*"[netdata major release]"*) GIT_TAG="v$(git semver --next-major)" ;;
*"[netdata release candidate]"*) set_tag_release_candidate ;;
*)
echo "${0}: Keyword not detected. Nothing to set for GIT_TAG"
;;
esac
else
echo "${0}: We seem to already have a GIT_TAG set to (${GIT_TAG})"
fi
}

View File

@ -1,20 +0,0 @@
#!/bin/sh
# Trigger the GitHub Actions release-artifact build workflow (build.yml)
# through the workflow-dispatch REST API.
#
# Arguments:
#   $1 - GitHub API token
#   $2 - version to build
#   $3 - build type
gh_token="${1}"
build_version="${2}"
build_type="${3}"

endpoint="https://api.github.com/repos/netdata/netdata/actions/workflows/build.yml/dispatches"
payload="{\"ref\": \"master\", \"inputs\": {\"version\": \"${build_version}\", \"type\": \"${build_type}\"}}"

resp="$(curl -X POST \
-H 'Accept: application/vnd.github.v3+json' \
-H "Authorization: Bearer ${gh_token}" \
"${endpoint}" \
-d "${payload}")"

# A successful workflow-dispatch call returns HTTP 204 with an empty body,
# so any output from curl indicates an error response.
if [ -n "${resp}" ]; then
echo "Failed to trigger release artifact build. Output:"
echo "${resp}"
exit 1
fi

echo "Successfully triggered release artifact build."
exit 0

View File

@ -1,19 +0,0 @@
#!/bin/sh
# Trigger the GitHub Actions Docker image build workflow (docker.yml)
# through the workflow-dispatch REST API.
#
# Arguments:
#   $1 - GitHub API token
#   $2 - version to build
gh_token="${1}"
image_version="${2}"

endpoint="https://api.github.com/repos/netdata/netdata/actions/workflows/docker.yml/dispatches"
payload="{\"ref\": \"master\", \"inputs\": {\"version\": \"${image_version}\"}}"

resp="$(curl -X POST \
-H 'Accept: application/vnd.github.v3+json' \
-H "Authorization: Bearer ${gh_token}" \
"${endpoint}" \
-d "${payload}")"

# A successful workflow-dispatch call returns HTTP 204 with an empty body,
# so any output from curl indicates an error response.
if [ -n "${resp}" ]; then
echo "Failed to trigger Docker image build. Output:"
echo "${resp}"
exit 1
fi

echo "Successfully triggered Docker image build."
exit 0

View File

@ -1,20 +0,0 @@
#!/bin/sh
# Trigger the GitHub Actions binary package build workflow.
#
# Arguments:
#   1: GitHub API token authorized for workflow dispatch.
#   2: Version to build packages for.
#   3: Package build type (passed through to the workflow input).
token="${1}"
version="${2}"
pkgtype="${3}"

# The workflow dispatch endpoint returns an empty body (HTTP 204) on
# success, so any response output indicates an API-level failure.
# FIX: previously a curl transport failure also produced empty output and
# was misreported as success; check the curl exit status explicitly.
if ! resp="$(curl -X POST \
  -H 'Accept: application/vnd.github.v3+json' \
  -H "Authorization: Bearer ${token}" \
  "https://api.github.com/repos/netdata/netdata/actions/workflows/packaging.yml/dispatches" \
  -d "{\"ref\": \"master\", \"inputs\": {\"version\": \"${version}\", \"type\": \"${pkgtype}\"}}")"; then
  echo "Failed to trigger binary package builds: curl exited with an error."
  exit 1
fi

if [ -z "${resp}" ]; then
  echo "Successfully triggered binary package builds."
  exit 0
else
  echo "Failed to trigger binary package builds. Output:"
  echo "${resp}"
  exit 1
fi

View File

@ -1,29 +0,0 @@
#!/usr/bin/env bash
# Helper functions for long-running CI jobs, exported for use by child
# bash processes via `export -f`.

# Prevent travis from timing out after 10 minutes of no output
# tick CMD...: run CMD while a background loop prints an empty line every
# 5 minutes to keep the CI log active; returns CMD's exit status.
tick() {
  # Background keep-alive loop; remember its PID so we can kill it later.
  (while true; do sleep 300; echo; done) &
  local PID=$!
  # Detach the loop from job control so killing it produces no job message.
  disown
  "$@"
  # NOTE: must immediately follow "$@" — $? is expanded before `local`
  # runs, so RET captures the wrapped command's exit status.
  local RET=$?
  kill $PID
  return $RET
}
export -f tick

# retry TRIES CMD...: run CMD up to TRIES times with exponential backoff
# (1s, 2s, 4s, ...) between attempts. Returns 0 on the first success,
# 1 if every attempt fails.
retry() {
  local tries=$1
  shift
  local i=0
  while [ "$i" -lt "$tries" ]; do
    "$@" && return 0
    # Sleep 2^i seconds; i is post-incremented inside the arithmetic
    # expansion, so the delay doubles on each failed attempt.
    sleep $((2**((i++))))
  done
  return 1
}
export -f retry

View File

@ -23,7 +23,6 @@ CLEANFILES = \
EXTRA_DIST = \
.gitignore \
.csslintrc \
.eslintignore \
.eslintrc \
.github/CODEOWNERS \

View File

@ -8,7 +8,6 @@
<a href="https://github.com/netdata/netdata/releases/latest"><img src="https://img.shields.io/github/release/netdata/netdata.svg" alt="Latest release"></a>
<a href="https://github.com/netdata/netdata-nightlies/releases/latest"><img src="https://img.shields.io/github/release/netdata/netdata-nightlies.svg" alt="Latest nightly build"></a>
<br />
<a href="https://travis-ci.com/netdata/netdata"><img src="https://travis-ci.com/netdata/netdata.svg?branch=master" alt="Build status"></a>
<a href="https://bestpractices.coreinfrastructure.org/projects/2231"><img src="https://bestpractices.coreinfrastructure.org/projects/2231/badge" alt="CII Best Practices"></a>
<a href="https://codeclimate.com/github/netdata/netdata"><img src="https://codeclimate.com/github/netdata/netdata/badges/gpa.svg" alt="Code Climate"></a>
<a href="https://www.gnu.org/licenses/gpl-3.0"><img src="https://img.shields.io/badge/License-GPL%20v3%2B-blue.svg" alt="License: GPL v3+"></a>

View File

@ -1,27 +0,0 @@
#!/bin/sh
# Build release artifacts: a source tarball, a static x86_64 self-extracting
# installer, and a checksum manifest, all collected under ./artifacts/.

# Artifact basename is derived from the current git describe output.
BASENAME="netdata-$(git describe)"
mkdir -p artifacts

# Regenerate the autotools build system and configure with the standard
# distribution layout.
autoreconf -ivf
./configure \
  --prefix=/usr \
  --sysconfdir=/etc \
  --localstatedir=/var \
  --libexecdir=/usr/libexec \
  --with-zlib \
  --with-math \
  --with-user=netdata \
  CFLAGS=-O2

# Produce the source tarball and move it into the artifact directory.
make dist
mv "${BASENAME}.tar.gz" artifacts/

# Build the static self-extracting installer (USER cleared so the build
# script does not try to tag images for a specific Docker Hub user).
USER="" ./packaging/makeself/build-x86_64-static.sh

cp packaging/version artifacts/latest-version.txt
cd artifacts || exit 1

# Stable "-latest" symlinks pointing at the versioned artifacts.
ln -s "${BASENAME}.tar.gz" netdata-latest.tar.gz
ln -s "${BASENAME}.gz.run" netdata-latest.gz.run

# Checksum manifest covering every artifact in this directory.
sha256sum -b ./* > "sha256sums.txt"

View File

@ -95,7 +95,6 @@ override_dh_installdocs:
find . \
-name README.md \
-not -path './.travis/*' \
-not -path './debian/*' \
-not -path './contrib/*' \
-exec cp \

View File

@ -520,47 +520,4 @@ Caddyfile.
## Publish a test image to your own repository
At Netdata, we provide multiple ways of testing your Docker images using your own repositories.
You may either use the command line tools available or take advantage of our Travis CI infrastructure.
### Inside Netdata organization, using Travis CI
To enable Travis CI integration on your own repositories (Docker and GitHub), you need to be part of the Netdata
organization.
Once you have contacted the Netdata owners to setup you up on GitHub and Travis, execute the following steps
- Preparation
- Have Netdata forked on your personal GitHub account
- Get a GitHub token: Go to **GitHub settings** -> **Developer Settings** -> **Personal access tokens**, and
generate a new token with full access to `repo_hook`, read-only access to `admin:org`, `public_repo`,
`repo_deployment`, `repo:status`, and `user:email` settings enabled. This will be your `GITHUB_TOKEN` that is
described later in the instructions, so keep it somewhere safe.
- Contact the Netdata team and seek for permissions on `https://scan.coverity.com` should you require Travis to be
able to push your forked code to coverity for analysis and report. Once you are setup, you should have your
email you used in coverity and a token from them. These will be your `COVERITY_SCAN_SUBMIT_EMAIL` and
`COVERITY_SCAN_TOKEN` that we will refer to later.
- Have a valid Docker hub account, the credentials from this account will be your `DOCKER_USERNAME` and
`DOCKER_PWD` mentioned later.
- Setting up Travis CI for your own fork (Detailed instructions provided by Travis team [here](https://docs.travis-ci.com/user/tutorial/))
- Login to travis with your own GITHUB credentials (There is Open Auth access)
- Go to your profile settings, under [repositories](https://travis-ci.com/account/repositories) section and setup
your Netdata fork to be built by Travis CI.
- Once the repository has been setup, go to repository settings within Travis CI (usually under
`https://travis-ci.com/NETDATA_DEVELOPER/netdata/settings`, where `NETDATA_DEVELOPER` is your GitHub handle),
and select your desired settings.
- While in Travis settings, under Netdata repository settings in the Environment Variables section, you need to add
the following:
- `DOCKER_USERNAME` and `DOCKER_PWD` variables so that Travis can log in to your Docker Hub account and publish
Docker images there.
- `REPOSITORY` variable to `NETDATA_DEVELOPER/netdata`, where `NETDATA_DEVELOPER` is your GitHub handle again.
- `GITHUB_TOKEN` variable with the token generated on the preparation step, for Travis workflows to function
properly.
- `COVERITY_SCAN_SUBMIT_EMAIL` and `COVERITY_SCAN_TOKEN` variables to enable Travis to submit your code for
analysis to Coverity.
Having followed these instructions, your forked repository should be all set up for integration with Travis CI. Happy
testing!
You may either use the command line tools available or take advantage of our GitHub Actions infrastructure.

View File

@ -27,7 +27,7 @@ fi
STARTIT=1
REINSTALL_OPTIONS=""
RELEASE_CHANNEL="nightly" # check .travis/create_artifacts.sh before modifying
RELEASE_CHANNEL="nightly"
while [ "${1}" ]; do
case "${1}" in

View File

@ -1,53 +0,0 @@
#!/bin/sh
#
# Mechanism to validate kickstart files integrity status
#
# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
#
# Author : Pawel Krupa (pawel@netdata.cloud)
# Author : Pavlos Emm. Katsoulakis (paul@netdata.cloud)
# Author : Austin S. Hemmelgarn (austin@netdata.cloud)
set -e

# If we are not in netdata git repo, at the top level directory, fail
TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel 2>/dev/null || echo "")")
CWD="$(git rev-parse --show-cdup 2>/dev/null || echo "")"
if [ -n "$CWD" ] || [ "${TOP_LEVEL}" != "netdata" ]; then
  echo "Run as ./tests/installer/$(basename "$0") from top level directory of netdata git repository"
  echo "Kickstart validation process aborted"
  exit 1
fi

# check_file SCRIPT_NAME DOCS_FILE
# Compares three md5 sums of the kickstart script:
#   1. the checksum documented in DOCS_FILE (extracted from the example
#      `curl ... | md5sum` command line in the docs),
#   2. the checksum of the local copy under packaging/installer/,
#   3. the checksum of the published copy at https://my-netdata.io/
#      (skipped when LOCAL_ONLY is set).
# Exits non-zero on any mismatch.
check_file() {
  # Pull the documented md5 out of the docs: the quoted value on the line
  # containing the script name, "md5sum" and "curl".
  README_MD5=$(grep "$1" "$2" | grep md5sum | grep curl | cut -d '"' -f2)
  KICKSTART_URL="https://my-netdata.io/$1"
  KICKSTART="packaging/installer/$1"
  KICKSTART_MD5="$(md5sum "${KICKSTART}" | cut -d' ' -f1)"
  CALCULATED_MD5="$(curl -Ss "${KICKSTART_URL}" | md5sum | cut -d ' ' -f 1)"
  # Conditionally run the website validation
  if [ -z "${LOCAL_ONLY}" ]; then
    echo "Validating ${KICKSTART_URL} against local file ${KICKSTART} with MD5 ${KICKSTART_MD5}.."
    if [ "$KICKSTART_MD5" = "$CALCULATED_MD5" ]; then
      echo "${KICKSTART_URL} looks fine"
    else
      echo "${KICKSTART_URL} md5sum does not match local file, it needs to be updated"
      exit 2
    fi
  fi
  echo "Validating documentation for $1"
  if [ "$KICKSTART_MD5" != "$README_MD5" ]; then
    echo "Invalid checksum for $1 in $2."
    echo "checksum in docs: $README_MD5"
    echo "current checksum: $KICKSTART_MD5"
    exit 2
  else
    echo "$1 MD5Sum is well documented"
  fi
}

check_file kickstart.sh packaging/installer/methods/kickstart.md

echo "No problems found, exiting successfully!"

View File

@ -1,65 +0,0 @@
# No shebang necessary
# BASH Lib: Simple incoming webhook for slack integration.
#
# The script expects the following parameters to be defined by the upper layer:
# SLACK_NOTIFY_WEBHOOK_URL
# SLACK_BOT_NAME
# SLACK_CHANNEL
#
# Copyright:
#
# Author: Pavlos Emm. Katsoulakis <paul@netdata.cloud>

# post_message TYPE MESSAGE [CUSTOM_CHANNEL]
# Send a notification to Slack via the configured incoming webhook.
#   TYPE: "PLAIN_MESSAGE" posts MESSAGE verbatim to SLACK_CHANNEL;
#         "TRAVIS_MESSAGE" posts a rich attachment with Travis build/job
#         links (skipped for pull requests and non-master branches),
#         optionally to CUSTOM_CHANNEL instead of the webhook default.
# Returns non-zero for unknown TYPE values.
post_message() {
  TYPE="$1"
  MESSAGE="$2"
  CUSTOM_CHANNEL="$3"

  case "$TYPE" in
    "PLAIN_MESSAGE")
      curl -X POST --data-urlencode "payload={\"channel\": \"${SLACK_CHANNEL}\", \"username\": \"${SLACK_BOT_NAME}\", \"text\": \"${MESSAGE}\", \"icon_emoji\": \":space_invader:\"}" "${SLACK_NOTIFY_WEBHOOK_URL}"
      ;;
    "TRAVIS_MESSAGE")
      # Only notify for push builds on master.
      if [ "${TRAVIS_EVENT_TYPE}" == "pull_request" ] || [ "${TRAVIS_BRANCH}" != "master" ] ; then
        echo "Skipping notification due to build type."
        return 0
      fi

      # When a custom channel is requested, inject a "channel" key into
      # the payload; otherwise the webhook's default channel is used.
      if [ -n "${CUSTOM_CHANNEL}" ]; then
        echo "Sending travis message to custom channel ${CUSTOM_CHANNEL}"
        OPTIONAL_CHANNEL_INFO="\"channel\": \"${CUSTOM_CHANNEL}\","
      fi

      # Slack message payload: summary text plus an attachment with
      # buttons linking to the Travis build and job pages.
      POST_MESSAGE="{
${OPTIONAL_CHANNEL_INFO}
\"text\": \"${TRAVIS_REPO_SLUG}, ${MESSAGE}\",
\"attachments\": [{
\"text\": \"${TRAVIS_JOB_NUMBER}: Event type '${TRAVIS_EVENT_TYPE}', on '${TRAVIS_OS_NAME}' \",
\"fallback\": \"I could not determine the build\",
\"callback_id\": \"\",
\"color\": \"#3AA3E3\",
\"attachment_type\": \"default\",
\"actions\": [
{
\"name\": \"${TRAVIS_BUILD_NUMBER}\",
\"text\": \"Build #${TRAVIS_BUILD_NUMBER}\",
\"type\": \"button\",
\"url\": \"${TRAVIS_BUILD_WEB_URL}\"
},
{
\"name\": \"${TRAVIS_JOB_NUMBER}\",
\"text\": \"Job #${TRAVIS_JOB_NUMBER}\",
\"type\": \"button\",
\"url\": \"${TRAVIS_JOB_WEB_URL}\"
}]
}]
}"
      echo "Sending ${POST_MESSAGE}"
      curl -X POST --data-urlencode "payload=${POST_MESSAGE}" "${SLACK_NOTIFY_WEBHOOK_URL}"
      ;;
    *)
      echo "Unrecognized message type \"$TYPE\" was given"
      return 1
      ;;
  esac
}

View File

@ -1,60 +0,0 @@
#!/usr/bin/env bats
#
# Netdata installation lifecycle testing script.
# This is to validate the install, update and uninstall of netdata
#
# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
#
# Author : Pavlos Emm. Katsoulakis <paul@netdata.cloud)
#

# All installation happens under the bats-provided temporary directory.
INSTALLATION="$BATS_TMPDIR/installation"
ENV="${INSTALLATION}/netdata/etc/netdata/.environment"

# list of files which need to be checked. Path cannot start from '/'
FILES="usr/libexec/netdata/plugins.d/go.d.plugin
usr/libexec/netdata/plugins.d/charts.d.plugin
usr/libexec/netdata/plugins.d/python.d.plugin"

DIRS="usr/sbin/netdata
etc/netdata
usr/share/netdata
usr/libexec/netdata
var/cache/netdata
var/lib/netdata
var/log/netdata"

setup() {
  # If we are not in netdata git repo, at the top level directory, fail
  TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
  CWD=$(git rev-parse --show-cdup || echo "")
  if [ -n "${CWD}" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
    echo "Run as ./tests/lifecycle/$(basename "$0") from top level directory of git repository"
    exit 1
  fi
}

@test "install netdata" {
  ./netdata-installer.sh --dont-wait --dont-start-it --auto-update --install-prefix "${INSTALLATION}"

  # Validate particular files
  # NOTE(review): these assert the files do NOT exist directly under
  # $BATS_TMPDIR (i.e. outside the prefix) — confirm that is the intent.
  for file in $FILES; do
    [ ! -f "$BATS_TMPDIR/$file" ]
  done

  # Validate particular directories
  for a_dir in $DIRS; do
    [ ! -d "$BATS_TMPDIR/$a_dir" ]
  done
}

@test "update netdata" {
  export ENVIRONMENT_FILE="${ENV}"
  ${INSTALLATION}/netdata/usr/libexec/netdata/netdata-updater.sh --not-running-from-cron
  # After an update the environment file must no longer mark this as a
  # fresh installation.
  ! grep "new_installation" "${ENV}"
}

@test "uninstall netdata" {
  ./packaging/installer/netdata-uninstaller.sh --yes --force --env "${ENV}"
  [ ! -f "${INSTALLATION}/netdata/usr/sbin/netdata" ]
  [ ! -f "/etc/cron.daily/netdata-updater" ]
}

View File

@ -1,65 +0,0 @@
#!/usr/bin/env bats
#
# This script is responsible for validating
# updater capabilities after a change
#
# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
#
# Author : Pavlos Emm. Katsoulakis <paul@netdata.cloud)
#

# All installation happens under the bats-provided temporary directory.
INSTALLATION="$BATS_TMPDIR/installation"
ENV="${INSTALLATION}/netdata/etc/netdata/.environment"

# list of files which need to be checked. Path cannot start from '/'
FILES="usr/libexec/netdata/plugins.d/go.d.plugin
usr/libexec/netdata/plugins.d/charts.d.plugin
usr/libexec/netdata/plugins.d/python.d.plugin"

DIRS="usr/sbin/netdata
etc/netdata
usr/share/netdata
usr/libexec/netdata
var/cache/netdata
var/lib/netdata
var/log/netdata"

setup() {
  # If we are not in netdata git repo, at the top level directory, fail
  TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
  CWD=$(git rev-parse --show-cdup || echo "")
  if [ -n "${CWD}" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
    echo "Run as ./tests/$(basename "$0") from top level directory of git repository"
    exit 1
  fi
}

@test "install stable netdata using kickstart" {
  # FIX: quote the prefix so paths containing spaces do not word-split.
  ./packaging/installer/kickstart.sh --dont-wait --dont-start-it --auto-update --install-prefix "${INSTALLATION}"

  # Validate particular files
  for file in $FILES; do
    [ ! -f "$BATS_TMPDIR/$file" ]
  done

  # Validate particular directories
  for a_dir in $DIRS; do
    [ ! -d "$BATS_TMPDIR/$a_dir" ]
  done

  # FIX: removed `rm -rf ${kickstart_file}` — the variable was never
  # defined anywhere in this file, making the command a no-op at best and
  # a hazard if the name leaked in from the environment.
}

@test "update netdata using the new updater" {
  export ENVIRONMENT_FILE="${ENV}"
  # Run the updater, with the override so that it uses the local repo we have at hand
  export NETDATA_LOCAL_TARBALL_OVERRIDE="${PWD}"
  ${INSTALLATION}/netdata/usr/libexec/netdata/netdata-updater.sh --not-running-from-cron
  ! grep "new_installation" "${ENV}"
}

@test "uninstall netdata using latest uninstaller" {
  ./packaging/installer/netdata-uninstaller.sh --yes --force --env "${ENV}"
  [ ! -f "${INSTALLATION}/netdata/usr/sbin/netdata" ]
  [ ! -f "/etc/cron.daily/netdata-updater" ]
}

View File

@ -1,71 +0,0 @@
#!/usr/bin/env sh
#
# Wrapper script that installs the required dependencies
# for the BATS script to run successfully
#
# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
#
# Author : Pavlos Emm. Katsoulakis <paul@netdata.cloud)
#
echo "Syncing/updating repository.."

# Bootstrap grep on minimal Arch containers where it may be missing; the
# os-release detection below needs grep, so this must run first, blindly.
blind_arch_grep_install() {
  # There is a peculiar docker case with arch, where grep is not available
  # This method will have to be triggered blindly, to inject grep so that we can process
  # It starts to become a chicken-egg situation with all the distros..
  echo "* * Workaround hack * *"
  echo "Attempting blind install for archlinux case"

  if command -v pacman > /dev/null 2>&1; then
    echo "Executing grep installation"
    pacman -Sy
    pacman --noconfirm --needed -S grep
  fi
}
blind_arch_grep_install || echo "Workaround failed, proceed as usual"

# Distro ID from os-release, with surrounding quotes stripped.
running_os="$(grep '^ID=' /etc/os-release | cut -d'=' -f2 | sed -e 's/"//g')"

# Install bats and curl using the native package manager for each
# supported distribution family.
case "${running_os}" in
  "centos"|"fedora"|"CentOS")
    echo "Running on CentOS, updating YUM repository.."
    yum clean all
    yum update -y

    echo "Installing extra dependencies.."
    yum install -y epel-release
    yum install -y bats curl
    ;;
  "debian"|"ubuntu")
    echo "Running ${running_os}, updating APT repository"
    apt-get update -y
    apt-get install -y bats curl
    ;;
  "opensuse-leap"|"opensuse-tumbleweed")
    zypper update -y
    zypper install -y bats curl

    # Fixes curl: (60) SSL certificate problem: unable to get local issuer certificate
    # https://travis-ci.com/netdata/netdata/jobs/267573805
    update-ca-certificates
    ;;
  "arch")
    pacman --noconfirm -Syu
    pacman --noconfirm --needed -S bash-bats curl libffi
    ;;
  "alpine")
    apk update
    apk add bash curl bats
    ;;
  *)
    echo "Running on ${running_os}, no repository preparation done"
    ;;
esac

# Run dependency scriptlet, before anything else
#
./packaging/installer/install-required-packages.sh --non-interactive netdata

echo "Running BATS file.."
bats --tap tests/updater_checks.bats

View File

@ -4932,8 +4932,7 @@ function handleSignInMessage(e) {
netdataRegistryCallback(registryAgents);
if (e.data.redirectURI && !window.location.href.includes(e.data.redirectURI)) {
// lgtm false-positive - redirectURI does not come from user input, but from iframe callback
window.location.replace(e.data.redirectURI); // lgtm[js/client-side-unvalidated-url-redirection]
window.location.replace(e.data.redirectURI);
}
}