Unverified Commit 02586cd8 authored by intrigeri

Merge remote-tracking branch 'origin/stable' into feature/shellcheck+force-all-tests

parents 8a19e328 aa9edc01
Pipeline #512 failed in 4 minutes and 7 seconds
workflow:
  rules:
    - if: $CI_MERGE_REQUEST_IID
    - if: $CI_COMMIT_TAG
    - if: '$CI_COMMIT_BRANCH =~ /^master|stable|testing|devel$/'
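  # The three rules above limit pipeline creation to merge requests, tags,
  # and the base branches (master, stable, testing, devel).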
image: debian:buster

before_script:
  - export DEBIAN_FRONTEND=noninteractive
  - apt-get update -qq
.prepare-lint-po: &prepare-lint-po
  - apt-get -qy install git i18nspector
  - git clone https://gitlab.tails.boum.org/tails/jenkins-tools.git /tmp/jenkins-tools
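# The hidden ".prepare-lint-po" job above only defines the &prepare-lint-po
# anchor; the lint-po jobs below pull these setup steps in via "*prepare-lint-po".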
lint-po:
  image: debian:testing
  rules:
    - if: '$CI_COMMIT_BRANCH =~ /^master|stable|testing|devel$/'
    - changes:
        - ./**.po
  script:
    - *prepare-lint-po
    - /tmp/jenkins-tools/slaves/lint_po
lint-latest-po:
  image: debian:testing
  rules:
    - if: '$CI_COMMIT_BRANCH == "stable"'
  script:
    - *prepare-lint-po
    - apt-get -qy install intltool
    - ./import-translations
    - /tmp/jenkins-tools/slaves/lint_po po/*.po
check-po-msgfmt:
  rules:
    - if: '$CI_COMMIT_BRANCH =~ /^master|stable|testing|devel$/'
    - changes:
        - ./**.po
  script:
    - apt-get -qy install python3 gettext
    - ./bin/check-po-msgfmt
test-iuk:
  script:
    - 'cat config/chroot_local-packageslists/tails-perl5lib.list
           config/chroot_local-packageslists/tails-iuk.list
       | grep -E -v "^#"
       | xargs apt-get -qy install'
    - 'apt-get -qy install
         apt-file
         libdist-zilla-plugin-test-notabs-perl
         libdist-zilla-plugin-test-perl-critic-perl
         libdist-zilla-app-command-authordebs-perl
         libmodule-build-perl
         sudo
         attr
         bsdtar
         libdevice-cdio-perl
         faketime
         genisoimage
         gnutls-bin
         libdata-dumper-concise-perl
         libdatetime-perl
         libfile-copy-recursive-perl'
    - apt-get update -qq # Take into account APT configuration added by apt-file
    # Otherwise, apt-get called by "dzil authordebs --install" asks confirmation
    - echo 'APT::Get::Assume-Yes "true";' > /etc/apt/apt.conf.d/yes
    - cd $CI_PROJECT_DIR/config/chroot_local-includes/usr/src/iuk
    - dzil authordebs --install
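    # SOURCE_DATE_EPOCH (next step) pins the timestamps embedded in generated
    # files, following the reproducible-builds.org convention.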
    - export SOURCE_DATE_EPOCH=$(date --utc +%s)
    - 'TAILS_GIT_CHECKOUT=$CI_PROJECT_DIR
       NODE_PATH=$CI_PROJECT_DIR/submodules/mirror-pool-dispatcher/lib/js
       PATH=$CI_PROJECT_DIR/submodules/mirror-pool-dispatcher/bin:$PATH
       PERL5LIB=$CI_PROJECT_DIR/config/chroot_local-includes/usr/src/perl5lib/lib
       LC_ALL=C.UTF-8
       dzil test --all'
test-perl5lib:
  script:
    - 'cat config/chroot_local-packageslists/tails-perl5lib.list
       | grep -E -v "^#"
       | xargs apt-get -qy install'
    - 'apt-get -qy install
         apt-file
         libdist-zilla-plugin-test-notabs-perl
         libdist-zilla-plugin-test-perl-critic-perl
         libdist-zilla-app-command-authordebs-perl
         libmodule-build-perl
         sudo'
    - apt-get update -qq # Take into account APT configuration added by apt-file
    # Otherwise, apt-get called by "dzil authordebs --install" asks confirmation
    - echo 'APT::Get::Assume-Yes "true";' > /etc/apt/apt.conf.d/yes
    - cd $CI_PROJECT_DIR/config/chroot_local-includes/usr/src/perl5lib
    - dzil authordebs --install
    - dzil test --all
test-persistence-setup:
  script:
    - apt-get -qy install apt-file
    - 'echo "deb http://deb.debian.org/debian buster-backports main"
       > /etc/apt/sources.list.d/buster-backports.list'
    - 'echo "deb http://deb.debian.org/debian bullseye main"
       > /etc/apt/sources.list.d/bullseye.list'
    # Needs to be kept in sync with config/chroot_apt/preferences
    - |
      cat > /etc/apt/preferences.d/electrum.pref <<-'EOF'
      Explanation: unavailable in Buster
      Package: electrum python3-electrum python3-aiohttp-socks python3-aiorpcx python3-ecdsa
      Pin: release o=Debian,n=bullseye
      Pin-Priority: 999

      Explanation: Electrum needs a version newer than the one in Buster
      Package: python3-attr python3-protobuf libprotobuf23
      Pin: release o=Debian Backports,n=buster-backports
      Pin-Priority: 990

      Explanation: Electrum 4.0.2 and recent TREZOR firmware need 0.12
      Package: python3-trezor trezor
      Pin: release o=Debian Backports,n=buster-backports
      Pin-Priority: 999

      Package: *
      Pin: release o=Debian,n=bullseye
      Pin-Priority: -10
      EOF
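    # Pinning recap: priorities 999/990 pull the listed packages from bullseye
    # and buster-backports, while the final stanza's negative priority keeps
    # any other bullseye package from being installed.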
    - apt-get update -qq
    - 'cat config/chroot_local-packageslists/tails-perl5lib.list
           config/chroot_local-packageslists/tails-persistence-setup.list
       | grep -E -v "^#"
       | xargs apt-get -qy install'
    - 'apt-get -qy install
         libdist-zilla-plugin-test-notabs-perl
         libdist-zilla-plugin-test-perl-critic-perl
         libdist-zilla-app-command-authordebs-perl
         libmodule-build-perl
         libtest-most-perl
         libtest-spec-perl
         libtest-bdd-cucumber-perl
         liblwp-online-perl
         libdata-dumper-concise-perl
         libtest-fatal-perl
         libsys-statistics-linux-perl
         sudo
         xvfb'
    # Otherwise, apt-get called by "dzil authordebs --install" asks confirmation
    - echo 'APT::Get::Assume-Yes "true";' > /etc/apt/apt.conf.d/yes
    - cd $CI_PROJECT_DIR/config/chroot_local-includes/usr/src/perl5lib
    - dzil authordebs --install
    - cd $CI_PROJECT_DIR/config/chroot_local-includes/usr/src/persistence-setup
    - dzil authordebs --install
    - >
      PERL5LIB=$CI_PROJECT_DIR/config/chroot_local-includes/usr/src/perl5lib/lib
      TAILS_GIT_CHECKOUT=$CI_PROJECT_DIR
      LC_ALL=C.UTF-8
      xvfb-run --auto-servernum dzil test --all
shellcheck:
  image: debian:testing

@@ -31,14 +172,28 @@ shellcheck:
test-python-doctest:
  script:
    - apt-get -qy install python3
    - config/chroot_local-includes/usr/local/lib/tails-gdm-error-message doctest --verbose
test-whisperback:
  script:
    - 'cat config/chroot_local-packageslists/whisperback.list | grep -E -v "^#"
       | xargs apt-get -qy install'
    - apt-get -qy install python3-pytest
    - 'PYTHONPATH=config/chroot_local-includes/usr/lib/python3/dist-packages
       pytest-3 --verbose --junit-xml=report.xml
       config/chroot_local-includes/usr/lib/python3/dist-packages/whisperBack/test.py'
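  # The JUnit file produced by --junit-xml above is uploaded as an artifact
  # so GitLab can display the test results in the pipeline and merge request UI.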
  artifacts:
    when: always
    reports:
      junit: report.xml
apt-snapshots-expiry:
  script:
    - apt-get -qy install curl git
    - ./bin/apt-snapshots-expiry
  rules:
    - if: '$CI_COMMIT_BRANCH =~ /^stable|testing|devel$/'
    - changes:
        - config/APT_snapshots.d/*/serial
        - vagrant/definitions/tails-builder/config/APT_snapshots.d/*/serial
@@ -644,20 +644,6 @@ def on_jenkins?
  !ENV['JENKINS_URL'].nil?
end

desc 'Test Tails'
task :test do
  args = ARGV.drop_while { |x| ['test', '--'].include?(x) }
  if on_jenkins?
    args += ['--'] unless args.include? '--'
    args += ['--tag', '~@fragile'] unless releasing?
    base_branch = git_helper('base_branch')
    if git_helper('git_only_doc_changes_since?', "origin/#{base_branch}")
      args += ['--tag', '@doc']
    end
  end
  run_command('./run_test_suite', *args)
end

desc 'Clean up all build related files'
task clean_all: ['vm:destroy', 'basebox:clean_all']
@@ -3,7 +3,7 @@
BASE_BRANCHES="stable testing devel"

# Returns "" if in detached head
git_current_branch() {
    local git_ref
    if git_ref="$(git symbolic-ref HEAD 2>/dev/null)"; then
@@ -11,6 +11,7 @@ import sys
from datetime import datetime, timedelta
from pathlib import Path
from typing import List

LOG_FORMAT = "%(asctime)-15s %(levelname)s %(message)s"
log = logging.getLogger()
@@ -64,7 +65,7 @@ def main():
        "gc",
        help="Garbage collect expired data from the cache")
    parser_gc.add_argument(
        "--max-days", type=int, action="store", default=20,
        help="Number of days after which cached data expires")
    parser_gc.set_defaults(func=gc)
@@ -102,7 +103,7 @@ def main():
    for key_file in KEY_FILES + [args.cache_base_dir]:
        if not Path(key_file).exists():
            log.error("%s does not exist", key_file)
            sys.exit(1)

    if args.command is None:
@@ -113,18 +114,19 @@ def main():
def gc(args):
    log.info("Garbage collecting expired data from the cache…")
    cache_dirs = [d for d in Path(args.cache_base_dir).iterdir()
                  if d.is_dir() and d.name != "lost+found"]
    delete_before = datetime.utcnow() - timedelta(days=args.max_days)
    log.debug("Will delete data created before %s", delete_before)
    for cache_dir in cache_dirs:
        mtime = datetime.utcfromtimestamp(cache_dir.stat().st_mtime)
        if mtime < delete_before:
            log.info(" - Deleting cache for %s with mtime %s",
                     cache_dir.name, mtime)
            shutil.rmtree(cache_dir)
        else:
            log.debug(" - Cache for %s has mtime %s ⇒ keeping",
                      cache_dir.name, mtime)
def get(args):
@@ -143,7 +145,7 @@ def get(args):
        raise FileNotFoundError("Found no cached %s for key %s"
                                % (file_to_get, args.cache_key))

    log.debug("Copying %s from the cache", file_to_get)
    if cached_file.is_dir():
        shutil.copytree(src=cached_file, dst=dest_file, symlinks=True)
    else:
@@ -161,7 +163,7 @@ def put(args):
        raise FileNotFoundError("Cannot store non-existing %s in the cache"
                                % file_to_cache)

    log.debug("Caching %s with key %s", file_to_cache, args.cache_key)
    cached_file.parent.mkdir(parents=True)
    if file_to_cache.is_dir():
        shutil.copytree(src=file_to_cache, dst=cached_file, symlinks=True)
@@ -173,7 +175,7 @@ def put(args):
def forget(args):
    cache_dir = Path(args.cache_base_dir, args.cache_key)
    if cache_dir.exists():
        log.info("Deleting cached data for key %s", args.cache_key)
        shutil.rmtree(cache_dir)
    else:
        log.info("No cached data to forget for key %s", args.cache_key)
@@ -186,18 +188,19 @@ def package_version(package: str) -> str:
        check=True).stdout.rstrip()


def compute_cache_key(key_files: List[str], key_packages: List[str]) -> str:
    input_data = {
        'git_commit': subprocess.run(
            ["git", "log", "-1", "--pretty=%H", "--", *key_files],
            stdout=subprocess.PIPE, universal_newlines=True,
            check=True).stdout.rstrip(),
        'packages': dict(
            (package, package_version(package))
            for package in sorted(key_packages)
        ),
    }
    serialized = json.dumps(input_data, sort_keys=True)
    log.debug("Serialized data: %s", serialized)
    return hashlib.sha1(bytes(serialized, encoding='utf-8')).hexdigest()
#!/bin/bash

set -eu
set -x

git rm \
    "${RELEASE_CHECKOUT:?}/wiki/src/torrents/files/tails-amd64-"*.{build-manifest,iso.sig,img.sig,packages,iso.torrent,img.torrent}

mkdir -p "${RELEASE_CHECKOUT:?}/wiki/src/torrents/files"

cp "${ISO_PATH:?}.sig" \
   "${IMG_PATH:?}.sig" \
   "${ISOS:?}/tails-amd64-${VERSION:?}/tails-amd64-${VERSION:?}.build-manifest" \
   "${ISOS:?}/tails-amd64-${VERSION:?}/tails-amd64-${VERSION:?}.packages" \
   "${ISOS:?}/tails-amd64-${VERSION:?}".{iso,img}.torrent \
   "${RELEASE_CHECKOUT:?}/wiki/src/torrents/files/"

# shellcheck disable=SC2012
LC_NUMERIC=C ls -l -h -L "${ISO_PATH:?}" | \
    cut -f 5 -d ' ' | sed -r 's/(.+)([MG])/\1 \2B/' \
    > "${RELEASE_CHECKOUT:?}/wiki/src/inc/stable_amd64_iso_size.html"

# shellcheck disable=SC2012
LC_NUMERIC=C ls -l -h -L "${IMG_PATH:?}" | \
    cut -f 5 -d ' ' | sed -r 's/(.+)([MG])/\1 \2B/' \
    > "${RELEASE_CHECKOUT:?}/wiki/src/inc/stable_amd64_img_size.html"

gpg --check-trustdb

LANG=C TZ=UTC \
    gpg --no-options --keyid-format long \
    --trusted-key "${TAILS_SIGNATURE_KEY_LONG_ID:?}" \
    --verify "${ISO_PATH:?}.sig" "${ISO_PATH:?}" 2>&1 \
    | perl -pE 's/\[ultimate\]$/[full]/' \
    | sed 's/ /\&nbsp;/g;s/</\&lt;/;s/>/\&gt;/;s/$/<br\/>/g' \
    > "${RELEASE_CHECKOUT:?}/wiki/src/inc/stable_amd64_iso_gpg_signature_output.html"

LANG=C TZ=UTC \
    gpg --no-options --keyid-format long \
    --trusted-key "${TAILS_SIGNATURE_KEY_LONG_ID:?}" \
    --verify "${IMG_PATH:?}.sig" "${IMG_PATH:?}" 2>&1 \
    | perl -pE 's/\[ultimate\]$/[full]/' \
    | sed 's/ /\&nbsp;/g;s/</\&lt;/;s/>/\&gt;/;s/$/<br\/>/g' \
    > "${RELEASE_CHECKOUT:?}/wiki/src/inc/stable_amd64_img_gpg_signature_output.html"
#!/bin/bash

set -eu
set -x

ssh bittorrent.lizard gpg --import \
    < "${RELEASE_CHECKOUT:?}/wiki/src/tails-signing.key"

for type in iso img ; do
    image_filename="tails-amd64-${VERSION:?}.${type:?}"
    torrent_dirname="tails-amd64-${VERSION:?}-${type:?}"
    scp \
        "${ISOS:?}/${image_filename:?}.torrent" \
        "${ISOS:?}/tails-amd64-${VERSION:?}/${image_filename:?}.sig" \
        bittorrent.lizard:
    # shellcheck disable=SC2087
    ssh bittorrent.lizard <<-EOF
mkdir --mode 0775 "${torrent_dirname:?}" && \
cd "${torrent_dirname:?}" && \
mv "../${image_filename:?}.sig" . && \
wget --quiet \
  "https://nightly.tails.boum.org/build_Tails_ISO_${RELEASE_BRANCH:?}/builds/${MATCHING_JENKINS_IMAGES_BUILD_ID:?}/archive/build-artifacts/${image_filename:?}" && \
gpg --verify ${image_filename:?}{.sig,} && \
cd && \
chgrp -R debian-transmission "${torrent_dirname:?}" && \
chmod -R go+rX,g+w "${torrent_dirname:?}" && \
mv \
  "${torrent_dirname:?}" \
  /var/lib/transmission-daemon/downloads/ && \
transmission-remote --add ${image_filename:?}.torrent \
  --find /var/lib/transmission-daemon/downloads/
EOF
done
@@ -2,11 +2,10 @@
set -eu

FAILURE=no

for dir in config/APT_snapshots.d vagrant/definitions/tails-builder/config/APT_snapshots.d; do
    (
        set -eu
        echo "${dir:?}:"
        cd "$(git rev-parse --show-toplevel)/${dir:?}"
        for ARCHIVE in * ; do
            SERIAL="$(cat "${ARCHIVE:?}/serial")"
            if [ "${SERIAL:?}" = 'latest' ]; then
@@ -28,8 +27,18 @@ for dir in config/APT_snapshots.d vagrant/definitions/tails-builder/config/APT_s
                esac
                EXPIRY="$(curl --silent "https://time-based.snapshots.deb.tails.boum.org/${ARCHIVE:?}/dists/${DIST:?}/snapshots/${SERIAL:?}/Release" | sed -n 's/^Valid-Until:\s\+\(.*\)$/\1/p')"
            fi
            STATUS="archive '${ARCHIVE:?}' uses snapshot '${SERIAL:?}' which expires on: ${EXPIRY:?}"
            if [ "${EXPIRY}" = 'never' ] || \
               [ "$(date -d "${EXPIRY}" +%s)" -ge "$(date -d "now + 1 month" +%s)" ]; then
                echo "OK: ${STATUS}"
            else
                FAILURE=yes
                echo "FAIL: ${STATUS}, which is within one month!" >&2
            fi
        done
        echo ---
    )
done

if [ "${FAILURE}" = yes ]; then
    exit 1
fi
#!/bin/bash

set -eu
set -x

error () {
    echo "error: ${*}" >&2
    exit 1
}

USAGE="Usage: $(basename "$0") DAYS_FROM_NOW"

[ $# -eq 1 ] || error "$USAGE"

DAYS_FROM_NOW="$1"

for dir in config/APT_snapshots.d vagrant/definitions/tails-builder/config/APT_snapshots.d; do
    (
        set -eu
        set -x
        cd "${dir}"
        for ARCHIVE in * ; do
            if ! grep -qs '^latest$' "$ARCHIVE"/serial; then
                ssh reprepro-time-based-snapshots@incoming.deb.tails.boum.org \
                    tails-bump-apt-snapshot-valid-until \
                    "$ARCHIVE" "$(cat "$ARCHIVE"/serial)" \
                    "${DAYS_FROM_NOW:?}"
            fi
        done
    )
done
@@ -2,7 +2,13 @@
# Iterate through all .po files, run msgfmt for each of them, and output any
# errors together with their context.
#
# This script can also be used to "sanitize" .po files to avoid Ikiwiki build
# failures. In that case, problematic translations are removed from the .po
# file and replaced by an empty string. The effect is that the string in the
# built website will not be translated.
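#
# Illustrative usage (the GitLab CI configuration above runs this script as
# ./bin/check-po-msgfmt):
#
#   ./bin/check-po-msgfmt              # only report problems
#   ./bin/check-po-msgfmt --sanitize   # additionally blank out broken msgstr entries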
import argparse
import glob
import re
import subprocess
@@ -18,30 +24,67 @@ WARNINGS = [
ERRORS = re.compile('^([^\s]+\.po):([0-9]+): ')
def find_context_start(msgstr_line, content):
    """
    Find the line number of the `msgid` corresponding to the given `msgstr`.
    """
    start = msgstr_line - 1
    while not content[start].startswith('msgid '):
        start -= 1
    return start


def find_context_end(msgstr_line, content):
    """
    Find the line number corresponding to the end of the given `msgstr`.
    """
    end = msgstr_line
    while not end == len(content) and content[end] != '\n':
        end += 1
    return end


def print_error_context(file, msgstr_line):
    """
    Print the full msgid and msgstr surrounding the `msgstr_line` in `file`.
    """
    with open(file) as f:
        content = f.readlines()
    start = find_context_start(msgstr_line, content)
    end = find_context_end(msgstr_line, content)
    for line in range(start, end):
        print(content[line].strip())
def delete_msgstr(file, msgstr_line):
    """
    Delete the translation starting on `msgstr_line` of the file `file`.
    """
    with open(file) as f:
        content = f.readlines()
    end = find_context_end(msgstr_line, content)
    content[msgstr_line-1] = 'msgstr ""\n'
    content = content[:msgstr_line] + content[end:]
    with open(file, 'w') as f:
        f.writelines(content)


def check_po_msgfmt(sanitize=False):
    """
    Run `msgfmt` for all .po files in the current directory and print any
    errors found. If `sanitize` is `True`, also delete problematic
    translations from corresponding .po files.
    """
    errors = False

    # find all .po files and run msgfmt for each of them
    for f in glob.glob('**/*.po', recursive=True):
        proc = subprocess.Popen(['msgfmt', '-c', '-o', '/dev/null', f],
                                stderr=subprocess.PIPE)
@@ -60,13 +103,25 @@ def check_po_msgfmt():
                errors = True
                print(line)
                file, n = match.groups()
                msgstr_line = int(n)
                print_error_context(file, msgstr_line)
                print('')
                if sanitize:
                    delete_msgstr(file, msgstr_line)

    if errors:
        sys.exit(1)


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('--sanitize', action='store_true',
                        help='Replace problematic translations with an empty string.')
    return parser.parse_args()


if __name__ == '__main__':
    args = parse_args()
    check_po_msgfmt(sanitize=args.sanitize)
#!/bin/bash

set -eu
set -x

perl -ni \
    -E 'chomp;
        last if m{\Ausr/local/lib/kill-boot-profile\s}xms;
        say unless m{(?:
              [.]pyc\s+\d+\z
            | \Aboot-profile[.]pid\s
            | \Alib/firmware/
            | \Alib/live/mount/medium/live/(?:filesystem[.]squashfs|initrd[.]img)\s
            | \Alib/live/mount/overlay/rw/etc/fstab\s
            | \Alib/live/mount/overlay/rw/etc/console-setup/cached_\S+[.](?:gz|sh)\s
            | \Alib/live/mount/overlay/rw/etc/machine-id\s
            | \Alib/live/mount/overlay/rw/etc/network/interfaces\s
            | \Alib/live/mount/overlay/rw/var/log/wtmp\s