Commit 18030cfa authored by sajolida

Merge remote-tracking branch 'origin/master' into web/16928-define-fundraising

parents a050eb93 3cd8f577
@@ -57,12 +57,16 @@
/config/chroot_local-includes/usr/share/applications/tails-shutdown.desktop
/config/chroot_local-includes/usr/share/applications/tor-browser.desktop
/config/chroot_local-includes/usr/share/applications/tails-about.desktop
/config/chroot_local-includes/usr/share/applications/tails-installer.desktop
/config/chroot_local-includes/usr/share/applications/unlock-veracrypt-volumes.desktop
/config/chroot_local-includes/usr/share/applications/whisperback.desktop
/config/chroot_local-includes/usr/share/desktop-directories/Tails.directory
/config/chroot_local-includes/usr/share/polkit-1/actions/org.boum.tails.root-terminal.policy
/config/chroot_local-includes/usr/share/polkit-1/actions/org.boum.tails.additional-software.policy
/config/chroot_local-includes/usr/share/tails/greeter/*.ui
/config/chroot_local-includes/usr/share/tails-installer/*.ui
/config/chroot_local-includes/usr/share/tails/unlock-veracrypt-volumes/*.ui
/config/chroot_local-includes/usr/share/whisperback/*.ui
/tmp/
# The test suite's local configuration files
......
workflow:
rules:
- if: $CI_MERGE_REQUEST_IID
- if: $CI_COMMIT_TAG
- if: '$CI_COMMIT_BRANCH =~ /^master|stable|testing|devel$/'
image: debian:buster
before_script:
- export DEBIAN_FRONTEND=noninteractive
- apt-get update -qq
.prepare-lint-po: &prepare-lint-po
- apt-get -qy install git i18nspector
- git clone https://gitlab.tails.boum.org/tails/jenkins-tools.git /tmp/jenkins-tools
lint-po:
image: debian:testing
rules:
- if: '$CI_COMMIT_BRANCH =~ /^master|stable|testing|devel$/'
- changes:
- ./**.po
script:
- *prepare-lint-po
- /tmp/jenkins-tools/slaves/lint_po
lint-latest-po:
image: debian:testing
rules:
- if: '$CI_COMMIT_BRANCH == "stable"'
script:
- *prepare-lint-po
- apt-get -qy install intltool
- ./import-translations
- /tmp/jenkins-tools/slaves/lint_po po/*.po
check-po-msgfmt:
rules:
- if: '$CI_COMMIT_BRANCH =~ /^master|stable|testing|devel$/'
- changes:
- ./**.po
script:
- apt-get -qy install python3 gettext
- ./bin/check-po-msgfmt
check-po-meta-date:
rules:
- if: '$CI_COMMIT_BRANCH =~ /^master|stable|testing|devel$/'
- changes:
- ./**.po
script:
- apt-get -qy install git ruby
- ./bin/sanity-check-website
check-translatable-live-website-urls:
script:
- apt-get -qy install python3-polib
- ./bin/check-translatable-live-website-urls po/tails.pot
test-iuk:
script:
- 'cat config/chroot_local-packageslists/tails-perl5lib.list
config/chroot_local-packageslists/tails-iuk.list
| grep -E -v "^#"
| xargs apt-get -qy install'
- 'apt-get -qy install
apt-file
libdist-zilla-plugin-test-notabs-perl
libdist-zilla-plugin-test-perl-critic-perl
libdist-zilla-app-command-authordebs-perl
libmodule-build-perl
sudo
attr
bsdtar
libdevice-cdio-perl
faketime
genisoimage
gnutls-bin
libdata-dumper-concise-perl
libdatetime-perl
libfile-copy-recursive-perl'
- apt-get update -qq # Take into account APT configuration added by apt-file
# Otherwise, apt-get called by "dzil authordebs --install" asks for confirmation
- echo 'APT::Get::Assume-Yes "true";' > /etc/apt/apt.conf.d/yes
- cd $CI_PROJECT_DIR/config/chroot_local-includes/usr/src/iuk
- dzil authordebs --install
- export SOURCE_DATE_EPOCH=$(date --utc +%s)
- 'TAILS_GIT_CHECKOUT=$CI_PROJECT_DIR
NODE_PATH=$CI_PROJECT_DIR/submodules/mirror-pool-dispatcher/lib/js
PATH=$CI_PROJECT_DIR/submodules/mirror-pool-dispatcher/bin:$PATH
PERL5LIB=$CI_PROJECT_DIR/config/chroot_local-includes/usr/src/perl5lib/lib
LC_ALL=C.UTF-8
dzil test --all'
test-perl5lib:
script:
- 'cat config/chroot_local-packageslists/tails-perl5lib.list
| grep -E -v "^#"
| xargs apt-get -qy install'
- 'apt-get -qy install
apt-file
libdist-zilla-plugin-test-notabs-perl
libdist-zilla-plugin-test-perl-critic-perl
libdist-zilla-app-command-authordebs-perl
libmodule-build-perl
sudo'
- apt-get update -qq # Take into account APT configuration added by apt-file
# Otherwise, apt-get called by "dzil authordebs --install" asks for confirmation
- echo 'APT::Get::Assume-Yes "true";' > /etc/apt/apt.conf.d/yes
- cd $CI_PROJECT_DIR/config/chroot_local-includes/usr/src/perl5lib
- dzil authordebs --install
- dzil test --all
test-persistence-setup:
script:
- apt-get -qy install apt-file
- 'echo "deb http://deb.debian.org/debian buster-backports main"
> /etc/apt/sources.list.d/buster-backports.list'
- 'echo "deb http://deb.debian.org/debian bullseye main"
> /etc/apt/sources.list.d/bullseye.list'
# Needs to be kept in sync with config/chroot_apt/preferences
- |
cat > /etc/apt/preferences.d/electrum.pref <<-'EOF'
Explanation: unavailable in Buster
Package: electrum python3-electrum python3-aiohttp-socks python3-aiorpcx python3-ecdsa
Pin: release o=Debian,n=bullseye
Pin-Priority: 999
Explanation: Electrum needs a version newer than the one in Buster
Package: python3-attr python3-protobuf libprotobuf23
Pin: release o=Debian Backports,n=buster-backports
Pin-Priority: 990
Explanation: Electrum 4.0.2 and recent TREZOR firmware need 0.12
Package: python3-trezor trezor
Pin: release o=Debian Backports,n=buster-backports
Pin-Priority: 999
Package: *
Pin: release o=Debian,n=bullseye
Pin-Priority: -10
EOF
- apt-get update -qq
- 'cat config/chroot_local-packageslists/tails-perl5lib.list
config/chroot_local-packageslists/tails-persistence-setup.list
| grep -E -v "^#"
| xargs apt-get -qy install'
- 'apt-get -qy install
libdist-zilla-plugin-test-notabs-perl
libdist-zilla-plugin-test-perl-critic-perl
libdist-zilla-app-command-authordebs-perl
libmodule-build-perl
libtest-most-perl
libtest-spec-perl
libtest-bdd-cucumber-perl
liblwp-online-perl
libdata-dumper-concise-perl
libtest-fatal-perl
libsys-statistics-linux-perl
sudo
xvfb'
# Otherwise, apt-get called by "dzil authordebs --install" asks for confirmation
- echo 'APT::Get::Assume-Yes "true";' > /etc/apt/apt.conf.d/yes
- cd $CI_PROJECT_DIR/config/chroot_local-includes/usr/src/perl5lib
- dzil authordebs --install
- cd $CI_PROJECT_DIR/config/chroot_local-includes/usr/src/persistence-setup
- dzil authordebs --install
- >
PERL5LIB=$CI_PROJECT_DIR/config/chroot_local-includes/usr/src/perl5lib/lib
TAILS_GIT_CHECKOUT=$CI_PROJECT_DIR
LC_ALL=C.UTF-8
xvfb-run --auto-servernum dzil test --all
test-python-doctest:
script:
- apt-get -qy install python3
- config/chroot_local-includes/usr/local/lib/tails-gdm-error-message doctest --verbose
test-whisperback:
script:
- 'cat config/chroot_local-packageslists/whisperback.list | grep -E -v "^#"
| xargs apt-get -qy install'
- apt-get -qy install python3-pytest
- 'PYTHONPATH=config/chroot_local-includes/usr/lib/python3/dist-packages
pytest-3 --verbose --junit-xml=report.xml
config/chroot_local-includes/usr/lib/python3/dist-packages/whisperBack/test.py'
artifacts:
when: always
reports:
junit: report.xml
apt-snapshots-expiry:
script:
- apt-get -qy install curl git
- ./bin/apt-snapshots-expiry
rules:
- if: '$CI_COMMIT_BRANCH =~ /^stable|testing|devel$/'
- changes:
- config/APT_snapshots.d/*/serial
- vagrant/definitions/tails-builder/config/APT_snapshots.d/*/serial
@@ -11,7 +11,3 @@
[submodule "submodules/tails-workarounds"]
path = submodules/tails-workarounds
url = https://gitlab.tails.boum.org/tails/workarounds.git
[submodule "submodules/sof"]
path = submodules/sof
url = https://github.com/thesofproject/sof-bin.git
branch = stable-v1.5.1
@@ -523,6 +523,8 @@ def retrieve_artifacts(missing_ok: false)
fetch_command = [
'scp',
'-i', key_file,
# We don't want to use any identity saved in the ssh agent
'-o', 'IdentityAgent=none',
# We need this since the user will not necessarily have a
# known_hosts entry. It is safe since an attacker must
# compromise libvirt's network config or the user running the
@@ -644,20 +646,6 @@ def on_jenkins?
!ENV['JENKINS_URL'].nil?
end
desc 'Test Tails'
task :test do
args = ARGV.drop_while { |x| ['test', '--'].include?(x) }
if on_jenkins?
args += ['--'] unless args.include? '--'
args += ['--tag', '~@fragile'] unless releasing?
base_branch = git_helper('base_branch')
if git_helper('git_only_doc_changes_since?', "origin/#{base_branch}")
args += ['--tag', '@doc']
end
end
run_command('./run_test_suite', *args)
end
desc 'Clean up all build related files'
task clean_all: ['vm:destroy', 'basebox:clean_all']
......
@@ -60,12 +60,6 @@ echo "POTFILES_DOT_IN='$(
| sed -e 's,^config/chroot_local-includes,,' | tr "\n" ' '
)'" \
>> config/chroot_local-includes/usr/share/tails/build/variables
echo "SOF_VERSION='$(
git -C submodules/sof branch --all --contains HEAD \
--format '%(refname:short)' 'origin/stable-v*' \
| cut -d"-" -f 2
)'" \
>> config/chroot_local-includes/usr/share/tails/build/variables
# fix permissions on some source files that will be copied as is to the chroot.
# they may be wrong, e.g. if the Git repository was cloned with a strict umask.
@@ -154,7 +148,7 @@ BUILD_USB_IMAGE_FILENAME="${BUILD_BASENAME}.img"
) > "$BUILD_APT_SOURCES"
# make submodules available in the chroot:
SUBMODULES_SRC="submodules/sof submodules/tails-workarounds"
SUBMODULES_SRC="submodules/tails-workarounds"
SUBMODULES_DST="config/chroot_local-includes/tmp/submodules"
mkdir -p "$SUBMODULES_DST"
cp -a $SUBMODULES_SRC "$SUBMODULES_DST"/
......
@@ -3,7 +3,7 @@
BASE_BRANCHES="stable testing devel"
# Returns "" if in undetached head
# Returns "" if in detached head
git_current_branch() {
local git_ref
if git_ref="$(git symbolic-ref HEAD 2>/dev/null)"; then
......
@@ -11,6 +11,7 @@ import sys
from datetime import datetime, timedelta
from pathlib import Path
from typing import List
LOG_FORMAT = "%(asctime)-15s %(levelname)s %(message)s"
log = logging.getLogger()
@@ -64,7 +65,7 @@ def main():
"gc",
help="Garbage collect expired data from the cache")
parser_gc.add_argument(
"--max-days", type=int, action="store", default=30,
"--max-days", type=int, action="store", default=20,
help="Number of days after which cached data expires")
parser_gc.set_defaults(func=gc)
@@ -102,7 +103,7 @@ def main():
for key_file in KEY_FILES + [args.cache_base_dir]:
if not Path(key_file).exists():
log.error("%s does not exist" % (key_file))
log.error("%s does not exist", key_file)
sys.exit(1)
if args.command is None:
@@ -113,18 +114,19 @@ def gc(args):
def gc(args):
log.info("Garbage collecting expired data from the cache…")
cache_dirs = [d for d in Path(args.cache_base_dir).iterdir() if d.is_dir()]
cache_dirs = [d for d in Path(args.cache_base_dir).iterdir()
if d.is_dir() and d.name != "lost+found"]
delete_before = datetime.utcnow() - timedelta(days=args.max_days)
log.debug("Will delete data created before %s" % (delete_before))
log.debug("Will delete data created before %s", delete_before)
for cache_dir in cache_dirs:
mtime = datetime.utcfromtimestamp(cache_dir.stat().st_mtime)
if mtime < delete_before:
log.info(" - Deleting cache for %s with mtime %s"
% (cache_dir.name, mtime))
log.info(" - Deleting cache for %s with mtime %s",
cache_dir.name, mtime)
shutil.rmtree(cache_dir)
else:
log.debug(" - Cache for %s has mtime %s ⇒ keeping"
% (cache_dir.name, mtime))
log.debug(" - Cache for %s has mtime %s ⇒ keeping",
cache_dir.name, mtime)
def get(args):
@@ -143,7 +145,7 @@ def get(args):
raise FileNotFoundError("Found no cached %s for key %s"
% (file_to_get, args.cache_key))
log.debug("Copying %s from the cache" % (file_to_get))
log.debug("Copying %s from the cache", file_to_get)
if cached_file.is_dir():
shutil.copytree(src=cached_file, dst=dest_file, symlinks=True)
else:
@@ -161,7 +163,7 @@ def put(args):
raise FileNotFoundError("Cannot store non-existing %s in the cache"
% file_to_cache)
log.debug("Caching %s with key %s" % (file_to_cache, args.cache_key))
log.debug("Caching %s with key %s", file_to_cache, args.cache_key)
cached_file.parent.mkdir(parents=True)
if file_to_cache.is_dir():
shutil.copytree(src=file_to_cache, dst=cached_file, symlinks=True)
@@ -173,7 +175,7 @@ def forget(args):
def forget(args):
cache_dir = Path(args.cache_base_dir, args.cache_key)
if cache_dir.exists():
log.info("Deleting cached data for key %s" % args.cache_key)
log.info("Deleting cached data for key %s", args.cache_key)
shutil.rmtree(cache_dir)
else:
log.info("No cached data to forget for key %s", args.cache_key)
@@ -186,18 +188,19 @@ def package_version(package: str) -> str:
check=True).stdout.rstrip()
def compute_cache_key(key_files: [str], key_packages: [str]) -> str:
def compute_cache_key(key_files: List[str], key_packages: List[str]) -> str:
input_data = {
'git_commit': subprocess.run(
["git", "log", "-1", "--pretty=%H", "--", *key_files],
stdout=subprocess.PIPE, universal_newlines=True,
check=True).stdout.rstrip(),
'packages': dict(
(package, package_version(package)) for package in sorted(key_packages)
(package, package_version(package))
for package in sorted(key_packages)
),
}
serialized = json.dumps(input_data, sort_keys=True)
log.debug("Serialized data: " + serialized)
log.debug("Serialized data: %s", serialized)
return hashlib.sha1(bytes(serialized, encoding='utf-8')).hexdigest()
......
#!/bin/bash
set -eu
set -x
git rm \
"${RELEASE_CHECKOUT:?}/wiki/src/torrents/files/tails-amd64-"*.{build-manifest,iso.sig,img.sig,packages,iso.torrent,img.torrent}
mkdir -p "${RELEASE_CHECKOUT:?}/wiki/src/torrents/files"
cp "${ISO_PATH:?}.sig" \
"${IMG_PATH:?}.sig" \
"${ISOS:?}/tails-amd64-${VERSION:?}/tails-amd64-${VERSION:?}.build-manifest" \
"${ISOS:?}/tails-amd64-${VERSION:?}/tails-amd64-${VERSION:?}.packages" \
"${ISOS:?}/tails-amd64-${VERSION:?}".{iso,img}.torrent \
"${RELEASE_CHECKOUT:?}/wiki/src/torrents/files/"
# shellcheck disable=SC2012
LC_NUMERIC=C ls -l -h -L "${ISO_PATH:?}" | \
cut -f 5 -d ' ' | sed -r 's/(.+)([MG])/\1 \2B/' \
> "${RELEASE_CHECKOUT:?}/wiki/src/inc/stable_amd64_iso_size.html"
# shellcheck disable=SC2012
LC_NUMERIC=C ls -l -h -L "${IMG_PATH:?}" | \
cut -f 5 -d ' ' | sed -r 's/(.+)([MG])/\1 \2B/' \
> "${RELEASE_CHECKOUT:?}/wiki/src/inc/stable_amd64_img_size.html"
gpg --check-trustdb
LANG=C TZ=UTC \
gpg --no-options --keyid-format long \
--trusted-key "${TAILS_SIGNATURE_KEY_LONG_ID:?}" \
--verify "${ISO_PATH:?}.sig" "${ISO_PATH:?}" 2>&1 \
| perl -pE 's/\[ultimate\]$/[full]/' \
| sed 's/ /\&nbsp;/g;s/</\&lt;/;s/>/\&gt;/;s/$/<br\/>/g' \
> "${RELEASE_CHECKOUT:?}/wiki/src/inc/stable_amd64_iso_gpg_signature_output.html"
LANG=C TZ=UTC \
gpg --no-options --keyid-format long \
--trusted-key "${TAILS_SIGNATURE_KEY_LONG_ID:?}" \
--verify "${IMG_PATH:?}.sig" "${IMG_PATH:?}" 2>&1 \
| perl -pE 's/\[ultimate\]$/[full]/' \
| sed 's/ /\&nbsp;/g;s/</\&lt;/;s/>/\&gt;/;s/$/<br\/>/g' \
> "${RELEASE_CHECKOUT:?}/wiki/src/inc/stable_amd64_img_gpg_signature_output.html"
#!/bin/bash
set -eu
set -o pipefail
NAME=$(basename "${0}")
LONGOPTS="version:,isos:,release-branch:,matching-jenkins-images-build-id:"
OPTS=$(getopt -o "" --longoptions $LONGOPTS -n "${NAME}" -- "$@")
eval set -- "$OPTS"
while [ $# -gt 0 ]; do
case $1 in
--version)
shift
VERSION="$1"
;;
--isos)
shift
ISOS="$1"
;;
--release-branch)
shift
RELEASE_BRANCH="$1"
;;
--matching-jenkins-images-build-id)
shift
MATCHING_JENKINS_IMAGES_BUILD_ID="$1"
;;
esac
shift
done
ssh misc.lizard mkdir "tails-amd64-${VERSION:?}"
scp "${ISOS:?}/tails-amd64-${VERSION:?}/tails-amd64-${VERSION:?}."{apt-sources,build-manifest,buildlog,packages,iso.sig,img.sig} \
"misc.lizard:tails-amd64-${VERSION:?}"
ssh misc.lizard gpg --import < "wiki/src/tails-signing.key"
ssh misc.lizard << EOF
cd tails-amd64-${VERSION:?} && \
wget --quiet \
"https://nightly.tails.boum.org/build_Tails_ISO_${RELEASE_BRANCH:?}/builds/${MATCHING_JENKINS_IMAGES_BUILD_ID:?}/archive/build-artifacts/tails-amd64-${VERSION:?}.iso" \
"https://nightly.tails.boum.org/build_Tails_ISO_${RELEASE_BRANCH:?}/builds/${MATCHING_JENKINS_IMAGES_BUILD_ID:?}/archive/build-artifacts/tails-amd64-${VERSION:?}.img" && \
gpg --verify tails-amd64-${VERSION:?}.iso{.sig,} && \
gpg --verify tails-amd64-${VERSION:?}.img{.sig,}
EOF
ssh misc.lizard << EOF
( [ -d isos ] || git clone gitolite@puppet-git.lizard:isos.git ) && \
cd isos && \
git annex init && \
git annex sync && \
git annex import ../tails-amd64-${VERSION:?} && \
rmdir ../tails-amd64-${VERSION:?} && \
git commit -m "Add Tails ${VERSION:?}" && \
git annex sync && \
git annex copy tails-amd64-${VERSION:?} --to origin && \
git annex drop tails-amd64-${VERSION:?} && \
git annex sync
EOF
#!/bin/bash
set -eu
set -x
ssh bittorrent.lizard gpg --import \
< "${RELEASE_CHECKOUT:?}/wiki/src/tails-signing.key"
for type in iso img ; do
image_filename="tails-amd64-${VERSION:?}.${type:?}"
torrent_dirname="tails-amd64-${VERSION:?}-${type:?}"
scp \
"${ISOS:?}/${image_filename:?}.torrent" \
"${ISOS:?}/tails-amd64-${VERSION:?}/${image_filename:?}.sig" \
bittorrent.lizard:
# shellcheck disable=SC2087
ssh bittorrent.lizard <<-EOF
mkdir --mode 0775 "${torrent_dirname:?}" && \
cd "${torrent_dirname:?}" && \
mv "../${image_filename:?}.sig" . && \
wget --quiet \
"https://nightly.tails.boum.org/build_Tails_ISO_${RELEASE_BRANCH:?}/builds/${MATCHING_JENKINS_IMAGES_BUILD_ID:?}/archive/build-artifacts/${image_filename:?}" && \
gpg --verify ${image_filename:?}{.sig,} && \
cd && \
chgrp -R debian-transmission "${torrent_dirname:?}" && \
chmod -R go+rX,g+w "${torrent_dirname:?}" && \
mv \
"${torrent_dirname:?}" \
/var/lib/transmission-daemon/downloads/ && \
transmission-remote --add ${image_filename:?}.torrent \
--find /var/lib/transmission-daemon/downloads/ && \
transmission-remote --list
EOF
done
#!/bin/sh
set -eu
FAILURE=no
for dir in config/APT_snapshots.d vagrant/definitions/tails-builder/config/APT_snapshots.d; do
echo "${dir:?}:"
cd "$(git rev-parse --show-toplevel)/${dir:?}"
for ARCHIVE in * ; do
SERIAL="$(cat ${ARCHIVE:?}/serial)"
if [ "${SERIAL:?}" = 'latest' ]; then
EXPIRY='never'
if [ "${ARCHIVE:?}" != 'debian-security' ]; then
echo "Warning: origin '${ARCHIVE:?}' is using the 'latest' snapshot, which is unexpected" >&2
fi
else
case "${ARCHIVE:?}" in
'debian-security')
DIST='buster/updates'
;;
'torproject')
DIST='buster'
;;
*)
DIST='stable'
;;
esac
EXPIRY="$(curl --silent "https://time-based.snapshots.deb.tails.boum.org/${ARCHIVE:?}/dists/${DIST:?}/snapshots/${SERIAL:?}/Release" | sed -n 's/^Valid-Until:\s\+\(.*\)$/\1/p')"
fi
STATUS="archive '${ARCHIVE:?}' uses snapshot '${SERIAL:?}' which expires on: ${EXPIRY:?}"
if [ "${EXPIRY}" = 'never' ] || \
[ "$(date -d "${EXPIRY}" +%s)" -ge "$(date -d "now + 1 month" +%s)" ]; then
echo "OK: ${STATUS}"
else
FAILURE=yes
echo "FAIL: ${STATUS}, which is within one month!" >&2
fi
done
echo ---
done
if [ "${FAILURE}" = yes ]; then
exit 1
fi
#!/bin/bash
set -eu
set -x
error () {
echo "error: ${*}" >&2
exit 1
}
USAGE="Usage: $(basename "$0") DAYS_FROM_NOW"
[ $# -eq 1 ] || error "$USAGE"
DAYS_FROM_NOW="$1"
for dir in config/APT_snapshots.d vagrant/definitions/tails-builder/config/APT_snapshots.d; do
(
set -eu
set -x
cd "${dir}"
for ARCHIVE in * ; do
if ! grep -qs '^latest$' "$ARCHIVE"/serial; then
ssh reprepro-time-based-snapshots@incoming.deb.tails.boum.org \
tails-bump-apt-snapshot-valid-until \
"$ARCHIVE" "$(cat "$ARCHIVE"/serial)" \
"${DAYS_FROM_NOW:?}"
fi
done
)
done
#!/usr/bin/python3
# Iterate through all .po files, run msgfmt for each of them, and output any
# errors together with their context.
#
# This script can also be used to "sanitize" .po files to avoid Ikiwiki build
# failures. In that case, problematic translations are removed from the .po
# file and replaced by an empty string. The effect is that the string in the
# built website will not be translated.
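#
# A minimal sketch of that msgfmt loop, for illustration only: the function
# name, the directory argument, and the exit handling below are assumptions,
# not necessarily what this script actually does.

import subprocess
import sys
from pathlib import Path


def check_po_files(root="."):
    """Run `msgfmt --check` on every .po file under root and collect errors."""
    failures = []
    for po_file in sorted(Path(root).rglob("*.po")):
        # msgfmt prints its diagnostics on stderr and exits non-zero on error.
        result = subprocess.run(
            ["msgfmt", "--check", "--output-file=/dev/null", str(po_file)],
            stderr=subprocess.PIPE, universal_newlines=True)
        if result.returncode != 0:
            failures.append((po_file, result.stderr))
    return failures


if __name__ == "__main__":
    failures = check_po_files()
    for po_file, error in failures:
        print("{}:\n{}".format(po_file, error))
    sys.exit(1 if failures else 0)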