Commit 954ab6da authored by sajolida's avatar sajolida
Browse files

Merge remote-tracking branch 'origin/master' into web/16128-verification-on-page

parents c91a72b9 9a38fa5a
workflow:
  rules:
    - if: $CI_MERGE_REQUEST_IID
    - if: $CI_COMMIT_TAG
    - if: '$CI_COMMIT_BRANCH =~ /^master|stable|testing|devel$/'

image: debian:buster

# Shared by all jobs: non-interactive APT with refreshed package lists.
before_script:
  - export DEBIAN_FRONTEND=noninteractive
  - apt-get update -qq

# Reusable steps for the PO linting jobs; GitLab CI flattens nested lists
# in `script`, so jobs splice this in via the alias.
.prepare-lint-po: &prepare-lint-po
  - apt-get -qy install git i18nspector
  - git clone https://gitlab.tails.boum.org/tails/jenkins-tools.git /tmp/jenkins-tools

lint-po:
  image: debian:testing
  rules:
    - if: '$CI_COMMIT_BRANCH =~ /^master|stable|testing|devel$/'
    - changes:
        - ./**.po
  script:
    - *prepare-lint-po
    - /tmp/jenkins-tools/slaves/lint_po

lint-latest-po:
  image: debian:testing
  rules:
    - if: '$CI_COMMIT_BRANCH == "stable"'
  script:
    - *prepare-lint-po
    - apt-get -qy install intltool
    - ./import-translations
    - /tmp/jenkins-tools/slaves/lint_po po/*.po

check-po-msgfmt:
  rules:
    - if: '$CI_COMMIT_BRANCH =~ /^master|stable|testing|devel$/'
    - changes:
        - ./**.po
  script:
    - apt-get -qy install python3 gettext
    - ./bin/check-po-msgfmt

test-iuk:
  script:
    - 'cat config/chroot_local-packageslists/tails-perl5lib.list
       config/chroot_local-packageslists/tails-iuk.list
       | grep -E -v "^#"
       | xargs apt-get -qy install'
    - 'apt-get -qy install
       apt-file
       libdist-zilla-plugin-test-notabs-perl
       libdist-zilla-plugin-test-perl-critic-perl
       libdist-zilla-app-command-authordebs-perl
       libmodule-build-perl
       sudo
       attr
       bsdtar
       libdevice-cdio-perl
       faketime
       genisoimage
       gnutls-bin
       libdata-dumper-concise-perl
       libdatetime-perl
       libfile-copy-recursive-perl'
    - apt-get update -qq  # Take into account APT configuration added by apt-file
    # Otherwise, apt-get called by "dzil authordebs --install" asks confirmation
    - echo 'APT::Get::Assume-Yes "true";' > /etc/apt/apt.conf.d/yes
    - cd $CI_PROJECT_DIR/config/chroot_local-includes/usr/src/iuk
    - dzil authordebs --install
    - export SOURCE_DATE_EPOCH=$(date --utc +%s)
    - 'TAILS_GIT_CHECKOUT=$CI_PROJECT_DIR
       NODE_PATH=$CI_PROJECT_DIR/submodules/mirror-pool-dispatcher/lib/js
       PATH=$CI_PROJECT_DIR/submodules/mirror-pool-dispatcher/bin:$PATH
       PERL5LIB=$CI_PROJECT_DIR/config/chroot_local-includes/usr/src/perl5lib/lib
       LC_ALL=C.UTF-8
       dzil test --all'

test-perl5lib:
  script:
    - 'cat config/chroot_local-packageslists/tails-perl5lib.list
       | grep -E -v "^#"
       | xargs apt-get -qy install'
    - 'apt-get -qy install
       apt-file
       libdist-zilla-plugin-test-notabs-perl
       libdist-zilla-plugin-test-perl-critic-perl
       libdist-zilla-app-command-authordebs-perl
       libmodule-build-perl
       sudo'
    - apt-get update -qq  # Take into account APT configuration added by apt-file
    # Otherwise, apt-get called by "dzil authordebs --install" asks confirmation
    - echo 'APT::Get::Assume-Yes "true";' > /etc/apt/apt.conf.d/yes
    - cd $CI_PROJECT_DIR/config/chroot_local-includes/usr/src/perl5lib
    - dzil authordebs --install
    - dzil test --all

test-persistence-setup:
  script:
    - apt-get -qy install apt-file
    - 'echo "deb http://deb.debian.org/debian buster-backports main"
       > /etc/apt/sources.list.d/buster-backports.list'
    - 'echo "deb http://deb.debian.org/debian bullseye main"
       > /etc/apt/sources.list.d/bullseye.list'
    # Needs to be kept in sync with config/chroot_apt/preferences
    - |
      cat > /etc/apt/preferences.d/electrum.pref <<-'EOF'
      Explanation: unavailable in Buster
      Package: electrum python3-electrum python3-aiohttp-socks python3-aiorpcx python3-ecdsa
      Pin: release o=Debian,n=bullseye
      Pin-Priority: 999

      Explanation: Electrum needs a version newer than the one in Buster
      Package: python3-attr python3-protobuf libprotobuf23
      Pin: release o=Debian Backports,n=buster-backports
      Pin-Priority: 990

      Explanation: Electrum 4.0.2 and recent TREZOR firmware need 0.12
      Package: python3-trezor trezor
      Pin: release o=Debian Backports,n=buster-backports
      Pin-Priority: 999

      Package: *
      Pin: release o=Debian,n=bullseye
      Pin-Priority: -10
      EOF
    - apt-get update -qq
    - 'cat config/chroot_local-packageslists/tails-perl5lib.list
       config/chroot_local-packageslists/tails-persistence-setup.list
       | grep -E -v "^#"
       | xargs apt-get -qy install'
    - 'apt-get -qy install
       libdist-zilla-plugin-test-notabs-perl
       libdist-zilla-plugin-test-perl-critic-perl
       libdist-zilla-app-command-authordebs-perl
       libmodule-build-perl
       libtest-most-perl
       libtest-spec-perl
       libtest-bdd-cucumber-perl
       liblwp-online-perl
       libdata-dumper-concise-perl
       libtest-fatal-perl
       libsys-statistics-linux-perl
       sudo
       xvfb'
    # Otherwise, apt-get called by "dzil authordebs --install" asks confirmation
    - echo 'APT::Get::Assume-Yes "true";' > /etc/apt/apt.conf.d/yes
    - cd $CI_PROJECT_DIR/config/chroot_local-includes/usr/src/perl5lib
    - dzil authordebs --install
    - cd $CI_PROJECT_DIR/config/chroot_local-includes/usr/src/persistence-setup
    - dzil authordebs --install
    - >
      PERL5LIB=$CI_PROJECT_DIR/config/chroot_local-includes/usr/src/perl5lib/lib
      TAILS_GIT_CHECKOUT=$CI_PROJECT_DIR
      LC_ALL=C.UTF-8
      xvfb-run --auto-servernum dzil test --all

test-python-doctest:
  script:
    - apt-get -qy install python3
    - config/chroot_local-includes/usr/local/lib/tails-gdm-error-message doctest --verbose

test-whisperback:
  script:
    - 'cat config/chroot_local-packageslists/whisperback.list | grep -E -v "^#"
       | xargs apt-get -qy install'
    - apt-get -qy install python3-pytest
    - 'PYTHONPATH=config/chroot_local-includes/usr/lib/python3/dist-packages
       pytest-3 --verbose --junit-xml=report.xml
       config/chroot_local-includes/usr/lib/python3/dist-packages/whisperBack/test.py'
  artifacts:
    when: always
    reports:
      junit: report.xml

apt-snapshots-expiry:
  script:
    - apt-get -qy install curl git
    - ./bin/apt-snapshots-expiry
  rules:
    - if: '$CI_COMMIT_BRANCH =~ /^stable|testing|devel$/'
    - changes:
        - config/APT_snapshots.d/*/serial
        - vagrant/definitions/tails-builder/config/APT_snapshots.d/*/serial
......@@ -644,20 +644,6 @@ def on_jenkins?
!ENV['JENKINS_URL'].nil?
end
# Run the automated test suite, forwarding any CLI arguments that follow
# the task name (or a bare `--`) on to ./run_test_suite.
desc 'Test Tails'
task :test do
  suite_args = ARGV.drop_while { |word| ['test', '--'].include?(word) }
  if on_jenkins?
    # On Jenkins, make sure a `--` separator is present, skip fragile
    # scenarios outside the release process, and only run the doc tests
    # when the branch contains nothing but documentation changes.
    suite_args.push('--') unless suite_args.include?('--')
    suite_args.push('--tag', '~@fragile') unless releasing?
    base_branch = git_helper('base_branch')
    only_docs = git_helper('git_only_doc_changes_since?', "origin/#{base_branch}")
    suite_args.push('--tag', '@doc') if only_docs
  end
  run_command('./run_test_suite', *suite_args)
end

desc 'Clean up all build related files'
task clean_all: ['vm:destroy', 'basebox:clean_all']
......
......@@ -3,7 +3,7 @@
BASE_BRANCHES="stable testing devel"
# Returns "" if in detached head
git_current_branch() {
local git_ref
if git_ref="$(git symbolic-ref HEAD 2>/dev/null)"; then
......
......@@ -11,6 +11,7 @@ import sys
from datetime import datetime, timedelta
from pathlib import Path
from typing import List
LOG_FORMAT = "%(asctime)-15s %(levelname)s %(message)s"
log = logging.getLogger()
......@@ -64,7 +65,7 @@ def main():
"gc",
help="Garbage collect expired data from the cache")
parser_gc.add_argument(
"--max-days", type=int, action="store", default=30,
"--max-days", type=int, action="store", default=20,
help="Number of days after which cached data expires")
parser_gc.set_defaults(func=gc)
......@@ -102,7 +103,7 @@ def main():
for key_file in KEY_FILES + [args.cache_base_dir]:
if not Path(key_file).exists():
log.error("%s does not exist" % (key_file))
log.error("%s does not exist", key_file)
sys.exit(1)
if args.command is None:
......@@ -113,18 +114,19 @@ def main():
def gc(args):
    """Garbage-collect cache entries older than args.max_days days.

    Removes every subdirectory of args.cache_base_dir whose mtime is
    older than the cutoff; logs what is deleted and what is kept.
    """
    log.info("Garbage collecting expired data from the cache…")
    # Skip lost+found, which the filesystem creates at the volume root.
    cache_dirs = [d for d in Path(args.cache_base_dir).iterdir()
                  if d.is_dir() and d.name != "lost+found"]
    delete_before = datetime.utcnow() - timedelta(days=args.max_days)
    log.debug("Will delete data created before %s", delete_before)
    for cache_dir in cache_dirs:
        # The directory mtime tells us when this cache key was last written.
        mtime = datetime.utcfromtimestamp(cache_dir.stat().st_mtime)
        if mtime < delete_before:
            log.info(" - Deleting cache for %s with mtime %s",
                     cache_dir.name, mtime)
            shutil.rmtree(cache_dir)
        else:
            log.debug(" - Cache for %s has mtime %s ⇒ keeping",
                      cache_dir.name, mtime)
def get(args):
......@@ -143,7 +145,7 @@ def get(args):
raise FileNotFoundError("Found no cached %s for key %s"
% (file_to_get, args.cache_key))
log.debug("Copying %s from the cache" % (file_to_get))
log.debug("Copying %s from the cache", file_to_get)
if cached_file.is_dir():
shutil.copytree(src=cached_file, dst=dest_file, symlinks=True)
else:
......@@ -161,7 +163,7 @@ def put(args):
raise FileNotFoundError("Cannot store non-existing %s in the cache"
% file_to_cache)
log.debug("Caching %s with key %s" % (file_to_cache, args.cache_key))
log.debug("Caching %s with key %s", file_to_cache, args.cache_key)
cached_file.parent.mkdir(parents=True)
if file_to_cache.is_dir():
shutil.copytree(src=file_to_cache, dst=cached_file, symlinks=True)
......@@ -173,7 +175,7 @@ def put(args):
def forget(args):
cache_dir = Path(args.cache_base_dir, args.cache_key)
if cache_dir.exists():
log.info("Deleting cached data for key %s" % args.cache_key)
log.info("Deleting cached data for key %s", args.cache_key)
shutil.rmtree(cache_dir)
else:
log.info("No cached data to forget for key %s", args.cache_key)
......@@ -186,18 +188,19 @@ def package_version(package: str) -> str:
check=True).stdout.rstrip()
def compute_cache_key(key_files: List[str], key_packages: List[str]) -> str:
    """Return a SHA-1 hex digest identifying the current cache state.

    The key is derived from the hash of the last Git commit that touched
    any of key_files, plus the installed version of each of key_packages.
    Raises subprocess.CalledProcessError if the git invocation fails.
    """
    input_data = {
        'git_commit': subprocess.run(
            ["git", "log", "-1", "--pretty=%H", "--", *key_files],
            stdout=subprocess.PIPE, universal_newlines=True,
            check=True).stdout.rstrip(),
        'packages': dict(
            (package, package_version(package))
            for package in sorted(key_packages)
        ),
    }
    # Sorted packages + sort_keys make the serialization, and thus the
    # resulting key, deterministic.
    serialized = json.dumps(input_data, sort_keys=True)
    log.debug("Serialized data: %s", serialized)
    return hashlib.sha1(bytes(serialized, encoding='utf-8')).hexdigest()
......
......@@ -2,11 +2,10 @@
set -eu
FAILURE=no
for dir in config/APT_snapshots.d vagrant/definitions/tails-builder/config/APT_snapshots.d; do
(
set -eu
echo "${dir:?}:"
cd "${dir:?}"
cd "$(git rev-parse --show-toplevel)/${dir:?}"
for ARCHIVE in * ; do
SERIAL="$(cat ${ARCHIVE:?}/serial)"
if [ "${SERIAL:?}" = 'latest' ]; then
......@@ -28,8 +27,18 @@ for dir in config/APT_snapshots.d vagrant/definitions/tails-builder/config/APT_s
esac
EXPIRY="$(curl --silent "https://time-based.snapshots.deb.tails.boum.org/${ARCHIVE:?}/dists/${DIST:?}/snapshots/${SERIAL:?}/Release" | sed -n 's/^Valid-Until:\s\+\(.*\)$/\1/p')"
fi
echo "* Archive '${ARCHIVE:?}' uses snapshot '${SERIAL:?}' which expires on: ${EXPIRY:?}"
STATUS="archive '${ARCHIVE:?}' uses snapshot '${SERIAL:?}' which expires on: ${EXPIRY:?}"
if [ "${EXPIRY}" = 'never' ] || \
[ "$(date -d "${EXPIRY}" +%s)" -ge "$(date -d "now + 1 month" +%s)" ]; then
echo "OK: ${STATUS}"
else
FAILURE=yes
echo "FAIL: ${STATUS}, which is within one month!" >&2
fi
done
echo ---
)
done
if [ "${FAILURE}" = yes ]; then
exit 1
fi
#!/usr/bin/env ruby
# This script reports which binary/source packages that can be safely
# deleted from one of the main APTs suite in our custom repo. It requires a
# .build-manifest as the source for which packages that are used
# during build and thus cannot be deleted.
# ruby-debian provides the Debian module used to parse APT Sources
# indices; fail early with an actionable hint if it is missing.
begin
  require 'debian'
rescue LoadError
  raise 'please install the ruby-debian package'
end
require 'open-uri'
require 'optparse'
require 'yaml'
# Raised when no source package can be found for a binary package.
class NoSource < StandardError
end
# Return the name of the source package that builds the given binary
# package, looked up in APT_SOURCES. Raises NoSource when nothing
# matches, and a plain RuntimeError when the match is ambiguous.
def source_package(package)
  # The -dbg(sym) packages are not listed, so we look for the
  # original package's source instead, which will be the same.
  base_name = package.sub(/-dbg(sym)?$/, '')
  found = []
  APT_SOURCES.each_package do |dsc|
    found << dsc if dsc.binary.include?(base_name)
  end
  raise NoSource, "found no source package for #{package}" if found.empty?
  raise "found multiple source packages for #{package}" if found.size > 1
  found.first.package
end
# Return the list of binary package names built from the given source
# package, as recorded in the APT Sources index.
def binary_packages(package)
  APT_SOURCES[package].binary
end
# Parsed command-line options: the APT suite to inspect and an optional
# local build manifest to use instead of downloading the latest one.
Options = Struct.new(:suite, :build_manifest, keyword_init: true)

# Command-line parsing for this script; Parser.parse mutates the given
# argv (via OptionParser#parse!) and returns a populated Options.
class Parser
  def self.parse(options)
    parsed = Options.new(suite: nil, build_manifest: nil)

    opt_parser = OptionParser.new do |opts|
      opts.on(
        '--suite SUITE',
        'Look for cruft in APT suite SUITE'
      ) { |suite| parsed.suite = suite }

      opts.on(
        '--build-manifest MANIFEST',
        'Use specified build manifest instead of downloading the latest one'
      ) { |build_manifest| parsed.build_manifest = build_manifest }

      opts.on('-h', '--help', 'Prints this help') do
        puts opts
        exit
      end
    end

    opt_parser.parse!(options)
    # --suite is mandatory; everything else has a sensible default.
    raise 'Please use --suite SUITE' if parsed.suite.nil?
    parsed
  end
end
options = Parser.parse(ARGV)

# Cruft checking only makes sense for the suites we actively maintain.
allowed_suites = ['stable', 'devel']
unless allowed_suites.include?(options.suite)
  # Fixed: stray apostrophe after "following" in the original message.
  raise "we only support checking the following " \
        "custom APT suites: #{allowed_suites.join(', ')}"
end
# Load the Sources index of the Tails custom APT repo from the local APT
# lists cache, trying the clearnet hostname first, then the onion service.
begin
  apt_repo_hostnames = [
    'deb.tails.boum.org',
    'umjqavufhoix3smyq6az2sx4istmuvsgmz4bq5u5x56rnayejoo6l2qd.onion',
  ]
  apt_repo_filenames = apt_repo_hostnames.map do |hostname|
    "/var/lib/apt/lists/#{hostname}_dists_#{options.suite}_main_source_Sources"
  end
  # `find` yields nil when neither file exists; Debian::Sources.new then
  # fails, which the rescue below turns into an actionable hint.
  apt_repo_filename = apt_repo_filenames.find do |filename|
    File.exist?(filename)
  end
  APT_SOURCES = Debian::Sources.new(apt_repo_filename).freeze
rescue
  # NOTE(review): a bare rescue also masks unrelated StandardErrors here;
  # consider rescuing the specific error Debian::Sources raises.
  raise "could not find Tails custom APT repo's sources, " \
        "please add this to your APT sources:\n" \
        "deb-src [arch=amd64] http://deb.tails.boum.org/ #{options.suite} main"
end
# Obtain the build manifest: either the file given on the command line,
# or the latest successful Jenkins build's manifest for this suite.
if options.build_manifest.nil?
  url = "https://nightly.tails.boum.org/build_Tails_ISO_#{options.suite}/lastSuccessful/archive/latest.build-manifest"
  begin
    # safe_load is enough: the manifest only uses plain YAML types.
    manifest = YAML.safe_load(
      URI.open(url).read
    )
  rescue OpenURI::HTTPError
    raise "got HTTP 404 when attempting to fetch: #{url}\n" \
          'Please try again in a while -- Jenkins sometimes needs some time ' \
          'to create the latest.build-manifest symlink after a build completes'
  end
else
  manifest = YAML.load_file(options.build_manifest)
end
# Compare the custom APT repo's contents (listed via reprepro over ssh)
# against what the build manifest says was actually used during the build.
all_source_packages = []
used_source_packages = []
binary_cruft_candidates = []
custom_packages = `ssh reprepro@incoming.deb.tails.boum.org reprepro list #{options.suite}`
custom_packages.each_line(chomp: true) do |line|
  # NOTE(review): assumes each reprepro line splits into a type field
  # followed by package name and version — confirm against reprepro output.
  type, name, version = line.split
  # String#[] with a string: truthy iff "source" is a substring of type.
  if type['source']
    all_source_packages << name
  else
    installed = manifest['packages']['binary'].find { |x| x['package'] == name }
    if installed.nil? || version != installed['version']
      # Not used by the latest build (or a different version was used):
      # candidate for deletion.
      binary_cruft_candidates << name
    else
      used_source_packages << source_package(name)
    end
  end
end

# Source packages that no used binary package was built from.
source_cruft = all_source_packages.uniq - used_source_packages

binary_cruft = []
binary_cruft_candidates.each do |p|
  begin
    # Keep candidates whose source is still used by some other binary.
    next if used_source_packages.include?(source_package(p))
  rescue NoSource
    # If we don't have a source for a package, it should be a package
    # we forgot to clean up when we removed its sources.
  end
  binary_cruft << p
end
# Report the cruft, with ready-to-paste reprepro clean-up commands.
unless binary_cruft.empty?
  puts 'Binary packages that are not used:'
  binary_cruft.each { |p| puts " - #{p}" }
  puts
  puts " Clean up command:\n" \
       ' ssh reprepro@incoming.deb.tails.boum.org ' \
       "reprepro remove #{options.suite} #{binary_cruft.join(' ')}"
  puts
end

unless source_cruft.empty?
  puts 'Source packages that are not used:'
  source_cruft.each { |p| puts " - #{p}" }
  puts
  puts " Clean up command:\n" \
       ' ssh reprepro@incoming.deb.tails.boum.org ' \
       "reprepro removesrcs #{options.suite} #{source_cruft.join(' ')}"
end
#! /usr/bin/python3
import jinja2
def call_for_trusted_reproducer_contents(args) -> str:
    """Render the call-for-trusted-reproducer template for args.version.

    Loads the template from config/release_management/templates relative
    to the current working directory.
    """
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader('config/release_management/templates'))
    template = env.get_template('call_for_trusted_reproducer.mdwn')
    return template.render(version=args.version)
if __name__ == '__main__':
    # CLI entry point: --version is the only (required) option.
    import argparse

    cli = argparse.ArgumentParser()
    cli.add_argument('--version', required=True)
    cli_args = cli.parse_args()
    print(call_for_trusted_reproducer_contents(cli_args))
#! /usr/bin/python3
# Documentation: https://tails.boum.org/contribute/working_together/GitLab/#api
import datetime
import email.utils
import functools
......@@ -42,11 +44,11 @@ log = logging.getLogger()
class GitLabWrapper(gitlab.Gitlab):
@functools.lru_cache(maxsize=None)
def project(self, project_id):
    """Fetch the GitLab project for project_id, memoized per wrapper.

    maxsize=None (unbounded) keeps compatibility with Python < 3.8,
    where the bare @functools.lru_cache form is not supported.
    """
    return self.projects.get(project_id)
@functools.lru_cache(maxsize=None)
def project_path_with_namespace(self, project_id):
    """Return the memoized path-with-namespace of the given project."""
    return self.project(project_id).path_with_namespace
......
#! /usr/bin/python3
# Documentation: https://tails.boum.org/contribute/working_together/GitLab/#api

import functools
import sys
import logging
import os

from datetime import datetime
from pathlib import Path

# Third-party dependencies are optional at import time: exit with an
# actionable message instead of a traceback when they are missing.
try:
    import gitlab  # type: ignore
except ImportError:
    sys.exit("You need to install python3-gitlab to use this program.")
try:
    from dateutil.relativedelta import relativedelta
except ImportError:
    sys.exit("You need to install python3-dateutil to use this program.")

# Allow overriding the python-gitlab configuration file via environment.
PYTHON_GITLAB_CONFIG_FILE = os.getenv('PYTHON_GITLAB_CONFIG_FILE',
                                      default=Path.home() /
                                      '.python-gitlab.cfg')
......@@ -33,11 +44,11 @@ log = logging.getLogger()
class GitLabWrapper(gitlab.Gitlab):
@functools.lru_cache(maxsize=None)
def project(self, project_id):
    """Fetch the GitLab project for project_id, memoized per wrapper.

    maxsize=None (unbounded) keeps compatibility with Python < 3.8,
    where the bare @functools.lru_cache form is not supported.
    """
    return self.projects.get(project_id)
@functools.lru_cache
@functools.lru_cache(maxsize=None)
def project_from_name(self, project_name):
project = [
p for p in self.projects.list(all=True)
......
......@@ -27,7 +27,7 @@ done
### Empty config/APT_overlays.d/
git checkout "$BRANCH"
git rm config/APT_overlays.d/*
find config/APT_overlays.d -type f -not -name .placeholder -exec git rm '{}' \;
git commit config/APT_overlays.d/ \
-m "Empty the list of APT overlays: they were merged"
......