Commit e4d62c56 authored by sajolida

Merge remote-tracking branch 'origin/master' into doc/18032-vulnerabilities

parents 94562925 ea2ed386
@@ -43,6 +43,20 @@ check-po-msgfmt:
     - apt-get -qy install python3 gettext
     - ./bin/check-po-msgfmt

+check-po-meta-date:
+  rules:
+    - if: '$CI_COMMIT_BRANCH =~ /^(master|stable|testing|devel)$/'
+    - changes:
+        - ./**.po
+  script:
+    - apt-get -qy install git ruby
+    - ./bin/sanity-check-website
+
+check-translatable-live-website-urls:
+  script:
+    - apt-get -qy install python3-polib
+    - ./bin/check-translatable-live-website-urls po/tails.pot
+
 test-iuk:
   script:
     - 'cat config/chroot_local-packageslists/tails-perl5lib.list
@@ -29,6 +29,7 @@ for type in iso img ; do
         "${torrent_dirname:?}" \
         /var/lib/transmission-daemon/downloads/ && \
     transmission-remote --add ${image_filename:?}.torrent \
-        --find /var/lib/transmission-daemon/downloads/
+        --find /var/lib/transmission-daemon/downloads/ && \
+    transmission-remote --list
EOF
done
#!/usr/bin/python3

import re
import sys

import polib  # type: ignore

tails_live_website_urls = []
for entry in [e for e in polib.pofile(sys.argv[1]) if not e.obsolete]:
    tails_live_website_urls += re.findall(
        '(http[s]?://tails.boum.org/[a-zA-Z/]*)', entry.msgid)

if tails_live_website_urls:
    print("Error: found translatable URLs to the Tails live website:")
    print("\n".join(" - " + url for url in tails_live_website_urls))
    print("")
    print("Please make them non-translatable.")
    print("For context, see tails/tails#17958.")
    sys.exit(1)
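For context on what this new check catches: the pattern flags any msgid that embeds a URL under tails.boum.org. A minimal illustration of the same regex applied to a hypothetical msgid (the string below is an invented example, not part of the commit):

import re

# Hypothetical translatable string, as it could appear in po/tails.pot:
msgid = 'See <a href="https://tails.boum.org/install/">the guide</a>.'

# Same pattern as bin/check-translatable-live-website-urls uses:
print(re.findall('(http[s]?://tails.boum.org/[a-zA-Z/]*)', msgid))
# -> ['https://tails.boum.org/install/']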
@@ -2,13 +2,19 @@
 import argparse
 import logging
 import os
+import re
 import subprocess
 import sys
 from typing import List
 from pathlib import Path
+from urllib.parse import urlparse
+from urllib.request import Request, urlopen

-JENKINS_IUKS_BASE_URL = "https://nightly.tails.boum.org/parallel_collect_IUKs/builds"
+from bs4 import BeautifulSoup  # type: ignore
+
+JENKINS_IUKS_BASE_URL = "https://nightly.tails.boum.org/build_IUKs"
 RSYNC_SERVER_HOSTNAME = "rsync.lizard"

 LOG_FORMAT = "%(asctime)-15s %(levelname)s %(message)s"
 log = logging.getLogger()
@@ -17,17 +23,26 @@ log = logging.getLogger()
 def main():
     parser = argparse.ArgumentParser(
         description="Copy IUKs from Jenkins to our rsync server \
-        and verify that they match those built locally"
-    )
-    parser.add_argument("--hashes-file", type=str, action="store", required=True)
-    parser.add_argument("--jenkins-build-id", type=int, action="store", required=True)
+        and verify that they match those built locally")
+    parser.add_argument("--hashes-file",
+                        type=str,
+                        action="store",
+                        required=True)
+    parser.add_argument("--jenkins-build-id",
+                        type=int,
+                        action="store",
+                        required=True)
     parser.add_argument("--work-dir", type=str, action="store", default=".")
-    parser.add_argument("-q", "--quiet", action="store_true",
+    parser.add_argument("-q",
+                        "--quiet",
+                        action="store_true",
                         help="quiet output")
     parser.add_argument("--debug", action="store_true", help="debug output")
-    parser.add_argument("--skip-sending-hashes-file", action="store_true",
+    parser.add_argument("--skip-sending-hashes-file",
+                        action="store_true",
                         help="Assume the hashes file was uploaded already")
-    parser.add_argument("--skip-downloading-iuks", action="store_true",
+    parser.add_argument("--skip-downloading-iuks",
+                        action="store_true",
                         help="Assume the IUKs were already downloaded")

     args = parser.parse_args()
@@ -39,7 +54,7 @@ def main():
         logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)

     if not Path(args.hashes_file).exists():
-        log.error("%s does not exist" % (args.hashes_file))
+        log.error("%s does not exist", args.hashes_file)
         sys.exit(1)

     if not args.skip_sending_hashes_file:
@@ -65,61 +80,112 @@ def main():
     )


-def send_hashes_file(
-        hashes_file: str,
-        desthost: str,
-        destdir: str) -> None:
+def send_hashes_file(hashes_file: str, desthost: str, destdir: str) -> None:
     log.info("Sending %(f)s to %(d)s on %(h)s…" % {
         "f": hashes_file,
         "d": destdir,
         "h": desthost,
     })
     subprocess.run(
-        ["scp", hashes_file, "%s:%s" % (desthost, destdir)],
-        check=True
-    )
+        ["scp", hashes_file, "%s:%s" % (desthost, destdir)], check=True)


 def iuks_listed_in(hashes_file: str) -> List[str]:
     with Path(hashes_file).open() as f:
         lines = f.readlines()
-    return [l.split(' ')[-1].rstrip() for l in lines]
-
-
-def download_iuks_from_jenkins(
-        hashes_file: str,
-        desthost: str,
-        destdir: str,
-        jenkins_iuks_base_url: str,
-        jenkins_build_id: int) -> None:
-    log.info("Downloading IUKs from Jenkins to %s…" % (desthost))
-    iuks = iuks_listed_in(hashes_file)
-    log.debug("IUKS: %s" % ', '.join(iuks))
-    for iuk in iuks:
-        log.debug("Downloading %s to %s" % (iuk, destdir))
-        url = "%s/%s/archive/%s" % (
-            jenkins_iuks_base_url,
-            jenkins_build_id,
-            iuk
-        )
-        subprocess.run(
-            ["ssh", desthost, "wget", "--quiet", "--no-clobber",
-             "-O", "%s/%s" % (destdir, iuk), url],
-            check=True
-        )
+    return [line.split(' ')[-1].rstrip() for line in lines]
+
+
+def get_jenkins_iuks_urls(jenkins_iuks_base_url: str,
+                          jenkins_build_id: int) -> List[str]:
+    urls: List[str] = []
+    source_version_index_url = jenkins_iuks_base_url + \
+        "/configurations/axis-SOURCE_VERSION"
+    for source_version_url in [
+            source_version_index_url + '/' + link.get('href')
+            for link in BeautifulSoup(
+                urlopen(Request(source_version_index_url)),
+                'html.parser').find_all(href=re.compile('^[1-9]'))
+    ]:
+        axis_label_index_url = source_version_url + "axis-label_exp/"
+        log.debug("Looking at %s", axis_label_index_url)
+        label_urls = [
+            axis_label_index_url + link.get('href')
+            for link in BeautifulSoup(urlopen(Request(axis_label_index_url)),
+                                      'html.parser').find_all(
+                                          href=re.compile('^[a-z]'))
+        ]
+        if len(label_urls) == 0:
+            log.debug("Found no label URL in %s, ignoring this source version",
+                      axis_label_index_url)
+            continue
+        if len(label_urls) > 1:
+            log.error("Found too many label URLs in %s: %s",
+                      axis_label_index_url, label_urls)
+            sys.exit(1)
+        label_url = label_urls[0]
+        artifacts_index_url = label_url + '/builds/' + str(
+            jenkins_build_id) + '/archive/'
+        log.debug("Looking at %s", artifacts_index_url)
+        iuk_urls = [
+            artifacts_index_url + link.get('href') for link in BeautifulSoup(
+                urlopen(Request(artifacts_index_url)), 'html.parser').find_all(
+                    href=re.compile('[.]iuk$'))
+        ]
+        if len(iuk_urls) == 0:
+            log.debug("Found no IUK URL in %s, ignoring this source version",
+                      artifacts_index_url)
+            continue
+        if len(iuk_urls) > 1:
+            log.error("Found too many IUK URLs in %s: %s", artifacts_index_url,
+                      iuk_urls)
+            sys.exit(1)
+        else:
+            iuk_url = iuk_urls[0]
+        urls.append(iuk_url)
+    log.debug("Found IUK URLs: %s", urls)
+    return urls
+
+
+def download_iuks_from_jenkins(hashes_file: str, desthost: str, destdir: str,
+                               jenkins_iuks_base_url: str,
+                               jenkins_build_id: int) -> None:
+    log.info("Downloading IUKs from Jenkins to %s…", desthost)
+    expected_iuks = iuks_listed_in(hashes_file)
+    log.debug("IUKS: %s", ', '.join(expected_iuks))
+    jenkins_iuks_urls = get_jenkins_iuks_urls(jenkins_iuks_base_url,
+                                              jenkins_build_id)
+    jenkins_iuks = [
+        os.path.basename(urlparse(url).path) for url in jenkins_iuks_urls
+    ]
+    if set(expected_iuks) != set(jenkins_iuks):
+        log.error(
+            "Jenkins' set of IUKs differs from local one:\n"
+            " - locally: %s\n"
+            " - Jenkins: %s\n", expected_iuks, jenkins_iuks)
+        sys.exit(1)
+    for iuk_url in jenkins_iuks_urls:
+        log.debug("Downloading %s to %s", iuk_url, destdir)
+        subprocess.run([
+            "ssh", desthost, "wget", "--quiet", "--no-clobber",
+            "--directory-prefix=%s" % destdir, iuk_url
+        ],
+                       check=True)


 def verify_iuks(desthost: str, iuks_dir: str, hashes_file: str) -> None:
     log.info("Verifying that IUKs built on Jenkins match those you've built…")
     try:
-        subprocess.run(
-            ["ssh", desthost,
-             "cd '%(d)s' && sha256sum --check --strict '%(f)s'" % {
-                 "d": iuks_dir,
-                 "f": Path(hashes_file).name,
-             }],
-            check=True
-        )
+        subprocess.run([
+            "ssh", desthost,
+            "cd '%(d)s' && sha256sum --check --strict '%(f)s'" % {
+                "d": iuks_dir,
+                "f": Path(hashes_file).name,
+            }
+        ],
+                       check=True)
     except subprocess.CalledProcessError:
         print("\nERROR: IUKs built on Jenkins don't match yours\n",
               file=sys.stderr)
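The whole flow above hinges on the hashes file, which is expected to look like sha256sum output: iuks_listed_in() takes the last space-separated field of each line as an IUK filename, and verify_iuks() later replays the very same file through sha256sum --check on the remote host. A minimal sketch of that parsing, with made-up IUK names and shortened hashes:

# Hypothetical lines, as produced by `sha256sum *.iuk` (hashes shortened):
lines = [
    "3f5ae2...  Tails_amd64_4.21_to_4.23.iuk\n",
    "9b017c...  Tails_amd64_4.22_to_4.23.iuk\n",
]

# Same extraction as iuks_listed_in(): last space-separated field, stripped.
iuks = [line.split(' ')[-1].rstrip() for line in lines]
assert iuks == ["Tails_amd64_4.21_to_4.23.iuk", "Tails_amd64_4.22_to_4.23.iuk"]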
@@ -3,7 +3,6 @@
# shellcheck disable=SC2029

set -eu
-set -x

error () {
    echo "error: ${*}" >&2
#! /usr/bin/python3

# Documentation: https://tails.boum.org/contribute/working_together/GitLab/#api

import functools
import sys
import logging
import os
import datetime

try:
    import dateutil.parser
except ImportError:
    sys.exit("You need to install python3-dateutil to use this program.")
try:
    import requests
except ImportError:
    sys.exit("You need to install python3-requests to use this program.")
try:
    from cachecontrol import CacheControlAdapter  # type: ignore
    from cachecontrol.heuristics import OneDayCache
except ImportError:
    sys.exit("You need to install python3-cachecontrol to use this program.")
try:
    import gitlab  # type: ignore
except ImportError:
    sys.exit("You need to install python3-gitlab to use this program.")
from pathlib import Path

PYTHON_GITLAB_CONFIG_FILE = os.getenv('PYTHON_GITLAB_CONFIG_FILE',
                                      default=Path.home() /
                                      '.python-gitlab.cfg')
PYTHON_GITLAB_NAME = os.getenv('GITLAB_NAME', default='Tails')
GROUP_NAME = 'tails'
PROJECT_NAME = GROUP_NAME + '/' + 'tails'
LABEL = 'UX:debt'
ALL_REPORTS = ['added', 'removed', 'solved', 'rejected']
LOG_FORMAT = "%(asctime)-15s %(levelname)s %(message)s"
log = logging.getLogger()


class GitLabWrapper(gitlab.Gitlab):
    @classmethod
    def from_config(cls, gitlab_name, config_files):
        # adapter = CacheControlAdapter(heuristic=ExpiresAfter(days=1))
        adapter = CacheControlAdapter(heuristic=OneDayCache())
        session = requests.Session()
        session.mount('https://', adapter)
        config = gitlab.config.GitlabConfigParser(gitlab_id=gitlab_name,
                                                  config_files=config_files)
        return cls(config.url,
                   private_token=config.private_token,
                   oauth_token=config.oauth_token,
                   job_token=config.job_token,
                   ssl_verify=config.ssl_verify,
                   timeout=config.timeout,
                   http_username=config.http_username,
                   http_password=config.http_password,
                   api_version=config.api_version,
                   per_page=config.per_page,
                   pagination=config.pagination,
                   order_by=config.order_by,
                   session=session)

    @functools.lru_cache(maxsize=None)
    def project(self, project_id):
        return self.projects.get(project_id)

    @functools.lru_cache(maxsize=None)
    def project_from_name(self, project_name):
        project = [
            p for p in self.projects.list(all=True)
            # Disambiguate between projects whose names share a common prefix
            if p.path_with_namespace == project_name
        ][0]
        assert isinstance(project, gitlab.v4.objects.Project)
        return project


class UxDebtChangesGenerator(object):
    def __init__(self, gl, group, project_name: str, after: datetime.datetime):
        self.gl = gl
        self.group = group
        self.project = self.gl.project_from_name(project_name)
        self.after = datetime.datetime(after.year,
                                       after.month,
                                       after.day,
                                       tzinfo=datetime.timezone.utc)

    def closed_issues(self, reason: str) -> list:
        closed_issues = []
        closed_issues_events = self.project.events.list(as_list=False,
                                                        target_type='issue',
                                                        action='closed',
                                                        after=self.after)
        gl_closed_issues_with_duplicates = [
            event.target_iid for event in closed_issues_events
        ]
        gl_closed_issues = []
        for issue in gl_closed_issues_with_duplicates:
            if issue not in gl_closed_issues:
                gl_closed_issues.append(issue)
        for issue in gl_closed_issues:
            issue = self.project.issues.get(issue)
            # Ignore issues that have been reopened since
            if issue.state != 'closed':
                continue
            if LABEL not in issue.labels:
                continue
            if reason == 'resolved':
                if 'Rejected' in issue.labels:
                    continue
            elif reason == 'rejected':
                if 'Rejected' not in issue.labels:
                    continue
            else:
                raise NotImplementedError("Unsupported reason %s" % reason)
            closed_issues.append({
                "title": issue.title,
                "web_url": issue.web_url,
            })
        return closed_issues

    def label_added(self):
        issues = []
        for issue in self.project.issues.list(state='opened', labels=[LABEL]):
            if LABEL not in issue.labels:
                continue
            events = issue.resourcelabelevents.list()
            for event in events:
                if event.action != 'add' or event.label['name'] != 'UX:debt':
                    continue
                event_created_at = dateutil.parser.isoparse(event.created_at)
                if event_created_at < self.after:
                    continue
                issues.append({
                    "title": issue.title,
                    "web_url": issue.web_url,
                })
        return issues

    def label_removed(self):
        issues = []
        for issue in self.project.issues.list(state='opened'):
            if LABEL in issue.labels:
                continue
            events = issue.resourcelabelevents.list()
            for event in events:
                if event.action != 'remove' or event.label['name'] != 'UX:debt':
                    continue
                event_created_at = dateutil.parser.isoparse(event.created_at)
                if event_created_at < self.after:
                    continue
                issues.append({
                    "title": issue.title,
                    "web_url": issue.web_url,
                })
        return issues


if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--since',
        type=datetime.date.fromisoformat,
        required=True,
        help="Consider changes after this date, in the format YYYY-MM-DD")
    parser.add_argument(
        "--report",
        dest='reports',
        action='append',
        help="Only run the specified report (among %s)\n" % ALL_REPORTS +
        "Can be specified multiple times to run several reports.")
    parser.add_argument("--debug", action="store_true", help="debug output")
    args = parser.parse_args()
    if args.debug:
        logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)
    else:
        logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)

    gl = GitLabWrapper.from_config(PYTHON_GITLAB_NAME,
                                   config_files=[PYTHON_GITLAB_CONFIG_FILE])
    gl.auth()

    group = gl.groups.list(search=GROUP_NAME)[0]
    assert isinstance(group, gitlab.v4.objects.Group)

    reports = args.reports or ALL_REPORTS
    log.debug("Preparing these reports: %s", reports)

    changes_generator = UxDebtChangesGenerator(gl, group, PROJECT_NAME,
                                               args.since)

    if 'added' in reports:
        print("Issues that had the UX:debt label added")
        print("=======================================")
        print()
        for issue in changes_generator.label_added():
            print(f'- {issue["title"]}')
            print(f'  {issue["web_url"]}')
        print()

    if 'removed' in reports:
        print("Issues that had the UX:debt label removed")
        print("=========================================")
        print()
        for issue in changes_generator.label_removed():
            print(f'- {issue["title"]}')
            print(f'  {issue["web_url"]}')
        print()

    if 'solved' in reports:
        print("Solved issues")
        print("=============")
        print()
        for closed_issue in changes_generator.closed_issues(reason='resolved'):
            print(f'- {closed_issue["title"]}')
            print(f'  {closed_issue["web_url"]}')
        print()

    if 'rejected' in reports:
        print("Rejected issues")
        print("===============")
        print()
        for closed_issue in changes_generator.closed_issues(reason='rejected'):
            print(f'- {closed_issue["title"]}')
            print(f'  {closed_issue["web_url"]}')
        print()
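The added/removed reports above hinge on GitLab resource label events: each event carries an action ('add' or 'remove'), the label, and an ISO 8601 creation time. dateutil.parser.isoparse() returns timezone-aware datetimes, which is why __init__ rebuilds self.after with tzinfo=datetime.timezone.utc; comparing aware and naive datetimes would raise TypeError. A minimal sketch of the same filter over hypothetical event data (plain dicts standing in for python-gitlab's event objects):

import datetime
import dateutil.parser

# Invented events, shaped like the attributes the script reads:
events = [
    {"action": "add", "label": {"name": "UX:debt"},
     "created_at": "2020-11-02T10:00:00.000Z"},
    {"action": "add", "label": {"name": "UX:debt"},
     "created_at": "2020-09-01T10:00:00.000Z"},   # before the cutoff: dropped
    {"action": "remove", "label": {"name": "UX:debt"},
     "created_at": "2020-11-03T10:00:00.000Z"},   # wrong action for 'added'
]

# Timezone-aware cutoff, like self.after in UxDebtChangesGenerator:
after = datetime.datetime(2020, 10, 1, tzinfo=datetime.timezone.utc)

added = [e for e in events
         if e["action"] == "add" and e["label"]["name"] == "UX:debt"
         and dateutil.parser.isoparse(e["created_at"]) >= after]
assert len(added) == 1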
@@ -15,15 +15,13 @@ export SOURCE_DATE_YYYYMMDD="$(date --utc --date="$(dpkg-parsechangelog --show-f
export SOURCE_DATE_FAKETIME="$(date --utc --date="$(dpkg-parsechangelog --show-field=Date)" '+%Y-%m-%d %H:%M:%S')"

# Base for the string that will be passed to "lb config --bootappend-live"
-# FIXME: see [[bugs/sdmem_on_eject_broken_for_CD]] for explanation why we
-# need to set block.events_dfl_poll_msecs
-AMNESIA_APPEND="live-media=removable nopersistence noprompt timezone=Etc/UTC block.events_dfl_poll_msecs=1000 splash noautologin module=Tails slab_nomerge slub_debug=FZP mce=0 vsyscall=none page_poison=1 init_on_free=1 mds=full,nosmt"
+AMNESIA_APPEND="live-media=removable nopersistence noprompt timezone=Etc/UTC splash noautologin module=Tails slab_nomerge slub_debug=FZP mce=0 vsyscall=none page_poison=1 init_on_free=1 mds=full,nosmt"

# Options passed to isohybrid
AMNESIA_ISOHYBRID_OPTS="-h 255 -s 63 --id 42 --verbose"

# Kernel version
-KERNEL_VERSION='5.8.0-2'
+KERNEL_VERSION='5.9.0-0.bpo.2'
KERNEL_SOURCE_VERSION=$(
    echo "$KERNEL_VERSION" \
        | perl -p -E 's{\A (\d+ [.] \d+) [.] .*}{$1}xms'
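The perl one-liner above keeps only the leading major.minor pair of KERNEL_VERSION, so '5.9.0-0.bpo.2' yields '5.9' for KERNEL_SOURCE_VERSION. A Python equivalent of the same substitution, for illustration only (not part of the commit):

import re

# Mirrors s{\A (\d+ [.] \d+) [.] .*}{$1}xms, without the /x whitespace:
kernel_version = '5.9.0-0.bpo.2'
print(re.sub(r'\A(\d+[.]\d+)[.].*', r'\1', kernel_version))  # -> 5.9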
@@ -59,7 +59,7 @@ Pin: release o=Debian,n=buster-backports
Pin-Priority: 999

Package: linux-compiler-* linux-headers-* linux-image-* linux-kbuild-* linux-source-*
-Pin: release o=Debian,n=sid
+Pin: release o=Debian,n=buster-backports
Pin-Priority: 999

Explanation: src:live-boot (#15477)
@@ -11,6 +11,7 @@ systemctl enable memlockd.service
# Enable our own systemd unit files
systemctl enable initramfs-shutdown.service
systemctl enable onion-grater.service
+systemctl enable tails-allow-external-TailsData-access.service
systemctl enable tails-synchronize-data-to-new-persistent-volume-on-shutdown.service
systemctl enable tails-autotest-broken-Xorg.service
systemctl enable tails-autotest-remote-shell.service
[Unit]
Description=Allow access to external TailsData partitions
Requires=media-amnesia-TailsData.mount
After=media-amnesia-TailsData.mount

[Service]
ExecStart=setfacl -m user:amnesia:rx /media/amnesia/TailsData

[Install]
WantedBy=media-amnesia-TailsData.mount
@@ -18,33 +18,32 @@
# Author(s): Luke Macken <lmacken@redhat.com>

import os
import sys
import gettext
import locale

# Add sbin to PATH to support unprivileged mode
if os.path.exists('/usr/sbin') or os.path.exists('/usr/local/sbin'):
    try:
        os.environ['PATH'] = '/usr/local/sbin:/usr/sbin:' + os.environ['PATH']
-    except KeyError, e:
+    except KeyError as ex:
        os.environ['PATH'] = '/usr/local/sbin:/usr/sbin'


def utf8_gettext(*args, **kwargs):
-    " Translate string, converting it to a UTF-8 encoded bytestring "
+    """ Translate string, converting it to a UTF-8 encoded bytestring """
    return gettext.translation(
        'tails', '/usr/share/locale', fallback=True
    ).gettext(*args, **kwargs)

_ = utf8_gettext
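Since fallback=True makes gettext.translation() return a NullTranslations catalog when no matching 'tails' .mo file is installed, the _ alias is always safe to call. A minimal usage sketch, independent of the Tails catalogs:

import gettext

# With fallback=True this never raises; without an installed catalog the
# lookup degrades to the identity function and echoes the input back.
_ = gettext.translation('tails', '/usr/share/locale', fallback=True).gettext
print(_("Welcome to Tails!"))  # unchanged when no translation is installed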