diff --git a/pkg/apps/application-list.jsx b/pkg/apps/application-list.jsx
index 7afd38483f5c..438ea9fc9cd9 100644
--- a/pkg/apps/application-list.jsx
+++ b/pkg/apps/application-list.jsx
@@ -98,11 +98,19 @@ export const ApplicationList = ({ metainfo_db, appProgress, appProgressTitle, ac
comps.push(metainfo_db.components[id]);
comps.sort((a, b) => a.name.localeCompare(b.name));
- function get_config(name, distro_id, def) {
+ function get_config(name, os_release, def) {
+ // ID is a single value, ID_LIKE is a list
+ const os_list = [os_release.ID || "", ...(os_release.ID_LIKE || "").split(/\s+/)];
+
if (cockpit.manifests.apps && cockpit.manifests.apps.config) {
- let val = cockpit.manifests.apps.config[name];
- if (typeof val === 'object' && val !== null && !Array.isArray(val))
- val = val[distro_id];
+ const val = cockpit.manifests.apps.config[name];
+ if (typeof val === 'object' && val !== null && !Array.isArray(val)) {
+ for (const os of os_list) {
+ if (val[os])
+ return val[os];
+ }
+ return def;
+ }
return val !== undefined ? val : def;
} else {
return def;
@@ -112,8 +120,8 @@ export const ApplicationList = ({ metainfo_db, appProgress, appProgressTitle, ac
function refresh() {
read_os_release().then(os_release =>
PackageKit.refresh(metainfo_db.origin_files,
- get_config('appstream_config_packages', os_release.ID, []),
- get_config('appstream_data_packages', os_release.ID, []),
+ get_config('appstream_config_packages', os_release, []),
+ get_config('appstream_data_packages', os_release, []),
setProgress))
.finally(() => setProgress(false))
.catch(show_error);
@@ -121,7 +129,7 @@ export const ApplicationList = ({ metainfo_db, appProgress, appProgressTitle, ac
let refresh_progress, refresh_button, tbody;
if (progress) {
- refresh_progress = ;
+ refresh_progress = ;
refresh_button = ;
} else {
refresh_progress = null;
diff --git a/pkg/apps/manifest.json b/pkg/apps/manifest.json
index 0679c1b3bf21..56039e57f53b 100644
--- a/pkg/apps/manifest.json
+++ b/pkg/apps/manifest.json
@@ -14,10 +14,10 @@
"config": {
"appstream_config_packages": {
- "debian": ["appstream"], "ubuntu": ["appstream"]
+ "debian": ["appstream"]
},
"appstream_data_packages": {
- "fedora": ["appstream-data"], "rhel": ["appstream-data"]
+ "fedora": ["appstream-data"]
}
}
}
diff --git a/pkg/lib/cockpit-po-plugin.js b/pkg/lib/cockpit-po-plugin.js
index a7f31ca6466a..624198d31ac3 100644
--- a/pkg/lib/cockpit-po-plugin.js
+++ b/pkg/lib/cockpit-po-plugin.js
@@ -44,10 +44,7 @@ function get_plural_expr(statement) {
return expr;
}
-function buildFile(po_file, subdir, webpack_module, webpack_compilation) {
- if (webpack_compilation)
- webpack_compilation.fileDependencies.add(po_file);
-
+function buildFile(po_file, subdir, webpack_module, webpack_compilation, out_path, filter) {
return new Promise((resolve, reject) => {
const parsed = gettext_parser.po.parse(fs.readFileSync(po_file), 'utf8');
delete parsed.translations[""][""]; // second header copy
@@ -76,6 +73,9 @@ function buildFile(po_file, subdir, webpack_module, webpack_compilation) {
if (translation.comments.flag?.match(/\bfuzzy\b/))
continue;
+ if (!references.some(filter))
+ continue;
+
const key = JSON.stringify(context_prefix + msgid);
// cockpit.js always ignores the first item
chunks.push(`,\n ${key}: [\n null`);
@@ -90,8 +90,6 @@ function buildFile(po_file, subdir, webpack_module, webpack_compilation) {
const wrapper = config.wrapper?.(subdir) || DEFAULT_WRAPPER;
const output = wrapper.replace('PO_DATA', chunks.join('')) + '\n';
- const lang = path.basename(po_file).slice(0, -3);
- const out_path = (subdir ? (subdir + '/') : '') + 'po.' + lang + '.js';
if (webpack_compilation)
webpack_compilation.emitAsset(out_path, new webpack_module.sources.RawSource(output));
else
@@ -110,9 +108,20 @@ function init(options) {
function run(webpack_module, webpack_compilation) {
const promises = [];
- config.subdirs.map(subdir =>
- promises.push(...get_po_files().map(po_file =>
- buildFile(po_file, subdir, webpack_module, webpack_compilation))));
+ for (const subdir of config.subdirs) {
+ for (const po_file of get_po_files()) {
+ if (webpack_compilation)
+ webpack_compilation.fileDependencies.add(po_file);
+ const lang = path.basename(po_file).slice(0, -3);
+ promises.push(Promise.all([
+ // Separate translations for the manifest.json file and normal pages
+ buildFile(po_file, subdir, webpack_module, webpack_compilation,
+ `${subdir}/po.${lang}.js`, str => !str.includes('manifest.json')),
+ buildFile(po_file, subdir, webpack_module, webpack_compilation,
+ `${subdir}/po.manifest.${lang}.js`, str => str.includes('manifest.json'))
+ ]));
+ }
+ }
return Promise.all(promises);
}
diff --git a/pkg/shell/index.html b/pkg/shell/index.html
index 6d09105f32bf..96a76a89d507 100644
--- a/pkg/shell/index.html
+++ b/pkg/shell/index.html
@@ -8,7 +8,10 @@
+
+
+
diff --git a/src/client/cockpit-client b/src/client/cockpit-client
index 68eb110b317e..003ded3de0d8 100755
--- a/src/client/cockpit-client
+++ b/src/client/cockpit-client
@@ -55,6 +55,17 @@ def prctl(*args):
raise Exception('prctl() failed')
+def get_user_state_dir():
+ try:
+ # GLib ≥ 2.72
+ return GLib.get_user_state_dir()
+ except AttributeError:
+ try:
+ return os.environ["XDG_STATE_HOME"]
+ except KeyError:
+            return os.path.expanduser("~/.local/state")
+
+
prctl.SET_PDEATHSIG = 1
@@ -222,7 +233,7 @@ class CockpitClient(Gtk.Application):
context.set_sandbox_enabled(enabled=True)
context.set_cache_model(WebKit2.CacheModel.DOCUMENT_VIEWER)
- cookiesFile = os.path.join(GLib.get_user_state_dir(), "cockpit-client", "cookies.txt")
+ cookiesFile = os.path.join(get_user_state_dir(), "cockpit-client", "cookies.txt")
cookies = context.get_cookie_manager()
cookies.set_persistent_storage(cookiesFile, WebKit2.CookiePersistentStorage.TEXT)
diff --git a/src/cockpit/packages.py b/src/cockpit/packages.py
index 388c40b689f3..5d882ef67420 100644
--- a/src/cockpit/packages.py
+++ b/src/cockpit/packages.py
@@ -20,6 +20,7 @@
import functools
import gzip
import io
+import itertools
import json
import logging
import mimetypes
@@ -62,28 +63,55 @@
logger = logging.getLogger(__name__)
-def parse_accept_language(headers: JsonObject) -> List[str]:
+# In practice, this is going to get called over and over again with exactly the
+# same list. Let's try to cache the result.
+@functools.lru_cache()
+def parse_accept_language(accept_language: str) -> Sequence[str]:
"""Parse the Accept-Language header, if it exists.
- Returns an ordered list of languages.
+ Returns an ordered list of languages, with fallbacks inserted, and
+ truncated to the position where 'en' would have otherwise appeared, if
+ applicable.
https://tools.ietf.org/html/rfc7231#section-5.3.5
+ https://datatracker.ietf.org/doc/html/rfc4647#section-3.4
"""
- locales = []
- for language in get_str(headers, 'Accept-Language', '').split(','):
- language = language.strip()
- locale, _, weightstr = language.partition(';q=')
- weight = float(weightstr or 1)
-
- # Skip possible empty locales
- if not locale:
- continue
-
- # Locales are case-insensitive and we store our list in lowercase
- locales.append((locale.lower(), weight))
-
- return [locale for locale, _ in sorted(locales, key=lambda k: k[1], reverse=True)]
+ logger.debug('parse_accept_language(%r)', accept_language)
+ locales_with_q = []
+ for entry in accept_language.split(','):
+ entry = entry.strip().lower()
+ logger.debug(' entry %r', entry)
+ locale, _, qstr = entry.partition(';q=')
+ try:
+ q = float(qstr or 1.0)
+ except ValueError:
+ continue # ignore malformed entry
+
+ while locale:
+ logger.debug(' adding %r q=%r', locale, q)
+ locales_with_q.append((locale, q))
+ # strip off '-detail' suffixes until there's nothing left
+ locale, _, _region = locale.rpartition('-')
+
+ # Sort the list by highest q value. Otherwise, this is a stable sort.
+ locales_with_q.sort(key=lambda pair: pair[1], reverse=True)
+ logger.debug(' sorted list is %r', locales_with_q)
+
+ # If we have 'en' anywhere in our list, ignore it and all items after it.
+ # This will result in us getting an untranslated (ie: English) version if
+ # none of the more-preferred languages are found, which is what we want.
+ # We also take the chance to drop duplicate items. Note: both of these
+ # things need to happen after sorting.
+ results = []
+ for locale, _q in locales_with_q:
+ if locale == 'en':
+ break
+ if locale not in results:
+ results.append(locale)
+
+ logger.debug(' results list is %r', results)
+ return tuple(results)
def sortify_version(version: str) -> str:
@@ -157,7 +185,8 @@ def __init__(self, path: Path, value: JsonObject):
class Package:
- PO_JS_RE: ClassVar[Pattern] = re.compile(r'po\.([^.]+)\.js(\.gz)?')
+ # For po{,.manifest}.js files, the interesting part is the locale name
+ PO_JS_RE: ClassVar[Pattern] = re.compile(r'(po|po\.manifest)\.([^.]+)\.js(\.gz)?')
# immutable after __init__
manifest: Manifest
@@ -166,7 +195,7 @@ class Package:
priority: int
# computed later
- translations: Optional[Dict[str, str]] = None
+ translations: Optional[Dict[str, Dict[str, str]]] = None
files: Optional[Dict[str, str]] = None
def __init__(self, manifest: Manifest):
@@ -186,7 +215,7 @@ def ensure_scanned(self) -> None:
return
self.files = {}
- self.translations = {}
+ self.translations = {'po.js': {}, 'po.manifest.js': {}}
for file in self.path.rglob('*'):
name = str(file.relative_to(self.path))
@@ -195,14 +224,31 @@ def ensure_scanned(self) -> None:
po_match = Package.PO_JS_RE.fullmatch(name)
if po_match:
- locale = po_match.group(1)
+ basename = po_match.group(1)
+ locale = po_match.group(2)
# Accept-Language is case-insensitive and uses '-' to separate variants
lower_locale = locale.lower().replace('_', '-')
- self.translations[lower_locale] = name
+
+ logger.debug('Adding translation %r %r -> %r', basename, lower_locale, name)
+ self.translations[f'{basename}.js'][lower_locale] = name
else:
- basename = name[:-3] if name.endswith('.gz') else name
+ # strip out trailing '.gz' components
+                    basename = re.sub(r'\.gz$', '', name)
+ logger.debug('Adding content %r -> %r', basename, name)
self.files[basename] = name
+ # If we see a filename like `x.min.js` we want to also offer it
+ # at `x.js`, but only if `x.js(.gz)` itself is not present.
+ # Note: this works for both the case where we found the `x.js`
+ # first (it's already in the map) and also if we find it second
+ # (it will be replaced in the map by the line just above).
+ # See https://github.com/cockpit-project/cockpit/pull/19716
+ self.files.setdefault(basename.replace('.min.', '.'), name)
+
+ # support old cockpit-po-plugin which didn't write po.manifest.??.js
+ if not self.translations['po.manifest.js']:
+ self.translations['po.manifest.js'] = self.translations['po.js']
+
def get_content_security_policy(self) -> str:
policy = {
"default-src": "'self'",
@@ -236,21 +282,14 @@ def load_file(self, filename: str) -> Document:
return Document(path.open('rb'), content_type, content_encoding, content_security_policy)
- def load_translation(self, locales: List[str]) -> Document:
+ def load_translation(self, path: str, locales: Sequence[str]) -> Document:
self.ensure_scanned()
assert self.translations is not None
- # First check the locales that the user sent
+ # First match wins
for locale in locales:
with contextlib.suppress(KeyError):
- return self.load_file(self.translations[locale])
-
- # Next, check the language-only versions of variant-specified locales
- for locale in locales:
- language, _, region = locale.partition('-')
- if region:
- with contextlib.suppress(KeyError):
- return self.load_file(self.translations[language])
+ return self.load_file(self.translations[path][locale])
# We prefer to return an empty document than 404 in order to avoid
# errors in the console when a translation can't be found
@@ -261,9 +300,9 @@ def load_path(self, path: str, headers: JsonObject) -> Document:
assert self.files is not None
assert self.translations is not None
- if path == 'po.js':
- locales = parse_accept_language(headers)
- return self.load_translation(locales)
+ if path in self.translations:
+ locales = parse_accept_language(get_str(headers, 'Accept-Language', ''))
+ return self.load_translation(path, locales)
else:
return self.load_file(self.files[path])
@@ -462,8 +501,13 @@ def load(self) -> None:
def show(self):
for name in sorted(self.packages):
package = self.packages[name]
- menuitems = ''
- print(f'{name:20} {menuitems:40} {package.path}')
+ menuitems = []
+ for entry in itertools.chain(
+ package.manifest.get('menu', {}).values(),
+ package.manifest.get('tools', {}).values()):
+ with contextlib.suppress(KeyError):
+ menuitems.append(entry['label'])
+ print(f'{name:20} {", ".join(menuitems):40} {package.path}')
def get_bridge_configs(self) -> Sequence[BridgeConfig]:
def yield_configs():
@@ -492,17 +536,19 @@ def load_manifests_js(self, headers: JsonObject) -> Document:
chunks: List[bytes] = []
# Send the translations required for the manifest files, from each package
- locales = parse_accept_language(headers)
+ locales = parse_accept_language(get_str(headers, 'Accept-Language', ''))
for name, package in self.packages.items():
if name in ['static', 'base1']:
continue
+
# find_translation will always find at least 'en'
- translation = package.load_translation(locales)
+ translation = package.load_translation('po.manifest.js', locales)
with translation.data:
if translation.content_encoding == 'gzip':
data = gzip.decompress(translation.data.read())
else:
data = translation.data.read()
+
chunks.append(data)
chunks.append(b"""
diff --git a/test/pytest/test_packages.py b/test/pytest/test_packages.py
index 7c61b3b21e64..4170f5d50b77 100644
--- a/test/pytest/test_packages.py
+++ b/test/pytest/test_packages.py
@@ -16,7 +16,6 @@
# along with this program. If not, see .
import json
-from typing import List
import pytest
@@ -24,12 +23,26 @@
@pytest.mark.parametrize(("test_input", "expected"), [
- ('de-at, zh-CH, en,', ['de-at', 'zh-ch', 'en']),
- ('es-es, nl;q=0.8, fr;q=0.9', ['es-es', 'fr', 'nl']),
- ('fr-CH, fr;q=0.9, en;q=0.8, de;q=0.7, *;q=0.5', ['fr-ch', 'fr', 'en', 'de', '*'])
+        # correctly handles empty values
+ ('', ()),
+ (' ', ()),
+ (' , ', ()),
+ (' , ,xx', ('xx',)),
+ # english → empty list
+ ('en', ()),
+ (' , en', ()),
+ # invalid q values get ignored
+ ('aa;q===,bb;q=abc,cc;q=.,zz', ('zz',)),
+ # variant-peeling works
+ ('aa-bb-cc-dd,ee-ff-gg-hh', ('aa-bb-cc-dd', 'aa-bb-cc', 'aa-bb', 'aa', 'ee-ff-gg-hh', 'ee-ff-gg', 'ee-ff', 'ee')),
+ # sorting and english-truncation are working
+ ('fr-ch;q=0.8,es-mx;q=1.0,en-ca;q=0.9', ('es-mx', 'es', 'en-ca')),
+ ('de-at, zh-CN, en,', ('de-at', 'de', 'zh-cn', 'zh')),
+ ('es-es, nl;q=0.8, fr;q=0.9', ('es-es', 'es', 'fr', 'nl')),
+ ('fr-CH, fr;q=0.9, en;q=0.8, de;q=0.7, *;q=0.5', ('fr-ch', 'fr'))
])
-def test_parse_accept_language(test_input: str, expected: List[str]) -> None:
- assert parse_accept_language({'Accept-Language': test_input}) == expected
+def test_parse_accept_language(test_input: str, expected: 'tuple[str, ...]') -> None:
+ assert parse_accept_language(test_input) == expected
@pytest.fixture
@@ -158,3 +171,117 @@ def test_condition_hides_priority(pkgdir):
assert packages.packages['basic'].manifest['description'] == 'standard package'
assert packages.packages['basic'].manifest['requires'] == {'cockpit': "42"}
assert packages.packages['basic'].priority == 1
+
+
+def test_english_translation(pkgdir):
+ make_package(pkgdir, 'one')
+ (pkgdir / 'one' / 'po.de.js').write_text('eins')
+
+ packages = Packages()
+
+ # make sure we get German
+ document = packages.load_path('/one/po.js', {'Accept-Language': 'de'})
+ assert '/javascript' in document.content_type
+ assert document.data.read() == b'eins'
+
+ # make sure we get German here (higher q-value) even with English first
+ document = packages.load_path('/one/po.js', {'Accept-Language': 'en;q=0.9, de-ch'})
+ assert '/javascript' in document.content_type
+ assert document.data.read() == b'eins'
+
+ # make sure we get the empty ("English") translation, and not German
+ document = packages.load_path('/one/po.js', {'Accept-Language': 'en, de'})
+ assert '/javascript' in document.content_type
+ assert document.data.read() == b''
+
+ document = packages.load_path('/one/po.js', {'Accept-Language': 'de;q=0.9, fr;q=0.7, en'})
+ assert '/javascript' in document.content_type
+ assert document.data.read() == b''
+
+ document = packages.load_path('/one/po.js', {'Accept-Language': 'de;q=0.9, fr, en-ca'})
+ assert '/javascript' in document.content_type
+ assert document.data.read() == b''
+
+ document = packages.load_path('/one/po.js', {'Accept-Language': ''})
+ assert '/javascript' in document.content_type
+ assert document.data.read() == b''
+
+ document = packages.load_path('/one/po.js', {})
+ assert '/javascript' in document.content_type
+ assert document.data.read() == b''
+
+
+def test_translation(pkgdir):
+ # old style: make sure po.de.js is served as fallback for manifest translations
+ make_package(pkgdir, 'one')
+ (pkgdir / 'one' / 'po.de.js').write_text('eins')
+
+ # new style: separated translations
+ make_package(pkgdir, 'two')
+ (pkgdir / 'two' / 'po.de.js').write_text('zwei')
+ (pkgdir / 'two' / 'po.manifest.de.js').write_text('zwo')
+
+ packages = Packages()
+
+ # make sure we can read a po.js file with language fallback
+ document = packages.load_path('/one/po.js', {'Accept-Language': 'es, de'})
+ assert '/javascript' in document.content_type
+ assert document.data.read() == b'eins'
+
+ # make sure we fall back cleanly to an empty file with correct mime
+ document = packages.load_path('/one/po.js', {'Accept-Language': 'es'})
+ assert '/javascript' in document.content_type
+ assert document.data.read() == b''
+
+ # make sure the manifest translations get sent along with manifests.js
+ document = packages.load_path('/manifests.js', {'Accept-Language': 'de'})
+ contents = document.data.read()
+ assert b'eins\n' in contents
+ assert b'zwo\n' in contents
+ assert b'zwei\n' not in contents
+
+
+def test_filename_mangling(pkgdir):
+ make_package(pkgdir, 'one')
+
+ # test various filename variations
+ (pkgdir / 'one' / 'one.js').write_text('this is one.js')
+ (pkgdir / 'one' / 'two.js.gz').write_text('this is two.js')
+ (pkgdir / 'one' / 'three.min.js.gz').write_text('this is three.js')
+ (pkgdir / 'one' / 'four.min.js').write_text('this is four.js')
+
+ packages = Packages()
+ encodings = set()
+
+ for name in ['one', 'two', 'three', 'four']:
+ document = packages.load_path(f'/one/{name}.js', {})
+ assert document.data.read().decode() == f'this is {name}.js'
+ assert '/javascript' in document.content_type
+ encodings.add(document.content_encoding)
+
+ assert encodings == {None, 'gzip'} # make sure we saw both compressed and uncompressed
+
+
+def test_overlapping_minified(pkgdir):
+ make_package(pkgdir, 'one')
+ (pkgdir / 'one' / 'one.min.js').write_text('min')
+ (pkgdir / 'one' / 'one.js').write_text('max')
+
+ # try the other way around in hope of listing the files in reverse order
+ (pkgdir / 'one' / 'two.js').write_text('max')
+ (pkgdir / 'one' / 'two.min.js').write_text('min')
+
+ packages = Packages()
+
+ # if both files are present, we should find the original one
+ document = packages.load_path('/one/one.js', {})
+ assert document.data.read().decode() == 'max'
+ document = packages.load_path('/one/two.js', {})
+ assert document.data.read().decode() == 'max'
+
+ # but requesting .min. explicitly will load it
+ document = packages.load_path('/one/one.min.js', {})
+ assert document.data.read().decode() == 'min'
+ document = packages.load_path('/one/two.min.js', {})
+ assert document.data.read().decode() == 'min'
+
diff --git a/test/verify/check-apps b/test/verify/check-apps
index ac4cfd7b7fee..b57de7c58acf 100755
--- a/test/verify/check-apps
+++ b/test/verify/check-apps
@@ -16,6 +16,7 @@
#
# You should have received a copy of the GNU Lesser General Public License
# along with Cockpit; If not, see .
+import time
import packagelib
import testlib
@@ -129,6 +130,51 @@ class TestApps(packagelib.PackageCase):
def testWithUrlRoot(self):
self.testBasic(urlroot="/webcon")
+ def testOsMap(self):
+ b = self.browser
+ m = self.machine
+
+ self.allow_journal_messages("can't remove watch: Invalid argument")
+
+ self.restore_dir("/usr/share/metainfo", reboot_safe=True)
+ self.restore_dir("/usr/share/app-info", reboot_safe=True)
+ self.restore_dir("/var/cache/app-info", reboot_safe=True)
+
+ # Make sure none of the appstream directories exist. They
+ # will be created later and we need to cope with that.
+ m.execute("rm -rf /usr/share/metainfo /usr/share/app-info /var/cache/app-info")
+
+ # use a fake distro map
+ self.write_file("/etc/cockpit/apps.override.json",
+ '{ "config": { "appstream_data_packages":'
+ ' {"testy": ["appstream-data-test"], "otheros": ["nosuchpackage"]},'
+ ' "appstream_config_packages": []'
+ ' }}')
+
+ self.createAppStreamPackage("app-1", "1.0", "1")
+ self.createAppStreamRepoPackage()
+
+ self.login_and_go("/apps")
+ b.wait_visible(".pf-v5-c-empty-state")
+
+ # os-release is a symlink target, don't clobber that
+ self.restore_file("/etc/os-release")
+ m.execute("rm /etc/os-release")
+
+ # unknown OS: nothing gets installed
+ m.write("/etc/os-release", 'ID="unmapped"\nID_LIKE="mysterious"\nVERSION_ID="1"\n')
+ b.click("#refresh")
+ # the progress bar is too fast to reliably catch it
+ time.sleep(1)
+ b.wait_not_present("#refresh-progress")
+ b.wait_visible(".pf-v5-c-empty-state")
+
+        # known OS: appstream-data-test gets installed from the map
+ m.write("/etc/os-release", 'ID="derivative"\nID_LIKE="spicy testy"\nVERSION_ID="1"\n')
+ b.click("#refresh")
+ with b.wait_timeout(30):
+ b.wait_visible(".app-list #app-1")
+
def testL10N(self):
b = self.browser
m = self.machine
diff --git a/test/verify/check-connection b/test/verify/check-connection
index af7bbc30a865..160633dee0c6 100755
--- a/test/verify/check-connection
+++ b/test/verify/check-connection
@@ -1200,7 +1200,8 @@ UnixPath=/run/cockpit/session
packages = m.execute("cockpit-bridge --packages")
self.assertRegex(packages, r"(^|\n)base1\s+.*/usr/share/cockpit/base1")
- self.assertRegex(packages, r"(^|\n)system\s+.*/usr/share/cockpit/systemd")
+ # also includes menu and tools entries
+ self.assertRegex(packages, r"(^|\n)system\s.*Services.*Terminal.*\s/usr/share/cockpit/systemd")
@testlib.skipDistroPackage()