-rw-r--r--   .ycm_extra_conf.py                            250
-rw-r--r--   build-aux/edit-script.sh.in                    33
-rw-r--r--   build-aux/meson-make-symlink.sh                12
-rw-r--r--   build-aux/tap-driver.py                       296
-rwxr-xr-x   build-aux/update-po                            39
-rw-r--r--   doc/meson.build                               138
-rw-r--r--   lib/libalpm/meson.build                        33
-rw-r--r--   lib/libalpm/po/meson.build                     15
-rw-r--r--   meson.build                                   487
-rw-r--r--   meson_options.txt                              61
-rw-r--r--   scripts/libmakepkg/integrity/meson.build       20
-rw-r--r--   scripts/libmakepkg/lint_config/meson.build     18
-rw-r--r--   scripts/libmakepkg/lint_package/meson.build    20
-rw-r--r--   scripts/libmakepkg/lint_pkgbuild/meson.build   37
-rw-r--r--   scripts/libmakepkg/meson.build                 31
-rw-r--r--   scripts/libmakepkg/source/meson.build          22
-rw-r--r--   scripts/libmakepkg/tidy/meson.build            23
-rw-r--r--   scripts/libmakepkg/util/meson.build            24
-rw-r--r--   scripts/meson.build                            66
-rw-r--r--   scripts/po/meson.build                         15
-rw-r--r--   src/common/meson.build                          4
-rw-r--r--   src/pacman/meson.build                         23
-rw-r--r--   src/pacman/po/meson.build                      15
-rw-r--r--   src/util/meson.build                            3
-rw-r--r--   test/pacman/meson.build                       357
-rw-r--r--   test/scripts/meson.build                       15
-rw-r--r--   test/util/meson.build                           6
27 files changed, 2063 insertions, 0 deletions
diff --git a/.ycm_extra_conf.py b/.ycm_extra_conf.py
new file mode 100644
index 00000000..f297deef
--- /dev/null
+++ b/.ycm_extra_conf.py
@@ -0,0 +1,250 @@
+#!/usr/bin/env python
+
+# SPDX-License-Identifier: Unlicense
+#
+# Based on the template file provided by the 'YCM-Generator' project authored by
+# Reuben D'Netto.
+# Jiahui Xie has reformatted and expanded the original script in accordance
+# with the requirements of the PEP 8 style guide and the 'systemd' project,
+# respectively.
+#
+# The original license is preserved as it is.
+#
+#
+# This is free and unencumbered software released into the public domain.
+#
+# Anyone is free to copy, modify, publish, use, compile, sell, or
+# distribute this software, either in source code form or as a compiled
+# binary, for any purpose, commercial or non-commercial, and by any
+# means.
+#
+# In jurisdictions that recognize copyright laws, the author or authors
+# of this software dedicate any and all copyright interest in the
+# software to the public domain. We make this dedication for the benefit
+# of the public at large and to the detriment of our heirs and
+# successors. We intend this dedication to be an overt act of
+# relinquishment in perpetuity of all present and future rights to this
+# software under copyright law.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# For more information, please refer to <http://unlicense.org/>
+
+"""
+YouCompleteMe configuration file tailored to support the 'meson' build system
+used by the 'pacman' project.
+"""
+
+import glob
+import os
+import ycm_core
+
+
+SOURCE_EXTENSIONS = (".C", ".cpp", ".cxx", ".cc", ".c", ".m", ".mm")
+HEADER_EXTENSIONS = (".H", ".h", ".hxx", ".hpp", ".hh")
+
+
+def DirectoryOfThisScript():
+ """
+ Return the absolute path of the parent directory containing this
+ script.
+ """
+ return os.path.dirname(os.path.abspath(__file__))
+
+
+def GuessBuildDirectory():
+ """
+ Guess the build directory using the following heuristics:
+
+    1. Return the absolute path of the 'build' subdirectory of this
+    script's directory, if that subdirectory exists.
+
+    2. Otherwise, probe for directories containing a '.ninja_log' file two
+    levels above the current directory; return that directory only if
+    exactly one candidate is found.
+ """
+ result = os.path.join(DirectoryOfThisScript(), "build")
+
+ if os.path.exists(result):
+ return result
+
+ result = glob.glob(os.path.join(DirectoryOfThisScript(),
+ "..", "..", "*", ".ninja_log"))
+
+ if not result:
+ return ""
+
+    if len(result) != 1:
+ return ""
+
+ return os.path.split(result[0])[0]
+
+
+def TraverseByDepth(root, include_extensions):
+ """
+    Return the set of child directories of 'root' that contain files with
+    extensions listed in 'include_extensions'.
+
+ NOTE:
+ 1. The 'root' directory itself is excluded from the result set.
+    2. No subdirectories are excluded if 'include_extensions' is left as
+    'None'.
+ 3. Each entry in 'include_extensions' must begin with string '.'.
+ """
+ is_root = True
+ result = set()
+ # Perform a depth first top down traverse of the given directory tree.
+ for root_dir, subdirs, file_list in os.walk(root):
+ if not is_root:
+ # print("Relative Root: ", root_dir)
+ # print(subdirs)
+ if include_extensions:
+ get_ext = os.path.splitext
+ subdir_extensions = {
+ get_ext(f)[-1] for f in file_list if get_ext(f)[-1]
+ }
+ if subdir_extensions & include_extensions:
+ result.add(root_dir)
+ else:
+ result.add(root_dir)
+ else:
+ is_root = False
+
+ return result
+
+
+_project_src_dir = os.path.join(DirectoryOfThisScript(), "src")
+_include_dirs_set = TraverseByDepth(_project_src_dir, frozenset({".h"}))
+flags = [
+ "-x",
+ "c"
+ # The following flags are partially redundant due to the existence of
+ # 'compile_commands.json'.
+ # '-Wall',
+ # '-Wextra',
+ # '-Wfloat-equal',
+ # '-Wpointer-arith',
+ # '-Wshadow',
+ # '-std=gnu99',
+]
+
+for include_dir in _include_dirs_set:
+ flags.append("-I" + include_dir)
+
+# Set this to the absolute path to the folder (NOT the file!) containing the
+# compile_commands.json file to use that instead of 'flags'. See here for
+# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
+#
+# You can get CMake to generate this file for you by adding:
+# set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )
+# to your CMakeLists.txt file.
+#
+# Most projects will NOT need to set this to anything; you can just change the
+# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
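+#
+# Note: with meson's ninja backend, compile_commands.json is written into the
+# build directory automatically, which is what GuessBuildDirectory() locates.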
+compilation_database_folder = GuessBuildDirectory()
+
+if os.path.exists(compilation_database_folder):
+ database = ycm_core.CompilationDatabase(compilation_database_folder)
+else:
+ database = None
+
+
+def MakeRelativePathsInFlagsAbsolute(flags, working_directory):
+ """
+    Iterate through 'flags' and replace relative paths following
+    '-isystem', '-I', '-iquote', or '--sysroot=' with absolute paths
+    rooted at 'working_directory'.
+ """
+ if not working_directory:
+ return list(flags)
+ new_flags = []
+ make_next_absolute = False
+ path_flags = ["-isystem", "-I", "-iquote", "--sysroot="]
+ for flag in flags:
+ new_flag = flag
+
+ if make_next_absolute:
+ make_next_absolute = False
+ if not flag.startswith("/"):
+ new_flag = os.path.join(working_directory, flag)
+
+ for path_flag in path_flags:
+ if flag == path_flag:
+ make_next_absolute = True
+ break
+
+ if flag.startswith(path_flag):
+ path = flag[len(path_flag):]
+ new_flag = path_flag + os.path.join(working_directory, path)
+ break
+
+ if new_flag:
+ new_flags.append(new_flag)
+ return new_flags
+
+
+def IsHeaderFile(filename):
+ """
+    Check whether 'filename' is considered a header file.
+ """
+ extension = os.path.splitext(filename)[1]
+ return extension in HEADER_EXTENSIONS
+
+
+def GetCompilationInfoForFile(filename):
+ """
+ Helper function to look up compilation info of 'filename' in the 'database'.
+ """
+ # The compilation_commands.json file generated by CMake does not have
+ # entries for header files. So we do our best by asking the db for flags for
+ # a corresponding source file, if any. If one exists, the flags for that
+ # file should be good enough.
+ if not database:
+ return None
+
+ if IsHeaderFile(filename):
+ basename = os.path.splitext(filename)[0]
+ for extension in SOURCE_EXTENSIONS:
+ replacement_file = basename + extension
+ if os.path.exists(replacement_file):
+ compilation_info = \
+ database.GetCompilationInfoForFile(replacement_file)
+ if compilation_info.compiler_flags_:
+ return compilation_info
+ return None
+ return database.GetCompilationInfoForFile(filename)
+
+
+def FlagsForFile(filename, **kwargs):
+ """
+ Callback function to be invoked by YouCompleteMe in order to get the
+ information necessary to compile 'filename'.
+
+ It returns a dictionary with a single element 'flags'. This element is a
+ list of compiler flags to pass to libclang for the file 'filename'.
+ """
+ if database:
+ # Bear in mind that compilation_info.compiler_flags_ does NOT return a
+ # python list, but a "list-like" StringVec object
+ compilation_info = GetCompilationInfoForFile(filename)
+ if not compilation_info:
+ return None
+
+ final_flags = MakeRelativePathsInFlagsAbsolute(
+ compilation_info.compiler_flags_,
+ compilation_info.compiler_working_dir_)
+
+ else:
+ relative_to = DirectoryOfThisScript()
+ final_flags = MakeRelativePathsInFlagsAbsolute(flags, relative_to)
+
+ return {
+ "flags": final_flags,
+ "do_cache": True
+ }
diff --git a/build-aux/edit-script.sh.in b/build-aux/edit-script.sh.in
new file mode 100644
index 00000000..3e3a1b6a
--- /dev/null
+++ b/build-aux/edit-script.sh.in
@@ -0,0 +1,33 @@
+#!@BASH@
+
+input=$1
+output=$2
+mode=$3
+
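+# Substitute the configure-style @var@ placeholders below with the values
+# meson resolved at setup time, turning the .sh.in template ($1) into the
+# generated script ($2); an optional file mode ($3) is applied afterwards.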
+"@SED@" \
+ -e "s|@rootdir[@]|@ROOTDIR@|g" \
+ -e "s|@localedir[@]|@LOCALEDIR@|g" \
+ -e "s|@sysconfdir[@]|@sysconfdir@|g" \
+ -e "s|@localstatedir[@]|@localstatedir@|g" \
+ -e "s|@libmakepkgdir[@]|@LIBMAKEPKGDIR@|g" \
+ -e "s|@pkgdatadir[@]|@PKGDATADIR@|g" \
+ -e "s|@prefix[@]|@PREFIX@|g" \
+ -e "1s|#!/bin/bash|#!@BASH@|g" \
+ -e "s|@PACKAGE_VERSION[@]|@PACKAGE_VERSION@|g" \
+ -e "s|@PACKAGE_NAME[@]|@PACKAGE_NAME@|g" \
+ -e "s|@BUILDSCRIPT[@]|@BUILDSCRIPT@|g" \
+ -e "s|@TEMPLATE_DIR[@]|@TEMPLATE_DIR@|g" \
+ -e "s|@DEBUGSUFFIX[@]|@DEBUGSUFFIX@|g" \
+ -e "s|@INODECMD[@]|@INODECMD@|g" \
+ -e "s|@OWNERCMD[@]|@OWNERCMD@|g" \
+ -e "s|@MODECMD[@]|@MODECMD@|g" \
+ -e "s|@SEDINPLACEFLAGS[@]|@SEDINPLACEFLAGS@|g" \
+ -e "s|@SEDPATH[@]|@SEDPATH@|g" \
+ -e "s|@DUFLAGS[@]|@DUFLAGS@|g" \
+ -e "s|@DUPATH[@]|@DUPATH@|g" \
+ -e "s|@configure_input[@]|Generated from ${output##*/}.sh.in; do not edit by hand.|g" \
+ "$input" >"$output"
+
+if [[ $mode ]]; then
+ chmod "$mode" "$output"
+fi
diff --git a/build-aux/meson-make-symlink.sh b/build-aux/meson-make-symlink.sh
new file mode 100644
index 00000000..501cd43d
--- /dev/null
+++ b/build-aux/meson-make-symlink.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+set -eu
+
+# this is needed mostly because $DESTDIR is provided as a variable,
+# and we need to create the target directory...
+
+mkdir -vp "$(dirname "${DESTDIR:-}$2")"
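+# Link targets without a directory component are used verbatim; anything else
+# becomes a relative symlink so it still resolves once the $DESTDIR staging
+# tree is moved into place.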
+if [ "$(dirname "$1")" = . ]; then
+ ln -vfs -T "$1" "${DESTDIR:-}$2"
+else
+ ln -vfs -T --relative "${DESTDIR:-}$1" "${DESTDIR:-}$2"
+fi
diff --git a/build-aux/tap-driver.py b/build-aux/tap-driver.py
new file mode 100644
index 00000000..c231caec
--- /dev/null
+++ b/build-aux/tap-driver.py
@@ -0,0 +1,296 @@
+#!/usr/bin/env python3
+# Adapted from tappy copyright (c) 2016, Matt Layman
+# MIT license
+# https://github.com/python-tap/tappy
+
+import io
+import re
+import subprocess
+import sys
+
+
+class Directive(object):
+ """A representation of a result line directive."""
+
+ skip_pattern = re.compile(
+ r"""^SKIP\S*
+ (?P<whitespace>\s*) # Optional whitespace.
+ (?P<reason>.*) # Slurp up the rest.""",
+ re.IGNORECASE | re.VERBOSE)
+ todo_pattern = re.compile(
+ r"""^TODO\b # The directive name
+ (?P<whitespace>\s*) # Immediately following must be whitespace.
+ (?P<reason>.*) # Slurp up the rest.""",
+ re.IGNORECASE | re.VERBOSE)
+
+ def __init__(self, text):
+ """Initialize the directive by parsing the text.
+ The text is assumed to be everything after a '#\s*' on a result line.
+ """
+ self._text = text
+ self._skip = False
+ self._todo = False
+ self._reason = None
+
+ match = self.skip_pattern.match(text)
+ if match:
+ self._skip = True
+ self._reason = match.group('reason')
+
+ match = self.todo_pattern.match(text)
+ if match:
+ if match.group('whitespace'):
+ self._todo = True
+ else:
+ # Catch the case where the directive has no descriptive text.
+ if match.group('reason') == '':
+ self._todo = True
+ self._reason = match.group('reason')
+
+ @property
+ def text(self):
+ """Get the entire text."""
+ return self._text
+
+ @property
+ def skip(self):
+ """Check if the directive is a SKIP type."""
+ return self._skip
+
+ @property
+ def todo(self):
+ """Check if the directive is a TODO type."""
+ return self._todo
+
+ @property
+ def reason(self):
+ """Get the reason for the directive."""
+ return self._reason
+
+
+class Parser(object):
+ """A parser for TAP files and lines."""
+
+ # ok and not ok share most of the same characteristics.
+ result_base = r"""
+ \s* # Optional whitespace.
+ (?P<number>\d*) # Optional test number.
+ \s* # Optional whitespace.
+ (?P<description>[^#]*) # Optional description before #.
+ \#? # Optional directive marker.
+ \s* # Optional whitespace.
+ (?P<directive>.*) # Optional directive text.
+ """
+ ok = re.compile(r'^ok' + result_base, re.VERBOSE)
+ not_ok = re.compile(r'^not\ ok' + result_base, re.VERBOSE)
+ plan = re.compile(r"""
+ ^1..(?P<expected>\d+) # Match the plan details.
+ [^#]* # Consume any non-hash character to confirm only
+ # directives appear with the plan details.
+ \#? # Optional directive marker.
+ \s* # Optional whitespace.
+ (?P<directive>.*) # Optional directive text.
+ """, re.VERBOSE)
+ diagnostic = re.compile(r'^#')
+ bail = re.compile(r"""
+ ^Bail\ out!
+ \s* # Optional whitespace.
+ (?P<reason>.*) # Optional reason.
+ """, re.VERBOSE)
+ version = re.compile(r'^TAP version (?P<version>\d+)$')
+
+ TAP_MINIMUM_DECLARED_VERSION = 13
+
+ def parse(self, fh):
+ """Generate tap.line.Line objects, given a file-like object `fh`.
+ `fh` may be any object that implements both the iterator and
+ context management protocol (i.e. it can be used in both a
+ "with" statement and a "for...in" statement.)
+ Trailing whitespace and newline characters will be automatically
+ stripped from the input lines.
+ """
+ with fh:
+ for line in fh:
+ yield self.parse_line(line.rstrip())
+
+ def parse_line(self, text):
+ """Parse a line into whatever TAP category it belongs."""
+ match = self.ok.match(text)
+ if match:
+ return self._parse_result(True, match)
+
+ match = self.not_ok.match(text)
+ if match:
+ return self._parse_result(False, match)
+
+ if self.diagnostic.match(text):
+ return ('diagnostic', text)
+
+ match = self.plan.match(text)
+ if match:
+ return self._parse_plan(match)
+
+ match = self.bail.match(text)
+ if match:
+ return ('bail', match.group('reason'))
+
+ match = self.version.match(text)
+ if match:
+ return self._parse_version(match)
+
+ return ('unknown',)
+
+ def _parse_plan(self, match):
+ """Parse a matching plan line."""
+ expected_tests = int(match.group('expected'))
+ directive = Directive(match.group('directive'))
+
+ # Only SKIP directives are allowed in the plan.
+ if directive.text and not directive.skip:
+ return ('unknown',)
+
+ return ('plan', expected_tests, directive)
+
+ def _parse_result(self, ok, match):
+ """Parse a matching result line into a result instance."""
+ return ('result', ok, match.group('number'),
+ match.group('description').strip(),
+ Directive(match.group('directive')))
+
+ def _parse_version(self, match):
+ version = int(match.group('version'))
+ if version < self.TAP_MINIMUM_DECLARED_VERSION:
+ raise ValueError('It is an error to explicitly specify '
+ 'any version lower than 13.')
+ return ('version', version)
+
+
+class Rules(object):
+
+ def __init__(self):
+ self._lines_seen = {'plan': [], 'test': 0, 'failed': 0, 'version': []}
+ self._errors = []
+
+ def check(self, final_line_count):
+ """Check the status of all provided data and update the suite."""
+ if self._lines_seen['version']:
+ self._process_version_lines()
+ self._process_plan_lines(final_line_count)
+
+ def check_errors(self):
+ if self._lines_seen['failed'] > 0:
+ self._add_error('Tests failed.')
+ if self._errors:
+ for error in self._errors:
+ print(error)
+ return 1
+ return 0
+
+ def _process_version_lines(self):
+ """Process version line rules."""
+ if len(self._lines_seen['version']) > 1:
+ self._add_error('Multiple version lines appeared.')
+ elif self._lines_seen['version'][0] != 1:
+ self._add_error('The version must be on the first line.')
+
+ def _process_plan_lines(self, final_line_count):
+ """Process plan line rules."""
+ if not self._lines_seen['plan']:
+ self._add_error('Missing a plan.')
+ return
+
+ if len(self._lines_seen['plan']) > 1:
+ self._add_error('Only one plan line is permitted per file.')
+ return
+
+ expected_tests, at_line = self._lines_seen['plan'][0]
+ if not self._plan_on_valid_line(at_line, final_line_count):
+ self._add_error(
+ 'A plan must appear at the beginning or end of the file.')
+ return
+
+ if expected_tests != self._lines_seen['test']:
+ self._add_error(
+ 'Expected {expected_count} tests '
+ 'but only {seen_count} ran.'.format(
+ expected_count=expected_tests,
+ seen_count=self._lines_seen['test']))
+
+ def _plan_on_valid_line(self, at_line, final_line_count):
+ """Check if a plan is on a valid line."""
+ # Put the common cases first.
+ if at_line == 1 or at_line == final_line_count:
+ return True
+
+ # The plan may only appear on line 2 if the version is at line 1.
+ after_version = (
+ self._lines_seen['version'] and
+ self._lines_seen['version'][0] == 1 and
+ at_line == 2)
+ if after_version:
+ return True
+
+ return False
+
+ def handle_bail(self, reason):
+ """Handle a bail line."""
+        self._add_error('Bailed: {reason}'.format(reason=reason))
+
+ def handle_skipping_plan(self):
+ """Handle a plan that contains a SKIP directive."""
+ sys.exit(77)
+
+ def saw_plan(self, expected_tests, at_line):
+ """Record when a plan line was seen."""
+ self._lines_seen['plan'].append((expected_tests, at_line))
+
+ def saw_test(self, ok):
+ """Record when a test line was seen."""
+ self._lines_seen['test'] += 1
+ if not ok:
+ self._lines_seen['failed'] += 1
+
+ def saw_version_at(self, line_counter):
+ """Record when a version line was seen."""
+ self._lines_seen['version'].append(line_counter)
+
+ def _add_error(self, message):
+ self._errors += [message]
+
+
+if __name__ == '__main__':
+ parser = Parser()
+ rules = Rules()
+
+ try:
+ out = subprocess.check_output(sys.argv[1:], universal_newlines=True)
+ except subprocess.CalledProcessError as e:
+ sys.stdout.write(e.output)
+ raise e
+
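+    # Walk the parsed TAP lines: echo results and diagnostics to stdout while
+    # the Rules object records plan, test and version lines for the final
+    # consistency check and exit status.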
+ line_generator = parser.parse(io.StringIO(out))
+ line_counter = 0
+ for line in line_generator:
+ line_counter += 1
+
+ if line[0] == 'unknown':
+ continue
+
+ if line[0] == 'result':
+ rules.saw_test(line[1])
+ print('{okay} {num} {description} {directive}'.format(
+ okay=('' if line[1] else 'not ') + 'ok', num=line[2],
+ description=line[3], directive=line[4].text))
+ elif line[0] == 'plan':
+ if line[2].skip:
+ rules.handle_skipping_plan()
+ rules.saw_plan(line[1], line_counter)
+ elif line[0] == 'bail':
+ rules.handle_bail(line[1])
+ elif line[0] == 'version':
+ rules.saw_version_at(line_counter)
+ elif line[0] == 'diagnostic':
+ print(line[1])
+
+ rules.check(line_counter)
+ sys.exit(rules.check_errors())
diff --git a/build-aux/update-po b/build-aux/update-po
new file mode 100755
index 00000000..ce1ad4be
--- /dev/null
+++ b/build-aux/update-po
@@ -0,0 +1,39 @@
+#!/bin/bash
+
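+# Refresh the translation catalogs by running every ninja target whose name
+# ends in "-update-po", as reported by 'meson introspect' for the single
+# build directory found in the repository root.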
+find_build_directory() {
+ local build_dirs=(*/.ninja_log)
+
+ if [[ ! -e ${build_dirs[0]} ]]; then
+ echo "error: No build directory found. Have you run 'meson build' yet?" >&2
+ return 1
+ elif (( ${#build_dirs[*]} > 1 )); then
+ echo "error: Multiple build directories found. Unable to proceed." >&2
+ return 1
+ fi
+
+ printf '%s\n' "${build_dirs[0]%/*}"
+}
+
+
+filter_targets_by_name() {
+ if command -v jq &>/dev/null; then
+ jq --arg re "$1" -r 'map(.name)[] | select(match($re))'
+ else
+ json_pp | awk -v filter="$1" -F'[:"]' \
+ '$2 == "name" && $(NF - 1) ~ filter { print $(NF - 1) }'
+ fi
+}
+
+# Make things simple and require that we're in the repository root rather than
+# trying to chase down the location of this script and the relative build dir.
+if [[ ! -d .git ]]; then
+ echo "This script must be run from the root of the repository" >&2
+ exit 1
+fi
+
+build_dir=$(find_build_directory) || exit 1
+
+mapfile -t targets < \
+ <(meson introspect "$build_dir" --targets | filter_targets_by_name "-update-po$")
+
+ninja -C "$build_dir" "${targets[@]}"
diff --git a/doc/meson.build b/doc/meson.build
new file mode 100644
index 00000000..7c9631cb
--- /dev/null
+++ b/doc/meson.build
@@ -0,0 +1,138 @@
+manpages = [
+ { 'name': 'alpm-hooks.5' },
+ { 'name': 'pacman.8' },
+ { 'name': 'makepkg.8' },
+ { 'name': 'makepkg-template.1' },
+ { 'name': 'repo-add.8' },
+ { 'name': 'vercmp.8' },
+ { 'name': 'pkgdelta.8' },
+ { 'name': 'pacman-key.8' },
+ { 'name': 'PKGBUILD.5', 'extra_depends' : [ 'PKGBUILD-example.txt' ] },
+ { 'name': 'makepkg.conf.5' },
+ { 'name': 'pacman.conf.5' },
+ { 'name': 'libalpm.3' },
+ { 'name': 'BUILDINFO.5' },
+]
+
+asciidoc_conf = join_paths(meson.current_source_dir(), 'asciidoc.conf')
+
+asciidoc_opts = [
+ '-f', asciidoc_conf,
+ '-a', 'pacman_version="@0@"'.format(PACKAGE_VERSION),
+ '-a', 'pacman_date=@0@'.format(run_command('date', '+%Y-%m-%d').stdout().strip()),
+ '-a', 'pkgdatadir=@0@'.format(PKGDATADIR),
+ '-a', 'localstatedir=@0@'.format(LOCALSTATEDIR),
+ '-a', 'sysconfdir=@0@'.format(SYSCONFDIR),
+ '-a', 'datarootdir=@0@'.format(DATAROOTDIR),
+]
+
+html_targets = []
+html_files = []
+
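+# Each entry yields two targets: the installed man page built with a2x, and an
+# HTML rendering (not built by default) that feeds the website tarball below.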
+foreach page : manpages
+ manpage = page['name']
+ htmlpage = '@0@.html'.format(manpage)
+ input = '@0@.asciidoc'.format(manpage)
+
+ section = page['name'].split('.')[-1]
+
+ mandirn = join_paths(MANDIR, 'man' + section)
+
+ custom_target(
+ manpage,
+ command : [
+ A2X,
+ '--no-xmllint',
+ '-d', 'manpage',
+ '-f', 'manpage',
+ '--xsltproc-opts', '-param man.endnotes.list.enabled 0 -param man.endnotes.are.numbered 0',
+ '-D', '@OUTDIR@',
+ '--asciidoc-opts', ' '.join(asciidoc_opts),
+ '@INPUT@',
+ ],
+ input : input,
+ output : [manpage],
+ depend_files : [
+ asciidoc_conf,
+ ] + page.get('extra_depends', []),
+ install : true,
+ install_dir : mandirn,
+ )
+
+ html = custom_target(
+ htmlpage,
+ command : [
+ ASCIIDOC,
+ ] + asciidoc_opts + [
+ '-a', 'linkcss',
+ '-a', 'toc',
+ '-a', 'icons',
+ '-a', 'max-width=960px',
+ '-a', 'stylesheet=asciidoc-override.css',
+ '-o', '@OUTPUT@',
+ '@INPUT@',
+ ],
+ input : input,
+ output : [htmlpage],
+ depend_files : [
+ asciidoc_conf,
+ 'asciidoc-override.css',
+ ] + page.get('extra_depends', []),
+ build_by_default : false,
+ install : false,
+ )
+ html_targets += [html]
+ html_files += [htmlpage]
+endforeach
+
+run_target('html',
+ command : ['/bin/true'],
+ depends : html_targets)
+
+custom_target(
+ 'website.tar.gz',
+ command : [
+ 'bsdtar', 'czf', '@OUTPUT@',
+ '-C', meson.current_build_dir(),
+ ] + html_files + [
+ '-C', meson.current_source_dir(),
+ 'submitting-patches.html',
+ 'translation-help.html',
+ 'HACKING.html',
+ 'index.html',
+ 'asciidoc-override.css',
+ '-C', '/etc/asciidoc/stylesheets/',
+ 'asciidoc.css',
+ '-C', '/etc/asciidoc/javascripts/',
+ 'asciidoc.js',
+ '-C', '/etc/asciidoc/',
+ 'images',
+ ],
+ output : ['website.tar.gz'],
+ build_by_default : false,
+ depends : html_targets,
+)
+
+meson.add_install_script(MESON_MAKE_SYMLINK,
+ 'repo-add.8',
+ join_paths(MANDIR, 'man8/repo-remove.8'))
+
+doxygen = find_program('doxygen', required : get_option('doxygen'))
+if doxygen.found() and not get_option('doxygen').disabled()
+ doxyconf = configuration_data()
+ doxyconf.set('OUTPUT_DIRECTORY', meson.current_build_dir())
+ doxyfile = configure_file(
+ input : 'Doxyfile.in',
+ output : 'Doxyfile',
+ configuration : doxyconf,
+ install : false)
+
+ custom_target(
+ 'doxygen',
+ input : doxyfile,
+ output : ['man3'],
+ command : [doxygen, doxyfile],
+ build_by_default : true,
+ install : true,
+ install_dir : MANDIR)
+endif
diff --git a/lib/libalpm/meson.build b/lib/libalpm/meson.build
new file mode 100644
index 00000000..84c3dde3
--- /dev/null
+++ b/lib/libalpm/meson.build
@@ -0,0 +1,33 @@
+libalpm_sources = files('''
+ add.h add.c
+ alpm.h alpm.c
+ alpm_list.h alpm_list.c
+ backup.h backup.c
+ base64.h base64.c
+ be_local.c
+ be_package.c
+ be_sync.c
+ conflict.h conflict.c
+ db.h db.c
+ delta.h delta.c
+ deps.h deps.c
+ diskspace.h diskspace.c
+ dload.h dload.c
+ error.c
+ filelist.h filelist.c
+ graph.h graph.c
+ group.h group.c
+ handle.h handle.c
+ hook.h hook.c
+ libarchive-compat.h
+ log.h log.c
+ package.h package.c
+ pkghash.h pkghash.c
+ rawstr.c
+ remove.h remove.c
+ signing.c signing.h
+ sync.h sync.c
+ trans.h trans.c
+ util.h util.c
+ version.c
+'''.split())
diff --git a/lib/libalpm/po/meson.build b/lib/libalpm/po/meson.build
new file mode 100644
index 00000000..cec28a15
--- /dev/null
+++ b/lib/libalpm/po/meson.build
@@ -0,0 +1,15 @@
+i18n.gettext(
+ 'libalpm',
+ args : [
+ '--directory=@0@'.format(meson.current_source_dir()),
+ '--msgid-bugs-address=http://bugs.archlinux.org/index.php?project=3',
+ '--copyright-holder="Pacman Development Team <pacman-dev@archlinux.org>"',
+ '--language', 'c',
+
+ '--keyword=_',
+ '--flag=_:1:c-format',
+
+ '--keyword=_n:1,2',
+ '--flag=_n:1:c-format',
+ '--flag=_n:2:c-format',
+ ])
diff --git a/meson.build b/meson.build
new file mode 100644
index 00000000..95895300
--- /dev/null
+++ b/meson.build
@@ -0,0 +1,487 @@
+project('pacman',
+ 'c',
+ version : '5.1.0',
+ license : 'GPLv2+',
+ default_options : [
+ 'c_std=gnu99',
+ 'prefix=/usr',
+ 'sysconfdir=/etc',
+ 'localstatedir=/var',
+ ],
+ meson_version : '>= 0.47')
+
+libalpm_version = '11.0.1'
+
+cc = meson.get_compiler('c')
+
+# commandline options
+PREFIX = get_option('prefix')
+DATAROOTDIR = join_paths(PREFIX, get_option('datarootdir'))
+SYSCONFDIR = join_paths(PREFIX, get_option('sysconfdir'))
+LOCALSTATEDIR = join_paths(PREFIX, get_option('localstatedir'))
+LOCALEDIR = join_paths(PREFIX, get_option('localedir'))
+ROOTDIR = get_option('root-dir')
+BINDIR = join_paths(PREFIX, get_option('bindir'))
+MANDIR = join_paths(PREFIX, get_option('mandir'))
+BUILDSCRIPT = get_option('buildscript')
+LIBMAKEPKGDIR = join_paths(PREFIX, DATAROOTDIR, 'makepkg')
+PKGDATADIR = join_paths(PREFIX, DATAROOTDIR, meson.project_name())
+
+PYTHON = import('python').find_installation('python3')
+M4 = find_program('m4')
+SED = find_program('sed')
+DU = find_program('du')
+LDCONFIG = get_option('ldconfig')
+MESON_MAKE_SYMLINK = join_paths(meson.source_root(), 'build-aux/meson-make-symlink.sh')
+
+BASH = find_program('bash4', 'bash')
+if BASH.found()
+ bash_version = run_command(BASH, '-c', 'IFS=.; echo "${BASH_VERSINFO[*]:0:3}"').stdout()
+
+ have_bash = bash_version.version_compare('>= 4.4.0')
+endif
+if not have_bash
+ error('bash >= 4.4.0 is required for pacman scripts.')
+endif
+
+bashcompletion = dependency('bash-completion', required : false)
+if bashcompletion.found()
+ BASHCOMPDIR = bashcompletion.get_pkgconfig_variable('completionsdir')
+else
+ BASHCOMPDIR = join_paths(DATAROOTDIR, 'bash-completion/completions')
+endif
+
+if get_option('use-git-version')
+ PACKAGE_VERSION = run_command(
+ find_program('git'),
+ 'describe',
+ '--abbrev=4',
+ '--dirty').stdout().strip().strip('v')
+else
+ PACKAGE_VERSION = meson.project_version()
+endif
+
+conf = configuration_data()
+conf.set('_GNU_SOURCE', true)
+conf.set_quoted('PACKAGE', meson.project_name())
+conf.set_quoted('PACKAGE_VERSION', PACKAGE_VERSION)
+conf.set_quoted('LOCALEDIR', LOCALEDIR)
+conf.set_quoted('SCRIPTLET_SHELL', get_option('scriptlet-shell'))
+conf.set_quoted('LDCONFIG', LDCONFIG)
+conf.set_quoted('LIB_VERSION', meson.project_version())
+conf.set_quoted('SYSHOOKDIR', join_paths(DATAROOTDIR, 'libalpm/hooks/'))
+conf.set_quoted('CONFFILE', join_paths(SYSCONFDIR, 'pacman.conf'))
+conf.set_quoted('DBPATH', join_paths(LOCALSTATEDIR, 'lib/pacman'))
+conf.set_quoted('GPGDIR', join_paths(SYSCONFDIR, 'pacman.d/gnupg'))
+conf.set_quoted('LOGFILE', join_paths(LOCALSTATEDIR, 'log/pacman.log'))
+conf.set_quoted('CACHEDIR', join_paths(LOCALSTATEDIR, 'cache/pacman/pkg'))
+conf.set_quoted('HOOKDIR', join_paths(SYSCONFDIR, 'pacman.d/hooks/'))
+conf.set_quoted('ROOTDIR', ROOTDIR)
+
+if get_option('i18n')
+ if not cc.has_function('ngettext')
+ error('ngettext not found but NLS support requested')
+ endif
+ conf.set('ENABLE_NLS', 1)
+endif
+
+# dependencies
+libarchive = dependency('libarchive',
+ version : '>=3.0.0',
+ static : get_option('buildstatic'))
+
+libcurl = dependency('libcurl',
+ version : '>=7.32.0',
+ required : get_option('curl'),
+ static : get_option('buildstatic'))
+conf.set('HAVE_LIBCURL', libcurl.found())
+
+want_gpgme = get_option('gpgme')
+gpgme_config = find_program('gpgme-config', required : want_gpgme)
+if not want_gpgme.disabled() and gpgme_config.found()
+ gpgme_version = run_command(gpgme_config, '--version').stdout().strip()
+
+ needed_gpgme_version = '>=1.3.0'
+ have = gpgme_version.version_compare(needed_gpgme_version)
+ if want_gpgme.enabled() and not have
+ error('gpgme @0@ is needed for GPG signature support'.format(needed_gpgme_version))
+ endif
+
+ gpgme_libs = [
+ cc.find_library('gpgme', required : have,
+ dirs : [get_option('gpgme-libdir')]),
+ cc.find_library('gpg-error', required : have,
+ dirs : [get_option('gpgme-libdir')]),
+ cc.find_library('assuan', required : have,
+ dirs : [get_option('gpgme-libdir')]),
+ ]
+
+ conf.set('HAVE_LIBGPGME', have)
+else
+ gpgme_libs = []
+ conf.set('HAVE_LIBGPGME', false)
+endif
+
+want_crypto = get_option('crypto')
+if want_crypto == 'openssl'
+ libcrypto = dependency('libcrypto', static : get_option('buildstatic'))
+ if not libcrypto.found()
+ error('openssl support requested but not found')
+ endif
+ crypto_provider = libcrypto
+ conf.set10('HAVE_LIBSSL', true)
+elif want_crypto == 'nettle'
+ libnettle = dependency('nettle', static : get_option('buildstatic'))
+ if not libnettle.found()
+ error('nettle support requested but not found')
+ endif
+ crypto_provider = libnettle
+ conf.set10('HAVE_LIBNETTLE', true)
+else
+ error('unhandled crypto value @0@'.format(want_crypto))
+endif
+
+foreach header : [
+ 'mntent.h',
+ 'sys/mnttab.h',
+ 'sys/mount.h',
+ 'sys/param.h',
+ 'sys/statvfs.h',
+ 'sys/types.h',
+ 'sys/ucred.h',
+ 'termios.h',
+ ]
+ if cc.has_header(header)
+ conf.set('HAVE_' + header.underscorify().to_upper(), true)
+ endif
+endforeach
+
+foreach sym : [
+ 'dup2',
+ 'fork',
+ 'getcwd',
+ 'getmntent',
+ 'getmntinfo',
+ 'gettimeofday',
+ 'memmove',
+ 'memset',
+ 'mkdir',
+ 'realpath',
+ 'regcomp',
+ 'rmdir',
+ 'setenv',
+ 'setlocale',
+ 'strcasecmp',
+ 'strchr',
+ 'strcspn',
+ 'strdup',
+ 'strerror',
+ 'strndup',
+ 'strnlen',
+ 'strrchr',
+ 'strsep',
+ 'strstr',
+ 'strtol',
+ 'swprintf',
+ 'tcflush',
+ 'uname',
+ 'wcwidth',
+ ]
+ have = cc.has_function(sym, args : '-D_GNU_SOURCE')
+ conf.set10('HAVE_' + sym.to_upper(), have)
+endforeach
+
+foreach member : [
+ ['struct stat', 'st_blksize', '''#include <sys/stat.h>'''],
+ ['struct statvfs', 'f_flag', '''#include <sys/statvfs.h>'''],
+ ['struct statfs', 'f_flags', '''#include <sys/param.h>
+ #include <sys/mount.h>'''],
+ ]
+ have = cc.has_member(member[0], member[1], prefix : member[2])
+ conf.set('HAVE_' + '_'.join([member[0], member[1]]).underscorify().to_upper(), have)
+endforeach
+
+if conf.has('HAVE_STRUCT_STATVFS_F_FLAG')
+ conf.set('FSSTATSTYPE', 'struct statvfs')
+elif conf.has('HAVE_STRUCT_STATFS_F_FLAGS')
+ conf.set('FSSTATSTYPE', 'struct statfs')
+endif
+
+if get_option('buildtype') == 'debug'
+ extra_cflags = [
+ '-Wcast-align',
+ '-Wclobbered',
+ '-Wempty-body',
+ '-Wfloat-equal',
+ '-Wformat-nonliteral',
+ '-Wformat-security',
+ '-Wignored-qualifiers',
+ '-Winit-self',
+ '-Wlogical-op',
+ '-Wmissing-declarations',
+ '-Wmissing-field-initializers',
+ '-Wmissing-parameter-type',
+ '-Wmissing-prototypes',
+ '-Wold-style-declaration',
+ '-Woverride-init',
+ '-Wpointer-arith',
+ '-Wredundant-decls',
+ '-Wshadow',
+ '-Wsign-compare',
+ '-Wstrict-aliasing',
+ '-Wstrict-overflow=5',
+ '-Wstrict-prototypes',
+ '-Wtype-limits',
+ '-Wuninitialized',
+ '-Wunused-but-set-parameter',
+ '-Wunused-parameter',
+ '-Wwrite-strings',
+ ]
+ add_project_arguments(cc.get_supported_arguments(extra_cflags), language : 'c')
+
+ conf.set('PACMAN_DEBUG', 1)
+endif
+
+config_h = configure_file(
+ output : 'config.h',
+ configuration : conf)
+add_project_arguments('-include', 'config.h', language : 'c')
+
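+# Default stat/sed/du invocations substituted into the shell scripts; the GNU
+# forms are assumed here and overridden below for darwin and the BSDs.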
+default_duflags = ' -sk --apparent-size'
+default_sedinplaceflags = ' --follow-symlinks -i'
+inodecmd = 'stat -c \'%i %n\''
+ownercmd = 'stat -c \'%u:%g\''
+modecmd = 'stat -c \'%a\''
+strip_binaries = '--strip-all'
+strip_shared = '--strip-unneeded'
+strip_static = '--strip-debug'
+
+os = host_machine.system()
+if os.startswith('darwin')
+ inodecmd = '/usr/bin/stat -f \'%i %n\''
+ ownercmd = '/usr/bin/stat -f \'%u:%g\''
+ modecmd = '/usr/bin/stat -f \'%lp\''
+ default_sedinplaceflags = ' -i \'\''
+ default_duflags = ' -sk'
+ strip_binaries = ''
+ strip_shared = '-s'
+ strip_static = '-s'
+elif os.contains('bsd') or os == 'dragonfly'
+ inodecmd = 'stat -f \'%i %n\''
+ ownercmd = 'stat -f \'%u:%g\''
+ modecmd = 'stat -f \'%lp\''
+ default_sedinplaceflags = ' -i \'\''
+ default_duflags = ' -sk'
+endif
+
+duflags = get_option('duflags')
+if duflags == 'autodetect'
+ duflags = default_duflags
+endif
+
+sedinplaceflags = get_option('sedinplaceflags')
+if sedinplaceflags == 'auto'
+ sedinplaceflags = default_sedinplaceflags
+endif
+
+chost = run_command(cc, '-dumpmachine').stdout().strip()
+carch = chost.split('-')[0]
+
+# annoyingly, we have to maintain two sets of configuration_data which are
+# largely identical, but which differ in their quoting needs.
+substs = configuration_data()
+substs.set('SED', SED.path())
+substs.set('M4', M4.path())
+substs.set('CARCH', carch)
+substs.set('CHOST', chost)
+substs.set('PKGEXT', get_option('pkg-ext'))
+substs.set('SRCEXT', get_option('src-ext'))
+substs.set('ROOTDIR', ROOTDIR)
+substs.set('LOCALEDIR', LOCALEDIR)
+substs.set('sysconfdir', SYSCONFDIR)
+substs.set('localstatedir', LOCALSTATEDIR)
+substs.set('PKGDATADIR', PKGDATADIR)
+substs.set('PREFIX', PREFIX)
+substs.set('BASH', BASH.path())
+substs.set('PACKAGE_VERSION', PACKAGE_VERSION)
+substs.set('PACKAGE_NAME', meson.project_name())
+substs.set('BUILDSCRIPT', BUILDSCRIPT)
+substs.set('TEMPLATE_DIR', get_option('makepkg-template-dir'))
+substs.set('DEBUGSUFFIX', get_option('debug-suffix'))
+substs.set('INODECMD', inodecmd)
+substs.set('OWNERCMD', ownercmd)
+substs.set('MODECMD', modecmd)
+substs.set('SEDINPLACEFLAGS', sedinplaceflags)
+substs.set('SEDPATH', SED.path())
+substs.set('DUFLAGS', duflags)
+substs.set('DUPATH', DU.path())
+substs.set('LIBMAKEPKGDIR', LIBMAKEPKGDIR)
+substs.set('STRIP_BINARIES', strip_binaries)
+substs.set('STRIP_SHARED', strip_shared)
+substs.set('STRIP_STATIC', strip_static)
+
+subdir('lib/libalpm')
+subdir('src/common')
+subdir('src/pacman')
+subdir('src/util')
+subdir('scripts')
+
+# Internationalization
+if get_option('i18n')
+ i18n = import('i18n')
+ subdir('lib/libalpm/po')
+ subdir('src/pacman/po')
+ subdir('scripts/po')
+endif
+
+want_doc = get_option('doc')
+ASCIIDOC = find_program('asciidoc', required : want_doc)
+A2X = find_program('a2x', required : want_doc)
+build_doc = A2X.found() and not want_doc.disabled()
+if build_doc
+ subdir('doc')
+endif
+
+includes = include_directories('src/common', 'lib/libalpm')
+
+libcommon = static_library(
+ 'common',
+ libcommon_sources,
+ include_directories : includes,
+ install : false)
+
+libalpm = library(
+ 'alpm',
+ libalpm_sources,
+ version : libalpm_version,
+ include_directories : includes,
+ dependencies : [crypto_provider, libarchive, libcurl] + gpgme_libs,
+ link_with : [libcommon],
+ install : true)
+
+install_headers(
+ 'lib/libalpm/alpm.h',
+ 'lib/libalpm/alpm_list.h')
+
+# TODO: libs.private seem quite wrong here
+pkgconfig = import('pkgconfig')
+pkgconfig.generate(
+ libalpm,
+ name : 'libalpm',
+ description : 'Arch Linux package management library',
+ version : libalpm_version,
+ url : 'http://www.archlinux.org/pacman/')
+
+pacman_bin = executable(
+ 'pacman',
+ pacman_sources,
+ include_directories : includes,
+ link_with : [libalpm, libcommon],
+ dependencies : [libarchive],
+ install : true,
+)
+
+executable(
+ 'pacman-conf',
+ pacman_conf_sources,
+ include_directories : includes,
+ link_with : [libalpm],
+ install : true,
+)
+
+executable(
+ 'cleanupdelta',
+ cleanupdelta_sources,
+ include_directories : includes,
+ link_with : [libalpm],
+ install : true,
+)
+
+executable(
+ 'testpkg',
+ testpkg_sources,
+ include_directories : includes,
+ link_with : [libalpm],
+ install : true,
+)
+
+executable(
+ 'vercmp',
+ vercmp_sources,
+ include_directories : includes,
+ link_with : [libalpm],
+ install : true,
+)
+
+configure_file(
+ input : 'etc/makepkg.conf.in',
+ output : 'makepkg.conf',
+ configuration : substs,
+ install_dir : SYSCONFDIR)
+
+configure_file(
+ input : 'etc/pacman.conf.in',
+ output : 'pacman.conf',
+ configuration : substs,
+ install_dir : SYSCONFDIR)
+
+install_data(
+ 'proto/PKGBUILD-split.proto',
+ 'proto/PKGBUILD-vcs.proto',
+ 'proto/PKGBUILD.proto',
+ 'proto/proto.install',
+ install_dir : join_paths(DATAROOTDIR, 'pacman'))
+
+TEST_ENV = environment()
+TEST_ENV.set('PMTEST_SCRIPTLIB_DIR', join_paths(meson.source_root(), 'scripts/library/'))
+TEST_ENV.set('PMTEST_LIBMAKEPKG_DIR', join_paths(meson.build_root(), 'scripts/libmakepkg/'))
+TEST_ENV.set('PMTEST_UTIL_DIR', meson.build_root() + '/')
+TEST_ENV.set('PMTEST_SCRIPT_DIR', join_paths(meson.build_root(), 'scripts/'))
+
+subdir('test/pacman')
+subdir('test/scripts')
+subdir('test/util')
+
+message('\n '.join([
+ '@0@ @1@'.format(meson.project_name(), meson.project_version()),
+ 'Build information:',
+ ' prefix : @0@'.format(PREFIX),
+ ' sysconfdir : @0@'.format(SYSCONFDIR),
+ ' conf file : @0@'.format(join_paths(SYSCONFDIR, 'pacman.conf')),
+ ' localstatedir : @0@'.format(LOCALSTATEDIR),
+ ' database dir : @0@'.format(join_paths(LOCALSTATEDIR, 'lib/pacman/')),
+ ' cache dir : @0@'.format(join_paths(LOCALSTATEDIR, 'cache/pacman/pkg/')),
+ ' compiler : @0@ @1@'.format(cc.get_id(), cc.version()),
+ '',
+ ' Architecture : @0@'.format(carch),
+ ' Host Type : @0@'.format(chost),
+ ' File inode command : @0@'.format(inodecmd),
+ ' File owner command : @0@'.format(ownercmd),
+ ' File mode command : @0@'.format(modecmd),
+ ' Directory size command : @0@ @1@'.format(DU.path(), duflags),
+ ' In-place sed command : @0@ @1@'.format(SED.path(), sedinplaceflags),
+ ' libalpm version : @0@'.format(libalpm_version),
+ ' pacman version : @0@'.format(PACKAGE_VERSION),
+ '',
+ 'Directory and file information:',
+ ' root working directory : @0@'.format(ROOTDIR),
+ ' package extension : @0@'.format(get_option('pkg-ext')),
+ ' source pkg extension : @0@'.format(get_option('src-ext')),
+ ' build script name : @0@'.format(BUILDSCRIPT),
+ ' template directory : @0@'.format(get_option('makepkg-template-dir')),
+ '',
+ 'Compilation options:',
+ ' i18n support : @0@'.format(get_option('i18n')),
+ ' Build docs : @0@'.format(build_doc),
+ ' debug build : @0@'.format(get_option('buildtype') == 'debug'),
+ ' Use libcurl : @0@'.format(conf.get('HAVE_LIBCURL')),
+ ' Use GPGME : @0@'.format(conf.get('HAVE_LIBGPGME')),
+ ' Use OpenSSL : @0@'.format(conf.has('HAVE_LIBSSL') and
+ conf.get('HAVE_LIBSSL') == 1),
+ ' Use nettle : @0@'.format(conf.has('HAVE_LIBNETTLE') and
+ conf.get('HAVE_LIBNETTLE') == 1),
+ '',
+]))
diff --git a/meson_options.txt b/meson_options.txt
new file mode 100644
index 00000000..422a9ae4
--- /dev/null
+++ b/meson_options.txt
@@ -0,0 +1,61 @@
+# build behavior
+option('use-git-version', type : 'boolean', value : false,
+ description : 'take version information from git')
+option('buildstatic', type : 'boolean', value : false,
+  description : 'if true, build statically linked binaries')
+
+# directories and filenames
+option('root-dir', type : 'string', value : '/',
+ description : 'set the location of the root operating directory')
+
+option('pkg-ext', type : 'string', value : '.pkg.tar.gz',
+ description : 'set the file extension used by packages')
+
+option('src-ext', type : 'string', value : '.src.tar.gz',
+ description : 'set the file extension used by source packages')
+
+option('scriptlet-shell', type : 'string', value : '/bin/sh',
+ description : 'The full path of the shell used to run install scriptlets')
+
+option('ldconfig', type : 'string', value : '/sbin/ldconfig',
+ description : 'set the full path to ldconfig')
+
+option('buildscript', type : 'string', value : 'PKGBUILD',
+ description : 'set the build script name used by makepkg')
+
+option('datarootdir', type : 'string', value : 'share',
+ description : 'FIXME')
+
+option('makepkg-template-dir', type : 'string', value : '/usr/share/makepkg-template',
+ description : 'template dir used by makepkg-template')
+
+option('debug-suffix', type : 'string', value : 'debug',
+ description : 'suffix for split debugging symbol packages used by makepkg')
+
+# dependencies, features
+option('doc', type : 'feature', value : 'auto',
+ description : 'generate docs and manpages')
+
+option('doxygen', type : 'feature', value : 'disabled',
+ description : 'generate doxygen manpages and html')
+
+option('curl', type : 'feature', value : 'auto',
+ description : 'use curl to download files')
+
+option('crypto', type : 'combo', choices : ['openssl', 'nettle'],
+ description : 'select crypto implementation')
+
+option('gpgme', type : 'feature', value : 'auto',
+ description : 'use GPGME for PGP signature verification')
+option('gpgme-libdir', type : 'string', value : '/usr/lib',
+ description : 'search directory for gpgme libraries.')
+
+option('i18n', type : 'boolean', value : true,
+ description : 'enable localization of pacman, libalpm and scripts')
+
+# tools
+option('duflags', type : 'string', value : 'autodetect',
+ description : 'flags to pass to du to measure file size')
+
+option('sedinplaceflags', type : 'string', value : 'auto',
+ description : 'flags to pass to sed to edit a file in-place')
diff --git a/scripts/libmakepkg/integrity/meson.build b/scripts/libmakepkg/integrity/meson.build
new file mode 100644
index 00000000..9aa9061c
--- /dev/null
+++ b/scripts/libmakepkg/integrity/meson.build
@@ -0,0 +1,20 @@
+libmakepkg_module = 'integrity'
+
+sources = [
+ 'generate_checksum.sh.in',
+ 'generate_signature.sh.in',
+ 'verify_checksum.sh.in',
+ 'verify_signature.sh.in',
+]
+
+foreach src : sources
+ output_dir = join_paths(get_option('datadir'), 'makepkg', libmakepkg_module)
+
+ custom_target(
+ libmakepkg_module + '_' + src.underscorify(),
+ command : [ SCRIPT_EDITOR, '@INPUT@', '@OUTPUT@' ],
+ input : src,
+ output : '@BASENAME@',
+ install : true,
+ install_dir : output_dir)
+endforeach
diff --git a/scripts/libmakepkg/lint_config/meson.build b/scripts/libmakepkg/lint_config/meson.build
new file mode 100644
index 00000000..884d63d7
--- /dev/null
+++ b/scripts/libmakepkg/lint_config/meson.build
@@ -0,0 +1,18 @@
+libmakepkg_module = 'lint_config'
+
+sources = [
+ 'paths.sh.in',
+ 'variable.sh.in',
+]
+
+foreach src : sources
+ output_dir = join_paths(get_option('datadir'), 'makepkg', libmakepkg_module)
+
+ custom_target(
+ libmakepkg_module + '_' + src.underscorify(),
+ command : [ SCRIPT_EDITOR, '@INPUT@', '@OUTPUT@' ],
+ input : src,
+ output : '@BASENAME@',
+ install : true,
+ install_dir : output_dir)
+endforeach
diff --git a/scripts/libmakepkg/lint_package/meson.build b/scripts/libmakepkg/lint_package/meson.build
new file mode 100644
index 00000000..8eb1aaf7
--- /dev/null
+++ b/scripts/libmakepkg/lint_package/meson.build
@@ -0,0 +1,20 @@
+libmakepkg_module = 'lint_package'
+
+sources = [
+ 'build_references.sh.in',
+ 'dotfiles.sh.in',
+ 'file_names.sh.in',
+ 'missing_backup.sh.in',
+]
+
+foreach src : sources
+ output_dir = join_paths(get_option('datadir'), 'makepkg', libmakepkg_module)
+
+ custom_target(
+ libmakepkg_module + '_' + src.underscorify(),
+ command : [ SCRIPT_EDITOR, '@INPUT@', '@OUTPUT@' ],
+ input : src,
+ output : '@BASENAME@',
+ install : true,
+ install_dir : output_dir)
+endforeach
diff --git a/scripts/libmakepkg/lint_pkgbuild/meson.build b/scripts/libmakepkg/lint_pkgbuild/meson.build
new file mode 100644
index 00000000..9067c9d6
--- /dev/null
+++ b/scripts/libmakepkg/lint_pkgbuild/meson.build
@@ -0,0 +1,37 @@
+libmakepkg_module = 'lint_pkgbuild'
+
+sources = [
+ 'arch.sh.in',
+ 'backup.sh.in',
+ 'changelog.sh.in',
+ 'checkdepends.sh.in',
+ 'conflicts.sh.in',
+ 'depends.sh.in',
+ 'epoch.sh.in',
+ 'install.sh.in',
+ 'makedepends.sh.in',
+ 'optdepends.sh.in',
+ 'options.sh.in',
+ 'package_function.sh.in',
+ 'pkgbase.sh.in',
+ 'pkglist.sh.in',
+ 'pkgname.sh.in',
+ 'pkgrel.sh.in',
+ 'pkgver.sh.in',
+ 'provides.sh.in',
+ 'source.sh.in',
+ 'util.sh.in',
+ 'variable.sh.in',
+]
+
+foreach src : sources
+ output_dir = join_paths(get_option('datadir'), 'makepkg', libmakepkg_module)
+
+ custom_target(
+ libmakepkg_module + '_' + src.underscorify(),
+ command : [ SCRIPT_EDITOR, '@INPUT@', '@OUTPUT@' ],
+ input : src,
+ output : '@BASENAME@',
+ install : true,
+ install_dir : output_dir)
+endforeach
diff --git a/scripts/libmakepkg/meson.build b/scripts/libmakepkg/meson.build
new file mode 100644
index 00000000..07475b4d
--- /dev/null
+++ b/scripts/libmakepkg/meson.build
@@ -0,0 +1,31 @@
+libmakepkg_modules = [
+ { 'name' : 'integrity', 'has_subdir' : true },
+ { 'name' : 'lint_config', 'has_subdir' : true },
+ { 'name' : 'lint_package', 'has_subdir' : true },
+ { 'name' : 'lint_pkgbuild', 'has_subdir' : true },
+ { 'name' : 'source', 'has_subdir' : true },
+ { 'name' : 'srcinfo', },
+ { 'name' : 'tidy', 'has_subdir' : true },
+ { 'name' : 'util', 'has_subdir' : true },
+]
+
+mkdir_p = 'mkdir -p $DESTDIR/@0@'
+
+foreach module : libmakepkg_modules
+ custom_target(
+ 'libmakepkg_@0@'.format(module['name']),
+ command : [ SCRIPT_EDITOR, '@INPUT@', '@OUTPUT@' ],
+ input : '@0@.sh.in'.format(module['name']),
+ output : '@BASENAME@',
+ install : true,
+ install_dir : join_paths(get_option('datadir'), 'makepkg'))
+
+ if module.get('has_subdir', false)
+ subdir(module['name'])
+ path = join_paths(get_option('prefix'),
+ get_option('datadir'),
+ 'makepkg',
+ module['name'])
+ meson.add_install_script('sh', '-c', mkdir_p.format(path))
+ endif
+endforeach
diff --git a/scripts/libmakepkg/source/meson.build b/scripts/libmakepkg/source/meson.build
new file mode 100644
index 00000000..59326133
--- /dev/null
+++ b/scripts/libmakepkg/source/meson.build
@@ -0,0 +1,22 @@
+libmakepkg_module = 'source'
+
+sources = [
+ 'bzr.sh.in',
+ 'file.sh.in',
+ 'git.sh.in',
+ 'hg.sh.in',
+ 'local.sh.in',
+ 'svn.sh.in',
+]
+
+foreach src : sources
+ output_dir = join_paths(get_option('datadir'), 'makepkg', libmakepkg_module)
+
+ custom_target(
+ libmakepkg_module + '_' + src.underscorify(),
+ command : [ SCRIPT_EDITOR, '@INPUT@', '@OUTPUT@' ],
+ input : src,
+ output : '@BASENAME@',
+ install : true,
+ install_dir : output_dir)
+endforeach
diff --git a/scripts/libmakepkg/tidy/meson.build b/scripts/libmakepkg/tidy/meson.build
new file mode 100644
index 00000000..052ac7a2
--- /dev/null
+++ b/scripts/libmakepkg/tidy/meson.build
@@ -0,0 +1,23 @@
+libmakepkg_module = 'tidy'
+
+sources = [
+ 'docs.sh.in',
+ 'emptydirs.sh.in',
+ 'libtool.sh.in',
+ 'purge.sh.in',
+ 'staticlibs.sh.in',
+ 'strip.sh.in',
+ 'zipman.sh.in',
+]
+
+foreach src : sources
+ output_dir = join_paths(get_option('datadir'), 'makepkg', libmakepkg_module)
+
+ custom_target(
+ libmakepkg_module + '_' + src.underscorify(),
+ command : [ SCRIPT_EDITOR, '@INPUT@', '@OUTPUT@' ],
+ input : src,
+ output : '@BASENAME@',
+ install : true,
+ install_dir : output_dir)
+endforeach
diff --git a/scripts/libmakepkg/util/meson.build b/scripts/libmakepkg/util/meson.build
new file mode 100644
index 00000000..b0e829c4
--- /dev/null
+++ b/scripts/libmakepkg/util/meson.build
@@ -0,0 +1,24 @@
+libmakepkg_module = 'util'
+
+sources = [
+ 'compress.sh.in',
+ 'error.sh.in',
+ 'message.sh.in',
+ 'option.sh.in',
+ 'parseopts.sh.in',
+ 'pkgbuild.sh.in',
+ 'source.sh.in',
+ 'util.sh.in',
+]
+
+foreach src : sources
+ output_dir = join_paths(get_option('datadir'), 'makepkg', libmakepkg_module)
+
+ custom_target(
+ libmakepkg_module + '_' + src.underscorify(),
+ command : [ SCRIPT_EDITOR, '@INPUT@', '@OUTPUT@' ],
+ input : src,
+ output : '@BASENAME@',
+ install : true,
+ install_dir : output_dir)
+endforeach
diff --git a/scripts/meson.build b/scripts/meson.build
new file mode 100644
index 00000000..1fe3fb78
--- /dev/null
+++ b/scripts/meson.build
@@ -0,0 +1,66 @@
+scripts = [
+ 'makepkg-template.pl.in',
+ 'makepkg.sh.in',
+ 'pacman-db-upgrade.sh.in',
+ 'pacman-key.sh.in',
+ 'pkgdelta.sh.in',
+ 'repo-add.sh.in'
+]
+
+library_files = [
+ 'library/human_to_size.sh',
+ 'library/size_to_human.sh',
+]
+
+SCRIPT_EDITOR = find_program(configure_file(
+ input : join_paths(meson.source_root(), 'build-aux/edit-script.sh.in'),
+ output : 'edit-script.sh',
+ configuration : substs))
+
+m4_edit = generator(
+ M4,
+ arguments : ['-P', '-I', meson.current_source_dir(), '@INPUT@'],
+ output : '@PLAINNAME@',
+ capture : true)
+
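+# Each script is produced in two stages: m4 assembles the .sh.in template
+# (pulling in the shared library/ snippets), then the edit-script helper fills
+# in the @var@ placeholders and marks the result executable.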
+foreach script : scripts
+ custom_target(
+ script,
+ input : m4_edit.process(script),
+ command : [ SCRIPT_EDITOR, '@INPUT@', '@OUTPUT@', '0755'],
+ output : script.split('.')[0],
+ depend_files : library_files,
+ install : true,
+ install_dir : get_option('bindir'))
+endforeach
+
+foreach symlink : ['repo-remove', 'repo-elephant']
+ meson.add_install_script(MESON_MAKE_SYMLINK,
+ 'repo-add',
+ join_paths(BINDIR, symlink))
+endforeach
+
+subdir('libmakepkg')
+
+custom_target(
+ 'bash_completion',
+ command : [ SCRIPT_EDITOR, '@INPUT@', '@OUTPUT@' ],
+ input : 'completion/bash_completion.in',
+ output : 'pacman',
+ install : true,
+ install_dir : BASHCOMPDIR)
+
+foreach symlink : ['pacman-key', 'makepkg']
+ meson.add_install_script(MESON_MAKE_SYMLINK,
+ 'pacman',
+ join_paths(BASHCOMPDIR, symlink))
+endforeach
+
+zsh_completion_dir = join_paths(DATAROOTDIR, 'zsh/site-functions')
+custom_target(
+ 'zsh_completion',
+ command : [ SCRIPT_EDITOR, '@INPUT@', '@OUTPUT@' ],
+ input : 'completion/zsh_completion.in',
+ output : '_pacman',
+ install : true,
+ install_dir : zsh_completion_dir)
diff --git a/scripts/po/meson.build b/scripts/po/meson.build
new file mode 100644
index 00000000..d8b8c51c
--- /dev/null
+++ b/scripts/po/meson.build
@@ -0,0 +1,15 @@
+i18n.gettext(
+ 'pacman-scripts',
+ args : [
+ '--directory=@0@'.format(meson.current_source_dir()),
+ '--msgid-bugs-address=http://bugs.archlinux.org/index.php?project=3',
+ '--copyright-holder="Pacman Development Team <pacman-dev@archlinux.org>"',
+ '--language', 'shell',
+
+ '--keyword=_',
+ '--flag=_:1:c-format',
+
+ '--keyword=_n:1,2',
+ '--flag=_n:1:c-format',
+ '--flag=_n:2:c-format',
+ ])
diff --git a/src/common/meson.build b/src/common/meson.build
new file mode 100644
index 00000000..1443be3b
--- /dev/null
+++ b/src/common/meson.build
@@ -0,0 +1,4 @@
+libcommon_sources = files('''
+ ini.c ini.h
+ util-common.c util-common.h
+'''.split())
diff --git a/src/pacman/meson.build b/src/pacman/meson.build
new file mode 100644
index 00000000..6926f676
--- /dev/null
+++ b/src/pacman/meson.build
@@ -0,0 +1,23 @@
+pacman_sources = files('''
+ check.h check.c
+ conf.h conf.c
+ database.c
+ deptest.c
+ files.c
+ package.h package.c
+ pacman.h pacman.c
+ query.c
+ remove.c
+ sighandler.h sighandler.c
+ sync.c
+ callback.h callback.c
+ upgrade.c
+ util.h util.c
+'''.split())
+
+pacman_conf_sources = files('''
+ pacman-conf.c
+ util.h util.c
+ callback.h callback.c
+ conf.h conf.c
+'''.split())
diff --git a/src/pacman/po/meson.build b/src/pacman/po/meson.build
new file mode 100644
index 00000000..eb45fa1c
--- /dev/null
+++ b/src/pacman/po/meson.build
@@ -0,0 +1,15 @@
+i18n.gettext(
+ 'pacman',
+ args : [
+ '--directory=@0@'.format(meson.current_source_dir()),
+ '--msgid-bugs-address=http://bugs.archlinux.org/index.php?project=3',
+ '--copyright-holder="Pacman Development Team <pacman-dev@archlinux.org>"',
+ '--language', 'c',
+
+ '--keyword=_',
+ '--flag=_:1:c-format',
+
+ '--keyword=_n:1,2',
+ '--flag=_n:1:c-format',
+ '--flag=_n:2:c-format',
+ ])
diff --git a/src/util/meson.build b/src/util/meson.build
new file mode 100644
index 00000000..cc219670
--- /dev/null
+++ b/src/util/meson.build
@@ -0,0 +1,3 @@
+cleanupdelta_sources = files('cleanupdelta.c')
+testpkg_sources = files('testpkg.c')
+vercmp_sources = files('vercmp.c')
diff --git a/test/pacman/meson.build b/test/pacman/meson.build
new file mode 100644
index 00000000..dbdb429e
--- /dev/null
+++ b/test/pacman/meson.build
@@ -0,0 +1,357 @@
+pacman_tests = [
+ { 'name': 'tests/backup001.py' },
+ { 'name': 'tests/clean001.py' },
+ { 'name': 'tests/clean002.py' },
+ { 'name': 'tests/clean003.py' },
+ { 'name': 'tests/clean004.py' },
+ { 'name': 'tests/clean005.py' },
+ { 'name': 'tests/config001.py' },
+ { 'name': 'tests/config002.py' },
+ { 'name': 'tests/database001.py' },
+ { 'name': 'tests/database002.py' },
+ { 'name': 'tests/database010.py' },
+ { 'name': 'tests/database011.py' },
+ { 'name': 'tests/database012.py' },
+ { 'name': 'tests/dbonly-extracted-files.py' },
+ { 'name': 'tests/depconflict100.py' },
+ { 'name': 'tests/depconflict110.py' },
+ { 'name': 'tests/depconflict111.py' },
+ { 'name': 'tests/depconflict120.py' },
+ { 'name': 'tests/dependency-cycle-fixed-by-upgrade.py' },
+ { 'name': 'tests/deprange001.py',
+ 'should_fail': true },
+ { 'name': 'tests/deptest001.py' },
+ { 'name': 'tests/dummy001.py' },
+ { 'name': 'tests/epoch001.py' },
+ { 'name': 'tests/epoch002.py' },
+ { 'name': 'tests/epoch003.py' },
+ { 'name': 'tests/epoch004.py' },
+ { 'name': 'tests/epoch005.py' },
+ { 'name': 'tests/epoch010.py' },
+ { 'name': 'tests/epoch011.py' },
+ { 'name': 'tests/epoch012.py' },
+ { 'name': 'tests/file-conflict-with-installed-pkg.py' },
+ { 'name': 'tests/fileconflict001.py' },
+ { 'name': 'tests/fileconflict002.py' },
+ { 'name': 'tests/fileconflict003.py' },
+ { 'name': 'tests/fileconflict004.py' },
+ { 'name': 'tests/fileconflict005.py' },
+ { 'name': 'tests/fileconflict006.py' },
+ { 'name': 'tests/fileconflict007.py' },
+ { 'name': 'tests/fileconflict008.py' },
+ { 'name': 'tests/fileconflict009.py' },
+ { 'name': 'tests/fileconflict010.py' },
+ { 'name': 'tests/fileconflict011.py' },
+ { 'name': 'tests/fileconflict012.py' },
+ { 'name': 'tests/fileconflict013.py' },
+ { 'name': 'tests/fileconflict015.py' },
+ { 'name': 'tests/fileconflict016.py' },
+ { 'name': 'tests/fileconflict017.py' },
+ { 'name': 'tests/fileconflict020.py' },
+ { 'name': 'tests/fileconflict021.py' },
+ { 'name': 'tests/fileconflict022.py' },
+ { 'name': 'tests/fileconflict023.py' },
+ { 'name': 'tests/fileconflict024.py' },
+ { 'name': 'tests/fileconflict025.py' },
+ { 'name': 'tests/fileconflict030.py' },
+ { 'name': 'tests/fileconflict031.py' },
+ { 'name': 'tests/fileconflict032.py' },
+ { 'name': 'tests/hook-abortonfail.py' },
+ { 'name': 'tests/hook-description-reused.py' },
+ { 'name': 'tests/hook-exec-reused.py' },
+ { 'name': 'tests/hook-exec-with-arguments.py' },
+ { 'name': 'tests/hook-file-change-packages.py' },
+ { 'name': 'tests/hook-file-remove-trigger-match.py' },
+ { 'name': 'tests/hook-file-upgrade-nomatch.py' },
+ { 'name': 'tests/hook-invalid-trigger.py' },
+ { 'name': 'tests/hook-pkg-install-trigger-match.py' },
+ { 'name': 'tests/hook-pkg-postinstall-trigger-match.py' },
+ { 'name': 'tests/hook-pkg-remove-trigger-match.py' },
+ { 'name': 'tests/hook-pkg-upgrade-trigger-match.py' },
+ { 'name': 'tests/hook-target-list.py' },
+ { 'name': 'tests/hook-type-reused.py' },
+ { 'name': 'tests/hook-upgrade-trigger-no-match.py' },
+ { 'name': 'tests/hook-when-reused.py' },
+ { 'name': 'tests/ignore001.py' },
+ { 'name': 'tests/ignore002.py' },
+ { 'name': 'tests/ignore003.py' },
+ { 'name': 'tests/ignore004.py' },
+ { 'name': 'tests/ignore005.py' },
+ { 'name': 'tests/ignore006.py' },
+ { 'name': 'tests/ignore007.py' },
+ { 'name': 'tests/ignore008.py' },
+ { 'name': 'tests/ldconfig001.py' },
+ { 'name': 'tests/ldconfig002.py' },
+ { 'name': 'tests/ldconfig003.py' },
+ { 'name': 'tests/mode001.py' },
+ { 'name': 'tests/mode002.py' },
+ { 'name': 'tests/mode003.py' },
+ { 'name': 'tests/noupgrade-inverted.py' },
+ { 'name': 'tests/overwrite-files-match-negated.py' },
+ { 'name': 'tests/overwrite-files-match.py' },
+ { 'name': 'tests/overwrite-files-nonmatch.py' },
+ { 'name': 'tests/pacman001.py' },
+ { 'name': 'tests/pacman002.py' },
+ { 'name': 'tests/pacman003.py' },
+ { 'name': 'tests/pacman004.py' },
+ { 'name': 'tests/pacman005.py' },
+ { 'name': 'tests/provision001.py' },
+ { 'name': 'tests/provision002.py' },
+ { 'name': 'tests/provision003.py' },
+ { 'name': 'tests/provision004.py' },
+ { 'name': 'tests/provision010.py' },
+ { 'name': 'tests/provision011.py' },
+ { 'name': 'tests/provision012.py' },
+ { 'name': 'tests/provision020.py' },
+ { 'name': 'tests/provision021.py' },
+ { 'name': 'tests/provision022.py' },
+ { 'name': 'tests/query001.py' },
+ { 'name': 'tests/query002.py' },
+ { 'name': 'tests/query003.py' },
+ { 'name': 'tests/query004.py' },
+ { 'name': 'tests/query005.py' },
+ { 'name': 'tests/query006.py',
+ # expected to fail on 32-bit machines, i.e. when ssize_t is narrower than 8 bytes
+ 'should_fail': cc.sizeof('ssize_t') < 8 },
+ { 'name': 'tests/query007.py' },
+ { 'name': 'tests/query010.py' },
+ { 'name': 'tests/query011.py' },
+ { 'name': 'tests/query012.py' },
+ { 'name': 'tests/querycheck001.py' },
+ { 'name': 'tests/querycheck002.py' },
+ { 'name': 'tests/querycheck_fast_file_type.py' },
+ { 'name': 'tests/reason001.py' },
+ { 'name': 'tests/remove-assumeinstalled.py' },
+ { 'name': 'tests/remove-directory-replaced-with-symlink.py' },
+ { 'name': 'tests/remove-optdepend-of-installed-package.py' },
+ { 'name': 'tests/remove-recursive-cycle.py' },
+ { 'name': 'tests/remove001.py' },
+ { 'name': 'tests/remove002.py' },
+ { 'name': 'tests/remove010.py' },
+ { 'name': 'tests/remove011.py' },
+ { 'name': 'tests/remove012.py' },
+ { 'name': 'tests/remove020.py' },
+ { 'name': 'tests/remove021.py' },
+ { 'name': 'tests/remove030.py' },
+ { 'name': 'tests/remove031.py' },
+ { 'name': 'tests/remove040.py' },
+ { 'name': 'tests/remove041.py' },
+ { 'name': 'tests/remove042.py' },
+ { 'name': 'tests/remove043.py' },
+ { 'name': 'tests/remove044.py' },
+ { 'name': 'tests/remove045.py' },
+ { 'name': 'tests/remove047.py' },
+ { 'name': 'tests/remove049.py' },
+ { 'name': 'tests/remove050.py' },
+ { 'name': 'tests/remove051.py' },
+ { 'name': 'tests/remove052.py' },
+ { 'name': 'tests/remove060.py' },
+ { 'name': 'tests/remove070.py' },
+ { 'name': 'tests/remove071.py' },
+ { 'name': 'tests/replace100.py' },
+ { 'name': 'tests/replace101.py' },
+ { 'name': 'tests/replace102.py' },
+ { 'name': 'tests/replace103.py' },
+ { 'name': 'tests/replace104.py' },
+ { 'name': 'tests/replace110.py',
+ 'should_fail': true },
+ { 'name': 'tests/scriptlet001.py' },
+ { 'name': 'tests/scriptlet002.py' },
+ { 'name': 'tests/sign001.py' },
+ { 'name': 'tests/sign002.py' },
+ { 'name': 'tests/skip-remove-with-glob-chars.py' },
+ { 'name': 'tests/smoke001.py' },
+ { 'name': 'tests/smoke002.py' },
+ { 'name': 'tests/smoke003.py' },
+ { 'name': 'tests/smoke004.py' },
+ { 'name': 'tests/symlink-replace-with-dir.py' },
+ { 'name': 'tests/symlink001.py' },
+ { 'name': 'tests/symlink002.py' },
+ { 'name': 'tests/symlink010.py' },
+ { 'name': 'tests/symlink011.py' },
+ { 'name': 'tests/symlink012.py' },
+ { 'name': 'tests/symlink020.py' },
+ { 'name': 'tests/symlink021.py' },
+ { 'name': 'tests/sync-install-assumeinstalled.py' },
+ { 'name': 'tests/sync-nodepversion01.py' },
+ { 'name': 'tests/sync-nodepversion02.py' },
+ { 'name': 'tests/sync-nodepversion03.py' },
+ { 'name': 'tests/sync-nodepversion04.py' },
+ { 'name': 'tests/sync-nodepversion05.py' },
+ { 'name': 'tests/sync-nodepversion06.py' },
+ { 'name': 'tests/sync-sysupgrade-print-replaced-packages.py' },
+ { 'name': 'tests/sync-update-assumeinstalled.py' },
+ { 'name': 'tests/sync-update-package-removing-required-provides.py',
+ 'should_fail': true },
+ { 'name': 'tests/sync001.py' },
+ { 'name': 'tests/sync002.py' },
+ { 'name': 'tests/sync003.py' },
+ { 'name': 'tests/sync009.py' },
+ { 'name': 'tests/sync010.py' },
+ { 'name': 'tests/sync011.py' },
+ { 'name': 'tests/sync012.py' },
+ { 'name': 'tests/sync020.py' },
+ { 'name': 'tests/sync021.py' },
+ { 'name': 'tests/sync022.py' },
+ { 'name': 'tests/sync023.py' },
+ { 'name': 'tests/sync024.py' },
+ { 'name': 'tests/sync030.py' },
+ { 'name': 'tests/sync031.py' },
+ { 'name': 'tests/sync040.py' },
+ { 'name': 'tests/sync041.py' },
+ { 'name': 'tests/sync042.py' },
+ { 'name': 'tests/sync043.py' },
+ { 'name': 'tests/sync044.py' },
+ { 'name': 'tests/sync045.py' },
+ { 'name': 'tests/sync046.py' },
+ { 'name': 'tests/sync050.py' },
+ { 'name': 'tests/sync051.py' },
+ { 'name': 'tests/sync052.py' },
+ { 'name': 'tests/sync100.py' },
+ { 'name': 'tests/sync1000.py' },
+ { 'name': 'tests/sync1003.py' },
+ { 'name': 'tests/sync1004.py' },
+ { 'name': 'tests/sync1008.py' },
+ { 'name': 'tests/sync101.py' },
+ { 'name': 'tests/sync102.py' },
+ { 'name': 'tests/sync103.py' },
+ { 'name': 'tests/sync104.py' },
+ { 'name': 'tests/sync110.py' },
+ { 'name': 'tests/sync1100.py' },
+ { 'name': 'tests/sync1101.py' },
+ { 'name': 'tests/sync1102.py' },
+ { 'name': 'tests/sync1103.py' },
+ { 'name': 'tests/sync1104.py' },
+ { 'name': 'tests/sync1105.py' },
+ { 'name': 'tests/sync120.py' },
+ { 'name': 'tests/sync130.py' },
+ { 'name': 'tests/sync131.py' },
+ { 'name': 'tests/sync132.py' },
+ { 'name': 'tests/sync133.py' },
+ { 'name': 'tests/sync134.py' },
+ { 'name': 'tests/sync135.py' },
+ { 'name': 'tests/sync136.py' },
+ { 'name': 'tests/sync137.py' },
+ { 'name': 'tests/sync138.py' },
+ { 'name': 'tests/sync139.py' },
+ { 'name': 'tests/sync140.py' },
+ { 'name': 'tests/sync141.py' },
+ { 'name': 'tests/sync150.py' },
+ { 'name': 'tests/sync200.py' },
+ { 'name': 'tests/sync300.py' },
+ { 'name': 'tests/sync306.py' },
+ { 'name': 'tests/sync400.py' },
+ { 'name': 'tests/sync401.py' },
+ { 'name': 'tests/sync402.py' },
+ { 'name': 'tests/sync403.py',
+ 'should_fail': true },
+ { 'name': 'tests/sync404.py' },
+ { 'name': 'tests/sync405.py' },
+ { 'name': 'tests/sync406.py',
+ 'should_fail': true },
+ { 'name': 'tests/sync407.py' },
+ { 'name': 'tests/sync500.py' },
+ { 'name': 'tests/sync501.py' },
+ { 'name': 'tests/sync502.py' },
+ { 'name': 'tests/sync503.py' },
+ { 'name': 'tests/sync600.py' },
+ { 'name': 'tests/sync700.py' },
+ { 'name': 'tests/sync701.py' },
+ { 'name': 'tests/sync702.py' },
+ { 'name': 'tests/sync890.py' },
+ { 'name': 'tests/sync891.py' },
+ { 'name': 'tests/sync892.py' },
+ { 'name': 'tests/sync893.py' },
+ { 'name': 'tests/sync895.py' },
+ { 'name': 'tests/sync896.py' },
+ { 'name': 'tests/sync897.py' },
+ { 'name': 'tests/sync898.py' },
+ { 'name': 'tests/sync899.py' },
+ { 'name': 'tests/sync900.py' },
+ { 'name': 'tests/sync901.py' },
+ { 'name': 'tests/sync990.py' },
+ { 'name': 'tests/sync992.py' },
+ { 'name': 'tests/sync993.py' },
+ { 'name': 'tests/sync999.py' },
+ { 'name': 'tests/trans001.py' },
+ { 'name': 'tests/type001.py' },
+ { 'name': 'tests/unresolvable001.py' },
+ { 'name': 'tests/upgrade001.py' },
+ { 'name': 'tests/upgrade002.py' },
+ { 'name': 'tests/upgrade003.py' },
+ { 'name': 'tests/upgrade004.py' },
+ { 'name': 'tests/upgrade005.py' },
+ { 'name': 'tests/upgrade006.py' },
+ { 'name': 'tests/upgrade010.py' },
+ { 'name': 'tests/upgrade011.py' },
+ { 'name': 'tests/upgrade013.py' },
+ { 'name': 'tests/upgrade020.py' },
+ { 'name': 'tests/upgrade021.py' },
+ { 'name': 'tests/upgrade022.py' },
+ { 'name': 'tests/upgrade023.py' },
+ { 'name': 'tests/upgrade024.py' },
+ { 'name': 'tests/upgrade025.py' },
+ { 'name': 'tests/upgrade026.py' },
+ { 'name': 'tests/upgrade027.py' },
+ { 'name': 'tests/upgrade028.py' },
+ { 'name': 'tests/upgrade029.py' },
+ { 'name': 'tests/upgrade030.py' },
+ { 'name': 'tests/upgrade031.py' },
+ { 'name': 'tests/upgrade032.py' },
+ { 'name': 'tests/upgrade040.py' },
+ { 'name': 'tests/upgrade041.py' },
+ { 'name': 'tests/upgrade042.py' },
+ { 'name': 'tests/upgrade043.py' },
+ { 'name': 'tests/upgrade045.py' },
+ { 'name': 'tests/upgrade050.py' },
+ { 'name': 'tests/upgrade051.py' },
+ { 'name': 'tests/upgrade052.py' },
+ { 'name': 'tests/upgrade053.py' },
+ { 'name': 'tests/upgrade054.py' },
+ { 'name': 'tests/upgrade055.py' },
+ { 'name': 'tests/upgrade056.py' },
+ { 'name': 'tests/upgrade057.py' },
+ { 'name': 'tests/upgrade058.py' },
+ { 'name': 'tests/upgrade059.py' },
+ { 'name': 'tests/upgrade060.py' },
+ { 'name': 'tests/upgrade061.py' },
+ { 'name': 'tests/upgrade070.py' },
+ { 'name': 'tests/upgrade071.py' },
+ { 'name': 'tests/upgrade072.py' },
+ { 'name': 'tests/upgrade073.py' },
+ { 'name': 'tests/upgrade074.py' },
+ { 'name': 'tests/upgrade075.py' },
+ { 'name': 'tests/upgrade076.py' },
+ { 'name': 'tests/upgrade077.py' },
+ { 'name': 'tests/upgrade078.py',
+ 'should_fail': true },
+ { 'name': 'tests/upgrade080.py' },
+ { 'name': 'tests/upgrade081.py' },
+ { 'name': 'tests/upgrade082.py' },
+ { 'name': 'tests/upgrade083.py' },
+ { 'name': 'tests/upgrade084.py' },
+ { 'name': 'tests/upgrade090.py' },
+ { 'name': 'tests/upgrade100.py' },
+ { 'name': 'tests/xfercommand001.py' },
+]
+
+foreach testobj : pacman_tests
+ input = testobj.get('name')
+ test_name = input.split('/')[1]
+ should_fail = testobj.get('should_fail', false)
+
+ test(
+ test_name,
+ PYTHON,
+ args : [
+ join_paths(meson.source_root(), 'build-aux/tap-driver.py'),
+ join_paths(meson.current_source_dir(), 'pactest.py'),
+ '--scriptlet-shell', get_option('scriptlet-shell'),
+ '--bindir', meson.build_root(),
+ '--ldconfig', LDCONFIG,
+ '--verbose',
+ join_paths(meson.current_source_dir(), input)
+ ],
+ depends : [pacman_bin],
+ should_fail : should_fail)
+endforeach
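
For reference, the foreach above expands each dictionary into an ordinary test() call. Taking tests/sync403.py (one of the entries marked should_fail) as the example, the generated registration is roughly the following sketch, reusing the PYTHON, LDCONFIG and pacman_bin objects defined earlier in this patch:

# sketch of what the foreach above produces for a single entry;
# PYTHON, LDCONFIG, pacman_bin and the 'scriptlet-shell' option are
# defined elsewhere in this patch, not here
test(
  'sync403.py',
  PYTHON,
  args : [
    join_paths(meson.source_root(), 'build-aux/tap-driver.py'),
    join_paths(meson.current_source_dir(), 'pactest.py'),
    '--scriptlet-shell', get_option('scriptlet-shell'),
    '--bindir', meson.build_root(),
    '--ldconfig', LDCONFIG,
    '--verbose',
    join_paths(meson.current_source_dir(), 'tests/sync403.py')
  ],
  depends : [pacman_bin],
  should_fail : true)

Because the registered name is just the basename (input.split('/')[1]), an individual case should be selectable from the build directory with, for example, meson test sync403.py.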
diff --git a/test/scripts/meson.build b/test/scripts/meson.build
new file mode 100644
index 00000000..0bdbfb2f
--- /dev/null
+++ b/test/scripts/meson.build
@@ -0,0 +1,15 @@
+tests = [
+ 'parseopts_test.sh',
+ 'makepkg-template_test.sh',
+ 'human_to_size_test.sh',
+]
+
+foreach tst : tests
+ test(tst,
+ PYTHON,
+ env : TEST_ENV,
+ args : [
+ join_paths(meson.source_root(), 'build-aux/tap-driver.py'),
+ join_paths(meson.current_source_dir(), tst),
+ ])
+endforeach
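
These three tests are shell scripts, but they are launched through PYTHON so that build-aux/tap-driver.py can interpret their TAP output for Meson. Purely as a hedged alternative, not what this patch does: Meson 0.50 and newer can parse TAP natively via the protocol keyword, which would allow the scripts to run under BASH without the driver shim:

# alternative sketch only, assuming Meson >= 0.50 with native TAP support;
# the patch itself keeps the tap-driver.py wrapper
foreach tst : tests
  test(tst,
       BASH,
       env : TEST_ENV,
       args : [join_paths(meson.current_source_dir(), tst)],
       protocol : 'tap')
endforeach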
diff --git a/test/util/meson.build b/test/util/meson.build
new file mode 100644
index 00000000..07b29e5c
--- /dev/null
+++ b/test/util/meson.build
@@ -0,0 +1,6 @@
+test('vercmptest',
+ BASH,
+ env : TEST_ENV,
+ args : [
+ join_paths(meson.current_source_dir(), 'vercmptest.sh')
+ ])
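
vercmptest.sh is the one suite in this patch run under BASH directly rather than through the TAP driver. As a hypothetical refinement, not part of this patch: if src/util/meson.build exported its vercmp executable target under a name such as vercmp_bin, the test could declare it as a build dependency, mirroring the depends : [pacman_bin] used for the pactest suite:

# hypothetical sketch: 'vercmp_bin' is an assumed name for the executable
# target from src/util/meson.build; it is not defined by this patch
test('vercmptest',
     BASH,
     env : TEST_ENV,
     args : [
       join_paths(meson.current_source_dir(), 'vercmptest.sh')
     ],
     depends : [vercmp_bin])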