Diffstat (limited to 'gbp')
-rw-r--r--  gbp/command_wrappers.py            |  10
-rw-r--r--  gbp/config.py                      |  50
-rw-r--r--  gbp/deb/dscfile.py                 |   4
-rw-r--r--  gbp/deb/git.py                     |   5
-rw-r--r--  gbp/deb/upstreamsource.py          |  28
-rw-r--r--  gbp/format.py                      |  44
-rw-r--r--  gbp/git/modifier.py                |   8
-rw-r--r--  gbp/git/repository.py              |   2
-rw-r--r--  gbp/git/vfs.py                     |   2
-rw-r--r--  gbp/pkg/__init__.py                | 192
-rw-r--r--  gbp/rpm/__init__.py                | 962
-rw-r--r--  gbp/rpm/git.py                     | 105
-rw-r--r--  gbp/rpm/lib_rpm.py                 |  47
-rw-r--r--  gbp/rpm/linkedlist.py              | 214
-rw-r--r--  gbp/rpm/policy.py                  |  72
-rwxr-xr-x  gbp/scripts/buildpackage.py        |  56
-rwxr-xr-x  gbp/scripts/clone.py               |   2
-rw-r--r--  gbp/scripts/common/import_orig.py  |  41
-rw-r--r--  gbp/scripts/create_remote_repo.py  |   2
-rw-r--r--  gbp/scripts/dch.py                 |  37
-rw-r--r--  gbp/scripts/import_dsc.py          |   6
-rw-r--r--  gbp/scripts/import_dscs.py         |   2
-rw-r--r--  gbp/scripts/import_orig.py         |  14
-rwxr-xr-x  gbp/scripts/import_srpm.py         | 462
-rwxr-xr-x  gbp/scripts/pq.py                  |  92
-rwxr-xr-x  gbp/scripts/pull.py                |   2
-rw-r--r--  gbp/scripts/supercommand.py        |  59
-rw-r--r--  gbp/tmpfile.py                     |  38
28 files changed, 2390 insertions, 168 deletions
diff --git a/gbp/command_wrappers.py b/gbp/command_wrappers.py
index 8a131e1..b8bd21f 100644
--- a/gbp/command_wrappers.py
+++ b/gbp/command_wrappers.py
@@ -40,8 +40,7 @@ class Command(object):
capture_stderr=False):
self.cmd = cmd
self.args = args
- self.run_error = "Couldn't run '%s'" % (" ".join([self.cmd] +
- self.args))
+ self.run_error = "'%s' failed" % (" ".join([self.cmd] + self.args))
self.shell = shell
self.retcode = 1
self.stderr = ''
@@ -91,12 +90,11 @@ class Command(object):
try:
retcode = self.__call(args)
if retcode < 0:
- err_detail = "%s was terminated by signal %d" % (self.cmd,
- -retcode)
+ err_detail = "it was terminated by signal %d" % -retcode
elif retcode > 0:
- err_detail = "%s returned %d" % (self.cmd, retcode)
+ err_detail = "it exited with %d" % retcode
except OSError as err:
- err_detail = "Execution failed: %s" % err
+ err_detail = "execution failed: %s" % err
retcode = 1
if retcode and not quiet:
log.err("%s: %s" % (self.run_error, err_detail))
diff --git a/gbp/config.py b/gbp/config.py
index fc31076..174eba4 100644
--- a/gbp/config.py
+++ b/gbp/config.py
@@ -106,6 +106,7 @@ class GbpOptionParser(OptionParser):
'postimport' : '',
'hooks' : 'True',
'debian-tag' : 'debian/%(version)s',
+ 'debian-tag-msg' : '%(pkg)s Debian release %(version)s',
'upstream-tag' : 'upstream/%(version)s',
'import-msg' : 'Imported Upstream version %(version)s',
'commit-msg' : 'Update changelog for %(version)s release',
@@ -154,6 +155,8 @@ class GbpOptionParser(OptionParser):
'allow-unauthenticated': 'False',
'symlink-orig': 'True',
'purge': 'True',
+ 'drop': 'False',
+ 'commit': 'False',
}
help = {
'debian-branch':
@@ -167,6 +170,9 @@ class GbpOptionParser(OptionParser):
'debian-tag':
("Format string for debian tags, "
"default is '%(debian-tag)s'"),
+ 'debian-tag-msg':
+ ("Format string for signed debian-tag messages, "
+ "default is '%(debian-tag-msg)s'"),
'upstream-tag':
("Format string for upstream tags, "
"default is '%(upstream-tag)s'"),
@@ -196,6 +202,8 @@ class GbpOptionParser(OptionParser):
"Include the full commit message instead of only the first line, default is '%(full)s'",
'meta':
"Parse meta tags in commit messages, default is '%(meta)s'",
+ 'meta-closes':
+ "Meta tags for the bts close commands, default is '%(meta-closes)s'",
'ignore-new':
"Build with uncommited changes in the source tree, default is '%(ignore-new)s'",
'ignore-branch':
@@ -249,10 +257,10 @@ class GbpOptionParser(OptionParser):
("Set up tracking for remote branches, "
"default is '%(track)s'"),
'author-is-committer':
- ("Use the authors's name also as the comitter's name, "
+ ("Use the authors's name also as the committer's name, "
"default is '%(author-is-committer)s'"),
'author-date-is-committer-date':
- ("Use the authors's date as the comitter's date, "
+ ("Use the authors's date as the committer's date, "
"default is '%(author-date-is-committer-date)s'"),
'create-missing-branches':
("Create missing branches automatically, "
@@ -290,6 +298,11 @@ class GbpOptionParser(OptionParser):
"'%(symlink-orig)s'"),
'purge':
"Purge exported package build directory. Default is '%(purge)s'",
+ 'drop':
+ ("In case of 'export' drop the patch-queue branch "
+ "after export. Default is '%(drop)s'"),
+ 'commit':
+ "commit changes after export, Default is '%(commit)s'",
}
def_config_files = [ '/etc/git-buildpackage/gbp.conf',
@@ -516,4 +529,37 @@ class GbpOptionParserDebian(GbpOptionParser):
'cleaner' : '/bin/true',
} )
+
+class GbpOptionParserRpm(GbpOptionParser):
+ """
+ Handles commandline options and parsing of config files for rpm tools
+ """
+ defaults = dict(GbpOptionParser.defaults)
+ defaults.update({
+ 'tmp-dir' : '/var/tmp/gbp/',
+ 'vendor' : 'Downstream',
+ 'packaging-branch' : 'master',
+ 'packaging-dir' : '',
+ 'packaging-tag' : 'packaging/%(version)s',
+ })
+
+ help = dict(GbpOptionParser.help)
+ help.update({
+ 'tmp-dir':
+ "Base directory under which temporary directories are "
+ "created, default is '%(tmp-dir)s'",
+ 'vendor':
+ "Distribution vendor name, default is '%(vendor)s'",
+ 'packaging-branch':
+ "Branch the packaging is being maintained on, rpm counterpart "
+ "of the 'debian-branch' option, default is "
+ "'%(packaging-branch)s'",
+ 'packaging-dir':
+ "Subdir for RPM packaging files, default is "
+ "'%(packaging-dir)s'",
+ 'packaging-tag':
+ "Format string for packaging tags, RPM counterpart of the "
+ "'debian-tag' option, default is '%(packaging-tag)s'",
+ })
+
# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·:
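The new RPM option parser copies the common GbpOptionParser defaults and layers the RPM-specific ones on top, so the effective fallback values can be inspected straight from the class attributes. A small sketch (assuming gbp is importable; the printed values are the defaults defined above):

    from gbp.config import GbpOptionParserRpm

    print(GbpOptionParserRpm.defaults['packaging-tag'])   # packaging/%(version)s
    print(GbpOptionParserRpm.defaults['vendor'])          # Downstream
    # options inherited from GbpOptionParser, e.g. the new 'debian-tag-msg':
    print(GbpOptionParserRpm.defaults['debian-tag-msg'])  # %(pkg)s Debian release %(version)s
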
diff --git a/gbp/deb/dscfile.py b/gbp/deb/dscfile.py
index e2492dc..0671328 100644
--- a/gbp/deb/dscfile.py
+++ b/gbp/deb/dscfile.py
@@ -20,12 +20,12 @@ import os
import re
from gbp.errors import GbpError
-from gbp.pkg import UpstreamSource
+from gbp.deb.upstreamsource import DebianUpstreamSource
from gbp.deb.policy import DebianPkgPolicy
class DscFile(object):
"""Keeps all needed data read from a dscfile"""
- compressions = r"(%s)" % '|'.join(UpstreamSource.known_compressions())
+ compressions = r"(%s)" % '|'.join(DebianUpstreamSource.known_compressions())
pkg_re = re.compile(r'Source:\s+(?P<pkg>.+)\s*')
version_re = re.compile(r'Version:\s((?P<epoch>\d+)\:)?'
'(?P<version>[%s]+)\s*$'
diff --git a/gbp/deb/git.py b/gbp/deb/git.py
index 7a328be..2a848d4 100644
--- a/gbp/deb/git.py
+++ b/gbp/deb/git.py
@@ -1,6 +1,6 @@
# vim: set fileencoding=utf-8 :
#
-# (C) 2011 Guido Günther <agx@sigxcpu.org>
+# (C) 2011,2014 Guido Günther <agx@sigxcpu.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
@@ -19,6 +19,7 @@
import re
from gbp.git import GitRepository, GitRepositoryError
from gbp.deb.pristinetar import DebianPristineTar
+from gbp.format import format_msg
class DebianGitRepository(GitRepository):
"""A git repository that holds the source of a Debian package"""
@@ -104,7 +105,7 @@ class DebianGitRepository(GitRepository):
>>> DebianGitRepository.version_to_tag("debian/%(version)s", "0:0~0")
'debian/0%0_0'
"""
- return format % dict(version=DebianGitRepository._sanitize_version(version))
+ return format_msg(format, dict(version=DebianGitRepository._sanitize_version(version)))
@staticmethod
def _sanitize_version(version):
diff --git a/gbp/deb/upstreamsource.py b/gbp/deb/upstreamsource.py
new file mode 100644
index 0000000..7eb555a
--- /dev/null
+++ b/gbp/deb/upstreamsource.py
@@ -0,0 +1,28 @@
+# vim: set fileencoding=utf-8 :
+#
+# (C) 2013 Intel Corporation <markus.lehtonen@linux.intel.com>
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+"""Debian-specific upstream sources"""
+
+from gbp.pkg import UpstreamSource
+from gbp.deb.policy import DebianPkgPolicy
+
+
+class DebianUpstreamSource(UpstreamSource):
+ """Upstream source class for Debian"""
+ def __init__(self, name, unpacked=None):
+ super(DebianUpstreamSource, self).__init__(name,
+ unpacked,
+ DebianPkgPolicy)
diff --git a/gbp/format.py b/gbp/format.py
new file mode 100644
index 0000000..2a4af15
--- /dev/null
+++ b/gbp/format.py
@@ -0,0 +1,44 @@
+# vim: set fileencoding=utf-8 :
+#
+# (C) 2014 Guido Guenther <agx@sigxcpu.org>
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+"""Format a message"""
+
+from gbp.errors import GbpError
+
+def format_msg(msg, args):
+ """
+ Format a string with the given dict. Be a bit more verbose than
+ default Python about the error cause.
+
+ >>> format_msg("%(foo)", {})
+ Traceback (most recent call last):
+ ...
+ GbpError: Failed to format %(foo): Missing value 'foo' in {}
+ >>> format_msg("%(foo)", {'foo': 'bar'})
+ Traceback (most recent call last):
+ ...
+ GbpError: Failed to format %(foo) with {'foo': 'bar'}: incomplete format
+ >>> format_msg("A %(foo)s is a %(bar)s", {'foo': 'dog', 'bar': 'mamal'})
+ 'A dog is a mamal'
+ """
+ try:
+ return msg % args
+ except ValueError as e:
+ raise GbpError("Failed to format %s with %s: %s" % (msg, args, e))
+ except KeyError as e:
+ raise GbpError("Failed to format %s: Missing value %s in %s" % (msg, e, args))
+
+
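format_msg() wraps Python's %-dict formatting and converts the two usual failure modes (missing key, bad format string) into GbpError; this is what DebianGitRepository.version_to_tag() above now relies on. A minimal usage sketch based on the doctests:

    from gbp.format import format_msg
    from gbp.errors import GbpError

    format_msg("debian/%(version)s", {'version': '1.2.3-1'})   # 'debian/1.2.3-1'
    try:
        format_msg("debian/%(version)s", {})                   # key 'version' missing
    except GbpError as err:
        print(err)  # Failed to format debian/%(version)s: Missing value 'version' in {}
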
diff --git a/gbp/git/modifier.py b/gbp/git/modifier.py
index bc98649..2452e0b 100644
--- a/gbp/git/modifier.py
+++ b/gbp/git/modifier.py
@@ -41,7 +41,7 @@ class GitTz(datetime.tzinfo):
return datetime.timedelta(0)
class GitModifier(object):
- """Stores authorship/comitter information"""
+ """Stores authorship/committer information"""
def __init__(self, name=None, email=None, date=None):
"""
@param name: the modifier's name
@@ -77,10 +77,10 @@ class GitModifier(object):
"datetime object or git raw date" % date)
def _get_env(self, who):
- """Get author or comitter information as env var dictionary"""
+ """Get author or committer information as env var dictionary"""
who = who.upper()
if who not in ['AUTHOR', 'COMMITTER']:
- raise GitModifierError("Neither comitter nor author")
+ raise GitModifierError("Neither committer nor author")
extra_env = {}
if self.name:
@@ -130,7 +130,7 @@ class GitModifier(object):
def get_committer_env(self):
"""
- Get env vars for comitter information
+ Get env vars for committer information
>>> g = GitModifier("foo", "bar")
>>> g.get_committer_env()
diff --git a/gbp/git/repository.py b/gbp/git/repository.py
index 10b9030..23f9482 100644
--- a/gbp/git/repository.py
+++ b/gbp/git/repository.py
@@ -1408,7 +1408,7 @@ class GitRepository(object):
@param parents: parents of this commit
@param author: authorship information
@type author: C{dict} with keys 'name' and 'email' or L{GitModifier}
- @param committer: comitter information
+ @param committer: committer information
@type committer: C{dict} with keys 'name' and 'email'
"""
extra_env = {}
diff --git a/gbp/git/vfs.py b/gbp/git/vfs.py
index 81649eb..5d5e132 100644
--- a/gbp/git/vfs.py
+++ b/gbp/git/vfs.py
@@ -41,7 +41,7 @@ class GitVfs(object):
return self._data.read(size)
def close(self):
- return self.close()
+ return self._data.close()
def __init__(self, repo, committish=None):
"""
diff --git a/gbp/pkg/__init__.py b/gbp/pkg/__init__.py
index e68fc61..34f81cd 100644
--- a/gbp/pkg/__init__.py
+++ b/gbp/pkg/__init__.py
@@ -34,6 +34,66 @@ compressor_opts = { 'gzip' : [ '-n', 'gz' ],
compressor_aliases = { 'bz2' : 'bzip2',
'gz' : 'gzip', }
+# Supported archive formats
+archive_formats = [ 'tar', 'zip' ]
+
+# Map combined file extensions to archive and compression format
+archive_ext_aliases = { 'tgz' : ('tar', 'gzip'),
+ 'tbz2' : ('tar', 'bzip2'),
+ 'tlz' : ('tar', 'lzma'),
+ 'txz' : ('tar', 'xz')}
+
+def parse_archive_filename(filename):
+ """
+ Given a filename return the basename (i.e. filename without the
+ archive and compression extensions), archive format and compression
+ method used.
+
+ @param filename: the name of the file
+ @type filename: string
+ @return: tuple containing basename, archive format and compression method
+ @rtype: C{tuple} of C{str}
+
+ >>> parse_archive_filename("abc.tar.gz")
+ ('abc', 'tar', 'gzip')
+ >>> parse_archive_filename("abc.tar.bz2")
+ ('abc', 'tar', 'bzip2')
+ >>> parse_archive_filename("abc.def.tbz2")
+ ('abc.def', 'tar', 'bzip2')
+ >>> parse_archive_filename("abc.def.tar.xz")
+ ('abc.def', 'tar', 'xz')
+ >>> parse_archive_filename("abc.zip")
+ ('abc', 'zip', None)
+ >>> parse_archive_filename("abc.lzma")
+ ('abc', None, 'lzma')
+ >>> parse_archive_filename("abc.tar.foo")
+ ('abc.tar.foo', None, None)
+ >>> parse_archive_filename("abc")
+ ('abc', None, None)
+ """
+ (base_name, archive_fmt, compression) = (filename, None, None)
+
+ # Split filename to pieces
+ split = filename.split(".")
+ if len(split) > 1:
+ if split[-1] in archive_ext_aliases:
+ base_name = ".".join(split[:-1])
+ (archive_fmt, compression) = archive_ext_aliases[split[-1]]
+ elif split[-1] in archive_formats:
+ base_name = ".".join(split[:-1])
+ (archive_fmt, compression) = (split[-1], None)
+ else:
+ for (c, o) in compressor_opts.iteritems():
+ if o[1] == split[-1]:
+ base_name = ".".join(split[:-1])
+ compression = c
+ if len(split) > 2 and split[-2] in archive_formats:
+ base_name = ".".join(split[:-2])
+ archive_fmt = split[-2]
+
+ return (base_name, archive_fmt, compression)
+
+
class PkgPolicy(object):
"""
Common helpers for packaging policy.
@@ -72,25 +132,63 @@ class PkgPolicy(object):
return True if cls.upstreamversion_re.match(version) else False
@staticmethod
- def get_compression(orig_file):
+ def guess_upstream_src_version(filename, extra_regex=r''):
"""
- Given an orig file return the compression used
-
- >>> PkgPolicy.get_compression("abc.tar.gz")
- 'gzip'
- >>> PkgPolicy.get_compression("abc.tar.bz2")
- 'bzip2'
- >>> PkgPolicy.get_compression("abc.tar.foo")
- >>> PkgPolicy.get_compression("abc")
+ Guess the package name and version from the filename of an upstream
+ archive.
+
+ @param filename: filename (archive or directory) from which to guess
+ @type filename: C{string}
+ @param extra_regex: additional regex to apply, needs a 'package' and a
+ 'version' group
+ @return: (package name, version) or ('', '')
+ @rtype: tuple
+
+ >>> PkgPolicy.guess_upstream_src_version('foo-bar_0.2.orig.tar.gz')
+ ('foo-bar', '0.2')
+ >>> PkgPolicy.guess_upstream_src_version('foo-Bar_0.2.orig.tar.gz')
+ ('', '')
+ >>> PkgPolicy.guess_upstream_src_version('git-bar-0.2.tar.gz')
+ ('git-bar', '0.2')
+ >>> PkgPolicy.guess_upstream_src_version('git-bar-0.2-rc1.tar.gz')
+ ('git-bar', '0.2-rc1')
+ >>> PkgPolicy.guess_upstream_src_version('git-bar-0.2:~-rc1.tar.gz')
+ ('git-bar', '0.2:~-rc1')
+ >>> PkgPolicy.guess_upstream_src_version('git-Bar-0A2d:rc1.tar.bz2')
+ ('git-Bar', '0A2d:rc1')
+ >>> PkgPolicy.guess_upstream_src_version('git-1.tar.bz2')
+ ('git', '1')
+ >>> PkgPolicy.guess_upstream_src_version('kvm_87+dfsg.orig.tar.gz')
+ ('kvm', '87+dfsg')
+ >>> PkgPolicy.guess_upstream_src_version('foo-Bar-a.b.tar.gz')
+ ('', '')
+ >>> PkgPolicy.guess_upstream_src_version('foo-bar_0.2.orig.tar.xz')
+ ('foo-bar', '0.2')
+ >>> PkgPolicy.guess_upstream_src_version('foo-bar_0.2.orig.tar.lzma')
+ ('foo-bar', '0.2')
+ >>> PkgPolicy.guess_upstream_src_version('foo-bar-0.2.zip')
+ ('foo-bar', '0.2')
+ >>> PkgPolicy.guess_upstream_src_version('foo-bar-0.2.tlz')
+ ('foo-bar', '0.2')
"""
- try:
- ext = orig_file.rsplit('.',1)[1]
- except IndexError:
- return None
- for (c, o) in compressor_opts.iteritems():
- if o[1] == ext:
- return c
- return None
+ version_chars = r'[a-zA-Z\d\.\~\-\:\+]'
+ basename = parse_archive_filename(os.path.basename(filename))[0]
+
+ version_filters = map ( lambda x: x % version_chars,
+ ( # Debian upstream tarball: package_'<version>.orig.tar.gz'
+ r'^(?P<package>[a-z\d\.\+\-]+)_(?P<version>%s+)\.orig',
+ # Upstream 'package-<version>.tar.gz'
+ # or Debian native 'package_<version>.tar.gz'
+ # or directory 'package-<version>':
+ r'^(?P<package>[a-zA-Z\d\.\+\-]+)(-)(?P<version>[0-9]%s*)'))
+ if extra_regex:
+ version_filters = extra_regex + version_filters
+
+ for filter in version_filters:
+ m = re.match(filter, basename)
+ if m:
+ return (m.group('package'), m.group('version'))
+ return ('', '')
@staticmethod
def has_orig(orig_file, dir):
@@ -140,8 +238,9 @@ class UpstreamSource(object):
@cvar _unpacked: path to the unpacked source tree
@type _unpacked: string
"""
- def __init__(self, name, unpacked=None):
+ def __init__(self, name, unpacked=None, pkg_policy=PkgPolicy):
self._orig = False
+ self._pkg_policy = pkg_policy
self._path = name
self.unpacked = unpacked
@@ -278,63 +377,12 @@ class UpstreamSource(object):
except gbpc.CommandExecFailed:
# repackArchive already printed an error
raise GbpError
- return UpstreamSource(newarchive)
+ return type(self)(newarchive)
@staticmethod
def known_compressions():
return [ args[1][-1] for args in compressor_opts.items() ]
def guess_version(self, extra_regex=r''):
- """
- Guess the package name and version from the filename of an upstream
- archive.
-
- @param extra_regex: extra regular expression to check
- @type extra_regex: raw C{string}
-
- >>> UpstreamSource('foo-bar_0.2.orig.tar.gz').guess_version()
- ('foo-bar', '0.2')
- >>> UpstreamSource('foo-Bar_0.2.orig.tar.gz').guess_version()
- >>> UpstreamSource('git-bar-0.2.tar.gz').guess_version()
- ('git-bar', '0.2')
- >>> UpstreamSource('git-bar-0.2-rc1.tar.gz').guess_version()
- ('git-bar', '0.2-rc1')
- >>> UpstreamSource('git-bar-0.2:~-rc1.tar.gz').guess_version()
- ('git-bar', '0.2:~-rc1')
- >>> UpstreamSource('git-Bar-0A2d:rc1.tar.bz2').guess_version()
- ('git-Bar', '0A2d:rc1')
- >>> UpstreamSource('git-1.tar.bz2').guess_version()
- ('git', '1')
- >>> UpstreamSource('kvm_87+dfsg.orig.tar.gz').guess_version()
- ('kvm', '87+dfsg')
- >>> UpstreamSource('foo-Bar_0.2.orig.tar.gz').guess_version()
- >>> UpstreamSource('foo-Bar-a.b.tar.gz').guess_version()
- >>> UpstreamSource('foo-bar_0.2.orig.tar.xz').guess_version()
- ('foo-bar', '0.2')
- >>> UpstreamSource('foo-bar_0.2.orig.tar.lzma').guess_version()
- ('foo-bar', '0.2')
-
- @param extra_regex: additional regex to apply, needs a 'package' and a
- 'version' group
- @return: (package name, version) or None.
- @rtype: tuple
- """
- version_chars = r'[a-zA-Z\d\.\~\-\:\+]'
- if self.is_dir():
- extensions = ''
- else:
- extensions = r'\.tar\.(%s)' % "|".join(self.known_compressions())
-
- version_filters = map ( lambda x: x % (version_chars, extensions),
- ( # Debian upstream tarball: package_'<version>.orig.tar.gz'
- r'^(?P<package>[a-z\d\.\+\-]+)_(?P<version>%s+)\.orig%s',
- # Upstream 'package-<version>.tar.gz'
- # or directory 'package-<version>':
- r'^(?P<package>[a-zA-Z\d\.\+\-]+)-(?P<version>[0-9]%s*)%s'))
- if extra_regex:
- version_filters = extra_regex + version_filters
-
- for filter in version_filters:
- m = re.match(filter, os.path.basename(self.path))
- if m:
- return (m.group('package'), m.group('version'))
+ return self._pkg_policy.guess_upstream_src_version(self.path,
+ extra_regex)
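The net effect of this hunk is that filename parsing moves into parse_archive_filename() and version guessing into PkgPolicy.guess_upstream_src_version(), so UpstreamSource.guess_version() simply delegates to whichever policy class it was constructed with. A short sketch based on the doctests above:

    from gbp.pkg import parse_archive_filename, PkgPolicy, UpstreamSource

    parse_archive_filename("foo-bar_0.2.orig.tar.gz")
    # ('foo-bar_0.2.orig', 'tar', 'gzip')
    PkgPolicy.guess_upstream_src_version("foo-bar_0.2.orig.tar.gz")
    # ('foo-bar', '0.2')
    UpstreamSource("foo-bar_0.2.orig.tar.gz").guess_version()
    # same ('foo-bar', '0.2'), via the default PkgPolicy
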
diff --git a/gbp/rpm/__init__.py b/gbp/rpm/__init__.py
new file mode 100644
index 0000000..87f82ff
--- /dev/null
+++ b/gbp/rpm/__init__.py
@@ -0,0 +1,962 @@
+# vim: set fileencoding=utf-8 :
+#
+# (C) 2006,2007 Guido Guenther <agx@sigxcpu.org>
+# (C) 2012 Intel Corporation <markus.lehtonen@linux.intel.com>
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+"""provides some rpm source package related helpers"""
+
+import commands
+import sys
+import os
+import re
+import tempfile
+import glob
+import shutil as shutil
+from optparse import OptionParser
+from collections import defaultdict
+
+import gbp.command_wrappers as gbpc
+from gbp.errors import GbpError
+from gbp.git import GitRepositoryError
+from gbp.patch_series import (PatchSeries, Patch)
+import gbp.log
+from gbp.pkg import (UpstreamSource, compressor_opts, parse_archive_filename)
+from gbp.rpm.policy import RpmPkgPolicy
+from gbp.rpm.linkedlist import LinkedList
+from gbp.rpm.lib_rpm import librpm, get_librpm_log
+
+
+class NoSpecError(Exception):
+ """Spec file parsing error"""
+ pass
+
+class MacroExpandError(Exception):
+ """Macro expansion in spec file failed"""
+ pass
+
+
+class RpmUpstreamSource(UpstreamSource):
+ """Upstream source class for RPM packages"""
+ def __init__(self, name, unpacked=None, **kwargs):
+ super(RpmUpstreamSource, self).__init__(name,
+ unpacked,
+ RpmPkgPolicy,
+ **kwargs)
+
+
+class SrcRpmFile(object):
+ """Keeps all needed data read from a source rpm"""
+ def __init__(self, srpmfile):
+ # Do not require signed packages to be able to import
+ ts_vsflags = (librpm.RPMVSF_NOMD5HEADER | librpm.RPMVSF_NORSAHEADER |
+ librpm.RPMVSF_NOSHA1HEADER | librpm.RPMVSF_NODSAHEADER |
+ librpm.RPMVSF_NOMD5 | librpm.RPMVSF_NORSA |
+ librpm.RPMVSF_NOSHA1 | librpm.RPMVSF_NODSA)
+ srpmfp = open(srpmfile)
+ self.rpmhdr = librpm.ts(vsflags=ts_vsflags).hdrFromFdno(srpmfp.fileno())
+ srpmfp.close()
+ self.srpmfile = os.path.abspath(srpmfile)
+
+ @property
+ def version(self):
+ """Get the (downstream) version of the RPM package"""
+ version = dict(upstreamversion = self.rpmhdr[librpm.RPMTAG_VERSION],
+ release = self.rpmhdr[librpm.RPMTAG_RELEASE])
+ if self.rpmhdr[librpm.RPMTAG_EPOCH] is not None:
+ version['epoch'] = str(self.rpmhdr[librpm.RPMTAG_EPOCH])
+ return version
+
+ @property
+ def name(self):
+ """Get the name of the RPM package"""
+ return self.rpmhdr[librpm.RPMTAG_NAME]
+
+ @property
+ def upstreamversion(self):
+ """Get the upstream version of the RPM package"""
+ return self.rpmhdr[librpm.RPMTAG_VERSION]
+
+ @property
+ def packager(self):
+ """Get the packager of the RPM package"""
+ return self.rpmhdr[librpm.RPMTAG_PACKAGER]
+
+ def unpack(self, dest_dir):
+ """
+ Unpack the source rpm to tmpdir.
+ Leave the cleanup to the caller in case of an error.
+ """
+ gbpc.RunAtCommand('rpm2cpio',
+ [self.srpmfile, '|', 'cpio', '-id'],
+ shell=True)(dir=dest_dir)
+
+
+class SpecFile(object):
+ """Class for parsing/modifying spec files"""
+ tag_re = re.compile(r'^(?P<name>[a-z]+)(?P<num>[0-9]+)?\s*:\s*'
+ '(?P<value>\S(.*\S)?)\s*$', flags=re.I)
+ directive_re = re.compile(r'^%(?P<name>[a-z]+)(?P<num>[0-9]+)?'
+ '(\s+(?P<args>.*))?$', flags=re.I)
+ gbptag_re = re.compile(r'^\s*#\s*gbp-(?P<name>[a-z-]+)'
+ '(\s*:\s*(?P<args>\S.*))?$', flags=re.I)
+ # Here "sections" stand for all scripts, scriptlets and other directives,
+ # but not macros
+ section_identifiers = ('package', 'description', 'prep', 'build', 'install',
+ 'clean', 'check', 'pre', 'preun', 'post', 'postun', 'verifyscript',
+ 'files', 'changelog', 'triggerin', 'triggerpostin', 'triggerun',
+ 'triggerpostun')
+
+ def __init__(self, filename=None, filedata=None):
+
+ self._content = LinkedList()
+
+ # Check args: only filename or filedata can be given, not both
+ if filename is None and filedata is None:
+ raise NoSpecError("No filename or raw data given for parsing!")
+ elif filename and filedata:
+ raise NoSpecError("Both filename and raw data given, don't know "
+ "which one to parse!")
+ elif filename:
+ # Load spec file into our special data structure
+ self.specfile = os.path.basename(filename)
+ self.specdir = os.path.dirname(os.path.abspath(filename))
+ try:
+ with open(filename) as spec_file:
+ for line in spec_file.readlines():
+ self._content.append(line)
+ except IOError as err:
+ raise NoSpecError("Unable to read spec file: %s" % err)
+ else:
+ self.specfile = None
+ self.specdir = None
+ for line in filedata.splitlines():
+ self._content.append(line + '\n')
+
+ # Use rpm-python to parse the spec file content
+ self._filtertags = ("excludearch", "excludeos", "exclusivearch",
+ "exclusiveos","buildarch")
+ self._listtags = self._filtertags + ('source', 'patch',
+ 'requires', 'conflicts', 'recommends',
+ 'suggests', 'supplements', 'enhances',
+ 'provides', 'obsoletes', 'buildrequires',
+ 'buildconflicts', 'buildrecommends',
+ 'buildsuggests', 'buildsupplements',
+ 'buildenhances', 'collections',
+ 'nosource', 'nopatch')
+ self._specinfo = self._parse_filtered_spec(self._filtertags)
+
+ # Other initializations
+ source_header = self._specinfo.packages[0].header
+ self.name = source_header[librpm.RPMTAG_NAME]
+ self.upstreamversion = source_header[librpm.RPMTAG_VERSION]
+ self.release = source_header[librpm.RPMTAG_RELEASE]
+ # rpm-python returns epoch as 'long', convert that to string
+ self.epoch = str(source_header[librpm.RPMTAG_EPOCH]) \
+ if source_header[librpm.RPMTAG_EPOCH] != None else None
+ self.packager = source_header[librpm.RPMTAG_PACKAGER]
+ self._tags = {}
+ self._special_directives = defaultdict(list)
+ self._gbp_tags = defaultdict(list)
+
+ # Parse extra info from spec file
+ self._parse_content()
+
+ # Find 'Packager' tag. Needed to circumvent a bug in python-rpm where
+ # spec.sourceHeader[librpm.RPMTAG_PACKAGER] is not reset when a new spec
+ # file is parsed
+ if 'packager' not in self._tags:
+ self.packager = None
+
+ self.orig_src = self._guess_orig_file()
+
+ def _parse_filtered_spec(self, skip_tags):
+ """Parse a filtered spec file in rpm-python"""
+ skip_tags = [tag.lower() for tag in skip_tags]
+ with tempfile.NamedTemporaryFile(prefix='gbp') as filtered:
+ filtered.writelines(str(line) for line in self._content
+ if str(line).split(":")[0].strip().lower() not in skip_tags)
+ filtered.flush()
+ try:
+ # Parse two times to circumvent a rpm-python problem where
+ # macros are not expanded if used before their definition
+ librpm.spec(filtered.name)
+ return librpm.spec(filtered.name)
+ except ValueError as err:
+ rpmlog = get_librpm_log()
+ gbp.log.debug("librpm log:\n %s" %
+ "\n ".join(rpmlog))
+ raise GbpError("RPM error while parsing %s: %s (%s)" %
+ (self.specfile, err, rpmlog[-1]))
+
+ @property
+ def version(self):
+ """Get the (downstream) version"""
+ version = dict(upstreamversion = self.upstreamversion,
+ release = self.release)
+ if self.epoch != None:
+ version['epoch'] = self.epoch
+ return version
+
+ @property
+ def specpath(self):
+ """Get the dir/filename"""
+ return os.path.join(self.specdir, self.specfile)
+
+ @property
+ def ignorepatches(self):
+ """Get numbers of ignored patches as a sorted list"""
+ if 'ignore-patches' in self._gbp_tags:
+ data = self._gbp_tags['ignore-patches'][-1]['args'].split()
+ return sorted([int(num) for num in data])
+ return []
+
+ def _patches(self):
+ """Get all patch tags as a dict"""
+ if 'patch' not in self._tags:
+ return {}
+ return {patch['num']: patch for patch in self._tags['patch']['lines']}
+
+ def _sources(self):
+ """Get all source tags as a dict"""
+ if 'source' not in self._tags:
+ return {}
+ return {src['num']: src for src in self._tags['source']['lines']}
+
+ def sources(self):
+ """Get all source tags as a dict"""
+ return {src['num']: src['linevalue']
+ for src in self._sources().values()}
+
+ def _macro_replace(self, matchobj):
+ macro_dict = {'name': self.name,
+ 'version': self.upstreamversion,
+ 'release': self.release}
+
+ if matchobj.group(2) in macro_dict:
+ return macro_dict[matchobj.group(2)]
+ raise MacroExpandError("Unknown macro '%s'" % matchobj.group(0))
+
+ def macro_expand(self, text):
+ """
+ Expand the rpm macros (that gbp knows of) in the given text.
+
+ @param text: text to check for macros
+ @type text: C{str}
+ @return: text with macros expanded
+ @rtype: C{str}
+ """
+ # regexp to match '%{macro}' and '%macro'
+ macro_re = re.compile(r'%({)?(?P<macro_name>[a-z_][a-z0-9_]*)(?(1)})', flags=re.I)
+ return macro_re.sub(self._macro_replace, text)
+
+ def write_spec_file(self):
+ """
+ Write, possibly updated, spec to disk
+ """
+ with open(os.path.join(self.specdir, self.specfile), 'w') as spec_file:
+ for line in self._content:
+ spec_file.write(str(line))
+
+ def _parse_tag(self, lineobj):
+ """Parse tag line"""
+
+ line = str(lineobj)
+
+ matchobj = self.tag_re.match(line)
+ if not matchobj:
+ return False
+
+ tagname = matchobj.group('name').lower()
+ tagnum = int(matchobj.group('num')) if matchobj.group('num') else None
+ # 'Source:' tags
+ if tagname == 'source':
+ tagnum = 0 if tagnum is None else tagnum
+ # 'Patch:' tags
+ elif tagname == 'patch':
+ tagnum = -1 if tagnum is None else tagnum
+
+ # Record all tag locations
+ try:
+ header = self._specinfo.packages[0].header
+ tagvalue = header[getattr(librpm, 'RPMTAG_%s' % tagname.upper())]
+ except AttributeError:
+ tagvalue = None
+ # We don't support "multivalue" tags like "Provides:" or "SourceX:"
+ # Rpm python doesn't support many of these, thus the explicit list
+ if type(tagvalue) is int or type(tagvalue) is long:
+ tagvalue = str(tagvalue)
+ elif type(tagvalue) is list or tagname in self._listtags:
+ tagvalue = None
+ elif not tagvalue:
+ # Rpm python doesn't give the following, for one reason or another
+ if tagname not in ('buildroot', 'autoprov', 'autoreq',
+ 'autoreqprov') + self._filtertags:
+ gbp.log.warn("BUG: '%s:' tag not found by rpm" % tagname)
+ tagvalue = matchobj.group('value')
+ linerecord = {'line': lineobj,
+ 'num': tagnum,
+ 'linevalue': matchobj.group('value')}
+ if tagname in self._tags:
+ self._tags[tagname]['value'] = tagvalue
+ self._tags[tagname]['lines'].append(linerecord)
+ else:
+ self._tags[tagname] = {'value': tagvalue, 'lines': [linerecord]}
+
+ return tagname
+
+ @staticmethod
+ def _patch_macro_opts(args):
+ """Parse arguments of the '%patch' macro"""
+
+ patchparser = OptionParser()
+ patchparser.add_option("-p", dest="strip")
+ patchparser.add_option("-s", dest="silence")
+ patchparser.add_option("-P", dest="patchnum")
+ patchparser.add_option("-b", dest="backup")
+ patchparser.add_option("-E", dest="removeempty")
+ arglist = args.split()
+ return patchparser.parse_args(arglist)[0]
+
+ @staticmethod
+ def _setup_macro_opts(args):
+ """Parse arguments of the '%setup' macro"""
+
+ setupparser = OptionParser()
+ setupparser.add_option("-n", dest="name")
+ setupparser.add_option("-c", dest="create_dir", action="store_true")
+ setupparser.add_option("-D", dest="no_delete_dir", action="store_true")
+ setupparser.add_option("-T", dest="no_unpack_default",
+ action="store_true")
+ setupparser.add_option("-b", dest="unpack_before")
+ setupparser.add_option("-a", dest="unpack_after")
+ setupparser.add_option("-q", dest="quiet", action="store_true")
+ arglist = args.split()
+ return setupparser.parse_args(arglist)[0]
+
+ def _parse_directive(self, lineobj):
+ """Parse special directive/scriptlet/macro lines"""
+
+ line = str(lineobj)
+ matchobj = self.directive_re.match(line)
+ if not matchobj:
+ return None
+
+ directivename = matchobj.group('name')
+ # '%patch' macros
+ directiveid = None
+ if directivename == 'patch':
+ opts = self._patch_macro_opts(matchobj.group('args'))
+ if matchobj.group('num'):
+ directiveid = int(matchobj.group('num'))
+ elif opts.patchnum:
+ directiveid = int(opts.patchnum)
+ else:
+ directiveid = -1
+
+ # Record special directive/scriptlet/macro locations
+ if directivename in self.section_identifiers + ('setup', 'patch'):
+ linerecord = {'line': lineobj,
+ 'id': directiveid,
+ 'args': matchobj.group('args')}
+ self._special_directives[directivename].append(linerecord)
+ return directivename
+
+ def _parse_gbp_tag(self, linenum, lineobj):
+ """Parse special git-buildpackage tags"""
+
+ line = str(lineobj)
+ matchobj = self.gbptag_re.match(line)
+ if matchobj:
+ gbptagname = matchobj.group('name').lower()
+ if gbptagname not in ('ignore-patches', 'patch-macros'):
+ gbp.log.info("Found unrecognized Gbp tag on line %s: '%s'" %
+ (linenum, line))
+ if matchobj.group('args'):
+ args = matchobj.group('args').strip()
+ else:
+ args = None
+ record = {'line': lineobj, 'args': args}
+ self._gbp_tags[gbptagname].append(record)
+ return gbptagname
+
+ return None
+
+ def _parse_content(self):
+ """
+ Go through spec file content line-by-line and (re-)parse info from it
+ """
+ in_preamble = True
+ for linenum, lineobj in enumerate(self._content):
+ matched = False
+ if in_preamble:
+ if self._parse_tag(lineobj):
+ continue
+ matched = self._parse_directive(lineobj)
+ if matched:
+ if matched in self.section_identifiers:
+ in_preamble = False
+ continue
+ self._parse_gbp_tag(linenum, lineobj)
+
+ # Update sources info (basically possible macros expanded by rpm)
+ # And, double-check that we parsed spec content correctly
+ patches = self._patches()
+ sources = self._sources()
+ for name, num, typ in self._specinfo.sources:
+ # workaround rpm parsing bug
+ if typ == 1 or typ == 9:
+ if num in sources:
+ sources[num]['linevalue'] = name
+ else:
+ gbp.log.err("BUG: failed to parse all 'Source' tags!")
+ elif typ == 2 or typ == 10:
+ # Patch tag without any number defined is treated by RPM as
+ # having number (2^31-1), we use number -1
+ if num >= pow(2,30):
+ num = -1
+ if num in patches:
+ patches[num]['linevalue'] = name
+ else:
+ gbp.log.err("BUG: failed to parse all 'Patch' tags!")
+
+ def _delete_tag(self, tag, num):
+ """Delete a tag"""
+ key = tag.lower()
+ tagname = '%s%s' % (tag, num) if num is not None else tag
+ if key not in self._tags:
+ gbp.log.warn("Trying to delete non-existent tag '%s:'" % tag)
+ return None
+
+ sparedlines = []
+ prev = None
+ for line in self._tags[key]['lines']:
+ if line['num'] == num:
+ gbp.log.debug("Removing '%s:' tag from spec" % tagname)
+ prev = self._content.delete(line['line'])
+ else:
+ sparedlines.append(line)
+ self._tags[key]['lines'] = sparedlines
+ if not self._tags[key]['lines']:
+ self._tags.pop(key)
+ return prev
+
+ def _set_tag(self, tag, num, value, insertafter):
+ """Set a tag value"""
+ key = tag.lower()
+ tagname = '%s%s' % (tag, num) if num is not None else tag
+ value = value.strip()
+ if not value:
+ raise GbpError("Cannot set empty value to '%s:' tag" % tag)
+
+ # Check type of tag, we don't support values for 'multivalue' tags
+ try:
+ header = self._specinfo.packages[0].header
+ tagvalue = header[getattr(librpm, 'RPMTAG_%s' % tagname.upper())]
+ except AttributeError:
+ tagvalue = None
+ tagvalue = None if type(tagvalue) is list else value
+
+ # Try to guess the correct indentation from the previous or next tag
+ indent_re = re.compile(r'^([a-z]+([0-9]+)?\s*:\s*)', flags=re.I)
+ match = indent_re.match(str(insertafter))
+ if not match:
+ match = indent_re.match(str(insertafter.next))
+ indent = 12 if not match else len(match.group(1))
+ text = '%-*s%s\n' % (indent, '%s:' % tagname, value)
+ if key in self._tags:
+ self._tags[key]['value'] = tagvalue
+ for line in reversed(self._tags[key]['lines']):
+ if line['num'] == num:
+ gbp.log.debug("Updating '%s:' tag in spec" % tagname)
+ line['line'].set_data(text)
+ line['linevalue'] = value
+ return line['line']
+
+ gbp.log.debug("Adding '%s:' tag after '%s...' line in spec" %
+ (tagname, str(insertafter)[0:20]))
+ line = self._content.insert_after(insertafter, text)
+ linerec = {'line': line, 'num': num, 'linevalue': value}
+ if key in self._tags:
+ self._tags[key]['lines'].append(linerec)
+ else:
+ self._tags[key] = {'value': tagvalue, 'lines': [linerec]}
+ return line
+
+ def set_tag(self, tag, num, value, insertafter=None):
+ """Update a tag in spec file content"""
+ key = tag.lower()
+ tagname = '%s%s' % (tag, num) if num is not None else tag
+ if key in ('patch', 'vcs'):
+ if key in self._tags:
+ insertafter = key
+ elif not insertafter in self._tags:
+ insertafter = 'name'
+ after_line = self._tags[insertafter]['lines'][-1]['line']
+ if value:
+ self._set_tag(tag, num, value, after_line)
+ elif key in self._tags:
+ self._delete_tag(tag, num)
+ else:
+ raise GbpError("Setting '%s:' tag not supported" % tagname)
+
+ def _delete_special_macro(self, name, identifier):
+ """Delete a special macro line in spec file content"""
+ if name != 'patch':
+ raise GbpError("Deleting '%s:' macro not supported" % name)
+
+ key = name.lower()
+ fullname = '%%%s%s' % (name, identifier)
+ sparedlines = []
+ prev = None
+ for line in self._special_directives[key]:
+ if line['id'] == identifier:
+ gbp.log.debug("Removing '%s' macro from spec" % fullname)
+ prev = self._content.delete(line['line'])
+ else:
+ sparedlines.append(line)
+ self._special_directives[key] = sparedlines
+ if not prev:
+ gbp.log.warn("Tried to delete non-existent macro '%s'" % fullname)
+ return prev
+
+ def _set_special_macro(self, name, identifier, args, insertafter):
+ """Update a special macro line in spec file content"""
+ key = name.lower()
+ fullname = '%%%s%s' % (name, identifier)
+ if key != 'patch':
+ raise GbpError("Setting '%s' macro not supported" % name)
+
+ updated = 0
+ text = "%%%s%d %s\n" % (name, identifier, args)
+ for line in self._special_directives[key]:
+ if line['id'] == identifier:
+ gbp.log.debug("Updating '%s' macro in spec" % fullname)
+ line['args'] = args
+ line['line'].set_data(text)
+ ret = line['line']
+ updated += 1
+ if not updated:
+ gbp.log.debug("Adding '%s' macro after '%s...' line in spec" %
+ (fullname, str(insertafter)[0:20]))
+ ret = self._content.insert_after(insertafter, text)
+ linerec = {'line': ret, 'id': identifier, 'args': args}
+ self._special_directives[key].append(linerec)
+ return ret
+
+ def _set_section(self, name, text):
+ """Update/create a complete section in spec file."""
+ if name not in self.section_identifiers:
+ raise GbpError("Not a valid section directive: '%s'" % name)
+ # Delete section, if it exists
+ if name in self._special_directives:
+ if len(self._special_directives[name]) > 1:
+ raise GbpError("Multiple %%%s sections found, don't know "
+ "which to update" % name)
+ line = self._special_directives[name][0]['line']
+ gbp.log.debug("Removing content of %s section" % name)
+ while line.next:
+ match = self.directive_re.match(str(line.next))
+ if match and match.group('name') in self.section_identifiers:
+ break
+ self._content.delete(line.next)
+ else:
+ gbp.log.debug("Adding %s section to the end of spec file" % name)
+ line = self._content.append('%%%s\n' % name)
+ linerec = {'line': line, 'id': None, 'args': None}
+ self._special_directives[name] = [linerec]
+ # Add new lines
+ gbp.log.debug("Updating content of %s section" % name)
+ for linetext in text.splitlines():
+ line = self._content.insert_after(line, linetext + '\n')
+
+ def set_changelog(self, text):
+ """Update or create the %changelog section"""
+ self._set_section('changelog', text)
+
+ def get_changelog(self):
+ """Get the %changelog section"""
+ text = ''
+ if 'changelog' in self._special_directives:
+ line = self._special_directives['changelog'][0]['line']
+ while line.next:
+ line = line.next
+ match = self.directive_re.match(str(line))
+ if match and match.group('name') in self.section_identifiers:
+ break
+ text += str(line)
+ return text
+
+ def update_patches(self, patches, commands):
+ """Update spec with new patch tags and patch macros"""
+ # Remove non-ignored patches
+ tag_prev = None
+ macro_prev = None
+ ignored = self.ignorepatches
+ # Remove 'Patch:' tags
+ for tag in self._patches().values():
+ if not tag['num'] in ignored:
+ tag_prev = self._delete_tag('patch', tag['num'])
+ # Remove a preceding comment if it seems to originate from GBP
+ if re.match("^\s*#.*patch.*auto-generated",
+ str(tag_prev), flags=re.I):
+ tag_prev = self._content.delete(tag_prev)
+
+ # Remove '%patch:' macros
+ for macro in self._special_directives['patch']:
+ if not macro['id'] in ignored:
+ macro_prev = self._delete_special_macro('patch', macro['id'])
+ # Remove surrounding if-else
+ macro_next = macro_prev.next
+ if (str(macro_prev).startswith('%if') and
+ str(macro_next).startswith('%endif')):
+ self._content.delete(macro_next)
+ macro_prev = self._content.delete(macro_prev)
+
+ # Remove a preceding comment line if it ends with '.patch' or
+ # '.diff' plus an optional compression suffix
+ if re.match("^\s*#.+(patch|diff)(\.(gz|bz2|xz|lzma))?\s*$",
+ str(macro_prev), flags=re.I):
+ macro_prev = self._content.delete(macro_prev)
+
+ if len(patches) == 0:
+ return
+
+ # Determine where to add Patch tag lines
+ if tag_prev:
+ gbp.log.debug("Adding 'Patch' tags in place of the removed tags")
+ tag_line = tag_prev
+ elif 'patch' in self._tags:
+ gbp.log.debug("Adding new 'Patch' tags after the last 'Patch' tag")
+ tag_line = self._tags['patch']['lines'][-1]['line']
+ elif 'source' in self._tags:
+ gbp.log.debug("Didn't find any old 'Patch' tags, adding new "
+ "patches after the last 'Source' tag.")
+ tag_line = self._tags['source']['lines'][-1]['line']
+ else:
+ gbp.log.debug("Didn't find any old 'Patch' or 'Source' tags, "
+ "adding new patches after the last 'Name' tag.")
+ tag_line = self._tags['name']['lines'][-1]['line']
+
+ # Determine where to add %patch macro lines
+ if 'patch-macros' in self._gbp_tags:
+ gbp.log.debug("Adding '%patch' macros after the start marker")
+ macro_line = self._gbp_tags['patch-macros'][-1]['line']
+ elif macro_prev:
+ gbp.log.debug("Adding '%patch' macros in place of the removed "
+ "macros")
+ macro_line = macro_prev
+ elif self._special_directives['patch']:
+ gbp.log.debug("Adding new '%patch' macros after the last existing"
+ "'%patch' macro")
+ macro_line = self._special_directives['patch'][-1]['line']
+ elif self._special_directives['setup']:
+ gbp.log.debug("Didn't find any old '%patch' macros, adding new "
+ "patches after the last '%setup' macro")
+ macro_line = self._special_directives['setup'][-1]['line']
+ elif self._special_directives['prep']:
+ gbp.log.warn("Didn't find any old '%patch' or '%setup' macros, "
+ "adding new patches directly after '%prep' directive")
+ macro_line = self._special_directives['prep'][-1]['line']
+ else:
+ raise GbpError("Couldn't determine where to add '%patch' macros")
+
+ startnum = sorted(ignored)[-1] + 1 if ignored else 0
+ gbp.log.debug("Starting autoupdate patch numbering from %s" % startnum)
+ # Add a comment indicating gbp generated patch tags
+ comment_text = "# Patches auto-generated by git-buildpackage:\n"
+ tag_line = self._content.insert_after(tag_line, comment_text)
+ for ind, patch in enumerate(patches):
+ cmds = commands[patch] if patch in commands else {}
+ patchnum = startnum + ind
+ tag_line = self._set_tag("Patch", patchnum, patch, tag_line)
+ # Add '%patch' macro and a preceding comment line
+ comment_text = "# %s\n" % patch
+ macro_line = self._content.insert_after(macro_line, comment_text)
+ macro_line = self._set_special_macro('patch', patchnum, '-p1',
+ macro_line)
+ for cmd, args in cmds.iteritems():
+ if cmd in ('if', 'ifarch'):
+ self._content.insert_before(macro_line, '%%%s %s\n' %
+ (cmd, args))
+ macro_line = self._content.insert_after(macro_line,
+ '%endif\n')
+ # We only support one command per patch, for now
+ break
+
+ def patchseries(self, unapplied=False, ignored=False):
+ """Return non-ignored patches of the RPM as a gbp patchseries"""
+ series = PatchSeries()
+ if 'patch' in self._tags:
+ tags = self._patches()
+ applied = []
+ for macro in self._special_directives['patch']:
+ if macro['id'] in tags:
+ applied.append((macro['id'], macro['args']))
+ ignored = set() if ignored else set(self.ignorepatches)
+
+ # Put all patches that are applied first in the series
+ for num, args in applied:
+ if num not in ignored:
+ opts = self._patch_macro_opts(args)
+ strip = int(opts.strip) if opts.strip else 0
+ filename = os.path.basename(tags[num]['linevalue'])
+ series.append(Patch(os.path.join(self.specdir, filename),
+ strip=strip))
+ # Finally, append all unapplied patches to the series, if requested
+ if unapplied:
+ applied_nums = set([num for num, _args in applied])
+ unapplied = set(tags.keys()).difference(applied_nums)
+ for num in sorted(unapplied):
+ if num not in ignored:
+ filename = os.path.basename(tags[num]['linevalue'])
+ series.append(Patch(os.path.join(self.specdir,
+ filename), strip=0))
+ return series
+
+ def _guess_orig_prefix(self, orig):
+ """Guess prefix for the orig file"""
+ # Make initial guess about the prefix in the archive
+ filename = orig['filename']
+ name, version = RpmPkgPolicy.guess_upstream_src_version(filename)
+ if name and version:
+ prefix = "%s-%s/" % (name, version)
+ else:
+ prefix = orig['filename_base'] + "/"
+
+ # Refine our guess about the prefix
+ for macro in self._special_directives['setup']:
+ args = macro['args']
+ opts = self._setup_macro_opts(args)
+ srcnum = None
+ if opts.no_unpack_default:
+ if opts.unpack_before:
+ srcnum = int(opts.unpack_before)
+ elif opts.unpack_after:
+ srcnum = int(opts.unpack_after)
+ else:
+ srcnum = 0
+ if srcnum == orig['num']:
+ if opts.create_dir:
+ prefix = ''
+ elif opts.name:
+ try:
+ prefix = self.macro_expand(opts.name) + '/'
+ except MacroExpandError as err:
+ gbp.log.warn("Couldn't determine prefix from %%setup "\
+ "macro (%s). Using filename base as a " \
+ "fallback" % err)
+ prefix = orig['filename_base'] + '/'
+ else:
+ # RPM default
+ prefix = "%s-%s/" % (self.name, self.upstreamversion)
+ break
+ return prefix
+
+ def _guess_orig_file(self):
+ """
+ Try to guess the name of the primary upstream/source archive.
+ Returns a dict with all the relevant information.
+ """
+ orig = None
+ sources = self.sources()
+ for num, filename in sorted(sources.iteritems()):
+ src = {'num': num, 'filename': os.path.basename(filename),
+ 'uri': filename}
+ src['filename_base'], src['archive_fmt'], src['compression'] = \
+ parse_archive_filename(os.path.basename(filename))
+ if (src['filename_base'].startswith(self.name) and
+ src['archive_fmt']):
+ # Take the first archive that starts with pkg name
+ orig = src
+ break
+ # otherwise we take the first archive
+ elif not orig and src['archive_fmt']:
+ orig = src
+ # else don't accept
+ if orig:
+ orig['prefix'] = self._guess_orig_prefix(orig)
+
+ return orig
+
+
+def parse_srpm(srpmfile):
+ """parse srpm by creating a SrcRpmFile object"""
+ try:
+ srcrpm = SrcRpmFile(srpmfile)
+ except IOError as err:
+ raise GbpError("Error reading src.rpm file: %s" % err)
+ except librpm.error as err:
+ raise GbpError("RPM error while reading src.rpm: %s" % err)
+
+ return srcrpm
+
+
+def guess_spec_fn(file_list, preferred_name=None):
+ """Guess spec file from a list of filenames"""
+ specs = []
+ for filepath in file_list:
+ filename = os.path.basename(filepath)
+ # Stop at the first file matching the preferred name
+ if filename == preferred_name:
+ gbp.log.debug("Found a preferred spec file %s" % filepath)
+ specs = [filepath]
+ break
+ if filename.endswith(".spec"):
+ gbp.log.debug("Found spec file %s" % filepath)
+ specs.append(filepath)
+ if len(specs) == 0:
+ raise NoSpecError("No spec file found.")
+ elif len(specs) > 1:
+ raise NoSpecError("Multiple spec files found (%s), don't know which "
+ "to use." % ', '.join(specs))
+ return specs[0]
+
+
+def guess_spec(topdir, recursive=True, preferred_name=None):
+ """Guess a spec file"""
+ file_list = []
+ if not topdir:
+ topdir = '.'
+ for root, dirs, files in os.walk(topdir):
+ file_list.extend([os.path.join(root, fname) for fname in files])
+ if not recursive:
+ del dirs[:]
+ # Skip .git dir in any case
+ if '.git' in dirs:
+ dirs.remove('.git')
+ return SpecFile(os.path.abspath(guess_spec_fn(file_list, preferred_name)))
+
+
+def guess_spec_repo(repo, treeish, topdir='', recursive=True, preferred_name=None):
+ """
+ Try to find/parse the spec file from a given git treeish.
+ """
+ topdir = topdir.rstrip('/') + ('/') if topdir else ''
+ try:
+ file_list = [nam for (mod, typ, sha, nam) in
+ repo.list_tree(treeish, recursive, topdir) if typ == 'blob']
+ except GitRepositoryError as err:
+ raise NoSpecError("Cannot find spec file from treeish %s, Git error: %s"
+ % (treeish, err))
+ spec_path = guess_spec_fn(file_list, preferred_name)
+ return spec_from_repo(repo, treeish, spec_path)
+
+
+def spec_from_repo(repo, treeish, spec_path):
+ """Get and parse a spec file from a give Git treeish"""
+ try:
+ spec = SpecFile(filedata=repo.show('%s:%s' % (treeish, spec_path)))
+ spec.specdir = os.path.dirname(spec_path)
+ spec.specfile = os.path.basename(spec_path)
+ return spec
+ except GitRepositoryError as err:
+ raise NoSpecError("Git error: %s" % err)
+
+
+def string_to_int(val_str):
+ """
+ Convert string of possible unit identifier to int.
+
+ @param val_str: value to be converted
+ @type val_str: C{str}
+ @return: value as integer
+ @rtype: C{int}
+
+ >>> string_to_int("1234")
+ 1234
+ >>> string_to_int("123k")
+ 125952
+ >>> string_to_int("1234K")
+ 1263616
+ >>> string_to_int("1M")
+ 1048576
+ """
+ units = {'k': 1024,
+ 'm': 1024**2,
+ 'g': 1024**3,
+ 't': 1024**4}
+
+ if val_str[-1].lower() in units:
+ return int(val_str[:-1]) * units[val_str[-1].lower()]
+ else:
+ return int(val_str)
+
+
+def split_version_str(version):
+ """
+ Parse full version string and split it into individual "version
+ components", i.e. upstreamversion, epoch and release
+
+ @param version: full version of a package
+ @type version: C{str}
+ @return: individual version components
+ @rtype: C{dict}
+
+ >>> split_version_str("1")
+ {'release': None, 'epoch': None, 'upstreamversion': '1'}
+ >>> split_version_str("1.2.3-5.3")
+ {'release': '5.3', 'epoch': None, 'upstreamversion': '1.2.3'}
+ >>> split_version_str("3:1.2.3")
+ {'release': None, 'epoch': '3', 'upstreamversion': '1.2.3'}
+ >>> split_version_str("3:1-0")
+ {'release': '0', 'epoch': '3', 'upstreamversion': '1'}
+ """
+ ret = {'epoch': None, 'upstreamversion': None, 'release': None}
+
+ e_vr = version.split(":", 1)
+ if len(e_vr) == 1:
+ v_r = e_vr[0].split("-", 1)
+ else:
+ ret['epoch'] = e_vr[0]
+ v_r = e_vr[1].split("-", 1)
+ ret['upstreamversion'] = v_r[0]
+ if len(v_r) > 1:
+ ret['release'] = v_r[1]
+
+ return ret
+
+def compose_version_str(evr):
+ """
+ Compose a full version string from individual "version components",
+ i.e. epoch, version and release
+
+ @param evr: dict of version components
+ @type evr: C{dict} of C{str}
+ @return: full version
+ @rtype: C{str}
+
+ >>> compose_version_str({'epoch': '', 'upstreamversion': '1.0'})
+ '1.0'
+ >>> compose_version_str({'epoch': '2', 'upstreamversion': '1.0', 'release': None})
+ '2:1.0'
+ >>> compose_version_str({'epoch': None, 'upstreamversion': '1', 'release': '0'})
+ '1-0'
+ >>> compose_version_str({'epoch': '2', 'upstreamversion': '1.0', 'release': '2.3'})
+ '2:1.0-2.3'
+ >>> compose_version_str({'epoch': '2', 'upstreamversion': '', 'release': '2.3'})
+ """
+ if 'upstreamversion' in evr and evr['upstreamversion']:
+ version = ""
+ if 'epoch' in evr and evr['epoch']:
+ version += "%s:" % evr['epoch']
+ version += evr['upstreamversion']
+ if 'release' in evr and evr['release']:
+ version += "-%s" % evr['release']
+ if version:
+ return version
+ return None
+
+
+# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·:
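A rough sketch of how the new SpecFile class and the version-string helpers are meant to be used ('foo.spec' is a hypothetical path; the attributes and methods are the ones defined above):

    from gbp.rpm import SpecFile, split_version_str, compose_version_str

    spec = SpecFile('foo.spec')       # hypothetical spec file on disk
    spec.name, spec.upstreamversion   # parsed by rpm-python from the preamble
    series = spec.patchseries()       # non-ignored patches as a gbp PatchSeries

    split_version_str("3:1.2.3-5.3")
    # {'release': '5.3', 'epoch': '3', 'upstreamversion': '1.2.3'}
    compose_version_str({'epoch': '3', 'upstreamversion': '1.2.3', 'release': '5.3'})
    # '3:1.2.3-5.3'
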
diff --git a/gbp/rpm/git.py b/gbp/rpm/git.py
new file mode 100644
index 0000000..c7cc023
--- /dev/null
+++ b/gbp/rpm/git.py
@@ -0,0 +1,105 @@
+# vim: set fileencoding=utf-8 :
+#
+# (C) 2011 Guido Günther <agx@sigxcpu.org>
+# (C) 2012 Intel Corporation <markus.lehtonen@linux.intel.com>
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+import re
+
+from gbp.git import GitRepository, GitRepositoryError
+from gbp.pkg.pristinetar import PristineTar
+from gbp.rpm import compose_version_str
+
+class RpmGitRepository(GitRepository):
+ """A git repository that holds the source of an RPM package"""
+
+ def __init__(self, path):
+ super(RpmGitRepository, self).__init__(path)
+ self.pristine_tar = PristineTar(self)
+
+ def find_version(self, format, str_fields):
+ """
+ Check if a certain version is stored in this repo and return the SHA1
+ of the related commit. That is, an annotated tag is dereferenced to the
+ commit object it points to.
+
+ @param format: tag pattern
+ @type format: C{str}
+ @param str_fields: arguments for format string ('upstreamversion', 'release', 'vendor'...)
+ @type str_fields: C{dict} of C{str}
+ @return: sha1 of the commit the tag references to
+ """
+ try:
+ tag = self.version_to_tag(format, str_fields)
+ except KeyError:
+ return None
+ if self.has_tag(tag): # new tags are injective
+ # dereference to a commit object
+ return self.rev_parse("%s^0" % tag)
+ return None
+
+ @staticmethod
+ def version_to_tag(format, str_fields):
+ """
+ Generate a tag from a given format and a version
+
+ @param format: tag pattern
+ @type format: C{str}
+ @param str_fields: arguments for format string ('upstreamversion', 'release', 'vendor'...)
+ @type str_fields: C{dict} of C{str}
+ @return: version tag
+
+ >>> RpmGitRepository.version_to_tag("packaging/%(version)s", dict(epoch='0', upstreamversion='0~0'))
+ 'packaging/0%0_0'
+ >>> RpmGitRepository.version_to_tag("%(vendor)s/v%(version)s", dict(upstreamversion='1.0', release='2', vendor="myvendor"))
+ 'myvendor/v1.0-2'
+ """
+ version_tag = format % dict(str_fields,
+ version=compose_version_str(str_fields))
+ return RpmGitRepository._sanitize_tag(version_tag)
+
+ @staticmethod
+ def _sanitize_tag(tag):
+ """sanitize a version so git accepts it as a tag
+
+ >>> RpmGitRepository._sanitize_tag("0.0.0")
+ '0.0.0'
+ >>> RpmGitRepository._sanitize_tag("0.0~0")
+ '0.0_0'
+ >>> RpmGitRepository._sanitize_tag("0:0.0")
+ '0%0.0'
+ >>> RpmGitRepository._sanitize_tag("0%0~0")
+ '0%0_0'
+ """
+ return tag.replace('~', '_').replace(':', '%')
+
+ @property
+ def pristine_tar_branch(self):
+ """
+ The name of the pristine-tar branch, whether it already exists or
+ not.
+ """
+ return PristineTar.branch
+
+ def has_pristine_tar_branch(self):
+ """
+ Whether the repo has a I{pristine-tar} branch.
+
+ @return: C{True} if the repo has pristine-tar commits already, C{False}
+ otherwise
+ @rtype: C{Bool}
+ """
+ return True if self.has_branch(self.pristine_tar_branch) else False
+
+# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·:
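version_to_tag() delegates the epoch/upstreamversion/release handling to compose_version_str() from gbp.rpm, so a tag format only needs a single %(version)s placeholder. A minimal sketch mirroring the doctests:

    from gbp.rpm.git import RpmGitRepository

    fields = dict(upstreamversion='1.0', release='2', vendor='myvendor')
    RpmGitRepository.version_to_tag("%(vendor)s/v%(version)s", fields)
    # 'myvendor/v1.0-2'
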
diff --git a/gbp/rpm/lib_rpm.py b/gbp/rpm/lib_rpm.py
new file mode 100644
index 0000000..4bad44e
--- /dev/null
+++ b/gbp/rpm/lib_rpm.py
@@ -0,0 +1,47 @@
+# vim: set fileencoding=utf-8 :
+#
+# (C) 2012 Intel Corporation <markus.lehtonen@linux.intel.com>
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+"""Wrapper module for librpm"""
+
+import tempfile
+
+import gbp.log
+from gbp.rpm.policy import RpmPkgPolicy
+
+try:
+ # Try to load special RPM lib to be used for GBP (only)
+ librpm = __import__(RpmPkgPolicy.python_rpmlib_module_name)
+except ImportError:
+ gbp.log.warn("Failed to import '%s' as rpm python module, using host's "
+ "default rpm library instead" %
+ RpmPkgPolicy.python_rpmlib_module_name)
+ import rpm as librpm
+
+# Module initialization
+_rpmlog = tempfile.NamedTemporaryFile(prefix='gbp_rpmlog')
+_rpmlogfd = _rpmlog.file
+librpm.setVerbosity(librpm.RPMLOG_INFO)
+librpm.setLogFile(_rpmlogfd)
+
+
+def get_librpm_log(truncate=True):
+ """Get rpmlib log output"""
+ _rpmlogfd.seek(0)
+ log = [line.strip() for line in _rpmlogfd.readlines()]
+ if truncate:
+ _rpmlogfd.truncate(0)
+ return log
+
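
A sketch of how callers are expected to drain the shared log file after a librpm operation. The spec-parsing call is only an illustrative assumption (rpm.spec from the rpm 4.9+ Python bindings); any librpm call that emits log output would do:

    import gbp.log
    from gbp.rpm.lib_rpm import librpm, get_librpm_log

    try:
        librpm.spec('package.spec')      # assumed librpm API, hypothetical file
    except Exception:
        pass
    for line in get_librpm_log():        # truncates the shared log by default
        gbp.log.debug("librpm: %s" % line)
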
diff --git a/gbp/rpm/linkedlist.py b/gbp/rpm/linkedlist.py
new file mode 100644
index 0000000..74d897b
--- /dev/null
+++ b/gbp/rpm/linkedlist.py
@@ -0,0 +1,214 @@
+# vim: set fileencoding=utf-8 :
+#
+# (C) 2012 Intel Corporation <markus.lehtonen@linux.intel.com>
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+"""Simple implementation of a doubly linked list"""
+
+import collections
+
+import gbp.log
+
+
+class LinkedListNode(object):
+ """Node of the linked list"""
+
+ def __init__(self, data="", prev_node=None, next_node=None):
+ self.prev = prev_node
+ self.next = next_node
+ self._data = data
+
+ def __str__(self):
+ return str(self.data)
+
+ @property
+ def data(self):
+ """Get data stored into node"""
+ if self._data is None:
+ gbp.log.debug("BUG: referencing a deleted node!")
+ return("")
+ return self._data
+
+ def set_data(self, data):
+ """
+ Set data stored into node
+
+ >>> node = LinkedListNode('foo')
+ >>> node.data
+ 'foo'
+ >>> node.set_data('bar')
+ >>> node.data
+ 'bar'
+ >>> node.set_data(None)
+ >>> node.data
+ ''
+ """
+ if data is None:
+ gbp.log.debug("BUG: trying to store 'None', not allowed")
+ data = ""
+ self._data = data
+
+
+ def delete(self):
+ """Delete node"""
+ if self.prev:
+ self.prev.next = self.next
+ if self.next:
+ self.next.prev = self.prev
+ self._data = None
+
+
+class LinkedListIterator(collections.Iterator):
+ """Iterator for the linked list"""
+
+ def __init__(self, obj):
+ self._next = obj.first
+
+ def next(self):
+ ret = self._next
+ if ret:
+ self._next = ret.next
+ else:
+ raise StopIteration
+ return ret
+
+
+class LinkedList(collections.Iterable):
+ """Doubly linked list"""
+
+ def __init__(self):
+ self._first = None
+ self._last = None
+
+ def __iter__(self):
+ return LinkedListIterator(self)
+
+ def __len__(self):
+ # Count the nodes; works for an empty list, too
+ length = sum(1 for _node in self)
+ return length
+
+ @property
+ def first(self):
+ """Get the first node of the list"""
+ return self._first
+
+ def prepend(self, data):
+ """
+ Insert to the beginning of list
+
+ >>> list = LinkedList()
+ >>> [str(data) for data in list]
+ []
+ >>> node = list.prepend("foo")
+ >>> len(list)
+ 1
+ >>> node = list.prepend("bar")
+ >>> [str(data) for data in list]
+ ['bar', 'foo']
+ """
+ if self._first is None:
+ new = self._first = self._last = LinkedListNode(data)
+ else:
+ new = self.insert_before(self._first, data)
+ return new
+
+ def append(self, data):
+ """
+ Insert to the end of list
+
+ >>> list = LinkedList()
+ >>> node = list.append('foo')
+ >>> len(list)
+ 1
+ >>> node = list.append('bar')
+ >>> [str(data) for data in list]
+ ['foo', 'bar']
+ """
+ if self._last is None:
+ return self.prepend(data)
+ else:
+ return self.insert_after(self._last, data)
+
+ def insert_before(self, node, data=""):
+ """
+ Insert before a node
+
+ >>> list = LinkedList()
+ >>> node1 = list.append('foo')
+ >>> node2 = list.insert_before(node1, 'bar')
+ >>> node3 = list.insert_before(node1, 'baz')
+ >>> [str(data) for data in list]
+ ['bar', 'baz', 'foo']
+ """
+ new = LinkedListNode(data, prev_node=node.prev, next_node=node)
+ if node.prev:
+ node.prev.next = new
+ else:
+ self._first = new
+ node.prev = new
+ return new
+
+ def insert_after(self, node, data=""):
+ """
+ Insert after a node
+
+ >>> list = LinkedList()
+ >>> node1 = list.prepend('foo')
+ >>> node2 = list.insert_after(node1, 'bar')
+ >>> node3 = list.insert_after(node1, 'baz')
+ >>> [str(data) for data in list]
+ ['foo', 'baz', 'bar']
+ """
+ new = LinkedListNode(data, prev_node=node, next_node=node.next)
+ if node.next:
+ node.next.prev = new
+ else:
+ self._last = new
+ node.next = new
+ return new
+
+ def delete(self, node):
+ """
+ Delete node
+
+ >>> list = LinkedList()
+ >>> node1 = list.prepend('foo')
+ >>> node2 = list.insert_after(node1, 'bar')
+ >>> node3 = list.insert_before(node2, 'baz')
+ >>> [str(data) for data in list]
+ ['foo', 'baz', 'bar']
+ >>> str(list.delete(node3))
+ 'foo'
+ >>> [str(data) for data in list]
+ ['foo', 'bar']
+ >>> print "%s" % node3
+ <BLANKLINE>
+ >>> str(list.delete(node1))
+ 'bar'
+ >>> [str(data) for data in list]
+ ['bar']
+ >>> list.delete(node2)
+ >>> [str(data) for data in list]
+ []
+ """
+ ret = node.prev
+ if node is self._first:
+ ret = self._first = self._first.next
+ if node is self._last:
+ self._last = self._last.prev
+ node.delete()
+ return ret
+
+# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·:
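
Beyond the doctests, the list is intended for line-oriented, in-place editing (the RPM spec handling added in this series uses it that way); a minimal sketch with hypothetical content:

    from gbp.rpm.linkedlist import LinkedList

    lines = LinkedList()
    for line in ['Name: foo\n', 'Version: 1.0\n', 'Release: 1\n']:
        lines.append(line)
    for node in lines:                       # iteration yields nodes
        if str(node).startswith('Version:'):
            node.set_data('Version: 2.0\n')  # edit the node in place
    text = ''.join(str(node) for node in lines)
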
diff --git a/gbp/rpm/policy.py b/gbp/rpm/policy.py
new file mode 100644
index 0000000..f8cb863
--- /dev/null
+++ b/gbp/rpm/policy.py
@@ -0,0 +1,72 @@
+# vim: set fileencoding=utf-8 :
+#
+# (C) 2012 Intel Corporation <markus.lehtonen@linux.intel.com>
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+"""Default packaging policy for RPM"""
+
+import re
+from gbp.pkg import PkgPolicy, parse_archive_filename
+
+class RpmPkgPolicy(PkgPolicy):
+ """Packaging policy for RPM"""
+
+ # Special rpmlib python module for GBP (only)
+ python_rpmlib_module_name = "rpm"
+
+ alnum = 'a-zA-Z0-9'
+ # Valid characters for RPM pkg name
+ name_whitelist_chars = '._+%{}\-'
+ # Valid characters for RPM pkg version
+ version_whitelist_chars = '._+%{}~'
+
+ # Regexp for checking the validity of package name
+ packagename_re = re.compile("^[%s][%s%s]+$" %
+ (alnum, alnum, name_whitelist_chars))
+ packagename_msg = ("Package names must be at least two characters long, "
+ "start with an alphanumeric and can only contain "
+ "alphanumerics or characters in %s" %
+ list(name_whitelist_chars))
+
+ # Regexp for checking the validity of package (upstream) version
+ upstreamversion_re = re.compile("^[0-9][%s%s]*$" %
+ (alnum, version_whitelist_chars))
+ upstreamversion_msg = ("Upstream version numbers must start with a digit "
+ "and can only containg alphanumerics or characters "
+ "in %s" % list(version_whitelist_chars))
+
+ @classmethod
+ def is_valid_orig_archive(cls, filename):
+ """
+ Is this a valid orig source archive
+
+ @param filename: upstream source archive filename
+ @type filename: C{str}
+ @return: true if valid upstream source archive filename
+ @rtype: C{bool}
+
+ >>> RpmPkgPolicy.is_valid_orig_archive("foo/bar_baz.tar.gz")
+ True
+ >>> RpmPkgPolicy.is_valid_orig_archive("foo.bar.tar")
+ True
+ >>> RpmPkgPolicy.is_valid_orig_archive("foo.bar")
+ False
+ >>> RpmPkgPolicy.is_valid_orig_archive("foo.gz")
+ False
+ """
+ _base, arch_fmt, _compression = parse_archive_filename(filename)
+ if arch_fmt:
+ return True
+ return False
+
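
A short sketch of how the policy class is used for validation; is_valid_packagename() and is_valid_upstreamversion() are assumed to be the classmethods inherited from PkgPolicy that evaluate the regexps defined above:

    from gbp.rpm.policy import RpmPkgPolicy

    RpmPkgPolicy.is_valid_packagename('libfoo-devel')      # True
    RpmPkgPolicy.is_valid_upstreamversion('1.0~rc1')       # True
    RpmPkgPolicy.is_valid_orig_archive('foo-1.0.tar.bz2')  # True: tar archive
    RpmPkgPolicy.is_valid_orig_archive('foo-1.0.gz')       # False: compressed, but no archive
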
diff --git a/gbp/scripts/buildpackage.py b/gbp/scripts/buildpackage.py
index c077b9e..e96e8e7 100755
--- a/gbp/scripts/buildpackage.py
+++ b/gbp/scripts/buildpackage.py
@@ -1,6 +1,6 @@
# vim: set fileencoding=utf-8 :
#
-# (C) 2006-2013 Guido Günther <agx@sigxcpu.org>
+# (C) 2006-2014 Guido Günther <agx@sigxcpu.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
@@ -15,7 +15,7 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
-"""run commands to build a debian package out of a git repository"""
+"""Build a debian package out of a GIT repository"""
import ConfigParser
import errno
@@ -29,7 +29,9 @@ from gbp.command_wrappers import (Command,
from gbp.config import (GbpOptionParserDebian, GbpOptionGroup)
from gbp.deb.git import (GitRepositoryError, DebianGitRepository)
from gbp.deb.source import DebianSource, DebianSourceError
+from gbp.format import format_msg
from gbp.git.vfs import GitVfs
+from gbp.deb.upstreamsource import DebianUpstreamSource
from gbp.errors import GbpError
import gbp.log
import gbp.notifications
@@ -37,7 +39,7 @@ from gbp.scripts.common.buildpackage import (index_name, wc_name,
git_archive_submodules,
git_archive_single, dump_tree,
write_wc, drop_index)
-from gbp.pkg import (UpstreamSource, compressor_opts, compressor_aliases)
+from gbp.pkg import compressor_opts, compressor_aliases, parse_archive_filename
def git_archive(repo, cp, output_dir, treeish, comp_type, comp_level, with_submodules):
"create a compressed orig tarball in output_dir using git_archive"
@@ -171,7 +173,7 @@ def extract_orig(orig_tarball, dest_dir):
gbp.log.info("Extracting %s to '%s'" % (os.path.basename(orig_tarball), dest_dir))
move_old_export(dest_dir)
- upstream = UpstreamSource(orig_tarball)
+ upstream = DebianUpstreamSource(orig_tarball)
upstream.unpack(dest_dir)
# Check if tarball extracts into a single folder or not:
@@ -241,6 +243,8 @@ def pristine_tar_build_orig(repo, cp, output_dir, options):
def get_upstream_tree(repo, cp, options):
"""Determine the upstream tree from the given options"""
if options.upstream_tree.upper() == 'TAG':
+ if cp['Upstream-Version'] is None:
+ raise GitRepositoryError("Can't determine upstream version from changelog")
upstream_tree = repo.version_to_tag(options.upstream_tag,
cp['Upstream-Version'])
elif options.upstream_tree.upper() == 'BRANCH':
@@ -316,7 +320,7 @@ def guess_comp_type(repo, comp_type, cp, tarball_dir):
else:
commit = repo.pristine_tar_branch
tarball = repo.get_commit_info(commit)['subject']
- comp_type = du.DebianPkgPolicy.get_compression(tarball)
+ (base_name, archive_fmt, comp_type) = parse_archive_filename(tarball)
gbp.log.debug("Determined compression type '%s'" % comp_type)
if not comp_type:
comp_type = 'gzip'
@@ -399,6 +403,7 @@ def build_parser(name, prefix=None):
tag_group.add_boolean_config_file_option(option_name="sign-tags", dest="sign_tags")
tag_group.add_config_file_option(option_name="keyid", dest="keyid")
tag_group.add_config_file_option(option_name="debian-tag", dest="debian_tag")
+ tag_group.add_config_file_option(option_name="debian-tag-msg", dest="debian_tag_msg")
tag_group.add_config_file_option(option_name="upstream-tag", dest="upstream_tag")
orig_group.add_config_file_option(option_name="upstream-tree", dest="upstream_tree")
orig_group.add_boolean_config_file_option(option_name="pristine-tar", dest="pristine_tar")
@@ -482,6 +487,13 @@ def parse_args(argv, prefix):
return options, args, dpkg_args
+class Hook(RunAtCommand):
+ "A hook run during the build"
+ def __init__(self, name, *args, **kwargs):
+ RunAtCommand.__init__(self, *args, **kwargs)
+ self.run_error = '%s-hook %s' % (name, self.run_error)
+
+
def main(argv):
retval = 0
prefix = "git-"
@@ -547,9 +559,9 @@ def main(argv):
# Run postexport hook
if options.postexport:
- RunAtCommand(options.postexport, shell=True,
- extra_env={'GBP_GIT_DIR': repo.git_dir,
- 'GBP_TMP_DIR': tmp_dir})(dir=tmp_dir)
+ Hook('Postexport', options.postexport, shell=True,
+ extra_env={'GBP_GIT_DIR': repo.git_dir,
+ 'GBP_TMP_DIR': tmp_dir})(dir=tmp_dir)
major = (source.changelog.debian_version if source.is_native()
else source.changelog.upstream_version)
@@ -569,9 +581,9 @@ def main(argv):
build_dir = repo_dir
if options.prebuild:
- RunAtCommand(options.prebuild, shell=True,
- extra_env={'GBP_GIT_DIR': repo.git_dir,
- 'GBP_BUILD_DIR': build_dir})(dir=build_dir)
+ Hook('Prebuild', options.prebuild, shell=True,
+ extra_env={'GBP_GIT_DIR': repo.git_dir,
+ 'GBP_BUILD_DIR': build_dir})(dir=build_dir)
setup_pbuilder(options)
# Finally build the package:
@@ -584,24 +596,26 @@ def main(argv):
source.changelog.noepoch,
changes_file_suffix(dpkg_args)))
gbp.log.debug("Looking for changes file %s" % changes)
- Command(options.postbuild, shell=True,
- extra_env={'GBP_CHANGES_FILE': changes,
- 'GBP_BUILD_DIR': build_dir})()
+ Hook('Postbuild', options.postbuild, shell=True,
+ extra_env={'GBP_CHANGES_FILE': changes,
+ 'GBP_BUILD_DIR': build_dir})()
if options.tag or options.tag_only:
- gbp.log.info("Tagging %s" % source.changelog.version)
tag = repo.version_to_tag(options.debian_tag, source.changelog.version)
+ gbp.log.info("Tagging %s as %s" % (source.changelog.version, tag))
if options.retag and repo.has_tag(tag):
repo.delete_tag(tag)
+ tag_msg = format_msg(options.debian_tag_msg,
+ dict(pkg=source.sourcepkg,
+ version=source.changelog.version))
repo.create_tag(name=tag,
- msg="%s Debian release %s" % (source.sourcepkg,
- source.changelog.version),
+ msg=tag_msg,
sign=options.sign_tags, keyid=options.keyid)
if options.posttag:
sha = repo.rev_parse("%s^{}" % tag)
- Command(options.posttag, shell=True,
- extra_env={'GBP_TAG': tag,
- 'GBP_BRANCH': branch or '(no branch)',
- 'GBP_SHA1': sha})()
+ Hook('Posttag', options.posttag, shell=True,
+ extra_env={'GBP_TAG': tag,
+ 'GBP_BRANCH': branch or '(no branch)',
+ 'GBP_SHA1': sha})()
except CommandExecFailed:
retval = 1
except (GbpError, GitRepositoryError) as err:
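
The new debian-tag-msg option is expanded with format_msg() from the gbp.format module added in this series; a sketch with hypothetical package data:

    from gbp.format import format_msg

    msg = format_msg('%(pkg)s Debian release %(version)s',
                     dict(pkg='hello', version='2.9-1'))
    # msg == 'hello Debian release 2.9-1'
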
diff --git a/gbp/scripts/clone.py b/gbp/scripts/clone.py
index 62d0dcc..8078854 100755
--- a/gbp/scripts/clone.py
+++ b/gbp/scripts/clone.py
@@ -17,7 +17,7 @@
#
# inspired by dom-git-checkout
#
-"""clone a repo and set it up for gbp"""
+"""Clone a GIT repository and set it up for gbp"""
import ConfigParser
import sys
diff --git a/gbp/scripts/common/import_orig.py b/gbp/scripts/common/import_orig.py
index c2c53a6..8e18e97 100644
--- a/gbp/scripts/common/import_orig.py
+++ b/gbp/scripts/common/import_orig.py
@@ -32,29 +32,26 @@ except ImportError:
pass
-class OrigUpstreamSource(UpstreamSource):
- """Upstream source that will be imported"""
-
- def needs_repack(self, options):
- """
- Determine if the upstream sources needs to be repacked
-
- We repack if
- 1. we want to filter out files and use pristine tar since we want
- to make a filtered tarball available to pristine-tar
- 2. when we don't have a suitable upstream tarball (e.g. zip archive or unpacked dir)
- and want to use filters
- 3. when we don't have a suitable upstream tarball (e.g. zip archive or unpacked dir)
- and want to use pristine-tar
- """
- if ((options.pristine_tar and options.filter_pristine_tar and len(options.filters) > 0)):
+def orig_needs_repack(upstream_source, options):
+ """
+ Determine if the upstream sources need to be repacked
+
+ We repack if
+ 1. we want to filter out files and use pristine tar since we want
+ to make a filtered tarball available to pristine-tar
+ 2. when we don't have a suitable upstream tarball (e.g. zip archive or unpacked dir)
+ and want to use filters
+ 3. when we don't have a suitable upstream tarball (e.g. zip archive or unpacked dir)
+ and want to use pristine-tar
+ """
+ if ((options.pristine_tar and options.filter_pristine_tar and len(options.filters) > 0)):
+ return True
+ elif not upstream_source.is_orig():
+ if len(options.filters):
return True
- elif not self.is_orig():
- if len(options.filters):
- return True
- elif options.pristine_tar:
- return True
- return False
+ elif options.pristine_tar:
+ return True
+ return False
def cleanup_tmp_tree(tree):
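
After this change the repack decision is a free function that takes the upstream source object and the parsed options; a minimal sketch with a hypothetical tarball path and a stand-in options object:

    from gbp.deb.upstreamsource import DebianUpstreamSource
    from gbp.scripts.common.import_orig import orig_needs_repack

    class Opts(object):                  # minimal stand-in for parsed options
        pristine_tar = True
        filter_pristine_tar = True
        filters = ['*.pdf']

    source = DebianUpstreamSource('../foo_1.0.orig.tar.gz')  # hypothetical path
    orig_needs_repack(source, Opts())    # True: a filtered tarball is wanted for pristine-tar
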
diff --git a/gbp/scripts/create_remote_repo.py b/gbp/scripts/create_remote_repo.py
index f0e680b..b8923cd 100644
--- a/gbp/scripts/create_remote_repo.py
+++ b/gbp/scripts/create_remote_repo.py
@@ -16,7 +16,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Based on the aa-create-git-repo and dom-new-git-repo shell scripts
-"""Create a remote repo based on the current one"""
+"""Create a remote GIT repository based on the current one"""
import ConfigParser
import sys
diff --git a/gbp/scripts/dch.py b/gbp/scripts/dch.py
index f36f287..ce19c9a 100644
--- a/gbp/scripts/dch.py
+++ b/gbp/scripts/dch.py
@@ -15,7 +15,7 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
-"""Generate Debian changelog entries from git commit messages"""
+"""Generate Debian changelog entries from GIT commit messages"""
import ConfigParser
import os.path
@@ -64,17 +64,19 @@ def get_author_email(repo, use_git_config):
return author, email
-def fixup_section(repo, git_author, options, dch_options):
+def fixup_section(repo, use_git_author, options, dch_options):
"""
- Fixup the changelog header and trailer's comitter and email address
+ Fixup the changelog header and trailer's committer and email address
It might otherwise point to the last git committer instead of the person
creating the changelog
- This apply --distribution and --urgency options passed to git-dch
+
+ This also applies --distribution and --urgency options passed to gbp dch
"""
- author, email = get_author_email(repo, git_author)
+ author, email = get_author_email(repo, use_git_author)
used_options = ['distribution', 'urgency']
- header_opts = []
+ opts = []
+ mainttrailer_opts = [ '--nomainttrailer', '--mainttrailer', '-t' ]
# This must not be done for snapshots or snapshots changelog entries
# will not be concatenated
@@ -83,11 +85,17 @@ def fixup_section(repo, git_author, options, dch_options):
val = getattr(options, opt)
if val:
gbp.log.debug("Set header option '%s' to '%s'" % (opt, val))
- header_opts.append("--%s=%s" % (opt, val))
+ opts.append("--%s=%s" % (opt, val))
else:
gbp.log.debug("Snapshot enabled: do not fixup options in header")
- ChangeLog.spawn_dch(msg='', author=author, email=email, dch_options=dch_options+header_opts)
+ if use_git_author:
+ for opt in mainttrailer_opts:
+ if opt in dch_options:
+ break
+ else:
+ opts.append(mainttrailer_opts[0])
+ ChangeLog.spawn_dch(msg='', author=author, email=email, dch_options=dch_options+opts)
def snapshot_version(version):
@@ -153,9 +161,9 @@ def mangle_changelog(changelog, cp, snapshot=''):
raise GbpError("Error mangling changelog %s" % e)
-def do_release(changelog, repo, cp, git_author, dch_options):
+def do_release(changelog, repo, cp, use_git_author, dch_options):
"""Remove the snapshot header and set the distribution"""
- author, email = get_author_email(repo, git_author)
+ author, email = get_author_email(repo, use_git_author)
(release, snapshot) = snapshot_version(cp['Version'])
if snapshot:
cp['MangledVersion'] = release
@@ -343,10 +351,9 @@ def build_parser(name):
help="Increment the Debian release number for a Debian Team upload, and add a Team upload changelog comment.")
version_group.add_option("--security", dest="security", action="store_true", default=False,
help="Increment the Debian release number for a security upload and add a security upload changelog comment.")
- version_group.add_boolean_config_file_option(option_name="git-author", dest="git_author")
+ version_group.add_boolean_config_file_option(option_name="git-author", dest="use_git_author")
commit_group.add_boolean_config_file_option(option_name="meta", dest="meta")
- commit_group.add_config_file_option(option_name="meta-closes", dest="meta_closes",
- help="Meta tags for the bts close commands, default is '%(meta-closes)s'")
+ commit_group.add_config_file_option(option_name="meta-closes", dest="meta_closes")
commit_group.add_boolean_config_file_option(option_name="full", dest="full")
commit_group.add_config_file_option(option_name="id-length", dest="idlen",
help="include N digits of the commit id in the changelog entry, default is '%(id-length)s'",
@@ -505,11 +512,11 @@ def main(argv):
version=version_change,
dch_options=dch_options)
- fixup_section(repo, git_author=options.git_author, options=options,
+ fixup_section(repo, use_git_author=options.use_git_author, options=options,
dch_options=dch_options)
if options.release:
- do_release(changelog, repo, cp, git_author=options.git_author,
+ do_release(changelog, repo, cp, use_git_author=options.use_git_author,
dch_options=dch_options)
elif options.snapshot:
(snap, version) = do_snapshot(changelog, repo, options.snapshot_number)
diff --git a/gbp/scripts/import_dsc.py b/gbp/scripts/import_dsc.py
index 600b394..c0bf650 100644
--- a/gbp/scripts/import_dsc.py
+++ b/gbp/scripts/import_dsc.py
@@ -14,7 +14,7 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-"""Import a Debian source package into a git repository"""
+"""Import a Debian source package into a GIT repository"""
import ConfigParser
import sys
@@ -26,8 +26,8 @@ import glob
import pipes
import time
import gbp.command_wrappers as gbpc
-from gbp.pkg import UpstreamSource
from gbp.deb.dscfile import DscFile
+from gbp.deb.upstreamsource import DebianUpstreamSource
from gbp.deb.git import (DebianGitRepository, GitRepositoryError)
from gbp.deb.changelog import ChangeLog
from gbp.git import rfc822_date_to_git
@@ -328,7 +328,7 @@ def main(argv):
set_bare_repo_options(options)
dirs['tmp'] = os.path.abspath(tempfile.mkdtemp(dir='..'))
- upstream = UpstreamSource(src.tgz)
+ upstream = DebianUpstreamSource(src.tgz)
upstream.unpack(dirs['tmp'], options.filters)
format = [(options.upstream_tag, "Upstream"), (options.debian_tag, "Debian")][src.native]
diff --git a/gbp/scripts/import_dscs.py b/gbp/scripts/import_dscs.py
index 28413fa..2a71560 100644
--- a/gbp/scripts/import_dscs.py
+++ b/gbp/scripts/import_dscs.py
@@ -14,7 +14,7 @@
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-"""Import multiple dsc files in one go"""
+"""Import multiple dsc files into GIT in one go"""
import glob
import os
diff --git a/gbp/scripts/import_orig.py b/gbp/scripts/import_orig.py
index 542896e..6256431 100644
--- a/gbp/scripts/import_orig.py
+++ b/gbp/scripts/import_orig.py
@@ -15,7 +15,7 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
-"""Import a new upstream version into a git repository"""
+"""Import a new upstream version into a GIT repository"""
import ConfigParser
import os
@@ -23,13 +23,15 @@ import sys
import tempfile
import gbp.command_wrappers as gbpc
from gbp.deb import (DebianPkgPolicy, parse_changelog_repo)
+from gbp.deb.upstreamsource import DebianUpstreamSource
from gbp.deb.uscan import (Uscan, UscanError)
from gbp.deb.changelog import ChangeLog, NoChangeLogError
from gbp.deb.git import (GitRepositoryError, DebianGitRepository)
from gbp.config import GbpOptionParserDebian, GbpOptionGroup, no_upstream_branch_msg
from gbp.errors import GbpError
+from gbp.format import format_msg
import gbp.log
-from gbp.scripts.common.import_orig import (OrigUpstreamSource, cleanup_tmp_tree,
+from gbp.scripts.common.import_orig import (orig_needs_repack, cleanup_tmp_tree,
ask_package_name, ask_package_version,
repack_source, is_link_target)
@@ -80,7 +82,7 @@ def upstream_import_commit_msg(options, version):
def detect_name_and_version(repo, source, options):
# Guess defaults for the package name and version from the
# original tarball.
- (guessed_package, guessed_version) = source.guess_version() or ('', '')
+ guessed_package, guessed_version = source.guess_version()
# Try to find the source package name
try:
@@ -167,7 +169,7 @@ def find_source(use_uscan, args):
elif len(args) == 0:
raise GbpError("No archive to import specified. Try --help.")
else:
- archive = OrigUpstreamSource(args[0])
+ archive = DebianUpstreamSource(args[0])
return archive
@@ -300,7 +302,7 @@ def main(argv):
source.unpack(tmpdir, options.filters)
gbp.log.debug("Unpacked '%s' to '%s'" % (source.path, source.unpacked))
- if source.needs_repack(options):
+ if orig_needs_repack(source, options):
gbp.log.debug("Filter pristine-tar: repacking '%s' from '%s'" % (source.path, source.unpacked))
(source, tmpdir) = repack_source(source, sourcepackage, version, tmpdir, options.filters)
@@ -372,7 +374,7 @@ def main(argv):
epoch = '%s:' % cp.epoch
info = { 'version': "%s%s-1" % (epoch, version) }
env = { 'GBP_BRANCH': options.debian_branch }
- gbpc.Command(options.postimport % info, extra_env=env, shell=True)()
+ gbpc.Command(format_msg(options.postimport, info), extra_env=env, shell=True)()
# Update working copy and index if we've possibly updated the
# checked out branch
current_branch = repo.get_branch()
diff --git a/gbp/scripts/import_srpm.py b/gbp/scripts/import_srpm.py
new file mode 100755
index 0000000..958f5ff
--- /dev/null
+++ b/gbp/scripts/import_srpm.py
@@ -0,0 +1,462 @@
+# vim: set fileencoding=utf-8 :
+#
+# (C) 2006,2007,2011 Guido Guenther <agx@sigxcpu.org>
+# (C) 2012 Intel Corporation <markus.lehtonen@linux.intel.com>
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+"""Import an RPM source package into a GIT repository"""
+
+import ConfigParser
+import sys
+import re
+import os
+import glob
+import time
+import shutil
+import errno
+import urllib2
+
+import gbp.tmpfile as tempfile
+import gbp.command_wrappers as gbpc
+from gbp.rpm import (parse_srpm, guess_spec, SpecFile, NoSpecError,
+ RpmUpstreamSource, compose_version_str)
+from gbp.rpm.git import (RpmGitRepository, GitRepositoryError)
+from gbp.git.modifier import GitModifier
+from gbp.config import (GbpOptionParserRpm, GbpOptionGroup,
+ no_upstream_branch_msg)
+from gbp.errors import GbpError
+import gbp.log
+from gbp.pkg import parse_archive_filename
+
+no_packaging_branch_msg = """
+Repository does not have branch '%s' for packaging/distribution sources.
+You need to create it or use --packaging-branch to specify it.
+"""
+
+class SkipImport(Exception):
+ """Nothing imported"""
+ pass
+
+
+def download_file(target_dir, url):
+ """Download a remote file"""
+ gbp.log.info("Downloading '%s'..." % url)
+ try:
+ urlobj = urllib2.urlopen(url)
+ local_fn = os.path.join(target_dir, os.path.basename(url))
+ with open(local_fn, "wb") as local_file:
+ local_file.write(urlobj.read())
+ except urllib2.HTTPError as err:
+ raise GbpError("Download failed: %s" % err)
+ except urllib2.URLError as err:
+ raise GbpError("Download failed: %s" % err.reason)
+ return local_fn
+
+def download_source(pkg, dirs):
+ """Download package from a remote location"""
+ if re.match(r'[a-z]{1,5}://', pkg):
+ mode = 'python urllib2'
+ else:
+ mode = 'yumdownloader'
+
+ tmpdir = tempfile.mkdtemp(dir=dirs['tmp_base'], prefix='download_')
+ gbp.log.info("Trying to download '%s' using '%s'..." % (pkg, mode))
+ if mode == 'yumdownloader':
+ gbpc.RunAtCommand('yumdownloader',
+ ['--source', '--destdir=.', pkg],
+ shell=False)(dir=tmpdir)
+ else:
+ download_file(tmpdir, pkg)
+ srpm = glob.glob(os.path.join(tmpdir, '*.src.rpm'))[0]
+ return srpm
+
+
+def committer_from_author(author, options):
+ """Get committer info based on options"""
+ committer = GitModifier()
+ if options.author_is_committer:
+ committer.name = author.name
+ committer.email = author.email
+ return committer
+
+
+def move_tag_stamp(repo, tag_format, tag_str_fields):
+ "Move tag out of the way appending the current timestamp"
+ old = repo.version_to_tag(tag_format, tag_str_fields)
+ new = repo.version_to_tag('%s~%d' % (tag_format, int(time.time())),
+ tag_str_fields)
+ repo.move_tag(old, new)
+
+
+def set_bare_repo_options(options):
+ """Modify options for import into a bare repository"""
+ if options.pristine_tar:
+ gbp.log.info("Bare repository: setting %s option '--no-pristine-tar'")
+ options.pristine_tar = False
+
+
+def force_to_branch_head(repo, branch):
+ """Checkout branch and reset --hard"""
+ if repo.get_branch() == branch:
+ # Update HEAD if we modified the checked out branch
+ repo.force_head(branch, hard=True)
+ # Checkout packaging branch
+ repo.set_branch(branch)
+
+
+def parse_args(argv):
+ """Parse commandline arguments"""
+ try:
+ parser = GbpOptionParserRpm(command=os.path.basename(argv[0]),
+ prefix='',
+ usage='%prog [options] /path/to/package'
+ '.src.rpm')
+ except ConfigParser.ParsingError as err:
+ gbp.log.err(err)
+ return None, None
+
+ import_group = GbpOptionGroup(parser, "import options",
+ "pristine-tar and filtering")
+ tag_group = GbpOptionGroup(parser, "tag options",
+ "options related to git tag creation")
+ branch_group = GbpOptionGroup(parser, "version and branch naming options",
+ "version number and branch layout options")
+
+ for group in [import_group, branch_group, tag_group ]:
+ parser.add_option_group(group)
+
+ parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
+ default=False, help="verbose command execution")
+ parser.add_config_file_option(option_name="color", dest="color",
+ type='tristate')
+ parser.add_config_file_option(option_name="color-scheme",
+ dest="color_scheme")
+ parser.add_config_file_option(option_name="tmp-dir", dest="tmp_dir")
+ parser.add_config_file_option(option_name="vendor", action="store",
+ dest="vendor")
+ parser.add_option("--download", action="store_true", dest="download",
+ default=False, help="download source package")
+ branch_group.add_config_file_option(option_name="packaging-branch",
+ dest="packaging_branch")
+ branch_group.add_config_file_option(option_name="upstream-branch",
+ dest="upstream_branch")
+ branch_group.add_boolean_config_file_option(
+ option_name="create-missing-branches",
+ dest="create_missing_branches")
+ branch_group.add_option("--orphan-packaging", action="store_true",
+ dest="orphan_packaging", default=False,
+ help="The packaging branch doesn't base on upstream")
+ branch_group.add_option("--native", action="store_true",
+ dest="native", default=False,
+ help="This is a dist native package, no separate "
+ "upstream branch")
+
+ tag_group.add_boolean_config_file_option(option_name="sign-tags",
+ dest="sign_tags")
+ tag_group.add_config_file_option(option_name="keyid",
+ dest="keyid")
+ tag_group.add_config_file_option(option_name="packaging-tag",
+ dest="packaging_tag")
+ tag_group.add_config_file_option(option_name="upstream-tag",
+ dest="upstream_tag")
+
+ import_group.add_config_file_option(option_name="filter",
+ dest="filters", action="append")
+ import_group.add_boolean_config_file_option(option_name="pristine-tar",
+ dest="pristine_tar")
+ import_group.add_option("--allow-same-version", action="store_true",
+ dest="allow_same_version", default=False,
+ help="allow to import already imported version")
+ import_group.add_boolean_config_file_option(
+ option_name="author-is-committer",
+ dest="author_is_committer")
+ import_group.add_config_file_option(option_name="packaging-dir",
+ dest="packaging_dir")
+ (options, args) = parser.parse_args(argv[1:])
+ gbp.log.setup(options.color, options.verbose, options.color_scheme)
+ return options, args
+
+
+def main(argv):
+ """Main function of the git-import-srpm script"""
+ dirs = dict(top=os.path.abspath(os.curdir))
+
+ ret = 0
+ skipped = False
+
+ options, args = parse_args(argv)
+
+ if len(args) != 1:
+ gbp.log.err("Need to give exactly one package to import. Try --help.")
+ return 1
+ try:
+ dirs['tmp_base'] = tempfile.mkdtemp(dir=options.tmp_dir,
+ prefix='import-srpm')
+ except GbpError as err:
+ gbp.log.err(err)
+ return 1
+ try:
+ srpm = args[0]
+ if options.download:
+ srpm = download_source(srpm, dirs)
+
+ # Real src.rpm: we need to unpack it first
+ true_srcrpm = False
+ if not os.path.isdir(srpm) and not srpm.endswith(".spec"):
+ src = parse_srpm(srpm)
+ true_srcrpm = True
+ dirs['pkgextract'] = tempfile.mkdtemp(dir=dirs['tmp_base'],
+ prefix='pkgextract_')
+ gbp.log.info("Extracting src rpm to '%s'" % dirs['pkgextract'])
+ src.unpack(dirs['pkgextract'])
+ preferred_spec = src.name + '.spec'
+ srpm = dirs['pkgextract']
+ elif os.path.isdir(srpm):
+ preferred_spec = os.path.basename(srpm.rstrip('/')) + '.spec'
+ else:
+ preferred_spec = None
+
+ # Find and parse spec file
+ if os.path.isdir(srpm):
+ gbp.log.debug("Trying to import an unpacked srpm from '%s'" % srpm)
+ dirs['src'] = os.path.abspath(srpm)
+ spec = guess_spec(srpm, True, preferred_spec)
+ else:
+ gbp.log.debug("Trying to import an srpm from '%s' with spec "\
+ "file '%s'" % (os.path.dirname(srpm), srpm))
+ dirs['src'] = os.path.abspath(os.path.dirname(srpm))
+ spec = SpecFile(srpm)
+
+ # Check the repository state
+ try:
+ repo = RpmGitRepository('.')
+ is_empty = repo.is_empty()
+
+ (clean, out) = repo.is_clean()
+ if not clean and not is_empty:
+ gbp.log.err("Repository has uncommitted changes, commit "
+ "these first: ")
+ raise GbpError(out)
+
+ except GitRepositoryError:
+ gbp.log.info("No git repository found, creating one.")
+ is_empty = True
+ repo = RpmGitRepository.create(spec.name)
+ os.chdir(repo.path)
+
+ if repo.bare:
+ set_bare_repo_options(options)
+
+ # Create more tempdirs
+ dirs['origsrc'] = tempfile.mkdtemp(dir=dirs['tmp_base'],
+ prefix='origsrc_')
+ dirs['packaging_base'] = tempfile.mkdtemp(dir=dirs['tmp_base'],
+ prefix='packaging_')
+ dirs['packaging'] = os.path.join(dirs['packaging_base'],
+ options.packaging_dir)
+ try:
+ os.mkdir(dirs['packaging'])
+ except OSError as err:
+ if err.errno != errno.EEXIST:
+ raise
+
+ if true_srcrpm:
+ # For true src.rpm we just take everything
+ files = os.listdir(dirs['src'])
+ else:
+ # Need to copy files to the packaging directory given by caller
+ files = [os.path.basename(patch.path) \
+ for patch in spec.patchseries(unapplied=True, ignored=True)]
+ for filename in spec.sources().values():
+ files.append(os.path.basename(filename))
+ files.append(os.path.join(spec.specdir, spec.specfile))
+ # Don't copy orig source archive, though
+ if spec.orig_src and spec.orig_src['filename'] in files:
+ files.remove(spec.orig_src['filename'])
+
+ for fname in files:
+ fpath = os.path.join(dirs['src'], fname)
+ if os.path.exists(fpath):
+ shutil.copy2(fpath, dirs['packaging'])
+ else:
+ gbp.log.err("File '%s' listed in spec not found" % fname)
+ raise GbpError
+
+ # Unpack orig source archive
+ if spec.orig_src:
+ orig_tarball = os.path.join(dirs['src'], spec.orig_src['filename'])
+ sources = RpmUpstreamSource(orig_tarball)
+ sources.unpack(dirs['origsrc'], options.filters)
+ else:
+ sources = None
+
+ src_tag_format = options.packaging_tag if options.native \
+ else options.upstream_tag
+ tag_str_fields = dict(spec.version, vendor=options.vendor.lower())
+ src_tag = repo.version_to_tag(src_tag_format, tag_str_fields)
+ ver_str = compose_version_str(spec.version)
+
+ if repo.find_version(options.packaging_tag, tag_str_fields):
+ gbp.log.warn("Version %s already imported." % ver_str)
+ if options.allow_same_version:
+ gbp.log.info("Moving tag of version '%s' since import forced" %
+ ver_str)
+ move_tag_stamp(repo, options.packaging_tag, tag_str_fields)
+ else:
+ raise SkipImport
+
+ if is_empty:
+ options.create_missing_branches = True
+
+ # Determine author and committer info, currently same info is used
+ # for both sources and packaging files
+ author = None
+ if spec.packager:
+ match = re.match(r'(?P<name>.*[^ ])\s*<(?P<email>\S*)>',
+ spec.packager.strip())
+ if match:
+ author = GitModifier(match.group('name'), match.group('email'))
+ if not author:
+ author = GitModifier()
+ gbp.log.debug("Couldn't determine packager info")
+ committer = committer_from_author(author, options)
+
+ # Import sources
+ if sources:
+ src_commit = repo.find_version(src_tag_format, tag_str_fields)
+ if not src_commit:
+ gbp.log.info("Tag %s not found, importing sources" % src_tag)
+
+ branch = [options.upstream_branch,
+ options.packaging_branch][options.native]
+ if not repo.has_branch(branch):
+ if options.create_missing_branches:
+ gbp.log.info("Will create missing branch '%s'" %
+ branch)
+ else:
+ gbp.log.err(no_upstream_branch_msg % branch + "\n"
+ "Also check the --create-missing-branches option.")
+ raise GbpError
+ src_vendor = "Native" if options.native else "Upstream"
+ msg = "%s version %s" % (src_vendor, spec.upstreamversion)
+ src_commit = repo.commit_dir(sources.unpacked,
+ "Imported %s" % msg,
+ branch,
+ author=author,
+ committer=committer,
+ create_missing_branch=options.create_missing_branches)
+ repo.create_tag(name=src_tag,
+ msg=msg,
+ commit=src_commit,
+ sign=options.sign_tags,
+ keyid=options.keyid)
+
+ if not options.native:
+ if options.pristine_tar:
+ archive_fmt = parse_archive_filename(orig_tarball)[1]
+ if archive_fmt == 'tar':
+ repo.pristine_tar.commit(orig_tarball,
+ 'refs/heads/%s' %
+ options.upstream_branch)
+ else:
+ gbp.log.warn('Ignoring pristine-tar, %s archives '
+ 'not supported' % archive_fmt)
+ else:
+ gbp.log.info("No orig source archive imported")
+
+ # Import packaging files. For native packages we assume that the
+ # packaging files are also found in the source tarball
+ if not options.native or not sources:
+ gbp.log.info("Importing packaging files")
+ branch = options.packaging_branch
+ if not repo.has_branch(branch):
+ if options.create_missing_branches:
+ gbp.log.info("Will create missing branch '%s'" % branch)
+ else:
+ gbp.log.err(no_packaging_branch_msg % branch + "\n"
+ "Also check the --create-missing-branches "
+ "option.")
+ raise GbpError
+
+ tag = repo.version_to_tag(options.packaging_tag, tag_str_fields)
+ msg = "%s release %s" % (options.vendor, ver_str)
+
+ if options.orphan_packaging or not sources:
+ commit = repo.commit_dir(dirs['packaging_base'],
+ "Imported %s" % msg,
+ branch,
+ author=author,
+ committer=committer,
+ create_missing_branch=options.create_missing_branches)
+ else:
+ # Copy packaging files to the unpacked sources dir
+ try:
+ pkgsubdir = os.path.join(sources.unpacked,
+ options.packaging_dir)
+ os.mkdir(pkgsubdir)
+ except OSError as err:
+ if err.errno != errno.EEXIST:
+ raise
+ for fname in os.listdir(dirs['packaging']):
+ shutil.copy2(os.path.join(dirs['packaging'], fname),
+ pkgsubdir)
+ commit = repo.commit_dir(sources.unpacked,
+ "Imported %s" % msg,
+ branch,
+ other_parents=[src_commit],
+ author=author,
+ committer=committer,
+ create_missing_branch=options.create_missing_branches)
+ # Import patches on top of the source tree
+ # (only for non-native packages with non-orphan packaging)
+ force_to_branch_head(repo, options.packaging_branch)
+
+ # Create packaging tag
+ repo.create_tag(name=tag,
+ msg=msg,
+ commit=commit,
+ sign=options.sign_tags,
+ keyid=options.keyid)
+
+ force_to_branch_head(repo, options.packaging_branch)
+
+ except KeyboardInterrupt:
+ ret = 1
+ gbp.log.err("Interrupted. Aborting.")
+ except gbpc.CommandExecFailed:
+ ret = 1
+ except GitRepositoryError as err:
+ gbp.log.err("Git command failed: %s" % err)
+ ret = 1
+ except GbpError as err:
+ if len(err.__str__()):
+ gbp.log.err(err)
+ ret = 1
+ except NoSpecError as err:
+ gbp.log.err("Failed determine spec file: %s" % err)
+ ret = 1
+ except SkipImport:
+ skipped = True
+ finally:
+ os.chdir(dirs['top'])
+ gbpc.RemoveTree(dirs['tmp_base'])()
+
+ if not ret and not skipped:
+ gbp.log.info("Version '%s' imported under '%s'" % (ver_str, spec.name))
+ return ret
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
+
+# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·:
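
The new importer becomes available as 'gbp import-srpm' via the supercommand; it can equally be driven from Python. A sketch with a hypothetical source package file:

    from gbp.scripts.import_srpm import main

    # equivalent to: gbp import-srpm --pristine-tar foo-1.0-1.src.rpm
    ret = main(['import-srpm', '--pristine-tar', 'foo-1.0-1.src.rpm'])
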
diff --git a/gbp/scripts/pq.py b/gbp/scripts/pq.py
index fc205bf..194145e 100755
--- a/gbp/scripts/pq.py
+++ b/gbp/scripts/pq.py
@@ -1,6 +1,6 @@
# vim: set fileencoding=utf-8 :
#
-# (C) 2011 Guido Günther <agx@sigxcpu.org>
+# (C) 2011,2014 Guido Günther <agx@sigxcpu.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
@@ -15,7 +15,7 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
-"""manage patches in a patch queue"""
+"""Manage Debian patches on a patch queue branch"""
import ConfigParser
import errno
@@ -35,6 +35,7 @@ from gbp.scripts.common.pq import (is_pq_branch, pq_branch_name, pq_branch_base,
switch_to_pq_branch, apply_single_patch,
apply_and_commit_patch,
drop_pq, get_maintainer_from_control)
+from gbp.dch import extract_bts_cmds
PATCH_DIR = "debian/patches/"
SERIES_FILE = os.path.join(PATCH_DIR,"series")
@@ -67,6 +68,71 @@ def generate_patches(repo, start, end, outdir, options):
return patches
+def compare_series(old, new):
+ """
+ Compare the new patch series to the list of patches already exported
+
+ >>> compare_series(['a', 'b'], ['b', 'c'])
+ (['c'], ['a'])
+ >>> compare_series([], [])
+ ([], [])
+ """
+ added = set(new).difference(old)
+ removed = set(old).difference(new)
+ return (list(added), list(removed))
+
+
+def format_series_diff(added, removed, options):
+ """
+ Format the patch differences into a suitable commit message
+
+ >>> format_series_diff(['a'], ['b'], None)
+ 'Rediff patches\\n\\nAdded a: <REASON>\\nDropped b: <REASON>\\n'
+ """
+ if len(added) == 1 and not removed:
+ # Single patch added, create a more thorough commit message
+ patch = Patch(os.path.join('debian', 'patches', added[0]))
+ msg = patch.subject
+ bugs, dummy = extract_bts_cmds(patch.long_desc.split('\n'), options)
+ if bugs:
+ msg += '\n'
+ for k, v in bugs.items():
+ msg += '\n%s: %s' % (k, ', '.join(v))
+ else:
+ msg = "Rediff patches\n\n"
+ for p in added:
+ msg += 'Added %s: <REASON>\n' % p
+ for p in removed:
+ msg += 'Dropped %s: <REASON>\n' % p
+ return msg
+
+
+def commit_patches(repo, branch, patches, options):
+ """
+ Commit changes exported from the patch queue
+ """
+ clean, dummy = repo.is_clean()
+ if clean:
+ return ([], [])
+
+ vfs = gbp.git.vfs.GitVfs(repo, branch)
+ try:
+ oldseries = vfs.open('debian/patches/series')
+ oldpatches = [ p.strip() for p in oldseries.readlines() ]
+ oldseries.close()
+ except IOError:
+ # No series file yet
+ oldpatches = []
+ newpatches = [ p[len(PATCH_DIR):] for p in patches ]
+
+ # FIXME: handle case where only the contents of the patches changed
+ added, removed = compare_series(oldpatches, newpatches)
+ msg = format_series_diff(added, removed, options)
+ repo.add_files(PATCH_DIR)
+ repo.commit_staged(msg=msg)
+ return added, removed
+
+
def export_patches(repo, branch, options):
"""Export patches from the pq branch into a patch series"""
if is_pq_branch(branch):
@@ -90,10 +156,22 @@ def export_patches(repo, branch, options):
with open(SERIES_FILE, 'w') as seriesfd:
for patch in patches:
seriesfd.write(os.path.relpath(patch, PATCH_DIR) + '\n')
- GitCommand('status')(['--', PATCH_DIR])
+ if options.commit:
+ added, removed = commit_patches(repo, branch, patches, options)
+ if added:
+ what = 'patches' if len(added) > 1 else 'patch'
+ gbp.log.info("Added %s %s to patch series" % (what, ', '.join(added)))
+ if removed:
+ what = 'patches' if len(removed) > 1 else 'patch'
+ gbp.log.info("Removed %s %s from patch series" % (what, ', '.join(removed)))
+ else:
+ GitCommand('status')(['--', PATCH_DIR])
else:
gbp.log.info("No patches on '%s' - nothing to do." % pq_branch)
+ if options.drop:
+ drop_pq(repo, branch)
+
def safe_patches(series):
"""
@@ -174,8 +252,9 @@ def import_quilt_patches(repo, branch, series, tries, force):
gbp.log.debug("Applying %s" % patch.path)
try:
apply_and_commit_patch(repo, patch, maintainer, patch.topic)
- except (GbpError, GitRepositoryError):
- gbp.log.err("Failed to apply '%s'" % patch.path)
+ except (GbpError, GitRepositoryError) as e:
+ gbp.log.err("Failed to apply '%s': %s" % (patch.path, e))
+ repo.force_head('HEAD', hard=True)
repo.set_branch(branch)
repo.delete_branch(pq_branch)
break
@@ -233,11 +312,14 @@ def build_parser(name):
help="verbose command execution")
parser.add_option("--topic", dest="topic", help="in case of 'apply' topic (subdir) to put patch into")
parser.add_config_file_option(option_name="time-machine", dest="time_machine", type="int")
+ parser.add_boolean_config_file_option("drop", dest='drop')
+ parser.add_boolean_config_file_option(option_name="commit", dest="commit")
parser.add_option("--force", dest="force", action="store_true", default=False,
help="in case of import even import if the branch already exists")
parser.add_config_file_option(option_name="color", dest="color", type='tristate')
parser.add_config_file_option(option_name="color-scheme",
dest="color_scheme")
+ parser.add_config_file_option(option_name="meta-closes", dest="meta_closes")
return parser
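
A sketch of the series bookkeeping that the new --commit mode builds on; compare_series() and format_series_diff() are pure helpers and can be exercised standalone (patch names are hypothetical):

    from gbp.scripts.pq import compare_series, format_series_diff

    old = ['fix-build.patch', 'obsolete.patch']
    new = ['fix-build.patch', 'add-feature.patch']
    added, removed = compare_series(old, new)
    # added == ['add-feature.patch'], removed == ['obsolete.patch']
    msg = format_series_diff(added, removed, options=None)
    # "Rediff patches\n\nAdded add-feature.patch: <REASON>\nDropped obsolete.patch: <REASON>\n"
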
diff --git a/gbp/scripts/pull.py b/gbp/scripts/pull.py
index fb0d827..65e3e49 100755
--- a/gbp/scripts/pull.py
+++ b/gbp/scripts/pull.py
@@ -17,7 +17,7 @@
#
# heavily inspired by dom-safe-pull which is © 2009 Stéphane Glondu <steph@glondu.net>
#
-"""fast forward debian, upstream and pristine-tar branch"""
+"""Pull remote changes and fast forward debian, upstream and pristine-tar branch"""
import ConfigParser
import sys
diff --git a/gbp/scripts/supercommand.py b/gbp/scripts/supercommand.py
index 2eb64de..83c8446 100644
--- a/gbp/scripts/supercommand.py
+++ b/gbp/scripts/supercommand.py
@@ -17,6 +17,8 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Supercommand for all gbp commands"""
+import glob
+import os
import re
import sys
@@ -42,8 +44,18 @@ The most commonly used commands are:
import-orig - import a new upstream tarball
import-dsc - import a single Debian source package
import-dscs - import multiple Debian source packages
+
+Use '--list-cmds' to list all available commands.
"""
+def version(prog):
+ try:
+ from gbp.version import gbp_version
+ except ImportError:
+ gbp_version = '[Unknown version]'
+ print("%s %s" % (os.path.basename(prog), gbp_version))
+
+
def import_command(cmd):
"""
Import the module that implements the given command
@@ -56,6 +68,40 @@ def import_command(cmd):
return __import__('gbp.scripts.%s' % modulename, fromlist='main', level=0)
+def pymod_to_cmd(mod):
+ """
+ >>> pymod_to_cmd('/x/y/z/a_cmd.py')
+ 'a-cmd'
+ """
+ return os.path.basename(mod.rsplit('.', 1)[0]).replace('_','-')
+
+
+def get_available_commands(path):
+ cmds = []
+ for f in glob.glob(os.path.join(path, '*.py')):
+ if os.path.basename(f) in ['__init__.py', 'supercommand.py']:
+ continue
+ cmds.append((pymod_to_cmd(f), f))
+ return cmds
+
+
+def list_available_commands():
+ mod = __import__('gbp.scripts', fromlist='main', level=0)
+ path = os.path.dirname(mod.__file__)
+ maxlen = 0
+
+ print("Available commands in %s\n" % path)
+ cmds = sorted(get_available_commands(path))
+ for cmd in cmds:
+ if len(cmd[0]) > maxlen:
+ maxlen = len(cmd[0])
+ for cmd in cmds:
+ mod = import_command(cmd[0])
+ doc = mod.__doc__
+ print(" %s - %s" % (cmd[0].rjust(maxlen), doc))
+ print('')
+
+
def supercommand(argv=None):
argv = argv or sys.argv
@@ -63,12 +109,18 @@ def supercommand(argv=None):
usage()
return 1
- cmd = argv[1]
+ prg, cmd = argv[0:2]
args = argv[1:]
- if cmd in ['--help', '-h']:
+ if cmd in ['--help', '-h', 'help' ]:
usage()
return 0
+ elif cmd in [ '--version', 'version' ]:
+ version(argv[0])
+ return 0
+ elif cmd in [ '--list-cmds', 'list-cmds' ]:
+ list_available_commands()
+ return 0
try:
module = import_command(cmd)
@@ -81,4 +133,7 @@ def supercommand(argv=None):
return module.main(args)
+if __name__ == '__main__':
+ sys.exit(supercommand())
+
# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·:
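
The new entry points can also be exercised directly from Python; a short sketch:

    from gbp.scripts.supercommand import supercommand

    supercommand(['gbp', '--version'])     # prints e.g. 'gbp <version>'
    supercommand(['gbp', '--list-cmds'])   # lists every gbp.scripts module as a command
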
diff --git a/gbp/tmpfile.py b/gbp/tmpfile.py
new file mode 100644
index 0000000..e1ad308
--- /dev/null
+++ b/gbp/tmpfile.py
@@ -0,0 +1,38 @@
+# vim: set fileencoding=utf-8 :
+#
+# (C) 2012 Intel Corporation <markus.lehtonen@linux.intel.com>
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+"""Temporary directory handling"""
+
+import os
+import tempfile
+
+from gbp.errors import GbpError
+
+def mkdtemp(dir, **kwargs):
+ """Create temporary directory"""
+ try:
+ if not os.path.exists(dir):
+ os.makedirs(dir)
+ except OSError as (dummy_e, msg):
+ raise GbpError, "Unable to create dir %s (%s)" % (dir, msg)
+
+ try:
+ return os.path.abspath(tempfile.mkdtemp(dir=dir, **kwargs))
+ except OSError as (dummy_e, msg):
+ raise GbpError, "Unable to create tmpdir under %s (%s)" % (dir, msg)
+
+# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·:
+
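
The helper wraps tempfile.mkdtemp() so that all gbp work directories can be collected under one configurable base directory (cf. the new --tmp-dir option of import-srpm); a usage sketch with an assumed base path:

    import gbp.command_wrappers as gbpc
    from gbp import tmpfile

    tmp_base = tmpfile.mkdtemp(dir='/var/tmp/gbp', prefix='import-srpm_')
    try:
        workdir = tmpfile.mkdtemp(dir=tmp_base, prefix='unpack_')
        # ... unpack and import sources here ...
    finally:
        gbpc.RemoveTree(tmp_base)()        # remove the whole temporary tree
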