Support using the auto-detected name
Rework to absorb a lot of the work we were doing in makefiles so that we
can avoid setting the package name manually in 'make autospecnew'.
bwarden committed May 9, 2019
1 parent f354bff commit 27d5256
Showing 5 changed files with 35 additions and 11 deletions.
7 changes: 5 additions & 2 deletions autospec/autospec.py
@@ -176,6 +176,9 @@ def main():
     parser.add_argument("-t", "--target", dest="target", action="store",
                         default=None,
                         help="Target location to create or reuse")
+    parser.add_argument("-P", "--package-dir", dest="package_dir", action="store",
+                        default=None,
+                        help="Location under which to create/clone a package")
     parser.add_argument("-i", "--integrity", action="store_true",
                         default=False,
                         help="Search for package signature from source URL and "
@@ -247,7 +250,7 @@ def package(args, url, name, archives, workingdir, infile_dict):
     # of static analysis on the content of the tarball.
     #
     filemanager = files.FileManager()
-    tarball.process(url, name, args.version, args.target, archives, filemanager)
+    tarball.process(url, name, args.version, args.target, args.package_dir, archives, filemanager)
     _dir = tarball.path

     if args.license_only:
@@ -341,7 +344,7 @@ def package(args, url, name, archives, workingdir, infile_dict):

     examine_abi(build.download_path)
     if os.path.exists("/var/lib/rpm"):
-        pkg_scan.get_whatrequires(tarball.name)
+        pkg_scan.get_whatrequires(tarball.name, build.download_path)

     write_out(build.download_path + "/release", tarball.release + "\n")

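
The net effect of the new option: an explicit --target still wins, otherwise the working directory is derived from --package-dir plus the auto-detected package name (see the tarball.py hunk below). A minimal standalone sketch of that precedence; the resolve_target helper, paths, and package name here are hypothetical, not part of the commit:

import os

def resolve_target(target, package_dir, name):
    # Mirrors the "if not target and package_dir" check added to tarball.process().
    if not target and package_dir:
        target = os.path.join(package_dir, name)
    return target

print(resolve_target(None, "/home/clear/packages", "example-pkg"))  # /home/clear/packages/example-pkg
print(resolve_target("/tmp/pkg-work", "/home/clear/packages", "example-pkg"))  # /tmp/pkg-work
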
11 changes: 7 additions & 4 deletions autospec/config.py
@@ -64,7 +64,8 @@

 license_fetch = None
 license_show = None
-git_uri = None
+git_pull_uri = None
+git_push_uri = None
 os_packages = set()
 config_file = None
 old_version = None
@@ -547,7 +548,8 @@ def parse_config_files(path, bump, filemanager, version):
     global parallel_build
     global license_fetch
     global license_show
-    global git_uri
+    global git_pull_uri
+    global git_push_uri
     global os_packages
     global urlban
     global config_file
@@ -585,7 +587,8 @@ def parse_config_files(path, bump, filemanager, version):
         print("Missing autospec section..")
         sys.exit(1)

-    git_uri = config['autospec'].get('git', None)
+    git_push_uri = config['autospec'].get('git', None)
+    git_pull_uri = config['autospec'].get('git_pull', git_push_uri)
     license_fetch = config['autospec'].get('license_fetch', None)
     license_show = config['autospec'].get('license_show', None)
     packages_file = config['autospec'].get('packages_file', None)
@@ -610,7 +613,7 @@ def parse_config_files(path, bump, filemanager, version):
     # Read values from options.conf (and deprecated files) and rewrite as necessary
     read_config_opts(path)

-    if not git_uri:
+    if not git_pull_uri:
         print("Warning: Set [autospec][git] upstream template for remote git URI configuration")
     if not license_fetch:
         print("Warning: Set [autospec][license_fetch] uri for license fetch support")
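
With this change the [autospec] config section can carry separate templates: 'git' is used for pushing and an optional 'git_pull' for cloning/fetching, falling back to the 'git' value when unset. A minimal sketch of that lookup; the URIs are hypothetical, and interpolation is disabled here only so the %(NAME)s placeholder survives (the real config loading may differ):

import configparser

cfg = configparser.ConfigParser(interpolation=None)
cfg.read_string("""
[autospec]
git = ssh://user@git.example.com/packages/%(NAME)s
git_pull = https://git.example.com/packages/%(NAME)s
""")

# Same lookup order as in config.py above: git_pull defaults to the push template.
git_push_uri = cfg['autospec'].get('git', None)
git_pull_uri = cfg['autospec'].get('git_pull', git_push_uri)

print(git_pull_uri % {'NAME': 'example-pkg'})  # https://git.example.com/packages/example-pkg
print(git_push_uri % {'NAME': 'example-pkg'})  # ssh://user@git.example.com/packages/example-pkg
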
7 changes: 5 additions & 2 deletions autospec/git.py
@@ -34,12 +34,15 @@ def commit_to_git(path):
     call("git init", stdout=subprocess.DEVNULL, cwd=path)

     # This config is used for setting the remote URI, so it is optional.
-    if config.git_uri:
+    if config.git_pull_uri:
         try:
             call("git config --get remote.origin.url", cwd=path)
         except subprocess.CalledProcessError:
-            upstream_uri = config.git_uri % {'NAME': tarball.name}
+            upstream_uri = config.git_pull_uri % {'NAME': tarball.name}
             call("git remote add origin %s" % upstream_uri, cwd=path)
+            push_uri = config.git_push_uri % {'NAME': tarball.name}
+            if push_uri != upstream_uri:
+                call("git remote set-url origin --push %s" % upstream_uri, cwd=path)

     for config_file in config.config_files:
         call("git add %s" % config_file, cwd=path, check=False)
5 changes: 3 additions & 2 deletions autospec/pkg_scan.py
@@ -15,13 +15,14 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
+import os
 import subprocess

 import config
 import util


-def get_whatrequires(pkg):
+def get_whatrequires(pkg, download_path):
     """
     Write list of packages.
@@ -47,5 +48,5 @@ def get_whatrequires(pkg):
         util.print_warning("dnf repoquery whatrequires for {} failed with: {}".format(pkg, err))
         return

-    util.write_out('whatrequires', '# This file contains recursive sources that '
+    util.write_out(os.path.join(download_path, 'whatrequires'), '# This file contains recursive sources that '
                    'require this package\n' + out)
16 changes: 15 additions & 1 deletion autospec/tarball.py
@@ -510,6 +510,14 @@ def prepare_and_extract(extract_cmd):
     call(extract_cmd)


+def write_makefile(name, url, archives):
+    makefile_path = os.path.join(build.download_path, "Makefile")
+    if not os.path.exists(makefile_path):
+        write_out(makefile_path,
+                  "PKG_NAME := %s\nURL = %s\nARCHIVES = %s\n\ninclude ../common/Makefile.common\n"
+                  % (name, url, ' '.join(archives)))
+
+
 def process_archives(archives):
     """Download and process archives."""
     for archive, destination in zip(archives[::2], archives[1::2]):
@@ -544,7 +552,7 @@ def process_archives(archives):
         write_upstream(sha1, os.path.basename(archive), mode="a")


-def process(url_arg, name_arg, ver_arg, target, archives_arg, filemanager):
+def process(url_arg, name_arg, ver_arg, target, package_dir, archives_arg, filemanager):
     """Download and process the tarball at url_arg."""
     global url
     global name
@@ -565,6 +573,11 @@ def process(url_arg, name_arg, ver_arg, target, archives_arg, filemanager):
     # name is created by adding ".gcov" to the package name (if a gcov file
     # exists)
     set_gcov()
+    # build the target path, if necessary
+    if not target and package_dir:
+        target = os.path.join(package_dir, name)
+    # create the path, by cloning existing repo or manually, as necessary
+    call("make clone_%s" % name, check=False, stderr=subprocess.DEVNULL)
     # download the tarball to tar_path
     tar_path = download_tarball(target)
     # write the sha of the upstream tarfile to the "upstream" file
@@ -576,6 +589,7 @@ def process(url_arg, name_arg, ver_arg, target, archives_arg, filemanager):
     # Now that the metadata has been collected print the header
     print_header()
     # write out the Makefile with the name, url, and archives we found
+    write_makefile(name, url, archives)
     # prepare directory and extract tarball
     prepare_and_extract(extract_cmd)
     # locate or download archives and move them into the right spot
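
For reference, a small sketch of the stanza write_makefile() produces, reusing the format string from the first tarball.py hunk with made-up name and URL values and no extra archives:

name = "example-pkg"                                # hypothetical auto-detected name
url = "https://example.org/example-pkg-1.0.tar.gz"  # hypothetical source URL
archives = []                                       # no additional archives in this sketch

stanza = ("PKG_NAME := %s\nURL = %s\nARCHIVES = %s\n\ninclude ../common/Makefile.common\n"
          % (name, url, ' '.join(archives)))
print(stanza)
# PKG_NAME := example-pkg
# URL = https://example.org/example-pkg-1.0.tar.gz
# ARCHIVES =
#
# include ../common/Makefile.common

Note that write_makefile() only writes this file when a Makefile does not already exist, so a hand-maintained package Makefile is left untouched.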