Merge pull request #741 from conda/pin-versions
[WIP] pin dependencies when building a package
ilanschnell committed Jan 28, 2016
2 parents 3c495b3 + f5e1075 commit 4cccc8a
Showing 3 changed files with 66 additions and 33 deletions.
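This PR adds an optional build/pin_depends field to the recipe. Per the metadata.py changes below, the accepted values are '' (the default, pinning off), 'record' and 'strict'. As a minimal sketch of the recipe side (the package name and requirements are invented for illustration and are not part of this commit), a recipe using the field would parse to something like:

# Hypothetical parsed recipe dict; pin_depends must be '', 'record' or 'strict'.
meta = {
    'package': {'name': 'foo', 'version': '1.0'},
    'build': {'pin_depends': 'strict'},
    'requirements': {'run': ['numpy', 'python']},
}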
59 changes: 41 additions & 18 deletions conda_build/build.py
@@ -41,6 +41,9 @@
else:
shell_path = '/bin/bash'

channel_urls = ()
override_channels = False
verbose = True

def prefix_files():
'''
@@ -129,6 +132,14 @@ def rewrite_file_with_new_prefix(path, data, old_prefix, new_prefix):
os.chmod(path, stat.S_IMODE(st.st_mode) | stat.S_IWUSR) # chmod u+w
return data


def get_run_dists(m):
prefix = join(cc.envs_dirs[0], '_run')
rm_rf(prefix)
create_env(prefix, [ms.spec for ms in m.ms_depends('run')])
return sorted(linked(prefix))


def create_info_files(m, files, include_recipe=True):
'''
Creates the metadata files that will be stored in the built package.
@@ -173,10 +184,29 @@ def create_info_files(m, files, include_recipe=True):
print("WARNING: anaconda.org only recognizes about/readme as README.md and README.rst",
file=sys.stderr)

info_index = m.info_index()
pin_depends = m.get_value('build/pin_depends')
if pin_depends:
dists = get_run_dists(m)
with open(join(config.info_dir, 'requires'), 'w') as fo:
fo.write("""\
# This file was created when building:
#
# %s.tar.bz2 (on '%s')
#
# It can be used to create the runtime environment of this package using:
# $ conda create --name <env> --file <this file>
""" % (m.dist(), cc.subdir))
for dist in sorted(dists + [m.dist()]):
fo.write('%s\n' % '='.join(dist.rsplit('-', 2)))
if pin_depends == 'strict':
info_index['depends'] = [' '.join(dist.rsplit('-', 2))
for dist in dists]

# Deal with Python 2 and 3's different json module type reqs
mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}
with open(join(config.info_dir, 'index.json'), **mode_dict) as fo:
json.dump(m.info_index(), fo, indent=2, sort_keys=True)
json.dump(info_index, fo, indent=2, sort_keys=True)

if include_recipe:
with open(join(config.info_dir, 'recipe.json'), **mode_dict) as fo:
@@ -250,25 +280,23 @@ def create_info_files(m, files, include_recipe=True):
shutil.copyfile(join(m.path, m.get_value('app/icon')),
join(config.info_dir, 'icon.png'))

def get_build_index(clear_cache=True, channel_urls=(), override_channels=False):
def get_build_index(clear_cache=True):
if clear_cache:
# remove the cache such that a refetch is made,
# this is necessary because we add the local build repo URL
fetch_index.cache = {}
return get_index(channel_urls=[url_path(config.croot)] + list(channel_urls),
prepend=not override_channels)
prepend=not override_channels)

def create_env(prefix, specs, clear_cache=True, verbose=True, channel_urls=(),
override_channels=False):
def create_env(prefix, specs, clear_cache=True):
'''
Create a conda environment for the given prefix and specs.
'''
if not isdir(config.bldpkgs_dir):
os.makedirs(config.bldpkgs_dir)
update_index(config.bldpkgs_dir)
if specs: # Don't waste time if there is nothing to do
index = get_build_index(clear_cache=True, channel_urls=channel_urls,
override_channels=override_channels)
index = get_build_index(clear_cache=True)

warn_on_old_conda_build(index)

@@ -304,7 +332,6 @@ def warn_on_old_conda_build(index):
""" % (vers_inst[0], pkgs[-1].version), file=sys.stderr)



def rm_pkgs_cache(dist):
'''
Removes dist from the package cache.
@@ -320,8 +347,7 @@ def bldpkg_path(m):
'''
return join(config.bldpkgs_dir, '%s.tar.bz2' % m.dist())

def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
override_channels=False, include_recipe=True):
def build(m, get_src=True, post=None, include_recipe=True):
'''
Build the package with the specified metadata.
@@ -368,14 +394,12 @@ def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
# Version number could be missing due to dependency on source info.
print("BUILD START:", m.dist())
create_env(config.build_prefix,
[ms.spec for ms in m.ms_depends('build')],
verbose=verbose, channel_urls=channel_urls,
override_channels=override_channels)
[ms.spec for ms in m.ms_depends('build')])

if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
print("%s is installed as a build dependency. Removing." %
m.name())
index = get_build_index(clear_cache=False, channel_urls=channel_urls, override_channels=override_channels)
index = get_build_index(clear_cache=False)
actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
assert not plan.nothing_to_do(actions), actions
plan.display_actions(actions, index)
@@ -483,7 +507,7 @@ def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
print("STOPPING BUILD BEFORE POST:", m.dist())


def test(m, verbose=True, channel_urls=(), override_channels=False, move_broken=True):
def test(m, move_broken=True):
'''
Execute any test scripts for the given package.
@@ -533,8 +557,7 @@ def test(m, verbose=True, channel_urls=(), override_channels=False, move_broken=
# as the tests are run by perl, we need to specify it
specs += ['perl %s*' % environ.get_perl_ver()]

create_env(config.test_prefix, specs, verbose=verbose,
channel_urls=channel_urls, override_channels=override_channels)
create_env(config.test_prefix, specs)

env = dict(os.environ)
env.update(environ.get_dict(m, prefix=config.test_prefix))
@@ -595,6 +618,6 @@ def tests_failed(m, move_broken):
if not isdir(config.broken_dir):
os.makedirs(config.broken_dir)

if move_broken:
if move_broken:
shutil.move(bldpkg_path(m), join(config.broken_dir, "%s.tar.bz2" % m.dist()))
sys.exit("TESTS FAILED: " + m.dist())
20 changes: 9 additions & 11 deletions conda_build/main_build.py
@@ -266,7 +266,9 @@ def execute(args, parser):
from conda_build.metadata import MetaData

check_external()
channel_urls = args.channel or ()
build.channel_urls = args.channel or ()
build.override_channels = args.override_channels
build.verbose = not args.quiet

if on_win:
# needs to happen before any c extensions are imported that might be
@@ -322,9 +324,7 @@ def execute(args, parser):
if not isdir(config.bldpkgs_dir):
makedirs(config.bldpkgs_dir)
update_index(config.bldpkgs_dir)
index = build.get_build_index(clear_cache=True,
channel_urls=channel_urls,
override_channels=args.override_channels)
index = build.get_build_index(clear_cache=True)

already_built = []
to_build_recursive = []
@@ -378,8 +378,7 @@ def execute(args, parser):
print(build.bldpkg_path(m))
continue
elif args.test:
build.test(m, verbose=not args.quiet,
channel_urls=channel_urls, override_channels=args.override_channels, move_broken=False)
build.test(m, move_broken=False)
elif args.source:
source.provide(m.path, m.get_section('source'))
print('Source tree in:', source.get_dir())
@@ -396,9 +395,8 @@ def execute(args, parser):
else:
post = None
try:
build.build(m, verbose=not args.quiet, post=post,
channel_urls=channel_urls,
override_channels=args.override_channels, include_recipe=args.include_recipe)
build.build(m, post=post,
include_recipe=args.include_recipe)
except (RuntimeError, SystemExit) as e:
error_str = str(e)
if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
@@ -461,8 +459,8 @@ def execute(args, parser):
continue

if not args.notest:
build.test(m, verbose=not args.quiet,
channel_urls=channel_urls, override_channels=args.override_channels)
build.test(m)

binstar_upload = True

if need_cleanup:
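The main_build.py side shows the new calling convention: channel_urls, override_channels and verbose are no longer threaded through build(), test() and create_env() as keyword arguments; the CLI sets them once as module-level attributes of conda_build.build. A minimal sketch of how a caller would drive a build after this change (the recipe path is hypothetical, not from this commit):

from conda_build import build
from conda_build.metadata import MetaData

# Configure once at module level (previously passed as keyword arguments):
build.channel_urls = ()          # e.g. URLs collected from --channel
build.override_channels = False  # --override-channels
build.verbose = True             # not --quiet

m = MetaData('path/to/recipe')   # hypothetical recipe directory
build.build(m, post=None, include_recipe=True)
build.test(m)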
20 changes: 16 additions & 4 deletions conda_build/metadata.py
@@ -128,6 +128,14 @@ def ensure_valid_license_family(meta):
"about/license_family '%s' not allowed. Allowed families are %s." %
(license_family, comma_join(sorted(allowed_license_families)))))

def ensure_valid_fields(meta):
try:
pin_depends = meta['build']['pin_depends']
except KeyError:
pin_depends = ''
if pin_depends not in ('', 'record', 'strict'):
raise RuntimeError("build/pin_depends cannot be '%s'" % pin_depends)

def parse(data):
data = select_lines(data, ns_cfg())
res = yamlize(data)
@@ -154,10 +162,11 @@ def parse(data):
res[section] = {}
if res[section].get(key, None) is None:
res[section][key] = []

# ensure those are strings
for field in ('package/version', 'build/string', 'source/svn_rev',
'source/git_tag', 'source/git_branch', 'source/md5',
'source/git_rev', 'source/path'):
for field in ('package/version', 'build/string', 'build/pin_depends',
'source/svn_rev', 'source/git_tag', 'source/git_branch',
'source/md5', 'source/git_rev', 'source/path'):
section, key = field.split('/')
if res.get(section) is None:
res[section] = {}
@@ -188,6 +197,7 @@ def parse(data):
elif val in falses:
res[section][key] = False

ensure_valid_fields(res)
ensure_valid_license_family(res)
return sanitize(res)

@@ -256,7 +266,9 @@ def _git_clean(source_meta):
'no_link', 'binary_relocation', 'script', 'noarch_python',
'has_prefix_files', 'binary_has_prefix_files', 'script_env',
'detect_binary_files_with_prefix', 'rpaths',
'always_include_files', 'skip', 'msvc_compiler'],
'always_include_files', 'skip', 'msvc_compiler',
'pin_depends' # pin_depends is experimental still
],
'requirements': ['build', 'run', 'conflicts'],
'app': ['entry', 'icon', 'summary', 'type', 'cli_opts',
'own_environment'],
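In metadata.py, the new ensure_valid_fields() check is called from parse(), build/pin_depends is coerced to a string along with the other string fields, and 'pin_depends' is whitelisted in the build section. A minimal sketch of how the validation behaves (the dict literals and the bad value 'exact' are made up for illustration):

from conda_build.metadata import ensure_valid_fields

ensure_valid_fields({'build': {'pin_depends': 'record'}})  # accepted
ensure_valid_fields({'build': {}})                         # accepted, defaults to ''

try:
    ensure_valid_fields({'build': {'pin_depends': 'exact'}})
except RuntimeError as exc:
    print(exc)  # build/pin_depends cannot be 'exact'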
