diff --git a/.CLMTrunkChecklist b/.CLMTrunkChecklist
index 6d76a47a12..8774800891 100644
--- a/.CLMTrunkChecklist
+++ b/.CLMTrunkChecklist
@@ -42,6 +42,8 @@ This should show no diffs
(8) Make the trunk tag
+(9) Send an email to clm-dev with the contents of the latest ChangeLog
+entry (until we have automated this for the git repo)
NOTES:
diff --git a/.config_files.xml b/.config_files.xml
new file mode 100644
index 0000000000..8e4868b94f
--- /dev/null
+++ b/.config_files.xml
@@ -0,0 +1,31 @@
+
+
+
+
+
+
+
+
+ char
+ unset
+
+ $SRCROOT
+ $CIMEROOT/src/components/data_comps/dlnd
+ $CIMEROOT/src/components/stub_comps/slnd
+ $CIMEROOT/src/components/xcpl_comps/xlnd
+
+ case_comps
+ env_case.xml
+ Root directory of the case land model component
+ $CIMEROOT/config/xml_schemas/config_compsets.xsd
+
+
+
diff --git a/.gitignore b/.gitignore
index bcd4cec71d..7ec31341da 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,11 @@
+# directories checked out by manage_externals, and other files created
+# by manage_externals
+manage_externals.log
+src/fates/
+tools/PTCLM/
+cime/
+components/
+
# ignore svn directories
**/.svn/**
.svn/
@@ -25,7 +33,4 @@ CMakeFiles/
core.*
*.gz
*.log !run.log
-
-
-
-
+*.pyc
diff --git a/Externals.cfg b/Externals.cfg
new file mode 100644
index 0000000000..cfd2b65564
--- /dev/null
+++ b/Externals.cfg
@@ -0,0 +1,38 @@
+[clm]
+local_path = .
+protocol = externals_only
+externals = Externals_CLM.cfg
+required = True
+
+[cism]
+local_path = components/cism
+protocol = git
+repo_url = https://github.com/ESCOMP/cism-wrapper
+tag = cism2_1_46
+externals = Externals_CISM.cfg
+required = True
+
+[rtm]
+local_path = components/rtm
+protocol = git
+repo_url = https://github.com/ESCOMP/rtm
+tag = rtm1_0_65
+required = True
+
+[mosart]
+local_path = components/mosart
+protocol = git
+repo_url = https://github.com/ESCOMP/mosart
+tag = mosart1_0_30
+required = True
+
+[cime]
+local_path = cime
+protocol = git
+repo_url = https://github.com/CESM-Development/cime
+tag = clm4518/n04/cime5.4.0-alpha.03
+required = True
+
+[externals_description]
+schema_version = 1.0.0
+
diff --git a/Externals_CLM.cfg b/Externals_CLM.cfg
new file mode 100644
index 0000000000..38ee16f0ed
--- /dev/null
+++ b/Externals_CLM.cfg
@@ -0,0 +1,17 @@
+[fates]
+local_path = src/fates
+protocol = git
+repo_url = https://github.com/NCAR/fates-release
+tag = fates_s1.4.1_a3.0.0_rev2
+required = True
+
+[PTCLM]
+local_path = tools/PTCLM
+protocol = git
+repo_url = https://github.com/ESCOMP/ptclm
+tag = PTCLM2_171216c
+required = True
+
+[externals_description]
+schema_version = 1.0.0
+
diff --git a/LICENSE b/LICENSE
index cb8a9e9f5a..2ba2f9c2d8 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,4 +1,4 @@
-Copyright (c) 2005-2017, University Corporation for Atmospheric Research (UCAR)
+Copyright (c) 2005-2018, University Corporation for Atmospheric Research (UCAR)
All rights reserved.
Developed by:
diff --git a/README.rst b/README.rst
index a67ec63abc..90f6eae015 100644
--- a/README.rst
+++ b/README.rst
@@ -1,10 +1,12 @@
-===
-CLM
-===
+====
+CTSM
+====
-The Community Land Model, CLM, is part of the Community Earth System Model.
+The Community Terrestrial Systems Model.
-See the CESM web site for documentation and information:
+This includes the Community Land Model (CLM5 and CLM4.5) of the Community Earth System Model.
-http://www.cesm.ucar.edu
+For documentation, quick start, diagnostics, model output and
+references, see
+http://www.cesm.ucar.edu/models/cesm2.0/land/
diff --git a/README_EXTERNALS b/README_EXTERNALS
deleted file mode 100644
index 66afc744f6..0000000000
--- a/README_EXTERNALS
+++ /dev/null
@@ -1,56 +0,0 @@
-Some guidelines on working with externals in CLM:
-
-Also see:
-
- https://wiki.ucar.edu/display/ccsm/Creating+a+CLM+Tag
-
- https://wiki.ucar.edu/display/ccsm/Using+SVN+to+Work+with+CLM+Development+Branches
-
-Example taken from bulletin board forum for "Subversion Issues" in the
-thread for "Introduction to Subversion"...(070208)
-
-Working with externals:
-
-checkout the HEAD of clm's trunk into working copy directory
-> svn co $SVN/clm2/trunk clm_trunk_head_wc
-
-view the property set for clm's external definitions
-> svn propget svn:externals clm_trunk_head_wc
-
-view revision, URL and other useful information specific to external files
-> cd clm_trunk_head_wc/components/clm/src
-> svn info main
-
-create new clm branch for mods required of clm
-> svn copy $SVN/clm2/trunk_tags/ $SVN/clm2/branches/ -m "appropriate message"
-
-have external directories in working copy refer to new clm branch to make changes
-> svn switch $SVN/clm2/branches//src/main main
-
---make changes to clm files--
-
-when satisfied with changes and testing, commit to HEAD of clm branch
-> svn commit main -m "appropriate message"
-
-tag new version of clm branch - review naming conventions!
-> svn copy $SVN/clm2/branches/ $SVN/clm2/branch_tags/_tags/ -m "appropriate message"
-
-have external directories in working copy refer to new clm tag
-> svn switch $SVN/clm2/branch_tags/_tags//src/main main
-
-modify clm's property for external definitions in working copy
-> vi clm_trunk_head_wc/SVN_EXTERNAL_DIRECTORIES
-
---point definition to URL of new-tag-name--
-
-set the property - don't forget the 'dot' at the end!
-> svn propset svn:externals -F SVN_EXTERNAL_DIRECTORIES clm_trunk_head_wc
-
---continue with other clm mods--
-
-commit changes from working copy directory to HEAD of clm trunk - NOTE: a commit from here will *NOT* recurse to external directories
-> cd clm_trunk_head_wc
-> svn commit -m "appropriate message"
-
-tag new version of clm trunk
-> svn copy $SVN/clm2/trunk $SVN/clm2/trunk_tags/ -m "appropriate message"
diff --git a/README_EXTERNALS.rst b/README_EXTERNALS.rst
new file mode 100644
index 0000000000..01c8f669aa
--- /dev/null
+++ b/README_EXTERNALS.rst
@@ -0,0 +1,124 @@
+Obtaining the full model code and associated scripting infrastructure
+=====================================================================
+
+CTSM is released via github. You will need some familiarity with git in order
+to modify the code and commit these changes. However, to simply checkout and run the
+code, no git knowledge is required other than what is documented in the following steps.
+
+To obtain the CTSM code you need to do the following:
+
+#. Clone the repository. ::
+
+ git clone https://github.com/escomp/ctsm.git my_ctsm_sandbox
+
+ This will create a directory ``my_ctsm_sandbox/`` in your current working directory.
+
+#. Run the script **manage_externals/checkout_externals**. ::
+
+ ./manage_externals/checkout_externals
+
+ The **checkout_externals** script is a package manager that will
+ populate the ctsm directory with the relevant versions of each of the
+ components along with the CIME infrastructure code.
+
+At this point you have a working version of CTSM.
+
+To see full details of how to set up a case, compile and run, see the CIME documentation at http://esmci.github.io/cime/ .
+
+More details on checkout_externals
+----------------------------------
+
+The file **Externals.cfg** in your top-level CTSM directory tells
+**checkout_externals** which tag/branch of each component should be
+brought in to generate your sandbox. (This file serves the same purpose
+as SVN_EXTERNAL_DIRECTORIES when CLM was in a subversion repository.)
+
+NOTE: Just like svn externals, checkout_externals will always attempt
+to make the working copy exactly match the externals description. If
+you manually modify an external without updating Externals.cfg, e.g. switch
+to a different tag, then rerunning checkout_externals will switch you
+back to the external described in Externals.cfg. See below
+documentation `Customizing your CTSM sandbox`_ for more details.
+
+**You need to rerun checkout_externals whenever Externals.cfg has
+changed** (unless you have already manually updated the relevant
+external(s) to have the correct branch/tag checked out). Common times
+when this is needed are:
+
+* After checking out a new CTSM branch/tag
+
+* After merging some other CTSM branch/tag into your currently
+ checked-out branch
+
+**checkout_externals** must be run from the root of the source
+tree. For example, if you cloned CTSM with::
+
+ git clone https://github.com/escomp/ctsm.git my_ctsm_sandbox
+
+then you must run **checkout_externals** from
+``/path/to/my_ctsm_sandbox``.
+
+To see more details of **checkout_externals**, issue ::
+
+ ./manage_externals/checkout_externals --help
+
+Customizing your CTSM sandbox
+=============================
+
+There are several use cases to consider when you want to customize or modify your CTSM sandbox.
+
+Switching to a different CTSM branch or tag
+-------------------------------------------
+
+If you have already checked out a branch or tag and **HAVE NOT MADE ANY
+MODIFICATIONS** it is simple to change your sandbox. Say that you
+checked out ctsm1.0.0 but really wanted to have ctsm1.1.0;
+you would simply do the following::
+
+ git checkout ctsm1.1.0
+ ./manage_externals/checkout_externals
+
+You should **not** use this method if you have made any source code
+changes, or if you have any ongoing CTSM cases that were created from
+this sandbox. In these cases, it is often easiest to do a second **git
+clone**.
+
+Pointing to a different version of a component
+----------------------------------------------
+
+Each entry in **Externals.cfg** has the following form (we use CIME as an
+example below)::
+
+ [cime]
+ local_path = cime
+ protocol = git
+ repo_url = https://github.com/CESM-Development/cime
+ tag = cime5.4.0-alpha.20
+ required = True
+
+Each entry specifies either a tag or a branch. To point to a new tag:
+
+#. Modify the relevant entry/entries in **Externals.cfg** (e.g., changing
+ ``cime5.4.0-alpha.20`` to ``cime5.4.0-alpha.21`` above)
+
+#. Checkout the new component(s)::
+
+ ./manage_externals/checkout_externals
+
+Keep in mind that changing individual components from a tag may result
+in an invalid model (won't compile, won't run, not scientifically
+meaningful) and is unsupported.
+
+Committing your change to Externals.cfg
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+After making this change, it's a good idea to commit the change in your
+local CTSM git repository. First create a branch in your local
+repository, then commit it. (Unlike with subversion, branches are stored
+locally unless you explicitly push them up to github. Feel free to
+create whatever local branches you'd like.) For example::
+
+ git checkout -b my_ctsm_branch
+ git add Externals.cfg
+    git commit -m "Update CIME to cime5.4.0-alpha.21"
+
diff --git a/SVN_EXTERNAL_DIRECTORIES b/SVN_EXTERNAL_DIRECTORIES
deleted file mode 100644
index 06d710da44..0000000000
--- a/SVN_EXTERNAL_DIRECTORIES
+++ /dev/null
@@ -1,2 +0,0 @@
-src/fates https://github.com/NCAR/fates-release/tags/fates_s1.4.1_a3.0.0_rev2
-tools/PTCLM https://github.com/ESCOMP/ptclm/tags/PTCLM2_171216c
diff --git a/SVN_EXTERNAL_DIRECTORIES.standalone b/SVN_EXTERNAL_DIRECTORIES.standalone
deleted file mode 100644
index dc37d702cc..0000000000
--- a/SVN_EXTERNAL_DIRECTORIES.standalone
+++ /dev/null
@@ -1,4 +0,0 @@
-cime https://github.com/CESM-Development/cime/tags/clm4518/n02/cime5.4.0-alpha.03
-components/cism https://svn-ccsm-models.cgd.ucar.edu/glc/trunk_tags/cism2_1_40
-components/rtm https://github.com/ESCOMP/rtm/tags/rtm1_0_65
-components/mosart https://github.com/ESCOMP/mosart/tags/mosart1_0_30
diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm
index 4c0d7df810..45431c9323 100755
--- a/bld/CLMBuildNamelist.pm
+++ b/bld/CLMBuildNamelist.pm
@@ -59,6 +59,7 @@ SYNOPSIS
Create the namelist for CLM
REQUIRED OPTIONS
+ -cimeroot "directory" Path to cime directory
-config "filepath" Read the given CLM configuration cache file.
Default: "config_cache.xml".
-d "directory" Directory where output namelist file will be written
@@ -245,7 +246,8 @@ sub process_commandline {
# the array!
$nl_flags->{'cmdline'} = "@ARGV";
- my %opts = ( config => "config_cache.xml",
+ my %opts = ( cimeroot => undef,
+ config => "config_cache.xml",
csmdata => undef,
clm_usr_name => undef,
co2_type => undef,
@@ -282,6 +284,7 @@ sub process_commandline {
);
GetOptions(
+ "cimeroot=s" => \$opts{'cimeroot'},
"clm_demand=s" => \$opts{'clm_demand'},
"co2_ppmv=f" => \$opts{'co2_ppmv'},
"co2_type=s" => \$opts{'co2_type'},
@@ -345,9 +348,9 @@ sub check_for_perl_utils {
my $cfgdir = shift;
my $opts_ref = shift;
- # Determine CESM root directory and perl5lib root directory
- my $cesmroot = abs_path( "$cfgdir/../../../");
- my $perl5lib_dir = "$cesmroot/cime/utils/perl5lib";
+ # Determine CIME root directory and perl5lib root directory
+ my $cimeroot = $opts_ref->{'cimeroot'};
+ my $perl5lib_dir = "$cimeroot/utils/perl5lib";
#-----------------------------------------------------------------------------
# Add $perl5lib_dir to the list of paths that Perl searches for modules
@@ -4397,7 +4400,6 @@ sub main {
my $cfg = read_configure_definition($cfgdir, \%opts);
my $physv = config_files::clm_phys_vers->new( $cfg->get('phys') );
- my $cesmroot = abs_path( "$cfgdir/../../../");
my $definition = read_namelist_definition($cfgdir, \%opts, \%nl_flags, $physv);
my $defaults = read_namelist_defaults($cfgdir, \%opts, \%nl_flags, $cfg, $physv);
diff --git a/bld/configure b/bld/configure
index 099d738818..b333259a98 100755
--- a/bld/configure
+++ b/bld/configure
@@ -52,6 +52,7 @@ OPTIONS
-cache Name of output cache file (default: config_cache.xml).
-cachedir Name of directory where output cache file is written
(default: CLM build directory).
+ -cimeroot REQUIRED: Path to cime directory
-clm_root Root directory of clm source code
(default: directory above location of this script)
-cppdefs A string of user specified CPP defines. Appended to
@@ -122,6 +123,7 @@ my %opts = (
phys => "clm4_0",
nofire => undef,
noio => undef,
+ cimeroot => undef,
clm_root => undef,
spinup => "normal",
);
@@ -131,6 +133,7 @@ GetOptions(
"cache=s" => \$opts{'cache'},
"cachedir=s" => \$opts{'cachedir'},
"snicar_frc=s" => \$opts{'snicar_frc'},
+ "cimeroot=s" => \$opts{'cimeroot'},
"clm_root=s" => \$opts{'clm_root'},
"cppdefs=s" => \$opts{'cppdefs'},
"comp_intf=s" => \$opts{'comp_intf'},
@@ -176,9 +179,9 @@ my %cfg = (); # build configuration
# Make sure we can find required perl modules and configuration files.
# Look for them in the directory that contains the configure script.
-my $cesmroot = abs_path( "$cfgdir/../../../");
-my $casecfgdir = "$cesmroot/cime/scripts/Tools";
-my $perl5lib = "$cesmroot/cime/utils/perl5lib/";
+my $cimeroot = $opts{'cimeroot'};
+my $casecfgdir = "$cimeroot/scripts/Tools";
+my $perl5lib = "$cimeroot/utils/perl5lib/";
# The Build::Config module provides utilities to store and manipulate the configuration.
my $file = "$perl5lib/Build/Config.pm";
diff --git a/cime_config/.gitignore b/cime_config/.gitignore
new file mode 100644
index 0000000000..5bf1840ccf
--- /dev/null
+++ b/cime_config/.gitignore
@@ -0,0 +1,3 @@
+buildnmlc
+buildcppc
+
diff --git a/cime_config/buildcpp b/cime_config/buildcpp
index 335da0edc9..7a3bf0083e 100644
--- a/cime_config/buildcpp
+++ b/cime_config/buildcpp
@@ -16,7 +16,8 @@ def buildcpp(case):
"""
caseroot = case.get_value("CASEROOT")
- srcroot = case.get_value("SRCROOT")
+ cimeroot = case.get_value("CIMEROOT")
+ lnd_root = case.get_value("COMP_ROOT_DIR_LND")
lnd_grid = case.get_value("LND_GRID")
mask_grid = case.get_value("MASK_GRID")
clm_usrdat_name = case.get_value("CLM_USRDAT_NAME")
@@ -35,9 +36,9 @@ def buildcpp(case):
if not os.path.isdir(clmconf):
os.makedirs(clmconf)
- cmd = os.path.join(srcroot,"components","clm","bld","configure")
- command = "%s %s %s -usr_src %s -comp_intf mct " \
- %(cmd, config_opts, clm_config_opts, os.path.join(caseroot,"SourceMods","src.clm"))
+ cmd = os.path.join(lnd_root,"bld","configure")
+ command = "%s -cimeroot %s %s %s -usr_src %s -comp_intf mct " \
+ %(cmd, cimeroot, config_opts, clm_config_opts, os.path.join(caseroot,"SourceMods","src.clm"))
run_cmd_no_fail(command, from_dir=clmconf)
diff --git a/cime_config/buildlib b/cime_config/buildlib
index 105e3ce1b4..6b86bb0b65 100755
--- a/cime_config/buildlib
+++ b/cime_config/buildlib
@@ -28,7 +28,7 @@ def _main_func():
with Case(caseroot) as case:
casetools = case.get_value("CASETOOLS")
- srcroot = case.get_value("SRCROOT")
+ lnd_root = case.get_value("COMP_ROOT_DIR_LND")
gmake_j = case.get_value("GMAKE_J")
gmake = case.get_value("GMAKE")
mach = case.get_value("MACH")
@@ -48,7 +48,7 @@ def _main_func():
# create Filepath file and clm_cppdefs for clm4_0
#-------------------------------------------------------
# the call to configure here creates BOTH the Filepath file and the clm_cppdefs
- cmd = os.path.join(os.path.join(srcroot,"components","clm","cime_config","buildcpp"))
+ cmd = os.path.join(os.path.join(lnd_root,"cime_config","buildcpp"))
logger.info(" ...calling clm buildcpp to set build time options")
try:
mod = imp.load_source("buildcpp", cmd)
@@ -68,22 +68,21 @@ def _main_func():
#-------------------------------------------------------
filepath_file = os.path.join(bldroot,"Filepath")
if not os.path.isfile(filepath_file):
- srcroot = case.get_value("SRCROOT")
caseroot = case.get_value("CASEROOT")
paths = [os.path.join(caseroot,"SourceMods","src.clm"),
- os.path.join(srcroot,"components","clm","src","main"),
- os.path.join(srcroot,"components","clm","src","biogeophys"),
- os.path.join(srcroot,"components","clm","src","biogeochem"),
- os.path.join(srcroot,"components","clm","src","soilbiogeochem"),
- os.path.join(srcroot,"components","clm","src","dyn_subgrid"),
- os.path.join(srcroot,"components","clm","src","init_interp"),
- os.path.join(srcroot,"components","clm","src","fates"),
- os.path.join(srcroot,"components","clm","src","fates","main"),
- os.path.join(srcroot,"components","clm","src","fates","biogeophys"),
- os.path.join(srcroot,"components","clm","src","fates","biogeochem"),
- os.path.join(srcroot,"components","clm","src","fates","fire"),
- os.path.join(srcroot,"components","clm","src","utils"),
- os.path.join(srcroot,"components","clm","src","cpl")]
+ os.path.join(lnd_root,"src","main"),
+ os.path.join(lnd_root,"src","biogeophys"),
+ os.path.join(lnd_root,"src","biogeochem"),
+ os.path.join(lnd_root,"src","soilbiogeochem"),
+ os.path.join(lnd_root,"src","dyn_subgrid"),
+ os.path.join(lnd_root,"src","init_interp"),
+ os.path.join(lnd_root,"src","fates"),
+ os.path.join(lnd_root,"src","fates","main"),
+ os.path.join(lnd_root,"src","fates","biogeophys"),
+ os.path.join(lnd_root,"src","fates","biogeochem"),
+ os.path.join(lnd_root,"src","fates","fire"),
+ os.path.join(lnd_root,"src","utils"),
+ os.path.join(lnd_root,"src","cpl")]
with open(filepath_file, "w") as filepath:
filepath.write("\n".join(paths))
filepath.write("\n")
diff --git a/cime_config/buildnml b/cime_config/buildnml
index 682510e07a..dc11d2735b 100755
--- a/cime_config/buildnml
+++ b/cime_config/buildnml
@@ -36,7 +36,7 @@ def buildnml(case, caseroot, compname):
if compname != "clm":
raise AttributeError
- srcroot = case.get_value("SRCROOT")
+ lnd_root = case.get_value("COMP_ROOT_DIR_LND")
din_loc_root = case.get_value("DIN_LOC_ROOT")
ccsm_co2_ppmv = case.get_value("CCSM_CO2_PPMV")
clm_co2_type = case.get_value("CLM_CO2_TYPE")
@@ -105,7 +105,7 @@ def buildnml(case, caseroot, compname):
if not filecmp.cmp(file1, file2):
call_buildcpp = True
if call_buildcpp:
- cmd = os.path.join(os.path.join(srcroot,"components","clm","cime_config","buildcpp"))
+ cmd = os.path.join(os.path.join(lnd_root,"cime_config","buildcpp"))
logger.info(" ...calling clm buildcpp to set build time options")
try:
mod = imp.load_source("buildcpp", cmd)
@@ -225,13 +225,13 @@ def buildnml(case, caseroot, compname):
create_namelist_infile(case, user_nl_file, namelist_infile, "\n".join(infile_lines))
- cmd = os.path.join(srcroot, "components","clm","bld","build-namelist")
+ cmd = os.path.join(lnd_root,"bld","build-namelist")
- command = ("%s -infile %s -csmdata %s -inputdata %s %s -namelist \"&clm_inparm start_ymd=%s %s/ \" "
+ command = ("%s -cimeroot %s -infile %s -csmdata %s -inputdata %s %s -namelist \"&clm_inparm start_ymd=%s %s/ \" "
"%s %s -res %s %s -clm_start_type %s -envxml_dir %s -l_ncpl %s "
"-lnd_frac %s -glc_nec %s -co2_ppmv %s -co2_type %s -config %s "
"%s %s %s"
- %(cmd, infile, din_loc_root, inputdata_file, ignore, start_ymd, clm_namelist_opts,
+ %(cmd, _CIMEROOT, infile, din_loc_root, inputdata_file, ignore, start_ymd, clm_namelist_opts,
nomeg, usecase, lnd_grid, clmusr, start_type, caseroot, lnd_ncpl,
lndfrac_file, glc_nec, ccsm_co2_ppmv, clm_co2_type, config_cache_file,
clm_bldnml_opts, spinup, tuning))
diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml
index 2d51f524f2..27c907616b 100644
--- a/cime_config/testdefs/ExpectedTestFails.xml
+++ b/cime_config/testdefs/ExpectedTestFails.xml
@@ -1,29 +1,32 @@
- FAIL ERP_D_Lm9.f10_f10_musgs.IHistClm50BgcCrop.cheyenne_intel.clm-ciso_monthly RUN
- FAIL SMS_D_Lm13.f10_f10_musgs.I1850Clm50BgcCrop.cheyenne_intel.clm-ciso_monthly RUN
- FAIL SMS_D_Lm13.f10_f10_musgs.IHistClm50BgcCrop.cheyenne_intel.clm-ciso_monthly RUN
- FAIL ERP_D_Ld10.f10_f10_musgs.IHistClm50BgcCrop.cheyenne_intel.clm-ciso_decStart SHAREDLIB_BUILD
- FAIL NCK_Ld1.f10_f10_musgs.I2000Clm50Sp.cheyenne_intel.clm-default COMPARE_base_multiinst
- FAIL NCK_Ld1.f10_f10_musgs.I2000Clm50Sp.cheyenne_intel.clm-default COMPARE_base_multiinst
- FAIL ERI_N2_Ld9.f19_g17.I2000Clm50BgcCrop.cheyenne_intel.clm-default RUN
- FAIL SMS_Ld5_D_P48x1.f10_f10_musgs.IHistClm50Bgc.hobart_nag.clm-decStart RUN
- FAIL ERP_D_P48x1.f10_f10_musgs.IHistClm50Bgc.hobart_nag.clm-decStart RUN
- FAIL ERP_D.f10_f10_musgs.IHistClm50Bgc.cheyenne_gnu.clm-decStart RUN
- FAIL ERS_Ly5_P72x1.f10_f10_musgs.IHistClm45BgcCrop.cheyenne_intel.clm-cropMonthOutput RUN
- FAIL ERS_Lm20_Mmpi-serial.1x1_smallvilleIA.I2000Clm50BgcCropGs.cheyenne_gnu.clm-monthly RUN
- FAIL SMS_D_Ld5.f45_f45_mg37.I2000Clm50Fates.cheyenne_intel.clm-Fates RUN
- FAIL SMS_D_Lm6.f45_f45_mg37.I2000Clm50Fates.cheyenne_intel.clm-Fates RUN
- FAIL SMS_D_Lm6_P144x1.f45_f45_mg37.I2000Clm50Fates.cheyenne_intel.clm-Fates RUN
- FAIL ERP_Ld5.f10_f10_musgs.I2000Clm50Vic.cheyenne_gnu.clm-decStart COMPARE_base_rest
- FAIL ERP_Ld9.f45_f45.I2000Clm45Fates.hobart_nag.clm-FatesAllVars COMPARE_base_rest
- FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesLogging COMPARE_base_rest
- FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-Fates COMPARE_base_rest
- FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesNoFire COMPARE_base_rest
- FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesPPhys COMPARE_base_rest
- FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesST3 COMPARE_base_rest
- FAIL ERP_Ld9.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesAllVars COMPARE_base_rest
- FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_gnu.clm-Fates COMPARE_base_rest
- FAIL ERS_D_Mmpi-serial_Ld5.1x1_brazil.I2000Clm50FatesGs.yellowstone_pgi.clm-fates COMPARE_base_rest
+
+ FAIL ERP_D_Lm9.f10_f10_musgs.IHistClm50BgcCrop.cheyenne_intel.clm-ciso_monthly RUN
+ FAIL SMS_D_Lm13.f10_f10_musgs.I1850Clm50BgcCrop.cheyenne_intel.clm-ciso_monthly RUN
+ FAIL SMS_D_Lm13.f10_f10_musgs.IHistClm50BgcCrop.cheyenne_intel.clm-ciso_monthly RUN
+ FAIL ERP_D_Ld10.f10_f10_musgs.IHistClm50BgcCrop.cheyenne_intel.clm-ciso_decStart SHAREDLIB_BUILD
+ FAIL NCK_Ld1.f10_f10_musgs.I2000Clm50Sp.cheyenne_intel.clm-default COMPARE_base_multiinst
+ FAIL NCK_Ld1.f10_f10_musgs.I2000Clm50Sp.cheyenne_intel.clm-default COMPARE_base_multiinst
+ FAIL ERI_N2_Ld9.f19_g17.I2000Clm50BgcCrop.cheyenne_intel.clm-default RUN
+ FAIL SMS_Ld5_D_P48x1.f10_f10_musgs.IHistClm50Bgc.hobart_nag.clm-decStart RUN
+ FAIL ERP_D_P48x1.f10_f10_musgs.IHistClm50Bgc.hobart_nag.clm-decStart RUN
+ FAIL ERS_Ly5_P72x1.f10_f10_musgs.IHistClm45BgcCrop.cheyenne_intel.clm-cropMonthOutput RUN
+ FAIL ERS_Lm20_Mmpi-serial.1x1_smallvilleIA.I2000Clm50BgcCropGs.cheyenne_gnu.clm-monthly RUN
+ FAIL SMS_D_Ld5.f45_f45_mg37.I2000Clm50Fates.cheyenne_intel.clm-Fates RUN
+ FAIL SMS_D_Lm6.f45_f45_mg37.I2000Clm50Fates.cheyenne_intel.clm-Fates RUN
+ FAIL SMS_D_Lm6_P144x1.f45_f45_mg37.I2000Clm50Fates.cheyenne_intel.clm-Fates RUN
+ FAIL ERP_Ld5.f10_f10_musgs.I2000Clm50Vic.cheyenne_gnu.clm-decStart COMPARE_base_rest
+ FAIL ERP_D.f10_f10_musgs.IHistClm50Bgc.cheyenne_gnu.clm-decStart COMPARE_base_rest
+
+
+ FAIL ERP_Ld9.f45_f45.I2000Clm45Fates.hobart_nag.clm-FatesAllVars COMPARE_base_rest
+ FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesLogging COMPARE_base_rest
+ FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-Fates COMPARE_base_rest
+ FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesNoFire COMPARE_base_rest
+ FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesPPhys COMPARE_base_rest
+ FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesST3 COMPARE_base_rest
+ FAIL ERP_Ld9.f45_f45.I2000Clm45Fates.cheyenne_intel.clm-FatesAllVars COMPARE_base_rest
+ FAIL ERS_Ld60.f45_f45.I2000Clm45Fates.cheyenne_gnu.clm-Fates COMPARE_base_rest
+
diff --git a/.ChangeLog_template b/doc/.ChangeLog_template
similarity index 95%
rename from .ChangeLog_template
rename to doc/.ChangeLog_template
index 5c11f0f6b7..527502df56 100644
--- a/.ChangeLog_template
+++ b/doc/.ChangeLog_template
@@ -13,7 +13,7 @@ Purpose of changes
Bugs fixed or introduced
------------------------
-Issues fixed (include CCTSM Issue #): [If none, remove this line]
+Issues fixed (include CTSM Issue #): [If none, remove this line]
CIME Issues fixed (include issue #): [If none, remove this line]
Known bugs introduced in this tag (include github issue ID): [If none, remove this line]
@@ -112,7 +112,7 @@ Changes answers relative to baseline:
Detailed list of changes
------------------------
-List any svn externals directories updated (cime, rtm, mosart, cism, etc.):
+List any externals directories updated (cime, rtm, mosart, cism, fates, etc.):
List all files eliminated:
diff --git a/doc/ChangeLog b/doc/ChangeLog
index 9aa280dfc1..f5ac5000e2 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,4 +1,295 @@
===============================================================
+Tag name: clm4_5_18_r274
+Originator(s): sacks (Bill Sacks)
+Date: Tue Jan 30 05:55:53 MST 2018
+One-line Summary: Fix auto-detection of CIME_MODEL in a standalone checkout
+
+Purpose of changes
+------------------
+
+The auto-detection of whether CIME_MODEL is acme or cesm is broken in
+standalone checkouts of clm4_5_18_r273. (This auto-detection relied on
+whether there was an SVN_EXTERNAL_DIRECTORIES file present at the top
+level.)
+
+This tag points to a new cime version that fixes this issue.
+
+
+Bugs fixed or introduced
+------------------------
+
+Issues fixed (include CTSM Issue #):
+- #238 - clm4_5_18_r273 requires setting CIME_MODEL
+
+
+Notes of particular relevance for users
+---------------------------------------
+
+Caveats for users (e.g., need to interpolate initial conditions): none
+
+Changes to CLM's user interface (e.g., new/renamed XML or namelist variables): none
+
+Changes made to namelist defaults (e.g., changed parameter values): none
+
+Changes to the datasets (e.g., parameter, surface or initial files): none
+
+Substantial timing or memory changes: none
+
+Notes of particular relevance for developers: (including Code reviews and testing)
+---------------------------------------------
+
+Caveats for developers (e.g., code that is duplicated that requires double maintenance): none
+
+Changes to tests or testing: none
+
+Code reviewed by: Erik Kluzek
+
+Did you follow the steps in .CLMTrunkChecklist: yes
+
+CLM testing:
+
+ [PASS means all tests PASS and OK means tests PASS other than expected fails.]
+
+ build-namelist tests:
+
+ cheyenne - not run
+
+ unit-tests (components/clm/src):
+
+ cheyenne - pass
+
+ tools-tests (components/clm/test/tools):
+
+ cheyenne - not run
+
+ PTCLM testing (components/clm/tools/shared/PTCLM/test):
+
+ cheyenne - not run
+
+ regular tests (aux_clm):
+
+ cheyenne_intel ---- pass
+ cheyenne_gnu ------ pass
+ hobart_nag -------- pass
+ hobart_pgi -------- pass
+ hobart_intel ------ pass
+
+ Note: I ran the cheyenne tests with CIME_MODEL=cesm defined in my
+ environment, and the hobart tests without this setting.
+
+CLM tag used for the baseline comparisons: clm4_5_18_r273
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline: NO
+
+Detailed list of changes
+------------------------
+
+List any externals directories updated (cime, rtm, mosart, cism, fates, etc.):
+
+- cime: clm4518/n03/cime5.4.0-alpha.03 -> clm4518/n04/cime5.4.0-alpha.03
+ Fix to auto-detect that CIME_MODEL=cesm based on presence of
+ manage_externals rather than SVN_EXTERNAL_DIRECTORIES
+
+List all files eliminated: none
+
+List all files added and what they do: none
+
+List all existing files that have been modified, and describe the changes:
+
+========= Fix the documentation of some expected fails
+M cime_config/testdefs/ExpectedTestFails.xml
+
+===============================================================
+===============================================================
+Tag name: clm4_5_18_r273
+Originator(s): sacks (Bill Sacks)
+Date: Fri Jan 26 15:26:06 MST 2018
+One-line Summary: Support a standalone checkout from git
+
+Purpose of changes
+------------------
+
+This PR allows a standalone git-based checkout to work. Starting with
+this tag, we'll be using git rather than svn for CLM/CTSM development.
+
+The biggest changes here are pulling in manage_externals, which serves
+the role of svn externals. After cloning the repository, you can get all
+of the necessary externals by running:
+
+./manage_externals/checkout_externals
+
+See the file README_EXTERNALS.rst at the top level of the repository for
+more details on using this tool.
+
+Other than that, this PR mostly involves changes to allow you to run
+from the new directory structure of a standalone checkout: Now all of
+the CLM/CTSM directories appear at the top level of the checkout, rather
+than nested under components/clm.
+
+Bugs fixed or introduced
+------------------------
+
+Known bugs introduced in this tag (include github issue ID):
+
+- https://github.com/ESCOMP/ctsm/issues/220 For new standalone
+ organization, need to generalize paths for tools and testing
+
+
+Notes of particular relevance for users
+---------------------------------------
+
+Caveats for users (e.g., need to interpolate initial conditions):
+
+- The svn trunk is now frozen. All future development will occur in the
+ master branch of https://github.com/ESCOMP/ctsm
+
+- A standalone git checkout will have all CLM directories (src, bld,
+ etc.) at the top level, rather than under components/clm.
+
+- To get all externals after cloning the git repository, run
+ ./manage_externals/checkout_externals
+
+ See the file README_EXTERNALS.rst at the top level of the repository
+ for more details on using this tool.
+
+- More details on working with the git repository will be coming soon
+
+Changes to CLM's user interface (e.g., new/renamed XML or namelist variables): none
+
+Changes made to namelist defaults (e.g., changed parameter values): none
+
+Changes to the datasets (e.g., parameter, surface or initial files): none
+
+Substantial timing or memory changes: none
+
+Notes of particular relevance for developers: (including Code reviews and testing)
+---------------------------------------------
+
+Caveats for developers (e.g., code that is duplicated that requires double maintenance):
+
+- build-namelist unit tests currently do not run (#220)
+
+- tools tests, and the tools builds themselves, are expected not to work (#220)
+
+Changes to tests or testing: none
+
+Code reviewed by: Ben Andre, Erik Kluzek
+
+Did you follow the steps in .CLMTrunkChecklist: yes
+
+CLM testing:
+
+ [PASS means all tests PASS and OK means tests PASS other than expected fails.]
+
+ build-namelist tests:
+
+ cheyenne - FAIL (#220)
+
+ unit-tests (components/clm/src):
+
+ cheyenne - pass
+
+ tools-tests (components/clm/test/tools):
+
+ cheyenne - NOT RUN; EXPECTED TO FAIL (#220)
+
+ PTCLM testing (components/clm/tools/shared/PTCLM/test):
+
+ cheyenne - NOT RUN; MAY FAIL DUE TO #220
+
+ regular tests (aux_clm):
+
+ cheyenne_intel ---- pass
+ cheyenne_gnu ------ pass
+ hobart_nag -------- pass
+ hobart_pgi -------- pass
+ hobart_intel ------ pass
+
+CLM tag used for the baseline comparisons: clm4_5_18_r272
+Answer changes
+--------------
+
+Changes answers relative to baseline: NO
+
+Detailed list of changes
+------------------------
+
+List any externals directories updated (cime, rtm, mosart, cism, fates, etc.):
+
+- cime: clm4518/n02/cime5.4.0-alpha.03 -> clm4518/n03/cime5.4.0-alpha.03
+ Has some changes needed for new standalone directory structure to work
+ (these changes are now on cime master, though see #225 before updating
+ to cime master)
+
+- cism: cism2_1_40 -> cism2_1_46
+ Points to cism in git
+ cism2_1_41, 42, 43 and 44 contained very minor changes
+ cism2_1_45 and 46 are the git migration
+
+List all files eliminated:
+
+========= Handle externals via manage_externals rather than svn externals
+D README_EXTERNALS
+D SVN_EXTERNAL_DIRECTORIES
+D SVN_EXTERNAL_DIRECTORIES.standalone
+
+List all files added and what they do:
+
+========= Added this directory and everything below it. Pulled in via
+ git subtree from
+ https://github.com/NCAR/manage_externals/. This is the new
+ tool for pulling in and updating externals (replacing svn
+ externals).
+A manage_externals/
+
+========= Handle externals via manage_externals rather than svn externals
+A Externals.cfg
+A Externals_CLM.cfg
+A README_EXTERNALS.rst
+
+========= Handle new directory structure of standalone checkouts
+A .config_files.xml
+
+========= Add some .gitignore files
+A src/dyn_subgrid/.gitignore
+A src/unit_test_shr/.gitignore
+A src/unit_test_stubs/main/.gitignore
+A src/unit_test_stubs/utils/.gitignore
+A src/utils/.gitignore
+A cime_config/.gitignore
+
+List all existing files that have been modified, and describe the changes:
+
+========= Handle new directory structure of standalone checkouts
+M bld/CLMBuildNamelist.pm
+M bld/configure
+M cime_config/buildcpp
+M cime_config/buildlib
+M cime_config/buildnml
+M src/CMakeLists.txt
+
+========= Documentation updates for move to git
+M .CLMTrunkChecklist
+M .gitignore
+M LICENSE
+M README.rst
+M src/README.unit_testing
+
+========= Separately document expected fails for aux_clm vs fates test
+ lists
+M cime_config/testdefs/ExpectedTestFails.xml
+
+========= ChangeLog now only stored in doc directory, not in top-level
+ directory
+R095 .ChangeLog_template doc/.ChangeLog_template
+R070 UpDateChangeLog.pl doc/UpdateChangelog.pl
+
+===============================================================
+===============================================================
Tag name: clm4_5_18_r272
Originator(s): erik (Erik Kluzek)
Date: Thu Jan 25 01:17:01 MST 2018
diff --git a/doc/ChangeSum b/doc/ChangeSum
index d0df950aaf..cfcddc8cc0 100644
--- a/doc/ChangeSum
+++ b/doc/ChangeSum
@@ -1,5 +1,7 @@
Tag Who Date Summary
============================================================================================================================
+ clm4_5_18_r274 sacks 01/30/2018 Fix auto-detection of CIME_MODEL in a standalone checkout
+ clm4_5_18_r273 sacks 01/26/2018 Support a standalone checkout from git
clm4_5_18_r272 erik 01/25/2018 Bring in latest FATES release version to CLM trunk: fates_s1.4.1_a3.0.0_rev2
clm4_5_18_r271 erik 01/20/2018 Update testlist to v2 and remove yellowstone
clm4_5_18_r270 sacks 12/20/2017 Always use multiple elevation classes for glacier, even with stub glc
diff --git a/UpDateChangeLog.pl b/doc/UpdateChangelog.pl
similarity index 70%
rename from UpDateChangeLog.pl
rename to doc/UpdateChangelog.pl
index 27e4803122..49e5a67f13 100755
--- a/UpDateChangeLog.pl
+++ b/doc/UpdateChangelog.pl
@@ -28,10 +28,6 @@ sub usage {
$ProgName [options]
OPTIONS
- -compbrnch version Enter clm branch version to compare to (under branch_tags in repo).
- [or -cb]
- -comptrunk version Enter clm trunk version to compare to (under trunk_tags in repo).
- [or -ct]
-help [or -h] Help on this script.
-update [or -u] Just update the date/time for the latest tag
In this case no other arguments should be given.
@@ -46,24 +42,16 @@ sub usage {
To document a new tag
$ProgName clm4_5_2_r097 "Description of this tag"
-
- To document a new tag and compare expected fails to previous tag.
-
- $ProgName clm4_5_2_r097 "Description of this tag" -ct clm4_5_2_r096
EOF
}
my %opts = {
help => 0,
update => 0,
- comptrunk => undef,
- compbrnch => undef,
};
GetOptions(
"h|help" => \$opts{'help'},
"u|update" => \$opts{'update'},
- "ct|comptrunk=s" => \$opts{'comptrunk'},
- "cb|compbrnch=s" => \$opts{'compbrnch'},
);
if ( $opts{'help'} ) {
usage();
@@ -138,17 +126,6 @@ sub usage {
} elsif ( $_ =~ /One-line Summary:/ ) {
chomp( $_ );
print $fh "$_ $sum\n";
- } elsif ( $_ =~ /CLM tag used for the baseline comparison tests if applicable:/ ) {
- chomp( $_ );
- if ( defined($opts{'comptrunk'}) ) {
- print $fh "$_ $opts{'comptrunk'}\n";
- &AddExpectedFailDiff( $fh, "trunk_tags/$opts{'comptrunk'}" );
- } elsif ( defined($opts{'compbrnch'}) ) {
- print $fh "$_ $opts{'compbrnch'}\n";
- &AddExpectedFailDiff( $fh, "branch_tags/$opts{'compbrnch'}" );
- } else {
- print $fh "$_\n";
- }
} else {
print $fh $_;
}
@@ -228,31 +205,3 @@ sub usage {
system( "$EDITOR $changelog" );
system( "$EDITOR $changesum" );
}
-system( "/bin/cp -fp $changelog components/clm/doc/." );
-system( "/bin/cp -fp $changesum components/clm/doc/." );
-system( "/bin/chmod 0444 components/clm/doc/$changelog" );
-system( "/bin/chmod 0444 components/clm/doc/$changesum" );
-
-sub AddExpectedFailDiff {
-#
-# Add information about the expected fail difference
-#
- my $fh = shift;
- my $version = shift;
-
- my $SVN_MOD_URL = "https://svn-ccsm-models.cgd.ucar.edu/clm2/";
- my $expectedFail = `find . -name 'expected*Fail*.xml' -print`;
- if ( $expectedFail eq "" ) {
- die "ERROR:: expectedFails file NOT found here\n";
- }
-
- `svn ls $SVN_MOD_URL/$version` || die "ERROR:: Bad version to compare to: $version\n";
- `svn ls $SVN_MOD_URL/$version/$expectedFail` || die "ERROR:: expectedFails file NOT found in: $version\n";
- print $fh "\nDifference in expected fails from testing:\n\n";
- my $diff = `svn diff --old $SVN_MOD_URL/$version/$expectedFail \ \n --new $expectedFail`;
- if ( $diff eq "" ) {
- print $fh " No change in expected failures in testing\n";
- } else {
- print $fh $diff;
- }
-}
diff --git a/manage_externals/.dir_locals.el b/manage_externals/.dir_locals.el
new file mode 100644
index 0000000000..a370490e92
--- /dev/null
+++ b/manage_externals/.dir_locals.el
@@ -0,0 +1,12 @@
+; -*- mode: Lisp -*-
+
+((python-mode
+ . (
+ ;; fill the paragraph to 80 columns when using M-q
+ (fill-column . 80)
+
+ ;; Use 4 spaces to indent in Python
+ (python-indent-offset . 4)
+ (indent-tabs-mode . nil)
+ )))
+
diff --git a/manage_externals/.github/ISSUE_TEMPLATE.md b/manage_externals/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000000..8ecb2ae64b
--- /dev/null
+++ b/manage_externals/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,6 @@
+### Summary of Issue:
+### Expected behavior and actual behavior:
+### Steps to reproduce the problem (should include model description file(s) or link to public repository):
+### What is the changeset ID of the code, and the machine you are using:
+### have you modified the code? If so, it must be committed and available for testing:
+### Screen output or log file showing the error message and context:
diff --git a/manage_externals/.github/PULL_REQUEST_TEMPLATE.md b/manage_externals/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..b68b1fb5e2
--- /dev/null
+++ b/manage_externals/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,17 @@
+[ 50 character, one line summary ]
+
+[ Description of the changes in this commit. It should be enough
+ information for someone not following this development to understand.
+ Lines should be wrapped at about 72 characters. ]
+
+User interface changes?: [ No/Yes ]
+[ If yes, describe what changed, and steps taken to ensure backward compatibility ]
+
+Fixes: [Github issue #s] And brief description of each issue.
+
+Testing:
+ test removed:
+ unit tests:
+ system tests:
+ manual testing:
+
diff --git a/manage_externals/.gitignore b/manage_externals/.gitignore
new file mode 100644
index 0000000000..411de5d96e
--- /dev/null
+++ b/manage_externals/.gitignore
@@ -0,0 +1,14 @@
+# directories that are checked out by the tool
+cime/
+cime_config/
+components/
+
+# generated local files
+*.log
+
+# editor files
+*~
+*.bak
+
+# generated python files
+*.pyc
diff --git a/manage_externals/.travis.yml b/manage_externals/.travis.yml
new file mode 100644
index 0000000000..5da83c5654
--- /dev/null
+++ b/manage_externals/.travis.yml
@@ -0,0 +1,32 @@
+# NOTE(bja, 2017-11) travis-ci doesn't support python language builds
+# on mac os. As a work around, we use built-in python on linux, and
+# declare osx a 'generic' language, and create our own python env.
+
+language: python
+os: linux
+python:
+ - "2.7"
+ - "3.4"
+ - "3.5"
+ - "3.6"
+matrix:
+ include:
+ - os: osx
+ language: generic
+ before_install:
+ # NOTE(bja, 2017-11) update is slow, 2.7.12 installed by default, good enough!
+ # - brew update
+ # - brew outdated python2 || brew upgrade python2
+ - virtualenv env -p python2
+ - source env/bin/activate
+install:
+ - pip install -r test/requirements.txt
+before_script:
+ - git --version
+script:
+ - cd test; make test
+ - cd test; make lint
+after_success:
+ - cd test; make coverage
+ - cd test; coveralls
+
\ No newline at end of file
diff --git a/manage_externals/LICENSE.txt b/manage_externals/LICENSE.txt
new file mode 100644
index 0000000000..665ee03fbc
--- /dev/null
+++ b/manage_externals/LICENSE.txt
@@ -0,0 +1,34 @@
+Copyright (c) 2017-2018, University Corporation for Atmospheric Research (UCAR)
+All rights reserved.
+
+Developed by:
+ University Corporation for Atmospheric Research - National Center for Atmospheric Research
+ https://www2.cesm.ucar.edu/working-groups/sewg
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the "Software"),
+to deal with the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom
+the Software is furnished to do so, subject to the following conditions:
+
+ - Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimers.
+ - Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimers in the documentation
+ and/or other materials provided with the distribution.
+ - Neither the names of [Name of Development Group, UCAR],
+ nor the names of its contributors may be used to endorse or promote
+ products derived from this Software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/manage_externals/README.md b/manage_externals/README.md
new file mode 100644
index 0000000000..b699a26081
--- /dev/null
+++ b/manage_externals/README.md
@@ -0,0 +1,172 @@
+-- AUTOMATICALLY GENERATED FILE. DO NOT EDIT --
+
+[![Build Status](https://travis-ci.org/NCAR/manage_externals.svg?branch=master)](https://travis-ci.org/NCAR/manage_externals)
+[![Coverage Status](https://coveralls.io/repos/github/NCAR/manage_externals/badge.svg?branch=master)](https://coveralls.io/github/NCAR/manage_externals?branch=master)
+```
+
+usage: checkout_externals [-h] [-e [EXTERNALS]] [-o] [-S] [-v] [--backtrace]
+ [-d] [--no-logging]
+
+checkout_externals manages checking out CESM externals from revision control
+based on an externals description file. By default only the required
+externals are checked out.
+
+NOTE: checkout_externals *MUST* be run from the root of the source tree.
+
+Running checkout_externals without the '--status' option will always attempt to
+synchronize the working copy with the externals description.
+
+optional arguments:
+ -h, --help show this help message and exit
+ -e [EXTERNALS], --externals [EXTERNALS]
+ The externals description filename. Default:
+ Externals.cfg.
+ -o, --optional By default only the required externals are checked
+ out. This flag will also checkout the optional
+ externals.
+ -S, --status Output status of the repositories managed by
+ checkout_externals. By default only summary
+ information is provided. Use verbose output to see
+ details.
+ -v, --verbose Output additional information to the screen and log
+ file. This flag can be used up to two times,
+ increasing the verbosity level each time.
+ --backtrace DEVELOPER: show exception backtraces as extra
+ debugging output
+ -d, --debug DEVELOPER: output additional debugging information to
+ the screen and log file.
+ --no-logging DEVELOPER: disable logging.
+
+```
+NOTE: checkout_externals *MUST* be run from the root of the source tree it
+is managing. For example, if you cloned CLM with:
+
+ $ git clone git@github.com/ncar/clm clm-dev
+
+Then the root of the source tree is /path/to/clm-dev. If you obtained
+CLM via a checkout of CESM:
+
+ $ git clone git@github.com/escomp/cesm cesm-dev
+
+and you need to checkout the CLM externals, then the root of the
+source tree is /path/to/cesm-dev. Do *NOT* run checkout_externals
+from within /path/to/cesm-dev/components/clm.
+
+The root of the source tree will be referred to as `${SRC_ROOT}` below.
+
+# Supported workflows
+
+ * Checkout all required components from the default externals
+ description file:
+
+ $ cd ${SRC_ROOT}
+ $ ./manage_externals/checkout_externals
+
+ * To update all required components to the current values in the
+ externals description file, re-run checkout_externals:
+
+ $ cd ${SRC_ROOT}
+ $ ./manage_externals/checkout_externals
+
+ If there are *any* modifications to *any* working copy according
+ to the git or svn 'status' command, checkout_externals
+ will not update any external repositories. Modifications
+ include: modified files, added files, removed files, or missing
+ files.
+
+ * Checkout all required components from a user specified externals
+ description file:
+
+ $ cd ${SRC_ROOT}
+    $ ./manage_externals/checkout_externals --externals myCESM.xml
+
+ * Status summary of the repositories managed by checkout_externals:
+
+ $ cd ${SRC_ROOT}
+ $ ./manage_externals/checkout_externals --status
+
+ ./cime
+ s ./components/cism
+ ./components/mosart
+ e-o ./components/rtm
+ M ./src/fates
+ e-o ./tools/PTCLM
+
+ where:
+ * column one indicates the status of the repository in relation
+ to the externals description file.
+ * column two indicates whether the working copy has modified files.
+ * column three shows how the repository is managed, optional or required
+
+ Column one will be one of these values:
+ * s : out-of-sync : repository is checked out at a different commit
+ compared with the externals description
+ * e : empty : directory does not exist - checkout_externals has not been run
+ * ? : unknown : directory exists but .git or .svn directories are missing
+
+ Column two will be one of these values:
+ * M : Modified : modified, added, deleted or missing files
+ * : blank / space : clean
+ * - : dash : no meaningful state, for empty repositories
+
+ Column three will be one of these values:
+    * o : optional : optional repository
+ * : blank / space : required repository
+
+ * Detailed git or svn status of the repositories managed by checkout_externals:
+
+ $ cd ${SRC_ROOT}
+ $ ./manage_externals/checkout_externals --status --verbose
+
+# Externals description file
+
+ The externals description contains a list of the external
+ repositories that are used and their version control locations. Each
+ external has:
+
+ * name (string) : component name, e.g. cime, cism, clm, cam, etc.
+
+ * required (boolean) : whether the component is a required checkout
+
+ * local_path (string) : component path *relative* to where
+ checkout_externals is called.
+
+  * protocol (string) : version control protocol that is used to
+ manage the component. Valid values are 'git', 'svn',
+ 'externals_only'.
+
+ Note: 'externals_only' will only process the external's own
+ external description file without trying to manage a repository
+    for the component. This is used for retrieving externals for
+ standalone components like cam and clm.
+
+ * repo_url (string) : URL for the repository location, examples:
+ * https://svn-ccsm-models.cgd.ucar.edu/glc
+ * git@github.com:esmci/cime.git
+ * /path/to/local/repository
+
+ If a repo url is determined to be a local path (not a network url)
+ then user expansion, e.g. ~/, and environment variable expansion,
+ e.g. $HOME or $REPO_ROOT, will be performed.
+
+ Relative paths are difficult to get correct, especially for mixed
+ use repos like clm. It is advised that local paths expand to
+ absolute paths. If relative paths are used, they should be
+ relative to one level above local_path. If local path is
+    'src/foo', then the relative url should be relative to
+ 'src'.
+
+ * tag (string) : tag to checkout
+
+ This can also be a git SHA-1
+
+ * branch (string) : branch to checkout
+
+ Note: either tag or branch must be supplied, but not both.
+
+ * externals (string) : relative path to the external's own external
+ description file that should also be used. It is *relative* to the
+ component local_path. For example, the CESM externals description
+ will load clm. CLM has additional externals that must be
+ downloaded to be complete. Those additional externals are managed
+ from the clm source root by the file pointed to by 'externals'.
diff --git a/manage_externals/README_FIRST b/manage_externals/README_FIRST
new file mode 100644
index 0000000000..c8a47d7806
--- /dev/null
+++ b/manage_externals/README_FIRST
@@ -0,0 +1,54 @@
+CESM is comprised of a number of different components that are
+developed and managed independently. Each component may have
+additional 'external' dependencies and optional parts that are also
+developed and managed independently.
+
+The checkout_externals.py tool manages retrieving and updating the
+components and their externals so you have a complete set of source
+files for the model.
+
+checkout_externals.py relies on a model description file that
+describes what components are needed, where to find them and where to
+put them in the source tree. The default file is called "CESM.xml"
+regardless of whether you are checking out CESM or a standalone
+component.
+
+checkout_externals requires access to git and svn repositories that
+require authentication. checkout_externals may pass through
+authentication requests, but it will not cache them for you. For the
+best and most robust user experience, you should have svn and git
+working without password authentication. See:
+
+ https://help.github.com/articles/connecting-to-github-with-ssh/
+
+ ?svn ref?
+
+NOTE: checkout_externals.py *MUST* be run from the root of the source
+tree it is managing. For example, if you cloned CLM with:
+
+ $ git clone git@github.com/ncar/clm clm-dev
+
+Then the root of the source tree is /path/to/clm-dev. If you obtained
+CLM via an svn checkout of CESM and you need to checkout the CLM
+externals, then the root of the source tree for CLM is:
+
+ /path/to/cesm-dev/components/clm
+
+The root of the source tree will be referred to as ${SRC_ROOT} below.
+
+To get started quickly, checkout all required components from the
+default model description file:
+
+ $ cd ${SRC_ROOT}
+ $ ./checkout_cesm/checkout_externals.py
+
+For additional information about using checkout model, please see:
+
+ ${SRC_ROOT}/checkout_cesm/README
+
+or run:
+
+ $ cd ${SRC_ROOT}
+ $ ./checkout_cesm/checkout_externals.py --help
+
+
diff --git a/manage_externals/checkout_externals b/manage_externals/checkout_externals
new file mode 100755
index 0000000000..a0698baef0
--- /dev/null
+++ b/manage_externals/checkout_externals
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+"""Main driver wrapper around the manic/checkout utility.
+
+Tool to assemble external repositories represented in an externals
+description file.
+
+"""
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import sys
+import traceback
+
+import manic
+
+if sys.hexversion < 0x02070000:
+ print(70 * '*')
+ print('ERROR: {0} requires python >= 2.7.x. '.format(sys.argv[0]))
+ print('It appears that you are running python {0}'.format(
+ '.'.join(str(x) for x in sys.version_info[0:3])))
+ print(70 * '*')
+ sys.exit(1)
+
+
+if __name__ == '__main__':
+ ARGS = manic.checkout.commandline_arguments()
+ try:
+ RET_STATUS, _ = manic.checkout.main(ARGS)
+ sys.exit(RET_STATUS)
+ except Exception as error: # pylint: disable=broad-except
+ manic.printlog(str(error))
+ if ARGS.backtrace:
+ traceback.print_exc()
+ sys.exit(1)
diff --git a/manage_externals/manic/__init__.py b/manage_externals/manic/__init__.py
new file mode 100644
index 0000000000..e4d9b552d3
--- /dev/null
+++ b/manage_externals/manic/__init__.py
@@ -0,0 +1,9 @@
+"""Public API for the manage_externals library
+"""
+
+import manic.checkout as checkout
+from manic.utils import printlog
+
+__all__ = [
+ 'checkout', 'printlog',
+]
diff --git a/manage_externals/manic/checkout.py b/manage_externals/manic/checkout.py
new file mode 100755
index 0000000000..42854e3d59
--- /dev/null
+++ b/manage_externals/manic/checkout.py
@@ -0,0 +1,316 @@
+#!/usr/bin/env python
+
+"""
+Tool to assemble repositories represented in a model-description file.
+
+If loaded as a module (e.g., in a component's buildcpp), it can be used
+to check the validity of existing subdirectories and load missing sources.
+"""
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import argparse
+import logging
+import os
+import os.path
+import sys
+
+from manic.externals_description import create_externals_description
+from manic.externals_description import read_externals_description_file
+from manic.externals_status import check_safe_to_update_repos
+from manic.sourcetree import SourceTree
+from manic.utils import printlog
+from manic.global_constants import VERSION_SEPERATOR, LOG_FILE_NAME
+
+if sys.hexversion < 0x02070000:
+ print(70 * '*')
+ print('ERROR: {0} requires python >= 2.7.x. '.format(sys.argv[0]))
+ print('It appears that you are running python {0}'.format(
+ VERSION_SEPERATOR.join(str(x) for x in sys.version_info[0:3])))
+ print(70 * '*')
+ sys.exit(1)
+
+
+# ---------------------------------------------------------------------
+#
+# User input
+#
+# ---------------------------------------------------------------------
+def commandline_arguments(args=None):
+ """Process the command line arguments
+
+ Params: args - optional args. Should only be used during systems
+ testing.
+
+ Returns: processed command line arguments
+ """
+ description = '''
+%(prog)s manages checking out CESM externals from revision control
+based on an externals description file. By default only the required
+externals are checked out.
+
+NOTE: %(prog)s *MUST* be run from the root of the source tree.
+
+Running %(prog)s without the '--status' option will always attempt to
+synchronize the working copy with the externals description.
+'''
+
+ epilog = '''
+```
+NOTE: %(prog)s *MUST* be run from the root of the source tree it
+is managing. For example, if you cloned CLM with:
+
+ $ git clone git@github.com/ncar/clm clm-dev
+
+Then the root of the source tree is /path/to/clm-dev. If you obtained
+CLM via a checkout of CESM:
+
+ $ git clone git@github.com/escomp/cesm cesm-dev
+
+and you need to checkout the CLM externals, then the root of the
+source tree is /path/to/cesm-dev. Do *NOT* run %(prog)s
+from within /path/to/cesm-dev/components/clm.
+
+The root of the source tree will be referred to as `${SRC_ROOT}` below.
+
+
+# Supported workflows
+
+ * Checkout all required components from the default externals
+ description file:
+
+ $ cd ${SRC_ROOT}
+ $ ./manage_externals/%(prog)s
+
+ * To update all required components to the current values in the
+ externals description file, re-run %(prog)s:
+
+ $ cd ${SRC_ROOT}
+ $ ./manage_externals/%(prog)s
+
+ If there are *any* modifications to *any* working copy according
+ to the git or svn 'status' command, %(prog)s
+ will not update any external repositories. Modifications
+ include: modified files, added files, removed files, or missing
+ files.
+
+ * Checkout all required components from a user specified externals
+ description file:
+
+ $ cd ${SRC_ROOT}
+    $ ./manage_externals/%(prog)s --externals myCESM.xml
+
+ * Status summary of the repositories managed by %(prog)s:
+
+ $ cd ${SRC_ROOT}
+ $ ./manage_externals/%(prog)s --status
+
+ ./cime
+ s ./components/cism
+ ./components/mosart
+ e-o ./components/rtm
+ M ./src/fates
+ e-o ./tools/PTCLM
+
+
+ where:
+ * column one indicates the status of the repository in relation
+ to the externals description file.
+ * column two indicates whether the working copy has modified files.
+ * column three shows how the repository is managed, optional or required
+
+ Column one will be one of these values:
+ * s : out-of-sync : repository is checked out at a different commit
+ compared with the externals description
+ * e : empty : directory does not exist - %(prog)s has not been run
+ * ? : unknown : directory exists but .git or .svn directories are missing
+
+ Column two will be one of these values:
+ * M : Modified : modified, added, deleted or missing files
+ * : blank / space : clean
+ * - : dash : no meaningful state, for empty repositories
+
+ Column three will be one of these values:
+   * o : optional : optional repository
+ * : blank / space : required repository
+
+ * Detailed git or svn status of the repositories managed by %(prog)s:
+
+ $ cd ${SRC_ROOT}
+ $ ./manage_externals/%(prog)s --status --verbose
+
+# Externals description file
+
+ The externals description contains a list of the external
+ repositories that are used and their version control locations. Each
+ external has:
+
+ * name (string) : component name, e.g. cime, cism, clm, cam, etc.
+
+ * required (boolean) : whether the component is a required checkout
+
+ * local_path (string) : component path *relative* to where
+ %(prog)s is called.
+
+  * protocol (string) : version control protocol that is used to
+ manage the component. Valid values are 'git', 'svn',
+ 'externals_only'.
+
+ Note: 'externals_only' will only process the external's own
+ external description file without trying to manage a repository
+    for the component. This is used for retrieving externals for
+ standalone components like cam and clm.
+
+ * repo_url (string) : URL for the repository location, examples:
+ * https://svn-ccsm-models.cgd.ucar.edu/glc
+ * git@github.com:esmci/cime.git
+ * /path/to/local/repository
+
+ If a repo url is determined to be a local path (not a network url)
+ then user expansion, e.g. ~/, and environment variable expansion,
+ e.g. $HOME or $REPO_ROOT, will be performed.
+
+ Relative paths are difficult to get correct, especially for mixed
+ use repos like clm. It is advised that local paths expand to
+ absolute paths. If relative paths are used, they should be
+ relative to one level above local_path. If local path is
+    'src/foo', then the relative url should be relative to
+ 'src'.
+
+ * tag (string) : tag to checkout
+
+ This can also be a git SHA-1
+
+ * branch (string) : branch to checkout
+
+ Note: either tag or branch must be supplied, but not both.
+
+ * externals (string) : relative path to the external's own external
+ description file that should also be used. It is *relative* to the
+ component local_path. For example, the CESM externals description
+ will load clm. CLM has additional externals that must be
+ downloaded to be complete. Those additional externals are managed
+ from the clm source root by the file pointed to by 'externals'.
+
+'''
+
+ parser = argparse.ArgumentParser(
+ description=description, epilog=epilog,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+
+ #
+ # user options
+ #
+ parser.add_argument('-e', '--externals', nargs='?',
+ default='Externals.cfg',
+ help='The externals description filename. '
+ 'Default: %(default)s.')
+
+ parser.add_argument('-o', '--optional', action='store_true', default=False,
+ help='By default only the required externals '
+ 'are checked out. This flag will also checkout the '
+ 'optional externals.')
+
+ parser.add_argument('-S', '--status', action='store_true', default=False,
+ help='Output status of the repositories managed by '
+ '%(prog)s. By default only summary information '
+ 'is provided. Use verbose output to see details.')
+
+ parser.add_argument('-v', '--verbose', action='count', default=0,
+ help='Output additional information to '
+ 'the screen and log file. This flag can be '
+ 'used up to two times, increasing the '
+ 'verbosity level each time.')
+
+ #
+ # developer options
+ #
+ parser.add_argument('--backtrace', action='store_true',
+ help='DEVELOPER: show exception backtraces as extra '
+ 'debugging output')
+
+ parser.add_argument('-d', '--debug', action='store_true', default=False,
+ help='DEVELOPER: output additional debugging '
+ 'information to the screen and log file.')
+
+ parser.add_argument('--no-logging', action='store_true',
+ help='DEVELOPER: disable logging.')
+
+ if args:
+ options = parser.parse_args(args)
+ else:
+ options = parser.parse_args()
+ return options
+
+
+# ---------------------------------------------------------------------
+#
+# main
+#
+# ---------------------------------------------------------------------
+def main(args):
+ """
+ Function to call when module is called from the command line.
+ Parse externals file and load required repositories or all repositories if
+ the --all option is passed.
+ """
+ if not args.no_logging:
+ logging.basicConfig(filename=LOG_FILE_NAME,
+ format='%(levelname)s : %(asctime)s : %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S',
+ level=logging.DEBUG)
+
+ program_name = os.path.basename(sys.argv[0])
+ logging.info('Beginning of %s', program_name)
+
+ load_all = False
+ if args.optional:
+ load_all = True
+
+ root_dir = os.path.abspath(os.getcwd())
+ external_data = read_externals_description_file(root_dir, args.externals)
+ external = create_externals_description(external_data)
+
+ source_tree = SourceTree(root_dir, external)
+ printlog('Checking status of externals: ', end='')
+ tree_status = source_tree.status()
+ printlog('')
+
+ if args.status:
+ # user requested status-only
+ for comp in sorted(tree_status.keys()):
+ tree_status[comp].log_status_message(args.verbose)
+ else:
+ # checkout / update the external repositories.
+ safe_to_update = check_safe_to_update_repos(tree_status)
+ if not safe_to_update:
+ # print status
+ for comp in sorted(tree_status.keys()):
+ tree_status[comp].log_status_message(args.verbose)
+ # exit gracefully
+ msg = """The external repositories labeled with 'M' above are not in a clean state.
+
+The following are two options for how to proceed:
+
+(1) Go into each external that is not in a clean state and issue either
+ an 'svn status' or a 'git status' command. Either revert or commit
+ your changes so that all externals are in a clean state. (Note,
+ though, that it is okay to have untracked files in your working
+ directory.) Then rerun {program_name}.
+
+(2) Alternatively, you do not have to rely on {program_name}. Instead, you
+ can manually update out-of-sync externals (labeled with 's' above)
+ as described in the configuration file {config_file}.
+""".format(program_name=program_name, config_file=args.externals)
+
+ printlog('-' * 70)
+ printlog(msg)
+ printlog('-' * 70)
+ else:
+ source_tree.checkout(args.verbose, load_all)
+ printlog('')
+
+ logging.info('%s completed without exceptions.', program_name)
+ # NOTE(bja, 2017-11) tree status is used by the systems tests
+ return 0, tree_status
diff --git a/manage_externals/manic/externals_description.py b/manage_externals/manic/externals_description.py
new file mode 100644
index 0000000000..f3d3fad78f
--- /dev/null
+++ b/manage_externals/manic/externals_description.py
@@ -0,0 +1,375 @@
+#!/usr/bin/env python
+
+"""Model description
+
+Model description is the representation of the various externals
+included in the model. It processes an input data structure, and
+converts it into a standard interface that is used by the rest of the
+system.
+
+To maintain backward compatibility, externals description files should
+follow semantic versioning rules, http://semver.org/
+
+
+
+"""
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import logging
+import os
+import os.path
+import re
+
+# ConfigParser was renamed in python2 to configparser. In python2,
+# ConfigParser returns byte strings, str, instead of unicode. We need
+# unicode to be compatible with xml and json parser and python3.
+try:
+ # python2
+ from ConfigParser import SafeConfigParser as config_parser
+ from ConfigParser import MissingSectionHeaderError
+ from ConfigParser import NoSectionError, NoOptionError
+
+ def config_string_cleaner(text):
+ """convert strings into unicode
+ """
+ return text.decode('utf-8')
+except ImportError:
+ # python3
+ from configparser import ConfigParser as config_parser
+ from configparser import MissingSectionHeaderError
+ from configparser import NoSectionError, NoOptionError
+
+ def config_string_cleaner(text):
+ """Python3 already uses unicode strings, so just return the string
+ without modification.
+
+ """
+ return text
+
+from .utils import printlog, fatal_error, str_to_bool, expand_local_url
+from .global_constants import EMPTY_STR, PPRINTER, VERSION_SEPERATOR
+
+#
+# Globals
+#
+DESCRIPTION_SECTION = 'externals_description'
+VERSION_ITEM = 'schema_version'
+
+
+def read_externals_description_file(root_dir, file_name):
+ """Given a file name containing a externals description, determine the
+ format and read it into it's internal representation.
+
+ """
+ root_dir = os.path.abspath(root_dir)
+ msg = 'In directory : {0}'.format(root_dir)
+ logging.info(msg)
+ printlog('Processing externals description file : {0}'.format(file_name))
+
+ file_path = os.path.join(root_dir, file_name)
+ if not os.path.exists(file_name):
+ msg = ('ERROR: Model description file, "{0}", does not '
+ 'exist at path:\n {1}\nDid you run from the root of '
+ 'the source tree?'.format(file_name, file_path))
+ fatal_error(msg)
+
+ externals_description = None
+ try:
+ config = config_parser()
+ config.read(file_path)
+ externals_description = config
+ except MissingSectionHeaderError:
+ # not a cfg file
+ pass
+
+ if externals_description is None:
+ msg = 'Unknown file format!'
+ fatal_error(msg)
+
+ return externals_description
+
+
+def create_externals_description(model_data, model_format='cfg'):
+ """Create the a externals description object from the provided data
+ """
+ externals_description = None
+ if model_format == 'dict':
+ externals_description = ExternalsDescriptionDict(model_data, )
+ elif model_format == 'cfg':
+ major, _, _ = get_cfg_schema_version(model_data)
+ if major == 1:
+ externals_description = ExternalsDescriptionConfigV1(model_data)
+ else:
+ msg = ('Externals description file has unsupported schema '
+ 'version "{0}".'.format(major))
+ fatal_error(msg)
+ else:
+ msg = 'Unknown model data format "{0}"'.format(model_format)
+ fatal_error(msg)
+ return externals_description
+
+
+def get_cfg_schema_version(model_cfg):
+ """Extract the major, minor, patch version of the config file schema
+
+ Params:
+        model_cfg - config parser object containing the externals description data
+
+ Returns:
+ major = integer major version
+ minor = integer minor version
+ patch = integer patch version
+ """
+ semver_str = ''
+ try:
+ semver_str = model_cfg.get(DESCRIPTION_SECTION, VERSION_ITEM)
+ except (NoSectionError, NoOptionError):
+ msg = ('externals description file must have the required '
+ 'section: "{0}" and item "{1}"'.format(DESCRIPTION_SECTION,
+ VERSION_ITEM))
+ fatal_error(msg)
+
+ # NOTE(bja, 2017-11) Assume we don't care about the
+ # build/pre-release metadata for now!
+ version_list = re.split(r'[-+]', semver_str)
+ version_str = version_list[0]
+ version = version_str.split(VERSION_SEPERATOR)
+ try:
+ major = int(version[0].strip())
+ minor = int(version[1].strip())
+ patch = int(version[2].strip())
+ except ValueError:
+ msg = ('Config file schema version must have integer digits for '
+ 'major, minor and patch versions. '
+ 'Received "{0}"'.format(version_str))
+ fatal_error(msg)
+ return major, minor, patch
+
+
+class ExternalsDescription(dict):
+ """Base externals description class that is independent of the user input
+ format. Different input formats can all be converted to this
+    representation to provide a consistent representation for the
+ rest of the objects in the system.
+
+ """
+ # keywords defining the interface into the externals description data
+ EXTERNALS = 'externals'
+ BRANCH = 'branch'
+ REPO = 'repo'
+ REQUIRED = 'required'
+ TAG = 'tag'
+ PATH = 'local_path'
+ PROTOCOL = 'protocol'
+ REPO_URL = 'repo_url'
+ NAME = 'name'
+
+ PROTOCOL_EXTERNALS_ONLY = 'externals_only'
+ PROTOCOL_GIT = 'git'
+ PROTOCOL_SVN = 'svn'
+ KNOWN_PRROTOCOLS = [PROTOCOL_GIT, PROTOCOL_SVN, PROTOCOL_EXTERNALS_ONLY]
+
+ # v1 xml keywords
+ _V1_TREE_PATH = 'TREE_PATH'
+ _V1_ROOT = 'ROOT'
+ _V1_TAG = 'TAG'
+ _V1_BRANCH = 'BRANCH'
+ _V1_REQ_SOURCE = 'REQ_SOURCE'
+
+ _source_schema = {REQUIRED: True,
+ PATH: 'string',
+ EXTERNALS: 'string',
+ REPO: {PROTOCOL: 'string',
+ REPO_URL: 'string',
+ TAG: 'string',
+ BRANCH: 'string',
+ }
+ }
+
+ def __init__(self):
+ """Convert the xml into a standardized dict that can be used to
+ construct the source objects
+
+ """
+ dict.__init__(self)
+
+ def _check_user_input(self):
+ """Run a series of checks to attempt to validate the user input and
+ detect errors as soon as possible.
+ """
+ self._check_optional()
+ self._validate()
+ self._check_data()
+
+ def _check_data(self):
+ """Check user supplied data is valid where possible.
+ """
+ for ext_name in self.keys():
+ if (self[ext_name][self.REPO][self.PROTOCOL]
+ not in self.KNOWN_PRROTOCOLS):
+ msg = 'Unknown repository protocol "{0}" in "{1}".'.format(
+ self[ext_name][self.REPO][self.PROTOCOL], ext_name)
+ fatal_error(msg)
+
+ if (self[ext_name][self.REPO][self.PROTOCOL]
+ != self.PROTOCOL_EXTERNALS_ONLY):
+ if (self[ext_name][self.REPO][self.TAG] and
+ self[ext_name][self.REPO][self.BRANCH]):
+ msg = ('Model description is over specified! Can not '
+ 'have both "tag" and "branch" in repo '
+ 'description for "{0}"'.format(ext_name))
+ fatal_error(msg)
+
+ if (not self[ext_name][self.REPO][self.TAG] and
+ not self[ext_name][self.REPO][self.BRANCH]):
+ msg = ('Model description is under specified! Must have '
+ 'either "tag" or "branch" in repo '
+ 'description for "{0}"'.format(ext_name))
+ fatal_error(msg)
+
+ if not self[ext_name][self.REPO][self.REPO_URL]:
+ msg = ('Model description is under specified! Must have '
+ 'either "repo_url" in repo '
+ 'description for "{0}"'.format(ext_name))
+ fatal_error(msg)
+
+ url = expand_local_url(
+ self[ext_name][self.REPO][self.REPO_URL], ext_name)
+ self[ext_name][self.REPO][self.REPO_URL] = url
+
+ def _check_optional(self):
+ """Some fields like externals, repo:tag repo:branch are
+ (conditionally) optional. We don't want the user to be
+ required to enter them in every externals description file, but
+ still want to validate the input. Check conditions and add
+ default values if appropriate.
+
+ """
+ for field in self:
+            # truly optional
+ if self.EXTERNALS not in self[field]:
+ self[field][self.EXTERNALS] = EMPTY_STR
+
+            # git and svn repos must have tags and branches for validation purposes.
+ if self.TAG not in self[field][self.REPO]:
+ self[field][self.REPO][self.TAG] = EMPTY_STR
+ if self.BRANCH not in self[field][self.REPO]:
+ self[field][self.REPO][self.BRANCH] = EMPTY_STR
+ if self.REPO_URL not in self[field][self.REPO]:
+ self[field][self.REPO][self.REPO_URL] = EMPTY_STR
+
+ def _validate(self):
+ """Validate that the parsed externals description contains all necessary
+ fields.
+
+ """
+ def validate_data_struct(schema, data):
+ """Compare a data structure against a schema and validate all required
+ fields are present.
+
+ """
+ is_valid = False
+ in_ref = True
+ valid = True
+ if isinstance(schema, dict) and isinstance(data, dict):
+ for k in schema:
+ in_ref = in_ref and (k in data)
+ if in_ref:
+ valid = valid and (
+ validate_data_struct(schema[k], data[k]))
+ is_valid = in_ref and valid
+ else:
+ is_valid = isinstance(data, type(schema))
+ if not is_valid:
+ printlog(" Unmatched schema and data:")
+ if isinstance(schema, dict):
+ for item in schema:
+ printlog(" {0} schema = {1} ({2})".format(
+ item, schema[item], type(schema[item])))
+ printlog(" {0} data = {1} ({2})".format(
+ item, data[item], type(data[item])))
+ else:
+ printlog(" schema = {0} ({1})".format(
+ schema, type(schema)))
+ printlog(" data = {0} ({1})".format(data, type(data)))
+ return is_valid
+
+ for field in self:
+ valid = validate_data_struct(self._source_schema, self[field])
+ if not valid:
+ PPRINTER.pprint(self._source_schema)
+ PPRINTER.pprint(self[field])
+ msg = 'ERROR: source for "{0}" did not validate'.format(field)
+ fatal_error(msg)
+
+
+class ExternalsDescriptionDict(ExternalsDescription):
+ """Create a externals description object from a dictionary using the API
+ representations. Primarily used to simplify creating model
+ description files for unit testing.
+
+ """
+
+ def __init__(self, model_data):
+ """Parse a native dictionary into a externals description.
+ """
+ ExternalsDescription.__init__(self)
+ self.update(model_data)
+ self._check_user_input()
+
+
+class ExternalsDescriptionConfigV1(ExternalsDescription):
+ """Create a externals description object from a config_parser object,
+ schema version 1.
+
+ """
+
+ def __init__(self, model_data):
+ """Convert the xml into a standardized dict that can be used to
+ construct the source objects
+
+ """
+ ExternalsDescription.__init__(self)
+ self._remove_metadata(model_data)
+ self._parse_cfg(model_data)
+ self._check_user_input()
+
+ @staticmethod
+ def _remove_metadata(model_data):
+ """Remove the metadata section from the model configuration file so
+ that it is simpler to look through the file and construct the
+ externals description.
+
+ """
+ model_data.remove_section(DESCRIPTION_SECTION)
+
+ def _parse_cfg(self, cfg_data):
+ """Parse a config_parser object into a externals description.
+ """
+ def list_to_dict(input_list, convert_to_lower_case=True):
+ """Convert a list of key-value pairs into a dictionary.
+ """
+ output_dict = {}
+ for item in input_list:
+ key = config_string_cleaner(item[0].strip())
+ value = config_string_cleaner(item[1].strip())
+ if convert_to_lower_case:
+ key = key.lower()
+ output_dict[key] = value
+ return output_dict
+
+ for section in cfg_data.sections():
+ name = config_string_cleaner(section.lower().strip())
+ self[name] = {}
+ self[name].update(list_to_dict(cfg_data.items(section)))
+ self[name][self.REPO] = {}
+ loop_keys = self[name].copy().keys()
+ for item in loop_keys:
+ if item in self._source_schema:
+ if isinstance(self._source_schema[item], bool):
+ self[name][item] = str_to_bool(self[name][item])
+ if item in self._source_schema[self.REPO]:
+ self[name][self.REPO][item] = self[name][item]
+ del self[name][item]
diff --git a/manage_externals/manic/externals_status.py b/manage_externals/manic/externals_status.py
new file mode 100644
index 0000000000..d3d238f289
--- /dev/null
+++ b/manage_externals/manic/externals_status.py
@@ -0,0 +1,164 @@
+"""ExternalStatus
+
+Class to store status and state information about repositories and
+create a string representation.
+
+"""
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+from .global_constants import EMPTY_STR
+from .utils import printlog, indent_string
+from .global_constants import VERBOSITY_VERBOSE, VERBOSITY_DUMP
+
+
+class ExternalStatus(object):
+ """Class to represent the status of a given source repository or tree.
+
+ Individual repositories determine their own status in the
+    Repository objects. This object is just responsible for storing the
+ information and passing it up to a higher level for reporting or
+ global decisions.
+
+ There are two states of concern:
+
+ * If the repository is in-sync with the externals description file.
+
+    * If the repository working copy is clean and there are no pending
+ transactions (e.g. add, remove, rename, untracked files).
+
+ """
+ DEFAULT = '-'
+ UNKNOWN = '?'
+ EMPTY = 'e'
+ MODEL_MODIFIED = 's' # a.k.a. out-of-sync
+ DIRTY = 'M'
+
+ STATUS_OK = ' '
+ STATUS_ERROR = '!'
+
+ # source types
+ OPTIONAL = 'o'
+ STANDALONE = 's'
+ MANAGED = ' '
+
+ def __init__(self):
+ self.sync_state = self.DEFAULT
+ self.clean_state = self.DEFAULT
+ self.source_type = self.DEFAULT
+ self.path = EMPTY_STR
+ self.current_version = EMPTY_STR
+ self.expected_version = EMPTY_STR
+ self.status_output = EMPTY_STR
+
+ def log_status_message(self, verbosity):
+ """Write status message to the screen and log file
+ """
+ self._default_status_message()
+ if verbosity >= VERBOSITY_VERBOSE:
+ self._verbose_status_message()
+ if verbosity >= VERBOSITY_DUMP:
+ self._dump_status_message()
+
+ def _default_status_message(self):
+ """Return the default terse status message string
+ """
+ msg = '{sync}{clean}{src_type} {path}'.format(
+ sync=self.sync_state, clean=self.clean_state,
+ src_type=self.source_type, path=self.path)
+ printlog(msg)
+
+ def _verbose_status_message(self):
+ """Return the verbose status message string
+ """
+ clean_str = self.DEFAULT
+ if self.clean_state == self.STATUS_OK:
+ clean_str = 'clean sandbox'
+ elif self.clean_state == self.DIRTY:
+ clean_str = 'modified sandbox'
+
+ sync_str = 'on {0}'.format(self.current_version)
+ if self.sync_state != self.STATUS_OK:
+ sync_str = '{current} --> {expected}'.format(
+ current=self.current_version, expected=self.expected_version)
+ msg = ' {clean}, {sync}'.format(clean=clean_str, sync=sync_str)
+ printlog(msg)
+
+ def _dump_status_message(self):
+ """Return the dump status message string
+ """
+ msg = indent_string(self.status_output, 12)
+ printlog(msg)
+
+ def safe_to_update(self):
+ """Report if it is safe to update a repository. Safe is defined as:
+
+ * If a repository is empty, it is safe to update.
+
+ * If a repository exists and has a clean working copy state
+ with no pending transactions.
+
+ """
+ safe_to_update = False
+ repo_exists = self.exists()
+ if not repo_exists:
+ safe_to_update = True
+ else:
+ # If the repo exists, it must be in ok or modified
+ # sync_state. Any other sync_state at this point
+ # represents a logic error that should have been handled
+ # before now!
+ sync_safe = ((self.sync_state == ExternalStatus.STATUS_OK) or
+ (self.sync_state == ExternalStatus.MODEL_MODIFIED))
+ if sync_safe:
+ # The clean_state must be STATUS_OK to update. Otherwise we
+ # are dirty or there was a missed error previously.
+ if self.clean_state == ExternalStatus.STATUS_OK:
+ safe_to_update = True
+ return safe_to_update
+
+ def exists(self):
+ """Determine if the repo exists. This is indicated by:
+
+ * sync_state is not EMPTY
+
+ * if the sync_state is empty, then the valid states for
+ clean_state are default, empty or unknown. Anything else
+ and there was probably an internal logic error.
+
+ NOTE(bja, 2017-10) For the moment we are considering a
+ sync_state of default or unknown to require user intervention,
+ but we may want to relax this convention. This is probably a
+ result of a network error or internal logic error but more
+ testing is needed.
+
+ """
+ is_empty = (self.sync_state == ExternalStatus.EMPTY)
+ clean_valid = ((self.clean_state == ExternalStatus.DEFAULT) or
+ (self.clean_state == ExternalStatus.EMPTY) or
+ (self.clean_state == ExternalStatus.UNKNOWN))
+
+ if is_empty and clean_valid:
+ exists = False
+ else:
+ exists = True
+ return exists
+
+
+def check_safe_to_update_repos(tree_status):
+ """Check if *ALL* repositories are in a safe state to update. We don't
+ want to do a partial update of the repositories then die, leaving
+ the model in an inconsistent state.
+
+    Note: if there is an update to do, the repositories will by
+    definition be out of sync with the externals description, so we
+ can't use that as criteria for updating.
+
+ """
+ safe_to_update = True
+ for comp in tree_status:
+ stat = tree_status[comp]
+ safe_to_update &= stat.safe_to_update()
+
+ return safe_to_update
diff --git a/manage_externals/manic/global_constants.py b/manage_externals/manic/global_constants.py
new file mode 100644
index 0000000000..0e91cffc90
--- /dev/null
+++ b/manage_externals/manic/global_constants.py
@@ -0,0 +1,18 @@
+"""Globals shared across modules
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import pprint
+
+EMPTY_STR = ''
+LOCAL_PATH_INDICATOR = '.'
+VERSION_SEPERATOR = '.'
+LOG_FILE_NAME = 'manage_externals.log'
+PPRINTER = pprint.PrettyPrinter(indent=4)
+
+VERBOSITY_DEFAULT = 0
+VERBOSITY_VERBOSE = 1
+VERBOSITY_DUMP = 2
diff --git a/manage_externals/manic/repository.py b/manage_externals/manic/repository.py
new file mode 100644
index 0000000000..9baa0667ca
--- /dev/null
+++ b/manage_externals/manic/repository.py
@@ -0,0 +1,65 @@
+"""Base class representation of a repository
+"""
+
+from .externals_description import ExternalsDescription
+from .utils import fatal_error
+from .global_constants import EMPTY_STR
+
+
+class Repository(object):
+ """
+ Class to represent and operate on a repository description.
+ """
+
+ def __init__(self, component_name, repo):
+ """
+ Parse repo externals description
+ """
+ self._name = component_name
+ self._protocol = repo[ExternalsDescription.PROTOCOL]
+ self._tag = repo[ExternalsDescription.TAG]
+ self._branch = repo[ExternalsDescription.BRANCH]
+ self._url = repo[ExternalsDescription.REPO_URL]
+
+ if self._url is EMPTY_STR:
+ fatal_error('repo must have a URL')
+
+ if self._tag is EMPTY_STR and self._branch is EMPTY_STR:
+ fatal_error('repo must have either a branch or a tag element')
+
+ if self._tag is not EMPTY_STR and self._branch is not EMPTY_STR:
+ fatal_error('repo cannot have both a tag and a branch element')
+
+ def checkout(self, base_dir_path, repo_dir_name, verbosity): # pylint: disable=unused-argument
+ """
+ If the repo destination directory exists, ensure it is correct (from
+ correct URL, correct branch or tag), and possibly update the source.
+ If the repo destination directory does not exist, checkout the correce
+ branch or tag.
+ """
+ msg = ('DEV_ERROR: checkout method must be implemented in all '
+ 'repository classes! {0}'.format(self.__class__.__name__))
+ fatal_error(msg)
+
+ def status(self, stat, repo_dir_path): # pylint: disable=unused-argument
+ """Report the status of the repo
+
+ """
+ msg = ('DEV_ERROR: status method must be implemented in all '
+ 'repository classes! {0}'.format(self.__class__.__name__))
+ fatal_error(msg)
+
+ def url(self):
+ """Public access of repo url.
+ """
+ return self._url
+
+ def tag(self):
+ """Public access of repo tag
+ """
+ return self._tag
+
+ def branch(self):
+ """Public access of repo branch.
+ """
+ return self._branch
diff --git a/manage_externals/manic/repository_factory.py b/manage_externals/manic/repository_factory.py
new file mode 100644
index 0000000000..c95e7a509b
--- /dev/null
+++ b/manage_externals/manic/repository_factory.py
@@ -0,0 +1,29 @@
+"""Factory for creating and initializing the appropriate repository class
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+from .repository_git import GitRepository
+from .repository_svn import SvnRepository
+from .externals_description import ExternalsDescription
+from .utils import fatal_error
+
+
+def create_repository(component_name, repo_info):
+ """Determine what type of repository we have, i.e. git or svn, and
+ create the appropriate object.
+
+ """
+ protocol = repo_info[ExternalsDescription.PROTOCOL].lower()
+ if protocol == 'git':
+ repo = GitRepository(component_name, repo_info)
+ elif protocol == 'svn':
+ repo = SvnRepository(component_name, repo_info)
+ elif protocol == 'externals_only':
+ repo = None
+ else:
+ msg = 'Unknown repo protocol "{0}"'.format(protocol)
+ fatal_error(msg)
+ return repo
diff --git a/manage_externals/manic/repository_git.py b/manage_externals/manic/repository_git.py
new file mode 100644
index 0000000000..d1198796ed
--- /dev/null
+++ b/manage_externals/manic/repository_git.py
@@ -0,0 +1,670 @@
+"""Class for interacting with git repositories
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import copy
+import os
+import re
+
+from .global_constants import EMPTY_STR, LOCAL_PATH_INDICATOR
+from .global_constants import VERBOSITY_VERBOSE
+from .repository import Repository
+from .externals_status import ExternalStatus
+from .utils import expand_local_url, split_remote_url, is_remote_url
+from .utils import fatal_error, printlog
+from .utils import execute_subprocess
+
+
+class GitRepository(Repository):
+ """Class to represent and operate on a repository description.
+
+ For testing purpose, all system calls to git should:
+
+ * be isolated in separate functions with no application logic
+ * of the form:
+ - cmd = ['git', ...]
+ - value = execute_subprocess(cmd, output_to_caller={T|F},
+ status_to_caller={T|F})
+ - return value
+ * be static methods (not rely on self)
+ * name as _git_subcommand_args(user_args)
+
+ This convention allows easy unit testing of the repository logic
+ by mocking the specific calls to return predefined results.
+
+ """
+
+ # match XYZ of '* (HEAD detached at {XYZ}):
+ # e.g. * (HEAD detached at origin/feature-2)
+ RE_DETACHED = re.compile(
+ r'\* \((?:[\w]+[\s]+)?detached (?:at|from) ([\w\-./]+)\)')
+
+ # match tracking reference info, return XYZ from [XYZ]
+ # e.g. [origin/master]
+ RE_TRACKING = re.compile(r'\[([\w\-./]+)\]')
+
+ def __init__(self, component_name, repo):
+ """
+ Parse repo (a XML element).
+ """
+ Repository.__init__(self, component_name, repo)
+
+ # ----------------------------------------------------------------
+ #
+ # Public API, defined by Repository
+ #
+ # ----------------------------------------------------------------
+ def checkout(self, base_dir_path, repo_dir_name, verbosity):
+ """
+ If the repo destination directory exists, ensure it is correct (from
+ correct URL, correct branch or tag), and possibly update the source.
+        If the repo destination directory does not exist, checkout the correct
+ branch or tag.
+ """
+ repo_dir_path = os.path.join(base_dir_path, repo_dir_name)
+ if not os.path.exists(repo_dir_path):
+ self._clone_repo(base_dir_path, repo_dir_name, verbosity)
+ self._checkout_ref(repo_dir_path, verbosity)
+
+ def status(self, stat, repo_dir_path):
+ """
+ If the repo destination directory exists, ensure it is correct (from
+ correct URL, correct branch or tag), and possibly update the source.
+ If the repo destination directory does not exist, checkout the correct
+ branch or tag.
+ """
+ self._check_sync(stat, repo_dir_path)
+ if os.path.exists(repo_dir_path):
+ self._status_summary(stat, repo_dir_path)
+
+ # ----------------------------------------------------------------
+ #
+ # Internal work functions
+ #
+ # ----------------------------------------------------------------
+ def _clone_repo(self, base_dir_path, repo_dir_name, verbosity):
+ """Prepare to execute the clone by managing directory location
+ """
+ cwd = os.getcwd()
+ os.chdir(base_dir_path)
+ self._git_clone(self._url, repo_dir_name, verbosity)
+ os.chdir(cwd)
+
+ def _current_ref_from_branch_command(self, git_output):
+ """Parse output of the 'git branch' command to determine the current branch.
+ The line starting with '*' is the current branch. It can be one of:
+
+ feature2 36418b4 [origin/feature2] Work on feature2
+* feature3 36418b4 Work on feature2
+ master 9b75494 [origin/master] Initialize repository.
+
+* (HEAD detached at 36418b4) 36418b4 Work on feature2
+ feature2 36418b4 [origin/feature2] Work on feature2
+ master 9b75494 [origin/master] Initialize repository.
+
+* (HEAD detached at origin/feature2) 36418b4 Work on feature2
+ feature2 36418b4 [origin/feature2] Work on feature2
+ feature3 36418b4 Work on feature2
+ master 9b75494 [origin/master] Initialize repository.
+
+ Possible head states:
+
+ * detached from remote branch --> ref = remote/branch
+ * detached from tag --> ref = tag
+ * detached from sha --> ref = sha
+ * on local branch --> ref = branch
+ * on tracking branch --> ref = remote/branch
+
+ On a branch:
+ * cm-testing
+
+ Detached head from a tag:
+ * (HEAD detached at junk-tag)
+
+ Detached head from a hash
+ * (HEAD detached at 0246874c)
+
+ NOTE: Parsing the output of the porcelain is probably not a
+ great idea, but there doesn't appear to be a single plumbing
+ command that will return the same info.
+
+ """
+ lines = git_output.splitlines()
+ ref = ''
+ for line in lines:
+ if line.startswith('*'):
+ ref = line
+ break
+ current_ref = EMPTY_STR
+ if not ref:
+ # not a git repo? some other error? we return so the
+ # caller can handle.
+ pass
+ elif 'detached' in ref:
+ match = self.RE_DETACHED.search(ref)
+ try:
+ current_ref = match.group(1)
+ except BaseException:
+ msg = 'DEV_ERROR: regex to detect detached head state failed!'
+ msg += '\nref:\n{0}\ngit_output\n{1}\n'.format(ref, git_output)
+ fatal_error(msg)
+ elif '[' in ref:
+ match = self.RE_TRACKING.search(ref)
+ try:
+ current_ref = match.group(1)
+ except BaseException:
+ msg = 'DEV_ERROR: regex to detect tracking branch failed.'
+ fatal_error(msg)
+ else:
+ # assumed local branch
+ current_ref = ref.split()[1]
+
+ current_ref = current_ref.strip()
+ return current_ref
+
+ def _check_sync(self, stat, repo_dir_path):
+ """Determine whether a git repository is in-sync with the model
+ description.
+
+ Because repos can have multiple remotes, the only criteria is
+ whether the branch or tag is the same.
+
+ """
+ if not os.path.exists(repo_dir_path):
+ # NOTE(bja, 2017-10) condition should have been determined
+ # by _Source() object and should never be here!
+ stat.sync_state = ExternalStatus.STATUS_ERROR
+ else:
+ git_dir = os.path.join(repo_dir_path, '.git')
+ if not os.path.exists(git_dir):
+ # NOTE(bja, 2017-10) directory exists, but no git repo
+ # info.... Can't test with subprocess git command
+ # because git will move up directory tree until it
+ # finds the parent repo git dir!
+ stat.sync_state = ExternalStatus.UNKNOWN
+ else:
+ self._check_sync_logic(stat, repo_dir_path)
+
+ def _check_sync_logic(self, stat, repo_dir_path):
+ """Isolate the complicated synce logic so it is not so deeply nested
+ and a bit easier to understand.
+
+ Sync logic - only reporting on whether we are on the ref
+ (branch, tag, hash) specified in the externals description.
+
+
+ """
+ def compare_refs(current_ref, expected_ref):
+ """Compare the current and expected ref.
+
+ """
+ if current_ref == expected_ref:
+ status = ExternalStatus.STATUS_OK
+ else:
+ status = ExternalStatus.MODEL_MODIFIED
+ return status
+
+ cwd = os.getcwd()
+ os.chdir(repo_dir_path)
+
+ git_output = self._git_branch_vv()
+ current_ref = self._current_ref_from_branch_command(git_output)
+
+ if self._branch:
+ if self._url == LOCAL_PATH_INDICATOR:
+ expected_ref = self._branch
+ else:
+ remote_name = self._determine_remote_name()
+ if not remote_name:
+ # git doesn't know about this remote. by definition
+ # this is a modified state.
+ expected_ref = "unknown_remote/{0}".format(self._branch)
+ else:
+ expected_ref = "{0}/{1}".format(remote_name, self._branch)
+ else:
+ expected_ref = self._tag
+
+ stat.sync_state = compare_refs(current_ref, expected_ref)
+ if current_ref == EMPTY_STR:
+ stat.sync_state = ExternalStatus.UNKNOWN
+
+ stat.current_version = current_ref
+ stat.expected_version = expected_ref
+
+ os.chdir(cwd)
+
+ def _determine_remote_name(self):
+ """Return the remote name.
+
+ Note that this is for the *future* repo url and branch, not
+ the current working copy!
+
+ """
+ git_output = self._git_remote_verbose()
+ git_output = git_output.splitlines()
+ remote_name = ''
+ for line in git_output:
+ data = line.strip()
+ if not data:
+ continue
+ data = data.split()
+ name = data[0].strip()
+ url = data[1].strip()
+ if self._url == url:
+ remote_name = name
+ break
+ return remote_name
+
+ def _create_remote_name(self):
+ """The url specified in the externals description file was not known
+ to git. We need to add it, which means adding a unique and
+ safe name....
+
+ The assigned name needs to be safe for git to use, e.g. can't
+ look like a path 'foo/bar' and work with both remote and local paths.
+
+ Remote paths include but are not limited to: git, ssh, https,
+ github, gitlab, bitbucket, custom server, etc.
+
+ Local paths can be relative or absolute. They may contain
+ shell variables, e.g. ${REPO_ROOT}/repo_name, or username
+ expansion, i.e. ~/ or ~someuser/.
+
+ Relative paths must be at least one layer of redirection, i.e.
+ container/../ext_repo, but may be many layers deep, e.g.
+ container/../../../../../ext_repo
+
+ NOTE(bja, 2017-11)
+
+ The base name below may not be unique, for example if the
+ user has local paths like:
+
+ /path/to/my/repos/nice_repo
+ /path/to/other/repos/nice_repo
+
+ But the current implementation should cover most common
+ use cases for remotes and still provide usable names.
+
+ """
+ url = copy.deepcopy(self._url)
+ if is_remote_url(url):
+ url = split_remote_url(url)
+ else:
+ url = expand_local_url(url, self._name)
+ url = url.split('/')
+ repo_name = url[-1]
+ base_name = url[-2]
+ # repo name should nominally already be something that git can
+ # deal with. We need to remove other possibly troublesome
+ # punctuation, e.g. /, $, from the base name.
+ unsafe_characters = '!@#$%^&*()[]{}\\/,;~'
+ for unsafe in unsafe_characters:
+ base_name = base_name.replace(unsafe, '')
+ remote_name = "{0}_{1}".format(base_name, repo_name)
+ return remote_name
+
+ def _checkout_ref(self, repo_dir, verbosity):
+ """Checkout the user supplied reference
+ """
+ # import pdb; pdb.set_trace()
+ cwd = os.getcwd()
+ os.chdir(repo_dir)
+ if self._url.strip() == LOCAL_PATH_INDICATOR:
+ self._checkout_local_ref(verbosity)
+ else:
+ self._checkout_external_ref(verbosity)
+ os.chdir(cwd)
+
+ def _checkout_local_ref(self, verbosity):
+ """Checkout the reference considering the local repo only. Do not
+ fetch any additional remotes or specify the remote when
+ checkout out the ref.
+
+ """
+ if self._tag:
+ ref = self._tag
+ else:
+ ref = self._branch
+ self._check_for_valid_ref(ref)
+ self._git_checkout_ref(ref, verbosity)
+
+ def _checkout_external_ref(self, verbosity):
+ """Checkout the reference from a remote repository
+ """
+ remote_name = self._determine_remote_name()
+ if not remote_name:
+ remote_name = self._create_remote_name()
+ self._git_remote_add(remote_name, self._url)
+ self._git_fetch(remote_name)
+ if self._tag:
+ is_unique_tag, check_msg = self._is_unique_tag(self._tag,
+ remote_name)
+ if not is_unique_tag:
+ msg = ('In repo "{0}": tag "{1}" {2}'.format(
+ self._name, self._tag, check_msg))
+ fatal_error(msg)
+ ref = self._tag
+ else:
+ ref = '{0}/{1}'.format(remote_name, self._branch)
+ self._git_checkout_ref(ref, verbosity)
+
+ def _check_for_valid_ref(self, ref):
+ """Try some basic sanity checks on the user supplied reference so we
+        can provide a more useful error message than a bare
+        CalledProcessError...
+
+ """
+ is_tag = self._ref_is_tag(ref)
+ is_branch = self._ref_is_branch(ref)
+ is_commit = self._ref_is_commit(ref)
+
+ is_valid = is_tag or is_branch or is_commit
+ if not is_valid:
+ msg = ('In repo "{0}": reference "{1}" does not appear to be a '
+ 'valid tag, branch or commit! Please verify the reference '
+ 'name (e.g. spelling), is available from: {2} '.format(
+ self._name, ref, self._url))
+ fatal_error(msg)
+ return is_valid
+
+ def _is_unique_tag(self, ref, remote_name):
+ """Verify that a reference is a valid tag and is unique (not a branch)
+
+ Tags may be tag names, or SHA id's. It is also possible that a
+ branch and tag have the same name.
+
+ Note: values returned by git_showref_* and git_revparse are
+ shell return codes, which are zero for success, non-zero for
+ error!
+
+ """
+ is_tag = self._ref_is_tag(ref)
+ is_branch = self._ref_is_branch(ref, remote_name)
+ is_commit = self._ref_is_commit(ref)
+
+ msg = ''
+ is_unique_tag = False
+ if is_tag and not is_branch:
+ # unique tag
+ msg = 'is ok'
+ is_unique_tag = True
+ elif is_tag and is_branch:
+ msg = ('is both a branch and a tag. git may checkout the branch '
+ 'instead of the tag depending on your version of git.')
+ is_unique_tag = False
+ elif not is_tag and is_branch:
+ msg = ('is a branch, and not a tag. If you intended to checkout '
+ 'a branch, please change the externals description to be '
+ 'a branch. If you intended to checkout a tag, it does not '
+ 'exist. Please check the name.')
+ is_unique_tag = False
+ else: # not is_tag and not is_branch:
+ if is_commit:
+ # probably a sha1 or HEAD, etc, we call it a tag
+ msg = 'is ok'
+ is_unique_tag = True
+ else:
+ # undetermined state.
+ msg = ('does not appear to be a valid tag, branch or commit! '
+ 'Please check the name and repository.')
+ is_unique_tag = False
+
+ return is_unique_tag, msg
+
+ def _ref_is_tag(self, ref):
+ """Verify that a reference is a valid tag according to git.
+
+ Note: values returned by git_showref_* and git_revparse are
+ shell return codes, which are zero for success, non-zero for
+ error!
+ """
+ is_tag = False
+ value = self._git_showref_tag(ref)
+ if value == 0:
+ is_tag = True
+ return is_tag
+
+ def _ref_is_branch(self, ref, remote_name=None):
+ """Verify if a ref is any kind of branch (local, tracked remote,
+ untracked remote).
+
+ """
+ local_branch = False
+ remote_branch = False
+ if remote_name:
+ remote_branch = self._ref_is_remote_branch(ref, remote_name)
+ local_branch = self._ref_is_local_branch(ref)
+
+ is_branch = False
+ if local_branch or remote_branch:
+ is_branch = True
+ return is_branch
+
+ def _ref_is_local_branch(self, ref):
+ """Verify that a reference is a valid branch according to git.
+
+ show-ref branch returns local branches that have been
+ previously checked out. It will not necessarily pick up
+ untracked remote branches.
+
+ Note: values returned by git_showref_* and git_revparse are
+ shell return codes, which are zero for success, non-zero for
+ error!
+
+ """
+ is_branch = False
+ value = self._git_showref_branch(ref)
+ if value == 0:
+ is_branch = True
+ return is_branch
+
+ def _ref_is_remote_branch(self, ref, remote_name):
+ """Verify that a reference is a valid branch according to git.
+
+ show-ref branch returns local branches that have been
+ previously checked out. It will not necessarily pick up
+ untracked remote branches.
+
+ Note: values returned by git_showref_* and git_revparse are
+ shell return codes, which are zero for success, non-zero for
+ error!
+
+ """
+ is_branch = False
+ value = self._git_lsremote_branch(ref, remote_name)
+ if value == 0:
+ is_branch = True
+ return is_branch
+
+ def _ref_is_commit(self, ref):
+ """Verify that a reference is a valid commit according to git.
+
+ This could be a tag, branch, sha1 id, HEAD and potentially others...
+
+ Note: values returned by git_showref_* and git_revparse are
+ shell return codes, which are zero for success, non-zero for
+ error!
+ """
+ is_commit = False
+ value = self._git_revparse_commit(ref)
+ if value == 0:
+ is_commit = True
+ return is_commit
+
+ def _status_summary(self, stat, repo_dir_path):
+ """Determine the clean/dirty status of a git repository
+
+ """
+ cwd = os.getcwd()
+ os.chdir(repo_dir_path)
+ git_output = self._git_status_porcelain_v1z()
+ is_dirty = self._status_v1z_is_dirty(git_output)
+ if is_dirty:
+ stat.clean_state = ExternalStatus.DIRTY
+ else:
+ stat.clean_state = ExternalStatus.STATUS_OK
+
+ # Now save the verbose status output in case the user wants to
+ # see it.
+ stat.status_output = self._git_status_verbose()
+ os.chdir(cwd)
+
+ @staticmethod
+ def _status_v1z_is_dirty(git_output):
+ """Parse the git status output from --porcelain=v1 -z and determine if
+ the repo status is clean or dirty. Dirty means:
+
+ * modified files
+ * missing files
+ * added files
+ * removed
+ * renamed
+ * unmerged
+
+ Whether untracked files are considered depends on how the status
+ command was run (i.e., whether it was run with the '-u' option).
+
+ NOTE: Based on the above definition, the porcelain status
+ should be an empty string to be considered 'clean'. Of course
+ this assumes we only get an empty string from an status
+ command on a clean checkout, and not some error
+ condition... Could also use 'git diff --quiet'.
+
+ """
+ is_dirty = False
+ if git_output:
+ is_dirty = True
+ return is_dirty
+
+ # ----------------------------------------------------------------
+ #
+ # system call to git for information gathering
+ #
+ # ----------------------------------------------------------------
+ @staticmethod
+ def _git_branch_vv():
+ """Run git branch -vv to obtain verbose branch information, including
+ upstream tracking and hash.
+
+ """
+ cmd = ['git', 'branch', '--verbose', '--verbose']
+ git_output = execute_subprocess(cmd, output_to_caller=True)
+ return git_output
+
+ @staticmethod
+ def _git_showref_tag(ref):
+ """Run git show-ref check if the user supplied ref is a tag.
+
+ could also use git rev-parse --quiet --verify tagname^{tag}
+ """
+ cmd = ['git', 'show-ref', '--quiet', '--verify',
+ 'refs/tags/{0}'.format(ref), ]
+ status = execute_subprocess(cmd, status_to_caller=True)
+ return status
+
+ @staticmethod
+ def _git_showref_branch(ref):
+ """Run git show-ref check if the user supplied ref is a local or
+ tracked remote branch.
+
+ """
+ cmd = ['git', 'show-ref', '--quiet', '--verify',
+ 'refs/heads/{0}'.format(ref), ]
+ status = execute_subprocess(cmd, status_to_caller=True)
+ return status
+
+ @staticmethod
+ def _git_lsremote_branch(ref, remote_name):
+ """Run git ls-remote to check if the user supplied ref is a remote
+ branch that is not being tracked
+
+ """
+ cmd = ['git', 'ls-remote', '--exit-code', '--heads',
+ remote_name, ref, ]
+ status = execute_subprocess(cmd, status_to_caller=True)
+ return status
+
+ @staticmethod
+ def _git_revparse_commit(ref):
+ """Run git rev-parse to detect if a reference is a SHA, HEAD or other
+ valid commit.
+
+ """
+ cmd = ['git', 'rev-parse', '--quiet', '--verify',
+ '{0}^{1}'.format(ref, '{commit}'), ]
+ status = execute_subprocess(cmd, status_to_caller=True)
+ return status
+
+ @staticmethod
+ def _git_status_porcelain_v1z():
+ """Run git status to obtain repository information.
+
+ This is run with '--untracked=no' to ignore untracked files.
+
+ The machine-portable format that is guaranteed not to change
+ between git versions or *user configuration*.
+
+ """
+ cmd = ['git', 'status', '--untracked-files=no', '--porcelain', '-z']
+ git_output = execute_subprocess(cmd, output_to_caller=True)
+ return git_output
+
+ @staticmethod
+ def _git_status_verbose():
+ """Run the git status command to obtain repository information.
+ """
+ cmd = ['git', 'status']
+ git_output = execute_subprocess(cmd, output_to_caller=True)
+ return git_output
+
+ @staticmethod
+ def _git_remote_verbose():
+ """Run the git remote command to obtain repository information.
+ """
+ cmd = ['git', 'remote', '--verbose']
+ git_output = execute_subprocess(cmd, output_to_caller=True)
+ return git_output
+
+ # ----------------------------------------------------------------
+ #
+ # system call to git for side effects modifying the working tree
+ #
+ # ----------------------------------------------------------------
+ @staticmethod
+ def _git_clone(url, repo_dir_name, verbosity):
+ """Run git clone for the side effect of creating a repository.
+ """
+ cmd = ['git', 'clone', url, repo_dir_name]
+ if verbosity >= VERBOSITY_VERBOSE:
+ printlog(' {0}'.format(' '.join(cmd)))
+ execute_subprocess(cmd)
+
+ @staticmethod
+ def _git_remote_add(name, url):
+ """Run the git remote command for the side effect of adding a remote
+ """
+ cmd = ['git', 'remote', 'add', name, url]
+ execute_subprocess(cmd)
+
+ @staticmethod
+ def _git_fetch(remote_name):
+ """Run the git fetch command for the side effect of updating the repo
+ """
+ cmd = ['git', 'fetch', '--tags', remote_name]
+ execute_subprocess(cmd)
+
+ @staticmethod
+ def _git_checkout_ref(ref, verbosity):
+ """Run the git checkout command for the side effect of updating the repo
+
+ Param: ref is a reference to a local or remote object in the
+ form 'origin/my_feature', or 'tag1'.
+
+ """
+ cmd = ['git', 'checkout', ref]
+ if verbosity >= VERBOSITY_VERBOSE:
+ printlog(' {0}'.format(' '.join(cmd)))
+ execute_subprocess(cmd)
diff --git a/manage_externals/manic/repository_svn.py b/manage_externals/manic/repository_svn.py
new file mode 100644
index 0000000000..b11d36e662
--- /dev/null
+++ b/manage_externals/manic/repository_svn.py
@@ -0,0 +1,276 @@
+"""Class for interacting with svn repositories
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import os
+import re
+import xml.etree.ElementTree as ET
+
+from .global_constants import EMPTY_STR, VERBOSITY_VERBOSE
+from .repository import Repository
+from .externals_status import ExternalStatus
+from .utils import fatal_error, indent_string, printlog
+from .utils import execute_subprocess
+
+
+class SvnRepository(Repository):
+ """
+ Class to represent and operate on a repository description.
+
+ For testing purpose, all system calls to svn should:
+
+ * be isolated in separate functions with no application logic
+ * of the form:
+ - cmd = ['svn', ...]
+ - value = execute_subprocess(cmd, output_to_caller={T|F},
+ status_to_caller={T|F})
+ - return value
+ * be static methods (not rely on self)
+ * name as _svn_subcommand_args(user_args)
+
+ This convention allows easy unit testing of the repository logic
+ by mocking the specific calls to return predefined results.
+
+ """
+ RE_URLLINE = re.compile(r'^URL:')
+
+ def __init__(self, component_name, repo):
+ """
+ Parse repo (a XML element).
+ """
+ Repository.__init__(self, component_name, repo)
+ if self._branch:
+ self._url = os.path.join(self._url, self._branch)
+ elif self._tag:
+ self._url = os.path.join(self._url, self._tag)
+ else:
+ msg = "DEV_ERROR in svn repository. Shouldn't be here!"
+ fatal_error(msg)
+
+ # ----------------------------------------------------------------
+ #
+ # Public API, defined by Repository
+ #
+ # ----------------------------------------------------------------
+ def checkout(self, base_dir_path, repo_dir_name, verbosity):
+ """Checkout or update the working copy
+
+ If the repo destination directory exists, switch the sandbox to
+ match the externals description.
+
+ If the repo destination directory does not exist, checkout the
+ correct branch or tag.
+
+ """
+ repo_dir_path = os.path.join(base_dir_path, repo_dir_name)
+ if os.path.exists(repo_dir_path):
+ cwd = os.getcwd()
+ os.chdir(repo_dir_path)
+ self._svn_switch(self._url, verbosity)
+ # svn switch can lead to a conflict state, but it gives a
+ # return code of 0. So now we need to make sure that we're
+ # in a clean (non-conflict) state.
+ self._abort_if_dirty(repo_dir_path,
+ "Expected clean state following switch")
+ os.chdir(cwd)
+ else:
+ self._svn_checkout(self._url, repo_dir_path, verbosity)
+
+ def status(self, stat, repo_dir_path):
+ """
+ Check and report the status of the repository
+ """
+ self._check_sync(stat, repo_dir_path)
+ if os.path.exists(repo_dir_path):
+ self._status_summary(stat, repo_dir_path)
+
+ # ----------------------------------------------------------------
+ #
+ # Internal work functions
+ #
+ # ----------------------------------------------------------------
+ def _check_sync(self, stat, repo_dir_path):
+ """Check to see if repository directory exists and is at the expected
+ url. Return: status object
+
+ """
+ if not os.path.exists(repo_dir_path):
+ # NOTE(bja, 2017-10) this state should have been handled by
+ # the source object and we never get here!
+ stat.sync_state = ExternalStatus.STATUS_ERROR
+ else:
+ svn_output = self._svn_info(repo_dir_path)
+ if not svn_output:
+ # directory exists, but info returned nothing. .svn
+ # directory removed or incomplete checkout?
+ stat.sync_state = ExternalStatus.UNKNOWN
+ else:
+ stat.sync_state, stat.current_version = \
+ self._check_url(svn_output, self._url)
+ stat.expected_version = '/'.join(self._url.split('/')[3:])
+
+ def _abort_if_dirty(self, repo_dir_path, message):
+ """Check if the repo is in a dirty state; if so, abort with a
+ helpful message.
+
+ """
+
+ stat = ExternalStatus()
+ self._status_summary(stat, repo_dir_path)
+ if stat.clean_state != ExternalStatus.STATUS_OK:
+ status = self._svn_status_verbose(repo_dir_path)
+ status = indent_string(status, 4)
+ errmsg = """In directory
+ {cwd}
+
+svn status now shows:
+{status}
+
+ERROR: {message}
+
+One possible cause of this problem is that there may have been untracked
+files in your working directory that had the same name as tracked files
+in the new revision.
+
+To recover: Clean up the above directory (resolving conflicts, etc.),
+then rerun checkout_externals.
+""".format(cwd=repo_dir_path,
+ message=message,
+ status=status)
+
+ fatal_error(errmsg)
+
+ @staticmethod
+ def _check_url(svn_output, expected_url):
+ """Determine the svn url from svn info output and return whether it
+ matches the expected value.
+
+ """
+ url = None
+ for line in svn_output.splitlines():
+ if SvnRepository.RE_URLLINE.match(line):
+ url = line.split(': ')[1].strip()
+ break
+ if not url:
+ status = ExternalStatus.UNKNOWN
+ elif url == expected_url:
+ status = ExternalStatus.STATUS_OK
+ else:
+ status = ExternalStatus.MODEL_MODIFIED
+
+ if url:
+ current_version = '/'.join(url.split('/')[3:])
+ else:
+ current_version = EMPTY_STR
+
+ return status, current_version
+
+ def _status_summary(self, stat, repo_dir_path):
+ """Report whether the svn repository is in-sync with the model
+ description and whether the sandbox is clean or dirty.
+
+ """
+ svn_output = self._svn_status_xml(repo_dir_path)
+ is_dirty = self.xml_status_is_dirty(svn_output)
+ if is_dirty:
+ stat.clean_state = ExternalStatus.DIRTY
+ else:
+ stat.clean_state = ExternalStatus.STATUS_OK
+
+ # Now save the verbose status output in case the user wants to
+ # see it.
+ stat.status_output = self._svn_status_verbose(repo_dir_path)
+
+ @staticmethod
+ def xml_status_is_dirty(svn_output):
+ """Parse svn status xml output and determine if the working copy is
+ clean or dirty. Dirty is defined as:
+
+ * modified files
+ * added files
+ * deleted files
+ * missing files
+
+ Unversioned files do not affect the clean/dirty status.
+
+ 'external' is also an acceptable state
+
+ """
+ # pylint: disable=invalid-name
+ SVN_EXTERNAL = 'external'
+ SVN_UNVERSIONED = 'unversioned'
+ # pylint: enable=invalid-name
+
+ is_dirty = False
+ xml_status = ET.fromstring(svn_output)
+ xml_target = xml_status.find('./target')
+ entries = xml_target.findall('./entry')
+ for entry in entries:
+ status = entry.find('./wc-status')
+ item = status.get('item')
+ if item == SVN_EXTERNAL:
+ continue
+ if item == SVN_UNVERSIONED:
+ continue
+ else:
+ is_dirty = True
+ break
+ return is_dirty
+
+ # ----------------------------------------------------------------
+ #
+ # system call to svn for information gathering
+ #
+ # ----------------------------------------------------------------
+ @staticmethod
+ def _svn_info(repo_dir_path):
+ """Return results of svn info command
+ """
+ cmd = ['svn', 'info', repo_dir_path]
+ output = execute_subprocess(cmd, output_to_caller=True)
+ return output
+
+ @staticmethod
+ def _svn_status_verbose(repo_dir_path):
+ """capture the full svn status output
+ """
+ cmd = ['svn', 'status', repo_dir_path]
+ svn_output = execute_subprocess(cmd, output_to_caller=True)
+ return svn_output
+
+ @staticmethod
+ def _svn_status_xml(repo_dir_path):
+ """
+ Get status of the subversion sandbox in repo_dir
+ """
+ cmd = ['svn', 'status', '--xml', repo_dir_path]
+ svn_output = execute_subprocess(cmd, output_to_caller=True)
+ return svn_output
+
+ # ----------------------------------------------------------------
+ #
+ # system call to svn for side effects modifying the working tree
+ #
+ # ----------------------------------------------------------------
+ @staticmethod
+ def _svn_checkout(url, repo_dir_path, verbosity):
+ """
+ Checkout a subversion repository (repo_url) to checkout_dir.
+ """
+ cmd = ['svn', 'checkout', url, repo_dir_path]
+ if verbosity >= VERBOSITY_VERBOSE:
+ printlog(' {0}'.format(' '.join(cmd)))
+ execute_subprocess(cmd)
+
+ @staticmethod
+ def _svn_switch(url, verbosity):
+ """
+ Switch branches in an svn sandbox
+ """
+ cmd = ['svn', 'switch', url]
+ if verbosity >= VERBOSITY_VERBOSE:
+ printlog(' {0}'.format(' '.join(cmd)))
+ execute_subprocess(cmd)
diff --git a/manage_externals/manic/sourcetree.py b/manage_externals/manic/sourcetree.py
new file mode 100644
index 0000000000..1f2d5a59e4
--- /dev/null
+++ b/manage_externals/manic/sourcetree.py
@@ -0,0 +1,306 @@
+"""
+
+FIXME(bja, 2017-11) External and SourceTree have a circular dependency!
+"""
+
+import errno
+import logging
+import os
+
+from .externals_description import ExternalsDescription
+from .externals_description import read_externals_description_file
+from .externals_description import create_externals_description
+from .repository_factory import create_repository
+from .externals_status import ExternalStatus
+from .utils import fatal_error, printlog
+from .global_constants import EMPTY_STR, LOCAL_PATH_INDICATOR
+from .global_constants import VERBOSITY_VERBOSE
+
+
+class _External(object):
+ """
+ _External represents an external object inside a SourceTree
+ """
+
+ # pylint: disable=R0902
+
+ def __init__(self, root_dir, name, ext_description):
+ """Parse an external description file into a dictionary of externals.
+
+ Input:
+
+ root_dir : string - the root directory path where
+ 'local_path' is relative to.
+
+ name : string - name of the ext_description object. may or may not
+ correspond to something in the path.
+
+ ext_description : dict - source ExternalsDescription object
+
+ """
+ self._name = name
+ self._repo = None
+ self._externals = EMPTY_STR
+ self._externals_sourcetree = None
+ self._stat = ExternalStatus()
+ # Parse the sub-elements
+
+ # _path : local path relative to the containing source tree
+ self._local_path = ext_description[ExternalsDescription.PATH]
+ # _repo_dir : full repository directory
+ repo_dir = os.path.join(root_dir, self._local_path)
+ self._repo_dir_path = os.path.abspath(repo_dir)
+ # _base_dir : base directory *containing* the repository
+ self._base_dir_path = os.path.dirname(self._repo_dir_path)
+ # repo_dir_name : base_dir_path + repo_dir_name = repo_dir_path
+ self._repo_dir_name = os.path.basename(self._repo_dir_path)
+ assert(os.path.join(self._base_dir_path, self._repo_dir_name)
+ == self._repo_dir_path)
+
+ self._required = ext_description[ExternalsDescription.REQUIRED]
+ self._externals = ext_description[ExternalsDescription.EXTERNALS]
+ if self._externals:
+ self._create_externals_sourcetree()
+ repo = create_repository(
+ name, ext_description[ExternalsDescription.REPO])
+ if repo:
+ self._repo = repo
+
+ def get_name(self):
+ """
+ Return the external object's name
+ """
+ return self._name
+
+ def get_local_path(self):
+ """
+ Return the external object's path
+ """
+ return self._local_path
+
+ def status(self):
+ """
+ If the repo destination directory exists, ensure it is correct (from
+ correct URL, correct branch or tag), and possibly update the external.
+ If the repo destination directory does not exist, checkout the correct
+ branch or tag.
+ If load_all is True, also load all of the externals' sub-externals.
+ """
+
+ self._stat.path = self.get_local_path()
+ if not self._required:
+ self._stat.source_type = ExternalStatus.OPTIONAL
+ elif self._local_path == LOCAL_PATH_INDICATOR:
+ # LOCAL_PATH_INDICATOR, '.' paths, are standalone
+ # component directories that are not managed by
+ # checkout_externals.
+ self._stat.source_type = ExternalStatus.STANDALONE
+ else:
+ # managed by checkout_externals
+ self._stat.source_type = ExternalStatus.MANAGED
+
+ ext_stats = {}
+
+ if not os.path.exists(self._repo_dir_path):
+ self._stat.sync_state = ExternalStatus.EMPTY
+ msg = ('status check: repository directory for "{0}" does not '
+ 'exist.'.format(self._name))
+ logging.info(msg)
+ self._stat.current_version = 'not checked out'
+ # NOTE(bja, 2018-01) directory doesn't exist, so we cannot
+ # use repo to determine the expected version. We just take
+ # a best-guess based on the assumption that only tag or
+ # branch should be set, but not both.
+ if not self._repo:
+ self._stat.expected_version = 'unknown'
+ else:
+ self._stat.expected_version = self._repo.tag() + self._repo.branch()
+ else:
+ if self._repo:
+ self._repo.status(self._stat, self._repo_dir_path)
+
+ if self._externals and self._externals_sourcetree:
+ # we expect externals and they exist
+ cwd = os.getcwd()
+ # SourceTree expects to be called from the correct
+ # root directory.
+ os.chdir(self._repo_dir_path)
+ ext_stats = self._externals_sourcetree.status(self._local_path)
+ os.chdir(cwd)
+
+ all_stats = {}
+ # don't add the root component because we don't manage it
+ # and can't provide useful info about it.
+ if self._local_path != LOCAL_PATH_INDICATOR:
+ # store the stats under the local_path, not comp name so
+ # it will be sorted correctly
+ all_stats[self._stat.path] = self._stat
+
+ if ext_stats:
+ all_stats.update(ext_stats)
+
+ return all_stats
+
+ def checkout(self, verbosity, load_all):
+ """
+ If the repo destination directory exists, ensure it is correct (from
+ correct URL, correct branch or tag), and possibly update the external.
+ If the repo destination directory does not exist, checkout the correct
+ branch or tag.
+ If load_all is True, also load all of the externals' sub-externals.
+ """
+ if load_all:
+ pass
+ # Make sure we are in correct location
+
+ if not os.path.exists(self._repo_dir_path):
+ # repository directory doesn't exist. Need to check it
+ # out, and for that we need the base_dir_path to exist
+ try:
+ os.makedirs(self._base_dir_path)
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ msg = 'Could not create directory "{0}"'.format(
+ self._base_dir_path)
+ fatal_error(msg)
+
+ if self._stat.source_type != ExternalStatus.STANDALONE:
+ if verbosity >= VERBOSITY_VERBOSE:
+ # NOTE(bja, 2018-01) probably do not want to pass
+ # verbosity in this case, because if (verbosity ==
+ # VERBOSITY_DUMP), then the previous status output would
+ # also be dumped, adding noise to the output.
+ self._stat.log_status_message(VERBOSITY_VERBOSE)
+
+ if self._repo:
+ if self._stat.sync_state == ExternalStatus.STATUS_OK:
+ # If we're already in sync, avoid showing verbose output
+ # from the checkout command, unless the verbosity level
+ # is 2 or more.
+ checkout_verbosity = verbosity - 1
+ else:
+ checkout_verbosity = verbosity
+ self._repo.checkout(self._base_dir_path,
+ self._repo_dir_name, checkout_verbosity)
+
+ def checkout_externals(self, verbosity, load_all):
+ """Checkout the sub-externals for this object
+ """
+ if self._externals:
+ if not self._externals_sourcetree:
+ self._create_externals_sourcetree()
+ self._externals_sourcetree.checkout(verbosity, load_all)
+
+ def _create_externals_sourcetree(self):
+ """
+ """
+ if not os.path.exists(self._repo_dir_path):
+ # NOTE(bja, 2017-10) repository has not been checked out
+ # yet, can't process the externals file. Assume we are
+ # checking status before code is checked out and this
+ # will be handled correctly later.
+ return
+
+ cwd = os.getcwd()
+ os.chdir(self._repo_dir_path)
+ if not os.path.exists(self._externals):
+ # NOTE(bja, 2017-10) this check is redundant with the one
+ # in read_externals_description_file!
+ msg = ('External externals description file "{0}" '
+ 'does not exist! In directory: {1}'.format(
+ self._externals, self._repo_dir_path))
+ fatal_error(msg)
+
+ externals_root = self._repo_dir_path
+ model_data = read_externals_description_file(externals_root,
+ self._externals)
+ externals = create_externals_description(model_data)
+ self._externals_sourcetree = SourceTree(externals_root, externals)
+ os.chdir(cwd)
+
+
+class SourceTree(object):
+ """
+ SourceTree represents a group of managed externals
+ """
+
+ def __init__(self, root_dir, model):
+ """
+ Build a SourceTree object from a model description
+ """
+ self._root_dir = os.path.abspath(root_dir)
+ self._all_components = {}
+ self._required_compnames = []
+ for comp in model:
+ src = _External(self._root_dir, comp, model[comp])
+ self._all_components[comp] = src
+ if model[comp][ExternalsDescription.REQUIRED]:
+ self._required_compnames.append(comp)
+
+ def status(self, relative_path_base=LOCAL_PATH_INDICATOR):
+ """Report the status components
+
+ FIXME(bja, 2017-10) what do we do about situations where the
+ user checked out the optional components, but didn't add
+ optional for running status? What do we do where the user
+ didn't add optional to the checkout but did add it to the
+ status. -- For now, we run status on all components, and try
+ to do the right thing based on the results....
+
+ """
+ load_comps = self._all_components.keys()
+
+ summary = {}
+ for comp in load_comps:
+ printlog('{0}, '.format(comp), end='')
+ stat = self._all_components[comp].status()
+ for name in stat.keys():
+ # check if we need to append the relative_path_base to
+ # the path so it will be sorted in the correct order.
+ if not stat[name].path.startswith(relative_path_base):
+ stat[name].path = os.path.join(relative_path_base,
+ stat[name].path)
+ # store under key = updated path, and delete the
+ # old key.
+ comp_stat = stat[name]
+ del stat[name]
+ stat[comp_stat.path] = comp_stat
+ summary.update(stat)
+
+ return summary
+
+ def checkout(self, verbosity, load_all, load_comp=None):
+ """
+ Checkout or update indicated components into the configured
+ subdirs.
+
+ If load_all is True, recursively checkout all externals.
+ If load_all is False, load_comp is an optional set of components to load.
+ If load_all is False and load_comp is None, only load the required externals.
+ """
+ if verbosity >= VERBOSITY_VERBOSE:
+ printlog('Checking out externals: ')
+ else:
+ printlog('Checking out externals: ', end='')
+
+ if load_all:
+ load_comps = self._all_components.keys()
+ elif load_comp is not None:
+ load_comps = [load_comp]
+ else:
+ load_comps = self._required_compnames
+
+ # checkout the primary externals
+ for comp in load_comps:
+ if verbosity < VERBOSITY_VERBOSE:
+ printlog('{0}, '.format(comp), end='')
+ else:
+ # verbose output handled by the _External object, just
+ # output a newline
+ printlog(EMPTY_STR)
+ self._all_components[comp].checkout(verbosity, load_all)
+ printlog('')
+
+ # now give each external an opportunity to check out its externals.
+ for comp in load_comps:
+ self._all_components[comp].checkout_externals(verbosity, load_all)
diff --git a/manage_externals/manic/utils.py b/manage_externals/manic/utils.py
new file mode 100644
index 0000000000..04f037fd70
--- /dev/null
+++ b/manage_externals/manic/utils.py
@@ -0,0 +1,300 @@
+#!/usr/bin/env python
+"""
+Common public utilities for manic package
+
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import logging
+import os
+import subprocess
+import sys
+
+from .global_constants import LOCAL_PATH_INDICATOR, LOG_FILE_NAME
+
+# ---------------------------------------------------------------------
+#
+# screen and logging output and functions to massage text for output
+#
+# ---------------------------------------------------------------------
+
+
+def log_process_output(output):
+ """Log each line of process output at debug level so it can be
+ filtered if necessary. By default, output is a single string, and
+ logging.debug(output) will only put log info heading on the first
+ line. This makes it hard to filter with grep.
+
+ """
+ output = output.split('\n')
+ for line in output:
+ logging.debug(line)
+
+
+def printlog(msg, **kwargs):
+ """Wrapper script around print to ensure that everything printed to
+ the screen also gets logged.
+
+ """
+ logging.info(msg)
+ if kwargs:
+ print(msg, **kwargs)
+ else:
+ print(msg)
+ sys.stdout.flush()
+
+
+def last_n_lines(the_string, n_lines, truncation_message=None):
+ """Returns the last n lines of the given string
+
+ Args:
+ the_string: str
+ n_lines: int
+ truncation_message: str, optional
+
+ Returns a string containing the last n lines of the_string
+
+ If truncation_message is provided, the returned string begins with
+ the given message if and only if the string is greater than n lines
+ to begin with.
+ """
+
+ lines = the_string.splitlines(True)
+ if len(lines) <= n_lines:
+ return the_string
+ else:
+ lines_subset = lines[-n_lines:]
+ str_truncated = ''.join(lines_subset)
+ if truncation_message:
+ str_truncated = truncation_message + '\n' + str_truncated
+ return str_truncated
+
+
+def indent_string(the_string, indent_level):
+ """Indents the given string by a given number of spaces
+
+ Args:
+ the_string: str
+ indent_level: int
+
+ Returns a new string that is the same as the_string, except that
+ each line is indented by 'indent_level' spaces.
+
+ In python3, this can be done with textwrap.indent.
+ """
+
+ lines = the_string.splitlines(True)
+ padding = ' ' * indent_level
+ lines_indented = [padding + line for line in lines]
+ return ''.join(lines_indented)
+
+# ---------------------------------------------------------------------
+#
+# error handling
+#
+# ---------------------------------------------------------------------
+
+
+def fatal_error(message):
+ """
+ Error output function
+ """
+ logging.error(message)
+ raise RuntimeError("{0}ERROR: {1}".format(os.linesep, message))
+
+
+# ---------------------------------------------------------------------
+#
+# Data conversion / manipulation
+#
+# ---------------------------------------------------------------------
+def str_to_bool(bool_str):
+ """Convert a string representation of a boolean into a true boolean.
+
+ Conversion should be case insensitive.
+ """
+ value = None
+ str_lower = bool_str.lower()
+ if (str_lower == 'true') or (str_lower == 't'):
+ value = True
+ elif (str_lower == 'false') or (str_lower == 'f'):
+ value = False
+ if value is None:
+ msg = ('ERROR: invalid boolean string value "{0}". '
+ 'Must be "true" or "false"'.format(bool_str))
+ fatal_error(msg)
+ return value
+
+
+REMOTE_PREFIXES = ['http://', 'https://', 'ssh://', 'git@']
+
+
+def is_remote_url(url):
+ """check if the user provided a local file path instead of a
+ remote. If so, it must be expanded to an absolute
+ path.
+
+ """
+ remote_url = False
+ for prefix in REMOTE_PREFIXES:
+ if url.startswith(prefix):
+ remote_url = True
+ return remote_url
+
+
+def split_remote_url(url):
+ """check if the user provided a local file path or a
+ remote. If remote, try to strip off protocol info.
+
+ """
+ remote_url = is_remote_url(url)
+ if not remote_url:
+ return url
+
+ for prefix in REMOTE_PREFIXES:
+ url = url.replace(prefix, '')
+
+ if '@' in url:
+ url = url.split('@')[1]
+
+ if ':' in url:
+ url = url.split(':')[1]
+
+ return url
+
+
+def expand_local_url(url, field):
+ """check if the user provided a local file path instead of a
+ remote. If so, it must be expanded to an absolute
+ path.
+
+ Note: local paths of LOCAL_PATH_INDICATOR have special meaning and
+ represent local copy only, don't work with the remotes.
+
+ """
+ remote_url = is_remote_url(url)
+ if not remote_url:
+ if url.strip() == LOCAL_PATH_INDICATOR:
+ pass
+ else:
+ url = os.path.expandvars(url)
+ url = os.path.expanduser(url)
+ if not os.path.isabs(url):
+ msg = ('WARNING: Externals description for "{0}" contains a '
+ 'url that is not remote and does not expand to an '
+ 'absolute path. Version control operations may '
+ 'fail.\n\nurl={1}'.format(field, url))
+ printlog(msg)
+ else:
+ url = os.path.normpath(url)
+ return url
+
+
+# ---------------------------------------------------------------------
+#
+# subprocess
+#
+# ---------------------------------------------------------------------
+def execute_subprocess(commands, status_to_caller=False,
+ output_to_caller=False):
+ """Wrapper around subprocess.check_output to handle common
+ exceptions.
+
+ check_output runs a command with arguments and waits
+ for it to complete.
+
+ check_output raises an exception on a nonzero return code. if
+ status_to_caller is true, execute_subprocess returns the subprocess
+ return code, otherwise execute_subprocess treats non-zero return
+ status as an error and raises an exception.
+
+ """
+ msg = 'In directory: {0}\nexecute_subprocess running command:'.format(
+ os.getcwd())
+ logging.info(msg)
+ logging.info(commands)
+ return_to_caller = status_to_caller or output_to_caller
+ status = -1
+ output = ''
+ try:
+ logging.info(' '.join(commands))
+ output = subprocess.check_output(commands, stderr=subprocess.STDOUT,
+ universal_newlines=True)
+ log_process_output(output)
+ status = 0
+ except OSError as error:
+ msg = failed_command_msg(
+ 'Command execution failed. Does the executable exist?',
+ commands)
+ logging.error(error)
+ fatal_error(msg)
+ except ValueError as error:
+ msg = failed_command_msg(
+ 'DEV_ERROR: Invalid arguments trying to run subprocess',
+ commands)
+ logging.error(error)
+ fatal_error(msg)
+ except subprocess.CalledProcessError as error:
+ # Only report the error if we are NOT returning to the
+ # caller. If we are returning to the caller, then it may be a
+ # simple status check. If returning, it is the callers
+ # responsibility determine if an error occurred and handle it
+ # appropriately.
+ if not return_to_caller:
+ msg_context = ('Process did not run successfully; '
+ 'returned status {0}'.format(error.returncode))
+ msg = failed_command_msg(
+ msg_context,
+ commands,
+ output=error.output)
+ logging.error(error)
+ logging.error(msg)
+ log_process_output(error.output)
+ fatal_error(msg)
+ status = error.returncode
+
+ if status_to_caller and output_to_caller:
+ ret_value = (status, output)
+ elif status_to_caller:
+ ret_value = status
+ elif output_to_caller:
+ ret_value = output
+ else:
+ ret_value = None
+
+ return ret_value
+
+
+def failed_command_msg(msg_context, command, output=None):
+ """Template for consistent error messages from subprocess calls.
+
+ If 'output' is given, it should provide the output from the failed
+ command
+ """
+
+ if output:
+ output_truncated = last_n_lines(output, 20,
+ truncation_message='[... Output truncated for brevity ...]')
+ errmsg = ('Failed with output:\n' +
+ indent_string(output_truncated, 4) +
+ '\nERROR: ')
+ else:
+ errmsg = ''
+
+ command_str = ' '.join(command)
+ errmsg += """In directory
+ {cwd}
+{context}:
+ {command}
+""".format(cwd=os.getcwd(), context=msg_context, command=command_str)
+
+ if output:
+ errmsg += 'See above for output from failed command.\n'
+
+ errmsg += 'Please check the log file {log} for more details.'.format(
+ log=LOG_FILE_NAME)
+
+ return errmsg
diff --git a/manage_externals/test/.coveragerc b/manage_externals/test/.coveragerc
new file mode 100644
index 0000000000..8b681888b8
--- /dev/null
+++ b/manage_externals/test/.coveragerc
@@ -0,0 +1,7 @@
+[run]
+branch = True
+omit = test_unit_*.py
+ test_sys_*.py
+ /usr/*
+ .local/*
+ */site-packages/*
\ No newline at end of file
diff --git a/manage_externals/test/.gitignore b/manage_externals/test/.gitignore
new file mode 100644
index 0000000000..dd5795998f
--- /dev/null
+++ b/manage_externals/test/.gitignore
@@ -0,0 +1,7 @@
+# virtual environments
+env_python*
+
+# python code coverage tool output
+.coverage
+htmlcov
+
diff --git a/manage_externals/test/.pylint.rc b/manage_externals/test/.pylint.rc
new file mode 100644
index 0000000000..3e66113f7f
--- /dev/null
+++ b/manage_externals/test/.pylint.rc
@@ -0,0 +1,426 @@
+[MASTER]
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code
+extension-pkg-whitelist=
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=.git,.svn,env2
+
+# Add files or directories matching the regex patterns to the blacklist. The
+# regex matches against base names, not paths.
+ignore-patterns=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Use multiple processes to speed up Pylint.
+jobs=1
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# Specify a configuration file.
+#rcfile=
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
+confidence=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once).You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use"--disable=all --enable=classes
+# --disable=W"
+disable=bad-continuation
+
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=
+
+
+[REPORTS]
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors warning, statement which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details
+msg-template={msg_id}:{line:3d},{column:2d}: {msg} ({symbol})
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio).You can also give a reporter class, eg
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Tells whether to display a full report or only the messages
+#reports=yes
+
+# Activate the evaluation score.
+score=yes
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+
+[BASIC]
+
+# Naming hint for argument names
+argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct argument names
+argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Naming hint for attribute names
+attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct attribute names
+attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=foo,bar,baz,toto,tutu,tata
+
+# Naming hint for class attribute names
+class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Regular expression matching correct class attribute names
+class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Naming hint for class names
+class-name-hint=[A-Z_][a-zA-Z0-9]+$
+
+# Regular expression matching correct class names
+class-rgx=[A-Z_][a-zA-Z0-9]+$
+
+# Naming hint for constant names
+const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Regular expression matching correct constant names
+const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming hint for function names
+function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct function names
+function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=i,j,k,ex,Run,_
+
+# Include a hint for the correct naming format with invalid-name
+include-naming-hint=no
+
+# Naming hint for inline iteration names
+inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
+
+# Regular expression matching correct inline iteration names
+inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+
+# Naming hint for method names
+method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct method names
+method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Naming hint for module names
+module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Regular expression matching correct module names
+module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+property-classes=abc.abstractproperty
+
+# Naming hint for variable names
+variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct variable names
+variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module
+max-module-lines=1000
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=trailing-comma,dict-separator
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[LOGGING]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+logging-modules=logging
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,XXX,TODO
+
+
+[SIMILARITIES]
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[SPELLING]
+
+# Spelling dictionary name. Available dictionaries: none. To make it working
+# install python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to indicated private dictionary in
+# --spelling-private-dict-file option instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis. It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,_cb
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,future.builtins
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,__new__,setUp
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,_fields,_replace,_source,_make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+max-args=5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of boolean expressions in a if statement
+max-bool-expr=5
+
+# Maximum number of branch for function / method body
+max-branches=12
+
+# Maximum number of locals for function / method body
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body
+max-returns=6
+
+# Maximum number of statements in function / method body
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[IMPORTS]
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=regsub,TERMIOS,Bastion,rexec
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=Exception
diff --git a/manage_externals/test/Makefile b/manage_externals/test/Makefile
new file mode 100644
index 0000000000..0507597ab3
--- /dev/null
+++ b/manage_externals/test/Makefile
@@ -0,0 +1,121 @@
+python = not-set
+verbose = not-set
+debug = not-set
+
+ifneq ($(python), not-set)
+PYTHON=$(python)
+else
+PYTHON=python
+endif
+
+# we need the python path to point one level up to access the package
+# and executables
+PYPATH=PYTHONPATH=..:
+
+# common args for running tests
+TEST_ARGS=-m unittest discover
+
+ifeq ($(debug), not-set)
+ ifeq ($(verbose), not-set)
+ # summary only output
+ TEST_ARGS+=--buffer
+ else
+ # show individual test summary
+ TEST_ARGS+=--buffer --verbose
+ endif
+else
+ # show detailed test output
+ TEST_ARGS+=--verbose
+endif
+
+
+# auto reformat the code
+AUTOPEP8=autopep8
+AUTOPEP8_ARGS=--aggressive --in-place
+
+# run lint
+PYLINT=pylint
+PYLINT_ARGS=-j 2 --rcfile=.pylint.rc
+
+# code coverage
+COVERAGE=coverage
+COVERAGE_ARGS=--rcfile=.coveragerc --append
+
+# source files
+SRC = \
+ ../checkout_externals \
+ ../manic/*.py
+
+CHECKOUT_EXE = ../checkout_externals
+
+TEST_DIR = .
+
+README = ../README.md
+
+#
+# testing
+#
+.PHONY : utest
+utest : FORCE
+ $(PYPATH) $(PYTHON) $(TEST_ARGS) --pattern 'test_unit_*.py'
+
+.PHONY : stest
+stest : FORCE
+ $(PYPATH) $(PYTHON) $(TEST_ARGS) --pattern 'test_sys_*.py'
+
+.PHONY : test
+test : utest stest
+
+#
+# documentation
+#
+.PHONY : readme
+readme : $(CHECKOUT_EXE)
+ echo '-- AUTOMATICALLY GENERATED FILE. DO NOT EDIT --\n' > $(README)
+ echo -n '[![Build Status](https://travis-ci.org/NCAR/manage_externals.svg?branch=master)](https://travis-ci.org/NCAR/manage_externals)' >> $(README)
+ echo '[![Coverage Status](https://coveralls.io/repos/github/NCAR/manage_externals/badge.svg?branch=master)](https://coveralls.io/github/NCAR/manage_externals?branch=master)' >> $(README)
+ echo '```\n' >> $(README)
+ $(CHECKOUT_EXE) --help >> $(README)
+
+#
+# coding standards
+#
+.PHONY : style
+style : FORCE
+ $(AUTOPEP8) $(AUTOPEP8_ARGS) --recursive $(SRC) $(TEST_DIR)/test_*.py
+
+.PHONY : lint
+lint : FORCE
+ $(PYLINT) $(PYLINT_ARGS) $(SRC) $(TEST_DIR)/test_*.py
+
+.PHONY : stylint
+stylint : style lint
+
+.PHONY : coverage
+coverage : FORCE
+ $(PYPATH) $(COVERAGE) erase
+ $(PYPATH) $(COVERAGE) run $(COVERAGE_ARGS) $(TEST_ARGS) --pattern 'test_unit_*.py'
+ $(PYPATH) $(COVERAGE) run $(COVERAGE_ARGS) $(TEST_ARGS) --pattern 'test_sys_*.py'
+ $(PYPATH) $(COVERAGE) html
+
+#
+# virtual environment creation
+#
+.PHONY : env
+env : FORCE
+ $(PYPATH) virtualenv --python $(PYTHON) $@_$(PYTHON)
+ . $@_$(PYTHON)/bin/activate; pip install -r requirements.txt
+
+#
+# utilites
+#
+.PHONY : clean
+clean : FORCE
+ -rm -rf *~ *.pyc tmp fake htmlcov
+
+.PHONY : clobber
+clobber : clean
+ -rm -rf env_*
+
+FORCE :
+
diff --git a/manage_externals/test/README.md b/manage_externals/test/README.md
new file mode 100644
index 0000000000..938a900eec
--- /dev/null
+++ b/manage_externals/test/README.md
@@ -0,0 +1,77 @@
+# Testing for checkout_externals
+
+NOTE: Python2 is the supported runtime environment. Python3 compatibility is
+in progress, complicated by the different proposed input methods
+(yaml, xml, cfg/ini, json) and their different handling of strings
+(unicode vs byte) in python2. Full python3 compatibility will be
+possible once the number of possible input formats has been narrowed.
+
+## Setup development environment
+
+Development environments should be setup for python2 and python3:
+
+```SH
+ cd checkout_externals/test
+ make python=python2 env
+ make python=python3 env
+```
+
+## Unit tests
+
+Tests should be run for both python2 and python3. It is recommended
+that you have separate terminal windows open for python2 and python3
+testing to avoid errors activating and deactivating environments.
+
+```SH
+ cd checkout_externals/test
+ . env_python2/bin/activate
+ make utest
+ deactivate
+```
+
+```SH
+ cd checkout_externals/test
+    . env_python3/bin/activate
+ make utest
+ deactivate
+```
+
+## System tests
+
+Not yet implemented.
+
+## Static analysis
+
+checkout_externals is difficult to test thoroughly because it relies
+on git and svn, and svn requires a live network connection and
+repository. Static analysis will help catch bugs in code paths that
+are not being executed, but it requires conforming to community
+standards and best practices. autopep8 and pylint should be run
+regularly for automatic code formatting and linting.
+
+```SH
+ cd checkout_externals/test
+ . env_python2/bin/activate
+ make lint
+ deactivate
+```
+
+The canonical formatting for the code is whatever autopep8
+generates. All issues identified by pylint should be addressed.
+
+
+## Code coverage
+
+All changes to the code should include maintaining existing tests and
+writing new tests for new or changed functionality. To ensure test
+coverage, run the code coverage tool:
+
+```SH
+ cd checkout_externals/test
+ . env_python2/bin/activate
+ make coverage
+ open -a Firefox.app htmlcov/index.html
+ deactivate
+```
+
+
diff --git a/manage_externals/test/doc/.gitignore b/manage_externals/test/doc/.gitignore
new file mode 100644
index 0000000000..d4e11e5ea0
--- /dev/null
+++ b/manage_externals/test/doc/.gitignore
@@ -0,0 +1,2 @@
+_build
+
diff --git a/manage_externals/test/doc/Makefile b/manage_externals/test/doc/Makefile
new file mode 100644
index 0000000000..18f4d5bf99
--- /dev/null
+++ b/manage_externals/test/doc/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+SPHINXPROJ = ManageExternals
+SOURCEDIR = .
+BUILDDIR = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
diff --git a/manage_externals/test/doc/conf.py b/manage_externals/test/doc/conf.py
new file mode 100644
index 0000000000..469c0b0dc5
--- /dev/null
+++ b/manage_externals/test/doc/conf.py
@@ -0,0 +1,172 @@
+# -*- coding: utf-8 -*-
+#
+# Manage Externals documentation build configuration file, created by
+# sphinx-quickstart on Wed Nov 29 10:53:25 2017.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+# import os
+# import sys
+# sys.path.insert(0, os.path.abspath('.'))
+
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = ['sphinx.ext.autodoc',
+ 'sphinx.ext.todo',
+ 'sphinx.ext.coverage',
+ 'sphinx.ext.viewcode',
+ 'sphinx.ext.githubpages']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Manage Externals'
+copyright = u'2017, CSEG at NCAR'
+author = u'CSEG at NCAR'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = u'1.0.0'
+# The full version, including alpha/beta/rc tags.
+release = u'1.0.0'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This patterns also effect to html_static_path and html_extra_path
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'alabaster'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#
+# html_theme_options = {}
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Custom sidebar templates, must be a dictionary that maps document names
+# to template names.
+#
+# This is required for the alabaster theme
+# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
+html_sidebars = {
+ '**': [
+ 'relations.html', # needs 'show_related': True theme option to display
+ 'searchbox.html',
+ ]
+}
+
+
+# -- Options for HTMLHelp output ------------------------------------------
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'ManageExternalsdoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+ #
+ # 'papersize': 'letterpaper',
+
+ # The font size ('10pt', '11pt' or '12pt').
+ #
+ # 'pointsize': '10pt',
+
+ # Additional stuff for the LaTeX preamble.
+ #
+ # 'preamble': '',
+
+ # Latex figure (float) alignment
+ #
+ # 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (master_doc, 'ManageExternals.tex', u'Manage Externals Documentation',
+ u'CSEG at NCAR', 'manual'),
+]
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (master_doc, 'manageexternals', u'Manage Externals Documentation',
+ [author], 1)
+]
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (master_doc, 'ManageExternals', u'Manage Externals Documentation',
+ author, 'ManageExternals', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+
+
diff --git a/manage_externals/test/doc/develop.rst b/manage_externals/test/doc/develop.rst
new file mode 100644
index 0000000000..b817b7b093
--- /dev/null
+++ b/manage_externals/test/doc/develop.rst
@@ -0,0 +1,202 @@
+Developer Guidelines
+====================
+
+The manage externals utilities are a light weight replacement for svn
+externals that will work with git repositories pulling in a mixture of
+git and svn dependencies.
+
+Given an externals description and a working copy:
+
+* *checkout_externals* attempts to make the working copy agree with the
+ externals description
+
+* *generate_externals* attempts to make the externals description agree
+ with the working copy.
+
+For these operations utilities should:
+
+* operate consistently across git and svn
+
+* operate simply with minimal user complexity
+
+* operate robustly across a wide range of repository states
+
+* provide explicit error messages when a problem occurs
+
+* leave the working copy in a valid state
+
+The utilities in manage externals are **NOT** generic wrappers around
+revision control operations or a replacement for common tasks. Users
+are expected to:
+
+* create branches prior to starting development
+
+* add remotes and push changes
+
+* create tags
+
+* delete branches
+
+These types of tasks are often highly workflow dependent, e.g. branch
+naming conventions may vary between repositories, have the potential
+to destroy user data, introduce significant code complexity and 'edge
+cases' that are extremely difficult to detect and test, and often
+require subtle decision making, especially if a problem occurs.
+
+Users who want to automate these types of tasks are encouraged to create their
+own tools. The externals description files are explicitly versioned
+and the internal APIs are intended to be stable for these purposes.
+
+Core Design Principles
+-----------------------
+
+1. Users can, and are actively encouraged to, modify the externals
+ directories using revision control outside of manage_externals
+ tools. You can't make any assumptions about the state of the
+ working copy. Examples: adding a remote, creating a branch,
+ switching to a branch, deleting the directory entirely.
+
+2. Given that the user can do anything, the manage externals library
+   can not preserve state between calls. The only information it can
+   rely on is what it expects based on the content of the externals
+ description file, and what the actual state of the directory tree
+ is.
+
+3. Do *not* do anything that will possibly destroy user data!
+
+ a. Do not remove files from the file system. We are operating on
+ user supplied input. If you don't call 'rm', you can't
+ accidentally remove the user's data. Thinking of calling
+ ``shutil.rmtree(user_input)``? What if the user accidentally
+ specified user_input such that it resolves to their home
+ directory.... Yeah. Don't go there.
+
+ b. Rely on git and svn to do their job as much as possible. Don't
+ duplicate functionality. Examples:
+
+ i. We require the working copies to be 'clean' as reported by
+ ``git status`` and ``svn status``. What if there are misc
+ editor files floating around that prevent an update? Use the
+ git and svn ignore functionality so they are not
+ reported. Don't try to remove them from manage_externals or
+ determine if they are 'safe' to ignore.
+
+ ii. Do not use '--force'. Ever. This is a sign you are doing
+ something dangerous, it may not be what the user
+ wants. Remember, they are encouraged to modify their repo.
+
+4. There are often multiple ways to obtain a particular piece of
+ information from git. Scraping screen output is brittle and
+ generally not considered a stable API across different versions of
+ git. Given a choice between:
+
+ a. a lower level git 'plumbing' command that processes a
+      specific request and returns a success/failure status.
+
+ b. high level git command that produces a bunch of output
+ that must be processed.
+
+ We always prefer the former. It almost always involves
+ writing and maintaining less code and is more likely to be
+ stable.
+
+5. Backward compatibility is critical. We have *nested*
+ repositories. They are trivially easy to change versions. They may
+ have very different versions of the top level manage_externals. The
+ ability to read and work with old model description files is
+ critical to avoid problems for users. We also have automated tools
+ (testdb) that must generate and read external description
+ files. Backward compatibility will make staging changes vastly
+ simpler.
+
+Model Users
+-----------
+
+Consider the needs of the following model users when developing manage_externals:
+
+* Users who will checkout the code once, and never change versions.
+
+* Users who will checkout the code once, then work for several years,
+  never updating, before trying to update or request integration.
+
+* Users develop code but do not use revision control beyond the
+  initial checkout. If they have modified or untracked files in the
+  repo, they may be irreplaceable. Don't destroy user data.
+
+* Intermediate users who are working with multiple repos or branches
+ on a regular basis. They may only use manage_externals weekly or
+ monthly. Keep the user interface and documentation simple and
+ explicit. The more command line options they have to remember or
+  look up, the more frustrated they get.
+
+* Software engineers who use the tools multiple times a day. It should
+ get out of their way.
+
+User Interface
+--------------
+
+Basic operation for the most standard use cases should be kept as
+simple as possible. Many users will only rarely run the manage
+utilities. Even advanced users don't like reading a lot of help
+documentation or struggling to remember commands and piece together
+what they need to run. Having many command line options, even if not
+needed, is extremely frustrating and overwhelming for most users. A few
+simple, explicitly named commands are better than a single command
+with many options.
+
+How will users get help if something goes wrong? This is a custom,
+one-off solution. Searching the internet for manage_externals, will
+only return the user doc for this project at best. There isn't likely
+to be a stackoverflow question or blog post where someone else already
+answered a user's question. And very few people outside this community
+will be able to provide help if something goes wrong. The sooner we
+kick users out of these utilities and into standard version control
+tools, the better off they are going to be if they run into a problem.
+
+Repositories
+------------
+
+There are three basic types of repositories that must be considered:
+
+* container repositories - repositories that are always top level
+ repositories, and have a group of externals that must be managed.
+
+* simple repositories - repositories that are externals to another
+ repository, and do not have any of their own externals that will be
+ managed.
+
+* mixed use repositories - repositories that can act as a top level
+ container repository or as an external to a top level
+ container. They may also have their own sub-externals that are
+  required. They may have different externals needs depending on
+ whether they are top level or not.
+
+Repositories must be able to checkout and switch to both branches and
+tags.
+
+Development
+===========
+
+The functionality to manage externals is broken into a library of core
+functionality and applications built with the library.
+
+The core library is called 'manic', a pseudo-homophone of (man)age
+(ex)ternals that is: short, pronounceable and spell-checkable. It is
+also no more or less meaningful to an unfamiliar user than a random
+jumble of letters forming an acronym.
+
+The core architecture of manic is:
+
+* externals description - an abstract description of an external,
+  including how to obtain it, where to obtain it, where it goes in
+ the working tree.
+
+* externals - the software object representing an external.
+
+* source trees - collection of externals
+
+* repository wrappers - object oriented wrappers around repository
+  operations. So the higher level management of the source tree and
+ external does not have to be concerned with how a particular
+ external is obtained and managed.
+
diff --git a/manage_externals/test/doc/index.rst b/manage_externals/test/doc/index.rst
new file mode 100644
index 0000000000..9ab287ad8c
--- /dev/null
+++ b/manage_externals/test/doc/index.rst
@@ -0,0 +1,22 @@
+.. Manage Externals documentation master file, created by
+ sphinx-quickstart on Wed Nov 29 10:53:25 2017.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Welcome to Manage Externals's documentation!
+============================================
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Contents:
+
+
+ develop.rst
+ testing.rst
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/manage_externals/test/doc/testing.rst b/manage_externals/test/doc/testing.rst
new file mode 100644
index 0000000000..623f0e431c
--- /dev/null
+++ b/manage_externals/test/doc/testing.rst
@@ -0,0 +1,123 @@
+Testing
+=======
+
+The manage_externals package has an automated test suite. All pull
+requests are expected to pass 100% of the automated tests, as well as
+be pep8 and lint 'clean' and maintain approximately constant (at a
+minimum) level of code coverage.
+
+Quick Start
+-----------
+
+Do nothing approach
+~~~~~~~~~~~~~~~~~~~
+
+When you create a pull request on GitHub, Travis-CI continuous
+integration testing will run the test suite in both python2 and
+python3. Test results, lint results, and code coverage results are
+available online.
+
+Do something approach
+~~~~~~~~~~~~~~~~~~~~~
+
+In the test directory, run:
+
+.. code-block:: shell
+
+ make env
+ make lint
+ make test
+ make coverage
+
+
+Automated Testing
+-----------------
+
+The manage_externals manic library and executables are developed to be
+python2 and python3 compatible using only the standard library. The
+test suites meet the same requirements. But additional tools are
+required to provide lint and code coverage metrics and generate
+documentation. The requirements are maintained in the requirements.txt
+file, and can be automatically installed into an isolated environment
+via Makefile.
+
+Bootstrap requirements:
+
+* python2 - version 2.7.x or later
+
+* python3 - version 3.6 tested other versions may work
+
+* pip and virtualenv for python2 and python3
+
+Note: all make rules can be of the form ``make python=pythonX rule``
+or ``make rule`` depending if you want to use the default system
+python or specify a specific version.
+
+The Makefile in the test directory has the following rules:
+
+* ``make python=pythonX env`` - create a python virtual environment
+ for python2 or python3 and install all required packages. These
+ packages are required to run lint or coverage.
+
+* ``make style`` - runs autopep8
+
+* ``make lint`` - runs autopep8 and pylint
+
+* ``make test`` - run the full test suite
+
+* ``make utest`` - run just the unit tests
+
+* ``make stest`` - run just the system integration tests
+
+* ``make coverage`` - run the full test suite through the code
+ coverage tool and generate an html report.
+
+* ``make readme`` - automatically generate the README files.
+
+* ``make clean`` - remove editor and pyc files
+
+* ``make clobber`` - remove all generated test files, including
+ virtual environments, coverage reports, and temporary test
+ repository directories.
+
+Unit Tests
+----------
+
+Unit tests are probably not 'true unit tests' for the pedantic, but
+are pragmatic unit tests. They cover small practical code blocks:
+functions, class methods, and groups of functions and class methods.
+
+System Integration Tests
+------------------------
+
+NOTE(bja, 2017-11) The systems integration tests currently do not include svn repositories.
+
+The manage_externals package is extremely tedious and error prone to test manually.
+
+Combinations that must be tested to ensure basic functionality are:
+
+* container repository pulling in simple externals
+
+* container repository pulling in mixed externals with sub-externals.
+
+* mixed repository acting as a container, pulling in simple externals and sub-externals
+
+Automatic system tests are handled the same way manual testing is done:
+
+* clone a test repository
+
+* create an externals description file for the test
+
+* run the executable with the desired args
+
+* check the results
+
+* potentially modify the repo (checkout a different branch)
+
+* rerun and test
+
+* etc
+
+The automated system stores small test repositories in the main repo
+by adding them as bare repositories. These repos are cloned via a
+subprocess call to git and manipulated during the tests.
diff --git a/manage_externals/test/repos/container.git/HEAD b/manage_externals/test/repos/container.git/HEAD
new file mode 100644
index 0000000000..cb089cd89a
--- /dev/null
+++ b/manage_externals/test/repos/container.git/HEAD
@@ -0,0 +1 @@
+ref: refs/heads/master
diff --git a/manage_externals/test/repos/container.git/config b/manage_externals/test/repos/container.git/config
new file mode 100644
index 0000000000..e6da231579
--- /dev/null
+++ b/manage_externals/test/repos/container.git/config
@@ -0,0 +1,6 @@
+[core]
+ repositoryformatversion = 0
+ filemode = true
+ bare = true
+ ignorecase = true
+ precomposeunicode = true
diff --git a/manage_externals/test/repos/container.git/description b/manage_externals/test/repos/container.git/description
new file mode 100644
index 0000000000..498b267a8c
--- /dev/null
+++ b/manage_externals/test/repos/container.git/description
@@ -0,0 +1 @@
+Unnamed repository; edit this file 'description' to name the repository.
diff --git a/manage_externals/test/repos/container.git/info/exclude b/manage_externals/test/repos/container.git/info/exclude
new file mode 100644
index 0000000000..a5196d1be8
--- /dev/null
+++ b/manage_externals/test/repos/container.git/info/exclude
@@ -0,0 +1,6 @@
+# git ls-files --others --exclude-from=.git/info/exclude
+# Lines that start with '#' are comments.
+# For a project mostly in C, the following would be a good set of
+# exclude patterns (uncomment them if you want to use them):
+# *.[oa]
+# *~
diff --git a/manage_externals/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 b/manage_externals/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801
new file mode 100644
index 0000000000..f65234e17f
Binary files /dev/null and b/manage_externals/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 differ
diff --git a/manage_externals/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de b/manage_externals/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de
new file mode 100644
index 0000000000..9759965b1b
Binary files /dev/null and b/manage_externals/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de differ
diff --git a/manage_externals/test/repos/container.git/objects/b0/f87705e2b9601cb831878f3d51efa78b910d7b b/manage_externals/test/repos/container.git/objects/b0/f87705e2b9601cb831878f3d51efa78b910d7b
new file mode 100644
index 0000000000..d9976cc442
Binary files /dev/null and b/manage_externals/test/repos/container.git/objects/b0/f87705e2b9601cb831878f3d51efa78b910d7b differ
diff --git a/manage_externals/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 b/manage_externals/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03
new file mode 100644
index 0000000000..460fd77819
Binary files /dev/null and b/manage_externals/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 differ
diff --git a/manage_externals/test/repos/container.git/refs/heads/master b/manage_externals/test/repos/container.git/refs/heads/master
new file mode 100644
index 0000000000..3ae00f3af0
--- /dev/null
+++ b/manage_externals/test/repos/container.git/refs/heads/master
@@ -0,0 +1 @@
+715b8f3e4afe1802a178e1d603af404ba45d59de
diff --git a/manage_externals/test/repos/error/readme.txt b/manage_externals/test/repos/error/readme.txt
new file mode 100644
index 0000000000..6b5753377e
--- /dev/null
+++ b/manage_externals/test/repos/error/readme.txt
@@ -0,0 +1,3 @@
+Invalid or corrupted git repository (.git dir exists, but is empty) for error
+testing.
+
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/HEAD b/manage_externals/test/repos/mixed-cont-ext.git/HEAD
new file mode 100644
index 0000000000..cb089cd89a
--- /dev/null
+++ b/manage_externals/test/repos/mixed-cont-ext.git/HEAD
@@ -0,0 +1 @@
+ref: refs/heads/master
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/config b/manage_externals/test/repos/mixed-cont-ext.git/config
new file mode 100644
index 0000000000..e6da231579
--- /dev/null
+++ b/manage_externals/test/repos/mixed-cont-ext.git/config
@@ -0,0 +1,6 @@
+[core]
+ repositoryformatversion = 0
+ filemode = true
+ bare = true
+ ignorecase = true
+ precomposeunicode = true
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/description b/manage_externals/test/repos/mixed-cont-ext.git/description
new file mode 100644
index 0000000000..498b267a8c
--- /dev/null
+++ b/manage_externals/test/repos/mixed-cont-ext.git/description
@@ -0,0 +1 @@
+Unnamed repository; edit this file 'description' to name the repository.
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/info/exclude b/manage_externals/test/repos/mixed-cont-ext.git/info/exclude
new file mode 100644
index 0000000000..a5196d1be8
--- /dev/null
+++ b/manage_externals/test/repos/mixed-cont-ext.git/info/exclude
@@ -0,0 +1,6 @@
+# git ls-files --others --exclude-from=.git/info/exclude
+# Lines that start with '#' are comments.
+# For a project mostly in C, the following would be a good set of
+# exclude patterns (uncomment them if you want to use them):
+# *.[oa]
+# *~
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 b/manage_externals/test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7
new file mode 100644
index 0000000000..13d15a96a5
Binary files /dev/null and b/manage_externals/test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 b/manage_externals/test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69
new file mode 100644
index 0000000000..8c6b04837a
Binary files /dev/null and b/manage_externals/test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 b/manage_externals/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801
new file mode 100644
index 0000000000..f65234e17f
Binary files /dev/null and b/manage_externals/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 differ
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/fd/15a5ad5204356229c60a831d2a8120a43ac901 b/manage_externals/test/repos/mixed-cont-ext.git/objects/fd/15a5ad5204356229c60a831d2a8120a43ac901
new file mode 100644
index 0000000000..619e38ee78
--- /dev/null
+++ b/manage_externals/test/repos/mixed-cont-ext.git/objects/fd/15a5ad5204356229c60a831d2a8120a43ac901
@@ -0,0 +1,2 @@
+x=Ê;Ã0ÑÔ:Åvî§Ì=rJf`)ˆnoW)¦z“›g¼—Ïë«Aí•>.p¢ˆA
+!ìÜ w4ݵ¡¸Qªé€Øú=©Ã¤á¨ÏZ9ü0„þûkÌ éžG)*
\ No newline at end of file
diff --git a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master b/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master
new file mode 100644
index 0000000000..508331f329
--- /dev/null
+++ b/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master
@@ -0,0 +1 @@
+06ea30b03ffa2f8574705f8b9583f7ca7e2dccf7
diff --git a/manage_externals/test/repos/simple-ext-fork.git/HEAD b/manage_externals/test/repos/simple-ext-fork.git/HEAD
new file mode 100644
index 0000000000..cb089cd89a
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/HEAD
@@ -0,0 +1 @@
+ref: refs/heads/master
diff --git a/manage_externals/test/repos/simple-ext-fork.git/config b/manage_externals/test/repos/simple-ext-fork.git/config
new file mode 100644
index 0000000000..04eba17870
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/config
@@ -0,0 +1,8 @@
+[core]
+ repositoryformatversion = 0
+ filemode = true
+ bare = true
+ ignorecase = true
+ precomposeunicode = true
+[remote "origin"]
+ url = /Users/andreb/projects/ncar/git-conversion/checkout-model-dev/cesm-demo-externals/manage_externals/test/repos/simple-ext.git
diff --git a/manage_externals/test/repos/simple-ext-fork.git/description b/manage_externals/test/repos/simple-ext-fork.git/description
new file mode 100644
index 0000000000..498b267a8c
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/description
@@ -0,0 +1 @@
+Unnamed repository; edit this file 'description' to name the repository.
diff --git a/manage_externals/test/repos/simple-ext-fork.git/info/exclude b/manage_externals/test/repos/simple-ext-fork.git/info/exclude
new file mode 100644
index 0000000000..a5196d1be8
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/info/exclude
@@ -0,0 +1,6 @@
+# git ls-files --others --exclude-from=.git/info/exclude
+# Lines that start with '#' are comments.
+# For a project mostly in C, the following would be a good set of
+# exclude patterns (uncomment them if you want to use them):
+# *.[oa]
+# *~
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f b/manage_externals/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f
new file mode 100644
index 0000000000..ae28c037e5
Binary files /dev/null and b/manage_externals/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 b/manage_externals/test/repos/simple-ext-fork.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8
new file mode 100644
index 0000000000..32d6896e3c
Binary files /dev/null and b/manage_externals/test/repos/simple-ext-fork.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/0b/67df4e7e8e6e1c6e401542738b352d18744677 b/manage_externals/test/repos/simple-ext-fork.git/objects/0b/67df4e7e8e6e1c6e401542738b352d18744677
new file mode 100644
index 0000000000..db51ce1953
Binary files /dev/null and b/manage_externals/test/repos/simple-ext-fork.git/objects/0b/67df4e7e8e6e1c6e401542738b352d18744677 differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c b/manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c
new file mode 100644
index 0000000000..564e7bba63
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c
@@ -0,0 +1,2 @@
+x%ŒK
+Â0@]çse&ßDÔ›L’!´˜¶„l¼½).¼Åãu.@Æ_ö¸Jê0ÇàìlM–Ä~v:ÄèmLÌÆi™åY*/ŸÛè@ŽpòÞWˆJ¥&Üå¿ø)´*Í
\ No newline at end of file
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f b/manage_externals/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f
new file mode 100644
index 0000000000..0d738af68b
Binary files /dev/null and b/manage_externals/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/24/4386e788c9bc608613e127a329c742450a60e4 b/manage_externals/test/repos/simple-ext-fork.git/objects/24/4386e788c9bc608613e127a329c742450a60e4
new file mode 100644
index 0000000000..b6284f8413
Binary files /dev/null and b/manage_externals/test/repos/simple-ext-fork.git/objects/24/4386e788c9bc608613e127a329c742450a60e4 differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf b/manage_externals/test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf
new file mode 100644
index 0000000000..0999f0d4b9
Binary files /dev/null and b/manage_externals/test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 b/manage_externals/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48
new file mode 100644
index 0000000000..9da8434f65
Binary files /dev/null and b/manage_externals/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/3d/7099c35404ae6c8640ce263b38bef06e98cc26 b/manage_externals/test/repos/simple-ext-fork.git/objects/3d/7099c35404ae6c8640ce263b38bef06e98cc26
new file mode 100644
index 0000000000..22065ba543
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/objects/3d/7099c35404ae6c8640ce263b38bef06e98cc26
@@ -0,0 +1,2 @@
+xmÉQ
+Â0EQ¿³Š·‚‚q®À
$ö•ÓL˜©îÞ€ôO¸_÷¤* çËõtÏÅ0Š°²õJ8Í¡ìbÅE?Ø‹g4ßNm±bag[b{ÚÂÑ_ÊIÇËcˆ>¹`ý}0…M”؇BÚÁs0/µâ¿}öï::
\ No newline at end of file
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b b/manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b
new file mode 100644
index 0000000000..9a31c7ef2e
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b
@@ -0,0 +1,2 @@
+x•ŽKnÃ0³Ö)x”,ÊI½EÑŸ´–A¹Ü#t7o€ŒìÛ¶vp.žzS…ÁšÆƒ&oÑ„©d¦8¹xLd@™Ì‹›ÖCð6f¯%
+œpt$‰m&ŽJd…¦¡øhøÝ—½Á—VxÔÒ®ùÉpŸ7^/²o7°dK1ÂGDsØ#¯ë¿æ{o?Z 7®²€,\g½˜Á©¹£rPkÖSèkJ´^ë
\ No newline at end of file
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 b/manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95
new file mode 100644
index 0000000000..d8ba654548
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95
@@ -0,0 +1,3 @@
+xUÌ[
+Â0…aŸ³ŠÙ@%Is+ˆ¨;™¤c/˜DÂq÷VðÅ×Ã>Æ ”w‡WJÚ˜>8ò!¤!&'ƒS=)í±×CòF+ÑI2‚ßO‚Ts^Xðn`Ä2ÖBcw'äÑw¨Á
+\ËØNqÝ›F—)ãò8îç3(«¬Œ2:é¥ÿü0x-<×!6,i ª9
\ No newline at end of file
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 b/manage_externals/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936
new file mode 100644
index 0000000000..9b40a0afa0
Binary files /dev/null and b/manage_externals/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 differ
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/c5/32bc8fde96fa63103a52057f0baffcc9f00c6b b/manage_externals/test/repos/simple-ext-fork.git/objects/c5/32bc8fde96fa63103a52057f0baffcc9f00c6b
new file mode 100644
index 0000000000..3019d2bac0
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/objects/c5/32bc8fde96fa63103a52057f0baffcc9f00c6b
@@ -0,0 +1 @@
+x5ÉÁ
à DÑœ©b*°dni ¸‚Y´lä¤û Y–þéýX%bõ÷Û–ËÀ,`”½WÂ8Ê.£˜èG±Œ&àר-T$v¶Ú³p,Î=ì£ôˆ:-O}3áu:®±¸]”8æ…´k{÷|0
\ No newline at end of file
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 b/manage_externals/test/repos/simple-ext-fork.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364
new file mode 100644
index 0000000000..1d27accb58
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364
@@ -0,0 +1 @@
+x
ÈÁ
€ @ßT±øàeV` ›p ¹;£v¯É¼&מ±Äi+bø%˜œ£Ns(G7ñ®/nñ‚ÖÁÇ©-UlGj»ÐæV&¿”Yÿ+!|£òŠ
\ No newline at end of file
diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/f2/68d4e56d067da9bd1d85e55bdc40a8bd2b0bca b/manage_externals/test/repos/simple-ext-fork.git/objects/f2/68d4e56d067da9bd1d85e55bdc40a8bd2b0bca
new file mode 100644
index 0000000000..3e945cdeb1
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/objects/f2/68d4e56d067da9bd1d85e55bdc40a8bd2b0bca
@@ -0,0 +1 @@
+x•ŽÛ 1EýNÓ€’Écfµ+ÈcÔÕÍFBÛw-Á¿Ëù©–2v0ÖmzOÈ^4rÈvˆ7›Íì"Å̉Íàˆ‚z‡&sb´$>D—}ÂD>£Nƒv“{ŠZ¼M˜I…¥?jƒ‹Ìpžs8ÄgøÓ½„qÚ¥ZŽ€qoj†fÕJ×{]þÕµÓ¥®¥Om/¨3Ü$ô¥‰Q_@ÞH©
\ No newline at end of file
diff --git a/manage_externals/test/repos/simple-ext-fork.git/packed-refs b/manage_externals/test/repos/simple-ext-fork.git/packed-refs
new file mode 100644
index 0000000000..b8f9e86308
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/packed-refs
@@ -0,0 +1,5 @@
+# pack-refs with: peeled fully-peeled sorted
+36418b4e5665956a90725c9a1b5a8e551c5f3d48 refs/heads/feature2
+9b75494003deca69527bb64bcaa352e801611dd2 refs/heads/master
+11a76e3d9a67313dec7ce1230852ab5c86352c5c refs/tags/tag1
+^9b75494003deca69527bb64bcaa352e801611dd2
diff --git a/manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2 b/manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2
new file mode 100644
index 0000000000..d223b0362d
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2
@@ -0,0 +1 @@
+f268d4e56d067da9bd1d85e55bdc40a8bd2b0bca
diff --git a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature b/manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature
new file mode 100644
index 0000000000..8a18bf08e9
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature
@@ -0,0 +1 @@
+a42fe9144f5707bc1e9515ce1b44681f7aba6f95
diff --git a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 b/manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1
new file mode 100644
index 0000000000..2764b552d5
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1
@@ -0,0 +1 @@
+8d2b3b35126224c975d23f109aa1e3cbac452989
diff --git a/manage_externals/test/repos/simple-ext.git/HEAD b/manage_externals/test/repos/simple-ext.git/HEAD
new file mode 100644
index 0000000000..cb089cd89a
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext.git/HEAD
@@ -0,0 +1 @@
+ref: refs/heads/master
diff --git a/manage_externals/test/repos/simple-ext.git/config b/manage_externals/test/repos/simple-ext.git/config
new file mode 100644
index 0000000000..e6da231579
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext.git/config
@@ -0,0 +1,6 @@
+[core]
+ repositoryformatversion = 0
+ filemode = true
+ bare = true
+ ignorecase = true
+ precomposeunicode = true
diff --git a/manage_externals/test/repos/simple-ext.git/description b/manage_externals/test/repos/simple-ext.git/description
new file mode 100644
index 0000000000..498b267a8c
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext.git/description
@@ -0,0 +1 @@
+Unnamed repository; edit this file 'description' to name the repository.
diff --git a/manage_externals/test/repos/simple-ext.git/info/exclude b/manage_externals/test/repos/simple-ext.git/info/exclude
new file mode 100644
index 0000000000..a5196d1be8
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext.git/info/exclude
@@ -0,0 +1,6 @@
+# git ls-files --others --exclude-from=.git/info/exclude
+# Lines that start with '#' are comments.
+# For a project mostly in C, the following would be a good set of
+# exclude patterns (uncomment them if you want to use them):
+# *.[oa]
+# *~
diff --git a/manage_externals/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f b/manage_externals/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f
new file mode 100644
index 0000000000..ae28c037e5
Binary files /dev/null and b/manage_externals/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 b/manage_externals/test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8
new file mode 100644
index 0000000000..32d6896e3c
Binary files /dev/null and b/manage_externals/test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c b/manage_externals/test/repos/simple-ext.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c
new file mode 100644
index 0000000000..564e7bba63
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c
@@ -0,0 +1,2 @@
+x%ŒK
+Â0@]çse&ßDÔ›L’!´˜¶„l¼½).¼Åãu.@Æ_ö¸Jê0ÇàìlM–Ä~v:ÄèmLÌÆi™åY*/ŸÛè@ŽpòÞWˆJ¥&Üå¿ø)´*Í
\ No newline at end of file
diff --git a/manage_externals/test/repos/simple-ext.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 b/manage_externals/test/repos/simple-ext.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48
new file mode 100644
index 0000000000..9da8434f65
Binary files /dev/null and b/manage_externals/test/repos/simple-ext.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 b/manage_externals/test/repos/simple-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801
new file mode 100644
index 0000000000..f65234e17f
Binary files /dev/null and b/manage_externals/test/repos/simple-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/9b/75494003deca69527bb64bcaa352e801611dd2 b/manage_externals/test/repos/simple-ext.git/objects/9b/75494003deca69527bb64bcaa352e801611dd2
new file mode 100644
index 0000000000..ba1b51f515
Binary files /dev/null and b/manage_externals/test/repos/simple-ext.git/objects/9b/75494003deca69527bb64bcaa352e801611dd2 differ
diff --git a/manage_externals/test/repos/simple-ext.git/objects/a2/2a5da9119328ea6d693f88861457c07e14ac04 b/manage_externals/test/repos/simple-ext.git/objects/a2/2a5da9119328ea6d693f88861457c07e14ac04
new file mode 100644
index 0000000000..fb5feb96c2
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext.git/objects/a2/2a5da9119328ea6d693f88861457c07e14ac04
@@ -0,0 +1 @@
+x
ÂÑ €0@¿;Å›À?ÄZû š”& n¯Ç•nû¶Mÿ—kt"èÉa.aóÅ-Ñ >Á©¹£rPkÖSèkJ´^ë
\ No newline at end of file
diff --git a/manage_externals/test/repos/simple-ext.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 b/manage_externals/test/repos/simple-ext.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364
new file mode 100644
index 0000000000..1d27accb58
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364
@@ -0,0 +1 @@
+x
ÈÁ
€ @ßT±øàeV` ›p ¹;£v¯É¼&מ±Äi+bø%˜œ£Ns(G7ñ®/nñ‚ÖÁÇ©-UlGj»ÐæV&¿”Yÿ+!|£òŠ
\ No newline at end of file
diff --git a/manage_externals/test/repos/simple-ext.git/refs/heads/feature2 b/manage_externals/test/repos/simple-ext.git/refs/heads/feature2
new file mode 100644
index 0000000000..01a0dd6e23
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext.git/refs/heads/feature2
@@ -0,0 +1 @@
+36418b4e5665956a90725c9a1b5a8e551c5f3d48
diff --git a/manage_externals/test/repos/simple-ext.git/refs/heads/master b/manage_externals/test/repos/simple-ext.git/refs/heads/master
new file mode 100644
index 0000000000..5c67504966
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext.git/refs/heads/master
@@ -0,0 +1 @@
+9b75494003deca69527bb64bcaa352e801611dd2
diff --git a/manage_externals/test/repos/simple-ext.git/refs/tags/tag1 b/manage_externals/test/repos/simple-ext.git/refs/tags/tag1
new file mode 100644
index 0000000000..ee595be8bd
--- /dev/null
+++ b/manage_externals/test/repos/simple-ext.git/refs/tags/tag1
@@ -0,0 +1 @@
+11a76e3d9a67313dec7ce1230852ab5c86352c5c
diff --git a/manage_externals/test/requirements.txt b/manage_externals/test/requirements.txt
new file mode 100644
index 0000000000..d66f6f1e67
--- /dev/null
+++ b/manage_externals/test/requirements.txt
@@ -0,0 +1,5 @@
+pylint>=1.7.0
+autopep8>=1.3.0
+coverage>=4.4.0
+coveralls>=1.2.0
+sphinx>=1.6.0
diff --git a/manage_externals/test/test_sys_checkout.py b/manage_externals/test/test_sys_checkout.py
new file mode 100644
index 0000000000..adf6470cdf
--- /dev/null
+++ b/manage_externals/test/test_sys_checkout.py
@@ -0,0 +1,1322 @@
+#!/usr/bin/env python
+
+"""Unit test driver for checkout_externals
+
+Note: this script assume the path to the manic and
+checkout_externals module is already in the python path. This is
+usually handled by the makefile. If you call it directly, you may need
+to adjust your path.
+
+NOTE(bja, 2017-11) If a test fails, we want to keep the repo for that
+test. But the tests will keep running, so we need a unique name. Also,
+tearDown is always called after each test. I haven't figured out how
+to determine if an assertion failed and whether it is safe to clean up
+the test repos.
+
+So the solution is:
+
+* assign a unique id to each test repo.
+
+* never cleanup during the run.
+
+* Erase any existing repos at the beginning of the module in
+setUpModule.
+
+"""
+
+# NOTE(bja, 2017-11) pylint complains that the module is too big, but
+# I'm still working on how to break up the tests and still have the
+# temporary directory be preserved....
+# pylint: disable=too-many-lines
+
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import logging
+import os
+import os.path
+import shutil
+import unittest
+
+from manic.externals_description import ExternalsDescription
+from manic.externals_description import DESCRIPTION_SECTION, VERSION_ITEM
+from manic.externals_status import ExternalStatus
+from manic.repository_git import GitRepository
+from manic.utils import printlog, execute_subprocess
+from manic.global_constants import LOCAL_PATH_INDICATOR, VERBOSITY_DEFAULT
+from manic.global_constants import LOG_FILE_NAME
+from manic import checkout
+
+# ConfigParser was renamed in python2 to configparser. In python2,
+# ConfigParser returns byte strings, str, instead of unicode. We need
+# unicode to be compatible with xml and json parser and python3.
+try:
+ # python2
+ from ConfigParser import SafeConfigParser as config_parser
+except ImportError:
+ # python3
+ from configparser import ConfigParser as config_parser
+
+# ---------------------------------------------------------------------
+#
+# Global constants
+#
+# ---------------------------------------------------------------------
+
+# environment variable names
+MANIC_TEST_BARE_REPO_ROOT = 'MANIC_TEST_BARE_REPO_ROOT'
+MANIC_TEST_TMP_REPO_ROOT = 'MANIC_TEST_TMP_REPO_ROOT'
+
+# directory names
+TMP_REPO_DIR_NAME = 'tmp'
+BARE_REPO_ROOT_NAME = 'repos'
+CONTAINER_REPO_NAME = 'container.git'
+MIXED_REPO_NAME = 'mixed-cont-ext.git'
+SIMPLE_REPO_NAME = 'simple-ext.git'
+SIMPLE_FORK_NAME = 'simple-ext-fork.git'
+SIMPLE_LOCAL_ONLY_NAME = '.'
+ERROR_REPO_NAME = 'error'
+EXTERNALS_NAME = 'externals'
+SUB_EXTERNALS_PATH = 'src'
+CFG_NAME = 'externals.cfg'
+CFG_SUB_NAME = 'sub-externals.cfg'
+README_NAME = 'readme.txt'
+
+SVN_TEST_REPO = 'https://github.com/escomp/cesm'
+
+
+def setUpModule(): # pylint: disable=C0103
+ """Setup for all tests in this module. It is called once per module!
+ """
+ logging.basicConfig(filename=LOG_FILE_NAME,
+ format='%(levelname)s : %(asctime)s : %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S',
+ level=logging.DEBUG)
+ repo_root = os.path.join(os.getcwd(), TMP_REPO_DIR_NAME)
+ repo_root = os.path.abspath(repo_root)
+ # delete if it exists from previous runs
+ try:
+ shutil.rmtree(repo_root)
+ except BaseException:
+ pass
+ # create clean dir for this run
+ os.mkdir(repo_root)
+ # set into the environment so var will be expanded in externals
+ # files when executables are run
+ os.environ[MANIC_TEST_TMP_REPO_ROOT] = repo_root
+
+
+class GenerateExternalsDescriptionCfgV1(object):
+ """Class to provide building blocks to create
+ ExternalsDescriptionCfgV1 files.
+
+ Includes predefined files used in tests.
+
+ """
+
+ def __init__(self):
+ self._schema_version = '1.0.0'
+ self._config = None
+
+ def container_full(self, dest_dir):
+ """Create the full container config file with simple and mixed use
+ externals
+
+ """
+ self.create_config()
+ self.create_section(SIMPLE_REPO_NAME, 'simp_tag',
+ tag='tag1')
+
+ self.create_section(SIMPLE_REPO_NAME, 'simp_branch',
+ branch='feature2')
+
+ self.create_section(SIMPLE_REPO_NAME, 'simp_opt',
+ tag='tag1', required=False)
+
+ self.create_section(MIXED_REPO_NAME, 'mixed_req',
+ tag='tag1', externals=CFG_SUB_NAME)
+
+ self.create_section(MIXED_REPO_NAME, 'mixed_opt',
+ tag='tag1', externals=CFG_SUB_NAME,
+ required=False)
+
+ self._write_config(dest_dir)
+
+ def container_simple_required(self, dest_dir):
+ """Create a container externals file with only simple externals.
+
+ """
+ self.create_config()
+ self.create_section(SIMPLE_REPO_NAME, 'simp_tag',
+ tag='tag1')
+
+ self.create_section(SIMPLE_REPO_NAME, 'simp_branch',
+ branch='feature2')
+
+ self._write_config(dest_dir)
+
+ def container_simple_optional(self, dest_dir):
+ """Create a container externals file with optional simple externals
+
+ """
+ self.create_config()
+ self.create_section(SIMPLE_REPO_NAME, 'simp_req',
+ tag='tag1')
+
+ self.create_section(SIMPLE_REPO_NAME, 'simp_opt',
+ tag='tag1', required=False)
+
+ self._write_config(dest_dir)
+
+ def container_simple_svn(self, dest_dir):
+ """Create a container externals file with only simple externals.
+
+ """
+ self.create_config()
+ self.create_section(SIMPLE_REPO_NAME, 'simp_tag', tag='tag1')
+
+ self.create_svn_external('svn_branch', branch='trunk')
+ self.create_svn_external('svn_tag', tag='tags/cesm2.0.beta07')
+
+ self._write_config(dest_dir)
+
+ def mixed_simple_base(self, dest_dir):
+ """Create a mixed-use base externals file with only simple externals.
+
+ """
+ self.create_config()
+ self.create_section_ext_only('mixed_base')
+ self.create_section(SIMPLE_REPO_NAME, 'simp_tag',
+ tag='tag1')
+
+ self.create_section(SIMPLE_REPO_NAME, 'simp_branch',
+ branch='feature2')
+
+ self._write_config(dest_dir)
+
+ def mixed_simple_sub(self, dest_dir):
+ """Create a mixed-use sub externals file with only simple externals.
+
+ """
+ self.create_config()
+ self.create_section(SIMPLE_REPO_NAME, 'simp_tag',
+ tag='tag1', path=SUB_EXTERNALS_PATH)
+
+ self.create_section(SIMPLE_REPO_NAME, 'simp_branch',
+ branch='feature2', path=SUB_EXTERNALS_PATH)
+
+ self._write_config(dest_dir, filename=CFG_SUB_NAME)
+
+ def _write_config(self, dest_dir, filename=CFG_NAME):
+ """Write the configuration file to disk
+
+ """
+ dest_path = os.path.join(dest_dir, filename)
+ with open(dest_path, 'w') as configfile:
+ self._config.write(configfile)
+
+ def create_config(self):
+ """Create an config object and add the required metadata section
+
+ """
+ self._config = config_parser()
+ self.create_metadata()
+
+ def create_metadata(self):
+ """Create the metadata section of the config file
+ """
+ self._config.add_section(DESCRIPTION_SECTION)
+
+ self._config.set(DESCRIPTION_SECTION, VERSION_ITEM,
+ self._schema_version)
+
+ def create_section(self, repo_type, name, tag='', branch='',
+ required=True, path=EXTERNALS_NAME, externals=''):
+ """Create a config section with autofilling some items and handling
+ optional items.
+
+ """
+ # pylint: disable=R0913
+ self._config.add_section(name)
+ self._config.set(name, ExternalsDescription.PATH,
+ os.path.join(path, name))
+
+ self._config.set(name, ExternalsDescription.PROTOCOL,
+ ExternalsDescription.PROTOCOL_GIT)
+
+ repo_url = os.path.join('${MANIC_TEST_BARE_REPO_ROOT}', repo_type)
+ self._config.set(name, ExternalsDescription.REPO_URL, repo_url)
+
+ self._config.set(name, ExternalsDescription.REQUIRED, str(required))
+
+ if tag:
+ self._config.set(name, ExternalsDescription.TAG, tag)
+
+ if branch:
+ self._config.set(name, ExternalsDescription.BRANCH, branch)
+
+ if externals:
+ self._config.set(name, ExternalsDescription.EXTERNALS, externals)
+
+ def create_section_ext_only(self, name,
+ required=True, externals=CFG_SUB_NAME):
+ """Create a config section with autofilling some items and handling
+ optional items.
+
+ """
+ # pylint: disable=R0913
+ self._config.add_section(name)
+ self._config.set(name, ExternalsDescription.PATH, LOCAL_PATH_INDICATOR)
+
+ self._config.set(name, ExternalsDescription.PROTOCOL,
+ ExternalsDescription.PROTOCOL_EXTERNALS_ONLY)
+
+ self._config.set(name, ExternalsDescription.REPO_URL,
+ LOCAL_PATH_INDICATOR)
+
+ self._config.set(name, ExternalsDescription.REQUIRED, str(required))
+
+ if externals:
+ self._config.set(name, ExternalsDescription.EXTERNALS, externals)
+
+ def create_svn_external(self, name, tag='', branch=''):
+ """Create a config section for an svn repository.
+
+ """
+ self._config.add_section(name)
+ self._config.set(name, ExternalsDescription.PATH,
+ os.path.join(EXTERNALS_NAME, name))
+
+ self._config.set(name, ExternalsDescription.PROTOCOL,
+ ExternalsDescription.PROTOCOL_SVN)
+
+ self._config.set(name, ExternalsDescription.REPO_URL, SVN_TEST_REPO)
+
+ self._config.set(name, ExternalsDescription.REQUIRED, str(True))
+
+ if tag:
+ self._config.set(name, ExternalsDescription.TAG, tag)
+
+ if branch:
+ self._config.set(name, ExternalsDescription.BRANCH, branch)
+
+ @staticmethod
+ def create_branch(dest_dir, repo_name, branch, with_commit=False):
+ """Update a repository branch, and potentially the remote.
+ """
+ # pylint: disable=R0913
+ cwd = os.getcwd()
+ repo_root = os.path.join(dest_dir, EXTERNALS_NAME)
+ repo_root = os.path.join(repo_root, repo_name)
+ os.chdir(repo_root)
+ cmd = ['git', 'checkout', '-b', branch, ]
+ execute_subprocess(cmd)
+ if with_commit:
+ msg = 'start work on {0}'.format(branch)
+ with open(README_NAME, 'a') as handle:
+ handle.write(msg)
+ cmd = ['git', 'add', README_NAME, ]
+ execute_subprocess(cmd)
+ cmd = ['git', 'commit', '-m', msg, ]
+ execute_subprocess(cmd)
+ os.chdir(cwd)
+
+ def update_branch(self, dest_dir, name, branch, repo_type=None,
+ filename=CFG_NAME):
+ """Update a repository branch, and potentially the remote.
+ """
+ # pylint: disable=R0913
+ self._config.set(name, ExternalsDescription.BRANCH, branch)
+
+ if repo_type:
+ if repo_type == SIMPLE_LOCAL_ONLY_NAME:
+ repo_url = SIMPLE_LOCAL_ONLY_NAME
+ else:
+ repo_url = os.path.join('${MANIC_TEST_BARE_REPO_ROOT}',
+ repo_type)
+ self._config.set(name, ExternalsDescription.REPO_URL, repo_url)
+
+ try:
+ # remove the tag if it existed
+ self._config.remove_option(name, ExternalsDescription.TAG)
+ except BaseException:
+ pass
+
+ self._write_config(dest_dir, filename)
+
+ def update_svn_branch(self, dest_dir, name, branch, filename=CFG_NAME):
+ """Update a repository branch, and potentially the remote.
+ """
+ # pylint: disable=R0913
+ self._config.set(name, ExternalsDescription.BRANCH, branch)
+
+ try:
+ # remove the tag if it existed
+ self._config.remove_option(name, ExternalsDescription.TAG)
+ except BaseException:
+ pass
+
+ self._write_config(dest_dir, filename)
+
+ def update_tag(self, dest_dir, name, tag, repo_type=None,
+ filename=CFG_NAME, remove_branch=True):
+ """Update a repository tag, and potentially the remote
+
+ NOTE(bja, 2017-11) remove_branch=False should result in an
+ overspecified external with both a branch and tag. This is
+ used for error condition testing.
+
+ """
+ # pylint: disable=R0913
+ self._config.set(name, ExternalsDescription.TAG, tag)
+
+ if repo_type:
+ repo_url = os.path.join('${MANIC_TEST_BARE_REPO_ROOT}', repo_type)
+ self._config.set(name, ExternalsDescription.REPO_URL, repo_url)
+
+ try:
+ # remove the branch if it existed
+ if remove_branch:
+ self._config.remove_option(name, ExternalsDescription.BRANCH)
+ except BaseException:
+ pass
+
+ self._write_config(dest_dir, filename)
+
+ def update_underspecify_branch_tag(self, dest_dir, name,
+ filename=CFG_NAME):
+ """Update a repository protocol, and potentially the remote
+ """
+ # pylint: disable=R0913
+ try:
+ # remove the branch if it existed
+ self._config.remove_option(name, ExternalsDescription.BRANCH)
+ except BaseException:
+ pass
+
+ try:
+ # remove the tag if it existed
+ self._config.remove_option(name, ExternalsDescription.TAG)
+ except BaseException:
+ pass
+
+ self._write_config(dest_dir, filename)
+
+ def update_underspecify_remove_url(self, dest_dir, name,
+ filename=CFG_NAME):
+ """Update a repository protocol, and potentially the remote
+ """
+ # pylint: disable=R0913
+ try:
+ # remove the repo url if it existed
+ self._config.remove_option(name, ExternalsDescription.REPO_URL)
+ except BaseException:
+ pass
+
+ self._write_config(dest_dir, filename)
+
+ def update_protocol(self, dest_dir, name, protocol, repo_type=None,
+ filename=CFG_NAME):
+ """Update a repository protocol, and potentially the remote
+ """
+ # pylint: disable=R0913
+ self._config.set(name, ExternalsDescription.PROTOCOL, protocol)
+
+ if repo_type:
+ repo_url = os.path.join('${MANIC_TEST_BARE_REPO_ROOT}', repo_type)
+ self._config.set(name, ExternalsDescription.REPO_URL, repo_url)
+
+ self._write_config(dest_dir, filename)
+
+
+class BaseTestSysCheckout(unittest.TestCase):
+ """Base class of reusable systems level test setup for
+ checkout_externals
+
+ """
+ # NOTE(bja, 2017-11) pylint complains about long method names, but
+ # it is hard to differentiate tests without making them more
+ # cryptic.
+ # pylint: disable=invalid-name
+
+ status_args = ['--status']
+ checkout_args = []
+ optional_args = ['--optional']
+ verbose_args = ['--status', '--verbose']
+
+ def setUp(self):
+ """Setup for all individual checkout_externals tests
+ """
+ # directory we want to return to after the test system and
+ # checkout_externals are done cd'ing all over the place.
+ self._return_dir = os.getcwd()
+
+ self._test_id = self.id().split('.')[-1]
+
+ # path to the executable
+ self._checkout = os.path.join('../checkout_externals')
+ self._checkout = os.path.abspath(self._checkout)
+
+ # directory where we have test repositories
+ self._bare_root = os.path.join(os.getcwd(), BARE_REPO_ROOT_NAME)
+ self._bare_root = os.path.abspath(self._bare_root)
+
+ # set into the environment so var will be expanded in externals files
+ os.environ[MANIC_TEST_BARE_REPO_ROOT] = self._bare_root
+
+ # set the input file generator
+ self._generator = GenerateExternalsDescriptionCfgV1()
+ # set the input file generator for secondary externals
+ self._sub_generator = GenerateExternalsDescriptionCfgV1()
+
+ def tearDown(self):
+ """Tear down for individual tests
+ """
+ # remove the env var we added in setup
+ del os.environ[MANIC_TEST_BARE_REPO_ROOT]
+
+ # return to our common starting point
+ os.chdir(self._return_dir)
+
+ def setup_test_repo(self, parent_repo_name):
+ """Setup the paths and clone the base test repo
+
+ """
+ # unique repo for this test
+ test_dir_name = self._test_id
+ print("Test repository name: {0}".format(test_dir_name))
+
+ parent_repo_dir = os.path.join(self._bare_root, parent_repo_name)
+ dest_dir = os.path.join(os.environ[MANIC_TEST_TMP_REPO_ROOT],
+ test_dir_name)
+ # pylint: disable=W0212
+ GitRepository._git_clone(parent_repo_dir, dest_dir, VERBOSITY_DEFAULT)
+ return dest_dir
+
+ @staticmethod
+ def _add_file_to_repo(under_test_dir, filename, tracked):
+ """Add a file to the repository so we can put it into a dirty state
+
+ """
+ cwd = os.getcwd()
+ os.chdir(under_test_dir)
+ with open(filename, 'w') as tmp:
+ tmp.write('Hello, world!')
+
+ if tracked:
+ # NOTE(bja, 2018-01) brittle hack to obtain repo dir and
+ # file name
+ path_data = filename.split('/')
+ repo_dir = os.path.join(path_data[0], path_data[1])
+ os.chdir(repo_dir)
+ tracked_file = path_data[2]
+ cmd = ['git', 'add', tracked_file]
+ execute_subprocess(cmd)
+
+ os.chdir(cwd)
+
+ @staticmethod
+ def execute_cmd_in_dir(under_test_dir, args):
+ """Extecute the checkout command in the appropriate repo dir with the
+ specified additional args
+
+ Note that we are calling the command line processing and main
+ routines and not using a subprocess call so that we get code
+ coverage results!
+
+ """
+ cwd = os.getcwd()
+ checkout_path = os.path.abspath('{0}/../../checkout_externals')
+ os.chdir(under_test_dir)
+ cmdline = ['--externals', CFG_NAME, ]
+ cmdline += args
+ repo_root = 'MANIC_TEST_BARE_REPO_ROOT={root}'.format(
+ root=os.environ[MANIC_TEST_BARE_REPO_ROOT])
+ manual_cmd = ('Test cmd:\npushd {cwd}; {env} {checkout} {args}'.format(
+ cwd=under_test_dir, env=repo_root, checkout=checkout_path,
+ args=' '.join(cmdline)))
+ printlog(manual_cmd)
+ options = checkout.commandline_arguments(cmdline)
+ overall_status, tree_status = checkout.main(options)
+ os.chdir(cwd)
+ return overall_status, tree_status
+
+ # ----------------------------------------------------------------
+ #
+ # Check results for generic perturbation of states
+ #
+ # ----------------------------------------------------------------
+ def _check_generic_empty_default_required(self, tree, name):
+ self.assertEqual(tree[name].sync_state, ExternalStatus.EMPTY)
+ self.assertEqual(tree[name].clean_state, ExternalStatus.DEFAULT)
+ self.assertEqual(tree[name].source_type, ExternalStatus.MANAGED)
+
+ def _check_generic_ok_clean_required(self, tree, name):
+ self.assertEqual(tree[name].sync_state, ExternalStatus.STATUS_OK)
+ self.assertEqual(tree[name].clean_state, ExternalStatus.STATUS_OK)
+ self.assertEqual(tree[name].source_type, ExternalStatus.MANAGED)
+
+ def _check_generic_ok_dirty_required(self, tree, name):
+ self.assertEqual(tree[name].sync_state, ExternalStatus.STATUS_OK)
+ self.assertEqual(tree[name].clean_state, ExternalStatus.DIRTY)
+ self.assertEqual(tree[name].source_type, ExternalStatus.MANAGED)
+
+ def _check_generic_modified_ok_required(self, tree, name):
+ self.assertEqual(tree[name].sync_state, ExternalStatus.MODEL_MODIFIED)
+ self.assertEqual(tree[name].clean_state, ExternalStatus.STATUS_OK)
+ self.assertEqual(tree[name].source_type, ExternalStatus.MANAGED)
+
+ def _check_generic_empty_default_optional(self, tree, name):
+ self.assertEqual(tree[name].sync_state, ExternalStatus.EMPTY)
+ self.assertEqual(tree[name].clean_state, ExternalStatus.DEFAULT)
+ self.assertEqual(tree[name].source_type, ExternalStatus.OPTIONAL)
+
+ def _check_generic_ok_clean_optional(self, tree, name):
+ self.assertEqual(tree[name].sync_state, ExternalStatus.STATUS_OK)
+ self.assertEqual(tree[name].clean_state, ExternalStatus.STATUS_OK)
+ self.assertEqual(tree[name].source_type, ExternalStatus.OPTIONAL)
+
+ # ----------------------------------------------------------------
+ #
+ # Check results for individual named externals
+ #
+ # ----------------------------------------------------------------
+ def _check_simple_tag_empty(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/simp_tag'.format(directory)
+ self._check_generic_empty_default_required(tree, name)
+
+ def _check_simple_tag_ok(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/simp_tag'.format(directory)
+ self._check_generic_ok_clean_required(tree, name)
+
+ def _check_simple_tag_dirty(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/simp_tag'.format(directory)
+ self._check_generic_ok_dirty_required(tree, name)
+
+ def _check_simple_branch_empty(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/simp_branch'.format(directory)
+ self._check_generic_empty_default_required(tree, name)
+
+ def _check_simple_branch_ok(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/simp_branch'.format(directory)
+ self._check_generic_ok_clean_required(tree, name)
+
+ def _check_simple_branch_modified(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/simp_branch'.format(directory)
+ self._check_generic_modified_ok_required(tree, name)
+
+ def _check_simple_req_empty(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/simp_req'.format(directory)
+ self._check_generic_empty_default_required(tree, name)
+
+ def _check_simple_req_ok(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/simp_req'.format(directory)
+ self._check_generic_ok_clean_required(tree, name)
+
+ def _check_simple_opt_empty(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/simp_opt'.format(directory)
+ self._check_generic_empty_default_optional(tree, name)
+
+ def _check_simple_opt_ok(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/simp_opt'.format(directory)
+ self._check_generic_ok_clean_optional(tree, name)
+
+ # ----------------------------------------------------------------
+ #
+ # Check results for groups of externals under specific conditions
+ #
+ # ----------------------------------------------------------------
+ def _check_container_simple_required_pre_checkout(self, overall, tree):
+ self.assertEqual(overall, 0)
+ self._check_simple_tag_empty(tree)
+ self._check_simple_branch_empty(tree)
+
+ def _check_container_simple_required_checkout(self, overall, tree):
+ # Note, this is the internal tree status just before checkout
+ self.assertEqual(overall, 0)
+ self._check_simple_tag_empty(tree)
+ self._check_simple_branch_empty(tree)
+
+ def _check_container_simple_required_post_checkout(self, overall, tree):
+ self.assertEqual(overall, 0)
+ self._check_simple_tag_ok(tree)
+ self._check_simple_branch_ok(tree)
+
+ def _check_container_simple_optional_pre_checkout(self, overall, tree):
+ self.assertEqual(overall, 0)
+ self._check_simple_req_empty(tree)
+ self._check_simple_opt_empty(tree)
+
+ def _check_container_simple_optional_checkout(self, overall, tree):
+ self.assertEqual(overall, 0)
+ self._check_simple_req_empty(tree)
+ self._check_simple_opt_empty(tree)
+
+ def _check_container_simple_optional_post_checkout(self, overall, tree):
+ self.assertEqual(overall, 0)
+ self._check_simple_req_ok(tree)
+ self._check_simple_opt_empty(tree)
+
+ def _check_container_simple_optional_post_optional(self, overall, tree):
+ self.assertEqual(overall, 0)
+ self._check_simple_req_ok(tree)
+ self._check_simple_opt_ok(tree)
+
+ def _check_container_simple_required_sb_modified(self, overall, tree):
+ self.assertEqual(overall, 0)
+ self._check_simple_tag_ok(tree)
+ self._check_simple_branch_modified(tree)
+
+ def _check_container_simple_optional_st_dirty(self, overall, tree):
+ self.assertEqual(overall, 0)
+ self._check_simple_tag_dirty(tree)
+ self._check_simple_branch_ok(tree)
+
+ def _check_mixed_sub_simple_required_pre_checkout(self, overall, tree):
+ # Note, this is the internal tree status just before checkout
+ self.assertEqual(overall, 0)
+ self._check_simple_tag_empty(tree, directory=EXTERNALS_NAME)
+ self._check_simple_branch_empty(tree, directory=EXTERNALS_NAME)
+ self._check_simple_tag_empty(tree, directory=SUB_EXTERNALS_PATH)
+ self._check_simple_branch_empty(tree, directory=SUB_EXTERNALS_PATH)
+
+ def _check_mixed_sub_simple_required_checkout(self, overall, tree):
+ # Note, this is the internal tree status just before checkout
+ self.assertEqual(overall, 0)
+ self._check_simple_tag_empty(tree, directory=EXTERNALS_NAME)
+ self._check_simple_branch_empty(tree, directory=EXTERNALS_NAME)
+ self._check_simple_tag_empty(tree, directory=SUB_EXTERNALS_PATH)
+ self._check_simple_branch_empty(tree, directory=SUB_EXTERNALS_PATH)
+
+ def _check_mixed_sub_simple_required_post_checkout(self, overall, tree):
+ # Note, this is the internal tree status just before checkout
+ self.assertEqual(overall, 0)
+ self._check_simple_tag_ok(tree, directory=EXTERNALS_NAME)
+ self._check_simple_branch_ok(tree, directory=EXTERNALS_NAME)
+ self._check_simple_tag_ok(tree, directory=SUB_EXTERNALS_PATH)
+ self._check_simple_branch_ok(tree, directory=SUB_EXTERNALS_PATH)
+
+
+class TestSysCheckout(BaseTestSysCheckout):
+ """Run systems level tests of checkout_externals
+
+ """
+ # NOTE(bja, 2017-11) pylint complains about long method names, but
+ # it is hard to differentiate tests without making them more
+ # cryptic.
+ # pylint: disable=invalid-name
+
+ # ----------------------------------------------------------------
+ #
+ # Run systems tests
+ #
+ # ----------------------------------------------------------------
+ def test_container_simple_required(self):
+ """Verify that a container with simple subrepos
+ generates the correct initial status.
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+ # status of empty repo
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_required_pre_checkout(overall, tree)
+
+ # checkout
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_required_checkout(overall, tree)
+
+ # status clean checked out
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_required_post_checkout(overall, tree)
+
+ def test_container_simple_optional(self):
+ """Verify that container with an optional simple subrepos
+ generates the correct initial status.
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_optional(under_test_dir)
+
+ # check status of empty repo
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_optional_pre_checkout(overall, tree)
+
+ # checkout required
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_optional_checkout(overall, tree)
+
+ # status
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_optional_post_checkout(overall, tree)
+
+ # checkout optional
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.optional_args)
+ self._check_container_simple_optional_post_checkout(overall, tree)
+
+ # status
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_optional_post_optional(overall, tree)
+
+ def test_container_simple_verbose(self):
+ """Verify that container with simple subrepos runs with verbose status
+ output and generates the correct initial status.
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+ # checkout
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_required_checkout(overall, tree)
+
+ # check verbose status
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.verbose_args)
+ self._check_container_simple_required_post_checkout(overall, tree)
+
+ def test_container_simple_dirty(self):
+ """Verify that a container with simple subrepos
+ and a dirty status exits gracefully.
+
+ """
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+ # checkout
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_required_checkout(overall, tree)
+
+ # add a file to the repo
+ tracked = True
+ self._add_file_to_repo(under_test_dir, 'externals/simp_tag/tmp.txt',
+ tracked)
+
+ # checkout: pre-checkout status should be dirty, did not
+ # modify working copy.
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_optional_st_dirty(overall, tree)
+
+ # verify status is still dirty
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_optional_st_dirty(overall, tree)
+
+ def test_container_simple_untracked(self):
+ """Verify that a container with simple subrepos and a untracked files
+ is not considered 'dirty' and will attempt an update.
+
+ """
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+ # checkout
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_required_checkout(overall, tree)
+
+ # add a file to the repo
+ tracked = False
+ self._add_file_to_repo(under_test_dir, 'externals/simp_tag/tmp.txt',
+ tracked)
+
+ # checkout: pre-checkout status should be clean, ignoring the
+ # untracked file.
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_required_post_checkout(overall, tree)
+
+ # verify status is still clean
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_required_post_checkout(overall, tree)
+
+ def test_container_remote_branch(self):
+ """Verify that a container with remote branch change works
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+ # checkout
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_required_checkout(overall, tree)
+
+ # update the config file to point to a different remote with
+ # the same branch
+ self._generator.update_branch(under_test_dir, 'simp_branch',
+ 'feature2', SIMPLE_FORK_NAME)
+
+ # status of simp_branch should be out of sync
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_required_sb_modified(overall, tree)
+
+ # checkout new externals
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_required_sb_modified(overall, tree)
+
+ # status should be synced
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_required_post_checkout(overall, tree)
+
+ def test_container_remote_tag_same_branch(self):
+ """Verify that a container with remote tag change works. The new tag
+ should not be in the original repo, only the new remote
+ fork. The new tag is automatically fetched because it is on
+ the branch.
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+ # checkout
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_required_checkout(overall, tree)
+
+ # update the config file to point to a different remote with
+ # the tag instead of branch. Tag MUST NOT be in the original
+ # repo!
+ self._generator.update_tag(under_test_dir, 'simp_branch',
+ 'forked-feature-v1', SIMPLE_FORK_NAME)
+
+ # status of simp_branch should be out of sync
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_required_sb_modified(overall, tree)
+
+ # checkout new externals
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_required_sb_modified(overall, tree)
+
+ # status should be synced
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_required_post_checkout(overall, tree)
+
+ def test_container_remote_tag_fetch_all(self):
+ """Verify that a container with remote tag change works. The new tag
+ should not be in the original repo, only the new remote
+ fork. It should also not be on a branch that will be fetched,
+ and therefore not fetched by default with 'git fetch'. It will
+ only be retrieved by 'git fetch --tags'
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+ # checkout
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_required_checkout(overall, tree)
+
+ # update the config file to point to a different remote with
+ # the tag instead of branch. Tag MUST NOT be in the original
+ # repo!
+ self._generator.update_tag(under_test_dir, 'simp_branch',
+ 'abandoned-feature', SIMPLE_FORK_NAME)
+
+ # status of simp_branch should be out of sync
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_required_sb_modified(overall, tree)
+
+ # checkout new externals
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_required_sb_modified(overall, tree)
+
+ # status should be synced
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_required_post_checkout(overall, tree)
+
+ def test_container_preserve_dot(self):
+        """Verify that after initial checkout, modifying an external git repo
+ url to '.' and the current branch will leave it unchanged.
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+ # checkout
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_required_checkout(overall, tree)
+
+ # update the config file to point to a different remote with
+ # the same branch
+ self._generator.update_branch(under_test_dir, 'simp_branch',
+ 'feature2', SIMPLE_FORK_NAME)
+ # checkout
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+
+ # verify status is clean and unmodified
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_required_post_checkout(overall, tree)
+
+ # update branch to point to a new branch that only exists in
+ # the local fork
+ self._generator.create_branch(under_test_dir, 'simp_branch',
+ 'private-feature', with_commit=True)
+ self._generator.update_branch(under_test_dir, 'simp_branch',
+ 'private-feature',
+ SIMPLE_LOCAL_ONLY_NAME)
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+
+ # verify status is clean and unmodified
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_required_post_checkout(overall, tree)
+
+ @unittest.skip('test development inprogress')
+ def test_container_full(self):
+ """Verify that 'full' container with simple and mixed subrepos
+ generates the correct initial status.
+
+ """
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_full(under_test_dir)
+ overall, tree = self.execute_cmd_in_dir(
+ under_test_dir, self.status_args)
+ self.assertEqual(overall, 0)
+ overall, tree = self.execute_cmd_in_dir(
+ under_test_dir, self.checkout_args)
+ self.assertEqual(overall, 0)
+ overall, tree = self.execute_cmd_in_dir(
+ under_test_dir, self.status_args)
+ self.assertEqual(overall, 0)
+ _ = tree
+
+ def test_mixed_simple(self):
+ """Verify that a mixed use repo can serve as a 'full' container,
+        pulling in a set of externals and a separate set of sub-externals.
+
+ """
+ #import pdb; pdb.set_trace()
+ # create repository
+ under_test_dir = self.setup_test_repo(MIXED_REPO_NAME)
+ # create top level externals file
+ self._generator.mixed_simple_base(under_test_dir)
+ # create sub-externals file
+ self._sub_generator.mixed_simple_sub(under_test_dir)
+
+ # checkout
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_mixed_sub_simple_required_checkout(overall, tree)
+
+ # verify status is clean and unmodified
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_mixed_sub_simple_required_post_checkout(overall, tree)
+
+
+class TestSysCheckoutSVN(BaseTestSysCheckout):
+ """Run systems level tests of checkout_externals accessing svn repositories
+
+ SVN tests - these tests use the svn repository interface. Since
+ they require an active network connection, they are significantly
+ slower than the git tests. But svn testing is critical. So try to
+ design the tests to only test svn repository functionality
+ (checkout, switch) and leave generic testing of functionality like
+ 'optional' to the fast git tests.
+
+ Example timing as of 2017-11:
+
+ * All other git and unit tests combined take between 4-5 seconds
+
+ * Just checking if svn is available for a single test takes 2 seconds.
+
+ * The single svn test typically takes between 10 and 25 seconds
+ (depending on the network)!
+
+ NOTE(bja, 2017-11) To enable CI testing we can't use a real remote
+ repository that restricts access and it seems inappropriate to hit
+ a random open source repo. For now we are just hitting one of our
+ own github repos using the github svn server interface. This
+    should be "good enough" for basic checkout and switch
+ functionality. But if additional svn functionality is required, a
+ better solution will be necessary. I think eventually we want to
+ create a small local svn repository on the fly (doesn't require an
+ svn server or network connection!) and use it for testing.
+
+ """
+
+ def _check_svn_branch_ok(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/svn_branch'.format(directory)
+ self._check_generic_ok_clean_required(tree, name)
+
+ def _check_svn_branch_dirty(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/svn_branch'.format(directory)
+ self._check_generic_ok_dirty_required(tree, name)
+
+ def _check_svn_tag_ok(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/svn_tag'.format(directory)
+ self._check_generic_ok_clean_required(tree, name)
+
+ def _check_svn_tag_modified(self, tree, directory=EXTERNALS_NAME):
+ name = './{0}/svn_tag'.format(directory)
+ self._check_generic_modified_ok_required(tree, name)
+
+ def _check_container_simple_svn_post_checkout(self, overall, tree):
+ self.assertEqual(overall, 0)
+ self._check_simple_tag_ok(tree)
+ self._check_svn_branch_ok(tree)
+ self._check_svn_tag_ok(tree)
+
+ def _check_container_simple_svn_sb_dirty_st_mod(self, overall, tree):
+ self.assertEqual(overall, 0)
+ self._check_simple_tag_ok(tree)
+ self._check_svn_tag_modified(tree)
+ self._check_svn_branch_dirty(tree)
+
+ def _check_container_simple_svn_sb_clean_st_mod(self, overall, tree):
+ self.assertEqual(overall, 0)
+ self._check_simple_tag_ok(tree)
+ self._check_svn_tag_modified(tree)
+ self._check_svn_branch_ok(tree)
+
+ @staticmethod
+ def have_svn_access():
+ """Check if we have svn access so we can enable tests that use svn.
+
+ """
+ have_svn = False
+ cmd = ['svn', 'ls', SVN_TEST_REPO, ]
+ try:
+ execute_subprocess(cmd)
+ have_svn = True
+ except BaseException:
+ pass
+ return have_svn
+
+ def skip_if_no_svn_access(self):
+ """Function decorator to disable svn tests when svn isn't available
+ """
+ have_svn = self.have_svn_access()
+ if not have_svn:
+ raise unittest.SkipTest("No svn access")
+
+ def test_container_simple_svn(self):
+ """Verify that a container repo can pull in an svn branch and svn tag.
+
+ """
+ self.skip_if_no_svn_access()
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_svn(under_test_dir)
+
+ # checkout
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+
+ # verify status is clean and unmodified
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_svn_post_checkout(overall, tree)
+
+ # update description file to make the tag into a branch and
+ # trigger a switch
+ self._generator.update_svn_branch(under_test_dir, 'svn_tag', 'trunk')
+
+ # checkout
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+
+ # verify status is clean and unmodified
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.status_args)
+ self._check_container_simple_svn_post_checkout(overall, tree)
+
+ # add an untracked file to the repo
+ tracked = False
+ self._add_file_to_repo(under_test_dir,
+ 'externals/svn_branch/tmp.txt', tracked)
+
+ # run a no-op checkout: pre-checkout status should be clean,
+ # ignoring the untracked file.
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_svn_post_checkout(overall, tree)
+
+ # update description file to make the branch into a tag and
+ # trigger a modified sync status
+ self._generator.update_svn_branch(under_test_dir, 'svn_tag',
+ 'tags/cesm2.0.beta07')
+
+ # checkout: pre-checkout status should be clean and modified,
+ # will modify working copy.
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.checkout_args)
+ self._check_container_simple_svn_sb_clean_st_mod(overall, tree)
+
+ # verify status is still clean and unmodified, last
+ # checkout modified the working dir state.
+ overall, tree = self.execute_cmd_in_dir(under_test_dir,
+ self.verbose_args)
+ self._check_container_simple_svn_post_checkout(overall, tree)
+
+
+class TestSysCheckoutErrors(BaseTestSysCheckout):
+ """Run systems level tests of error conditions in checkout_externals
+
+ Error conditions - these tests are designed to trigger specific
+ error conditions and ensure that they are being handled as
+    runtime errors (and hopefully useful error messages) instead of
+ the default internal message that won't mean anything to the
+ user, e.g. key error, called process error, etc.
+
+ These are not 'expected failures'. They are pass when a
+ RuntimeError is raised, fail if any other error is raised (or no
+ error is raised).
+
+ """
+
+ # NOTE(bja, 2017-11) pylint complains about long method names, but
+ # it is hard to differentiate tests without making them more
+ # cryptic.
+ # pylint: disable=invalid-name
+
+ def test_error_unknown_protocol(self):
+ """Verify that a runtime error is raised when the user specified repo
+ protocol is not known.
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+        # update the config file so that simp_branch uses a
+        # repository protocol that does not exist; checkout should
+        # fail with a RuntimeError.
+ self._generator.update_protocol(under_test_dir, 'simp_branch',
+ 'this-protocol-does-not-exist')
+
+ with self.assertRaises(RuntimeError):
+ self.execute_cmd_in_dir(under_test_dir, self.checkout_args)
+
+ def test_error_switch_protocol(self):
+ """Verify that a runtime error is raised when the user switches
+ protocols, git to svn.
+
+ TODO(bja, 2017-11) This correctly results in an error, but it
+ isn't a helpful error message.
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+        # update the config file to switch the simp_branch protocol
+        # from git to svn while the working copy is still a git
+        # clone; checkout should fail with a RuntimeError.
+ self._generator.update_protocol(under_test_dir, 'simp_branch', 'svn')
+ with self.assertRaises(RuntimeError):
+ self.execute_cmd_in_dir(under_test_dir, self.checkout_args)
+
+ def test_error_unknown_tag(self):
+ """Verify that a runtime error is raised when the user specified tag
+ does not exist.
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+ # update the config file to point to a different remote with
+ # the tag instead of branch. Tag MUST NOT be in the original
+ # repo!
+ self._generator.update_tag(under_test_dir, 'simp_branch',
+ 'this-tag-does-not-exist', SIMPLE_REPO_NAME)
+
+ with self.assertRaises(RuntimeError):
+ self.execute_cmd_in_dir(under_test_dir, self.checkout_args)
+
+ def test_error_overspecify_tag_branch(self):
+ """Verify that a runtime error is raised when the user specified both
+ tag and a branch
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+        # update the config file so that simp_branch specifies both
+        # a tag and a branch (remove_branch=False keeps the branch
+        # entry); checkout should fail with a RuntimeError.
+ self._generator.update_tag(under_test_dir, 'simp_branch',
+ 'this-tag-does-not-exist', SIMPLE_REPO_NAME,
+ remove_branch=False)
+
+ with self.assertRaises(RuntimeError):
+ self.execute_cmd_in_dir(under_test_dir, self.checkout_args)
+
+ def test_error_underspecify_tag_branch(self):
+ """Verify that a runtime error is raised when the user specified
+        neither a tag nor a branch
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+        # update the config file so that simp_branch specifies
+        # neither a tag nor a branch; checkout should fail with a
+        # RuntimeError.
+ self._generator.update_underspecify_branch_tag(under_test_dir,
+ 'simp_branch')
+
+ with self.assertRaises(RuntimeError):
+ self.execute_cmd_in_dir(under_test_dir, self.checkout_args)
+
+ def test_error_missing_url(self):
+        """Verify that a runtime error is raised when the repo url is
+        missing from an external's description
+
+ """
+ # create repo
+ under_test_dir = self.setup_test_repo(CONTAINER_REPO_NAME)
+ self._generator.container_simple_required(under_test_dir)
+
+        # update the config file to remove the repo_url entry for
+        # simp_branch; checkout should fail with a RuntimeError
+        # rather than an internal key error.
+ self._generator.update_underspecify_remove_url(under_test_dir,
+ 'simp_branch')
+
+ with self.assertRaises(RuntimeError):
+ self.execute_cmd_in_dir(under_test_dir, self.checkout_args)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/manage_externals/test/test_unit_externals_description.py b/manage_externals/test/test_unit_externals_description.py
new file mode 100644
index 0000000000..1fb43b6797
--- /dev/null
+++ b/manage_externals/test/test_unit_externals_description.py
@@ -0,0 +1,343 @@
+#!/usr/bin/env python
+
+"""Unit test driver for checkout_externals
+
+Note: this script assumes the path to the checkout_externals.py module is
+already in the python path.
+
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import os
+import os.path
+import shutil
+import unittest
+
+try:
+ # python2
+ from ConfigParser import SafeConfigParser as config_parser
+
+ def config_string_cleaner(text):
+ """convert strings into unicode
+ """
+ return text.decode('utf-8')
+except ImportError:
+ # python3
+ from configparser import ConfigParser as config_parser
+
+ def config_string_cleaner(text):
+ """Python3 already uses unicode strings, so just return the string
+ without modification.
+
+ """
+ return text
+
+from manic.externals_description import DESCRIPTION_SECTION, VERSION_ITEM
+from manic.externals_description import ExternalsDescription
+from manic.externals_description import ExternalsDescriptionDict
+from manic.externals_description import ExternalsDescriptionConfigV1
+from manic.externals_description import get_cfg_schema_version
+from manic.externals_description import read_externals_description_file
+from manic.externals_description import create_externals_description
+
+from manic.global_constants import EMPTY_STR
+
+
+class TestCfgSchemaVersion(unittest.TestCase):
+ """Test that schema identification for the externals description
+ returns the correct results.
+
+ """
+
+ def setUp(self):
+ """Reusable config object
+ """
+ self._config = config_parser()
+ self._config.add_section('section1')
+ self._config.set('section1', 'keword', 'value')
+
+ self._config.add_section(DESCRIPTION_SECTION)
+
+ def test_schema_version_valid(self):
+ """Test that schema identification returns the correct version for a
+ valid tag.
+
+ """
+ version_str = '2.1.3'
+ self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, version_str)
+ major, minor, patch = get_cfg_schema_version(self._config)
+ expected_major = 2
+ expected_minor = 1
+ expected_patch = 3
+ self.assertEqual(expected_major, major)
+ self.assertEqual(expected_minor, minor)
+ self.assertEqual(expected_patch, patch)
+
+ def test_schema_section_missing(self):
+ """Test that an error is returned if the schema section is missing
+ from the input file.
+
+ """
+ self._config.remove_section(DESCRIPTION_SECTION)
+ with self.assertRaises(RuntimeError):
+ get_cfg_schema_version(self._config)
+
+ def test_schema_version_missing(self):
+ """Test that a externals description file without a version raises a
+ runtime error.
+
+ """
+ # Note: the default setup method shouldn't include a version
+ # keyword, but remove it just to be future proof....
+ self._config.remove_option(DESCRIPTION_SECTION, VERSION_ITEM)
+ with self.assertRaises(RuntimeError):
+ get_cfg_schema_version(self._config)
+
+ def test_schema_version_not_int(self):
+ """Test that a externals description file a version that doesn't
+ decompose to integer major, minor and patch versions raises
+ runtime error.
+
+ """
+ self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, 'unknown')
+ with self.assertRaises(RuntimeError):
+ get_cfg_schema_version(self._config)
+
+
+class TestModelDescritionConfigV1(unittest.TestCase):
+    """Test that parsing a config/ini file produces a correct dictionary
+ for the externals description.
+
+ """
+ # pylint: disable=R0902
+
+ def setUp(self):
+        """Boiler plate construction of config data for multiple components.
+ """
+ self._comp1_name = 'comp1'
+ self._comp1_path = 'path/to/comp1'
+ self._comp1_protocol = 'svn'
+ self._comp1_url = 'https://svn.somewhere.com/path/of/comp1'
+ self._comp1_tag = 'a_nice_tag_v1'
+ self._comp1_branch = ''
+ self._comp1_is_required = 'True'
+ self._comp1_externals = ''
+
+ self._comp2_name = 'comp2'
+ self._comp2_path = 'path/to/comp2'
+ self._comp2_protocol = 'git'
+ self._comp2_url = '/local/clone/of/comp2'
+ self._comp2_tag = ''
+ self._comp2_branch = 'a_very_nice_branch'
+ self._comp2_is_required = 'False'
+ self._comp2_externals = 'path/to/comp2.cfg'
+
+ def _setup_comp1(self, config):
+        """Boiler plate construction of config data for component 1
+ """
+ config.add_section(self._comp1_name)
+ config.set(self._comp1_name, 'local_path', self._comp1_path)
+ config.set(self._comp1_name, 'protocol', self._comp1_protocol)
+ config.set(self._comp1_name, 'repo_url', self._comp1_url)
+ config.set(self._comp1_name, 'tag', self._comp1_tag)
+ config.set(self._comp1_name, 'required', self._comp1_is_required)
+
+ def _setup_comp2(self, config):
+        """Boiler plate construction of config data for component 2
+ """
+ config.add_section(self._comp2_name)
+ config.set(self._comp2_name, 'local_path', self._comp2_path)
+ config.set(self._comp2_name, 'protocol', self._comp2_protocol)
+ config.set(self._comp2_name, 'repo_url', self._comp2_url)
+ config.set(self._comp2_name, 'branch', self._comp2_branch)
+ config.set(self._comp2_name, 'required', self._comp2_is_required)
+ config.set(self._comp2_name, 'externals', self._comp2_externals)
+
+ def _check_comp1(self, model):
+ """Test that component one was constructed correctly.
+ """
+ self.assertTrue(self._comp1_name in model)
+ comp1 = model[self._comp1_name]
+ self.assertEqual(comp1[ExternalsDescription.PATH], self._comp1_path)
+ self.assertTrue(comp1[ExternalsDescription.REQUIRED])
+ repo = comp1[ExternalsDescription.REPO]
+ self.assertEqual(repo[ExternalsDescription.PROTOCOL],
+ self._comp1_protocol)
+ self.assertEqual(repo[ExternalsDescription.REPO_URL], self._comp1_url)
+ self.assertEqual(repo[ExternalsDescription.TAG], self._comp1_tag)
+ self.assertEqual(EMPTY_STR, comp1[ExternalsDescription.EXTERNALS])
+
+ def _check_comp2(self, model):
+        """Test that component two was constructed correctly.
+ """
+ self.assertTrue(self._comp2_name in model)
+ comp2 = model[self._comp2_name]
+ self.assertEqual(comp2[ExternalsDescription.PATH], self._comp2_path)
+ self.assertFalse(comp2[ExternalsDescription.REQUIRED])
+ repo = comp2[ExternalsDescription.REPO]
+ self.assertEqual(repo[ExternalsDescription.PROTOCOL],
+ self._comp2_protocol)
+ self.assertEqual(repo[ExternalsDescription.REPO_URL], self._comp2_url)
+ self.assertEqual(repo[ExternalsDescription.BRANCH], self._comp2_branch)
+ self.assertEqual(self._comp2_externals,
+ comp2[ExternalsDescription.EXTERNALS])
+
+ def test_one_tag_required(self):
+ """Test that a component source with a tag is correctly parsed.
+ """
+ config = config_parser()
+ self._setup_comp1(config)
+ model = ExternalsDescriptionConfigV1(config)
+ print(model)
+ self._check_comp1(model)
+
+ def test_one_branch_externals(self):
+ """Test that a component source with a branch is correctly parsed.
+ """
+ config = config_parser()
+ self._setup_comp2(config)
+ model = ExternalsDescriptionConfigV1(config)
+ print(model)
+ self._check_comp2(model)
+
+ def test_two_sources(self):
+ """Test that multiple component sources are correctly parsed.
+ """
+ config = config_parser()
+ self._setup_comp1(config)
+ self._setup_comp2(config)
+ model = ExternalsDescriptionConfigV1(config)
+ print(model)
+ self._check_comp1(model)
+ self._check_comp2(model)
+
+
+class TestReadExternalsDescription(unittest.TestCase):
+ """Test the application logic of read_externals_description_file
+ """
+ TMP_FAKE_DIR = 'fake'
+
+ def setUp(self):
+ """Setup directory for tests
+ """
+ if not os.path.exists(self.TMP_FAKE_DIR):
+ os.makedirs(self.TMP_FAKE_DIR)
+
+ def tearDown(self):
+ """Cleanup tmp stuff on the file system
+ """
+ if os.path.exists(self.TMP_FAKE_DIR):
+ shutil.rmtree(self.TMP_FAKE_DIR)
+
+ def test_no_file_error(self):
+ """Test that a runtime error is raised when the file does not exist
+
+ """
+ root_dir = os.getcwd()
+ filename = 'this-file-should-not-exist'
+ with self.assertRaises(RuntimeError):
+ read_externals_description_file(root_dir, filename)
+
+ def test_no_dir_error(self):
+ """Test that a runtime error is raised when the file does not exist
+
+ """
+ root_dir = '/path/to/some/repo'
+ filename = 'externals.cfg'
+ with self.assertRaises(RuntimeError):
+ read_externals_description_file(root_dir, filename)
+
+ def test_no_invalid_error(self):
+ """Test that a runtime error is raised when the file format is invalid
+
+ """
+ root_dir = os.getcwd()
+ filename = 'externals.cfg'
+ file_path = os.path.join(root_dir, filename)
+ file_path = os.path.abspath(file_path)
+ contents = """
+
+invalid file format
+"""
+ with open(file_path, 'w') as fhandle:
+ fhandle.write(contents)
+ with self.assertRaises(RuntimeError):
+ read_externals_description_file(root_dir, filename)
+ os.remove(file_path)
+
+
+class TestCreateExternalsDescription(unittest.TestCase):
+    """Test the application logic of create_externals_description
+ """
+
+ def setUp(self):
+ """Create config object used as basis for all tests
+ """
+ self._config = config_parser()
+ self.setup_config()
+
+ def setup_config(self):
+        """Boiler plate construction of config data for the test component
+ """
+ name = 'test'
+ self._config.add_section(name)
+ self._config.set(name, ExternalsDescription.PATH, 'externals')
+ self._config.set(name, ExternalsDescription.PROTOCOL, 'git')
+ self._config.set(name, ExternalsDescription.REPO_URL, '/path/to/repo')
+ self._config.set(name, ExternalsDescription.TAG, 'test_tag')
+ self._config.set(name, ExternalsDescription.REQUIRED, 'True')
+
+ self._config.add_section(DESCRIPTION_SECTION)
+ self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.0')
+
+ def test_cfg_v1(self):
+ """Test that a correct cfg v1 object is created by create_externals_description
+
+ """
+ self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.2.3')
+ ext = create_externals_description(self._config, model_format='cfg')
+ self.assertIsInstance(ext, ExternalsDescriptionConfigV1)
+
+ def test_dict(self):
+ """Test that a correct cfg v1 object is created by create_externals_description
+
+ """
+ rdata = {ExternalsDescription.PROTOCOL: 'git',
+ ExternalsDescription.REPO_URL: '/path/to/repo',
+ ExternalsDescription.TAG: 'tagv1',
+ ExternalsDescription.BRANCH: EMPTY_STR, }
+
+ desc = {
+ 'test': {
+ ExternalsDescription.REQUIRED: False,
+ ExternalsDescription.PATH: '../fake',
+ ExternalsDescription.EXTERNALS: EMPTY_STR,
+ ExternalsDescription.REPO: rdata, },
+ }
+
+ ext = create_externals_description(desc, model_format='dict')
+ self.assertIsInstance(ext, ExternalsDescriptionDict)
+
+ def test_cfg_unknown_version(self):
+ """Test that a runtime error is raised when an unknown file version is
+ received
+
+ """
+ self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '123.456.789')
+ with self.assertRaises(RuntimeError):
+ create_externals_description(self._config, model_format='cfg')
+
+ def test_cfg_unknown_format(self):
+ """Test that a runtime error is raised when an unknown format string is
+ received
+
+ """
+ with self.assertRaises(RuntimeError):
+ create_externals_description(self._config, model_format='unknown')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/manage_externals/test/test_unit_externals_status.py b/manage_externals/test/test_unit_externals_status.py
new file mode 100644
index 0000000000..f8e953f756
--- /dev/null
+++ b/manage_externals/test/test_unit_externals_status.py
@@ -0,0 +1,299 @@
+#!/usr/bin/env python
+
+"""Unit test driver for the manic external status reporting module.
+
+Note: this script assumes the path to the manic package is already in
+the python path.
+
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import unittest
+
+from manic.externals_status import ExternalStatus
+
+
+class TestStatusObject(unittest.TestCase):
+    """Verify that the Status object behaves as expected.
+ """
+
+ def test_exists_empty_all(self):
+ """If the repository sync-state is empty (doesn't exist), and there is no
+ clean state, then it is considered not to exist.
+
+ """
+ stat = ExternalStatus()
+ stat.sync_state = ExternalStatus.EMPTY
+ stat.clean_state = ExternalStatus.DEFAULT
+ exists = stat.exists()
+ self.assertFalse(exists)
+
+ stat.clean_state = ExternalStatus.EMPTY
+ exists = stat.exists()
+ self.assertFalse(exists)
+
+ stat.clean_state = ExternalStatus.UNKNOWN
+ exists = stat.exists()
+ self.assertFalse(exists)
+
+        # this state represents an internal logic error in how the
+ # repo status was determined.
+ stat.clean_state = ExternalStatus.STATUS_OK
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+        # this state represents an internal logic error in how the
+ # repo status was determined.
+ stat.clean_state = ExternalStatus.DIRTY
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ def test_exists_default_all(self):
+ """If the repository sync-state is default, then it is considered to exist
+ regardless of clean state.
+
+ """
+ stat = ExternalStatus()
+ stat.sync_state = ExternalStatus.DEFAULT
+ stat.clean_state = ExternalStatus.DEFAULT
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.EMPTY
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.UNKNOWN
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.STATUS_OK
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.DIRTY
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ def test_exists_unknown_all(self):
+ """If the repository sync-state is unknown, then it is considered to exist
+ regardless of clean state.
+
+ """
+ stat = ExternalStatus()
+ stat.sync_state = ExternalStatus.UNKNOWN
+ stat.clean_state = ExternalStatus.DEFAULT
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.EMPTY
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.UNKNOWN
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.STATUS_OK
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.DIRTY
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ def test_exists_modified_all(self):
+ """If the repository sync-state is modified, then it is considered to exist
+ regardless of clean state.
+
+ """
+ stat = ExternalStatus()
+ stat.sync_state = ExternalStatus.MODEL_MODIFIED
+ stat.clean_state = ExternalStatus.DEFAULT
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.EMPTY
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.UNKNOWN
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.STATUS_OK
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.DIRTY
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ def test_exists_ok_all(self):
+ """If the repository sync-state is ok, then it is considered to exist
+ regardless of clean state.
+
+ """
+ stat = ExternalStatus()
+ stat.sync_state = ExternalStatus.STATUS_OK
+ stat.clean_state = ExternalStatus.DEFAULT
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.EMPTY
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.UNKNOWN
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.STATUS_OK
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ stat.clean_state = ExternalStatus.DIRTY
+ exists = stat.exists()
+ self.assertTrue(exists)
+
+ def test_update_ok_all(self):
+        """If the repository sync state is ok, then it is safe to
+ update only if clean state is ok
+
+ """
+ stat = ExternalStatus()
+ stat.sync_state = ExternalStatus.STATUS_OK
+ stat.clean_state = ExternalStatus.DEFAULT
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+ stat.clean_state = ExternalStatus.EMPTY
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+ stat.clean_state = ExternalStatus.UNKNOWN
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+ stat.clean_state = ExternalStatus.STATUS_OK
+ safe_to_update = stat.safe_to_update()
+ self.assertTrue(safe_to_update)
+
+ stat.clean_state = ExternalStatus.DIRTY
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+ def test_update_modified_all(self):
+        """If the repository sync state is modified, then it is safe to
+ update only if clean state is ok
+
+ """
+ stat = ExternalStatus()
+ stat.sync_state = ExternalStatus.MODEL_MODIFIED
+ stat.clean_state = ExternalStatus.DEFAULT
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+ stat.clean_state = ExternalStatus.EMPTY
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+ stat.clean_state = ExternalStatus.UNKNOWN
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+ stat.clean_state = ExternalStatus.STATUS_OK
+ safe_to_update = stat.safe_to_update()
+ self.assertTrue(safe_to_update)
+
+ stat.clean_state = ExternalStatus.DIRTY
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+ def test_update_unknown_all(self):
+        """If the repository sync state is unknown, then it is not safe to
+ update, regardless of the clean state.
+
+ """
+ stat = ExternalStatus()
+ stat.sync_state = ExternalStatus.UNKNOWN
+ stat.clean_state = ExternalStatus.DEFAULT
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+ stat.clean_state = ExternalStatus.EMPTY
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+ stat.clean_state = ExternalStatus.UNKNOWN
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+ stat.clean_state = ExternalStatus.STATUS_OK
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+ stat.clean_state = ExternalStatus.DIRTY
+ safe_to_update = stat.safe_to_update()
+ self.assertFalse(safe_to_update)
+
+    def test_update_default_all(self):
+        """If the repository sync state is default, then it is not safe to
+        update, regardless of the clean state.
+
+        """
+        stat = ExternalStatus()
+        stat.sync_state = ExternalStatus.DEFAULT
+        stat.clean_state = ExternalStatus.DEFAULT
+        safe_to_update = stat.safe_to_update()
+        self.assertFalse(safe_to_update)
+
+        stat.clean_state = ExternalStatus.EMPTY
+        safe_to_update = stat.safe_to_update()
+        self.assertFalse(safe_to_update)
+
+        stat.clean_state = ExternalStatus.UNKNOWN
+        safe_to_update = stat.safe_to_update()
+        self.assertFalse(safe_to_update)
+
+        stat.clean_state = ExternalStatus.STATUS_OK
+        safe_to_update = stat.safe_to_update()
+        self.assertFalse(safe_to_update)
+
+        stat.clean_state = ExternalStatus.DIRTY
+        safe_to_update = stat.safe_to_update()
+        self.assertFalse(safe_to_update)
+
+    def test_update_empty_all(self):
+        """If the repository sync state is empty, then it is not safe to
+        update, regardless of the clean state.
+
+        """
+        stat = ExternalStatus()
+        stat.sync_state = ExternalStatus.EMPTY
+        stat.clean_state = ExternalStatus.DEFAULT
+        safe_to_update = stat.safe_to_update()
+        self.assertFalse(safe_to_update)
+
+        stat.clean_state = ExternalStatus.EMPTY
+        safe_to_update = stat.safe_to_update()
+        self.assertFalse(safe_to_update)
+
+        stat.clean_state = ExternalStatus.UNKNOWN
+        safe_to_update = stat.safe_to_update()
+        self.assertFalse(safe_to_update)
+
+        stat.clean_state = ExternalStatus.STATUS_OK
+        safe_to_update = stat.safe_to_update()
+        self.assertFalse(safe_to_update)
+
+        stat.clean_state = ExternalStatus.DIRTY
+        safe_to_update = stat.safe_to_update()
+        self.assertFalse(safe_to_update)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/manage_externals/test/test_unit_repository.py b/manage_externals/test/test_unit_repository.py
new file mode 100644
index 0000000000..095dacb102
--- /dev/null
+++ b/manage_externals/test/test_unit_repository.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+
+"""Unit test driver for checkout_externals
+
+Note: this script assumes the path to the checkout_externals.py module is
+already in the python path.
+
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import unittest
+
+from manic.repository_factory import create_repository
+from manic.repository_git import GitRepository
+from manic.repository_svn import SvnRepository
+from manic.repository import Repository
+from manic.externals_description import ExternalsDescription
+from manic.global_constants import EMPTY_STR
+
+
+class TestCreateRepositoryDict(unittest.TestCase):
+ """Test the create_repository functionality to ensure it returns the
+    propper type of repository and errors for unknown repository
+ types.
+
+ """
+
+ def setUp(self):
+ """Common data needed for all tests in this class
+ """
+ self._name = 'test_name'
+ self._repo = {ExternalsDescription.PROTOCOL: None,
+ ExternalsDescription.REPO_URL: 'junk_root',
+ ExternalsDescription.TAG: 'junk_tag',
+ ExternalsDescription.BRANCH: EMPTY_STR, }
+
+ def test_create_repo_git(self):
+ """Verify that several possible names for the 'git' protocol
+ create git repository objects.
+
+ """
+ protocols = ['git', 'GIT', 'Git', ]
+ for protocol in protocols:
+ self._repo[ExternalsDescription.PROTOCOL] = protocol
+ repo = create_repository(self._name, self._repo)
+ self.assertIsInstance(repo, GitRepository)
+
+ def test_create_repo_svn(self):
+ """Verify that several possible names for the 'svn' protocol
+ create svn repository objects.
+ """
+ protocols = ['svn', 'SVN', 'Svn', ]
+ for protocol in protocols:
+ self._repo[ExternalsDescription.PROTOCOL] = protocol
+ repo = create_repository(self._name, self._repo)
+ self.assertIsInstance(repo, SvnRepository)
+
+ def test_create_repo_externals_only(self):
+ """Verify that an externals only repo returns None.
+ """
+ protocols = ['externals_only', ]
+ for protocol in protocols:
+ self._repo[ExternalsDescription.PROTOCOL] = protocol
+ repo = create_repository(self._name, self._repo)
+ self.assertEqual(None, repo)
+
+ def test_create_repo_unsupported(self):
+ """Verify that an unsupported protocol generates a runtime error.
+ """
+ protocols = ['not_a_supported_protocol', ]
+ for protocol in protocols:
+ self._repo[ExternalsDescription.PROTOCOL] = protocol
+ with self.assertRaises(RuntimeError):
+ create_repository(self._name, self._repo)
+
+
+class TestRepository(unittest.TestCase):
+ """Test the externals description processing used to create the Repository
+ base class shared by protocol specific repository classes.
+
+ """
+
+ def test_tag(self):
+ """Test creation of a repository object with a tag
+ """
+ name = 'test_repo'
+ protocol = 'test_protocol'
+ url = 'test_url'
+ tag = 'test_tag'
+ repo_info = {ExternalsDescription.PROTOCOL: protocol,
+ ExternalsDescription.REPO_URL: url,
+ ExternalsDescription.TAG: tag,
+ ExternalsDescription.BRANCH: EMPTY_STR, }
+ repo = Repository(name, repo_info)
+ print(repo.__dict__)
+ self.assertEqual(repo.tag(), tag)
+ self.assertEqual(repo.url(), url)
+
+ def test_branch(self):
+ """Test creation of a repository object with a branch
+ """
+ name = 'test_repo'
+ protocol = 'test_protocol'
+ url = 'test_url'
+ branch = 'test_branch'
+ repo_info = {ExternalsDescription.PROTOCOL: protocol,
+ ExternalsDescription.REPO_URL: url,
+ ExternalsDescription.BRANCH: branch,
+ ExternalsDescription.TAG: EMPTY_STR, }
+ repo = Repository(name, repo_info)
+ print(repo.__dict__)
+ self.assertEqual(repo.branch(), branch)
+ self.assertEqual(repo.url(), url)
+
+ def test_tag_branch(self):
+ """Test creation of a repository object with a tag and branch raises a
+        runtime error.
+
+ """
+ name = 'test_repo'
+ protocol = 'test_protocol'
+ url = 'test_url'
+ branch = 'test_branch'
+ tag = 'test_tag'
+ repo_info = {ExternalsDescription.PROTOCOL: protocol,
+ ExternalsDescription.REPO_URL: url,
+ ExternalsDescription.BRANCH: branch,
+ ExternalsDescription.TAG: tag, }
+ with self.assertRaises(RuntimeError):
+ Repository(name, repo_info)
+
+ def test_no_tag_no_branch(self):
+ """Test creation of a repository object without a tag or branch raises a
+        runtime error.
+
+ """
+ name = 'test_repo'
+ protocol = 'test_protocol'
+ url = 'test_url'
+ branch = EMPTY_STR
+ tag = EMPTY_STR
+ repo_info = {ExternalsDescription.PROTOCOL: protocol,
+ ExternalsDescription.REPO_URL: url,
+ ExternalsDescription.BRANCH: branch,
+ ExternalsDescription.TAG: tag, }
+ with self.assertRaises(RuntimeError):
+ Repository(name, repo_info)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/manage_externals/test/test_unit_repository_git.py b/manage_externals/test/test_unit_repository_git.py
new file mode 100644
index 0000000000..44a1353b05
--- /dev/null
+++ b/manage_externals/test/test_unit_repository_git.py
@@ -0,0 +1,1014 @@
+#!/usr/bin/env python
+
+"""Unit test driver for checkout_externals
+
+Note: this script assumes the path to the checkout_externals.py module is
+already in the python path.
+
+"""
+# pylint: disable=too-many-lines,protected-access
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import os
+import shutil
+import string
+import unittest
+
+from manic.repository_git import GitRepository
+from manic.externals_status import ExternalStatus
+from manic.externals_description import ExternalsDescription
+from manic.externals_description import ExternalsDescriptionDict
+from manic.global_constants import EMPTY_STR
+
+# pylint: disable=C0103
+GIT_BRANCH_OUTPUT_DETACHED_BRANCH_v1_8 = '''
+* (detached from origin/feature2) 36418b4 Work on feature2
+ master 9b75494 [origin/master] Initialize repository.
+'''
+# pylint: enable=C0103
+
+
+GIT_BRANCH_OUTPUT_DETACHED_BRANCH = '''
+* (HEAD detached at origin/feature-2) 36418b4 Work on feature-2
+ feature-2 36418b4 [origin/feature-2] Work on feature-2
+ feature3 36418b4 Work on feature-2
+ master 9b75494 [origin/master] Initialize repository.
+'''
+
+GIT_BRANCH_OUTPUT_DETACHED_HASH = '''
+* (HEAD detached at 36418b4) 36418b4 Work on feature-2
+ feature-2 36418b4 [origin/feature-2] Work on feature-2
+ feature3 36418b4 Work on feature-2
+ master 9b75494 [origin/master] Initialize repository.
+'''
+
+GIT_BRANCH_OUTPUT_DETACHED_TAG = '''
+* (HEAD detached at tag1) 9b75494 Initialize repository.
+ feature-2 36418b4 [origin/feature-2] Work on feature-2
+ feature3 36418b4 Work on feature-2
+ master 9b75494 [origin/master] Initialize repository.
+'''
+
+GIT_BRANCH_OUTPUT_UNTRACKED_BRANCH = '''
+ feature-2 36418b4 [origin/feature-2] Work on feature-2
+* feature3 36418b4 Work on feature-2
+ master 9b75494 [origin/master] Initialize repository.
+'''
+
+GIT_BRANCH_OUTPUT_TRACKING_BRANCH = '''
+* feature-2 36418b4 [origin/feature-2] Work on feature-2
+ feature3 36418b4 Work on feature-2
+ master 9b75494 [origin/master] Initialize repository.
+'''
+
+# NOTE(bja, 2017-11) order is important here. origin should be a
+# subset of other to trap errors on processing remotes!
+GIT_REMOTE_OUTPUT_ORIGIN_UPSTREAM = '''
+upstream /path/to/other/repo (fetch)
+upstream /path/to/other/repo (push)
+other /path/to/local/repo2 (fetch)
+other /path/to/local/repo2 (push)
+origin /path/to/local/repo (fetch)
+origin /path/to/local/repo (push)
+'''
+
+
+class TestGitRepositoryCurrentRefBranch(unittest.TestCase):
+ """test the current_ref_from_branch_command on a git repository
+ """
+
+ def setUp(self):
+ self._name = 'component'
+ rdata = {ExternalsDescription.PROTOCOL: 'git',
+ ExternalsDescription.REPO_URL:
+ '/path/to/local/repo',
+ ExternalsDescription.TAG:
+ 'tag1',
+ ExternalsDescription.BRANCH: EMPTY_STR
+ }
+
+ data = {self._name:
+ {
+ ExternalsDescription.REQUIRED: False,
+ ExternalsDescription.PATH: 'junk',
+ ExternalsDescription.EXTERNALS: EMPTY_STR,
+ ExternalsDescription.REPO: rdata,
+ },
+ }
+
+ model = ExternalsDescriptionDict(data)
+ repo = model[self._name][ExternalsDescription.REPO]
+ self._repo = GitRepository('test', repo)
+
+ def test_ref_detached_from_tag(self):
+ """Test that we correctly identify that the ref is detached from a tag
+ """
+ git_output = GIT_BRANCH_OUTPUT_DETACHED_TAG
+ expected = self._repo.tag()
+ result = self._repo._current_ref_from_branch_command(
+ git_output)
+ self.assertEqual(result, expected)
+
+ def test_ref_detached_hash(self):
+ """Test that we can identify ref is detached from a hash
+
+ """
+ git_output = GIT_BRANCH_OUTPUT_DETACHED_HASH
+ expected = '36418b4'
+ result = self._repo._current_ref_from_branch_command(
+ git_output)
+ self.assertEqual(result, expected)
+
+ def test_ref_detached_branch(self):
+ """Test that we can identify ref is detached from a remote branch
+
+ """
+ git_output = GIT_BRANCH_OUTPUT_DETACHED_BRANCH
+ expected = 'origin/feature-2'
+ result = self._repo._current_ref_from_branch_command(
+ git_output)
+ self.assertEqual(result, expected)
+
+ def test_ref_detached_branch_v1_8(self):
+ """Test that we can identify ref is detached from a remote branch
+
+ """
+ git_output = GIT_BRANCH_OUTPUT_DETACHED_BRANCH_v1_8
+ expected = 'origin/feature2'
+ result = self._repo._current_ref_from_branch_command(
+ git_output)
+ self.assertEqual(result, expected)
+
+ def test_ref_tracking_branch(self):
+ """Test that we correctly identify we are on a tracking branch
+ """
+ git_output = GIT_BRANCH_OUTPUT_TRACKING_BRANCH
+ expected = 'origin/feature-2'
+ result = self._repo._current_ref_from_branch_command(
+ git_output)
+ self.assertEqual(result, expected)
+
+ def test_ref_untracked_branch(self):
+ """Test that we correctly identify we are on an untracked branch
+ """
+ git_output = GIT_BRANCH_OUTPUT_UNTRACKED_BRANCH
+ expected = 'feature3'
+ result = self._repo._current_ref_from_branch_command(
+ git_output)
+ self.assertEqual(result, expected)
+
+ def test_ref_none(self):
+ """Test that we can handle an empty string for output, e.g. not an git
+ repo.
+
+ """
+ git_output = EMPTY_STR
+ received = self._repo._current_ref_from_branch_command(
+ git_output)
+ self.assertEqual(received, EMPTY_STR)
+
+
+class TestGitRepositoryCheckSync(unittest.TestCase):
+ """Test whether the GitRepository _check_sync_logic functionality is
+ correct.
+
+ Note: there are a lot of combinations of state:
+
+ - external description - tag, branch
+
+ - working copy
+ - doesn't exist (not checked out)
+ - exists, no git info - incorrect protocol, e.g. svn, or tarball?
+ - exists, git info
+ - as expected:
+ - different from expected:
+ - detached tag,
+ - detached hash,
+ - detached branch (compare remote and branch),
+ - tracking branch (compare remote and branch),
+ - same remote
+ - different remote
+ - untracked branch
+
+ Test list:
+ - doesn't exist
+ - exists no git info
+
+ - num_external * (working copy expected + num_working copy different)
+ - total tests = 16
+
+ """
+
+ # NOTE(bja, 2017-11) pylint complains about long method names, but
+ # it is hard to differentiate tests without making them more
+ # cryptic. Also complains about too many public methods, but it
+ # doesn't really make sense to break this up.
+ # pylint: disable=invalid-name,too-many-public-methods
+
+ TMP_FAKE_DIR = 'fake'
+ TMP_FAKE_GIT_DIR = os.path.join(TMP_FAKE_DIR, '.git')
+
+ def setUp(self):
+ """Setup reusable git repository object
+ """
+ self._name = 'component'
+ rdata = {ExternalsDescription.PROTOCOL: 'git',
+ ExternalsDescription.REPO_URL:
+ '/path/to/local/repo',
+ ExternalsDescription.TAG: 'tag1',
+ ExternalsDescription.BRANCH: EMPTY_STR
+ }
+
+ data = {self._name:
+ {
+ ExternalsDescription.REQUIRED: False,
+ ExternalsDescription.PATH: self.TMP_FAKE_DIR,
+ ExternalsDescription.EXTERNALS: EMPTY_STR,
+ ExternalsDescription.REPO: rdata,
+ },
+ }
+
+ model = ExternalsDescriptionDict(data)
+ repo = model[self._name][ExternalsDescription.REPO]
+ self._repo = GitRepository('test', repo)
+ self._create_tmp_git_dir()
+
+ def tearDown(self):
+ """Cleanup tmp stuff on the file system
+ """
+ self._remove_tmp_git_dir()
+
+ def _create_tmp_git_dir(self):
+ """Create a temporary fake git directory for testing purposes.
+ """
+ if not os.path.exists(self.TMP_FAKE_GIT_DIR):
+ os.makedirs(self.TMP_FAKE_GIT_DIR)
+
+ def _remove_tmp_git_dir(self):
+ """Remove the temporary fake git directory
+ """
+ if os.path.exists(self.TMP_FAKE_DIR):
+ shutil.rmtree(self.TMP_FAKE_DIR)
+
+ #
+ # mock methods replacing git system calls
+ #
+ @staticmethod
+ def _git_branch_empty():
+ """Return an empty info string. Simulates git info failing.
+ """
+ return EMPTY_STR
+
+ @staticmethod
+ def _git_branch_detached_tag():
+ """Return an info sting that is a checkouted tag
+ """
+ return GIT_BRANCH_OUTPUT_DETACHED_TAG
+
+ @staticmethod
+ def _git_branch_detached_hash():
+ """Return an info string that is a checkout hash
+ """
+ return GIT_BRANCH_OUTPUT_DETACHED_HASH
+
+ @staticmethod
+ def _git_branch_detached_branch():
+ """Return an info string that is a checkout hash
+ """
+ return GIT_BRANCH_OUTPUT_DETACHED_BRANCH
+
+ @staticmethod
+ def _git_branch_untracked_branch():
+ """Return an info string that is a checkout branch
+ """
+ return GIT_BRANCH_OUTPUT_UNTRACKED_BRANCH
+
+ @staticmethod
+ def _git_branch_tracked_branch():
+ """Return an info string that is a checkout branch
+ """
+ return GIT_BRANCH_OUTPUT_TRACKING_BRANCH
+
+ @staticmethod
+ def _git_remote_origin_upstream():
+ """Return an info string that is a checkout hash
+ """
+ return GIT_REMOTE_OUTPUT_ORIGIN_UPSTREAM
+
+ @staticmethod
+ def _git_remote_none():
+ """Return an info string that is a checkout hash
+ """
+ return EMPTY_STR
+
+ # ----------------------------------------------------------------
+ #
+ # Tests where working copy doesn't exist or is invalid
+ #
+ # ----------------------------------------------------------------
+ def test_sync_dir_not_exist(self):
+ """Test that a directory that doesn't exist returns an error status
+
+ Note: the Repository classes should be prevented from ever
+ working on an empty directory by the _Source object.
+
+ """
+ stat = ExternalStatus()
+ self._repo._check_sync(stat, 'invalid_directory_name')
+ self.assertEqual(stat.sync_state, ExternalStatus.STATUS_ERROR)
+ # check_dir should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_dir_exist_no_git_info(self):
+ """Test that an empty info string returns an unknown status
+ """
+ stat = ExternalStatus()
+ # Now we over-ride the _git_branch method on the repo to return
+ # a known value without requiring access to git.
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._git_branch_vv = self._git_branch_empty
+ self._repo._check_sync(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.UNKNOWN)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ # ----------------------------------------------------------------
+ #
+ # Tests where external description specifies a tag
+ #
+ # Perturbations of working dir state: on detached
+ # {tag|branch|hash}, tracking branch, untracked branch.
+ #
+ # ----------------------------------------------------------------
+ def test_sync_tag_on_detached_tag(self):
+ """Test expect tag on detached tag --> status ok
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = ''
+ self._repo._tag = 'tag1'
+ self._repo._git_branch_vv = self._git_branch_detached_tag
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_tag_on_diff_tag(self):
+ """Test expect tag on diff tag --> status modified
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = ''
+ self._repo._tag = 'tag2'
+ self._repo._git_branch_vv = self._git_branch_detached_tag
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_tag_on_detached_hash(self):
+ """Test expect tag on detached hash --> status modified
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = ''
+ self._repo._tag = 'tag1'
+ self._repo._git_branch_vv = self._git_branch_detached_hash
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_tag_on_detached_branch(self):
+ """Test expect tag on detached branch --> status modified
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = ''
+ self._repo._tag = 'tag1'
+ self._repo._git_branch_vv = self._git_branch_detached_branch
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_tag_on_tracking_branch(self):
+ """Test expect tag on tracking branch --> status modified
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = ''
+ self._repo._tag = 'tag1'
+ self._repo._git_branch_vv = self._git_branch_tracked_branch
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_tag_on_untracked_branch(self):
+ """Test expect tag on untracked branch --> status modified
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = ''
+ self._repo._tag = 'tag1'
+ self._repo._git_branch_vv = self._git_branch_untracked_branch
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ # ----------------------------------------------------------------
+ #
+ # Tests where external description specifies a branch
+ #
+ # Perturbations of working dir state: on detached
+ # {tag|branch|hash}, tracking branch, untracked branch.
+ #
+ # ----------------------------------------------------------------
+ def test_sync_branch_on_detached_branch_same_remote(self):
+ """Test expect branch on detached branch with same remote --> status ok
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = 'feature-2'
+ self._repo._tag = ''
+ self._repo._git_branch_vv = self._git_branch_detached_branch
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_branch_on_detached_branch_diff_remote(self):
+ """Test expect branch on detached branch, different remote --> status modified
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = 'feature-2'
+ self._repo._tag = ''
+ self._repo._url = '/path/to/other/repo'
+ self._repo._git_branch_vv = self._git_branch_detached_branch
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_branch_on_detached_branch_diff_remote2(self):
+ """Test expect branch on detached branch, different remote --> status modified
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = 'feature-2'
+ self._repo._tag = ''
+ self._repo._url = '/path/to/local/repo2'
+ self._repo._git_branch_vv = self._git_branch_detached_branch
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_branch_on_diff_branch(self):
+ """Test expect branch on diff branch --> status modified
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = 'nice_new_feature'
+ self._repo._tag = ''
+ self._repo._git_branch_vv = self._git_branch_detached_branch
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_branch_on_detached_hash(self):
+ """Test expect branch on detached hash --> status modified
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = 'feature-2'
+ self._repo._tag = ''
+ self._repo._git_branch_vv = self._git_branch_detached_hash
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_branch_on_detached_tag(self):
+ """Test expect branch on detached tag --> status modified
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = 'feature-2'
+ self._repo._tag = ''
+ self._repo._git_branch_vv = self._git_branch_detached_tag
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_branch_on_tracking_branch_same_remote(self):
+ """Test expect branch on tracking branch with same remote --> status ok
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = 'feature-2'
+ self._repo._tag = ''
+ self._repo._git_branch_vv = self._git_branch_tracked_branch
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_branch_on_tracking_branch_diff_remote(self):
+ """Test expect branch on tracking branch with different remote-->
+ status modified
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = 'feature-2'
+ self._repo._tag = ''
+ self._repo._url = '/path/to/other/repo'
+ self._repo._git_branch_vv = self._git_branch_tracked_branch
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_branch_on_untracked_branch(self):
+ """Test expect branch on untracked branch --> status modified
+
+ NOTE(bja, 2017-11) the externals description url is always a
+ remote repository. A local untracked branch only exists
+ locally, therefore it is always a modified state, even if this
+ is what the user wants.
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = 'feature-2'
+ self._repo._tag = ''
+ self._repo._git_branch_vv = self._git_branch_untracked_branch
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_branch_on_unknown_remote(self):
+ """Test expect branch, but remote is unknown --> status modified
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = 'feature-2'
+ self._repo._tag = ''
+ self._repo._url = '/path/to/unknown/repo'
+ self._repo._git_branch_vv = self._git_branch_untracked_branch
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_sync_branch_on_untracked_local(self):
+ """Test expect branch, on untracked branch in local repo --> status ok
+
+ Setting the externals description to '.' indicates that the
+        user only wants to consider the current local repo state
+ without fetching from remotes. This is required to preserve
+ the current branch of a repository during an update.
+
+ NOTE(bja, 2017-11) the externals description is always a
+ remote repository. A local untracked branch only exists
+ locally, therefore it is always a modified state, even if this
+ is what the user wants.
+
+ """
+ stat = ExternalStatus()
+ self._repo._git_remote_verbose = self._git_remote_origin_upstream
+ self._repo._branch = 'feature3'
+ self._repo._tag = ''
+ self._repo._git_branch_vv = self._git_branch_untracked_branch
+ self._repo._url = '.'
+ self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR)
+ self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK)
+ # check_sync should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+
+class TestGitRegExp(unittest.TestCase):
+ """Test that the regular expressions in the GitRepository class
+ capture intended strings
+
+ """
+
+ def setUp(self):
+ """Common constans
+ """
+ self._detached_git_v2_tmpl = string.Template(
+ '* (HEAD detached at $ref) 36418b4 Work on feature-2')
+
+ self._detached_git_v1_tmpl = string.Template(
+ '* (detached from $ref) 36418b4 Work on feature-2')
+
+ self._tracking_tmpl = string.Template(
+ '* feature-2 36418b4 [$ref] Work on feature-2')
+
+ #
+ # RE_DETACHED
+ #
+ def test_re_detached_alphnum(self):
+ """Test re correctly matches alphnumeric (basic debugging)
+ """
+ value = 'feature2'
+ input_str = self._detached_git_v2_tmpl.substitute(ref=value)
+ match = GitRepository.RE_DETACHED.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+ input_str = self._detached_git_v1_tmpl.substitute(ref=value)
+ match = GitRepository.RE_DETACHED.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+
+ def test_re_detached_underscore(self):
+ """Test re matches with underscore
+ """
+ value = 'feature_2'
+ input_str = self._detached_git_v2_tmpl.substitute(ref=value)
+ match = GitRepository.RE_DETACHED.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+ input_str = self._detached_git_v1_tmpl.substitute(ref=value)
+ match = GitRepository.RE_DETACHED.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+
+ def test_re_detached_hyphen(self):
+ """Test re matches -
+ """
+ value = 'feature-2'
+ input_str = self._detached_git_v2_tmpl.substitute(ref=value)
+ match = GitRepository.RE_DETACHED.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+ input_str = self._detached_git_v1_tmpl.substitute(ref=value)
+ match = GitRepository.RE_DETACHED.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+
+ def test_re_detached_period(self):
+ """Test re matches .
+ """
+ value = 'feature.2'
+ input_str = self._detached_git_v2_tmpl.substitute(ref=value)
+ match = GitRepository.RE_DETACHED.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+ input_str = self._detached_git_v1_tmpl.substitute(ref=value)
+ match = GitRepository.RE_DETACHED.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+
+ def test_re_detached_slash(self):
+ """Test re matches /
+ """
+ value = 'feature/2'
+ input_str = self._detached_git_v2_tmpl.substitute(ref=value)
+ match = GitRepository.RE_DETACHED.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+ input_str = self._detached_git_v1_tmpl.substitute(ref=value)
+ match = GitRepository.RE_DETACHED.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+
+ #
+ # RE_TRACKING
+ #
+ def test_re_tracking_alphnum(self):
+ """Test re matches alphanumeric for basic debugging
+ """
+ value = 'feature2'
+ input_str = self._tracking_tmpl.substitute(ref=value)
+ match = GitRepository.RE_TRACKING.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+
+ def test_re_tracking_underscore(self):
+ """Test re matches _
+ """
+ value = 'feature_2'
+ input_str = self._tracking_tmpl.substitute(ref=value)
+ match = GitRepository.RE_TRACKING.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+
+ def test_re_tracking_hyphen(self):
+ """Test re matches -
+ """
+ value = 'feature-2'
+ input_str = self._tracking_tmpl.substitute(ref=value)
+ match = GitRepository.RE_TRACKING.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+
+ def test_re_tracking_period(self):
+ """Test re match .
+ """
+ value = 'feature.2'
+ input_str = self._tracking_tmpl.substitute(ref=value)
+ match = GitRepository.RE_TRACKING.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+
+ def test_re_tracking_slash(self):
+ """Test re matches /
+ """
+ value = 'feature/2'
+ input_str = self._tracking_tmpl.substitute(ref=value)
+ match = GitRepository.RE_TRACKING.search(input_str)
+ self.assertIsNotNone(match)
+ self.assertEqual(match.group(1), value)
+
+
+class TestGitStatusPorcelain(unittest.TestCase):
+ """Test parsing of output from git status --porcelain=v1 -z
+ """
+ # pylint: disable=C0103
+ GIT_STATUS_PORCELAIN_V1_ALL = (
+ r' D INSTALL\0MM Makefile\0M README.md\0R cmakelists.txt\0'
+ r'CMakeLists.txt\0D commit-message-template.txt\0A stuff.txt\0'
+ r'?? junk.txt')
+
+ GIT_STATUS_PORCELAIN_CLEAN = r''
+
+ def test_porcelain_status_dirty(self):
+ """Verify that git status output is considered dirty when there are
+ listed files.
+
+ """
+ git_output = self.GIT_STATUS_PORCELAIN_V1_ALL
+ is_dirty = GitRepository._status_v1z_is_dirty(git_output)
+ self.assertTrue(is_dirty)
+
+ def test_porcelain_status_clean(self):
+ """Verify that git status output is considered clean when there are no
+ listed files.
+
+ """
+ git_output = self.GIT_STATUS_PORCELAIN_CLEAN
+ is_dirty = GitRepository._status_v1z_is_dirty(git_output)
+ self.assertFalse(is_dirty)
+
+
+class TestGitCreateRemoteName(unittest.TestCase):
+ """Test the create_remote_name method on the GitRepository class
+ """
+
+ def setUp(self):
+ """Common infrastructure for testing _create_remote_name
+ """
+ self._rdata = {ExternalsDescription.PROTOCOL: 'git',
+ ExternalsDescription.REPO_URL:
+ 'empty',
+ ExternalsDescription.TAG:
+ 'very_useful_tag',
+ ExternalsDescription.BRANCH: EMPTY_STR, }
+ self._repo = GitRepository('test', self._rdata)
+
+ def test_remote_git_proto(self):
+ """Test remote with git protocol
+ """
+ self._repo._url = 'git@git.github.com:very_nice_org/useful_repo'
+ remote_name = self._repo._create_remote_name()
+ self.assertEqual(remote_name, 'very_nice_org_useful_repo')
+
+ def test_remote_https_proto(self):
+ """Test remote with git protocol
+ """
+ self._repo._url = 'https://www.github.com/very_nice_org/useful_repo'
+ remote_name = self._repo._create_remote_name()
+ self.assertEqual(remote_name, 'very_nice_org_useful_repo')
+
+ def test_remote_local_abs(self):
+ """Test remote with git protocol
+ """
+ self._repo._url = '/path/to/local/repositories/useful_repo'
+ remote_name = self._repo._create_remote_name()
+ self.assertEqual(remote_name, 'repositories_useful_repo')
+
+ def test_remote_local_rel(self):
+ """Test remote with git protocol
+ """
+ os.environ['TEST_VAR'] = '/my/path/to/repos'
+ self._repo._url = '${TEST_VAR}/../../useful_repo'
+ remote_name = self._repo._create_remote_name()
+ self.assertEqual(remote_name, 'path_useful_repo')
+ del os.environ['TEST_VAR']
+
+
+class TestVerifyTag(unittest.TestCase):
+ """Test logic verifying that a tag exists and is unique
+
+ """
+
+ def setUp(self):
+ """Setup reusable git repository object
+ """
+ self._name = 'component'
+ rdata = {ExternalsDescription.PROTOCOL: 'git',
+ ExternalsDescription.REPO_URL:
+ '/path/to/local/repo',
+ ExternalsDescription.TAG: 'tag1',
+ ExternalsDescription.BRANCH: EMPTY_STR
+ }
+
+ data = {self._name:
+ {
+ ExternalsDescription.REQUIRED: False,
+ ExternalsDescription.PATH: 'tmp',
+ ExternalsDescription.EXTERNALS: EMPTY_STR,
+ ExternalsDescription.REPO: rdata,
+ },
+ }
+
+ model = ExternalsDescriptionDict(data)
+ repo = model[self._name][ExternalsDescription.REPO]
+ self._repo = GitRepository('test', repo)
+
+ @staticmethod
+ def _shell_true(url, remote=None):
+ _ = url
+ _ = remote
+ return 0
+
+ @staticmethod
+ def _shell_false(url, remote=None):
+ _ = url
+ _ = remote
+ return 1
+
+ def test_tag_not_tag_branch_commit(self):
+ """Verify a non-tag returns false
+ """
+ self._repo._git_showref_tag = self._shell_false
+ self._repo._git_showref_branch = self._shell_false
+ self._repo._git_lsremote_branch = self._shell_false
+ self._repo._git_revparse_commit = self._shell_false
+ self._repo._tag = 'something'
+ remote_name = 'origin'
+ received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name)
+ self.assertFalse(received)
+
+ def test_tag_not_tag(self):
+ """Verify a non-tag, untracked remote returns false
+ """
+ self._repo._git_showref_tag = self._shell_false
+ self._repo._git_showref_branch = self._shell_true
+ self._repo._git_lsremote_branch = self._shell_true
+ self._repo._git_revparse_commit = self._shell_false
+ self._repo._tag = 'tag1'
+ remote_name = 'origin'
+ received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name)
+ self.assertFalse(received)
+
+ def test_tag_indeterminant(self):
+ """Verify an indeterminant tag/branch returns false
+ """
+ self._repo._git_showref_tag = self._shell_true
+ self._repo._git_showref_branch = self._shell_true
+ self._repo._git_lsremote_branch = self._shell_true
+ self._repo._git_revparse_commit = self._shell_true
+ self._repo._tag = 'something'
+ remote_name = 'origin'
+ received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name)
+ self.assertFalse(received)
+
+ def test_tag_is_unique(self):
+ """Verify a unique tag match returns true
+ """
+ self._repo._git_showref_tag = self._shell_true
+ self._repo._git_showref_branch = self._shell_false
+ self._repo._git_lsremote_branch = self._shell_false
+ self._repo._git_revparse_commit = self._shell_true
+ self._repo._tag = 'tag1'
+ remote_name = 'origin'
+ received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name)
+ self.assertTrue(received)
+
+ def test_tag_is_commit(self):
+ """Verify a commit hash
+ """
+ self._repo._git_showref_tag = self._shell_false
+ self._repo._git_showref_branch = self._shell_false
+ self._repo._git_lsremote_branch = self._shell_false
+ self._repo._git_revparse_commit = self._shell_true
+ self._repo._tag = '97ebc0e0'
+ remote_name = 'origin'
+ received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name)
+ self.assertTrue(received)
+
+
+class TestValidRef(unittest.TestCase):
+ """Test logic verifying that a reference is a valid tag, branch or sha1
+
+ """
+
+ def setUp(self):
+ """Setup reusable git repository object
+ """
+ self._name = 'component'
+ rdata = {ExternalsDescription.PROTOCOL: 'git',
+ ExternalsDescription.REPO_URL:
+ '/path/to/local/repo',
+ ExternalsDescription.TAG: 'tag1',
+ ExternalsDescription.BRANCH: EMPTY_STR
+ }
+
+ data = {self._name:
+ {
+ ExternalsDescription.REQUIRED: False,
+ ExternalsDescription.PATH: 'tmp',
+ ExternalsDescription.EXTERNALS: EMPTY_STR,
+ ExternalsDescription.REPO: rdata,
+ },
+ }
+
+ model = ExternalsDescriptionDict(data)
+ repo = model[self._name][ExternalsDescription.REPO]
+ self._repo = GitRepository('test', repo)
+
+ @staticmethod
+ def _shell_true(url, remote=None):
+ _ = url
+ _ = remote
+ return 0
+
+ @staticmethod
+ def _shell_false(url, remote=None):
+ _ = url
+ _ = remote
+ return 1
+
+ def test_valid_ref_is_invalid(self):
+ """Verify an invalid reference raises an exception
+ """
+ self._repo._git_showref_tag = self._shell_false
+ self._repo._git_showref_branch = self._shell_false
+ self._repo._git_lsremote_branch = self._shell_false
+ self._repo._git_revparse_commit = self._shell_false
+ self._repo._tag = 'invalid_ref'
+ with self.assertRaises(RuntimeError):
+ self._repo._check_for_valid_ref(self._repo._tag)
+
+ def test_valid_tag(self):
+ """Verify a valid tag return true
+ """
+ self._repo._git_showref_tag = self._shell_true
+ self._repo._git_showref_branch = self._shell_false
+ self._repo._git_lsremote_branch = self._shell_false
+ self._repo._git_revparse_commit = self._shell_true
+ self._repo._tag = 'tag1'
+ received = self._repo._check_for_valid_ref(self._repo._tag)
+ self.assertTrue(received)
+
+ def test_valid_branch(self):
+ """Verify a valid tag return true
+ """
+ self._repo._git_showref_tag = self._shell_false
+ self._repo._git_showref_branch = self._shell_true
+ self._repo._git_lsremote_branch = self._shell_false
+ self._repo._git_revparse_commit = self._shell_true
+ self._repo._tag = 'tag1'
+ received = self._repo._check_for_valid_ref(self._repo._tag)
+ self.assertTrue(received)
+
+ def test_valid_hash(self):
+ """Verify a valid tag return true
+ """
+ self._repo._git_showref_tag = self._shell_false
+ self._repo._git_showref_branch = self._shell_false
+ self._repo._git_lsremote_branch = self._shell_false
+ self._repo._git_revparse_commit = self._shell_true
+ self._repo._tag = '56cc0b5394'
+ received = self._repo._check_for_valid_ref(self._repo._tag)
+ self.assertTrue(received)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/manage_externals/test/test_unit_repository_svn.py b/manage_externals/test/test_unit_repository_svn.py
new file mode 100644
index 0000000000..f49e9898b8
--- /dev/null
+++ b/manage_externals/test/test_unit_repository_svn.py
@@ -0,0 +1,503 @@
+#!/usr/bin/env python
+
+"""Unit test driver for checkout_externals
+
+Note: this script assumes the path to the checkout_externals.py module is
+already in the python path.
+
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import unittest
+
+from manic.repository_svn import SvnRepository
+from manic.externals_status import ExternalStatus
+from manic.externals_description import ExternalsDescription
+from manic.externals_description import ExternalsDescriptionDict
+from manic.global_constants import EMPTY_STR
+
+# pylint: disable=W0212
+
+SVN_INFO_MOSART = """Path: components/mosart
+Working Copy Root Path: /Users/andreb/projects/ncar/git-conversion/clm-dev-experimental/components/mosart
+URL: https://svn-ccsm-models.cgd.ucar.edu/mosart/trunk_tags/mosart1_0_26
+Relative URL: ^/mosart/trunk_tags/mosart1_0_26
+Repository Root: https://svn-ccsm-models.cgd.ucar.edu
+Repository UUID: fe37f545-8307-0410-aea5-b40df96820b5
+Revision: 86711
+Node Kind: directory
+Schedule: normal
+Last Changed Author: erik
+Last Changed Rev: 86031
+Last Changed Date: 2017-07-07 12:28:10 -0600 (Fri, 07 Jul 2017)
+"""
+SVN_INFO_CISM = """
+Path: components/cism
+Working Copy Root Path: /Users/andreb/projects/ncar/git-conversion/clm-dev-experimental/components/cism
+URL: https://svn-ccsm-models.cgd.ucar.edu/glc/trunk_tags/cism2_1_37
+Relative URL: ^/glc/trunk_tags/cism2_1_37
+Repository Root: https://svn-ccsm-models.cgd.ucar.edu
+Repository UUID: fe37f545-8307-0410-aea5-b40df96820b5
+Revision: 86711
+Node Kind: directory
+Schedule: normal
+Last Changed Author: sacks
+Last Changed Rev: 85704
+Last Changed Date: 2017-06-15 05:59:28 -0600 (Thu, 15 Jun 2017)
+"""
+
+
+class TestSvnRepositoryCheckURL(unittest.TestCase):
+ """Verify that the svn_check_url function is working as expected.
+ """
+
+ def setUp(self):
+ """Setup reusable svn repository object
+ """
+ self._name = 'component'
+ rdata = {ExternalsDescription.PROTOCOL: 'svn',
+ ExternalsDescription.REPO_URL:
+ 'https://svn-ccsm-models.cgd.ucar.edu/',
+ ExternalsDescription.TAG:
+ 'mosart/trunk_tags/mosart1_0_26',
+ ExternalsDescription.BRANCH: ''
+ }
+
+ data = {self._name:
+ {
+ ExternalsDescription.REQUIRED: False,
+ ExternalsDescription.PATH: 'junk',
+ ExternalsDescription.EXTERNALS: '',
+ ExternalsDescription.REPO: rdata,
+ },
+ }
+
+ model = ExternalsDescriptionDict(data)
+ repo = model[self._name][ExternalsDescription.REPO]
+ self._repo = SvnRepository('test', repo)
+
+ def test_check_url_same(self):
+ """Test that we correctly identify that the correct URL.
+ """
+ svn_output = SVN_INFO_MOSART
+ expected_url = self._repo.url()
+ result, current_version = \
+ self._repo._check_url(svn_output, expected_url)
+ self.assertEqual(result, ExternalStatus.STATUS_OK)
+ self.assertEqual(current_version, 'mosart/trunk_tags/mosart1_0_26')
+
+ def test_check_url_different(self):
+ """Test that we correctly reject an incorrect URL.
+ """
+ svn_output = SVN_INFO_CISM
+ expected_url = self._repo.url()
+ result, current_version = \
+ self._repo._check_url(svn_output, expected_url)
+ self.assertEqual(result, ExternalStatus.MODEL_MODIFIED)
+ self.assertEqual(current_version, 'glc/trunk_tags/cism2_1_37')
+
+ def test_check_url_none(self):
+ """Test that we can handle an empty string for output, e.g. not an svn
+ repo.
+
+ """
+ svn_output = EMPTY_STR
+ expected_url = self._repo.url()
+ result, current_version = \
+ self._repo._check_url(svn_output, expected_url)
+ self.assertEqual(result, ExternalStatus.UNKNOWN)
+ self.assertEqual(current_version, '')
+
+
+class TestSvnRepositoryCheckSync(unittest.TestCase):
+ """Test whether the SvnRepository svn_check_sync functionality is
+ correct.
+
+ """
+
+ def setUp(self):
+ """Setup reusable svn repository object
+ """
+ self._name = "component"
+ rdata = {ExternalsDescription.PROTOCOL: 'svn',
+ ExternalsDescription.REPO_URL:
+ 'https://svn-ccsm-models.cgd.ucar.edu/',
+ ExternalsDescription.TAG:
+ 'mosart/trunk_tags/mosart1_0_26',
+ ExternalsDescription.BRANCH: EMPTY_STR
+ }
+
+ data = {self._name:
+ {
+ ExternalsDescription.REQUIRED: False,
+ ExternalsDescription.PATH: 'junk',
+ ExternalsDescription.EXTERNALS: EMPTY_STR,
+ ExternalsDescription.REPO: rdata,
+ },
+ }
+
+ model = ExternalsDescriptionDict(data)
+ repo = model[self._name][ExternalsDescription.REPO]
+ self._repo = SvnRepository('test', repo)
+
+ @staticmethod
+ def _svn_info_empty(*_):
+ """Return an empty info string. Simulates svn info failing.
+ """
+ return ''
+
+ @staticmethod
+ def _svn_info_synced(*_):
+ """Return an info sting that is synced with the setUp data
+ """
+ return SVN_INFO_MOSART
+
+ @staticmethod
+ def _svn_info_modified(*_):
+ """Return and info string that is modified from the setUp data
+ """
+ return SVN_INFO_CISM
+
+ def test_repo_dir_not_exist(self):
+ """Test that a directory that doesn't exist returns an error status
+
+ Note: the Repository classes should be prevented from ever
+ working on an empty directory by the _Source object.
+
+ """
+ stat = ExternalStatus()
+ self._repo._check_sync(stat, 'junk')
+ self.assertEqual(stat.sync_state, ExternalStatus.STATUS_ERROR)
+ # check_dir should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_repo_dir_exist_no_svn_info(self):
+ """Test that an empty info string returns an unknown status
+ """
+ stat = ExternalStatus()
+ # Now we over-ride the _svn_info method on the repo to return
+ # a known value without requiring access to svn.
+ self._repo._svn_info = self._svn_info_empty
+ self._repo._check_sync(stat, '.')
+ self.assertEqual(stat.sync_state, ExternalStatus.UNKNOWN)
+ # check_dir should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_repo_dir_synced(self):
+ """Test that a valid info string that is synced to the repo in the
+ externals description returns an ok status.
+
+ """
+ stat = ExternalStatus()
+ # Now we over-ride the _svn_info method on the repo to return
+ # a known value without requiring access to svn.
+ self._repo._svn_info = self._svn_info_synced
+ self._repo._check_sync(stat, '.')
+ self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK)
+ # check_dir should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+ def test_repo_dir_modified(self):
+ """Test that a valid svn info string that is out of sync with the
+ externals description returns a modified status.
+
+ """
+ stat = ExternalStatus()
+ # Now we over-ride the _svn_info method on the repo to return
+ # a known value without requiring access to svn.
+ self._repo._svn_info = self._svn_info_modified
+ self._repo._check_sync(stat, '.')
+ self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED)
+ # check_dir should only modify the sync_state, not clean_state
+ self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
+
+
+class TestSVNStatusXML(unittest.TestCase):
+ """Test parsing of svn status xml output
+ """
+ SVN_STATUS_XML_DIRTY_ALL = '''
+
+
+
+
+
+sacks
+2017-06-15T11:59:00.355419Z
+
+
+
+
+
+
+sacks
+2013-02-07T16:17:56.412878Z
+
+
+
+
+
+
+sacks
+2017-05-01T16:48:27.893741Z
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+'''
+
+ SVN_STATUS_XML_DIRTY_MISSING = '''
+
+
+
+
+
+sacks
+2017-06-15T11:59:00.355419Z
+
+
+
+
+
+
+
+
+'''
+
+ SVN_STATUS_XML_DIRTY_MODIFIED = '''
+
+
+
+
+
+sacks
+2013-02-07T16:17:56.412878Z
+
+
+
+
+
+
+
+
+'''
+
+ SVN_STATUS_XML_DIRTY_DELETED = '''
+
+
+
+
+
+sacks
+2017-05-01T16:48:27.893741Z
+
+
+
+
+
+
+
+
+'''
+
+ SVN_STATUS_XML_DIRTY_UNVERSION = '''
+
+
+
+
+
+
+
+
+
+
+
+'''
+
+ SVN_STATUS_XML_DIRTY_ADDED = '''
+
+
+
+
+
+
+
+
+
+
+
+'''
+
+ SVN_STATUS_XML_CLEAN = '''
+
+
+
+
+
+
+
+
+
+
+
+'''
+
+ def test_xml_status_dirty_missing(self):
+ """Verify that svn status output is consindered dirty when there is a
+ missing file.
+
+ """
+ svn_output = self.SVN_STATUS_XML_DIRTY_MISSING
+ is_dirty = SvnRepository.xml_status_is_dirty(
+ svn_output)
+ self.assertTrue(is_dirty)
+
+ def test_xml_status_dirty_modified(self):
+ """Verify that svn status output is consindered dirty when there is a
+ modified file.
+ """
+ svn_output = self.SVN_STATUS_XML_DIRTY_MODIFIED
+ is_dirty = SvnRepository.xml_status_is_dirty(
+ svn_output)
+ self.assertTrue(is_dirty)
+
+ def test_xml_status_dirty_deleted(self):
+ """Verify that svn status output is consindered dirty when there is a
+ deleted file.
+ """
+ svn_output = self.SVN_STATUS_XML_DIRTY_DELETED
+ is_dirty = SvnRepository.xml_status_is_dirty(
+ svn_output)
+ self.assertTrue(is_dirty)
+
+ def test_xml_status_dirty_unversion(self):
+ """Verify that svn status output ignores unversioned files when making
+ the clean/dirty decision.
+
+ """
+ svn_output = self.SVN_STATUS_XML_DIRTY_UNVERSION
+ is_dirty = SvnRepository.xml_status_is_dirty(
+ svn_output)
+ self.assertFalse(is_dirty)
+
+ def test_xml_status_dirty_added(self):
+ """Verify that svn status output is consindered dirty when there is a
+ added file.
+ """
+ svn_output = self.SVN_STATUS_XML_DIRTY_ADDED
+ is_dirty = SvnRepository.xml_status_is_dirty(
+ svn_output)
+ self.assertTrue(is_dirty)
+
+ def test_xml_status_dirty_all(self):
+ """Verify that svn status output is consindered dirty when there are
+ multiple dirty files.
+
+ """
+ svn_output = self.SVN_STATUS_XML_DIRTY_ALL
+ is_dirty = SvnRepository.xml_status_is_dirty(
+ svn_output)
+ self.assertTrue(is_dirty)
+
+ def test_xml_status_dirty_clean(self):
+ """Verify that svn status output is consindered clean when there are
+ no 'dirty' files. This means accepting untracked and externals.
+
+ """
+ svn_output = self.SVN_STATUS_XML_CLEAN
+ is_dirty = SvnRepository.xml_status_is_dirty(
+ svn_output)
+ self.assertFalse(is_dirty)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/manage_externals/test/test_unit_utils.py b/manage_externals/test/test_unit_utils.py
new file mode 100644
index 0000000000..c994e58ebe
--- /dev/null
+++ b/manage_externals/test/test_unit_utils.py
@@ -0,0 +1,350 @@
+#!/usr/bin/env python
+
+"""Unit test driver for checkout_externals
+
+Note: this script assumes the path to the checkout_externals.py module is
+already in the python path.
+
+"""
+
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from __future__ import print_function
+
+import os
+import unittest
+
+from manic.utils import last_n_lines, indent_string
+from manic.utils import str_to_bool, execute_subprocess
+from manic.utils import is_remote_url, split_remote_url, expand_local_url
+
+
+class TestExecuteSubprocess(unittest.TestCase):
+ """Test the application logic of execute_subprocess wrapper
+ """
+
+ def test_exesub_return_stat_err(self):
+ """Test that execute_subprocess returns a status code when caller
+ requests and the executed subprocess fails.
+
+ """
+ cmd = ['false']
+ status = execute_subprocess(cmd, status_to_caller=True)
+ self.assertEqual(status, 1)
+
+ def test_exesub_return_stat_ok(self):
+ """Test that execute_subprocess returns a status code when caller
+ requests and the executed subprocess succeeds.
+
+ """
+ cmd = ['true']
+ status = execute_subprocess(cmd, status_to_caller=True)
+ self.assertEqual(status, 0)
+
+ def test_exesub_except_stat_err(self):
+ """Test that execute_subprocess raises an exception on error when
+ caller doesn't request return code
+
+ """
+ cmd = ['false']
+ with self.assertRaises(RuntimeError):
+ execute_subprocess(cmd, status_to_caller=False)
+
+
+class TestLastNLines(unittest.TestCase):
+ """Test the last_n_lines function.
+
+ """
+
+ def test_last_n_lines_short(self):
+ """With a message with <= n lines, result of last_n_lines should
+ just be the original message.
+
+ """
+ mystr = """three
+line
+string
+"""
+
+ mystr_truncated = last_n_lines(
+ mystr, 3, truncation_message='[truncated]')
+ self.assertEqual(mystr, mystr_truncated)
+
+ def test_last_n_lines_long(self):
+ """With a message with > n lines, result of last_n_lines should
+ be a truncated string.
+
+ """
+ mystr = """a
+big
+five
+line
+string
+"""
+ expected = """[truncated]
+five
+line
+string
+"""
+
+ mystr_truncated = last_n_lines(
+ mystr, 3, truncation_message='[truncated]')
+ self.assertEqual(expected, mystr_truncated)
+
+
+class TestIndentStr(unittest.TestCase):
+ """Test the indent_string function.
+
+ """
+
+ def test_indent_string_singleline(self):
+ """Test the indent_string function with a single-line string
+
+ """
+ mystr = 'foo'
+ result = indent_string(mystr, 4)
+ expected = ' foo'
+ self.assertEqual(expected, result)
+
+ def test_indent_string_multiline(self):
+ """Test the indent_string function with a multi-line string
+
+ """
+ mystr = """hello
+hi
+goodbye
+"""
+ result = indent_string(mystr, 2)
+ expected = """ hello
+ hi
+ goodbye
+"""
+ self.assertEqual(expected, result)
+
+
+class TestStrToBool(unittest.TestCase):
+ """Test the string to boolean conversion routine.
+
+ """
+
+ def test_case_insensitive_true(self):
+ """Verify that case insensitive variants of 'true' returns the True
+ boolean.
+
+ """
+ values = ['true', 'TRUE', 'True', 'tRuE', 't', 'T', ]
+ for value in values:
+ received = str_to_bool(value)
+ self.assertTrue(received)
+
+ def test_case_insensitive_false(self):
+ """Verify that case insensitive variants of 'false' returns the False
+ boolean.
+
+ """
+ values = ['false', 'FALSE', 'False', 'fAlSe', 'f', 'F', ]
+ for value in values:
+ received = str_to_bool(value)
+ self.assertFalse(received)
+
+ def test_invalid_str_error(self):
+ """Verify that a non-true/false string generates a runtime error.
+ """
+ values = ['not_true_or_false', 'A', '1', '0',
+ 'false_is_not_true', 'true_is_not_false']
+ for value in values:
+ with self.assertRaises(RuntimeError):
+ str_to_bool(value)
+
+
+class TestIsRemoteURL(unittest.TestCase):
+ """Crude url checking to determine if a url is local or remote.
+
+ """
+
+ def test_url_remote_git(self):
+ """verify that a remote git url is identified.
+ """
+ url = 'git@somewhere'
+ is_remote = is_remote_url(url)
+ self.assertTrue(is_remote)
+
+ def test_url_remote_ssh(self):
+ """verify that a remote ssh url is identified.
+ """
+ url = 'ssh://user@somewhere'
+ is_remote = is_remote_url(url)
+ self.assertTrue(is_remote)
+
+ def test_url_remote_http(self):
+ """verify that a remote http url is identified.
+ """
+ url = 'http://somewhere'
+ is_remote = is_remote_url(url)
+ self.assertTrue(is_remote)
+
+ def test_url_remote_https(self):
+ """verify that a remote https url is identified.
+ """
+ url = 'https://somewhere'
+ is_remote = is_remote_url(url)
+ self.assertTrue(is_remote)
+
+ def test_url_local_user(self):
+ """verify that a local path with '~/path/to/repo' gets rejected
+
+ """
+ url = '~/path/to/repo'
+ is_remote = is_remote_url(url)
+ self.assertFalse(is_remote)
+
+ def test_url_local_var_curly(self):
+ """verify that a local path with env var '${HOME}' gets rejected
+ """
+ url = '${HOME}/path/to/repo'
+ is_remote = is_remote_url(url)
+ self.assertFalse(is_remote)
+
+ def test_url_local_var(self):
+ """verify that a local path with an env var '$HOME' gets rejected
+ """
+ url = '$HOME/path/to/repo'
+ is_remote = is_remote_url(url)
+ self.assertFalse(is_remote)
+
+ def test_url_local_abs(self):
+ """verify that a local abs path gets rejected
+ """
+ url = '/path/to/repo'
+ is_remote = is_remote_url(url)
+ self.assertFalse(is_remote)
+
+ def test_url_local_rel(self):
+ """verify that a local relative path gets rejected
+ """
+ url = '../../path/to/repo'
+ is_remote = is_remote_url(url)
+ self.assertFalse(is_remote)
+
+
+class TestSplitRemoteURL(unittest.TestCase):
+ """Crude url checking to determine if a url is local or remote.
+
+ """
+
+ def test_url_remote_git(self):
+ """verify that a remote git url is identified.
+ """
+ url = 'git@somewhere.com:org/repo'
+ received = split_remote_url(url)
+ self.assertEqual(received, "org/repo")
+
+ def test_url_remote_ssh(self):
+ """verify that a remote ssh url is identified.
+ """
+ url = 'ssh://user@somewhere.com/path/to/repo'
+ received = split_remote_url(url)
+ self.assertEqual(received, 'somewhere.com/path/to/repo')
+
+ def test_url_remote_http(self):
+ """verify that a remote http url is identified.
+ """
+ url = 'http://somewhere.org/path/to/repo'
+ received = split_remote_url(url)
+ self.assertEqual(received, 'somewhere.org/path/to/repo')
+
+ def test_url_remote_https(self):
+ """verify that a remote http url is identified.
+ """
+ url = 'http://somewhere.gov/path/to/repo'
+ received = split_remote_url(url)
+ self.assertEqual(received, 'somewhere.gov/path/to/repo')
+
+ def test_url_local_url_unchanged(self):
+ """verify that a local path is unchanged
+
+ """
+ url = '/path/to/repo'
+ received = split_remote_url(url)
+ self.assertEqual(received, url)
+
+
+class TestExpandLocalURL(unittest.TestCase):
+ """Crude url checking to determine if a url is local or remote.
+
+ Remote should be unmodified.
+
+ Local, should perform user and variable expansion.
+
+ """
+
+ def test_url_local_user1(self):
+ """verify that a local path with '~/path/to/repo' gets expanded to an
+ absolute path.
+
+ NOTE(bja, 2017-11) we can't test for something like:
+ '~user/path/to/repo' because the user has to be in the local
+ machine password directory and we don't know a user name that
+ is valid on every system....?
+
+ """
+ field = 'test'
+ url = '~/path/to/repo'
+ received = expand_local_url(url, field)
+ print(received)
+ self.assertTrue(os.path.isabs(received))
+
+ def test_url_local_expand_curly(self):
+ """verify that a local path with '${HOME}' gets expanded to an absolute path.
+ """
+ field = 'test'
+ url = '${HOME}/path/to/repo'
+ received = expand_local_url(url, field)
+ self.assertTrue(os.path.isabs(received))
+
+ def test_url_local_expand_var(self):
+ """verify that a local path with '$HOME' gets expanded to an absolute path.
+ """
+ field = 'test'
+ url = '$HOME/path/to/repo'
+ received = expand_local_url(url, field)
+ self.assertTrue(os.path.isabs(received))
+
+ def test_url_local_env_missing(self):
+ """verify that a local path with env var that is missing gets left as-is
+
+ """
+ field = 'test'
+ url = '$TMP_VAR/path/to/repo'
+ received = expand_local_url(url, field)
+ print(received)
+ self.assertEqual(received, url)
+
+ def test_url_local_expand_env(self):
+ """verify that a local path with another env var gets expanded to an
+ absolute path.
+
+ """
+ field = 'test'
+ os.environ['TMP_VAR'] = '/some/absolute'
+ url = '$TMP_VAR/path/to/repo'
+ received = expand_local_url(url, field)
+ del os.environ['TMP_VAR']
+ print(received)
+ self.assertTrue(os.path.isabs(received))
+ self.assertEqual(received, '/some/absolute/path/to/repo')
+
+ def test_url_local_normalize_rel(self):
+ """verify that a local path with another env var gets expanded to an
+ absolute path.
+
+ """
+ field = 'test'
+ url = '/this/is/a/long/../path/to/a/repo'
+ received = expand_local_url(url, field)
+ print(received)
+ self.assertEqual(received, '/this/is/a/path/to/a/repo')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 21f15b3aff..be66201047 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -8,7 +8,6 @@ project(clm45_tests Fortran C)
include(CIME_utils)
set(CLM_ROOT "..")
-set(CESM_ROOT "${CLM_ROOT}/../../")
# This definition is needed to avoid having ESMF depend on mpi
add_definitions(-DHIDE_MPI)
@@ -17,10 +16,10 @@ add_definitions(-DHIDE_MPI)
# done first, so that in case of name collisions, the CLM versions take
# precedence (when there are two files with the same name, the one added later
# wins).
-add_subdirectory(${CESM_ROOT}/cime/src/share/util csm_share)
-add_subdirectory(${CESM_ROOT}/cime/src/share/unit_test_stubs/util csm_share_stubs)
-add_subdirectory(${CESM_ROOT}/cime/src/share/esmf_wrf_timemgr esmf_wrf_timemgr)
-add_subdirectory(${CESM_ROOT}/cime/src/drivers/mct/shr drv_share)
+add_subdirectory(${CIMEROOT}/src/share/util csm_share)
+add_subdirectory(${CIMEROOT}/src/share/unit_test_stubs/util csm_share_stubs)
+add_subdirectory(${CIMEROOT}/src/share/esmf_wrf_timemgr esmf_wrf_timemgr)
+add_subdirectory(${CIMEROOT}/src/drivers/mct/shr drv_share)
# Extract just the files we need from drv_share
set (drv_sources_needed_base
@@ -55,6 +54,24 @@ foreach (sourcefile ${share_sources})
endif()
endforeach()
+# Remove shr_cal_mod from share_sources.
+#
+# shr_cal_mod depends on ESMF (or the lightweight esmf wrf timemgr, at
+# least). Since CTSM doesn't currently use shr_cal_mod, we're avoiding
+# the extra overhead of including esmf_wrf_timemgr sources in this
+# build.
+#
+# TODO: like above, this should be moved into a general-purpose function
+# in Sourcelist_utils. Then this block of code could be replaced with a
+# single call, like: remove_source_file(${share_sources}
+# "shr_cal_mod.F90")
+foreach (sourcefile ${share_sources})
+ string(REGEX MATCH "shr_cal_mod.F90" match_found ${sourcefile})
+ if(match_found)
+ list(REMOVE_ITEM share_sources ${sourcefile})
+ endif()
+endforeach()
+
# Build libraries containing stuff needed for the unit tests.
# Eventually, these add_library calls should probably be distributed into the correct location, rather than being in this top-level CMakeLists.txt file.
add_library(csm_share ${share_sources} ${drv_sources_needed})
@@ -66,11 +83,11 @@ add_dependencies(esmf_wrf_timemgr csm_share)
add_dependencies(clm csm_share esmf_wrf_timemgr)
# We need to look for header files here, in order to pick up shr_assert.h
-include_directories(${CESM_ROOT}/cime/src/share/include)
+include_directories(${CIMEROOT}/src/share/include)
# And we need to look for header files here, for some include files needed by
# the esmf_wrf_timemgr code
-include_directories(${CESM_ROOT}/cime/src/share/esmf_wrf_timemgr)
+include_directories(${CIMEROOT}/src/share/esmf_wrf_timemgr)
# Tell cmake to look for libraries & mod files here, because this is where we built libraries
include_directories(${CMAKE_CURRENT_BINARY_DIR})
diff --git a/src/README.unit_testing b/src/README.unit_testing
index 60dd187364..de503ca8d3 100644
--- a/src/README.unit_testing
+++ b/src/README.unit_testing
@@ -1,6 +1,4 @@
-# To run all CLM unit tests on caldera, run the following command:
-#
-# Note that, on yellowstone/caldera, this requires 'module load all-python-libs'
+# To run all CTSM unit tests on caldera, run the following command:
#
# The creation of a temporary directory ensures that you are doing a completely
# clean build of the unit tests. (The use of the --clean flag to run_tests.py
@@ -8,5 +6,8 @@
# rerunning the tests after an incremental change, you can instead use an
# existing build directory.
-../../../cime/scripts/fortran_unit_testing/run_tests.py --build-dir `mktemp -d --tmpdir=. unit_tests.XXXXXXXX`
+# From a standalone CTSM checkout:
+../cime/scripts/fortran_unit_testing/run_tests.py --build-dir `mktemp -d --tmpdir=. unit_tests.XXXXXXXX`
+# If you are within a full CESM checkout, you would instead do:
+# ../../../cime/scripts/fortran_unit_testing/run_tests.py --build-dir `mktemp -d --tmpdir=. unit_tests.XXXXXXXX`
diff --git a/src/biogeochem/CNBalanceCheckMod.F90 b/src/biogeochem/CNBalanceCheckMod.F90
index e43eed4c68..c1fd8d0da5 100644
--- a/src/biogeochem/CNBalanceCheckMod.F90
+++ b/src/biogeochem/CNBalanceCheckMod.F90
@@ -190,8 +190,7 @@ subroutine CBalanceCheck(this, bounds, num_soilc, filter_soilc, &
err_found = .true.
err_index = c
end if
-
- if (abs(col_errcb(c)) > 10000) then !1e-9_r8
+ if (abs(col_errcb(c)) > 1e-8_r8) then
write(iulog,*) 'cbalance warning',c,col_errcb(c),col_endcb(c)
end if
@@ -326,7 +325,7 @@ subroutine NBalanceCheck(this, bounds, num_soilc, filter_soilc, &
err_index = c
end if
- if (abs(col_errnb(c)) > 1000) then !1e-8_r8
+ if (abs(col_errnb(c)) > 1e-7_r8) then
write(iulog,*) 'nbalance warning',c,col_errnb(c),col_endnb(c)
write(iulog,*)'inputs,ffix,nfix,ndep = ',ffix_to_sminn(c)*dt,nfix_to_sminn(c)*dt,ndep_to_sminn(c)*dt
write(iulog,*)'outputs,lch,roff,dnit = ',smin_no3_leached(c)*dt, smin_no3_runoff(c)*dt,f_n2o_nit(c)*dt
diff --git a/src/dyn_subgrid/.gitignore b/src/dyn_subgrid/.gitignore
new file mode 100644
index 0000000000..52f24730b5
--- /dev/null
+++ b/src/dyn_subgrid/.gitignore
@@ -0,0 +1,4 @@
+# files generated by genf90 in the unit test build
+dynVarMod.F90
+dynVarTimeInterpMod.F90
+dynVarTimeUninterpMod.F90
diff --git a/src/unit_test_shr/.gitignore b/src/unit_test_shr/.gitignore
new file mode 100644
index 0000000000..70bacd9621
--- /dev/null
+++ b/src/unit_test_shr/.gitignore
@@ -0,0 +1,2 @@
+# files generated by genf90 in the unit test build
+unittestArrayMod.F90
diff --git a/src/unit_test_stubs/main/.gitignore b/src/unit_test_stubs/main/.gitignore
new file mode 100644
index 0000000000..a457979e10
--- /dev/null
+++ b/src/unit_test_stubs/main/.gitignore
@@ -0,0 +1,3 @@
+# files generated by genf90 in the unit test build
+ncdio_pio_fake.F90
+ncdio_var.F90
diff --git a/src/unit_test_stubs/utils/.gitignore b/src/unit_test_stubs/utils/.gitignore
new file mode 100644
index 0000000000..9e11aa7f64
--- /dev/null
+++ b/src/unit_test_stubs/utils/.gitignore
@@ -0,0 +1,2 @@
+# files generated by genf90 in the unit test build
+restUtilMod_stub.F90
diff --git a/src/utils/.gitignore b/src/utils/.gitignore
new file mode 100644
index 0000000000..79fcba6e26
--- /dev/null
+++ b/src/utils/.gitignore
@@ -0,0 +1,2 @@
+# files generated by genf90 in the unit test build
+array_utils.F90