vcstools-0.1.42/.gitignore
*#
*.DS_Store
*.coverage
*.deb
*.egg-info
*.eggs
*.log
*.orig
*.pyc
*.swp
*.tgz
*~
.tox
build/*
description-pak
dist
doc-pak
nosetests.xml
vcstools-0.1.42/.travis.yml
language: python
python:
- "2.7"
- "3.4"
- "3.5"
- "3.6"
before_install:
- export REPO=`pwd`
install:
- sudo apt-get update
- sudo apt-get install -qq python-yaml python-dateutil
- sudo apt-get install -qq python3-yaml python3-dateutil
- sudo apt-get install -qq git mercurial bzr subversion
- pip install -U setuptools tox tox-travis
- echo $PYTHONPATH
- python -c 'import sys;print(sys.path)'
- python setup.py install
- hg --version
- bzr --version
- git --version
- svn --version
# command to run tests
script:
- tox
notifications:
email: false
after_success:
- coveralls
vcstools-0.1.42/CONTRIBUTING.rst
Contributing guide
==================
Thanks for your interest in contributing to vcstools.
Any kinds of contributions are welcome: Bug reports, Documentation, Patches.
Developer Environment
---------------------
For many tasks, it is okay to just develop using a single installed python version. But if you need to test/debug the project in multiple python versions, you need to install those versions:
1. (Optional) Install multiple python versions
1. (Optional) Install [pyenv](https://github.com/pyenv/pyenv-installer) to manage python versions
2. (Optional) Using pyenv, install the python versions used in testing::
pyenv install 2.7.16
pyenv install 3.6.8
It may be okay to run and test python against locally installed libraries, but if you need to have a consistent build, it is recommended to manage your environment using `virtualenv `_::
$ virtualenv ~/vcstools_venv
$ source ~/vcstools_venv/bin/activate
Editable library install
------------------------
It is common to work on rosinstall or wstool while also needing to make changes to the vcstools library. For that purpose, use::
$ pip install --editable /path/to/vcstools_source
For convenience also consider [virtualenvwrapper](https://pypi.org/project/virtualenvwrapper/).
At this point, in any shell where you run ``source ~/vcstools_venv/bin/activate``, you can use vcstools, and any edits to files in the vcstools source will take effect immediately.
This is the effect of ``pip install --editable``, see ``pip install --help``.
To setup a virtualenv for Python3 simply do this (from a clean terminal)::
$ virtualenv --python=python3 ~/vcstools_venv_py3
$ source ~/vcstools_venv_py3/bin/activate
When you're done developing, you can exit any shells where you did ``source .../bin/activate`` and delete the virtualenv folder, e.g. ``~/vcstools_venv``.
Testing
-------
Prerequisites:
* The tests require git, mercurial, bazaar and subversion to be installed.
Using the python library nose to test::
# run all tests using nose
$ nosetests
# run one test using nose
$ nosetests {testname}
# run all tests with coverage check
$ python setup.py test
# run all tests using python3
$ python3 setup.py test
# run all tests against multiple python versions (same as in travis)
$ tox
Releasing
---------
* Update `src/vcstools/__version__.py`
* Check `doc/changelog` is up to date
* Check `stdeb.cfg` is up to date
* Prepare release dependencies::
pip install --upgrade setuptools wheel twine
* Upload to testpypi::
python3 setup.py sdist bdist_wheel
twine upload --repository testpypi dist/*
* Check testpypi download files and documentation look ok
* Actually release::
twine upload dist/*
* Create and push tag::
git tag x.y.z
git push
git push --tags
vcstools-0.1.42/LICENSE
# Software License Agreement (BSD License)
#
# Copyright (c) 2010, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
vcstools-0.1.42/MANIFEST.in
include *.py
include LICENSE
global-exclude .tox
global-exclude *~
global-exclude __pycache__
global-exclude .coverage
global-exclude *.py[co]
global-exclude *.db
global-exclude .git*
global-exclude *.orig
vcstools-0.1.42/Makefile
.PHONY: all setup clean_dist distro clean install testsetup test
NAME=vcstools
VERSION=$(shell grep version ./src/vcstools/__version__.py | sed 's,version = ,,')
all:
echo "noop for debbuild"
setup:
echo "building version ${VERSION}"
clean_dist:
-rm -f MANIFEST
-rm -rf dist
-rm -rf deb_dist
-rm -fr src/vcstools.egg-info/
distro: clean_dist setup
python setup.py sdist
clean: clean_dist
echo "clean"
install: distro
sudo checkinstall python setup.py install
testsetup:
echo "running tests"
test: testsetup
nosetests --with-coverage --cover-package=vcstools --with-xunit
vcstools-0.1.42/README.rst
vcstools
========
.. image:: https://travis-ci.org/vcstools/vcstools.svg?branch=master
:target: https://travis-ci.org/vcstools/vcstools
.. image:: https://coveralls.io/repos/github/vcstools/vcstools/badge.svg?branch=master
:target: https://coveralls.io/github/vcstools/vcstools?branch=master
.. image:: https://img.shields.io/pypi/v/vcstools.svg
:target: https://pypi.python.org/pypi/vcstools
.. image:: https://img.shields.io/pypi/pyversions/vcstools.svg
:target: https://pypi.python.org/pypi/vcstools
.. image:: https://img.shields.io/pypi/status/vcstools.svg
:target: https://pypi.python.org/pypi/vcstools
.. image:: https://img.shields.io/pypi/l/vcstools.svg
:target: https://pypi.python.org/pypi/vcstools
.. image:: https://img.shields.io/pypi/dd/vcstools.svg
:target: https://pypi.python.org/pypi/vcstools
.. image:: https://img.shields.io/pypi/dw/vcstools.svg
:target: https://pypi.python.org/pypi/vcstools
.. image:: https://img.shields.io/pypi/dm/vcstools.svg
:target: https://pypi.python.org/pypi/vcstools
The vcstools module provides a Python API for interacting with different version control systems (VCS/SCMs).
It is used in tools like `wstool `_ and `rosinstall `_, which are frequently used in ROS.
This should not be confused with ``vcstool`` (no trailing ``s``), which provides the ``vcs`` command line tool and is otherwise unrelated to this repository, see: https://github.com/dirk-thomas/vcstool
See http://www.ros.org/doc/independent/api/vcstools/html/
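A minimal usage sketch (the checkout path and repository URL below are placeholders)::

    import vcstools

    # obtain a client by VCS type; supported types include 'git', 'hg', 'svn', 'bzr' and 'tar'
    client = vcstools.get_vcs_client('git', '/tmp/vcstools_example')
    if not client.path_exists():
        client.checkout('https://github.com/vcstools/vcstools.git')
    print(client.get_vcs_type_name())
    print(client.get_url())
    print(client.get_version())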
Installing
----------
See `documentation `_
vcstools-0.1.42/doc/Makefile
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
help:
@echo "Please use \`make ' where is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
-rm -rf $(BUILDDIR)/*
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/vcstools.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/vcstools.qhc"
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/vcstools"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/vcstools"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
make -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
upload: html
# set write permission for group so that everybody can overwrite existing files on the webserver
chmod -R g+w _build/html/
scp -pr _build/html/ rosbot@ros.osuosl.org:/home/rosbot/docs/independent/api/vcstools
vcstools-0.1.42/doc/changelog.rst
Changelog
=========
0.1
===
0.1.42
------
- remove cosmic and disco until we have hosting for it
0.1.41
------
- fix git submodule error due to lack of quoting
- Fix git update failure by refreshing git index before fast-forward
- Fix python3 incompatibility due to wrong use of urlopen
- Updating get_affected_files function by removing single quotes covering format (#129)
- Fix export_upstream for git submodules with relative urls. (#130)
0.1.40
------
- Trivial style and testing changes
0.1.39
------
- Added support for git submodule in export_repository
- Add Wily Xenial Yakkety
- Add get_affected_files for all vcss
0.1.38
------
- Fixed test failures due to SVN 1.9.
- Added the ``get_default_remote_version_label()`` API method to support changes in ``wstool``.
- Renamed some internal functions to have a leading ``_`` to indicate that they are private.
0.1.37
------
- Fix an issue where logs were restricted to the named branch (hg).
- Fixed svn to use a global revision number rather than a branch-local revision.
- Added the get_remote_version() and get_current_version_label() API calls.
- Enhanced use of ``no_warn`` in run_shell_command().
- Fix get_version() to catch stderr.
- Added get_branches() API call.
- Fix some errors and warnings to output to stderr.
- Fix output to avoid extra newlines when show_stdout=True.
0.1.36
------
- Updates to the release platforms (-lucid +utopic +vivid)
- Fix an issue with updating branches on git, see vcstools/wstool#25
0.1.31
------
- Fix submodule support on checkout #71
0.1.30
------
- use netrc to download tars from private repos, also will work for private rosinstall files
- Fix checks for empty repository #62
0.1.29
------
- fix #57 shallow checkout of non-master breaks with git >= 1.8.0
- unit test fixes
0.1.28
------
- test of new upload method
0.1.27
------
- fix #51 hg status and diff don't work if workspace is inside hg repo
- fix #47 several performance improvements by removing unnecessary update actions after checkout
- fix #46 https tar download fails behind proxy
- fix #45 sometimes commands run forever
- fix #44 minor bug when checking out from repo with default branch not master
- fix #41 improved API, get_vcs_client function part of vcstools module
0.1.26
------
- fix #38 git commands fail in local repositories with many (>2000) references
- fix #31 get_log() svn xml not available on Ubuntu Lucid (hg 1.4.2)
- fix #37 update() returns True even when fetch failed
0.1.25
------
- minor bugfixes
- travis-ci config file
- fix unit tests for svn diff&status ordering changes
- deprecated VcsClient Class
- added get_log function
0.1.24
------
- fix git update return value to False when fast-forward not possible due to diverge
- fix: svn certificate prompt invisible; svn checkout and update become verbose due to this
0.1.22
------
- Changed the way that git implements detect_presence to fix a bug with submodules in newer versions of git
- fix for git single quotes on Windows
- minor internal api bug where a git function always returned True
- fix bug in svn export_repository
0.1.21
------
- bugfix #66: hg http username prompt hidden
- add export_repository method to vcs_base and all implementations with tests
- bugfix #64: unicode decoding problems
0.1.20
------
- rosws update --verbose for git prints small message when rebasing
- improved python3 compatibility
0.1.19
------
- more python3 compatibility
- code style improved
- match_url to compare bzr shortcuts to real urls
- more unit tests
- get_status required to end with newline, to fix #55
0.1.18
------
- added shallow flag to API, implemented for git
0.1.17
------
- svn stdout output on get_version removed
0.1.16
------
- All SCMs show some output when update caused changes
- All SCMs have verbose option to show all changes done on update
- bugfix for bazaar getUrl() being a joined abspath
- bugfix for not all output being shown when requested
0.1.15
------
- Added pyyaml as a proper dependency, removed detection code.
- remove use of tar entirely, switch to tarfile module
- fix #36 allowing for tar being bsdtar on OSX
0.1.14
------
- Added tarball uncompression.
0.1.13
------
- added this changelog
- git get-version fetches only when local lookup fails
- hg get-version pulls if label not found
- Popen error message includes cwd path
0.1.12
------
- py_checker clean after all refactorings since 0.1.0
0.1.11
------
- svn and hg update without user interaction
- bugfix #30
- minor bugfixes
0.1.10
------
- minor bugs
0.1.9
-----
- safer sanitization of shell params
- git diff and stat recurse for submodules
- base class manages all calls to Popen
0.1.8
-----
- several bugfixes
- reverted to using shell commands instead of the bazaar API
0.1.7
-----
- reverted to using shell commands instead of the pysvn and mercurial APIs
- protection against shell injection attempts
0.1.6
-----
- bugfixes to svn and bzr
- unified all calls through Popen
0.1.5
-----
- missing dependency to dateutil added
0.1.4
-----
switched shell calls to calls to python API of mercurial, bazaar, py-svn
0.1.3
-----
- fix #6
0.1.2
-----
- fix #15
0.1.1
-----
- more unit tests
- diverse bugfixes
- major change to git client behavior, based around git https://kforge.ros.org/vcstools/trac/ticket/1
0.1.0
-----
- documentation fixes
0.0.3
-----
- import from svn
vcstools-0.1.42/doc/conf.py
# -*- coding: utf-8 -*-
#
# vcstools documentation build configuration file, created by
# sphinx-quickstart on Thu Aug 4 20:58:04 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
sys.path.insert(0, os.path.abspath('../src'))
from vcstools.__version__ import version
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.doctest', 'sphinx.ext.coverage', 'sphinx.ext.autosummary']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'vcstools'
copyright = u'2010, Willow Garage'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = version
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'haiku'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# " v documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'vcstoolsdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'vcstools.tex', u'vcstools Documentation',
u'Tully Foote, Thibault Kruse, Ken Conley', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'vcstools', u'vcstools Documentation',
[u'Tully Foote, Thibault Kruse, Ken Conley'], 1)
]
autodoc_default_flags = ['members', 'private-members', 'special-members',
#'undoc-members',
'show-inheritance']
def autodoc_skip_member(app, what, name, obj, skip, options):
exclusions = ('__weakref__', # special-members
'__doc__', '__module__', '__dict__', # undoc-members
)
exclude = name in exclusions
return skip or exclude
def setup(app):
app.connect('autodoc-skip-member', autodoc_skip_member)
vcstools-0.1.42/doc/developers_guide.rst
Developer's Guide
=================
Code API
--------
.. toctree::
:maxdepth: 1
modules
Changelog
---------
.. toctree::
:maxdepth: 1
changelog
Bug reports and feature requests
--------------------------------
- `Submit a bug report `_
Developer Setup
---------------
vcstools uses `setuptools `_,
which you will need to download and install in order to run the
packaging. We use setuptools instead of distutils in order to be able to
use ``setup()`` keys like ``install_requires``::
cd vcstools
python setup.py develop
Testing
-------
Install test dependencies
::
pip install nose
pip install mock
vcstools uses `Python nose
`_ for testing, which is
a fairly simple and straightforward test framework. vcstools itself
mainly uses :mod:`unittest` to construct test fixtures, but with nose
you can also just write a function that starts with the name ``test``
and use normal ``assert`` statements.
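For example, a nose-style test of the existing ``normalized_rel_path``
helper from ``vcstools.common`` could look like this (the test name and
values are illustrative, not part of the current test suite)::

    from vcstools.common import normalized_rel_path

    def test_normalized_rel_path_normalizes_relative_input():
        # a relative path is only normalized, not made relative to basepath
        assert normalized_rel_path('sub/./dir', '/some/base') == 'sub/dir'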
vcstools also uses `mock `_
to create mocks for testing.
You can run the tests, including coverage, as follows:
::
cd vcstools
make test
Documentation
-------------
Sphinx is used to provide API documentation for vcstools. The documents
are stored in the ``doc`` subdirectory.
vcstools-0.1.42/doc/index.rst
vcstools documentation
======================
.. module:: vcstools
.. moduleauthor:: Tully Foote , Thibault Kruse , Ken Conley
The :mod:`vcstools` module provides a Python API for interacting with
different version control systems (VCS/SCMs). The :class:`VcsClient`
class provides an API for seamless interacting with Git, Mercurial
(Hg), Bzr and SVN. The focus of the API is manipulating on-disk
checkouts of source-controlled trees. Its main use is to support the
`rosinstall` tool.
.. toctree::
:maxdepth: 2
vcsclient
Example::
import vcstools
# interrogate an existing tree
client = vcstools.VcsClient('svn', '/path/to/checkout')
print(client.get_url())
print(client.get_version())
print(client.get_diff())
# create a new tree
client = vcstools.VcsClient('hg', '/path/to/new/checkout')
client.checkout('https://bitbucket.org/foo/bar')
Installation
============
vcstools is available on pypi and can be installed via ``pip``
::
pip install vcstools
or ``easy_install``:
::
easy_install vcstools
Using vcstools
==============
The :mod:`vcstools` module is meant to be used as a normal Python
module. After it has been installed, you can ``import`` it normally
and do not need to declare as a ROS package dependency.
Advanced: vcstools developers/contributors
===========================================
.. toctree::
:maxdepth: 2
developers_guide
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
vcstools-0.1.42/doc/modules.rst
Packages
========
.. toctree::
:maxdepth: 4
vcstools
vcstools-0.1.42/doc/vcsclient.rst
General VCS/SCM API
===================
.. currentmodule:: vcstools
The :class:`VcsClient` class provides a generic API for
- Subversion (``svn``)
- Mercurial (``hg``)
- Git (``git``)
- Bazaar (``bzr``)
.. class:: VcsClient(vcs_type, path)
API for interacting with source-controlled paths independent of
actual version-control implementation.
:param vcs_type: type of VCS to use (e.g. 'svn', 'hg', 'bzr', 'git'), ``str``
:param path: filesystem path where code is/will be checked out , ``str``
.. method:: path_exists() -> bool
:returns: True if path exists on disk.
.. method:: get_path() -> str
:returns: filesystem path this client is initialized with.
.. method:: url_matches(url, url_or_shortcut) -> bool
Lets the client decide whether the URL and the other URL or shortcut are equivalent.
Checks string equality by default.
:param url_or_shortcut: URL or shortcut (e.g. bzr launchpad URL)
:returns: True if the parameters are equivalent
.. method:: get_version([spec=None]) -> str
:param spec: token for identifying repository revision
desired. Token might be a tagname, branchname, version-id,
or SHA-ID depending on the VCS implementation.
- svn: anything accepted by ``svn info --help``,
e.g. a ``revnumber``, ``{date}``, ``HEAD``, ``BASE``, ``PREV``, or
``COMMITTED``
- git: anything accepted by ``git log``, e.g. a tagname,
branchname, or sha-id.
- hg: anything accepted by ``hg log -r``, e.g. a tagname, sha-ID,
revision-number
- bzr: revisionspec as returned by ``bzr help revisionspec``,
e.g. a tagname or ``revno:``
:returns: current revision number of the repository. Or if
spec is provided, the globally unique identifier
(e.g. revision number, or SHA-ID) of a revision specified by
some token.
.. method:: get_remote_version([fetch=False]) -> str
Find an identifier for the current revision on remote.
Token spec might be a tagname,
version-id, SHA-ID, ... depending on the VCS implementation.
:param fetch: if False, only local information may be used
:returns: current revision number of the remote repository.
.. method:: get_current_version_label() -> str
Find a description for the current local version.
Token spec might be a branchname,
version-id, SHA-ID, ... depending on the VCS implementation.
:returns: short description of local version (e.g. branchname, tagname).
.. method:: checkout(url, [version=''], [verbose=False], [shallow=False])
Checkout the given URL to the path associated with this client.
:param url: URL of source control to check out
:param version: specific version to check out
:param verbose: flag to run verbosely
:param shallow: flag to create shallow clone without history
.. method:: update(version)
Update the local checkout from upstream source control.
.. method:: detect_presence() -> bool
:returns: True if path has a checkout with matching VCS type,
e.g. if the type of this client is 'svn', the checkout at
the path is managed by Subversion.
.. method:: get_vcs_type_name() -> str
:returns: type of VCS this client is initialized with.
.. method:: get_url() -> str
:returns: Upstream URL that this code was checked out from.
.. method:: get_branch_parent()
(Git Only)
:returns: parent branch.
.. method:: get_diff([basepath=None])
:param basepath: compute diff relative to this path, if provided
:returns: A string showing local differences
.. method:: get_status([basepath=None[, untracked=False]])
Calls the scm status command. The semantics of untracked are
difficult to generalize. In SVN, this would be new files only. In
git, hg, and bzr, this would be changes that have not been added
for commit.
:param basepath: status path will be relative to this, if provided.
:param untracked: If True, also show changes that would not commit
:returns: A string summarizing locally modified files
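A short usage sketch of this generic API follows; the checkout path and
repository URL are placeholders::

    from vcstools import VcsClient

    client = VcsClient('git', '/tmp/some_checkout')
    if not client.detect_presence():
        client.checkout('https://github.com/vcstools/vcstools.git', version='master')
    else:
        client.update('master')
    print(client.get_version())
    print(client.get_status(untracked=True))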
vcstools-0.1.42/doc/vcstools.rst
vcstools Package
================
:mod:`vcstools` Package
-----------------------
.. automodule:: vcstools
:members:
:undoc-members:
:show-inheritance:
:mod:`bzr` Module
-----------------
.. automodule:: vcstools.bzr
:members:
:undoc-members:
:show-inheritance:
:mod:`git` Module
-----------------
.. automodule:: vcstools.git
:members:
:undoc-members:
:show-inheritance:
:mod:`hg` Module
----------------
.. automodule:: vcstools.hg
:members:
:undoc-members:
:show-inheritance:
:mod:`svn` Module
-----------------
.. automodule:: vcstools.svn
:members:
:undoc-members:
:show-inheritance:
:mod:`tar` Module
-----------------
.. automodule:: vcstools.tar
:members:
:undoc-members:
:show-inheritance:
:mod:`vcs_abstraction` Module
-----------------------------
.. automodule:: vcstools.vcs_abstraction
:members:
:special-members:
:undoc-members:
:show-inheritance:
:mod:`vcs_base` Module
----------------------
.. automodule:: vcstools.vcs_base
:members:
:special-members:
:undoc-members:
:show-inheritance:
vcstools-0.1.42/rosdoc.yaml
- builder: sphinx
sphinx_root_dir: doc
vcstools-0.1.42/setup.cfg
[aliases]
test = nosetests --with-coverage --cover-package=vcstools --where=test --cover-min-percentage=80
vcstools-0.1.42/setup.py
from setuptools import setup
import imp
with open('README.rst') as readme_file:
README = readme_file.read()
def get_version():
ver_file = None
try:
ver_file, pathname, description = imp.find_module('__version__', ['src/vcstools'])
vermod = imp.load_module('__version__', ver_file, pathname, description)
version = vermod.version
return version
finally:
if ver_file is not None:
ver_file.close()
test_required = [
"nose",
"coverage",
"coveralls",
"mock",
"pep8",
# run checks in multiple environments
"tox",
"tox-pyenv",
# code metrics
"radon~=1.4.0; python_version > '3'",
# coala lint checks only in newest python
"coala; python_version > '3'",
"coala-bears; python_version > '3'",
# mypy typing checks only in newest python
"mypy; python_version > '3'"
]
setup(name='vcstools',
version=get_version(),
packages=['vcstools'],
package_dir={'': 'src'},
scripts=[],
install_requires=['pyyaml', 'python-dateutil'],
# tests_require automatically installed when running python setup.py test
tests_require=test_required,
# extras_require allow pip install .[test]
extras_require={
'test': test_required
},
author="Tully Foote, Thibault Kruse, Ken Conley",
author_email="tfoote@osrfoundation.org",
url="http://wiki.ros.org/vcstools",
keywords=["scm", "vcs", "git", "svn", "hg", "bzr"],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: BSD License",
"Development Status :: 7 - Inactive",
"Topic :: Software Development :: Version Control"
],
description="VCS/SCM source control library for svn, git, hg, and bzr",
long_description=README,
license="BSD")
vcstools-0.1.42/src/vcstools/__init__.py
# Software License Agreement (BSD License)
#
# Copyright (c) 2010, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
"""
Library for tools that need to interact with ROS-support
version control systems.
"""
from __future__ import absolute_import, print_function, unicode_literals
import logging
from vcstools.vcs_abstraction import VcsClient, VCSClient, register_vcs, \
get_vcs_client
from vcstools.svn import SvnClient
from vcstools.bzr import BzrClient
from vcstools.hg import HgClient
from vcstools.git import GitClient
from vcstools.tar import TarClient
# configure the VCSClient
register_vcs("svn", SvnClient)
register_vcs("bzr", BzrClient)
register_vcs("git", GitClient)
register_vcs("hg", HgClient)
register_vcs("tar", TarClient)
def setup_logger():
"""
creates a logger 'vcstools'
"""
logger = logging.getLogger('vcstools')
logger.setLevel(logging.WARN)
handler = logging.StreamHandler()
handler.setLevel(logging.WARN)
# create formatter
template = '%(levelname)s [%(name)s] %(message)s[/%(name)s]'
formatter = logging.Formatter(template)
# add formatter to handler
handler.setFormatter(formatter)
# add handler to logger
logger.addHandler(handler)
setup_logger()
vcstools-0.1.42/src/vcstools/__version__.py
version = '0.1.42'
vcstools-0.1.42/src/vcstools/bzr.py
# Software License Agreement (BSD License)
#
# Copyright (c) 2010, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
"""
bzr vcs support.
"""
from __future__ import absolute_import, print_function, unicode_literals
import os
import re
import email.utils # For email parsing
import dateutil.parser # Date string parsing
# first try python3, then python2
try:
from urllib.request import url2pathname
except ImportError:
from urllib2 import url2pathname
from vcstools.vcs_base import VcsClientBase, VcsError
from vcstools.common import sanitized, normalized_rel_path, \
run_shell_command, ensure_dir_notexists
def _get_bzr_version():
"""Looks up bzr version by calling bzr --version.
:raises: VcsError if bzr is not installed"""
try:
value, output, _ = run_shell_command('bzr --version',
shell=True,
us_env=True)
if value == 0 and output is not None and len(output.splitlines()) > 0:
version = output.splitlines()[0]
else:
raise VcsError("bzr --version returned %s," +
" maybe bzr is not installed" %
value)
except VcsError as e:
raise VcsError("Coud not determine whether bzr is installed: %s" % e)
return version
class BzrClient(VcsClientBase):
def __init__(self, path):
"""
:raises: VcsError if bzr not detected
"""
VcsClientBase.__init__(self, 'bzr', path)
_get_bzr_version()
@staticmethod
def get_environment_metadata():
metadict = {}
try:
metadict["version"] = _get_bzr_version()
except:
metadict["version"] = "no bzr installed"
return metadict
def get_url(self):
"""
:returns: BZR URL of the branch (output of bzr info command),
or None if it cannot be determined
"""
result = None
if self.detect_presence():
cmd = 'bzr info %s' % self._path
_, output, _ = run_shell_command(cmd, shell=True, us_env=True)
matches = [l for l in output.splitlines() if l.startswith(' parent branch: ')]
if matches:
ppath = url2pathname(matches[0][len(' parent branch: '):])
# when it can, bzr substitutes absolute paths for relative paths
if (ppath is not None and os.path.isdir(ppath) and not os.path.isabs(ppath)):
result = os.path.abspath(os.path.join(os.getcwd(), ppath))
else:
result = ppath
return result
def url_matches(self, url, url_or_shortcut):
if super(BzrClient, self).url_matches(url, url_or_shortcut):
return True
# if we got a shortcut (e.g. launchpad url), we compare using
# bzr info and return that one if result matches.
result = False
if url_or_shortcut is not None:
cmd = 'bzr info %s' % url_or_shortcut
value, output, _ = run_shell_command(cmd, shell=True, us_env=True)
if value == 0:
for line in output.splitlines():
sline = line.strip()
for prefix in ['shared repository: ',
'repository branch: ',
'branch root: ']:
if sline.startswith(prefix):
if super(BzrClient, self).url_matches(url, sline[len(prefix):]):
result = True
break
return result
@staticmethod
def static_detect_presence(path):
return os.path.isdir(os.path.join(path, '.bzr'))
def checkout(self, url, version=None, verbose=False,
shallow=False, timeout=None):
if url is None or url.strip() == '':
raise ValueError('Invalid empty url : "%s"' % url)
# bzr 2.5.1 fails if empty directory exists
if not ensure_dir_notexists(self.get_path()):
self.logger.error("Can't remove %s" % self.get_path())
return False
cmd = 'bzr branch'
if version:
cmd += ' -r %s' % version
cmd += ' %s %s' % (url, self._path)
value, _, msg = run_shell_command(cmd,
shell=True,
show_stdout=verbose,
verbose=verbose)
if value != 0:
if msg:
self.logger.error('%s' % msg)
return False
return True
def update(self, version='', verbose=False, timeout=None):
if not self.detect_presence():
return False
value, _, _ = run_shell_command("bzr pull",
cwd=self._path,
shell=True,
show_stdout=True,
verbose=verbose)
if value != 0:
return False
# Ignore verbose param, bzr is pretty verbose on update anyway
if version is not None and version != '':
cmd = "bzr update -r %s" % (version)
else:
cmd = "bzr update"
value, _, _ = run_shell_command(cmd,
cwd=self._path,
shell=True,
show_stdout=True,
verbose=verbose)
if value == 0:
return True
return False
def get_version(self, spec=None):
"""
:param spec: (optional) revisionspec of desired version. May
be any revisionspec as returned by 'bzr help revisionspec',
e.g. a tagname or 'revno:'
:returns: the current revision number of the repository. Or if
spec is provided, the number of a revision specified by some
token.
"""
if self.detect_presence():
if spec is not None:
command = ['bzr log -r %s .' % sanitized(spec)]
_, output, _ = run_shell_command(command,
shell=True,
cwd=self._path,
us_env=True)
if output is None or output.strip() == '' or output.startswith("bzr:"):
return None
else:
matches = [l for l in output.split('\n') if l.startswith('revno: ')]
if len(matches) == 1:
return matches[0].split()[1]
else:
_, output, _ = run_shell_command('bzr revno --tree',
shell=True,
cwd=self._path,
us_env=True)
return output.strip()
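# Illustrative return values (actual numbers depend on the local branch;
# 'tag:1.2.3' is a placeholder revisionspec):
#   client.get_version()            -> e.g. '42'  (current tree revno)
#   client.get_version('tag:1.2.3') -> the revno recorded for that tag, or None if unknown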
def get_current_version_label(self):
# url contains branch information
return None
def get_remote_version(self, fetch=False):
# Not sure how to get any useful information from bzr about this,
# since bzr has no globally unique IDs
return None
def get_diff(self, basepath=None):
response = None
if basepath is None:
basepath = self._path
if self.path_exists():
rel_path = sanitized(normalized_rel_path(self._path, basepath))
command = "bzr diff %s" % rel_path
command += " -p1 --prefix %s/:%s/" % (rel_path, rel_path)
_, response, _ = run_shell_command(command, shell=True, cwd=basepath)
return response
def get_affected_files(self, revision):
cmd = "bzr status -c {0} -S -V".format(
revision)
code, output, _ = run_shell_command(cmd, shell=True, cwd=self._path)
affected = []
if code == 0:
for filename in output.splitlines():
affected.append(filename.split(" ")[2])
return affected
def get_log(self, relpath=None, limit=None):
response = []
if relpath is None:
relpath = ''
# Compile regexes
id_regex = re.compile('^revno: ([0-9]+)$', flags=re.MULTILINE)
committer_regex = re.compile('^committer: (.+)$', flags=re.MULTILINE)
timestamp_regex = re.compile('^timestamp: (.+)$', flags=re.MULTILINE)
message_regex = re.compile('^ (.+)$', flags=re.MULTILINE)
if self.path_exists() and os.path.exists(os.path.join(self._path, relpath)):
# Get the log
limit_cmd = (("--limit=%d" % (int(limit))) if limit else "")
command = "bzr log %s %s" % (sanitized(relpath), limit_cmd)
return_code, text_response, stderr = run_shell_command(command, shell=True, cwd=self._path)
if return_code == 0:
revno_match = id_regex.findall(text_response)
committer_match = committer_regex.findall(text_response)
timestamp_match = timestamp_regex.findall(text_response)
message_match = message_regex.findall(text_response)
# Extract the entries
for revno, committer, timestamp, message in zip(revno_match,
committer_match,
timestamp_match,
message_match):
author, email_address = email.utils.parseaddr(committer)
date = dateutil.parser.parse(timestamp)
log_data = {'id': revno,
'author': author,
'email': email_address,
'message': message,
'date': date}
response.append(log_data)
return response
def get_status(self, basepath=None, untracked=False):
response = None
if basepath is None:
basepath = self._path
if self.path_exists():
rel_path = normalized_rel_path(self._path, basepath)
command = "bzr status %s -S" % sanitized(rel_path)
if not untracked:
command += " -V"
_, response, _ = run_shell_command(command, shell=True, cwd=basepath)
response_processed = ""
for line in response.split('\n'):
if len(line.strip()) > 0:
response_processed += line[0:4] + rel_path + '/'
response_processed += line[4:] + '\n'
response = response_processed
return response
def get_branches(self, local_only=False):
# see http://doc.bazaar.canonical.com/beta/en/user-guide/shared_repository_layouts.html
# the 'bzr branches' command exists, but is not useful here (too many assumptions)
# Else bazaar branches are equivalent to forks in git and hg
# such branches (forks) on launchpad could be retrieved using
# the launchpadlib, but the API is probably not stable.
raise NotImplementedError("get_branches is not implemented for bzr")
def export_repository(self, version, basepath):
# execute the bzr export cmd
cmd = 'bzr export --format=tgz {0} '.format(basepath + '.tar.gz')
cmd += '{0}'.format(version)
result, _, _ = run_shell_command(cmd, shell=True, cwd=self._path)
if result:
return False
return True
BZRClient = BzrClient
vcstools-0.1.42/src/vcstools/common.py
# Software License Agreement (BSD License)
#
# Copyright (c) 2010, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, print_function, unicode_literals
import errno
import os
import sys
import copy
import shlex
import subprocess
import logging
import netrc
import tempfile
import shutil
import threading
import signal
try:
# py3k
from urllib.request import urlopen, HTTPPasswordMgrWithDefaultRealm, \
HTTPBasicAuthHandler, build_opener
from urllib.parse import urlparse
from queue import Queue
except ImportError:
# py2.7
from urlparse import urlparse
from urllib2 import urlopen, HTTPPasswordMgrWithDefaultRealm, \
HTTPBasicAuthHandler, build_opener
from Queue import Queue
from vcstools.vcs_base import VcsError
def ensure_dir_notexists(path):
"""
helper function, removes dir if it exists
:returns: True if dir does not exist after this function
:raises: OSError if dir exists and removal failed for non-trivial reasons
"""
try:
if os.path.exists(path):
os.rmdir(path)
return True
except OSError as ose:
# ignore if directory
if ose.errno not in [errno.ENOENT, errno.ENOTEMPTY, errno.ENOTDIR]:
return False
def urlopen_netrc(uri, *args, **kwargs):
'''
wrapper to urlopen, using netrc on 401 as fallback
Since this wraps both python2 and python3 urlopen, accepted arguments vary
:returns: file-like object as urllib.urlopen
:raises: IOError and urlopen errors
'''
try:
return urlopen(uri, *args, **kwargs)
except IOError as ioe:
if hasattr(ioe, 'code') and ioe.code == 401:
# 401 means authentication required, we try netrc credentials
result = _netrc_open(uri)
if result is not None:
return result
raise
def urlretrieve_netrc(url, filename=None):
'''
writes a temporary file with the contents of url. This works
similarly to urllib2.urlretrieve, but uses netrc as a fallback on 401,
and has no reporthook or data option. Also, urllib2.urlretrieve
malfunctions behind a proxy, so we avoid it.
:param url: What to retrieve
:param filename: target file (default is basename of url)
:returns: (filename, response_headers)
:raises: IOError and urlopen errors
'''
fname = None
fhand = None
try:
resp = urlopen_netrc(url)
if filename:
fhand = open(filename, 'wb')
fname = filename
else:
# Make a temporary file
fdesc, fname = tempfile.mkstemp()
fhand = os.fdopen(fdesc, "wb")
# Copy the http response to the temporary file.
shutil.copyfileobj(resp, fhand)
finally:
if fhand:
fhand.close()
return (fname, resp.headers)
def _netrc_open(uri, filename=None):
'''
open uri using netrc credentials.
:param uri: uri to open
:param filename: optional, path to non-default netrc config file
:returns: file-like object from opening a socket to uri, or None
:raises IOError: if opening .netrc file fails (unless file not found)
'''
if not uri:
return None
parsed_uri = urlparse(uri)
machine = parsed_uri.netloc
if not machine:
return None
opener = None
try:
info = netrc.netrc(filename).authenticators(machine)
if info is not None:
(username, _, password) = info
if username and password:
pass_man = HTTPPasswordMgrWithDefaultRealm()
pass_man.add_password(None, machine, username, password)
authhandler = HTTPBasicAuthHandler(pass_man)
opener = build_opener(authhandler)
return opener.open(uri)
else:
# caught below, like other netrc parse errors
raise netrc.NetrcParseError('No authenticators for "%s"' % machine)
except IOError as ioe:
if ioe.errno != 2:
# if = 2, User probably has no .netrc, this is not an error
raise
except netrc.NetrcParseError as neterr:
logger = logging.getLogger('vcstools')
logger.warn('WARNING: parsing .netrc: %s' % str(neterr))
# we could install_opener() here, but prefer to keep
# default opening clean. Client can do that, though.
return None
def normalized_rel_path(path, basepath):
"""
If path is absolute, return relative path to it from
basepath. If relative, return it normalized.
:param path: an absolute or relative path
:param basepath: if path is absolute, shall be made relative to this
:returns: a normalized relative path
"""
# gracefully ignore invalid input absolute path + no basepath
if path is None:
return basepath
if os.path.isabs(path) and basepath is not None:
return os.path.normpath(os.path.relpath(os.path.realpath(path), os.path.realpath(basepath)))
return os.path.normpath(path)
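# Illustrative examples (POSIX paths, assuming realpath() resolves no symlinks):
#   normalized_rel_path('/base/pkg/src', '/base') -> 'pkg/src'
#   normalized_rel_path('pkg/./src', '/base')     -> 'pkg/src' (relative input is only normalized)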
def sanitized(arg):
"""
makes sure a composed command to be executed via shell was not injected.
A composed command would be like "ls %s"%foo.
In this example, foo could be "; rm -rf *"
sanitized raises an Error when it detects such an attempt
:raises VcsError: on injection attempts
"""
if arg is None or arg.strip() == '':
return ''
arg = str(arg.strip('"').strip())
safe_arg = '"%s"' % arg
# this also detects some false positives, like bar"";foo
if '"' in arg:
if (len(shlex.split(safe_arg, False, False)) != 1):
raise VcsError("Shell injection attempt detected: >%s< = %s" %
(arg, shlex.split(safe_arg, False, False)))
return safe_arg
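# Illustrative behaviour:
#   sanitized('1.2.3')           -> '"1.2.3"' (safe to interpolate into a shell command)
#   sanitized('foo"; rm -rf ~')  -> raises VcsError (embedded quote looks like an injection attempt)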
def _discard_line(line):
if line is None:
return True
# the most common feedback lines of scms. We don't care about those. We let through anything unusual only.
discard_prefixes = ["adding ", "added ", "updating ", "requesting ", "pulling from ",
"searching for ", "(", "no changes found",
"0 files",
"A ", "D ", "U ",
"At revision", "Path: ", "First,",
"Installing", "Using ",
"No ", "Tree ",
"All ",
"+N ", "-D ", " M ", " M* ", "RM" # bzr
]
for pre in discard_prefixes:
if line.startswith(pre):
return True
return False
def _read_shell_output(proc, no_filter, verbose, show_stdout, output_queue):
# when we read output in while loop, it would not be returned
# in communicate()
stdout_buf = []
stderr_buf = []
if not no_filter:
if (verbose or show_stdout):
# this loop runs until proc is done; it listens to the pipe, prints
# and stores the result in a buffer for returning. This allows proc to run
# while we can still filter its output. We avoid readline() because
# it may block forever
for line in iter(proc.stdout.readline, b''):
line = line.decode('UTF-8')
if line is not None and line != '':
if verbose or not _discard_line(line):
sys.stdout.write(line)
stdout_buf.append(line)
if (not line or proc.returncode is not None):
break
# stderr was swallowed in pipe, in verbose mode print lines
if verbose:
for line in iter(proc.stderr.readline, b''):
line = line.decode('UTF-8')
if line != '':
sys.stdout.write(line)
stderr_buf.append(line)
if not line:
break
output_queue.put(proc.communicate())
output_queue.put(stdout_buf)
output_queue.put(stderr_buf)
def run_shell_command(cmd, cwd=None, shell=False, us_env=True,
show_stdout=False, verbose=False, timeout=None,
no_warn=False, no_filter=False):
"""
executes a command and hides the stdout output, logs stderr
output when the command result is not zero. Make sure to sanitize
arguments in the command.
:param cmd: A string to execute.
:param shell: Whether to use os shell.
:param us_env: changes env var LANG before running command, can influence program output
:param show_stdout: show some of the output (except for discarded lines in _discard_line()), ignored if no_filter
:param no_warn: hides warnings
:param verbose: show all output, overrides no_warn, ignored if no_filter
:param timeout: time allocated to the subprocess
:param no_filter: does not wrap stdout, so the invoked command prints everything outside our knowledge;
this is DANGEROUS, as it is vulnerable to shell injection.
:returns: (returncode, stdout, message); stdout is None if no_filter==True, and message is None unless the command failed and produced stderr output
:raises: VcsError on OSError
"""
try:
env = copy.copy(os.environ)
if us_env:
env["LANG"] = "en_US.UTF-8"
if no_filter:
# in no_filter mode, we cannot pipe stdout/stderr, as this
# causes some prompts to be hidden (e.g. mercurial over
# http)
stdout_target = None
stderr_target = None
else:
stdout_target = subprocess.PIPE
stderr_target = subprocess.PIPE
# additional parameters to Popen when using a timeout
crflags = {}
if timeout is not None:
if hasattr(os.sys, 'winver'):
crflags['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP
else:
crflags['preexec_fn'] = os.setsid
proc = subprocess.Popen(cmd,
shell=shell,
cwd=cwd,
stdout=stdout_target,
stderr=stderr_target,
env=env,
**crflags)
# using a queue to enable usage in a separate thread
q = Queue()
if timeout is None:
_read_shell_output(proc, no_filter, verbose, show_stdout, q)
else:
t = threading.Thread(target=_read_shell_output,
args=[proc, no_filter, verbose, show_stdout, q])
t.start()
t.join(timeout)
if t.is_alive():
if hasattr(os.sys, 'winver'):
os.kill(proc.pid, signal.CTRL_BREAK_EVENT)
else:
os.killpg(proc.pid, signal.SIGTERM)
t.join()
(stdout, stderr) = q.get()
stdout_buf = q.get()
stderr_buf = q.get()
if stdout is not None:
stdout_buf.append(stdout.decode('utf-8'))
stdout = "\n".join(stdout_buf)
if stderr is not None:
stderr_buf.append(stderr.decode('utf-8'))
stderr = "\n".join(stderr_buf)
message = None
if proc.returncode != 0 and stderr is not None and stderr != '':
logger = logging.getLogger('vcstools')
message = "Command failed: '%s'" % (cmd)
if cwd is not None:
message += "\n run at: '%s'" % (cwd)
message += "\n errcode: %s:\n%s" % (proc.returncode, stderr)
if not no_warn:
logger.warn(message)
result = stdout
if result is not None:
result = result.rstrip()
return (proc.returncode, result, message)
except OSError as ose:
logger = logging.getLogger('vcstools')
message = "Command failed with OSError. '%s' <%s, %s>:\n%s" % (cmd, shell, cwd, ose)
logger.error(message)
raise VcsError(message)
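# Usage sketch for run_shell_command (the command is only an illustration;
# within vcstools, arguments are composed with sanitized() first):
#
#   >>> code, out, msg = run_shell_command('git --version', shell=True)
#   >>> code
#   0
#
# With no_filter=True, stdout is not captured and out is None; with a
# timeout, the command is read in a helper thread and killed once the
# timeout expires.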
vcstools-0.1.42/src/vcstools/git.py 0000664 0000000 0000000 00000117730 13522611462 0017265 0 ustar 00root root 0000000 0000000 # Software License Agreement (BSD License)
#
# Copyright (c) 2010, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
"""
git vcs support.
refnames in git can be branch names, hashes, partial hashes, or tags. On
checkout, git will disambiguate by checking them in that order, taking
the first that applies.
This class aims to provide git for linear centralized workflows. This
means in case of ambiguity, we assume that the only relevant remote
is the one named "origin", and we assume that commits once on origin
remain on origin.
A challenge with git is that it has strong, reasonable conventions, but
is very permissive about breaking them. E.g. it is possible to name
remotes and branches with names like "refs/heads/master", to give
branches and tags the same name, or to use a valid SHA-ID as a name, etc.
Similarly, git allows plenty of ways to reference any object; in case
of ambiguities, git attempts the most reasonable
disambiguation, and in some cases warns.
"""
from __future__ import absolute_import, print_function, unicode_literals
import os
import sys
import shutil
import tempfile
import gzip
import dateutil.parser # For parsing date strings
from distutils.version import LooseVersion
import logging
from vcstools.vcs_base import VcsClientBase, VcsError
from vcstools.common import sanitized, normalized_rel_path, run_shell_command
from vcstools.git_archive_all import *
class GitError(Exception):
pass
def _git_diff_path_submodule_change(diff, rel_path_prefix):
"""
Parses git diff result and changes the filename prefixes.
"""
if diff is None:
return None
INIT = 0
INDIFF = 1
# small state machine makes sure we never touch anything inside
# the actual diff
state = INIT
result = ""
s_list = [line for line in diff.split(os.linesep)]
subrel_path = rel_path_prefix
for line in s_list:
newline = line
if line.startswith("Entering '"):
state = INIT
submodulepath = line.rstrip("'")[len("Entering '"):]
subrel_path = os.path.join(rel_path_prefix, submodulepath)
continue
if line.startswith("diff --git "):
state = INIT
if state == INIT:
if line.startswith("@@"):
state = INDIFF
else:
if line.startswith("---") and not line.startswith("--- /dev/null"):
newline = "--- " + subrel_path + line[5:]
if line.startswith("+++") and not line.startswith("+++ /dev/null"):
newline = "+++ " + subrel_path + line[5:]
if line.startswith("diff --git"):
# first replacing b in case path starts with a/
newline = line.replace(" b/", " " + subrel_path + "/", 1)
newline = newline.replace(" a/", " " + subrel_path + "/", 1)
if newline != '':
result += newline + '\n'
return result
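# Illustration (hypothetical input): for output obtained via
# "git submodule foreach --recursive git diff HEAD", a header pair such as
#
#   --- a/foo.txt
#   +++ b/foo.txt
#
# inside a submodule announced as "Entering 'sub'" is rewritten to
#
#   --- some/rel/path/sub/foo.txt
#   +++ some/rel/path/sub/foo.txt
#
# so that all filenames are expressed relative to the given rel_path_prefix.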
def _get_git_version():
"""Looks up git version by calling git --version.
:raises: VcsError if git is not installed or returns
something unexpected"""
try:
cmd = 'git --version'
value, version, _ = run_shell_command(cmd, shell=True)
if value != 0:
raise VcsError("git --version returned %s, maybe git is not installed" % (value))
prefix = 'git version '
if version is not None and version.startswith(prefix):
version = version[len(prefix):].strip()
else:
raise VcsError("git --version returned invalid string: '%s'" % version)
except VcsError as exc:
raise VcsError("Could not determine whether git is installed: %s" % exc)
return version
class GitClient(VcsClientBase):
def __init__(self, path):
"""
:raises: VcsError if git not detected
"""
VcsClientBase.__init__(self, 'git', path)
self.gitversion = _get_git_version()
@staticmethod
def get_environment_metadata():
metadict = {}
try:
version = _get_git_version()
resetkeep = LooseVersion(version) >= LooseVersion('1.7.1')
submodules = LooseVersion(version) > LooseVersion('1.7')
metadict["features"] = "'reset --keep': %s, submodules: %s" % (resetkeep, submodules)
except VcsError:
version = "No git installed"
metadict["version"] = version
return metadict
def get_url(self):
"""
:returns: git URL of the directory path (from git config remote.<remote>.url), or None if it cannot be determined
"""
if self.detect_presence():
cmd = "git config --get remote.%s.url" % self._get_default_remote()
_, output, _ = run_shell_command(cmd, shell=True, cwd=self._path)
return output.rstrip()
return None
def _get_default_remote(self):
"""
in order to support users who name their default remote
something else than origin, read the remote name.
"""
# TODO: maybe pick other remote depending on context
return 'origin'
@staticmethod
def static_detect_presence(path):
# There is a proposed implementation of detect_presence which might be
# more future proof, but would depend on parsing the output of git
# See: https://github.com/vcstools/vcstools/pull/10
return os.path.exists(os.path.join(path, '.git'))
def checkout(self, url, version=None, verbose=False, shallow=False, timeout=None):
"""calls git clone and then, if version was given, update(version)"""
if url is None or url.strip() == '':
raise ValueError('Invalid empty url : "%s"' % url)
# since we cannot know whether version names a branch, clone the default branch initially
cmd = 'git clone'
if shallow:
cmd += ' --depth 1'
if LooseVersion(self.gitversion) >= LooseVersion('1.7.10'):
cmd += ' --no-single-branch'
if version is None:
# quicker than using _do_update, but undesired when switching branches next
cmd += ' --recursive'
cmd += ' %s %s' % (url, self._path)
value, _, msg = run_shell_command(cmd,
shell=True,
no_filter=True,
show_stdout=verbose,
timeout=timeout,
verbose=verbose)
if value != 0:
if msg:
self.logger.error('%s' % msg)
return False
try:
# update to make sure we are on the right branch. Do not
# check for "master" here, as default branch could be anything
if version is not None:
return self._do_update(version,
verbose=verbose,
fast_foward=True,
timeout=timeout,
update_submodules=True)
else:
return True
except GitError:
return False
def _update_submodules(self, verbose=False, timeout=None):
# update submodules ( and init if necessary ).
if LooseVersion(self.gitversion) > LooseVersion('1.7'):
cmd = "git submodule update --init --recursive"
value, _, _ = run_shell_command(cmd,
shell=True,
cwd=self._path,
show_stdout=True,
timeout=timeout,
verbose=verbose)
if value != 0:
return False
return True
def update(self, version=None, verbose=False, force_fetch=False, timeout=None):
"""
if version is None, attempts fast-forwarding current branch, if any.
Else interprets version as a local branch, remote branch, tagname,
hash, etc.
If it is a branch, attempts to move to it unless
already on it, and to fast-forward, unless it is not a tracking
branch. Otherwise checks out the tag or whatever else version refers to,
without tracking. Does not move away if the current commit would become dangling.
:return: True if already up-to-date with remote or after successful fast-forward
"""
if not self.detect_presence():
return False
try:
# fetch in any case to get updated tags even if we don't need them
self._do_fetch()
return self._do_update(refname=version, verbose=verbose, timeout=timeout)
except GitError:
return False
def _do_update(self,
refname=None,
verbose=False,
fast_foward=True,
timeout=None,
update_submodules=True):
'''
updates without fetching, thus any necessary fetching must be done beforehand.
Allows arguments to reduce unnecessary steps after checkout.
:param fast_foward: if false, does not perform fast-forward
:param update_submodules: if false, does not attempt to update submodules
'''
# are we on any branch?
current_branch = self._get_branch()
branch_parent = None
if current_branch:
# the local branch might be named differently from the remote one by the user; we respect that
same_branch = (refname == current_branch)
if not same_branch:
(branch_parent, remote) = self._get_branch_parent(current_branch=current_branch)
if not refname:
# ! changing refname to cause fast-forward
refname = branch_parent
same_branch = True
else:
same_branch = (refname == branch_parent)
if same_branch and not branch_parent:
# avoid expensive checking branch parent again later
fast_foward = False
else:
same_branch = False
if not refname:
# we are neither tracking, nor did we get any refname to update to
return (not update_submodules) or self._update_submodules(verbose=verbose,
timeout=timeout)
default_remote = self._get_default_remote()
if same_branch:
if fast_foward:
if not branch_parent and current_branch:
(branch_parent, remote) = self._get_branch_parent(current_branch=current_branch)
if remote != default_remote:
# if remote is not origin, must not fast-forward (because based on origin)
logger = logging.getLogger('vcstools')
logger.warn("vcstools only handles branches tracking default remote,"
" branch '%s' tracks remote '%s'.\nRepository path is '%s'."
% (current_branch, remote, self._path))
branch_parent = None
# already on correct branch, fast-forward if there is a parent
if branch_parent:
if not self._do_fast_forward(branch_parent=branch_parent,
fetch=False,
verbose=verbose):
return False
else:
# refname can be a different branch or something else than a branch
refname_is_local_branch = self._is_local_branch(refname)
if refname_is_local_branch:
# might also be remote branch, but we treat it as local
refname_is_remote_branch = False
else:
refname_is_remote_branch = self._is_remote_branch(refname, fetch=False)
refname_is_branch = refname_is_remote_branch or refname_is_local_branch
current_version = None
# shortcut if version is the same as requested
if not refname_is_branch:
current_version = self.get_version()
if current_version == refname:
return (not update_submodules) or self._update_submodules(verbose=verbose,
timeout=timeout)
if current_branch is None:
if not current_version:
current_version = self.get_version()
# prevent commit from becoming dangling
if self._is_commit_in_orphaned_subtree(current_version, fetch=False):
# commit becomes dangling unless we move to one of its descendants
if not self._rev_list_contains(refname, current_version, fetch=False):
# TODO: should raise error instead of printing message
sys.stderr.write("vcstools refusing to move away from dangling commit, to protect your work.\n")
return False
# git checkout makes all the decisions for us
self._do_checkout(refname, verbose=verbose, fetch=False)
if refname_is_local_branch:
# if we just switched to a local tracking branch (not created one), we should also fast forward
(new_branch_parent, remote) = self._get_branch_parent(current_branch=refname)
if remote != default_remote:
# if remote is not origin, must not fast-forward (because based on origin)
sys.stderr.write("vcstools only handles branches tracking default remote," +
" branch '%s' tracks remote '%s'\n" % (current_branch, remote))
new_branch_parent = None
if new_branch_parent is not None:
if fast_foward:
if not self._do_fast_forward(branch_parent=new_branch_parent,
fetch=False,
verbose=verbose):
return False
return (not update_submodules) or self._update_submodules(verbose=verbose, timeout=timeout)
def get_current_version_label(self):
"""
For git we change the label to clarify when a different remote
is configured.
"""
branch = self._get_branch()
if branch is None:
return ''
result = branch
(remote_branch, remote) = self._get_branch_parent()
if remote_branch is not None:
default_remote = self._get_default_remote()
# if not following 'origin/branch', display 'branch < tracked ref'
if (remote_branch != branch or remote != default_remote):
result += ' < '
if remote != default_remote:
result += remote + '/'
result += remote_branch
return result
def get_default_remote_version_label(self):
if self.detect_presence():
_, output, _ = run_shell_command('git remote show %s' % self._get_default_remote(),
shell=True,
cwd=self._path)
for line in output.splitlines():
elems = line.split()
if elems[0:2] == ['HEAD', 'branch:']:
return elems[2]
return None
def get_remote_version(self, fetch=False):
# try tracked branch on origin (returns None if on other remote)
(parent_branch, remote) = self._get_branch_parent(fetch=fetch)
if parent_branch is not None:
return self.get_version(spec=remote+'/'+parent_branch)
def get_version(self, spec=None):
"""
:param spec: (optional) token to identify desired version. For
git, this may be anything accepted by git log, e.g. a tagname,
branchname, or sha-id.
:returns: current SHA-ID of the repository. Or if spec is
provided, the SHA-ID of a commit specified by some token if found, else None
"""
if self.detect_presence():
command = "git log -1"
if spec is not None:
command += " %s" % sanitized(spec)
command += " --format='%H'"
_, output, _ = run_shell_command(command, shell=True,
no_warn=True, cwd=self._path)
if output.strip() != '':
# On Windows the version can have single quotes around it
version = output.strip().strip("'")
return version # found SHA-ID
elif spec is None:
return None
# we try again after fetching if given spec had not been found
try:
self._do_fetch()
except GitError:
return None
# we repeat the call once again after fetching
_, output, _ = run_shell_command(command, shell=True,
no_warn=True, cwd=self._path)
if output.strip() == '':
# even if after fetching, not found specified version
return None
version = output.strip().strip("'")
return version
return None
def get_diff(self, basepath=None):
response = ''
if basepath is None:
basepath = self._path
if self.path_exists():
rel_path = normalized_rel_path(self._path, basepath)
# git needs special treatment as it only works from inside the repository
# use HEAD to also show staged changes. Maybe should be option?
# injection should be impossible using relpath, but to be sure, we check
cmd = "git diff HEAD --src-prefix=%s/ --dst-prefix=%s/ ." % \
(sanitized(rel_path), sanitized(rel_path))
_, response, _ = run_shell_command(cmd, shell=True, cwd=self._path)
if LooseVersion(self.gitversion) > LooseVersion('1.7'):
cmd = 'git submodule foreach --recursive git diff HEAD'
_, output, _ = run_shell_command(cmd, shell=True, cwd=self._path)
response += _git_diff_path_submodule_change(output, rel_path)
return response
def get_affected_files(self, revision):
# Making changes for windows support
cmd = "git show {0} --pretty=format: --name-only".format(
revision)
code, output, _ = run_shell_command(cmd, shell=True, cwd=self._path)
affected = []
if code == 0:
for filename in output.splitlines():
if filename not in ('', None, ):
affected.append(filename)
return affected
def get_log(self, relpath=None, limit=None):
response = []
if relpath is None:
relpath = ''
if self.path_exists() and os.path.exists(os.path.join(self._path, relpath)):
# Get the log
limit_cmd = (("-n %d" % (int(limit))) if limit else "")
GIT_COMMIT_FIELDS = ['id', 'author', 'email', 'date', 'message']
GIT_LOG_FORMAT = '%x1f'.join(['%H', '%an', '%ae', '%ad', '%s']) + '%x1e'
command = "git --work-tree=%s log --format=\"%s\" %s %s " % (self._path, GIT_LOG_FORMAT,
limit_cmd, sanitized(relpath))
return_code, response_str, stderr = run_shell_command(command, shell=True, cwd=self._path)
if return_code == 0:
# Parse response
response = response_str.strip('\n\x1e').split("\x1e")
response = [row.strip().split("\x1f") for row in response]
response = [dict(zip(GIT_COMMIT_FIELDS, row)) for row in response]
# Parse dates
for entry in response:
entry['date'] = dateutil.parser.parse(entry['date'])
return response
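# Sketch of the structure get_log returns (values are made up):
#
#   [{'id': 'f3a1c0d...', 'author': 'Jane Doe',
#     'email': 'jane@example.com',
#     'date': datetime.datetime(2019, 7, 30, 12, 0),
#     'message': 'fix typo'}]
#
# Each field comes from the %H/%an/%ae/%ad/%s placeholders joined with the
# \x1f separator above; dates are parsed with dateutil.parser.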
def get_status(self, basepath=None, untracked=False, porcelain=False):
status_flag = '--porcelain' if porcelain else '-s'
response = None
if basepath is None:
basepath = self._path
if self.path_exists():
rel_path = normalized_rel_path(self._path, basepath)
# git command only works inside repo
# self._path is safe against command injection, as long as we check path.exists
command = "git status {0} ".format(status_flag)
if not untracked:
command += " -uno"
_, response, _ = run_shell_command(command,
shell=True,
cwd=self._path)
response_processed = ""
for line in response.split('\n'):
if len(line.strip()) > 0:
# prepend relative path
response_processed += '%s%s/%s\n' % (line[0:3],
rel_path,
line[3:])
if LooseVersion(self.gitversion) > LooseVersion('1.7'):
if not untracked:
status_flag += " -uno"
command = "git submodule foreach --recursive 'git status {0}'".format(status_flag)
_, response2, _ = run_shell_command(command,
shell=True,
cwd=self._path)
for line in response2.split('\n'):
if line.startswith("Entering"):
continue
if len(line.strip()) > 0:
# prepend relative path
response_processed += line[0:3] + rel_path + '/' + line[3:] + '\n'
response = response_processed
return response
def _is_remote_branch(self, branch_name, remote_name=None, fetch=True):
"""
checks list of remote branches for match. Set fetch to False if you just fetched already.
:returns: True if branch_name exists for remote (or 'origin' if None)
:raises: GitError when git fetch fails
"""
if remote_name is None:
remote_name = self._get_default_remote()
if self.path_exists():
if fetch:
self._do_fetch()
_, output, _ = run_shell_command('git branch -r',
shell=True,
cwd=self._path)
for l in output.splitlines():
elem = l.split()[0]
rem_name = elem[:elem.find('/')]
br_name = elem[elem.find('/') + 1:]
if rem_name == remote_name and br_name == branch_name:
return True
return False
def _is_local_branch(self, branch_name):
if self.path_exists():
_, output, _ = run_shell_command('git branch',
shell=True,
cwd=self._path)
for line in output.splitlines():
elems = line.split()
if len(elems) == 1:
if elems[0] == branch_name:
return True
elif len(elems) == 2:
if elems[0] == '*' and elems[1] == branch_name:
return True
return False
def _get_branch(self):
if self.path_exists():
_, output, _ = run_shell_command('git branch',
shell=True,
cwd=self._path)
for line in output.splitlines():
elems = line.split()
if len(elems) == 2 and elems[0] == '*':
return elems[1]
return None
def _get_branch_parent(self, fetch=False, current_branch=None):
"""
:param fetch: if true, performs git fetch first
:param current_branch: if not None, this is used as current branch (else extra shell call)
:returns: (branch, remote) the name of the branch this branch tracks and its remote
:raises: GitError if fetch fails
"""
if not self.path_exists():
return (None, None)
# get name of configured merge ref.
branchname = current_branch or self._get_branch()
if branchname is None:
return (None, None)
cmd = 'git config --get %s' % sanitized('branch.%s.merge' % branchname)
_, output, _ = run_shell_command(cmd,
shell=True,
cwd=self._path)
if not output:
return (None, None)
lines = output.splitlines()
if len(lines) > 1:
sys.stderr.write("vcstools unable to handle multiple merge references for branch %s:\n%s\n"
% (branchname, output))
return (None, None)
# get name of configured remote
cmd = 'git config --get "branch.%s.remote"' % branchname
_, output2, _ = run_shell_command(cmd, shell=True, cwd=self._path)
remote = output2 or self._get_default_remote()
branch_reference = lines[0]
# branch_reference is either refname, or refs/heads/refname, or
# heads/refname. We would like to return refname; however, the
# user could also have named a branch
# "refs/heads/refname" for some unholy reason. So check all
# known branches on the remote for refname first, then for the odd
# cases, as git seems to do
candidate = branch_reference
if candidate.startswith('refs/'):
candidate = candidate[len('refs/'):]
if candidate.startswith('heads/'):
candidate = candidate[len('heads/'):]
elif candidate.startswith('tags/'):
candidate = candidate[len('tags/'):]
elif candidate.startswith('remotes/'):
candidate = candidate[len('remotes/'):]
result = None
if self._is_remote_branch(candidate, remote_name=remote, fetch=fetch):
result = candidate
elif branch_reference != candidate and self._is_remote_branch(branch_reference,
remote_name=remote,
fetch=False):
result = branch_reference
if result is not None:
return (result, remote)
return None, None
def is_tag(self, tag_name, fetch=True):
"""
checks list of tags for match.
Set fetch to False if you just fetched already.
:returns: True if tag_name among known tags
:raises: GitError when call to git fetch fails
"""
if fetch:
self._do_fetch()
if not tag_name:
raise ValueError('is_tag requires tag_name, got: "%s"' % tag_name)
if self.path_exists():
cmd = 'git tag -l %s' % sanitized(tag_name)
_, output, _ = run_shell_command(cmd, shell=True, cwd=self._path)
lines = output.splitlines()
if len(lines) == 1:
return True
return False
def _rev_list_contains(self, refname, version, fetch=True):
"""
calls git rev-list with refname and returns True if version
can be found in rev-list result
:param refname: a git refname
:param version: an SHA IDs (if partial, caller is responsible
for mismatch)
:returns: True if version is an ancestor commit from refname
:raises: GitError when call to git fetch fails
"""
# to avoid listing unnecessarily many rev-ids, we cut off all
# those we are definitely not interested in
# $ git rev-list foo bar ^baz ^bez
# means "list all the commits which are reachable from foo or
# bar, but not from baz or bez". We use --parents because
# ^baz also excludes baz itself. We could also use git
# show --format=%P to get all parents first and use that,
# not sure what's more performant
if fetch:
self._do_fetch()
if (refname is not None and refname != '' and
version is not None and version != ''):
cmd = 'git rev-list %s ^%s --parents' % (sanitized(refname), sanitized(version))
_, output, _ = run_shell_command(cmd, shell=True, cwd=self._path)
for line in output.splitlines():
# can have 1, 2 or 3 elements (commit, parent1, parent2)
for hashid in line.split(" "):
if hashid.startswith(version):
return True
return False
def _is_commit_in_orphaned_subtree(self, version, mask_self=False, fetch=True):
"""
checks git log --all (the list of all commits reached by
references, meaning branches or tags) for version. If it shows
up, that means git garbage collection will not remove the
commit. Else it would eventually be deleted.
:param version: SHA IDs (if partial, caller is responsible for mismatch)
:param mask_self: whether to consider direct references to this commit
(rather than only references on descendants) as well
:param fetch: whether fetch should be done first for remote refs
:returns: True if version is not recursively referenced by a branch or tag
:raises: GitError if git fetch fails
"""
if fetch:
self._do_fetch()
if version is not None and version != '':
cmd = 'git show-ref -s'
_, output, _ = run_shell_command(cmd, shell=True, cwd=self._path)
refs = output.splitlines()
# 2000 seems like a number the linux shell can cope with
chunksize = 2000
refchunks = [refs[x:x + chunksize] for x in range(0, len(refs), chunksize)]
for refchunk in refchunks:
# git log over all refs except HEAD
cmd = 'git log ' + " ".join(refchunk)
if mask_self:
# %P: parent hashes
cmd += " --pretty=format:%P"
else:
# %H: commit hash
cmd += " --pretty=format:%H"
_, output, _ = run_shell_command(cmd, shell=True, cwd=self._path)
for line in output.splitlines():
if line.strip("'").startswith(version):
return False
return True
return False
def export_repository(self, version, basepath):
if not self.detect_presence():
return False
# Export should work regardless of the state of the local dir (might have modified files),
# so we clone to a temporary folder and checkout the specified version there.
# If the repo has submodules with relative URLs, cloning to a temp dir doesn't work.
# To support this case, first check if current version is already checked out and there are no
# modifications and try to export from current local dir.
# see https://github.com/vcstools/vcstools/pull/130
current_sha = self.get_version()
export_sha = self.get_version(version)
if current_sha == export_sha and self.get_diff() == '':
archiver = GitArchiver(main_repo_abspath=self.get_path(), force_sub=True)
filepath = '{0}.tar.gz'.format(basepath)
archiver.create(filepath)
return filepath
# export a different version than currently checked out or there are local changes
try:
# since version may relate to remote branch / tag we do not
# know about yet, do fetch if not already done
self._do_fetch()
# export should work regardless of the state of the local dir (might have modified files)
# so we clone to a temporary folder and checkout the specified version there
tmpd_path = tempfile.mkdtemp()
try:
tmpgit = GitClient(tmpd_path)
if tmpgit.checkout(self._path, version=version, shallow=True):
archiver = GitArchiver(main_repo_abspath=tmpgit.get_path(), force_sub=True)
filepath = '{0}.tar.gz'.format(basepath)
archiver.create(filepath)
return filepath
else:
return False
finally:
shutil.rmtree(tmpd_path)
except GitError:
return False
def get_branches(self, local_only=False):
cmd = 'git branch --no-color'
if not local_only:
cmd += ' -a'
result, out, err = run_shell_command(cmd,
cwd=self._path,
shell=True,
show_stdout=False)
branches = []
for line in out.splitlines():
if 'HEAD -> ' in line:
continue
line = line.strip('* ')
branches.append(line)
return branches
def _do_fetch(self, timeout=None):
"""
calls git fetch
:raises: GitError when call fails
"""
cmd = "git fetch"
value1, _, _ = run_shell_command(cmd,
cwd=self._path,
shell=True,
no_filter=True,
timeout=timeout,
show_stdout=True)
# git fetch --tags ONLY fetches new tags and commits used, no other commits!
cmd = "git fetch --tags"
value2, _, _ = run_shell_command(cmd,
cwd=self._path,
shell=True,
no_filter=True,
timeout=timeout,
show_stdout=True)
if value1 != 0 or value2 != 0:
raise GitError('git fetch failed')
def _do_fast_forward(self, branch_parent, fetch=True, verbose=False):
"""Execute git fetch if necessary, and if we can fast-foward,
do so to the last fetched version using git rebase.
:param branch_parent: name of branch we track
:param fetch: whether fetch should be done first for remote refs
:returns: True if up-to-date or after successful fast-forward
:raises: GitError when git fetch fails
"""
assert branch_parent is not None
current_version = self.get_version()
default_remote = self._get_default_remote()
parent_version = self.get_version("remotes/%s/%s" % (default_remote, branch_parent))
if current_version == parent_version:
return True
# check if we are true ancestor of tracked branch
if not self._rev_list_contains(parent_version,
current_version,
fetch=fetch):
# if not rev_list_contains this version, we are on same
# commit (checked before), have advanced, or have diverged.
# Now check whether tracked branch is a true ancestor of us
if self._rev_list_contains(current_version,
parent_version,
fetch=False):
return True
print("Cannot fast-forward, local repository and remote '%s' have diverged." % branch_parent)
return False
# 'git reset --keep' doesn't refresh the index. Do it manually to avoid
# errors as reported in: https://github.com/vcstools/wstool/issues/77
cmd = "git update-index -q --refresh"
run_shell_command(cmd,
shell=True,
cwd=self._path,
show_stdout=False,
verbose=verbose)
if verbose:
print("Rebasing repository")
# Rebase, do not pull, because somebody could have
# committed in the meantime.
if LooseVersion(self.gitversion) >= LooseVersion('1.7.1'):
# --keep allows to rebase even with local changes, as long as
# local changes are not in files that change between versions
cmd = "git reset --keep remotes/%s/%s" % (default_remote, branch_parent)
value, _, _ = run_shell_command(cmd,
shell=True,
cwd=self._path,
show_stdout=True,
verbose=verbose)
if value == 0:
return True
else:
verboseflag = ''
if verbose:
verboseflag = '-v'
# prior to version 1.7.1, git does not know --keep
# Do not merge, rebase does nothing when there are local changes
cmd = "git rebase %s remotes/%s/%s" % (verboseflag, default_remote, branch_parent)
value, _, _ = run_shell_command(cmd,
shell=True,
cwd=self._path,
show_stdout=True,
verbose=verbose)
if value == 0:
return True
return False
def _do_checkout(self, refname, fetch=True, verbose=False):
"""
meaning git checkout, not vcstools checkout. This works
for local branches, remote branches, tagnames, hashes, etc.
git will create a local branch of the same name when no such local
branch exists, and also set up tracking. Git decides with its own
rules whether local changes would cause conflicts, and refuses
to check out otherwise.
:raises GitError: when checkout fails
"""
# since refname may relate to remote branch / tag we do not
# know about yet, do fetch if not already done
if fetch:
self._do_fetch()
cmd = "git checkout %s" % (refname)
value, _, _ = run_shell_command(cmd,
shell=True,
cwd=self._path,
show_stdout=verbose,
verbose=verbose)
if value != 0:
raise GitError('Git Checkout failed')
# Backwards compatibility
GITClient = GitClient
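# A minimal usage sketch for GitClient (url and paths are hypothetical):
#
#   >>> from vcstools.git import GitClient
#   >>> client = GitClient('/tmp/ws/src/example_repo')
#   >>> client.checkout('https://github.com/example/example.git', version='master')
#   True
#   >>> client.get_version()          # current SHA-ID
#   >>> client.update('some-branch')  # fetch, then switch or fast-forward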
vcstools-0.1.42/src/vcstools/git_archive_all.py 0000664 0000000 0000000 00000046110 13522611462 0021607 0 ustar 00root root 0000000 0000000 # coding=utf-8
# The MIT License (MIT)
#
# Copyright (c) 2010 Ilya Kulakov
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# from
# https://github.com/Kentzo/git-archive-all/blob/613fa6525e4815c37fed4122fb4ba6ca992d8ff9/git_archive_all.py
from __future__ import print_function
from __future__ import unicode_literals
import logging
from os import extsep, path, readlink, curdir
from subprocess import CalledProcessError, Popen, PIPE
import sys
import tarfile
from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED
import re
__version__ = "1.16.4"
class GitArchiver(object):
"""
GitArchiver
Scan a git repository and export all tracked files, and submodules.
Checks for .gitattributes files in each directory and uses 'export-ignore'
pattern entries to exclude matching files from the archive.
>>> archiver = GitArchiver(main_repo_abspath='my/repo/path')
>>> archiver.create('output.zip')
"""
LOG = logging.getLogger('GitArchiver')
def __init__(self, prefix='', exclude=True, force_sub=False, extra=None, main_repo_abspath=None):
"""
@param prefix: Prefix used to prepend all paths in the resulting archive.
Extra file paths are only prefixed if they are not relative.
E.g. if prefix is 'foo' and extra is ['bar', '/baz'] the resulting archive will look like this:
/
baz
foo/
bar
@type prefix: str
@param exclude: Determines whether archiver should follow rules specified in .gitattributes files.
@type exclude: bool
@param force_sub: Determines whether submodules are initialized and updated before archiving.
@type force_sub: bool
@param extra: List of extra paths to include in the resulting archive.
@type extra: list
@param main_repo_abspath: Absolute path to the main repository (or one of subdirectories).
If given path is path to a subdirectory (but not a submodule directory!) it will be replaced
with abspath to top-level directory of the repository.
If None, current cwd is used.
@type main_repo_abspath: str
"""
if extra is None:
extra = []
if main_repo_abspath is None:
main_repo_abspath = path.abspath('')
elif not path.isabs(main_repo_abspath):
raise ValueError("main_repo_abspath must be an absolute path")
try:
main_repo_abspath = path.abspath(self.run_git_shell('git rev-parse --show-toplevel', main_repo_abspath).rstrip())
except CalledProcessError:
raise ValueError("{0} is not part of a git repository".format(main_repo_abspath))
self.prefix = prefix
self.exclude = exclude
self.extra = extra
self.force_sub = force_sub
self.main_repo_abspath = main_repo_abspath
def create(self, output_path, dry_run=False, output_format=None):
"""
Create the archive at output_file_path.
Type of the archive is determined either by extension of output_file_path or by output_format.
Supported formats are: gz, zip, bz2, xz, tar, tgz, txz
@param output_path: Output file path.
@type output_path: str
@param dry_run: Determines whether create should do nothing but print what it would archive.
@type dry_run: bool
@param output_format: Determines format of the output archive. If None, format is determined from extension
of output_file_path.
@type output_format: str
"""
if output_format is None:
file_name, file_ext = path.splitext(output_path)
output_format = file_ext[len(extsep):].lower()
self.LOG.debug("Output format is not explicitly set, determined format is {0}.".format(output_format))
if not dry_run:
if output_format == 'zip':
archive = ZipFile(path.abspath(output_path), 'w')
def add_file(file_path, arcname):
if not path.islink(file_path):
archive.write(file_path, arcname, ZIP_DEFLATED)
else:
i = ZipInfo(arcname)
i.create_system = 3
i.external_attr = 0xA1ED0000
archive.writestr(i, readlink(file_path))
elif output_format in ['tar', 'bz2', 'gz', 'xz', 'tgz', 'txz']:
if output_format == 'tar':
t_mode = 'w'
elif output_format == 'tgz':
t_mode = 'w:gz'
elif output_format == 'txz':
t_mode = 'w:xz'
else:
t_mode = 'w:{0}'.format(output_format)
archive = tarfile.open(path.abspath(output_path), t_mode)
def add_file(file_path, arcname):
archive.add(file_path, arcname)
else:
raise RuntimeError("unknown format: {0}".format(output_format))
def archiver(file_path, arcname):
self.LOG.debug("Compressing {0} => {1}...".format(file_path, arcname))
add_file(file_path, arcname)
else:
archive = None
def archiver(file_path, arcname):
self.LOG.info("{0} => {1}".format(file_path, arcname))
self.archive_all_files(archiver)
if archive is not None:
archive.close()
def get_exclude_patterns(self, repo_abspath, repo_file_paths):
"""
Returns exclude patterns for a given repo. It looks for .gitattributes files in repo_file_paths.
Resulting dictionary will contain exclude patterns per path (relative to the repo_abspath).
E.g. {('.', 'Catalyst', 'Editions', 'Base'): ['Foo*', '*Bar']}
@param repo_abspath: Absolute path to the git repository.
@type repo_abspath: str
@param repo_file_paths: List of paths relative to the repo_abspath that are under git control.
@type repo_file_paths: list
@return: Dictionary representing exclude patterns.
Keys are tuples of strings. Values are lists of strings.
Returns None if self.exclude is not set.
@rtype: dict or None
"""
if not self.exclude:
return None
def read_attributes(attributes_abspath):
patterns = []
if path.isfile(attributes_abspath):
attributes = open(attributes_abspath, 'r').readlines()
patterns = []
for line in attributes:
tokens = line.strip().split()
if "export-ignore" in tokens[1:]:
patterns.append(tokens[0])
return patterns
exclude_patterns = {(): []}
# There may be no gitattributes.
try:
global_attributes_abspath = self.run_git_shell("git config --get core.attributesfile", repo_abspath).rstrip()
exclude_patterns[()] = read_attributes(global_attributes_abspath)
except:
# And it's valid to not have them.
pass
for attributes_abspath in [path.join(repo_abspath, f) for f in repo_file_paths if f.endswith(".gitattributes")]:
# Each .gitattributes affects only files within its directory.
key = tuple(self.get_path_components(repo_abspath, path.dirname(attributes_abspath)))
exclude_patterns[key] = read_attributes(attributes_abspath)
local_attributes_abspath = path.join(repo_abspath, ".git", "info", "attributes")
key = tuple(self.get_path_components(repo_abspath, repo_abspath))
if key in exclude_patterns:
exclude_patterns[key].extend(read_attributes(local_attributes_abspath))
else:
exclude_patterns[key] = read_attributes(local_attributes_abspath)
return exclude_patterns
def is_file_excluded(self, repo_abspath, repo_file_path, exclude_patterns):
"""
Checks whether file at a given path is excluded.
@param repo_abspath: Absolute path to the git repository.
@type repo_abspath: str
@param repo_file_path: Path to a file within repo_abspath.
@type repo_file_path: str
@param exclude_patterns: Exclude patterns with format specified for get_exclude_patterns.
@type exclude_patterns: dict
@return: True if file should be excluded. Otherwise False.
@rtype: bool
"""
if exclude_patterns is None or not len(exclude_patterns):
return False
from fnmatch import fnmatch
file_name = path.basename(repo_file_path)
components = self.get_path_components(repo_abspath, path.join(repo_abspath, path.dirname(repo_file_path)))
is_excluded = False
# We should check all patterns specified in intermediate directories to the given file.
# At the end we should also check for the global patterns (key '()' or empty tuple).
while not is_excluded:
key = tuple(components)
if key in exclude_patterns:
patterns = exclude_patterns[key]
for p in patterns:
if fnmatch(file_name, p) or fnmatch(repo_file_path, p):
self.LOG.debug("Exclude pattern matched {0}: {1}".format(p, repo_file_path))
is_excluded = True
if not len(components):
break
components.pop()
return is_excluded
def archive_all_files(self, archiver):
"""
Archive all files using archiver.
@param archiver: Callable that accepts 2 arguments:
abspath to file on the system and relative path within archive.
@type archiver: Callable
"""
for file_path in self.extra:
archiver(path.abspath(file_path), path.join(self.prefix, file_path))
for file_path in self.walk_git_files():
archiver(path.join(self.main_repo_abspath, file_path), path.join(self.prefix, file_path))
def walk_git_files(self, repo_path=''):
"""
An iterator method that yields a file path relative to main_repo_abspath
for each file that should be included in the archive.
Skips those that match the exclusion patterns found in
any discovered .gitattributes files along the way.
Recurses into submodules as well.
@param repo_path: Path to the git submodule repository relative to main_repo_abspath.
@type repo_path: str
@return: Iterator to traverse files under git control relative to main_repo_abspath.
@rtype: Iterable
"""
repo_abspath = path.join(self.main_repo_abspath, repo_path)
repo_file_paths = self.run_git_shell(
"git ls-files --cached --full-name --no-empty-directory",
repo_abspath
).splitlines()
exclude_patterns = self.get_exclude_patterns(repo_abspath, repo_file_paths)
for repo_file_path in repo_file_paths:
# Git puts path in quotes if file path has unicode characters.
repo_file_path = repo_file_path.strip('"') # file path relative to current repo
repo_file_abspath = path.join(repo_abspath, repo_file_path) # absolute file path
main_repo_file_path = path.join(repo_path, repo_file_path) # file path relative to the main repo
# Only list symlinks and files.
if not path.islink(repo_file_abspath) and path.isdir(repo_file_abspath):
continue
if self.is_file_excluded(repo_abspath, repo_file_path, exclude_patterns):
continue
yield main_repo_file_path
if self.force_sub:
self.run_git_shell("git submodule init", repo_abspath)
self.run_git_shell("git submodule update", repo_abspath)
try:
repo_gitmodules_abspath = path.join(repo_abspath, ".gitmodules")
with open(repo_gitmodules_abspath) as f:
lines = f.readlines()
for l in lines:
m = re.match(r"^\s*path\s*=\s*(.*)\s*$", l)
if m:
submodule_path = m.group(1)
submodule_path = path.join(repo_path, submodule_path)
if self.is_file_excluded(repo_abspath, submodule_path, exclude_patterns):
continue
for submodule_file_path in self.walk_git_files(submodule_path):
if self.is_file_excluded(repo_abspath, submodule_file_path, exclude_patterns):
continue
yield submodule_file_path
except IOError:
pass
@staticmethod
def get_path_components(repo_abspath, abspath):
"""
Split given abspath into components relative to repo_abspath.
These components are primarily used as unique keys of files and folders within a repository.
E.g. if repo_abspath is '/Documents/Hobby/ParaView/' and abspath is
'/Documents/Hobby/ParaView/Catalyst/Editions/Base/', function will return:
['.', 'Catalyst', 'Editions', 'Base']
First element is always os.curdir (concrete symbol depends on OS).
@param repo_abspath: Absolute path to the git repository. Normalized via os.path.normpath.
@type repo_abspath: str
@param abspath: Absolute path to a file within repo_abspath. Normalized via os.path.normpath.
@type abspath: str
@return: List of path components.
@rtype: list
"""
repo_abspath = path.normpath(repo_abspath)
abspath = path.normpath(abspath)
if not path.isabs(repo_abspath):
raise ValueError("repo_abspath MUST be absolute path.")
if not path.isabs(abspath):
raise ValueError("abspath MUST be absoulte path.")
if not path.commonprefix([repo_abspath, abspath]):
raise ValueError(
"abspath (\"{0}\") MUST have common prefix with repo_abspath (\"{1}\")"
.format(abspath, repo_abspath)
)
components = []
while not abspath == repo_abspath:
abspath, tail = path.split(abspath)
if tail:
components.insert(0, tail)
components.insert(0, curdir)
return components
@staticmethod
def run_git_shell(cmd, cwd=None):
"""
Runs git shell command, reads output and decodes it into unicode string.
@param cmd: Command to be executed.
@type cmd: str
@type cwd: str
@param cwd: Working directory.
@rtype: str
@return: Output of the command.
@raise CalledProcessError: Raises exception if return code of the command is non-zero.
"""
p = Popen(cmd, shell=True, stdout=PIPE, cwd=cwd)
output, _ = p.communicate()
output = output.decode('unicode_escape').encode('raw_unicode_escape').decode('utf-8')
if p.returncode:
if sys.version_info > (2, 6):
raise CalledProcessError(returncode=p.returncode, cmd=cmd, output=output)
else:
raise CalledProcessError(returncode=p.returncode, cmd=cmd)
return output
def main():
from optparse import OptionParser
parser = OptionParser(
usage="usage: %prog [-v] [--prefix PREFIX] [--no-exclude] [--force-submodules]"
" [--extra EXTRA1 [EXTRA2]] [--dry-run] OUTPUT_FILE",
version="%prog {0}".format(__version__)
)
parser.add_option('--prefix',
type='string',
dest='prefix',
default=None,
help="""prepend PREFIX to each filename in the archive.
OUTPUT_FILE name is used by default to avoid tarbomb.
You can set it to '' in order to explicitly request tarbomb""")
parser.add_option('-v', '--verbose',
action='store_true',
dest='verbose',
help='enable verbose mode')
parser.add_option('--no-exclude',
action='store_false',
dest='exclude',
default=True,
help="don't read .gitattributes files for patterns containing export-ignore attrib")
parser.add_option('--force-submodules',
action='store_true',
dest='force_sub',
help='force a git submodule init && git submodule update at each level before iterating submodules')
parser.add_option('--extra',
action='append',
dest='extra',
default=[],
help="any additional files to include in the archive")
parser.add_option('--dry-run',
action='store_true',
dest='dry_run',
help="don't actually archive anything, just show what would be done")
options, args = parser.parse_args()
if len(args) != 1:
parser.error("You must specify exactly one output file")
output_file_path = args[0]
if path.isdir(output_file_path):
parser.error("You cannot use directory as output")
# avoid tarbomb
if options.prefix is not None:
options.prefix = path.join(options.prefix, '')
else:
import re
output_name = path.basename(output_file_path)
output_name = re.sub(
r'(\.zip|\.tar|\.tgz|\.txz|\.gz|\.bz2|\.xz|\.tar\.gz|\.tar\.bz2|\.tar\.xz)$',
'',
output_name
) or "Archive"
options.prefix = path.join(output_name, '')
try:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('%(message)s'))
GitArchiver.LOG.addHandler(handler)
GitArchiver.LOG.setLevel(logging.DEBUG if options.verbose else logging.INFO)
archiver = GitArchiver(options.prefix,
options.exclude,
options.force_sub,
options.extra)
archiver.create(output_file_path, options.dry_run)
except Exception as e:
parser.exit(2, "{0}\n".format(e))
sys.exit(0)
if __name__ == '__main__':
main()
vcstools-0.1.42/src/vcstools/hg.py 0000664 0000000 0000000 00000035347 13522611462 0017103 0 ustar 00root root 0000000 0000000 # Software License Agreement (BSD License)
#
# Copyright (c) 2010, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
"""
hg vcs support.
using ui object to redirect output into a string
"""
from __future__ import absolute_import, print_function, unicode_literals
import os
import sys
import gzip
import dateutil.parser # For parsing date strings
from vcstools.vcs_base import VcsClientBase, VcsError
from vcstools.common import sanitized, normalized_rel_path, run_shell_command
def _get_hg_version():
"""Looks up hg version by calling hg --version.
:raises: VcsError if hg is not installed"""
try:
value, output, _ = run_shell_command('hg --version',
shell=True,
us_env=True)
if value == 0 and output is not None and len(output.splitlines()) > 0:
version = output.splitlines()[0]
else:
raise VcsError("hg --version returned %s, output '%s', maybe hg is not installed" % (value, output))
except VcsError as e:
raise VcsError("Could not determine whether hg is installed %s" % e)
return version
# hg diff cannot seem to be persuaded to accept a different prefix for filenames
def _hg_diff_path_change(diff, path):
"""
Parses hg diff result and changes the filename prefixes.
"""
if diff is None:
return None
INIT = 0
INDIFF = 1
# small state machine makes sure we never touch anything inside
# the actual diff
state = INIT
s_list = [line for line in diff.split(os.linesep)]
lines = []
for line in s_list:
if line.startswith("diff"):
state = INIT
if state == INIT:
if line.startswith("@@"):
state = INDIFF
newline = line
else:
if line.startswith("---") and not line.startswith("--- /dev/null"):
newline = "--- %s%s" % (path, line[5:])
elif line.startswith("+++") and not line.startswith("+++ /dev/null"):
newline = "+++ %s%s" % (path, line[5:])
elif line.startswith("diff --git"):
# first replacing b in case path starts with a/
newline = line.replace(" b/", " " + path + "/", 1)
newline = newline.replace(" a/", " " + path + "/", 1)
else:
newline = line
else:
newline = line
if newline != '':
lines.append(newline)
result = "\n".join(lines)
return result
class HgClient(VcsClientBase):
def __init__(self, path):
"""
:raises: VcsError if hg not detected
"""
VcsClientBase.__init__(self, 'hg', path)
_get_hg_version()
@staticmethod
def get_environment_metadata():
metadict = {}
try:
metadict["version"] = '%s' % _get_hg_version()
except:
metadict["version"] = "no mercurial installed"
return metadict
def get_url(self):
"""
:returns: HG URL of the directory path. (output of hg paths
command), or None if it cannot be determined
"""
if self.detect_presence():
cmd = "hg paths default"
_, output, _ = run_shell_command(cmd,
shell=True,
cwd=self._path,
us_env=True)
return output.rstrip()
return None
@staticmethod
def static_detect_presence(path):
return os.path.isdir(os.path.join(path, '.hg'))
def checkout(self, url, version='', verbose=False,
shallow=False, timeout=None):
if url is None or url.strip() == '':
raise ValueError('Invalid empty url : "%s"' % url)
# make sure that the parent directory exists for #3497
base_path = os.path.split(self.get_path())[0]
try:
os.makedirs(base_path)
except OSError:
# OSError thrown if directory already exists; this is ok
pass
cmd = "hg clone %s %s" % (sanitized(url), self._path)
value, _, msg = run_shell_command(cmd,
shell=True,
no_filter=True)
if value != 0:
if msg:
self.logger.error('%s' % msg)
return False
if version is not None and version.strip() != '':
cmd = "hg checkout %s" % sanitized(version)
value, _, msg = run_shell_command(cmd,
cwd=self._path,
shell=True,
no_filter=True)
if value != 0:
if msg:
sys.stderr.write('%s\n' % msg)
return False
return True
def update(self, version='', verbose=False, timeout=None):
verboseflag = ''
if verbose:
verboseflag = '--verbose'
if not self.detect_presence():
sys.stderr.write("Error: cannot update non-existing directory\n")
return False
if not self._do_pull():
return False
if version is not None and version.strip() != '':
cmd = "hg checkout %s %s" % (verboseflag, sanitized(version))
else:
cmd = "hg update %s --config ui.merge=internal:fail" % verboseflag
value, _, _ = run_shell_command(cmd,
cwd=self._path,
shell=True,
no_filter=True)
if value != 0:
return False
return True
def get_version(self, spec=None):
"""
:param spec: (optional) token for identifying version. spec can be
whatever is allowed by 'hg log -r', e.g. a tagname, sha-ID,
revision-number
:returns: the current SHA-ID of the repository. Or if spec is
provided, the SHA-ID of a revision specified by some
token.
"""
# detect presence only if we need path for cwd in popen
if spec is not None:
if self.detect_presence():
command = 'hg log -r %s' % sanitized(spec)
repeated = False
output = ''
# we repeat the call once after pulling if necessary
while output == '':
_, output, _ = run_shell_command(command,
shell=True,
cwd=self._path,
us_env=True)
if (output.strip() != '' and
not output.startswith("abort") or
repeated is True):
matches = [l for l in output.splitlines() if l.startswith('changeset: ')]
if len(matches) == 1:
return matches[0].split(':')[2]
else:
sys.stderr.write("Warning: found several candidates for hg spec %s" % spec)
break
self._do_pull()
repeated = True
return None
else:
command = 'hg identify -i %s' % self._path
_, output, _ = run_shell_command(command, shell=True, us_env=True)
if output is None or output.strip() == '' or output.startswith("abort"):
return None
# hg adds a '+' to the end if there are uncommitted
# changes, inconsistent with hg log
return output.strip().rstrip('+')
def get_current_version_label(self):
"""
:returns: the name of the currently checked out branch, which hg
uses as the version label
"""
return self.get_branch()
def get_branch(self):
if self.path_exists():
command = "hg branch --repository %s" % self.get_path()
_, output, _ = run_shell_command(command, shell=True)
if output is not None:
return output.strip()
return None
def get_remote_version(self, fetch=False):
if fetch:
self._do_pull(filter=True)
# use local information only
result = self.get_log(limit=1)
if (len(result) == 1 and 'id' in result[0]):
return result[0]['id']
return None
def get_diff(self, basepath=None):
response = None
if basepath is None:
basepath = self._path
if self.path_exists():
rel_path = normalized_rel_path(self._path, basepath)
command = "hg diff -g %(path)s --repository %(path)s" % {'path': sanitized(rel_path)}
_, response, _ = run_shell_command(command, shell=True, cwd=basepath)
response = _hg_diff_path_change(response, rel_path)
return response
def get_affected_files(self, revision):
cmd = "hg log -r %s --template '{files}'" % revision
code, output, _ = run_shell_command(cmd, shell=True, cwd=self._path)
affected = []
if code == 0:
affected = output.split(" ")
return affected
def get_log(self, relpath=None, limit=None):
response = []
if relpath is None:
relpath = ''
if self.path_exists() and os.path.exists(os.path.join(self._path, relpath)):
# Get the log
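            # The template below uses ASCII control characters as separators so
            # the output can be split unambiguously: \x1f (unit separator)
            # between the fields of one commit, \x1e (record separator) between
            # commits.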
limit_cmd = (("--limit %d" % (int(limit))) if limit else "")
HG_COMMIT_FIELDS = ['id', 'author', 'email', 'date', 'message']
HG_LOG_FORMAT = '\x1f'.join(['{node|short}', '{author|person}',
                                         '{author|email}', '{date|isodate}',
'{desc}']) + '\x1e'
command = "hg log %s -b %s --template '%s' %s" % (sanitized(relpath),
self.get_branch(),
HG_LOG_FORMAT,
limit_cmd)
return_code, response_str, stderr = run_shell_command(command, shell=True, cwd=self._path)
if return_code == 0:
# Parse response
response = response_str.strip('\n\x1e').split("\x1e")
response = [row.strip().split("\x1f") for row in response]
response = [dict(zip(HG_COMMIT_FIELDS, row)) for row in response]
# Parse dates
for entry in response:
entry['date'] = dateutil.parser.parse(entry['date'])
return response
def get_status(self, basepath=None, untracked=False):
response = None
if basepath is None:
basepath = self._path
if self.path_exists():
rel_path = normalized_rel_path(self._path, basepath)
# protect against shell injection
command = "hg status %(path)s --repository %(path)s" % {'path': sanitized(rel_path)}
if not untracked:
command += " -mard"
_, response, _ = run_shell_command(command,
shell=True,
cwd=basepath)
if response is not None:
if response.startswith("abort"):
raise VcsError("Probable Bug; Could not call %s, cwd=%s" % (command, basepath))
if len(response) > 0 and response[-1] != '\n':
response += '\n'
return response
def export_repository(self, version, basepath):
# execute the hg archive cmd
cmd = 'hg archive -t tar -r {0} {1}.tar'.format(version, basepath)
result, _, _ = run_shell_command(cmd, shell=True, cwd=self._path)
if result:
return False
try:
# gzip the tar file
with open(basepath + '.tar', 'rb') as tar_file:
gzip_file = gzip.open(basepath + '.tar.gz', 'wb')
try:
gzip_file.writelines(tar_file)
finally:
gzip_file.close()
finally:
# clean up
os.remove(basepath + '.tar')
return True
def get_branches(self, local_only=False):
if not local_only:
self._do_pull()
cmd = 'hg branches'
result, out, _ = run_shell_command(cmd, shell=True, cwd=self._path,
show_stdout=False)
if result:
return []
branches = []
for line in out.splitlines():
line = line.strip()
line = line.split()
branches.append(line[0])
return branches
def _do_pull(self, filter=False):
value, _, _ = run_shell_command("hg pull",
cwd=self._path,
shell=True,
no_filter=not filter)
return value == 0
# backwards compat
HGClient = HgClient
vcstools-0.1.42/src/vcstools/svn.py 0000664 0000000 0000000 00000036306 13522611462 0017307 0 ustar 00root root 0000000 0000000 # Software License Agreement (BSD License)
#
# Copyright (c) 2010, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
"""
svn vcs support.
"""
from __future__ import absolute_import, print_function, unicode_literals
import os
import sys
try:
    # Python 2
    from urlparse import urlsplit
except ImportError:
    # Python 3
    from urllib.parse import urlsplit
import re
import tarfile
import dateutil.parser # For parsing date strings
import xml.dom.minidom # For parsing logfiles
from vcstools.vcs_base import VcsClientBase, VcsError
from vcstools.common import sanitized, normalized_rel_path, \
run_shell_command, ensure_dir_notexists
def canonical_svn_url_split(url):
"""
checks url for traces of canonical svn structure,
and return root, type, name (of tag or branch), subfolder, query and fragment (see urllib urlparse)
This should allow creating a different url for switching to a different tag or branch
:param url: location of central repo, ``str``
:returns: dict {root, type, name, subfolder, query, fragment}
with type one of "trunk", "tags", "branches"
"""
result = {'root': url, 'type': None, 'name': None, 'subfolder': None, 'query': None, 'fragment': None}
if not url:
return result
splitresult = urlsplit(url)
if not splitresult.scheme:
# svn does not accept mere paths
return result
canonical_pattern = re.compile('(.*/)?(trunk|branches|tags)(/.*)?')
matches = canonical_pattern.findall(splitresult.path)
if len(matches) > 0:
if len(matches) > 1:
raise ValueError('Invalid path in url %s' % splitresult.path)
prefix, branchtype, rest = matches[0]
prefix = prefix.rstrip('/')
rest = rest.lstrip('/')
if branchtype == 'trunk':
result['root'] = '%s://%s%s' % (splitresult.scheme,
splitresult.netloc,
prefix)
result['type'] = branchtype
result['query'] = splitresult.query or None
result['fragment'] = splitresult.fragment or None
if rest:
result['subfolder'] = rest
elif branchtype in ['tags', 'branches']:
result['type'] = branchtype
result['root'] = '%s://%s%s' % (splitresult.scheme,
splitresult.netloc,
prefix)
result['query'] = splitresult.query or None
result['fragment'] = splitresult.fragment or None
if rest:
splitrest = rest.split('/', 1)
result['name'] = splitrest[0]
if len(splitrest) == 2 and splitrest[1]:
result['subfolder'] = splitrest[1]
return result
def get_remote_contents(url):
contents = []
if url:
cmd = 'svn ls %s' % (url)
result_code, output, _ = run_shell_command(cmd, shell=True)
if result_code:
return []
contents = [line.strip('/') for line in output.splitlines()]
return contents
def _get_svn_version():
"""Looks up svn version by calling svn --version.
:raises: VcsError if svn is not installed"""
try:
        # SVN commands produce differently formatted output for the French locale
value, output, _ = run_shell_command('svn --version',
shell=True,
us_env=True)
if value == 0 and output is not None and len(output.splitlines()) > 0:
version = output.splitlines()[0]
else:
raise VcsError("svn --version returned " +
"%s maybe svn is not installed" % value)
except VcsError as exc:
raise VcsError("Could not determine whether svn is installed: " +
str(exc))
return version
class SvnClient(VcsClientBase):
def __init__(self, path):
"""
:raises: VcsError if python-svn not detected
"""
VcsClientBase.__init__(self, 'svn', path)
# test for svn here, we need it for status
_get_svn_version()
@staticmethod
def get_environment_metadata():
metadict = {}
try:
metadict["version"] = _get_svn_version()
except:
metadict["version"] = "no svn installed"
return metadict
def get_url(self):
"""
:returns: SVN URL of the directory path (output of svn info command),
or None if it cannot be determined
"""
if self.detect_presence():
# 3305: parsing not robust to non-US locales
cmd = 'svn info %s' % self._path
_, output, _ = run_shell_command(cmd, shell=True)
matches = [l for l in output.splitlines() if l.startswith('URL: ')]
if matches:
return matches[0][5:]
@staticmethod
def static_detect_presence(path):
return os.path.isdir(os.path.join(path, '.svn'))
def checkout(self, url, version='', verbose=False,
shallow=False, timeout=None):
if url is None or url.strip() == '':
raise ValueError('Invalid empty url : "%s"' % url)
# Need to check as SVN 1.6.17 writes into directory even if not empty
if not ensure_dir_notexists(self.get_path()):
self.logger.error("Can't remove %s" % self.get_path())
return False
if version is not None and version != '':
if not version.startswith("-r"):
version = "-r%s" % version
elif version is None:
version = ''
cmd = 'svn co %s %s %s' % (sanitized(version),
sanitized(url),
self._path)
value, _, msg = run_shell_command(cmd,
shell=True,
no_filter=True)
if value != 0:
if msg:
self.logger.error('%s' % msg)
return False
return True
def update(self, version=None, verbose=False, timeout=None):
if not self.detect_presence():
sys.stderr.write("Error: cannot update non-existing directory\n")
return False
# protect against shell injection
if version is not None and version != '':
if not version.startswith("-r"):
version = "-r" + version
elif version is None:
version = ''
cmd = 'svn up %s %s --non-interactive' % (sanitized(version),
self._path)
value, _, _ = run_shell_command(cmd,
shell=True,
no_filter=True)
if value == 0:
return True
return False
def get_version(self, spec=None):
"""
:param spec: (optional) spec can be what 'svn info --help'
allows, meaning a revnumber, {date}, HEAD, BASE, PREV, or
COMMITTED.
:param path: the url to use, default is for this repo
:returns: current revision number of the repository. Or if spec
provided, the number of a revision specified by some
token.
"""
return self._get_version_from_path(spec=spec, path=self._path)
def _get_version_from_path(self, spec=None, path=None):
"""
:param spec: (optional) spec can be what 'svn info --help'
allows, meaning a revnumber, {date}, HEAD, BASE, PREV, or
COMMITTED.
:param path: the url to use, default is for this repo
:returns: current revision number of the repository. Or if spec
provided, the number of a revision specified by some
token.
"""
if not self.path_exists():
return None
command = 'svn info '
if spec is not None:
if spec.isdigit():
# looking up svn with "-r" takes long, and if spec is
# a number, all we get from svn is the same number,
# unless we try to look at higher rev numbers (in
# which case either get the same number, or an error
# if the rev does not exist). So we first do a very
# quick svn info, and check revision numbers.
currentversion = self.get_version(spec=None)
# currentversion is like '-r12345'
if currentversion is not None and \
int(currentversion[2:]) > int(spec):
# so if we know revision exist, just return the
# number, avoid the long call to svn server
return '-r' + spec
if spec.startswith("-r"):
command += sanitized(spec)
else:
command += sanitized('-r%s' % spec)
command += " %s" % path
# #3305: parsing not robust to non-US locales
_, output, _ = run_shell_command(command, shell=True, us_env=True)
if output is not None:
matches = \
[l for l in output.splitlines() if l.startswith('Last Changed Rev: ')]
if len(matches) == 1:
split_str = matches[0].split()
if len(split_str) == 4:
return '-r' + split_str[3]
return None
def get_current_version_label(self):
        # SVN branches are part of the URL
return None
def get_remote_version(self, fetch=False):
if fetch is False:
return None
return self._get_version_from_path(path=self.get_url())
def get_diff(self, basepath=None):
response = None
if basepath is None:
basepath = self._path
if self.path_exists():
rel_path = normalized_rel_path(self._path, basepath)
command = 'svn diff %s' % sanitized(rel_path)
_, response, _ = run_shell_command(command,
shell=True,
cwd=basepath)
return response
def get_affected_files(self, revision):
cmd = "svn diff --summarize -c {0}".format(
revision)
code, output, _ = run_shell_command(cmd, shell=True, cwd=self._path)
affected = []
if code == 0:
for filename in output.splitlines():
affected.append(filename.split(" ")[7])
return affected
def get_log(self, relpath=None, limit=None):
response = []
if relpath is None:
relpath = ''
if self.path_exists() and os.path.exists(os.path.join(self._path, relpath)):
# Get the log
limit_cmd = (("--limit %d" % (int(limit))) if limit else "")
command = "svn log %s --xml %s" % (limit_cmd, sanitized(relpath) if len(relpath) > 0 else '')
return_code, xml_response, stderr = run_shell_command(command, shell=True, cwd=self._path)
# Parse response
dom = xml.dom.minidom.parseString(xml_response)
log_entries = dom.getElementsByTagName("logentry")
# Extract the entries
for log_entry in log_entries:
author_tag = log_entry.getElementsByTagName("author")[0]
date_tag = log_entry.getElementsByTagName("date")[0]
msg_tags = log_entry.getElementsByTagName("msg")
log_data = dict()
log_data['id'] = log_entry.getAttribute("revision")
log_data['author'] = author_tag.firstChild.nodeValue
log_data['email'] = None
log_data['date'] = dateutil.parser.parse(str(date_tag.firstChild.nodeValue))
if len(msg_tags) > 0 and msg_tags[0].firstChild:
log_data['message'] = msg_tags[0].firstChild.nodeValue
else:
log_data['message'] = ''
response.append(log_data)
return response
def get_status(self, basepath=None, untracked=False):
response = None
if basepath is None:
basepath = self._path
if self.path_exists():
rel_path = normalized_rel_path(self._path, basepath)
# protect against shell injection
command = 'svn status %s' % sanitized(rel_path)
if not untracked:
command += " -q"
_, response, _ = run_shell_command(command,
shell=True,
cwd=basepath)
if response is not None and \
len(response) > 0 and \
response[-1] != '\n':
response += '\n'
return response
def export_repository(self, version, basepath):
# Run the svn export cmd
cmd = 'svn export {0} {1}'.format(os.path.join(self._path, version),
basepath)
result, _, _ = run_shell_command(cmd, shell=True)
if result:
return False
try:
# tar gzip the exported repo
targzip_file = tarfile.open(basepath + '.tar.gz', 'w:gz')
try:
targzip_file.add(basepath, '')
finally:
targzip_file.close()
finally:
# clean up
from shutil import rmtree
rmtree(basepath)
return True
def get_branches(self, local_only=False):
url = self.get_url()
canonical_dict = canonical_svn_url_split(url)
if local_only:
if canonical_dict['type'] == 'branches':
return [canonical_dict['name']]
return []
branches = []
if canonical_dict['type']:
branches = get_remote_contents('%s/%s' % (canonical_dict['root'], 'branches'))
return branches
SVNClient = SvnClient
vcstools-0.1.42/src/vcstools/tar.py 0000664 0000000 0000000 00000016037 13522611462 0017266 0 ustar 00root root 0000000 0000000 # Software License Agreement (BSD License)
#
# Copyright (c) 2010, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
"""
tar vcs support.
The implementation uses the "version" argument to indicate a subfolder
within a tarfile. Hence one can organize sources by creating one
tarfile with a folder inside for each version.
"""
from __future__ import absolute_import, print_function, unicode_literals
import os
import tempfile
import shutil
import tarfile
import sys
import yaml
from vcstools.vcs_base import VcsClientBase, VcsError
from vcstools.common import urlretrieve_netrc, ensure_dir_notexists
__pychecker__ = 'unusednames=spec'
_METADATA_FILENAME = ".tar"
class TarClient(VcsClientBase):
def __init__(self, path):
"""
        :raises: VcsError if tar not detected
"""
VcsClientBase.__init__(self, 'tar', path)
self.metadata_path = os.path.join(self._path, _METADATA_FILENAME)
@staticmethod
def get_environment_metadata():
metadict = {}
metadict["version"] = 'tarfile version: %s' % tarfile.version
return metadict
def get_url(self):
"""
        :returns: TAR URL of the directory path (read from the .tar
        metadata file), or None if it cannot be determined
"""
if self.detect_presence():
with open(self.metadata_path, 'r') as metadata_file:
metadata = yaml.load(metadata_file.read())
if 'url' in metadata:
return metadata['url']
return None
@staticmethod
def static_detect_presence(path):
return os.path.isfile(os.path.join(path, _METADATA_FILENAME))
def checkout(self, url, version='', verbose=False,
shallow=False, timeout=None):
"""
untars tar at url to self.path.
If version was given, only the subdirectory 'version' of the
        tar will end up in self.path. Also creates a metadata file
        named '.tar' inside the checkout, a yaml file listing the origin url
        and version arguments.
"""
if not ensure_dir_notexists(self.get_path()):
self.logger.error("Can't remove %s" % self.get_path())
return False
tempdir = None
result = False
try:
tempdir = tempfile.mkdtemp()
if os.path.isfile(url):
filename = url
else:
(filename, _) = urlretrieve_netrc(url)
# print "filename", filename
temp_tarfile = tarfile.open(filename, 'r:*')
members = None # means all members in extractall
if version == '' or version is None:
subdir = tempdir
self.logger.warn("No tar subdirectory chosen via the 'version' argument for url: %s" % url)
else:
# getmembers lists all files contained in tar with
# relative path
subdirs = []
members = []
for m in temp_tarfile.getmembers():
if m.name.startswith(version + '/'):
members.append(m)
if m.name.split('/')[0] not in subdirs:
subdirs.append(m.name.split('/')[0])
if not members:
raise VcsError("%s is not a subdirectory with contents in members %s" % (version, subdirs))
subdir = os.path.join(tempdir, version)
temp_tarfile.extractall(path=tempdir, members=members)
if not os.path.isdir(subdir):
raise VcsError("%s is not a subdirectory\n" % subdir)
try:
# os.makedirs(os.path.dirname(self._path))
shutil.move(subdir, self._path)
except Exception as ex:
raise VcsError("%s failed to move %s to %s" % (ex, subdir, self._path))
metadata = yaml.dump({'url': url, 'version': version})
with open(self.metadata_path, 'w') as mdat:
mdat.write(metadata)
result = True
except Exception as exc:
self.logger.error("Tarball download unpack failed: %s" % str(exc))
finally:
if tempdir is not None and os.path.exists(tempdir):
shutil.rmtree(tempdir)
return result
def update(self, version='', verbose=False, timeout=None):
"""
Does nothing except returning true if tar exists in same
"version" as checked out with vcstools.
"""
if not self.detect_presence():
return False
if version != self.get_version():
sys.stderr.write("Tarball Client does not support updating with different version '%s' != '%s'\n"
% (version, self.get_version()))
return False
return True
def get_version(self, spec=None):
if self.detect_presence():
with open(self.metadata_path, 'r') as metadata_file:
metadata = yaml.load(metadata_file.read())
if 'version' in metadata:
return metadata['version']
return None
def get_current_version_label(self):
# exploded tar has no local version
return None
def get_remote_version(self, fetch=False):
# exploded tar has no remote version (not a required feature)
return None
def get_diff(self, basepath=None):
return ''
def get_status(self, basepath=None, untracked=False):
return ''
def export_repository(self, version, basepath):
raise VcsError('export repository not implemented for extracted tars')
# backwards compatibility
TARClient = TarClient
vcstools-0.1.42/src/vcstools/vcs_abstraction.py 0000664 0000000 0000000 00000011634 13522611462 0021662 0 ustar 00root root 0000000 0000000 # Software License Agreement (BSD License)
#
# Copyright (c) 2010, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, print_function, unicode_literals
import os
import warnings
_VCS_TYPES = {}
def register_vcs(vcs_type, clazz):
"""
:param vcs_type: id, ``str``
:param clazz: class extending VcsClientBase
"""
_VCS_TYPES[vcs_type] = clazz
def get_registered_vcs_types():
"""
:returns: list of valid key to use as vcs_type
"""
return list(_VCS_TYPES.keys())
def get_vcs(vcs_type):
"""
Returns the class interfacing with vcs of given type
    :param vcs_type: id of the type, e.g. git, svn, hg, bzr
:returns: class extending VcsClientBase
:raises: ValueError for unknown vcs_type
"""
vcs_class = _VCS_TYPES.get(vcs_type, None)
if not vcs_class:
raise ValueError('No Client type registered for vcs type "%s"' % vcs_type)
return vcs_class
def get_vcs_client(vcs_type, path):
"""
Returns a client with which to interact with the vcs at given path
    :param vcs_type: id of the type, e.g. git, svn, hg, bzr
:returns: instance of VcsClientBase
:raises: ValueError for unknown vcs_type
"""
clientclass = get_vcs(vcs_type)
return clientclass(path)
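# Typical usage (illustrative sketch; assumes the 'git' client class has been
# registered via register_vcs, as the vcstools package does on import):
#   client = get_vcs_client('git', '/tmp/some_checkout')
#   client.checkout('https://example.org/some_repo.git')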
class VcsClient(object):
"""
*DEPRECATED* API for interacting with source-controlled paths
independent of actual version-control implementation.
"""
def __init__(self, vcs_type, path):
self._path = path
warnings.warn("Class VcsClient is deprecated, use from vcstools" +
" import get_vcs_client; get_vcs_client() instead")
self.vcs = get_vcs_client(vcs_type, path)
def path_exists(self):
return os.path.exists(self._path)
def get_path(self):
return self._path
# pass through VCSClientBase API
def get_version(self, spec=None):
return self.vcs.get_version(spec)
def get_current_version_label(self):
return self.vcs.get_current_version_label()
def get_remote_version(self, fetch=False):
return self.vcs.get_remote_version(fetch)
def get_default_remote_version_label(self):
return self.vcs.get_default_remote_version_label()
def checkout(self, url, version='', verbose=False, shallow=False):
return self.vcs.checkout(url,
version,
verbose=verbose,
shallow=shallow)
def url_matches(self, url, url_or_shortcut):
return self.vcs.url_matches(url=url, url_or_shortcut=url_or_shortcut)
def update(self, version='', verbose=False):
return self.vcs.update(version, verbose=verbose)
def detect_presence(self):
return self.vcs.detect_presence()
def get_vcs_type_name(self):
return self.vcs.get_vcs_type_name()
def get_url(self):
return self.vcs.get_url()
def get_diff(self, basepath=None):
return self.vcs.get_diff(basepath)
def get_status(self, basepath=None, untracked=False, **kwargs):
return self.vcs.get_status(basepath, untracked, **kwargs)
def get_log(self, relpath=None, limit=None):
return self.vcs.get_log(relpath, limit)
def export_repository(self, version, basepath):
return self.vcs.export_repository(version, basepath)
def get_branches(self, local_only=False):
return self.vcs.get_branches(local_only)
# backwards compat
VCSClient = VcsClient
vcstools-0.1.42/src/vcstools/vcs_base.py 0000664 0000000 0000000 00000031167 13522611462 0020266 0 ustar 00root root 0000000 0000000 # Software License Agreement (BSD License)
#
# Copyright (c) 2010, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
"""
vcs support library base class.
"""
from __future__ import absolute_import, print_function, unicode_literals
import os
import logging
__pychecker__ = 'unusednames=spec,url,version,basepath,untracked'
class VcsError(Exception):
"""To be thrown when an SCM Client faces a situation because of a
violated assumption"""
def __init__(self, value):
super(VcsError, self).__init__(value)
self.value = value
def __str__(self):
return repr(self.value)
class VcsClientBase(object):
"""
parent class for all vcs clients, provides their public API
"""
def __init__(self, vcs_type_name, path):
"""
subclasses may raise VcsError when a dependency is missing
"""
self._path = path
if path is None:
raise VcsError("Cannot initialize VCSclient without path")
self._vcs_type_name = vcs_type_name
self.logger = logging.getLogger('vcstools')
@staticmethod
def get_environment_metadata():
"""
For debugging purposes, returns a dict containing information
about the environment, like the version of the SCM client, or
version of libraries involved.
        Suggested keys to consider first: "version", "dependency", "features".
:returns: a dict containing relevant information
:rtype: dict
"""
raise NotImplementedError(
"Base class get_environment_metadata method must be overridden")
def path_exists(self):
"""
helper function
"""
return os.path.exists(self._path)
def get_path(self):
"""
returns the path this client was configured for
"""
return self._path
def url_matches(self, url, url_or_shortcut):
"""
client can decide whether the url and the other url are equivalent.
Checks string equality by default
:param url_or_shortcut: url or shortcut (e.g. bzr launchpad url)
:returns: bool if params are equivalent
"""
if url is None or url_or_shortcut is None:
return False
return url.rstrip('/') == url_or_shortcut.rstrip('/')
def get_url(self):
"""
:returns: The source control url for the path or None if not set
:rtype: str
"""
raise NotImplementedError(
"Base class get_url method must be overridden for client type %s" %
self._vcs_type_name)
def get_version(self, spec=None):
"""
        Find an identifier for the current or a specified
revision. Token spec might be a tagname, branchname,
version-id, SHA-ID, ... depending on the VCS implementation.
:param spec: token for identifying repository revision
:type spec: str
:returns: current revision number of the repository. Or if
spec is provided, the respective revision number.
:rtype: str
"""
raise NotImplementedError("Base class get_version method must be overridden for client type %s " %
self._vcs_type_name)
def get_current_version_label(self):
"""
        Find a description for the current local version.
        Token spec might be a branchname,
        version-id, SHA-ID, ... depending on the VCS implementation.
        :returns: short description of local version (e.g. branch name, tag name).
:rtype: str
"""
raise NotImplementedError("Base class get_current_version method must be overridden for client type %s " %
self._vcs_type_name)
def get_remote_version(self, fetch=False):
"""
Find an identifier for the current revision on remote.
Token spec might be a tagname,
version-id, SHA-ID, ... depending on the VCS implementation.
:param fetch: if False, only local information may be used
:returns: current revision number of the remote repository.
:rtype: str
"""
raise NotImplementedError("Base class get_remote_version method must be overridden for client type %s " %
self._vcs_type_name)
def get_default_remote_version_label(self):
"""
Find a label for the default branch on remote, meaning
the one that would be checked out on a clean checkout.
:returns: a label or None (if not applicable)
:rtype: str
"""
raise NotImplementedError("Base class get_default_remote_version_label" +
"method must be overridden for client type %s " %
self._vcs_type_name)
def checkout(self, url, version=None, verbose=False, shallow=False, timeout=None):
"""
Attempts to create a local repository given a remote
url. Fails if a target path exists, unless it's an empty directory.
If a version is provided, the local repository
will be updated to that revision. It is possible that
after a failed call to checkout, a repository still exists,
e.g. if an invalid revision was given.
If shallow is provided, the scm client may checkout less
than the full repository history to save time / disk space.
If a timeout is specified, any pending operation will fail after
the specified amount (in seconds). NOTE: this parameter might or
might not be honored, depending on VCS client implementation.
:param url: where to checkout from
:type url: str
:param version: token for identifying repository revision
:type version: str
:param shallow: hint to checkout less than a full repository
:type shallow: bool
:param timeout: maximum allocated time to perform operation
        :type timeout: int
:returns: True if successful
"""
raise NotImplementedError("Base class checkout method must be overridden for client type %s " %
self._vcs_type_name)
def update(self, version=None, verbose=False, timeout=None):
"""
Sets the local copy of the repository to a version matching
        the version parameter. Fails when there are uncommitted changes.
        On failures (e.g. also on network failure) guarantees that the
        checked out files are in the same state as before the call.
If a timeout is specified, any pending operation will fail after
the specified amount (in seconds)
:param version: token for identifying repository revision
desired. Token might be a tagname, branchname, version-id,
SHA-ID, ... depending on the VCS implementation.
:param timeout: maximum allocated time to perform operation
        :type timeout: int
:returns: True on success, False else
"""
raise NotImplementedError("Base class update method must be overridden for client type %s " %
self._vcs_type_name)
@staticmethod
def static_detect_presence(path):
"""For auto detection"""
raise NotImplementedError(
"Base class detect_presence method must be overridden")
def detect_presence(self):
"""For auto detection"""
# call static method
return self.static_detect_presence(self._path)
def get_vcs_type_name(self):
""" used when auto detected """
return self._vcs_type_name
def get_diff(self, basepath=None):
"""
:param basepath: diff paths will be relative to this, if any
:returns: A string showing local differences
:rtype: str
"""
raise NotImplementedError(
"Base class get_diff method must be overridden")
def get_status(self, basepath=None, untracked=False, **kwargs):
"""
Calls scm status command. Output must be terminated by newline
unless empty.
Semantics of untracked are difficult to generalize.
In SVN, this would be new files only. In git,
hg, bzr, this would be changes that have not been added for
commit.
Extra keyword arguments are passed along to the underlying vcs code.
See the specific implementations of get_status() for extra options.
:param basepath: status path will be relative to this, if any
:param untracked: whether to also show changes that would not commit
:returns: A string summarizing locally modified files
:rtype: str
"""
raise NotImplementedError("Base class get_status method must be overridden for client type %s " %
self._vcs_type_name)
def get_affected_files(self, revision):
"""
Get the files that were affected by a specific revision
:param revision: SHA or revision number.
:returns: A list of strings with the files affected by a specific commit
"""
        raise NotImplementedError(
            "Base class get_affected_files method must be overridden")
def get_log(self, relpath=None, limit=None):
"""
Calls scm log command.
This returns a list of dictionaries with the following fields:
- id: the commit SHA or revision number
- date: the date the commit was made (python datetime)
- author: the name of the author of the commit, if available
- email: the e-mail address of the author of the commit
- message: the commit message, if any
:param relpath: (optional) restrict logs to events on this
resource path (folder or file) relative to the root of the
repository. If None (default), this is the root of the
repository.
:param limit: (optional) the maximum number of log entries
that should be retrieved. If None (default), there is no
limit.
"""
raise NotImplementedError(
"Base class get_log method must be overridden")
def export_repository(self, version, basepath):
"""
Calls scm equivalent to `svn export`, removing scm meta
information and tar gzip'ing the repository at a given version
to the given basepath.
:param version: version of the repository to export. This can
be a branch, tag, or path (svn). When specifying the version
as a path for svn, the path should be relative to the root of
the svn repository, i.e. 'trunk', or 'tags/1.2.3', or './' for
the root.
:param basepath: this is the path to the tar gzip, excluding
the extension which will be .tar.gz
:returns: True on success, False otherwise.
"""
raise NotImplementedError("Base class export_repository method must be overridden for client type %s " %
self._vcs_type_name)
def get_branches(self, local_only=False):
"""
Returns a list of all branches in the vcs repository.
:param local_only: if True it will only list local branches
:returns: list of branches in the repository, [] if none exist
"""
raise NotImplementedError("Base class get_branches method must "
"be overridden")
vcstools-0.1.42/stdeb.cfg 0000664 0000000 0000000 00000000462 13522611462 0015240 0 ustar 00root root 0000000 0000000 [DEFAULT]
Depends: subversion, mercurial, git-core, bzr, python-yaml, python-dateutil
Depends3: subversion, mercurial, git-core, bzr, python3-yaml
Suite: oneiric precise quantal raring saucy trusty utopic vivid wily xenial yakkety zesty artful bionic wheezy jessie stretch buster
X-Python3-Version: >= 3.2
vcstools-0.1.42/test/ 0000775 0000000 0000000 00000000000 13522611462 0014433 5 ustar 00root root 0000000 0000000 vcstools-0.1.42/test/__init__.py 0000664 0000000 0000000 00000000000 13522611462 0016532 0 ustar 00root root 0000000 0000000 vcstools-0.1.42/test/mock_server.py 0000664 0000000 0000000 00000005657 13522611462 0017341 0 ustar 00root root 0000000 0000000 import os
import json
import re
import socket
import shutil
import tempfile
from threading import Thread
from io import BytesIO
try:
# py3k
from http.server import BaseHTTPRequestHandler, HTTPServer
except ImportError:
# py2.7
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
def get_free_port():
s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
s.bind(('localhost', 0))
address, port = s.getsockname()
s.close()
return port
def start_mock_server(file_content):
port = get_free_port()
class MockServerRequestHandler(BaseHTTPRequestHandler):
'''adapted from https://realpython.com/testing-third-party-apis-with-mock-servers
serves file in file_content
returns in chunks if chunked=true is part of url
requires any Basic auth if auth=true is part of url
:returns: base URL
'''
CHUNK_SIZE = 1024
def do_GET(self):
if 'auth=true' in self.path:
if not 'Authorization' in self.headers:
self.send_response(401)
self.send_header('Www-Authenticate', 'Basic realm="foo"')
self.end_headers()
return
if re.search(re.compile(r'/downloads/.*'), self.path):
# Add response status code.
self.send_response(200)
# Add response headers.
self.send_header('Content-Type', 'application/application/x-gzip;')
# Add response content.
if 'chunked=true' in self.path:
self.send_header('Transfer-Encoding', 'chunked')
self.send_header('Connection', 'close')
self.end_headers()
stream = BytesIO(file_content)
while True:
data = stream.read(self.CHUNK_SIZE)
# python3.[0-4] cannot easily format bytes (see PEP 461)
self.wfile.write(("%X\r\n" % len(data)).encode('ascii'))
self.wfile.write(data)
self.wfile.write(b"\r\n")
# If there's no more data to read, stop streaming
if not data:
break
else:
self.end_headers()
self.wfile.write(file_content) # nonchunked
# Ensure any buffered output has been transmitted and close the stream
self.wfile.flush()
return
            # any other path: not found
            self.send_response(404)
            self.end_headers()
            return
mock_server = HTTPServer(('localhost', port), MockServerRequestHandler)
mock_server_thread = Thread(target=mock_server.serve_forever)
mock_server_thread.setDaemon(True)
mock_server_thread.start()
return 'http://localhost:{port}'.format(port=port)
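# Illustrative use from a test (names and payload are hypothetical):
#   url = start_mock_server(b'some tarball payload')
#   urlretrieve_netrc(url + '/downloads/foo.tar.gz?chunked=true')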
vcstools-0.1.42/test/test_base.py 0000664 0000000 0000000 00000014415 13522611462 0016763 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import, print_function, unicode_literals
import os
import unittest
import tempfile
import shutil
from mock import Mock
import vcstools
from vcstools.vcs_base import VcsClientBase, VcsError
from vcstools.common import sanitized, normalized_rel_path, \
run_shell_command, urlretrieve_netrc, _netrc_open, urlopen_netrc
class BaseTest(unittest.TestCase):
def test_normalized_rel_path(self):
self.assertEqual(None, normalized_rel_path(None, None))
self.assertEqual('foo', normalized_rel_path(None, 'foo'))
self.assertEqual('/foo', normalized_rel_path(None, '/foo'))
self.assertEqual('../bar', normalized_rel_path('/bar', '/foo'))
self.assertEqual('../bar', normalized_rel_path('/bar', '/foo/baz/..'))
self.assertEqual('../bar', normalized_rel_path('/bar/bam/foo/../..', '/foo/baz/..'))
self.assertEqual('bar', normalized_rel_path('bar/bam/foo/../..', '/foo/baz/..'))
def test_sanitized(self):
self.assertEqual('', sanitized(None))
self.assertEqual('', sanitized(''))
self.assertEqual('"foo"', sanitized('foo'))
self.assertEqual('"foo"', sanitized('\"foo\"'))
self.assertEqual('"foo"', sanitized('"foo"'))
self.assertEqual('"foo"', sanitized('" foo"'))
try:
sanitized('bla"; foo"')
self.fail("Expected Exception")
except VcsError:
pass
try:
sanitized('bla";foo"')
self.fail("Expected Exception")
except VcsError:
pass
try:
sanitized('bla";foo \"bum')
self.fail("Expected Exception")
except VcsError:
pass
try:
sanitized('bla";foo;"bam')
self.fail("Expected Exception")
except VcsError:
pass
try:
sanitized('bla"#;foo;"bam')
self.fail("Expected Exception")
except VcsError:
pass
def test_shell_command(self):
self.assertEqual((0, "", None), run_shell_command("true"))
self.assertEqual((1, "", None), run_shell_command("false"))
self.assertEqual((0, "foo", None), run_shell_command("echo foo", shell=True))
(v, r, e) = run_shell_command("[", shell=True)
self.assertFalse(v == 0)
self.assertFalse(e is None)
self.assertEqual(r, '')
(v, r, e) = run_shell_command("echo foo && [", shell=True)
self.assertFalse(v == 0)
self.assertFalse(e is None)
self.assertEqual(r, 'foo')
# not a great test on a system where this is default
_, env_langs, _ = run_shell_command("/usr/bin/env |grep LANG=", shell=True, us_env=True)
self.assertTrue("LANG=en_US.UTF-8" in env_langs.splitlines())
try:
run_shell_command("two words")
self.fail("expected exception")
except:
pass
def test_shell_command_verbose(self):
# just check no Exception happens due to decoding
run_shell_command("echo %s" % (b'\xc3\xa4'.decode('UTF-8')), shell=True, verbose=True)
run_shell_command(["echo", b'\xc3\xa4'.decode('UTF-8')], verbose=True)
def test_netrc_open(self):
root_directory = tempfile.mkdtemp()
machine = 'foo.org'
uri = 'https://%s/bim/bam' % machine
netrcname = os.path.join(root_directory, "netrc")
mock_build_opener = Mock()
mock_build_opener_fun = Mock()
mock_build_opener_fun.return_value = mock_build_opener
back_build_opener = vcstools.common.build_opener
try:
vcstools.common.build_opener = mock_build_opener_fun
filelike = _netrc_open(uri, netrcname)
self.assertFalse(filelike)
with open(netrcname, 'w') as fhand:
fhand.write(
'machine %s login fooname password foopass' % machine)
filelike = _netrc_open(uri, netrcname)
self.assertTrue(filelike)
filelike = _netrc_open('other', netrcname)
self.assertFalse(filelike)
filelike = _netrc_open(None, netrcname)
self.assertFalse(filelike)
finally:
shutil.rmtree(root_directory)
vcstools.common.build_opener = back_build_opener
def test_urlopen_netrc(self):
mockopen = Mock()
mock_result = Mock()
backopen = vcstools.common.urlopen
backget = vcstools.common._netrc_open
try:
#monkey-patch with mocks
vcstools.common.urlopen = mockopen
vcstools.common._netrc_open = Mock()
vcstools.common._netrc_open.return_value = mock_result
ioe = IOError('MockError')
mockopen.side_effect = ioe
self.assertRaises(IOError, urlopen_netrc, 'foo')
ioe.code = 401
result = urlopen_netrc('foo')
self.assertEqual(mock_result, result)
finally:
vcstools.common.urlopen = backopen
vcstools.common._netrc_open = backget
def test_urlretrieve_netrc(self):
root_directory = tempfile.mkdtemp()
examplename = os.path.join(root_directory, "foo")
outname = os.path.join(root_directory, "fooout")
with open(examplename, "w") as fhand:
fhand.write('content')
mockget = Mock()
mockopen = Mock()
mock_fhand = Mock()
backopen = vcstools.common.urlopen
backget = vcstools.common._netrc_open
try:
# vcstools.common.urlopen = mockopen
# vcstools.common.urlopen.return_value = mock_fhand
# mock_fhand.read.return_value = 'content'
mockopen.open.return_value
vcstools.common._netrc_open = Mock()
vcstools.common._netrc_open.return_value = mockget
(fname, headers) = urlretrieve_netrc('file://' + examplename)
self.assertTrue(fname)
self.assertFalse(os.path.exists(outname))
(fname, headers) = urlretrieve_netrc('file://' + examplename,
outname)
self.assertEqual(outname, fname)
self.assertTrue(os.path.isfile(outname))
finally:
vcstools.common.urlopen = backopen
vcstools.common._netrc_open = backget
shutil.rmtree(root_directory)
vcstools-0.1.42/test/test_bzr.py 0000664 0000000 0000000 00000040332 13522611462 0016643 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, print_function, unicode_literals
import platform
import os
import io
import fnmatch
import shutil
import subprocess
import tempfile
import unittest
from vcstools.bzr import BzrClient, _get_bzr_version
os.environ['EMAIL'] = 'Your Name '
class BzrClientTestSetups(unittest.TestCase):
@classmethod
def setUpClass(self):
self.root_directory = tempfile.mkdtemp()
self.directories = dict(setUp=self.root_directory)
self.remote_path = os.path.join(self.root_directory, "remote")
os.makedirs(self.remote_path)
# create a "remote" repo
subprocess.check_call(["bzr", "init"], cwd=self.remote_path)
subprocess.check_call(["touch", "fixed.txt"], cwd=self.remote_path)
subprocess.check_call(["bzr", "add", "fixed.txt"], cwd=self.remote_path)
subprocess.check_call(["bzr", "commit", "-m", "initial"], cwd=self.remote_path)
subprocess.check_call(["bzr", "tag", "test_tag"], cwd=self.remote_path)
self.local_version_init = "1"
# files to be modified in "local" repo
subprocess.check_call(["touch", "modified.txt"], cwd=self.remote_path)
subprocess.check_call(["touch", "modified-fs.txt"], cwd=self.remote_path)
subprocess.check_call(["bzr", "add", "modified.txt", "modified-fs.txt"], cwd=self.remote_path)
subprocess.check_call(["bzr", "commit", "-m", "initial"], cwd=self.remote_path)
self.local_version_second = "2"
subprocess.check_call(["touch", "deleted.txt"], cwd=self.remote_path)
subprocess.check_call(["touch", "deleted-fs.txt"], cwd=self.remote_path)
subprocess.check_call(["bzr", "add", "deleted.txt", "deleted-fs.txt"], cwd=self.remote_path)
subprocess.check_call(["bzr", "commit", "-m", "modified"], cwd=self.remote_path)
self.local_version = "3"
self.local_path = os.path.join(self.root_directory, "local")
@classmethod
def tearDownClass(self):
for d in self.directories:
shutil.rmtree(self.directories[d])
def tearDown(self):
if os.path.exists(self.local_path):
shutil.rmtree(self.local_path)
class BzrClientTest(BzrClientTestSetups):
def test_url_matches_with_shortcut_strings(self):
client = BzrClient(self.local_path)
self.assertTrue(client.url_matches('test1234', 'test1234'))
def test_url_matches_with_shortcut_strings_slashes(self):
client = BzrClient(self.local_path)
self.assertTrue(client.url_matches('test1234/', 'test1234'))
self.assertTrue(client.url_matches('test1234', 'test1234/'))
self.assertTrue(client.url_matches('test1234/', 'test1234/'))
def get_launchpad_info(self, url):
po = subprocess.Popen(["bzr", "info", url], stdout=subprocess.PIPE)
output = po.stdout.read()
# it is not great to use the same code for testing as in
# production, but relying on fixed bzr info output is just as
# bad.
for line in output.splitlines():
sline = line.decode('UTF-8').strip()
for prefix in ['shared repository: ',
'repository branch: ',
'branch root: ']:
if sline.startswith(prefix):
return sline[len(prefix):]
return None
# this test fails on travis with bzr 2.1.4 and python2.6, but
# probably due to the messed up source install of bzr using python2.7
if not (platform.python_version().startswith('2.6') and
'2.1' in _get_bzr_version()):
def test_url_matches_with_shortcut(self):
# bzr on launchpad should have shared repository
client = BzrClient(self.local_path)
url = 'lp:bzr'
url2 = self.get_launchpad_info(url)
self.assertFalse(url2 is None)
self.assertTrue(client.url_matches(url2, url), "%s~=%s" % (url, url2))
# launchpad on launchpad should be a branch root
url = 'lp:launchpad'
url2 = self.get_launchpad_info(url)
self.assertFalse(url2 is None)
self.assertTrue(client.url_matches(url2, url), "%s~=%s" % (url, url2))
def test_get_url_by_reading(self):
client = BzrClient(self.local_path)
url = self.remote_path
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_url(), self.remote_path)
self.assertEqual(client.get_version(), self.local_version)
self.assertEqual(client.get_version(self.local_version_init[0:6]), self.local_version_init)
self.assertEqual(client.get_version("test_tag"), self.local_version_init)
def test_get_url_nonexistant(self):
local_path = "/tmp/dummy"
client = BzrClient(local_path)
self.assertEqual(client.get_url(), None)
def test_get_type_name(self):
local_path = "/tmp/dummy"
client = BzrClient(local_path)
self.assertEqual(client.get_vcs_type_name(), 'bzr')
def test_checkout_invalid(self):
"makes sure failed checkout results in False, not Exception"
url = self.remote_path + "foobar"
client = BzrClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(client.checkout(url))
def test_checkout_invalid_update(self):
"makes sure no exception happens on invalid update"
url = self.remote_path
client = BzrClient(self.local_path)
self.assertTrue(client.checkout(url))
new_version = 'foobar'
self.assertFalse(client.update(new_version))
def test_checkout(self):
url = self.remote_path
client = BzrClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
def test_checkout_dir_exists(self):
url = self.remote_path
client = BzrClient(self.local_path)
self.assertFalse(client.path_exists())
os.makedirs(self.local_path)
self.assertTrue(client.checkout(url))
# non-empty
self.assertFalse(client.checkout(url))
def test_checkout_specific_version_and_update(self):
url = self.remote_path
version = "1"
client = BzrClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, version))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
self.assertEqual(client.get_version(), version)
new_version = '2'
self.assertTrue(client.update(new_version))
self.assertEqual(client.get_version(), new_version)
def testDiffClean(self):
client = BzrClient(self.remote_path)
self.assertEquals('', client.get_diff())
def testStatusClean(self):
client = BzrClient(self.remote_path)
self.assertEquals('', client.get_status())
def test_get_environment_metadata(self):
# Verify that metadata is generated
directory = tempfile.mkdtemp()
self.directories['local'] = directory
local_path = os.path.join(directory, "local")
client = BzrClient(local_path)
self.assertTrue('version' in client.get_environment_metadata())
class BzrClientLogTest(BzrClientTestSetups):
@classmethod
def setUpClass(self):
BzrClientTestSetups.setUpClass()
client = BzrClient(self.local_path)
client.checkout(self.remote_path)
def test_get_log_defaults(self):
client = BzrClient(self.local_path)
client.checkout(self.remote_path)
log = client.get_log()
self.assertEquals(3, len(log))
self.assertEquals('modified', log[0]['message'])
for key in ['id', 'author', 'email', 'date', 'message']:
self.assertTrue(log[0][key] is not None, key)
def test_get_log_limit(self):
client = BzrClient(self.local_path)
client.checkout(self.remote_path)
log = client.get_log(limit=1)
self.assertEquals(1, len(log))
self.assertEquals('modified', log[0]['message'])
def test_get_log_path(self):
client = BzrClient(self.local_path)
client.checkout(self.remote_path)
log = client.get_log(relpath='fixed.txt')
self.assertEquals('initial', log[0]['message'])
class BzrClientAffectedFilesTest(BzrClientTestSetups):
@classmethod
def setUpClass(self):
BzrClientTestSetups.setUpClass()
client = BzrClient(self.local_path)
client.checkout(self.remote_path)
def test_get_log_defaults(self):
client = BzrClient(self.local_path)
client.checkout(self.remote_path)
log = client.get_log(limit=1)[0]
affected = client.get_affected_files(log['id'])
self.assertEqual(sorted(['deleted-fs.txt', 'deleted.txt']),
sorted(affected))
class BzrDiffStatClientTest(BzrClientTestSetups):
@classmethod
def setUpClass(self):
# setup a local repo once for all diff and status test
BzrClientTestSetups.setUpClass()
url = self.remote_path
client = BzrClient(self.local_path)
client.checkout(url)
# after setting up "local" repo, change files and make some changes
subprocess.check_call(["rm", "deleted-fs.txt"], cwd=self.local_path)
subprocess.check_call(["bzr", "rm", "deleted.txt"], cwd=self.local_path)
f = io.open(os.path.join(self.local_path, "modified.txt"), 'a')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "modified-fs.txt"), 'a')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "added-fs.txt"), 'w')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "added.txt"), 'w')
f.write('0123456789abcdef')
f.close()
subprocess.check_call(["bzr", "add", "added.txt"], cwd=self.local_path)
def tearDown(self):
pass
@classmethod
def tearDownClass(self):
BzrClientTestSetups.tearDownClass()
def test_diff(self):
client = BzrClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
        # using fnmatch because date and time change (remove when bzr introduces diff --format)
diff = client.get_diff()
self.assertTrue(diff is not None)
self.assertTrue(fnmatch.fnmatch(diff, "=== added file 'added.txt'\n--- ./added.txt\t????-??-?? ??:??:?? +0000\n+++ ./added.txt\t????-??-?? ??:??:?? +0000\n@@ -0,0 +1,1 @@\n+0123456789abcdef\n\\ No newline at end of file\n\n=== removed file 'deleted-fs.txt'\n=== removed file 'deleted.txt'\n=== modified file 'modified-fs.txt'\n--- ./modified-fs.txt\t????-??-?? ??:??:?? +0000\n+++ ./modified-fs.txt\t????-??-?? ??:??:?? +0000\n@@ -0,0 +1,1 @@\n+0123456789abcdef\n\\ No newline at end of file\n\n=== modified file 'modified.txt'\n--- ./modified.txt\t????-??-?? ??:??:?? +0000\n+++ ./modified.txt\t????-??-?? ??:??:?? +0000\n@@ -0,0 +1,1 @@\n+0123456789abcdef\n\\ No newline at end of file"))
def test_diff_relpath(self):
client = BzrClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
# using fnmatch because date and time change (remove when bzr introduces diff --format)
diff = client.get_diff(basepath=os.path.dirname(self.local_path))
self.assertTrue(diff is not None)
self.assertTrue(fnmatch.fnmatch(diff, "=== added file 'added.txt'\n--- local/added.txt\t????-??-?? ??:??:?? +0000\n+++ local/added.txt\t????-??-?? ??:??:?? +0000\n@@ -0,0 +1,1 @@\n+0123456789abcdef\n\\ No newline at end of file\n\n=== removed file 'deleted-fs.txt'\n=== removed file 'deleted.txt'\n=== modified file 'modified-fs.txt'\n--- local/modified-fs.txt\t????-??-?? ??:??:?? +0000\n+++ local/modified-fs.txt\t????-??-?? ??:??:?? +0000\n@@ -0,0 +1,1 @@\n+0123456789abcdef\n\\ No newline at end of file\n\n=== modified file 'modified.txt'\n--- local/modified.txt\t????-??-?? ??:??:?? +0000\n+++ local/modified.txt\t????-??-?? ??:??:?? +0000\n@@ -0,0 +1,1 @@\n+0123456789abcdef\n\\ No newline at end of file"))
def test_status(self):
client = BzrClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals('+N ./added.txt\n D ./deleted-fs.txt\n-D ./deleted.txt\n M ./modified-fs.txt\n M ./modified.txt\n', client.get_status())
def test_status_relpath(self):
client = BzrClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals('+N local/added.txt\n D local/deleted-fs.txt\n-D local/deleted.txt\n M local/modified-fs.txt\n M local/modified.txt\n', client.get_status(basepath=os.path.dirname(self.local_path)))
def test_status_untracked(self):
client = BzrClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals('? ./added-fs.txt\n+N ./added.txt\n D ./deleted-fs.txt\n-D ./deleted.txt\n M ./modified-fs.txt\n M ./modified.txt\n', client.get_status(untracked=True))
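# A minimal standalone sketch of the wildcard matching used by the diff tests
# above: bzr embeds the current date and time in its diff headers, so the
# expected output is matched with fnmatch '?' wildcards instead of equality
# (the header below is a made-up example).
def _fnmatch_timestamp_demo():
    import fnmatch
    header = "--- ./added.txt\t2019-08-05 12:34:56 +0000"
    pattern = "--- ./added.txt\t????-??-?? ??:??:?? +0000"
    return fnmatch.fnmatch(header, pattern)  # True for any date and time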
class BzrExportClientTest(BzrClientTestSetups):
@classmethod
def setUpClass(self):
# setup a local repo once for all export tests
BzrClientTestSetups.setUpClass()
url = self.remote_path
client = BzrClient(self.local_path)
client.checkout(url)
self.basepath_export = os.path.join(self.root_directory, 'export')
def tearDown(self):
pass
@classmethod
def tearDownClass(self):
BzrClientTestSetups.tearDownClass()
def test_export_repository(self):
client = BzrClient(self.local_path)
self.assertTrue(
client.export_repository(self.local_version, self.basepath_export)
)
self.assertTrue(os.path.exists(self.basepath_export + '.tar.gz'))
self.assertFalse(os.path.exists(self.basepath_export + '.tar'))
self.assertFalse(os.path.exists(self.basepath_export))
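# A small sketch of how an export produced by export_repository() above could
# be inspected: the method is expected to create '<basepath>.tar.gz', which
# the standard tarfile module can read back (the helper name is hypothetical).
def _list_export_members(basepath_export):
    import tarfile
    from contextlib import closing
    with closing(tarfile.open(basepath_export + '.tar.gz', 'r:gz')) as tarf:
        return tarf.getnames()  # member paths inside the exported archive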
vcstools-0.1.42/test/test_code_format.py 0000664 0000000 0000000 00000001661 13522611462 0020332 0 ustar 00root root 0000000 0000000 from __future__ import print_function
import os
from pkg_resources import parse_version, get_distribution
def test_pep8_conformance():
"""Test source code for PEP8 conformance"""
try:
import pep8
except ImportError:
print("Skipping pep8 Tests because pep8.py not installed.")
return
# Skip test if pep8 is not new enough
pep8_version = parse_version(get_distribution('pep8').version)
needed_version = parse_version('1.0')
if pep8_version < needed_version:
print("Skipping pep8 Tests because pep8.py is too old")
return
pep8style = pep8.StyleGuide(max_line_length=120)
report = pep8style.options.report
report.start()
pep8style.options.exclude.append('git_archive_all.py')
pep8style.input_dir(os.path.join('..', 'vcstools', 'src'))
report.stop()
assert report.total_errors == 0, "Found '{0}' code style errors (and warnings).".format(report.total_errors)
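# The test above gates itself on both the presence and the version of pep8.
# The same gating pattern as a reusable sketch (the helper name is
# hypothetical; any distribution name can be passed in):
def _has_min_version(dist_name, minimum):
    from pkg_resources import parse_version, get_distribution, DistributionNotFound
    try:
        installed = get_distribution(dist_name).version
    except DistributionNotFound:
        return False
    return parse_version(installed) >= parse_version(minimum)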
vcstools-0.1.42/test/test_git.py 0000664 0000000 0000000 00000133665 13522611462 0016645 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, print_function, unicode_literals
import os
import io
import unittest
import subprocess
import tempfile
import shutil
import types
import threading
import time
from distutils.version import LooseVersion
from vcstools import GitClient
from vcstools.vcs_base import VcsError
try:
from socketserver import TCPServer, BaseRequestHandler
except ImportError:
from SocketServer import TCPServer, BaseRequestHandler
os.environ['GIT_AUTHOR_NAME'] = 'Your Name'
os.environ['GIT_COMMITTER_NAME'] = 'Your Name'
os.environ['GIT_AUTHOR_EMAIL'] = 'name@example.com'
os.environ['EMAIL'] = 'Your Name <name@example.com>'
class GitClientTestSetups(unittest.TestCase):
@classmethod
def setUpClass(self):
self.root_directory = tempfile.mkdtemp()
# helpful when setting tearDown to pass
self.directories = dict(setUp=self.root_directory)
self.remote_path = os.path.join(self.root_directory, "remote")
self.local_path = os.path.join(self.root_directory, "ros")
os.makedirs(self.remote_path)
# create a "remote" repo
subprocess.check_call("git init", shell=True, cwd=self.remote_path)
subprocess.check_call("touch fixed.txt", shell=True, cwd=self.remote_path)
subprocess.check_call("git add *", shell=True, cwd=self.remote_path)
subprocess.check_call("git commit -m initial", shell=True, cwd=self.remote_path)
subprocess.check_call("git tag test_tag", shell=True, cwd=self.remote_path)
# other branch
subprocess.check_call("git branch test_branch", shell=True, cwd=self.remote_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True, cwd=self.remote_path, stdout=subprocess.PIPE)
self.readonly_version_init = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
# files to be modified in "local" repo
subprocess.check_call("touch modified.txt", shell=True, cwd=self.remote_path)
subprocess.check_call("touch modified-fs.txt", shell=True, cwd=self.remote_path)
subprocess.check_call("git add *", shell=True, cwd=self.remote_path)
subprocess.check_call("git commit -m initial", shell=True, cwd=self.remote_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True, cwd=self.remote_path, stdout=subprocess.PIPE)
self.readonly_version_second = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
subprocess.check_call("touch deleted.txt", shell=True, cwd=self.remote_path)
subprocess.check_call("touch deleted-fs.txt", shell=True, cwd=self.remote_path)
subprocess.check_call("git add *", shell=True, cwd=self.remote_path)
subprocess.check_call("git commit -m modified", shell=True, cwd=self.remote_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True, cwd=self.remote_path, stdout=subprocess.PIPE)
self.readonly_version = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
subprocess.check_call("git tag last_tag", shell=True, cwd=self.remote_path)
@classmethod
def tearDownClass(self):
for d in self.directories:
shutil.rmtree(self.directories[d])
def tearDown(self):
if os.path.exists(self.local_path):
shutil.rmtree(self.local_path)
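# setUpClass above captures commit ids by running 'git log -n 1
# --pretty=format:"%H"' and trimming the quotes; the same idea as a small
# helper (sketch only, the helper name is hypothetical):
def _head_commit(repo_path):
    import subprocess
    po = subprocess.Popen("git log -n 1 --pretty=format:%H", shell=True,
                          cwd=repo_path, stdout=subprocess.PIPE)
    return po.stdout.read().decode('UTF-8').strip()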
class GitSwitchDefaultBranchTest(GitClientTestSetups):
def test_get_default_remote_version_label(self):
url = self.remote_path
client = GitClient(self.local_path)
self.assertTrue(client.checkout(url))
self.assertEqual(client.get_default_remote_version_label(), 'master')
subprocess.check_call("git symbolic-ref HEAD refs/heads/test_branch", shell=True, cwd=self.remote_path)
self.assertEqual(client.get_default_remote_version_label(), 'test_branch')
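# The test above repoints the remote's HEAD with 'git symbolic-ref' to change
# its default branch; reading the current default back is the symmetric
# operation (sketch only, helper name is hypothetical; --short needs a
# reasonably recent git):
def _default_branch(repo_path):
    import subprocess
    out = subprocess.check_output("git symbolic-ref --short HEAD", shell=True, cwd=repo_path)
    return out.decode('UTF-8').strip()  # e.g. 'master' or 'test_branch'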
class GitClientTest(GitClientTestSetups):
def test_get_url_by_reading(self):
url = self.remote_path
client = GitClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_url(), self.remote_path)
self.assertEqual(client.get_version(), self.readonly_version)
self.assertEqual(client.get_version(self.readonly_version_init[0:6]), self.readonly_version_init)
self.assertEqual(client.get_version("test_tag"), self.readonly_version_init)
# private functions
self.assertFalse(client._is_local_branch("test_branch"))
self.assertTrue(client._is_remote_branch("test_branch"))
self.assertTrue(client.is_tag("test_tag"))
self.assertFalse(client._is_remote_branch("test_tag"))
self.assertFalse(client.is_tag("test_branch"))
def test_get_url_nonexistant(self):
# local_path = "/tmp/dummy"
client = GitClient(self.local_path)
self.assertEqual(client.get_url(), None)
def test_get_type_name(self):
# local_path = "/tmp/dummy"
client = GitClient(self.local_path)
self.assertEqual(client.get_vcs_type_name(), 'git')
def test_checkout(self):
url = self.remote_path
client = GitClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
self.assertEqual(client._get_branch(), "master")
self.assertEqual(client._get_branch_parent(), ("master", "origin"))
#self.assertEqual(client.get_version(), '-r*')
def test_checkout_dir_exists(self):
url = self.remote_path
client = GitClient(self.local_path)
self.assertFalse(client.path_exists())
os.makedirs(self.local_path)
self.assertTrue(client.checkout(url))
# non-empty
self.assertFalse(client.checkout(url))
def test_checkout_no_unnecessary_updates(self):
client = GitClient(self.local_path)
client.fetches = 0
client.submodules = 0
client.fast_forwards = 0
def ifetch(self):
self.fetches += 1
return True
def iff(self, branch_parent, fetch=True, verbose=False):
self.fast_forwards += 1
return True
def isubm(self, verbose=False, timeout=None):
self.submodules += 1
return True
client._do_fetch = types.MethodType(ifetch, client)
client._do_fast_forward = types.MethodType(iff, client)
client._update_submodules = types.MethodType(isubm, client)
url = self.remote_path
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url))
self.assertEqual(0, client.submodules)
self.assertEqual(0, client.fetches)
self.assertEqual(0, client.fast_forwards)
self.assertTrue(client.update())
self.assertEqual(1, client.submodules)
self.assertEqual(1, client.fetches)
self.assertEqual(1, client.fast_forwards)
self.assertTrue(client.update('test_branch'))
self.assertEqual(2, client.submodules)
self.assertEqual(2, client.fetches)
self.assertEqual(1, client.fast_forwards)
self.assertTrue(client.update('test_branch'))
self.assertEqual(3, client.submodules)
self.assertEqual(3, client.fetches)
self.assertEqual(2, client.fast_forwards)
def test_checkout_no_unnecessary_updates_other_branch(self):
client = GitClient(self.local_path)
client.fetches = 0
client.submodules = 0
client.fast_forwards = 0
def ifetch(self):
self.fetches += 1
return True
def iff(self, branch_parent, fetch=True, verbose=False):
self.fast_forwards += 1
return True
def isubm(self, verbose=False, timeout=None):
self.submodules += 1
return True
client._do_fetch = types.MethodType(ifetch, client)
client._do_fast_forward = types.MethodType(iff, client)
client._update_submodules = types.MethodType(isubm, client)
url = self.remote_path
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, 'test_branch'))
self.assertEqual(1, client.submodules)
self.assertEqual(0, client.fetches)
self.assertEqual(0, client.fast_forwards)
def test_checkout_shallow(self):
url = 'file://' + self.remote_path
client = GitClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, shallow=True))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
self.assertEqual(client._get_branch(), "master")
self.assertEqual(client._get_branch_parent(), ("master", "origin"))
po = subprocess.Popen("git log --pretty=format:%H", shell=True, cwd=self.local_path, stdout=subprocess.PIPE)
log = po.stdout.read().decode('UTF-8').strip().splitlines()
if LooseVersion(client.gitversion) >= LooseVersion('1.8.2'):
# shallow only contains last commit
self.assertEqual(1, len(log), log)
else:
# shallow only contains last 2 commits
self.assertEqual(2, len(log), log)
def test_checkout_specific_version_and_update(self):
url = self.remote_path
version = self.readonly_version
client = GitClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, version))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
self.assertEqual(client.get_version(), version)
new_version = self.readonly_version_second
self.assertTrue(client.update(new_version))
self.assertEqual(client.get_version(), new_version)
def test_checkout_master_branch_and_update(self):
# subdir = "checkout_specific_version_test"
url = self.remote_path
branch = "master"
client = GitClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, branch))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
self.assertEqual(client._get_branch_parent(), (branch, "origin"))
self.assertTrue(client.update(branch))
self.assertEqual(client._get_branch_parent(), (branch, "origin"))
def test_checkout_specific_branch_and_update(self):
# subdir = "checkout_specific_version_test"
url = self.remote_path
branch = "test_branch"
client = GitClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, branch))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertTrue(client._is_local_branch(branch))
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
self.assertEqual(client.get_version(), self.readonly_version_init)
self.assertEqual(client._get_branch(), branch)
self.assertEqual(client._get_branch_parent(), (branch, "origin"))
self.assertTrue(client.update()) # no arg
self.assertEqual(client._get_branch(), branch)
self.assertEqual(client.get_version(), self.readonly_version_init)
self.assertEqual(client._get_branch_parent(), (branch, "origin"))
self.assertTrue(client.update(branch)) # same branch arg
self.assertEqual(client._get_branch(), branch)
self.assertEqual(client.get_version(), self.readonly_version_init)
self.assertEqual(client._get_branch_parent(), (branch, "origin"))
new_branch = 'master'
self.assertTrue(client.update(new_branch))
self.assertEqual(client._get_branch(), new_branch)
self.assertEqual(client._get_branch_parent(), (new_branch, "origin"))
def test_checkout_local_only_branch_and_update(self):
# prevent regression on wstool#25: no rebase after switching branch
url = self.remote_path
branch = "master"
client = GitClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, branch))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertTrue(client._is_local_branch(branch))
subprocess.check_call("git reset --hard HEAD~1", shell=True, cwd=self.local_path)
subprocess.check_call("git checkout -b new_local_branch", shell=True, cwd=self.local_path)
self.assertTrue(client.update(branch)) # same branch arg
self.assertEqual(client._get_branch(), branch)
self.assertEqual(client.get_version(), self.readonly_version)
self.assertEqual(client._get_branch_parent(), (branch, "origin"))
def test_checkout_specific_tag_and_update(self):
url = self.remote_path
tag = "last_tag"
client = GitClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, tag))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
self.assertEqual(client._get_branch_parent(), (None, None))
tag = "test_tag"
self.assertTrue(client.update(tag))
self.assertEqual(client._get_branch_parent(), (None, None))
new_branch = 'master'
self.assertTrue(client.update(new_branch))
self.assertEqual(client._get_branch_parent(), (new_branch, "origin"))
tag = "test_tag"
self.assertTrue(client.update(tag))
def test_fast_forward(self):
url = self.remote_path
client = GitClient(self.local_path)
self.assertTrue(client.checkout(url, "master"))
subprocess.check_call("git reset --hard test_tag", shell=True, cwd=self.local_path)
self.assertTrue(client.update())
def test_fast_forward_diverged(self):
url = self.remote_path
client = GitClient(self.local_path)
self.assertTrue(client.checkout(url, "master"))
subprocess.check_call("git reset --hard test_tag", shell=True, cwd=self.local_path)
subprocess.check_call("touch diverged.txt", shell=True, cwd=self.local_path)
subprocess.check_call("git add *", shell=True, cwd=self.local_path)
subprocess.check_call("git commit -m diverge", shell=True, cwd=self.local_path)
# fail because we have diverged
self.assertFalse(client.update('master'))
def test_fast_forward_simple_ref(self):
url = self.remote_path
client = GitClient(self.local_path)
self.assertTrue(client.checkout(url, "master"))
subprocess.check_call("git reset --hard test_tag", shell=True, cwd=self.local_path)
# replace "refs/head/master" with just "master"
subprocess.check_call("git config --replace-all branch.master.merge master", shell=True, cwd=self.local_path)
self.assertTrue(client._get_branch_parent() is not (None, None))
def test_get_version_not_exist(self):
client = GitClient(path=self.local_path)
client.checkout(url=self.remote_path, version='master')
self.assertEqual(client.get_version(spec='not_exist_version'), None)
def test_get_branch_parent(self):
client = GitClient(path=self.local_path)
client.checkout(url=self.remote_path, version='master')
self.assertEqual(client._get_branch_parent(), ("master", "origin"))
# with other remote than origin
for cmd in ['git remote add remote2 %s' % self.remote_path,
'git config --replace-all branch.master.remote remote2']:
subprocess.check_call(cmd, shell=True, cwd=self.local_path)
self.assertEqual(client._get_branch_parent(), (None, None))
self.assertEqual(client._get_branch_parent(fetch=True), ('master', "remote2"))
# with not actual remote branch
cmd = 'git config --replace-all branch.master.merge dummy_branch'
subprocess.check_call(cmd, shell=True, cwd=self.local_path)
self.assertEqual(client._get_branch_parent(), (None, None))
# return remote back to original config
for cmd in [
'git config --replace-all branch.master.remote origin',
'git config --replace-all branch.master.merge refs/heads/master']:
subprocess.check_call(cmd, shell=True, cwd=self.local_path)
# with detached local status
client.update(version='test_tag')
self.assertEqual(client._get_branch_parent(), (None, None))
# back to master branch
client.update(version='master')
def test_get_current_version_label(self):
client = GitClient(path=self.local_path)
# with detached local status
client.checkout(url=self.remote_path, version='test_tag')
self.assertEqual(client.get_current_version_label(), '<detached>')
# when difference between local and tracking branch
client.update(version='master')
self.assertEqual(client.get_current_version_label(), 'master')
# with other tracking branch
cmd = 'git config --replace-all branch.master.merge test_branch'
subprocess.check_call(cmd, shell=True, cwd=self.local_path)
self.assertEqual(client.get_current_version_label(),
'master < test_branch')
# with other remote
for cmd in [
'git remote add remote2 %s' % self.remote_path,
'git config --replace-all branch.master.remote remote2',
'git fetch remote2']:
subprocess.check_call(cmd, shell=True, cwd=self.local_path)
self.assertEqual(client.get_current_version_label(),
'master < remote2/test_branch')
# return remote back to original config
for cmd in [
'git config --replace-all branch.master.remote origin',
'git config --replace-all branch.master.merge refs/heads/master']:
subprocess.check_call(cmd, shell=True, cwd=self.local_path)
def test_get_remote_version(self):
url = self.remote_path
client = GitClient(path=self.local_path)
client.checkout(url, version='master')
self.assertEqual(client.get_remote_version(fetch=True), self.readonly_version)
self.assertEqual(client.get_remote_version(fetch=False), self.readonly_version)
subprocess.check_call("git reset --hard test_tag", shell=True, cwd=self.local_path)
self.assertEqual(client.get_remote_version(fetch=True), self.readonly_version)
client.update(version='test_branch')
self.assertEqual(client.get_remote_version(fetch=True), self.readonly_version_init)
client.update(version='test_branch')
self.assertEqual(client.get_remote_version(fetch=False), self.readonly_version_init)
# switch tracked branch
subprocess.check_call('git config --replace-all branch.master.merge test_branch', shell=True, cwd=self.local_path)
client.update(version='master')
self.assertEqual(client.get_remote_version(fetch=False), self.readonly_version_init)
# with other remote
for cmd in [
'git remote add remote2 %s' % self.remote_path,
'git config --replace-all branch.master.remote remote2',
'git fetch remote2']:
subprocess.check_call(cmd, shell=True, cwd=self.local_path)
self.assertEqual(client.get_remote_version(fetch=False), self.readonly_version_init)
def testDiffClean(self):
client = GitClient(self.remote_path)
self.assertEquals('', client.get_diff())
def testStatusClean(self):
client = GitClient(self.remote_path)
self.assertEquals('', client.get_status(porcelain=True))
def test_get_environment_metadata(self):
# Verify that metadata is generated
directory = tempfile.mkdtemp()
self.directories['local'] = directory
local_path = os.path.join(directory, "local")
client = GitClient(local_path)
self.assertTrue('version' in client.get_environment_metadata())
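# test_checkout_no_unnecessary_updates above counts how often internal client
# methods run by rebinding them with types.MethodType.  The pattern in
# isolation (sketch only; the class and names are made up):
def _count_calls_demo():
    import types

    class Demo(object):
        def fetch(self):
            return True

    demo = Demo()
    demo.fetches = 0

    def counting_fetch(self):
        self.fetches += 1
        return True

    # bind the replacement function to this one instance only
    demo.fetch = types.MethodType(counting_fetch, demo)
    demo.fetch()
    return demo.fetches  # 1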
class GitClientUpdateTest(GitClientTestSetups):
def test_update_fetch_all_tags(self):
url = self.remote_path
client = GitClient(self.local_path)
self.assertTrue(client.checkout(url, "master"))
self.assertEqual(client._get_branch(), "master")
self.assertTrue(client.update())
p = subprocess.Popen("git tag", shell=True, cwd=self.local_path, stdout=subprocess.PIPE)
output = p.communicate()[0].decode('utf-8')
self.assertEqual('last_tag\ntest_tag\n', output)
subprocess.check_call("git checkout test_tag", shell=True, cwd=self.remote_path)
subprocess.check_call("git branch alt_branch", shell=True, cwd=self.remote_path)
subprocess.check_call("touch alt_file.txt", shell=True, cwd=self.remote_path)
subprocess.check_call("git add *", shell=True, cwd=self.remote_path)
subprocess.check_call("git commit -m altfile", shell=True, cwd=self.remote_path)
# switch to untracked
subprocess.check_call("git checkout test_tag", shell=True, cwd=self.remote_path)
subprocess.check_call("touch new_file.txt", shell=True, cwd=self.remote_path)
subprocess.check_call("git add *", shell=True, cwd=self.remote_path)
subprocess.check_call("git commit -m newfile", shell=True, cwd=self.remote_path)
subprocess.check_call("git tag new_tag", shell=True, cwd=self.remote_path)
self.assertTrue(client.update())
# test whether client gets the tag
p = subprocess.Popen("git tag", shell=True, cwd=self.local_path, stdout=subprocess.PIPE)
output = p.communicate()[0].decode('utf-8')
self.assertEqual('''\
last_tag
new_tag
test_tag
''', output)
p = subprocess.Popen("git branch -a", shell=True, cwd=self.local_path, stdout=subprocess.PIPE)
output = p.communicate()[0].decode('utf-8')
self.assertEqual('''\
* master
remotes/origin/HEAD -> origin/master
remotes/origin/alt_branch
remotes/origin/master
remotes/origin/test_branch
''', output)
class GitClientRemoteVersionFetchTest(GitClientTestSetups):
def test_update_fetch_all_tags(self):
url = self.remote_path
client = GitClient(self.local_path)
self.assertTrue(client.checkout(url, "master"))
self.assertEqual(client._get_branch(), "master")
self.assertEqual(client.get_remote_version(fetch=False), self.readonly_version)
self.assertEqual(client.get_remote_version(fetch=True), self.readonly_version)
subprocess.check_call("touch new_file.txt", shell=True, cwd=self.remote_path)
subprocess.check_call("git add *", shell=True, cwd=self.remote_path)
subprocess.check_call("git commit -m newfile", shell=True, cwd=self.remote_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True, cwd=self.remote_path, stdout=subprocess.PIPE)
remote_new_version = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
self.assertNotEqual(self.readonly_version, remote_new_version)
# remote version stays same until we fetch
self.assertEqual(client.get_remote_version(fetch=False), self.readonly_version)
self.assertEqual(client.get_remote_version(fetch=True), remote_new_version)
self.assertEqual(client.get_remote_version(fetch=False), remote_new_version)
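# As the test above demonstrates, the remote version reported without
# fetching only changes once 'git fetch' has updated the remote-tracking
# ref.  Reading that ref directly (sketch only, helper name is hypothetical):
def _tracked_remote_version(local_path, fetch=False):
    import subprocess
    if fetch:
        subprocess.check_call("git fetch origin", shell=True, cwd=local_path)
    out = subprocess.check_output("git rev-parse origin/master", shell=True, cwd=local_path)
    return out.decode('UTF-8').strip()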
class GitClientLogTest(GitClientTestSetups):
def setUp(self):
client = GitClient(self.local_path)
client.checkout(self.remote_path)
# Create some local untracking branch
subprocess.check_call("git checkout test_tag -b localbranch", shell=True, cwd=self.local_path)
self.n_commits = 10
for i in range(self.n_commits):
subprocess.check_call("touch local_%d.txt" % i, shell=True, cwd=self.local_path)
subprocess.check_call("git add local_%d.txt" % i, shell=True, cwd=self.local_path)
subprocess.check_call("git commit -m \"local_%d\"" % i, shell=True, cwd=self.local_path)
def test_get_log_defaults(self):
client = GitClient(self.local_path)
log = client.get_log()
self.assertEquals(self.n_commits + 1, len(log))
self.assertEquals('local_%d' % (self.n_commits - 1), log[0]['message'])
for key in ['id', 'author', 'email', 'date', 'message']:
self.assertTrue(log[0][key] is not None, key)
def test_get_log_limit(self):
client = GitClient(self.local_path)
log = client.get_log(limit=1)
self.assertEquals(1, len(log))
self.assertEquals('local_%d' % (self.n_commits - 1), log[0]['message'])
def test_get_log_path(self):
client = GitClient(self.local_path)
for count in range(self.n_commits):
log = client.get_log(relpath='local_%d.txt' % count)
self.assertEquals(1, len(log))
class GitClientAffectedFiles(GitClientTestSetups):
def setUp(self):
client = GitClient(self.local_path)
client.checkout(self.remote_path)
# Create some local untracking branch
subprocess.check_call("git checkout test_tag -b localbranch", shell=True, cwd=self.local_path)
subprocess.check_call("touch local_file", shell=True, cwd=self.local_path)
subprocess.check_call("git add local_file", shell=True, cwd=self.local_path)
subprocess.check_call("git commit -m \"local_file\"", shell=True, cwd=self.local_path)
def test_get_affected_files(self):
client = GitClient(self.local_path)
affected = client.get_affected_files(client.get_log()[0]['id'])
self.assertEqual(sorted(['local_file']),
sorted(affected))
self.assertEquals(['local_file'], affected)
class GitClientDanglingCommitsTest(GitClientTestSetups):
def setUp(self):
client = GitClient(self.local_path)
client.checkout(self.remote_path)
# Create some local untracking branch
subprocess.check_call("git checkout test_tag -b localbranch", shell=True, cwd=self.local_path)
subprocess.check_call("touch local.txt", shell=True, cwd=self.local_path)
subprocess.check_call("git add *", shell=True, cwd=self.local_path)
subprocess.check_call("git commit -m my_branch", shell=True, cwd=self.local_path)
subprocess.check_call("git tag my_branch_tag", shell=True, cwd=self.local_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True, cwd=self.local_path, stdout=subprocess.PIPE)
self.untracked_version = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
# diverged branch
subprocess.check_call("git checkout test_tag -b diverged_branch", shell=True, cwd=self.local_path)
subprocess.check_call("touch diverged.txt", shell=True, cwd=self.local_path)
subprocess.check_call("git add *", shell=True, cwd=self.local_path)
subprocess.check_call("git commit -m diverged_branch", shell=True, cwd=self.local_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True, cwd=self.local_path, stdout=subprocess.PIPE)
self.diverged_branch_version = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
# Go detached to create some dangling commits
subprocess.check_call("git checkout test_tag", shell=True, cwd=self.local_path)
# create a commit only referenced by tag
subprocess.check_call("touch tagged.txt", shell=True, cwd=self.local_path)
subprocess.check_call("git add *", shell=True, cwd=self.local_path)
subprocess.check_call("git commit -m no_branch", shell=True, cwd=self.local_path)
subprocess.check_call("git tag no_br_tag", shell=True, cwd=self.local_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True, cwd=self.local_path, stdout=subprocess.PIPE)
self.no_br_tag_version = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
# create a dangling commit
subprocess.check_call("touch dangling.txt", shell=True, cwd=self.local_path)
subprocess.check_call("git add *", shell=True, cwd=self.local_path)
subprocess.check_call("git commit -m dangling", shell=True, cwd=self.local_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True, cwd=self.local_path, stdout=subprocess.PIPE)
self.dangling_version = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
# create a dangling tip on top of dangling commit (to catch related bugs)
subprocess.check_call("touch dangling-tip.txt", shell=True, cwd=self.local_path)
subprocess.check_call("git add *", shell=True, cwd=self.local_path)
subprocess.check_call("git commit -m dangling_tip", shell=True, cwd=self.local_path)
# create and delete branch to cause reflog entry
subprocess.check_call("git branch oldbranch", shell=True, cwd=self.local_path)
subprocess.check_call("git branch -D oldbranch", shell=True, cwd=self.local_path)
# go back to master to make head point somewhere else
subprocess.check_call("git checkout master", shell=True, cwd=self.local_path)
def test_is_commit_in_orphaned_subtree(self):
client = GitClient(self.local_path)
self.assertTrue(client._is_commit_in_orphaned_subtree(self.dangling_version))
self.assertFalse(client._is_commit_in_orphaned_subtree(self.no_br_tag_version))
self.assertFalse(client._is_commit_in_orphaned_subtree(self.diverged_branch_version))
def test_protect_dangling(self):
client = GitClient(self.local_path)
# url = self.remote_path
self.assertEqual(client._get_branch(), "master")
tag = "no_br_tag"
self.assertTrue(client.update(tag))
self.assertEqual(client._get_branch(), None)
self.assertEqual(client._get_branch_parent(), (None, None))
tag = "test_tag"
self.assertTrue(client.update(tag))
self.assertEqual(client._get_branch(), None)
self.assertEqual(client._get_branch_parent(), (None, None))
# to dangling commit
sha = self.dangling_version
self.assertTrue(client.update(sha))
self.assertEqual(client._get_branch(), None)
self.assertEqual(client.get_version(), self.dangling_version)
self.assertEqual(client._get_branch_parent(), (None, None))
# now HEAD protects the dangling commit, should not be allowed to move off.
new_branch = 'master'
self.assertFalse(client.update(new_branch))
def test_detached_to_branch(self):
client = GitClient(self.local_path)
# url = self.remote_path
self.assertEqual(client._get_branch(), "master")
tag = "no_br_tag"
self.assertTrue(client.update(tag))
self.assertEqual(client._get_branch(), None)
self.assertEqual(client._get_branch_parent(), (None, None))
tag = "test_tag"
self.assertTrue(client.update(tag))
self.assertEqual(client._get_branch(), None)
self.assertEqual(client.get_version(), self.readonly_version_init)
self.assertEqual(client._get_branch_parent(), (None, None))
#update should not change anything
self.assertTrue(client.update()) # no arg
self.assertEqual(client._get_branch(), None)
self.assertEqual(client.get_version(), self.readonly_version_init)
self.assertEqual(client._get_branch_parent(), (None, None))
new_branch = 'master'
self.assertTrue(client.update(new_branch))
self.assertEqual(client._get_branch(), new_branch)
self.assertEqual(client.get_version(), self.readonly_version)
self.assertEqual(client._get_branch_parent(), (new_branch, "origin"))
def test_checkout_untracked_branch_and_update(self):
# difference to tracked branches is that branch parent is None, and we may hop outside lineage
client = GitClient(self.local_path)
url = self.remote_path
branch = "localbranch"
self.assertEqual(client._get_branch(), "master")
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertTrue(client._is_local_branch(branch))
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
self.assertTrue(client.update(branch))
self.assertEqual(client.get_version(), self.untracked_version)
self.assertEqual(client._get_branch(), branch)
self.assertEqual(client._get_branch_parent(), (None, None))
self.assertTrue(client.update()) # no arg
self.assertEqual(client._get_branch(), branch)
self.assertEqual(client.get_version(), self.untracked_version)
self.assertEqual(client._get_branch_parent(), (None, None))
self.assertTrue(client.update(branch)) # same branch arg
self.assertEqual(client._get_branch(), branch)
self.assertEqual(client.get_version(), self.untracked_version)
self.assertEqual(client._get_branch_parent(), (None, None))
# to master
new_branch = 'master'
self.assertTrue(client.update(new_branch))
self.assertEqual(client._get_branch(), new_branch)
self.assertEqual(client.get_version(), self.readonly_version)
self.assertEqual(client._get_branch_parent(), (new_branch, "origin"))
# and back
self.assertTrue(client.update(branch)) # same branch arg
self.assertEqual(client._get_branch(), branch)
self.assertEqual(client.get_version(), self.untracked_version)
self.assertEqual(client._get_branch_parent(), (None, None))
# to dangling commit
sha = self.dangling_version
self.assertTrue(client.update(sha))
self.assertEqual(client._get_branch(), None)
self.assertEqual(client.get_version(), self.dangling_version)
self.assertEqual(client._get_branch_parent(), (None, None))
# should not work, in order to protect commits from becoming dangling
# to commit outside lineage
tag = "test_tag"
self.assertFalse(client.update(tag))
def test_inject_protection(self):
client = GitClient(self.local_path)
try:
client.is_tag('foo"; bar"', fetch=False)
self.fail("expected Exception")
except VcsError:
pass
try:
client._rev_list_contains('foo"; echo bar"', "foo", fetch=False)
self.fail("expected Exception")
except VcsError:
pass
try:
client._rev_list_contains('foo', 'foo"; echo bar"', fetch=False)
self.fail("expected Exception")
except VcsError:
pass
try:
client.get_version('foo"; echo bar"')
self.fail("expected Exception")
except VcsError:
pass
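# The dangling-commit tests above hinge on whether a commit is still
# reachable from any branch.  Plain git can answer that with
# 'git branch --all --contains'; an unreachable commit yields an empty list
# (sketch only, helper name is hypothetical):
def _branches_containing(local_path, sha):
    import subprocess
    out = subprocess.check_output("git branch --all --contains %s" % sha,
                                  shell=True, cwd=local_path)
    return [line.strip(' *') for line in out.decode('UTF-8').splitlines() if line.strip()]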
class GitClientOverflowTest(GitClientTestSetups):
'''Test reproducing an overflow of arguments to git log'''
def setUp(self):
client = GitClient(self.local_path)
client.checkout(self.remote_path)
subprocess.check_call("git checkout test_tag", shell=True, cwd=self.local_path)
subprocess.check_call("echo 0 >> count.txt", shell=True, cwd=self.local_path)
subprocess.check_call("git add count.txt", shell=True, cwd=self.local_path)
subprocess.check_call("git commit -m modified-0", shell=True, cwd=self.local_path)
# produce many tags to make git log command fail if all are added
for count in range(4000):
subprocess.check_call("git tag modified-%s" % count, shell=True, cwd=self.local_path)
po = subprocess.Popen(
"git log -n 1 --pretty=format:\"%H\"",
shell=True, cwd=self.local_path, stdout=subprocess.PIPE)
self.last_version = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
def test_orphaned_overflow(self):
client = GitClient(self.local_path)
# this failed when passing all ref ids to git log
self.assertFalse(client._is_commit_in_orphaned_subtree(self.last_version))
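# The overflow test above exists because passing thousands of ref ids on one
# git command line can exceed the operating system's argument-length limit.
# The usual remedy is to process such lists in bounded chunks, e.g.:
def _chunked(items, size=500):
    for start in range(0, len(items), size):
        yield items[start:start + size]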
class GitDiffStatClientTest(GitClientTestSetups):
@classmethod
def setUpClass(self):
GitClientTestSetups.setUpClass()
client = GitClient(self.local_path)
client.checkout(self.remote_path, self.readonly_version)
# after setting up "readonly" repo, change files and make some changes
subprocess.check_call("rm deleted-fs.txt", shell=True, cwd=self.local_path)
subprocess.check_call("git rm deleted.txt", shell=True, cwd=self.local_path)
f = io.open(os.path.join(self.local_path, "modified.txt"), 'a')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "modified-fs.txt"), 'a')
f.write('0123456789abcdef')
f.close()
subprocess.check_call("git add modified.txt", shell=True, cwd=self.local_path)
f = io.open(os.path.join(self.local_path, "added-fs.txt"), 'w')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "added.txt"), 'w')
f.write('0123456789abcdef')
f.close()
subprocess.check_call("git add added.txt", shell=True, cwd=self.local_path)
def tearDown(self):
pass
def testDiff(self):
client = GitClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals(
'''\
diff --git ./added.txt ./added.txt
new file mode 100644
index 0000000..454f6b3
--- /dev/null
+++ ./added.txt
@@ -0,0 +1 @@
+0123456789abcdef
\\ No newline at end of file
diff --git ./deleted-fs.txt ./deleted-fs.txt
deleted file mode 100644
index e69de29..0000000
diff --git ./deleted.txt ./deleted.txt
deleted file mode 100644
index e69de29..0000000
diff --git ./modified-fs.txt ./modified-fs.txt
index e69de29..454f6b3 100644
--- ./modified-fs.txt
+++ ./modified-fs.txt
@@ -0,0 +1 @@
+0123456789abcdef
\\ No newline at end of file
diff --git ./modified.txt ./modified.txt
index e69de29..454f6b3 100644
--- ./modified.txt
+++ ./modified.txt
@@ -0,0 +1 @@
+0123456789abcdef
\\ No newline at end of file''',
client.get_diff().rstrip())
def testDiffRelpath(self):
client = GitClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals(
'''\
diff --git ros/added.txt ros/added.txt
new file mode 100644
index 0000000..454f6b3
--- /dev/null
+++ ros/added.txt
@@ -0,0 +1 @@
+0123456789abcdef
\\ No newline at end of file
diff --git ros/deleted-fs.txt ros/deleted-fs.txt
deleted file mode 100644
index e69de29..0000000
diff --git ros/deleted.txt ros/deleted.txt
deleted file mode 100644
index e69de29..0000000
diff --git ros/modified-fs.txt ros/modified-fs.txt
index e69de29..454f6b3 100644
--- ros/modified-fs.txt
+++ ros/modified-fs.txt
@@ -0,0 +1 @@
+0123456789abcdef
\\ No newline at end of file
diff --git ros/modified.txt ros/modified.txt
index e69de29..454f6b3 100644
--- ros/modified.txt
+++ ros/modified.txt
@@ -0,0 +1 @@
+0123456789abcdef
\\ No newline at end of file''',
client.get_diff(basepath=os.path.dirname(self.local_path)).rstrip())
def testStatus(self):
client = GitClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals(
'''\
A ./added.txt
D ./deleted-fs.txt
D ./deleted.txt
M ./modified-fs.txt
M ./modified.txt
''',
client.get_status(porcelain=True))
def testStatusRelPath(self):
client = GitClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals(
'''\
A ros/added.txt
D ros/deleted-fs.txt
D ros/deleted.txt
M ros/modified-fs.txt
M ros/modified.txt
''',
client.get_status(basepath=os.path.dirname(self.local_path), porcelain=True))
def testStatusUntracked(self):
client = GitClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals(
'''\
A ./added.txt
D ./deleted-fs.txt
D ./deleted.txt
M ./modified-fs.txt
M ./modified.txt
?? ./added-fs.txt
''',
client.get_status(untracked=True, porcelain=True))
class GitExportClientTest(GitClientTestSetups):
@classmethod
def setUpClass(self):
GitClientTestSetups.setUpClass()
client = GitClient(self.local_path)
client.checkout(self.remote_path, self.readonly_version)
self.basepath_export = os.path.join(self.root_directory, 'export')
def tearDown(self):
pass
def testExportRepository(self):
client = GitClient(self.local_path)
self.assertTrue(
client.export_repository(self.readonly_version,
self.basepath_export)
)
self.assertTrue(os.path.exists(self.basepath_export + '.tar.gz'))
self.assertFalse(os.path.exists(self.basepath_export + '.tar'))
self.assertFalse(os.path.exists(self.basepath_export))
class GitGetBranchesClientTest(GitClientTestSetups):
@classmethod
def setUpClass(self):
GitClientTestSetups.setUpClass()
def tearDown(self):
pass
def testGetBranches(self):
client = GitClient(self.local_path)
client.checkout(self.remote_path)
self.assertEqual(client.get_branches(True), ['master'])
self.assertEqual(client.get_branches(),
['master', 'remotes/origin/master',
'remotes/origin/test_branch'])
subprocess.check_call('git checkout test_branch', shell=True,
cwd=self.local_path, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.assertEqual(client.get_branches(True), ['master', 'test_branch'])
self.assertEqual(client.get_branches(),
['master', 'test_branch', 'remotes/origin/master',
'remotes/origin/test_branch'])
class GitTimeoutTest(unittest.TestCase):
class MuteHandler(BaseRequestHandler):
def handle(self):
data = True
while data:
data = self.request.recv(1024)
@classmethod
def setUpClass(self):
self.mute_server = TCPServer(('localhost', 0), GitTimeoutTest.MuteHandler)
_, self.mute_port = self.mute_server.server_address
serv_thread = threading.Thread(target=self.mute_server.serve_forever)
serv_thread.daemon = True
serv_thread.start()
self.root_directory = tempfile.mkdtemp()
self.local_path = os.path.join(self.root_directory, "ros")
def test_checkout_timeout(self):
# SSH'ing to a mute server will hang for a very long time
url = 'ssh://test@127.0.0.1:{0}/test'.format(self.mute_port)
client = GitClient(self.local_path)
start = time.time()
self.assertFalse(client.checkout(url, timeout=2.0))
stop = time.time()
self.assertTrue(stop - start > 1.9)
self.assertTrue(stop - start < 3.0)
# the git processes will clean up the checkout dir, we have to wait
# for them to finish in order to avoid a race condition with rmtree()
while os.path.exists(self.local_path):
time.sleep(0.2)
@classmethod
def tearDownClass(self):
self.mute_server.shutdown()
if os.path.exists(self.root_directory):
shutil.rmtree(self.root_directory)
def tearDown(self):
if os.path.exists(self.local_path):
shutil.rmtree(self.local_path)
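# GitTimeoutTest above relies on two small tricks: binding to port 0 so the
# OS picks a free ephemeral port, and a handler that reads forever without
# replying, so that ssh never gets an answer.  Both in isolation (sketch only):
def _ephemeral_mute_server():
    try:
        from socketserver import TCPServer, BaseRequestHandler
    except ImportError:
        from SocketServer import TCPServer, BaseRequestHandler

    class Silent(BaseRequestHandler):
        def handle(self):
            data = True
            while data:
                data = self.request.recv(1024)  # swallow input, never answer

    server = TCPServer(('localhost', 0), Silent)
    _, port = server.server_address  # the port the OS assigned
    server.server_close()
    return port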
vcstools-0.1.42/test/test_git_subm.py 0000664 0000000 0000000 00000071472 13522611462 0017670 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, print_function, unicode_literals
import os
import unittest
import subprocess
import tempfile
import shutil
import tarfile
import filecmp
from contextlib import closing
from vcstools.git import GitClient
class GitClientTestSetups(unittest.TestCase):
@classmethod
def setUpClass(self):
self.root_directory = tempfile.mkdtemp()
# helpful when setting tearDown to pass
self.directories = dict(setUp=self.root_directory)
self.remote_dir = os.path.join(self.root_directory, "remote")
self.repo_path = os.path.join(self.remote_dir, "repo")
self.submodule_path = os.path.join(self.remote_dir, "submodule")
self.subsubmodule_path = os.path.join(self.remote_dir, "subsubmodule")
self.local_path = os.path.join(self.root_directory, "local")
self.sublocal_path = os.path.join(self.local_path, "submodule")
self.sublocal2_path = os.path.join(self.local_path, "submodule2")
self.subsublocal_path = os.path.join(self.sublocal_path, "subsubmodule")
self.subsublocal2_path = os.path.join(self.sublocal2_path, "subsubmodule")
self.export_path = os.path.join(self.root_directory, "export")
self.subexport_path = os.path.join(self.export_path, "submodule")
self.subexport2_path = os.path.join(self.export_path, "submodule2")
self.subsubexport_path = os.path.join(self.subexport_path, "subsubmodule")
self.subsubexport2_path = os.path.join(self.subexport2_path, "subsubmodule")
os.makedirs(self.repo_path)
os.makedirs(self.submodule_path)
os.makedirs(self.subsubmodule_path)
# create a "remote" repo
subprocess.check_call("git init", shell=True, cwd=self.repo_path)
subprocess.check_call("touch fixed.txt", shell=True, cwd=self.repo_path)
subprocess.check_call("git add fixed.txt", shell=True, cwd=self.repo_path)
subprocess.check_call("git commit -m initial", shell=True, cwd=self.repo_path)
subprocess.check_call("git tag test_tag", shell=True, cwd=self.repo_path)
subprocess.check_call("git branch initial_branch", shell=True, cwd=self.repo_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True,
cwd=self.repo_path, stdout=subprocess.PIPE)
self.version_init = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
# create a submodule repo
subprocess.check_call("git init", shell=True, cwd=self.submodule_path)
subprocess.check_call("touch subfixed.txt", shell=True, cwd=self.submodule_path)
subprocess.check_call("git add *", shell=True, cwd=self.submodule_path)
subprocess.check_call("git commit -m initial", shell=True, cwd=self.submodule_path)
subprocess.check_call("git tag sub_test_tag", shell=True, cwd=self.submodule_path)
# create a subsubmodule repo
subprocess.check_call("git init", shell=True, cwd=self.subsubmodule_path)
subprocess.check_call("touch subsubfixed.txt", shell=True, cwd=self.subsubmodule_path)
subprocess.check_call("git add *", shell=True, cwd=self.subsubmodule_path)
subprocess.check_call("git commit -m initial", shell=True, cwd=self.subsubmodule_path)
subprocess.check_call("git tag subsub_test_tag", shell=True, cwd=self.subsubmodule_path)
# attach subsubmodule to submodule
subprocess.check_call("git submodule add %s %s" % (self.subsubmodule_path, "subsubmodule"),
shell=True, cwd=self.submodule_path)
subprocess.check_call("git submodule init", shell=True, cwd=self.submodule_path)
subprocess.check_call("git submodule update", shell=True, cwd=self.submodule_path)
subprocess.check_call("git commit -m subsubmodule", shell=True, cwd=self.submodule_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True,
cwd=self.subsubmodule_path, stdout=subprocess.PIPE)
self.subsubversion_final = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True,
cwd=self.submodule_path, stdout=subprocess.PIPE)
self.subversion_final = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
# attach submodule somewhere, only in test_branch first
subprocess.check_call("git checkout master -b test_branch", shell=True, cwd=self.repo_path)
subprocess.check_call("git submodule add %s %s" % (self.submodule_path,
"submodule2"), shell=True, cwd=self.repo_path)
# this is needed only if git <= 1.7, during the time when submodules were being introduced (from 1.5.3)
subprocess.check_call("git submodule init", shell=True, cwd=self.repo_path)
subprocess.check_call("git submodule update", shell=True, cwd=self.repo_path)
subprocess.check_call("git commit -m submodule", shell=True, cwd=self.repo_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True,
cwd=self.repo_path, stdout=subprocess.PIPE)
self.version_test = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
# attach submodule using relative url, only in test_sub_relative
subprocess.check_call("git checkout master -b test_sub_relative", shell=True, cwd=self.repo_path)
subprocess.check_call("git submodule add %s %s" % (os.path.join('..', os.path.basename(self.submodule_path)),
"submodule"), shell=True, cwd=self.repo_path)
# this is needed only if git <= 1.7, during the time when submodules were being introduced (from 1.5.3)
subprocess.check_call("git submodule init", shell=True, cwd=self.repo_path)
subprocess.check_call("git submodule update", shell=True, cwd=self.repo_path)
subprocess.check_call("git commit -m submodule", shell=True, cwd=self.repo_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True,
cwd=self.repo_path, stdout=subprocess.PIPE)
self.version_relative = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
# attach submodule to remote on master. CAREFUL: submodule2 is still in working tree (git does not clean it)
subprocess.check_call("git checkout master", shell=True, cwd=self.repo_path)
subprocess.check_call("git submodule add %s %s" % (self.submodule_path, "submodule"),
shell=True, cwd=self.repo_path)
# this is needed only if git <= 1.7, during the time when submodules were being introduced (from 1.5.3)
subprocess.check_call("git submodule init", shell=True, cwd=self.repo_path)
subprocess.check_call("git submodule update", shell=True, cwd=self.repo_path)
subprocess.check_call("git commit -m submodule", shell=True, cwd=self.repo_path)
po = subprocess.Popen("git log -n 1 --pretty=format:\"%H\"", shell=True,
cwd=self.repo_path, stdout=subprocess.PIPE)
self.version_final = po.stdout.read().decode('UTF-8').rstrip('"').lstrip('"')
subprocess.check_call("git tag last_tag", shell=True, cwd=self.repo_path)
print("setup done\n\n")
@classmethod
def tearDownClass(self):
for d in self.directories:
shutil.rmtree(self.directories[d])
def tearDown(self):
if os.path.exists(self.local_path):
shutil.rmtree(self.local_path)
if os.path.exists(self.export_path):
shutil.rmtree(self.export_path)
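# setUpClass above wires up each submodule with the same sequence of git
# commands; as its comments note, the explicit init/update steps matter
# mostly for old git versions.  The sequence as one helper (sketch only,
# helper name is hypothetical):
def _add_submodule(superproject_path, submodule_url, subdir):
    import subprocess
    for cmd in ["git submodule add %s %s" % (submodule_url, subdir),
                "git submodule init",
                "git submodule update",
                "git commit -m submodule"]:
        subprocess.check_call(cmd, shell=True, cwd=superproject_path)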
class GitClientTest(GitClientTestSetups):
def test_checkout_master_with_subs(self):
url = self.repo_path
client = GitClient(self.local_path)
subclient = GitClient(self.sublocal_path)
subsubclient = GitClient(self.subsublocal_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(self.version_final, client.get_version())
self.assertTrue(subclient.path_exists())
self.assertTrue(subclient.detect_presence())
self.assertEqual(self.subversion_final, subclient.get_version())
self.assertTrue(subsubclient.path_exists())
self.assertTrue(subsubclient.detect_presence())
self.assertEqual(self.subsubversion_final, subsubclient.get_version())
def test_export_master(self):
url = self.repo_path
client = GitClient(self.local_path)
subclient = GitClient(self.sublocal_path)
subsubclient = GitClient(self.subsublocal_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(os.path.exists(self.export_path))
self.assertTrue(client.checkout(url))
self.assertTrue(client.path_exists())
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
tarpath = client.export_repository("master", self.export_path)
self.assertEqual(tarpath, self.export_path + '.tar.gz')
os.mkdir(self.export_path)
with closing(tarfile.open(tarpath, "r:gz")) as tarf:
tarf.extractall(self.export_path)
subsubdirdiff = filecmp.dircmp(self.subsubexport_path, self.subsublocal_path, ignore=['.git', '.gitmodules'])
self.assertEqual(subsubdirdiff.left_only, [])
self.assertEqual(subsubdirdiff.right_only, [])
self.assertEqual(subsubdirdiff.diff_files, [])
subdirdiff = filecmp.dircmp(self.subexport_path, self.sublocal_path, ignore=['.git', '.gitmodules'])
self.assertEqual(subdirdiff.left_only, [])
self.assertEqual(subdirdiff.right_only, [])
self.assertEqual(subdirdiff.diff_files, [])
dirdiff = filecmp.dircmp(self.export_path, self.local_path, ignore=['.git', '.gitmodules'])
self.assertEqual(dirdiff.left_only, [])
self.assertEqual(dirdiff.right_only, [])
self.assertEqual(dirdiff.diff_files, [])
def test_export_relative(self):
url = self.repo_path
client = GitClient(self.local_path)
subclient = GitClient(self.sublocal_path)
subsubclient = GitClient(self.subsublocal_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(os.path.exists(self.export_path))
self.assertTrue(client.checkout(url, "test_sub_relative"))
self.assertTrue(client.path_exists())
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
#subprocess.call(["tree", self.root_directory])
tarpath = client.export_repository("test_sub_relative", self.export_path)
self.assertEqual(tarpath, self.export_path + '.tar.gz')
os.mkdir(self.export_path)
with closing(tarfile.open(tarpath, "r:gz")) as tarf:
tarf.extractall(self.export_path)
subsubdirdiff = filecmp.dircmp(self.subsubexport_path, self.subsublocal_path, ignore=['.git', '.gitmodules'])
self.assertEqual(subsubdirdiff.left_only, [])
self.assertEqual(subsubdirdiff.right_only, [])
self.assertEqual(subsubdirdiff.diff_files, [])
subdirdiff = filecmp.dircmp(self.subexport_path, self.sublocal_path, ignore=['.git', '.gitmodules'])
self.assertEqual(subdirdiff.left_only, [])
self.assertEqual(subdirdiff.right_only, [])
self.assertEqual(subdirdiff.diff_files, [])
dirdiff = filecmp.dircmp(self.export_path, self.local_path, ignore=['.git', '.gitmodules'])
self.assertEqual(dirdiff.left_only, [])
self.assertEqual(dirdiff.right_only, [])
self.assertEqual(dirdiff.diff_files, [])
def test_export_branch(self):
url = self.repo_path
client = GitClient(self.local_path)
subclient = GitClient(self.sublocal_path)
subclient2 = GitClient(self.sublocal2_path)
subsubclient = GitClient(self.subsublocal_path)
subsubclient2 = GitClient(self.subsublocal2_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(os.path.exists(self.export_path))
self.assertTrue(client.checkout(url, version='master'))
self.assertTrue(client.path_exists())
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
self.assertFalse(subclient2.path_exists())
self.assertFalse(subsubclient2.path_exists())
# we need first to retrieve locally the branch we want to export
self.assertTrue(client.update(version='test_branch'))
self.assertTrue(client.path_exists())
# git leaves old submodule around by default
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
# new submodule should be there
self.assertTrue(subclient2.path_exists())
self.assertTrue(subsubclient2.path_exists())
tarpath = client.export_repository("test_branch", self.export_path)
self.assertEqual(tarpath, self.export_path + '.tar.gz')
os.mkdir(self.export_path)
with closing(tarfile.open(tarpath, "r:gz")) as tarf:
tarf.extractall(self.export_path)
# Checking that we have only submodule2 in our export
self.assertFalse(os.path.exists(self.subexport_path))
self.assertFalse(os.path.exists(self.subsubexport_path))
self.assertTrue(os.path.exists(self.subexport2_path))
self.assertTrue(os.path.exists(self.subsubexport2_path))
# comparing with test_branch version ( currently checked-out )
subsubdirdiff = filecmp.dircmp(self.subsubexport2_path, self.subsublocal_path, ignore=['.git', '.gitmodules'])
self.assertEqual(subsubdirdiff.left_only, []) # same subsubfixed.txt in both subsubmodule/
self.assertEqual(subsubdirdiff.right_only, [])
self.assertEqual(subsubdirdiff.diff_files, [])
subdirdiff = filecmp.dircmp(self.subexport2_path, self.sublocal_path, ignore=['.git', '.gitmodules'])
self.assertEqual(subdirdiff.left_only, [])
self.assertEqual(subdirdiff.right_only, [])
self.assertEqual(subdirdiff.diff_files, [])
dirdiff = filecmp.dircmp(self.export_path, self.local_path, ignore=['.git', '.gitmodules'])
self.assertEqual(dirdiff.left_only, [])
# submodule is still there on local_path (git default behavior)
self.assertEqual(dirdiff.right_only, ['submodule'])
self.assertEqual(dirdiff.diff_files, [])
def test_export_hash(self):
url = self.repo_path
client = GitClient(self.local_path)
subclient = GitClient(self.sublocal_path)
subclient2 = GitClient(self.sublocal2_path)
subsubclient = GitClient(self.subsublocal_path)
subsubclient2 = GitClient(self.subsublocal2_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(os.path.exists(self.export_path))
self.assertTrue(client.checkout(url, version='master'))
self.assertTrue(client.path_exists())
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
self.assertFalse(subclient2.path_exists())
self.assertFalse(subsubclient2.path_exists())
# we first need to retrieve the hash we want to export locally
self.assertTrue(client.update(version=self.version_test))
self.assertTrue(client.path_exists())
# git leaves old submodule around by default
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
# new submodule should be there
self.assertTrue(subclient2.path_exists())
self.assertTrue(subsubclient2.path_exists())
tarpath = client.export_repository(self.version_test, self.export_path)
self.assertEqual(tarpath, self.export_path + '.tar.gz')
os.mkdir(self.export_path)
with closing(tarfile.open(tarpath, "r:gz")) as tarf:
tarf.extractall(self.export_path)
# Checking that we have only submodule2 in our export
self.assertFalse(os.path.exists(self.subexport_path))
self.assertFalse(os.path.exists(self.subsubexport_path))
self.assertTrue(os.path.exists(self.subexport2_path))
self.assertTrue(os.path.exists(self.subsubexport2_path))
# comparing with the version_test revision (currently checked out)
subsubdirdiff = filecmp.dircmp(self.subsubexport2_path, self.subsublocal_path, ignore=['.git', '.gitmodules'])
self.assertEqual(subsubdirdiff.left_only, []) # same subsubfixed.txt in both subsubmodule/
self.assertEqual(subsubdirdiff.right_only, [])
self.assertEqual(subsubdirdiff.diff_files, [])
subdirdiff = filecmp.dircmp(self.subexport2_path, self.sublocal_path, ignore=['.git', '.gitmodules'])
self.assertEqual(subdirdiff.left_only, [])
self.assertEqual(subdirdiff.right_only, [])
self.assertEqual(subdirdiff.diff_files, [])
dirdiff = filecmp.dircmp(self.export_path, self.local_path, ignore=['.git', '.gitmodules'])
self.assertEqual(dirdiff.left_only, [])
# submodule is still there on local_path (git default behavior)
self.assertEqual(dirdiff.right_only, ['submodule'])
self.assertEqual(dirdiff.diff_files, [])
def test_checkout_branch_without_subs(self):
url = self.repo_path
client = GitClient(self.local_path)
subclient = GitClient(self.sublocal_path)
subsubclient = GitClient(self.subsublocal_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, version='initial_branch'))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(self.version_init, client.get_version())
self.assertFalse(subclient.path_exists())
self.assertFalse(subsubclient.path_exists())
def test_checkout_test_branch_with_subs(self):
url = self.repo_path
client = GitClient(self.local_path)
subclient = GitClient(self.sublocal_path)
subsubclient = GitClient(self.subsublocal_path)
subclient2 = GitClient(self.sublocal2_path)
subsubclient2 = GitClient(self.subsublocal2_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, version='test_branch'))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(self.version_test, client.get_version())
self.assertFalse(subclient.path_exists())
self.assertFalse(subsubclient.path_exists())
self.assertTrue(subclient2.path_exists())
self.assertTrue(subsubclient2.path_exists())
def test_checkout_master_with_subs2(self):
url = self.repo_path
client = GitClient(self.local_path)
subclient = GitClient(self.sublocal_path)
subsubclient = GitClient(self.subsublocal_path)
subclient2 = GitClient(self.sublocal2_path)
subsubclient2 = GitClient(self.subsublocal2_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, version='master'))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(self.version_final, client.get_version())
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
self.assertFalse(subclient2.path_exists())
self.assertFalse(subsubclient2.path_exists())
def test_switch_branches(self):
url = self.repo_path
client = GitClient(self.local_path)
subclient = GitClient(self.sublocal_path)
subclient2 = GitClient(self.sublocal2_path)
subsubclient = GitClient(self.subsublocal_path)
subsubclient2 = GitClient(self.subsublocal2_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url))
self.assertTrue(client.path_exists())
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
self.assertFalse(subclient2.path_exists())
new_version = "test_branch"
self.assertTrue(client.update(new_version))
# checking that update doesn't make submodule disappear (git default behavior)
self.assertTrue(subclient2.path_exists())
self.assertTrue(subsubclient2.path_exists())
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
oldnew_version = "master"
self.assertTrue(client.update(oldnew_version))
# checking that update doesn't make submodule2 disappear (git default behavior)
self.assertTrue(subclient2.path_exists())
self.assertTrue(subsubclient2.path_exists())
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
def test_switch_branches_retrieve_local_subcommit(self):
url = self.repo_path
client = GitClient(self.local_path)
subclient = GitClient(self.sublocal_path)
subclient2 = GitClient(self.sublocal2_path)
subsubclient = GitClient(self.subsublocal_path)
subsubclient2 = GitClient(self.subsublocal2_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url))
self.assertTrue(client.path_exists())
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
self.assertFalse(subclient2.path_exists())
new_version = "test_branch"
self.assertTrue(client.update(new_version))
# checking that update doesn't make submodule disappear (git default behavior)
self.assertTrue(subclient2.path_exists())
self.assertTrue(subsubclient2.path_exists())
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
subprocess.check_call("touch submodif.txt", shell=True, cwd=self.sublocal2_path)
subprocess.check_call("git add submodif.txt", shell=True, cwd=self.sublocal2_path)
subprocess.check_call("git commit -m submodif", shell=True, cwd=self.sublocal2_path)
subprocess.check_call("git add submodule2", shell=True, cwd=self.local_path)
subprocess.check_call("git commit -m submodule2_modif", shell=True, cwd=self.local_path)
oldnew_version = "master"
self.assertTrue(client.update(oldnew_version))
# checking that update doesn't make submodule2 disappear (git default behavior)
self.assertTrue(subclient2.path_exists())
self.assertTrue(subsubclient2.path_exists())
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
self.assertTrue(client.update(new_version))
# checking that update still checks out the submodule commit containing submodif.txt
self.assertTrue(subclient2.path_exists())
self.assertTrue(subsubclient2.path_exists())
self.assertTrue(subclient.path_exists())
self.assertTrue(subsubclient.path_exists())
self.assertTrue(os.path.exists(os.path.join(self.sublocal2_path, "submodif.txt")))
def test_status(self):
url = self.repo_path
client = GitClient(self.local_path)
self.assertTrue(client.checkout(url))
output = client.get_status(porcelain=True) # porcelain=True ensures stable format
self.assertEqual('', output, "Expected empty string, got `{0}`".format(output))
with open(os.path.join(self.local_path, 'fixed.txt'), 'a') as f:
f.write('0123456789abcdef')
subprocess.check_call("touch new.txt", shell=True, cwd=self.local_path)
with open(os.path.join(self.sublocal_path, 'subfixed.txt'), 'a') as f:
f.write('abcdef0123456789')
subprocess.check_call("touch subnew.txt", shell=True, cwd=self.sublocal_path)
with open(os.path.join(self.subsublocal_path, 'subsubfixed.txt'), 'a') as f:
f.write('012345cdef')
subprocess.check_call("touch subsubnew.txt", shell=True, cwd=self.subsublocal_path)
output = client.get_status(porcelain=True) # porcelain=True ensures stable format
self.assertEqual('''\
M ./fixed.txt
M ./submodule
M ./subfixed.txt
M ./subsubmodule
M ./subsubfixed.txt''', output.rstrip())
output = client.get_status(untracked=True, porcelain=True)
self.assertEqual('''\
M ./fixed.txt
M ./submodule
?? ./new.txt
M ./subfixed.txt
M ./subsubmodule
?? ./subnew.txt
M ./subsubfixed.txt
?? ./subsubnew.txt''', output.rstrip())
output = client.get_status(
basepath=os.path.dirname(self.local_path),
untracked=True,
porcelain=True)
self.assertEqual('''\
M local/fixed.txt
M local/submodule
?? local/new.txt
M local/subfixed.txt
M local/subsubmodule
?? local/subnew.txt
M local/subsubfixed.txt
?? local/subsubnew.txt''', output.rstrip())
def test_diff(self):
url = self.repo_path
client = GitClient(self.local_path)
self.assertTrue(client.checkout(url))
output = client.get_diff()
self.assertEqual('', output, output)
with open(os.path.join(self.local_path, 'fixed.txt'), 'a') as f:
f.write('0123456789abcdef')
subprocess.check_call("touch new.txt", shell=True, cwd=self.local_path)
with open(os.path.join(self.sublocal_path, 'subfixed.txt'), 'a') as f:
f.write('abcdef0123456789')
subprocess.check_call("touch subnew.txt", shell=True, cwd=self.sublocal_path)
with open(os.path.join(self.subsublocal_path, 'subsubfixed.txt'), 'a') as f:
f.write('012345cdef')
subprocess.check_call("touch subsubnew.txt", shell=True, cwd=self.subsublocal_path)
output = client.get_diff()
self.assertEqual(1094, len(output))
self.assertTrue('''\
diff --git ./fixed.txt ./fixed.txt
index e69de29..454f6b3 100644
--- ./fixed.txt
+++ ./fixed.txt
@@ -0,0 +1 @@
+0123456789abcdef
\\ No newline at end of file''' in output)
self.assertTrue('''\
diff --git ./submodule/subsubmodule/subsubfixed.txt ./submodule/subsubmodule/subsubfixed.txt
index e69de29..1a332dc 100644
--- ./submodule/subsubmodule/subsubfixed.txt
+++ ./submodule/subsubmodule/subsubfixed.txt
@@ -0,0 +1 @@
+012345cdef
\\ No newline at end of file''' in output)
output = client.get_diff(basepath=os.path.dirname(self.local_path))
self.assertEqual(1174, len(output))
self.assertTrue('''\
diff --git local/fixed.txt local/fixed.txt
index e69de29..454f6b3 100644
--- local/fixed.txt
+++ local/fixed.txt
@@ -0,0 +1 @@
+0123456789abcdef
\ No newline at end of file''' in output, output)
self.assertTrue('''
diff --git local/submodule/subsubmodule/subsubfixed.txt local/submodule/subsubmodule/subsubfixed.txt
index e69de29..1a332dc 100644
--- local/submodule/subsubmodule/subsubfixed.txt
+++ local/submodule/subsubmodule/subsubfixed.txt
@@ -0,0 +1 @@
+012345cdef
\ No newline at end of file''' in output, output)
vcstools-0.1.42/test/test_hg.py 0000664 0000000 0000000 00000045677 13522611462 0016465 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, print_function, unicode_literals
import os
import io
import unittest
import subprocess
import tempfile
import shutil
from vcstools.hg import HgClient
os.environ['EMAIL'] = 'Your Name '
class HGClientTestSetups(unittest.TestCase):
@classmethod
def setUpClass(self):
self.root_directory = tempfile.mkdtemp()
self.directories = dict(setUp=self.root_directory)
self.remote_path = os.path.join(self.root_directory, "remote")
os.makedirs(self.remote_path)
# create a "remote" repo
for cmd in [
"hg init",
"touch fixed.txt",
"hg add fixed.txt",
"hg commit -m initial"]:
subprocess.check_call(cmd, shell=True, cwd=self.remote_path)
po = subprocess.Popen("hg log --template '{node|short}' -l1", shell=True, cwd=self.remote_path, stdout=subprocess.PIPE)
self.local_version_init = po.stdout.read().decode('UTF-8').rstrip("'").lstrip("'")
# in hg, tagging creates its own changeset, so we need to fetch the version before tagging
subprocess.check_call("hg tag test_tag", shell=True, cwd=self.remote_path)
# files to be modified in "local" repo
for cmd in [
"touch modified.txt",
"touch modified-fs.txt",
"hg add modified.txt modified-fs.txt",
"hg commit -m initial"]:
subprocess.check_call(cmd, shell=True, cwd=self.remote_path)
po = subprocess.Popen("hg log --template '{node|short}' -l1", shell=True, cwd=self.remote_path, stdout=subprocess.PIPE)
self.local_version_second = po.stdout.read().decode('UTF-8').rstrip("'").lstrip("'")
for cmd in [
"touch deleted.txt",
"touch deleted-fs.txt",
"hg add deleted.txt deleted-fs.txt",
"hg commit -m modified"]:
subprocess.check_call(cmd, shell=True, cwd=self.remote_path)
po = subprocess.Popen("hg log --template '{node|short}' -l1", shell=True, cwd=self.remote_path, stdout=subprocess.PIPE)
self.local_version = po.stdout.read().decode('UTF-8').rstrip("'").lstrip("'")
self.local_path = os.path.join(self.root_directory, "local")
self.local_url = self.remote_path
# create a hg branch
for cmd in [
"hg branch test_branch",
"touch test.txt",
"hg add test.txt",
"hg commit -m test"]:
subprocess.check_call(cmd, shell=True, cwd=self.remote_path)
po = subprocess.Popen("hg log --template '{node|short}' -l1", shell=True, cwd=self.remote_path, stdout=subprocess.PIPE)
self.branch_version = po.stdout.read().decode('UTF-8').rstrip("'").lstrip("'")
@classmethod
def tearDownClass(self):
for d in self.directories:
shutil.rmtree(self.directories[d])
def tearDown(self):
if os.path.exists(self.local_path):
shutil.rmtree(self.local_path)
class HGClientTest(HGClientTestSetups):
def test_get_url_by_reading(self):
url = self.local_url
client = HgClient(self.local_path)
client.checkout(url, self.local_version)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_url(), self.local_url)
self.assertEqual(client.get_version(), self.local_version)
self.assertEqual(client.get_version(self.local_version_init[0:6]), self.local_version_init)
self.assertEqual(client.get_version("test_tag"), self.local_version_init)
def test_get_url_nonexistant(self):
local_path = "/tmp/dummy"
client = HgClient(local_path)
self.assertEqual(client.get_url(), None)
def test_get_type_name(self):
local_path = "/tmp/dummy"
client = HgClient(local_path)
self.assertEqual(client.get_vcs_type_name(), 'hg')
def test_checkout(self):
url = self.local_url
client = HgClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
self.assertEqual(client.get_version(), self.local_version)
def test_checkout_dir_exists(self):
url = self.remote_path
client = HgClient(self.local_path)
self.assertFalse(client.path_exists())
os.makedirs(self.local_path)
self.assertTrue(client.checkout(url))
# non-empty
self.assertFalse(client.checkout(url))
def test_checkout_emptystringversion(self):
# special test to check that version '' means the same as None
url = self.local_url
client = HgClient(self.local_path)
self.assertTrue(client.checkout(url, ''))
self.assertEqual(client.get_version(), self.local_version)
# test for #3497
def test_checkout_into_subdir_without_existing_parent(self):
local_path = os.path.join(self.local_path, "nonexistant_subdir")
url = self.local_url
client = HgClient(local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), local_path)
self.assertEqual(client.get_url(), url)
def test_checkout_specific_version_and_update(self):
url = self.local_url
version = self.local_version
client = HgClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, version))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
self.assertEqual(client.get_version(), version)
new_version = self.local_version_second
self.assertTrue(client.update(new_version))
self.assertEqual(client.get_version(), new_version)
self.assertTrue(client.update())
self.assertEqual(client.get_version(), self.local_version)
self.assertTrue(client.update(new_version))
self.assertEqual(client.get_version(), new_version)
self.assertTrue(client.update(''))
self.assertEqual(client.get_version(), self.local_version)
def test_get_current_version_label(self):
url = self.local_url
version = self.local_version
client = HgClient(self.local_path)
client.checkout(url, version='test_tag')
self.assertEqual(client.get_current_version_label(), 'default')
client.update(version='default')
self.assertEqual(client.get_current_version_label(), 'default')
client.update(version='test_branch')
self.assertEqual(client.get_current_version_label(), 'test_branch')
def test_get_remote_version(self):
url = self.local_url
version = self.local_version
client = HgClient(self.local_path)
client.checkout(url)
self.assertEqual(client.get_remote_version(fetch=True), self.local_version)
client.checkout(url, version='test_tag')
self.assertEqual(client.get_remote_version(fetch=True), self.local_version)
client.update(version='default')
self.assertEqual(client.get_remote_version(fetch=True), self.local_version)
client.update(version='test_branch')
self.assertEqual(client.get_remote_version(fetch=True), self.branch_version)
def testDiffClean(self):
client = HgClient(self.remote_path)
self.assertEquals('', client.get_diff())
def testStatusClean(self):
client = HgClient(self.remote_path)
self.assertEquals('', client.get_status())
def test_get_environment_metadata(self):
# Verify that metadata is generated
directory = tempfile.mkdtemp()
self.directories['local'] = directory
local_path = os.path.join(directory, "local")
client = HgClient(local_path)
self.assertTrue('version' in client.get_environment_metadata())
class HGClientLogTest(HGClientTestSetups):
@classmethod
def setUpClass(self):
HGClientTestSetups.setUpClass()
client = HgClient(self.local_path)
client.checkout(self.local_url)
def test_get_log_defaults(self):
client = HgClient(self.local_path)
client.checkout(self.local_url)
log = client.get_log()
self.assertEquals(4, len(log))
self.assertEquals('modified', log[0]['message'])
for key in ['id', 'author', 'email', 'date', 'message']:
self.assertTrue(log[0][key] is not None, key)
def test_get_log_limit(self):
client = HgClient(self.local_path)
client.checkout(self.local_url)
log = client.get_log(limit=1)
self.assertEquals(1, len(log))
self.assertEquals('modified', log[0]['message'])
def test_get_log_path(self):
client = HgClient(self.local_path)
client.checkout(self.local_url)
log = client.get_log(relpath='fixed.txt')
self.assertEquals('initial', log[0]['message'])
class HGAffectedFilesTest(HGClientTestSetups):
@classmethod
def setUpClass(self):
HGClientTestSetups.setUpClass()
client = HgClient(self.local_path)
client.checkout(self.local_url)
def test_get_log_defaults(self):
client = HgClient(self.local_path)
client.checkout(self.local_url)
log = client.get_log(limit=1)[0]
affected = client.get_affected_files(log['id'])
self.assertEqual(sorted(['deleted-fs.txt', 'deleted.txt']),
sorted(affected))
class HGDiffStatClientTest(HGClientTestSetups):
@classmethod
def setUpClass(self):
HGClientTestSetups.setUpClass()
url = self.local_url
client = HgClient(self.local_path)
client.checkout(url)
# after setting up the "local" repo, modify, add and delete files to create local changes
subprocess.check_call("rm deleted-fs.txt", shell=True, cwd=self.local_path)
subprocess.check_call("hg rm deleted.txt", shell=True, cwd=self.local_path)
f = io.open(os.path.join(self.local_path, "modified.txt"), 'a')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "modified-fs.txt"), 'a')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "added-fs.txt"), 'w')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "added.txt"), 'w')
f.write('0123456789abcdef')
f.close()
subprocess.check_call("hg add added.txt", shell=True, cwd=self.local_path)
def tearDown(self):
pass
def test_diff(self):
client = HgClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals('diff --git ./added.txt ./added.txt\nnew file mode 100644\n--- /dev/null\n+++ ./added.txt\n@@ -0,0 +1,1 @@\n+0123456789abcdef\n\\ No newline at end of file\ndiff --git ./deleted.txt ./deleted.txt\ndeleted file mode 100644\ndiff --git ./modified-fs.txt ./modified-fs.txt\n--- ./modified-fs.txt\n+++ ./modified-fs.txt\n@@ -0,0 +1,1 @@\n+0123456789abcdef\n\\ No newline at end of file\ndiff --git ./modified.txt ./modified.txt\n--- ./modified.txt\n+++ ./modified.txt\n@@ -0,0 +1,1 @@\n+0123456789abcdef\n\\ No newline at end of file', client.get_diff())
def test_diff_relpath(self):
client = HgClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals('diff --git local/added.txt local/added.txt\nnew file mode 100644\n--- /dev/null\n+++ local/added.txt\n@@ -0,0 +1,1 @@\n+0123456789abcdef\n\\ No newline at end of file\ndiff --git local/deleted.txt local/deleted.txt\ndeleted file mode 100644\ndiff --git local/modified-fs.txt local/modified-fs.txt\n--- local/modified-fs.txt\n+++ local/modified-fs.txt\n@@ -0,0 +1,1 @@\n+0123456789abcdef\n\\ No newline at end of file\ndiff --git local/modified.txt local/modified.txt\n--- local/modified.txt\n+++ local/modified.txt\n@@ -0,0 +1,1 @@\n+0123456789abcdef\n\\ No newline at end of file', client.get_diff(basepath=os.path.dirname(self.local_path)))
def test_get_version_modified(self):
client = HgClient(self.local_path)
self.assertFalse(client.get_version().endswith('+'))
def test_status(self):
client = HgClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals('M modified-fs.txt\nM modified.txt\nA added.txt\nR deleted.txt\n! deleted-fs.txt\n', client.get_status())
def test_status_relpath(self):
client = HgClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals('M local/modified-fs.txt\nM local/modified.txt\nA local/added.txt\nR local/deleted.txt\n! local/deleted-fs.txt\n', client.get_status(basepath=os.path.dirname(self.local_path)))
def testStatusUntracked(self):
client = HgClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEquals('M modified-fs.txt\nM modified.txt\nA added.txt\nR deleted.txt\n! deleted-fs.txt\n? added-fs.txt\n', client.get_status(untracked=True))
def test_hg_diff_path_change_None(self):
from vcstools.hg import _hg_diff_path_change
self.assertEqual(_hg_diff_path_change(None, '/tmp/dummy'), None)
class HGRemoteFetchTest(HGClientTestSetups):
def test_get_remote_version(self):
url = self.local_url
version = self.local_version
client = HgClient(self.local_path)
client.checkout(url, version='default')
self.assertEqual(client.get_remote_version(fetch=True), self.local_version)
self.assertEqual(client.get_version(), self.local_version)
for cmd in [
"hg checkout default",
"touch remote_new.txt",
"hg add remote_new.txt",
"hg commit -m remote_new"]:
subprocess.check_call(cmd, shell=True, cwd=self.remote_path)
po = subprocess.Popen("hg log --template '{node|short}' -l1", shell=True, cwd=self.remote_path, stdout=subprocess.PIPE)
remote_new_version = po.stdout.read().decode('UTF-8').rstrip("'").lstrip("'")
self.assertNotEqual(self.local_version, remote_new_version)
self.assertEqual(client.get_remote_version(fetch=False), self.local_version)
self.assertEqual(client.get_remote_version(fetch=True), remote_new_version)
self.assertEqual(client.get_remote_version(fetch=False), remote_new_version)
self.assertEqual(client.get_version(), self.local_version)
class HGExportRepositoryClientTest(HGClientTestSetups):
@classmethod
def setUpClass(self):
HGClientTestSetups.setUpClass()
url = self.local_url
client = HgClient(self.local_path)
client.checkout(url)
self.basepath_export = os.path.join(self.root_directory, 'export')
def tearDown(self):
pass
def test_export_repository(self):
client = HgClient(self.local_path)
self.assertTrue(
client.export_repository(self.local_version, self.basepath_export)
)
self.assertTrue(os.path.exists(self.basepath_export + '.tar.gz'))
self.assertFalse(os.path.exists(self.basepath_export + '.tar'))
self.assertFalse(os.path.exists(self.basepath_export))
class HGGetBranchesClientTest(HGClientTestSetups):
@classmethod
def setUpClass(self):
HGClientTestSetups.setUpClass()
url = self.local_url
client = HgClient(self.local_path)
client.checkout(url)
def tearDown(self):
pass
def test_get_branches(self):
client = HgClient(self.local_path)
# Make a local branch
subprocess.check_call('hg branch test_branch2', shell=True,
cwd=self.local_path, stdout=subprocess.PIPE)
subprocess.check_call('hg commit -m "Making test_branch2"', shell=True,
cwd=self.local_path, stdout=subprocess.PIPE)
self.assertEqual(client.get_branches(), ['test_branch2', 'test_branch', 'default'])
# Make a remote branch
subprocess.check_call('hg branch remote_branch', shell=True,
cwd=self.remote_path, stdout=subprocess.PIPE)
subprocess.check_call("touch fixed.txt", shell=True,
cwd=self.remote_path)
subprocess.check_call("hg add fixed.txt", shell=True,
cwd=self.remote_path)
subprocess.check_call("hg commit -m initial", shell=True,
cwd=self.remote_path)
self.assertEqual(client.get_branches(local_only=True), ['test_branch2', 'test_branch', 'default'])
self.assertEqual(client.get_branches(), ['remote_branch', 'test_branch2', 'test_branch', 'default'])
vcstools-0.1.42/test/test_svn.py 0000664 0000000 0000000 00000057041 13522611462 0016661 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, print_function, unicode_literals
import os
import io
import unittest
import subprocess
import tempfile
import shutil
import re
from vcstools.svn import SvnClient, canonical_svn_url_split, get_remote_contents
class SvnClientUtilTest(unittest.TestCase):
def test_canonical_svn_url_split(self):
self.assertEqual({'root': 'foo',
'type': None,
'name': None, 'subfolder': None,
'query': None, 'fragment': None},
canonical_svn_url_split('foo'))
self.assertEqual({'root': None,
'type': None,
'name': None, 'subfolder': None,
'query': None, 'fragment': None},
canonical_svn_url_split(None))
self.assertEqual({'root': 'svn://gcc.gnu.org/svn/gcc',
'type': 'branches',
'name': 'foo', 'subfolder': None,
'query': None, 'fragment': None},
canonical_svn_url_split('svn://gcc.gnu.org/svn/gcc/branches/foo'))
self.assertEqual({'root': 'svn://gcc.gnu.org/svn/gcc',
'type': 'branches',
'name': 'foo', 'subfolder': None,
'query': None, 'fragment': None},
canonical_svn_url_split('svn://gcc.gnu.org/svn/gcc/branches/foo/'))
self.assertEqual({'root': 'svn://gcc.gnu.org/svn/gcc',
'type': 'branches',
'name': 'foo', 'subfolder': 'sub/bar',
'query': None, 'fragment': None},
canonical_svn_url_split('svn://gcc.gnu.org/svn/gcc/branches/foo/sub/bar'))
self.assertEqual({'root': 'svn://gcc.gnu.org/svn/gcc',
'type': 'trunk',
'name': None, 'subfolder': None,
'query': None, 'fragment': None},
canonical_svn_url_split('svn://gcc.gnu.org/svn/gcc/trunk'))
self.assertEqual({'root': 'svn://gcc.gnu.org/svn/gcc',
'type': 'trunk',
'name': None, 'subfolder': 'sub',
'query': None, 'fragment': None},
canonical_svn_url_split('svn://gcc.gnu.org/svn/gcc/trunk/sub'))
self.assertEqual({'root': 'svn://gcc.gnu.org/svn/gcc',
'type': 'trunk',
'name': None, 'subfolder': 'sub/foo',
'query': None, 'fragment': None},
canonical_svn_url_split('svn://gcc.gnu.org/svn/gcc/trunk/sub/foo'))
self.assertEqual({'root': 'svn://gcc.gnu.org/svn/gcc',
'type': 'tags',
'name': '1.2.3', 'subfolder': None,
'query': None, 'fragment': None},
canonical_svn_url_split('svn://gcc.gnu.org/svn/gcc/tags/1.2.3'))
self.assertEqual({'root': 'svn://gcc.gnu.org/svn/gcc',
'type': 'tags',
'name': '1.2.3', 'subfolder': 'sub/foo',
'query': None, 'fragment': None},
canonical_svn_url_split('svn://gcc.gnu.org/svn/gcc/tags/1.2.3/sub/foo'))
self.assertEqual({'root': 'file://localhost/svn/gcc',
'type': 'tags',
'name': '1.2.3', 'subfolder': 'sub/foo',
'query': None, 'fragment': None},
canonical_svn_url_split('file://localhost/svn/gcc/tags/1.2.3/sub/foo'))
self.assertEqual({'root': 'https://frodo@gcc.gnu.org/svn/gcc',
'type': 'tags',
'name': '1.2.3', 'subfolder': 'sub/foo',
'query': 'pw=guest', 'fragment': 'today'},
canonical_svn_url_split('https://frodo@gcc.gnu.org/svn/gcc/tags/1.2.3/sub/foo?pw=guest#today'))
class SvnClientTestSetups(unittest.TestCase):
@classmethod
def setUpClass(self):
self.root_directory = tempfile.mkdtemp()
self.directories = dict(setUp=self.root_directory)
self.remote_path = os.path.join(self.root_directory, "remote")
self.init_path = os.path.join(self.root_directory, "init")
# create a "remote" repo
subprocess.check_call("svnadmin create %s" % self.remote_path, shell=True, cwd=self.root_directory)
self.local_root_url = "file://localhost" + self.remote_path
self.local_url = self.local_root_url + "/trunk"
# create an "init" repo to populate remote repo
subprocess.check_call("svn checkout %s %s" % (self.local_root_url, self.init_path), shell=True, cwd=self.root_directory)
for cmd in [
"mkdir trunk",
"mkdir branches",
"mkdir tags",
"svn add trunk branches tags",
"touch trunk/fixed.txt",
"svn add trunk/fixed.txt",
"svn commit -m initial"]:
subprocess.check_call(cmd, shell=True, cwd=self.init_path)
self.local_version_init = "-r1"
# files to be modified in "local" repo
for cmd in [
"touch trunk/modified.txt",
"touch trunk/modified-fs.txt",
"svn add trunk/modified.txt trunk/modified-fs.txt",
"svn commit -m initial"]:
subprocess.check_call(cmd, shell=True, cwd=self.init_path)
self.local_version_second = "-r2"
for cmd in [
"touch trunk/deleted.txt",
"touch trunk/deleted-fs.txt",
"svn add trunk/deleted.txt trunk/deleted-fs.txt",
"svn commit -m modified"]:
subprocess.check_call(cmd, shell=True, cwd=self.init_path)
self.local_version_master = "-r3"
# create a "foo" branch in the remote repo
for cmd in [
"mkdir branches/foo",
"touch branches/foo/modified.txt",
"svn add branches/foo",
"svn commit -m 'foo branch'"]:
subprocess.check_call(cmd, shell=True, cwd=self.init_path)
self.branch_url = self.local_root_url + "/branches/foo"
self.local_version_foo_branch = "-r4"
self.local_path = os.path.join(self.root_directory, "local")
@classmethod
def tearDownClass(self):
for d in self.directories:
shutil.rmtree(self.directories[d])
def tearDown(self):
if os.path.exists(self.local_path):
shutil.rmtree(self.local_path)
class SvnClientTest(SvnClientTestSetups):
def test_get_url_by_reading(self):
client = SvnClient(self.local_path)
client.checkout(self.local_url)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(self.local_url, client.get_url())
self.assertEqual(client.get_version(), self.local_version_master)
self.assertEqual(client.get_version("PREV"), "-r2")
self.assertEqual(client.get_version("2"), "-r2")
self.assertEqual(client.get_version("-r2"), "-r2")
# test invalid client and repo without url
client = SvnClient(os.path.join(self.remote_path, 'foo'))
self.assertEqual(None, client.get_url())
def test_get_type_name(self):
local_path = "/tmp/dummy"
client = SvnClient(local_path)
self.assertEqual(client.get_vcs_type_name(), 'svn')
def test_get_url_nonexistant(self):
local_path = "/tmp/dummy"
client = SvnClient(local_path)
self.assertEqual(client.get_url(), None)
def test_checkout(self):
url = self.local_url
client = SvnClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
def test_checkout_dir_exists(self):
url = self.local_url
client = SvnClient(self.local_path)
self.assertFalse(client.path_exists())
os.makedirs(self.local_path)
self.assertTrue(client.checkout(url))
# non-empty
self.assertFalse(client.checkout(url))
def test_checkout_emptyversion(self):
url = self.local_url
client = SvnClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, version=''))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), self.local_path)
self.assertEqual(client.get_url(), url)
self.assertTrue(client.update(None))
self.assertTrue(client.update(""))
def test_checkout_specific_version_and_update_short(self):
"using just a number as version"
url = self.local_url
version = "3"
client = SvnClient(self.local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, version))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_version(), "-r3")
new_version = '2'
self.assertTrue(client.update(new_version))
self.assertEqual(client.get_version(), "-r2")
def test_get_remote_version(self):
url = self.local_url
client = SvnClient(self.local_path)
client.checkout(url)
self.assertEqual(client.get_remote_version(fetch=True),
self.local_version_master)
self.assertEqual(client.get_remote_version(fetch=False),
None)
def test_get_remote_branch_version(self):
url = self.branch_url
client = SvnClient(self.local_path)
client.checkout(url)
self.assertEqual(client.get_remote_version(fetch=True),
self.local_version_foo_branch)
self.assertEqual(client.get_remote_version(fetch=False),
None)
def testDiffClean(self):
client = SvnClient(self.remote_path)
self.assertEquals('', client.get_diff())
def testStatusClean(self):
client = SvnClient(self.remote_path)
self.assertEquals('', client.get_status())
def test_get_environment_metadata(self):
# Verify that metadata is generated
directory = tempfile.mkdtemp()
self.directories['local'] = directory
local_path = os.path.join(directory, "local")
client = SvnClient(local_path)
self.assertTrue('version' in client.get_environment_metadata())
class SvnClientLogTest(SvnClientTestSetups):
@classmethod
def setUpClass(self):
SvnClientTestSetups.setUpClass()
client = SvnClient(self.local_path)
client.checkout(self.local_url)
def test_get_log_defaults(self):
client = SvnClient(self.local_path)
client.checkout(self.local_url)
log = client.get_log()
self.assertEquals(3, len(log))
self.assertEquals('modified', log[0]['message'])
for key in ['id', 'author', 'date', 'message']:
self.assertTrue(log[0][key] is not None, key)
# svn logs don't have email, but key should be in dict
self.assertTrue(log[0]['email'] is None)
def test_get_log_limit(self):
client = SvnClient(self.local_path)
client.checkout(self.local_url)
log = client.get_log(limit=1)
self.assertEquals(1, len(log))
self.assertEquals('modified', log[0]['message'])
def test_get_log_path(self):
client = SvnClient(self.local_path)
client.checkout(self.local_url)
log = client.get_log(relpath='fixed.txt')
self.assertEquals('initial', log[0]['message'])
class SVNClientAffectedFiles(SvnClientTestSetups):
@classmethod
def setUpClass(self):
SvnClientTestSetups.setUpClass()
client = SvnClient(self.local_path)
client.checkout(self.local_url)
def test_get_affected_files(self):
client = SvnClient(self.local_path)
client.checkout(self.local_url)
log = client.get_log(limit=1)[0]
affected = client.get_affected_files(log['id'])
self.assertEqual(sorted(['deleted-fs.txt', 'deleted.txt']),
sorted(affected))
class SvnDiffStatClientTest(SvnClientTestSetups):
@classmethod
def setUpClass(self):
SvnClientTestSetups.setUpClass()
client = SvnClient(self.local_path)
client.checkout(self.local_url)
# after setting up the "local" repo, modify, add and delete files to create local changes
subprocess.check_call("rm deleted-fs.txt", shell=True, cwd=self.local_path)
subprocess.check_call("svn rm deleted.txt", shell=True, cwd=self.local_path)
f = io.open(os.path.join(self.local_path, "modified.txt"), 'a')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "modified-fs.txt"), 'a')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "added-fs.txt"), 'w')
f.write('0123456789abcdef')
f.close()
f = io.open(os.path.join(self.local_path, "added.txt"), 'w')
f.write('0123456789abcdef')
f.close()
subprocess.check_call("svn add added.txt", shell=True, cwd=self.local_path)
def tearDown(self):
pass
def assertStatusListEqual(self, listexpect, listactual):
"""helper fun to check scm status output while discarding file ordering differences"""
lines_expect = listexpect.splitlines()
lines_actual = listactual.splitlines()
for line in lines_expect:
self.assertTrue(line in lines_actual, 'Missing entry %s in output %s' % (line, listactual))
for line in lines_actual:
self.assertTrue(line in lines_expect, 'Superfluous entry %s in output %s' % (line, listactual))
def assertEqualDiffs(self, expected, actual):
"True if actual is similar enough to expected, minus svn properties"
def filter_block(block):
"""removes property information that varies between systems, not relevant fo runit test"""
newblock = []
for line in block.splitlines():
if re.search("[=+-\\@ ].*", line) == None:
break
else:
# new svn versions use different labels for added
# files (working copy) vs (revision x)
fixedline = re.sub('\(revision [0-9]+\)', '(working copy)', line)
# svn 1.9 added (nonexistent)
fixedline = re.sub('\(nonexistent\)', '(working copy)', fixedline)
newblock.append(fixedline)
return "\n".join(newblock)
filtered_actual_blocks = []
# A block starts with \nIndex, and the actual diff goes up to the first line starting with [a-zA-Z], e.g. "Properties changed:"
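# For illustration (a sketch derived from the expected diffs asserted below,
# not additional test output), a single block looks roughly like:
#   Index: modified.txt
#   ===================================================================
#   --- modified.txt    (revision 3)
#   +++ modified.txt    (working copy)
#   @@ -0,0 +1 @@
#   +0123456789abcdef
# possibly followed by property-change lines starting with a letter, at
# which point filter_block() stops.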
for block in actual.split("\nIndex: "):
if filtered_actual_blocks != []:
# restore "Index: " removed by split()
block = "Index: " + block
block = filter_block(block)
filtered_actual_blocks.append(block)
expected_blocks = []
for block in expected.split("\nIndex: "):
if expected_blocks != []:
block = "Index: " + block
block = filter_block(block)
expected_blocks.append(block)
filtered = "\n".join(filtered_actual_blocks)
self.assertEquals(set(expected_blocks), set(filtered_actual_blocks))
def test_diff(self):
client = SvnClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqualDiffs('Index: added.txt\n===================================================================\n--- added.txt\t(revision 0)\n+++ added.txt\t(revision 0)\n@@ -0,0 +1 @@\n+0123456789abcdef\n\\ No newline at end of file\nIndex: modified-fs.txt\n===================================================================\n--- modified-fs.txt\t(revision 3)\n+++ modified-fs.txt\t(working copy)\n@@ -0,0 +1 @@\n+0123456789abcdef\n\\ No newline at end of file\nIndex: modified.txt\n===================================================================\n--- modified.txt\t(revision 3)\n+++ modified.txt\t(working copy)\n@@ -0,0 +1 @@\n+0123456789abcdef\n\\ No newline at end of file',
client.get_diff().rstrip())
def test_diff_relpath(self):
client = SvnClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqualDiffs('Index: local/added.txt\n===================================================================\n--- local/added.txt\t(revision 0)\n+++ local/added.txt\t(revision 0)\n@@ -0,0 +1 @@\n+0123456789abcdef\n\\ No newline at end of file\nIndex: local/modified-fs.txt\n===================================================================\n--- local/modified-fs.txt\t(revision 3)\n+++ local/modified-fs.txt\t(working copy)\n@@ -0,0 +1 @@\n+0123456789abcdef\n\\ No newline at end of file\nIndex: local/modified.txt\n===================================================================\n--- local/modified.txt\t(revision 3)\n+++ local/modified.txt\t(working copy)\n@@ -0,0 +1 @@\n+0123456789abcdef\n\\ No newline at end of file', client.get_diff(basepath=os.path.dirname(self.local_path)).rstrip())
def test_status(self):
client = SvnClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertStatusListEqual('A added.txt\nD deleted.txt\nM modified-fs.txt\n! deleted-fs.txt\nM modified.txt\n', client.get_status())
def test_status_relpath(self):
client = SvnClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertStatusListEqual('A local/added.txt\nD local/deleted.txt\nM local/modified-fs.txt\n! local/deleted-fs.txt\nM local/modified.txt\n', client.get_status(basepath=os.path.dirname(self.local_path)))
def test_status_untracked(self):
client = SvnClient(self.local_path)
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertStatusListEqual('? added-fs.txt\nA added.txt\nD deleted.txt\nM modified-fs.txt\n! deleted-fs.txt\nM modified.txt\n', client.get_status(untracked=True))
class SvnExportRepositoryClientTest(SvnClientTestSetups):
@classmethod
def setUpClass(self):
SvnClientTestSetups.setUpClass()
client = SvnClient(self.local_path)
client.checkout(self.local_url)
self.basepath_export = os.path.join(self.root_directory, 'export')
def tearDown(self):
pass
def test_export_repository(self):
client = SvnClient(self.local_path)
self.assertTrue(
client.export_repository('',
self.basepath_export)
)
self.assertTrue(os.path.exists(self.basepath_export + '.tar.gz'))
self.assertFalse(os.path.exists(self.basepath_export + '.tar'))
self.assertFalse(os.path.exists(self.basepath_export))
class SvnGetBranchesClientTest(SvnClientTestSetups):
@classmethod
def setUpClass(self):
SvnClientTestSetups.setUpClass()
client = SvnClient(self.local_path)
client.checkout(self.local_url)
# def tearDown(self):
# pass
def test_get_remote_contents(self):
self.assertEqual(['branches', 'tags', 'trunk'], get_remote_contents(self.local_root_url))
def test_get_branches_non_canonical(self):
remote_path = os.path.join(self.root_directory, "remote_nc")
init_path = os.path.join(self.root_directory, "init_nc")
local_path = os.path.join(self.root_directory, "local_nc")
# create a "remote" repo
subprocess.check_call("svnadmin create %s" % remote_path, shell=True, cwd=self.root_directory)
local_root_url = "file://localhost/" + remote_path
local_url = local_root_url + "/footest"
# create an "init" repo to populate remote repo
subprocess.check_call("svn checkout %s %s" % (local_root_url, init_path), shell=True, cwd=self.root_directory)
for cmd in [
"mkdir footest",
"mkdir footest/foosub",
"touch footest/foosub/fixed.txt",
"svn add footest",
"svn commit -m initial"]:
subprocess.check_call(cmd, shell=True, cwd=init_path)
client = SvnClient(local_path)
client.checkout(local_url)
self.assertEqual([], client.get_branches())
def test_get_branches(self):
client = SvnClient(self.local_path)
self.assertEqual(['foo'], client.get_branches())
# slyly create some empty branches
subprocess.check_call("mkdir -p branches/foo2", shell=True, cwd=self.init_path)
subprocess.check_call("mkdir -p branches/bar", shell=True, cwd=self.init_path)
subprocess.check_call("svn add branches/foo2", shell=True, cwd=self.init_path)
subprocess.check_call("svn add branches/bar", shell=True, cwd=self.init_path)
subprocess.check_call("svn commit -m newbranches", shell=True, cwd=self.init_path)
self.assertEqual([], client.get_branches(local_only=True))
self.assertEqual(['bar', 'foo', 'foo2'], client.get_branches())
# checkout branch foo
local_path2 = os.path.join(self.root_directory, "local_foo")
client = SvnClient(local_path2)
client.checkout(self.local_root_url + '/branches/foo')
self.assertEqual(['foo'], client.get_branches(local_only=True))
vcstools-0.1.42/test/test_tar.py 0000664 0000000 0000000 00000026020 13522611462 0016632 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, print_function, unicode_literals
import os
import unittest
import tarfile
import tempfile
import shutil
import subprocess
import mock
from vcstools.tar import TarClient
from test.mock_server import start_mock_server
def tarfile_contents():
'''
:returns: binary string of ROS package-release-like tarfile to serve
'''
tar_directory = tempfile.mkdtemp()
filename = os.path.join(tar_directory, 'sample.tar.gz')
pkgname = "sample-1.0"
pkg_directory = os.path.join(tar_directory, pkgname)
os.mkdir(pkg_directory)
packagexml = os.path.join(pkg_directory, "package.xml")
with open(packagexml, 'w+') as f:
f.write('<package>' + ('sample' * 1000) + '</package>')
with tarfile.open(filename, "w:gz") as tar_handle:
tar_handle.addfile(tarfile.TarInfo(os.path.join(pkgname, "package.xml")), packagexml)
tar_handle.close()
with open(filename, mode='rb') as file: # b is important -> binary
result = file.read()
shutil.rmtree(tar_directory)
return result
class TarClientTest(unittest.TestCase):
'''Test against mock http server'''
@classmethod
def setUpClass(self):
baseURL = start_mock_server(tarfile_contents())
self.remote_url = baseURL + '/downloads/1.0.tar.gz'
self.package_version = "sample-1.0"
def setUp(self):
self.directories = {}
def tearDown(self):
for d in self.directories:
self.assertTrue(os.path.exists(self.directories[d]))
shutil.rmtree(self.directories[d])
self.assertFalse(os.path.exists(self.directories[d]))
def test_get_url_by_reading(self):
directory = tempfile.mkdtemp()
self.directories['local'] = directory
local_path = os.path.join(directory, "local")
client = TarClient(local_path)
self.assertTrue(client.checkout(self.remote_url, self.package_version))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_url(), self.remote_url)
#self.assertEqual(client.get_version(), self.package_version)
def test_get_url_nonexistant(self):
local_path = "/tmp/dummy"
client = TarClient(local_path)
self.assertEqual(client.get_url(), None)
def test_get_type_name(self):
local_path = "/tmp/dummy"
client = TarClient(local_path)
self.assertEqual(client.get_vcs_type_name(), 'tar')
@mock.patch('netrc.netrc') # cannot rely on local ~/.netrc file
def test_checkout_parametrized(self, patched_netrc):
netrc_mock = mock.Mock()
netrc_mock.authenticators.return_value = ('user', '' , 'password')
patched_netrc.return_value = netrc_mock
for query_params in ['', '?chunked=true', '?auth=true', '?chunked=true&auth=true']:
self.check_checkout(query_params)
# parametrized
def check_checkout(self, query_params):
# checks out all subdirs
directory = tempfile.mkdtemp()
self.directories["checkout_test"] = directory
local_path = os.path.join(directory, "exploration")
client = TarClient(local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(self.remote_url + query_params))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), local_path)
self.assertEqual(client.get_url(), self.remote_url + query_params)
# make sure the tarball subdirectory was promoted correctly.
self.assertTrue(os.path.exists(os.path.join(local_path,
self.package_version,
'package.xml')))
def test_checkout_dir_exists(self):
directory = tempfile.mkdtemp()
self.directories["checkout_test"] = directory
local_path = os.path.join(directory, "exploration")
client = TarClient(local_path)
self.assertFalse(client.path_exists())
os.makedirs(local_path)
self.assertTrue(client.checkout(self.remote_url))
# non-empty
self.assertFalse(client.checkout(self.remote_url))
def test_checkout_version(self):
directory = tempfile.mkdtemp()
self.directories["checkout_test"] = directory
local_path = os.path.join(directory, "exploration")
client = TarClient(local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(self.remote_url,
version=self.package_version))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), local_path)
self.assertEqual(client.get_url(), self.remote_url)
# make sure the tarball subdirectory was promoted correctly.
self.assertTrue(os.path.exists(os.path.join(local_path, 'package.xml')))
def test_get_environment_metadata(self):
# Verify that metadata is generated
directory = tempfile.mkdtemp()
self.directories['local'] = directory
local_path = os.path.join(directory, "local")
client = TarClient(local_path)
self.assertTrue('version' in client.get_environment_metadata())
class TarClientTestLocal(unittest.TestCase):
'''Tests with URL being a local filepath'''
def setUp(self):
self.root_directory = tempfile.mkdtemp()
# helpful when setting tearDown to pass
self.directories = dict(setUp=self.root_directory)
self.version_path0 = os.path.join(self.root_directory, "version")
self.version_path1 = os.path.join(self.root_directory, "version1")
self.version_path2 = os.path.join(self.root_directory, "version1.0")
os.makedirs(self.version_path0)
os.makedirs(self.version_path1)
os.makedirs(self.version_path2)
subprocess.check_call("touch stack0.xml", shell=True, cwd=self.version_path0)
subprocess.check_call("touch stack.xml", shell=True, cwd=self.version_path1)
subprocess.check_call("touch stack1.xml", shell=True, cwd=self.version_path2)
subprocess.check_call("touch version1.txt", shell=True, cwd=self.root_directory)
self.tar_url = os.path.join(self.root_directory, "origin.tar")
self.tar_url_compressed = os.path.join(self.root_directory,
"origin_compressed.tar.bz2")
subprocess.check_call("tar -cf %s %s" % (self.tar_url, " ".join(["version",
"version1",
"version1.txt",
"version1.0"])),
shell=True,
cwd=self.root_directory)
subprocess.check_call("tar -cjf %s %s" % (self.tar_url_compressed, " ".join(["version",
"version1",
"version1.txt",
"version1.0"])),
shell=True,
cwd=self.root_directory)
def tearDown(self):
for d in self.directories:
self.assertTrue(os.path.exists(self.directories[d]))
shutil.rmtree(self.directories[d])
self.assertFalse(os.path.exists(self.directories[d]))
def test_checkout_version_local(self):
directory = tempfile.mkdtemp()
self.directories["checkout_test"] = directory
local_path = os.path.join(directory, "version1")
url = self.tar_url
client = TarClient(local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, version='version1'))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), local_path)
self.assertEqual(client.get_url(), url)
# make sure the tarball subdirectory was promoted correctly.
self.assertTrue(os.path.exists(os.path.join(local_path, 'stack.xml')))
def test_checkout_version_compressed_local(self):
directory = tempfile.mkdtemp()
self.directories["checkout_test"] = directory
local_path = os.path.join(directory, "version1")
url = self.tar_url_compressed
client = TarClient(local_path)
self.assertFalse(client.path_exists())
self.assertFalse(client.detect_presence())
self.assertFalse(client.detect_presence())
self.assertTrue(client.checkout(url, version='version1'))
self.assertTrue(client.path_exists())
self.assertTrue(client.detect_presence())
self.assertEqual(client.get_path(), local_path)
self.assertEqual(client.get_url(), url)
# make sure the tarball subdirectory was promoted correctly.
self.assertTrue(os.path.exists(os.path.join(local_path, 'stack.xml')))
vcstools-0.1.42/test/test_vcs_abstraction.py 0000664 0000000 0000000 00000003532 13522611462 0021233 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import, print_function, unicode_literals
import unittest
from mock import Mock
import vcstools.vcs_abstraction
from vcstools.vcs_abstraction import register_vcs, get_registered_vcs_types, \
get_vcs
from vcstools import get_vcs_client
class TestVcsAbstraction(unittest.TestCase):
def test_register_vcs(self):
try:
backup = vcstools.vcs_abstraction._VCS_TYPES
vcstools.vcs_abstraction._VCS_TYPES = {}
self.assertEqual([], get_registered_vcs_types())
mock_class = Mock()
register_vcs('foo', mock_class)
self.assertEqual(['foo'], get_registered_vcs_types())
finally:
vcstools.vcs_abstraction._VCS_TYPES = backup
def test_get_vcs(self):
try:
backup = vcstools.vcs_abstraction._VCS_TYPES
vcstools.vcs_abstraction._VCS_TYPES = {}
self.assertEqual([], get_registered_vcs_types())
mock_class = Mock()
register_vcs('foo', mock_class)
self.assertEqual(mock_class, get_vcs('foo'))
self.assertRaises(ValueError, get_vcs, 'bar')
finally:
vcstools.vcs_abstraction._VCS_TYPES = backup
def test_get_vcs_client(self):
try:
backup = vcstools.vcs_abstraction._VCS_TYPES
vcstools.vcs_abstraction._VCS_TYPES = {}
self.assertEqual([], get_registered_vcs_types())
mock_class = Mock()
mock_instance = Mock()
# mock __init__ constructor
mock_class.return_value = mock_instance
register_vcs('foo', mock_class)
self.assertEqual(mock_instance, get_vcs_client('foo', 'foopath'))
self.assertRaises(ValueError, get_vcs_client, 'bar', 'barpath')
finally:
vcstools.vcs_abstraction._VCS_TYPES = backup
vcstools-0.1.42/tox.ini 0000664 0000000 0000000 00000001132 13522611462 0014764 0 ustar 00root root 0000000 0000000 # Tox is the QA gateway before releasing
# it can run against multiple python versions
# it runs commands in virtualenvs prepared with python version and dependencies from setup.[py,cfg]
# While tox can be configured to do many things, it is cumbersome for quick iteration and not a lightweight dependency, so prefer to keep build logic in setup.py or other commands that also run without tox.
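# A minimal usage sketch (assuming tox is installed): running `tox` executes
# every environment in the envlist below, `tox -e py36` runs a single
# environment, and `tox -r` recreates the virtualenvs after dependency changes.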
[tox]
envlist = py27, py34, py35, py36
[testenv]
# flawed due to https://github.com/tox-dev/tox/issues/149
# deps = -rrequirements.txt
commands =
pip install .[test]
{envpython} setup.py test