commit rdiff-backup for openSUSE:Factory
Hello community, here is the log from the commit of package rdiff-backup for openSUSE:Factory checked in at 2020-02-29 21:24:56 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Comparing /work/SRC/openSUSE:Factory/rdiff-backup (Old) and /work/SRC/openSUSE:Factory/.rdiff-backup.new.26092 (New) ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Package is "rdiff-backup" Sat Feb 29 21:24:56 2020 rev:2 rq:780486 version:1.9.1~b0 Changes: -------- --- /work/SRC/openSUSE:Factory/rdiff-backup/rdiff-backup.changes 2020-02-03 11:14:26.165878829 +0100 +++ /work/SRC/openSUSE:Factory/.rdiff-backup.new.26092/rdiff-backup.changes 2020-02-29 21:25:14.442602728 +0100 @@ -1,0 +2,20 @@ +Sun Feb 23 10:36:23 UTC 2020 - Stefan Seyfried <seife+obs@b1-systems.com> + +- update to 1.9.1b0, third beta before 2.0.0: + * avoid double unquoting of increment file infos, closes #266 + * fix some ugly bytes output in strings, closes #238 + * improved hardlink handling, closes #239 + * add support for SOURCE_DATE_EPOCH to override the build date + * add efficient sparse file handling +- remove all (upstreamed) patches: + * 0001-handle-sparse-files-efficiently.patch + * Hardlink.py.revised-1.4.0.patch + * compare.py-1.4.0.patch + * rdiff-backup-1.9-reproducible.diff + +------------------------------------------------------------------- +Fri Feb 7 08:58:30 UTC 2020 - Stefan Seyfried <seife+obs@b1-systems.com> + +- add rdiff-backup-1.9-reproducible.diff for reproducible build + +------------------------------------------------------------------- Old: ---- 0001-handle-sparse-files-efficiently.patch Hardlink.py.revised-1.4.0.patch compare.py-1.4.0.patch rdiff-backup-1.9.0b0.tar.gz New: ---- rdiff-backup-1.9.1b0.tar.gz ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Other differences: ------------------ ++++++ rdiff-backup.spec ++++++ --- /var/tmp/diff_new_pack.p1mCJK/_old 2020-02-29 21:25:15.182604193 +0100 +++ /var/tmp/diff_new_pack.p1mCJK/_new 2020-02-29 21:25:15.186604201 +0100 @@ -18,23 +18,14 @@ Name: rdiff-backup -Version: 1.9.0~b0 -%define _beta 1.9.0b0 +Version: 1.9.1~b0 +%define _beta 1.9.1b0 Release: 0 Summary: Convenient and transparent local/remote incremental mirror/backup License: GPL-2.0-or-later Group: Productivity/Archiving/Backup Url: https://rdiff-backup.net/ Source0: https://github.com/rdiff-backup/rdiff-backup/releases/download/v%{_beta}/rdiff-backup-%{_beta}.tar.gz -# https://github.com/rdiff-backup/rdiff-backup/pull/248 -# PATCH-FEATURE-UPSTREAM 0001-handle-sparse-files-efficiently.patch -- seife+obs@b1-systems.com -Patch1: 0001-handle-sparse-files-efficiently.patch -# in order not stumble on handle hardlinks, these two patches are necessary -# http://savannah.nongnu.org/bugs/?26848 -# https://github.com/rdiff-backup/rdiff-backup/pull/240 -# PATCH-FEATURE-UPSTREAM Hardlink.py.revised-patch compare.py.patch -- hpj@urpla.net -Patch2: Hardlink.py.revised-1.4.0.patch -Patch3: compare.py-1.4.0.patch # BuildRequires: librsync-devel BuildRequires: python3-devel ++++++ rdiff-backup-1.9.0b0.tar.gz -> rdiff-backup-1.9.1b0.tar.gz ++++++ diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/.travis.yml new/rdiff-backup-1.9.1b0/.travis.yml --- old/rdiff-backup-1.9.0b0/.travis.yml 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/.travis.yml 2020-02-23 09:20:37.000000000 +0100 @@ -12,22 +12,89 @@ - 3.7 - 3.8 +#=== DEPLOYMENTS === +# - everything to GitHub releases +# - wheels 
to PyPI (if tag looks like an alpha, beta, rc or final release) +# - sdist to PyPI (if tag looks like an alpha, beta, rc or final release) +# - wheels to Test PyPI (if tag looks like a development release) +# - sdist to Test PyPI (if tag looks like a development release) + deploy: - provider: releases - file_glob: true - file: dist/*.* - skip_cleanup: true - draft: true - token: - secure: V1Ad29+3lIQB1Sxqt8RiNKwferucTIr2mJo1Ka0m8Y1GdscP0ZkvHN4Vzjb0hts+lap8cU24tJ1zq9k22bdsNmoS3/YEg7fwbQSOPCJSYbjwMQeitDc1NP3u1mp5LWiEnQ7cflalIkmWUVpwGbwPSnUQdW3JyheUm4DP6jndGHmv3sC2r4WtZDkVY44SI5OGjvOTStTLnXhVlwXMIBKzw+o9BNdmIcpRvI3UTR+C9mA2yoNE7qgpzALZ4qn7qAmTMJzcVQb22yVZqXVCF8VD72Z66GHx75aatMmsJ/2isOEap19sVYt03fY8lchsTUsdRRCAIInxUvbdfgaEoBWj8Vfv2r/vm5DgWh93amv1RkJdmAYAMTGZG/k20vd7N+sNGKCdAF0W8hj5kriGkNnSQg7dFd+NWrF3pX9XcUkT9IGzv5eT5eYeS+yzgB3YPy8qGKwF5dRkRf7Ylw68j8YOly40rAAfgEx/pcFlYrJh62LDPr5RNhKA679+9UAp0rgkiszNH327bj1i0drqeDIhJAZSXw86B8tJZI1JRBVllKNbwDc5GRFNyms4+pvKCO+lAy5Pcn0bQMUq49qxDsdnM1t2RISivWy5CdkOTNW5WYglhovOSoA8/NjcxmsOpOazCaENEvsB3AogI+LRmpZ7lUZKxjaILuVUM2rwXlKxFB0= - on: - tags: true - all_branches: true - condition: $MAKE_STEP == ?dist* + - provider: releases + file_glob: true + file: dist/*.* + skip_cleanup: true + draft: true + token: + secure: V1Ad29+3lIQB1Sxqt8RiNKwferucTIr2mJo1Ka0m8Y1GdscP0ZkvHN4Vzjb0hts+lap8cU24tJ1zq9k22bdsNmoS3/YEg7fwbQSOPCJSYbjwMQeitDc1NP3u1mp5LWiEnQ7cflalIkmWUVpwGbwPSnUQdW3JyheUm4DP6jndGHmv3sC2r4WtZDkVY44SI5OGjvOTStTLnXhVlwXMIBKzw+o9BNdmIcpRvI3UTR+C9mA2yoNE7qgpzALZ4qn7qAmTMJzcVQb22yVZqXVCF8VD72Z66GHx75aatMmsJ/2isOEap19sVYt03fY8lchsTUsdRRCAIInxUvbdfgaEoBWj8Vfv2r/vm5DgWh93amv1RkJdmAYAMTGZG/k20vd7N+sNGKCdAF0W8hj5kriGkNnSQg7dFd+NWrF3pX9XcUkT9IGzv5eT5eYeS+yzgB3YPy8qGKwF5dRkRf7Ylw68j8YOly40rAAfgEx/pcFlYrJh62LDPr5RNhKA679+9UAp0rgkiszNH327bj1i0drqeDIhJAZSXw86B8tJZI1JRBVllKNbwDc5GRFNyms4+pvKCO+lAy5Pcn0bQMUq49qxDsdnM1t2RISivWy5CdkOTNW5WYglhovOSoA8/NjcxmsOpOazCaENEvsB3AogI+LRmpZ7lUZKxjaILuVUM2rwXlKxFB0= + on: + tags: true + all_branches: true + condition: $MAKE_STEP == ?dist* + # we use beta/alpha/rc/final version tags to release to pypi.org + - provider: pypi + user: "__token__" + password: + secure: QGprJSAZR3X88Ir4qpJpNdel5m6CngEtYlD2DAgwRuyPHClhMhXcayB7NbRqACRdtsqBBdQHSOnPO6jBcrhvTIeFbyhiSWZKME113yn4RFD3E+z/p8otb02Lk0DvFTakeC2qxsJ1nlwxGte7qT5Ac9dL7U5O3SSlWNoq2c3QmoD/d+fVdtjf6tp4QywlOIr4Jyx2WWFBQDSQB2cjY7YfwgcQchXkOoVog83dUID4pTh15HlM1Q9E8k0EKueGrDVkS3hhlbPimv1C1coM/IAIAXaTSR1AZZCZELsVSbBOW1hMaAL4MPpr9ekKx/eErnZiIkCuovBTk0XKvzfOyPluFjnuxazKWH9EU5WutCzfxtXh29fLNLYKaqalgjI024kvzTxVFtxJFvCcnhEjcqgs6sxh4nrzAosK163LViFRlJjgbT+2kVgeWiA51fjFTdIj0ep3/NMN8SgvSWQu7SRA8Vtxx9NLzTu57QPKZdRLOW98XUnnSwWPvFymNJ3JHQtp3tsRvTaLcbECvpI82bQa/9+MEszGXwm5TdM3Ogie7OUgDReq6fYbSCYXli5800QwA8Zj6zwJH2qG7l2VAPm0Gd0O9RPP/U2DCxDVAHPDV8VYzKsLWeCQ0Rsxz0S0d1r1Vss15rZ/C/RaCXUJB9ba4F1uRMh4l8Jm4/eSAcuAryI= + distributions: bdist_wheel + skip_existing: true + skip_cleanup: true + on: + tags: true + all_branches: true + condition: $MAKE_STEP =~ ^(bdist_wheel)$ && $TRAVIS_TAG =~ ^v[0-9]+\.[0-9]+\.[0-9]+((a|b|rc)[0-9]+)?$ + # TODO add |bdist_win to release also Windows wheel + - provider: pypi + user: "__token__" + password: + secure: 
QGprJSAZR3X88Ir4qpJpNdel5m6CngEtYlD2DAgwRuyPHClhMhXcayB7NbRqACRdtsqBBdQHSOnPO6jBcrhvTIeFbyhiSWZKME113yn4RFD3E+z/p8otb02Lk0DvFTakeC2qxsJ1nlwxGte7qT5Ac9dL7U5O3SSlWNoq2c3QmoD/d+fVdtjf6tp4QywlOIr4Jyx2WWFBQDSQB2cjY7YfwgcQchXkOoVog83dUID4pTh15HlM1Q9E8k0EKueGrDVkS3hhlbPimv1C1coM/IAIAXaTSR1AZZCZELsVSbBOW1hMaAL4MPpr9ekKx/eErnZiIkCuovBTk0XKvzfOyPluFjnuxazKWH9EU5WutCzfxtXh29fLNLYKaqalgjI024kvzTxVFtxJFvCcnhEjcqgs6sxh4nrzAosK163LViFRlJjgbT+2kVgeWiA51fjFTdIj0ep3/NMN8SgvSWQu7SRA8Vtxx9NLzTu57QPKZdRLOW98XUnnSwWPvFymNJ3JHQtp3tsRvTaLcbECvpI82bQa/9+MEszGXwm5TdM3Ogie7OUgDReq6fYbSCYXli5800QwA8Zj6zwJH2qG7l2VAPm0Gd0O9RPP/U2DCxDVAHPDV8VYzKsLWeCQ0Rsxz0S0d1r1Vss15rZ/C/RaCXUJB9ba4F1uRMh4l8Jm4/eSAcuAryI= + distributions: sdist + skip_existing: true + skip_cleanup: true + on: + tags: true + all_branches: true + condition: $MAKE_STEP == sdist && $TRAVIS_TAG =~ ^v[0-9]+\.[0-9]+\.[0-9]+((a|b|rc)[0-9]+)?$ + # we use dev version tags to release to test.pypi.org + - provider: pypi + user: "__token__" + password: + secure: KYfWfkPfllpPq+qkAXiX5KEPNrPu/L46DkZ1H6VWE69zQMuuTdlfC4UiQ28tt5HeeFt7Wxnn1JlfWuaFhIZx+xMa3oa3ln9BINXZmfMUcX6xpK5Uegtd8Y0ywiZJsd5PN1ZmthfgWTLJ9mJydYb13OpgzNxqpO9eiYz/4xpnSm7zG5OYfmzt5gR+CbzIEcL0VS15Os7i/aCEes32HNcqyl4Dwx7mJ0lFgzRTChYTzsifrtD4XmH6p5H+WrPIx336Wz/8jrBgcM20kXwHiyWGjg7gnWCx8+fcPhHWa0e3iC48KoSd3ixeK1QyrPt2jbUtuMmB6x5yM/fxloWyMCj8W0jU1jJgzrZA3TzV2EPym54BoDz2lZ1xTeAxeCk1nYcA6wT6Xea6MwSGTvV/qPLEnXIafEt9bcD0ox+JfCjNVrw3baQqnwAWnyKCFKYJ3IHJCXz23J26pLlv10DgEtxI5/U2nUU/Nx9f/fJn/CJlz1X1TOFygqPICoUSE3t4jsqbLbA48UJz6fZnzSwFO2FN+wGUqudq/JkAaRcLu4/W+1f4HrQ5UVk8Ql4wDUS3iBr8FYsQ+S8fxvHRJGaCVSO1KCUcX//DHV3iB2VwDNHiD8zF3FtRuyODpFbo5BlL3eIX5w1dCTx/6SgDpNb/PofkQdaHi6woIzX+8zoymMT+4QA= + distributions: bdist_wheel + skip_existing: true + skip_cleanup: true + server: https://test.pypi.org/legacy/ + # FIXME workaround to make deploy possibly work under Windows, remove once released + edge: + source: native-api/dpl + branch: z-quotes + on: + tags: true + all_branches: true + condition: $MAKE_STEP =~ ^(bdist_wheel)$ && $TRAVIS_TAG =~ ^v[0-9]+\.[0-9]+\.[0-9]+((a|b|rc)[0-9]+)?\.dev[0-9]+$ + # TODO add |bdist_win to release also Windows wheel + - provider: pypi + user: "__token__" + password: + secure: KYfWfkPfllpPq+qkAXiX5KEPNrPu/L46DkZ1H6VWE69zQMuuTdlfC4UiQ28tt5HeeFt7Wxnn1JlfWuaFhIZx+xMa3oa3ln9BINXZmfMUcX6xpK5Uegtd8Y0ywiZJsd5PN1ZmthfgWTLJ9mJydYb13OpgzNxqpO9eiYz/4xpnSm7zG5OYfmzt5gR+CbzIEcL0VS15Os7i/aCEes32HNcqyl4Dwx7mJ0lFgzRTChYTzsifrtD4XmH6p5H+WrPIx336Wz/8jrBgcM20kXwHiyWGjg7gnWCx8+fcPhHWa0e3iC48KoSd3ixeK1QyrPt2jbUtuMmB6x5yM/fxloWyMCj8W0jU1jJgzrZA3TzV2EPym54BoDz2lZ1xTeAxeCk1nYcA6wT6Xea6MwSGTvV/qPLEnXIafEt9bcD0ox+JfCjNVrw3baQqnwAWnyKCFKYJ3IHJCXz23J26pLlv10DgEtxI5/U2nUU/Nx9f/fJn/CJlz1X1TOFygqPICoUSE3t4jsqbLbA48UJz6fZnzSwFO2FN+wGUqudq/JkAaRcLu4/W+1f4HrQ5UVk8Ql4wDUS3iBr8FYsQ+S8fxvHRJGaCVSO1KCUcX//DHV3iB2VwDNHiD8zF3FtRuyODpFbo5BlL3eIX5w1dCTx/6SgDpNb/PofkQdaHi6woIzX+8zoymMT+4QA= + distributions: sdist + skip_existing: true + skip_cleanup: true + server: https://test.pypi.org/legacy/ + # FIXME workaround to make deploy possibly work under Windows, remove once released + edge: + source: native-api/dpl + branch: z-quotes + on: + tags: true + all_branches: true + condition: $MAKE_STEP == sdist && $TRAVIS_TAG =~ ^v[0-9]+\.[0-9]+\.[0-9]+((a|b|rc)[0-9]+)?\.dev[0-9]+$ + +#=== ADDITIONAL JOBS === jobs: include: - # Build and deploy Windows executable + #--- Build and deploy Windows executable --- - os: windows language: shell env: MAKE_STEP=bdist_win @@ -43,7 +110,7 @@ - pyenv local $pyver - python --version install: - - pip install 
--upgrade tox pywin32 setuptools-scm PyInstaller + - pip install --upgrade tox pywin32 setuptools-scm PyInstaller wheel - pyenv rehash - git clone -b v2.2.1 https://github.com/librsync/librsync.git $HOME/.librsync - export LIBRSYNC_DIR=$HOME/librsync @@ -53,7 +120,7 @@ - cmake --install . --config Release - popd script: - - python setup.py bdist + - python setup.py bdist_wheel - vername=rdiff-backup-`python setup.py --version` - PyInstaller --onefile --distpath build/$vername --paths=build/lib.win32-$pyverbrief --add-data=src/rdiff_backup.egg-info/PKG-INFO\;rdiff_backup.egg-info --console build/scripts-$pyverbrief/rdiff-backup before_deploy: @@ -61,7 +128,11 @@ - pushd build - 7z a -tzip ../dist/$vername.win32exe.zip $vername - popd - # Build and deploy Linux wheels using manylinux Docker containers + + #--- Build and deploy Linux wheels using manylinux Docker containers --- + # - build manylinux2010 (and manylinux1) x64 + # - build manylinux2010 i686 + # - build manylinux2014 x64 # avoiding manylinux2014_i686 because it does not provide librsync-devel - os: linux sudo: required @@ -98,9 +169,11 @@ script: - docker run --rm -e PLAT=$PLAT -v `pwd`:/io $DOCKER_IMAGE $PRE_CMD /io/tools/build_wheels.sh - ls dist/ + + #--- Build Debian packages --- - os: linux language: shell - env: MAKE_STEP=bdist_deb RUN_COMMAND= + env: MAKE_STEP=dist_deb RUN_COMMAND= addons: apt: packages: # make sure these match debian/control contents @@ -120,8 +193,8 @@ script: - make $MAKE_STEP - cat ../*.changes - - mkdir -vp dist - - cp -v ../*.* dist/ + + #--- Build and deploy Debian packages --- - os: linux env: MAKE_STEP=sdist RUN_COMMAND= install: @@ -135,7 +208,7 @@ only: - master - /_$/ # put an underscore at the end of the branch name to force building - - /^v\d+\.\d+\.\d+[a-z]?[0-9]?$/ # we need to whitelist tags or nothing will happen + - /^v\d+\.\d+\.\d+((a|b|rc)[0-9])?(\.dev[0-9]+)?$/ # we need to whitelist tags (PEP440 format) or nothing will happen addons: apt: diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/CHANGELOG new/rdiff-backup-1.9.1b0/CHANGELOG --- old/rdiff-backup-1.9.0b0/CHANGELOG 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/CHANGELOG 2020-02-23 09:20:37.000000000 +0100 @@ -1,13 +1,42 @@ -New in v2.0.0 (????/??/??) -------------------------- +New in v1.9.1b0 (2020-02-23) +---------------------------- + +## Changes + +* FIX: remove too specific Debian packages from GitHub deployment, closes #263 +* NEW: add a new tool to help generate the changelog (description in DEVELOP.md) +* DOC: new release rules and procedure added to docs/DEVELOP.md +* FIX: avoid double unquoting of increment file infos, closes #266 +* FIX: versioning of Debian packages now follows the overall tag-based versioning without glitches. +* DEV: automate releases to PyPI and Test PyPI via the Travis deployment pipeline. +* FIX: remove some more ugly bytes output in strings using _safe_str, closes #238 +* FIX: added and moved hardlinks were not correctly counted and restored, closes #239 +* FIX: rdiff-backup complained about missing SHA checksums of hardlinks, closes #78 +* FIX: avoid 'int is not iterable' error when calling remote command on Windows +* DEV: flake8 checks only setup.py, src, testing and tools code. +* NEW: add support for SOURCE_DATE_EPOCH to override the build date, making reproducible builds possible. +* NEW: sparse files are handled more efficiently, if not compressed and depending on the file system + +## Authors + +* Bernhard M. 
Wiedemann +* Eric L +* Otto Kekäläinen +* Patrik Dufresne +* Stefan Seyfried +* zjw + + +New in v1.9.0b0 (2020-01-31) +---------------------------- Different bug fixes, improvements in code and documentation - too many to list (Andreas Olsson, Andrew Foster, Arrigo Marchiori, bigbear3001, davekempe, David I. Lehn, elMor3no, Eric Lavarde, Frank Crawford, Jiri Lunacek, joshn, Josh Soref, -mestre, Oliver Lowe, orangenschalen, Otto Kek�l�inen, owsla, Patrik Dufresne, Reio Remma, +mestre, Oliver Lowe, orangenschalen, Otto Kekäläinen, owsla, Patrik Dufresne, Reio Remma, Rodrigo Silva, Stefan Seyfried, Wes Cilldhaire, zjw) -Add automated of different package formats (Otto Kek�l�inen, Arrigo Marchiori, Eric Lavarde) +Add automated building of different package formats (Otto Kekäläinen, Arrigo Marchiori, Eric Lavarde) Add RDIFF_BACKUP_VERBOSITY environment variable (Eric Lavarde) @@ -866,7 +895,7 @@ Kaltenecker for bug report. Fixed error when --restrict path given with trailing backslash. Bug -report by �ke Br�nnstr�m. +report by Åke Brännström. Fixed many functions like --list-increments, --remove-older-than, etc. which previously didn't work with filename quoting. Thanks to @@ -1233,7 +1262,7 @@ --windows-mode is now short for --windows-time-format --chars-to-quote A-Z: --no-hard-links --exclude-special-files. Thanks to Paul-Erik -T�rr�nen for some helpful windows info. +Törrönen for some helpful windows info. Multiple --include and --exclude statements can now be given in a single file. See the documentation on diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/Makefile new/rdiff-backup-1.9.1b0/Makefile --- old/rdiff-backup-1.9.0b0/Makefile 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/Makefile 2020-02-23 09:20:37.000000000 +0100 @@ -50,8 +50,8 @@ # Prepare wheel for deployment. ${RUN_COMMAND} ./setup.py sdist -bdist_deb: - ${RUN_COMMAND} gbp buildpackage -us -uc +dist_deb: + ${RUN_COMMAND} debian/autobuild.sh container: # Build development image diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/PKG-INFO new/rdiff-backup-1.9.1b0/PKG-INFO --- old/rdiff-backup-1.9.0b0/PKG-INFO 2020-01-31 21:51:35.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/PKG-INFO 2020-02-23 09:20:42.000000000 +0100 @@ -1,6 +1,6 @@ Metadata-Version: 1.2 Name: rdiff-backup -Version: 1.9.0b0 +Version: 1.9.1b0 Summary: Backup and Restore utility, easy to use, efficient, locally and remotely usable Home-page: https://rdiff-backup.net/ Author: The rdiff-backup project diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/README.md new/rdiff-backup-1.9.1b0/README.md --- old/rdiff-backup-1.9.0b0/README.md 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/README.md 2020-02-23 09:20:37.000000000 +0100 @@ -58,14 +58,15 @@ #### On Windows -Just drop the binary `rdiff-backup-VERSION-PLATFORM.exe`, possibly renamed to `rdiff-backup`, -somewhere in your PATH and it should work, as it comes with all dependencies included. +Just download and unpack the file `rdiff-backup-VERSION.winBITS.zip` +available as _asset_ attached to one of the releases in the +[releases section](https://github.com/rdiff-backup/rdiff-backup/releases) and +drop the binary `rdiff-backup.exe` somewhere in your PATH and it should work, +as it comes with all dependencies included. 
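A minimal sketch of typical invocations once the binary is on the PATH (paths and host name here are hypothetical examples, not taken from the README):

```
rdiff-backup C:\Data C:\Backup                          # local mirror plus increments
rdiff-backup C:\Data user@backupserver::/backups/data   # remote target, reached over SSH
```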
For remote operations, you will need to have an SSH package installed (also on Linux but it is generally more obvious). -> **NOTE:** for now the documentation under Windows is available online from the [documentation folder](docs/). - ### From source code This is an advanced topic, but necessary for platforms like MacOS X and FreeBSD, and diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/debian/autobuild.sh new/rdiff-backup-1.9.1b0/debian/autobuild.sh --- old/rdiff-backup-1.9.0b0/debian/autobuild.sh 1970-01-01 01:00:00.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/debian/autobuild.sh 2020-02-23 09:20:37.000000000 +0100 @@ -0,0 +1,11 @@ +#!/bin/bash + +# Automatically update changelog with new version number +VERSION="$(./setup.py --version)" +dch -b -v "${VERSION}" "Automatic build" + +# Build package ignoring the modified changelog +gbp buildpackage -us -uc --git-ignore-new + +# Reset debian/changelog +git checkout debian/changelog diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/debian/changelog new/rdiff-backup-1.9.1b0/debian/changelog --- old/rdiff-backup-1.9.0b0/debian/changelog 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/debian/changelog 2020-02-23 09:20:37.000000000 +0100 @@ -1,4 +1,4 @@ -rdiff-backup (1.4.0b1) unstable; urgency=medium +rdiff-backup (1.9.0b0) unstable; urgency=medium * Initial changelog entry for native rdiff-backup packaging. diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/debian/rules new/rdiff-backup-1.9.1b0/debian/rules --- old/rdiff-backup-1.9.0b0/debian/rules 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/debian/rules 2020-02-23 09:20:37.000000000 +0100 @@ -1,10 +1,4 @@ #!/usr/bin/make -f -# Get VERSION from the string in PKG_INFO as generated by the current build -VERSION := $(shell grep '^Version:' src/rdiff_backup.egg-info/PKG-INFO | cut -d ' ' -f 2) - %: dh $@ --buildsystem=pybuild --with python3 - -override_dh_gencontrol: - dh_gencontrol -- -v${VERSION} diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/docs/DEVELOP.md new/rdiff-backup-1.9.1b0/docs/DEVELOP.md --- old/rdiff-backup-1.9.0b0/docs/DEVELOP.md 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/docs/DEVELOP.md 2020-02-23 09:20:37.000000000 +0100 @@ -10,7 +10,8 @@ git clone https://github.com/rdiff-backup/rdiff-backup.git
**NOTE:** If you plan to provide your own code, you should first fork -our repo and clone your own forked repo. How is described at +our repo and clone your own forked repo (preferably using SSH, not HTTPS). +How to do this is described at https://help.github.com/en/github/collaborating-with-issues-and-pull-request...
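A minimal sketch of that fork-based setup (the GitHub user name `YOURUSER` is a hypothetical placeholder):

```
# fork rdiff-backup/rdiff-backup in the GitHub web UI first, then:
git clone git@github.com:YOURUSER/rdiff-backup.git    # your fork, cloned over SSH
cd rdiff-backup
git remote add upstream https://github.com/rdiff-backup/rdiff-backup.git
git fetch upstream                                    # keep upstream master within reach
```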
@@ -18,9 +19,9 @@ - Before committing to a lot of writing or coding, please file an issue on Github and discuss your plans and gather feedback. Eventually it will be much easier to merge your change request if the idea and design have been agreed upon, and there will be less work for you as a contributor if you implement your idea along the correct lines to begin with. - Please check out [existing issues](https://github.com/rdiff-backup/rdiff-backup/issues) and [existing merge requests](https://github.com/rdiff-backup/rdiff-backup/pulls) and browse the [git history](https://github.com/rdiff-backup/rdiff-backup/commits/master) to see if somebody already tried to address the thing you are interested in. It might provide useful insight into why the current state is as it is. -- Changes can be submitted using the typical Github workflow: clone this repository, make your changes, test and verify, and submit a Pull Request. -- Each change (= pull request) should focus on some topic and resist changing anything else. Keeping the scope clear also makes it easier to review the pull request. A good pull request has only one or a few commits, with each commit having a good commit subject and if needed also a body that explains the change. -- For all code changes, please remember also to include inline comments and update tests where needed. +- Changes can be submitted using the typical Github workflow: clone this repository, make your changes, test and verify, and submit a Pull Request (PR). +- For all code changes, please remember also to include inline comments and + update tests where needed. ### License @@ -35,6 +36,11 @@ and the code and other contents to be of good quality and a reasonable foundation for them to continue development on. +Each PR should focus on some topic and resist changing anything else. Keeping the +scope clear also makes it easier to review the pull request. A good pull +request has only one or a few commits, with each commit having a good commit +subject and if needed also a body that explains the change. + Each pull request has only one author, but anybody can give feedback. The original author should be given time to address the feedback – reviewers should not do the fixes for the author, but instead let the author keep the authorship. @@ -42,15 +48,25 @@ been merged, or even in parallel if the changes are in different files or at least on different lines and do not cause merge conflicts if worked on. -If a pull request for whatever reason is not quickly merged, it should be -refreshed by [rebasing](https://git-scm.com/docs/git-rebase) it on latest -upstream master. +It is the responsibility of the PR author to keep it without conflict with +master (e.g. if not quickly merged) and overall to support the review process. Ideally each pull request gets some feedback within 24 hours from it having been filed, and is merged within days or a couple of weeks. Each author should facilitate quick reviews and merges by making clean and neat commits and pull requests that are quick to review and do not spiral out in long discussions. 
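The "typical Github workflow" mentioned above condenses to something like the following sketch (branch name and commit subject are made-up examples; the commit-prefix convention is described just below):

```
git checkout -b myname-mybranch     # topic branch for one focused change
# ... edit code, inline comments and tests ...
tox                                 # run the test suite locally, assuming tox is configured
git commit -a -m "FIX: one-line summary of the change"
git push origin myname-mybranch     # then open the Pull Request on GitHub
```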
+If something is of interest for the changelog, prefix the statement in the +commit body with three uppercase letters and a colon; which acronym you use is +not that important, but here is a list of recommended ones (see the release +section to understand why it's important): + +* FIX: for a bug fix +* NEW: for a new feature +* CHG: for a change requesting consideration when upgrading +* DOC: for documentation aspects +* WEB: anything regarding the website + #### Merging changes to master Currently the rdiff-backup Github repository is configured so that merging a @@ -64,6 +80,12 @@ standards and have a track record of meaningful contributions over several months. +It is the responsibility of the merging developer to make sure that the PR +is _squashed_ and that the squash commit message helps the release process +with the right description and 3-capital-letters prefix (it is still the +obligation of the PR author to provide enough information in their commit +messages). + ### Coding style This project is written in Python, and must follow the official [PEP 8 coding @@ -286,3 +308,136 @@ rdiff-backup -v9 localhost::/sourcedir /backupdir 2>&1 | awk \ '/^2019-09-16/ { if (line) print line; line = $0 } ! /^2019-09-16/ { line = line " ## " $0 }' \ | sort | sed 's/ ## /\n/g' + +### Debug iterators + +When debugging, the fact that rdiff-backup uses a lot of iterators makes it +rather complex to understand what's happening. It would sometimes be easier +to have a list to study at once instead of iterating painfully through each +element, _but_ if you simply use `p list(some_iter_var)`, you basically run through +the iterator and it's lost for the program, which can only fail. + +The solution is to use `itertools.tee`, create a copy of the iterator and +print the copy, e.g.: + +``` +(Pdb) import itertools +(Pdb) inc_pair_iter,mycopy = itertools.tee(inc_pair_iter) +(Pdb) p list(map(lambda x: [str(x[0]),list(map(str,x[1]))], mycopy)) +[... whatever output ...] +``` + +Assuming the iteration has no side effects, the initial variable `inc_pair_iter` +is still valid for the rest of the program, whereas the `mycopy` is "dried out" +(but you can repeat the `tee` operation as often as you want). + +## RELEASING + +We use [Travis CI](https://travis-ci.org) to release automatically, as set up in the [Travis configuration file](../.travis.yml). + +The following rules apply: + +* each modification to master happens through a Pull Request (PR) which triggers + a pipeline job, which must be successful for the merge to have a chance to + happen. Such PR jobs will _not_ trigger a release. +* GitHub releases are generated as draft only on Git tags looking like a release. + The release manager then reviews the draft release, naming and describing it + before making it visible. An automated PyPI release is foreseen but not + yet implemented. +* If you need to trigger a job for test purposes (e.g. because you changed + something in the pipeline), create a branch or a tag with an underscore at + the end of its name. Just make sure that you remove such tags, and + potential draft releases, after usage. +* If you want, again for test purposes, to trigger a PyPI deployment towards + test.pypi.org, tag the commit before you push it with a development release + tag, like `vA.B.CbD.devN`, then explicitly push the tag and the branch at + the same time e.g. with `git push origin vA.B.CbD.devN myname-mybranch`. + +> **TIP:** Travis will not trigger again on a commit which has already gone + through the pipeline, even if you add a tag. 
This applies especially + to PR commits merged to master without squashing. + +Given the above rules, a release cycle looks roughly as follows: + +1. Call `./tools/get_changelog_since.sh PREVIOUSTAG` to get a list of changes + (see above) since the last release and a sorted and unique list of authors, + on which basis you can extend the [CHANGELOG](../CHANGELOG) for the + new release. + **IMPORTANT:** make sure that the PR is squashed or you won't be able to + trigger the release pipeline via a tag on master. +2. Make sure you have the latest master commits with + `git checkout master && git pull --prune`. +3. Tag the last commit with `git tag vX.Y.ZbN` (beta) or `git tag vX.Y.Z` (stable). +4. Push the tag to GitHub with `git push --tags`. +5. You won't see anything in GitHub at first and need to go directly to + [Travis builds](https://travis-ci.org/rdiff-backup/rdiff-backup/builds) to + verify that the pipeline has started. +6. If everything goes well, you should see the + [new draft release](https://github.com/rdiff-backup/rdiff-backup/releases) + with all assets (aka packages) attached to it after all jobs have finished + in Travis. +7. Give the release a title and description and save it to make it visible to + everybody. +8. You'll get a notification e-mail telling you that rdiff-backup-admin has + released a new version. +9. Use this e-mail to inform the [rdiff-backup users](mailto:rdiff-backup-users@nongnu.org). + +> **IMPORTANT:** if not everything goes well, remove the tag both locally with + `git tag -d TAG` and remotely with `git push -d origin TAG`. + Then fix the issue with a new PR and start from the beginning. + +> **TIP:** the PyPI deploy pipeline is for now broken under Windows on Travis-CI. + You may download the Windows wheel(s) from GitHub and upload them to + PyPI from the command line using twine: + `twine upload [--repository-url https://test.pypi.org/legacy/] dist/rdiff\_backup-*-win32.whl` + +The following sub-chapters list some learnings and specificities in case you need to modify the pipeline. + +### Install the Travis client locally + +See https://github.com/travis-ci/travis.rb for details, here only the gist of it: + +``` +ruby -v # version >= 2 +dnf install rubygems # or zypper, apt, yum... +gem install travis # as non-root keeps everybody more happy +travis version # 1.8.10 -> all OK +``` + +> **NOTE:** installing travis gem also pulls the dependencies multipart-post, faraday, faraday_middleware, highline, backports, net-http-pipeline, net-http-persistent, addressable, multi_json, gh, launchy, ethon, typhoeus, websocket, pusher-client. You might want to install some of them via your preferred package manager instead. + +### Create an OAuth key + +Use the travis client to generate a secure API key (you can throw away other changes to the `.travis.yml` file). You will need the password of the rdiff-backup-admin, hence only project admins can generate it: + +``` +$ travis setup releases +Detected repository as rdiff-backup/rdiff-backup, is this correct? |yes| +Username: rdiff-backup-admin +Password for rdiff-backup-admin: ******************** +File to Upload: dist/* +Deploy only from rdiff-backup/rdiff-backup? |yes| +Encrypt API key? 
|yes| +``` + +The key to add looks then as follows for GitHub deployment (the concrete key shown here isn't valid though): + +``` +deploy: + provider: releases + api_key: + secure: lqg+HZoy68WudiogbEnOmhxfw9zEJhPOyM4bLJdU2lRBlUZbf0uFvpVJdJqPB7rovKpDknapg4xdXdpbLbD0r/PwsSI9UyFLmyhGn24pnSlrFFjFm2AIQQJUMiCcqsPqNc7fXNMC1BwuM1/RjO3hIxfPxI+A9MSVqW3qhzmerOKXeKFiOLXJ0FkTomRdWGhCEafWO1Ibz5O2d5psK1N/r1ni8kv+E6GPjHk54vmKNcFg8uB7+cPs7ONtW2F+M/h12UVZkC+hy8Bss+esQIMYdVLW5JkKSFfNwKs57qDYYd0lWLzMRti+S+0k/1O6l51BzLY61C4FlRwrMWAy4HIYn5ui39GXIYtGXq9zW+EpYvqTsar+KDU+DGzsr+hAt+eCQpbmZ2SpA7B8Mb3x+BwAcEkvCql789FhWCOd3arUm3H6Ng6yNt50crafJeboHhmitgFQ9uTM7AnXwMnIYVkl6IAZlPkIj20TF1JSdmzpPG2jEJATsMybCuaAuS+ngq4DnJ1axGcclIr4AY9RkSI8EVrL1HTcVLaIH0JnWdO/YC7DSZloC0oswbch1qaW3WsWkJspeaLRvochyFYsatAbvZ46Mzt5uuJUPtSNUVizeb7kBhVGzLVYIepd5XYPgc3Qxp23hu2k9lwg4vjq8WFegC5a34SW/zEZeuFP3HTnD+4= +``` + +### Delete draft releases + +Because there is one draft release created for each pipeline job, it can be quite a lot when one tests the release pipeline. The GitHub WebUI requires quite a lot of clicks to delete them. A way to simplify (a bit) the deletion is to install the command line tool `hub` and call the following command: + +``` +hub release --include-drafts -f '%U %S %cr%n' | \ + awk '$2 == "draft" && $4 == "days" && $3 > 2 {print $1}' | xargs firefox +``` + +the `2` compared to `$3` is the number of days, so that you get one tab opened in firefox for each draft release, so that you only need 2 clicks and one Ctrl+W (close the tab) to delete those releases. + +> **NOTE:** deletion directly using hub isn't possible as it only supports tags and not release IDs. Drafts do NOT have tags... diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/setup.py new/rdiff-backup-1.9.1b0/setup.py --- old/rdiff-backup-1.9.0b0/setup.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/setup.py 2020-02-23 09:20:37.000000000 +0100 @@ -88,9 +88,10 @@ def run(self): if DEBUG: self.debug_print(self.distribution.dump_option_dicts()) + build_time = int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) replacement_dict = { "version": self.distribution.get_version(), - "month_year": time.strftime("%B %Y", time.localtime(time.time())) + "month_year": time.strftime("%B %Y", time.gmtime(build_time)) } for template in self.template_files: self.make_file( diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff-backup-statistics new/rdiff-backup-1.9.1b0/src/rdiff-backup-statistics --- old/rdiff-backup-1.9.0b0/src/rdiff-backup-statistics 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff-backup-statistics 2020-02-23 09:20:37.000000000 +0100 @@ -78,13 +78,13 @@ os.path.join(os.fsencode(args[0]), b"rdiff-backup-data"), ) if not Globals.rbdir.isdir(): - sys.exit("Directory %a not found" % (Globals.rbdir.path,)) + sys.exit("Directory %s not found" % (Globals.rbdir.get_safepath(),)) def system(cmd): sys.stdout.flush() if os.system(cmd): - sys.exit("Error running command '%a'\n" % (cmd,)) + sys.exit("Error running command '%s'\n" % _safe_str(cmd)) class StatisticsRPaths: @@ -125,7 +125,7 @@ result.append((session_dict[time], filestat_dict[time])) else: sys.stderr.write( - "No file_statistics to match %a\n" % (session_dict[time].path,) + "No file_statistics to match '%s'\n" % session_dict[time].get_safepath() ) return result @@ -284,7 +284,7 @@ continue match = r.match(line) if not match: - 
sys.stderr.write("Error parsing line: %a\n" % (line,)) + sys.stderr.write("Error parsing line: %s\n" % _safe_str(line)) continue filename = match.group(1) @@ -521,6 +521,14 @@ Globals.rbdir = FilenameMapping.get_quotedrpath(Globals.rbdir) +def _safe_str(cmd): + """Transform bytes into string without risk of conversion error""" + if isinstance(cmd, str): + return cmd + else: + return str(cmd, errors='replace') + + def Main(): Time.setcurtime() parse_args() diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff_backup/FilenameMapping.py new/rdiff-backup-1.9.1b0/src/rdiff_backup/FilenameMapping.py --- old/rdiff-backup-1.9.0b0/src/rdiff_backup/FilenameMapping.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff_backup/FilenameMapping.py 2020-02-23 09:20:37.000000000 +0100 @@ -48,6 +48,14 @@ pass +def _safe_str(cmd): + """Transform bytes into string without risk of conversion error""" + if isinstance(cmd, str): + return cmd + else: + return str(cmd, errors='replace') + + def set_init_quote_vals(): """Set quoting value from Globals on all conns""" for conn in Globals.connections: @@ -60,7 +68,7 @@ chars_to_quote = Globals.chars_to_quote if len(Globals.quoting_char) != 1: log.Log.FatalError("Expected single character for quoting char," - "got '%a' instead" % (Globals.quoting_char, )) + "got '%s' instead." % _safe_str(Globals.quoting_char)) quoting_char = Globals.quoting_char init_quoting_regexps() @@ -69,7 +77,7 @@ """Compile quoting regular expressions""" global chars_to_quote_regexp, unquoting_regexp assert chars_to_quote and isinstance(chars_to_quote, bytes), \ - "Chars to quote are wrong: %a" % chars_to_quote + "Chars to quote are wrong: '%s'." % _safe_str(chars_to_quote) try: chars_to_quote_regexp = re.compile(b"[%b]|%b" % (chars_to_quote, quoting_char), re.S) @@ -125,11 +133,11 @@ def unquote_single(match): """Unquote a single quoted character""" if not len(match.group()) == 4: - raise QuotingException("Quoted group wrong size: %a" % match.group()) + raise QuotingException("Quoted group wrong size: '%s'." % _safe_str(match.group())) try: return os.fsencode(chr(int(match.group()[1:]))) except ValueError: - raise QuotingException("Quoted out of range: %a" % match.group()) + raise QuotingException("Quoted out of range: '%s'." % _safe_str(match.group())) class QuotedRPath(rpath.RPath): @@ -183,8 +191,6 @@ self.inc_timestr = unquote(temp_rp.inc_timestr) else: result = rpath.RPath.isincfile(self) - if result: - self.inc_basestr = unquote(self.inc_basestr) return result def get_path(self): diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff_backup/Hardlink.py new/rdiff-backup-1.9.1b0/src/rdiff_backup/Hardlink.py --- old/rdiff-backup-1.9.0b0/src/rdiff_backup/Hardlink.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff_backup/Hardlink.py 2020-02-23 09:20:37.000000000 +0100 @@ -105,7 +105,14 @@ or src_rorp.getnumlinks() == dest_rorp.getnumlinks() == 1): return 1 # Hard links don't apply - if src_rorp.getnumlinks() < dest_rorp.getnumlinks(): + """The sha1 of linked files is only stored in the metadata of the first + linked file on the dest side. 
If the first linked file on the src side is + deleted, then the sha1 will also be deleted on the dest side, so we test for this + & report not equal so that another sha1 will be stored with the next linked + file on the dest side""" + if (not islinked(src_rorp) and not dest_rorp.has_sha1()): + return 0 + if src_rorp.getnumlinks() != dest_rorp.getnumlinks(): return 0 src_key = get_inode_key(src_rorp) index, remaining, dest_key, digest = _inode_index[src_key] diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff_backup/SetConnections.py new/rdiff-backup-1.9.1b0/src/rdiff_backup/SetConnections.py --- old/rdiff-backup-1.9.0b0/src/rdiff_backup/SetConnections.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff_backup/SetConnections.py 2020-02-23 09:20:37.000000000 +0100 @@ -151,7 +151,7 @@ try: return __cmd_schema % host_info except TypeError: - Log.FatalError("Invalid remote schema:\n\n%a\n" % __cmd_schema) + Log.FatalError("Invalid remote schema:\n\n%s\n" % _safe_str(__cmd_schema)) def init_connection(remote_cmd): @@ -165,15 +165,23 @@ if not remote_cmd: return Globals.local_connection - Log("Executing %a" % remote_cmd, 4) + Log("Executing %s" % _safe_str(remote_cmd), 4) try: # we need buffered read on SSH communications, hence using # default value for bufsize parameter - process = subprocess.Popen( - remote_cmd, - shell=True, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE) + if os.name == 'nt': + # FIXME workaround because python 3.7 doesn't yet accept bytes + process = subprocess.Popen( + os.fsdecode(remote_cmd), + shell=True, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE) + else: + process = subprocess.Popen( + remote_cmd, + shell=True, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE) (stdin, stdout) = (process.stdin, process.stdout) except OSError: (stdin, stdout) = (None, None) @@ -196,13 +204,13 @@ Couldn't start up the remote connection by executing - %a + %s Remember that, under the default settings, rdiff-backup must be installed in the PATH on the remote system. See the man page for more information on this. This message may also be displayed if the remote version of rdiff-backup is quite different from the local version (%s).""" % - (exception, remote_cmd, Globals.version)) + (exception, _safe_str(remote_cmd), Globals.version)) except OverflowError: Log.FatalError( """Integer overflow while attempting to establish the @@ -213,10 +221,11 @@ Please make sure that nothing is printed (e.g., by your login shell) when this command executes. 
Try running this command: - %a + %s which should only print out the text: rdiff-backup <version>""" % - (remote_cmd, remote_cmd.replace(b"--server", b"--version"))) + (_safe_str(remote_cmd), + _safe_str(remote_cmd.replace(b"--server", b"--version")))) if remote_version != Globals.version: Log( @@ -320,3 +329,11 @@ """ % (Globals.version, version)) else: print("Server OK") + + +def _safe_str(cmd): + """Transform bytes into string without risk of conversion error""" + if isinstance(cmd, str): + return cmd + else: + return str(cmd, errors='replace') diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff_backup/compare.py new/rdiff-backup-1.9.1b0/src/rdiff_backup/compare.py --- old/rdiff-backup-1.9.0b0/src/rdiff_backup/compare.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff_backup/compare.py 2020-02-23 09:20:37.000000000 +0100 @@ -24,7 +24,7 @@ """ import os -from . import Globals, restore, rorpiter, log, backup, rpath, hash, robust +from . import Globals, restore, rorpiter, log, backup, rpath, hash, robust, Hardlink def Compare(src_rp, mirror_rp, inc_rp, compare_time): @@ -84,7 +84,8 @@ for repo_rorp in repo_iter: if not repo_rorp.isreg(): continue - if not repo_rorp.has_sha1(): + verify_sha1 = get_hash(repo_rorp) + if not verify_sha1: log.Log( "Warning: Cannot find SHA1 digest for file %s,\n" "perhaps because this feature was added in v1.1.1" % @@ -92,7 +93,7 @@ continue fp = RepoSide.rf_cache.get_fp(base_index + repo_rorp.index, repo_rorp) computed_hash = hash.compute_sha1_fp(fp) - if computed_hash == repo_rorp.get_sha1(): + if computed_hash == verify_sha1: log.Log( "Verified SHA1 digest of %s" % repo_rorp.get_safeindexpath(), 5) @@ -103,13 +104,27 @@ "doesn't match recorded digest of\n %s\n" "Your backup repository may be corrupted!" % (repo_rorp.get_safeindexpath(), computed_hash, - repo_rorp.get_sha1()), 2) + verify_sha1), 2) RepoSide.close_rf_cache() if not bad_files: log.Log("Every file verified successfully.", 3) return bad_files +def get_hash(repo_rorp): + """ Try to get a sha1 digest from the repository. If hardlinks + are saved in the metadata, get the sha1 from the first hardlink """ + Hardlink.add_rorp(repo_rorp) + if Hardlink.islinked(repo_rorp): + verify_sha1 = Hardlink.get_sha1(repo_rorp) + elif repo_rorp.has_sha1(): + verify_sha1 = repo_rorp.get_sha1() + else: + verify_sha1 = None + Hardlink.del_rorp(repo_rorp) + return verify_sha1 + + def print_reports(report_iter): """Given an iter of CompareReport objects, print them to screen""" assert not Globals.server @@ -232,13 +247,14 @@ def hashes_changed(src_rp, mir_rorp): """Return 0 if their data hashes same, 1 otherwise""" - if not mir_rorp.has_sha1(): + verify_sha1 = get_hash(mir_rorp) + if not verify_sha1: log.Log( "Warning: Metadata file has no digest for %s, " "unable to compare." 
% (mir_rorp.get_safeindexpath(), ), 2) return 0 elif (src_rp.getsize() == mir_rorp.getsize() - and hash.compute_sha1(src_rp) == mir_rorp.get_sha1()): + and hash.compute_sha1(src_rp) == verify_sha1): return 0 return 1 diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff_backup/eas_acls.py new/rdiff-backup-1.9.1b0/src/rdiff_backup/eas_acls.py --- old/rdiff-backup-1.9.0b0/src/rdiff_backup/eas_acls.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff_backup/eas_acls.py 2020-02-23 09:20:37.000000000 +0100 @@ -236,8 +236,16 @@ def join_ea_iter(rorp_iter, ea_iter): """Update a rorp iter by adding the information from ea_iter""" + + def _safe_str(cmd): + """Transform bytes into string without risk of conversion error""" + if isinstance(cmd, str): + return cmd + else: + return str(cmd, errors='replace') + for rorp, ea in rorpiter.CollateIterators(rorp_iter, ea_iter): - assert rorp, "Missing rorp for index %a" % (ea.index, ) + assert rorp, "Missing rorp for index '%s'." % _safe_str(ea.index) if not ea: ea = ExtendedAttributes(rorp.index) rorp.set_ea(ea) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff_backup/longname.py new/rdiff-backup-1.9.1b0/src/rdiff_backup/longname.py --- old/rdiff-backup-1.9.0b0/src/rdiff_backup/longname.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff_backup/longname.py 2020-02-23 09:20:37.000000000 +0100 @@ -106,7 +106,7 @@ free_name_counter = scan_next_free() filename = b'%i' % free_name_counter rp = get_long_rp(filename) - assert not rp.lstat(), "Unexpected file at %a found" % (rp.path, ) + assert not rp.lstat(), "Unexpected file at '%s' found" % rp.get_safepath() free_name_counter += 1 write_next_free(free_name_counter) return filename @@ -267,11 +267,18 @@ def update_rf(rf, rorp, mirror_root): """Return new or updated restorefile based on alt name info in rorp""" + def _safe_str(cmd): + """Transform bytes into string without risk of conversion error""" + if isinstance(cmd, str): + return cmd + else: + return str(cmd, errors='replace') + def update_incs(rf, inc_base): """Swap inclist in rf with those with base inc_base and return""" log.Log( - "Restoring with increment base %a for file %s" % - (inc_base, rorp.get_safeindexpath()), 6) + "Restoring with increment base %s for file %s" % + (_safe_str(inc_base), rorp.get_safeindexpath()), 6) rf.inc_rp = get_long_rp(inc_base) rf.inc_list = get_inclist(inc_base) rf.set_relevant_incs() diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff_backup/restore.py new/rdiff-backup-1.9.1b0/src/rdiff_backup/restore.py --- old/rdiff-backup-1.9.0b0/src/rdiff_backup/restore.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff_backup/restore.py 2020-02-23 09:20:37.000000000 +0100 @@ -28,7 +28,10 @@ def Restore(mirror_rp, inc_rpath, target, restore_to_time): - """Recursively restore mirror and inc_rpath to target at rest_time""" + """Recursively restore mirror and inc_rpath to target at restore_to_time + in epoch format""" + + # Store references to classes over the connection MirrorS = mirror_rp.conn.restore.MirrorStruct TargetS = target.conn.restore.TargetStruct @@ -441,6 +444,11 @@ self.inc_rp, self.inc_list = inc_rp, inc_list self.set_relevant_incs() + def __str__(self): + return "Index: %s, Mirror: %s, Increment: %s\nIncList: %s\nIncRel: %s" % ( + 
self.index, self.mirror_rp, self.inc_rp, + list(map(str, self.inc_list)), list(map(str, self.relevant_incs))) + def relevant_incs_string(self): """Return printable string of relevant incs, used for debugging""" inc_header = ["---- Relevant incs for %s" % ("/".join(self.index), )] diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff_backup/rpath.py new/rdiff-backup-1.9.1b0/src/rdiff_backup/rpath.py --- old/rdiff-backup-1.9.0b0/src/rdiff_backup/rpath.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff_backup/rpath.py 2020-02-23 09:20:37.000000000 +0100 @@ -67,11 +67,32 @@ def copyfileobj(inputfp, outputfp): """Copies file inputfp to outputfp in blocksize intervals""" blocksize = Globals.blocksize + + sparse = False + """Negative seeks are not supported by GzipFile""" + compressed = False + if isinstance(outputfp, gzip.GzipFile): + compressed = True + while 1: inbuf = inputfp.read(blocksize) if not inbuf: break - outputfp.write(inbuf) + + buflen = len(inbuf) + if not compressed and inbuf == b"\x00" * buflen: + outputfp.seek(buflen, os.SEEK_CUR) + # flag sparse=True, that we seek()ed, but have not written yet + # The filesize is wrong until we write + sparse = True + else: + outputfp.write(inbuf) + # We wrote, so clear sparse. + sparse = False + + if sparse: + outputfp.seek(-1, os.SEEK_CUR) + outputfp.write(b"\x00") def cmpfileobj(fp1, fp2): @@ -145,7 +166,7 @@ elif rpin.issock(): rpout.mksock() else: - raise RPathException("File %a has unknown type" % rpin.path) + raise RPathException("File '%s' has unknown type." % rpin.get_safepath()) def copy_reg_file(rpin, rpout, compress=0): diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff_backup/selection.py new/rdiff-backup-1.9.1b0/src/rdiff_backup/selection.py --- old/rdiff-backup-1.9.0b0/src/rdiff_backup/selection.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff_backup/selection.py 2020-02-23 09:20:37.000000000 +0100 @@ -709,11 +709,19 @@ def glob_get_prefix_res(self, glob_str): """Return list of regexps equivalent to prefixes of glob_str""" + + def _safe_str(cmd): + """Transform bytes into string without risk of conversion error""" + if isinstance(cmd, str): + return cmd + else: + return str(cmd, errors='replace') + glob_parts = glob_str.split(b"/") if b"" in glob_parts[1: -1]: # "" OK if comes first or last, as in /foo/ raise GlobbingError( - "Consecutive '/'s found in globbing string %a" % glob_str) + "Consecutive '/'s found in globbing string %s" % _safe_str(glob_str)) prefixes = [ b"/".join(glob_parts[:i + 1]) for i in range(len(glob_parts)) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff_backup/win_acls.py new/rdiff-backup-1.9.1b0/src/rdiff_backup/win_acls.py --- old/rdiff-backup-1.9.0b0/src/rdiff_backup/win_acls.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff_backup/win_acls.py 2020-02-23 09:20:37.000000000 +0100 @@ -230,10 +230,18 @@ return os.fsdecode(self.__bytes__()) def from_string(self, acl_str): + + def _safe_str(cmd): + """Transform bytes into string without risk of conversion error""" + if isinstance(cmd, str): + return cmd + else: + return str(cmd, errors='replace') + lines = acl_str.splitlines() if len(lines) != 2 or not lines[0][:8] == b"# file: ": raise metadata.ParsingError( - "Bad record beginning: %a" % lines[0][:8]) + "Bad 
record beginning: %s" % _safe_str(lines[0][:8])) filename = lines[0][8:] if filename == b'.': self.index = () diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff_backup.egg-info/PKG-INFO new/rdiff-backup-1.9.1b0/src/rdiff_backup.egg-info/PKG-INFO --- old/rdiff-backup-1.9.0b0/src/rdiff_backup.egg-info/PKG-INFO 2020-01-31 21:51:35.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff_backup.egg-info/PKG-INFO 2020-02-23 09:20:42.000000000 +0100 @@ -1,6 +1,6 @@ Metadata-Version: 1.2 Name: rdiff-backup -Version: 1.9.0b0 +Version: 1.9.1b0 Summary: Backup and Restore utility, easy to use, efficient, locally and remotely usable Home-page: https://rdiff-backup.net/ Author: The rdiff-backup project diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/src/rdiff_backup.egg-info/SOURCES.txt new/rdiff-backup-1.9.1b0/src/rdiff_backup.egg-info/SOURCES.txt --- old/rdiff-backup-1.9.0b0/src/rdiff_backup.egg-info/SOURCES.txt 2020-01-31 21:51:35.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/src/rdiff_backup.egg-info/SOURCES.txt 2020-02-23 09:20:42.000000000 +0100 @@ -11,6 +11,7 @@ tox_dist.ini tox_root.ini tox_slow.ini +debian/autobuild.sh debian/changelog debian/compat debian/control @@ -106,6 +107,7 @@ testing/timetest.py testing/user_grouptest.py tools/build_wheels.sh +tools/get_changelog_since.sh tools/rdiff-backup.spec.template tools/rdiff-backup.spec.template-fedora tools/setup-testfiles.sh diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/testing/commontest.py new/rdiff-backup-1.9.1b0/testing/commontest.py --- old/rdiff-backup-1.9.0b0/testing/commontest.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/testing/commontest.py 2020-02-23 09:20:37.000000000 +0100 @@ -8,7 +8,7 @@ # Avoid circularities from rdiff_backup.log import Log from rdiff_backup import Globals, Hardlink, SetConnections, Main, \ - selection, rpath, eas_acls, rorpiter, Security + selection, rpath, eas_acls, rorpiter, Security, hash RBBin = os.fsencode(shutil.which("rdiff-backup")) @@ -286,15 +286,49 @@ return src_select.set_iter(), dest_select.set_iter() - def preprocess(src_rorp, dest_rorp): - """Initially process src and dest_rorp""" - if compare_hardlinks and src_rorp: - Hardlink.add_rorp(src_rorp, dest_rorp) - - def postprocess(src_rorp, dest_rorp): - """After comparison, process src_rorp and dest_rorp""" - if compare_hardlinks and src_rorp: - Hardlink.del_rorp(src_rorp) + def hardlink_rorp_eq(src_rorp, dest_rorp): + Hardlink.add_rorp(dest_rorp) + Hardlink.add_rorp(src_rorp, dest_rorp) + rorp_eq = Hardlink.rorp_eq(src_rorp, dest_rorp) + if not src_rorp.isreg() or not dest_rorp.isreg() or src_rorp.getnumlinks() == dest_rorp.getnumlinks() == 1: + if not rorp_eq: + Log("Hardlink compare error with when no links exist exist", 3) + Log("%s: %s" % (src_rorp.index, Hardlink.get_inode_key(src_rorp)), 3) + Log("%s: %s" % (dest_rorp.index, Hardlink.get_inode_key(dest_rorp)), 3) + return 0 + elif src_rorp.getnumlinks() > 1 and not Hardlink.islinked(src_rorp): + if rorp_eq: + Log("Hardlink compare error with first linked src_rorp and no dest_rorp sha1", 3) + Log("%s: %s" % (src_rorp.index, Hardlink.get_inode_key(src_rorp)), 3) + Log("%s: %s" % (dest_rorp.index, Hardlink.get_inode_key(dest_rorp)), 3) + return 0 + hash.compute_sha1(dest_rorp) + rorp_eq = Hardlink.rorp_eq(src_rorp, dest_rorp) + if src_rorp.getnumlinks() != 
dest_rorp.getnumlinks(): + if rorp_eq: + Log("Hardlink compare error with first linked src_rorp, with dest_rorp sha1, and with differing link counts", 3) + Log("%s: %s" % (src_rorp.index, Hardlink.get_inode_key(src_rorp)), 3) + Log("%s: %s" % (dest_rorp.index, Hardlink.get_inode_key(dest_rorp)), 3) + return 0 + elif not rorp_eq: + Log("Hardlink compare error with first linked src_rorp, with dest_rorp sha1, and with equal link counts", 3) + Log("%s: %s" % (src_rorp.index, Hardlink.get_inode_key(src_rorp)), 3) + Log("%s: %s" % (dest_rorp.index, Hardlink.get_inode_key(dest_rorp)), 3) + return 0 + elif src_rorp.getnumlinks() != dest_rorp.getnumlinks(): + if rorp_eq: + Log("Hardlink compare error with non-first linked src_rorp and with differing link counts", 3) + Log("%s: %s" % (src_rorp.index, Hardlink.get_inode_key(src_rorp)), 3) + Log("%s: %s" % (dest_rorp.index, Hardlink.get_inode_key(dest_rorp)), 3) + return 0 + elif not rorp_eq: + Log("Hardlink compare error with non-first linked src_rorp and with equal link counts", 3) + Log("%s: %s" % (src_rorp.index, Hardlink.get_inode_key(src_rorp)), 3) + Log("%s: %s" % (dest_rorp.index, Hardlink.get_inode_key(dest_rorp)), 3) + return 0 + Hardlink.del_rorp(src_rorp) + Hardlink.del_rorp(dest_rorp) + return 1 def equality_func(src_rorp, dest_rorp): """Combined eq func returns true if two files compare same""" @@ -307,10 +341,7 @@ if not src_rorp.equal_verbose(dest_rorp, compare_ownership=compare_ownership): return 0 - if compare_hardlinks and not Hardlink.rorp_eq(src_rorp, dest_rorp): - Log("Hardlink compare failure", 3) - Log("%s: %s" % (src_rorp.index, Hardlink.get_inode_key(src_rorp)), 3) - Log("%s: %s" % (dest_rorp.index, Hardlink.get_inode_key(dest_rorp)), 3) + if compare_hardlinks and not hardlink_rorp_eq(src_rorp, dest_rorp): return 0 if compare_eas and not eas_acls.ea_compare_rps(src_rorp, dest_rorp): Log( @@ -332,10 +363,8 @@ reset_hardlink_dicts() src_iter, dest_iter = get_selection_functions() for src_rorp, dest_rorp in rorpiter.Collate2Iters(src_iter, dest_iter): - preprocess(src_rorp, dest_rorp) if not equality_func(src_rorp, dest_rorp): return 0 - postprocess(src_rorp, dest_rorp) return 1 def rbdir_equal(src_rorp, dest_rorp): diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/testing/hardlinktest.py new/rdiff-backup-1.9.1b0/testing/hardlinktest.py --- old/rdiff-backup-1.9.0b0/testing/hardlinktest.py 2020-01-31 21:51:30.000000000 +0100 +++ new/rdiff-backup-1.9.1b0/testing/hardlinktest.py 2020-02-23 09:20:37.000000000 +0100 @@ -4,7 +4,7 @@ from commontest import abs_test_dir, abs_output_dir, old_test_dir, re_init_rpath_dir, \ CompareRecursive, BackupRestoreSeries, InternalBackup, InternalRestore, \ MakeOutputDir, reset_hardlink_dicts -from rdiff_backup import Globals, Hardlink, selection, rpath +from rdiff_backup import Globals, Hardlink, selection, rpath, restore, metadata class HardlinkTest(unittest.TestCase): @@ -21,6 +21,8 @@ hlinks_rp1copy = rpath.RPath(Globals.local_connection, hlinks_dir1copy) hlinks_rp2 = rpath.RPath(Globals.local_connection, hlinks_dir2) hlinks_rp3 = rpath.RPath(Globals.local_connection, hlinks_dir3) + hello_str = "Hello, world!" 
+ hello_str_hash = "943a702d06f34599aee1f8da8ef9f7296031d699" def testEquality(self): """Test rorp_eq function in conjunction with CompareRecursive""" @@ -156,6 +158,212 @@ assert hlout2.getinode() == hlout3.getinode() assert hlout1.getinode() != hlout2.getinode() + def extract_metadata(self, metadata_rp): + """Return lists of hashes and hardlink counts in the metadata_rp""" + hashes = [] + link_counts = [] + comp = metadata_rp.isinccompressed() + extractor = metadata.RorpExtractor(metadata_rp.open("r", comp)) + for rorp in extractor.iterate(): + link_counts.append(rorp.getnumlinks()) + if rorp.has_sha1(): + hashes.append(rorp.get_sha1()) + else: + hashes.append(None) + return (hashes, link_counts) + + def test_adding_hardlinks(self): + """Test the addition of a new hardlinked file. + + This test is directed at some previously buggy code that 1) failed to + keep the correct number of hardlinks in the mirror metadata, and 2) + failed to restore hardlinked files so that they are linked the same as + when they were backed up. One of the conditions that triggered these + bugs included adding a new hardlinked file somewhere in the middle of a + list of previously linked files. The bug was originally reported here: + https://savannah.nongnu.org/bugs/?26848 + """ + + # Setup initial backup + MakeOutputDir() + output = rpath.RPath(Globals.local_connection, abs_output_dir) + hlsrc_dir = os.path.join(abs_test_dir, b"src_hardlink") + + hlsrc = rpath.RPath(Globals.local_connection, hlsrc_dir) + if hlsrc.lstat(): + hlsrc.delete() + hlsrc.mkdir() + hlsrc_sub = hlsrc.append("subdir") + hlsrc_sub.mkdir() + hl_file1 = hlsrc_sub.append("hardlink1") + hl_file1.write_string(self.hello_str) + hl_file3 = hlsrc_sub.append("hardlink3") + hl_file3.hardlink(hl_file1.path) + + InternalBackup(1, 1, hlsrc.path, output.path, 10000) + out_subdir = output.append("subdir") + assert out_subdir.append("hardlink1").getinode() == \ + out_subdir.append("hardlink3").getinode() + + # validate that hashes and link counts are correctly saved in metadata + meta_prefix = rpath.RPath( + Globals.local_connection, + os.path.join(abs_output_dir, b"rdiff-backup-data", + b"mirror_metadata")) + incs = restore.get_inclist(meta_prefix) + assert len(incs) == 1 + metadata_rp = incs[0] + hashes, link_counts = self.extract_metadata(metadata_rp) + # hashes for ., ./subdir, ./subdir/hardlink1, ./subdir/hardlink3 + expected_hashes = [None, None, self.hello_str_hash, None] + assert expected_hashes == hashes, (expected_hashes, hashes) + expected_link_counts = [1, 1, 2, 2] + assert expected_link_counts == link_counts, (expected_link_counts, link_counts) + + # Create a new hardlinked file between "hardlink1" and "hardlink3" and perform another backup + hl_file2 = hlsrc_sub.append("hardlink2") + hl_file2.hardlink(hl_file1.path) + + InternalBackup(1, 1, hlsrc.path, output.path, 20000) + assert out_subdir.append("hardlink1").getinode() == \ + out_subdir.append("hardlink2").getinode() + assert out_subdir.append("hardlink1").getinode() == \ + out_subdir.append("hardlink3").getinode() + + # validate that hashes and link counts are correctly saved in metadata + incs = restore.get_inclist(meta_prefix) + assert len(incs) == 2 + if incs[0].getinctype() == b'snapshot': + metadata_rp = incs[0] + else: + metadata_rp = incs[1] + hashes, link_counts = self.extract_metadata(metadata_rp) + # hashes for ., ./subdir/, ./subdir/hardlink1, ./subdir/hardlink2, ./subdir/hardlink3 + expected_hashes = [None, None, self.hello_str_hash, None, None] + assert expected_hashes == 
+        expected_link_counts = [1, 1, 3, 3, 3]
+        # The following assertion would fail as a result of bugs that are now fixed
+        assert expected_link_counts == link_counts, (expected_link_counts, link_counts)
+
+        # Now try restoring, still checking hard links.
+        sub_path = os.path.join(abs_output_dir, b"subdir")
+        restore_path = os.path.join(abs_test_dir, b"hl_restore")
+        restore_dir = rpath.RPath(Globals.local_connection, restore_path)
+        hlrestore_file1 = restore_dir.append("hardlink1")
+        hlrestore_file2 = restore_dir.append("hardlink2")
+        hlrestore_file3 = restore_dir.append("hardlink3")
+
+        if restore_dir.lstat():
+            restore_dir.delete()
+        InternalRestore(1, 1, sub_path, restore_path, 10000)
+        for rp in [hlrestore_file1, hlrestore_file3]:
+            rp.setdata()
+        assert hlrestore_file1.getinode() == hlrestore_file3.getinode()
+
+        if restore_dir.lstat():
+            restore_dir.delete()
+        InternalRestore(1, 1, sub_path, restore_path, 20000)
+        for rp in [hlrestore_file1, hlrestore_file2, hlrestore_file3]:
+            rp.setdata()
+        assert hlrestore_file1.getinode() == hlrestore_file2.getinode()
+        # The following assertion would fail as a result of bugs that are now fixed
+        assert hlrestore_file1.getinode() == hlrestore_file3.getinode()
+
+    def test_moving_hardlinks(self):
+        """Test moving the first hardlinked file in a series to a later place in the series.
+
+        This test is directed at some previously buggy code that failed to
+        always keep a sha1 hash in the metadata for the first (and only the
+        first) file among a series of linked files. The condition that
+        triggered this bug involved removing the first file from a list of
+        linked files, while also adding a new file at some later position in
+        the list. The total number of hardlinked files in the list remains
+        unchanged. None of the files had a sha1 hash saved in its metadata.
+        The bug was originally reported here:
+        https://savannah.nongnu.org/bugs/?26848
+        """
+
+        # Setup initial backup
+        MakeOutputDir()
+        output = rpath.RPath(Globals.local_connection, abs_output_dir)
+        hlsrc_dir = os.path.join(abs_test_dir, b"src_hardlink")
+
+        hlsrc = rpath.RPath(Globals.local_connection, hlsrc_dir)
+        if hlsrc.lstat():
+            hlsrc.delete()
+        hlsrc.mkdir()
+        hlsrc_sub = hlsrc.append("subdir")
+        hlsrc_sub.mkdir()
+        hl_file1 = hlsrc_sub.append("hardlink1")
+        hl_file1.write_string(self.hello_str)
+        hl_file2 = hlsrc_sub.append("hardlink2")
+        hl_file2.hardlink(hl_file1.path)
+
+        InternalBackup(1, 1, hlsrc.path, output.path, 10000)
+        out_subdir = output.append("subdir")
+        assert out_subdir.append("hardlink1").getinode() == \
+            out_subdir.append("hardlink2").getinode()
+
+        # validate that hashes and link counts are correctly saved in metadata
+        meta_prefix = rpath.RPath(
+            Globals.local_connection,
+            os.path.join(abs_output_dir, b"rdiff-backup-data",
+                         b"mirror_metadata"))
+        incs = restore.get_inclist(meta_prefix)
+        assert len(incs) == 1
+        metadata_rp = incs[0]
+        hashes, link_counts = self.extract_metadata(metadata_rp)
+        # hashes for ., ./subdir, ./subdir/hardlink1, ./subdir/hardlink2
+        expected_hashes = [None, None, self.hello_str_hash, None]
+        assert expected_hashes == hashes, (expected_hashes, hashes)
+        expected_link_counts = [1, 1, 2, 2]
+        assert expected_link_counts == link_counts, (expected_link_counts, link_counts)
+
+        # Move the first hardlinked file to be last
+        hl_file3 = hlsrc_sub.append("hardlink3")
+        rpath.rename(hl_file1, hl_file3)
+
+        InternalBackup(1, 1, hlsrc.path, output.path, 20000)
+        assert out_subdir.append("hardlink2").getinode() == \
+            out_subdir.append("hardlink3").getinode()
+
+        # validate that hashes and link counts are correctly saved in metadata
+        incs = restore.get_inclist(meta_prefix)
+        assert len(incs) == 2
+        if incs[0].getinctype() == b'snapshot':
+            metadata_rp = incs[0]
+        else:
+            metadata_rp = incs[1]
+        hashes, link_counts = self.extract_metadata(metadata_rp)
+        # hashes for ., ./subdir/, ./subdir/hardlink2, ./subdir/hardlink3
+        expected_hashes = [None, None, self.hello_str_hash, None]
+        # The following assertion would fail as a result of bugs that are now fixed
+        assert expected_hashes == hashes, (expected_hashes, hashes)
+        expected_link_counts = [1, 1, 2, 2]
+        assert expected_link_counts == link_counts, (expected_link_counts, link_counts)
+
+        # Now try restoring, still checking hard links.
+        sub_path = os.path.join(abs_output_dir, b"subdir")
+        restore_path = os.path.join(abs_test_dir, b"hl_restore")
+        restore_dir = rpath.RPath(Globals.local_connection, restore_path)
+        hlrestore_file1 = restore_dir.append("hardlink1")
+        hlrestore_file2 = restore_dir.append("hardlink2")
+        hlrestore_file3 = restore_dir.append("hardlink3")
+
+        if restore_dir.lstat():
+            restore_dir.delete()
+        InternalRestore(1, 1, sub_path, restore_path, 10000)
+        for rp in [hlrestore_file1, hlrestore_file2]:
+            rp.setdata()
+        assert hlrestore_file1.getinode() == hlrestore_file2.getinode()
+
+        if restore_dir.lstat():
+            restore_dir.delete()
+        InternalRestore(1, 1, sub_path, restore_path, 20000)
+        for rp in [hlrestore_file2, hlrestore_file3]:
+            rp.setdata()
+        assert hlrestore_file2.getinode() == hlrestore_file3.getinode()
+
 
 if __name__ == "__main__":
     unittest.main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/tools/get_changelog_since.sh new/rdiff-backup-1.9.1b0/tools/get_changelog_since.sh
--- old/rdiff-backup-1.9.0b0/tools/get_changelog_since.sh	1970-01-01 01:00:00.000000000 +0100
+++ new/rdiff-backup-1.9.1b0/tools/get_changelog_since.sh	2020-02-23 09:20:37.000000000 +0100
@@ -0,0 +1,25 @@
+#!/bin/bash
+# get a list of changes and authors since a given revision tag in Git
+
+if [[ -z "${1}" ]]
+then
+    echo "Usage: $0 <Git-Tag>" >&2
+    echo "       outputs changes marked with 'XYZ:' and a unique list of authors since the tagged release" >&2
+    exit 1
+fi
+RELTAG="${1}"
+
+echo "(make sure the version is the next correct one)"
+echo
+echo "New in v$($(dirname $0)/../setup.py --version) ($(date -I))"
+echo "----------------------------"
+
+echo -e "\n## Changes\n"
+git log ${RELTAG}.. |
+    sed -n '/^ *[A-Z][A-Z][A-Z]: / s/^ */* /p'
+
+echo -e "\n## Authors\n"
+git log ${RELTAG}.. |
+    awk -F': *| *<' '$1 == "Author" { print "* " $2 }' | sort -u
+
+echo
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rdiff-backup-1.9.0b0/tox.ini new/rdiff-backup-1.9.1b0/tox.ini
--- old/rdiff-backup-1.9.0b0/tox.ini	2020-01-31 21:51:30.000000000 +0100
+++ new/rdiff-backup-1.9.1b0/tox.ini	2020-02-23 09:20:37.000000000 +0100
@@ -58,7 +58,7 @@
 deps =
     flake8
 commands =
-    flake8 .
+    flake8 setup.py src testing tools
 
 [flake8]
 ignore =
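
As background for the tests above: hard-linked names share a single inode, so os.stat() reports the same st_ino for every name and st_nlink counts the names, and the hello_str_hash constant is simply the SHA-1 digest of "Hello, world!". The following is a minimal standalone sketch of both facts, using only the Python standard library; the file names and the temporary directory are illustrative and not part of rdiff-backup:

    import hashlib
    import os
    import tempfile

    hello_str = "Hello, world!"
    # SHA-1 the tests expect in the mirror metadata for the first
    # member of a hard-linked group:
    assert hashlib.sha1(hello_str.encode("utf-8")).hexdigest() == \
        "943a702d06f34599aee1f8da8ef9f7296031d699"

    with tempfile.TemporaryDirectory() as tmp:
        first = os.path.join(tmp, "hardlink1")
        second = os.path.join(tmp, "hardlink2")
        with open(first, "w") as f:
            f.write(hello_str)
        os.link(first, second)  # comparable to hl_file2.hardlink(hl_file1.path) above
        st1, st2 = os.stat(first), os.stat(second)
        assert st1.st_ino == st2.st_ino            # both names resolve to one inode
        assert st1.st_nlink == st2.st_nlink == 2   # the link count the metadata must record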