openSUSE Commits
March 2018
Hello community,
here is the log from the commit of package google-merriweather-fonts for openSUSE:Factory checked in at 2018-03-29 11:58:10
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/google-merriweather-fonts (Old)
and /work/SRC/openSUSE:Factory/.google-merriweather-fonts.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "google-merriweather-fonts"
Thu Mar 29 11:58:10 2018 rev:8 rq:592171 version:2.001
Changes:
--------
--- /work/SRC/openSUSE:Factory/google-merriweather-fonts/google-merriweather-fonts.changes 2012-09-15 11:52:43.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.google-merriweather-fonts.new/google-merriweather-fonts.changes 2018-03-29 11:59:42.389504562 +0200
@@ -1,0 +2,14 @@
+Wed Mar 28 16:58:23 UTC 2018 - thomas.schraitle(a)suse.com
+
+Updated to 2.001 from GitHub
+
+From FONTLOG.txt
+- 2 Feb 2017 (Marc Foley) Merriweather v2.001
+ * Fixed interpolation issues in Italic
+
+- 17 Jan 2017 (Marc Foley) Merriweather v2.000
+ * Added CrystalType Vietnamese extension
+ * Added Alexieva Cyrillic extension
+ * Updated font metadata
+
+-------------------------------------------------------------------
Old:
----
merriweather-1.3.tar.bz2
New:
----
merriweather-2.001.tar.bz2
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ google-merriweather-fonts.spec ++++++
--- /var/tmp/diff_new_pack.EVqoVL/_old 2018-03-29 11:59:43.097479022 +0200
+++ /var/tmp/diff_new_pack.EVqoVL/_new 2018-03-29 11:59:43.101478878 +0200
@@ -1,7 +1,7 @@
#
# spec file for package google-merriweather-fonts
#
-# Copyright (c) 2012 SUSE LINUX Products GmbH, Nuernberg, Germany.
+# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -19,7 +19,7 @@
%define fontname merriweather
Name: google-merriweather-fonts
-Version: 1.3
+Version: 2.001
Release: 0
Summary: Readable Text Serif Font for Screen
License: OFL-1.1
@@ -66,9 +66,8 @@
%files
%defattr(-, root, root)
-%doc METADATA FONTLOG.txt
+%doc AUTHORS.txt FONTLOG.txt CONTRIBUTORS.txt README.md OFL.txt
%dir %{_ttfontsdir}
%{_ttfontsdir}/*.ttf
-%doc OFL.txt
%changelog
++++++ merriweather-1.3.tar.bz2 -> merriweather-2.001.tar.bz2 ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/merriweather-1.3/FONTLOG.txt new/merriweather-1.3/FONTLOG.txt
--- old/merriweather-1.3/FONTLOG.txt 2012-02-14 09:48:19.000000000 +0100
+++ new/merriweather-1.3/FONTLOG.txt 1970-01-01 01:00:00.000000000 +0100
@@ -1,78 +0,0 @@
-FONTLOG for the Merriweather font
-
-This file provides detailed information on the Merriweather
-font Software.
-
-This information should be distributed along with the
-Merriweather fonts and any derivative works.
-
-Basic Font Information
-
-Merriweather was designed to be a text face that
-is pleasant to read on screens.
-
-Merriweather is evolving and will be updated.
-As of now there are 4 styles: Regular, Light, Bold,
-and Black. There will also be Italic in each
-of these weights. And fairly soon after that there
-will also be a Sans Serif version which mirrors the
-weights and styles of the Serif design.
-
-Designed by Eben Sorkin, Merriweather features a
-very large x height, slightly condensed letterforms,
-a mild diagonal stress, sturdy serifs and open
-forms.
-
-Merriweather is a work in progress and
-will be improved regularly. This means you can request
-improvements and even fund specific features if
-if they are outside of the current scope of work.
-
-For more information and to stay updated see Eben
-Sorkin's blog and Flickr and follow the Merriweather
-Twitter microblog:
-
-http://ebensorkin.wordpress.com
-
-http://www.flickr.com/photos/55990250@N02/
-
-http://twitter.com/MerriweatherFnt
-
-Merriweather is a Unicode typeface family that supports
-languages that use the Latin script and its variants, and
-could be expanded to support other scripts.
-
-More specifically, this release supports the following Unicode
-ranges: Latin-1
-
-To contribute to the project contact Eben Sorkin at
-sorkineben(a)gmail.com
-
-ChangeLog
-
-29 July 2011 (Eben Sorkin) Merriweather v1.3
-
-- Altered and improved 90% of the glyphs. Some in minor ways
- and other in fairly significant ways. For example:
- The lc a was a small change.
- The changes to the numbers corrected weight consistency issues
- were more significant.
-- Added glyphs to support texts originating from Windows
- ANSI and Mac Roman encodings.
-- Adjusted diacritics further
-- This may be the last update to shapes before work
- begins on the Sans. This means that VTT hinting
- should be next.
-
-22 July 2011 (Eben Sorkin) Merriweather v1.287
-- Adusted glyph diacritics to make them work with MS Word.
-- Mastered Font from Fontlab to TTF
-
-If you make modifications be sure to add your name (N),
-email (E), web-address (if you have one) (W) and description (D).
-This list is in alphabetical order.
-
-N: Eben Sorkin
-E: sorkineben(a)gmail.com
-W: www.sorkintype.com
-D: Designer and Mastering
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/merriweather-1.3/METADATA new/merriweather-1.3/METADATA
--- old/merriweather-1.3/METADATA 2012-02-14 09:48:19.000000000 +0100
+++ new/merriweather-1.3/METADATA 1970-01-01 01:00:00.000000000 +0100
@@ -1,48 +0,0 @@
-license: OFL
-payment: DESIGNER
-category: serif
-subsets: menu,latin
-
-family: Merriweather
-
-designer: Eben Sorkin
-
-profiledescriptionlicense: Creative Commons Attribution-ShareAlike 3.0
-profiledescriptionlicenseurl: http://creativecommons.org/licenses/by-sa/3.0/
-approved: true
-description: <p>Merriweather was designed to be \
- a text face that is pleasant to read on screens.</p> \
- <p>Merriweather is evolving and will be updated. \
- As of now there are 4 styles: Regular, Light, Bold, \
- and Black. There will also be Italic in each \
- of these weights. And fairly soon after that there \
- will also be a sans serif version which mirrors the \
- weights and styles of the Serif design.</p> \
- <p>Designed by Eben Sorkin, Merriweather features a \
- very large x height, slightly condensed letterforms, \
- a mild diagonal stress, sturdy serifs and open \
- forms.</p> \
- <p>Because Merriweather is a work in progress and \
- will be \
- improved regularly. This means you can request \
- improvements and even fund specific features if \
- if they are outside of the current scope of work. \
- For more information and to stay updated see \
- <a href="http://ebensorkin.wordpress.com/">Eben \
- Sorkin's blog</a> and \
- <a href="http://www.flickr.com/photos/55990250@N02/">Flickr \
- stream</a> and the \
- <a href="http://twitter.com/#!/MerriweatherFnt">Merriweather \
- Twitter</a> microblog.</p>
-
-font.Merriweather-Light.ttf.weight: 300
-font.Merriweather-Regular.ttf.weight: 400
-font.Merriweather-Bold.ttf.weight: 700
-font.Merriweather-Black.ttf.weight: 900
-
-# cd ~/src/font-optimizer;
-# ./subset.pl -v --licensesubst="SIL OFL 1.1" \
-# --exclude="c2sc,smcp,ordn,sups,sinf,subs,dlig,frac" \
-# --chars="Merriweather" \
-# ~/googlefontdirectory/merriweather/Merriweather-Regular.{ttf,menu};
-# cd ~/googlefontdirectory/merriweather/;
\ No newline at end of file
Binary files old/merriweather-1.3/Merriweather-Black.ttf and new/merriweather-1.3/Merriweather-Black.ttf differ
Binary files old/merriweather-1.3/Merriweather-Bold.ttf and new/merriweather-1.3/Merriweather-Bold.ttf differ
Binary files old/merriweather-1.3/Merriweather-Light.ttf and new/merriweather-1.3/Merriweather-Light.ttf differ
Binary files old/merriweather-1.3/Merriweather-Regular.ttf and new/merriweather-1.3/Merriweather-Regular.ttf differ
Hello community,
here is the log from the commit of package lammps for openSUSE:Factory checked in at 2018-03-29 11:58:02
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/lammps (Old)
and /work/SRC/openSUSE:Factory/.lammps.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "lammps"
Thu Mar 29 11:58:02 2018 rev:9 rq:592170 version:20180316
Changes:
--------
--- /work/SRC/openSUSE:Factory/lammps/lammps.changes 2018-03-09 10:47:23.856945675 +0100
+++ /work/SRC/openSUSE:Factory/.lammps.new/lammps.changes 2018-03-29 11:59:39.393612639 +0200
@@ -1,0 +2,8 @@
+Wed Mar 28 16:57:22 UTC 2018 - junghans(a)votca.org
+
+- bump version to 20180316 (stable)
+- many little bugfxes: http://lammps.sandia.gov/bug.html
+ * A potentially harmful bug was found and fixed in the pair_style reax/c command
+- Added 858.patch to fix return value on ppc64
+
+-------------------------------------------------------------------
Old:
----
lammps-patch_8Mar2018.tar.gz
New:
----
858.patch
lammps-stable_16Mar2018.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ lammps.spec ++++++
--- /var/tmp/diff_new_pack.yclxLh/_old 2018-03-29 11:59:42.145513364 +0200
+++ /var/tmp/diff_new_pack.yclxLh/_new 2018-03-29 11:59:42.149513220 +0200
@@ -17,14 +17,16 @@
#
Name: lammps
-Version: 20180308
-%define uversion patch_8Mar2018
+Version: 20180316
+%define uversion stable_16Mar2018
Release: 0
Summary: Molecular Dynamics Simulator
License: GPL-2.0 and GPL-3.0+
Group: Productivity/Scientific/Chemistry
Url: http://lammps.sandia.gov
Source0: https://github.com/lammps/lammps/archive/%{uversion}.tar.gz#/%{name}-%{uver…
+# PATCH-FIX-UPSTREAM 858.patch, https://github.com/lammps/lammps/pull/858 - fix return value on ppc64
+Patch0: https://github.com/lammps/lammps/pull/858.patch
BuildRequires: fftw3-devel
BuildRequires: gcc-c++
BuildRequires: gcc-fortran
@@ -116,6 +118,7 @@
%prep
%setup -q -n %{name}-%{uversion}
+%patch0 -p1
%build
source %{_libdir}/mpi/gcc/openmpi/bin/mpivars.sh
++++++ 858.patch ++++++
From c664f46b31650f362cc132bcc17a3f9de923768b Mon Sep 17 00:00:00 2001
From: Christoph Junghans <junghans(a)votca.org>
Date: Wed, 28 Mar 2018 11:28:28 -0600
Subject: [PATCH] MathSpecial::fm_exp: fix return value on ppc64
---
src/math_special.cpp | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/src/math_special.cpp b/src/math_special.cpp
index d778e1e1ad..4b9197f0fc 100644
--- a/src/math_special.cpp
+++ b/src/math_special.cpp
@@ -537,10 +537,8 @@ double MathSpecial::exp2_x86(double x)
double MathSpecial::fm_exp(double x)
{
-#if defined(__BYTE_ORDER__)
-#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+#if defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
return exp2_x86(FM_DOUBLE_LOG2OFE * x);
-#endif
#else
return ::exp(x);
#endif
Hello community,
here is the log from the commit of package crmsh for openSUSE:Factory checked in at 2018-03-29 11:57:58
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/crmsh (Old)
and /work/SRC/openSUSE:Factory/.crmsh.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "crmsh"
Thu Mar 29 11:57:58 2018 rev:143 rq:592143 version:4.0.0+git.1522278003.cd7ae188
Changes:
--------
--- /work/SRC/openSUSE:Factory/crmsh/crmsh.changes 2018-02-27 17:00:16.599015133 +0100
+++ /work/SRC/openSUSE:Factory/.crmsh.new/crmsh.changes 2018-03-29 11:58:06.188975738 +0200
@@ -1,0 +2,6 @@
+Wed Mar 28 23:00:23 UTC 2018 - kgronlund(a)suse.com
+
+- Update to version 4.0.0+git.1522278003.cd7ae188:
+ * high: ui_resource: Undeprecate refresh and remove reprobe (bsc#1084736)
+
+-------------------------------------------------------------------
Old:
----
crmsh-4.0.0+git.1519721966.9abd841c.tar.bz2
New:
----
crmsh-4.0.0+git.1522278003.cd7ae188.tar.bz2
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ crmsh.spec ++++++
--- /var/tmp/diff_new_pack.B5n5f0/_old 2018-03-29 11:58:08.912877430 +0200
+++ /var/tmp/diff_new_pack.B5n5f0/_new 2018-03-29 11:58:08.916877286 +0200
@@ -36,7 +36,7 @@
Summary: High Availability cluster command-line interface
License: GPL-2.0-or-later
Group: %{pkg_group}
-Version: 4.0.0+git.1519721966.9abd841c
+Version: 4.0.0+git.1522278003.cd7ae188
Release: 0
Url: http://crmsh.github.io
Source0: %{name}-%{version}.tar.bz2
++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.B5n5f0/_old 2018-03-29 11:58:08.956875842 +0200
+++ /var/tmp/diff_new_pack.B5n5f0/_new 2018-03-29 11:58:08.956875842 +0200
@@ -1,4 +1,4 @@
<servicedata>
<service name="tar_scm">
<param name="url">git://github.com/ClusterLabs/crmsh.git</param>
- <param name="changesrevision">9abd841ca08a609923c8406584a112900bb27262</param></service></servicedata>
\ No newline at end of file
+ <param name="changesrevision">7686d0078edae58e16cf29ffdf38210e9cac37f3</param></service></servicedata>
\ No newline at end of file
++++++ crmsh-4.0.0+git.1519721966.9abd841c.tar.bz2 -> crmsh-4.0.0+git.1522278003.cd7ae188.tar.bz2 ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.0.0+git.1519721966.9abd841c/.vscode/settings.json new/crmsh-4.0.0+git.1522278003.cd7ae188/.vscode/settings.json
--- old/crmsh-4.0.0+git.1519721966.9abd841c/.vscode/settings.json 1970-01-01 01:00:00.000000000 +0100
+++ new/crmsh-4.0.0+git.1522278003.cd7ae188/.vscode/settings.json 2018-03-29 01:00:03.000000000 +0200
@@ -0,0 +1,6 @@
+{
+ "python.linting.pylintEnabled": true,
+ "python.linting.flake8Enabled": false,
+ "python.linting.enabled": true,
+ "python.pythonPath": "/usr/bin/python3"
+}
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.0.0+git.1519721966.9abd841c/AUTHORS new/crmsh-4.0.0+git.1522278003.cd7ae188/AUTHORS
--- old/crmsh-4.0.0+git.1519721966.9abd841c/AUTHORS 2018-02-27 09:59:26.000000000 +0100
+++ new/crmsh-4.0.0+git.1522278003.cd7ae188/AUTHORS 2018-03-29 01:00:03.000000000 +0200
@@ -5,10 +5,13 @@
Adam Spiers <aspiers[at]suse[dot]com>
Andrei Maruha <Andrei_Maruha[at]epam[dot]com>
Andrew Beekhof <andrew[at]beekhof[dot]net>
+ Bin Liu <bliu[at]suse[dot]com>
Borislav Borisov <borislav[dot]v[dot]borisov[at]gmail[dot]com>
Christian Seiler <christian[at]iwakd[dot]de>
Daniel Hoffend <dh[at]dotlan[dot]net>
Dejan Muhamedagic <dejan[at]suse[dot]de>
+ dougcahill <doug[dot]cahill[at]actifio[dot]com>
+ Eric Ren <zren[at]suse[dot]com>
Federica Teodori <federica[dot]teodori[at]googlemail[dot]com>
Florian Haas <florian[dot]haas[at]linbit[dot]com>
Goldwyn Rodrigues <rgoldwyn[at]novell[dot]com>
@@ -29,6 +32,7 @@
Nate Clark <nate[at]neworld[dot]us>
nozawat <nozawat[at]gmail[dot]com>
Pedro Salgado <steenzout[at]saucelabs[dot]com>
+ Peter Schwindt <peter[at]schwindt-net[dot]de>
Richard B Winters <rik[at]mmogp[dot]com>
seabres <rainer[dot]brestan[at]gmx[dot]net>
Tim Serong <tserong[at]suse[dot]com>
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.0.0+git.1519721966.9abd841c/crmsh/bootstrap.py new/crmsh-4.0.0+git.1522278003.cd7ae188/crmsh/bootstrap.py
--- old/crmsh-4.0.0+git.1519721966.9abd841c/crmsh/bootstrap.py 2018-02-27 09:59:26.000000000 +0100
+++ new/crmsh-4.0.0+git.1522278003.cd7ae188/crmsh/bootstrap.py 2018-03-29 01:00:03.000000000 +0200
@@ -1799,17 +1799,18 @@
if nodelist is None:
for v in corosync.get_values("quorum.expected_votes"):
expected_votes = v
- #for node >= 2, expected_votes = nodecount + device_votes
- #asume nodecount is N, for ffsplit, qdevice only has one vote
- #which means that device_votes is 1, ie:expected_votes = N + 1;
- #while for lms, qdevice has N - 1 votes, ie: expected_votes = N + (N - 1)
- #and update quorum.device.net.algorithm based on device_votes
+
+ # For node >= 2, expected_votes = nodecount + device_votes
+ # Assume nodecount is N, for ffsplit, qdevice only has one vote
+ # which means that device_votes is 1, ie:expected_votes = N + 1;
+ # while for lms, qdevice has N - 1 votes, ie: expected_votes = N + (N - 1)
+ # and update quorum.device.net.algorithm based on device_votes
if corosync.get_value("quorum.device.net.algorithm") == "lms":
device_votes = int((expected_votes - 1) / 2)
nodecount = expected_votes - device_votes
- #as nodecount will increase 1, and device_votes is nodecount - 1
- #device_votes also increase 1
+ # as nodecount will increase 1, and device_votes is nodecount - 1
+ # device_votes also increase 1
device_votes += 1
elif corosync.get_value("quorum.device.net.algorithm") == "ffsplit":
device_votes = 1
@@ -1824,10 +1825,10 @@
else:
nodecount = len(nodelist)
expected_votes = 0
- #for node >= 2, expected_votes = nodecount + device_votes
- #asume nodecount is N, for ffsplit, qdevice only has one vote
- #which means that device_votes is 1, ie:expected_votes = N + 1;
- #while for lms, qdevice has N - 1 votes, ie: expected_votes = N + (N - 1)
+ # For node >= 2, expected_votes = nodecount + device_votes
+ # Assume nodecount is N, for ffsplit, qdevice only has one vote
+ # which means that device_votes is 1, ie:expected_votes = N + 1;
+ # while for lms, qdevice has N - 1 votes, ie: expected_votes = N + (N - 1)
if corosync.get_value("quorum.device.net.algorithm") == "ffsplit":
device_votes = 1
if corosync.get_value("quorum.device.net.algorithm") == "lms":
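As a quick aside (not part of the quoted patch), the vote arithmetic described in the bootstrap.py comments above can be checked with a few lines of Python; the helper below is illustrative only and simply restates the rules from those comments:

# Illustrative only: the qdevice vote rules from the comments above.
# ffsplit: the qdevice contributes a single vote; lms: it contributes N - 1 votes.
def expected_votes(nodecount, algorithm):
    if algorithm == "ffsplit":
        device_votes = 1
    elif algorithm == "lms":
        device_votes = nodecount - 1
    else:
        device_votes = 0
    return nodecount + device_votes

print(expected_votes(3, "ffsplit"))  # 4, i.e. N + 1
print(expected_votes(3, "lms"))      # 5, i.e. N + (N - 1)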
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.0.0+git.1519721966.9abd841c/crmsh/cache.py new/crmsh-4.0.0+git.1522278003.cd7ae188/crmsh/cache.py
--- old/crmsh-4.0.0+git.1519721966.9abd841c/crmsh/cache.py 2018-02-27 09:59:26.000000000 +0100
+++ new/crmsh-4.0.0+git.1522278003.cd7ae188/crmsh/cache.py 2018-03-29 01:00:03.000000000 +0200
@@ -1,17 +1,20 @@
# Copyright (C) 2008-2011 Dejan Muhamedagic <dmuhamedagic(a)suse.de>
+# Copyright (C) 2018 Kristoffer Gronlund <kgronlund(a)suse.com>
# See COPYING for license information.
#
# Cache stuff. A naive implementation.
+# Used by ra.py to cache named lists of things.
import time
-_max_cache_age = 600 # seconds
+_max_cache_age = 600.0 # seconds
_stamp = time.time()
_lists = {}
def _clear():
+ "Clear the cache."
global _stamp
global _lists
_stamp = time.time()
@@ -19,20 +22,26 @@
def is_cached(name):
- if time.time() - _stamp > _max_cache_age:
- _clear()
- return name in _lists
+ "True if the argument exists in the cache."
+ return retrieve(name) is not None
def store(name, lst):
+ """
+ Stores the given list for the given name.
+ Returns the given list.
+ """
_lists[name] = lst
return lst
def retrieve(name):
- if is_cached(name):
- return _lists[name]
- return None
+ """
+ Returns the cached list for name, or None.
+ """
+ if time.time() - _stamp > _max_cache_age:
+ _clear()
+ return _lists.get(name)
# vim:ts=4:sw=4:et:
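As an aside, here is a minimal usage sketch of the refactored cache module shown in the hunk above (assuming crmsh is importable; the calling code is illustrative, only the store/retrieve/is_cached names come from the diff):

# Illustrative use of crmsh.cache as refactored above: store() caches a named
# list and returns it, retrieve() returns it until the cache expires, and
# is_cached() is now simply "retrieve(name) is not None".
from crmsh import cache

ra_classes = cache.store("ra_classes", ["ocf", "lsb", "stonith"])
assert cache.is_cached("ra_classes")
assert cache.retrieve("ra_classes") == ra_classes
# Once _max_cache_age seconds have passed since the last clear, the next
# retrieve() empties the cache and returns None.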
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.0.0+git.1519721966.9abd841c/crmsh/ui_resource.py new/crmsh-4.0.0+git.1522278003.cd7ae188/crmsh/ui_resource.py
--- old/crmsh-4.0.0+git.1519721966.9abd841c/crmsh/ui_resource.py 2018-02-27 09:59:26.000000000 +0100
+++ new/crmsh-4.0.0+git.1522278003.cd7ae188/crmsh/ui_resource.py 2018-03-29 01:00:03.000000000 +0200
@@ -1,5 +1,5 @@
# Copyright (C) 2008-2011 Dejan Muhamedagic <dmuhamedagic(a)suse.de>
-# Copyright (C) 2013 Kristoffer Gronlund <kgronlund(a)suse.com>
+# Copyright (C) 2013-2018 Kristoffer Gronlund <kgronlund(a)suse.com>
# See COPYING for license information.
from . import command
@@ -150,17 +150,6 @@
return True
-def cleanup_resource(rsc, node='', force=False):
- if not utils.is_name_sane(rsc) or not utils.is_name_sane(node):
- return False
- forces = " -f" if force else ""
- if not node:
- rc = utils.ext_cmd((RscMgmt.rsc_cleanup_all % (rsc)) + forces) == 0
- else:
- rc = utils.ext_cmd((RscMgmt.rsc_cleanup % (rsc, node)) + forces) == 0
- return rc
-
-
_attrcmds = compl.choice(['delete', 'set', 'show'])
_raoperations = compl.choice(constants.ra_operations)
@@ -171,27 +160,25 @@
'''
name = "resource"
- rsc_status_all = "crm_resource -L"
- rsc_status = "crm_resource --locate -r '%s'"
- rsc_showxml = "crm_resource -q -r '%s'"
- rsc_setrole = "crm_resource --meta -r '%s' -p target-role -v '%s'"
- rsc_migrate = "crm_resource --quiet --move -r '%s' %s"
- rsc_unmigrate = "crm_resource --quiet --clear -r '%s'"
- rsc_ban = "crm_resource --ban -r '%s' %s"
- rsc_cleanup = "crm_resource -C -r '%s' -H '%s'"
- rsc_cleanup_all = "crm_resource -C -r '%s'"
- rsc_maintenance = "crm_resource -r '%s' --meta -p maintenance -v '%s'"
+ rsc_status_all = "crm_resource --list"
+ rsc_status = "crm_resource --locate --resource '%s'"
+ rsc_showxml = "crm_resource --query-xml --resource '%s'"
+ rsc_setrole = "crm_resource --meta --resource '%s' --set-parameter target-role --parameter-value '%s'"
+ rsc_migrate = "crm_resource --quiet --move --resource '%s' %s"
+ rsc_unmigrate = "crm_resource --quiet --clear --resource '%s'"
+ rsc_ban = "crm_resource --ban --resource '%s' %s"
+ rsc_maintenance = "crm_resource --resource '%s' --meta --set-parameter maintenance --parameter-value '%s'"
rsc_param = {
- 'set': "crm_resource -r '%s' -p '%s' -v '%s'",
- 'delete': "crm_resource -r '%s' -d '%s'",
- 'show': "crm_resource -r '%s' -g '%s'",
- 'get': "crm_resource -r '%s' -g '%s'",
+ 'set': "crm_resource --resource '%s' --set-parameter '%s' --parameter-value '%s'",
+ 'delete': "crm_resource --resource '%s' --delete-parameter '%s'",
+ 'show': "crm_resource --resource '%s' --get-parameter '%s'",
+ 'get': "crm_resource --resource '%s' --get-parameter '%s'",
}
rsc_meta = {
- 'set': "crm_resource --meta -r '%s' -p '%s' -v '%s'",
- 'delete': "crm_resource --meta -r '%s' -d '%s'",
- 'show': "crm_resource --meta -r '%s' -g '%s'",
- 'get': "crm_resource --meta -r '%s' -g '%s'",
+ 'set': "crm_resource --meta --resource '%s' --set-parameter '%s' --parameter-value '%s'",
+ 'delete': "crm_resource --meta --resource '%s' --delete-parameter '%s'",
+ 'show': "crm_resource --meta --resource '%s' --get-parameter '%s'",
+ 'get': "crm_resource --meta --resource '%s' --get-parameter '%s'",
}
rsc_failcount = {
'set': "crm_attribute -t status -n 'fail-count-%s' -N '%s' -v '%s' -d 0",
@@ -200,10 +187,10 @@
'get': "crm_failcount -G -r %s -N %s",
}
rsc_utilization = {
- 'set': "crm_resource -z -r '%s' -p '%s' -v '%s'",
- 'delete': "crm_resource -z -r '%s' -d '%s'",
- 'show': "crm_resource -z -r '%s' -g '%s'",
- 'get': "crm_resource -z -r '%s' -g '%s'",
+ 'set': "crm_resource --utilization --resource '%s' --set-parameter '%s' --parameter-value '%s'",
+ 'delete': "crm_resource --utilization --resource '%s' --delete-parameter '%s'",
+ 'show': "crm_resource --utilization --resource '%s' --get-parameter '%s'",
+ 'get': "crm_resource --utilization --resource '%s' --get-parameter '%s'",
}
rsc_secret = {
'set': "cibsecret set '%s' '%s' '%s'",
@@ -214,10 +201,31 @@
'get': "cibsecret get '%s' '%s'",
'check': "cibsecret check '%s' '%s'",
}
- rsc_refresh = "crm_resource -C"
- rsc_refresh_node = "crm_resource -C -H '%s'"
- rsc_reprobe = "crm_resource -C"
- rsc_reprobe_node = "crm_resource -C -H '%s'"
+
+ def _refresh_cleanup(self, action, rsc, node, force):
+ """
+ Implements the refresh and cleanup commands.
+ """
+ if rsc == "force":
+ rsc = None
+ force = True
+ if node == "force":
+ node = None
+ force = True
+ cmd = ["crm_resource", "--" + action]
+ if rsc:
+ if not utils.is_name_sane(rsc):
+ return False
+ cmd.append("--resource")
+ cmd.append(rsc)
+ if node:
+ if not utils.is_name_sane(node):
+ return False
+ cmd.append("--node")
+ cmd.append(node)
+ if force:
+ cmd.append("--force")
+ return utils.ext_cmd(" ".join(cmd)) == 0
def requires(self):
for program in ('crm_resource', 'crm_attribute'):
@@ -373,9 +381,9 @@
opts = ''
if node:
- opts = "--node='%s'" % node
+ opts = "--node '%s'" % node
if lifetime:
- opts = "%s --lifetime='%s'" % (opts, lifetime)
+ opts = "%s --lifetime '%s'" % (opts, lifetime)
if force or config.core.force:
opts = "%s --force" % opts
rc = utils.ext_cmd(self.rsc_migrate % (rsc, opts))
@@ -403,9 +411,9 @@
context.fatal_error("Not our node: " + node)
opts = ''
if node:
- opts = "--node='%s'" % node
+ opts = "--node '%s'" % node
if lifetime:
- opts = "%s --lifetime='%s'" % (opts, lifetime)
+ opts = "%s --lifetime '%s'" % (opts, lifetime)
if force:
opts = "%s --force" % opts
rc = utils.ext_cmd(self.rsc_ban % (rsc, opts))
@@ -432,14 +440,9 @@
@command.skill_level('administrator')
@command.wait
@command.completers(compl.resources, compl.nodes)
- def do_cleanup(self, context, resource, *args):
- "usage: cleanup <rsc> [<node>] [force]"
- # Cleanup a resource on a node. Omit node to cleanup on
- # all live nodes.
- argl = list(args)
- force = "force" in utils.fetch_opts(argl, ["force"]) or config.core.force
- node = argl[0] if len(argl) > 0 else ''
- return cleanup_resource(resource, node, force=force)
+ def do_cleanup(self, context, rsc=None, node=None, force=False):
+ "usage: cleanup [<rsc>] [<node>] [force]"
+ return self._refresh_cleanup("cleanup", rsc, node, force)
@command.wait
@command.completers(compl.resources, compl.nodes)
@@ -515,26 +518,11 @@
return ui_utils.manage_attr(context.get_command_name(), self.rsc_utilization,
rsc, cmd, attr, value)
+ @command.alias('reprobe')
@command.completers(compl.nodes)
- def do_refresh(self, context, *args):
- 'usage: refresh [<node>]'
- if len(args) == 1:
- if not utils.is_name_sane(args[0]):
- return False
- return utils.ext_cmd(self.rsc_refresh_node % args[0]) == 0
- else:
- return utils.ext_cmd(self.rsc_refresh) == 0
-
- @command.wait
- @command.completers(compl.nodes)
- def do_reprobe(self, context, *args):
- 'usage: reprobe [<node>]'
- if len(args) == 1:
- if not utils.is_name_sane(args[0]):
- return False
- return utils.ext_cmd(self.rsc_reprobe_node % args[0]) == 0
- else:
- return utils.ext_cmd(self.rsc_reprobe) == 0
+ def do_refresh(self, context, rsc=None, node=None, force=False):
+ 'usage: refresh [<rsc>] [<node>] [force]'
+ return self._refresh_cleanup("refresh", rsc, node, force)
@command.wait
@command.completers(compl.resources, compl.choice(['on', 'off', 'true', 'false']))
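Both the cleanup and refresh commands now funnel through the new _refresh_cleanup helper above; the standalone sketch below (simplified, without the is_name_sane checks, illustrative only) shows how the positional arguments map onto a crm_resource command line:

# Simplified mirror of _refresh_cleanup from the hunk above. Passing the
# literal word "force" in the resource or node position only sets --force.
def build_crm_resource_cmd(action, rsc=None, node=None, force=False):
    if rsc == "force":
        rsc, force = None, True
    if node == "force":
        node, force = None, True
    cmd = ["crm_resource", "--" + action]
    if rsc:
        cmd += ["--resource", rsc]
    if node:
        cmd += ["--node", node]
    if force:
        cmd.append("--force")
    return " ".join(cmd)

print(build_crm_resource_cmd("cleanup", "p3", "node1", True))
# crm_resource --cleanup --resource p3 --node node1 --force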
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.0.0+git.1519721966.9abd841c/doc/crm.8.adoc new/crmsh-4.0.0+git.1522278003.cd7ae188/doc/crm.8.adoc
--- old/crmsh-4.0.0+git.1519721966.9abd841c/doc/crm.8.adoc 2018-02-27 09:59:26.000000000 +0100
+++ new/crmsh-4.0.0+git.1522278003.cd7ae188/doc/crm.8.adoc 2018-03-29 01:00:03.000000000 +0200
@@ -217,7 +217,7 @@
cleanup list promote start up
demote manage quit status utilization
end meta refresh stop
-exit migrate reprobe unmanage
+exit migrate unmanage
crm(live)configure# primitive fence-1 <TAB><TAB>
heartbeat: lsb: ocf: stonith:
@@ -1947,9 +1947,11 @@
[[cmdhelp_resource_cleanup,cleanup resource status]]
==== `cleanup`
-Cleanup resource status. Typically done after the resource has
-temporarily failed. If a node is omitted, cleanup on all nodes.
-If there are many nodes, the command may take a while.
+If resource has any past failures, clear its history and fail
+count. Typically done after the resource has temporarily
+failed.
+
+If a node is omitted, cleanup on all nodes.
+(Pacemaker 1.1.14)+ Pass force to cleanup the resource itself,
otherwise the cleanup command will apply to the parent resource (if
@@ -1957,7 +1959,7 @@
Usage:
...............
-cleanup <rsc> [<node>] [force]
+cleanup [<rsc>] [<node>] [force]
...............
[[cmdhelp_resource_clear,Clear any relocation constraint]]
@@ -2126,36 +2128,14 @@
promote <rsc>
...............
-[[cmdhelp_resource_refresh,refresh CIB from the LRM status]]
+[[cmdhelp_resource_refresh,Recheck current resource status and drop failure history]]
==== `refresh`
-Refresh CIB from the LRM status.
-
-.Note
-****************************
-`refresh` has been deprecated and is now
-an alias for `cleanup`.
-****************************
-
-Usage:
-...............
-refresh [<node>]
-...............
-
-[[cmdhelp_resource_reprobe,probe for resources not started by the CRM]]
-==== `reprobe`
-
-Probe for resources not started by the CRM.
-
-.Note
-****************************
-`reprobe` has been deprecated and is now
-an alias for `cleanup`.
-****************************
+Delete resource's history (including failures) so its current state is rechecked.
Usage:
...............
-reprobe [<node>]
+refresh [<rsc>] [<node>] [force]
...............
[[cmdhelp_resource_restart,restart resources]]
@@ -3423,7 +3403,7 @@
`group` can be passed the "container" meta attribute, to indicate that
it is to be used to group VM resources monitored using Nagios. The
resource referred to by the container attribute must be of type
-`ocf:heartbeat:Xen`, `oxf:heartbeat:VirtualDomain` or `ocf:heartbeat:lxc`.
+`ocf:heartbeat:Xen`, `ocf:heartbeat:VirtualDomain` or `ocf:heartbeat:lxc`.
Usage:
...............
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.0.0+git.1519721966.9abd841c/doc/website-v1/man-1.2.adoc new/crmsh-4.0.0+git.1522278003.cd7ae188/doc/website-v1/man-1.2.adoc
--- old/crmsh-4.0.0+git.1519721966.9abd841c/doc/website-v1/man-1.2.adoc 2018-02-27 09:59:26.000000000 +0100
+++ new/crmsh-4.0.0+git.1522278003.cd7ae188/doc/website-v1/man-1.2.adoc 2018-03-29 01:00:03.000000000 +0200
@@ -1758,7 +1758,7 @@
`group` can be passed the "container" meta attribute, to indicate that
it is to be used to group VM resources monitored using Nagios. The
resource referred to by the container attribute must be of type
-`ocf:heartbeat:Xen`, `oxf:heartbeat:VirtualDomain` or `ocf:heartbeat:lxc`.
+`ocf:heartbeat:Xen`, `ocf:heartbeat:VirtualDomain` or `ocf:heartbeat:lxc`.
Usage:
...............
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.0.0+git.1519721966.9abd841c/doc/website-v1/man-2.0.adoc new/crmsh-4.0.0+git.1522278003.cd7ae188/doc/website-v1/man-2.0.adoc
--- old/crmsh-4.0.0+git.1519721966.9abd841c/doc/website-v1/man-2.0.adoc 2018-02-27 09:59:26.000000000 +0100
+++ new/crmsh-4.0.0+git.1522278003.cd7ae188/doc/website-v1/man-2.0.adoc 2018-03-29 01:00:03.000000000 +0200
@@ -3064,7 +3064,7 @@
`group` can be passed the "container" meta attribute, to indicate that
it is to be used to group VM resources monitored using Nagios. The
resource referred to by the container attribute must be of type
-`ocf:heartbeat:Xen`, `oxf:heartbeat:VirtualDomain` or `ocf:heartbeat:lxc`.
+`ocf:heartbeat:Xen`, `ocf:heartbeat:VirtualDomain` or `ocf:heartbeat:lxc`.
Usage:
...............
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.0.0+git.1519721966.9abd841c/doc/website-v1/man-3.adoc new/crmsh-4.0.0+git.1522278003.cd7ae188/doc/website-v1/man-3.adoc
--- old/crmsh-4.0.0+git.1519721966.9abd841c/doc/website-v1/man-3.adoc 2018-02-27 09:59:26.000000000 +0100
+++ new/crmsh-4.0.0+git.1522278003.cd7ae188/doc/website-v1/man-3.adoc 2018-03-29 01:00:03.000000000 +0200
@@ -3325,7 +3325,7 @@
`group` can be passed the "container" meta attribute, to indicate that
it is to be used to group VM resources monitored using Nagios. The
resource referred to by the container attribute must be of type
-`ocf:heartbeat:Xen`, `oxf:heartbeat:VirtualDomain` or `ocf:heartbeat:lxc`.
+`ocf:heartbeat:Xen`, `ocf:heartbeat:VirtualDomain` or `ocf:heartbeat:lxc`.
Usage:
...............
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.0.0+git.1519721966.9abd841c/test/testcases/resource new/crmsh-4.0.0+git.1522278003.cd7ae188/test/testcases/resource
--- old/crmsh-4.0.0+git.1519721966.9abd841c/test/testcases/resource 2018-02-27 09:59:26.000000000 +0100
+++ new/crmsh-4.0.0+git.1522278003.cd7ae188/test/testcases/resource 2018-03-29 01:00:03.000000000 +0200
@@ -40,10 +40,24 @@
resource start cg
resource stop cg
resource stop p3
+%setenv showobj=
configure rename p3 p4
configure primitive p3 Dummy
resource stop p3
-%setenv showobj=
+resource start p3
+resource cleanup
+resource cleanup p3
+resource cleanup p3 node1
+resource cleanup force
+resource cleanup p3 force
+resource cleanup p3 node1 force
+resource refresh
+resource refresh p3
+resource refresh p3 node1
+resource refresh force
+resource refresh p3 force
+resource refresh p3 node1 force
+resource stop p3
configure rm cg
configure ms msg g
resource scores
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.0.0+git.1519721966.9abd841c/test/testcases/resource.exp new/crmsh-4.0.0+git.1522278003.cd7ae188/test/testcases/resource.exp
--- old/crmsh-4.0.0+git.1519721966.9abd841c/test/testcases/resource.exp 2018-02-27 09:59:26.000000000 +0100
+++ new/crmsh-4.0.0+git.1522278003.cd7ae188/test/testcases/resource.exp 2018-03-29 01:00:03.000000000 +0200
@@ -1,5 +1,5 @@
.TRY resource status p0
-.EXT crm_resource --locate -r 'p0'
+.EXT crm_resource --locate --resource 'p0'
resource p0 is NOT running
.SETENV showobj=p3
.TRY resource start p3
@@ -87,7 +87,7 @@
.SETENV showobj=cli-prefer-p3
.TRY resource migrate p3 node1
-.EXT crm_resource --quiet --move -r 'p3' --node='node1'
+.EXT crm_resource --quiet --move --resource 'p3' --node 'node1'
INFO: Move constraint created for p3 to node1
.INP: configure
.INP: _regtest on
@@ -106,11 +106,11 @@
.SETENV showobj=
.TRY resource unmigrate p3
-.EXT crm_resource --quiet --clear -r 'p3'
+.EXT crm_resource --quiet --clear --resource 'p3'
INFO: Removed migration constraints for p3
.SETENV showobj=cli-prefer-p3
.TRY resource migrate p3 node1 force
-.EXT crm_resource --quiet --move -r 'p3' --node='node1' --force
+.EXT crm_resource --quiet --move --resource 'p3' --node 'node1' --force
INFO: Move constraint created for p3 to node1
.INP: configure
.INP: _regtest on
@@ -129,11 +129,11 @@
.SETENV showobj=
.TRY resource unmigrate p3
-.EXT crm_resource --quiet --clear -r 'p3'
+.EXT crm_resource --quiet --clear --resource 'p3'
INFO: Removed migration constraints for p3
.SETENV showobj=p0
.TRY resource param p0 set a0 "1 2 3"
-.EXT crm_resource -r 'p0' -p 'a0' -v '1 2 3'
+.EXT crm_resource --resource 'p0' --set-parameter 'a0' --parameter-value '1 2 3'
Set 'p0' option: id=p0-instance_attributes-a0 set=p0-instance_attributes name=a0=1 2 3
.INP: configure
@@ -156,7 +156,7 @@
</cib>
.TRY resource param p0 show a0
-.EXT crm_resource -r 'p0' -g 'a0'
+.EXT crm_resource --resource 'p0' --get-parameter 'a0'
1 2 3
.INP: configure
.INP: _regtest on
@@ -178,7 +178,7 @@
</cib>
.TRY resource param p0 delete a0
-.EXT crm_resource -r 'p0' -d 'a0'
+.EXT crm_resource --resource 'p0' --delete-parameter 'a0'
Deleted 'p0' option: id=p0-instance_attributes-a0 name=a0
.INP: configure
.INP: _regtest on
@@ -198,7 +198,7 @@
</cib>
.TRY resource meta p0 set m0 123
-.EXT crm_resource --meta -r 'p0' -p 'm0' -v '123'
+.EXT crm_resource --meta --resource 'p0' --set-parameter 'm0' --parameter-value '123'
Set 'p0' option: id=p0-meta_attributes-m0 set=p0-meta_attributes name=m0=123
.INP: configure
@@ -222,7 +222,7 @@
</cib>
.TRY resource meta p0 show m0
-.EXT crm_resource --meta -r 'p0' -g 'm0'
+.EXT crm_resource --meta --resource 'p0' --get-parameter 'm0'
123
.INP: configure
.INP: _regtest on
@@ -245,7 +245,7 @@
</cib>
.TRY resource meta p0 delete m0
-.EXT crm_resource --meta -r 'p0' -d 'm0'
+.EXT crm_resource --meta --resource 'p0' --delete-parameter 'm0'
Deleted 'p0' option: id=p0-meta_attributes-m0 name=m0
.INP: configure
.INP: _regtest on
@@ -822,100 +822,57 @@
</configuration>
</cib>
+.SETENV showobj=
.TRY configure rename p3 p4
.EXT crm_resource --show-metadata stonith:null
.EXT stonithd metadata
.EXT crm_resource --show-metadata ocf:pacemaker:Dummy
.EXT crm_resource --show-metadata ocf:heartbeat:Delay
-.INP: configure
-.INP: _regtest on
-.INP: show xml p0
-<?xml version="1.0" ?>
-<cib>
- <configuration>
- <crm_config/>
- <nodes/>
- <resources>
- <clone id="cg">
- <meta_attributes id="cg-meta_attributes">
- <nvpair id="cg-meta_attributes-target-role" name="target-role" value="Stopped"/>
- </meta_attributes>
- <group id="g">
- <primitive id="p0" class="ocf" provider="pacemaker" type="Dummy"/>
- <primitive id="p4" class="ocf" provider="pacemaker" type="Dummy">
- <meta_attributes id="p3-meta_attributes">
- <nvpair id="p3-meta_attributes-target-role" name="target-role" value="Stopped"/>
- </meta_attributes>
- </primitive>
- </group>
- </clone>
- </resources>
- <constraints/>
- </configuration>
-</cib>
-
.TRY configure primitive p3 Dummy
.EXT crm_resource --show-metadata stonith:null
.EXT stonithd metadata
.EXT crm_resource --show-metadata ocf:pacemaker:Dummy
.EXT crm_resource --show-metadata ocf:heartbeat:Delay
.EXT crm_resource --show-metadata ocf:heartbeat:Dummy
-.INP: configure
-.INP: _regtest on
-.INP: show xml p0
-<?xml version="1.0" ?>
-<cib>
- <configuration>
- <crm_config/>
- <nodes/>
- <resources>
- <clone id="cg">
- <meta_attributes id="cg-meta_attributes">
- <nvpair id="cg-meta_attributes-target-role" name="target-role" value="Stopped"/>
- </meta_attributes>
- <group id="g">
- <primitive id="p0" class="ocf" provider="pacemaker" type="Dummy"/>
- <primitive id="p4" class="ocf" provider="pacemaker" type="Dummy">
- <meta_attributes id="p3-meta_attributes">
- <nvpair id="p3-meta_attributes-target-role" name="target-role" value="Stopped"/>
- </meta_attributes>
- </primitive>
- </group>
- </clone>
- </resources>
- <constraints/>
- </configuration>
-</cib>
-
.TRY resource stop p3
-.INP: configure
-.INP: _regtest on
-.INP: show xml p0
-<?xml version="1.0" ?>
-<cib>
- <configuration>
- <crm_config/>
- <nodes/>
- <resources>
- <clone id="cg">
- <meta_attributes id="cg-meta_attributes">
- <nvpair id="cg-meta_attributes-target-role" name="target-role" value="Stopped"/>
- </meta_attributes>
- <group id="g">
- <primitive id="p0" class="ocf" provider="pacemaker" type="Dummy"/>
- <primitive id="p4" class="ocf" provider="pacemaker" type="Dummy">
- <meta_attributes id="p3-meta_attributes">
- <nvpair id="p3-meta_attributes-target-role" name="target-role" value="Stopped"/>
- </meta_attributes>
- </primitive>
- </group>
- </clone>
- </resources>
- <constraints/>
- </configuration>
-</cib>
-
-.SETENV showobj=
+.TRY resource start p3
+.TRY resource cleanup
+.EXT crm_resource --cleanup
+Error performing operation: Transport endpoint is not connected
+.TRY resource cleanup p3
+.EXT crm_resource --cleanup --resource p3
+Error performing operation: Transport endpoint is not connected
+.TRY resource cleanup p3 node1
+.EXT crm_resource --cleanup --resource p3 --node node1
+Error performing operation: Transport endpoint is not connected
+.TRY resource cleanup force
+.EXT crm_resource --cleanup --force
+Error performing operation: Transport endpoint is not connected
+.TRY resource cleanup p3 force
+.EXT crm_resource --cleanup --resource p3 --force
+Error performing operation: Transport endpoint is not connected
+.TRY resource cleanup p3 node1 force
+.EXT crm_resource --cleanup --resource p3 --node node1 --force
+Error performing operation: Transport endpoint is not connected
+.TRY resource refresh
+.EXT crm_resource --refresh
+Error performing operation: Transport endpoint is not connected
+.TRY resource refresh p3
+.EXT crm_resource --refresh --resource p3
+Error performing operation: Transport endpoint is not connected
+.TRY resource refresh p3 node1
+.EXT crm_resource --refresh --resource p3 --node node1
+Error performing operation: Transport endpoint is not connected
+.TRY resource refresh force
+.EXT crm_resource --refresh --force
+Error performing operation: Transport endpoint is not connected
+.TRY resource refresh p3 force
+.EXT crm_resource --refresh --resource p3 --force
+Error performing operation: Transport endpoint is not connected
+.TRY resource refresh p3 node1 force
+.EXT crm_resource --refresh --resource p3 --node node1 --force
+Error performing operation: Transport endpoint is not connected
+.TRY resource stop p3
.TRY configure rm cg
.TRY configure ms msg g
.TRY resource scores
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/crmsh-4.0.0+git.1519721966.9abd841c/test/unittests/test_resource.py new/crmsh-4.0.0+git.1522278003.cd7ae188/test/unittests/test_resource.py
--- old/crmsh-4.0.0+git.1519721966.9abd841c/test/unittests/test_resource.py 2018-02-27 09:59:26.000000000 +0100
+++ new/crmsh-4.0.0+git.1522278003.cd7ae188/test/unittests/test_resource.py 2018-03-29 01:00:03.000000000 +0200
@@ -1,4 +1,4 @@
-# Copyright (C) 2014 Kristoffer Gronlund <kgronlund(a)suse.com>
+# Copyright (C) 2014-2018 Kristoffer Gronlund <kgronlund(a)suse.com>
# See COPYING for license information.
@@ -26,10 +26,10 @@
utils.ext_cmd = mockcmd
rscui = ui_resource.RscMgmt()
assert rscui.do_maintenance(mc, 'rsc1') is True
- assert commands[-1] == ("crm_resource -r 'rsc1' --meta -p maintenance -v 'true'",)
+ assert commands[-1] == ("crm_resource --resource 'rsc1' --meta --set-parameter maintenance --parameter-value 'true'",)
assert rscui.do_maintenance(mc, 'rsc1', 'on') is True
- assert commands[-1] == ("crm_resource -r 'rsc1' --meta -p maintenance -v 'true'",)
+ assert commands[-1] == ("crm_resource --resource 'rsc1' --meta --set-parameter maintenance --parameter-value 'true'",)
assert rscui.do_maintenance(mc, 'rsc1', 'off') is True
- assert commands[-1] == ("crm_resource -r 'rsc1' --meta -p maintenance -v 'false'",)
+ assert commands[-1] == ("crm_resource --resource 'rsc1' --meta --set-parameter maintenance --parameter-value 'false'",)
finally:
utils.ext_cmd = _pre_ext_cmd
Hello community,
here is the log from the commit of package platformsh-cli for openSUSE:Factory checked in at 2018-03-29 11:57:53
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/platformsh-cli (Old)
and /work/SRC/openSUSE:Factory/.platformsh-cli.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "platformsh-cli"
Thu Mar 29 11:57:53 2018 rev:41 rq:592137 version:3.31.4
Changes:
--------
--- /work/SRC/openSUSE:Factory/platformsh-cli/platformsh-cli.changes 2018-03-28 10:32:41.942202317 +0200
+++ /work/SRC/openSUSE:Factory/.platformsh-cli.new/platformsh-cli.changes 2018-03-29 11:57:57.753280198 +0200
@@ -1,0 +2,12 @@
+Thu Mar 29 02:01:44 UTC 2018 - jimmy(a)boombatower.com
+
+- Update to version 3.31.4:
+ * Release v3.31.4
+ * [project:info] show 'git' and 'url' in the property list
+ * [environment:drush] Use deployment routes to find a site URL
+ * Allow accounts API URL to not terminate with / [skip changelog]
+ * Remove CHANGELOG.md: use /releases instead for change logs
+ * [self:release] use GitHub commits API to get the tag SHA [skip changelog]
+ * Test 'drush' command for Drush functionality before using
+
+-------------------------------------------------------------------
Old:
----
platformsh-cli-3.31.3.tar.xz
New:
----
platformsh-cli-3.31.4.tar.xz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ platformsh-cli.spec ++++++
--- /var/tmp/diff_new_pack.BC8sds/_old 2018-03-29 11:57:58.425255945 +0200
+++ /var/tmp/diff_new_pack.BC8sds/_new 2018-03-29 11:57:58.429255800 +0200
@@ -17,7 +17,7 @@
Name: platformsh-cli
-Version: 3.31.3
+Version: 3.31.4
Release: 0
Summary: Tool for managing Platform.sh services from the command line
# See licenses.txt for dependency licenses.
++++++ _service ++++++
--- /var/tmp/diff_new_pack.BC8sds/_old 2018-03-29 11:57:58.461254645 +0200
+++ /var/tmp/diff_new_pack.BC8sds/_new 2018-03-29 11:57:58.461254645 +0200
@@ -2,7 +2,7 @@
<service name="tar_scm" mode="disabled">
<param name="versionformat">@PARENT_TAG@</param>
<param name="versionrewrite-pattern">v(.*)</param>
- <param name="revision">refs/tags/v3.31.3</param>
+ <param name="revision">refs/tags/v3.31.4</param>
<param name="url">git://github.com/platformsh/platformsh-cli.git</param>
<param name="scm">git</param>
<param name="changesgenerate">enable</param>
++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.BC8sds/_old 2018-03-29 11:57:58.477254068 +0200
+++ /var/tmp/diff_new_pack.BC8sds/_new 2018-03-29 11:57:58.481253923 +0200
@@ -1,6 +1,6 @@
<servicedata>
<service name="tar_scm">
<param name="url">git://github.com/platformsh/platformsh-cli.git</param>
- <param name="changesrevision">b11d88cf518d761c2737a0be50081ec548b1069a</param>
+ <param name="changesrevision">4ef5eb28776e289a4e8131b7dfb192dded4ed5b3</param>
</service>
</servicedata>
++++++ platformsh-cli-3.31.3.tar.xz -> platformsh-cli-3.31.4.tar.xz ++++++
++++ 2084 lines of diff (skipped)
++++++ platformsh-cli-vendor.tar.xz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/vendor/autoload.php new/vendor/autoload.php
--- old/vendor/autoload.php 2018-03-27 04:39:51.435700356 +0200
+++ new/vendor/autoload.php 2018-03-29 04:01:46.828820489 +0200
@@ -4,4 +4,4 @@
require_once __DIR__ . '/composer/autoload_real.php';
-return ComposerAutoloaderInited2162764f3dc00784016a5843b92abf::getLoader();
+return ComposerAutoloaderInitabedc0cb3383310d6509812bb78d1eb1::getLoader();
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/vendor/composer/autoload_real.php new/vendor/composer/autoload_real.php
--- old/vendor/composer/autoload_real.php 2018-03-27 04:39:51.435700356 +0200
+++ new/vendor/composer/autoload_real.php 2018-03-29 04:01:46.828820489 +0200
@@ -2,7 +2,7 @@
// autoload_real.php @generated by Composer
-class ComposerAutoloaderInited2162764f3dc00784016a5843b92abf
+class ComposerAutoloaderInitabedc0cb3383310d6509812bb78d1eb1
{
private static $loader;
@@ -19,15 +19,15 @@
return self::$loader;
}
- spl_autoload_register(array('ComposerAutoloaderInited2162764f3dc00784016a5843b92abf', 'loadClassLoader'), true, true);
+ spl_autoload_register(array('ComposerAutoloaderInitabedc0cb3383310d6509812bb78d1eb1', 'loadClassLoader'), true, true);
self::$loader = $loader = new \Composer\Autoload\ClassLoader();
- spl_autoload_unregister(array('ComposerAutoloaderInited2162764f3dc00784016a5843b92abf', 'loadClassLoader'));
+ spl_autoload_unregister(array('ComposerAutoloaderInitabedc0cb3383310d6509812bb78d1eb1', 'loadClassLoader'));
$useStaticLoader = PHP_VERSION_ID >= 50600 && !defined('HHVM_VERSION') && (!function_exists('zend_loader_file_encoded') || !zend_loader_file_encoded());
if ($useStaticLoader) {
require_once __DIR__ . '/autoload_static.php';
- call_user_func(\Composer\Autoload\ComposerStaticInited2162764f3dc00784016a5843b92abf::getInitializer($loader));
+ call_user_func(\Composer\Autoload\ComposerStaticInitabedc0cb3383310d6509812bb78d1eb1::getInitializer($loader));
} else {
$map = require __DIR__ . '/autoload_namespaces.php';
foreach ($map as $namespace => $path) {
@@ -48,19 +48,19 @@
$loader->register(true);
if ($useStaticLoader) {
- $includeFiles = Composer\Autoload\ComposerStaticInited2162764f3dc00784016a5843b92abf::$files;
+ $includeFiles = Composer\Autoload\ComposerStaticInitabedc0cb3383310d6509812bb78d1eb1::$files;
} else {
$includeFiles = require __DIR__ . '/autoload_files.php';
}
foreach ($includeFiles as $fileIdentifier => $file) {
- composerRequireed2162764f3dc00784016a5843b92abf($fileIdentifier, $file);
+ composerRequireabedc0cb3383310d6509812bb78d1eb1($fileIdentifier, $file);
}
return $loader;
}
}
-function composerRequireed2162764f3dc00784016a5843b92abf($fileIdentifier, $file)
+function composerRequireabedc0cb3383310d6509812bb78d1eb1($fileIdentifier, $file)
{
if (empty($GLOBALS['__composer_autoload_files'][$fileIdentifier])) {
require $file;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/vendor/composer/autoload_static.php new/vendor/composer/autoload_static.php
--- old/vendor/composer/autoload_static.php 2018-03-27 04:39:51.435700356 +0200
+++ new/vendor/composer/autoload_static.php 2018-03-29 04:01:46.828820489 +0200
@@ -4,7 +4,7 @@
namespace Composer\Autoload;
-class ComposerStaticInited2162764f3dc00784016a5843b92abf
+class ComposerStaticInitabedc0cb3383310d6509812bb78d1eb1
{
public static $files = array (
'0e6d7bf4a5811bfa5cf40c5ccd6fae6a' => __DIR__ . '/..' . '/symfony/polyfill-mbstring/bootstrap.php',
@@ -195,9 +195,9 @@
public static function getInitializer(ClassLoader $loader)
{
return \Closure::bind(function () use ($loader) {
- $loader->prefixLengthsPsr4 = ComposerStaticInited2162764f3dc00784016a5843b92abf::$prefixLengthsPsr4;
- $loader->prefixDirsPsr4 = ComposerStaticInited2162764f3dc00784016a5843b92abf::$prefixDirsPsr4;
- $loader->classMap = ComposerStaticInited2162764f3dc00784016a5843b92abf::$classMap;
+ $loader->prefixLengthsPsr4 = ComposerStaticInitabedc0cb3383310d6509812bb78d1eb1::$prefixLengthsPsr4;
+ $loader->prefixDirsPsr4 = ComposerStaticInitabedc0cb3383310d6509812bb78d1eb1::$prefixDirsPsr4;
+ $loader->classMap = ComposerStaticInitabedc0cb3383310d6509812bb78d1eb1::$classMap;
}, null, ClassLoader::class);
}
Hello community,
here is the log from the commit of package python-kiwisolver for openSUSE:Factory checked in at 2018-03-29 11:57:50
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-kiwisolver (Old)
and /work/SRC/openSUSE:Factory/.python-kiwisolver.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-kiwisolver"
Thu Mar 29 11:57:50 2018 rev:2 rq:592125 version:1.0.1
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-kiwisolver/python-kiwisolver.changes 2018-03-24 16:16:42.968607881 +0100
+++ /work/SRC/openSUSE:Factory/.python-kiwisolver.new/python-kiwisolver.changes 2018-03-29 11:57:53.353439002 +0200
@@ -1,0 +2,5 @@
+Wed Mar 28 15:28:16 UTC 2018 - jengelh(a)inai.de
+
+- Quantify speeds in description.
+
+-------------------------------------------------------------------
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-kiwisolver.spec ++++++
--- /var/tmp/diff_new_pack.MrMCfx/_old 2018-03-29 11:57:54.069413159 +0200
+++ /var/tmp/diff_new_pack.MrMCfx/_new 2018-03-29 11:57:54.069413159 +0200
@@ -20,7 +20,7 @@
Name: python-kiwisolver
Version: 1.0.1
Release: 0
-Summary: A fast implementation of the Cassowary constraint solver
+Summary: An implementation of the Cassowary constraint solver
License: BSD-3-Clause
Group: Development/Languages/Python
Url: https://github.com/nucleic/kiwi
@@ -35,12 +35,12 @@
%python_subpackages
%description
-Kiwi is an efficient C++ implementation of the Cassowary constraint solving
-algorithm. Kiwi is an implementation of the algorithm based on the seminal
-Cassowary paper. It is *not* a refactoring of the original C++ solver. Kiwi
-has been designed from the ground up to be lightweight and fast. Kiwi ranges
-from 10x to 500x faster than the original Cassowary solver with typical use
-cases gaining a 40x improvement. Memory savings are consistently > 5x.
+Kiwi is a C++ implementation of the Cassowary constraint solving
+algorithm. Kiwi is an implementation of the algorithm based on the
+seminal Cassowary paper, but it is not a refactoring of the original
+C++ solver. Kiwi ranges from 10x to 500x faster processing than the
+original Cassowary solver with the same input set, with typical use
+cases gaining a 40x improvement. Memory savings are consistently >5x.
In addition to the C++ solver, Kiwi ships with hand-rolled Python bindings.
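For context (not part of the packaging change), a minimal example of the hand-rolled Python bindings the description refers to, assuming the upstream kiwisolver API:

# Solve a small Cassowary constraint system with kiwisolver.
from kiwisolver import Solver, Variable

x = Variable("x")
y = Variable("y")

solver = Solver()
solver.addConstraint(x + y == 20)
solver.addConstraint(x >= 2 * y)
solver.updateVariables()

print(x.value(), y.value())  # one feasible assignment, e.g. 20.0 0.0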
Hello community,
here is the log from the commit of package tellico for openSUSE:Factory checked in at 2018-03-29 11:57:46
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/tellico (Old)
and /work/SRC/openSUSE:Factory/.tellico.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "tellico"
Thu Mar 29 11:57:46 2018 rev:74 rq:592112 version:3.1.2
Changes:
--------
--- /work/SRC/openSUSE:Factory/tellico/tellico.changes 2018-01-17 21:58:49.625763135 +0100
+++ /work/SRC/openSUSE:Factory/.tellico.new/tellico.changes 2018-03-29 11:57:49.537576731 +0200
@@ -1,0 +2,18 @@
+Wed Mar 28 11:48:43 UTC 2018 - wbauer(a)tmo.at
+
+- Update to 3.1.2:
+ Improvements:
+ * Updated KINO.de data source.
+ * Updated Internet Movie Database (IMDB.com) data source.
+ * Updated ISBNdb.com data source.
+ * Updated MusicBrainz data source.
+ Bug fixes:
+ * Added workaround for crash when reading EXIV data (kde#390744).
+ * Fixed bug with inconsistent selection (kde#391614).
+ * Fixed bug with "Filter by Group" (kde#389931).
+- Add fix-build-with-Qt5.6.patch to make it compile on Leap 42.3
+ (kde#392457)
+- Use cmake() syntax for KF5 and Qt5 BuildRequires
+- Mark license files as %license
+
+-------------------------------------------------------------------
Old:
----
tellico-3.1.1.tar.xz
New:
----
fix-build-with-Qt5.6.patch
tellico-3.1.2.tar.xz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ tellico.spec ++++++
--- /var/tmp/diff_new_pack.PBk5im/_old 2018-03-29 11:57:50.213552331 +0200
+++ /var/tmp/diff_new_pack.PBk5im/_new 2018-03-29 11:57:50.217552188 +0200
@@ -17,52 +17,54 @@
Name: tellico
-Version: 3.1.1
+Version: 3.1.2
Release: 0
Summary: A Collection Manager for KDE
License: GPL-2.0+
Group: Productivity/Office/Other
Url: http://tellico-project.org/
Source0: http://tellico-project.org/files/%{name}-%{version}.tar.xz
+# PATCH-FIX-UPSTREAM
+Patch: fix-build-with-Qt5.6.patch
BuildRequires: extra-cmake-modules
BuildRequires: fdupes
-BuildRequires: karchive-devel
-BuildRequires: kcodecs-devel
-BuildRequires: kconfig-devel
-BuildRequires: kconfigwidgets-devel
-BuildRequires: kcoreaddons-devel
-BuildRequires: kcrash-devel
-BuildRequires: kdoctools-devel
-BuildRequires: kfilemetadata5-devel
-BuildRequires: kguiaddons-devel
-BuildRequires: khtml-devel
-BuildRequires: ki18n-devel
-BuildRequires: kiconthemes-devel
-BuildRequires: kio-devel
-BuildRequires: kitemmodels-devel
-BuildRequires: kjobwidgets-devel
-BuildRequires: knewstuff-devel
-BuildRequires: kwallet-devel
-BuildRequires: kwidgetsaddons-devel
-BuildRequires: kwindowsystem-devel
-BuildRequires: kxmlgui-devel
BuildRequires: libcdio-devel
BuildRequires: libexempi-devel
-BuildRequires: libksane-devel
BuildRequires: libpoppler-qt5-devel
BuildRequires: libv4l-devel
BuildRequires: libxslt-devel
BuildRequires: libyaz-devel
BuildRequires: pkgconfig
-BuildRequires: solid-devel
BuildRequires: taglib-devel
-BuildRequires: pkgconfig(Qt5Core)
-BuildRequires: pkgconfig(Qt5DBus)
-BuildRequires: pkgconfig(Qt5Gui)
-BuildRequires: pkgconfig(Qt5Network)
-BuildRequires: pkgconfig(Qt5Test)
-BuildRequires: pkgconfig(Qt5Widgets)
-BuildRequires: pkgconfig(Qt5Xml)
+BuildRequires: cmake(KF5Archive)
+BuildRequires: cmake(KF5Codecs)
+BuildRequires: cmake(KF5Config)
+BuildRequires: cmake(KF5ConfigWidgets)
+BuildRequires: cmake(KF5CoreAddons)
+BuildRequires: cmake(KF5Crash)
+BuildRequires: cmake(KF5DocTools)
+BuildRequires: cmake(KF5FileMetaData)
+BuildRequires: cmake(KF5GuiAddons)
+BuildRequires: cmake(KF5I18n)
+BuildRequires: cmake(KF5IconThemes)
+BuildRequires: cmake(KF5ItemModels)
+BuildRequires: cmake(KF5JobWidgets)
+BuildRequires: cmake(KF5KHtml)
+BuildRequires: cmake(KF5KIO)
+BuildRequires: cmake(KF5NewStuff)
+BuildRequires: cmake(KF5Sane)
+BuildRequires: cmake(KF5Solid)
+BuildRequires: cmake(KF5Wallet)
+BuildRequires: cmake(KF5WidgetsAddons)
+BuildRequires: cmake(KF5WindowSystem)
+BuildRequires: cmake(KF5XmlGui)
+BuildRequires: cmake(Qt5Core)
+BuildRequires: cmake(Qt5DBus)
+BuildRequires: cmake(Qt5Gui)
+BuildRequires: cmake(Qt5Network)
+BuildRequires: cmake(Qt5Test)
+BuildRequires: cmake(Qt5Widgets)
+BuildRequires: cmake(Qt5Xml)
BuildRequires: pkgconfig(libxml-2.0)
Requires(post): shared-mime-info
Requires(postun): shared-mime-info
@@ -85,6 +87,7 @@
%prep
%setup -q
+%patch -p1
%build
%cmake_kf5 "-DENABLE_WEBCAM=true" -d build
@@ -111,7 +114,8 @@
%files
%defattr(-,root,root,-)
-%doc AUTHORS COPYING ChangeLog README
+%license COPYING
+%doc AUTHORS ChangeLog README
%{_kf5_applicationsdir}/org.kde.tellico.desktop
%dir %{_kf5_appsdir}/kconf_update
%{_kf5_appsdir}/kconf_update/tellico*
++++++ fix-build-with-Qt5.6.patch ++++++
From 8e6c74d6f23ff06d615d1eb5e3fda2a786a1b3fc Mon Sep 17 00:00:00 2001
From: Robby Stephenson <robby(a)periapsis.org>
Date: Wed, 28 Mar 2018 12:51:46 -0400
Subject: Fix compilation with Qt 5.6
QStringLiteral doesn't work everywhere apparently for Qt 5.6. Revert
some changes made by clazy.
Thanks to the patch from Wolfgang Bauer.
BUG: 392457
FIXED-IN: 3.1.3
---
ChangeLog | 4 ++++
src/fetch/allocinefetcher.cpp | 6 +++---
src/fetch/discogsfetcher.cpp | 14 +++++++-------
src/fetch/doubanfetcher.cpp | 12 ++++++------
src/fetch/filmasterfetcher.cpp | 2 +-
src/fetch/googlebookfetcher.cpp | 2 +-
src/fetch/igdbfetcher.cpp | 4 ++--
src/fetch/imdbfetcher.cpp | 2 +-
src/fetch/isbndbfetcher.cpp | 4 ++--
src/fetch/kinofetcher.cpp | 2 +-
src/fetch/moviemeterfetcher.cpp | 2 +-
src/fetch/openlibraryfetcher.cpp | 4 ++--
src/fetch/themoviedbfetcher.cpp | 10 +++++-----
13 files changed, 36 insertions(+), 32 deletions(-)
diff --git a/ChangeLog b/ChangeLog
index f8528d9..d0e8fd9 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,9 @@
2018-03-28 Robby Stephenson <robby(a)periapsis.org>
+ * Fixed compilation for Qt 5.6 (Bug 392457).
+
+2018-03-28 Robby Stephenson <robby(a)periapsis.org>
+
* Released Tellico 3.1.2.
* Updated Kino.de data source.
diff --git a/src/fetch/allocinefetcher.cpp b/src/fetch/allocinefetcher.cpp
index 522c558..0a91fb2 100644
--- a/src/fetch/allocinefetcher.cpp
+++ b/src/fetch/allocinefetcher.cpp
@@ -376,19 +376,19 @@ void AbstractAllocineFetcher::populateEntry(Data::EntryPtr entry, const QVariant
entry->setField(QStringLiteral("studio"), mapValue(releaseMap, "distributor", "name"));
QStringList genres;
- foreach(const QVariant& variant, resultMap.value(QStringLiteral("genre")).toList()) {
+ foreach(const QVariant& variant, resultMap.value(QLatin1String("genre")).toList()) {
genres << i18n(mapValue(variant.toMap(), "$").toUtf8().constData());
}
entry->setField(QStringLiteral("genre"), genres.join(FieldFormat::delimiterString()));
QStringList nats;
- foreach(const QVariant& variant, resultMap.value(QStringLiteral("nationality")).toList()) {
+ foreach(const QVariant& variant, resultMap.value(QLatin1String("nationality")).toList()) {
nats << mapValue(variant.toMap(), "$");
}
entry->setField(QStringLiteral("nationality"), nats.join(FieldFormat::delimiterString()));
QStringList langs;
- foreach(const QVariant& variant, resultMap.value(QStringLiteral("language")).toList()) {
+ foreach(const QVariant& variant, resultMap.value(QLatin1String("language")).toList()) {
langs << mapValue(variant.toMap(), "$");
}
entry->setField(QStringLiteral("language"), langs.join(FieldFormat::delimiterString()));
diff --git a/src/fetch/discogsfetcher.cpp b/src/fetch/discogsfetcher.cpp
index 2f7f441..7f6360f 100644
--- a/src/fetch/discogsfetcher.cpp
+++ b/src/fetch/discogsfetcher.cpp
@@ -280,7 +280,7 @@ void DiscogsFetcher::slotComplete(KJob*) {
}
int count = 0;
- foreach(const QVariant& result, resultMap.value(QStringLiteral("results")).toList()) {
+ foreach(const QVariant& result, resultMap.value(QLatin1String("results")).toList()) {
if(count >= DISCOGS_MAX_RETURNS_TOTAL) {
break;
}
@@ -306,13 +306,13 @@ void DiscogsFetcher::populateEntry(Data::EntryPtr entry_, const QVariantMap& res
entry_->setField(QStringLiteral("genre"), mapValue(resultMap_, "genres"));
QStringList artists;
- foreach(const QVariant& artist, resultMap_.value(QStringLiteral("artists")).toList()) {
+ foreach(const QVariant& artist, resultMap_.value(QLatin1String("artists")).toList()) {
artists << mapValue(artist.toMap(), "name");
}
entry_->setField(QStringLiteral("artist"), artists.join(FieldFormat::delimiterString()));
QStringList labels;
- foreach(const QVariant& label, resultMap_.value(QStringLiteral("labels")).toList()) {
+ foreach(const QVariant& label, resultMap_.value(QLatin1String("labels")).toList()) {
labels << mapValue(label.toMap(), "name");
}
entry_->setField(QStringLiteral("label"), labels.join(FieldFormat::delimiterString()));
@@ -331,7 +331,7 @@ void DiscogsFetcher::populateEntry(Data::EntryPtr entry_, const QVariantMap& res
// check the formats, it could have multiple
// if there is a CD, prefer that in the track list
bool hasCD = false;
- foreach(const QVariant& format, resultMap_.value(QStringLiteral("formats")).toList()) {
+ foreach(const QVariant& format, resultMap_.value(QLatin1String("formats")).toList()) {
if(mapValue(format.toMap(), "name") == QLatin1String("CD")) {
entry_->setField(QStringLiteral("medium"), i18n("Compact Disc"));
hasCD = true;
@@ -346,7 +346,7 @@ void DiscogsFetcher::populateEntry(Data::EntryPtr entry_, const QVariantMap& res
}
QStringList tracks;
- foreach(const QVariant& track, resultMap_.value(QStringLiteral("tracklist")).toList()) {
+ foreach(const QVariant& track, resultMap_.value(QLatin1String("tracklist")).toList()) {
const QVariantMap trackMap = track.toMap();
if(mapValue(trackMap, "type_") != QLatin1String("track")) {
continue;
@@ -363,7 +363,7 @@ void DiscogsFetcher::populateEntry(Data::EntryPtr entry_, const QVariantMap& res
trackInfo << mapValue(trackMap, "title");
if(trackMap.contains(QStringLiteral("artists"))) {
QStringList artists;
- foreach(const QVariant& artist, trackMap.value(QStringLiteral("artists")).toList()) {
+ foreach(const QVariant& artist, trackMap.value(QLatin1String("artists")).toList()) {
artists << mapValue(artist.toMap(), "name");
}
trackInfo << artists.join(FieldFormat::delimiterString());
@@ -385,7 +385,7 @@ void DiscogsFetcher::populateEntry(Data::EntryPtr entry_, const QVariantMap& res
if(entry_->collection()->hasField(QStringLiteral("producer"))) {
QStringList producers;
- foreach(const QVariant& extraartist, resultMap_.value(QStringLiteral("extraartists")).toList()) {
+ foreach(const QVariant& extraartist, resultMap_.value(QLatin1String("extraartists")).toList()) {
if(mapValue(extraartist.toMap(), "role").contains(QLatin1String("Producer"))) {
producers << mapValue(extraartist.toMap(), "name");
}
diff --git a/src/fetch/doubanfetcher.cpp b/src/fetch/doubanfetcher.cpp
index 146833b..9e0e33e 100644
--- a/src/fetch/doubanfetcher.cpp
+++ b/src/fetch/doubanfetcher.cpp
@@ -227,7 +227,7 @@ void DoubanFetcher::slotComplete(KJob* job_) {
switch(request().collectionType) {
case Data::Collection::Book:
case Data::Collection::Bibtex:
- foreach(const QVariant& v, resultsMap.value(QStringLiteral("books")).toList()) {
+ foreach(const QVariant& v, resultsMap.value(QLatin1String("books")).toList()) {
const QVariantMap resultMap = v.toMap();
FetchResult* r = new FetchResult(Fetcher::Ptr(this), mapValue(resultMap, "title"),
mapValue(resultMap, "author") + QLatin1Char('/') +
@@ -239,7 +239,7 @@ void DoubanFetcher::slotComplete(KJob* job_) {
break;
case Data::Collection::Video:
- foreach(const QVariant& v, resultsMap.value(QStringLiteral("subjects")).toList()) {
+ foreach(const QVariant& v, resultsMap.value(QLatin1String("subjects")).toList()) {
const QVariantMap resultMap = v.toMap();
FetchResult* r = new FetchResult(Fetcher::Ptr(this), mapValue(resultMap, "title"),
mapValue(resultMap, "directors", "name") + QLatin1Char('/') +
@@ -253,7 +253,7 @@ void DoubanFetcher::slotComplete(KJob* job_) {
break;
case Data::Collection::Album:
- foreach(const QVariant& v, resultsMap.value(QStringLiteral("musics")).toList()) {
+ foreach(const QVariant& v, resultsMap.value(QLatin1String("musics")).toList()) {
const QVariantMap resultMap = v.toMap();
FetchResult* r = new FetchResult(Fetcher::Ptr(this), mapValue(resultMap, "title"),
mapValue(resultMap, "attrs", "singer") + QLatin1Char('/') +
@@ -411,7 +411,7 @@ void DoubanFetcher::populateVideoEntry(Data::EntryPtr entry, const QVariantMap&
entry->setField(QStringLiteral("plot"), mapValue(resultMap_, "summary"));
QStringList actors;
- foreach(const QVariant& v, resultMap_.value(QStringLiteral("casts")).toList()) {
+ foreach(const QVariant& v, resultMap_.value(QLatin1String("casts")).toList()) {
actors << v.toMap().value(QStringLiteral("name")).toString();
}
entry->setField(QStringLiteral("cast"), actors.join(FieldFormat::rowDelimiterString()));
@@ -438,8 +438,8 @@ void DoubanFetcher::populateMusicEntry(Data::EntryPtr entry, const QVariantMap&
}
QStringList values, tracks;
- foreach(const QVariant& v, resultMap_.value(QStringLiteral("attrs"))
- .toMap().value(QStringLiteral("tracks")).toList()) {
+ foreach(const QVariant& v, resultMap_.value(QLatin1String("attrs"))
+ .toMap().value(QLatin1String("tracks")).toList()) {
// some cases have all the tracks in one item, separated by "\n" and using 01. track numbers
if(v.toString().contains(QLatin1Char('\n'))) {
values << v.toString().split(QStringLiteral("\n"));
diff --git a/src/fetch/filmasterfetcher.cpp b/src/fetch/filmasterfetcher.cpp
index 6cb4bd9..df0be0f 100644
--- a/src/fetch/filmasterfetcher.cpp
+++ b/src/fetch/filmasterfetcher.cpp
@@ -260,7 +260,7 @@ void FilmasterFetcher::populateEntry(Data::EntryPtr entry_, const QVariantMap& r
entry_->setField(QStringLiteral("plot"), mapValue(result_, "description"));
QStringList directors;
- foreach(const QVariant& director, result_.value(QStringLiteral("directors")).toList()) {
+ foreach(const QVariant& director, result_.value(QLatin1String("directors")).toList()) {
const QVariantMap directorMap = director.toMap();
directors << mapValue(directorMap, "name") + QLatin1Char(' ') + mapValue(directorMap, "surname");
}
diff --git a/src/fetch/googlebookfetcher.cpp b/src/fetch/googlebookfetcher.cpp
index d7e85a0..749cf20 100644
--- a/src/fetch/googlebookfetcher.cpp
+++ b/src/fetch/googlebookfetcher.cpp
@@ -320,7 +320,7 @@ void GoogleBookFetcher::populateEntry(Data::EntryPtr entry, const QVariantMap& r
entry->setField(QStringLiteral("keyword"), catList.join(FieldFormat::delimiterString()));
QString isbn;
- foreach(const QVariant& idVariant, volumeMap.value(QStringLiteral("industryIdentifiers")).toList()) {
+ foreach(const QVariant& idVariant, volumeMap.value(QLatin1String("industryIdentifiers")).toList()) {
const QVariantMap idMap = idVariant.toMap();
if(mapValue(idMap, "type") == QLatin1String("ISBN_10")) {
isbn = mapValue(idMap, "identifier");
diff --git a/src/fetch/igdbfetcher.cpp b/src/fetch/igdbfetcher.cpp
index eaac822..a6a62e9 100644
--- a/src/fetch/igdbfetcher.cpp
+++ b/src/fetch/igdbfetcher.cpp
@@ -151,7 +151,7 @@ Tellico::Data::EntryPtr IGDBFetcher::fetchEntryHook(uint uid_) {
QStringList publishers;
// grab the publisher data
if(entry->field(QStringLiteral("publisher")).isEmpty()) {
- foreach(const QString& pid, FieldFormat::splitValue(entry->field(QStringLiteral("pub-id")))) {
+ foreach(const QString& pid, FieldFormat::splitValue(entry->field(QLatin1String("pub-id")))) {
const QString publisher = companyName(pid);
if(!publisher.isEmpty()) {
publishers << publisher;
@@ -163,7 +163,7 @@ Tellico::Data::EntryPtr IGDBFetcher::fetchEntryHook(uint uid_) {
QStringList developers;
// grab the developer data
if(entry->field(QStringLiteral("developer")).isEmpty()) {
- foreach(const QString& did, FieldFormat::splitValue(entry->field(QStringLiteral("dev-id")))) {
+ foreach(const QString& did, FieldFormat::splitValue(entry->field(QLatin1String("dev-id")))) {
const QString developer = companyName(did);
if(!developer.isEmpty()) {
developers << developer;
diff --git a/src/fetch/imdbfetcher.cpp b/src/fetch/imdbfetcher.cpp
index 31662e1..c033e41 100644
--- a/src/fetch/imdbfetcher.cpp
+++ b/src/fetch/imdbfetcher.cpp
@@ -1203,7 +1203,7 @@ void IMDBFetcher::doLists2(const QString& str_, Tellico::Data::EntryPtr entry_)
genres << token.trimmed();
}
} else if(tag == data.language) {
- foreach(const QString& token, value.split(QRegExp(QStringLiteral("[,|]")))) {
+ foreach(const QString& token, value.split(QRegExp(QLatin1String("[,|]")))) {
langs << token.trimmed();
}
} else if(tag == data.sound) {
diff --git a/src/fetch/isbndbfetcher.cpp b/src/fetch/isbndbfetcher.cpp
index aeaf123..9afd408 100644
--- a/src/fetch/isbndbfetcher.cpp
+++ b/src/fetch/isbndbfetcher.cpp
@@ -259,7 +259,7 @@ void ISBNdbFetcher::populateEntry(Data::EntryPtr entry_, const QVariantMap& resu
QString pubYear = mapValue(resultMap_, "date_published").remove(QRegExp(QStringLiteral("[^\\d]"))).left(4);
entry_->setField(QStringLiteral("pub_year"), pubYear);
QStringList authors;
- foreach(const QVariant& author, resultMap_.value(QStringLiteral("authors")).toList()) {
+ foreach(const QVariant& author, resultMap_.value(QLatin1String("authors")).toList()) {
authors += author.toString();
}
entry_->setField(QStringLiteral("author"), authors.join(FieldFormat::delimiterString()));
@@ -278,7 +278,7 @@ void ISBNdbFetcher::populateEntry(Data::EntryPtr entry_, const QVariantMap& resu
entry_->setField(QStringLiteral("binding"), i18n(binding.toUtf8().constData()));
}
QStringList subjects;
- foreach(const QVariant& subject, resultMap_.value(QStringLiteral("subjects")).toList()) {
+ foreach(const QVariant& subject, resultMap_.value(QLatin1String("subjects")).toList()) {
subjects += subject.toString();
}
entry_->setField(QStringLiteral("genre"), subjects.join(FieldFormat::delimiterString()));
diff --git a/src/fetch/kinofetcher.cpp b/src/fetch/kinofetcher.cpp
index 801ae7e..a82e516 100644
--- a/src/fetch/kinofetcher.cpp
+++ b/src/fetch/kinofetcher.cpp
@@ -230,7 +230,7 @@ void KinoFetcher::parseEntry(Data::EntryPtr entry, const QString& str_) {
entry->setField(QStringLiteral("director"), mapValue(objectMap, "director", "name"));
QStringList actors;
- foreach(QVariant v, objectMap.value(QStringLiteral("actor")).toList()) {
+ foreach(QVariant v, objectMap.value(QLatin1String("actor")).toList()) {
const QString actor = mapValue(v.toMap(), "name");
if(!actor.isEmpty()) actors += actor;
}
diff --git a/src/fetch/moviemeterfetcher.cpp b/src/fetch/moviemeterfetcher.cpp
index a678040..d132091 100644
--- a/src/fetch/moviemeterfetcher.cpp
+++ b/src/fetch/moviemeterfetcher.cpp
@@ -260,7 +260,7 @@ void MovieMeterFetcher::populateEntry(Data::EntryPtr entry_, const QVariantMap&
entry_->setField(QStringLiteral("nationality"), mapValue(resultMap_, "countries"));
QStringList castList;
- foreach(const QVariant& actor, resultMap_.value(QStringLiteral("actors")).toList()) {
+ foreach(const QVariant& actor, resultMap_.value(QLatin1String("actors")).toList()) {
castList << mapValue(actor.toMap(), "name");
}
entry_->setField(QStringLiteral("cast"), castList.join(FieldFormat::rowDelimiterString()));
diff --git a/src/fetch/openlibraryfetcher.cpp b/src/fetch/openlibraryfetcher.cpp
index dd94cc0..6a8a290 100644
--- a/src/fetch/openlibraryfetcher.cpp
+++ b/src/fetch/openlibraryfetcher.cpp
@@ -298,7 +298,7 @@ void OpenLibraryFetcher::slotComplete(KJob* job_) {
}
QStringList authors;
- foreach(const QVariant& authorMap, resultMap.value(QStringLiteral("authors")).toList()) {
+ foreach(const QVariant& authorMap, resultMap.value(QLatin1String("authors")).toList()) {
const QString key = mapValue(authorMap.toMap(), "key");
if(!key.isEmpty()) {
QUrl authorUrl(QString::fromLatin1(OPENLIBRARY_QUERY_URL));
@@ -323,7 +323,7 @@ void OpenLibraryFetcher::slotComplete(KJob* job_) {
}
QStringList langs;
- foreach(const QVariant& langMap, resultMap.value(QStringLiteral("languages")).toList()) {
+ foreach(const QVariant& langMap, resultMap.value(QLatin1String("languages")).toList()) {
const QString key = mapValue(langMap.toMap(), "key");
if(!key.isEmpty()) {
QUrl langUrl(QString::fromLatin1(OPENLIBRARY_QUERY_URL));
diff --git a/src/fetch/themoviedbfetcher.cpp b/src/fetch/themoviedbfetcher.cpp
index c8a676b..236b385 100644
--- a/src/fetch/themoviedbfetcher.cpp
+++ b/src/fetch/themoviedbfetcher.cpp
@@ -355,8 +355,8 @@ void TheMovieDBFetcher::populateEntry(Data::EntryPtr entry_, const QVariantMap&
}
if(entry_->collection()->hasField(QStringLiteral("alttitle"))) {
QStringList atitles;
- foreach(const QVariant& atitle, resultMap_.value(QStringLiteral("alternative_titles")).toMap()
- .value(QStringLiteral("titles")).toList()) {
+ foreach(const QVariant& atitle, resultMap_.value(QLatin1String("alternative_titles")).toMap()
+ .value(QLatin1String("titles")).toList()) {
atitles << mapValue(atitle.toMap(), "title");
}
entry_->setField(QStringLiteral("alttitle"), atitles.join(FieldFormat::rowDelimiterString()));
@@ -372,13 +372,13 @@ void TheMovieDBFetcher::populateEntry(Data::EntryPtr entry_, const QVariantMap&
entry_->setField(QStringLiteral("cast"), actors.join(FieldFormat::rowDelimiterString()));
QStringList studios;
- foreach(const QVariant& studio, resultMap_.value(QStringLiteral("production_companies")).toList()) {
+ foreach(const QVariant& studio, resultMap_.value(QLatin1String("production_companies")).toList()) {
studios << mapValue(studio.toMap(), "name");
}
entry_->setField(QStringLiteral("studio"), studios.join(FieldFormat::delimiterString()));
QStringList countries;
- foreach(const QVariant& country, resultMap_.value(QStringLiteral("production_countries")).toList()) {
+ foreach(const QVariant& country, resultMap_.value(QLatin1String("production_countries")).toList()) {
QString name = mapValue(country.toMap(), "name");
if(name == QLatin1String("United States of America")) {
name = QStringLiteral("USA");
@@ -388,7 +388,7 @@ void TheMovieDBFetcher::populateEntry(Data::EntryPtr entry_, const QVariantMap&
entry_->setField(QStringLiteral("nationality"), countries.join(FieldFormat::delimiterString()));
QStringList genres;
- foreach(const QVariant& genre, resultMap_.value(QStringLiteral("genres")).toList()) {
+ foreach(const QVariant& genre, resultMap_.value(QLatin1String("genres")).toList()) {
genres << mapValue(genre.toMap(), "name");
}
entry_->setField(QStringLiteral("genre"), genres.join(FieldFormat::delimiterString()));
--
cgit v0.11.2
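The substitution in the patch above is mechanical; as a generic illustration of the two spellings involved (not Tellico code), a small standalone Qt program:

#include <QDebug>
#include <QString>
#include <QVariant>

int main()
{
    QVariantMap resultMap;
    resultMap.insert(QStringLiteral("genre"), QStringLiteral("Jazz"));

    // QStringLiteral builds a full QString at compile time; QLatin1String is a
    // lightweight wrapper around a latin-1 literal that converts implicitly to
    // QString for map lookups, which is the spelling the patch reverts to for
    // Qt 5.6 compatibility.
    const QVariant viaLiteral = resultMap.value(QStringLiteral("genre"));
    const QVariant viaLatin1  = resultMap.value(QLatin1String("genre"));

    qDebug() << viaLiteral.toString() << viaLatin1.toString();  // "Jazz" "Jazz"
    return 0;
}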
++++++ tellico-3.1.1.tar.xz -> tellico-3.1.2.tar.xz ++++++
++++ 95844 lines of diff (skipped)
Hello community,
here is the log from the commit of package php7-ice for openSUSE:Factory checked in at 2018-03-29 11:57:44
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/php7-ice (Old)
and /work/SRC/openSUSE:Factory/.php7-ice.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "php7-ice"
Thu Mar 29 11:57:44 2018 rev:3 rq:592105 version:1.3.0
Changes:
--------
--- /work/SRC/openSUSE:Factory/php7-ice/php7-ice.changes 2018-03-26 13:17:01.171238111 +0200
+++ /work/SRC/openSUSE:Factory/.php7-ice.new/php7-ice.changes 2018-03-29 11:57:46.069701902 +0200
@@ -1,0 +2,6 @@
+Wed Mar 28 15:49:14 UTC 2018 - jengelh(a)inai.de
+
+- Replace %__-type macro indirections.
+- Ensure neutrality of descriptions.
+
+-------------------------------------------------------------------
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ php7-ice.spec ++++++
--- /var/tmp/diff_new_pack.0YkDn0/_old 2018-03-29 11:57:46.669680246 +0200
+++ /var/tmp/diff_new_pack.0YkDn0/_new 2018-03-29 11:57:46.669680246 +0200
@@ -1,7 +1,7 @@
#
# spec file for package php7-ice
#
-# Copyright (c) 2015 SUSE LINUX Products GmbH, Nuernberg, Germany.
+# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -24,28 +24,28 @@
Name: %{_php}-%{_name}
Version: 1.3.0
Release: 0
-Summary: Simple and fast PHP framework delivered as C-extension
+Summary: PHP framework delivered as C extension
License: BSD-3-Clause
Group: Development/Libraries/PHP
Url: http://www.iceframework.org/
Source0: https://github.com/ice/framework/archive/%{version}.tar.gz#/%{_name}-%{vers…
Patch1: ice-notime.diff
-BuildRoot: %_tmppath/%name-%version-build
-BuildRequires: gcc
-BuildRequires: re2c
+BuildRoot: %{_tmppath}/%{name}-%{version}-build
+BuildRequires: %{_php}-ctype
BuildRequires: %{_php}-devel
BuildRequires: %{_php}-json
-BuildRequires: %{_php}-ctype
-BuildRequires: %{_php}-pdo
+BuildRequires: %{_php}-mbstring
BuildRequires: %{_php}-mysql
BuildRequires: %{_php}-openssl
+BuildRequires: %{_php}-pdo
BuildRequires: %{_php}-tokenizer
-BuildRequires: %{_php}-mbstring
+BuildRequires: gcc
+BuildRequires: re2c
%description
-Simple and fast PHP framework delivered as C-extension. You don't need
+ICE is a PHP framework delivered as a C extension. You don't need to
learn or use the C language, since the functionality is exposed as
-PHP classes ready for you to use.
+PHP classes.
%prep
%setup -qn framework-%version
@@ -63,8 +63,8 @@
make %{?_smp_mflags}
%install
-%{__mkdir_p} %{buildroot}%{_libdir}/%{_php}/extensions
-%{__mkdir_p} %{buildroot}%{_sysconfdir}/%{_php}/conf.d
+mkdir -p %{buildroot}%{_libdir}/%{_php}/extensions
+mkdir -p %{buildroot}%{_sysconfdir}/%{_php}/conf.d
pushd build/%{_php}
make install INSTALL_ROOT=%{buildroot}
Hello community,
here is the log from the commit of package yakuake for openSUSE:Factory checked in at 2018-03-29 11:57:42
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/yakuake (Old)
and /work/SRC/openSUSE:Factory/.yakuake.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "yakuake"
Thu Mar 29 11:57:42 2018 rev:45 rq:592104 version:3.0.5
Changes:
--------
--- /work/SRC/openSUSE:Factory/yakuake/yakuake.changes 2017-04-12 18:25:36.390285989 +0200
+++ /work/SRC/openSUSE:Factory/.yakuake.new/yakuake.changes 2018-03-29 11:57:44.169770480 +0200
@@ -1,0 +2,21 @@
+Wed Mar 28 11:41:47 UTC 2018 - wbauer(a)tmo.at
+
+- Update to 3.0.5
+ * Improved Wayland support.
+ * Yakuake's window title now always matches what's shown in its
+ title bar.
+ * Fixed button icons in the 'Appearance' settings page.
+  * Yakuake now installs a D-Bus service file. This allows calling
+    its D-Bus methods even when Yakuake is not running yet; it will
+    then be started implicitly.
+ * Fixed a crash due to a missing bounds check in the
+ 'sessionAtTab' D-Bus method.
+ * Fixed a type marshalling issue with the 'addSession' D-Bus
+ method that caused a noisy warning when using it via qdbus.
+ * Yakuake now depends on KDE Frameworks 5.29 or higher.
+- Drop 0001-Revert-Removed-usage-to-deprecated-interface.patch,
+ Leap 42.2 is no longer supported
+- Use cmake() syntax for BuildRequires
+- Mark license files as %license
+
+-------------------------------------------------------------------
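Calling the service from Qt then looks roughly like the sketch below; the object path, interface name and the int return type of addSession are assumptions based on the changelog, not verified against Yakuake's installed interface:

#include <QCoreApplication>
#include <QDBusInterface>
#include <QDBusReply>
#include <QDebug>

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);

    // Contacting the well-known name is enough: with the installed D-Bus
    // service file, the bus starts Yakuake on demand if it is not running.
    QDBusInterface sessions(QStringLiteral("org.kde.yakuake"),
                            QStringLiteral("/yakuake/sessions"),   // assumed object path
                            QStringLiteral("org.kde.yakuake"));    // assumed interface name
    QDBusReply<int> sessionId = sessions.call(QStringLiteral("addSession"));
    if (sessionId.isValid())
        qDebug() << "created session" << sessionId.value();
    else
        qDebug() << "D-Bus call failed:" << sessionId.error().message();
    return 0;
}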
Old:
----
0001-Revert-Removed-usage-to-deprecated-interface.patch
yakuake-3.0.4.tar.xz
New:
----
yakuake-3.0.5.tar.xz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ yakuake.spec ++++++
--- /var/tmp/diff_new_pack.cYeIes/_old 2018-03-29 11:57:44.737749979 +0200
+++ /var/tmp/diff_new_pack.cYeIes/_new 2018-03-29 11:57:44.741749834 +0200
@@ -1,7 +1,7 @@
#
# spec file for package yakuake
#
-# Copyright (c) 2017 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -17,37 +17,35 @@
Name: yakuake
-Version: 3.0.4
+Version: 3.0.5
Release: 0
Summary: Terminal for KDE
License: GPL-2.0+
Group: System/GUI/KDE
Url: https://yakuake.kde.org
Source: http://download.kde.org/stable/yakuake/%{version}/src/%{name}-%{version}.ta…
-# PATCH-FIX-OPENSUSE 0001-Revert-Removed-usage-to-deprecated-interface.patch
-Patch: 0001-Revert-Removed-usage-to-deprecated-interface.patch
BuildRequires: fdupes
-BuildRequires: karchive-devel >= 5.15.0
-BuildRequires: kconfig-devel >= 5.15.0
-BuildRequires: kcoreaddons-devel >= 5.15.0
-BuildRequires: kcrash-devel >= 5.15.0
-BuildRequires: kdbusaddons-devel >= 5.15.0
BuildRequires: kf5-filesystem
-BuildRequires: kglobalaccel-devel >= 5.15.0
-BuildRequires: ki18n-devel >= 5.15.0
-BuildRequires: kiconthemes-devel >= 5.15.0
-BuildRequires: kio-devel >= 5.15.0
-BuildRequires: knewstuff-devel >= 5.15.0
-BuildRequires: knotifications-devel >= 5.15.0
-BuildRequires: knotifyconfig-devel >= 5.15.0
-BuildRequires: kparts-devel >= 5.15.0
-BuildRequires: kwayland-devel
-BuildRequires: kwidgetsaddons-devel >= 5.15.0
-BuildRequires: kwindowsystem-devel >= 5.15.0
BuildRequires: update-desktop-files
-BuildRequires: pkgconfig(Qt5Core) >= 5.2.0
-BuildRequires: pkgconfig(Qt5Widgets) >= 5.2.0
-BuildRequires: pkgconfig(Qt5X11Extras) >= 5.2.0
+BuildRequires: cmake(KF5Archive)
+BuildRequires: cmake(KF5Config)
+BuildRequires: cmake(KF5CoreAddons)
+BuildRequires: cmake(KF5Crash)
+BuildRequires: cmake(KF5DBusAddons)
+BuildRequires: cmake(KF5GlobalAccel)
+BuildRequires: cmake(KF5I18n)
+BuildRequires: cmake(KF5IconThemes)
+BuildRequires: cmake(KF5KIO)
+BuildRequires: cmake(KF5NewStuff)
+BuildRequires: cmake(KF5Notifications)
+BuildRequires: cmake(KF5NotifyConfig)
+BuildRequires: cmake(KF5Parts)
+BuildRequires: cmake(KF5Wayland)
+BuildRequires: cmake(KF5WidgetsAddons)
+BuildRequires: cmake(KF5WindowSystem)
+BuildRequires: cmake(Qt5Core)
+BuildRequires: cmake(Qt5Widgets)
+BuildRequires: cmake(Qt5X11Extras)
Requires: konsole-part > 15.12
Recommends: konsole > 15.12
Requires(post): desktop-file-utils
@@ -62,9 +60,6 @@
%prep
%setup -q
-%if 0%{?suse_version} == 1315 && 0%{?sle_version} <= 120200
-%patch -p1
-%endif
%build
%cmake_kf5 -d build
@@ -86,7 +81,8 @@
%files
%defattr(-,root,root)
-%doc README AUTHORS ChangeLog COPYING COPYING.DOC NEWS
+%license COPYING COPYING.DOC
+%doc README AUTHORS ChangeLog NEWS
%{_kf5_bindir}/yakuake
%config %{_kf5_configdir}/yakuake.knsrc
%{_kf5_applicationsdir}/org.kde.yakuake.desktop
@@ -96,6 +92,7 @@
%{_kf5_sharedir}/icons/hicolor/*/*/*.*
%{_kf5_sharedir}/yakuake/
%{_kf5_appstreamdir}/
+%{_kf5_sharedir}/dbus-1/services/org.kde.yakuake.service
%files lang -f %{name}.lang
%defattr(-,root,root)
++++++ yakuake-3.0.4.tar.xz -> yakuake-3.0.5.tar.xz ++++++
++++ 39422 lines of diff (skipped)
Hello community,
here is the log from the commit of package grpc for openSUSE:Factory checked in at 2018-03-29 11:57:37
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/grpc (Old)
and /work/SRC/openSUSE:Factory/.grpc.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "grpc"
Thu Mar 29 11:57:37 2018 rev:7 rq:592077 version:1.10.0
Changes:
--------
--- /work/SRC/openSUSE:Factory/grpc/grpc.changes 2018-02-06 16:49:35.998816713 +0100
+++ /work/SRC/openSUSE:Factory/.grpc.new/grpc.changes 2018-03-29 11:57:37.965994411 +0200
@@ -1,0 +2,10 @@
+Wed Mar 28 14:30:53 UTC 2018 - jengelh(a)inai.de
+
+- Update to new upstream release 1.10.0
+ * Several features of core have been removed from the surface
+ or GPR API as they were not used in any wrapped language
+ (except C++) or in external applications.
+ * Changed resolver and LB policy APIs to C++
+ * C++ headers are moved from <grpc++/> to <grpcpp/>.
+
+-------------------------------------------------------------------
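For most C++ consumers the visible part of this release is the header move; a minimal sketch built against the new layout (grpc::Version() is assumed to be exported from the umbrella header, as in recent releases):

// Before 1.10.0 this lived at <grpc++/grpc++.h>; the class names are unchanged.
#include <grpcpp/grpcpp.h>
#include <iostream>

int main()
{
    // Printing the library version is a convenient smoke test that the new
    // <grpcpp/> include prefix resolves at build time.
    std::cout << "built against gRPC " << grpc::Version() << std::endl;
    return 0;
}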
Old:
----
v1.9.0.tar.gz
New:
----
v1.10.0.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ grpc.spec ++++++
--- /var/tmp/diff_new_pack.longw0/_old 2018-03-29 11:57:40.069918467 +0200
+++ /var/tmp/diff_new_pack.longw0/_new 2018-03-29 11:57:40.069918467 +0200
@@ -16,9 +16,9 @@
#
-%define lname libgrpc-1_9_0
+%define lname libgrpc6
Name: grpc
-Version: 1.9.0
+Version: 1.10.0
Release: 0
Summary: HTTP/2-based Remote Procedure Call implementation
License: Apache-2.0
@@ -88,20 +88,20 @@
mkdir -p "$b/%_libdir/pkgconfig"
cp x/opt/pkgconfig/*.pc "$b/%_libdir/pkgconfig/"
%cmake_install
-for i in "$b/%_libdir"/libg*-%version.so; do
- ln -s "${i##*/}" "${i%%-%version.so}.so"
-done
+#for i in "$b/%_libdir"/libg*-%version.so; do
+# ln -s "${i##*/}" "${i%%-%version.so}.so"
+#done
%post -n %lname -p /sbin/ldconfig
%postun -n %lname -p /sbin/ldconfig
%files -n %lname
%defattr(-,root,root)
-%_libdir/libg*-%version.so
+%_libdir/libg*.so.6
%files devel
%defattr(-,root,root)
-%doc LICENSE
+%license LICENSE
%_bindir/*
%_includedir/*
%_libdir/pkgconfig/*.pc
++++++ grpc-build.diff ++++++
--- /var/tmp/diff_new_pack.longw0/_old 2018-03-29 11:57:40.097917456 +0200
+++ /var/tmp/diff_new_pack.longw0/_new 2018-03-29 11:57:40.097917456 +0200
@@ -7,14 +7,14 @@
cmake/cares.cmake | 7 +------
1 file changed, 1 insertion(+), 6 deletions(-)
-Index: grpc-1.9.0/cmake/cares.cmake
+Index: grpc-1.10.0/cmake/cares.cmake
===================================================================
---- grpc-1.9.0.orig/cmake/cares.cmake
-+++ grpc-1.9.0/cmake/cares.cmake
-@@ -30,10 +30,5 @@ if("${gRPC_CARES_PROVIDER}" STREQUAL "mo
- set(gRPC_INSTALL FALSE)
+--- grpc-1.10.0.orig/cmake/cares.cmake
++++ grpc-1.10.0/cmake/cares.cmake
+@@ -31,10 +31,5 @@ if("${gRPC_CARES_PROVIDER}" STREQUAL "mo
endif()
elseif("${gRPC_CARES_PROVIDER}" STREQUAL "package")
+ # Use "CONFIG" as there is no built-in cmake module for c-ares.
- find_package(c-ares REQUIRED CONFIG)
- if(TARGET c-ares::cares)
- set(_gRPC_CARES_LIBRARIES c-ares::cares)
++++++ grpc-versioning.diff ++++++
--- /var/tmp/diff_new_pack.longw0/_old 2018-03-29 11:57:40.109917023 +0200
+++ /var/tmp/diff_new_pack.longw0/_new 2018-03-29 11:57:40.109917023 +0200
@@ -4,98 +4,107 @@
Unversioned libraries are an ABI nightmare, so work around that in short order.
---
- CMakeLists.txt | 11 +++++++++++
- 1 file changed, 11 insertions(+)
+ CMakeLists.txt | 13 +++++++++++++
+ 1 file changed, 13 insertions(+)
-Index: grpc-1.9.0/CMakeLists.txt
+Index: grpc-1.10.0/CMakeLists.txt
===================================================================
---- grpc-1.9.0.orig/CMakeLists.txt
-+++ grpc-1.9.0/CMakeLists.txt
-@@ -787,6 +787,7 @@ target_link_libraries(gpr_test_util
+--- grpc-1.10.0.orig/CMakeLists.txt
++++ grpc-1.10.0/CMakeLists.txt
+@@ -29,6 +29,8 @@ set(PACKAGE_STRING "${PACKAGE_NAME} $
+ set(PACKAGE_TARNAME "${PACKAGE_NAME}-${PACKAGE_VERSION}")
+ set(PACKAGE_BUGREPORT "https://github.com/grpc/grpc/issues/")
+ project(${PACKAGE_NAME} C CXX)
++# from Makefile:
++set(LIBVER 6)
+
+ set(gRPC_INSTALL_BINDIR "bin" CACHE STRING "Installation directory for executables")
+ set(gRPC_INSTALL_LIBDIR "lib" CACHE STRING "Installation directory for libraries")
+@@ -775,6 +777,7 @@ target_link_libraries(gpr_test_util
${_gRPC_ALLTARGETS_LIBRARIES}
gpr
)
-+set_target_properties(gpr PROPERTIES OUTPUT_NAME "gpr-${PACKAGE_VERSION}")
++set_target_properties(gpr PROPERTIES VERSION ${LIBVER})
endif (gRPC_BUILD_TESTS)
-@@ -1044,6 +1045,7 @@ add_library(grpc
+@@ -1032,6 +1035,7 @@ add_library(grpc
src/core/ext/filters/workarounds/workaround_utils.cc
src/core/plugin_registry/grpc_plugin_registry.cc
)
-+set_target_properties(grpc PROPERTIES OUTPUT_NAME "grpc-${PACKAGE_VERSION}")
++set_target_properties(grpc PROPERTIES VERSION ${LIBVER})
if(WIN32 AND MSVC)
set_target_properties(grpc PROPERTIES COMPILE_PDB_NAME "grpc"
-@@ -1355,6 +1357,7 @@ add_library(grpc_cronet
+@@ -1343,6 +1347,7 @@ add_library(grpc_cronet
src/core/ext/filters/load_reporting/server_load_reporting_plugin.cc
src/core/plugin_registry/grpc_cronet_plugin_registry.cc
)
-+set_target_properties(grpc_cronet PROPERTIES OUTPUT_NAME "grpc_cronet-${PACKAGE_VERSION}")
++set_target_properties(grpc_cronet PROPERTIES VERSION ${LIBVER})
if(WIN32 AND MSVC)
set_target_properties(grpc_cronet PROPERTIES COMPILE_PDB_NAME "grpc_cronet"
-@@ -2192,6 +2195,7 @@ add_library(grpc_unsecure
+@@ -2220,6 +2225,7 @@ add_library(grpc_unsecure
src/core/ext/filters/workarounds/workaround_utils.cc
src/core/plugin_registry/grpc_unsecure_plugin_registry.cc
)
-+set_target_properties(grpc_unsecure PROPERTIES OUTPUT_NAME "grpc_unsecure-${PACKAGE_VERSION}")
++set_target_properties(grpc_unsecure PROPERTIES VERSION ${LIBVER})
if(WIN32 AND MSVC)
set_target_properties(grpc_unsecure PROPERTIES COMPILE_PDB_NAME "grpc_unsecure"
-@@ -2400,6 +2404,7 @@ add_library(grpc++
+@@ -2429,6 +2435,7 @@ add_library(grpc++
src/cpp/util/time_cc.cc
src/cpp/codegen/codegen_init.cc
)
-+set_target_properties(grpc++ PROPERTIES OUTPUT_NAME "grpc++-${PACKAGE_VERSION}")
++set_target_properties(grpc++ PROPERTIES VERSION ${LIBVER})
if(WIN32 AND MSVC)
set_target_properties(grpc++ PROPERTIES COMPILE_PDB_NAME "grpc++"
-@@ -2880,6 +2885,7 @@ add_library(grpc++_cronet
+@@ -2969,6 +2976,7 @@ add_library(grpc++_cronet
third_party/nanopb/pb_decode.c
third_party/nanopb/pb_encode.c
)
-+set_target_properties(grpc++_cronet PROPERTIES OUTPUT_NAME "grpc++_cronet-${PACKAGE_VERSION}")
++set_target_properties(grpc++_cronet PROPERTIES VERSION ${LIBVER})
if(WIN32 AND MSVC)
set_target_properties(grpc++_cronet PROPERTIES COMPILE_PDB_NAME "grpc++_cronet"
-@@ -3076,6 +3082,7 @@ add_library(grpc++_error_details
+@@ -3231,6 +3239,7 @@ add_library(grpc++_error_details
${_gRPC_PROTO_GENS_DIR}/src/proto/grpc/status/status.grpc.pb.h
src/cpp/util/error_details.cc
)
-+set_target_properties(grpc++_error_details PROPERTIES OUTPUT_NAME "grpc++_error_details-${PACKAGE_VERSION}")
++set_target_properties(grpc++_error_details PROPERTIES VERSION ${LIBVER})
if(WIN32 AND MSVC)
set_target_properties(grpc++_error_details PROPERTIES COMPILE_PDB_NAME "grpc++_error_details"
-@@ -3198,6 +3205,7 @@ add_library(grpc++_reflection
+@@ -3355,6 +3364,7 @@ add_library(grpc++_reflection
${_gRPC_PROTO_GENS_DIR}/src/proto/grpc/reflection/v1alpha/reflection.pb.h
${_gRPC_PROTO_GENS_DIR}/src/proto/grpc/reflection/v1alpha/reflection.grpc.pb.h
)
-+set_target_properties(grpc++_reflection PROPERTIES OUTPUT_NAME "grpc++_reflection-${PACKAGE_VERSION}")
++set_target_properties(grpc++_reflection PROPERTIES VERSION ${LIBVER})
if(WIN32 AND MSVC)
set_target_properties(grpc++_reflection PROPERTIES COMPILE_PDB_NAME "grpc++_reflection"
-@@ -3321,6 +3329,7 @@ add_library(grpc++_test_util
+@@ -3479,6 +3489,7 @@ add_library(grpc++_test_util
test/cpp/util/test_credentials_provider.cc
src/cpp/codegen/codegen_init.cc
)
-+set_target_properties(grpc++_unsecure PROPERTIES OUTPUT_NAME "grpc++_unsecure-${PACKAGE_VERSION}")
++set_target_properties(grpc++_unsecure PROPERTIES VERSION ${LIBVER})
if(WIN32 AND MSVC)
set_target_properties(grpc++_test_util PROPERTIES COMPILE_PDB_NAME "grpc++_test_util"
-@@ -3916,6 +3925,7 @@ add_library(grpc_plugin_support
+@@ -4206,6 +4217,7 @@ add_library(grpc_plugin_support
src/compiler/python_generator.cc
src/compiler/ruby_generator.cc
)
-+set_target_properties(grpc_plugin_support PROPERTIES OUTPUT_NAME "grpc_plugin_support-${PACKAGE_VERSION}")
++set_target_properties(grpc_plugin_support PROPERTIES VERSION ${LIBVER})
if(WIN32 AND MSVC)
set_target_properties(grpc_plugin_support PROPERTIES COMPILE_PDB_NAME "grpc_plugin_support"
-@@ -4409,6 +4419,7 @@ endif (gRPC_BUILD_TESTS)
+@@ -4700,6 +4712,7 @@ endif (gRPC_BUILD_TESTS)
add_library(grpc_csharp_ext SHARED
src/csharp/ext/grpc_csharp_ext.c
)
-+set_target_properties(grpc_csharp_ext PROPERTIES OUTPUT_NAME "grpc_csharp_ext-${PACKAGE_VERSION}")
++set_target_properties(grpc_csharp_ext PROPERTIES VERSION ${LIBVER})
if(WIN32 AND MSVC)
set_target_properties(grpc_csharp_ext PROPERTIES COMPILE_PDB_NAME "grpc_csharp_ext"
++++++ v1.9.0.tar.gz -> v1.10.0.tar.gz ++++++
/work/SRC/openSUSE:Factory/grpc/v1.9.0.tar.gz /work/SRC/openSUSE:Factory/.grpc.new/v1.10.0.tar.gz differ: char 13, line 1
Hello community,
here is the log from the commit of package apache2-mod_nss for openSUSE:Factory checked in at 2018-03-29 11:57:32
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/apache2-mod_nss (Old)
and /work/SRC/openSUSE:Factory/.apache2-mod_nss.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "apache2-mod_nss"
Thu Mar 29 11:57:32 2018 rev:30 rq:592034 version:1.0.17
Changes:
--------
--- /work/SRC/openSUSE:Factory/apache2-mod_nss/apache2-mod_nss.changes 2018-03-20 22:00:50.444643599 +0100
+++ /work/SRC/openSUSE:Factory/.apache2-mod_nss.new/apache2-mod_nss.changes 2018-03-29 11:57:36.982029928 +0200
@@ -1,0 +2,10 @@
+Tue Mar 27 21:16:15 UTC 2018 - vcizek(a)suse.com
+
+- Update to 1.0.17
+ * Add TLSv1.3 support
+ * Update documentation for TLS 1.3
+ * Add TLS 1.3 support to the cipher tests
+ * PEP-8 fixups
+ * Change the default certificate database format to SQLite.
+
+-------------------------------------------------------------------
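Under the hood the TLS 1.3 plumbing in this update comes down to NSS's standard version-range API; a minimal standalone sketch of that call (include paths and socket setup are assumptions, not mod_nss code):

#include <prio.h>            // NSPR's PRFileDesc; include path comes from nspr-config
#include <nss3/ssl.h>
#include <nss3/sslproto.h>

// Clamp an already-imported SSL socket to TLS 1.0 .. TLS 1.3, roughly what the
// module does when NSSProtocol is unset and NSS >= 3.28 provides TLS 1.3.
SECStatus clamp_protocols(PRFileDesc *ssl_fd)
{
    SSLVersionRange range;
    range.min = SSL_LIBRARY_VERSION_TLS_1_0;
    range.max = SSL_LIBRARY_VERSION_TLS_1_3;
    return SSL_VersionRangeSet(ssl_fd, &range);
}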
Old:
----
mod_nss-1.0.16.tar.gz
New:
----
mod_nss-1.0.17.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ apache2-mod_nss.spec ++++++
--- /var/tmp/diff_new_pack.aJMz8n/_old 2018-03-29 11:57:37.658005527 +0200
+++ /var/tmp/diff_new_pack.aJMz8n/_new 2018-03-29 11:57:37.658005527 +0200
@@ -25,7 +25,7 @@
%define apache_mmn %(MMN=$(%{apxs} -q LIBEXECDIR)_MMN; test -x $MMN && $MMN)
%define apache_sysconf_nssdir %{apache_sysconfdir}/mod_nss.d
Name: apache2-mod_nss
-Version: 1.0.16
+Version: 1.0.17
Release: 0
Summary: SSL/TLS module for the Apache HTTP server
License: Apache-2.0
++++++ mod_nss-1.0.16.tar.gz -> mod_nss-1.0.17.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/ChangeLog new/mod_nss-1.0.17/ChangeLog
--- old/mod_nss-1.0.16/ChangeLog 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/ChangeLog 2018-03-27 22:40:30.000000000 +0200
@@ -1,3 +1,15 @@
+2018-03-27 Rob Crittenden <rcritten(a)redhat.com>
+ * PEP-8 fixups
+ * Add TLS 1.3 support to the cipher tests
+ * Update documentation for TLSv1.3
+ * Become 1.0.17
+
+2018-03-05 Vitezslav Cizek <vcizek(a)suse.com>
+ * Change the default certificate database format to SQLite.
+
+2018-02-16 Christian Heimes <cheimes(a)redhat.com>
+ * Add TLSv1.3 support
+
2018-01-19 Rob Crittenden <rcritten(a)redhat.com>
* Fix some merge issues in the ciphers (that'll teach me to test
BEFORE making the tag)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/Makefile.am new/mod_nss-1.0.17/Makefile.am
--- old/mod_nss-1.0.16/Makefile.am 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/Makefile.am 2018-03-27 22:40:30.000000000 +0200
@@ -22,7 +22,7 @@
## Set the includes and libraries needed
AM_CPPFLAGS = -I@apache_inc@ @nspr_inc@ @nss_inc@ @apr_inc@
-LIBS = @nspr_lib@ @nss_lib@ -lssl3 -lsmime3 -lnss3 -lplc4 -lplds4 -lnspr4
+LIBS = @nspr_lib@ @nss_lib@ -lssl3 -lsmime3 -lnss3 -lplc4 -lplds4 -lnspr4 -lnssutil3
EXTRA_CPPFLAGS=@extra_cppflags@
install-libLTLIBRARIES: libmodnss.la
@@ -102,8 +102,8 @@
rm -rf work; \
nosetests -v test_cipher.py; \
if [ `id -u` != 0 ]; then \
- ./setup.sh -s 1; \
- nosetests -v test.py; \
+ ./setup.sh -s 1 dbm:; \
+ DBPREFIX=dbm: nosetests -v test.py; \
sleep 5; \
rm -rf work; \
./setup.sh -s 1 sql:; \
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/configure.ac new/mod_nss-1.0.17/configure.ac
--- old/mod_nss-1.0.16/configure.ac 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/configure.ac 2018-03-27 22:40:30.000000000 +0200
@@ -1,5 +1,5 @@
# Required initializer
-AC_INIT([mod_nss],[1.0.16])
+AC_INIT([mod_nss],[1.0.17])
m4_include([acinclude.m4])
@@ -249,34 +249,53 @@
AX_CHECK_DEFINE(nss3/sslproto.h, TLS_RSA_WITH_AES_128_GCM_SHA256, gcm=$enableval, gcm=no)
if test "$gcm" = yes; then
extra_cppflags="$extra_cppflags -DENABLE_GCM"
- echo "ENABLE_GCM=1" > test/variable.py
+ echo "ENABLE_GCM = 1" > test/variable.py
else
- echo "ENABLE_GCM=0" > test/variable.py
+ echo "ENABLE_GCM = 0" > test/variable.py
fi
AX_CHECK_DEFINE(nss3/sslproto.h, TLS_RSA_WITH_AES_256_GCM_SHA384, sha384=$enableval, sha384=no)
if test "$sha384" = yes; then
extra_cppflags="$extra_cppflags -DENABLE_SHA384"
- echo "ENABLE_SHA384=1" >> test/variable.py
+ echo "ENABLE_SHA384 = 1" >> test/variable.py
else
- echo "ENABLE_SHA384=0" >> test/variable.py
+ echo "ENABLE_SHA384 = 0" >> test/variable.py
fi
AX_CHECK_DEFINE(nss3/sslproto.h, TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256, chacha20=yes, chacha20=no)
if test "$chacha20" = yes; then
extra_cppflags="$extra_cppflags -DENABLE_CHACHA20"
- echo "ENABLE_CHACHA20=1" >> test/variable.py
+ echo "ENABLE_CHACHA20 = 1" >> test/variable.py
else
- echo "ENABLE_CHACHA20=0" >> test/variable.py
+ echo "ENABLE_CHACHA20 = 0" >> test/variable.py
fi
CPPFLAGS="$CPPFLAGS $nspr_inc"
AX_CHECK_DEFINE(nss3/ssl.h, SSL_ENABLE_SERVER_DHE, server_dhe=yes, server_dhe=no)
if test "$server_dhe" = yes; then
extra_cppflags="$extra_cppflags -DENABLE_SERVER_DHE"
- echo "ENABLE_SERVER_DHE=1" >> test/variable.py
+ echo "ENABLE_SERVER_DHE = 1" >> test/variable.py
else
- echo "ENABLE_SERVER_DHE=0" >> test/variable.py
+ echo "ENABLE_SERVER_DHE = 0" >> test/variable.py
+fi
+
+# TLS 1.3 is available since NSS 3.28
+have_tls13=no
+AC_MSG_CHECKING(for TLS 1.3 support)
+AC_RUN_IFELSE([AC_LANG_PROGRAM([
+#include <nss3/nss.h>
+],[
+#if (((NSS_VMAJOR == 3) && (NSS_VMINOR >= 28)) || (NSS_VMAJOR > 3))
+return 1;
+#endif
+])], [have_tls13=no], [have_tls13=yes])
+if test "$have_tls13" = yes; then
+ extra_cppflags="$extra_cppflags -DNSS_SUPPORTS_TLS_1_3"
+ echo "ENABLE_TLS13 = 1" >> test/variable.py
+ AC_MSG_RESULT(yes)
+else
+ echo "ENABLE_TLS13 = 0" >> test/variable.py
+ AC_MSG_RESULT(no)
fi
# Substitute values
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/docs/mod_nss.html new/mod_nss-1.0.17/docs/mod_nss.html
--- old/mod_nss-1.0.16/docs/mod_nss.html 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/docs/mod_nss.html 2018-03-27 22:40:30.000000000 +0200
@@ -50,8 +50,7 @@
Refer to the README file included with the distribution.<br>
<br>
To build you'll need <a href="http://www.mozilla.org/projects/nspr/">NSPR</a>
-4.4.1 or above and <a href="http://www.mozilla.org/projects/security/pki/nss/">NSS</a> 3.9.2
-or above.
+4.4.1 or above and <a href="http://www.mozilla.org/projects/security/pki/nss/">NSS</a> 3.9.2 or above. TLS 1.3 support was introduced in NSS 3.28.
It may work with earlier versions but these are recommended (or
tested). These can be retrieved from <a href="http://www.mozilla.org/">http://www.mozilla.org/</a>.
The --with-nspr and --with-nss options require that the package be
@@ -221,17 +220,17 @@
[ Lots of output removed ]
</pre>
You should now have the following files:<br>
-<pre>/etc/httpd/nss/cert8.db
-/etc/httpd/nss/key3.db
-/etc/httpd/nss/secmod.db
+<pre>/etc/httpd/nss/cert9.db
+/etc/httpd/nss/key4.db
+/etc/httpd/nss/pkcs11.txt
</pre>
These 3 files make up an NSS certificate database.<br>
<br>
-If you have a sql: prefix on the path, like sql:/etc/httpd/nss, then it
-will generate an SQLite NSS database consisting of the following files:<br>
-<pre>/etc/httpd/nss/cert9.db
-/etc/httpd/nss/key4.db
-/etc/httpd/nss/pkcs11.txt</pre>
+If you have a dbm: prefix on the path, like dbm:/etc/httpd/nss, then it
+will generate a Berkeley DB NSS database consisting of the following files:<br>
+<pre>/etc/httpd/nss/cert8.db
+/etc/httpd/nss/key3.db
+/etc/httpd/nss/secmod.db</pre>
<h1><a name="Startup"></a>Server Startup</h1>
@@ -383,10 +382,12 @@
<font size="+2">NSSCertificateDatabase</font><br>
<br>
Specifies the location of the NSS certificate database to be used. An
-NSS certificate database consists of 3 files: cert8.db, key3.db and
-secmod.db. cert8.db stores certificates and Certificate Revocation
-Lists (CRLs), key3.db stores keys and secmod.db stores information
+NSS certificate database consists of 3 files: cert9.db, key4.db and
+pkcs11.txt. cert9.db stores certificates and Certificate Revocation
+Lists (CRLs), key4.db stores keys and pkcs11.txt stores information
about available PKCS#11 modules.<br>
+Before version 3.35, NSS used a Berkeley DB format by default.
+It consisted of files cert8.db, key3.db and secmod.db.<br>
<br>
This directive specifies a path, not a filename. To use a sqlite
NSS database include the prefix sql: in the path.<br>
@@ -398,16 +399,16 @@
<br>
<big><big>NSSDBPrefix</big></big><br>
<br>
-Normally a certificate database consists of 3 files: cert8.db, key3.db
-and secmod.db. This directive allows you to add a named prefix to the
-filenames of cert8.db and key3.db so you can store multiple databases
+Normally a certificate database consists of 3 files: cert9.db, key4.db
+and pkcs11.txt. This directive allows you to add a named prefix to the
+filenames of cert9.db and key4.db so you can store multiple databases
in one directory. <br>
<br>
<span style="font-weight: bold;">Example</span><br>
<br>
<code>NSSDBPrefix my-prefix-</code><br>
<br>
-You would then need: my-prefix-cert8.db, my-prefix-key3.db and secmod.db<br>
+You would then need: my-prefix-cert9.db, my-prefix-key4.db and pkcs11.txt<br>
<br>
In order to work with files with a prefix using the NSS command-line
tools use the -P flag.<br>
@@ -497,10 +498,10 @@
<br>
Enables or disables FIPS 140 mode. This replaces the standard
internal PKCS#11 module with a FIPS-enabled one. It also forces the
-enabled protocols to TLSv1.2, TLSv1.1 and TLSv1.0 and disables all ciphers
-but the FIPS ones. You may still select which ciphers you would like
-limited to those that are FIPS-certified. Any non-FIPS that are
-included in the NSSCipherSuite entry are automatically disabled.
+enabled protocols to TLSv1.3, TLSv1.2, TLSv1.1 and TLSv1.0 and disables
+all ciphers but the FIPS ones. You may still select which ciphers you
+would like limited to those that are FIPS-certified. Any non-FIPS that
+are included in the NSSCipherSuite entry are automatically disabled.
The allowable ciphers are (with ecc-enabled set):<br>
<ul>
<li>rsa_3des_sha</li>
@@ -756,6 +757,27 @@
</td>
<td valign="top">TLSv1.2</td>
</tr>
+<tr>
+ <td valign="top">aes_128_gcm_sha_256<br>
+ </td>
+ <td valign="top">TLS_AES_128_GCM_SHA256<br>
+ </td>
+ <td valign="top">TLSv1.3</td>
+</tr>
+<tr>
+ <td valign="top">aes_256_gcm_sha_384<br>
+ </td>
+ <td valign="top">TLS_AES_256_GCM_SHA384<br>
+ </td>
+ <td valign="top">TLSv1.3</td>
+</tr>
+<tr>
+ <td valign="top">chacha20_poly1305_sha_256<br>
+ </td>
+ <td valign="top">TLS_CHACHA20_POLY1305_SHA256<br>
+ </td>
+ <td valign="top">TLSv1.3</td>
+</tr>
</tbody>
</table>
@@ -1017,15 +1039,16 @@
<li><code>TLSv1.0</code></li>
<li><code>TLSv1.1</code></li>
<li><code>TLSv1.2</code></li>
+ <li><code>TLSv1.3</code></li>
<li><code>All</code></li>
</ul>
Note that this differs from mod_ssl in that you can't add or subtract
protocols.<br>
<br>
If no NSSProtocol is specified, mod_nss will default to allowing the use of
-the TLSv1.0, TLSv1.1 and TLSv1.2 protocols, where TLSv1.0 will be set to
-be the minimum protocol allowed, and TLSv1.2 will be set to be the maximum
-protocol allowed.
+the TLSv1.0, TLSv1.1, TLSv1.2 and TLSv1.3 protocols, where TLSv1.0 will be
+set to be the minimum protocol allowed, and TLSv1.3 will be set to be the
+maximum protocol allowed.
<br>
If values for NSSProtocol are specified, mod_nss will set both the minimum
and the maximum allowed protocols based upon these entries allowing for the
@@ -1338,7 +1361,7 @@
<tr>
<td style="vertical-align: top; width: 45%;"><code>SSL_PROTOCOL<br>
</code></td>
- <td style="vertical-align: top;">SSLv3, TLSv1.0, TLSv1.1 or TLSv1.2<br>
+ <td style="vertical-align: top;">SSLv3, TLSv1.0, TLSv1.1, TLSv1.2 or TLSv1.3<br>
</td>
</tr>
<tr>
@@ -1553,14 +1576,14 @@
<br>
<h1><a name="Database_Management"></a>Database Management</h1>
-NSS stores it's certificates and keys in a set of files referred to as
-the "certificate database." The files by default (with NSS 3.x) are
-named cert8.db, key3.db and secmod.db. See the NSS documentation at <a href="http://www.mozilla.org/projects/security/pki/nss/">http://www.mozilla.org/projects/security/pki/nss/</a>
+NSS stores its certificates and keys in a set of files referred to as
+the "certificate database." The files by default (with NSS >= 3.35) are
+named cert9.db, key4.db and pkcs11.txt. See the NSS documentation at <a href="http://www.mozilla.org/projects/security/pki/nss/">http://www.mozilla.org/projects/security/pki/nss/</a>
for more information on these specific files.<p>
-By default the NSS databases use the Berkeley Database format (cert8 and
-key3). To use the sqlite format (cert9 and key4) either include sql: in
-all references to the database (-d sql:/path/to/database) or
-<code>export NSS_DEFAULT_DB_TYPE="sql"</code>.
+Before version 3.35 the NSS databases used the Berkeley Database format by default
+(cert8 and key3). To use the old format either include dbm: in
+all references to the database (-d dbm:/path/to/database) or
+<code>export NSS_DEFAULT_DB_TYPE="dbm"</code>.
</p><p>
For more details see
<a href="https://wiki.mozilla.org/NSS_Shared_DB">https://wiki.mozilla.org/NSS_Shared_DB</a><br>
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/mod_nss.spec new/mod_nss-1.0.17/mod_nss.spec
--- old/mod_nss-1.0.16/mod_nss.spec 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/mod_nss.spec 2018-03-27 22:40:30.000000000 +0200
@@ -26,7 +26,7 @@
URL: http://directory.fedora.redhat.com/
Source: %{name}-%{version}.tar.gz
BuildRoot: %{_tmppath}/%{name}-%{version}-root
-BuildPreReq: httpd-devel,apr-devel
+BuildRequires: httpd-devel,apr-devel,nss-devel,flex,byacc
# Without Autoreq: 0, rpmbuild finds all sorts of crazy
# dependencies that we don't care about, and refuses to install
Autoreq: 0
@@ -56,7 +56,7 @@
# configure requires nspr, nss, ldapsdk, adminutil
# if can't find apxs, use --with-apxs=/path/to/apxs
-./configure --with-apr-config --with-nspr-inc=%{nsprincdir} --with-nspr-lib=%{nsprlibdir} --with-nss-inc=%{nssincdir} --with-nss-lib=%{nsslibdir}
+./configure --with-apr-config --with-nspr-inc=%{nsprincdir} --with-nspr-lib=%{nsprlibdir} --with-nss-inc=%{nssincdir} --with-nss-lib=%{nsslibdir} --enable-ecc
CFLAGS="$flag $mycflags" make
%install
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/nss.conf.in new/mod_nss-1.0.17/nss.conf.in
--- old/mod_nss-1.0.16/nss.conf.in 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/nss.conf.in 2018-03-27 22:40:30.000000000 +0200
@@ -122,14 +122,14 @@
# Server Certificate Database:
# The NSS security database directory that holds the certificates and
-# keys. The database consists of 3 files: cert8.db, key3.db and secmod.db.
+# keys. The database consists of 3 files: cert9.db, key4.db and pkcs11.txt
# Provide the directory that these files exist.
NSSCertificateDatabase @apache_conf@
# Database Prefix:
# In order to be able to store multiple NSS databases in one directory
# they need unique names. This option sets the database prefix used for
-# cert8.db and key3.db.
+# cert9.db and key4.db.
#NSSDBPrefix my-prefix-
# Client Authentication (Type):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/nss_engine_cipher.c new/mod_nss-1.0.17/nss_engine_cipher.c
--- old/mod_nss-1.0.16/nss_engine_cipher.c 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/nss_engine_cipher.c 2018-03-27 22:40:30.000000000 +0200
@@ -123,6 +123,14 @@
{"ecdhe_ecdsa_chacha20_poly1305_sha_256", TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256, "ECDHE-ECDSA-CHACHA20-POLY1305", SSL_kEECDH|SSL_aECDSA|SSL_CHACHA20POLY1305|SSL_AEAD, TLSV1_2, SSL_HIGH, 256, 256},
{"dhe_rsa_chacha20_poly1305_sha_256", TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256, "DHE-RSA-CHACHA20-POLY1305", SSL_kDHE|SSL_aRSA|SSL_CHACHA20POLY1305|SSL_AEAD, TLSV1_2, SSL_HIGH, 256, 256},
#endif
+#ifdef NSS_SUPPORTS_TLS_1_3
+ /* Special TLS 1.3 cipher suites that really just specify AEAD
+ * TLS 1.3 ciphers don't specify key exchange and authentication.
+ */
+ {"aes_128_gcm_sha_256", TLS_AES_128_GCM_SHA256, "TLS-AES-128-GCM-SHA256", SSL_AES128GCM|SSL_AEAD, TLSV1_3, SSL_HIGH, 128, 128, NULL},
+ {"aes_256_gcm_sha_384", TLS_AES_256_GCM_SHA384, "TLS-AES-256-GCM-SHA384", SSL_AES256GCM|SSL_AEAD, TLSV1_3, SSL_HIGH, 256, 256, NULL},
+ {"chacha20_poly1305_sha_256", TLS_CHACHA20_POLY1305_SHA256, "TLS-CHACHA20-POLY1305_SHA256", SSL_CHACHA20POLY1305|SSL_AEAD, TLSV1_3, SSL_HIGH, 256, 256},
+#endif
};
#define CIPHERNUM sizeof(ciphers_def) / sizeof(cipher_properties)
@@ -170,11 +178,11 @@
rv = parse_nss_ciphers(s, ciphers, cipher_list);
} else {
rv = parse_openssl_ciphers(s, ciphers, cipher_list);
- if (rv == 0 && 0 == countciphers(cipher_list, SSLV3|TLSV1|TLSV1_2)) {
+ if (rv == 0 && 0 == countciphers(cipher_list, SSLV3|TLSV1|TLSV1_2|TLSV1_3)) {
rv = parse_nss_ciphers(s, ciphers, cipher_list);
}
}
- if (0 == countciphers(cipher_list, SSLV3|TLSV1|TLSV1_2)) {
+ if (0 == countciphers(cipher_list, SSLV3|TLSV1|TLSV1_2|TLSV1_3)) {
ap_log_error(APLOG_MARK, APLOG_INFO, 0, s,
"no cipher match");
}
@@ -406,6 +414,8 @@
protocol |= TLSV1;
} else if (!strcmp(cipher, "TLSv1.2")) {
protocol |= TLSV1_2;
+ } else if (!strcmp(cipher, "TLSv1.3")) {
+ protocol |= TLSV1_3;
} else if (!strcmp(cipher, "HIGH")) {
strength |= SSL_HIGH;
} else if (!strcmp(cipher, "MEDIUM")) {
@@ -493,7 +503,7 @@
cipher = ciphers;
}
- if (found && 0 == countciphers(cipher_list, SSLV3|TLSV1|TLSV1_2))
+ if (found && 0 == countciphers(cipher_list, SSLV3|TLSV1|TLSV1_2|TLSV1_3))
return 1; /* no matching ciphers */
return 0;
}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/nss_engine_cipher.h new/mod_nss-1.0.17/nss_engine_cipher.h
--- old/mod_nss-1.0.16/nss_engine_cipher.h 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/nss_engine_cipher.h 2018-03-27 22:40:30.000000000 +0200
@@ -86,6 +86,7 @@
#define SSLV3 0x00000002L
#define TLSV1 SSLV3
#define TLSV1_2 0x00000004L
+#define TLSV1_3 0x00000005L
/* the table itself is defined in nss_engine_cipher.c */
#if 0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/nss_engine_init.c new/mod_nss-1.0.17/nss_engine_init.c
--- old/mod_nss-1.0.16/nss_engine_init.c 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/nss_engine_init.c 2018-03-27 22:40:30.000000000 +0200
@@ -191,9 +191,9 @@
}
/* Strip the known prefixes */
- if (strncasecmp(mc->pCertificateDatabase, "sql:", 4) == 0)
+ if (strncasecmp(mc->pCertificateDatabase, SQLDB, 4) == 0)
dbdir = (char *)mc->pCertificateDatabase + 4;
- else if (strncasecmp(mc->pCertificateDatabase, "dbm:", 4) == 0)
+ else if (strncasecmp(mc->pCertificateDatabase, LEGACY, 4) == 0)
dbdir = (char *)mc->pCertificateDatabase + 4;
else
dbdir = (char *)mc->pCertificateDatabase;
@@ -767,6 +767,9 @@
modnss_ctx_t *mctx)
{
int ssl3, tls, tls1_1, tls1_2;
+#ifdef NSS_SUPPORTS_TLS_1_3
+ int tls1_3 = 0;
+#endif
char *protocol_marker = NULL;
char *lprotocols = NULL;
SECStatus stat;
@@ -789,16 +792,27 @@
}
if (mctx->auth.protocols == NULL) {
+#ifdef NSS_SUPPORTS_TLS_1_3
+ ap_log_error(APLOG_MARK, APLOG_WARNING, 0, s,
+ "%s value not set; using: TLSv1.0, TLSv1.1, TLSv1.2, and TLSv1.3",
+ protocol_marker);
+ tls = tls1_1 = tls1_2 = tls1_3 = 1;
+#else
ap_log_error(APLOG_MARK, APLOG_WARNING, 0, s,
- "%s value not set; using: TLSv1.0, TLSv1.1 and TLSv1.2",
+ "%s value not set; using: TLSv1.0, TLSv1.1, and TLSv1.2",
protocol_marker);
tls = tls1_1 = tls1_2 = 1;
+#endif
} else {
lprotocols = strdup(mctx->auth.protocols);
ap_str_tolower(lprotocols);
if (strstr(lprotocols, "all") != NULL) {
+#ifdef NSS_SUPPORTS_TLS_1_3
+ ssl3 = tls = tls1_1 = tls1_2 = tls1_3 = 1;
+#else
ssl3 = tls = tls1_1 = tls1_2 = 1;
+#endif
} else {
char *protocol_list = NULL;
char *saveptr = NULL;
@@ -846,6 +860,13 @@
"%s: Enabling TLSv1.2",
protocol_marker);
tls1_2 = 1;
+#ifdef NSS_SUPPORTS_TLS_1_3
+ } else if (strcmp(token, "tlsv1.3") == 0) {
+ ap_log_error(APLOG_MARK, APLOG_DEBUG, 0, s,
+ "%s: Enabling TLSv1.3",
+ protocol_marker);
+ tls1_3 = 1;
+#endif
} else {
ap_log_error(APLOG_MARK, APLOG_WARNING, 0, s,
"%s: Unknown protocol '%s' not supported",
@@ -861,12 +882,21 @@
* if FIPS mode is enabled with no TLS protocols,
* enable ALL TLS protocols.
*/
+#ifdef NSS_SUPPORTS_TLS_1_3
+ if ((mctx->sc->fips) && (tls == 0) && (tls1_1 == 0) && (tls1_2 == 0) && (tls1_3 == 0)) {
+ ap_log_error(APLOG_MARK, APLOG_INFO, 0, s,
+ "%s: FIPS mode no valid protocols set, enabling TLSv1.0, TLSv1.1, TLSv1.2 and TLSv1.3",
+ protocol_marker);
+ tls = tls1_1 = tls1_2 = tls1_3 = 1;
+ }
+#else
if ((mctx->sc->fips) && (tls == 0) && (tls1_1 == 0) && (tls1_2 == 0)) {
ap_log_error(APLOG_MARK, APLOG_INFO, 0, s,
- "%s: FIPS mode no valid protocols set, enabling TLSv1.0, TLSv1.1 and TLSv1.2",
+ "%s: FIPS mode no valid protocols set, enabling TLSv1.0, TLSv1.1, and TLSv1.2",
protocol_marker);
tls = tls1_1 = tls1_2 = 1;
}
+#endif
}
stat = SSL_OptionSet(mctx->model, SSL_ENABLE_SSL2, PR_FALSE);
@@ -893,7 +923,7 @@
if (stat == SECSuccess) {
/* Set minimum protocol version (lowest -> highest)
*
- * SSL 3.0 -> TLS 1.0 -> TLS 1.1 -> TLS 1.2
+ * SSL 3.0 -> TLS 1.0 -> TLS 1.1 -> TLS 1.2 -> TLS 1.3
*/
if (ssl3 == 1) {
enabledVersions.min = SSL_LIBRARY_VERSION_3_0;
@@ -915,6 +945,13 @@
ap_log_error(APLOG_MARK, APLOG_DEBUG, 0, s,
"%s: [TLS 1.2] (minimum)",
protocol_marker);
+#ifdef NSS_SUPPORTS_TLS_1_3
+ } else if (tls1_3 == 1) {
+ enabledVersions.min = SSL_LIBRARY_VERSION_TLS_1_3;
+ ap_log_error(APLOG_MARK, APLOG_DEBUG, 0, s,
+ "%s: [TLS 1.3] (minimum)",
+ protocol_marker);
+#endif
} else {
/* Set default minimum protocol version to SSL 3.0 */
enabledVersions.min = SSL_LIBRARY_VERSION_3_0;
@@ -925,9 +962,18 @@
/* Set maximum protocol version (highest -> lowest)
*
- * TLS 1.2 -> TLS 1.1 -> TLS 1.0 -> SSL 3.0
+ * TLS 1.3 -> TLS 1.2 -> TLS 1.1 -> TLS 1.0 -> SSL 3.0
*/
+#ifdef NSS_SUPPORTS_TLS_1_3
+ if (tls1_3 == 1) {
+ enabledVersions.max = SSL_LIBRARY_VERSION_TLS_1_3;
+ ap_log_error(APLOG_MARK, APLOG_DEBUG, 0, s,
+ "%s: [TLS 1.3] (maximum)",
+ protocol_marker);
+ } else if (tls1_2 == 1) {
+#else
if (tls1_2 == 1) {
+#endif
enabledVersions.max = SSL_LIBRARY_VERSION_TLS_1_2;
ap_log_error(APLOG_MARK, APLOG_DEBUG, 0, s,
"%s: [TLS 1.2] (maximum)",
@@ -962,12 +1008,19 @@
ap_log_error(APLOG_MARK, APLOG_ERR, 0, s,
"%s: SSL/TLS protocol initialization failed.",
protocol_marker);
+ ap_log_error(APLOG_MARK, APLOG_ERR, 0, s,
+ "Enabled versions: min 0x%04x, max 0x%04x",
+ enabledVersions.min, enabledVersions.max);
nss_log_nss_error(APLOG_MARK, APLOG_ERR, s);
nss_die();
}
mctx->ssl3 = ssl3;
+#ifdef NSS_SUPPORTS_TLS_1_3
+ mctx->tls = tls || tls1_1 || tls1_2 || tls1_3;
+#else
mctx->tls = tls || tls1_1 || tls1_2;
+#endif
ap_log_error(APLOG_MARK, APLOG_DEBUG, 0, s,
"%sabling TLS Session Tickets", mctx->sc->session_tickets == PR_TRUE ? "En" : "Dis");
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/nss_engine_vars.c new/mod_nss-1.0.17/nss_engine_vars.c
--- old/mod_nss-1.0.16/nss_engine_vars.c 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/nss_engine_vars.c 2018-03-27 22:40:30.000000000 +0200
@@ -776,6 +776,11 @@
case SSL_LIBRARY_VERSION_TLS_1_2:
result = "TLSv1.2";
break;
+#ifdef NSS_SUPPORTS_TLS_1_3
+ case SSL_LIBRARY_VERSION_TLS_1_3:
+ result = "TLSv1.3";
+ break;
+#endif
}
}
}
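
The nss_engine_vars.c hunk only teaches the protocol-name lookup about the new version code. A hedged sketch of the same lookup shape, with an illustrative function name rather than the module's:

    /* Illustrative only: map an NSS version code to the string the module
     * exposes; codes not handled fall through to NULL. */
    static const char *version_name(PRUint16 version)
    {
        switch (version) {
        case SSL_LIBRARY_VERSION_TLS_1_2:
            return "TLSv1.2";
    #ifdef NSS_SUPPORTS_TLS_1_3
        case SSL_LIBRARY_VERSION_TLS_1_3:
            return "TLSv1.3";
    #endif
        default:
            return NULL;
        }
    }
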
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/test/msupn.py new/mod_nss-1.0.17/test/msupn.py
--- old/mod_nss-1.0.16/test/msupn.py 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/test/msupn.py 2018-03-27 22:40:30.000000000 +0200
@@ -1,14 +1,17 @@
from pyasn1.codec.der import encoder
from pyasn1.type import univ, char, tag
+
def fill_sequence(seq, *vals):
for i in range(len(vals)):
seq.setComponentByPosition(i, vals[i])
+
class SequenceImplicitlyTagged0(univ.Sequence):
tagSet = univ.Sequence.tagSet.tagImplicitly(
tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
+
class UTF8StringTagged0(char.GeneralString):
tagSet = char.UTF8String.tagSet.tagExplicitly(
tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/test/test.py new/mod_nss-1.0.17/test/test.py
--- old/mod_nss-1.0.16/test/test.py 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/test/test.py 2018-03-27 22:40:30.000000000 +0200
@@ -16,22 +16,19 @@
# Other older python we use the urllib3 bundled copy
from urllib3.packages.ssl_match_hostname import CertificateError
+
class test_suite1(Declarative):
@classmethod
def setUpClass(cls):
write_template_file('suite1.tmpl', 'work/httpd/conf/test.conf',
- {'DBPREFIX': os.environ.get('DBPREFIX', ''),
- 'SNI': 'off',
- 'PRESERVEHOST': 'Off',
- }
- )
+ {'DBPREFIX': os.environ.get('DBPREFIX', ''),
+ 'SNI': 'off',
+ 'PRESERVEHOST': 'Off'})
# Generate a single VH to do negative SNI testing
write_template_file('sni.tmpl', 'work/httpd/conf.d/sni1.conf',
- {'DBPREFIX': os.environ.get('DBPREFIX', ''),
- 'SNINAME': 'www1.example.com',
- 'SNINUM': 1,
- }
- )
+ {'DBPREFIX': os.environ.get('DBPREFIX', ''),
+ 'SNINAME': 'www1.example.com',
+ 'SNINUM': 1})
restart_apache()
@classmethod
@@ -66,12 +63,12 @@
cipher='AES128-SHA',
),
-# dict(
-# desc='Default protocol check',
-# request=('/', {}),
-# expected=200,
-# protocol='TLSv1.0',
-# ),
+ # dict(
+ # desc='Default protocol check',
+ # request=('/', {}),
+ # expected=200,
+ # protocol='TLSv1.0',
+ # ),
dict(
desc='client-side cipher check',
@@ -96,8 +93,7 @@
desc='Basic client auth, valid certificate',
request=('/acl/aclS01.html', {
'key_file': 'work/httpd/alpha.key',
- 'cert_file': 'work/httpd/alpha.crt',}
- ),
+ 'cert_file': 'work/httpd/alpha.crt'}),
expected=200,
),
@@ -111,8 +107,7 @@
desc='NSSRequire auth, valid certificate',
request=('/acl/aclS02.html', {
'key_file': 'work/httpd/alpha.key',
- 'cert_file': 'work/httpd/alpha.crt',}
- ),
+ 'cert_file': 'work/httpd/alpha.crt'}),
expected=200,
),
@@ -120,8 +115,7 @@
desc='NSSRequire auth, not allowed certificate',
request=('/acl/aclS02.html', {
'key_file': 'work/httpd/beta.key',
- 'cert_file': 'work/httpd/beta.crt',}
- ),
+ 'cert_file': 'work/httpd/beta.crt'}),
expected=403,
),
@@ -135,8 +129,7 @@
desc='FakeBasicAuth, valid certificate',
request=('/acl/aclS03.html', {
'key_file': 'work/httpd/alpha.key',
- 'cert_file': 'work/httpd/alpha.crt',}
- ),
+ 'cert_file': 'work/httpd/alpha.crt'}),
expected=200,
),
@@ -144,8 +137,7 @@
desc='FakeBasicAuth, not allowed user',
request=('/acl/aclS03.html', {
'key_file': 'work/httpd/beta.key',
- 'cert_file': 'work/httpd/beta.crt',}
- ),
+ 'cert_file': 'work/httpd/beta.crt'}),
expected=401,
),
@@ -153,8 +145,7 @@
desc='FakeBasicAuth, certificate with colon',
request=('/acl/aclS03.html', {
'key_file': 'work/httpd/colon.key',
- 'cert_file': 'work/httpd/colon.crt',}
- ),
+ 'cert_file': 'work/httpd/colon.crt'}),
expected=403,
),
@@ -180,40 +171,36 @@
dict(
desc='Try SSLv23 client on SSLv3 location',
request=('/protocolssl3/index.html',
- {'ssl_version': ssl.PROTOCOL_SSLv23}
- ),
- expected=403, # connects as TLSv1
+ {'ssl_version': ssl.PROTOCOL_SSLv23}),
+ expected=403, # connects as TLSv1
),
dict(
desc='Try TLSv1 client on SSLv3 location',
request=('/protocoltls1/index.html',
- {'ssl_version': ssl.PROTOCOL_TLSv1}
- ),
+ {'ssl_version': ssl.PROTOCOL_TLSv1}),
expected=requests.exceptions.SSLError(),
),
dict(
desc='Try TLSv1 client on TLSv1.1 location',
request=('/protocoltls11/index.html',
- {'ssl_version': ssl.PROTOCOL_TLSv1}
- ),
+ {'ssl_version': ssl.PROTOCOL_TLSv1}),
expected=requests.exceptions.SSLError(),
),
-# dict(
-# desc='Try SSLv23 client on TLSv1 location',
-# request=('/protocoltls1/index.html',
-# {'ssl_version': ssl.PROTOCOL_SSLv23}
-# ),
-# expected=200,
-# ),
+ # dict(
+ # desc='Try SSLv23 client on TLSv1 location',
+ # request=('/protocoltls1/index.html',
+ # {'ssl_version': ssl.PROTOCOL_SSLv23}
+ # ),
+ # expected=200,
+ # ),
dict(
desc='Try SSLv23 client on 1.2-only location',
request=('/protocoltls12/index.html',
- {'ssl_version': ssl.PROTOCOL_SSLv23}
- ),
+ {'ssl_version': ssl.PROTOCOL_SSLv23}),
expected=403,
),
@@ -226,18 +213,16 @@
dict(
desc='Try SSLv3 client on 1.2-only VH',
request=('/protocoltls12/index.html',
- {'port': 8001,
- 'ssl_version': ssl.PROTOCOL_SSLv3}
- ),
+ {'port': 8001,
+ 'ssl_version': ssl.PROTOCOL_SSLv3}),
expected=requests.exceptions.SSLError(),
),
dict(
desc='Try TLSv1 client on 1.2-only VH',
request=('/protocoltls12/index.html',
- {'port': 8001,
- 'ssl_version': ssl.PROTOCOL_TLSv1}
- ),
+ {'port': 8001,
+ 'ssl_version': ssl.PROTOCOL_TLSv1}),
expected=requests.exceptions.SSLError(),
),
@@ -250,8 +235,7 @@
dict(
desc='SNI request when SNI is disabled',
request=('/index.html',
- {'host': 'www1.example.com', 'port': 8000}
- ),
+ {'host': 'www1.example.com', 'port': 8000}),
expected=requests.exceptions.SSLError(),
expected_str='doesn\'t match',
),
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/test/test_cipher.py new/mod_nss-1.0.17/test/test_cipher.py
--- old/mod_nss-1.0.16/test/test_cipher.py 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/test/test_cipher.py 2018-03-27 22:40:30.000000000 +0200
@@ -4,7 +4,9 @@
from nose.tools import make_decorator
# This file is auto-generated by configure
-from variable import ENABLE_SHA384, ENABLE_GCM, ENABLE_SERVER_DHE, ENABLE_CHACHA20
+from variable import (
+ ENABLE_SHA384, ENABLE_GCM, ENABLE_SERVER_DHE,
+ ENABLE_CHACHA20, ENABLE_TLS13)
# Things to be aware of if some tests fail:
#
@@ -22,49 +24,67 @@
ciphernum = 0
-CIPHERS_NOT_IN_NSS = ['ECDH-RSA-AES128-SHA256',
- 'ECDH-ECDSA-AES128-GCM-SHA256',
- 'ECDH-ECDSA-AES128-SHA256',
- 'ECDH-RSA-AES128-GCM-SHA256',
- 'EXP-DES-CBC-SHA',
- 'ECDH-RSA-AES256-GCM-SHA384',
- 'ECDH-ECDSA-AES256-SHA384',
- 'ECDH-RSA-AES256-SHA384',
- 'ECDH-ECDSA-AES256-GCM-SHA384',
- 'EXP-EDH-RSA-DES-CBC-SHA',
- 'ECDH-ECDSA-AES128-SHA',
- 'ECDH-RSA-AES128-SHA',
- 'CAMELLIA256-SHA256',
- 'CAMELLIA128-SHA256',
- 'ECDHE-ECDSA-CAMELLIA128-SHA256',
- 'ECDHE-RSA-CAMELLIA256-SHA384',
- 'ECDHE-ECDSA-CAMELLIA256-SHA384',
- 'ECDHE-RSA-CAMELLIA128-SHA256',
- 'DHE-RSA-CAMELLIA128-SHA256',
- 'DHE-RSA-CAMELLIA256-SHA256',
+CIPHERS_NOT_IN_NSS = [
+ 'ECDH-RSA-AES128-SHA256',
+ 'ECDH-ECDSA-AES128-GCM-SHA256',
+ 'ECDH-ECDSA-AES128-SHA256',
+ 'ECDH-RSA-AES128-GCM-SHA256',
+ 'EXP-DES-CBC-SHA',
+ 'ECDH-RSA-AES256-GCM-SHA384',
+ 'ECDH-ECDSA-AES256-SHA384',
+ 'ECDH-RSA-AES256-SHA384',
+ 'ECDH-ECDSA-AES256-GCM-SHA384',
+ 'EXP-EDH-RSA-DES-CBC-SHA',
+ 'ECDH-ECDSA-AES128-SHA',
+ 'ECDH-RSA-AES128-SHA',
+ 'CAMELLIA256-SHA256',
+ 'CAMELLIA128-SHA256',
+ 'ECDHE-ECDSA-CAMELLIA128-SHA256',
+ 'ECDHE-RSA-CAMELLIA256-SHA384',
+ 'ECDHE-ECDSA-CAMELLIA256-SHA384',
+ 'ECDHE-RSA-CAMELLIA128-SHA256',
+ 'DHE-RSA-CAMELLIA128-SHA256',
+ 'DHE-RSA-CAMELLIA256-SHA256',
]
-CIPHERS_NOT_IN_OPENSSL = ['ECDH-RSA-NULL-SHA',
- 'ECDH-RSA-AES128-SHA',
- 'ECDH-RSA-DES-CBC3-SHA',
- 'ECDH-ECDSA-AES128-SHA',
- 'ECDH-ECDSA-NULL-SHA',
- 'ECDH-RSA-AES256-SHA',
- 'ECDH-ECDSA-DES-CBC3-SHA',
- 'ECDH-ECDSA-AES256-SHA',
+CIPHERS_NOT_IN_OPENSSL = [
+ 'ECDH-RSA-NULL-SHA',
+ 'ECDH-RSA-AES128-SHA',
+ 'ECDH-RSA-DES-CBC3-SHA',
+ 'ECDH-ECDSA-AES128-SHA',
+ 'ECDH-ECDSA-NULL-SHA',
+ 'ECDH-RSA-AES256-SHA',
+ 'ECDH-ECDSA-DES-CBC3-SHA',
+ 'ECDH-ECDSA-AES256-SHA',
]
-OPENSSL_CIPHERS_IGNORE = ":-SSLv2:-KRB5:-PSK:-ADH:-DSS:-SEED:-IDEA:-SRP:-AESCCM:-AESCCM8"
+OPENSSL_CIPHERS_IGNORE = ":-SSLv2:-KRB5:-PSK:-ADH:-DSS:-SEED:-IDEA" \
+ ":-SRP:-AESCCM:-AESCCM8"
if ENABLE_SERVER_DHE == 0:
OPENSSL_CIPHERS_IGNORE += ':-DH'
+
def openssl_CHACHA20():
"""Check to see if CHACHA20 is available in OpenSSL"""
(out, err, rc) = run([openssl, 'ciphers', 'CHACHA20'])
return rc == 0
+
+def openssl_tls13():
+ """Check to see if TLS 1.3 is available in OpenSSL"""
+ (out, err, rc) = run([openssl, 'ciphers', 'tls1_3'])
+ return rc == 0
+
OPENSSL_CHACHA20 = openssl_CHACHA20()
+OPENSSL_TLS13 = openssl_tls13()
+
+tls13_ciphers = [
+ 'TLS-AES-128-GCM-SHA256',
+ 'TLS-AES-256-GCM-SHA384',
+ 'TLS-CHACHA20-POLY1305_SHA256',
+]
+
def assert_equal_openssl(ciphers):
nss_ciphers = ciphers + ":-EXP:-LOW:-RC4:-EDH"
@@ -97,10 +117,13 @@
ossl_list = t
# OpenSSL 1.0.2 doesn't support CHACHA20 but NSS might.
+ # OpenSSL 1.1.0.g doesn't support TLS 1.3 yet but 1.1.1 will.
n = list()
for c in nss_list:
if not OPENSSL_CHACHA20 and ENABLE_CHACHA20 and 'CHACHA20' in c:
continue
+ if not OPENSSL_TLS13 and ENABLE_TLS13 and c in tls13_ciphers:
+ continue
if c in CIPHERS_NOT_IN_OPENSSL:
continue
n.append(c)
@@ -113,13 +136,16 @@
else:
diff = ''
- assert nss_list == ossl_list, '%r != %r. Difference %r' % (':'.join(nss_list), ':'.join(ossl_list), diff)
+ assert nss_list == ossl_list, '%r != %r. Difference %r' % (
+ ':'.join(nss_list), ':'.join(ossl_list), diff)
+
def assert_no_NULL(nss_ciphers):
(nss, err, rc) = run([exe, "--o", nss_ciphers])
assert rc == 0
assert('NULL' not in nss)
+
class test_ciphers(object):
@classmethod
def setUpClass(cls):
@@ -308,15 +334,18 @@
assert_no_NULL("DEFAULT:aRSA")
def test_SYSTEM_DEFAULT(self):
- # I've added in !DHE here which differs from F-23 default
- assert_equal_openssl("!SSLv2:kEECDH:kRSA:kEDH:kPSK:+3DES:!aNULL:!eNULL:!MD5:!EXP:!RC4:!SEED:!IDEA:!DES:!DHE")
+ # I've added in !DHE here which differs from F-23 default
+ assert_equal_openssl("!SSLv2:kEECDH:kRSA:kEDH:kPSK:+3DES:"
+ "!aNULL:!eNULL:!MD5:!EXP:!RC4:!SEED:"
+ "!IDEA:!DES:!DHE")
def test_cipher_reorder(self):
# re-ordering not allowed but shouldn't blow up either
assert_equal_openssl("3DES:AES:+3DES:SHA256")
def test_nss_subtraction(self):
- (out, err, rc) = run([exe, "+rsa_rc4_128_md5,+rsa_rc4_128_sha,-rsa_rc4_128_md5"])
+ (out, err, rc) = run([exe, "+rsa_rc4_128_md5,+rsa_rc4_128_sha,"
+ "-rsa_rc4_128_md5"])
assert rc == 0
assert_equal(out, 'rsa_rc4_128_sha')
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/test/test_config.py new/mod_nss-1.0.17/test/test_config.py
--- old/mod_nss-1.0.16/test/test_config.py 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/test/test_config.py 2018-03-27 22:40:30.000000000 +0200
@@ -29,17 +29,18 @@
# Utility functions to assist in creating Apache configuration based
# on test suite
-DEF_PORT=8000
+DEF_PORT = 8000
FQDN = socket.gethostname()
default_vars = dict(
- DBPREFIX = '',
- SERVER_PORT = DEF_PORT,
- SERVER_NAME = FQDN,
- TEST_ROOT = '%s/work/httpd' % os.getcwd(),
- SERVER_ROOT = '%s/work/httpd' % os.getcwd(),
+ DBPREFIX='',
+ SERVER_PORT=DEF_PORT,
+ SERVER_NAME=FQDN,
+ TEST_ROOT='%s/work/httpd' % os.getcwd(),
+ SERVER_ROOT='%s/work/httpd' % os.getcwd(),
)
+
def template_str(txt, vars):
val = string.Template(txt).substitute(vars)
@@ -51,17 +52,20 @@
return val
+
def template_file(infilename, vars):
"""Read a file and perform template substitutions"""
with open(infilename) as f:
return template_str(f.read(), vars)
+
def write_template_file(infilename, outfilename, vars):
"""Read a file and perform template substitutions"""
replacevars = dict(default_vars.items() + vars.items())
with open(outfilename, 'w') as f:
f.write('%s\n' % template_file(infilename, replacevars))
+
def stop_apache():
"""Stop the Apache process"""
cwd = os.getcwd()
@@ -71,6 +75,7 @@
p = subprocess.Popen(['./stop'],
close_fds=True)
+
def restart_apache():
"""Restart the Apache process"""
cwd = os.getcwd()
@@ -93,6 +98,7 @@
options = %r
%s: %s"""
+
class Declarative(object):
"""A declarative-style test suite
@@ -134,7 +140,7 @@
def make_request(self, uri, options):
session = requests.Session()
session.mount('https://', test_request.MyAdapter())
- verify = dict(verify = options)
+ verify = dict(verify=options)
port = options.get('port', DEF_PORT)
host = options.get('host', FQDN)
request = session.get('https://%s:%d%s' % (host, port, uri), **verify)
@@ -145,7 +151,7 @@
expected_str=None, content=None):
# TODO: need way to set auth, etc.
(uri, options) = request
- if not 'verify' in options:
+ if 'verify' not in options:
options['verify'] = 'work/httpd/alias/ca.pem'
if isinstance(expected, Exception):
self.check_exception(nice, uri, options, expected, expected_str)
@@ -183,7 +189,7 @@
request = self.make_request(uri, options)
has_sni = options.get('sni', False)
- if content and not content in request.content:
+ if content and content not in request.content:
raise AssertionError(
'Expected %s not in %s' % (content, request.content)
)
@@ -201,9 +207,11 @@
raise AssertionError('Cannot do protocol tests in SNI')
if protocol != client_cipher[1]:
raise AssertionError(
- 'Expected protocol %s, got %s' % (protocol, client_cipher[1])
+ 'Expected protocol %s, got %s' %
+ (protocol, client_cipher[1])
)
if expected != request.status_code:
raise AssertionError(
- 'Expected status %s, got %s' % (expected, request.status_code)
+ 'Expected status %s, got %s' %
+ (expected, request.status_code)
)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/test/test_request.py new/mod_nss-1.0.17/test/test_request.py
--- old/mod_nss-1.0.16/test/test_request.py 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/test/test_request.py 2018-03-27 22:40:30.000000000 +0200
@@ -9,20 +9,26 @@
from requests.packages.urllib3.util import get_host
from requests.packages.urllib3.util.timeout import Timeout
from requests.packages.urllib3.contrib import pyopenssl
-from requests.packages.urllib3.connectionpool import HTTPConnectionPool, HTTPSConnectionPool, VerifiedHTTPSConnection
+from requests.packages.urllib3.connectionpool import (
+ HTTPConnectionPool, HTTPSConnectionPool, VerifiedHTTPSConnection)
try:
- from requests.packages.urllib3.contrib.pyopenssl import DEFAULT_SSL_CIPHER_LIST
+ from requests.packages.urllib3.contrib.pyopenssl import (
+ DEFAULT_SSL_CIPHER_LIST)
except ImportError:
try:
from urllib3.util.ssl_ import DEFAULT_CIPHERS
DEFAULT_SSL_CIPHER_LIST = DEFAULT_CIPHERS
except ImportError:
- DEFAULT_SSL_CIPHER_LIST = 'TLS13-AES-256-GCM-SHA384:TLS13-CHACHA20-POLY1305-SHA256:TLS13-AES-128-GCM-SHA256:ECDH+AESGCM:ECDH+CHACHA20:DH+AESGCM:DH+CHACHA20:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:RSA+AESGCM:RSA+AES:!aNULL:!eNULL:!MD5'
+ DEFAULT_SSL_CIPHER_LIST = (
+ 'TLS13-AES-256-GCM-SHA384:TLS13-CHACHA20-POLY1305-SHA256:'
+ 'TLS13-AES-128-GCM-SHA256:ECDH+AESGCM:ECDH+CHACHA20:DH+AESGCM:'
+ 'DH+CHACHA20:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:'
+ 'RSA+AESGCM:RSA+AES:!aNULL:!eNULL:!MD5')
# Don't bend over backwards for ssl support, assume it is there.
import ssl
-try: # Python 3
+try: # Python 3
from http.client import HTTPConnection, HTTPException
from http.client import HTTP_PORT, HTTPS_PORT
from http.client import HTTPSConnection
@@ -37,10 +43,12 @@
except ImportError:
try:
# Older python where the backport from pypi is installed
- from backports.ssl_match_hostname import match_hostname, CertificateError
+ from backports.ssl_match_hostname import (
+ match_hostname, CertificateError)
except ImportError:
# Other older python we use the urllib3 bundled copy
- from urllib3.packages.ssl_match_hostname import match_hostname, CertificateError
+ from urllib3.packages.ssl_match_hostname import (
+ match_hostname, CertificateError)
SAVE_DEFAULT_SSL_CIPHER_LIST = DEFAULT_SSL_CIPHER_LIST
@@ -73,12 +81,16 @@
else:
return HTTPConnectionPool(host, port=port, **kw)
+
class MyHTTPSConnectionPool(HTTPSConnectionPool):
def __init__(self, host, port=None,
strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
block=False, headers=None,
key_file=None, cert_file=None,
- cert_reqs='CERT_REQUIRED', ca_certs='/etc/ssl/certs/ca-certificates.crt', ssl_version=ssl.PROTOCOL_SSLv23, ciphers=None):
+ cert_reqs='CERT_REQUIRED',
+ ca_certs='/etc/ssl/certs/ca-certificates.crt',
+ ssl_version=ssl.PROTOCOL_SSLv23,
+ ciphers=None):
super(HTTPSConnectionPool, self).__init__(host, port,
strict, timeout, maxsize,
@@ -100,12 +112,12 @@
log.info("Starting new HTTPS connection (%d): %s"
% (self.num_connections, self.host))
- #if not ssl: # Platform-specific: Python compiled without +ssl
- # if not HTTPSConnection or HTTPSConnection is object:
- # raise SSLError("Can't connect to HTTPS URL because the SSL "
- # "module is not available.")
+ # if not ssl: # Platform-specific: Python compiled without +ssl
+ # if not HTTPSConnection or HTTPSConnection is object:
+ # raise SSLError("Can't connect to HTTPS URL because the SSL "
+ # "module is not available.")
- # return HTTPSConnection(host=self.host, port=self.port)
+ # return HTTPSConnection(host=self.host, port=self.port)
connection = MyVerifiedHTTPSConnection(host=self.host, port=self.port)
connection.sni = self.sni
@@ -115,6 +127,7 @@
connection.set_ciphers(self.ciphers)
return connection
+
class MyVerifiedHTTPSConnection(VerifiedHTTPSConnection):
"""
Based on httplib.HTTPSConnection but wraps the socket with
@@ -123,7 +136,7 @@
cert_reqs = None
ca_certs = None
client_cipher = None
- is_verified = True # squelch warning
+ is_verified = True # squelch warning
sni = False
assert_hostname = None
assert_fingerprint = None
@@ -174,6 +187,7 @@
self.client_cipher = self.sock.cipher()
super(MyVerifiedHTTPSConnection, self).close()
+
class MyAdapter(requests.adapters.HTTPAdapter):
def get_connection(self, url, proxies=None):
@@ -201,14 +215,17 @@
if 'key_file' in verify:
conn.key_file = verify['key_file']
conn.sni = verify.get('sni', False)
- else: # huh? Do nothing
+ else: # huh? Do nothing
pass
"""
s = requests.Session()
s.mount('https://', MyAdapter())
try:
- r = s.get('https://test.example.com:8000/', verify={'verify': False, 'ssl_version': ssl.PROTOCOL_SSLv23, 'ciphers': 'HIGH'})
+ r = s.get('https://test.example.com:8000/',
+ verify={'verify': False,
+ 'ssl_version': ssl.PROTOCOL_SSLv23,
+ 'ciphers': 'HIGH'})
cipher = r.raw._pool._get_conn().client_cipher
except requests.exceptions.SSLError, e:
print e.message
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/test/test_util.py new/mod_nss-1.0.17/test/test_util.py
--- old/mod_nss-1.0.16/test/test_util.py 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/test/test_util.py 2018-03-27 22:40:30.000000000 +0200
@@ -2,7 +2,9 @@
import time
import subprocess
-def host_port_open(host, port, socket_type=socket.SOCK_STREAM, socket_timeout=None):
+
+def host_port_open(host, port, socket_type=socket.SOCK_STREAM,
+ socket_timeout=None):
for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket_type):
af, socktype, proto, canonname, sa = res
try:
@@ -30,6 +32,7 @@
return False
+
def wait_for_open_ports(host, ports, timeout=0):
"""
Wait until the specified port(s) on the remote host are open. Timeout
@@ -47,13 +50,15 @@
if port_open:
break
- if timeout and time.time() > op_timeout: # timeout exceeded
+ if timeout and time.time() > op_timeout: # timeout exceeded
raise socket.timeout()
time.sleep(1)
+
def shell_quote(string):
return "'" + string.replace("'", "'\\''") + "'"
+
def run(args):
"""
Execute a command and return stdin, stdout and the process return code.
@@ -71,7 +76,7 @@
try:
p = subprocess.Popen(args, stdout=p_out, stderr=p_err,
close_fds=True)
- stdout,stderr = p.communicate(None)
+ stdout, stderr = p.communicate(None)
except KeyboardInterrupt:
p.wait()
raise
@@ -82,7 +87,7 @@
def assert_equal(got, expected):
if got.strip() != expected.strip():
raise AssertionError(
- "assert_deepequal: expected != got. " \
+ "assert_deepequal: expected != got. "
"expected = %r got = %r" %
(expected, got)
)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mod_nss-1.0.16/test/testsni.py new/mod_nss-1.0.17/test/testsni.py
--- old/mod_nss-1.0.16/test/testsni.py 2018-01-19 21:44:16.000000000 +0100
+++ new/mod_nss-1.0.17/test/testsni.py 2018-03-27 22:40:30.000000000 +0200
@@ -4,22 +4,19 @@
import requests.exceptions
import os
+
class test_suite1(Declarative):
@classmethod
def setUpClass(cls):
write_template_file('suite1.tmpl', 'work/httpd/conf/test.conf',
- {'DBPREFIX': os.environ.get('DBPREFIX', ''),
- 'SNI': 'on',
- 'PRESERVEHOST': 'Off',
- }
- )
- for i in range(1,26):
+ {'DBPREFIX': os.environ.get('DBPREFIX', ''),
+ 'SNI': 'on',
+ 'PRESERVEHOST': 'Off'})
+ for i in range(1, 26):
write_template_file('sni.tmpl', 'work/httpd/conf.d/sni%d.conf' % i,
- {'DBPREFIX': os.environ.get('DBPREFIX', ''),
- 'SNINAME': 'www%d.example.com' % i,
- 'SNINUM': i,
- }
- )
+ {'DBPREFIX': os.environ.get('DBPREFIX', ''),
+ 'SNINAME': 'www%d.example.com' % i,
+ 'SNINUM': i})
restart_apache()
@classmethod
@@ -81,10 +78,9 @@
request=('/acl/aclS01.html', {
'host': 'www10.example.com', 'sni': True,
'key_file': 'work/httpd/alpha.key',
- 'cert_file': 'work/httpd/alpha.crt',}
- ),
+ 'cert_file': 'work/httpd/alpha.crt'}),
expected=200,
- content='sni10',
+ content='sni10',
),
dict(