openSUSE Commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2009 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2008 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2007 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2006 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
May 2019
- 1 participant
- 2232 discussions
Hello community,
here is the log from the commit of package ceph for openSUSE:Factory checked in at 2019-05-25 13:20:18
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/ceph (Old)
and /work/SRC/openSUSE:Factory/.ceph.new.5148 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "ceph"
Sat May 25 13:20:18 2019 rev:44 rq:704486 version:14.2.1.448+g1bd10a856f
Changes:
--------
--- /work/SRC/openSUSE:Factory/ceph/ceph-test.changes 2019-05-21 10:22:27.859598633 +0200
+++ /work/SRC/openSUSE:Factory/.ceph.new.5148/ceph-test.changes 2019-05-25 13:20:21.208333916 +0200
@@ -1,0 +2,19 @@
+Tue May 21 13:46:52 UTC 2019 - Nathan Cutler <ncutler(a)suse.com>
+
+- Update to 14.2.1-448-g1bd10a856f:
+ + monitoring: update Grafana dashboards
+ + mgr/dashboard: fix some performance data are not displayed
+ + monitoring: SNMP OID per every Prometheus alert rule
+ + mgr/dashboard: Validate if any client belongs to more than one group
+ + mgr/dashboard: Admin resource not honored
+ + mgr/dashboard: Unable to see tcmu-runner perf counters
+ + mgr/dashboard: iSCSI form does not support IPv6
+
+-------------------------------------------------------------------
+Mon May 20 15:28:49 UTC 2019 - Nathan Cutler <ncutler(a)suse.com>
+
+- Update to 14.2.1-440-g0ac6920288:
+ + rebase on top of upstream nautilus branch, SHA1 1dc43a036fcc0121e3a0c1fe7ca6cd77cde1bf60
+ + client: fix vxattr nanosecond field padding (bsc#1135219, bsc#1135221)
+
+-------------------------------------------------------------------
ceph.changes: same change
Old:
----
ceph-14.2.1-431-gd032e5dd80.tar.bz2
New:
----
ceph-14.2.1-448-g1bd10a856f.tar.bz2
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ ceph-test.spec ++++++
--- /var/tmp/diff_new_pack.SVHhjg/_old 2019-05-25 13:20:24.040332863 +0200
+++ /var/tmp/diff_new_pack.SVHhjg/_new 2019-05-25 13:20:24.044332862 +0200
@@ -100,7 +100,7 @@
# main package definition
#################################################################################
Name: ceph-test
-Version: 14.2.1.431+gd032e5dd80
+Version: 14.2.1.448+g1bd10a856f
Release: 0%{?dist}
%if 0%{?fedora} || 0%{?rhel}
Epoch: 2
@@ -116,7 +116,7 @@
Group: System/Filesystems
%endif
URL: http://ceph.com/
-Source0: %{?_remote_tarball_prefix}ceph-14.2.1-431-gd032e5dd80.tar.bz2
+Source0: %{?_remote_tarball_prefix}ceph-14.2.1-448-g1bd10a856f.tar.bz2
%if 0%{?suse_version}
Source96: checkin.sh
Source97: README-checkin.txt
@@ -499,7 +499,7 @@
%if 0%{?suse_version}
%endif
%prep
-%autosetup -p1 -n ceph-14.2.1-431-gd032e5dd80
+%autosetup -p1 -n ceph-14.2.1-448-g1bd10a856f
%build
++++++ ceph.spec ++++++
--- /var/tmp/diff_new_pack.SVHhjg/_old 2019-05-25 13:20:24.064332854 +0200
+++ /var/tmp/diff_new_pack.SVHhjg/_new 2019-05-25 13:20:24.064332854 +0200
@@ -100,7 +100,7 @@
# main package definition
#################################################################################
Name: ceph
-Version: 14.2.1.431+gd032e5dd80
+Version: 14.2.1.448+g1bd10a856f
Release: 0%{?dist}
%if 0%{?fedora} || 0%{?rhel}
Epoch: 2
@@ -116,7 +116,7 @@
Group: System/Filesystems
%endif
URL: http://ceph.com/
-Source0: %{?_remote_tarball_prefix}ceph-14.2.1-431-gd032e5dd80.tar.bz2
+Source0: %{?_remote_tarball_prefix}ceph-14.2.1-448-g1bd10a856f.tar.bz2
%if 0%{?suse_version}
# _insert_obs_source_lines_here
ExclusiveArch: x86_64 aarch64 ppc64le s390x
@@ -1068,7 +1068,7 @@
# common
#################################################################################
%prep
-%autosetup -p1 -n ceph-14.2.1-431-gd032e5dd80
+%autosetup -p1 -n ceph-14.2.1-448-g1bd10a856f
%build
++++++ ceph-14.2.1-431-gd032e5dd80.tar.bz2 -> ceph-14.2.1-448-g1bd10a856f.tar.bz2 ++++++
/work/SRC/openSUSE:Factory/ceph/ceph-14.2.1-431-gd032e5dd80.tar.bz2 /work/SRC/openSUSE:Factory/.ceph.new.5148/ceph-14.2.1-448-g1bd10a856f.tar.bz2 differ: char 11, line 1
1
0
Hello community,
here is the log from the commit of package gcc9 for openSUSE:Factory checked in at 2019-05-25 13:20:01
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/gcc9 (Old)
and /work/SRC/openSUSE:Factory/.gcc9.new.5148 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "gcc9"
Sat May 25 13:20:01 2019 rev:2 rq:704170 version:9.1.1+r271393
Changes:
--------
--- /work/SRC/openSUSE:Factory/gcc9/cross-aarch64-gcc9.changes 2019-05-10 08:59:24.505262173 +0200
+++ /work/SRC/openSUSE:Factory/.gcc9.new.5148/cross-aarch64-gcc9.changes 2019-05-25 13:20:05.584339726 +0200
@@ -1,0 +2,38 @@
+Mon May 20 07:50:43 UTC 2019 - rguenther(a)suse.com
+
+- Update to gcc-9-branch head (r271393).
+- Always use ISL for crosses like for native compilers.
+
+-------------------------------------------------------------------
+Fri May 10 07:16:15 UTC 2019 - rguenther(a)suse.com
+
+- Update to gcc-9-branch head (r271050).
+
+-------------------------------------------------------------------
+Fri May 3 12:53:11 UTC 2019 - Martin Liška <mliska(a)suse.cz>
+
+- Strip -flto from $optflags as we use LTO bootstrap config.
+
+-------------------------------------------------------------------
+Fri May 3 08:27:32 UTC 2019 - rguenther(a)suse.com
+
+- Update to GCC 9.1.0 release.
+
+-------------------------------------------------------------------
+Thu May 2 14:22:43 UTC 2019 - rguenther(a)suse.com
+
+- Update to gcc-9-branch head (r270796).
+- Enable D for s390x.
+
+-------------------------------------------------------------------
+Wed May 1 07:33:25 UTC 2019 - Andreas Schwab <schwab(a)suse.de>
+
+- Enable D for aarch64 and riscv64
+
+-------------------------------------------------------------------
+Tue Apr 30 13:32:42 UTC 2019 - rguenther(a)suse.com
+
+- Update to gcc-9-branch head (r270689).
+ * GCC 9.1 RC2.
+
+-------------------------------------------------------------------
cross-arm-gcc9.changes: same change
cross-arm-none-gcc9-bootstrap.changes: same change
cross-arm-none-gcc9.changes: same change
cross-avr-gcc9-bootstrap.changes: same change
cross-avr-gcc9.changes: same change
cross-epiphany-gcc9-bootstrap.changes: same change
cross-epiphany-gcc9.changes: same change
cross-hppa-gcc9.changes: same change
cross-i386-gcc9.changes: same change
cross-m68k-gcc9.changes: same change
cross-mips-gcc9.changes: same change
cross-nvptx-gcc9.changes: same change
cross-ppc64-gcc9.changes: same change
cross-ppc64le-gcc9.changes: same change
cross-riscv64-elf-gcc9-bootstrap.changes: same change
cross-riscv64-elf-gcc9.changes: same change
cross-riscv64-gcc9.changes: same change
cross-rx-gcc9-bootstrap.changes: same change
cross-rx-gcc9.changes: same change
cross-s390x-gcc9.changes: same change
cross-sparc-gcc9.changes: same change
cross-sparc64-gcc9.changes: same change
cross-x86_64-gcc9.changes: same change
gcc9-testresults.changes: same change
gcc9.changes: same change
Old:
----
gcc-9.0.1+r270591.tar.xz
New:
----
gcc-9.1.1+r271393.tar.xz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ cross-aarch64-gcc9.spec ++++++
--- /var/tmp/diff_new_pack.TVQfyu/_old 2019-05-25 13:20:13.216336888 +0200
+++ /var/tmp/diff_new_pack.TVQfyu/_new 2019-05-25 13:20:13.220336886 +0200
@@ -121,10 +121,8 @@
%else
BuildRequires: texinfo
%endif
-BuildRequires: zlib-devel
-%ifarch %ix86 x86_64 ppc ppc64 s390 s390x ia64 %sparc hppa %arm
BuildRequires: isl-devel
-%endif
+BuildRequires: zlib-devel
%ifarch ia64
BuildRequires: libunwind-devel
%endif
@@ -175,7 +173,7 @@
%define biarch_targets x86_64 s390x powerpc64 powerpc sparc sparc64
URL: https://gcc.gnu.org/
-Version: 9.0.1+r270591
+Version: 9.1.1+r271393
Release: 0
%define gcc_dir_version %(echo %version | sed 's/+.*//' | cut -d '.' -f 1)
%define gcc_snapshot_revision %(echo %version | sed 's/[3-9]\.[0-9]\.[0-6]//' | sed 's/+/-/')
@@ -276,6 +274,7 @@
#test patching end
%build
+%define _lto_cflags %{nil}
# Avoid rebuilding of generated files
contrib/gcc_update --touch
cross-arm-gcc9.spec: same change
cross-arm-none-gcc9-bootstrap.spec: same change
cross-arm-none-gcc9.spec: same change
cross-avr-gcc9-bootstrap.spec: same change
cross-avr-gcc9.spec: same change
cross-epiphany-gcc9-bootstrap.spec: same change
cross-epiphany-gcc9.spec: same change
cross-hppa-gcc9.spec: same change
cross-i386-gcc9.spec: same change
cross-m68k-gcc9.spec: same change
cross-mips-gcc9.spec: same change
cross-nvptx-gcc9.spec: same change
cross-ppc64-gcc9.spec: same change
cross-ppc64le-gcc9.spec: same change
cross-riscv64-elf-gcc9-bootstrap.spec: same change
cross-riscv64-elf-gcc9.spec: same change
cross-riscv64-gcc9.spec: same change
cross-rx-gcc9-bootstrap.spec: same change
cross-rx-gcc9.spec: same change
cross-s390x-gcc9.spec: same change
cross-sparc-gcc9.spec: same change
cross-sparc64-gcc9.spec: same change
cross-x86_64-gcc9.spec: same change
++++++ gcc9-testresults.spec ++++++
--- /var/tmp/diff_new_pack.TVQfyu/_old 2019-05-25 13:20:13.752336688 +0200
+++ /var/tmp/diff_new_pack.TVQfyu/_new 2019-05-25 13:20:13.752336688 +0200
@@ -70,7 +70,7 @@
%define build_objc 1
%define build_objcp 1
%define build_go 1
-%ifarch x86_64 %ix86 %arm
+%ifarch x86_64 %ix86 %arm aarch64 riscv64 s390x
%define build_d 1
%else
%define build_d 0
@@ -260,7 +260,7 @@
%define biarch_targets x86_64 s390x powerpc64 powerpc sparc sparc64
URL: https://gcc.gnu.org/
-Version: 9.0.1+r270591
+Version: 9.1.1+r271393
Release: 0
%define gcc_dir_version %(echo %version | sed 's/+.*//' | cut -d '.' -f 1)
%define gcc_snapshot_revision %(echo %version | sed 's/[3-9]\.[0-9]\.[0-6]//' | sed 's/+/-/')
@@ -448,6 +448,7 @@
#test patching end
%build
+%define _lto_cflags %{nil}
# Avoid rebuilding of generated files
contrib/gcc_update --touch
++++++ gcc9.spec ++++++
--- /var/tmp/diff_new_pack.TVQfyu/_old 2019-05-25 13:20:13.800336671 +0200
+++ /var/tmp/diff_new_pack.TVQfyu/_new 2019-05-25 13:20:13.820336663 +0200
@@ -52,7 +52,7 @@
%define build_objc 1
%define build_objcp 1
%define build_go 1
-%ifarch x86_64 %ix86 %arm
+%ifarch x86_64 %ix86 %arm aarch64 riscv64 s390x
%define build_d 1
%else
%define build_d 0
@@ -242,7 +242,7 @@
%define biarch_targets x86_64 s390x powerpc64 powerpc sparc sparc64
URL: https://gcc.gnu.org/
-Version: 9.0.1+r270591
+Version: 9.1.1+r271393
Release: 0
%define gcc_dir_version %(echo %version | sed 's/+.*//' | cut -d '.' -f 1)
%define gcc_snapshot_revision %(echo %version | sed 's/[3-9]\.[0-9]\.[0-6]//' | sed 's/+/-/')
@@ -1720,6 +1720,7 @@
#test patching end
%build
+%define _lto_cflags %{nil}
# Avoid rebuilding of generated files
contrib/gcc_update --touch
@@ -2556,10 +2557,6 @@
%endif
%if %{build_fortran}
%{libsubdir}/include/ISO_Fortran_binding.h
-%if %{separate_biarch}
-%dir %{versmainlibdirbi}/include
-%{versmainlibdirbi}/include/ISO_Fortran_binding.h
-%endif
%endif
%versmainlib *crt*.o
%versmainlib libgcc*.a
++++++ cross.spec.in ++++++
--- /var/tmp/diff_new_pack.TVQfyu/_old 2019-05-25 13:20:14.108336556 +0200
+++ /var/tmp/diff_new_pack.TVQfyu/_new 2019-05-25 13:20:14.112336555 +0200
@@ -101,9 +101,7 @@
BuildRequires: texinfo
%endif
BuildRequires: zlib-devel
-%ifarch %ix86 x86_64 ppc ppc64 s390 s390x ia64 %sparc hppa %arm
BuildRequires: isl-devel
-%endif
%ifarch ia64
BuildRequires: libunwind-devel
%endif
++++++ gcc-9.0.1+r270591.tar.xz -> gcc-9.1.1+r271393.tar.xz ++++++
/work/SRC/openSUSE:Factory/gcc9/gcc-9.0.1+r270591.tar.xz /work/SRC/openSUSE:Factory/.gcc9.new.5148/gcc-9.1.1+r271393.tar.xz differ: char 26, line 1
++++++ gcc.spec.in ++++++
--- /var/tmp/diff_new_pack.TVQfyu/_old 2019-05-25 13:20:14.164336535 +0200
+++ /var/tmp/diff_new_pack.TVQfyu/_new 2019-05-25 13:20:14.164336535 +0200
@@ -52,7 +52,7 @@
%define build_objc 1
%define build_objcp 1
%define build_go 1
-%ifarch x86_64 %ix86 %arm
+%ifarch x86_64 %ix86 %arm aarch64 riscv64 s390x
%define build_d 1
%else
%define build_d 0
@@ -246,7 +246,7 @@
%define biarch_targets x86_64 s390x powerpc64 powerpc sparc sparc64
URL: https://gcc.gnu.org/
-Version: 9.0.1+r270591
+Version: 9.1.1+r271393
Release: 1
%define gcc_dir_version %(echo %version | sed 's/+.*//' | cut -d '.' -f 1)
%define gcc_snapshot_revision %(echo %version | sed 's/[3-9]\.[0-9]\.[0-6]//' | sed 's/+/-/')
@@ -1041,6 +1041,7 @@
#test patching end
%build
+%define _lto_cflags %{nil}
# Avoid rebuilding of generated files
contrib/gcc_update --touch
@@ -1884,10 +1885,6 @@
%endif
%if %{build_fortran}
%{libsubdir}/include/ISO_Fortran_binding.h
-%if %{separate_biarch}
-%dir %{versmainlibdirbi}/include
-%{versmainlibdirbi}/include/ISO_Fortran_binding.h
-%endif
%endif
%versmainlib *crt*.o
%versmainlib libgcc*.a
1
0
Hello community,
here is the log from the commit of package curl for openSUSE:Factory checked in at 2019-05-25 13:19:57
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/curl (Old)
and /work/SRC/openSUSE:Factory/.curl.new.5148 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "curl"
Sat May 25 13:19:57 2019 rev:146 rq:704820 version:7.65.0
Changes:
--------
--- /work/SRC/openSUSE:Factory/curl/curl-mini.changes 2019-04-15 11:50:29.938488128 +0200
+++ /work/SRC/openSUSE:Factory/.curl.new.5148/curl-mini.changes 2019-05-25 13:19:59.252342080 +0200
@@ -1,0 +2,101 @@
+Wed May 22 11:41:49 UTC 2019 - Pedro Monreal Gonzalez <pmonrealgonzalez(a)suse.com>
+
+- Update to 7.65.0 [bsc#1135176, CVE-2019-5435][bsc#1135170, CVE-2019-5436]
+ * Changes:
+ - CURLOPT_DNS_USE_GLOBAL_CACHE: removed
+ - CURLOPT_MAXAGE_CONN: set the maximum allowed age for conn reuse
+ - pipelining: removed
+ * Bugfixes:
+ - CVE-2019-5435: Integer overflows in curl_url_set
+ - CVE-2019-5436: tftp: use the current blksize for recvfrom()
+ - --config: clarify that initial : and = might need quoting
+ - CURLMOPT_TIMERFUNCTION.3: warn about the recursive risk
+ - CURLOPT_ADDRESS_SCOPE: fix range check and more
+ - CURLOPT_CHUNK_BGN_FUNCTION.3: document the struct and time value
+ - CURLOPT_READFUNCTION.3: see also CURLOPT_UPLOAD_BUFFERSIZE
+ - CURL_MAX_INPUT_LENGTH: largest acceptable string input size
+ - Curl_disconnect: treat all CONNECT_ONLY connections as "dead"
+ - OS400/ccsidcurl: replace use of Curl_vsetopt
+ - OpenSSL: Report -fips in version if OpenSSL is built with FIPS
+ - WRITEFUNCTION: add missing set_in_callback around callback
+ - altsvc: Fix building with cookies disabled
+ - auth: Rename the various authentication clean up functions
+ - base64: build conditionally if there are users
+ - cmake: avoid linking executable for some tests with cmake 3.6+
+ - cmake: clear CMAKE_REQUIRED_LIBRARIES after each use
+ - cmake: set SSL_BACKENDS
+ - configure: avoid unportable '==' test(1) operator
+ - configure: error out if OpenSSL wasn't detected when asked for
+ - configure: fix default location for fish completions
+ - cookie: Guard against possible NULL ptr deref
+ - curl: make code work with protocol-disabled libcurl
+ - curl: report error for "--no-" on non-boolean options
+ - curlver.h: use parenthesis in CURL_VERSION_BITS macro
+ - docs/INSTALL: fix broken link
+ - doh: acknowledge CURL_DISABLE_DOH
+ - doh: disable DOH for the cases it doesn't work
+ - examples: remove unused variables
+ - ftplistparser: fix LGTM alert "Empty block without comment"
+ - hostip: acknowledge CURL_DISABLE_SHUFFLE_DNS
+ - http: Ignore HTTP/2 prior knowledge setting for HTTP proxies
+ - http: acknowledge CURL_DISABLE_HTTP_AUTH
+ - http: mark bundle as not for multiuse on < HTTP/2 response
+ - http_digest: Don't expose functions when HTTP and Crypto Auth are disabled
+ - http_negotiate: do not treat failure of gss_init_sec_context() as fatal
+ - http_ntlm: Corrected the name of the include guard
+ - http_ntlm_wb: Handle auth for only a single request
+ - http_ntlm_wb: Return the correct error on receiving an empty auth message
+ - lib509: add missing include for strdup
+ - lib557: initialize variables
+ - mbedtls: enable use of EC keys
+ - mime: acknowledge CURL_DISABLE_MIME
+ - multi: improved HTTP_1_1_REQUIRED handling
+ - netrc: acknowledge CURL_DISABLE_NETRC
+ - nss: allow fifos and character devices for certificates
+ - nss: provide more specific error messages on failed init
+ - ntlm: Fix misaligned function comments for Curl_auth_ntlm_cleanup
+ - ntlm: Support the NT response in the type-3 when OpenSSL doesn't include MD4
+ - openssl: mark connection for close on TLS close_notify
+ - openvms: Remove pre-processor for SecureTransport
+ - parse_proxy: use the URL parser API
+ - parsedate: disabled on CURL_DISABLE_PARSEDATE
+ - pingpong: disable more when no pingpong protocols are enabled
+ - polarssl_threadlock: remove conditionally unused code
+ - progress: acknowledge CURL_DISABLE_PROGRESS_METER
+ - proxy: acknowledge DISABLE_PROXY more
+ - resolve: apply Happy Eyeballs philosophy to parallel c-ares queries
+ - revert "multi: support verbose conncache closure handle"
+ - sasl: Don't send authcid as authzid for the PLAIN mechanism as per RFC 4616
+ - sasl: only enable if there's a protocol enabled using it
+ - singleipconnect: show port in the verbose "Trying ..." message
+ - socks5: user name and passwords must be shorter than 256
+ - socks: fix error message
+ - socksd: new SOCKS 4+5 server for tests
+ - spnego_gssapi: fix return code on gss_init_sec_context() failure
+ - ssh-libssh: remove unused variable
+ - ssh: define USE_SSH if SSH is enabled (any backend)
+ - ssh: move variable declaration to where it's used
+ - test1002: correct the name
+ - test2100: Fix typos in test description
+ - tests: Run global cleanup at end of tests
+ - tests: make Impacket (SMB server) Python 3 compatible
+ - tool_cb_wrt: fix bad-function-cast warning
+ - tool_formparse: remove redundant assignment
+ - tool_help: Warn if curl and libcurl versions do not match
+ - tool_help: include for strcasecmp
+ - url: always clone the CUROPT_CURLU handle
+ - url: convert the zone id from a IPv6 URL to correct scope id
+ - urlapi: add CURLUPART_ZONEID to set and get
+ - urlapi: increase supported scheme length to 40 bytes
+ - urlapi: require a non-zero host name length when parsing URL
+ - urlapi: stricter CURLUPART_PORT parsing
+ - urlapi: strip off zone id from numerical IPv6 addresses
+ - urlapi: urlencode characters above 0x7f correctly
+ - vauth/cleartext: update the PLAIN login to match RFC 4616
+ - vauth/oauth2: Fix OAUTHBEARER token generation
+ - vauth: Fix incorrect function description for Curl_auth_user_contains_domain
+ - vtls: fix potential ssl_buffer stack overflow
+ - wildcard: disable from build when FTP isn't present
+ - xattr: skip unittest on unsupported platforms
+
+-------------------------------------------------------------------
curl.changes: same change
Old:
----
curl-7.64.1.tar.xz
curl-7.64.1.tar.xz.asc
New:
----
curl-7.65.0.tar.xz
curl-7.65.0.tar.xz.asc
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ curl-mini.spec ++++++
--- /var/tmp/diff_new_pack.KCSZY6/_old 2019-05-25 13:20:00.516341610 +0200
+++ /var/tmp/diff_new_pack.KCSZY6/_new 2019-05-25 13:20:00.528341606 +0200
@@ -29,7 +29,7 @@
# need ssl always for python-pycurl
%bcond_without openssl
Name: curl-mini
-Version: 7.64.1
+Version: 7.65.0
Release: 0
Summary: A Tool for Transferring Data from URLs
License: curl
@@ -204,15 +204,15 @@
%files
%doc README RELEASE-NOTES
-%doc docs/{BUGS,FAQ,FEATURES,MANUAL,RESOURCES,TODO,TheArtOfHttpScripting}
+%doc docs/{BUGS,FAQ,FEATURES,RESOURCES,TODO,TheArtOfHttpScripting}
%{_bindir}/curl
%{_datadir}/zsh/site-functions/_curl
%{_mandir}/man1/curl.1%{ext_man}
%dir %{_datadir}/zsh
%dir %{_datadir}/zsh/site-functions
%dir %{_datadir}/fish/
-%dir %{_datadir}/fish/completions/
-%{_datadir}/fish/completions/curl.fish
+%dir %{_datadir}/fish/vendor_completions.d/
+%{_datadir}/fish/vendor_completions.d/curl.fish
%files -n libcurl4%{?mini}
%license COPYING
++++++ curl.spec ++++++
--- /var/tmp/diff_new_pack.KCSZY6/_old 2019-05-25 13:20:00.656341558 +0200
+++ /var/tmp/diff_new_pack.KCSZY6/_new 2019-05-25 13:20:00.676341550 +0200
@@ -27,7 +27,7 @@
# need ssl always for python-pycurl
%bcond_without openssl
Name: curl
-Version: 7.64.1
+Version: 7.65.0
Release: 0
Summary: A Tool for Transferring Data from URLs
License: curl
@@ -202,15 +202,15 @@
%files
%doc README RELEASE-NOTES
-%doc docs/{BUGS,FAQ,FEATURES,MANUAL,RESOURCES,TODO,TheArtOfHttpScripting}
+%doc docs/{BUGS,FAQ,FEATURES,RESOURCES,TODO,TheArtOfHttpScripting}
%{_bindir}/curl
%{_datadir}/zsh/site-functions/_curl
%{_mandir}/man1/curl.1%{ext_man}
%dir %{_datadir}/zsh
%dir %{_datadir}/zsh/site-functions
%dir %{_datadir}/fish/
-%dir %{_datadir}/fish/completions/
-%{_datadir}/fish/completions/curl.fish
+%dir %{_datadir}/fish/vendor_completions.d/
+%{_datadir}/fish/vendor_completions.d/curl.fish
%files -n libcurl4%{?mini}
%license COPYING
++++++ curl-7.64.1.tar.xz -> curl-7.65.0.tar.xz ++++++
++++ 44636 lines of diff (skipped)
1
0
Hello community,
here is the log from the commit of package acpica for openSUSE:Factory checked in at 2019-05-25 13:19:14
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/acpica (Old)
and /work/SRC/openSUSE:Factory/.acpica.new.5148 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "acpica"
Sat May 25 13:19:14 2019 rev:59 rq:704812 version:20190509
Changes:
--------
--- /work/SRC/openSUSE:Factory/acpica/acpica.changes 2019-04-12 09:11:05.225586137 +0200
+++ /work/SRC/openSUSE:Factory/.acpica.new.5148/acpica.changes 2019-05-25 13:19:19.144356993 +0200
@@ -1,0 +2,7 @@
+Wed May 22 09:29:13 UTC 2019 - trenn(a)suse.de
+
+- Update to version 20190509
+ Includes a fix that breaks VirtualBox
+ https://github.com/acpica/acpica/issues/462
+
+-------------------------------------------------------------------
Old:
----
acpica-unix-20190405.tar.gz
New:
----
acpica-unix-20190509.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ acpica.spec ++++++
--- /var/tmp/diff_new_pack.fHGpE1/_old 2019-05-25 13:19:20.840356362 +0200
+++ /var/tmp/diff_new_pack.fHGpE1/_new 2019-05-25 13:19:20.840356362 +0200
@@ -20,7 +20,7 @@
%define kver %(rpm -q --qf '%%{VERSION}' kernel-source)
%define dmp_ver %{kver}
Name: acpica
-Version: 20190405
+Version: 20190509
Release: 0
Summary: This is a set of tools to display and debug your BIOS ACPI tables
License: GPL-2.0-only
++++++ acpica-unix-20190405.tar.gz -> acpica-unix-20190509.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/changes.txt new/acpica-unix-20190509/changes.txt
--- old/acpica-unix-20190405/changes.txt 2019-04-05 17:12:57.000000000 +0200
+++ new/acpica-unix-20190509/changes.txt 2019-05-10 00:04:14.000000000 +0200
@@ -1,4 +1,91 @@
----------------------------------------
+09 May 2019. Summary of changes for version 20190509:
+
+
+1) ACPICA kernel-resident subsystem:
+
+Revert commit 6c43e1a ("ACPICA: Clear status of GPEs before enabling
+them") that causes problems with Thunderbolt controllers to occur if a
+dock device is connected at init time (the xhci_hcd and thunderbolt
+modules crash which prevents peripherals connected through them from
+working). Commit 6c43e1a effectively causes commit ecc1165b8b74 ("ACPICA:
+Dispatch active GPEs at init time") to get undone, so the problem
+addressed by commit ecc1165b8b74 appears again as a result of it.
+
+
+2) iASL Compiler/Disassembler and ACPICA tools:
+
+Reverted iASL: Additional forward reference detection. This change
+reverts forward reference detection for field declarations. The feature
+unintentionally emitted AML bytecode with incorrect package lengths for
+some ASL code related to Fields and OperationRegions. This malformed AML
+can cause systems to crash
+during boot. The malformed AML bytecode is emitted in iASL version
+20190329 and 20190405.
+
+iASL: improve forward reference detection. This change improves forward
+reference detection for named objects inside of scopes. If a parse object
+has the OP_NOT_FOUND_DURING_LOAD set, it means that Op is a reference to
+a named object that is declared later in the AML bytecode. This is
+allowed if the reference is inside of a method and the declaration is
+outside of a method like so:
+
+DefinitionBlock(...)
+{
+ Method (TEST)
+ {
+ Return (NUM0)
+ }
+ Name (NUM0,0)
+}
+
+However, if the declaration and reference are both in the same method or
+outside any methods, this is a forward reference and should be marked as
+an error because it would result in runtime errors.
+
+DefinitionBlock(...)
+{
+ Name (BUFF, Buffer (NUM0) {}) // Forward reference
+ Name (NUM0, 0x0)
+
+ Method (TEST)
+ {
+ Local0 = NUM1
+ Name (NUM1, 0x1) // Forward reference
+ return (Local0)
+ }
+}
+
+iASL: Implemented additional buffer overflow analysis for BufferField
+declarations. Check if a buffer index argument to a create buffer field
+operation is beyond the end of the target buffer.
+
+This affects these AML operators:
+
+ AML_CREATE_FIELD_OP
+ AML_CREATE_BIT_FIELD_OP
+ AML_CREATE_BYTE_FIELD_OP
+ AML_CREATE_WORD_FIELD_OP
+ AML_CREATE_DWORD_FIELD_OP
+ AML_CREATE_QWORD_FIELD_OP
+
+ There are three conditions that must be satisfied in order to allow this
+validation at compile time:
+
+ 1) The length of the target buffer must be an integer constant
+ 2) The index specified in the create* must be an integer constant
+ 3) For CreateField, the bit length argument must be non-zero.
+
+Example:
+ Name (BUF1, Buffer() {1,2})
+ CreateField (BUF1, 7, 9, CF03) // 3: ERR
+
+dsdt.asl 14: CreateField (BUF1, 7, 9, CF03) // 3: ERR
+Error 6165 - ^ Buffer index beyond end of
+target buffer
+
+
+----------------------------------------
05 April 2019. Summary of changes for version 20190405:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslcodegen.c new/acpica-unix-20190509/source/compiler/aslcodegen.c
--- old/acpica-unix-20190405/source/compiler/aslcodegen.c 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslcodegen.c 2019-05-10 00:04:15.000000000 +0200
@@ -690,7 +690,8 @@
{
if (FlReadFile (ASL_FILE_AML_OUTPUT, &FileByte, 1) != AE_OK)
{
- printf ("EOF while reading checksum bytes\n");
+ AslError (ASL_ERROR, ASL_MSG_COMPILER_INTERNAL, NULL,
+ "Table length is greater than size of the input file");
return;
}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslcompile.c new/acpica-unix-20190509/source/compiler/aslcompile.c
--- old/acpica-unix-20190405/source/compiler/aslcompile.c 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslcompile.c 2019-05-10 00:04:15.000000000 +0200
@@ -224,7 +224,6 @@
if (AslGbl_PreprocessOnly)
{
UtEndEvent (Event);
- CmCleanupAndExit ();
return (AE_OK);
}
}
@@ -919,7 +918,7 @@
* We will delete the AML file if there are errors and the
* force AML output option has not been used.
*/
- if (AslGbl_ParserErrorDetected || ((AslGbl_ExceptionCount[ASL_ERROR] > 0) &&
+ if (AslGbl_ParserErrorDetected || AslGbl_PreprocessOnly || ((AslGbl_ExceptionCount[ASL_ERROR] > 0) &&
(!AslGbl_IgnoreErrors) &&
AslGbl_Files[ASL_FILE_AML_OUTPUT].Handle))
{
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslcompiler.h new/acpica-unix-20190509/source/compiler/aslcompiler.h
--- old/acpica-unix-20190405/source/compiler/aslcompiler.h 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslcompiler.h 2019-05-10 00:04:15.000000000 +0200
@@ -1105,10 +1105,6 @@
FlGetCurrentFileNode (
void);
-BOOLEAN
-FlInputFileExists (
- char *InputFilename);
-
/*
* aslhwmap - hardware map summary
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslerror.c new/acpica-unix-20190509/source/compiler/aslerror.c
--- old/acpica-unix-20190405/source/compiler/aslerror.c 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslerror.c 2019-05-10 00:04:15.000000000 +0200
@@ -869,16 +869,8 @@
return;
}
- if (!FlInputFileExists (Filename))
- {
- /*
- * This means that this file is an include file. Record the .src
- * file as the error message source because this file is not in
- * the global file list.
- */
- Enode->SourceFilename =
- FileNode->Files[ASL_FILE_SOURCE_OUTPUT].Filename;
- }
+ Enode->SourceFilename =
+ FileNode->Files[ASL_FILE_SOURCE_OUTPUT].Filename;
}
}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslfiles.c new/acpica-unix-20190509/source/compiler/aslfiles.c
--- old/acpica-unix-20190405/source/compiler/aslfiles.c 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslfiles.c 2019-05-10 00:04:15.000000000 +0200
@@ -163,6 +163,10 @@
ACPI_PARSE_OBJECT *Op,
char *Filename);
+static BOOLEAN
+FlInputFileExists (
+ char *InputFilename);
+
#ifdef ACPI_OBSOLETE_FUNCTIONS
ACPI_STATUS
FlParseInputPathname (
@@ -250,7 +254,7 @@
*
******************************************************************************/
-BOOLEAN
+static BOOLEAN
FlInputFileExists (
char *Filename)
{
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslload.c new/acpica-unix-20190509/source/compiler/aslload.c
--- old/acpica-unix-20190405/source/compiler/aslload.c 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslload.c 2019-05-10 00:04:15.000000000 +0200
@@ -263,10 +263,29 @@
ACPI_WALK_STATE *WalkState)
{
ACPI_PARSE_OBJECT *Child = NULL;
+ ACPI_PARSE_OBJECT *SourceRegion;
ACPI_NAMESPACE_NODE *Node;
ACPI_STATUS Status;
+
+ SourceRegion = UtGetArg (Op, 0);
+ if (SourceRegion)
+ {
+ Status = AcpiNsLookup (WalkState->ScopeInfo,
+ SourceRegion->Asl.Value.String,
+ ACPI_TYPE_REGION, ACPI_IMODE_EXECUTE,
+ ACPI_NS_DONT_OPEN_SCOPE, NULL, &Node);
+ if (Status == AE_NOT_FOUND)
+ {
+ /*
+ * If the named object is not found, it means that it is either a
+ * forward reference or the named object does not exist.
+ */
+ SourceRegion->Asl.CompileFlags |= OP_NOT_FOUND_DURING_LOAD;
+ }
+ }
+
/* Get the first named field element */
switch (Op->Asl.AmlOpcode)
@@ -493,7 +512,7 @@
case AML_FIELD_OP:
Status = LdLoadFieldElements (Op, WalkState);
- break;
+ return (Status);
case AML_INT_CONNECTION_OP:
@@ -557,8 +576,7 @@
* We only want references to named objects:
* Store (2, WXYZ) -> Attempt to resolve the name
*/
- if ((OpInfo->Class == AML_CLASS_NAMED_OBJECT) &&
- (OpInfo->Type != AML_TYPE_NAMED_FIELD))
+ if (OpInfo->Class == AML_CLASS_NAMED_OBJECT)
{
return (AE_OK);
}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslmain.c new/acpica-unix-20190509/source/compiler/aslmain.c
--- old/acpica-unix-20190405/source/compiler/aslmain.c 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslmain.c 2019-05-10 00:04:15.000000000 +0200
@@ -291,7 +291,8 @@
/*
* At this point, compilation of a data table or disassembly is complete.
*/
- if (AslGbl_FileType == ASL_INPUT_TYPE_ASCII_DATA || AcpiGbl_DisasmFlag)
+ if (AslGbl_PreprocessOnly || AcpiGbl_DisasmFlag ||
+ AslGbl_FileType == ASL_INPUT_TYPE_ASCII_DATA)
{
goto CleanupAndExit;
}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslmessages.c new/acpica-unix-20190509/source/compiler/aslmessages.c
--- old/acpica-unix-20190405/source/compiler/aslmessages.c 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslmessages.c 2019-05-10 00:04:15.000000000 +0200
@@ -283,7 +283,7 @@
/* ASL_MSG_NO_RETVAL */ "Called method returns no value",
/* ASL_MSG_NO_WHILE */ "No enclosing While statement",
/* ASL_MSG_NON_ASCII */ "Invalid characters found in file",
-/* ASL_MSG_NON_ZERO */ "Operand evaluates to zero",
+/* ASL_MSG_BUFFER_FIELD_LENGTH */ "Field length must be non-zero",
/* ASL_MSG_NOT_EXIST */ "Object does not exist",
/* ASL_MSG_NOT_FOUND */ "Object not found or not accessible from current scope",
/* ASL_MSG_NOT_METHOD */ "Not a control method, cannot invoke",
@@ -342,7 +342,7 @@
/* ASL_MSG_RANGE */ "Constant out of range",
/* ASL_MSG_BUFFER_ALLOCATION */ "Could not allocate line buffer",
/* ASL_MSG_MISSING_DEPENDENCY */ "Missing dependency",
-/* ASL_MSG_ILLEGAL_FORWARD_REF */ "Forward references are not supported by the ASL language",
+/* ASL_MSG_ILLEGAL_FORWARD_REF */ "Illegal forward reference",
/* ASL_MSG_ILLEGAL_METHOD_REF */ "Object is declared in a different method",
/* ASL_MSG_LOCAL_NOT_USED */ "Method Local is set but never used",
/* ASL_MSG_ARG_AS_LOCAL_NOT_USED */ "Method Argument (as a local) is set but never used",
@@ -364,7 +364,8 @@
/* ASL_MSG_NAMEPATH_NOT_EXIST */ "One or more objects within the Pathname do not exist",
/* ASL_MSG_REGION_LENGTH */ "Operation Region declared with zero length",
/* ASL_MSG_TEMPORARY_OBJECT */ "Object is created temporarily in another method and cannot be accessed",
-/* ASL_MSG_UNDEFINED_EXTERNAL */ "Named object was declared external but the actual definition does not exist"
+/* ASL_MSG_UNDEFINED_EXTERNAL */ "Named object was declared external but the actual definition does not exist",
+/* ASL_MSG_BUFFER_FIELD_OVERFLOW */ "Buffer field extends beyond end of target buffer"
};
/* Table compiler */
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslmessages.h new/acpica-unix-20190509/source/compiler/aslmessages.h
--- old/acpica-unix-20190405/source/compiler/aslmessages.h 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslmessages.h 2019-05-10 00:04:15.000000000 +0200
@@ -285,7 +285,7 @@
ASL_MSG_NO_RETVAL,
ASL_MSG_NO_WHILE,
ASL_MSG_NON_ASCII,
- ASL_MSG_NON_ZERO,
+ ASL_MSG_BUFFER_FIELD_LENGTH,
ASL_MSG_NOT_EXIST,
ASL_MSG_NOT_FOUND,
ASL_MSG_NOT_METHOD,
@@ -367,6 +367,7 @@
ASL_MSG_REGION_LENGTH,
ASL_MSG_TEMPORARY_OBJECT,
ASL_MSG_UNDEFINED_EXTERNAL,
+ ASL_MSG_BUFFER_FIELD_OVERFLOW,
/* These messages are used by the Data Table compiler only */
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/asloperands.c new/acpica-unix-20190509/source/compiler/asloperands.c
--- old/acpica-unix-20190405/source/compiler/asloperands.c 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/asloperands.c 2019-05-10 00:04:15.000000000 +0200
@@ -819,6 +819,7 @@
BufferLengthOp->Asl.Value.Integer = BufferLength;
(void) OpcSetOptimalIntegerSize (BufferLengthOp);
+ UtSetParseOpName (BufferLengthOp);
/* Remaining nodes are handled via the tree walk */
}
@@ -905,6 +906,7 @@
*/
Op->Asl.Child->Asl.ParseOpcode = PARSEOP_INTEGER;
Op->Asl.Child->Asl.Value.Integer = PackageLength;
+ UtSetParseOpName (Op);
/* Set the AML opcode */
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslstartup.c new/acpica-unix-20190509/source/compiler/aslstartup.c
--- old/acpica-unix-20190405/source/compiler/aslstartup.c 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslstartup.c 2019-05-10 00:04:15.000000000 +0200
@@ -550,14 +550,10 @@
Status = CmDoCompile ();
if (ACPI_FAILURE (Status))
{
+ PrTerminatePreprocessor ();
return (Status);
}
- /* Cleanup (for next source file) and exit */
-
- AeClearErrorLog ();
- PrTerminatePreprocessor ();
-
/*
* At this point, we know how many lines are in the input file. Save it
* to display for post-compilation summary.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslutils.c new/acpica-unix-20190509/source/compiler/aslutils.c
--- old/acpica-unix-20190405/source/compiler/aslutils.c 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslutils.c 2019-05-10 00:04:15.000000000 +0200
@@ -541,8 +541,14 @@
{
UINT32 i;
ASL_GLOBAL_FILE_NODE *FileNode;
+ BOOLEAN DisplayAMLSummary;
+ DisplayAMLSummary =
+ !AslGbl_PreprocessOnly && !AslGbl_ParserErrorDetected &&
+ ((AslGbl_ExceptionCount[ASL_ERROR] == 0) || AslGbl_IgnoreErrors) &&
+ AslGbl_Files[ASL_FILE_AML_OUTPUT].Handle;
+
if (FileId != ASL_FILE_STDOUT)
{
/* Compiler name and version number */
@@ -595,9 +601,7 @@
/* AML summary */
- if (!AslGbl_ParserErrorDetected &&
- ((AslGbl_ExceptionCount[ASL_ERROR] == 0) || AslGbl_IgnoreErrors) &&
- AslGbl_Files[ASL_FILE_AML_OUTPUT].Handle)
+ if (DisplayAMLSummary)
{
FlPrintFile (FileId,
"%-14s %s - %7u bytes %6u opcodes %6u named objects\n",
@@ -633,7 +637,7 @@
continue;
}
- FlPrintFile (FileId, "%14s %s - %u bytes\n",
+ FlPrintFile (FileId, "%-14s %s - %7u bytes\n",
AslGbl_FileDescs[i].ShortDescription,
AslGbl_Files[i].Filename, FlGetFileSize (i));
}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslwalks.c new/acpica-unix-20190509/source/compiler/aslwalks.c
--- old/acpica-unix-20190405/source/compiler/aslwalks.c 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslwalks.c 2019-05-10 00:04:15.000000000 +0200
@@ -165,6 +165,14 @@
AnAnalyzeStoreOperator (
ACPI_PARSE_OBJECT *Op);
+static BOOLEAN
+AnIsValidBufferConstant (
+ ACPI_PARSE_OBJECT *Op);
+
+static void
+AnValidateCreateBufferField (
+ ACPI_PARSE_OBJECT *CreateBufferFieldOp);
+
/*******************************************************************************
*
@@ -669,6 +677,14 @@
OpInfo = AcpiPsGetOpcodeInfo (Op->Asl.AmlOpcode);
+ if (OpInfo->Flags & AML_CREATE)
+ {
+ /* This group contains all of the Create Buffer Field operators */
+
+ AnValidateCreateBufferField (Op);
+ return (AE_OK);
+ }
+
/*
* Determine if an execution class operator actually does something by
* checking if it has a target and/or the function return value is used.
@@ -734,10 +750,10 @@
}
}
-
/*
* Semantic checks for individual ASL operators
*/
+
switch (Op->Asl.ParseOpcode)
{
case PARSEOP_STORE:
@@ -785,22 +801,6 @@
}
break;
- case PARSEOP_CREATEFIELD:
- /*
- * Check for a zero Length (NumBits) operand. NumBits is the 3rd operand
- */
- ArgOp = Op->Asl.Child;
- ArgOp = ArgOp->Asl.Next;
- ArgOp = ArgOp->Asl.Next;
-
- if ((ArgOp->Asl.ParseOpcode == PARSEOP_ZERO) ||
- ((ArgOp->Asl.ParseOpcode == PARSEOP_INTEGER) &&
- (ArgOp->Asl.Value.Integer == 0)))
- {
- AslError (ASL_ERROR, ASL_MSG_NON_ZERO, ArgOp, NULL);
- }
- break;
-
case PARSEOP_CONNECTION:
/*
* Ensure that the referenced operation region has the correct SPACE_ID.
@@ -886,6 +886,194 @@
}
+/*******************************************************************************
+ *
+ * FUNCTION: AnValidateCreateBufferField
+ *
+ * PARAMETERS: Op - A create buffer field operator
+ *
+ * RETURN: None
+ *
+ * DESCRIPTION: Check if a buffer index argument to a create buffer field
+ * operation is beyond the end of the target buffer.
+ *
+ * Validates these AML operators:
+ *
+ * AML_CREATE_FIELD_OP
+ * AML_CREATE_BIT_FIELD_OP
+ * AML_CREATE_BYTE_FIELD_OP
+ * AML_CREATE_WORD_FIELD_OP
+ * AML_CREATE_DWORD_FIELD_OP
+ * AML_CREATE_QWORD_FIELD_OP
+ *
+ * There are three conditions that must be satisfied in order to enable
+ * validation at compile time:
+ *
+ * 1) The length of the target buffer must be an integer constant
+ * 2) The index specified in the create* must be an integer constant
+ * 3) For CreateField, the bit length argument must be non-zero.
+ *
+ ******************************************************************************/
+
+static void
+AnValidateCreateBufferField (
+ ACPI_PARSE_OBJECT *CreateBufferFieldOp)
+{
+ ACPI_PARSE_OBJECT *TargetBufferOp;
+ ACPI_PARSE_OBJECT *ArgOp;
+ UINT32 TargetBufferLength;
+ UINT32 LastFieldByteIndex;
+
+
+ /*
+ * 1) Get the length of the target buffer
+ */
+ ArgOp = CreateBufferFieldOp->Asl.Child; /* Reference to target buffer */
+
+ /*
+ * If no attached Node, the target buffer may be something like an
+ * ArgX or LocalX and cannot be evaluated at compile time.
+ */
+ if (!ArgOp->Asl.Node)
+ {
+ return;
+ }
+
+ TargetBufferOp = ArgOp->Asl.Node->Op;
+ TargetBufferOp = TargetBufferOp->Asl.Child; /* Target buffer */
+ TargetBufferOp = TargetBufferOp->Asl.Next; /* "Buffer" keyword */
+ if (!TargetBufferOp)
+ {
+ /* Not a statement of the form NAME(XXXX, Buffer.... */
+
+ return;
+ }
+
+ /* Get the buffer length argument. It must be an integer constant */
+
+ ArgOp = TargetBufferOp->Asl.Child;
+ if (!AnIsValidBufferConstant (ArgOp))
+ {
+ return;
+ }
+
+ TargetBufferLength = (UINT32) ArgOp->Asl.Value.Integer;
+
+ /*
+ * 2) Get the value of the buffer index argument. It must be
+ * an integer constant.
+ */
+ ArgOp = CreateBufferFieldOp->Asl.Child; /* Reference to target buffer */
+ ArgOp = ArgOp->Asl.Next; /* Buffer Index argument*/
+ if (!AnIsValidBufferConstant (ArgOp))
+ {
+ return;
+ }
+
+ LastFieldByteIndex =
+ (UINT32) ArgOp->Asl.Value.Integer; /* Index can be in either bytes or bits */
+
+ /*
+ * 3) Get the length of the new buffer field, in bytes. Also,
+ * create the final target buffer index for the last byte of the field
+ */
+ switch (CreateBufferFieldOp->Asl.ParseOpcode)
+ {
+ case PARSEOP_CREATEBITFIELD: /* A one bit field */
+
+ LastFieldByteIndex = ACPI_ROUND_BITS_DOWN_TO_BYTES (LastFieldByteIndex);
+ break;
+
+ case PARSEOP_CREATEBYTEFIELD:
+ break;
+
+ case PARSEOP_CREATEWORDFIELD:
+
+ LastFieldByteIndex += (sizeof (UINT16) - 1);
+ break;
+
+ case PARSEOP_CREATEDWORDFIELD:
+
+ LastFieldByteIndex += (sizeof (UINT32) - 1);
+ break;
+
+ case PARSEOP_CREATEQWORDFIELD:
+
+ LastFieldByteIndex += (sizeof (UINT64) - 1);
+ break;
+
+ case PARSEOP_CREATEFIELD: /* Multi-bit field */
+
+ ArgOp = ArgOp->Asl.Next; /* Length argument, in bits */
+ if (!AnIsValidBufferConstant (ArgOp))
+ {
+ return;
+ }
+
+ /* The buffer field length is not allowed to be zero */
+
+ if (ArgOp->Asl.Value.Integer == 0)
+ {
+ AslError (ASL_WARNING, ASL_MSG_BUFFER_FIELD_LENGTH, ArgOp, NULL);
+ return;
+ }
+
+ LastFieldByteIndex +=
+ ((UINT32) ArgOp->Asl.Value.Integer - 1); /* Create final bit index */
+
+ /* Convert bit index to a byte index */
+
+ LastFieldByteIndex = ACPI_ROUND_BITS_DOWN_TO_BYTES (LastFieldByteIndex);
+ break;
+
+ default:
+ return;
+ }
+
+ /*
+ * 4) Check for an access (index) beyond the end of the target buffer,
+ * or a zero length target buffer.
+ */
+ if (!TargetBufferLength || (LastFieldByteIndex >= TargetBufferLength))
+ {
+ AslError (ASL_WARNING, ASL_MSG_BUFFER_FIELD_OVERFLOW, ArgOp, NULL);
+ }
+}
+
+
+/*******************************************************************************
+ *
+ * FUNCTION: AnIsValidBufferConstant
+ *
+ * PARAMETERS: Op - A buffer-related operand
+ *
+ * RETURN: TRUE if operand is valid constant, FALSE otherwise
+ *
+ * DESCRIPTION: Check if the input Op is valid constant that can be used
+ * in compile-time analysis.
+ *
+ ******************************************************************************/
+
+static BOOLEAN
+AnIsValidBufferConstant (
+ ACPI_PARSE_OBJECT *Op)
+{
+ if (!Op)
+ {
+ return (FALSE);
+ }
+
+ if ((Op->Asl.ParseOpcode == PARSEOP_INTEGER) ||
+ (Op->Asl.ParseOpcode == PARSEOP_ZERO) ||
+ (Op->Asl.ParseOpcode == PARSEOP_ONE))
+ {
+ return (TRUE);
+ }
+
+ return (FALSE);
+}
+
+
/*******************************************************************************
*
* FUNCTION: AnAnalyzeStoreOperator
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/compiler/aslxref.c new/acpica-unix-20190509/source/compiler/aslxref.c
--- old/acpica-unix-20190405/source/compiler/aslxref.c 2019-04-05 17:12:58.000000000 +0200
+++ new/acpica-unix-20190509/source/compiler/aslxref.c 2019-05-10 00:04:15.000000000 +0200
@@ -460,6 +460,8 @@
ASL_METHOD_LOCAL *MethodArgs = NULL;
int RegisterNumber;
UINT32 i;
+ ACPI_NAMESPACE_NODE *DeclarationParentMethod;
+ ACPI_PARSE_OBJECT *ReferenceParentMethod;
ACPI_FUNCTION_TRACE_PTR (XfNamespaceLocateBegin, Op);
@@ -613,8 +615,7 @@
(Op->Asl.ParseOpcode != PARSEOP_NAMESTRING) &&
(Op->Asl.ParseOpcode != PARSEOP_NAMESEG) &&
(Op->Asl.ParseOpcode != PARSEOP_METHODCALL) &&
- (Op->Asl.ParseOpcode != PARSEOP_EXTERNAL) &&
- (OpInfo->Type != AML_TYPE_NAMED_FIELD))
+ (Op->Asl.ParseOpcode != PARSEOP_EXTERNAL))
{
return_ACPI_STATUS (AE_OK);
}
@@ -638,8 +639,7 @@
if ((Op->Asl.ParseOpcode == PARSEOP_NAMESTRING) ||
(Op->Asl.ParseOpcode == PARSEOP_NAMESEG) ||
(Op->Asl.ParseOpcode == PARSEOP_METHODCALL) ||
- (Op->Asl.ParseOpcode == PARSEOP_EXTERNAL) ||
- (OpInfo->Type == AML_TYPE_NAMED_FIELD))
+ (Op->Asl.ParseOpcode == PARSEOP_EXTERNAL))
{
/*
* These are name references, do not push the scope stack
@@ -676,10 +676,6 @@
Path = NextOp->Asl.Value.String;
}
- else if (OpInfo->Type == AML_TYPE_NAMED_FIELD)
- {
- Path = Op->Asl.Child->Asl.Value.String;
- }
else
{
Path = Op->Asl.Value.String;
@@ -798,24 +794,45 @@
return_ACPI_STATUS (Status);
}
- /* Object was found above, check for an illegal forward reference */
+ /* Object was found above, check for an illegal forward reference */
if (Op->Asl.CompileFlags & OP_NOT_FOUND_DURING_LOAD)
{
/*
* During the load phase, this Op was flagged as a possible
- * illegal forward reference
+ * illegal forward reference. In other words, Op is a name path or
+ * name segment that refers to a named object declared after the
+ * reference. In this scenario, Node refers to the actual declaration
+ * and Op is a parse node that references the named object.
*
- * Note: Allow "forward references" from within a method to an
- * object that is not within any method (module-level code)
+ * Note:
+ *
+ * Object references inside of control methods are allowed to
+ * refer to objects declared outside of control methods.
+ *
+ * If the declaration and reference are both contained inside of the
+ * same method or outside of any method, this is a forward reference
+ * and should be reported as a compiler error.
*/
- if (!WalkState->ScopeInfo || (UtGetParentMethod (Node) &&
- !UtNodeIsDescendantOf (WalkState->ScopeInfo->Scope.Node,
- UtGetParentMethod (Node))))
+ DeclarationParentMethod = UtGetParentMethod (Node);
+ ReferenceParentMethod = XfGetParentMethod (Op);
+
+ /* case 1: declaration and reference are both outside of method */
+
+ if (!ReferenceParentMethod && !DeclarationParentMethod)
{
AslError (ASL_ERROR, ASL_MSG_ILLEGAL_FORWARD_REF, Op,
Op->Asl.ExternalName);
}
+
+ /* case 2: declaration and reference are both inside of the same method */
+
+ else if (ReferenceParentMethod && DeclarationParentMethod &&
+ ReferenceParentMethod == DeclarationParentMethod->Op)
+ {
+ AslError (ASL_ERROR, ASL_MSG_ILLEGAL_FORWARD_REF, Op,
+ Op->Asl.ExternalName);
+ }
}
/* Check for a reference vs. name declaration */
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/components/events/evgpe.c new/acpica-unix-20190509/source/components/events/evgpe.c
--- old/acpica-unix-20190405/source/components/events/evgpe.c 2019-04-05 17:12:59.000000000 +0200
+++ new/acpica-unix-20190509/source/components/events/evgpe.c 2019-05-10 00:04:16.000000000 +0200
@@ -240,14 +240,6 @@
ACPI_FUNCTION_TRACE (EvEnableGpe);
- /* Clear the GPE (of stale events) */
-
- Status = AcpiHwClearGpe(GpeEventInfo);
- if (ACPI_FAILURE(Status))
- {
- return_ACPI_STATUS(Status);
- }
-
/* Enable the requested GPE */
Status = AcpiHwLowSetGpe (GpeEventInfo, ACPI_GPE_ENABLE);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/include/acpixf.h new/acpica-unix-20190509/source/include/acpixf.h
--- old/acpica-unix-20190405/source/include/acpixf.h 2019-04-05 17:13:01.000000000 +0200
+++ new/acpica-unix-20190509/source/include/acpixf.h 2019-05-10 00:04:18.000000000 +0200
@@ -154,7 +154,7 @@
/* Current ACPICA subsystem version in YYYYMMDD format */
-#define ACPI_CA_VERSION 0x20190405
+#define ACPI_CA_VERSION 0x20190509
#include "acconfig.h"
#include "actypes.h"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/acpica-unix-20190405/source/include/platform/aclinux.h new/acpica-unix-20190509/source/include/platform/aclinux.h
--- old/acpica-unix-20190405/source/include/platform/aclinux.h 2019-04-05 17:13:01.000000000 +0200
+++ new/acpica-unix-20190509/source/include/platform/aclinux.h 2019-05-10 00:04:18.000000000 +0200
@@ -205,6 +205,11 @@
#define ACPI_INIT_FUNCTION __init
+/* Use a specific bugging default separate from ACPICA */
+
+#undef ACPI_DEBUG_DEFAULT
+#define ACPI_DEBUG_DEFAULT (ACPI_LV_INFO | ACPI_LV_REPAIR)
+
#ifndef CONFIG_ACPI
/* External globals for __KERNEL__, stubs is needed */
@@ -221,11 +226,6 @@
#define ACPI_NO_ERROR_MESSAGES
#undef ACPI_DEBUG_OUTPUT
-/* Use a specific bugging default separate from ACPICA */
-
-#undef ACPI_DEBUG_DEFAULT
-#define ACPI_DEBUG_DEFAULT (ACPI_LV_INFO | ACPI_LV_REPAIR)
-
/* External interface for __KERNEL__, stub is needed */
#define ACPI_EXTERNAL_RETURN_STATUS(Prototype) \
1
0
Hello community,
here is the log from the commit of package resource-agents for openSUSE:Factory checked in at 2019-05-25 13:19:08
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/resource-agents (Old)
and /work/SRC/openSUSE:Factory/.resource-agents.new.5148 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "resource-agents"
Sat May 25 13:19:08 2019 rev:94 rq:704801 version:4.2.0+git104.25f2052b
Changes:
--------
--- /work/SRC/openSUSE:Factory/resource-agents/resource-agents.changes 2019-05-09 10:02:18.467807926 +0200
+++ /work/SRC/openSUSE:Factory/.resource-agents.new.5148/resource-agents.changes 2019-05-25 13:19:09.800360468 +0200
@@ -29,0 +30 @@
+ * This patch was integrated upstream and is not needed as an extra patch.
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
1
0
Hello community,
here is the log from the commit of package libreoffice for openSUSE:Factory checked in at 2019-05-25 13:18:32
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/libreoffice (Old)
and /work/SRC/openSUSE:Factory/.libreoffice.new.5148 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "libreoffice"
Sat May 25 13:18:32 2019 rev:181 rq:705180 version:6.2.3.2
Changes:
--------
--- /work/SRC/openSUSE:Factory/libreoffice/libreoffice.changes 2019-05-17 23:41:51.285955938 +0200
+++ /work/SRC/openSUSE:Factory/.libreoffice.new.5148/libreoffice.changes 2019-05-25 13:18:43.692370175 +0200
@@ -1,0 +2,12 @@
+Fri May 24 07:22:08 UTC 2019 - Tomáš Chvátal <tchvatal(a)suse.com>
+
+- If there is no firebird engine we still need java to run hsqldb
+ bsc#1135189
+
+-------------------------------------------------------------------
+Tue May 21 06:33:21 UTC 2019 - Andras Timar <andras.timar(a)collabora.com>
+
+- LO-L3: PPTX: Rectangle turns from green to blue and loses transparency when transparency is set
+ * bsc1135228.patch
+
+-------------------------------------------------------------------
New:
----
bsc1135228.patch
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ libreoffice.spec ++++++
--- /var/tmp/diff_new_pack.VYFNRv/_old 2019-05-25 13:18:50.608367604 +0200
+++ /var/tmp/diff_new_pack.VYFNRv/_new 2019-05-25 13:18:50.612367602 +0200
@@ -107,6 +107,8 @@
Patch14: bsc1124869.patch
# LO-L3: Image from PPTX shown in a square, not a circle
Patch15: bsc1121874.patch
+# LO-L3: PPTX: Rectangle turns from green to blue and loses transparency when transparency is set
+Patch16: bsc1135228.patch
# try to save space by using hardlinks
Patch990: install-with-hardlinks.diff
# save time by relying on rpm check rather than doing stupid find+grep
@@ -415,6 +417,13 @@
# default database connector
%if %{with firebird}
Requires: %{name}-base-drivers-firebird
+%else
+%ifarch %{ix86}
+Requires: jre-32 >= 1.6
+%endif
+%ifarch x86_64 aarch64
+Requires: jre-64 >= 1.6
+%endif
%endif
%description base
@@ -973,6 +982,7 @@
%patch13 -p1
%patch14 -p1
%patch15 -p1
+%patch16 -p1
%patch990 -p1
%patch991 -p1
++++++ bsc1135228.patch ++++++
diff --git a/include/oox/ppt/presentationfragmenthandler.hxx b/include/oox/ppt/presentationfragmenthandler.hxx
index ab24a8262fae..bb9166e24afc 100644
--- a/include/oox/ppt/presentationfragmenthandler.hxx
+++ b/include/oox/ppt/presentationfragmenthandler.hxx
@@ -50,7 +50,7 @@ private:
void importSlide( const ::oox::core::FragmentHandlerRef& rSlideFragmentHandler,
const oox::ppt::SlidePersistPtr& rPersist );
void importSlide(sal_uInt32 nSlide, bool bFirstSlide, bool bImportNotes);
- void saveThemeToGrabBag(const oox::drawingml::ThemePtr& pThemePtr, const OUString& sTheme);
+ void saveThemeToGrabBag(const oox::drawingml::ThemePtr& pThemePtr, sal_Int32 nThemeIdx);
std::vector< OUString > maSlideMasterVector;
std::vector< OUString > maSlidesVector;
diff --git a/oox/source/ppt/presentationfragmenthandler.cxx b/oox/source/ppt/presentationfragmenthandler.cxx
index f2477b2cef6e..13bba2da95a3 100644
--- a/oox/source/ppt/presentationfragmenthandler.cxx
+++ b/oox/source/ppt/presentationfragmenthandler.cxx
@@ -160,7 +160,7 @@ static void ResolveTextFields( XmlFilterBase const & rFilter )
}
void PresentationFragmentHandler::saveThemeToGrabBag(const oox::drawingml::ThemePtr& pThemePtr,
- const OUString& sTheme)
+ sal_Int32 nThemeIdx)
{
if (!pThemePtr)
return;
@@ -198,8 +198,11 @@ void PresentationFragmentHandler::saveThemeToGrabBag(const oox::drawingml::Theme
aCurrentTheme[nId].Value = rColor;
}
+
// add new theme to the sequence
- aTheme[0].Name = sTheme;
+ // Export code uses the master slide's index to find the right theme
+ // so use the same index in the grabbag.
+ aTheme[0].Name = "ppt/theme/theme" + OUString::number(nThemeIdx) + ".xml";
const uno::Any& rCurrentTheme = makeAny(aCurrentTheme);
aTheme[0].Value = rCurrentTheme;
@@ -273,10 +276,17 @@ void PresentationFragmentHandler::importSlide(sal_uInt32 nSlide, bool bFirstPage
Reference< drawing::XMasterPagesSupplier > xMPS( xModel, uno::UNO_QUERY_THROW );
Reference< drawing::XDrawPages > xMasterPages( xMPS->getMasterPages(), uno::UNO_QUERY_THROW );
+ sal_Int32 nIndex;
if( rFilter.getMasterPages().empty() )
- xMasterPages->getByIndex( 0 ) >>= xMasterPage;
+ {
+ nIndex = 0;
+ xMasterPages->getByIndex( nIndex ) >>= xMasterPage;
+ }
else
- xMasterPage = xMasterPages->insertNewByIndex( xMasterPages->getCount() );
+ {
+ nIndex = xMasterPages->getCount();
+ xMasterPage = xMasterPages->insertNewByIndex( nIndex );
+ }
pMasterPersistPtr = std::make_shared<SlidePersist>( rFilter, true, false, xMasterPage,
ShapePtr( new PPTShape( Master, "com.sun.star.drawing.GroupShape" ) ), mpTextListStyle );
@@ -306,7 +316,7 @@ void PresentationFragmentHandler::importSlide(sal_uInt32 nSlide, bool bFirstPage
UNO_QUERY_THROW));
rThemes[ aThemeFragmentPath ] = pThemePtr;
pThemePtr->setFragment(xDoc);
- saveThemeToGrabBag(pThemePtr, aThemeFragmentPath);
+ saveThemeToGrabBag(pThemePtr, nIndex + 1);
}
else
{
diff --git a/include/oox/export/drawingml.hxx b/include/oox/export/drawingml.hxx
index 24adf475be16..2da61a6f5300 100644
--- a/include/oox/export/drawingml.hxx
+++ b/include/oox/export/drawingml.hxx
@@ -170,14 +170,14 @@ public:
OUString WriteImage( const Graphic &rGraphic , bool bRelPathToMedia = false);
void WriteColor( ::Color nColor, sal_Int32 nAlpha = MAX_PERCENT );
- void WriteColor( const OUString& sColorSchemeName, const css::uno::Sequence< css::beans::PropertyValue >& aTransformations );
- void WriteColorTransformations( const css::uno::Sequence< css::beans::PropertyValue >& aTransformations );
+ void WriteColor( const OUString& sColorSchemeName, const css::uno::Sequence< css::beans::PropertyValue >& aTransformations, sal_Int32 nAlpha = MAX_PERCENT );
+ void WriteColorTransformations( const css::uno::Sequence< css::beans::PropertyValue >& aTransformations, sal_Int32 nAlpha = MAX_PERCENT );
void WriteGradientStop( sal_uInt16 nStop, ::Color nColor );
void WriteLineArrow( const css::uno::Reference< css::beans::XPropertySet >& rXPropSet, bool bLineStart );
void WriteConnectorConnections( EscherConnectorListEntry& rConnectorEntry, sal_Int32 nStartID, sal_Int32 nEndID );
void WriteSolidFill( ::Color nColor, sal_Int32 nAlpha = MAX_PERCENT );
- void WriteSolidFill( const OUString& sSchemeName, const css::uno::Sequence< css::beans::PropertyValue >& aTransformations );
+ void WriteSolidFill( const OUString& sSchemeName, const css::uno::Sequence< css::beans::PropertyValue >& aTransformations, sal_Int32 nAlpha = MAX_PERCENT );
void WriteSolidFill( const css::uno::Reference< css::beans::XPropertySet >& rXPropSet );
void WriteGradientFill( const css::uno::Reference< css::beans::XPropertySet >& rXPropSet );
void WriteGradientFill( css::awt::Gradient rGradient );
diff --git a/oox/source/export/drawingml.cxx b/oox/source/export/drawingml.cxx
index 23065ec67678..30f330226788 100644
--- a/oox/source/export/drawingml.cxx
+++ b/oox/source/export/drawingml.cxx
@@ -231,7 +231,7 @@ void DrawingML::WriteColor( ::Color nColor, sal_Int32 nAlpha )
}
}
-void DrawingML::WriteColor( const OUString& sColorSchemeName, const Sequence< PropertyValue >& aTransformations )
+void DrawingML::WriteColor( const OUString& sColorSchemeName, const Sequence< PropertyValue >& aTransformations, sal_Int32 nAlpha )
{
// prevent writing a tag with empty val attribute
if( sColorSchemeName.isEmpty() )
@@ -242,7 +242,15 @@ void DrawingML::WriteColor( const OUString& sColorSchemeName, const Sequence< Pr
mpFS->startElementNS( XML_a, XML_schemeClr,
XML_val, USS( sColorSchemeName ),
FSEND );
- WriteColorTransformations( aTransformations );
+ WriteColorTransformations( aTransformations, nAlpha );
+ mpFS->endElementNS( XML_a, XML_schemeClr );
+ }
+ else if(nAlpha < MAX_PERCENT)
+ {
+ mpFS->startElementNS( XML_a, XML_schemeClr,
+ XML_val, USS( sColorSchemeName ),
+ FSEND );
+ mpFS->singleElementNS(XML_a, XML_alpha, XML_val, OString::number(nAlpha), FSEND);
mpFS->endElementNS( XML_a, XML_schemeClr );
}
else
@@ -253,15 +261,22 @@ void DrawingML::WriteColor( const OUString& sColorSchemeName, const Sequence< Pr
}
}
-void DrawingML::WriteColorTransformations( const Sequence< PropertyValue >& aTransformations )
+void DrawingML::WriteColorTransformations( const Sequence< PropertyValue >& aTransformations, sal_Int32 nAlpha )
{
for( sal_Int32 i = 0; i < aTransformations.getLength(); i++ )
{
sal_Int32 nToken = Color::getColorTransformationToken( aTransformations[i].Name );
if( nToken != XML_TOKEN_INVALID && aTransformations[i].Value.hasValue() )
{
- sal_Int32 nValue = aTransformations[i].Value.get<sal_Int32>();
- mpFS->singleElementNS( XML_a, nToken, XML_val, I32S( nValue ), FSEND );
+ if(nToken == XML_alpha && nAlpha < MAX_PERCENT)
+ {
+ mpFS->singleElementNS( XML_a, nToken, XML_val, I32S( nAlpha ), FSEND );
+ }
+ else
+ {
+ sal_Int32 nValue = aTransformations[i].Value.get<sal_Int32>();
+ mpFS->singleElementNS( XML_a, nToken, XML_val, I32S( nValue ), FSEND );
+ }
}
}
}
@@ -273,10 +288,10 @@ void DrawingML::WriteSolidFill( ::Color nColor, sal_Int32 nAlpha )
mpFS->endElementNS( XML_a, XML_solidFill );
}
-void DrawingML::WriteSolidFill( const OUString& sSchemeName, const Sequence< PropertyValue >& aTransformations )
+void DrawingML::WriteSolidFill( const OUString& sSchemeName, const Sequence< PropertyValue >& aTransformations, sal_Int32 nAlpha )
{
mpFS->startElementNS( XML_a, XML_solidFill, FSEND );
- WriteColor( sSchemeName, aTransformations );
+ WriteColor( sSchemeName, aTransformations, nAlpha );
mpFS->endElementNS( XML_a, XML_solidFill );
}
@@ -326,22 +341,36 @@ void DrawingML::WriteSolidFill( const Reference< XPropertySet >& rXPropSet )
else if ( !sColorFillScheme.isEmpty() )
{
// the shape had a scheme color and the user didn't change it
- WriteSolidFill( sColorFillScheme, aTransformations );
+ WriteSolidFill( sColorFillScheme, aTransformations, nAlpha );
}
else if ( aStyleProperties.hasElements() )
{
sal_uInt32 nThemeColor = 0;
+ sal_Int32 nThemeAlpha = MAX_PERCENT;
for( sal_Int32 i=0; i < aStyleProperties.getLength(); ++i )
{
if( aStyleProperties[i].Name == "Color" )
{
aStyleProperties[i].Value >>= nThemeColor;
- break;
+ }
+ else if(aStyleProperties[i].Name == "Transformations" )
+ {
+ Sequence< PropertyValue > aStyleTransformations;
+ aStyleProperties[i].Value >>= aStyleTransformations;
+ for( sal_Int32 j = 0; j < aStyleTransformations.getLength(); j++ )
+ {
+ if (aStyleTransformations[j].Name == "alpha" )
+ {
+ aStyleTransformations[j].Value >>= nThemeAlpha;
+ break;
+ }
+ }
}
}
- if ( nFillColor != nThemeColor )
+ if ( nFillColor != nThemeColor || nAlpha != nThemeAlpha )
// the shape contains a theme but it wasn't being used
WriteSolidFill( ::Color(nFillColor & 0xffffff), nAlpha );
+
// in case the shape used the style color and the user didn't change it,
// we must not write a <a: solidFill> tag.
}
1
0
Hello community,
here is the log from the commit of package tomcat for openSUSE:Factory checked in at 2019-05-25 13:18:24
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/tomcat (Old)
and /work/SRC/openSUSE:Factory/.tomcat.new.5148 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "tomcat"
Sat May 25 13:18:24 2019 rev:53 rq:704517 version:9.0.20
Changes:
--------
--- /work/SRC/openSUSE:Factory/tomcat/tomcat.changes 2019-05-03 22:29:20.496326256 +0200
+++ /work/SRC/openSUSE:Factory/.tomcat.new.5148/tomcat.changes 2019-05-25 13:18:26.460376583 +0200
@@ -1,0 +2,7 @@
+Mon May 20 20:40:08 UTC 2019 - Matei <malbu(a)suse.com>
+
+- Update to Tomcat 9.0.20. See changelog at
+ http://tomcat.apache.org/tomcat-9.0-doc/changelog.html#Tomcat_9.0.20_(markt)
+- increase maximum number of threads and open files for tomcat (bsc#1111966)
+
+-------------------------------------------------------------------
Old:
----
apache-tomcat-9.0.19-src.tar.gz
apache-tomcat-9.0.19-src.tar.gz.asc
New:
----
apache-tomcat-9.0.20-src.tar.gz
apache-tomcat-9.0.20-src.tar.gz.asc
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ tomcat.spec ++++++
--- /var/tmp/diff_new_pack.VO9AVs/_old 2019-05-25 13:18:30.928374921 +0200
+++ /var/tmp/diff_new_pack.VO9AVs/_new 2019-05-25 13:18:30.952374912 +0200
@@ -22,7 +22,7 @@
%define elspec 3.0
%define major_version 9
%define minor_version 0
-%define micro_version 19
+%define micro_version 20
%define packdname apache-tomcat-%{version}-src
# FHS 2.3 compliant tree structure - http://www.pathname.com/fhs/2.3/
%global basedir /srv/%{name}
++++++ apache-tomcat-9.0.19-src.tar.gz -> apache-tomcat-9.0.20-src.tar.gz ++++++
++++ 7345 lines of diff (skipped)
++++++ tomcat-9.0.service ++++++
--- /var/tmp/diff_new_pack.VO9AVs/_old 2019-05-25 13:18:35.012373403 +0200
+++ /var/tmp/diff_new_pack.VO9AVs/_new 2019-05-25 13:18:35.028373397 +0200
@@ -8,6 +8,8 @@
After=syslog.target network.target
[Service]
+TasksMax=576
+LimitNOFILE=8192
Type=simple
EnvironmentFile=/etc/tomcat/tomcat.conf
Environment="NAME="
1
0
Hello community,
here is the log from the commit of package python-cloudpickle for openSUSE:Factory checked in at 2019-05-25 13:17:55
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-cloudpickle (Old)
and /work/SRC/openSUSE:Factory/.python-cloudpickle.new.5148 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-cloudpickle"
Sat May 25 13:17:55 2019 rev:6 rq:704439 version:1.1.1
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-cloudpickle/python-cloudpickle.changes 2019-04-02 09:21:46.028681327 +0200
+++ /work/SRC/openSUSE:Factory/.python-cloudpickle.new.5148/python-cloudpickle.changes 2019-05-25 13:17:58.988386797 +0200
@@ -1,0 +2,10 @@
+Tue May 21 11:38:21 UTC 2019 - Marketa Calabkova <mcalabkova(a)suse.com>
+
+- Update to 1.1.1
+ * Support the pickling of interactively-defined functions with
+ positional-only arguments.
+ * Track the provenance of dynamic classes and enums.
+ * Fix a bug making functions with keyword-only arguments forget
+ the default values of these arguments after being pickled.
+
+-------------------------------------------------------------------
Old:
----
cloudpickle-0.8.1.tar.gz
New:
----
cloudpickle-1.1.1.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-cloudpickle.spec ++++++
--- /var/tmp/diff_new_pack.CpWZNj/_old 2019-05-25 13:17:59.660386547 +0200
+++ /var/tmp/diff_new_pack.CpWZNj/_new 2019-05-25 13:17:59.664386546 +0200
@@ -18,7 +18,7 @@
%{?!python_module:%define python_module() python-%{**} python3-%{**}}
Name: python-cloudpickle
-Version: 0.8.1
+Version: 1.1.1
Release: 0
Summary: Extended pickling support for Python objects
License: BSD-3-Clause
@@ -68,7 +68,7 @@
%check
export PYTHONPATH='.:tests'
-%python_expand py.test-%{$python_bin_suffix} -s
+%pytest -s
%files %{python_files}
%doc README.md
++++++ cloudpickle-0.8.1.tar.gz -> cloudpickle-1.1.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-0.8.1/PKG-INFO new/cloudpickle-1.1.1/PKG-INFO
--- old/cloudpickle-0.8.1/PKG-INFO 2019-03-25 10:07:23.000000000 +0100
+++ new/cloudpickle-1.1.1/PKG-INFO 2019-05-15 10:50:45.000000000 +0200
@@ -1,6 +1,6 @@
-Metadata-Version: 1.1
+Metadata-Version: 2.1
Name: cloudpickle
-Version: 0.8.1
+Version: 1.1.1
Summary: Extended pickling support for Python objects
Home-page: https://github.com/cloudpipe/cloudpickle
Author: Cloudpipe
@@ -120,3 +120,4 @@
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Scientific/Engineering
Classifier: Topic :: System :: Distributed Computing
+Description-Content-Type: text/markdown
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-0.8.1/cloudpickle/__init__.py new/cloudpickle-1.1.1/cloudpickle/__init__.py
--- old/cloudpickle-0.8.1/cloudpickle/__init__.py 2019-03-25 10:07:01.000000000 +0100
+++ new/cloudpickle-1.1.1/cloudpickle/__init__.py 2019-05-15 10:49:51.000000000 +0200
@@ -2,4 +2,4 @@
from cloudpickle.cloudpickle import *
-__version__ = '0.8.1'
+__version__ = '1.1.1'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-0.8.1/cloudpickle/cloudpickle.py new/cloudpickle-1.1.1/cloudpickle/cloudpickle.py
--- old/cloudpickle-0.8.1/cloudpickle/cloudpickle.py 2019-03-25 09:45:31.000000000 +0100
+++ new/cloudpickle-1.1.1/cloudpickle/cloudpickle.py 2019-05-15 10:43:28.000000000 +0200
@@ -44,7 +44,6 @@
import dis
from functools import partial
-import importlib
import io
import itertools
import logging
@@ -56,12 +55,26 @@
import traceback
import types
import weakref
+import uuid
+import threading
+
+
+try:
+ from enum import Enum
+except ImportError:
+ Enum = None
# cloudpickle is meant for inter process communication: we expect all
# communicating processes to run the same Python version hence we favor
# communication speed over compatibility:
DEFAULT_PROTOCOL = pickle.HIGHEST_PROTOCOL
+# Track the provenance of reconstructed dynamic classes to make it possible to
+# reconstruct instances from the matching singleton class definition when
+# appropriate and preserve the usual "isinstance" semantics of Python objects.
+_DYNAMIC_CLASS_TRACKER_BY_CLASS = weakref.WeakKeyDictionary()
+_DYNAMIC_CLASS_TRACKER_BY_ID = weakref.WeakValueDictionary()
+_DYNAMIC_CLASS_TRACKER_LOCK = threading.Lock()
if sys.version_info[0] < 3: # pragma: no branch
from pickle import Pickler
@@ -71,12 +84,37 @@
from StringIO import StringIO
string_types = (basestring,) # noqa
PY3 = False
+ PY2 = True
+ PY2_WRAPPER_DESCRIPTOR_TYPE = type(object.__init__)
+ PY2_METHOD_WRAPPER_TYPE = type(object.__eq__)
+ PY2_CLASS_DICT_BLACKLIST = (PY2_METHOD_WRAPPER_TYPE,
+ PY2_WRAPPER_DESCRIPTOR_TYPE)
else:
types.ClassType = type
from pickle import _Pickler as Pickler
from io import BytesIO as StringIO
string_types = (str,)
PY3 = True
+ PY2 = False
+
+
+def _ensure_tracking(class_def):
+ with _DYNAMIC_CLASS_TRACKER_LOCK:
+ class_tracker_id = _DYNAMIC_CLASS_TRACKER_BY_CLASS.get(class_def)
+ if class_tracker_id is None:
+ class_tracker_id = uuid.uuid4().hex
+ _DYNAMIC_CLASS_TRACKER_BY_CLASS[class_def] = class_tracker_id
+ _DYNAMIC_CLASS_TRACKER_BY_ID[class_tracker_id] = class_def
+ return class_tracker_id
+
+
+def _lookup_class_or_track(class_tracker_id, class_def):
+ if class_tracker_id is not None:
+ with _DYNAMIC_CLASS_TRACKER_LOCK:
+ class_def = _DYNAMIC_CLASS_TRACKER_BY_ID.setdefault(
+ class_tracker_id, class_def)
+ _DYNAMIC_CLASS_TRACKER_BY_CLASS[class_def] = class_tracker_id
+ return class_def
def _make_cell_set_template_code():
@@ -112,7 +150,7 @@
# NOTE: we are marking the cell variable as a free variable intentionally
# so that we simulate an inner function instead of the outer function. This
# is what gives us the ``nonlocal`` behavior in a Python 2 compatible way.
- if not PY3: # pragma: no branch
+ if PY2: # pragma: no branch
return types.CodeType(
co.co_argcount,
co.co_nlocals,
@@ -130,24 +168,43 @@
(),
)
else:
- return types.CodeType(
- co.co_argcount,
- co.co_kwonlyargcount,
- co.co_nlocals,
- co.co_stacksize,
- co.co_flags,
- co.co_code,
- co.co_consts,
- co.co_names,
- co.co_varnames,
- co.co_filename,
- co.co_name,
- co.co_firstlineno,
- co.co_lnotab,
- co.co_cellvars, # this is the trickery
- (),
- )
-
+ if hasattr(types.CodeType, "co_posonlyargcount"): # pragma: no branch
+ return types.CodeType(
+ co.co_argcount,
+ co.co_posonlyargcount, # Python3.8 with PEP570
+ co.co_kwonlyargcount,
+ co.co_nlocals,
+ co.co_stacksize,
+ co.co_flags,
+ co.co_code,
+ co.co_consts,
+ co.co_names,
+ co.co_varnames,
+ co.co_filename,
+ co.co_name,
+ co.co_firstlineno,
+ co.co_lnotab,
+ co.co_cellvars, # this is the trickery
+ (),
+ )
+ else:
+ return types.CodeType(
+ co.co_argcount,
+ co.co_kwonlyargcount,
+ co.co_nlocals,
+ co.co_stacksize,
+ co.co_flags,
+ co.co_code,
+ co.co_consts,
+ co.co_names,
+ co.co_varnames,
+ co.co_filename,
+ co.co_name,
+ co.co_firstlineno,
+ co.co_lnotab,
+ co.co_cellvars, # this is the trickery
+ (),
+ )
_cell_set_template_code = _make_cell_set_template_code()
@@ -220,7 +277,7 @@
global-referencing instructions in *code*.
"""
code = getattr(code, 'co_code', b'')
- if not PY3: # pragma: no branch
+ if PY2: # pragma: no branch
code = map(ord, code)
n = len(code)
@@ -250,6 +307,39 @@
yield op, instr.arg
+def _extract_class_dict(cls):
+ """Retrieve a copy of the dict of a class without the inherited methods"""
+ clsdict = dict(cls.__dict__) # copy dict proxy to a dict
+ if len(cls.__bases__) == 1:
+ inherited_dict = cls.__bases__[0].__dict__
+ else:
+ inherited_dict = {}
+ for base in reversed(cls.__bases__):
+ inherited_dict.update(base.__dict__)
+ to_remove = []
+ for name, value in clsdict.items():
+ try:
+ base_value = inherited_dict[name]
+ if value is base_value:
+ to_remove.append(name)
+ elif PY2:
+ # backward compat for Python 2
+ if hasattr(value, "im_func"):
+ if value.im_func is getattr(base_value, "im_func", None):
+ to_remove.append(name)
+ elif isinstance(value, PY2_CLASS_DICT_BLACKLIST):
+ # On Python 2 we have no way to pickle those specific
+ # methods types nor to check that they are actually
+ # inherited. So we assume that they are always inherited
+ # from builtin types.
+ to_remove.append(name)
+ except KeyError:
+ pass
+ for name in to_remove:
+ clsdict.pop(name)
+ return clsdict
+
+
class CloudPickler(Pickler):
dispatch = Pickler.dispatch.copy()
@@ -277,7 +367,7 @@
dispatch[memoryview] = save_memoryview
- if not PY3: # pragma: no branch
+ if PY2: # pragma: no branch
def save_buffer(self, obj):
self.save(str(obj))
@@ -300,12 +390,23 @@
Save a code object
"""
if PY3: # pragma: no branch
- args = (
- obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
- obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, obj.co_varnames,
- obj.co_filename, obj.co_name, obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
- obj.co_cellvars
- )
+ if hasattr(obj, "co_posonlyargcount"): # pragma: no branch
+ args = (
+ obj.co_argcount, obj.co_posonlyargcount,
+ obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
+ obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
+ obj.co_varnames, obj.co_filename, obj.co_name,
+ obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
+ obj.co_cellvars
+ )
+ else:
+ args = (
+ obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
+ obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
+ obj.co_names, obj.co_varnames, obj.co_filename,
+ obj.co_name, obj.co_firstlineno, obj.co_lnotab,
+ obj.co_freevars, obj.co_cellvars
+ )
else:
args = (
obj.co_argcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code,
@@ -460,15 +561,40 @@
# then discards the reference to it
self.write(pickle.POP)
- def save_dynamic_class(self, obj):
+ def _save_dynamic_enum(self, obj, clsdict):
+ """Special handling for dynamic Enum subclasses
+
+ Use a dedicated Enum constructor (inspired by EnumMeta.__call__) as the
+ EnumMeta metaclass has complex initialization that makes the Enum
+ subclasses hold references to their own instances.
"""
- Save a class that can't be stored as module global.
+ members = dict((e.name, e.value) for e in obj)
+
+ # Python 2.7 with enum34 can have no qualname:
+ qualname = getattr(obj, "__qualname__", None)
+
+ self.save_reduce(_make_skeleton_enum,
+ (obj.__bases__, obj.__name__, qualname, members,
+ obj.__module__, _ensure_tracking(obj), None),
+ obj=obj)
+
+ # Cleanup the clsdict that will be passed to _rehydrate_skeleton_class:
+ # Those attributes are already handled by the metaclass.
+ for attrname in ["_generate_next_value_", "_member_names_",
+ "_member_map_", "_member_type_",
+ "_value2member_map_"]:
+ clsdict.pop(attrname, None)
+ for member in members:
+ clsdict.pop(member)
+
+ def save_dynamic_class(self, obj):
+ """Save a class that can't be stored as module global.
This method is used to serialize classes that are defined inside
functions, or that otherwise can't be serialized as attribute lookups
from global modules.
"""
- clsdict = dict(obj.__dict__) # copy dict proxy to a dict
+ clsdict = _extract_class_dict(obj)
clsdict.pop('__weakref__', None)
# For ABCMeta in python3.7+, remove _abc_impl as it is not picklable.
@@ -496,8 +622,8 @@
for k in obj.__slots__:
clsdict.pop(k, None)
- # If type overrides __dict__ as a property, include it in the type kwargs.
- # In Python 2, we can't set this attribute after construction.
+ # If type overrides __dict__ as a property, include it in the type
+ # kwargs. In Python 2, we can't set this attribute after construction.
__dict__ = clsdict.pop('__dict__', None)
if isinstance(__dict__, property):
type_kwargs['__dict__'] = __dict__
@@ -524,8 +650,16 @@
write(pickle.MARK)
# Create and memoize a skeleton class with obj's name and bases.
- tp = type(obj)
- self.save_reduce(tp, (obj.__name__, obj.__bases__, type_kwargs), obj=obj)
+ if Enum is not None and issubclass(obj, Enum):
+ # Special handling of Enum subclasses
+ self._save_dynamic_enum(obj, clsdict)
+ else:
+ # "Regular" class definition:
+ tp = type(obj)
+ self.save_reduce(_make_skeleton_class,
+ (tp, obj.__name__, obj.__bases__, type_kwargs,
+ _ensure_tracking(obj), None),
+ obj=obj)
# Now save the rest of obj's __dict__. Any references to obj
# encountered while saving will point to the skeleton class.
@@ -591,6 +725,8 @@
state['annotations'] = func.__annotations__
if hasattr(func, '__qualname__'):
state['qualname'] = func.__qualname__
+ if hasattr(func, '__kwdefaults__'):
+ state['kwdefaults'] = func.__kwdefaults__
save(state)
write(pickle.TUPLE)
write(pickle.REDUCE) # applies _fill_function on the tuple
@@ -776,7 +912,7 @@
save(stuff)
write(pickle.BUILD)
- if not PY3: # pragma: no branch
+ if PY2: # pragma: no branch
dispatch[types.InstanceType] = save_inst
def save_property(self, obj):
@@ -1075,6 +1211,8 @@
func.__module__ = state['module']
if 'qualname' in state:
func.__qualname__ = state['qualname']
+ if 'kwdefaults' in state:
+ func.__kwdefaults__ = state['kwdefaults']
cells = func.__closure__
if cells is not None:
@@ -1115,6 +1253,22 @@
return types.FunctionType(code, base_globals, None, None, closure)
+def _make_skeleton_class(type_constructor, name, bases, type_kwargs,
+ class_tracker_id, extra):
+ """Build dynamic class with an empty __dict__ to be filled once memoized
+
+ If class_tracker_id is not None, try to lookup an existing class definition
+ matching that id. If none is found, track a newly reconstructed class
+ definition under that id so that other instances stemming from the same
+ class id will also reuse this class definition.
+
+ The "extra" variable is meant to be a dict (or None) that can be used for
+ forward compatibility shall the need arise.
+ """
+ skeleton_class = type_constructor(name, bases, type_kwargs)
+ return _lookup_class_or_track(class_tracker_id, skeleton_class)
+
+
def _rehydrate_skeleton_class(skeleton_class, class_dict):
"""Put attributes from `class_dict` back on `skeleton_class`.
@@ -1133,6 +1287,39 @@
return skeleton_class
+def _make_skeleton_enum(bases, name, qualname, members, module,
+ class_tracker_id, extra):
+ """Build dynamic enum with an empty __dict__ to be filled once memoized
+
+ The creation of the enum class is inspired by the code of
+ EnumMeta._create_.
+
+ If class_tracker_id is not None, try to lookup an existing enum definition
+ matching that id. If none is found, track a newly reconstructed enum
+ definition under that id so that other instances stemming from the same
+ class id will also reuse this enum definition.
+
+ The "extra" variable is meant to be a dict (or None) that can be used for
+ forward compatibility shall the need arise.
+ """
+ # enums always inherit from their base Enum class at the last position in
+ # the list of base classes:
+ enum_base = bases[-1]
+ metacls = enum_base.__class__
+ classdict = metacls.__prepare__(name, bases)
+
+ for member_name, member_value in members.items():
+ classdict[member_name] = member_value
+ enum_class = metacls.__new__(metacls, name, bases, classdict)
+ enum_class.__module__ = module
+
+ # Python 2.7 compat
+ if qualname is not None:
+ enum_class.__qualname__ = qualname
+
+ return _lookup_class_or_track(class_tracker_id, enum_class)
+
+
def _is_dynamic(module):
"""
Return True if the module is special module that cannot be imported by its
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-0.8.1/cloudpickle.egg-info/PKG-INFO new/cloudpickle-1.1.1/cloudpickle.egg-info/PKG-INFO
--- old/cloudpickle-0.8.1/cloudpickle.egg-info/PKG-INFO 2019-03-25 10:07:23.000000000 +0100
+++ new/cloudpickle-1.1.1/cloudpickle.egg-info/PKG-INFO 2019-05-15 10:50:45.000000000 +0200
@@ -1,6 +1,6 @@
-Metadata-Version: 1.1
+Metadata-Version: 2.1
Name: cloudpickle
-Version: 0.8.1
+Version: 1.1.1
Summary: Extended pickling support for Python objects
Home-page: https://github.com/cloudpipe/cloudpickle
Author: Cloudpipe
@@ -120,3 +120,4 @@
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Scientific/Engineering
Classifier: Topic :: System :: Distributed Computing
+Description-Content-Type: text/markdown
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-0.8.1/setup.py new/cloudpickle-1.1.1/setup.py
--- old/cloudpickle-0.8.1/setup.py 2019-01-31 14:05:30.000000000 +0100
+++ new/cloudpickle-1.1.1/setup.py 2019-05-15 10:49:00.000000000 +0200
@@ -31,6 +31,7 @@
license='BSD 3-Clause License',
packages=['cloudpickle'],
long_description=open('README.md').read(),
+ long_description_content_type="text/markdown",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-0.8.1/tests/cloudpickle_test.py new/cloudpickle-1.1.1/tests/cloudpickle_test.py
--- old/cloudpickle-0.8.1/tests/cloudpickle_test.py 2019-03-25 09:45:31.000000000 +0100
+++ new/cloudpickle-1.1.1/tests/cloudpickle_test.py 2019-05-15 10:43:28.000000000 +0200
@@ -42,6 +42,7 @@
import cloudpickle
from cloudpickle.cloudpickle import _is_dynamic
from cloudpickle.cloudpickle import _make_empty_cell, cell_set
+from cloudpickle.cloudpickle import _extract_class_dict
from .testutils import subprocess_pickle_echo
from .testutils import assert_run_python_script
@@ -71,6 +72,32 @@
return raw_filepath.replace("\\", r"\\\\")
+def test_extract_class_dict():
+ class A(int):
+ """A docstring"""
+ def method(self):
+ return "a"
+
+ class B:
+ """B docstring"""
+ B_CONSTANT = 42
+
+ def method(self):
+ return "b"
+
+ class C(A, B):
+ C_CONSTANT = 43
+
+ def method_c(self):
+ return "c"
+
+ clsdict = _extract_class_dict(C)
+ assert sorted(clsdict.keys()) == ["C_CONSTANT", "__doc__", "method_c"]
+ assert clsdict["C_CONSTANT"] == 43
+ assert clsdict["__doc__"] is None
+ assert clsdict["method_c"](C()) == C().method_c()
+
+
class CloudPickleTest(unittest.TestCase):
protocol = cloudpickle.DEFAULT_PROTOCOL
@@ -924,21 +951,18 @@
self.assertEqual(cloned.__qualname__, func.__qualname__)
def test_namedtuple(self):
-
MyTuple = collections.namedtuple('MyTuple', ['a', 'b', 'c'])
- t = MyTuple(1, 2, 3)
+ t1 = MyTuple(1, 2, 3)
+ t2 = MyTuple(3, 2, 1)
- depickled_t, depickled_MyTuple = pickle_depickle(
- [t, MyTuple], protocol=self.protocol)
- self.assertTrue(isinstance(depickled_t, depickled_MyTuple))
-
- self.assertEqual((depickled_t.a, depickled_t.b, depickled_t.c),
- (1, 2, 3))
- self.assertEqual((depickled_t[0], depickled_t[1], depickled_t[2]),
- (1, 2, 3))
+ depickled_t1, depickled_MyTuple, depickled_t2 = pickle_depickle(
+ [t1, MyTuple, t2], protocol=self.protocol)
- self.assertEqual(depickled_MyTuple.__name__, 'MyTuple')
- self.assertTrue(issubclass(depickled_MyTuple, tuple))
+ assert isinstance(depickled_t1, MyTuple)
+ assert depickled_t1 == t1
+ assert depickled_MyTuple is MyTuple
+ assert isinstance(depickled_t2, MyTuple)
+ assert depickled_t2 == t2
def test_builtin_type__new__(self):
# Functions occasionally take the __new__ of these types as default
@@ -1197,6 +1221,123 @@
""".format(protocol=self.protocol)
assert_run_python_script(code)
+ def test_interactive_dynamic_type_and_remote_instances(self):
+ code = """if __name__ == "__main__":
+ from testutils import subprocess_worker
+
+ with subprocess_worker(protocol={protocol}) as w:
+
+ class CustomCounter:
+ def __init__(self):
+ self.count = 0
+ def increment(self):
+ self.count += 1
+ return self
+
+ counter = CustomCounter().increment()
+ assert counter.count == 1
+
+ returned_counter = w.run(counter.increment)
+ assert returned_counter.count == 2, returned_counter.count
+
+ # Check that the class definition of the returned instance was
+ # matched back to the original class definition living in __main__.
+
+ assert isinstance(returned_counter, CustomCounter)
+
+ # Check that memoization does not break provenance tracking:
+
+ def echo(*args):
+ return args
+
+ C1, C2, c1, c2 = w.run(echo, CustomCounter, CustomCounter,
+ CustomCounter(), returned_counter)
+ assert C1 is CustomCounter
+ assert C2 is CustomCounter
+ assert isinstance(c1, CustomCounter)
+ assert isinstance(c2, CustomCounter)
+
+ """.format(protocol=self.protocol)
+ assert_run_python_script(code)
+
+ def test_interactive_dynamic_type_and_stored_remote_instances(self):
+ """Simulate objects stored on workers to check isinstance semantics
+
+ Such instances stored in the memory of running worker processes are
+ similar to dask-distributed futures for instance.
+ """
+ code = """if __name__ == "__main__":
+ import cloudpickle, uuid
+ from testutils import subprocess_worker
+
+ with subprocess_worker(protocol={protocol}) as w:
+
+ class A:
+ '''Original class definition'''
+ pass
+
+ def store(x):
+ storage = getattr(cloudpickle, "_test_storage", None)
+ if storage is None:
+ storage = cloudpickle._test_storage = dict()
+ obj_id = uuid.uuid4().hex
+ storage[obj_id] = x
+ return obj_id
+
+ def lookup(obj_id):
+ return cloudpickle._test_storage[obj_id]
+
+ id1 = w.run(store, A())
+
+ # The stored object on the worker is matched to a singleton class
+ # definition thanks to provenance tracking:
+ assert w.run(lambda obj_id: isinstance(lookup(obj_id), A), id1)
+
+ # Retrieving the object from the worker yields a local copy that
+ # is matched back the local class definition this instance
+ # originally stems from.
+ assert isinstance(w.run(lookup, id1), A)
+
+ # Changing the local class definition should be taken into account
+ # in all subsequent calls. In particular the old instances on the
+ # worker do not map back to the new class definition, neither on
+ # the worker itself, nor locally on the main program when the old
+ # instance is retrieved:
+
+ class A:
+ '''Updated class definition'''
+ pass
+
+ assert not w.run(lambda obj_id: isinstance(lookup(obj_id), A), id1)
+ retrieved1 = w.run(lookup, id1)
+ assert not isinstance(retrieved1, A)
+ assert retrieved1.__class__ is not A
+ assert retrieved1.__class__.__doc__ == "Original class definition"
+
+ # New instances on the other hand are proper instances of the new
+ # class definition everywhere:
+
+ a = A()
+ id2 = w.run(store, a)
+ assert w.run(lambda obj_id: isinstance(lookup(obj_id), A), id2)
+ assert isinstance(w.run(lookup, id2), A)
+
+ # Monkeypatch the class definition in the main process to a new
+ # class method:
+ A.echo = lambda cls, x: x
+
+ # Calling this method on an instance will automatically update
+ # the remote class definition on the worker to propagate the monkey
+ # patch dynamically.
+ assert w.run(a.echo, 42) == 42
+
+ # The stored instance can therefore also access the new class
+ # method:
+ assert w.run(lambda obj_id: lookup(obj_id).echo(43), id2) == 43
+
+ """.format(protocol=self.protocol)
+ assert_run_python_script(code)
+
@pytest.mark.skipif(platform.python_implementation() == 'PyPy',
reason="Skip PyPy because memory grows too much")
def test_interactive_remote_function_calls_no_memory_leak(self):
@@ -1226,9 +1367,10 @@
import gc
w.run(gc.collect)
- # By this time the worker process has processed worth of 100MB of
- # data passed in the closures its memory size should now have
- # grown by more than a few MB.
+ # By this time the worker process has processed 100MB worth of data
+ # passed in the closures. The worker memory size should not have
+ # grown by more than a few MB as closures are garbage collected at
+ # the end of each remote function call.
growth = w.memsize() - reference_size
assert growth < 1e7, growth
@@ -1368,6 +1510,88 @@
pickle_depickle(DataClass, protocol=self.protocol)
assert data.x == pickle_depickle(data, protocol=self.protocol).x == 42
+ def test_locally_defined_enum(self):
+ enum = pytest.importorskip("enum")
+
+ class StringEnum(str, enum.Enum):
+ """Enum when all members are also (and must be) strings"""
+
+ class Color(StringEnum):
+ """3-element color space"""
+ RED = "1"
+ GREEN = "2"
+ BLUE = "3"
+
+ def is_green(self):
+ return self is Color.GREEN
+
+ green1, green2, ClonedColor = pickle_depickle(
+ [Color.GREEN, Color.GREEN, Color], protocol=self.protocol)
+ assert green1 is green2
+ assert green1 is ClonedColor.GREEN
+ assert green1 is not ClonedColor.BLUE
+ assert isinstance(green1, str)
+ assert green1.is_green()
+
+ # cloudpickle systematically tracks provenance of class definitions
+ # and ensure reconciliation in case of round trips:
+ assert green1 is Color.GREEN
+ assert ClonedColor is Color
+
+ green3 = pickle_depickle(Color.GREEN, protocol=self.protocol)
+ assert green3 is Color.GREEN
+
+ def test_locally_defined_intenum(self):
+ enum = pytest.importorskip("enum")
+ # Try again with a IntEnum defined with the functional API
+ DynamicColor = enum.IntEnum("Color", {"RED": 1, "GREEN": 2, "BLUE": 3})
+
+ green1, green2, ClonedDynamicColor = pickle_depickle(
+ [DynamicColor.GREEN, DynamicColor.GREEN, DynamicColor],
+ protocol=self.protocol)
+
+ assert green1 is green2
+ assert green1 is ClonedDynamicColor.GREEN
+ assert green1 is not ClonedDynamicColor.BLUE
+ assert ClonedDynamicColor is DynamicColor
+
+ def test_interactively_defined_enum(self):
+ pytest.importorskip("enum")
+ code = """if __name__ == "__main__":
+ from enum import Enum
+ from testutils import subprocess_worker
+
+ with subprocess_worker(protocol={protocol}) as w:
+
+ class Color(Enum):
+ RED = 1
+ GREEN = 2
+
+ def check_positive(x):
+ return Color.GREEN if x >= 0 else Color.RED
+
+ result = w.run(check_positive, 1)
+
+ # Check that the returned enum instance is reconciled with the
+ # locally defined Color enum type definition:
+
+ assert result is Color.GREEN
+
+ # Check that changing the definition of the Enum class is taken
+ # into account on the worker for subsequent calls:
+
+ class Color(Enum):
+ RED = 1
+ BLUE = 2
+
+ def check_positive(x):
+ return Color.BLUE if x >= 0 else Color.RED
+
+ result = w.run(check_positive, 1)
+ assert result is Color.BLUE
+ """.format(protocol=self.protocol)
+ assert_run_python_script(code)
+
def test_relative_import_inside_function(self):
# Make sure relative imports inside round-tripped functions is not
# broken. This was a bug in cloudpickle versions <= 0.5.3 and was
@@ -1392,6 +1616,54 @@
cloned_func = pickle_depickle(func, protocol=self.protocol)
assert cloned_func() == "hello from a {}!".format(source)
+ @pytest.mark.skipif(sys.version_info[0] < 3,
+ reason="keyword only arguments were introduced in "
+ "python 3")
+ def test_interactively_defined_func_with_keyword_only_argument(self):
+ # fixes https://github.com/cloudpipe/cloudpickle/issues/263
+ # The source code of this test is bundled in a string and is ran from
+ # the __main__ module of a subprocess in order to avoid a SyntaxError
+ # in python2 when pytest imports this file, as the keyword-only syntax
+ # is python3-only.
+ code = """
+ from cloudpickle import loads, dumps
+
+ def f(a, *, b=1):
+ return a + b
+
+ depickled_f = loads(dumps(f, protocol={protocol}))
+
+ for func in (f, depickled_f):
+ assert func(2) == 3
+ assert func.__kwdefaults__ == {{'b': 1}}
+ """.format(protocol=self.protocol)
+ assert_run_python_script(textwrap.dedent(code))
+
+ @pytest.mark.skipif(not hasattr(types.CodeType, "co_posonlyargcount"),
+ reason="Requires positional-only argument syntax")
+ def test_interactively_defined_func_with_positional_only_argument(self):
+ # Fixes https://github.com/cloudpipe/cloudpickle/issues/266
+ # The source code of this test is bundled in a string and is ran from
+ # the __main__ module of a subprocess in order to avoid a SyntaxError
+ # in versions of python that do not support positional-only argument
+ # syntax.
+ code = """
+ import pytest
+ from cloudpickle import loads, dumps
+
+ def f(a, /, b=1):
+ return a + b
+
+ depickled_f = loads(dumps(f, protocol={protocol}))
+
+ for func in (f, depickled_f):
+ assert func(2) == 3
+ assert func.__code__.co_posonlyargcount == 1
+ with pytest.raises(TypeError):
+ func(a=2)
+
+ """.format(protocol=self.protocol)
+ assert_run_python_script(textwrap.dedent(code))
class Protocol2CloudPickleTest(CloudPickleTest):
1
0
Hello community,
here is the log from the commit of package insserv-compat for openSUSE:Factory checked in at 2019-05-25 13:17:48
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/insserv-compat (Old)
and /work/SRC/openSUSE:Factory/.insserv-compat.new.5148 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "insserv-compat"
Sat May 25 13:17:48 2019 rev:19 rq:704437 version:0.1
Changes:
--------
--- /work/SRC/openSUSE:Factory/insserv-compat/insserv-compat.changes 2019-04-23 14:41:33.421658217 +0200
+++ /work/SRC/openSUSE:Factory/.insserv-compat.new.5148/insserv-compat.changes 2019-05-25 13:17:49.504390324 +0200
@@ -1,0 +2,5 @@
+Thu May 9 15:19:24 UTC 2019 - Ludwig Nussel <lnussel(a)suse.com>
+
+- fix handling of ,start= parameters (bsc#1133306)
+
+-------------------------------------------------------------------
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ insserv.pl ++++++
--- /var/tmp/diff_new_pack.9LH1Ts/_old 2019-05-25 13:17:50.140390087 +0200
+++ /var/tmp/diff_new_pack.9LH1Ts/_new 2019-05-25 13:17:50.144390086 +0200
@@ -242,7 +242,9 @@
if (@ARGV) {
my $p = $ARGV[0] =~ /\// ? $ARGV[0] : "$init_d/$ARGV[0]";
- if (!-e $p) {
+ # need to strip off potential extra parameters
+ my $script = $1 if $p =~ /([^,]+)/;
+ if (!-e $script) {
warn "$p doesn't exist";
usage(1) if @ARGV;
cleanup();
1
0
Hello community,
here is the log from the commit of package python-ipy for openSUSE:Factory checked in at 2019-05-25 13:17:41
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-ipy (Old)
and /work/SRC/openSUSE:Factory/.python-ipy.new.5148 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-ipy"
Sat May 25 13:17:41 2019 rev:9 rq:704422 version:1.00
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-ipy/python-ipy.changes 2018-11-26 10:30:27.601017171 +0100
+++ /work/SRC/openSUSE:Factory/.python-ipy.new.5148/python-ipy.changes 2019-05-25 13:17:42.760392831 +0200
@@ -1,0 +2,8 @@
+Tue May 21 10:06:13 UTC 2019 - pgajdos(a)suse.com
+
+- version update to 1.00
+ * Fix IPv6 string interpretation for small ints
+ * Various Python3 language fixes
+ * consider 127.0 range LOOPBACK not PRIVATE
+
+-------------------------------------------------------------------
Old:
----
IPy-0.83.tar.gz
New:
----
IPy-1.00.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-ipy.spec ++++++
--- /var/tmp/diff_new_pack.jIq7uG/_old 2019-05-25 13:17:44.160392311 +0200
+++ /var/tmp/diff_new_pack.jIq7uG/_new 2019-05-25 13:17:44.180392303 +0200
@@ -1,7 +1,7 @@
#
# spec file for package python-ipy
#
-# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -12,13 +12,13 @@
# license that conforms to the Open Source Definition (Version 1.9)
# published by the Open Source Initiative.
-# Please submit bugfixes or comments via http://bugs.opensuse.org/
+# Please submit bugfixes or comments via https://bugs.opensuse.org/
#
%{?!python_module:%define python_module() python-%{**} python3-%{**}}
Name: python-ipy
-Version: 0.83
+Version: 1.00
Release: 0
Summary: Class and tools for handling of IPv4 and IPv6 addresses and networks
License: BSD-3-Clause
@@ -48,10 +48,14 @@
%install
%python_install
%python_expand %fdupes %{buildroot}%{$python_sitelib}
-#install COPYING %{buildroot}
%files %{python_files}
%{python_sitelib}/*
%license COPYING
+%check
+%python_exec test/test_IPy.py
+# one of 3000 subtest fails, probably https://github.com/autocracy/python-ipy/issues/27
+# %%python_exec test/test_fuzz.py
+
%changelog
++++++ IPy-0.83.tar.gz -> IPy-1.00.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/.gitignore new/IPy-1.00/.gitignore
--- old/IPy-0.83/.gitignore 2015-04-05 02:48:02.000000000 +0200
+++ new/IPy-1.00/.gitignore 1970-01-01 01:00:00.000000000 +0100
@@ -1,2 +0,0 @@
-*.pyc
-*.swp
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/COPYING new/IPy-1.00/COPYING
--- old/IPy-0.83/COPYING 2015-04-05 02:48:02.000000000 +0200
+++ new/IPy-1.00/COPYING 2018-02-01 09:32:18.000000000 +0100
@@ -1,3 +1,4 @@
+"BSD 3-clause" License
Copyright (c) 2006, INL
Copyright (c) 2001-2005, Maximillian Dornseif
All rights reserved.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/ChangeLog new/IPy-1.00/ChangeLog
--- old/IPy-0.83/ChangeLog 2015-04-05 02:48:02.000000000 +0200
+++ new/IPy-1.00/ChangeLog 2019-02-28 00:46:40.000000000 +0100
@@ -1,3 +1,8 @@
+Version 1.00 (2019-02-27)
+ * Fix IPv6 string interpretation for small ints
+ * Various Python3 language fixes
+ * consider 127.0 range LOOPBACK not PRIVATE
+
Version 0.83 (2015-04-04)
------------
* Add carrier grade NAT ranges
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/IPy.py new/IPy-1.00/IPy.py
--- old/IPy-0.83/IPy.py 2015-04-05 02:48:02.000000000 +0200
+++ new/IPy-1.00/IPy.py 2019-02-28 01:00:55.000000000 +0100
@@ -6,7 +6,7 @@
https://github.com/haypo/python-ipy
"""
-__version__ = '0.83'
+__version__ = '1.00'
import bisect
import collections
@@ -21,7 +21,7 @@
'00000000': 'PRIVATE', # 0/8
'00001010': 'PRIVATE', # 10/8
'0110010001': 'CARRIER_GRADE_NAT', #100.64/10
- '01111111': 'PRIVATE', # 127.0/8
+ '01111111': 'LOOPBACK', # 127.0/8
'1': 'PUBLIC', # fall back
'1010100111111110': 'PRIVATE', # 169.254/16
'101011000001': 'PRIVATE', # 172.16/12
@@ -243,7 +243,7 @@
else:
raise ValueError("can't parse")
- (self.ip, parsedVersion) = parseAddress(ip)
+ (self.ip, parsedVersion) = parseAddress(ip, ipversion)
if ipversion == 0:
ipversion = parsedVersion
if prefixlen == -1:
@@ -475,7 +475,7 @@
"""Return a description of the IP type ('PRIVATE', 'RESERVED', etc).
>>> print(IP('127.0.0.1').iptype())
- PRIVATE
+ LOOPBACK
>>> print(IP('192.168.1.1').iptype())
PRIVATE
>>> print(IP('195.185.1.2').iptype())
@@ -558,6 +558,8 @@
"""
return True
+ def __bool__(self):
+ return self.__nonzero__()
def __len__(self):
"""
@@ -769,6 +771,9 @@
def __lt__(self, other):
return self.__cmp__(other) < 0
+ def __le__(self, other):
+ return self.__cmp__(other) <= 0
+
def __hash__(self):
"""Called for the key object for dictionary operations, and by
the built-in function hash(). Should return a 32-bit integer
@@ -1336,7 +1341,7 @@
index += 1
return value
-def parseAddress(ipstr):
+def parseAddress(ipstr, ipversion=0):
"""
Parse a string and return the corresponding IP address (as integer)
and a guess of the IP version.
@@ -1405,7 +1410,7 @@
# assume IPv6 in pure hexadecimal notation
return (hexval, 6)
- elif ipstr.find('.') != -1 or (intval is not None and intval < 256):
+ elif ipstr.find('.') != -1 or (intval is not None and intval < 256 and ipversion != 6):
# assume IPv4 ('127' gets interpreted as '127.0.0.0')
bytes = ipstr.split('.')
if len(bytes) > 4:
@@ -1423,7 +1428,7 @@
# will be interpreted as IPv4 first byte
if intval > MAX_IPV6_ADDRESS:
raise ValueError("IP Address can't be larger than %x: %x" % (MAX_IPV6_ADDRESS, intval))
- if intval <= MAX_IPV4_ADDRESS:
+ if intval <= MAX_IPV4_ADDRESS and ipversion != 6:
return (intval, 4)
else:
return (intval, 6)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/MANIFEST.in new/IPy-1.00/MANIFEST.in
--- old/IPy-0.83/MANIFEST.in 2015-04-05 02:48:02.000000000 +0200
+++ new/IPy-1.00/MANIFEST.in 2018-02-01 09:32:18.000000000 +0100
@@ -1,4 +1,5 @@
include AUTHORS
+include README.rst
include ChangeLog
include COPYING
include MANIFEST.in
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/Makefile new/IPy-1.00/Makefile
--- old/IPy-0.83/Makefile 2015-04-05 02:48:02.000000000 +0200
+++ new/IPy-1.00/Makefile 1970-01-01 01:00:00.000000000 +0100
@@ -1,27 +0,0 @@
-.PHONY: tests egg install clean
-
-PYTHON=python
-
-tests:
- @echo "[ run unit tests in python 2 ]"
- PYTHONPATH=$(PWD) $(PYTHON)2.6 test/test_IPy.py || exit $$?
- @echo "[ run unit tests in python 3 ]"
- PYTHONPATH=$(PWD) $(PYTHON)3.4 test/test_IPy.py || exit $$?
- @echo
- @echo "[ test README in python 2 ]"
- $(PYTHON)2.6 test_doc.py || exit $$?
- @echo "[ test README in python 3 ]"
- $(PYTHON)3.4 test_doc.py || exit $$?
-
-egg: clean
- $(PYTHON) setup.py sdist bdist_egg
-
-IPy.html: README
- rst2html README $@ --stylesheet=rest.css
-
-install:
- ./setup.py install
-
-clean:
- rm -rf IPy.html *.pyc build dist IPy.egg-info
-
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/PKG-INFO new/IPy-1.00/PKG-INFO
--- old/IPy-0.83/PKG-INFO 1970-01-01 01:00:00.000000000 +0100
+++ new/IPy-1.00/PKG-INFO 2019-02-28 01:20:19.000000000 +0100
@@ -0,0 +1,367 @@
+Metadata-Version: 1.1
+Name: IPy
+Version: 1.00
+Summary: Class and tools for handling of IPv4 and IPv6 addresses and networks
+Home-page: https://github.com/autocracy/python-ipy
+Author: Jeff Ferland
+Author-email: jeff(a)storyinmemo.com
+License: BSD License
+Download-URL: https://github.com/autocracy/python-ipy
+Description: IPy - class and tools for handling of IPv4 and IPv6 addresses and networks.
+
+ Website: https://github.com/autocracy/python-ipy/
+
+ Presentation of the API
+ =======================
+
+ The IP class allows a comfortable parsing and handling for most
+ notations in use for IPv4 and IPv6 addresses and networks. It was
+ greatly inspired by RIPE's Perl module NET::IP's interface but
+ doesn't share the implementation. It doesn't share non-CIDR netmasks,
+ so funky stuff like a netmask of 0xffffff0f can't be done here. ::
+
+ >>> from IPy import IP
+ >>> ip = IP('127.0.0.0/30')
+ >>> for x in ip:
+ ... print(x)
+ ...
+ 127.0.0.0
+ 127.0.0.1
+ 127.0.0.2
+ 127.0.0.3
+ >>> ip2 = IP('0x7f000000/30')
+ >>> ip == ip2
+ 1
+ >>> ip.reverseNames()
+ ['0.0.0.127.in-addr.arpa.', '1.0.0.127.in-addr.arpa.', '2.0.0.127.in-addr.arpa.', '3.0.0.127.in-addr.arpa.']
+ >>> ip.reverseName()
+ '0-3.0.0.127.in-addr.arpa.'
+ >>> ip.iptype()
+ 'LOOPBACK'
+
+
+ Supports most IP address formats
+ ================================
+
+ It can detect about a dozen different ways of expressing IP addresses
+ and networks, parse them and distinguish between IPv4 and IPv6 addresses: ::
+
+ >>> IP('10.0.0.0/8').version()
+ 4
+ >>> IP('::1').version()
+ 6
+
+ IPv4 addresses
+ --------------
+
+ ::
+
+ >>> print(IP(0x7f000001))
+ 127.0.0.1
+ >>> print(IP('0x7f000001'))
+ 127.0.0.1
+ >>> print(IP('127.0.0.1'))
+ 127.0.0.1
+ >>> print(IP('10'))
+ 10.0.0.0
+
+ IPv6 addresses
+ --------------
+
+ ::
+
+ >>> print(IP('1080:0:0:0:8:800:200C:417A'))
+ 1080::8:800:200c:417a
+ >>> print(IP('1080::8:800:200C:417A'))
+ 1080::8:800:200c:417a
+ >>> print(IP('::1'))
+ ::1
+ >>> print(IP('::13.1.68.3'))
+ ::d01:4403
+
+ Network mask and prefixes
+ -------------------------
+
+ ::
+
+ >>> print(IP('127.0.0.0/8'))
+ 127.0.0.0/8
+ >>> print(IP('127.0.0.0/255.0.0.0'))
+ 127.0.0.0/8
+ >>> print(IP('127.0.0.0-127.255.255.255'))
+ 127.0.0.0/8
+
+
+ Derive network address
+ ===========================
+
+ IPy can transform an IP address into a network address by applying the given
+ netmask: ::
+
+ >>> print(IP('127.0.0.1/255.0.0.0', make_net=True))
+ 127.0.0.0/8
+
+ This can also be done for existing IP instances: ::
+
+ >>> print(IP('127.0.0.1').make_net('255.0.0.0'))
+ 127.0.0.0/8
+
+
+ Convert address to string
+ =========================
+
+ Nearly all class methods which return a string have an optional
+ parameter 'wantprefixlen' which controls if the prefixlen or netmask
+ is printed. Per default the prefilen is always shown if the network
+ contains more than one address: ::
+
+ wantprefixlen == 0 / None don't return anything 1.2.3.0
+ wantprefixlen == 1 /prefix 1.2.3.0/24
+ wantprefixlen == 2 /netmask 1.2.3.0/255.255.255.0
+ wantprefixlen == 3 -lastip 1.2.3.0-1.2.3.255
+
+ You can also change the defaults on an per-object basis by fiddling with
+ the class members:
+
+ - NoPrefixForSingleIp
+ - WantPrefixLen
+
+ Examples of string conversions: ::
+
+ >>> IP('10.0.0.0/32').strNormal()
+ '10.0.0.0'
+ >>> IP('10.0.0.0/24').strNormal()
+ '10.0.0.0/24'
+ >>> IP('10.0.0.0/24').strNormal(0)
+ '10.0.0.0'
+ >>> IP('10.0.0.0/24').strNormal(1)
+ '10.0.0.0/24'
+ >>> IP('10.0.0.0/24').strNormal(2)
+ '10.0.0.0/255.255.255.0'
+ >>> IP('10.0.0.0/24').strNormal(3)
+ '10.0.0.0-10.0.0.255'
+ >>> ip = IP('10.0.0.0')
+ >>> print(ip)
+ 10.0.0.0
+ >>> ip.NoPrefixForSingleIp = None
+ >>> print(ip)
+ 10.0.0.0/32
+ >>> ip.WantPrefixLen = 3
+ >>> print(ip)
+ 10.0.0.0-10.0.0.0
+
+ Work with multiple networks
+ ===========================
+
+ Simple addition of neighboring netblocks that can be aggregated will yield
+ a parent network of both, but more complex range mapping and aggregation
+ requires is available with the ``IPSet`` class which will hold any number of
+ unique address ranges and will aggregate overlapping ranges. ::
+
+ >>> from IPy import IP, IPSet
+ >>> IP('10.0.0.0/22') - IP('10.0.2.0/24')
+ IPSet([IP('10.0.0.0/23'), IP('10.0.3.0/24')])
+ >>> IPSet([IP('10.0.0.0/23'), IP('10.0.3.0/24'), IP('10.0.2.0/24')])
+ IPSet([IP('10.0.0.0/22')])
+ >>> s = IPSet([IP('10.0.0.0/22')])
+ >>> s.add(IP('192.168.1.0/29'))
+ >>> s
+ IPSet([IP('10.0.0.0/22'), IP('192.168.1.0/29')])
+ >>> s.discard(IP('192.168.1.2'))
+ >>> s
+ IPSet([IP('10.0.0.0/22'), IP('192.168.1.0/31'), IP('192.168.1.3'), IP('192.168.1.4/30')])
+
+ ``IPSet`` supports the ``set`` method ``isdisjoint``: ::
+
+ >>> s.isdisjoint(IPSet([IP('192.168.0.0/16')]))
+ False
+ >>> s.isdisjoint(IPSet([IP('172.16.0.0/12')]))
+ True
+
+ ``IPSet`` supports intersection: ::
+
+ >>> s & IPSet([IP('10.0.0.0/8')])
+ IPSet([IP('10.0.0.0/22')])
+
+ Compatibility and links
+ =======================
+
+ IPy 1.00 works on Python version 2.6 - 3.7.
+
+ The IP module should work in Python 2.5 as long as the subtraction operation
+ is not used. IPSet requires features of the collecitons class which appear
+ in Python 2.6, though they can be backported.
+
+ Eratta
+ ======
+
+ When using IPv6 addresses, it is best to compare using ``IP().len()``
+ instead of ``len(IP)``. Addresses with an integer value > 64 bits can break
+ the 2nd method. See http://stackoverflow.com/questions/15650878 for more
+ info.
+
+ Fuzz testing for ``IPSet`` will throw spurious errors when the ``IPSet`` module
+ combines two smaller prefixes into a larger prefix that matches the random
+ prefix tested against.
+
+ This Python module is under BSD license: see COPYING file.
+
+ Further Information might be available at:
+ https://github.com/autocracy/python-ipy
+
+ What's new
+ ==========
+
+ Version 1.00 (2019-02-27)
+ * Fix IPv6 string interpretation for small ints
+ * Various Python3 language fixes
+ * consider 127.0 range LOOPBACK not PRIVATE
+
+ Version 0.83 (2015-04-04)
+ ------------
+ * Add carrier grade NAT ranges
+ * Unbreak lots of packing systems by not having a letter in the release version
+
+ Version 0.82a (2014-10-07)
+ ------------
+ * Fix version numbers in files
+ * Correct x.next() -> next(x) python3 compatability
+
+ Version 0.82 (2014-10-06)
+ ------------
+
+ * Add support for array slices
+ * Add __and__ and isdisjoint for IPSet
+ * Fix a bug in IPSet where contains may incorrectly return false
+ * Added some fuzz testing
+
+ Version 0.81 (2013-04-08)
+ ------------
+
+ * Correct reverseName() for IPv6 addresses, so IP('::1').reverseName() returns correct.
+ * Add network mask awareness to v46map()
+ * Fix Python 3 errors in IPSet class
+ * Make IPSet base class be object when MutableSet isn't available, fixing
+ errors in Python 2.5
+
+ Version 0.80 (2013-03-26)
+ ------------
+
+ * Drop support of Python older than 2.4
+ * Python 3 does not need 2to3 conversion anymore (same code base)
+ * Fix adding of non-adjacent networks:
+ 192.168.0.0/24 + 192.168.255.0/24 made 192.168.0.0/23
+ * Fix adding networks that don't create a valid subnet:
+ 192.168.1.0/24 + 192.168.2.0/24 made 192.168.1.0/23
+ * Fix adding with an IPv6 address where .int() was < 32 bits made IPy believe it
+ was an IPv4 address:
+ ::ffff:0/112 + ::1:0:0/112 made 255.255.0.0/111
+ * Add support of IPSets
+ * Add support for subtracting a network range
+ * Prevent IPv4 and IPv6 ranges from saying they contain each other
+ * Add a .v46map() method to convert mapped address ranges
+ such as IP('::ffff:192.168.1.1'); RFC 4291
+ * Change sort order to more natural:
+ IPv4 before IPv6; less-specific prefixes first (/0 before /32)
+
+
+ Version 0.76 (2013-03-19)
+ -------------------------
+
+ * ip == other and ip != other doesn't fail with an exception anymore if other
+ is not a IP object
+ * Add IP.get_mac() method: get the 802.3 MAC address from IPv6 RFC 2464
+ address.
+ * Fix IP('::/0')[0]: return an IPv6 instead of an IPv4 address
+
+ Version 0.75 (2011-04-12)
+ -------------------------
+
+ * IP('::/0').netmask() gives IP('::') instead of IP('0.0.0.0')
+
+ Version 0.74 (2011-02-16)
+ -------------------------
+
+ * Fix tests for Python 3.1 and 3.2
+ * ip.__nonzero__() and (ipa in ipb) return a bool instead of 0 or 1
+ * IP('0.0.0.0/0') + IP('0.0.0.0/0') raises an error, fix written by Arfrever
+
+ Version 0.73 (2011-02-15)
+ -------------------------
+
+ * Support Python 3: setup.py runs 2to3
+ * Update the ranges for IPv6 IPs
+ * Fix reverseName() and reverseNames() for IPv4 in IPv6 addresses
+ * Drop support of Python < 2.5
+
+ Version 0.72 (2010-11-23)
+ -------------------------
+
+ * Include examples and MANIFEST.in in source build (add them to
+ MANIFEST.in)
+ * Remove __rcsid__ constant from IPy module
+
+ Version 0.71 (2010-10-01)
+ -------------------------
+
+ * Use xrange() instead of range()
+ * Use isinstance(x, int) instead of type(x) == types.IntType
+ * Prepare support of Python3 (use integer division: x // y)
+ * Fix IP(long) constructor: ensure that the address is not too large
+ * Constructor raise a TypeError if the type is not int, long,
+ str or unicode
+ * 223.0.0.0/8 is now public (belongs to APNIC)
+
+ Version 0.70 (2009-10-29)
+ -------------------------
+
+ * New "major" version because it may break compatibility
+ * Fix __cmp__(): IP('0.0.0.0/0') and IP('0.0.0.0') are not equal
+ * Fix IP.net() of the network "::/0": "::" instead of "0.0.0.0".
+ IPy 0.63 should fix this bug, but it wasn't.
+
+ Version 0.64 (2009-08-19)
+ -------------------------
+
+ * Create MANIFEST.in to fix setup.py bdist_rpm, fix by Robert Nickel
+
+ Version 0.63 (2009-06-23)
+ -------------------------
+
+ * Fix formatting of "IPv4 in IPv6" network, eg. IP('::ffff:192.168.10.0/120'),
+ the netmask ("/120" in the example) was missing!
+
+ Version 0.62 (2008-07-15)
+ -------------------------
+
+ * Fix reverse DNS of IPv6 address: use ".ip6.arpa." suffix instead of
+ deprecated ".ip6.int." suffix
+
+ Version 0.61 (2008-06-12)
+ -------------------------
+
+ * Patch from Aras Vaichas allowing the [-1] operator
+ to work with an IP object of size 1.
+
+ Version 0.60 (2008-05-16)
+ -------------------------
+
+ * strCompressed() formats '::ffff:a.b.c.d' correctly
+ * Use strCompressed() instead of strFullsize() to format IP addresses,
+ ouput is smarter with IPv6 address
+ * Remove check_addr_prefixlen because it generates invalid IP address
+Keywords: ipv4 ipv6 netmask
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: Environment :: Plugins
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Communications
+Classifier: Topic :: Internet
+Classifier: Topic :: System :: Networking
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/README new/IPy-1.00/README
--- old/IPy-0.83/README 2015-04-05 02:48:02.000000000 +0200
+++ new/IPy-1.00/README 1970-01-01 01:00:00.000000000 +0100
@@ -1,192 +0,0 @@
-IPy - class and tools for handling of IPv4 and IPv6 addresses and networks.
-
-Website: https://github.com/autocracy/python-ipy/
-
-Presentation of the API
-=======================
-
-The IP class allows a comfortable parsing and handling for most
-notations in use for IPv4 and IPv6 addresses and networks. It was
-greatly inspired by RIPE's Perl module NET::IP's interface but
-doesn't share the implementation. It doesn't share non-CIDR netmasks,
-so funky stuff like a netmask of 0xffffff0f can't be done here.
-
- >>> from IPy import IP
- >>> ip = IP('127.0.0.0/30')
- >>> for x in ip:
- ... print(x)
- ...
- 127.0.0.0
- 127.0.0.1
- 127.0.0.2
- 127.0.0.3
- >>> ip2 = IP('0x7f000000/30')
- >>> ip == ip2
- 1
- >>> ip.reverseNames()
- ['0.0.0.127.in-addr.arpa.', '1.0.0.127.in-addr.arpa.', '2.0.0.127.in-addr.arpa.', '3.0.0.127.in-addr.arpa.']
- >>> ip.reverseName()
- '0-3.0.0.127.in-addr.arpa.'
- >>> ip.iptype()
- 'PRIVATE'
-
-
-Supports most IP address formats
-================================
-
-It can detect about a dozen different ways of expressing IP addresses
-and networks, parse them and distinguish between IPv4 and IPv6 addresses:
-
- >>> IP('10.0.0.0/8').version()
- 4
- >>> IP('::1').version()
- 6
-
-IPv4 addresses
---------------
-
- >>> print(IP(0x7f000001))
- 127.0.0.1
- >>> print(IP('0x7f000001'))
- 127.0.0.1
- >>> print(IP('127.0.0.1'))
- 127.0.0.1
- >>> print(IP('10'))
- 10.0.0.0
-
-IPv6 addresses
---------------
-
- >>> print(IP('1080:0:0:0:8:800:200C:417A'))
- 1080::8:800:200c:417a
- >>> print(IP('1080::8:800:200C:417A'))
- 1080::8:800:200c:417a
- >>> print(IP('::1'))
- ::1
- >>> print(IP('::13.1.68.3'))
- ::d01:4403
-
-Network mask and prefixes
--------------------------
-
- >>> print(IP('127.0.0.0/8'))
- 127.0.0.0/8
- >>> print(IP('127.0.0.0/255.0.0.0'))
- 127.0.0.0/8
- >>> print(IP('127.0.0.0-127.255.255.255'))
- 127.0.0.0/8
-
-
-Derive network address
-===========================
-
-IPy can transform an IP address into a network address by applying the given
-netmask:
->>> print(IP('127.0.0.1/255.0.0.0', make_net=True))
-127.0.0.0/8
-
-This can also be done for existing IP instances:
->>> print(IP('127.0.0.1').make_net('255.0.0.0'))
-127.0.0.0/8
-
-
-Convert address to string
-=========================
-
-Nearly all class methods which return a string have an optional
-parameter 'wantprefixlen' which controls if the prefixlen or netmask
-is printed. Per default the prefilen is always shown if the network
-contains more than one address::
-
- wantprefixlen == 0 / None don't return anything 1.2.3.0
- wantprefixlen == 1 /prefix 1.2.3.0/24
- wantprefixlen == 2 /netmask 1.2.3.0/255.255.255.0
- wantprefixlen == 3 -lastip 1.2.3.0-1.2.3.255
-
-You can also change the defaults on an per-object basis by fiddling with
-the class members:
-
- * NoPrefixForSingleIp
- * WantPrefixLen
-
-Examples of string conversions:
-
- >>> IP('10.0.0.0/32').strNormal()
- '10.0.0.0'
- >>> IP('10.0.0.0/24').strNormal()
- '10.0.0.0/24'
- >>> IP('10.0.0.0/24').strNormal(0)
- '10.0.0.0'
- >>> IP('10.0.0.0/24').strNormal(1)
- '10.0.0.0/24'
- >>> IP('10.0.0.0/24').strNormal(2)
- '10.0.0.0/255.255.255.0'
- >>> IP('10.0.0.0/24').strNormal(3)
- '10.0.0.0-10.0.0.255'
- >>> ip = IP('10.0.0.0')
- >>> print(ip)
- 10.0.0.0
- >>> ip.NoPrefixForSingleIp = None
- >>> print(ip)
- 10.0.0.0/32
- >>> ip.WantPrefixLen = 3
- >>> print(ip)
- 10.0.0.0-10.0.0.0
-
-Work with multiple networks
-===========================
-
-Simple addition of neighboring netblocks that can be aggregated will yield
-a parent network of both, but more complex range mapping and aggregation
-requires is available with the IPSet class which will hold any number of
-unique address ranges and will aggregate overlapping ranges.
-
- >>> from IPy import IP, IPSet
- >>> IP('10.0.0.0/22') - IP('10.0.2.0/24')
- IPSet([IP('10.0.0.0/23'), IP('10.0.3.0/24')])
- >>> IPSet([IP('10.0.0.0/23'), IP('10.0.3.0/24'), IP('10.0.2.0/24')])
- IPSet([IP('10.0.0.0/22')])
- >>> s = IPSet([IP('10.0.0.0/22')])
- >>> s.add(IP('192.168.1.0/29'))
- >>> s
- IPSet([IP('10.0.0.0/22'), IP('192.168.1.0/29')])
- >>> s.discard(IP('192.168.1.2'))
- >>> s
- IPSet([IP('10.0.0.0/22'), IP('192.168.1.0/31'), IP('192.168.1.3'), IP('192.168.1.4/30')])
-
-IPSet supports the `set` method `isdisjoint`:
-
- >>> s.isdisjoint(IPSet([IP('192.168.0.0/16')]))
- False
- >>> s.isdisjoint(IPSet([IP('172.16.0.0/12')]))
- True
-
-IPSet supports intersection:
-
- >>> s & IPSet([IP('10.0.0.0/8')])
- IPSet([IP('10.0.0.0/22')])
-
-Compatibility and links
-=======================
-
-IPy 0.83 works on Python version 2.6 - 3.4.
-
-The IP module should work in Python 2.5 as long as the subtraction operation
-is not used. IPSet requires features of the collecitons class which appear
-in Python 2.6, though they can be backported.
-
-Eratta
-======
-
-When using IPv6 addresses, it is best to compare using IP().len() instead of
-len(IP). Addresses with an integer value > 64 bits can break the 2nd method.
-See http://stackoverflow.com/questions/15650878 for more info.
-
-Fuzz testing for IPSet will throw spurious errors when the IPSet module
-combines two smaller prefixes into a larger prefix that matches the random
-prefix tested against.
-
-This Python module is under BSD license: see COPYING file.
-
-Further Information might be available at:
-https://github.com/autocracy/python-ipy
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/README.rst new/IPy-1.00/README.rst
--- old/IPy-0.83/README.rst 1970-01-01 01:00:00.000000000 +0100
+++ new/IPy-1.00/README.rst 2019-02-28 00:55:17.000000000 +0100
@@ -0,0 +1,201 @@
+IPy - class and tools for handling of IPv4 and IPv6 addresses and networks.
+
+Website: https://github.com/autocracy/python-ipy/
+
+Presentation of the API
+=======================
+
+The IP class allows a comfortable parsing and handling for most
+notations in use for IPv4 and IPv6 addresses and networks. It was
+greatly inspired by RIPE's Perl module NET::IP's interface but
+doesn't share the implementation. It doesn't share non-CIDR netmasks,
+so funky stuff like a netmask of 0xffffff0f can't be done here. ::
+
+ >>> from IPy import IP
+ >>> ip = IP('127.0.0.0/30')
+ >>> for x in ip:
+ ... print(x)
+ ...
+ 127.0.0.0
+ 127.0.0.1
+ 127.0.0.2
+ 127.0.0.3
+ >>> ip2 = IP('0x7f000000/30')
+ >>> ip == ip2
+ 1
+ >>> ip.reverseNames()
+ ['0.0.0.127.in-addr.arpa.', '1.0.0.127.in-addr.arpa.', '2.0.0.127.in-addr.arpa.', '3.0.0.127.in-addr.arpa.']
+ >>> ip.reverseName()
+ '0-3.0.0.127.in-addr.arpa.'
+ >>> ip.iptype()
+ 'LOOPBACK'
+
+
+Supports most IP address formats
+================================
+
+It can detect about a dozen different ways of expressing IP addresses
+and networks, parse them and distinguish between IPv4 and IPv6 addresses: ::
+
+ >>> IP('10.0.0.0/8').version()
+ 4
+ >>> IP('::1').version()
+ 6
+
+IPv4 addresses
+--------------
+
+::
+
+ >>> print(IP(0x7f000001))
+ 127.0.0.1
+ >>> print(IP('0x7f000001'))
+ 127.0.0.1
+ >>> print(IP('127.0.0.1'))
+ 127.0.0.1
+ >>> print(IP('10'))
+ 10.0.0.0
+
+IPv6 addresses
+--------------
+
+::
+
+ >>> print(IP('1080:0:0:0:8:800:200C:417A'))
+ 1080::8:800:200c:417a
+ >>> print(IP('1080::8:800:200C:417A'))
+ 1080::8:800:200c:417a
+ >>> print(IP('::1'))
+ ::1
+ >>> print(IP('::13.1.68.3'))
+ ::d01:4403
+
+Network mask and prefixes
+-------------------------
+
+::
+
+ >>> print(IP('127.0.0.0/8'))
+ 127.0.0.0/8
+ >>> print(IP('127.0.0.0/255.0.0.0'))
+ 127.0.0.0/8
+ >>> print(IP('127.0.0.0-127.255.255.255'))
+ 127.0.0.0/8
+
+
+Derive network address
+===========================
+
+IPy can transform an IP address into a network address by applying the given
+netmask: ::
+
+ >>> print(IP('127.0.0.1/255.0.0.0', make_net=True))
+ 127.0.0.0/8
+
+This can also be done for existing IP instances: ::
+
+ >>> print(IP('127.0.0.1').make_net('255.0.0.0'))
+ 127.0.0.0/8
+
+
+Convert address to string
+=========================
+
+Nearly all class methods which return a string have an optional
+parameter 'wantprefixlen' which controls if the prefixlen or netmask
+is printed. Per default the prefilen is always shown if the network
+contains more than one address: ::
+
+ wantprefixlen == 0 / None don't return anything 1.2.3.0
+ wantprefixlen == 1 /prefix 1.2.3.0/24
+ wantprefixlen == 2 /netmask 1.2.3.0/255.255.255.0
+ wantprefixlen == 3 -lastip 1.2.3.0-1.2.3.255
+
+You can also change the defaults on an per-object basis by fiddling with
+the class members:
+
+- NoPrefixForSingleIp
+- WantPrefixLen
+
+Examples of string conversions: ::
+
+ >>> IP('10.0.0.0/32').strNormal()
+ '10.0.0.0'
+ >>> IP('10.0.0.0/24').strNormal()
+ '10.0.0.0/24'
+ >>> IP('10.0.0.0/24').strNormal(0)
+ '10.0.0.0'
+ >>> IP('10.0.0.0/24').strNormal(1)
+ '10.0.0.0/24'
+ >>> IP('10.0.0.0/24').strNormal(2)
+ '10.0.0.0/255.255.255.0'
+ >>> IP('10.0.0.0/24').strNormal(3)
+ '10.0.0.0-10.0.0.255'
+ >>> ip = IP('10.0.0.0')
+ >>> print(ip)
+ 10.0.0.0
+ >>> ip.NoPrefixForSingleIp = None
+ >>> print(ip)
+ 10.0.0.0/32
+ >>> ip.WantPrefixLen = 3
+ >>> print(ip)
+ 10.0.0.0-10.0.0.0
+
+Work with multiple networks
+===========================
+
+Simple addition of neighboring netblocks that can be aggregated will yield
+a parent network of both, but more complex range mapping and aggregation
+requires is available with the ``IPSet`` class which will hold any number of
+unique address ranges and will aggregate overlapping ranges. ::
+
+ >>> from IPy import IP, IPSet
+ >>> IP('10.0.0.0/22') - IP('10.0.2.0/24')
+ IPSet([IP('10.0.0.0/23'), IP('10.0.3.0/24')])
+ >>> IPSet([IP('10.0.0.0/23'), IP('10.0.3.0/24'), IP('10.0.2.0/24')])
+ IPSet([IP('10.0.0.0/22')])
+ >>> s = IPSet([IP('10.0.0.0/22')])
+ >>> s.add(IP('192.168.1.0/29'))
+ >>> s
+ IPSet([IP('10.0.0.0/22'), IP('192.168.1.0/29')])
+ >>> s.discard(IP('192.168.1.2'))
+ >>> s
+ IPSet([IP('10.0.0.0/22'), IP('192.168.1.0/31'), IP('192.168.1.3'), IP('192.168.1.4/30')])
+
+``IPSet`` supports the ``set`` method ``isdisjoint``: ::
+
+ >>> s.isdisjoint(IPSet([IP('192.168.0.0/16')]))
+ False
+ >>> s.isdisjoint(IPSet([IP('172.16.0.0/12')]))
+ True
+
+``IPSet`` supports intersection: ::
+
+ >>> s & IPSet([IP('10.0.0.0/8')])
+ IPSet([IP('10.0.0.0/22')])
+
+Compatibility and links
+=======================
+
+IPy 1.00 works on Python version 2.6 - 3.7.
+
+The IP module should work in Python 2.5 as long as the subtraction operation
+is not used. IPSet requires features of the collecitons class which appear
+in Python 2.6, though they can be backported.
+
+Eratta
+======
+
+When using IPv6 addresses, it is best to compare using ``IP().len()``
+instead of ``len(IP)``. Addresses with an integer value > 64 bits can break
+the 2nd method. See http://stackoverflow.com/questions/15650878 for more
+info.
+
+Fuzz testing for ``IPSet`` will throw spurious errors when the ``IPSet`` module
+combines two smaller prefixes into a larger prefix that matches the random
+prefix tested against.
+
+This Python module is under BSD license: see COPYING file.
+
+Further Information might be available at:
+https://github.com/autocracy/python-ipy
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/setup.py new/IPy-1.00/setup.py
--- old/IPy-0.83/setup.py 2015-04-05 02:48:02.000000000 +0200
+++ new/IPy-1.00/setup.py 2019-02-28 01:20:06.000000000 +0100
@@ -4,15 +4,14 @@
#
# - set version in IPy.py
# - set version in setup.py
+# - set version in README.rst
# - run unit test: make
-# - run unit test: make PYTHON=python3
# - set release date in ChangeLog
# - git commit -a
# - git tag -a IPy-x.y -m "tag IPy x.y"
# - git push
# - git push --tags
# - python setup.py register sdist upload
-# - update the website
#
# After the release:
# - set version to n+1 (IPy.py and setup.py)
@@ -24,11 +23,11 @@
import sys
from distutils.core import setup
-VERSION = '0.83'
+VERSION = '1.00'
options = {}
-with open('README') as fp:
+with open('README.rst') as fp:
README = fp.read().strip() + "\n\n"
ChangeLog = (
@@ -63,7 +62,7 @@
long_description=LONG_DESCRIPTION,
author="Maximillian Dornseif",
maintainer="Jeff Ferland",
- maintainer_email="jeff AT storyinmemo.com",
+ maintainer_email="jeff(a)storyinmemo.com",
license="BSD License",
keywords="ipv4 ipv6 netmask",
url=URL,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/test/test.rst new/IPy-1.00/test/test.rst
--- old/IPy-0.83/test/test.rst 2015-04-05 02:48:02.000000000 +0200
+++ new/IPy-1.00/test/test.rst 2019-02-28 00:45:37.000000000 +0100
@@ -29,8 +29,8 @@
>>> d={}
>>> d[IP('0.0.0.0/0')] = 1
>>> d[IP('::/0')] = 2
->>> d
-{IP('::/0'): 2, IP('0.0.0.0/0'): 1}
+>>> sorted(d.items())
+[(IP('0.0.0.0/0'), 1), (IP('::/0'), 2)]
>>> addresses = [IP('0.0.0.0/16'), IP('::7'), IP('::3'), IP('::0'),
... IP('0.0.0.0'), IP('0.0.0.3'), IP('0.0.0.0/0'), IP('::/0')]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/test/test_IPy.py new/IPy-1.00/test/test_IPy.py
--- old/IPy-0.83/test/test_IPy.py 2015-04-05 02:48:02.000000000 +0200
+++ new/IPy-1.00/test/test_IPy.py 2019-02-28 00:46:37.000000000 +0100
@@ -210,7 +210,7 @@
self.assertEqual(IPy._count1Bits(0xffffffff), 32)
self.assertEqual(IPy._count1Bits(0xffffffffffffffffffffffffffffffff), 128)
- def testCount1Bits(self):
+ def testCount0Bits(self):
self.assertEqual(IPy._count0Bits(0), 0)
self.assertEqual(IPy._count0Bits(0xf0), 4)
self.assertEqual(IPy._count0Bits(0xf00), 8)
@@ -612,7 +612,7 @@
self.assertEqual(str(IPy.IP("0.0.0.0/0").netmask()), "0.0.0.0")
self.assertEqual(str(IPy.IP("0.0.0.0/32").netmask()), "255.255.255.255")
self.assertEqual(str(IPy.IP("127.0.0.0/24").netmask()), "255.255.255.0")
- self.assertEqual(str(IPy.IP("2001:1234:5678:1234::/64").netmask()), "ffff:ffff:ffff:ffff:0000:0000:0000:0000")
+ self.assertEqual(str(IPy.IP("2001:1234:5678:1234::/64").netmask()), "ffff:ffff:ffff:ffff::")
def testInt(self):
"""Prefixlen"""
@@ -661,19 +661,6 @@
result = IPy.IP(a).overlaps(b)
self.assertEqual(answer, result, (a, b, result, answer))
- def testNetmask(self):
- """Normal string Output."""
- testValues = [(338770000845734292534325025077361652240, '0xfedcba9876543210fedcba9876543210'),
- (21932261930451111902915077091070067066, '0x108000000000000000080800200c417a'),
- (338958331222012082418099330867817087043, '0xff010000000000000000000000000043'),
- (0, '0x0'),
- (1, '0x1'),
- (4294967295, '0xffffffff'),
- (3588059479, '0xd5dd7157')]
- for (question, answer) in testValues:
- result = IPy.IP(question).strHex(question).lower()
- self.assertEqual(answer, result, (question, result, answer))
-
def testV46map(self):
four = IPy.IP('192.168.1.1')
six = IPy.IP('::ffff:192.168.1.1')
@@ -904,6 +891,9 @@
self.assertEqual(len(IPy.IP('192.168.0.0/24')), 256)
self.assertRaises(ValueError, IPy.IP, '192.168.1.0/42')
+ def testConsistentIP6StrInt(self):
+ self.assertEqual(IPy.IP('11', ipversion=6), IPy.IP(11, ipversion=6))
+
class TestConstrutor(unittest.TestCase):
def testCheckAddrPrefixlenOff(self):
self.assertRaises(ValueError, IPy.IP, 0xffffffff + 1, ipversion=4)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/IPy-0.83/test_doc.py new/IPy-1.00/test_doc.py
--- old/IPy-0.83/test_doc.py 2015-04-05 02:48:02.000000000 +0200
+++ new/IPy-1.00/test_doc.py 2018-02-01 09:32:18.000000000 +0100
@@ -5,7 +5,7 @@
total_failures, total_tests = (0, 0)
print("=== Test file: README ===")
- failure, tests = doctest.testfile('README', optionflags=doctest.ELLIPSIS)
+ failure, tests = doctest.testfile('README.rst', optionflags=doctest.ELLIPSIS)
total_failures += failure
total_tests += tests
1
0