openSUSE Commits
Threads by month
- ----- 2024 -----
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2009 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2008 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2007 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2006 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
January 2016
- 1 participants
- 1523 discussions
Hello community,
here is the log from the commit of package gtksourceview for openSUSE:Factory checked in at 2016-01-21 23:41:06
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/gtksourceview (Old)
and /work/SRC/openSUSE:Factory/.gtksourceview.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "gtksourceview"
Changes:
--------
--- /work/SRC/openSUSE:Factory/gtksourceview/gtksourceview.changes 2015-10-25 19:10:59.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.gtksourceview.new/gtksourceview.changes 2016-01-21 23:41:07.000000000 +0100
@@ -1,0 +2,7 @@
+Thu Jan 14 08:43:50 UTC 2016 - dimstar(a)opensuse.org
+
+- Update to version 3.18.2:
+ + A few bug fixes in *.lang files.
+ + Updated translations.
+
+-------------------------------------------------------------------
Old:
----
gtksourceview-3.18.1.tar.xz
New:
----
gtksourceview-3.18.2.tar.xz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ gtksourceview.spec ++++++
--- /var/tmp/diff_new_pack.GmsyVt/_old 2016-01-21 23:41:09.000000000 +0100
+++ /var/tmp/diff_new_pack.GmsyVt/_new 2016-01-21 23:41:09.000000000 +0100
@@ -1,7 +1,7 @@
#
# spec file for package gtksourceview
#
-# Copyright (c) 2015 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -17,7 +17,7 @@
Name: gtksourceview
-Version: 3.18.1
+Version: 3.18.2
Release: 0
Summary: GTK+ Source Editing Widget
License: LGPL-2.1+
++++++ gtksourceview-3.18.1.tar.xz -> gtksourceview-3.18.2.tar.xz ++++++
++++ 23595 lines of diff (skipped)
1
0
Hello community,
here is the log from the commit of package harfbuzz for openSUSE:Factory checked in at 2016-01-21 23:41:01
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/harfbuzz (Old)
and /work/SRC/openSUSE:Factory/.harfbuzz.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "harfbuzz"
Changes:
--------
--- /work/SRC/openSUSE:Factory/harfbuzz/harfbuzz.changes 2015-12-01 10:03:07.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.harfbuzz.new/harfbuzz.changes 2016-01-21 23:41:02.000000000 +0100
@@ -1,0 +2,13 @@
+Thu Jan 14 08:50:37 UTC 2016 - dimstar(a)opensuse.org
+
+- Update to version 1.1.3:
+ + Ported Indic shaper to Unicode 8.0 data.
+ + Universal Shaping Engine fixes.
+ + Speed up CoreText shaper when font fallback happens in
+ CoreText.
+ + Documentation improvements, thanks to Khaled Hosny.
+ + Very rough directwrite shaper for testing.
+ + Misc bug fixes.
+ + API extensions.
+
+-------------------------------------------------------------------
Old:
----
harfbuzz-1.1.2.tar.bz2
New:
----
harfbuzz-1.1.3.tar.bz2
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ harfbuzz.spec ++++++
--- /var/tmp/diff_new_pack.VIo4BO/_old 2016-01-21 23:41:03.000000000 +0100
+++ /var/tmp/diff_new_pack.VIo4BO/_new 2016-01-21 23:41:03.000000000 +0100
@@ -1,7 +1,7 @@
#
# spec file for package harfbuzz
#
-# Copyright (c) 2015 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -17,7 +17,7 @@
Name: harfbuzz
-Version: 1.1.2
+Version: 1.1.3
Release: 0
Summary: An OpenType text shaping engine
License: MIT
++++++ harfbuzz-1.1.2.tar.bz2 -> harfbuzz-1.1.3.tar.bz2 ++++++
++++ 29244 lines of diff (skipped)
1
0
Hello community,
here is the log from the commit of package smartmontools for openSUSE:Factory checked in at 2016-01-21 23:40:53
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/smartmontools (Old)
and /work/SRC/openSUSE:Factory/.smartmontools.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "smartmontools"
Changes:
--------
--- /work/SRC/openSUSE:Factory/smartmontools/smartmontools.changes 2015-03-16 09:37:58.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.smartmontools.new/smartmontools.changes 2016-01-21 23:40:55.000000000 +0100
@@ -1,0 +2,43 @@
+Thu Jan 14 09:06:55 UTC 2016 - meissner(a)suse.com
+
+- replaced keyring with 2015/2016 key.
+ pub rsa2048/0xDF0F1A49C4A4903A 2014-12-31 [verfällt: 2016-12-31]
+ Schl.-Fingerabdruck = F41F 01FC 0784 4958 4FFC CF57 DF0F 1A49 C4A4 903A
+ uid [ unbekannt] Smartmontools Signing Key (through 2016) <smartmontools-support(a)lists.sourceforge.net>
+
+
+-------------------------------------------------------------------
+Sun Jan 10 14:09:14 UTC 2016 - p.drouand(a)gmail.com
+
+- Update to version 6.0.4
+ * Device type '-d usbprolific' for Prolific PL2571/277x USB bridges.
+ * SAT: Support for ATA registers returned in fixed format sense data.
+ * smartctl '-i' and '--identify': ATA ACS-4 and SATA 3.2 enhancements.
+ * smartctl '-l xerror': Support for logs with more than 255 pages.
+ * smartctl '-l devstat': Prints ACS-3 DSN flags.
+ * smartctl '-l devstat': Read via SMART command if GP log is not
+ available.
+ * smartctl '-l scttempsts': Prints SCT SMART STATUS (ACS-4) and
+ vendor specific SCT bytes.
+ * configure option '--with-systemdenvfile=auto' as new default.
+ * configure options '--disable-drivedb', '--enable-savestates'
+ and '--enable-attributelog' are deprecated.
+ * Corresponding '--with-*' options are enhanced accordingly.
+ * Configure option '--with-docdir' is deprecated.
+ * autoconf < 2.60 and automake < 1.10 are deprecated.
+ (all of the above still work but a warning is printed if used)
+ * HDD, SSD and USB additions to drive database.
+ * Linux: AACRAID fixes, SMART STATUS should work now.
+ * Linux: '/dev/megaraid_sas_ioctl_node' fd leak fix.
+ * Darwin: '-S' command implemented, '-l devstat' should work now.
+ * Cygwin: Compile fix.
+ * Windows: Device type '-d aacraid' for AACRAID controllers.
+ * Windows: SAT autodetection based on IOCTL_STORAGE_QUERY_PROPERTY.
+ * Windows installer: Fix possible loss of user PATH environment variable.
+- Update smartmontools-drivedb.h to the latest version from the
+ upstream branch RELEASE_6_4_DRIVEDB.
+- Cleanup and remove conditional macros; the package doesn't build
+ for SLE anyway
+- Remove smartmontools-default-enabled.patch; not needed anymore
+
+-------------------------------------------------------------------
Old:
----
smartmontools-6.3.tar.gz
smartmontools-6.3.tar.gz.asc
smartmontools-default-enabled.patch
New:
----
smartmontools-6.4.tar.gz
smartmontools-6.4.tar.gz.asc
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ smartmontools.spec ++++++
--- /var/tmp/diff_new_pack.lkCgSP/_old 2016-01-21 23:40:57.000000000 +0100
+++ /var/tmp/diff_new_pack.lkCgSP/_new 2016-01-21 23:40:57.000000000 +0100
@@ -1,7 +1,7 @@
#
# spec file for package smartmontools
#
-# Copyright (c) 2015 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -17,10 +17,10 @@
Name: smartmontools
-Version: 6.3
+Version: 6.4
Release: 0
Source: http://sourceforge.net/projects/smartmontools/files/smartmontools/%{version…
-Source1: %{name}-%{version}.tar.gz.asc
+Source1: http://sourceforge.net/projects/smartmontools/files/smartmontools/%{version…
Source2: smartmontools.sysconfig
Source3: %{name}-rpmlintrc
Source4: %{name}.keyring
@@ -30,32 +30,22 @@
Source6: smartmontools-drivedb_h-update.sh
# SOURCE-FEATURE-UPSTREAM smartmontools-drivedb.h bnc851276 sbrabec(a)suse.cz -- Update of drivedb.h. (Following line is handled by smartmontools-drivedb_h-update.sh.)
Source7: smartmontools-drivedb.h
-# PATCH-FEATURE-UNITEDLINUX smartmontools-default-enabled.patch sbrabec(a)suse.cz -- Enable smartd by default.
-Patch1: smartmontools-default-enabled.patch
# PATCH-FEATURE-OPENSUSE smartmontools-suse-default.patch sbrabec(a)suse.cz -- Define smart SUSE defaults.
Patch4: smartmontools-suse-default.patch
# PATCH-FIX-OPENSUSE smartmontools-var-lock-subsys.patch sbrabec(a)suse.cz -- Do not use unsupported /var/lock/subsys.
Patch10: smartmontools-var-lock-subsys.patch
# PATCH-FEATURE-OPENSUSE smartd-service-novm.patch crrodriguez(a)opensuse.org -- Do not start smartd in virtual environment.
Patch11: smartd-service-novm.patch
-PreReq: %fillup_prereq
-PreReq: %insserv_prereq
+Requires(pre): %fillup_prereq
# Needed by generate_smartd_opt:
-PreReq: coreutils
-%if 0%{?suse_version} <= 1100
-Requires: powersave
-%endif
+Requires(pre): coreutils
Url: http://smartmontools.sourceforge.net/
BuildRequires: gcc-c++
BuildRoot: %{_tmppath}/%{name}-%{version}-build
-%if 0%{?suse_version} >= 1110
-BuildRequires: libselinux-devel
-%endif
-%if 0%{?suse_version} >= 1130
BuildRequires: libcap-ng-devel
+BuildRequires: libselinux-devel
BuildRequires: pkgconfig(systemd)
%{?systemd_requires}
-%endif
Summary: Monitor for SMART devices
License: GPL-2.0+
Group: Hardware/Other
@@ -78,7 +68,6 @@
cp -a %{SOURCE2} %{SOURCE5} .
# Following line is handled by smartmontools-drivedb_h-update.sh.
cp -a %{SOURCE7} drivedb.h.new
-%patch1
%patch4
%patch10 -p1
%patch11
@@ -105,19 +94,13 @@
fi
%build
-%if 0%{?suse_version} > 1000
export CFLAGS="%{optflags} $(getconf LFS_CFLAGS) -fPIE"
export CXXFLAGS="%{optflags} -fPIE $(getconf LFS_CFLAGS)"
export LDFLAGS="-pie"
-%endif
%configure\
--with-docdir=%{_defaultdocdir}/%{name}\
-%if 0%{?suse_version} >= 1110
--with-selinux\
-%endif
-%if 0%{?suse_version} >= 1130
--with-systemdsystemunitdir=%{_unitdir}\
-%endif
--enable-drivedb\
--enable-savestates\
--enable-attributelog
@@ -127,29 +110,17 @@
%install
%makeinstall
-%if 0%{?suse_version} <= 1100 || 0%{?suse_version} > 1140
mkdir -p %{buildroot}%{_prefix}/lib/smartmontools
-%endif
-%if 0%{?suse_version} <= 1100
-cp examplescripts/Example4 %{buildroot}%{_prefix}/lib/smartmontools/smart-notify
-chmod +x %{buildroot}%{_prefix}/lib/smartmontools/smart-notify
-%endif
mkdir -p %{buildroot}%{_localstatedir}/adm/fillup-templates
cp smartmontools.sysconfig %{buildroot}%{_localstatedir}/adm/fillup-templates/sysconfig.smartmontools
mkdir -p %{buildroot}%{_localstatedir}/lib/smartmontools
-%if 0%{?suse_version} > 1140
touch %{buildroot}%{_localstatedir}/lib/smartmontools/smartd_opts
install generate_smartd_opts %{buildroot}%{_prefix}/lib/smartmontools/
-%endif
cat >%{buildroot}%{_sysconfdir}/smart_drivedb.h <<EOF
/* smart_drivedb.h: Custom drive database. See also %{_datadir}/smartmontools/drivedb.h. */
EOF
-%if 0%{?suse_version} >= 1130
cp smartd.service %{buildroot}/%{_unitdir}
ln -sf %{_sbindir}/service %{buildroot}%{_sbindir}/rcsmartd
-%else
-ln -sf ../../etc/init.d/smartd %{buildroot}%{_sbindir}/rcsmartd
-%endif
# INSTALL file is intended only for packagers.
rm %{buildroot}%{_defaultdocdir}/%{name}/INSTALL
# Create empty ghost files for files created by update-smart-drivedb.
@@ -168,13 +139,8 @@
# Fail if there is no BRANCH= in update-smart-drivedb
grep -q "^BRANCH=\"[^\"]*\"$" update-smart-drivedb
-%clean
-rm -rf %{buildroot}
-
%pre
-%if 0%{?suse_version} >= 1130
%service_add_pre smartd.service
-%endif
# Intelligent drivedb.h update, part 1.
# Extract drivedb.h branch for installed version. We will need it in %%post.
if test -f %{_sbindir}/update-smart-drivedb ; then
@@ -194,20 +160,11 @@
%post
# First prepare sysconfig.
-%if 0%{?suse_version} >= 1130
%{fillup_only}
-%else
-%{fillup_and_insserv -f -y smartd}
-%endif
-
-%if 0%{?suse_version} > 1140
# Then generate initial %%{_localstatedir}/lib/smartmontools/smartd_opts needed by smartd.service.
SMARTD_SKIP_INIT=1 %{_prefix}/lib/smartmontools/generate_smartd_opts
-%endif
# No start by default here.. belongs to -presets packages
-%if 0%{?suse_version} >= 1130
%service_add_post smartd.service
-%endif
# Intelligent drivedb.h update, part 2.
# Now we have the old system drivedb.h.rpmsave and the new packaged drivedb.h.
if test -f %{_datadir}/smartmontools/drivedb.h.rpmsave ; then
@@ -245,19 +202,10 @@
rm -f %{_datadir}/smartmontools/drivedb.h.branch.rpmtemp
%preun
-%if 0%{?suse_version} >= 1130
%service_del_preun smartd.service
-%else
-%{stop_on_removal smartd}
-%endif
%postun
-%if 0%{?suse_version} >= 1130
%service_del_postun smartd.service
-%else
-%{restart_on_update smartd}
-%{insserv_cleanup}
-%endif
# Clean all attrlogs and state files.
if test "$1" = 0 ; then
rm -rf %{_localstatedir}/lib/smartmontools
@@ -273,17 +221,9 @@
%ghost %{_datadir}/smartmontools/drivedb.h.old
%doc %{_mandir}/man*/*
%dir %{_localstatedir}/lib/smartmontools
-%if 0%{?suse_version} > 1140
%ghost %{_localstatedir}/lib/smartmontools/smartd_opts
-%endif
-%if 0%{?suse_version} <= 1100 || 0%{?suse_version} > 1140
%{_prefix}/lib/smartmontools
-%endif
-%if 0%{?suse_version} >= 1130
%{_unitdir}/*
-%else
-%{_sysconfdir}/init.d/smartd
-%endif
%{_sbindir}/*
%config(noreplace) %{_sysconfdir}/smart_drivedb.h
%config(noreplace) %{_sysconfdir}/smartd.conf
++++++ smartmontools-6.3.tar.gz -> smartmontools-6.4.tar.gz ++++++
++++ 8998 lines of diff (skipped)
++++++ smartmontools-drivedb.h ++++++
++++ 1390 lines (skipped)
++++ between /work/SRC/openSUSE:Factory/smartmontools/smartmontools-drivedb.h
++++ and /work/SRC/openSUSE:Factory/.smartmontools.new/smartmontools-drivedb.h
++++++ smartmontools.keyring ++++++
--- /var/tmp/diff_new_pack.lkCgSP/_old 2016-01-21 23:40:57.000000000 +0100
+++ /var/tmp/diff_new_pack.lkCgSP/_new 2016-01-21 23:40:57.000000000 +0100
@@ -1,23 +1,31 @@
-pub 2048R/8F6ED8AA 2012-12-31 [expires: 2014-12-31]
-uid Smartmontools Signing Key (through 2014) <smartmontools-support(a)lists.sourceforge.net>
-
-----BEGIN PGP PUBLIC KEY BLOCK-----
-Version: GnuPG v2.0.19 (GNU/Linux)
-mQENBFDh0moBCACl0TpfTOv6AiwBhtr1khbyRYhf4uXvTA2W5Dy5/aTgfkkj7M85
-L6E/7MaVeR6Z3qGQTd5RtN61VKybe5x1CETk0LXelNyaE8In2HnrKCk7v01o4UM7
-1bjPa9iU87K5hGJQNAEL4TWVQs5OAkzi39els5CSaboPYtQ00vbuz1e93ZALLsOO
-bdpv9DPfvm27ZGt9kNX2SAYHlVMO/hqwI8kQ2uhw2vE20TNFUMzlajtzTYt+3YaA
-BBR+V5BFJSqSej4BMldopY4x1RS3EQyw5UxT/pOGAaI7PatKSExp80Zp9fJSjxUv
-qS41N1E5cOu19ol+CJoDDKPHh3TV2B5vWnkpABEBAAG0VlNtYXJ0bW9udG9vbHMg
-U2lnbmluZyBLZXkgKHRocm91Z2ggMjAxNCkgPHNtYXJ0bW9udG9vbHMtc3VwcG9y
-dEBsaXN0cy5zb3VyY2Vmb3JnZS5uZXQ+iQE+BBMBAgAoBQJQ4dJqAhsDBQkDwmcA
-BgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgAAKCRCxnPjmj27YqkFtB/9rKXMzUjKY
-h6FmrCH7hLOCGYV1iTa805wOy/zx7fgIKqInkEjAvPhObyLgVe+F+kFI9M1sqmR0
-JlQd1Q4bc8o08QAt1F+jTO3P+Y+2EX9TnPbQBG2VDUhDds0Z/eqKCPpx0w+geAF6
-Kfjb7F1Ixizwzec2Fr4xn7682MCKJFu+j0QzWDJ7inkoE+V2Vc3fBeoqwH2h9xOD
-boMD4HE7hGvysyhb3xfMIR4g5B7W9RpGe/A9rLY3yvNToYx9QD35DoUXqfz1R9WG
-PGxauujeU9xVKQjhJkj6/yG0VOT42DIpqGWLArUmYmwAo71zcdhLVl863/7CtJmw
-iMCJYgIePjz3
-=/RUR
+mQENBFSkITsBCADgmG6B4Pj9ihLdcRwVjvQbbUId32X6znN5LDNF6mEa5BZRrJC5
+JsKeiFyLHzpFxuhfj9KxRhRxgZyTWLQge5E9FKZL6SHsYHaRrekDqRD61O/eGxeK
+1DO/qJogDn0vG9Pm2FXe4bR2lrjEV4BEDweIBHqCvSA0rPnV2X8x4nnXBoLuGw3g
+IlgYu6TZdfKlu22kKoUgcmqrtHH5zeuRoxDpInsD1l7hXkQnhMgxBsN9dANNvron
+Dfxg9o3+bLE8JbvI24RiY/hrdhtavd7DZ0zUj+zDZx7Di/Li4GpLIsEZ68hQzH92
+dvv/+Taa5GWgEUm2ZsO4Nv8devXNDB7Zo49FABEBAAG0VlNtYXJ0bW9udG9vbHMg
+U2lnbmluZyBLZXkgKHRocm91Z2ggMjAxNikgPHNtYXJ0bW9udG9vbHMtc3VwcG9y
+dEBsaXN0cy5zb3VyY2Vmb3JnZS5uZXQ+iQE+BBMBAgAoBQJUpCE7AhsDBQkDw7iA
+BgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgAAKCRDfDxpJxKSQOkb6B/9ACI12l2/2
+YU6/BxJKaG1do9rWlLxppoq15mlHEi/GhohWFJOKX0U1RQsWkbG7f1WUGI0c0Tk3
+6ufjKOVeHdgXgIMWFxcrAvk/0szKPD4brODCRdpsuvdaXZLX/L0cDYxn2nUDuzok
+Vdh6I9/9gXrXxQmcWWzbSFkshjZBfhhgPtS5DqktQmoGHXZCza6wxlY8zxM0wwQn
+BPAx98Jf6vCijlZ2tV3uLLTnpCSkRwWxlppjsYM3AKHsmlYcfBPfiWzAq0SQz1A0
+18sLXKMnRI4inG5ZVq+bqvt2AyphkokKWnsE5DSw6gsjKlUNZoWB+jrva/sCP/Gw
+w2FAZ0spFTLdiQEcBBMBAgAGBQJUpCNYAAoJELGc+OaPbtiqc9cH/3r4MCQlngJK
+8OqBXQV3J8e2eThhXZgbmubU8nG9jIgyoQNlb4iFyQaDCELJR7Om+IWnRkp27nD+
+oMdZCyYY12gHUFpbdb9HEgTUjg4i62oJKRCBzi/quROGn4WNjkz369z3xhjotSeo
+cHDwuvkCMeSDqw6zVGSk1HVhtPA3pd/pFCUU28gqOgZJbzGlc/onO6U3DoV2eY2s
+kG7HBEqUGdpr5wWZM6nVoNXW7JuIIK9U0bMBssqqiKE62FyK5rC7J8rAkFjKduAr
+rTkrtR5+TUXe+QarxP+1RHO+GKdVA64YTVt7lo3bHK/HvByaEeT+iqhdVSpEG3qb
+fodLvc7FOWKJARwEEwECAAYFAlSkI3oACgkQL83sC9OvGquWSwf/V64ZqsbXDUjm
+fnrZ/GS5CsS0lipP8JAfHeWWsfBvmk3fN+XKZgdnayZJZpDABGGABggiX6oyovps
+ZlX7dwV3RDrT3ft5JPEa+7ixMKvFY5b05kft+embXKt+nBvHoXbaajGvQs4qY7KW
+94dRNjtIPXur3In7+jbgIcqaEw6ak9kMBvqb51UT32XDvsBBncOt/vWPj62+bUmJ
+eYSNv2mYPNv2CIMFsAG9DsDaDAdyuPFBv1UaOpTG5QLI80MHa4XXERmhhSGAU4Ap
+ZlB83p1Af810IeAte/kFDLHwssfimNkeHL0AIjkzSXYQk0hVhpJnsANkxoNN7oSM
+V7pCYnuC7Q==
+=M4lM
-----END PGP PUBLIC KEY BLOCK-----
1
0
Hello community,
here is the log from the commit of package ncurses for openSUSE:Factory checked in at 2016-01-21 23:40:30
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/ncurses (Old)
and /work/SRC/openSUSE:Factory/.ncurses.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "ncurses"
Changes:
--------
--- /work/SRC/openSUSE:Factory/ncurses/ncurses.changes 2015-12-13 09:34:23.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.ncurses.new/ncurses.changes 2016-01-21 23:40:32.000000000 +0100
@@ -1,0 +2,39 @@
+Fri Jan 8 10:09:31 UTC 2016 - werner(a)suse.de
+
+- Add ncurses patch 20160102
+ + modify ncurses c/C color test-screens to take advantage of wide
+ screens, reducing the number of lines used for 88- and 256-colors.
+ + minor refinement to check versus ncv to ignore two parameters of
+ SGR 38 and 48 when those come from color-capabilities.
+- Add ncurses patch 20151226
+ + add check in tic for use of bold, etc., video attributes in the
+ color capabilities, accounting whether the feature is listed in ncv.
+ + add check in tic for conflict between ritm, rmso, rmul versus sgr0.
+- Add ncurses patch 20151219
+ + add a paragraph to curs_getch.3x discussing key naming (discussion
+ with James Crippen).
+ + amend workaround for Solaris vs line-drawing to take the configure
+ check into account.
+ + add a configure check for wcwidth() versus the ncurses line-drawing
+ characters, to use in special-casing systems such as Solaris.
+- Add ncurses patch 20151212
+ + improve CF_XOPEN_CURSES macro used in test/configure, to define as
+ needed NCURSES_WIDECHAR for platforms where _XOPEN_SOURCE_EXTENDED
+ does not work. Also modified the test program to ensure that if
+ building with ncurses, that the cchar_t type is checked, since that
+ normally is since 20111030 ifdef'd depending on this test.
+ + improve 20121222 workaround for broken acs, letting Solaris "work"
+ in spite of its misconfigured wcwidth which marks all of the line
+ drawing characters as double-width.
+- Add ncurses patch 20151205
+ + update form_cursor.3x, form_post.3x, menu_attributes.3x to list
+ function names in NAME section (patch by Jason McIntyre).
+ + minor fixes to manpage NAME/SYNOPSIS sections to consistently use
+ rule that either all functions which are prototyped in SYNOPSIS are
+ listed in the NAME section, or the manual-page name is the sole item
+ listed in the NAME section. The latter is used to reduce clutter,
+ e.g., for the top-level library manual pages as well as for certain
+ feature-pages such as SP-funcs and threading (prompted by patches by
+ Jason McIntyre).
+
+-------------------------------------------------------------------
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ ncurses.spec ++++++
--- /var/tmp/diff_new_pack.Ze5tgC/_old 2016-01-21 23:40:33.000000000 +0100
+++ /var/tmp/diff_new_pack.Ze5tgC/_new 2016-01-21 23:40:33.000000000 +0100
@@ -1,7 +1,7 @@
#
# spec file for package ncurses
#
-# Copyright (c) 2015 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
++++++ ncurses-6.0-patches.tar.bz2 ++++++
++++ 16739 lines of diff (skipped)
++++++ ncurses-6.0.dif ++++++
++++ 606 lines (skipped)
++++ between /work/SRC/openSUSE:Factory/ncurses/ncurses-6.0.dif
++++ and /work/SRC/openSUSE:Factory/.ncurses.new/ncurses-6.0.dif
1
0
Hello community,
here is the log from the commit of package binutils for openSUSE:Factory checked in at 2016-01-21 23:40:22
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/binutils (Old)
and /work/SRC/openSUSE:Factory/.binutils.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "binutils"
Changes:
--------
--- /work/SRC/openSUSE:Factory/binutils/binutils.changes 2015-10-19 22:14:00.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.binutils.new/binutils.changes 2016-01-21 23:40:24.000000000 +0100
@@ -1,0 +2,6 @@
+Wed Jan 13 08:31:17 UTC 2016 - schwab(a)suse.de
+
+- gold-relocate-tls.patch: Fix internal error when applying TLSDESC
+ relocations with no TLS segment
+
+-------------------------------------------------------------------
cross-aarch64-binutils.changes: same change
cross-arm-binutils.changes: same change
cross-avr-binutils.changes: same change
cross-epiphany-binutils.changes: same change
cross-hppa-binutils.changes: same change
cross-hppa64-binutils.changes: same change
cross-i386-binutils.changes: same change
cross-ia64-binutils.changes: same change
cross-m68k-binutils.changes: same change
cross-mips-binutils.changes: same change
cross-ppc-binutils.changes: same change
cross-ppc64-binutils.changes: same change
cross-ppc64le-binutils.changes: same change
cross-rx-binutils.changes: same change
cross-s390-binutils.changes: same change
cross-s390x-binutils.changes: same change
cross-sparc-binutils.changes: same change
cross-sparc64-binutils.changes: same change
cross-spu-binutils.changes: same change
cross-x86_64-binutils.changes: same change
New:
----
gold-relocate-tls.patch
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ binutils.spec ++++++
--- /var/tmp/diff_new_pack.ITklsP/_old 2016-01-21 23:40:29.000000000 +0100
+++ /var/tmp/diff_new_pack.ITklsP/_new 2016-01-21 23:40:29.000000000 +0100
@@ -1,7 +1,7 @@
#
# spec file for package binutils
#
-# Copyright (c) 2015 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -106,6 +106,7 @@
Patch32: gold-arm64-abi-pagesize.patch
Patch33: s390-troo-insn-type.patch
Patch34: aarch64-common-pagesize.patch
+Patch35: gold-relocate-tls.patch
Patch90: cross-avr-nesc-as.patch
Patch92: cross-avr-omit_section_dynsym.patch
Patch93: cross-avr-size.patch
@@ -190,6 +191,7 @@
%patch32 -p1
%patch33 -p1
%patch34 -p1
+%patch35 -p1
%if "%{TARGET}" == "avr"
cp gas/config/tc-avr.h gas/config/tc-avr-nesc.h
%patch90
cross-aarch64-binutils.spec: same change
cross-arm-binutils.spec: same change
cross-avr-binutils.spec: same change
cross-epiphany-binutils.spec: same change
cross-hppa-binutils.spec: same change
cross-hppa64-binutils.spec: same change
cross-i386-binutils.spec: same change
cross-ia64-binutils.spec: same change
cross-m68k-binutils.spec: same change
cross-mips-binutils.spec: same change
cross-ppc-binutils.spec: same change
cross-ppc64-binutils.spec: same change
cross-ppc64le-binutils.spec: same change
cross-rx-binutils.spec: same change
cross-s390-binutils.spec: same change
cross-s390x-binutils.spec: same change
cross-sparc-binutils.spec: same change
cross-sparc64-binutils.spec: same change
cross-spu-binutils.spec: same change
cross-x86_64-binutils.spec: same change
++++++ gold-relocate-tls.patch ++++++
From d21f123b0ead1806416cf0dafae12bec4cca8920 Mon Sep 17 00:00:00 2001
From: Cary Coutant <ccoutant(a)gmail.com>
Date: Mon, 11 Jan 2016 23:57:44 -0800
Subject: [PATCH] Fix internal error when applying TLSDESC relocations with no
TLS segment.
gold/
PR gold/19353
* aarch64.cc (Target_aarch64::relocate_tls): Don't insist that
we have a TLS segment for GD-to-IE optimization.
* i386.cc (Target_i386::tls_gd_to_ie): Remove tls_segment parameter.
Adjust all calls.
(Target_i386::tls_desc_gd_to_ie): Likewise.
(Target_i386::relocate_tls): Don't insist that we have a TLS segment
for TLSDESC GD-to-IE optimizations.
* x86_64.cc (Target_x86_64::tls_gd_to_ie): Remove tls_segment parameter.
Adjust all calls.
(Target_x86_64::tls_desc_gd_to_ie): Likewise.
(Target_x86_64::relocate_tls): Don't insist that we have a TLS segment
for TLSDESC GD-to-IE optimizations.
---
gold/ChangeLog | 16 ++++++++++++++++
gold/aarch64.cc | 6 ------
gold/i386.cc | 14 ++------------
gold/x86_64.cc | 14 ++------------
4 files changed, 20 insertions(+), 30 deletions(-)
Index: binutils-2.25.0/gold/aarch64.cc
===================================================================
--- binutils-2.25.0.orig/gold/aarch64.cc
+++ binutils-2.25.0/gold/aarch64.cc
@@ -3689,12 +3689,6 @@ Target_aarch64<size, big_endian>::Reloca
}
if (tlsopt == tls::TLSOPT_TO_IE)
{
- if (tls_segment == NULL)
- {
- gold_assert(parameters->errors()->error_count() > 0
- || issue_undefined_symbol_error(gsym));
- return aarch64_reloc_funcs::STATUS_BAD_RELOC;
- }
return tls_desc_gd_to_ie(relinfo, target, rela, r_type,
view, psymval, got_entry_address,
address);
Index: binutils-2.25.0/gold/i386.cc
===================================================================
--- binutils-2.25.0.orig/gold/i386.cc
+++ binutils-2.25.0/gold/i386.cc
@@ -654,7 +654,6 @@ class Target_i386 : public Sized_target<
// Do a TLS General-Dynamic to Initial-Exec transition.
inline void
tls_gd_to_ie(const Relocate_info<32, false>*, size_t relnum,
- Output_segment* tls_segment,
const elfcpp::Rel<32, false>&, unsigned int r_type,
elfcpp::Elf_types<32>::Elf_Addr value,
unsigned char* view,
@@ -673,7 +672,6 @@ class Target_i386 : public Sized_target<
// transition.
inline void
tls_desc_gd_to_ie(const Relocate_info<32, false>*, size_t relnum,
- Output_segment* tls_segment,
const elfcpp::Rel<32, false>&, unsigned int r_type,
elfcpp::Elf_types<32>::Elf_Addr value,
unsigned char* view,
@@ -2944,7 +2942,7 @@ Target_i386::Relocate::relocate_tls(cons
}
if (optimized_type == tls::TLSOPT_TO_IE)
{
- this->tls_gd_to_ie(relinfo, relnum, tls_segment, rel, r_type,
+ this->tls_gd_to_ie(relinfo, relnum, rel, r_type,
got_offset, view, view_size);
break;
}
@@ -3006,13 +3004,7 @@ Target_i386::Relocate::relocate_tls(cons
}
if (optimized_type == tls::TLSOPT_TO_IE)
{
- if (tls_segment == NULL)
- {
- gold_assert(parameters->errors()->error_count() > 0
- || issue_undefined_symbol_error(gsym));
- return;
- }
- this->tls_desc_gd_to_ie(relinfo, relnum, tls_segment, rel, r_type,
+ this->tls_desc_gd_to_ie(relinfo, relnum, rel, r_type,
got_offset, view, view_size);
break;
}
@@ -3244,7 +3236,6 @@ Target_i386::Relocate::tls_gd_to_le(cons
inline void
Target_i386::Relocate::tls_gd_to_ie(const Relocate_info<32, false>* relinfo,
size_t relnum,
- Output_segment*,
const elfcpp::Rel<32, false>& rel,
unsigned int,
elfcpp::Elf_types<32>::Elf_Addr value,
@@ -3350,7 +3341,6 @@ inline void
Target_i386::Relocate::tls_desc_gd_to_ie(
const Relocate_info<32, false>* relinfo,
size_t relnum,
- Output_segment*,
const elfcpp::Rel<32, false>& rel,
unsigned int r_type,
elfcpp::Elf_types<32>::Elf_Addr value,
Index: binutils-2.25.0/gold/x86_64.cc
===================================================================
--- binutils-2.25.0.orig/gold/x86_64.cc
+++ binutils-2.25.0/gold/x86_64.cc
@@ -798,7 +798,6 @@ class Target_x86_64 : public Sized_targe
// Do a TLS General-Dynamic to Initial-Exec transition.
inline void
tls_gd_to_ie(const Relocate_info<size, false>*, size_t relnum,
- Output_segment* tls_segment,
const elfcpp::Rela<size, false>&, unsigned int r_type,
typename elfcpp::Elf_types<size>::Elf_Addr value,
unsigned char* view,
@@ -817,7 +816,6 @@ class Target_x86_64 : public Sized_targe
// Do a TLSDESC-style General-Dynamic to Initial-Exec transition.
inline void
tls_desc_gd_to_ie(const Relocate_info<size, false>*, size_t relnum,
- Output_segment* tls_segment,
const elfcpp::Rela<size, false>&, unsigned int r_type,
typename elfcpp::Elf_types<size>::Elf_Addr value,
unsigned char* view,
@@ -3613,7 +3611,7 @@ Target_x86_64<size>::Relocate::relocate_
if (optimized_type == tls::TLSOPT_TO_IE)
{
value = target->got_plt_section()->address() + got_offset;
- this->tls_gd_to_ie(relinfo, relnum, tls_segment, rela, r_type,
+ this->tls_gd_to_ie(relinfo, relnum, rela, r_type,
value, view, address, view_size);
break;
}
@@ -3680,14 +3678,8 @@ Target_x86_64<size>::Relocate::relocate_
}
if (optimized_type == tls::TLSOPT_TO_IE)
{
- if (tls_segment == NULL)
- {
- gold_assert(parameters->errors()->error_count() > 0
- || issue_undefined_symbol_error(gsym));
- return;
- }
value = target->got_plt_section()->address() + got_offset;
- this->tls_desc_gd_to_ie(relinfo, relnum, tls_segment,
+ this->tls_desc_gd_to_ie(relinfo, relnum,
rela, r_type, value, view, address,
view_size);
break;
@@ -3843,7 +3835,6 @@ inline void
Target_x86_64<size>::Relocate::tls_gd_to_ie(
const Relocate_info<size, false>* relinfo,
size_t relnum,
- Output_segment*,
const elfcpp::Rela<size, false>& rela,
unsigned int,
typename elfcpp::Elf_types<size>::Elf_Addr value,
@@ -3955,7 +3946,6 @@ inline void
Target_x86_64<size>::Relocate::tls_desc_gd_to_ie(
const Relocate_info<size, false>* relinfo,
size_t relnum,
- Output_segment*,
const elfcpp::Rela<size, false>& rela,
unsigned int r_type,
typename elfcpp::Elf_types<size>::Elf_Addr value,
1
0
Hello community,
here is the log from the commit of package bzip2 for openSUSE:Factory checked in at 2016-01-21 23:40:18
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/bzip2 (Old)
and /work/SRC/openSUSE:Factory/.bzip2.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "bzip2"
Changes:
--------
--- /work/SRC/openSUSE:Factory/bzip2/bzip2.changes 2015-01-20 21:54:27.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.bzip2.new/bzip2.changes 2016-01-21 23:40:19.000000000 +0100
@@ -1,0 +2,7 @@
+Wed Jan 13 08:12:20 UTC 2016 - idonmez(a)suse.com
+
+- Remove bzip2-faster.patch, it causes a crash with libarchive and
+ valgrind points out uninitialized memory. See
+ https://github.com/libarchive/libarchive/issues/637#issuecomment-170612576
+
+-------------------------------------------------------------------
Old:
----
bzip2-faster.patch
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ bzip2.spec ++++++
--- /var/tmp/diff_new_pack.5FqHGz/_old 2016-01-21 23:40:21.000000000 +0100
+++ /var/tmp/diff_new_pack.5FqHGz/_new 2016-01-21 23:40:21.000000000 +0100
@@ -1,7 +1,7 @@
#
# spec file for package bzip2
#
-# Copyright (c) 2015 SUSE LINUX Products GmbH, Nuernberg, Germany.
+# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -32,10 +32,9 @@
# PATCH-FEATURE-OPENSUSE bzip2-1.0.6-autoconfiscated.patch sbrabec(a)suse.cz -- Convert to a standard autoconf based package.
Patch0: http://ftp.suse.com/pub/people/sbrabec/bzip2/for_downstream/bzip2-1.0.6-aut…
Patch1: bzip2-1.0.6-fix-bashisms.patch
-Patch3: bzip2-faster.patch
-Patch5: bzip2-unsafe_strcpy.patch
-Patch6: bzip2-point-to-doc-pkg.patch
-Patch7: bzip2-ocloexec.patch
+Patch2: bzip2-unsafe_strcpy.patch
+Patch3: bzip2-point-to-doc-pkg.patch
+Patch4: bzip2-ocloexec.patch
BuildRequires: autoconf >= 2.57
BuildRequires: libtool
BuildRequires: pkg-config
@@ -78,10 +77,9 @@
%setup -q
%patch0
%patch1 -p1
-%patch3
-%patch5
-%patch6 -p1
-%patch7
+%patch2
+%patch3 -p1
+%patch4
autoreconf -fiv
%build
1
0
Hello community,
here is the log from the commit of package mkdud for openSUSE:Factory checked in at 2016-01-20 09:55:12
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/mkdud (Old)
and /work/SRC/openSUSE:Factory/.mkdud.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "mkdud"
Changes:
--------
--- /work/SRC/openSUSE:Factory/mkdud/mkdud.changes 2016-01-16 11:56:59.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.mkdud.new/mkdud.changes 2016-01-20 09:55:21.000000000 +0100
@@ -1,0 +2,6 @@
+Tue Jan 19 13:46:13 UTC 2016 - snwint(a)suse.com
+
+- better public key file detection
+- 1.23
+
+-------------------------------------------------------------------
Old:
----
mkdud-1.22.tar.xz
New:
----
mkdud-1.23.tar.xz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ mkdud.spec ++++++
--- /var/tmp/diff_new_pack.3rN5m5/_old 2016-01-20 09:55:22.000000000 +0100
+++ /var/tmp/diff_new_pack.3rN5m5/_new 2016-01-20 09:55:22.000000000 +0100
@@ -23,7 +23,7 @@
Summary: Create driver update from rpms
License: GPL-3.0+
Group: Hardware/Other
-Version: 1.22
+Version: 1.23
Release: 0
Source: %{name}-%{version}.tar.xz
Url: https://github.com/wfeldt/mkdud
++++++ mkdud-1.22.tar.xz -> mkdud-1.23.tar.xz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mkdud-1.22/VERSION new/mkdud-1.23/VERSION
--- old/mkdud-1.22/VERSION 2016-01-14 15:15:40.000000000 +0100
+++ new/mkdud-1.23/VERSION 2016-01-19 14:45:13.000000000 +0100
@@ -1 +1 @@
-1.22
+1.23
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mkdud-1.22/changelog new/mkdud-1.23/changelog
--- old/mkdud-1.22/changelog 2016-01-14 15:15:40.000000000 +0100
+++ new/mkdud-1.23/changelog 2016-01-19 14:45:13.000000000 +0100
@@ -1,3 +1,6 @@
+2016-01-19: 1.23
+ - better public key file detection
+
2016-01-14: 1.22
- added --obs-keys option to retrieve necessary obs project keys
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/mkdud-1.22/mkdud new/mkdud-1.23/mkdud
--- old/mkdud-1.22/mkdud 2016-01-14 15:15:40.000000000 +0100
+++ new/mkdud-1.23/mkdud 2016-01-19 14:45:13.000000000 +0100
@@ -706,13 +706,14 @@
}
elsif(-f $_[0] && -s _ && -T _) {
open my $f, $_[0];
+ local $/; # complete file
my $l = <$f>;
close $f;
if($l =~ /^#!/) {
push @files, { type => 'bin', file => $_[0] } if -x $_[0];
return;
}
- elsif($l =~ /^-----BEGIN PGP PUBLIC KEY BLOCK-----/) {
+ elsif($l =~ /^-----BEGIN PGP PUBLIC KEY BLOCK-----/m) {
push @files, { type => 'pubkey', file => $_[0] };
return;
}
1
0
Hello community,
here is the log from the commit of package youtube-dl for openSUSE:Factory checked in at 2016-01-20 09:55:10
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/youtube-dl (Old)
and /work/SRC/openSUSE:Factory/.youtube-dl.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "youtube-dl"
Changes:
--------
--- /work/SRC/openSUSE:Factory/youtube-dl/youtube-dl.changes 2016-01-17 09:23:34.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.youtube-dl.new/youtube-dl.changes 2016-01-20 09:55:20.000000000 +0100
@@ -1,0 +2,7 @@
+Tue Jan 19 12:57:12 UTC 2016 - jengelh(a)inai.de
+
+- Update to new upstream release 2016.01.16
+* cwtv: add new extractor
+* add support for fyi.tv, aetv.com, mylifetime.com, 7tv.de
+
+-------------------------------------------------------------------
Old:
----
youtube-dl-2016.01.09.tar.gz
youtube-dl-2016.01.09.tar.gz.sig
New:
----
youtube-dl-2016.01.15.tar.gz
youtube-dl-2016.01.15.tar.gz.sig
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ youtube-dl.spec ++++++
--- /var/tmp/diff_new_pack.0ob1F2/_old 2016-01-20 09:55:21.000000000 +0100
+++ /var/tmp/diff_new_pack.0ob1F2/_new 2016-01-20 09:55:21.000000000 +0100
@@ -17,7 +17,7 @@
Name: youtube-dl
-Version: 2016.01.09
+Version: 2016.01.15
Release: 0
Summary: A tool for downloading from Youtube
License: SUSE-Public-Domain and CC-BY-SA-3.0
++++++ youtube-dl-2016.01.09.tar.gz -> youtube-dl-2016.01.15.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/devscripts/gh-pages/update-copyright.py new/youtube-dl/devscripts/gh-pages/update-copyright.py
--- old/youtube-dl/devscripts/gh-pages/update-copyright.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/devscripts/gh-pages/update-copyright.py 2016-01-14 10:35:12.000000000 +0100
@@ -5,7 +5,7 @@
import datetime
import glob
-import io # For Python 2 compatibilty
+import io # For Python 2 compatibility
import os
import re
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/docs/supportedsites.md new/youtube-dl/docs/supportedsites.md
--- old/youtube-dl/docs/supportedsites.md 2016-01-09 01:16:08.000000000 +0100
+++ new/youtube-dl/docs/supportedsites.md 2016-01-15 19:43:04.000000000 +0100
@@ -24,6 +24,7 @@
- **AdobeTVShow**
- **AdobeTVVideo**
- **AdultSwim**
+ - **AE**
- **Aftonbladet**
- **AirMozilla**
- **AlJazeera**
@@ -65,6 +66,7 @@
- **Beeg**
- **BehindKink**
- **Bet**
+ - **Bigflix**
- **Bild**: Bild.de
- **BiliBili**
- **BleacherReport**
@@ -84,6 +86,7 @@
- **CamdemyFolder**
- **canalc2.tv**
- **Canalplus**: canalplus.fr, piwiplus.fr and d8.tv
+ - **Canvas**
- **CBS**
- **CBSNews**: CBS News
- **CBSSports**
@@ -121,6 +124,7 @@
- **CSpan**: C-SPAN
- **CtsNews**: 華視新聞
- **culturebox.francetvinfo.fr**
+ - **CWTV**
- **dailymotion**
- **dailymotion:playlist**
- **dailymotion:user**
@@ -228,7 +232,6 @@
- **Helsinki**: helsinki.fi
- **HentaiStigma**
- **HistoricFilms**
- - **History**
- **hitbox**
- **hitbox:live**
- **HornBunny**
@@ -251,7 +254,7 @@
- **Instagram**
- **instagram:user**: Instagram user profile
- **InternetVideoArchive**
- - **IPrima**
+ - **IPrima** (Currently broken)
- **iqiyi**: 爱奇艺
- **Ir90Tv**
- **ivi**: ivi.ru
@@ -602,7 +605,9 @@
- **TruTube**
- **Tube8**
- **TubiTv**
- - **Tudou**
+ - **tudou**
+ - **tudou:album**
+ - **tudou:playlist**
- **Tumblr**
- **tunein:clip**
- **tunein:program**
@@ -655,12 +660,12 @@
- **video.mit.edu**
- **VideoDetective**
- **videofy.me**
- - **VideoMega**
+ - **VideoMega** (Currently broken)
- **videomore**
- **videomore:season**
- **videomore:video**
- **VideoPremium**
- - **VideoTt**: video.tt - Your True Tube
+ - **VideoTt**: video.tt - Your True Tube (Currently broken)
- **videoweed**: VideoWeed
- **Vidme**
- **Vidzi**
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/test/test_YoutubeDL.py new/youtube-dl/test/test_YoutubeDL.py
--- old/youtube-dl/test/test_YoutubeDL.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/test/test_YoutubeDL.py 2016-01-15 19:42:53.000000000 +0100
@@ -12,7 +12,7 @@
from test.helper import FakeYDL, assertRegexpMatches
from youtube_dl import YoutubeDL
-from youtube_dl.compat import compat_str
+from youtube_dl.compat import compat_str, compat_urllib_error
from youtube_dl.extractor import YoutubeIE
from youtube_dl.postprocessor.common import PostProcessor
from youtube_dl.utils import ExtractorError, match_filter_func
@@ -631,6 +631,11 @@
result = get_ids({'playlist_items': '10'})
self.assertEqual(result, [])
+ def test_urlopen_no_file_protocol(self):
+ # see https://github.com/rg3/youtube-dl/issues/8227
+ ydl = YDL()
+ self.assertRaises(compat_urllib_error.URLError, ydl.urlopen, 'file:///etc/passwd')
+
if __name__ == '__main__':
unittest.main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/test/test_write_annotations.py new/youtube-dl/test/test_write_annotations.py
--- old/youtube-dl/test/test_write_annotations.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/test/test_write_annotations.py 2016-01-14 10:35:12.000000000 +0100
@@ -66,7 +66,7 @@
textTag = a.find('TEXT')
text = textTag.text
self.assertTrue(text in expected) # assertIn only added in python 2.7
- # remove the first occurance, there could be more than one annotation with the same text
+ # remove the first occurrence, there could be more than one annotation with the same text
expected.remove(text)
# We should have seen (and removed) all the expected annotation texts.
self.assertEqual(len(expected), 0, 'Not all expected annotations were found.')
Files old/youtube-dl/youtube-dl and new/youtube-dl/youtube-dl differ
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/YoutubeDL.py new/youtube-dl/youtube_dl/YoutubeDL.py
--- old/youtube-dl/youtube_dl/YoutubeDL.py 2016-01-01 12:28:48.000000000 +0100
+++ new/youtube-dl/youtube_dl/YoutubeDL.py 2016-01-15 19:42:53.000000000 +0100
@@ -1312,7 +1312,7 @@
# only set the 'formats' fields if the original info_dict list them
# otherwise we end up with a circular reference, the first (and unique)
# element in the 'formats' field in info_dict is info_dict itself,
- # wich can't be exported to json
+ # which can't be exported to json
info_dict['formats'] = formats
if self.params.get('listformats'):
self.list_formats(info_dict)
@@ -1986,8 +1986,19 @@
https_handler = make_HTTPS_handler(self.params, debuglevel=debuglevel)
ydlh = YoutubeDLHandler(self.params, debuglevel=debuglevel)
data_handler = compat_urllib_request_DataHandler()
+
+ # When passing our own FileHandler instance, build_opener won't add the
+ # default FileHandler and allows us to disable the file protocol, which
+ # can be used for malicious purposes (see
+ # https://github.com/rg3/youtube-dl/issues/8227)
+ file_handler = compat_urllib_request.FileHandler()
+
+ def file_open(*args, **kwargs):
+ raise compat_urllib_error.URLError('file:// scheme is explicitly disabled in youtube-dl for security reasons')
+ file_handler.file_open = file_open
+
opener = compat_urllib_request.build_opener(
- proxy_handler, https_handler, cookie_processor, ydlh, data_handler)
+ proxy_handler, https_handler, cookie_processor, ydlh, data_handler, file_handler)
# Delete the default user-agent header, which would otherwise apply in
# cases where our custom HTTP handler doesn't come into play
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/downloader/fragment.py new/youtube-dl/youtube_dl/downloader/fragment.py
--- old/youtube-dl/youtube_dl/downloader/fragment.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/downloader/fragment.py 2016-01-14 10:35:12.000000000 +0100
@@ -59,37 +59,43 @@
'filename': ctx['filename'],
'tmpfilename': ctx['tmpfilename'],
}
+
start = time.time()
- ctx['started'] = start
+ ctx.update({
+ 'started': start,
+ # Total complete fragments downloaded so far in bytes
+ 'complete_frags_downloaded_bytes': 0,
+ # Amount of fragment's bytes downloaded by the time of the previous
+ # frag progress hook invocation
+ 'prev_frag_downloaded_bytes': 0,
+ })
def frag_progress_hook(s):
if s['status'] not in ('downloading', 'finished'):
return
- frag_total_bytes = s.get('total_bytes', 0)
- if s['status'] == 'finished':
- state['downloaded_bytes'] += frag_total_bytes
- state['frag_index'] += 1
+ frag_total_bytes = s.get('total_bytes') or 0
estimated_size = (
- (state['downloaded_bytes'] + frag_total_bytes) /
+ (ctx['complete_frags_downloaded_bytes'] + frag_total_bytes) /
(state['frag_index'] + 1) * total_frags)
time_now = time.time()
state['total_bytes_estimate'] = estimated_size
state['elapsed'] = time_now - start
if s['status'] == 'finished':
- progress = self.calc_percent(state['frag_index'], total_frags)
+ state['frag_index'] += 1
+ state['downloaded_bytes'] += frag_total_bytes - ctx['prev_frag_downloaded_bytes']
+ ctx['complete_frags_downloaded_bytes'] = state['downloaded_bytes']
+ ctx['prev_frag_downloaded_bytes'] = 0
else:
frag_downloaded_bytes = s['downloaded_bytes']
- frag_progress = self.calc_percent(frag_downloaded_bytes,
- frag_total_bytes)
- progress = self.calc_percent(state['frag_index'], total_frags)
- progress += frag_progress / float(total_frags)
-
+ state['downloaded_bytes'] += frag_downloaded_bytes - ctx['prev_frag_downloaded_bytes']
state['eta'] = self.calc_eta(
- start, time_now, estimated_size, state['downloaded_bytes'] + frag_downloaded_bytes)
+ start, time_now, estimated_size,
+ state['downloaded_bytes'])
state['speed'] = s.get('speed')
+ ctx['prev_frag_downloaded_bytes'] = frag_downloaded_bytes
self._hook_progress(state)
ctx['dl'].add_progress_hook(frag_progress_hook)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/__init__.py new/youtube-dl/youtube_dl/extractor/__init__.py
--- old/youtube-dl/youtube_dl/extractor/__init__.py 2016-01-09 01:15:59.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/__init__.py 2016-01-15 19:42:53.000000000 +0100
@@ -15,6 +15,7 @@
AdobeTVVideoIE,
)
from .adultswim import AdultSwimIE
+from .ae import AEIE
from .aftonbladet import AftonbladetIE
from .airmozilla import AirMozillaIE
from .aljazeera import AlJazeeraIE
@@ -61,6 +62,7 @@
from .behindkink import BehindKinkIE
from .beatportpro import BeatportProIE
from .bet import BetIE
+from .bigflix import BigflixIE
from .bild import BildIE
from .bilibili import BiliBiliIE
from .bleacherreport import (
@@ -85,6 +87,7 @@
)
from .canalplus import CanalplusIE
from .canalc2 import Canalc2IE
+from .canvas import CanvasIE
from .cbs import CBSIE
from .cbsnews import CBSNewsIE
from .cbssports import CBSSportsIE
@@ -127,6 +130,7 @@
)
from .cspan import CSpanIE
from .ctsnews import CtsNewsIE
+from .cwtv import CWTVIE
from .dailymotion import (
DailymotionIE,
DailymotionPlaylistIE,
@@ -261,7 +265,6 @@
from .helsinki import HelsinkiIE
from .hentaistigma import HentaiStigmaIE
from .historicfilms import HistoricFilmsIE
-from .history import HistoryIE
from .hitbox import HitboxIE, HitboxLiveIE
from .hornbunny import HornBunnyIE
from .hotnewhiphop import HotNewHipHopIE
@@ -722,7 +725,11 @@
from .trutube import TruTubeIE
from .tube8 import Tube8IE
from .tubitv import TubiTvIE
-from .tudou import TudouIE
+from .tudou import (
+ TudouIE,
+ TudouPlaylistIE,
+ TudouAlbumIE,
+)
from .tumblr import TumblrIE
from .tunein import (
TuneInClipIE,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/ae.py new/youtube-dl/youtube_dl/extractor/ae.py
--- old/youtube-dl/youtube_dl/extractor/ae.py 1970-01-01 01:00:00.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/ae.py 2016-01-15 19:42:53.000000000 +0100
@@ -0,0 +1,58 @@
+from __future__ import unicode_literals
+
+from .common import InfoExtractor
+from ..utils import smuggle_url
+
+
+class AEIE(InfoExtractor):
+ _VALID_URL = r'https?://(?:www\.)?(?:(?:history|aetv|mylifetime)\.com|fyi\.tv)/(?:[^/]+/)+(?P<id>[^/]+?)(?:$|[?#])'
+
+ _TESTS = [{
+ 'url': 'http://www.history.com/topics/valentines-day/history-of-valentines-day/vide…',
+ 'info_dict': {
+ 'id': 'g12m5Gyt3fdR',
+ 'ext': 'mp4',
+ 'title': "Bet You Didn't Know: Valentine's Day",
+ 'description': 'md5:7b57ea4829b391995b405fa60bd7b5f7',
+ },
+ 'params': {
+ # m3u8 download
+ 'skip_download': True,
+ },
+ 'add_ie': ['ThePlatform'],
+ }, {
+ 'url': 'http://www.history.com/shows/mountain-men/season-1/episode-1',
+ 'info_dict': {
+ 'id': 'eg47EERs_JsZ',
+ 'ext': 'mp4',
+ 'title': "Winter Is Coming",
+ 'description': 'md5:a40e370925074260b1c8a633c632c63a',
+ },
+ 'params': {
+ # m3u8 download
+ 'skip_download': True,
+ },
+ 'add_ie': ['ThePlatform'],
+ }, {
+ 'url': 'http://www.aetv.com/shows/duck-dynasty/video/inlawful-entry',
+ 'only_matching': True
+ }, {
+ 'url': 'http://www.fyi.tv/shows/tiny-house-nation/videos/207-sq-ft-minnesota-prairi…',
+ 'only_matching': True
+ }, {
+ 'url': 'http://www.mylifetime.com/shows/project-runway-junior/video/season-1/episod…',
+ 'only_matching': True
+ }]
+
+ def _real_extract(self, url):
+ video_id = self._match_id(url)
+
+ webpage = self._download_webpage(url, video_id)
+
+ video_url_re = [
+ r'data-href="[^"]*/%s"[^>]+data-release-url="([^"]+)"' % video_id,
+ r"media_url\s*=\s*'([^']+)'"
+ ]
+ video_url = self._search_regex(video_url_re, webpage, 'video url')
+
+ return self.url_result(smuggle_url(video_url, {'sig': {'key': 'crazyjava', 'secret': 's3cr3t'}}))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/beeg.py new/youtube-dl/youtube_dl/extractor/beeg.py
--- old/youtube-dl/youtube_dl/extractor/beeg.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/beeg.py 2016-01-14 15:43:12.000000000 +0100
@@ -34,7 +34,7 @@
video_id = self._match_id(url)
video = self._download_json(
- 'http://beeg.com/api/v5/video/%s' % video_id, video_id)
+ 'https://api.beeg.com/api/v5/video/%s' % video_id, video_id)
def split(o, e):
def cut(s, x):
@@ -60,7 +60,7 @@
def decrypt_url(encrypted_url):
encrypted_url = self._proto_relative_url(
- encrypted_url.replace('{DATA_MARKERS}', ''), 'http:')
+ encrypted_url.replace('{DATA_MARKERS}', ''), 'https:')
key = self._search_regex(
r'/key=(.*?)%2Cend=', encrypted_url, 'key', default=None)
if not key:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/bigflix.py new/youtube-dl/youtube_dl/extractor/bigflix.py
--- old/youtube-dl/youtube_dl/extractor/bigflix.py 1970-01-01 01:00:00.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/bigflix.py 2016-01-14 10:35:12.000000000 +0100
@@ -0,0 +1,85 @@
+# coding: utf-8
+from __future__ import unicode_literals
+
+import base64
+import re
+
+from .common import InfoExtractor
+from ..compat import compat_urllib_parse_unquote
+
+
+class BigflixIE(InfoExtractor):
+ _VALID_URL = r'https?://(?:www\.)?bigflix\.com/.+/(?P<id>[0-9]+)'
+ _TESTS = [{
+ 'url': 'http://www.bigflix.com/Hindi-movies/Action-movies/Singham-Returns/16537',
+ 'md5': 'ec76aa9b1129e2e5b301a474e54fab74',
+ 'info_dict': {
+ 'id': '16537',
+ 'ext': 'mp4',
+ 'title': 'Singham Returns',
+ 'description': 'md5:3d2ba5815f14911d5cc6a501ae0cf65d',
+ }
+ }, {
+ # 2 formats
+ 'url': 'http://www.bigflix.com/Tamil-movies/Drama-movies/Madarasapatinam/16070',
+ 'info_dict': {
+ 'id': '16070',
+ 'ext': 'mp4',
+ 'title': 'Madarasapatinam',
+ 'description': 'md5:63b9b8ed79189c6f0418c26d9a3452ca',
+ 'formats': 'mincount:2',
+ },
+ 'params': {
+ 'skip_download': True,
+ }
+ }, {
+ # multiple formats
+ 'url': 'http://www.bigflix.com/Malayalam-movies/Drama-movies/Indian-Rupee/15967',
+ 'only_matching': True,
+ }]
+
+ def _real_extract(self, url):
+ video_id = self._match_id(url)
+
+ webpage = self._download_webpage(url, video_id)
+
+ title = self._html_search_regex(
+ r'<div[^>]+class=["\']pagetitle["\'][^>]*>(.+?)</div>',
+ webpage, 'title')
+
+ def decode_url(quoted_b64_url):
+ return base64.b64decode(compat_urllib_parse_unquote(
+ quoted_b64_url).encode('ascii')).decode('utf-8')
+
+ formats = []
+ for height, encoded_url in re.findall(
+ r'ContentURL_(\d{3,4})[pP][^=]+=([^&]+)', webpage):
+ video_url = decode_url(encoded_url)
+ f = {
+ 'url': video_url,
+ 'format_id': '%sp' % height,
+ 'height': int(height),
+ }
+ if video_url.startswith('rtmp'):
+ f['ext'] = 'flv'
+ formats.append(f)
+
+ file_url = self._search_regex(
+ r'file=([^&]+)', webpage, 'video url', default=None)
+ if file_url:
+ video_url = decode_url(file_url)
+ if all(f['url'] != video_url for f in formats):
+ formats.append({
+ 'url': decode_url(file_url),
+ })
+
+ self._sort_formats(formats)
+
+ description = self._html_search_meta('description', webpage)
+
+ return {
+ 'id': video_id,
+ 'title': title,
+ 'description': description,
+ 'formats': formats
+ }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/canalc2.py new/youtube-dl/youtube_dl/extractor/canalc2.py
--- old/youtube-dl/youtube_dl/extractor/canalc2.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/canalc2.py 2016-01-14 10:35:12.000000000 +0100
@@ -9,9 +9,9 @@
class Canalc2IE(InfoExtractor):
IE_NAME = 'canalc2.tv'
- _VALID_URL = r'https?://(?:www\.)?canalc2\.tv/video/(?P<id>\d+)'
+ _VALID_URL = r'https?://(?:(?:www\.)?canalc2\.tv/video/|archives-canalc2\.u-strasbg\.fr/video\.asp\?.*\bidVideo=)(?P<id>\d+)'
- _TEST = {
+ _TESTS = [{
'url': 'http://www.canalc2.tv/video/12163',
'md5': '060158428b650f896c542dfbb3d6487f',
'info_dict': {
@@ -23,24 +23,36 @@
'params': {
'skip_download': True, # Requires rtmpdump
}
- }
+ }, {
+ 'url': 'http://archives-canalc2.u-strasbg.fr/video.asp?idVideo=11427&voir=oui',
+ 'only_matching': True,
+ }]
def _real_extract(self, url):
video_id = self._match_id(url)
- webpage = self._download_webpage(url, video_id)
- video_url = self._search_regex(
- r'jwplayer\((["\'])Player\1\)\.setup\({[^}]*file\s*:\s*(["\'])(?P<file>.+?)\2',
- webpage, 'video_url', group='file')
- formats = [{'url': video_url}]
- if video_url.startswith('rtmp://'):
- rtmp = re.search(r'^(?P<url>rtmp://[^/]+/(?P<app>.+/))(?P<play_path>mp4:.+)$', video_url)
- formats[0].update({
- 'url': rtmp.group('url'),
- 'ext': 'flv',
- 'app': rtmp.group('app'),
- 'play_path': rtmp.group('play_path'),
- 'page_url': url,
- })
+
+ webpage = self._download_webpage(
+ 'http://www.canalc2.tv/video/%s' % video_id, video_id)
+
+ formats = []
+ for _, video_url in re.findall(r'file\s*=\s*(["\'])(.+?)\1', webpage):
+ if video_url.startswith('rtmp://'):
+ rtmp = re.search(
+ r'^(?P<url>rtmp://[^/]+/(?P<app>.+/))(?P<play_path>mp4:.+)$', video_url)
+ formats.append({
+ 'url': rtmp.group('url'),
+ 'format_id': 'rtmp',
+ 'ext': 'flv',
+ 'app': rtmp.group('app'),
+ 'play_path': rtmp.group('play_path'),
+ 'page_url': url,
+ })
+ else:
+ formats.append({
+ 'url': video_url,
+ 'format_id': 'http',
+ })
+ self._sort_formats(formats)
title = self._html_search_regex(
r'(?s)class="[^"]*col_description[^"]*">.*?<h3>(.*?)</h3>', webpage, 'title')
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/canvas.py new/youtube-dl/youtube_dl/extractor/canvas.py
--- old/youtube-dl/youtube_dl/extractor/canvas.py 1970-01-01 01:00:00.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/canvas.py 2016-01-15 19:42:53.000000000 +0100
@@ -0,0 +1,65 @@
+from __future__ import unicode_literals
+
+from .common import InfoExtractor
+from ..utils import float_or_none
+
+
+class CanvasIE(InfoExtractor):
+ _VALID_URL = r'https?://(?:www\.)?canvas\.be/video/(?:[^/]+/)*(?P<id>[^/?#&]+)'
+ _TEST = {
+ 'url': 'http://www.canvas.be/video/de-afspraak/najaar-2015/de-afspraak-veilt-voor-d…',
+ 'md5': 'ea838375a547ac787d4064d8c7860a6c',
+ 'info_dict': {
+ 'id': 'mz-ast-5e5f90b6-2d72-4c40-82c2-e134f884e93e',
+ 'display_id': 'de-afspraak-veilt-voor-de-warmste-week',
+ 'ext': 'mp4',
+ 'title': 'De afspraak veilt voor de Warmste Week',
+ 'description': 'md5:24cb860c320dc2be7358e0e5aa317ba6',
+ 'thumbnail': 're:^https?://.*\.jpg$',
+ 'duration': 49.02,
+ }
+ }
+
+ def _real_extract(self, url):
+ display_id = self._match_id(url)
+
+ webpage = self._download_webpage(url, display_id)
+
+ title = self._search_regex(
+ r'<h1[^>]+class="video__body__header__title"[^>]*>(.+?)</h1>',
+ webpage, 'title', default=None) or self._og_search_title(webpage)
+
+ video_id = self._html_search_regex(
+ r'data-video=(["\'])(?P<id>.+?)\1', webpage, 'video id', group='id')
+
+ data = self._download_json(
+ 'https://mediazone.vrt.be/api/v1/canvas/assets/%s' % video_id, display_id)
+
+ formats = []
+ for target in data['targetUrls']:
+ format_url, format_type = target.get('url'), target.get('type')
+ if not format_url or not format_type:
+ continue
+ if format_type == 'HLS':
+ formats.extend(self._extract_m3u8_formats(
+ format_url, display_id, entry_protocol='m3u8_native',
+ ext='mp4', preference=0, fatal=False, m3u8_id=format_type))
+ elif format_type == 'HDS':
+ formats.extend(self._extract_f4m_formats(
+ format_url, display_id, f4m_id=format_type, fatal=False))
+ else:
+ formats.append({
+ 'format_id': format_type,
+ 'url': format_url,
+ })
+ self._sort_formats(formats)
+
+ return {
+ 'id': video_id,
+ 'display_id': display_id,
+ 'title': title,
+ 'description': self._og_search_description(webpage),
+ 'formats': formats,
+ 'duration': float_or_none(data.get('duration'), 1000),
+ 'thumbnail': data.get('posterImageUrl'),
+ }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/common.py new/youtube-dl/youtube_dl/extractor/common.py
--- old/youtube-dl/youtube_dl/extractor/common.py 2016-01-09 01:15:59.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/common.py 2016-01-14 10:35:12.000000000 +0100
@@ -313,9 +313,9 @@
except ExtractorError:
raise
except compat_http_client.IncompleteRead as e:
- raise ExtractorError('A network error has occured.', cause=e, expected=True)
+ raise ExtractorError('A network error has occurred.', cause=e, expected=True)
except (KeyError, StopIteration) as e:
- raise ExtractorError('An extractor error has occured.', cause=e)
+ raise ExtractorError('An extractor error has occurred.', cause=e)
def set_downloader(self, downloader):
"""Sets the downloader for this IE."""
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/cwtv.py new/youtube-dl/youtube_dl/extractor/cwtv.py
--- old/youtube-dl/youtube_dl/extractor/cwtv.py 1970-01-01 01:00:00.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/cwtv.py 2016-01-15 19:42:53.000000000 +0100
@@ -0,0 +1,88 @@
+# coding: utf-8
+from __future__ import unicode_literals
+
+from .common import InfoExtractor
+from ..utils import (
+ int_or_none,
+ parse_iso8601,
+)
+
+
+class CWTVIE(InfoExtractor):
+ _VALID_URL = r'https?://(?:www\.)?cw(?:tv|seed)\.com/shows/(?:[^/]+/){2}\?play=(?P<id>[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12})'
+ _TESTS = [{
+ 'url': 'http://cwtv.com/shows/arrow/legends-of-yesterday/?play=6b15e985-9345-4f60-b…',
+ 'info_dict': {
+ 'id': '6b15e985-9345-4f60-baf8-56e96be57c63',
+ 'ext': 'mp4',
+ 'title': 'Legends of Yesterday',
+ 'description': 'Oliver and Barry Allen take Kendra Saunders and Carter Hall to a remote location to keep them hidden from Vandal Savage while they figure out how to defeat him.',
+ 'duration': 2665,
+ 'series': 'Arrow',
+ 'season_number': 4,
+ 'season': '4',
+ 'episode_number': 8,
+ 'upload_date': '20151203',
+ 'timestamp': 1449122100,
+ },
+ 'params': {
+ # m3u8 download
+ 'skip_download': True,
+ }
+ }, {
+ 'url': 'http://www.cwseed.com/shows/whose-line-is-it-anyway/jeff-davis-4/?play=2428…',
+ 'info_dict': {
+ 'id': '24282b12-ead2-42f2-95ad-26770c2c6088',
+ 'ext': 'mp4',
+ 'title': 'Jeff Davis 4',
+ 'description': 'Jeff Davis is back to make you laugh.',
+ 'duration': 1263,
+ 'series': 'Whose Line Is It Anyway?',
+ 'season_number': 11,
+ 'season': '11',
+ 'episode_number': 20,
+ 'upload_date': '20151006',
+ 'timestamp': 1444107300,
+ },
+ 'params': {
+ # m3u8 download
+ 'skip_download': True,
+ }
+ }]
+
+ def _real_extract(self, url):
+ video_id = self._match_id(url)
+ video_data = self._download_json(
+ 'http://metaframe.digitalsmiths.tv/v2/CWtv/assets/%s/partner/132?format=json' % video_id, video_id)
+
+ formats = self._extract_m3u8_formats(
+ video_data['videos']['variantplaylist']['uri'], video_id, 'mp4')
+
+ thumbnails = [{
+ 'url': image['uri'],
+ 'width': image.get('width'),
+ 'height': image.get('height'),
+ } for image_id, image in video_data['images'].items() if image.get('uri')] if video_data.get('images') else None
+
+ video_metadata = video_data['assetFields']
+
+ subtitles = {
+ 'en': [{
+ 'url': video_metadata['UnicornCcUrl'],
+ }],
+ } if video_metadata.get('UnicornCcUrl') else None
+
+ return {
+ 'id': video_id,
+ 'title': video_metadata['title'],
+ 'description': video_metadata.get('description'),
+ 'duration': int_or_none(video_metadata.get('duration')),
+ 'series': video_metadata.get('seriesName'),
+ 'season_number': int_or_none(video_metadata.get('seasonNumber')),
+ 'season': video_metadata.get('seasonName'),
+ 'episode_number': int_or_none(video_metadata.get('episodeNumber')),
+ 'timestamp': parse_iso8601(video_data.get('startTime')),
+ 'thumbnails': thumbnails,
+ 'formats': formats,
+ 'subtitles': subtitles,
+ }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/dailymotion.py new/youtube-dl/youtube_dl/extractor/dailymotion.py
--- old/youtube-dl/youtube_dl/extractor/dailymotion.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/dailymotion.py 2016-01-14 10:35:12.000000000 +0100
@@ -149,14 +149,15 @@
ext = determine_ext(media_url)
if type_ == 'application/x-mpegURL' or ext == 'm3u8':
formats.extend(self._extract_m3u8_formats(
- media_url, video_id, 'mp4', m3u8_id='hls', fatal=False))
+ media_url, video_id, 'mp4', preference=-1,
+ m3u8_id='hls', fatal=False))
elif type_ == 'application/f4m' or ext == 'f4m':
formats.extend(self._extract_f4m_formats(
media_url, video_id, preference=-1, f4m_id='hds', fatal=False))
else:
f = {
'url': media_url,
- 'format_id': quality,
+ 'format_id': 'http-%s' % quality,
}
m = re.search(r'H264-(?P<width>\d+)x(?P<height>\d+)', media_url)
if m:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/dcn.py new/youtube-dl/youtube_dl/extractor/dcn.py
--- old/youtube-dl/youtube_dl/extractor/dcn.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/dcn.py 2016-01-14 10:35:12.000000000 +0100
@@ -5,7 +5,10 @@
import base64
from .common import InfoExtractor
-from ..compat import compat_urllib_parse
+from ..compat import (
+ compat_urllib_parse,
+ compat_str,
+)
from ..utils import (
int_or_none,
parse_iso8601,
@@ -186,7 +189,8 @@
entries = []
for video in show['videos']:
+ video_id = compat_str(video['id'])
entries.append(self.url_result(
- 'http://www.dcndigital.ae/media/%s' % video['id'], 'DCNVideo'))
+ 'http://www.dcndigital.ae/media/%s' % video_id, 'DCNVideo', video_id))
return self.playlist_result(entries, season_id, title)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/facebook.py new/youtube-dl/youtube_dl/extractor/facebook.py
--- old/youtube-dl/youtube_dl/extractor/facebook.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/facebook.py 2016-01-14 10:35:12.000000000 +0100
@@ -105,7 +105,7 @@
login_results, 'login error', default=None, group='error')
if error:
raise ExtractorError('Unable to login: %s' % error, expected=True)
- self._downloader.report_warning('unable to log in: bad username/password, or exceded login rate limit (~3/min). Check credentials or wait.')
+ self._downloader.report_warning('unable to log in: bad username/password, or exceeded login rate limit (~3/min). Check credentials or wait.')
return
fb_dtsg = self._search_regex(
@@ -126,7 +126,7 @@
check_response = self._download_webpage(check_req, None,
note='Confirming login')
if re.search(r'id="checkpointSubmitButton"', check_response) is not None:
- self._downloader.report_warning('Unable to confirm login, you have to login in your brower and authorize the login.')
+ self._downloader.report_warning('Unable to confirm login, you have to login in your browser and authorize the login.')
except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
self._downloader.report_warning('unable to log in: %s' % error_to_compat_str(err))
return
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/generic.py new/youtube-dl/youtube_dl/extractor/generic.py
--- old/youtube-dl/youtube_dl/extractor/generic.py 2015-12-31 16:50:46.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/generic.py 2016-01-14 10:35:12.000000000 +0100
@@ -487,7 +487,7 @@
'description': 'md5:8145d19d320ff3e52f28401f4c4283b9',
}
},
- # Embeded Ustream video
+ # Embedded Ustream video
{
'url': 'http://www.american.edu/spa/pti/nsa-privacy-janus-2014.cfm',
'md5': '27b99cdb639c9b12a79bca876a073417',
@@ -1644,7 +1644,7 @@
if myvi_url:
return self.url_result(myvi_url)
- # Look for embeded soundcloud player
+ # Look for embedded soundcloud player
mobj = re.search(
r'<iframe\s+(?:[a-zA-Z0-9_-]+="[^"]+"\s+)*src="(?P<url>https?://(?:w\.)?soundcloud\.com/player[^"]+)"',
webpage)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/history.py new/youtube-dl/youtube_dl/extractor/history.py
--- old/youtube-dl/youtube_dl/extractor/history.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/history.py 1970-01-01 01:00:00.000000000 +0100
@@ -1,31 +0,0 @@
-from __future__ import unicode_literals
-
-from .common import InfoExtractor
-from ..utils import smuggle_url
-
-
-class HistoryIE(InfoExtractor):
- _VALID_URL = r'https?://(?:www\.)?history\.com/(?:[^/]+/)+(?P<id>[^/]+?)(?:$|[?#])'
-
- _TESTS = [{
- 'url': 'http://www.history.com/topics/valentines-day/history-of-valentines-day/vide…',
- 'md5': '6fe632d033c92aa10b8d4a9be047a7c5',
- 'info_dict': {
- 'id': 'bLx5Dv5Aka1G',
- 'ext': 'mp4',
- 'title': "Bet You Didn't Know: Valentine's Day",
- 'description': 'md5:7b57ea4829b391995b405fa60bd7b5f7',
- },
- 'add_ie': ['ThePlatform'],
- }]
-
- def _real_extract(self, url):
- video_id = self._match_id(url)
-
- webpage = self._download_webpage(url, video_id)
-
- video_url = self._search_regex(
- r'data-href="[^"]*/%s"[^>]+data-release-url="([^"]+)"' % video_id,
- webpage, 'video url')
-
- return self.url_result(smuggle_url(video_url, {'sig': {'key': 'crazyjava', 'secret': 's3cr3t'}}))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/iprima.py new/youtube-dl/youtube_dl/extractor/iprima.py
--- old/youtube-dl/youtube_dl/extractor/iprima.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/iprima.py 2016-01-14 10:35:12.000000000 +0100
@@ -14,6 +14,7 @@
class IPrimaIE(InfoExtractor):
+ _WORKING = False
_VALID_URL = r'https?://play\.iprima\.cz/(?:[^/]+/)*(?P<id>[^?#]+)'
_TESTS = [{
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/ivi.py new/youtube-dl/youtube_dl/extractor/ivi.py
--- old/youtube-dl/youtube_dl/extractor/ivi.py 2016-01-09 01:15:59.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/ivi.py 2016-01-14 10:35:12.000000000 +0100
@@ -32,7 +32,7 @@
},
'skip': 'Only works from Russia',
},
- # Serial's serie
+ # Serial's series
{
'url': 'http://www.ivi.ru/watch/dvoe_iz_lartsa/9549',
'md5': '221f56b35e3ed815fde2df71032f4b3e',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/mdr.py new/youtube-dl/youtube_dl/extractor/mdr.py
--- old/youtube-dl/youtube_dl/extractor/mdr.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/mdr.py 2016-01-14 10:35:12.000000000 +0100
@@ -17,7 +17,7 @@
_VALID_URL = r'https?://(?:www\.)?(?:mdr|kika)\.de/(?:.*)/[a-z]+(?P<id>\d+)(?:_.+?)?\.html'
_TESTS = [{
- # MDR regularily deletes its videos
+ # MDR regularly deletes its videos
'url': 'http://www.mdr.de/fakt/video189002.html',
'only_matching': True,
}, {
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/nbc.py new/youtube-dl/youtube_dl/extractor/nbc.py
--- old/youtube-dl/youtube_dl/extractor/nbc.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/nbc.py 2016-01-14 10:35:12.000000000 +0100
@@ -100,7 +100,7 @@
class NBCSportsIE(InfoExtractor):
- # Does not include https becuase its certificate is invalid
+ # Does not include https because its certificate is invalid
_VALID_URL = r'http://www\.nbcsports\.com//?(?:[^/]+/)+(?P<id>[0-9a-z-]+)'
_TEST = {
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/nhl.py new/youtube-dl/youtube_dl/extractor/nhl.py
--- old/youtube-dl/youtube_dl/extractor/nhl.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/nhl.py 2016-01-14 10:35:12.000000000 +0100
@@ -223,7 +223,7 @@
response = self._download_webpage(request_url, playlist_title)
response = self._fix_json(response)
if not response.strip():
- self._downloader.report_warning('Got an empty reponse, trying '
+ self._downloader.report_warning('Got an empty response, trying '
'adding the "newvideos" parameter')
response = self._download_webpage(request_url + '&newvideos=true',
playlist_title)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/ntvde.py new/youtube-dl/youtube_dl/extractor/ntvde.py
--- old/youtube-dl/youtube_dl/extractor/ntvde.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/ntvde.py 2016-01-15 19:42:53.000000000 +0100
@@ -2,6 +2,7 @@
from __future__ import unicode_literals
from .common import InfoExtractor
+from ..compat import compat_urlparse
from ..utils import (
int_or_none,
js_to_json,
@@ -34,7 +35,7 @@
webpage = self._download_webpage(url, video_id)
info = self._parse_json(self._search_regex(
- r'(?s)ntv.pageInfo.article =\s(\{.*?\});', webpage, 'info'),
+ r'(?s)ntv\.pageInfo\.article\s*=\s*(\{.*?\});', webpage, 'info'),
video_id, transform_source=js_to_json)
timestamp = int_or_none(info.get('publishedDateAsUnixTimeStamp'))
vdata = self._parse_json(self._search_regex(
@@ -42,18 +43,24 @@
webpage, 'player data'),
video_id, transform_source=js_to_json)
duration = parse_duration(vdata.get('duration'))
- formats = [{
- 'format_id': 'flash',
- 'url': 'rtmp://fms.n-tv.de/' + vdata['video'],
- }, {
- 'format_id': 'mobile',
- 'url': 'http://video.n-tv.de' + vdata['videoMp4'],
- 'tbr': 400, # estimation
- }]
- m3u8_url = 'http://video.n-tv.de' + vdata['videoM3u8']
- formats.extend(self._extract_m3u8_formats(
- m3u8_url, video_id, ext='mp4',
- entry_protocol='m3u8_native', preference=0))
+
+ formats = []
+ if vdata.get('video'):
+ formats.append({
+ 'format_id': 'flash',
+ 'url': 'rtmp://fms.n-tv.de/%s' % vdata['video'],
+ })
+ if vdata.get('videoMp4'):
+ formats.append({
+ 'format_id': 'mobile',
+ 'url': compat_urlparse.urljoin('http://video.n-tv.de', vdata['videoMp4']),
+ 'tbr': 400, # estimation
+ })
+ if vdata.get('videoM3u8'):
+ m3u8_url = compat_urlparse.urljoin('http://video.n-tv.de', vdata['videoM3u8'])
+ formats.extend(self._extract_m3u8_formats(
+ m3u8_url, video_id, ext='mp4', entry_protocol='m3u8_native',
+ preference=0, m3u8_id='hls', fatal=False))
self._sort_formats(formats)
return {
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/ora.py new/youtube-dl/youtube_dl/extractor/ora.py
--- old/youtube-dl/youtube_dl/extractor/ora.py 2015-12-31 16:50:46.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/ora.py 2016-01-14 10:35:12.000000000 +0100
@@ -37,7 +37,7 @@
formats = self._extract_m3u8_formats(
m3u8_url, display_id, 'mp4', 'm3u8_native',
m3u8_id='hls', fatal=False)
- # simular to GameSpotIE
+ # similar to GameSpotIE
m3u8_path = compat_urlparse.urlparse(m3u8_url).path
QUALITIES_RE = r'((,[a-z]+\d+)+,?)'
available_qualities = self._search_regex(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/orf.py new/youtube-dl/youtube_dl/extractor/orf.py
--- old/youtube-dl/youtube_dl/extractor/orf.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/orf.py 2016-01-15 19:42:53.000000000 +0100
@@ -170,7 +170,21 @@
class ORFFM4IE(InfoExtractor):
IE_NAME = 'orf:fm4'
IE_DESC = 'radio FM4'
- _VALID_URL = r'http://fm4\.orf\.at/7tage/?#(?P<date>[0-9]+)/(?P<show>\w+)'
+ _VALID_URL = r'http://fm4\.orf\.at/(?:7tage/?#|player/)(?P<date>[0-9]+)/(?P<show>\w+)'
+
+ _TEST = {
+ 'url': 'http://fm4.orf.at/player/20160110/IS/',
+ 'md5': '01e736e8f1cef7e13246e880a59ad298',
+ 'info_dict': {
+ 'id': '2016-01-10_2100_tl_54_7DaysSun13_11244',
+ 'ext': 'mp3',
+ 'title': 'Im Sumpf',
+ 'description': 'md5:384c543f866c4e422a55f66a62d669cd',
+ 'duration': 7173,
+ 'timestamp': 1452456073,
+ 'upload_date': '20160110',
+ },
+ }
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/pluralsight.py new/youtube-dl/youtube_dl/extractor/pluralsight.py
--- old/youtube-dl/youtube_dl/extractor/pluralsight.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/pluralsight.py 2016-01-14 10:35:12.000000000 +0100
@@ -232,7 +232,7 @@
# { a = author, cn = clip_id, lc = end, m = name }
return {
- 'id': clip['clipName'],
+ 'id': clip.get('clipName') or clip['name'],
'title': '%s - %s' % (module['title'], clip['title']),
'duration': int_or_none(clip.get('duration')) or parse_duration(clip.get('formattedDuration')),
'creator': author,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/prosiebensat1.py new/youtube-dl/youtube_dl/extractor/prosiebensat1.py
--- old/youtube-dl/youtube_dl/extractor/prosiebensat1.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/prosiebensat1.py 2016-01-15 19:42:53.000000000 +0100
@@ -20,7 +20,7 @@
class ProSiebenSat1IE(InfoExtractor):
IE_NAME = 'prosiebensat1'
IE_DESC = 'ProSiebenSat.1 Digital'
- _VALID_URL = r'https?://(?:www\.)?(?:(?:prosieben|prosiebenmaxx|sixx|sat1|kabeleins|the-voice-of-germany)\.(?:de|at|ch)|ran\.de|fem\.com)/(?P<id>.+)'
+ _VALID_URL = r'https?://(?:www\.)?(?:(?:prosieben|prosiebenmaxx|sixx|sat1|kabeleins|the-voice-of-germany|7tv)\.(?:de|at|ch)|ran\.de|fem\.com)/(?P<id>.+)'
_TESTS = [
{
@@ -32,7 +32,7 @@
'url': 'http://www.prosieben.de/tv/circus-halligalli/videos/218-staffel-2-episode-1…',
'info_dict': {
'id': '2104602',
- 'ext': 'mp4',
+ 'ext': 'flv',
'title': 'Episode 18 - Staffel 2',
'description': 'md5:8733c81b702ea472e069bc48bb658fc1',
'upload_date': '20131231',
@@ -138,14 +138,13 @@
'url': 'http://www.the-voice-of-germany.de/video/31-andreas-kuemmert-rocket-man-clip',
'info_dict': {
'id': '2572814',
- 'ext': 'mp4',
+ 'ext': 'flv',
'title': 'Andreas Kümmert: Rocket Man',
'description': 'md5:6ddb02b0781c6adf778afea606652e38',
'upload_date': '20131017',
'duration': 469.88,
},
'params': {
- # rtmp download
'skip_download': True,
},
},
@@ -153,13 +152,12 @@
'url': 'http://www.fem.com/wellness/videos/wellness-video-clip-kurztripps-zum-valen…',
'info_dict': {
'id': '2156342',
- 'ext': 'mp4',
+ 'ext': 'flv',
'title': 'Kurztrips zum Valentinstag',
- 'description': 'Romantischer Kurztrip zum Valentinstag? Wir verraten, was sich hier wirklich lohnt.',
+ 'description': 'Romantischer Kurztrip zum Valentinstag? Nina Heinemann verrät, was sich hier wirklich lohnt.',
'duration': 307.24,
},
'params': {
- # rtmp download
'skip_download': True,
},
},
@@ -172,12 +170,26 @@
},
'playlist_count': 2,
},
+ {
+ 'url': 'http://www.7tv.de/circus-halligalli/615-best-of-circus-halligalli-ganze-fol…',
+ 'info_dict': {
+ 'id': '4187506',
+ 'ext': 'flv',
+ 'title': 'Best of Circus HalliGalli',
+ 'description': 'md5:8849752efd90b9772c9db6fdf87fb9e9',
+ 'upload_date': '20151229',
+ },
+ 'params': {
+ 'skip_download': True,
+ },
+ },
]
_CLIPID_REGEXES = [
r'"clip_id"\s*:\s+"(\d+)"',
r'clipid: "(\d+)"',
r'clip[iI]d=(\d+)',
+ r'clip[iI]d\s*=\s*["\'](\d+)',
r"'itemImageUrl'\s*:\s*'/dynamic/thumbnails/full/\d+/(\d+)",
]
_TITLE_REGEXES = [
@@ -186,12 +198,16 @@
r'<!-- start video -->\s*<h1>(.+?)</h1>',
r'<h1 class="att-name">\s*(.+?)</h1>',
r'<header class="module_header">\s*<h2>([^<]+)</h2>\s*</header>',
+ r'<h2 class="video-title" itemprop="name">\s*(.+?)</h2>',
+ r'<div[^>]+id="veeseoTitle"[^>]*>(.+?)</div>',
]
_DESCRIPTION_REGEXES = [
r'<p itemprop="description">\s*(.+?)</p>',
r'<div class="videoDecription">\s*<p><strong>Beschreibung</strong>: (.+?)</p>',
r'<div class="g-plusone" data-size="medium"></div>\s*</div>\s*</header>\s*(.+?)\s*<footer>',
r'<p class="att-description">\s*(.+?)\s*</p>',
+ r'<p class="video-description" itemprop="description">\s*(.+?)</p>',
+ r'<div[^>]+id="veeseoDescription"[^>]*>(.+?)</div>',
]
_UPLOAD_DATE_REGEXES = [
r'<meta property="og:published_time" content="(.+?)">',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/shahid.py new/youtube-dl/youtube_dl/extractor/shahid.py
--- old/youtube-dl/youtube_dl/extractor/shahid.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/shahid.py 2016-01-14 10:35:12.000000000 +0100
@@ -73,6 +73,9 @@
'https://shahid.mbc.net/arContent/getPlayerContent-param-.id-%s.type-%s.html'
% (video_id, api_vars['type']), video_id, 'Downloading player JSON')
+ if player.get('drm'):
+ raise ExtractorError('This video is DRM protected.', expected=True)
+
formats = self._extract_m3u8_formats(player['url'], video_id, 'mp4')
video = self._download_json(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/testurl.py new/youtube-dl/youtube_dl/extractor/testurl.py
--- old/youtube-dl/youtube_dl/extractor/testurl.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/testurl.py 2016-01-14 10:35:12.000000000 +0100
@@ -7,7 +7,7 @@
class TestURLIE(InfoExtractor):
- """ Allows adressing of the test cases as test:yout.*be_1 """
+ """ Allows addressing of the test cases as test:yout.*be_1 """
IE_DESC = False # Do not list
_VALID_URL = r'test(?:url)?:(?P<id>(?P<extractor>.+?)(?:_(?P<num>[0-9]+))?)$'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/theplatform.py new/youtube-dl/youtube_dl/extractor/theplatform.py
--- old/youtube-dl/youtube_dl/extractor/theplatform.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/theplatform.py 2016-01-15 19:42:53.000000000 +0100
@@ -85,7 +85,7 @@
class ThePlatformIE(ThePlatformBaseIE):
_VALID_URL = r'''(?x)
(?:https?://(?:link|player)\.theplatform\.com/[sp]/(?P<provider_id>[^/]+)/
- (?:(?P<media>(?:[^/]+/)+select/media/)|(?P<config>(?:[^/\?]+/(?:swf|config)|onsite)/select/))?
+ (?:(?P<media>(?:(?:[^/]+/)+select/)?media/)|(?P<config>(?:[^/\?]+/(?:swf|config)|onsite)/select/))?
|theplatform:)(?P<id>[^/\?&]+)'''
_TESTS = [{
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/tudou.py new/youtube-dl/youtube_dl/extractor/tudou.py
--- old/youtube-dl/youtube_dl/extractor/tudou.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/tudou.py 2016-01-14 10:35:12.000000000 +0100
@@ -4,10 +4,16 @@
from .common import InfoExtractor
from ..compat import compat_str
+from ..utils import (
+ int_or_none,
+ float_or_none,
+ unescapeHTML,
+)
class TudouIE(InfoExtractor):
- _VALID_URL = r'https?://(?:www\.)?tudou\.com/(?:listplay|programs(?:/view)?|albumplay)/([^/]+/)*(?P<id>[^/?#]+?)(?:\.html)?/?(?:$|[?#])'
+ IE_NAME = 'tudou'
+ _VALID_URL = r'https?://(?:www\.)?tudou\.com/(?:(?:programs|wlplay)/view|(?:listplay|albumplay)/[\w-]{11})/(?P<id>[\w-]{11})'
_TESTS = [{
'url': 'http://www.tudou.com/listplay/zzdE77v6Mmo/2xN2duXMxmw.html',
'md5': '140a49ed444bd22f93330985d8475fcb',
@@ -16,6 +22,11 @@
'ext': 'f4v',
'title': '卡马乔国足开大脚长传冲吊集锦',
'thumbnail': 're:^https?://.*\.jpg$',
+ 'timestamp': 1372113489000,
+ 'description': '卡马乔卡家军,开大脚先进战术不完全集锦!',
+ 'duration': 289.04,
+ 'view_count': int,
+ 'filesize': int,
}
}, {
'url': 'http://www.tudou.com/programs/view/ajX3gyhL0pc/',
@@ -24,10 +35,12 @@
'ext': 'f4v',
'title': 'La Sylphide-Bolshoi-Ekaterina Krysanova & Vyacheslav Lopatin 2012',
'thumbnail': 're:^https?://.*\.jpg$',
+ 'timestamp': 1349207518000,
+ 'description': 'md5:294612423894260f2dcd5c6c04fe248b',
+ 'duration': 5478.33,
+ 'view_count': int,
+ 'filesize': int,
}
- }, {
- 'url': 'http://www.tudou.com/albumplay/cJAHGih4yYg.html',
- 'only_matching': True,
}]
_PLAYER_URL = 'http://js.tudouui.com/bin/lingtong/PortalPlayer_177.swf'
@@ -42,24 +55,20 @@
def _real_extract(self, url):
video_id = self._match_id(url)
- webpage = self._download_webpage(url, video_id)
+ item_data = self._download_json(
+ 'http://www.tudou.com/tvp/getItemInfo.action?ic=%s' % video_id, video_id)
- youku_vcode = self._search_regex(
- r'vcode\s*:\s*[\'"]([^\'"]*)[\'"]', webpage, 'youku vcode', default=None)
+ youku_vcode = item_data.get('vcode')
if youku_vcode:
return self.url_result('youku:' + youku_vcode, ie='Youku')
- title = self._search_regex(
- r',kw\s*:\s*[\'"]([^\'"]+)[\'"]', webpage, 'title')
- thumbnail_url = self._search_regex(
- r',pic\s*:\s*[\'"]([^\'"]+)[\'"]', webpage, 'thumbnail URL', fatal=False)
-
- player_url = self._search_regex(
- r'playerUrl\s*:\s*[\'"]([^\'"]+\.swf)[\'"]',
- webpage, 'player URL', default=self._PLAYER_URL)
+ title = unescapeHTML(item_data['kw'])
+ description = item_data.get('desc')
+ thumbnail_url = item_data.get('pic')
+ view_count = int_or_none(item_data.get('playTimes'))
+ timestamp = int_or_none(item_data.get('pt'))
- segments = self._parse_json(self._search_regex(
- r'segs: \'([^\']+)\'', webpage, 'segments'), video_id)
+ segments = self._parse_json(item_data['itemSegs'], video_id)
# It looks like the keys are the arguments that have to be passed as
# the hd field in the request url, we pick the higher
# Also, filter non-number qualities (see issue #3643).
@@ -80,8 +89,13 @@
'ext': ext,
'title': title,
'thumbnail': thumbnail_url,
+ 'description': description,
+ 'view_count': view_count,
+ 'timestamp': timestamp,
+ 'duration': float_or_none(part.get('seconds'), 1000),
+ 'filesize': int_or_none(part.get('size')),
'http_headers': {
- 'Referer': player_url,
+ 'Referer': self._PLAYER_URL,
},
}
result.append(part_info)
@@ -92,3 +106,47 @@
'id': video_id,
'title': title,
}
+
+
+class TudouPlaylistIE(InfoExtractor):
+ IE_NAME = 'tudou:playlist'
+ _VALID_URL = r'https?://(?:www\.)?tudou\.com/listplay/(?P<id>[\w-]{11})\.html'
+ _TESTS = [{
+ 'url': 'http://www.tudou.com/listplay/zzdE77v6Mmo.html',
+ 'info_dict': {
+ 'id': 'zzdE77v6Mmo',
+ },
+ 'playlist_mincount': 209,
+ }]
+
+ def _real_extract(self, url):
+ playlist_id = self._match_id(url)
+ playlist_data = self._download_json(
+ 'http://www.tudou.com/tvp/plist.action?lcode=%s' % playlist_id, playlist_id)
+ entries = [self.url_result(
+ 'http://www.tudou.com/programs/view/%s' % item['icode'],
+ 'Tudou', item['icode'],
+ item['kw']) for item in playlist_data['items']]
+ return self.playlist_result(entries, playlist_id)
+
+
+class TudouAlbumIE(InfoExtractor):
+ IE_NAME = 'tudou:album'
+ _VALID_URL = r'https?://(?:www\.)?tudou\.com/album(?:cover|play)/(?P<id>[\w-]{11})'
+ _TESTS = [{
+ 'url': 'http://www.tudou.com/albumplay/v5qckFJvNJg.html',
+ 'info_dict': {
+ 'id': 'v5qckFJvNJg',
+ },
+ 'playlist_mincount': 45,
+ }]
+
+ def _real_extract(self, url):
+ album_id = self._match_id(url)
+ album_data = self._download_json(
+ 'http://www.tudou.com/tvp/alist.action?acode=%s' % album_id, album_id)
+ entries = [self.url_result(
+ 'http://www.tudou.com/programs/view/%s' % item['icode'],
+ 'Tudou', item['icode'],
+ item['kw']) for item in album_data['items']]
+ return self.playlist_result(entries, album_id)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/tv4.py new/youtube-dl/youtube_dl/extractor/tv4.py
--- old/youtube-dl/youtube_dl/extractor/tv4.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/tv4.py 2016-01-14 10:35:12.000000000 +0100
@@ -67,7 +67,7 @@
info = self._download_json(
'http://www.tv4play.se/player/assets/%s.json' % video_id, video_id, 'Downloading video info JSON')
- # If is_geo_restricted is true, it doesn't neceserally mean we can't download it
+ # If is_geo_restricted is true, it doesn't necessarily mean we can't download it
if info['is_geo_restricted']:
self.report_warning('This content might not be available in your country due to licensing restrictions.')
if info['requires_subscription']:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/unistra.py new/youtube-dl/youtube_dl/extractor/unistra.py
--- old/youtube-dl/youtube_dl/extractor/unistra.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/unistra.py 2016-01-15 19:42:53.000000000 +0100
@@ -38,7 +38,7 @@
webpage = self._download_webpage(url, video_id)
- files = set(re.findall(r'file\s*:\s*"([^"]+)"', webpage))
+ files = set(re.findall(r'file\s*:\s*"(/[^"]+)"', webpage))
quality = qualities(['SD', 'HD'])
formats = []
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/videomega.py new/youtube-dl/youtube_dl/extractor/videomega.py
--- old/youtube-dl/youtube_dl/extractor/videomega.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/videomega.py 2016-01-14 10:35:12.000000000 +0100
@@ -8,6 +8,7 @@
class VideoMegaIE(InfoExtractor):
+ _WORKING = False
_VALID_URL = r'(?:videomega:|https?://(?:www\.)?videomega\.tv/(?:(?:view|iframe|cdn)\.php)?\?ref=)(?P<id>[A-Za-z0-9]+)'
_TESTS = [{
'url': 'http://videomega.tv/cdn.php?ref=AOSQBJYKIDDIKYJBQSOA',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/videomore.py new/youtube-dl/youtube_dl/extractor/videomore.py
--- old/youtube-dl/youtube_dl/extractor/videomore.py 2016-01-09 01:15:59.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/videomore.py 2016-01-14 10:35:12.000000000 +0100
@@ -170,7 +170,7 @@
'skip_download': True,
},
}, {
- # season single serie with og:video:iframe
+ # season single series with og:video:iframe
'url': 'http://videomore.ru/poslednii_ment/1_sezon/14_seriya',
'only_matching': True,
}, {
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/videott.py new/youtube-dl/youtube_dl/extractor/videott.py
--- old/youtube-dl/youtube_dl/extractor/videott.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/videott.py 2016-01-14 10:35:12.000000000 +0100
@@ -11,6 +11,7 @@
class VideoTtIE(InfoExtractor):
+ _WORKING = False
ID_NAME = 'video.tt'
IE_DESC = 'video.tt - Your True Tube'
_VALID_URL = r'http://(?:www\.)?video\.tt/(?:(?:video|embed)/|watch_video\.php\?v=)(?P<id>[\da-zA-Z]{9})'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/vodlocker.py new/youtube-dl/youtube_dl/extractor/vodlocker.py
--- old/youtube-dl/youtube_dl/extractor/vodlocker.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/vodlocker.py 2016-01-15 19:42:53.000000000 +0100
@@ -5,12 +5,13 @@
from ..compat import compat_urllib_parse
from ..utils import (
ExtractorError,
+ NO_DEFAULT,
sanitized_Request,
)
class VodlockerIE(InfoExtractor):
- _VALID_URL = r'https?://(?:www\.)?vodlocker\.com/(?:embed-)?(?P<id>[0-9a-zA-Z]+)(?:\..*?)?'
+ _VALID_URL = r'https?://(?:www\.)?vodlocker\.(?:com|city)/(?:embed-)?(?P<id>[0-9a-zA-Z]+)(?:\..*?)?'
_TESTS = [{
'url': 'http://vodlocker.com/e8wvyzz4sl42',
@@ -43,16 +44,31 @@
webpage = self._download_webpage(
req, video_id, 'Downloading video page')
+ def extract_file_url(html, default=NO_DEFAULT):
+ return self._search_regex(
+ r'file:\s*"(http[^\"]+)",', html, 'file url', default=default)
+
+ video_url = extract_file_url(webpage, default=None)
+
+ if not video_url:
+ embed_url = self._search_regex(
+ r'<iframe[^>]+src=(["\'])(?P<url>(?:https?://)?vodlocker\.(?:com|city)/embed-.+?)\1',
+ webpage, 'embed url', group='url')
+ embed_webpage = self._download_webpage(
+ embed_url, video_id, 'Downloading embed webpage')
+ video_url = extract_file_url(embed_webpage)
+ thumbnail_webpage = embed_webpage
+ else:
+ thumbnail_webpage = webpage
+
title = self._search_regex(
r'id="file_title".*?>\s*(.*?)\s*<(?:br|span)', webpage, 'title')
thumbnail = self._search_regex(
- r'image:\s*"(http[^\"]+)",', webpage, 'thumbnail')
- url = self._search_regex(
- r'file:\s*"(http[^\"]+)",', webpage, 'file url')
+ r'image:\s*"(http[^\"]+)",', thumbnail_webpage, 'thumbnail', fatal=False)
formats = [{
'format_id': 'sd',
- 'url': url,
+ 'url': video_url,
}]
return {
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/xhamster.py new/youtube-dl/youtube_dl/extractor/xhamster.py
--- old/youtube-dl/youtube_dl/extractor/xhamster.py 2016-01-09 01:15:59.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/xhamster.py 2016-01-14 10:35:12.000000000 +0100
@@ -6,7 +6,6 @@
from ..utils import (
float_or_none,
int_or_none,
- str_to_int,
unified_strdate,
)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/xtube.py new/youtube-dl/youtube_dl/extractor/xtube.py
--- old/youtube-dl/youtube_dl/extractor/xtube.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/xtube.py 2016-01-15 19:42:53.000000000 +0100
@@ -1,10 +1,12 @@
from __future__ import unicode_literals
+import itertools
import re
from .common import InfoExtractor
from ..compat import compat_urllib_parse_unquote
from ..utils import (
+ int_or_none,
parse_duration,
sanitized_Request,
str_to_int,
@@ -12,7 +14,7 @@
class XTubeIE(InfoExtractor):
- _VALID_URL = r'https?://(?:www\.)?(?P<url>xtube\.com/watch\.php\?v=(?P<id>[^/?&#]+))'
+ _VALID_URL = r'(?:xtube:|https?://(?:www\.)?xtube\.com/watch\.php\?.*\bv=)(?P<id>[^/?&#]+)'
_TEST = {
'url': 'http://www.xtube.com/watch.php?v=kVTUy_G222_',
'md5': '092fbdd3cbe292c920ef6fc6a8a9cdab',
@@ -30,7 +32,7 @@
def _real_extract(self, url):
video_id = self._match_id(url)
- req = sanitized_Request(url)
+ req = sanitized_Request('http://www.xtube.com/watch.php?v=%s' % video_id)
req.add_header('Cookie', 'age_verified=1')
webpage = self._download_webpage(req, video_id)
@@ -88,45 +90,43 @@
class XTubeUserIE(InfoExtractor):
IE_DESC = 'XTube user profile'
- _VALID_URL = r'https?://(?:www\.)?xtube\.com/community/profile\.php\?(.*?)user=(?P<username>[^&#]+)(?:$|[&#])'
+ _VALID_URL = r'https?://(?:www\.)?xtube\.com/profile/(?P<id>[^/]+-\d+)'
_TEST = {
- 'url': 'http://www.xtube.com/community/profile.php?user=greenshowers',
+ 'url': 'http://www.xtube.com/profile/greenshowers-4056496',
'info_dict': {
- 'id': 'greenshowers',
+ 'id': 'greenshowers-4056496',
'age_limit': 18,
},
'playlist_mincount': 155,
}
def _real_extract(self, url):
- mobj = re.match(self._VALID_URL, url)
- username = mobj.group('username')
+ user_id = self._match_id(url)
- profile_page = self._download_webpage(
- url, username, note='Retrieving profile page')
-
- video_count = int(self._search_regex(
- r'<strong>%s\'s Videos \(([0-9]+)\)</strong>' % username, profile_page,
- 'video count'))
-
- PAGE_SIZE = 25
- urls = []
- page_count = (video_count + PAGE_SIZE + 1) // PAGE_SIZE
- for n in range(1, page_count + 1):
- lpage_url = 'http://www.xtube.com/user_videos.php?page=%d&u=%s' % (n, username)
- lpage = self._download_webpage(
- lpage_url, username,
- note='Downloading page %d/%d' % (n, page_count))
- urls.extend(
- re.findall(r'addthis:url="([^"]+)"', lpage))
-
- return {
- '_type': 'playlist',
- 'id': username,
- 'age_limit': 18,
- 'entries': [{
- '_type': 'url',
- 'url': eurl,
- 'ie_key': 'XTube',
- } for eurl in urls]
- }
+ entries = []
+ for pagenum in itertools.count(1):
+ request = sanitized_Request(
+ 'http://www.xtube.com/profile/%s/videos/%d' % (user_id, pagenum),
+ headers={
+ 'Cookie': 'popunder=4',
+ 'X-Requested-With': 'XMLHttpRequest',
+ 'Referer': url,
+ })
+
+ page = self._download_json(
+ request, user_id, 'Downloading videos JSON page %d' % pagenum)
+
+ html = page.get('html')
+ if not html:
+ break
+
+ for _, video_id in re.findall(r'data-plid=(["\'])(.+?)\1', html):
+ entries.append(self.url_result('xtube:%s' % video_id, XTubeIE.ie_key()))
+
+ page_count = int_or_none(page.get('pageCount'))
+ if not page_count or pagenum == page_count:
+ break
+
+ playlist = self.playlist_result(entries, user_id)
+ playlist['age_limit'] = 18
+ return playlist
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/extractor/youtube.py new/youtube-dl/youtube_dl/extractor/youtube.py
--- old/youtube-dl/youtube_dl/extractor/youtube.py 2016-01-09 01:15:59.000000000 +0100
+++ new/youtube-dl/youtube_dl/extractor/youtube.py 2016-01-14 10:35:12.000000000 +0100
@@ -1487,7 +1487,7 @@
if codecs:
codecs = codecs.split(',')
if len(codecs) == 2:
- acodec, vcodec = codecs[0], codecs[1]
+ acodec, vcodec = codecs[1], codecs[0]
else:
acodec, vcodec = (codecs[0], 'none') if kind == 'audio' else ('none', codecs[0])
dct.update({
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/swfinterp.py new/youtube-dl/youtube_dl/swfinterp.py
--- old/youtube-dl/youtube_dl/swfinterp.py 2015-12-30 20:30:33.000000000 +0100
+++ new/youtube-dl/youtube_dl/swfinterp.py 2016-01-14 10:35:12.000000000 +0100
@@ -689,7 +689,7 @@
elif mname in _builtin_classes:
res = _builtin_classes[mname]
else:
- # Assume unitialized
+ # Assume uninitialized
# TODO warn here
res = undefined
stack.append(res)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/utils.py new/youtube-dl/youtube_dl/utils.py
--- old/youtube-dl/youtube_dl/utils.py 2016-01-09 01:15:59.000000000 +0100
+++ new/youtube-dl/youtube_dl/utils.py 2016-01-14 10:35:12.000000000 +0100
@@ -984,7 +984,7 @@
if sign == '-':
time = -time
unit = match.group('unit')
- # A bad aproximation?
+ # A bad approximation?
if unit == 'month':
unit = 'day'
time *= 30
@@ -1307,7 +1307,7 @@
if s is None:
return None
- # The lower-case forms are of course incorrect and inofficial,
+ # The lower-case forms are of course incorrect and unofficial,
# but we support those too
_UNIT_TABLE = {
'B': 1,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/youtube-dl/youtube_dl/version.py new/youtube-dl/youtube_dl/version.py
--- old/youtube-dl/youtube_dl/version.py 2016-01-09 01:16:07.000000000 +0100
+++ new/youtube-dl/youtube_dl/version.py 2016-01-15 19:43:03.000000000 +0100
@@ -1,3 +1,3 @@
from __future__ import unicode_literals
-__version__ = '2016.01.09'
+__version__ = '2016.01.15'
1
0
Hello community,
here is the log from the commit of package erlang-rebar for openSUSE:Factory checked in at 2016-01-20 09:55:08
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/erlang-rebar (Old)
and /work/SRC/openSUSE:Factory/.erlang-rebar.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "erlang-rebar"
Changes:
--------
--- /work/SRC/openSUSE:Factory/erlang-rebar/erlang-rebar-obs.changes 2015-10-30 13:42:56.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.erlang-rebar.new/erlang-rebar-obs.changes 2016-01-20 09:55:18.000000000 +0100
@@ -1,0 +2,14 @@
+Sun Jan 17 17:25:39 UTC 2016 - matwey.kornilov(a)gmail.com
+
+- Update to version 2.6.1+git20160115.c534f8a:
+ + fprof: document Cachegrind support
+ + fprof: further document Cachegrind support
+ + Add qualified name tests docs (see pr #119)
+ + Fix windows eunit tests
+ + Lock retest dependency
+ + Allow windows release to use etc conf dir
+ + xref: fix dialyzer warning introduced in 69802f63120
+ + Provide additional debug logging on relup generation
+ + Add the latest OTP 18 version to the Travis build
+
+-------------------------------------------------------------------
erlang-rebar.changes: same change
Old:
----
rebar-2.6.1+git20151013.5d73a8d.tar.bz2
New:
----
rebar-2.6.1+git20160115.c534f8a.tar.bz2
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ erlang-rebar-obs.spec ++++++
--- /var/tmp/diff_new_pack.Eqnsiz/_old 2016-01-20 09:55:19.000000000 +0100
+++ /var/tmp/diff_new_pack.Eqnsiz/_new 2016-01-20 09:55:19.000000000 +0100
@@ -1,7 +1,7 @@
#
# spec file for package erlang-rebar-obs
#
-# Copyright (c) 2015 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -19,7 +19,7 @@
%define obs 1
Name: erlang-rebar-obs
-Version: 2.6.1+git20151013.5d73a8d
+Version: 2.6.1+git20160115.c534f8a
Release: 0
%define mod_ver %(echo "%{version}" | cut -d "+" -f1)
Summary: A sophisticated build-tool for Erlang projects that follows OTP principles
++++++ erlang-rebar.spec ++++++
--- /var/tmp/diff_new_pack.Eqnsiz/_old 2016-01-20 09:55:19.000000000 +0100
+++ /var/tmp/diff_new_pack.Eqnsiz/_new 2016-01-20 09:55:19.000000000 +0100
@@ -1,7 +1,7 @@
#
# spec file for package erlang-rebar
#
-# Copyright (c) 2015 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -19,7 +19,7 @@
%define obs 0
Name: erlang-rebar
-Version: 2.6.1+git20151013.5d73a8d
+Version: 2.6.1+git20160115.c534f8a
Release: 0
%define mod_ver %(echo "%{version}" | cut -d "+" -f1)
Summary: A sophisticated build-tool for Erlang projects that follows OTP principles
++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.Eqnsiz/_old 2016-01-20 09:55:19.000000000 +0100
+++ /var/tmp/diff_new_pack.Eqnsiz/_new 2016-01-20 09:55:19.000000000 +0100
@@ -1,4 +1,4 @@
<servicedata>
<service name="tar_scm">
<param name="url">git://github.com/rebar/rebar.git</param>
- <param name="changesrevision">5d73a8d8e7c9d56231970ab775231a7a87bcd14a</param></service></servicedata>
\ No newline at end of file
+ <param name="changesrevision">c534f8a903c10bf330c0b819fd75e1495a7e1d39</param></service></servicedata>
\ No newline at end of file
++++++ no-rebar-deps.patch ++++++
--- /var/tmp/diff_new_pack.Eqnsiz/_old 2016-01-20 09:55:19.000000000 +0100
+++ /var/tmp/diff_new_pack.Eqnsiz/_new 2016-01-20 09:55:19.000000000 +0100
@@ -1,7 +1,7 @@
-Index: rebar-2.6.1+git20150928.365ac64/Makefile
+Index: rebar-2.6.1+git20160115.c534f8a/Makefile
===================================================================
---- rebar-2.6.1+git20150928.365ac64.orig/Makefile
-+++ rebar-2.6.1+git20150928.365ac64/Makefile
+--- rebar-2.6.1+git20160115.c534f8a.orig/Makefile
++++ rebar-2.6.1+git20160115.c534f8a/Makefile
@@ -1,7 +1,7 @@
.PHONY: clean xref_warnings deps test test_eunit test_inttest
@@ -19,15 +19,15 @@
test: test_eunit test_inttest
-Index: rebar-2.6.1+git20150928.365ac64/rebar.config.script
+Index: rebar-2.6.1+git20160115.c534f8a/rebar.config.script
===================================================================
---- rebar-2.6.1+git20150928.365ac64.orig/rebar.config.script
-+++ rebar-2.6.1+git20150928.365ac64/rebar.config.script
+--- rebar-2.6.1+git20160115.c534f8a.orig/rebar.config.script
++++ rebar-2.6.1+git20160115.c534f8a/rebar.config.script
@@ -1,7 +1,7 @@
%% -*- mode: erlang;erlang-indent-level: 4;indent-tabs-mode: nil -*-
%% ex: ts=4 sw=4 ft=erlang et
--ExtraDeps = [{retest, ".*", {git, "git://github.com/dizzyd/retest.git"}}],
+-ExtraDeps = [{retest, ".*", {git, "git://github.com/dizzyd/retest.git", {tag, "4590941a"}}}],
+ExtraDeps = [],
case os:getenv("REBAR_EXTRA_DEPS") of
++++++ rebar-2.6.1+git20151013.5d73a8d.tar.bz2 -> rebar-2.6.1+git20160115.c534f8a.tar.bz2 ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/.travis.yml new/rebar-2.6.1+git20160115.c534f8a/.travis.yml
--- old/rebar-2.6.1+git20151013.5d73a8d/.travis.yml 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/.travis.yml 2016-01-17 18:25:38.000000000 +0100
@@ -9,6 +9,7 @@
- R14B04
- R14B03
- 17.0
+ - 18.0
before_script:
- hostname -f
- cc -v
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/THANKS new/rebar-2.6.1+git20160115.c534f8a/THANKS
--- old/rebar-2.6.1+git20151013.5d73a8d/THANKS 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/THANKS 2016-01-17 18:25:38.000000000 +0100
@@ -145,3 +145,4 @@
Tony Rogvall
Andrey Teplyashin
Duncan McGreggor
+Sebastien Serre
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/priv/templates/simplenode.windows.runner.cmd new/rebar-2.6.1+git20160115.c534f8a/priv/templates/simplenode.windows.runner.cmd
--- old/rebar-2.6.1+git20151013.5d73a8d/priv/templates/simplenode.windows.runner.cmd 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/priv/templates/simplenode.windows.runner.cmd 2016-01-17 18:25:38.000000000 +0100
@@ -17,8 +17,18 @@
@call :set_trim release_version %%J
)
-@set vm_args=%releases_dir%\%release_version%\vm.args
-@set sys_config=%releases_dir%\%release_version%\sys.config
+@if exist "%releases_dir%\%release_version%\vm.args" (
+ @set vm_args="%releases_dir%\%release_version%\vm.args"
+) else (
+ @set vm_args="%node_root%\etc\vm.args"
+)
+
+@if exist "%releases_dir%\%release_version%\sys.config" (
+ @set sys_config="%releases_dir%\%release_version%\sys.config"
+) else (
+ @set sys_config="%node_root%\etc\app.config"
+)
+
@set node_boot_script=%releases_dir%\%release_version%\%node_name%
@set clean_boot_script=%releases_dir%\%release_version%\start_clean
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/rebar.config.script new/rebar-2.6.1+git20160115.c534f8a/rebar.config.script
--- old/rebar-2.6.1+git20151013.5d73a8d/rebar.config.script 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/rebar.config.script 2016-01-17 18:25:38.000000000 +0100
@@ -1,7 +1,7 @@
%% -*- mode: erlang;erlang-indent-level: 4;indent-tabs-mode: nil -*-
%% ex: ts=4 sw=4 ft=erlang et
-ExtraDeps = [{retest, ".*", {git, "git://github.com/dizzyd/retest.git"}}],
+ExtraDeps = [{retest, ".*", {git, "git://github.com/dizzyd/retest.git", {tag, "4590941a"}}}],
case os:getenv("REBAR_EXTRA_DEPS") of
false ->
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/src/rebar.erl new/rebar-2.6.1+git20160115.c534f8a/src/rebar.erl
--- old/rebar-2.6.1+git20151013.5d73a8d/src/rebar.erl 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/src/rebar.erl 2016-01-17 18:25:38.000000000 +0100
@@ -501,7 +501,9 @@
{profile, $p, "profile", undefined,
"Profile this run of rebar. Via profiler= you can optionally select "
"either fprof (default) or eflame. The result can be found in "
- "fprof.analysis or eflame.svg."},
+ "fprof.analysis or eflame.svg. Additionally, in fprof mode, if "
+ "erlgrind can be found in $PATH, a Cachegrind file (fprof.cgrind) "
+ "will be generated as well."},
{keep_going, $k, "keep-going", undefined,
"Keep running after a command fails"},
{recursive, $r, "recursive", boolean,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/src/rebar_eunit.erl new/rebar-2.6.1+git20160115.c534f8a/src/rebar_eunit.erl
--- old/rebar-2.6.1+git20151013.5d73a8d/src/rebar_eunit.erl 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/src/rebar_eunit.erl 2016-01-17 18:25:38.000000000 +0100
@@ -128,6 +128,7 @@
" name starts with bar and, if no such test exists,~n"
" run the test whose name starts with bar in the~n"
" suite's _tests module)~n"
+ " test[s]=\"foo:bar_test\" (Run bar_test located in module foo)~n"
" random_suite_order=true (Run tests in random order)~n"
" random_suite_order=Seed (Run tests in random order,~n"
" with the PRNG seeded with Seed)~n"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/src/rebar_file_utils.erl new/rebar-2.6.1+git20160115.c534f8a/src/rebar_file_utils.erl
--- old/rebar-2.6.1+git20151013.5d73a8d/src/rebar_file_utils.erl 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/src/rebar_file_utils.erl 2016-01-17 18:25:38.000000000 +0100
@@ -88,7 +88,7 @@
?FMT("move /y \"~s\" \"~s\" 1> nul",
[filename:nativename(Source),
filename:nativename(Dest)]),
- [{use_stdout, false}, return_on_error]),
+ [{use_stdout, false}, abort_on_error]),
case R of
[] ->
ok;
@@ -131,14 +131,14 @@
delete_each_dir_win32([Dir | Rest]) ->
{ok, []} = rebar_utils:sh(?FMT("rd /q /s \"~s\"",
[filename:nativename(Dir)]),
- [{use_stdout, false}, return_on_error]),
+ [{use_stdout, false}, abort_on_error]),
delete_each_dir_win32(Rest).
xcopy_win32(Source,Dest)->
{ok, R} = rebar_utils:sh(
?FMT("xcopy \"~s\" \"~s\" /q /y /e 2> nul",
[filename:nativename(Source), filename:nativename(Dest)]),
- [{use_stdout, false}, return_on_error]),
+ [{use_stdout, false}, abort_on_error]),
case length(R) > 0 of
%% when xcopy fails, stdout is empty and and error message is printed
%% to stderr (which is redirected to nul)
@@ -162,8 +162,10 @@
cp_r_win32(S, {false, filename:join(DestDir, filename:basename(Source))});
cp_r_win32({false, Source},{false, Dest}) ->
%% from file to file
- {ok,_} = file:copy(Source, Dest),
- ok;
+ case file:copy(Source, Dest) of
+ {ok,_} -> ok;
+ _ -> throw(rebar_abort)
+ end;
cp_r_win32({true, SourceDir}, {false, DestDir}) ->
case filelib:is_regular(DestDir) of
true ->
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/src/rebar_upgrade.erl new/rebar-2.6.1+git20160115.c534f8a/src/rebar_upgrade.erl
--- old/rebar-2.6.1+git20151013.5d73a8d/src/rebar_upgrade.erl 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/src/rebar_upgrade.erl 2016-01-17 18:25:38.000000000 +0100
@@ -44,18 +44,24 @@
'generate-upgrade'(Config0, ReltoolFile) ->
%% Get the old release path
{Config, ReltoolConfig} = rebar_rel_utils:load_config(Config0, ReltoolFile),
+ ?DEBUG("reltool.config: ~p~n", [ReltoolConfig]),
TargetParentDir = rebar_rel_utils:get_target_parent_dir(Config,
ReltoolConfig),
TargetDir = rebar_rel_utils:get_target_dir(Config, ReltoolConfig),
+ ?DEBUG("target dir: ~p~n", [TargetDir]),
PrevRelPath = rebar_rel_utils:get_previous_release_path(Config),
OldVerPath = filename:join([TargetParentDir, PrevRelPath]),
+ ?DEBUG("old version path: ~p~n", [OldVerPath]),
%% Run checks to make sure that building a package is possible
{NewVerPath, NewName, NewVer, OldVer} = run_checks(Config, OldVerPath,
ReltoolConfig),
+ ?DEBUG("old version: ~p~n", [OldVer]),
NameVer = NewName ++ "_" ++ NewVer,
OldRelName = get_old_rel_name(OldVerPath, OldVer, NewName),
+ ?DEBUG("new version path: ~p~n", [NewVerPath]),
+ ?DEBUG("old version: ~p~n", [NewVer]),
%% Save the code path prior to doing anything
OrigPath = code:get_path(),
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/src/rebar_xref.erl new/rebar-2.6.1+git20160115.c534f8a/src/rebar_xref.erl
--- old/rebar-2.6.1+git20151013.5d73a8d/src/rebar_xref.erl 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/src/rebar_xref.erl 2016-01-17 18:25:38.000000000 +0100
@@ -60,9 +60,9 @@
true = code:add_path(rebar_utils:ebin_dir()),
%% Add extra paths to code path to, for example, be used
- %% when behaviour modules are defined
- [code:add_path(Path)
- || Path <- rebar_config:get(Config, xref_extra_paths, [])],
+ %% when behaviour modules are defined.
+ lists:foreach(fun(P) -> true = code:add_path(P) end,
+ rebar_config:get(Config, xref_extra_paths, [])),
%% Get list of xref checks we want to run
ConfXrefChecks = rebar_config:get(Config, xref_checks,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/test/rebar_compiler_tests.erl new/rebar-2.6.1+git20160115.c534f8a/test/rebar_compiler_tests.erl
--- old/rebar-2.6.1+git20151013.5d73a8d/test/rebar_compiler_tests.erl 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/test/rebar_compiler_tests.erl 2016-01-17 18:25:38.000000000 +0100
@@ -84,11 +84,11 @@
setup,
fun() ->
setup_basic_project(),
- setup_rebar_config(),
- rebar("compile")
+ setup_rebar_config()
end,
fun teardown/1,
- fun(RebarOut)->
+ fun()->
+ RebarOut = rebar("compile"),
[
{"Exit after error",
?_assert(string:str(RebarOut, "ERROR: compile failed") =/= 0)}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/test/rebar_eunit_tests.erl new/rebar-2.6.1+git20160115.c534f8a/test/rebar_eunit_tests.erl
--- old/rebar-2.6.1+git20151013.5d73a8d/test/rebar_eunit_tests.erl 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/test/rebar_eunit_tests.erl 2016-01-17 18:25:38.000000000 +0100
@@ -56,7 +56,7 @@
?_assert(string:str(RebarOut, "myapp_mymod:") =/= 0)},
{"Tests are only run once",
- ?_assert(string:str(RebarOut, "All 2 tests passed") =/= 0)}]
+ ?_assert(string:str(RebarOut, "2 tests passed") =/= 0)}]
end}.
eunit_with_suites_and_tests_test_() ->
@@ -80,7 +80,7 @@
?_assert(string:str(RebarOut, "myapp_mymod:") =:= 0)},
{"Selected suite tests are only run once",
- ?_assert(string:str(RebarOut, "All 4 tests passed") =/= 0)}]
+ ?_assert(string:str(RebarOut, "4 tests passed") =/= 0)}]
end},
{"Ensure EUnit runs selected _tests suites",
setup, fun() ->
@@ -102,7 +102,7 @@
?_assert(string:str(RebarOut, "myapp_mymod:") =:= 0)},
{"Selected suite tests are only run once",
- ?_assert(string:str(RebarOut, "All 2 tests passed") =/= 0)}]
+ ?_assert(string:str(RebarOut, "2 tests passed") =/= 0)}]
end},
{"Ensure EUnit runs a specific test defined in a selected suite",
setup, fun() ->
@@ -154,7 +154,7 @@
"myapp_mymod2_tests:myfunc2_test/0") =/= 0)]},
{"Selected suite tests are run once",
- ?_assert(string:str(RebarOut, "All 3 tests passed") =/= 0)}]
+ ?_assert(string:str(RebarOut, "3 tests passed") =/= 0)}]
end},
{"Ensure EUnit runs specific test in a _tests suite",
setup,
@@ -190,7 +190,7 @@
=/= 0)]},
{"Selected suite tests is run once",
- ?_assert(string:str(RebarOut, "All 2 tests passed") =/= 0)}]
+ ?_assert(string:str(RebarOut, "2 tests passed") =/= 0)}]
end},
{"Ensure EUnit runs a specific test by qualified function name",
setup,
@@ -325,7 +325,11 @@
assert_rebar_runs() ->
prepare_rebar_script(),
- ?assert(string:str(os:cmd(filename:nativename("./" ++ ?TMP_DIR ++ "rebar")),
+ {ok, Cwd} = file:get_cwd(),
+ ok = file:set_cwd(?TMP_DIR),
+ RebarOut = os:cmd(filename:nativename("./rebar")),
+ ok = file:set_cwd(Cwd),
+ ?assert(string:str(RebarOut,
"No command to run specified!") =/= 0).
basic_setup_test_() ->
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/test/rebar_file_utils_tests.erl new/rebar-2.6.1+git20160115.c534f8a/test/rebar_file_utils_tests.erl
--- old/rebar-2.6.1+git20151013.5d73a8d/test/rebar_file_utils_tests.erl 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/test/rebar_file_utils_tests.erl 2016-01-17 18:25:38.000000000 +0100
@@ -36,7 +36,7 @@
-define(TMP_DIR, "tmp_file_utils").
--define(SRC, "source dir?").
+-define(SRC, "source dir").
-define(DST, "dest (dir)").
-define(FILE1, "file 1").
-define(FILE2, "file(2)").
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rebar-2.6.1+git20151013.5d73a8d/test/rebar_xref_eunit.erl new/rebar-2.6.1+git20160115.c534f8a/test/rebar_xref_eunit.erl
--- old/rebar-2.6.1+git20151013.5d73a8d/test/rebar_xref_eunit.erl 2015-10-15 10:29:57.000000000 +0200
+++ new/rebar-2.6.1+git20160115.c534f8a/test/rebar_xref_eunit.erl 2016-01-17 18:25:38.000000000 +0100
@@ -192,8 +192,8 @@
{unix, _} ->
[] = os:cmd("chmod u+x " ++ Rebar);
{win32, _} ->
- {ok, _} = file:copy(?REBAR_SCRIPT ++ ".bat",
- ?TMP_DIR ++ "rebar.bat")
+ {ok, _} = file:copy(?REBAR_SCRIPT ++ ".cmd",
+ ?TMP_DIR ++ "rebar.cmd")
end.
rebar() ->
++++++ spec.in ++++++
--- /var/tmp/diff_new_pack.Eqnsiz/_old 2016-01-20 09:55:20.000000000 +0100
+++ /var/tmp/diff_new_pack.Eqnsiz/_new 2016-01-20 09:55:20.000000000 +0100
@@ -19,7 +19,7 @@
%define obs OBS
Name: PKGNAME
-Version: 2.6.1+git20151013.5d73a8d
+Version: 2.6.1+git20160115.c534f8a
Release: 0
%define mod_ver %(echo "%{version}" | cut -d "+" -f1)
Summary: A sophisticated build-tool for Erlang projects that follows OTP principles
1
0
Hello community,
here is the log from the commit of package golang-packaging for openSUSE:Factory checked in at 2016-01-20 09:55:07
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/golang-packaging (Old)
and /work/SRC/openSUSE:Factory/.golang-packaging.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "golang-packaging"
Changes:
--------
--- /work/SRC/openSUSE:Factory/golang-packaging/golang-packaging.changes 2016-01-12 16:13:34.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.golang-packaging.new/golang-packaging.changes 2016-01-20 09:55:17.000000000 +0100
@@ -1,0 +2,7 @@
+Tue Jan 19 10:36:14 UTC 2016 - i(a)marguerite.su
+
+- update version 14.3
+ * fix: command not found error for go test on SLE
+ * fix #5 again: gsub importpath itself is not enough
+
+-------------------------------------------------------------------
Old:
----
golang-packaging-14.2.tar.gz
New:
----
golang-packaging-14.3.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ golang-packaging.spec ++++++
--- /var/tmp/diff_new_pack.2vOuos/_old 2016-01-20 09:55:18.000000000 +0100
+++ /var/tmp/diff_new_pack.2vOuos/_new 2016-01-20 09:55:18.000000000 +0100
@@ -17,7 +17,7 @@
Name: golang-packaging
-Version: 14.2
+Version: 14.3
Release: 0
Summary: A toolchain to help packaging golang
License: GPL-3.0
@@ -56,7 +56,7 @@
%files
%defattr(-,root,root)
-%doc COPYING README.md TODO ChangeLog
+%doc COPYING README.md ChangeLog
%config %{_sysconfdir}/rpm/macros.go
%if %{?suse_version} >= 1320
%{_prefix}/lib/rpm/fileattrs/golang.attr
++++++ golang-packaging-14.2.tar.gz -> golang-packaging-14.3.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/golang-packaging-14.2/ChangeLog new/golang-packaging-14.3/ChangeLog
--- old/golang-packaging-14.2/ChangeLog 2016-01-11 12:16:49.000000000 +0100
+++ new/golang-packaging-14.3/ChangeLog 2016-01-19 11:33:28.000000000 +0100
@@ -1,3 +1,16 @@
+== update version 14.3 ==
+
+ * fix: command not found error for go test on SLE
+ * fix #5 again: gsub importpath itself is not enough
+
+== update version 14.2 ==
+
+ * golang.req: fix ' // indirect' comment in import
+
+== update version 14.1 ==
+
+ * fix: uniq! returns nil if everything is unique
+
== update version 14 ==
* if importpath has "test/example", it should survive (github#5)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/golang-packaging-14.2/TODO new/golang-packaging-14.3/TODO
--- old/golang-packaging-14.2/TODO 2016-01-11 12:16:49.000000000 +0100
+++ new/golang-packaging-14.3/TODO 1970-01-01 01:00:00.000000000 +0100
@@ -1,3 +0,0 @@
-# TODO
-
-* multithreading
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/golang-packaging-14.2/golang/cli.rb new/golang-packaging-14.3/golang/cli.rb
--- old/golang-packaging-14.2/golang/cli.rb 2016-01-11 12:16:49.000000000 +0100
+++ new/golang-packaging-14.3/golang/cli.rb 2016-01-19 11:33:28.000000000 +0100
@@ -1,19 +1,32 @@
module CLI
- def self.run(command="")
-
- # echo the command we run to the buildlog
- puts command
-
- IO.popen(command) {|f| f.each_line {|l| puts l}}
-
- if $? == 0
- File.open("/tmp/exitstatus.txt","w:UTF-8") {|f| f.puts(0)}
+ def write_status(status)
+ file = "/tmp/exitstatus.txt"
+ mode = "w:UTF-8"
+ if status == 0
+ File.open(file,mode) {|f| f.puts(0)}
else
- File.open("/tmp/exitstatus.txt","w:UTF-8") {|f| f.puts(1)}
+ File.open(file,mode) {|f| f.puts(1)}
abort "[ERROR]Go command failed! Please check."
end
+ end
+ def self.run(env={},cmd="")
+ unless RUBY_VERSION.to_f > 1.8
+ # popen in 1.8 doesn't support env hash
+ def popen_env(hash, cmd)
+ hash.each do |k,v|
+ ENV[k] = v
+ end
+ io = IO.popen(cmd)
+ io.close
+ write_status($?)
+ end
+ popen_env(env,cmd) {|f| f.each_line {|l| puts l}}
+ else
+ IO.popen(env,cmd) {|f| f.each_line {|l| puts l}}
+ write_status($?)
+ end
end
end
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/golang-packaging-14.2/golang-macros.rb new/golang-packaging-14.3/golang-macros.rb
--- old/golang-packaging-14.2/golang-macros.rb 2016-01-11 12:16:49.000000000 +0100
+++ new/golang-packaging-14.3/golang-macros.rb 2016-01-19 11:33:28.000000000 +0100
@@ -104,14 +104,14 @@
# MODs: nil, "...", "/...", "foo...", "foo/...", "foo bar", "foo bar... baz" and etc
if mods.empty?
- CLI.run("GOPATH=\"#{gopath}\" GOBIN=\"#{gobin}\" go install #{extraflags} #{buildflags} #{importpath}")
+ CLI.run({"GOPATH"=>gopath,"GOBIN"=>gobin}, "go install #{extraflags} #{buildflags} #{importpath}")
else
for mod in mods do
if mod == "..."
- CLI.run("GOPATH=\"#{gopath}\" GOBIN=\"#{gobin}\" go install #{extraflags} #{buildflags} #{importpath}...")
+ CLI.run({"GOPATH"=>gopath,"GOBIN"=>gobin}, "go install #{extraflags} #{buildflags} #{importpath}...")
break
else
- CLI.run("GOPATH=\"#{gopath}\" GOBIN=\"#{gobin}\" go install #{extraflags} #{buildflags} #{importpath}/#{mod}")
+ CLI.run({"GOPATH"=>gopath,"GOBIN"=>gobin}, "go install #{extraflags} #{buildflags} #{importpath}/#{mod}")
end
end
end
@@ -187,7 +187,7 @@
puts "[ERROR]gofix: please specify a valid importpath, see: go help fix"
else
gopath = $builddir + "/go"
- CLI.run("GOPATH=#{gopath} go fix #{ARGV[1]}...")
+ CLI.run({"GOPATH"=>gopath},"go fix #{ARGV[1]}...")
end
puts "Fixed!"
@@ -204,7 +204,7 @@
puts "[ERROR]gotest: please specify a valid importpath, see: go help test"
else
gopath = $builddir + "/go:" + $libdir + "/go/contrib"
- CLI.run("GOPATH=#{gopath} go test -x #{ARGV[1]}...")
+ CLI.run({"GOPATH"=>gopath}, "go test -x #{ARGV[1]}...")
end
puts "Test passed!"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/golang-packaging-14.2/golang.prov new/golang-packaging-14.3/golang.prov
--- old/golang-packaging-14.2/golang.prov 2016-01-11 12:16:49.000000000 +0100
+++ new/golang-packaging-14.3/golang.prov 2016-01-19 11:33:28.000000000 +0100
@@ -9,10 +9,11 @@
# read stdin for filelist rpm feeds us for a (sub) package
filelist = []
+prefix = buildroot + contribdir + "/"
ARGF.each do |l|
# if line has "*.a"
# buildroot + contribdir + golang.org/x/text/collate/colltab.a
- filelist << l.gsub(buildroot + contribdir + "/",'').strip! if ( l.index(/\.a\n/) && ! l.gsub(importpath,'').index(/example|test/) )
+ filelist << l.gsub(prefix,'').strip! if ( l.index(".a\n") && ! l.gsub(prefix + importpath,'').index(/example|test/) )
end
# filelist:
1
0