openSUSE Commits
December 2016
Hello community,
here is the log from the commit of package libzbc for openSUSE:Factory checked in at 2016-12-08 00:31:12
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/libzbc (Old)
and /work/SRC/openSUSE:Factory/.libzbc.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "libzbc"
Changes:
--------
--- /work/SRC/openSUSE:Factory/libzbc/libzbc.changes 2016-10-10 16:21:46.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.libzbc.new/libzbc.changes 2016-12-08 00:31:13.000000000 +0100
@@ -1,0 +2,7 @@
+Tue Nov 29 10:31:02 UTC 2016 - jengelh(a)inai.de
+
+- Update to new upstream release 4.3.3
+ * block backend driver: support latest "official" kernel
+ interface queued up for kernel 4.10 release.
+
+-------------------------------------------------------------------
Old:
----
v4.3.0.tar.gz
New:
----
v4.3.3.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ libzbc.spec ++++++
--- /var/tmp/diff_new_pack.N3MIOt/_old 2016-12-08 00:31:14.000000000 +0100
+++ /var/tmp/diff_new_pack.N3MIOt/_new 2016-12-08 00:31:14.000000000 +0100
@@ -17,8 +17,8 @@
Name: libzbc
-%define lname libzbc-4_3_0
-Version: 4.3.0
+%define lname libzbc-4_3_3
+Version: 4.3.3
Release: 0
Summary: Library for manipulating ZBC and ZAC disks
License: BSD-2-Clause and LGPL-3.0+
++++++ v4.3.0.tar.gz -> v4.3.3.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libzbc-4.3.0/.gitignore new/libzbc-4.3.3/.gitignore
--- old/libzbc-4.3.0/.gitignore 2016-08-18 04:04:10.000000000 +0200
+++ new/libzbc-4.3.3/.gitignore 2016-11-15 07:55:55.000000000 +0100
@@ -26,6 +26,7 @@
*.hex
zbc_info
+zbc_inquiry
zbc_report_zones
zbc_reset_write_ptr
zbc_close_zone
@@ -98,3 +99,8 @@
tools/lkvs/m4/ltversion.m4
tools/lkvs/m4/lt~obsolete.m4
+tools/lkvs/src/examples/lkvsfileput
+tools/lkvs/src/examples/lkvsget
+tools/lkvs/src/examples/lkvsmultiget
+tools/lkvs/src/examples/lkvsmultiput
+tools/lkvs/src/examples/lkvsput
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libzbc-4.3.0/configure.ac new/libzbc-4.3.3/configure.ac
--- old/libzbc-4.3.0/configure.ac 2016-08-18 04:04:10.000000000 +0200
+++ new/libzbc-4.3.3/configure.ac 2016-11-15 07:55:55.000000000 +0100
@@ -1,5 +1,5 @@
-AC_INIT([libzbc], [4.3.0], [damien.lemoal(a)hgst.com, adam.manzanares(a)hgst.com]
+AC_INIT([libzbc], [4.3.3], [damien.lemoal(a)wdc.com, adam.manzanares(a)wdc.com]
AC_CONFIG_AUX_DIR([build-aux])
AC_CONFIG_MACRO_DIR([m4])
AC_CONFIG_HEADER([include/config.h])
@@ -24,6 +24,12 @@
CFLAGS="$CFLAGS $PTHREAD_CFLAGS"
CC="$PTHREAD_CC"
+# Checks for header files.
+AC_CHECK_HEADER(scsi/scsi.h, [], [AC_MSG_ERROR([Couldn't find scsi/scsi.h])])
+AC_CHECK_HEADER(scsi/sg.h, [], [AC_MSG_ERROR([Couldn't find scsi/sg.h])])
+AC_CHECK_HEADER(libgen.h, [], [AC_MSG_ERROR([Couldn't find libgen.h])])
+AC_CHECK_HEADERS([linux/fs.h linux/blkzoned.h])
+
# Conditionals
# Build gzbc only if GTK3 is installed.
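Note on the configure.ac hunk above: AC_CHECK_HEADERS([linux/fs.h linux/blkzoned.h]) makes autoconf define HAVE_LINUX_BLKZONED_H in the generated config.h when the kernel 4.10 zoned-block header is available, and the rewritten block backend further down keys all of its ioctl code on that macro (falling back to -EOPNOTSUPP stubs otherwise). A minimal sketch of that compile-time guard, outside of libzbc and purely illustrative:

    /* Build with:  cc -DHAVE_LINUX_BLKZONED_H guard.c   (or without the -D). */
    #include <stdio.h>

    #ifdef HAVE_LINUX_BLKZONED_H
    #include <linux/blkzoned.h>   /* BLKREPORTZONE, BLKRESETZONE, struct blk_zone */
    #endif

    int main(void)
    {
    #ifdef HAVE_LINUX_BLKZONED_H
        puts("built against the kernel 4.10 zoned block interface");
    #else
        puts("no <linux/blkzoned.h>: zoned ioctl paths are stubbed out");
    #endif
        return 0;
    }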
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libzbc-4.3.0/lib/zbc.h new/libzbc-4.3.3/lib/zbc.h
--- old/libzbc-4.3.0/lib/zbc.h 2016-08-18 04:04:10.000000000 +0200
+++ new/libzbc-4.3.3/lib/zbc.h 2016-11-15 07:55:55.000000000 +0100
@@ -18,7 +18,8 @@
/***** Including files *****/
-#include <libzbc/zbc.h>
+#include "config.h"
+#include "libzbc/zbc.h"
#include "zbc_log.h"
#include <stdlib.h>
@@ -199,22 +200,26 @@
extern int
zbc_scsi_get_zbd_chars(zbc_device_t *dev);
-extern int
-zbc_scsi_report_zones(zbc_device_t *dev,
- uint64_t start_lba,
- enum zbc_reporting_options ro,
- uint64_t *max_lba,
- zbc_zone_t *zones,
- unsigned int *nr_zones);
-
+/**
+ * SCSI backend driver open zone method.
+ * Used in block device backend too.
+ */
extern int
zbc_scsi_open_zone(zbc_device_t *dev,
uint64_t start_lba);
+/**
+ * SCSI backend driver close zone method.
+ * Used in block device backend too.
+ */
extern int
zbc_scsi_close_zone(zbc_device_t *dev,
uint64_t start_lba);
+/**
+ * SCSI backend driver finish zone method.
+ * Used in block device backend too.
+ */
extern int
zbc_scsi_finish_zone(zbc_device_t *dev,
uint64_t start_lba);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libzbc-4.3.0/lib/zbc_block.c new/libzbc-4.3.3/lib/zbc_block.c
--- old/libzbc-4.3.0/lib/zbc_block.c 2016-08-18 04:04:10.000000000 +0200
+++ new/libzbc-4.3.3/lib/zbc_block.c 2016-11-15 07:55:55.000000000 +0100
@@ -24,23 +24,41 @@
#include <sys/stat.h>
#include <sys/types.h>
#include <fcntl.h>
+#include <limits.h>
#include <linux/fs.h>
#include "zbc.h"
#include "zbc_sg.h"
-/***** Macro and types definitions *****/
+#ifdef HAVE_LINUX_BLKZONED_H
+#include <linux/blkzoned.h>
+#endif
-/**
- * Block device descriptor data.
- */
-typedef struct zbc_block_device {
+/***** Inline functions *****/
+
+static inline uint64_t zbc_block_lba2bytes(struct zbc_device *dev,
+ uint64_t lba)
+{
+ return lba * dev->zbd_info.zbd_logical_block_size;
+}
- struct zbc_device dev;
+static inline uint64_t zbc_block_bytes2lba(struct zbc_device *dev,
+ uint64_t bytes)
+{
+ return bytes / dev->zbd_info.zbd_logical_block_size;
+}
- unsigned int zone_sectors;
+static inline uint64_t zbc_block_lba2sector(struct zbc_device *dev,
+ uint64_t lba)
+{
+ return zbc_block_lba2bytes(dev, lba) >> 9;
+}
-} zbc_block_device_t;
+static inline uint64_t zbc_block_sector2lba(struct zbc_device *dev,
+ uint64_t sector)
+{
+ return zbc_block_bytes2lba(dev, sector << 9);
+}
/***** Definition of private functions *****/
@@ -55,70 +73,34 @@
}
/**
- * Convert device address to block device handle address.
- */
-static inline zbc_block_device_t *
-zbc_dev_to_block_dev(struct zbc_device *dev)
-{
- return container_of(dev, struct zbc_block_device, dev);
-}
-
-/**
* Test if the block device is zoned.
*/
static int
zbc_block_device_is_zoned(struct zbc_device *dev)
{
- zbc_block_device_t *bdev = zbc_dev_to_block_dev(dev);
- unsigned long long start, len;
- unsigned int type, nr_zones;
- int is_zoned = 0;
char str[128];
- FILE *zoned;
- int ret = 1;
+ FILE *file;
- /* Check zoned attributes, if any */
+ /* Check that this is a zoned block device */
snprintf(str, sizeof(str),
"/sys/block/%s/queue/zoned",
basename(dev->zbd_filename));
- zoned = fopen(str, "r");
- if ( ! zoned ) {
- /* Not a zoned block device or no kernel support */
- return 0;
+ file = fopen(str, "r");
+ if ( file ) {
+ memset(str, 0, sizeof(str));
+ fscanf(file, "%s", str);
}
+ fclose(file);
- while( 1 ) {
-
- start = len = 0;
- type = -1;
- nr_zones = 0;
- ret = fscanf(zoned, "%llu %llu %u %u", &start, &len, &type, &nr_zones);
- if ( (ret == EOF) || (ret != 4) ) {
- break;
- }
-
- if ( nr_zones == 0 ) {
- /* Not a zoned block device */
- ret = 0;
- break;
- }
-
- if ( type >= 2 ) {
- if ( type == 2 ) {
- dev->zbd_info.zbd_model = ZBC_DM_HOST_MANAGED;
- } else {
- dev->zbd_info.zbd_model = ZBC_DM_HOST_AWARE;
- }
- bdev->zone_sectors = len;
- is_zoned = 1;
- break;
- }
-
+ if ( strcmp(str, "host-aware") == 0 ) {
+ dev->zbd_info.zbd_model = ZBC_DM_HOST_AWARE;
+ return 1;
+ } else if ( strcmp(str, "host-managed") == 0 ) {
+ dev->zbd_info.zbd_model = ZBC_DM_HOST_MANAGED;
+ return 1;
}
- fclose(zoned);
-
- return is_zoned;
+ return 0;
}
@@ -147,7 +129,7 @@
}
}
- return( len );
+ return len;
}
@@ -167,11 +149,10 @@
file = fopen(str, "r");
if ( file ) {
len = zbc_block_get_str(file, str);
- if ( len ) {
+ if ( len )
n = snprintf(dev->zbd_info.zbd_vendor_id,
ZBC_DEVICE_INFO_LENGTH,
"%s ", str);
- }
fclose(file);
}
@@ -181,11 +162,10 @@
file = fopen(str, "r");
if ( file ) {
len = zbc_block_get_str(file, str);
- if ( len ) {
+ if ( len )
n += snprintf(&dev->zbd_info.zbd_vendor_id[n],
ZBC_DEVICE_INFO_LENGTH - n,
"%s ", str);
- }
fclose(file);
}
@@ -195,11 +175,10 @@
file = fopen(str, "r");
if ( file ) {
len = zbc_block_get_str(file, str);
- if ( len ) {
+ if ( len )
snprintf(&dev->zbd_info.zbd_vendor_id[n],
ZBC_DEVICE_INFO_LENGTH - n,
"%s", str);
- }
fclose(file);
ret = 1;
}
@@ -210,10 +189,10 @@
/**
* Test if the device can be handled
- * and set a the block device info.
+ * and get the block device info.
*/
static int
-zbc_block_set_info(struct zbc_device *dev)
+zbc_block_get_info(struct zbc_device *dev)
{
unsigned long long size64;
struct stat st;
@@ -236,10 +215,9 @@
}
/* Is this a zoned device ? And do we have kernel support ? */
- if ( ! zbc_block_device_is_zoned(dev) ) {
+ if ( ! zbc_block_device_is_zoned(dev) )
/* Not a zoned block device: ignore */
return -ENXIO;
- }
/* Get logical block size */
ret = ioctl(dev->zbd_fd, BLKSSZGET, &size32);
@@ -312,9 +290,8 @@
}
/* Use SG_IO to get zone characteristics (maximum number of open zones, etc) */
- if ( zbc_scsi_get_zbd_chars(dev) ) {
+ if ( zbc_scsi_get_zbd_chars(dev) )
return -ENXIO;
- }
/* Get maximum command size */
zbc_sg_get_max_cmd_blocks(dev);
@@ -331,13 +308,17 @@
int flags,
struct zbc_device **pdev)
{
- zbc_block_device_t *bdev;
struct zbc_device *dev;
int fd, ret;
zbc_debug("%s: ########## Trying BLOCK driver ##########\n",
filename);
+#ifndef HAVE_LINUX_BLKZONED_H
+ zbc_debug("libzbc compiled without block driver support\n");
+ return -ENXIO;
+#endif
+
/* Open block device: always add write mode for discard (reset zone) */
fd = open(filename, zbc_open_flags(flags) | O_WRONLY);
if ( fd < 0 ) {
@@ -351,24 +332,19 @@
/* Allocate a handle */
ret = -ENOMEM;
- bdev = calloc(1, sizeof(*bdev));
- if ( ! bdev ) {
+ dev = calloc(1, sizeof(struct zbc_device));
+ if ( ! dev )
goto out;
- }
- bdev->zone_sectors = 0;
- dev = &bdev->dev;
dev->zbd_fd = fd;
dev->zbd_filename = strdup(filename);
- if ( ! dev->zbd_filename ) {
+ if ( ! dev->zbd_filename )
goto out_free_dev;
- }
- /* Set the fake device information */
- ret = zbc_block_set_info(dev);
- if ( ret != 0 ) {
+ /* Get device information */
+ ret = zbc_block_get_info(dev);
+ if ( ret != 0 )
goto out_free_filename;
- }
*pdev = dev;
@@ -383,13 +359,12 @@
out_free_dev:
- free(bdev);
+ free(dev);
out:
- if ( fd >= 0 ) {
+ if ( fd >= 0 )
close(fd);
- }
zbc_debug("%s: ########## BLOCK driver failed %d ##########\n",
filename,
@@ -405,23 +380,23 @@
static int
zbc_block_close(zbc_device_t *dev)
{
- zbc_block_device_t *bdev = zbc_dev_to_block_dev(dev);
int ret = 0;
/* Close device */
- if ( close(dev->zbd_fd) < 0 ) {
+ if ( close(dev->zbd_fd) < 0 )
ret = -errno;
- }
if ( ret == 0 ) {
free(dev->zbd_filename);
- free(bdev);
+ free(dev);
}
return ret;
}
+#ifdef HAVE_LINUX_BLKZONED_H
+
/**
* Flush the device.
*/
@@ -435,9 +410,51 @@
}
/**
- * Get the block device zone information: use SG_IO, but
- * sync the device first to ensure that the current write
- * pointer value is returned.
+ * Test if a zone should be reported depending
+ * on the specified reporting options.
+ */
+static bool
+zbc_block_must_report(struct zbc_zone *zone,
+ enum zbc_reporting_options ro)
+{
+ enum zbc_reporting_options options = ro & (~ZBC_RO_PARTIAL);
+
+ switch ( options ) {
+ case ZBC_RO_ALL:
+ return true;
+ case ZBC_RO_EMPTY:
+ return zbc_zone_empty(zone);
+ case ZBC_RO_IMP_OPEN:
+ return zbc_zone_imp_open(zone);
+ case ZBC_RO_EXP_OPEN:
+ return zbc_zone_exp_open(zone);
+ case ZBC_RO_CLOSED:
+ return zbc_zone_closed(zone);
+ case ZBC_RO_FULL:
+ return zbc_zone_full(zone);
+ case ZBC_RO_RDONLY:
+ return zbc_zone_rdonly(zone);
+ case ZBC_RO_OFFLINE:
+ return zbc_zone_offline(zone);
+ case ZBC_RO_RESET:
+ return zbc_zone_need_reset(zone);
+ case ZBC_RO_NON_SEQ:
+ return zbc_zone_non_seq(zone);
+ case ZBC_RO_NOT_WP:
+ return zbc_zone_not_wp(zone);
+ default:
+ break;
+ }
+
+ return false;
+
+
+}
+
+#define ZBC_BLOCK_ZONE_REPORT_NR_ZONES 8192
+
+/**
+ * Get the block device zone information.
*/
static int
zbc_block_report_zones(struct zbc_device *dev,
@@ -447,11 +464,77 @@
struct zbc_zone *zones,
unsigned int *nr_zones)
{
+ struct zbc_zone zone;
+ struct blk_zone_report *rep;
+ struct blk_zone *blkz;
+ unsigned int i, n = 0;
+ int ret;
+
+ rep = malloc(sizeof(struct blk_zone_report) +
+ sizeof(struct blk_zone) * ZBC_BLOCK_ZONE_REPORT_NR_ZONES);
+ if ( ! rep ) {
+ zbc_error("%s: No memory for report zones\n",
+ dev->zbd_filename);
+ return -ENOMEM;
+ }
+ blkz = (struct blk_zone *)(rep + 1);
+
+ while ( ((! *nr_zones) || (n < *nr_zones))
+ && (start_lba < dev->zbd_info.zbd_logical_blocks) ) {
+
+ /* Get zone info */
+ memset(rep, 0, sizeof(struct blk_zone_report) +
+ sizeof(struct blk_zone) * ZBC_BLOCK_ZONE_REPORT_NR_ZONES);
+ rep->sector = zbc_block_lba2sector(dev, start_lba);
+ rep->nr_zones = ZBC_BLOCK_ZONE_REPORT_NR_ZONES;
+
+ ret = ioctl(dev->zbd_fd, BLKREPORTZONE, rep);
+ if ( ret != 0 ) {
+ ret = -errno;
+ zbc_error("%s: ioctl BLKREPORTZONE at %llu failed %d (%s)\n",
+ dev->zbd_filename,
+ (unsigned long long)start_lba,
+ errno,
+ strerror(errno));
+ goto out;
+ }
+
+ for(i = 0; i < rep->nr_zones; i++) {
+
+ if ( (*nr_zones && (n >= *nr_zones))
+ || (start_lba >= dev->zbd_info.zbd_logical_blocks) )
+ break;
+
+ memset(&zone, 0, sizeof(struct zbc_zone));
+ zone.zbz_type = blkz[i].type;
+ zone.zbz_condition = blkz[i].cond;
+ zone.zbz_length = zbc_block_sector2lba(dev, blkz[i].len);
+ zone.zbz_start = zbc_block_sector2lba(dev, blkz[i].start);
+ zone.zbz_write_pointer = zbc_block_sector2lba(dev, blkz[i].wp);
+ if ( blkz[i].reset )
+ zone.zbz_flags |= ZBC_ZF_NEED_RESET;
+ if ( blkz[i].non_seq )
+ zone.zbz_flags |= ZBC_ZF_NON_SEQ;
+
+ if ( zbc_block_must_report(&zone, ro) ) {
+ if ( zones )
+ memcpy(&zones[n], &zone, sizeof(struct zbc_zone));
+ n++;
+ }
+
+ start_lba += zbc_zone_length(&zone);
+
+ }
+
+ }
+
+ /* Return number of zones */
+ *nr_zones = n;
- fdatasync(dev->zbd_fd);
+out:
+ free(rep);
- return zbc_scsi_report_zones(dev, start_lba, ro,
- max_lba, zones, nr_zones);
+ return ret;
}
@@ -460,7 +543,7 @@
*/
static int
zbc_block_open_zone(zbc_device_t *dev,
- uint64_t start_lba)
+ uint64_t start_lba)
{
return zbc_scsi_open_zone(dev, start_lba);
}
@@ -470,7 +553,7 @@
*/
static int
zbc_block_close_zone(zbc_device_t *dev,
- uint64_t start_lba)
+ uint64_t start_lba)
{
return zbc_scsi_close_zone(dev, start_lba);
}
@@ -480,37 +563,130 @@
*/
static int
zbc_block_finish_zone(zbc_device_t *dev,
- uint64_t start_lba)
+ uint64_t start_lba)
{
return zbc_scsi_finish_zone(dev, start_lba);
}
/**
+ * Reset a single zone write pointer.
+ */
+static int
+zbc_block_reset_one(struct zbc_device *dev,
+ uint64_t start_lba)
+{
+ struct blk_zone_range range;
+ struct zbc_zone zone;
+ unsigned int nr_zones = 1;
+ int ret;
+
+ /* Get zone info */
+ ret = zbc_block_report_zones(dev, start_lba,
+ ZBC_RO_ALL, NULL,
+ &zone, &nr_zones);
+ if (ret)
+ return ret;
+ if (! nr_zones ) {
+ zbc_error("%s: Invalid LBA\n",
+ dev->zbd_filename);
+ return -EINVAL;
+ }
+
+ if (zbc_zone_conventional(&zone)
+ || zbc_zone_empty(&zone))
+ return 0;
+
+ /* Reset zone */
+ range.sector = zbc_block_lba2sector(dev, zbc_zone_start_lba(&zone));
+ range.nr_sectors = zbc_block_lba2sector(dev, zbc_zone_length(&zone));
+ ret = ioctl(dev->zbd_fd, BLKRESETZONE, &range);
+ if ( ret != 0 ) {
+ ret = -errno;
+ zbc_error("%s: ioctl BLKRESETZONE failed %d (%s)\n",
+ dev->zbd_filename,
+ errno,
+ strerror(errno));
+ }
+
+ return ret;
+
+}
+
+/**
+ * Reset all zones write pointer.
+ */
+static int
+zbc_block_reset_all(struct zbc_device *dev)
+{
+ struct zbc_zone *zones;
+ unsigned int i, nr_zones;
+ struct blk_zone_range range;
+ uint64_t start_lba = 0;
+ int ret;
+
+ zones = malloc(sizeof(struct zbc_zone) * ZBC_BLOCK_ZONE_REPORT_NR_ZONES);
+ if ( ! zones ) {
+ zbc_error("%s: No memory for report zones\n",
+ dev->zbd_filename);
+ return -ENOMEM;
+ }
+
+ while ( 1 ) {
+
+ /* Get zone info */
+ nr_zones = ZBC_BLOCK_ZONE_REPORT_NR_ZONES;
+ ret = zbc_block_report_zones(dev, start_lba,
+ ZBC_RO_ALL, NULL,
+ zones, &nr_zones);
+ if (ret || (! nr_zones))
+ break;
+
+ for (i = 0; i < nr_zones; i++) {
+
+ start_lba = zbc_zone_next_lba(&zones[i]);
+
+ if (zbc_zone_conventional(&zones[i])
+ || zbc_zone_empty(&zones[i]))
+ continue;
+
+ /* Reset zone */
+ range.sector = zbc_block_lba2sector(dev, zbc_zone_start_lba(&zones[i]));
+ range.nr_sectors = zbc_block_lba2sector(dev, zbc_zone_length(&zones[i]));
+ ret = ioctl(dev->zbd_fd, BLKRESETZONE, &range);
+ if ( ret != 0 ) {
+ ret = -errno;
+ zbc_error("%s: ioctl BLKRESETZONE failed %d (%s)\n",
+ dev->zbd_filename,
+ errno,
+ strerror(errno));
+ goto out;
+ }
+
+ }
+
+ }
+
+out:
+ free(zones);
+
+ return ret;
+
+}
+
+/**
* Reset zone(s) write pointer: use BLKDISCARD ioctl.
*/
static int
zbc_block_reset_wp(struct zbc_device *dev,
uint64_t start_lba)
{
- zbc_block_device_t *bdev = zbc_dev_to_block_dev(dev);
- uint64_t range[2];
- if ( start_lba == (uint64_t)-1 ) {
- /* Reset all zones */
- range[0] = 0;
- range[1] = dev->zbd_info.zbd_logical_blocks * dev->zbd_info.zbd_logical_block_size;
- } else {
- /* Reset only the zone at start_lba */
- range[0] = start_lba * dev->zbd_info.zbd_logical_block_size;
- range[1] = bdev->zone_sectors << 9;
- }
-
- /* Discard */
- if ( ioctl(dev->zbd_fd, BLKDISCARD, &range) != 0 ) {
- return -errno;
- }
+ if ( start_lba == (uint64_t)-1 )
+ /* All zones */
+ return zbc_block_reset_all(dev);
- return 0;
+ /* One zone */
+ return zbc_block_reset_one(dev, start_lba);
}
@@ -529,13 +705,12 @@
/* Read */
ret = pread(dev->zbd_fd,
buf,
- lba_count * dev->zbd_info.zbd_logical_block_size,
- (zone->zbz_start + lba_ofst) * dev->zbd_info.zbd_logical_block_size);
- if ( ret < 0 ) {
+ zbc_block_lba2bytes(dev, lba_count),
+ zbc_block_lba2bytes(dev, zone->zbz_start + lba_ofst));
+ if ( ret < 0 )
ret = -errno;
- } else {
- ret /= dev->zbd_info.zbd_logical_block_size;
- }
+ else
+ ret = zbc_block_bytes2lba(dev, ret);
return ret;
@@ -556,18 +731,90 @@
/* Read */
ret = pwrite(dev->zbd_fd,
buf,
- lba_count * dev->zbd_info.zbd_logical_block_size,
- (zone->zbz_start + lba_ofst) * dev->zbd_info.zbd_logical_block_size);
- if ( ret < 0 ) {
+ zbc_block_lba2bytes(dev, lba_count),
+ zbc_block_lba2bytes(dev, zone->zbz_start + lba_ofst));
+ if ( ret < 0 )
ret = -errno;
- } else {
- ret /= dev->zbd_info.zbd_logical_block_size;
- }
+ else
+ ret = zbc_block_bytes2lba(dev, ret);
return ret;
}
+#else /* HAVE_LINUX_BLKZONED_H */
+
+static int
+zbc_block_report_zones(struct zbc_device *dev,
+ uint64_t start_lba,
+ enum zbc_reporting_options ro,
+ uint64_t *max_lba,
+ struct zbc_zone *zones,
+ unsigned int *nr_zones)
+{
+ return -EOPNOTSUPP;
+}
+
+static int
+zbc_block_open_zone(zbc_device_t *dev,
+ uint64_t start_lba)
+{
+ return -EOPNOTSUPP;
+}
+
+static int
+zbc_block_close_zone(zbc_device_t *dev,
+ uint64_t start_lba)
+{
+ return -EOPNOTSUPP;
+}
+
+static int
+zbc_block_finish_zone(zbc_device_t *dev,
+ uint64_t start_lba)
+{
+ return -EOPNOTSUPP;
+}
+
+
+static int
+zbc_block_reset_wp(struct zbc_device *dev,
+ uint64_t start_lba)
+{
+ return -EOPNOTSUPP;
+}
+
+static int32_t
+zbc_block_pread(struct zbc_device *dev,
+ zbc_zone_t *zone,
+ void *buf,
+ uint32_t lba_count,
+ uint64_t lba_ofst)
+{
+ return -EOPNOTSUPP;
+}
+
+static int32_t
+zbc_block_pwrite(struct zbc_device *dev,
+ zbc_zone_t *zone,
+ const void *buf,
+ uint32_t lba_count,
+ uint64_t lba_ofst)
+{
+ return -EOPNOTSUPP;
+}
+
+static int
+zbc_block_flush(struct zbc_device *dev,
+ uint64_t lba_offset,
+ uint32_t lba_count,
+ int immediate)
+{
+ return -EOPNOTSUPP;
+}
+
+#endif /* HAVE_LINUX_BLKZONED_H */
+
struct zbc_ops zbc_block_ops = {
.zbd_open = zbc_block_open,
.zbd_close = zbc_block_close,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libzbc-4.3.0/lib/zbc_scsi.c new/libzbc-4.3.3/lib/zbc_scsi.c
--- old/libzbc-4.3.0/lib/zbc_scsi.c 2016-08-18 04:04:10.000000000 +0200
+++ new/libzbc-4.3.3/lib/zbc_scsi.c 2016-11-15 07:55:55.000000000 +0100
@@ -299,7 +299,7 @@
/**
* Get a SCSI device zone information.
*/
-int
+static int
zbc_scsi_report_zones(zbc_device_t *dev,
uint64_t start_lba,
enum zbc_reporting_options ro,
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libzbc-4.3.0/test/scripts/zbc_test_lib.sh new/libzbc-4.3.3/test/scripts/zbc_test_lib.sh
--- old/libzbc-4.3.0/test/scripts/zbc_test_lib.sh 2016-08-18 04:04:10.000000000 +0200
+++ new/libzbc-4.3.3/test/scripts/zbc_test_lib.sh 2016-11-15 07:55:55.000000000 +0100
@@ -32,6 +32,7 @@
# Store argument
device=$2
+ device_base=`basename ${device}`
bin_path=$3
# Test name
@@ -48,7 +49,7 @@
rm -f ${log_file}
# Zone info file
- zone_info_file="/tmp/${test_name}_zone_info.log"
+ zone_info_file="/tmp/${test_name}_zone_info.${device_base}.log"
rm -f ${zone_info_file}
# Dump zone info file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libzbc-4.3.0/test/zbc_test.sh new/libzbc-4.3.3/test/zbc_test.sh
--- old/libzbc-4.3.0/test/zbc_test.sh 2016-08-18 04:04:10.000000000 +0200
+++ new/libzbc-4.3.3/test/zbc_test.sh 2016-11-15 07:55:55.000000000 +0100
@@ -18,6 +18,7 @@
fi
device_file=${1}
+device_base=`basename ${device_file}`
# Check credentials
if [ $(id -u) -ne 0 ]; then
@@ -25,16 +26,13 @@
exit 1
fi
-# Set file names
-log_file=${log_path}/${testname}.log
-
# Test function
function zbc_run_test()
{
declare -i run_test_ret=0
ZBC_TEST_SUB_SCR_PATH=${ZBC_TEST_SCR_PATH}/${1}
- ZBC_TEST_SUB_LOG_PATH=${ZBC_TEST_LOG_PATH}/${1}
+ ZBC_TEST_SUB_LOG_PATH=${ZBC_TEST_LOG_PATH}/${device_base}/${1}
if [ ! -d ${ZBC_TEST_SUB_SCR_PATH} ]; then
echo "Test script directory ${ZBC_TEST_SUB_SCR_PATH} does not exist"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libzbc-4.3.0/tools/read_zone/zbc_read_zone.c new/libzbc-4.3.3/tools/read_zone/zbc_read_zone.c
--- old/libzbc-4.3.0/tools/read_zone/zbc_read_zone.c 2016-08-18 04:04:10.000000000 +0200
+++ new/libzbc-4.3.3/tools/read_zone/zbc_read_zone.c 2016-11-15 07:55:55.000000000 +0100
@@ -41,7 +41,7 @@
/**
* System time in usecs.
*/
-static __inline__ unsigned long long
+static inline unsigned long long
zbc_read_zone_usec(void)
{
struct timeval tv;
@@ -264,7 +264,7 @@
ret = 1;
goto out;
}
- ret = posix_memalign((void **) &iobuf, info.zbd_logical_block_size, iosize);
+ ret = posix_memalign((void **) &iobuf, sysconf(_SC_PAGESIZE), iosize);
if ( ret != 0 ) {
fprintf(stderr,
"No memory for I/O buffer (%zu B)\n",
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libzbc-4.3.0/tools/report_zones/zbc_report_zones.c new/libzbc-4.3.3/tools/report_zones/zbc_report_zones.c
--- old/libzbc-4.3.0/tools/report_zones/zbc_report_zones.c 2016-08-18 04:04:10.000000000 +0200
+++ new/libzbc-4.3.3/tools/report_zones/zbc_report_zones.c 2016-11-15 07:55:55.000000000 +0100
@@ -29,7 +29,7 @@
char **argv)
{
struct zbc_device_info info;
- unsigned long long lba = 0;
+ unsigned long long lba = 0, nr_lba = 0;
struct zbc_device *dev;
enum zbc_reporting_options ro = ZBC_RO_ALL;
int i, ret = 1;
@@ -232,10 +232,27 @@
}
printf("%u / %u zone%s:\n", nz, nr_zones, (nz > 1) ? "s" : "");
+ lba = 0;
for(i = 0; i < (int)nz; i++) {
+
z = &zones[i];
+
+ if (ro == ZBC_RO_ALL) {
+ /* Check */
+ if (zbc_zone_start_lba(z) != lba) {
+ printf("[WARNING] Zone %05d: LBA %llu should be %llu\n",
+ i,
+ zbc_zone_start_lba(z),
+ lba);
+ lba = zbc_zone_start_lba(z);
+ }
+ nr_lba += zbc_zone_length(z);
+ lba += zbc_zone_length(z);
+ }
+
if ( zbc_zone_conventional(z) ) {
- printf("Zone %05d: type 0x%x (%s), cond 0x%x (%s), LBA %llu, %llu sectors, wp N/A\n",
+ printf("Zone %05d: type 0x%x (%s), cond 0x%x (%s), LBA %llu, "
+ "%llu sectors, wp N/A\n",
i,
zbc_zone_type(z),
zbc_zone_type_str(zbc_zone_type(z)),
@@ -243,19 +260,41 @@
zbc_zone_condition_str(zbc_zone_condition(z)),
zbc_zone_start_lba(z),
zbc_zone_length(z));
- } else {
- printf("Zone %05d: type 0x%x (%s), cond 0x%x (%s), need_reset %d, non_seq %d, LBA %llu, %llu sectors, wp %llu\n",
- i,
- zbc_zone_type(z),
- zbc_zone_type_str(zbc_zone_type(z)),
- zbc_zone_condition(z),
- zbc_zone_condition_str(zbc_zone_condition(z)),
- zbc_zone_need_reset(z),
- zbc_zone_non_seq(z),
- zbc_zone_start_lba(z),
- zbc_zone_length(z),
- zbc_zone_wp_lba(z));
+ continue;
+ }
+
+ if ( zbc_zone_sequential(z) ) {
+ printf("Zone %05d: type 0x%x (%s), cond 0x%x (%s), need_reset %d, "
+ "non_seq %d, LBA %llu, %llu sectors, wp %llu\n",
+ i,
+ zbc_zone_type(z),
+ zbc_zone_type_str(zbc_zone_type(z)),
+ zbc_zone_condition(z),
+ zbc_zone_condition_str(zbc_zone_condition(z)),
+ zbc_zone_need_reset(z),
+ zbc_zone_non_seq(z),
+ zbc_zone_start_lba(z),
+ zbc_zone_length(z),
+ zbc_zone_wp_lba(z));
+ continue;
}
+
+ printf("Zone %05d: unknown type 0x%x, LBA %llu, %llu sectors\n",
+ i,
+ zbc_zone_type(z),
+ zbc_zone_start_lba(z),
+ zbc_zone_length(z));
+
+ }
+
+ if ( ro == ZBC_RO_ALL ) {
+ /* Check */
+ if ( nr_lba != info.zbd_logical_blocks ) {
+ printf("[WARNING] %llu logical blocks reported "
+ "but capacity is %llu logical blocks\n",
+ nr_lba,
+ (unsigned long long)info.zbd_logical_blocks);
+ }
}
out:
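The new code in zbc_report_zones.c walks the full report and checks that each zone starts exactly where the previous one ended and that the zone lengths add up to the device capacity. The same sanity check, reduced to a self-contained sketch over a made-up zone array:

    #include <stdio.h>

    struct zone {                    /* stand-in for struct zbc_zone */
        unsigned long long start;    /* zone start LBA */
        unsigned long long length;   /* zone length in LBAs */
    };

    /* Warn about gaps between zones and about a total that misses capacity. */
    static int check_zone_coverage(const struct zone *zones, int nz,
                                   unsigned long long capacity)
    {
        unsigned long long lba = 0, nr_lba = 0;
        int i, ret = 0;

        for (i = 0; i < nz; i++) {
            if (zones[i].start != lba) {
                printf("[WARNING] Zone %05d: LBA %llu should be %llu\n",
                       i, zones[i].start, lba);
                lba = zones[i].start;   /* resync and keep checking */
                ret = -1;
            }
            nr_lba += zones[i].length;
            lba += zones[i].length;
        }

        if (nr_lba != capacity) {
            printf("[WARNING] %llu logical blocks reported but capacity is %llu\n",
                   nr_lba, capacity);
            ret = -1;
        }

        return ret;
    }

    int main(void)
    {
        /* Example: three 64-LBA zones with a gap before the last one. */
        struct zone zones[] = { { 0, 64 }, { 64, 64 }, { 192, 64 } };

        return check_zone_coverage(zones, 3, 256) ? 1 : 0;
    }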
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libzbc-4.3.0/tools/write_zone/zbc_write_zone.c new/libzbc-4.3.3/tools/write_zone/zbc_write_zone.c
--- old/libzbc-4.3.0/tools/write_zone/zbc_write_zone.c 2016-08-18 04:04:10.000000000 +0200
+++ new/libzbc-4.3.3/tools/write_zone/zbc_write_zone.c 2016-11-15 07:55:55.000000000 +0100
@@ -268,11 +268,10 @@
zbc_zone_wp_lba(iozone));
/* Check I/O size alignment */
- if ( zbc_zone_sequential_req(iozone) ) {
+ if ( zbc_zone_sequential_req(iozone) )
ioalign = info.zbd_physical_block_size;
- } else {
+ else
ioalign = info.zbd_logical_block_size;
- }
if ( iosize % ioalign ) {
fprintf(stderr,
"Invalid I/O size %zu (must be aligned on %zu)\n",
@@ -283,7 +282,7 @@
}
/* Get an I/O buffer */
- ret = posix_memalign((void **) &iobuf, ioalign, iosize);
+ ret = posix_memalign((void **) &iobuf, sysconf(_SC_PAGESIZE), iosize);
if ( ret != 0 ) {
fprintf(stderr,
"No memory for I/O buffer (%zu B)\n",
Hello community,
here is the log from the commit of package drbd for openSUSE:Factory checked in at 2016-12-08 00:31:06
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/drbd (Old)
and /work/SRC/openSUSE:Factory/.drbd.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "drbd"
Changes:
--------
--- /work/SRC/openSUSE:Factory/drbd/drbd.changes 2016-10-26 13:29:49.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.drbd.new/drbd.changes 2016-12-08 00:31:07.000000000 +0100
@@ -1,0 +2,10 @@
+Fri Nov 25 10:28:35 UTC 2016 - lpechacek(a)suse.com
+
+- Excluded openSUSE from RT KMP build (bsc#1005578)
+
+-------------------------------------------------------------------
+Wed Nov 19 11:56:17 UTC 2016 - lpechacek(a)suse.com
+
+- Enabled RT KMP build (bsc#1005578)
+
+-------------------------------------------------------------------
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ drbd.spec ++++++
--- /var/tmp/diff_new_pack.lNt6kF/_old 2016-12-08 00:31:08.000000000 +0100
+++ /var/tmp/diff_new_pack.lNt6kF/_new 2016-12-08 00:31:08.000000000 +0100
@@ -18,9 +18,11 @@
# needssslcertforbuild
+%if ! 0%{?is_opensuse}
%ifarch x86_64
%define buildrt 0
%endif
+%endif
Name: drbd
Version: 9.0.5+git.8d53d3e
Hello community,
here is the log from the commit of package drbd-utils for openSUSE:Factory checked in at 2016-12-08 00:31:00
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/drbd-utils (Old)
and /work/SRC/openSUSE:Factory/.drbd-utils.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "drbd-utils"
Changes:
--------
--- /work/SRC/openSUSE:Factory/drbd-utils/drbd-utils.changes 2016-08-05 18:17:38.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.drbd-utils.new/drbd-utils.changes 2016-12-08 00:31:01.000000000 +0100
@@ -1,0 +2,6 @@
+Tue Dec 6 08:06:00 UTC 2016 - nwang(a)suse.com
+
+- bsc#1006105, initialize node id when creating md.
+- Add patch initialize-node-id-when-create-md.patch
+
+-------------------------------------------------------------------
New:
----
initialize-node-id-when-create-md.patch
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ drbd-utils.spec ++++++
--- /var/tmp/diff_new_pack.VfiuaO/_old 2016-12-08 00:31:02.000000000 +0100
+++ /var/tmp/diff_new_pack.VfiuaO/_new 2016-12-08 00:31:02.000000000 +0100
@@ -33,6 +33,7 @@
Patch5: fence-after-pacemaker-down.patch
Patch6: support-drbd9-ra.patch
Patch7: fix-segfault-up-stacked-resource.patch
+Patch8: initialize-node-id-when-create-md.patch
Provides: drbd-bash-completion = %{version}
Provides: drbd-pacemaker = %{version}
@@ -82,6 +83,7 @@
%patch5 -p1
%patch6 -p1
%patch7 -p1
+%patch8 -p1
%build
./autogen.sh
++++++ initialize-node-id-when-create-md.patch ++++++
---
user/shared/drbdmeta.c | 1 +
1 file changed, 1 insertion(+)
diff --git a/user/shared/drbdmeta.c b/user/shared/drbdmeta.c
index fd8cc17..e8089f8 100644
--- a/user/shared/drbdmeta.c
+++ b/user/shared/drbdmeta.c
@@ -3845,6 +3845,7 @@ void md_convert_08_to_09(struct format *cfg)
cfg->md.flags &= ~(MDF_CONNECTED_IND | MDF_FULL_SYNC | MDF_PEER_OUT_DATED);
+ cfg->md.node_id = -1;
cfg->md.magic = DRBD_MD_MAGIC_09;
re_initialize_md_offsets(cfg);
--
1.8.5.6
Hello community,
here is the log from the commit of package hawk2 for openSUSE:Factory checked in at 2016-12-08 00:30:54
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/hawk2 (Old)
and /work/SRC/openSUSE:Factory/.hawk2.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "hawk2"
Changes:
--------
--- /work/SRC/openSUSE:Factory/hawk2/hawk2.changes 2016-12-02 16:42:36.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.hawk2.new/hawk2.changes 2016-12-08 00:30:56.000000000 +0100
@@ -1,0 +2,7 @@
+Mon Dec 05 12:21:15 UTC 2016 - kgronlund(a)suse.com
+
+- Update to version 2.0.0+git.1480940121.2c59e4e:
+ * Doc: Add missing link to HA quick guide
+ * UI: Show local doc links as large buttons
+
+-------------------------------------------------------------------
Old:
----
hawk2-2.0.0+git.1480523437.b6a234a.tar.bz2
New:
----
hawk2-2.0.0+git.1480940121.2c59e4e.tar.bz2
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ hawk2.spec ++++++
--- /var/tmp/diff_new_pack.NaoQ6S/_old 2016-12-08 00:30:58.000000000 +0100
+++ /var/tmp/diff_new_pack.NaoQ6S/_new 2016-12-08 00:30:58.000000000 +0100
@@ -33,13 +33,13 @@
%define gname haclient
%define uname hacluster
-%define version_unconverted 2.0.0+git.1480523437.b6a234a
+%define version_unconverted 2.0.0+git.1480940121.2c59e4e
Name: hawk2
Summary: HA Web Konsole
License: GPL-2.0
Group: %{pkg_group}
-Version: 2.0.0+git.1480523437.b6a234a
+Version: 2.0.0+git.1480940121.2c59e4e
Release: 0
Url: http://www.clusterlabs.org/wiki/Hawk
Source: %{name}-%{version}.tar.bz2
@@ -175,7 +175,7 @@
mkdir -p %{buildroot}/usr/share/doc/manual/sle-ha-manuals_en
mkdir -p %{buildroot}/usr/share/doc/manual/sle-ha-geo-manuals_en
mkdir -p %{buildroot}/usr/share/doc/manual/sle-ha-nfs-quick_en-pdf
-mkdir -p %{buildroot}/usr/share/doc/manual/sle-ha-install-quick_en
+mkdir -p %{buildroot}/usr/share/doc/manual/sle-ha-install-quick_en-pdf
# mark .mo files as such (works on SUSE but not FC12, as the latter wants directory to
# be "share/locale", not just "locale", and it also doesn't support appending to %%{name}.lang)
++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.NaoQ6S/_old 2016-12-08 00:30:58.000000000 +0100
+++ /var/tmp/diff_new_pack.NaoQ6S/_new 2016-12-08 00:30:58.000000000 +0100
@@ -1,4 +1,4 @@
<servicedata>
<service name="tar_scm">
<param name="url">git://github.com/ClusterLabs/hawk.git</param>
- <param name="changesrevision">b6a234aba9bdd30b90bfa3e2ee0d0bef4576d948</param></service></servicedata>
\ No newline at end of file
+ <param name="changesrevision">2c59e4ee0cf8d0818d3ddea62c9c93d969e4fbc8</param></service></servicedata>
\ No newline at end of file
++++++ hawk2-2.0.0+git.1480523437.b6a234a.tar.bz2 -> hawk2-2.0.0+git.1480940121.2c59e4e.tar.bz2 ++++++
/work/SRC/openSUSE:Factory/hawk2/hawk2-2.0.0+git.1480523437.b6a234a.tar.bz2 /work/SRC/openSUSE:Factory/.hawk2.new/hawk2-2.0.0+git.1480940121.2c59e4e.tar.bz2 differ: char 11, line 1
Hello community,
here is the log from the commit of package python-pyroute2 for openSUSE:Factory checked in at 2016-12-08 00:30:42
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-pyroute2 (Old)
and /work/SRC/openSUSE:Factory/.python-pyroute2.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-pyroute2"
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-pyroute2/python-pyroute2.changes 2016-09-13 22:23:44.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.python-pyroute2.new/python-pyroute2.changes 2016-12-08 00:30:43.000000000 +0100
@@ -1,0 +2,27 @@
+Mon Nov 21 08:17:08 UTC 2016 - glin(a)suse.com
+
+- Update to 0.4.11
+ + rtnl: #284 -- support vlan_flags
+ + ipdb: #288 -- do not inore link-local addresses
+ + ipdb: #300 -- sort ip addresses
+ + ipdb: #306 -- support net_ns_pid
+ + ipdb: #307 -- fix IPv6 routes management
+ + ipdb: #311 -- vlan interfaces address loading
+ + iprsocket: #305 -- support NETLINK_LISTEN_ALL_NSID
+ + devlink: fix fd leak on broken init
+ + sock_diag: initial NETLINK_SOCK_DIAG support
+ + rtnl: fix critical fd leak in the compat code
+ + rtnl: compat proxying fix
+ + rtnl: compat code is back
+ + netns: custom netns path support
+ + ipset: multiple improvements
+
+-------------------------------------------------------------------
+Tue Nov 15 10:37:01 UTC 2016 - dmueller(a)suse.com
+
+- update to 0.4.6:
+ * ipdb: #278 -- fix initial ports mapping
+ * ipset: #277 -- fix ADT attributes parsing
+ * nl80211: #274, #275, #276 -- BSS-related fixes
+
+-------------------------------------------------------------------
Old:
----
pyroute2-0.4.5.tar.gz
New:
----
pyroute2-0.4.11.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-pyroute2.spec ++++++
--- /var/tmp/diff_new_pack.cRHjwJ/_old 2016-12-08 00:30:44.000000000 +0100
+++ /var/tmp/diff_new_pack.cRHjwJ/_new 2016-12-08 00:30:44.000000000 +0100
@@ -17,7 +17,7 @@
Name: python-pyroute2
-Version: 0.4.5
+Version: 0.4.11
Release: 0
Summary: Python Netlink library
License: GPL-2.0+ or Apache-2.0
++++++ pyroute2-0.4.5.tar.gz -> pyroute2-0.4.11.tar.gz ++++++
++++ 17142 lines of diff (skipped)
Hello community,
here is the log from the commit of package python-websocket-client for openSUSE:Factory checked in at 2016-12-08 00:30:34
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-websocket-client (Old)
and /work/SRC/openSUSE:Factory/.python-websocket-client.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-websocket-client"
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-websocket-client/python-websocket-client.changes 2016-04-28 16:56:45.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.python-websocket-client.new/python-websocket-client.changes 2016-12-08 00:30:35.000000000 +0100
@@ -1,0 +2,31 @@
+Tue Nov 15 13:11:34 UTC 2016 - dmueller(a)suse.com
+
+- update to 0.37.0:
+ - fixed fialer that `websocket.create_connection` does not accept `origin` as a parameter (#246 )
+ - added support for using custom connection class (#235)
+ - use Named logger (#238)
+ - implement ping/pong timeout (#241)
+ - Corrects the syntax highlight code (#243)
+ - fixed failure to join thread before it is started (#242)
+ - Prints timings in console (#217)
+ - use inspect.getfullargspec with Python 3.x (#219)
+ - Check that exception message is actually a string before trying for substring check (#224)
+ - Use pre-initialized stream socket (#226)
+ - fixed TypeError: cafile, capath and cadata cannot be all omitted (#227)
+ - Change import style (#203)
+ - fix attribute error on the older python. (#215)
+ - fixed timeout+ssl error handling bug on python 2.7.10 (#190)
+ - add proxy support to wsdump.py (#194)
+ - use wsaccel if available (#193)
+ - add support for ssl cert chains to support client certs (#195)
+ - fix string formatting in exception (#196)
+ - fix typo in README.rst (#197)
+ - introduce on_data callback to pass data type. (#198)
+ - WebSocketBadStatusException for Handshake error (#199)
+ - set close timeout (#192)
+ - Map dict to headers list (#204)
+ - support client certification (#207)
+ - security improvement during handshake (#211)
+ - improve logging of error from callback (#212)
+
+-------------------------------------------------------------------
Old:
----
websocket_client-0.32.0.tar.gz
New:
----
websocket_client-0.37.0.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-websocket-client.spec ++++++
--- /var/tmp/diff_new_pack.IK3PVt/_old 2016-12-08 00:30:36.000000000 +0100
+++ /var/tmp/diff_new_pack.IK3PVt/_new 2016-12-08 00:30:36.000000000 +0100
@@ -24,13 +24,13 @@
%endif
Name: python-websocket-client
-Version: 0.32.0
+Version: 0.37.0
Release: 0
Summary: WebSocket client implementation
License: LGPL-2.1
Group: Development/Languages/Python
Url: https://github.com/liris/websocket-client/releases
-Source0: https://pypi.python.org/packages/source/w/websocket-client/websocket_client…
+Source0: https://pypi.io/packages/source/w/websocket-client/websocket_client-%{versi…
BuildRequires: %backports
BuildRequires: python-setuptools
BuildRequires: python-six
@@ -90,7 +90,6 @@
update-alternatives --remove wsdump.py %{_bindir}/wsdump.py-%{py_ver}
fi
-
%files
%defattr(-,root,root,-)
%doc LICENSE README.rst
++++++ websocket_client-0.32.0.tar.gz -> websocket_client-0.37.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/ChangeLog new/websocket_client-0.37.0/ChangeLog
--- old/websocket_client-0.32.0/ChangeLog 2015-06-03 02:44:39.000000000 +0200
+++ new/websocket_client-0.37.0/ChangeLog 2016-04-14 02:27:00.000000000 +0200
@@ -1,6 +1,44 @@
ChangeLog
============
+- 0.37.0
+ - fixed fialer that `websocket.create_connection` does not accept `origin` as a parameter (#246 )
+
+- 0.36.0
+ - added support for using custom connection class (#235)
+ - use Named logger (#238)
+ - implement ping/pong timeout (#241)
+ - Corrects the syntax highlight code (#243)
+ - fixed failure to join thread before it is started (#242)
+
+- 0.35.0
+ - Prints timings in console (#217)
+ - use inspect.getfullargspec with Python 3.x (#219)
+ - Check that exception message is actually a string before trying for substring check (#224)
+ - Use pre-initialized stream socket (#226)
+ - fixed TypeError: cafile, capath and cadata cannot be all omitted (#227)
+
+- 0.34.0
+
+ - Change import style (#203)
+ - fix attribute error on the older python. (#215)
+
+- 0.33.0
+
+ - fixed timeout+ssl error handling bug on python 2.7.10 (#190)
+ - add proxy support to wsdump.py (#194)
+ - use wsaccel if available (#193)
+ - add support for ssl cert chains to support client certs (#195)
+ - fix string formatting in exception (#196)
+ - fix typo in README.rst (#197)
+ - introduce on_data callback to pass data type. (#198)
+ - WebSocketBadStatusException for Handshake error (#199)
+ - set close timeout (#192)
+ - Map dict to headers list (#204)
+ - support client certification (#207)
+ - security improvement during handshake (#211)
+ - improve logging of error from callback (#212)
+
- 0.32.0
- fix http proxy bug (#189)
@@ -35,7 +73,7 @@
- remove unittest2 requirements for python 2.6 (#156)
- fixed subprotocol case during header validation (#158)
- get response status and headers (#160)
- - fix out-of-memory due to fragmentation when recieving a very large frame(#163)
+ - fix out-of-memory due to fragmentation when receiving a very large frame(#163)
- fix error if the payload data is nothing.(#166)
- refactoring.
@@ -64,7 +102,7 @@
- Fix not thread-safe of Websocket.close() (#120)
- Try to get proxy info from environment if not explicitly provided (#124)
- - support proxy basic authenticaiton. (#125)
+ - support proxy basic authentication. (#125)
- Fix NoneType exception at WebsocketApp.send (#126)
- not use proxy for localhost (#132)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/PKG-INFO new/websocket_client-0.37.0/PKG-INFO
--- old/websocket_client-0.32.0/PKG-INFO 2015-06-03 02:45:15.000000000 +0200
+++ new/websocket_client-0.37.0/PKG-INFO 2016-04-14 02:27:29.000000000 +0200
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: websocket_client
-Version: 0.32.0
+Version: 0.37.0
Summary: WebSocket client for python. hybi13 is supported.
Home-page: https://github.com/liris/websocket-client
Author: liris
@@ -51,7 +51,9 @@
Current implementation of websocket-client is using "CONNECT" method via proxy.
- example::
+ example
+
+ .. code:: python
import websocket
ws = websocket.WebSocket()
@@ -63,28 +65,49 @@
Example
=============
- Low Level API example::
+ Low Level API example
+
+ .. code:: python
from websocket import create_connection
ws = create_connection("ws://echo.websocket.org/")
print "Sending 'Hello, World'..."
ws.send("Hello, World")
print "Sent"
- print "Reeiving..."
+ print "Receiving..."
result = ws.recv()
print "Received '%s'" % result
ws.close()
If you want to customize socket options, set sockopt.
- sockopt example::
+ sockopt example
+
+ .. code:: python
from websocket import create_connection
ws = create_connection("ws://echo.websocket.org/",
sockopt=((socket.IPPROTO_TCP, socket.TCP_NODELAY),))
+ You can also use your own class for the connection.
+
+ custom connection class example
+
+ ..code:: python
+
+ from websocket import create_connection, WebSocket
+ class MyWebSocket(WebSocket):
+ def recv_frame(self):
+ frame = super().recv_frame()
+ print('yay! I got this frame: ', frame)
+ return frame
+
+ ws = create_connection("ws://echo.websocket.org/",
+ sockopt=((socket.IPPROTO_TCP, socket.TCP_NODELAY),), class_=MyWebSocket)
+
+ JavaScript websocket-like API example
- JavaScript websocket-like API example::
+ .. code:: python
import websocket
import thread
@@ -128,17 +151,23 @@
Please set sslopt to {"cert_reqs": ssl.CERT_NONE}.
- WebSocketApp sample::
+ WebSocketApp sample
+
+ .. code:: python
ws = websocket.WebSocketApp("wss://echo.websocket.org")
ws.run_forever(sslopt={"cert_reqs": ssl.CERT_NONE})
- create_connection sample::
+ create_connection sample
+
+ .. code:: python
ws = websocket.create_connection("wss://echo.websocket.org",
sslopt={"cert_reqs": ssl.CERT_NONE})
- WebSocket sample::
+ WebSocket sample
+
+ .. code:: python
ws = websocket.WebSocket(sslopt={"cert_reqs": ssl.CERT_NONE})
ws.connect("wss://echo.websocket.org")
@@ -150,24 +179,30 @@
Please set sslopt to {"check_hostname": False}.
(since v0.18.0)
- WebSocketApp sample::
+ WebSocketApp sample
+
+ .. code:: python
ws = websocket.WebSocketApp("wss://echo.websocket.org")
ws.run_forever(sslopt={"check_hostname": False})
- create_connection sample::
+ create_connection sample
+
+ .. code:: python
ws = websocket.create_connection("wss://echo.websocket.org",
sslopt={"check_hostname": False})
- WebSocket sample::
+ WebSocket sample
+
+ .. code:: python
ws = websocket.WebSocket(sslopt={"check_hostname": False})
ws.connect("wss://echo.websocket.org")
How to enable `SNI <http://en.wikipedia.org/wiki/Server_Name_Indication>`_?
- ------------------
+ ---------------------------------------------------------------------------
SNI support is available for Python 2.7.9+ and 3.2+. It will be enabled automatically whenever possible.
@@ -175,10 +210,12 @@
Sub Protocols.
----------------------------------------
- The server needs to support sub protocols, please set the subprotcol like this.
+ The server needs to support sub protocols, please set the subprotocol like this.
+
+ Subprotocol sample
- Subprotocol sample::
+ .. code:: python
ws = websocket.create_connection("ws://exapmle.com/websocket", subprotocols=["binary", "base64"])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/README.rst new/websocket_client-0.37.0/README.rst
--- old/websocket_client-0.32.0/README.rst 2015-04-28 02:10:55.000000000 +0200
+++ new/websocket_client-0.37.0/README.rst 2016-04-12 01:43:10.000000000 +0200
@@ -43,7 +43,9 @@
Current implementation of websocket-client is using "CONNECT" method via proxy.
-example::
+example
+
+.. code:: python
import websocket
ws = websocket.WebSocket()
@@ -55,28 +57,49 @@
Example
=============
-Low Level API example::
+Low Level API example
+
+.. code:: python
from websocket import create_connection
ws = create_connection("ws://echo.websocket.org/")
print "Sending 'Hello, World'..."
ws.send("Hello, World")
print "Sent"
- print "Reeiving..."
+ print "Receiving..."
result = ws.recv()
print "Received '%s'" % result
ws.close()
If you want to customize socket options, set sockopt.
-sockopt example::
+sockopt example
+
+.. code:: python
from websocket import create_connection
ws = create_connection("ws://echo.websocket.org/",
sockopt=((socket.IPPROTO_TCP, socket.TCP_NODELAY),))
+You can also use your own class for the connection.
+
+custom connection class example
+
+..code:: python
+
+ from websocket import create_connection, WebSocket
+ class MyWebSocket(WebSocket):
+ def recv_frame(self):
+ frame = super().recv_frame()
+ print('yay! I got this frame: ', frame)
+ return frame
+
+ ws = create_connection("ws://echo.websocket.org/",
+ sockopt=((socket.IPPROTO_TCP, socket.TCP_NODELAY),), class_=MyWebSocket)
+
+JavaScript websocket-like API example
-JavaScript websocket-like API example::
+.. code:: python
import websocket
import thread
@@ -120,17 +143,23 @@
Please set sslopt to {"cert_reqs": ssl.CERT_NONE}.
-WebSocketApp sample::
+WebSocketApp sample
+
+.. code:: python
ws = websocket.WebSocketApp("wss://echo.websocket.org")
ws.run_forever(sslopt={"cert_reqs": ssl.CERT_NONE})
-create_connection sample::
+create_connection sample
+
+.. code:: python
ws = websocket.create_connection("wss://echo.websocket.org",
sslopt={"cert_reqs": ssl.CERT_NONE})
-WebSocket sample::
+WebSocket sample
+
+.. code:: python
ws = websocket.WebSocket(sslopt={"cert_reqs": ssl.CERT_NONE})
ws.connect("wss://echo.websocket.org")
@@ -142,24 +171,30 @@
Please set sslopt to {"check_hostname": False}.
(since v0.18.0)
-WebSocketApp sample::
+WebSocketApp sample
+
+.. code:: python
ws = websocket.WebSocketApp("wss://echo.websocket.org")
ws.run_forever(sslopt={"check_hostname": False})
-create_connection sample::
+create_connection sample
+
+.. code:: python
ws = websocket.create_connection("wss://echo.websocket.org",
sslopt={"check_hostname": False})
-WebSocket sample::
+WebSocket sample
+
+.. code:: python
ws = websocket.WebSocket(sslopt={"check_hostname": False})
ws.connect("wss://echo.websocket.org")
How to enable `SNI <http://en.wikipedia.org/wiki/Server_Name_Indication>`_?
-------------------
+---------------------------------------------------------------------------
SNI support is available for Python 2.7.9+ and 3.2+. It will be enabled automatically whenever possible.
@@ -167,10 +202,12 @@
Sub Protocols.
----------------------------------------
-The server needs to support sub protocols, please set the subprotcol like this.
+The server needs to support sub protocols, please set the subprotocol like this.
+
+Subprotocol sample
-Subprotocol sample::
+.. code:: python
ws = websocket.create_connection("ws://exapmle.com/websocket", subprotocols=["binary", "base64"])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/bin/wsdump.py new/websocket_client-0.37.0/bin/wsdump.py
--- old/websocket_client-0.32.0/bin/wsdump.py 2015-05-22 04:01:33.000000000 +0200
+++ new/websocket_client-0.37.0/bin/wsdump.py 2016-01-04 07:13:49.000000000 +0100
@@ -7,6 +7,7 @@
import threading
import time
import websocket
+from six.moves.urllib.parse import urlparse
try:
import readline
except:
@@ -39,6 +40,8 @@
parser = argparse.ArgumentParser(description="WebSocket Simple Dump Tool")
parser.add_argument("url", metavar="ws_url",
help="websocket url. ex. ws://echo.websocket.org/")
+ parser.add_argument("-p", "--proxy",
+ help="proxy url. ex. http://127.0.0.1:8080")
parser.add_argument("-v", "--verbose", default=0, nargs='?', action=VAction,
dest="verbose",
help="set verbose mode. If set to 1, show opcode. "
@@ -52,9 +55,11 @@
parser.add_argument("-o", "--origin",
help="Set origin")
parser.add_argument("--eof-wait", default=0, type=int,
- help="wait time(second) after 'EOF' recieved.")
+ help="wait time(second) after 'EOF' received.")
parser.add_argument("-t", "--text",
help="Send initial text")
+ parser.add_argument("--timings", action="store_true",
+ help="Print timings in seconds")
return parser.parse_args()
@@ -93,10 +98,15 @@
return self.raw_input("")
def main():
+ start_time = time.time()
args = parse_args()
if args.verbose > 1:
websocket.enableTrace(True)
options = {}
+ if (args.proxy):
+ p = urlparse(args.proxy)
+ options["http_proxy_host"] = p.hostname
+ options["http_proxy_port"] = p.port
if (args.origin):
options["origin"] = args.origin
if (args.subprotocols):
@@ -142,7 +152,10 @@
msg = "%s: %s" % (websocket.ABNF.OPCODE_MAP.get(opcode), data)
if msg is not None:
- console.write(msg)
+ if (args.timings):
+ console.write(str(time.time() - start_time) + ": " + msg)
+ else:
+ console.write(msg)
if opcode == websocket.ABNF.OPCODE_CLOSE:
break
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/examples/echoapp_client.py new/websocket_client-0.37.0/examples/echoapp_client.py
--- old/websocket_client-0.32.0/examples/echoapp_client.py 2014-12-03 00:17:56.000000000 +0100
+++ new/websocket_client-0.37.0/examples/echoapp_client.py 2016-01-04 07:13:49.000000000 +0100
@@ -23,8 +23,8 @@
def run(*args):
for i in range(3):
# send the message, then wait
- # so thread doesnt exit and socket
- # isnt closed
+ # so thread doesn't exit and socket
+ # isn't closed
ws.send("Hello %d" % i)
time.sleep(1)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/setup.py new/websocket_client-0.37.0/setup.py
--- old/websocket_client-0.32.0/setup.py 2015-06-03 02:44:39.000000000 +0200
+++ new/websocket_client-0.37.0/setup.py 2016-04-14 02:10:56.000000000 +0200
@@ -1,7 +1,7 @@
from setuptools import setup
import sys
-VERSION = "0.32.0"
+VERSION = "0.37.0"
NAME="websocket_client"
install_requires = ["six"]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket/__init__.py new/websocket_client-0.37.0/websocket/__init__.py
--- old/websocket_client-0.32.0/websocket/__init__.py 2015-06-03 02:44:39.000000000 +0200
+++ new/websocket_client-0.37.0/websocket/__init__.py 2016-04-14 02:11:13.000000000 +0200
@@ -22,4 +22,4 @@
from ._core import *
from ._app import WebSocketApp
-__version__ = "0.32.0"
+__version__ = "0.37.0"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket/_abnf.py new/websocket_client-0.37.0/websocket/_abnf.py
--- old/websocket_client-0.32.0/websocket/_abnf.py 2015-04-01 00:47:57.000000000 +0200
+++ new/websocket_client-0.37.0/websocket/_abnf.py 2016-01-04 07:13:49.000000000 +0100
@@ -26,6 +26,24 @@
from ._exceptions import *
from ._utils import validate_utf8
+try:
+ # If wsaccel is available we use compiled routines to mask data.
+ from wsaccel.xormask import XorMaskerSimple
+
+ def _mask(_m, _d):
+ return XorMaskerSimple(_m).process(_d)
+
+except ImportError:
+ # wsaccel is not available, we rely on python implementations.
+ def _mask(_m, _d):
+ for i in range(len(_d)):
+ _d[i] ^= _m[i % 4]
+
+ if six.PY3:
+ return _d.tobytes()
+ else:
+ return _d.tostring()
+
# closing frame status codes.
STATUS_NORMAL = 1000
STATUS_GOING_AWAY = 1001
@@ -81,7 +99,7 @@
OPCODE_PONG: "pong"
}
- # data length threashold.
+ # data length threshold.
LENGTH_7 = 0x7e
LENGTH_16 = 1 << 16
LENGTH_63 = 1 << 63
@@ -144,8 +162,8 @@
create frame to send text, binary and other data.
data: data to send. This is string value(byte array).
- if opcode is OPCODE_TEXT and this value is uniocde,
- data value is conveted into unicode string, automatically.
+ if opcode is OPCODE_TEXT and this value is unicode,
+ data value is converted into unicode string, automatically.
opcode: operation code. please see OPCODE_XXX.
@@ -208,6 +226,7 @@
"""
if data == None:
data = ""
+
if isinstance(mask_key, six.text_type):
mask_key = six.b(mask_key)
@@ -216,14 +235,7 @@
_m = array.array("B", mask_key)
_d = array.array("B", data)
- for i in range(len(_d)):
- _d[i] ^= _m[i % 4]
-
- if six.PY3:
- return _d.tobytes()
- else:
- return _d.tostring()
-
+ return _mask(_m, _d)
class frame_buffer(object):
_HEADER_MASK_INDEX = 5
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket/_app.py new/websocket_client-0.37.0/websocket/_app.py
--- old/websocket_client-0.32.0/websocket/_app.py 2015-04-28 02:10:55.000000000 +0200
+++ new/websocket_client-0.37.0/websocket/_app.py 2016-04-12 01:58:13.000000000 +0200
@@ -33,7 +33,7 @@
from ._core import WebSocket, getdefaulttimeout
from ._exceptions import *
from ._logging import *
-from websocket._abnf import ABNF
+from ._abnf import ABNF
__all__ = ["WebSocketApp"]
@@ -48,29 +48,38 @@
on_close=None, on_ping=None, on_pong=None,
on_cont_message=None,
keep_running=True, get_mask_key=None, cookie=None,
- subprotocols=None):
+ subprotocols=None,
+ on_data=None):
"""
url: websocket url.
header: custom header for websocket handshake.
on_open: callable object which is called at opening websocket.
- this function has one argument. The arugment is this class object.
- on_message: callbale object which is called when recieved data.
+ this function has one argument. The argument is this class object.
+ on_message: callable object which is called when received data.
on_message has 2 arguments.
- The 1st arugment is this class object.
- The passing 2nd arugment is utf-8 string which we get from the server.
+ The 1st argument is this class object.
+ The 2nd argument is utf-8 string which we get from the server.
on_error: callable object which is called when we get error.
on_error has 2 arguments.
- The 1st arugment is this class object.
- The passing 2nd arugment is exception object.
+ The 1st argument is this class object.
+ The 2nd argument is exception object.
on_close: callable object which is called when closed the connection.
- this function has one argument. The arugment is this class object.
- on_cont_message: callback object which is called when recieve continued
+ this function has one argument. The argument is this class object.
+ on_cont_message: callback object which is called when receive continued
frame data.
- on_message has 3 arguments.
- The 1st arugment is this class object.
- The passing 2nd arugment is utf-8 string which we get from the server.
- The 3rd arugment is continue flag. if 0, the data continue
+ on_cont_message has 3 arguments.
+ The 1st argument is this class object.
+ The 2nd argument is utf-8 string which we get from the server.
+ The 3rd argument is continue flag. if 0, the data continue
to next frame data
+ on_data: callback object which is called when a message received.
+ This is called before on_message or on_cont_message,
+ and then on_message or on_cont_message is called.
+ on_data has 4 argument.
+ The 1st argument is this class object.
+ The 2nd argument is utf-8 string which we get from the server.
+ The 3rd argument is data type. ABNF.OPCODE_TEXT or ABNF.OPCODE_BINARY will be came.
+ The 4th argument is continue flag. if 0, the data continue
keep_running: a boolean flag indicating whether the app's main loop
should keep running, defaults to True
get_mask_key: a callable to produce new mask keys,
@@ -82,6 +91,7 @@
self.cookie = cookie
self.on_open = on_open
self.on_message = on_message
+ self.on_data = on_data
self.on_error = on_error
self.on_close = on_close
self.on_ping = on_ping
@@ -91,6 +101,7 @@
self.get_mask_key = get_mask_key
self.sock = None
self.last_ping_tm = 0
+ self.last_pong_tm = 0
self.subprotocols = subprotocols
def send(self, data, opcode=ABNF.OPCODE_TEXT):
@@ -129,12 +140,12 @@
This loop is infinite loop and is alive during websocket is available.
sockopt: values for socket.setsockopt.
sockopt must be tuple
- and each element is argument of sock.setscokopt.
+ and each element is argument of sock.setsockopt.
sslopt: ssl socket optional dict.
ping_interval: automatically send "ping" command
every specified period(second)
if set to 0, not send automatically.
- ping_timeout: timeout(second) if the pong message is not recieved.
+ ping_timeout: timeout(second) if the pong message is not received.
http_proxy_host: http proxy host name.
http_proxy_port: http proxy port. If not set, set to 80.
http_no_proxy: host names, which doesn't use proxy.
@@ -145,6 +156,8 @@
if not ping_timeout or ping_timeout <= 0:
ping_timeout = None
+ if ping_timeout and ping_interval and ping_interval <= ping_timeout:
+ raise WebSocketException("Ensure ping_interval > ping_timeout")
if sockopt is None:
sockopt = []
if sslopt is None:
@@ -178,9 +191,6 @@
r, w, e = select.select((self.sock.sock, ), (), (), ping_timeout)
if not self.keep_running:
break
- if ping_timeout and self.last_ping_tm and time.time() - self.last_ping_tm > ping_timeout:
- self.last_ping_tm = 0
- raise WebSocketTimeoutException("ping timed out")
if r:
op_code, frame = self.sock.recv_data_frame(True)
@@ -190,18 +200,29 @@
elif op_code == ABNF.OPCODE_PING:
self._callback(self.on_ping, frame.data)
elif op_code == ABNF.OPCODE_PONG:
+ self.last_pong_tm = time.time()
self._callback(self.on_pong, frame.data)
elif op_code == ABNF.OPCODE_CONT and self.on_cont_message:
+ self._callback(self.on_data, data, frame.opcode, frame.fin)
self._callback(self.on_cont_message, frame.data, frame.fin)
else:
data = frame.data
if six.PY3 and frame.opcode == ABNF.OPCODE_TEXT:
data = data.decode("utf-8")
+ self._callback(self.on_data, data, frame.opcode, True)
self._callback(self.on_message, data)
- except Exception as e:
+
+ if ping_timeout and self.last_ping_tm \
+ and self.last_ping_tm - time.time() > ping_timeout \
+ and self.last_ping_tm - self.last_pong_tm > ping_timeout:
+ raise WebSocketTimeoutException("ping/pong timed out")
+ except (Exception, KeyboardInterrupt, SystemExit) as e:
self._callback(self.on_error, e)
+ if isinstance(e, SystemExit):
+ # propagate SystemExit further
+ raise
finally:
- if thread:
+ if thread and thread.isAlive():
event.set()
thread.join()
self.keep_running = False
@@ -215,8 +236,12 @@
if they exists, and if the self.on_close except three arguments """
import inspect
# if the on_close callback is "old", just return empty list
- if not self.on_close or len(inspect.getargspec(self.on_close).args) != 3:
- return []
+ if sys.version_info < (3, 0):
+ if not self.on_close or len(inspect.getargspec(self.on_close).args) != 3:
+ return []
+ else:
+ if not self.on_close or len(inspect.getfullargspec(self.on_close).args) != 3:
+ return []
if data and len(data) >= 2:
code = 256*six.byte2int(data[0:1]) + six.byte2int(data[1:2])
@@ -230,7 +255,7 @@
try:
callback(self, *args)
except Exception as e:
- error(e)
+ error("error from callback {}: {}".format(callback, e))
if isEnabledForDebug():
_, _, tb = sys.exc_info()
traceback.print_tb(tb)
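
A hedged usage sketch of the new _app.py behaviour above: the extra on_data callback receives (app, data, opcode, fin) before on_message fires, and run_forever() now refuses a ping_interval that is not larger than ping_timeout. The echo endpoint comes from the README examples; the callback bodies are illustrative only:

    import websocket

    def on_data(ws, data, opcode, fin):
        # Called before on_message/on_cont_message; opcode is ABNF.OPCODE_TEXT or OPCODE_BINARY.
        print("frame:", opcode, "fin:", fin, "payload:", data)

    def on_error(ws, error):
        print("error from app:", error)

    app = websocket.WebSocketApp("ws://echo.websocket.org/",
                                 on_data=on_data,
                                 on_error=on_error)
    # ping_interval <= ping_timeout now raises WebSocketException immediately.
    app.run_forever(ping_interval=30, ping_timeout=10)
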
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket/_core.py new/websocket_client-0.37.0/websocket/_core.py
--- old/websocket_client-0.32.0/websocket/_core.py 2015-04-28 02:10:55.000000000 +0200
+++ new/websocket_client-0.37.0/websocket/_core.py 2016-04-14 02:24:17.000000000 +0200
@@ -53,58 +53,6 @@
"""
-def create_connection(url, timeout=None, **options):
- """
- connect to url and return websocket object.
-
- Connect to url and return the WebSocket object.
- Passing optional timeout parameter will set the timeout on the socket.
- If no timeout is supplied,
- the global default timeout setting returned by getdefauttimeout() is used.
- You can customize using 'options'.
- If you set "header" list object, you can set your own custom header.
-
- >>> conn = create_connection("ws://echo.websocket.org/",
- ... header=["User-Agent: MyProgram",
- ... "x-custom: header"])
-
-
- timeout: socket timeout time. This value is integer.
- if you set None for this value,
- it means "use default_timeout value"
-
-
- options: "header" -> custom http header list.
- "cookie" -> cookie value.
- "origin" -> custom origin url.
- "host" -> custom host header string.
- "http_proxy_host" - http proxy host name.
- "http_proxy_port" - http proxy port. If not set, set to 80.
- "http_no_proxy" - host names, which doesn't use proxy.
- "http_proxy_auth" - http proxy auth infomation.
- tuple of username and password.
- default is None
- "enable_multithread" -> enable lock for multithread.
- "sockopt" -> socket options
- "sslopt" -> ssl option
- "subprotocols" - array of available sub protocols.
- default is None.
- "skip_utf8_validation" - skip utf8 validation.
- """
- sockopt = options.get("sockopt", [])
- sslopt = options.get("sslopt", {})
- fire_cont_frame = options.get("fire_cont_frame", False)
- enable_multithread = options.get("enable_multithread", False)
- skip_utf8_validation = options.get("skip_utf8_validation", False)
- websock = WebSocket(sockopt=sockopt, sslopt=sslopt,
- fire_cont_frame=fire_cont_frame,
- enable_multithread=enable_multithread,
- skip_utf8_validation=skip_utf8_validation)
- websock.settimeout(timeout if timeout is not None else getdefaulttimeout())
- websock.connect(url, **options)
- return websock
-
-
class WebSocket(object):
"""
Low level WebSocket interface.
@@ -112,8 +60,8 @@
The WebSocket protocol draft-hixie-thewebsocketprotocol-76
http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76
- We can connect to the websocket server and send/recieve data.
- The following example is a echo client.
+ We can connect to the websocket server and send/receive data.
+ The following example is an echo client.
>>> import websocket
>>> ws = websocket.WebSocket()
@@ -126,7 +74,7 @@
get_mask_key: a callable to produce new mask keys, see the set_mask_key
function's docstring for more details
sockopt: values for socket.setsockopt.
- sockopt must be tuple and each element is argument of sock.setscokopt.
+ sockopt must be tuple and each element is argument of sock.setsockopt.
sslopt: dict object for ssl socket option.
fire_cont_frame: fire recv event for each cont frame. default is False
enable_multithread: if set to True, lock send method.
@@ -135,9 +83,9 @@
def __init__(self, get_mask_key=None, sockopt=None, sslopt=None,
fire_cont_frame=False, enable_multithread=False,
- skip_utf8_validation=False):
+ skip_utf8_validation=False, **options):
"""
- Initalize WebSocket object.
+ Initialize WebSocket object.
"""
self.sock_opt = sock_opt(sockopt, sslopt)
self.handshake_response = None
@@ -172,12 +120,12 @@
def set_mask_key(self, func):
"""
- set function to create musk key. You can custumize mask key generator.
+ set function to create musk key. You can customize mask key generator.
Mainly, this is for testing purpose.
- func: callable object. the fuct must 1 argument as integer.
+ func: callable object. the func takes 1 argument as integer.
The argument means length of mask key.
- This func must be return string(byte array),
+ This func must return string(byte array),
which length is argument specified.
"""
self.get_mask_key = func
@@ -249,21 +197,23 @@
if you set None for this value,
it means "use default_timeout value"
- options: "header" -> custom http header list.
+ options: "header" -> custom http header list or dict.
"cookie" -> cookie value.
"origin" -> custom origin url.
"host" -> custom host header string.
"http_proxy_host" - http proxy host name.
"http_proxy_port" - http proxy port. If not set, set to 80.
"http_no_proxy" - host names, which doesn't use proxy.
- "http_proxy_auth" - http proxy auth infomation.
+ "http_proxy_auth" - http proxy auth information.
tuple of username and password.
- defualt is None
+ default is None
"subprotocols" - array of available sub protocols.
default is None.
+ "socket" - pre-initialized stream socket.
"""
- self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options))
+ self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options),
+ options.pop('socket', None))
try:
self.handshake_response = handshake(self.sock, *addrs, **options)
@@ -355,7 +305,7 @@
def recv_data(self, control_frame=False):
"""
- Recieve data with operation code.
+ Receive data with operation code.
control_frame: a boolean flag indicating whether to return control frame
data, defaults to False
@@ -367,7 +317,7 @@
def recv_data_frame(self, control_frame=False):
"""
- Recieve data with operation code.
+ Receive data with operation code.
control_frame: a boolean flag indicating whether to return control frame
data, defaults to False
@@ -403,7 +353,7 @@
def recv_frame(self):
"""
- recieve data as frame from server.
+ receive data as frame from server.
return value: ABNF frame object.
"""
@@ -422,13 +372,16 @@
self.connected = False
self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE)
- def close(self, status=STATUS_NORMAL, reason=six.b("")):
+ def close(self, status=STATUS_NORMAL, reason=six.b(""), timeout=3):
"""
Close Websocket object
status: status code to send. see STATUS_XXX.
reason: the reason to close. This must be string.
+
+ timeout: timeout until receive a close frame.
+ If None, it will wait forever until receive a close frame.
"""
if self.connected:
if status < 0 or status >= ABNF.LENGTH_16:
@@ -437,8 +390,8 @@
try:
self.connected = False
self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE)
- timeout = self.sock.gettimeout()
- self.sock.settimeout(3)
+ sock_timeout = self.sock.gettimeout()
+ self.sock.settimeout(timeout)
try:
frame = self.recv_frame()
if isEnabledForError():
@@ -447,7 +400,7 @@
error("close status: " + repr(recv_status))
except:
pass
- self.sock.settimeout(timeout)
+ self.sock.settimeout(sock_timeout)
self.sock.shutdown(socket.SHUT_RDWR)
except:
pass
@@ -456,7 +409,7 @@
def abort(self):
"""
- Low-level asynchonous abort, wakes up other threads that are waiting in recv_*
+ Low-level asynchronous abort, wakes up other threads that are waiting in recv_*
"""
if self.connected:
self.sock.shutdown(socket.SHUT_RDWR)
@@ -480,3 +433,58 @@
self.sock = None
self.connected = False
raise
+
+
+def create_connection(url, timeout=None, class_=WebSocket, **options):
+ """
+ connect to url and return websocket object.
+
+ Connect to url and return the WebSocket object.
+ Passing optional timeout parameter will set the timeout on the socket.
+ If no timeout is supplied,
+ the global default timeout setting returned by getdefauttimeout() is used.
+ You can customize using 'options'.
+ If you set "header" list object, you can set your own custom header.
+
+ >>> conn = create_connection("ws://echo.websocket.org/",
+ ... header=["User-Agent: MyProgram",
+ ... "x-custom: header"])
+
+
+ timeout: socket timeout time. This value is integer.
+ if you set None for this value,
+ it means "use default_timeout value"
+
+ class_: class to instantiate when creating the connection. It has to implement
+ settimeout and connect. It's __init__ should be compatible with
+ WebSocket.__init__, i.e. accept all of it's kwargs.
+ options: "header" -> custom http header list or dict.
+ "cookie" -> cookie value.
+ "origin" -> custom origin url.
+ "host" -> custom host header string.
+ "http_proxy_host" - http proxy host name.
+ "http_proxy_port" - http proxy port. If not set, set to 80.
+ "http_no_proxy" - host names, which doesn't use proxy.
+ "http_proxy_auth" - http proxy auth information.
+ tuple of username and password.
+ default is None
+ "enable_multithread" -> enable lock for multithread.
+ "sockopt" -> socket options
+ "sslopt" -> ssl option
+ "subprotocols" - array of available sub protocols.
+ default is None.
+ "skip_utf8_validation" - skip utf8 validation.
+ "socket" - pre-initialized stream socket.
+ """
+ sockopt = options.pop("sockopt", [])
+ sslopt = options.pop("sslopt", {})
+ fire_cont_frame = options.pop("fire_cont_frame", False)
+ enable_multithread = options.pop("enable_multithread", False)
+ skip_utf8_validation = options.pop("skip_utf8_validation", False)
+ websock = class_(sockopt=sockopt, sslopt=sslopt,
+ fire_cont_frame=fire_cont_frame,
+ enable_multithread=enable_multithread,
+ skip_utf8_validation=skip_utf8_validation, **options)
+ websock.settimeout(timeout if timeout is not None else getdefaulttimeout())
+ websock.connect(url, **options)
+ return websock
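
Besides moving create_connection() below the class and adding the class_ hook (shown with MyWebSocket further down in the PKG-INFO), the hunk documents a new "socket" option for handing in a pre-connected stream socket. A sketch under that assumption; the endpoint is the README's echo server and the rest is illustrative, not taken from the package docs:

    import socket
    import websocket

    # Reuse an already-connected TCP socket; only the WebSocket handshake
    # is performed on it (the TCP/SSL setup in _http.connect() is skipped).
    raw = socket.create_connection(("echo.websocket.org", 80))
    ws = websocket.create_connection("ws://echo.websocket.org/", socket=raw)
    ws.send("Hello, World")
    print(ws.recv())
    ws.close()
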
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket/_exceptions.py new/websocket_client-0.37.0/websocket/_exceptions.py
--- old/websocket_client-0.32.0/websocket/_exceptions.py 2015-03-10 07:36:38.000000000 +0100
+++ new/websocket_client-0.37.0/websocket/_exceptions.py 2016-01-04 07:13:49.000000000 +0100
@@ -27,22 +27,25 @@
class WebSocketException(Exception):
"""
- websocket exeception class.
+ websocket exception class.
"""
pass
+
class WebSocketProtocolException(WebSocketException):
"""
- If the webscoket protocol is invalid, this exception will be raised.
+ If the websocket protocol is invalid, this exception will be raised.
"""
pass
+
class WebSocketPayloadException(WebSocketException):
"""
- If the webscoket payload is invalid, this exception will be raised.
+ If the websocket payload is invalid, this exception will be raised.
"""
pass
+
class WebSocketConnectionClosedException(WebSocketException):
"""
If remote host closed the connection or some network error happened,
@@ -50,16 +53,25 @@
"""
pass
+
class WebSocketTimeoutException(WebSocketException):
"""
WebSocketTimeoutException will be raised at socket timeout during read/write data.
"""
pass
+
class WebSocketProxyException(WebSocketException):
"""
- WebSocketProxyException will be raised when proxy error occured.
+ WebSocketProxyException will be raised when proxy error occurred.
"""
pass
+class WebSocketBadStatusException(WebSocketException):
+ """
+ WebSocketBadStatusException will be raised when we get bad handshake status code.
+ """
+ def __init__(self, message, status_code):
+ super(WebSocketBadStatusException, self).__init__(message % status_code)
+ self.status_code = status_code
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket/_handshake.py new/websocket_client-0.37.0/websocket/_handshake.py
--- old/websocket_client-0.32.0/websocket/_handshake.py 2015-05-22 04:01:33.000000000 +0200
+++ new/websocket_client-0.37.0/websocket/_handshake.py 2015-10-28 01:16:18.000000000 +0100
@@ -28,6 +28,9 @@
import uuid
import hashlib
+import hmac
+import os
+import sys
from ._logging import *
from ._url import *
@@ -37,6 +40,12 @@
__all__ = ["handshake_response", "handshake"]
+if hasattr(hmac, "compare_digest"):
+ compare_digest = hmac.compare_digest
+else:
+ def compare_digest(s1, s2):
+ return s1 == s2
+
# websocket supported version.
VERSION = 13
@@ -92,7 +101,10 @@
headers.append("Sec-WebSocket-Protocol: %s" % ",".join(subprotocols))
if "header" in options:
- headers.extend(options["header"])
+ header = options["header"]
+ if isinstance(header, dict):
+ header = map(": ".join, header.items())
+ headers.extend(header)
cookie = options.get("cookie", None)
@@ -108,7 +120,7 @@
def _get_resp_headers(sock, success_status=101):
status, resp_headers = read_headers(sock)
if status != success_status:
- raise WebSocketException("Handshake status %d" % status)
+ raise WebSocketBadStatusException("Handshake status %d", status)
return status, resp_headers
_HEADERS_TO_CHECK = {
@@ -143,7 +155,8 @@
value = (key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").encode('utf-8')
hashed = base64encode(hashlib.sha1(value).digest()).strip().lower()
- success = (hashed == result)
+ success = compare_digest(hashed, result)
+
if success:
return True, subproto
else:
@@ -151,5 +164,5 @@
def _create_sec_websocket_key():
- uid = uuid.uuid4()
- return base64encode(uid.bytes).decode('utf-8').strip()
+ randomness = os.urandom(16)
+ return base64encode(randomness).decode('utf-8').strip()
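
Two of the handshake changes above are easy to illustrate: the Sec-WebSocket-Key is now a base64-encoded 16-byte random nonce rather than a UUID4 (matching the updated test further down), and a dict passed as the "header" option is flattened into "Name: value" lines. A small sketch using only the standard library:

    import base64
    import os

    # What _create_sec_websocket_key() now amounts to:
    key = base64.b64encode(os.urandom(16)).decode("utf-8").strip()
    assert len(base64.b64decode(key)) == 16

    # How a dict-style "header" option is turned into header lines:
    header = {"User-Agent": "MyProgram", "x-custom": "header"}
    print(list(map(": ".join, header.items())))
    # -> ['User-Agent: MyProgram', 'x-custom: header'] (dict order may vary)
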
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket/_http.py new/websocket_client-0.37.0/websocket/_http.py
--- old/websocket_client-0.32.0/websocket/_http.py 2015-06-03 02:44:39.000000000 +0200
+++ new/websocket_client-0.37.0/websocket/_http.py 2016-01-04 09:02:13.000000000 +0100
@@ -51,8 +51,12 @@
self.auth = None
self.no_proxy = None
-def connect(url, options, proxy):
+def connect(url, options, proxy, socket):
hostname, port, resource, is_secure = parse_url(url)
+
+ if socket:
+ return socket, (hostname, port, resource)
+
addrinfo_list, need_tunnel, auth = _get_addrinfo_list(hostname, port, is_secure, proxy)
if not addrinfo_list:
raise WebSocketException(
@@ -119,19 +123,29 @@
def _can_use_sni():
- return (six.PY2 and sys.version_info[1] >= 7 and sys.version_info[2] >= 9) or (six.PY3 and sys.version_info[2] >= 2)
+ return six.PY2 and sys.version_info >= (2, 7, 9) or sys.version_info >= (3, 2)
def _wrap_sni_socket(sock, sslopt, hostname, check_hostname):
context = ssl.SSLContext(sslopt.get('ssl_version', ssl.PROTOCOL_SSLv23))
- context.load_verify_locations(cafile=sslopt.get('ca_certs', None))
+ if sslopt.get('cert_reqs', ssl.CERT_NONE) != ssl.CERT_NONE:
+ context.load_verify_locations(cafile=sslopt.get('ca_certs', None))
+ if sslopt.get('certfile', None):
+ context.load_cert_chain(
+ sslopt['certfile'],
+ sslopt.get('keyfile', None),
+ sslopt.get('password', None),
+ )
# see https://github.com/liris/websocket-client/commit/b96a2e8fa765753e82eea531ad…
context.verify_mode = sslopt['cert_reqs']
if HAVE_CONTEXT_CHECK_HOSTNAME:
context.check_hostname = check_hostname
if 'ciphers' in sslopt:
context.set_ciphers(sslopt['ciphers'])
+ if 'cert_chain' in sslopt :
+ certfile,keyfile,password = sslopt['cert_chain']
+ context.load_cert_chain(certfile, keyfile, password)
return context.wrap_socket(
sock,
@@ -143,11 +157,12 @@
def _ssl_socket(sock, user_sslopt, hostname):
sslopt = dict(cert_reqs=ssl.CERT_REQUIRED)
+ sslopt.update(user_sslopt)
+
certPath = os.path.join(
os.path.dirname(__file__), "cacert.pem")
- if os.path.isfile(certPath):
+ if os.path.isfile(certPath) and user_sslopt.get('ca_certs', None) == None:
sslopt['ca_certs'] = certPath
- sslopt.update(user_sslopt)
check_hostname = sslopt["cert_reqs"] != ssl.CERT_NONE and sslopt.pop('check_hostname', True)
if _can_use_sni():
@@ -183,7 +198,7 @@
if status != 200:
raise WebSocketProxyException(
- "failed CONNECT via proxy status: %r" + status)
+ "failed CONNECT via proxy status: %r" % status)
return sock
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket/_logging.py new/websocket_client-0.37.0/websocket/_logging.py
--- old/websocket_client-0.32.0/websocket/_logging.py 2015-04-28 02:10:55.000000000 +0200
+++ new/websocket_client-0.37.0/websocket/_logging.py 2016-04-12 01:43:10.000000000 +0200
@@ -22,7 +22,7 @@
import logging
-_logger = logging.getLogger()
+_logger = logging.getLogger('websocket')
_traceEnabled = False
__all__ = ["enableTrace", "dump", "error", "debug", "trace",
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket/_socket.py new/websocket_client-0.37.0/websocket/_socket.py
--- old/websocket_client-0.32.0/websocket/_socket.py 2015-04-01 00:47:57.000000000 +0200
+++ new/websocket_client-0.37.0/websocket/_socket.py 2016-01-04 07:17:01.000000000 +0100
@@ -115,7 +115,7 @@
raise WebSocketTimeoutException(message)
except Exception as e:
message = extract_err_message(e)
- if message and "timed out" in message:
+ if isinstance(message, str) and "timed out" in message:
raise WebSocketTimeoutException(message)
else:
raise
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket/_url.py new/websocket_client-0.37.0/websocket/_url.py
--- old/websocket_client-0.32.0/websocket/_url.py 2015-04-01 00:47:57.000000000 +0200
+++ new/websocket_client-0.37.0/websocket/_url.py 2016-01-04 07:13:49.000000000 +0100
@@ -100,9 +100,9 @@
options: "http_proxy_host" - http proxy host name.
"http_proxy_port" - http proxy port.
"http_no_proxy" - host names, which doesn't use proxy.
- "http_proxy_auth" - http proxy auth infomation.
+ "http_proxy_auth" - http proxy auth information.
tuple of username and password.
- defualt is None
+ default is None
"""
if _is_no_proxy_host(hostname, no_proxy):
return None, 0, None
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket/_utils.py new/websocket_client-0.37.0/websocket/_utils.py
--- old/websocket_client-0.32.0/websocket/_utils.py 2015-05-22 04:01:33.000000000 +0200
+++ new/websocket_client-0.37.0/websocket/_utils.py 2016-01-04 07:13:49.000000000 +0100
@@ -28,43 +28,63 @@
def __enter__(self):
pass
- def __exit__(self,type, value, traceback):
+ def __exit__(self, type, value, traceback):
pass
+try:
+ # If wsaccel is available we use compiled routines to validate UTF-8
+ # strings.
+ from wsaccel.utf8validator import Utf8Validator
+
+ def _validate_utf8(utfbytes):
+ return Utf8Validator().validate(utfbytes)[0]
+
+except ImportError:
+ # UTF-8 validator
+ # python implementation of http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
+
+ _UTF8_ACCEPT = 0
+ _UTF8_REJECT = 12
+
+ _UTF8D = [
+ # The first part of the table maps bytes to character classes that
+ # to reduce the size of the transition table and create bitmasks.
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,
+ 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
+ 8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
+ 10,3,3,3,3,3,3,3,3,3,3,3,3,4,3,3, 11,6,6,6,5,8,8,8,8,8,8,8,8,8,8,8,
+
+ # The second part is a transition table that maps a combination
+ # of a state of the automaton and a character class to a state.
+ 0,12,24,36,60,96,84,12,12,12,48,72, 12,12,12,12,12,12,12,12,12,12,12,12,
+ 12, 0,12,12,12,12,12, 0,12, 0,12,12, 12,24,12,12,12,12,12,24,12,24,12,12,
+ 12,12,12,12,12,12,12,24,12,12,12,12, 12,24,12,12,12,12,12,12,12,24,12,12,
+ 12,12,12,12,12,12,12,36,12,36,12,12, 12,36,12,12,12,12,12,36,12,36,12,12,
+ 12,36,12,12,12,12,12,12,12,12,12,12, ]
+
+ def _decode(state, codep, ch):
+ tp = _UTF8D[ch]
+
+ codep = (ch & 0x3f ) | (codep << 6) if (state != _UTF8_ACCEPT) else (0xff >> tp) & (ch)
+ state = _UTF8D[256 + state + tp]
+
+ return state, codep;
+
+ def _validate_utf8(utfbytes):
+ state = _UTF8_ACCEPT
+ codep = 0
+ for i in utfbytes:
+ if six.PY2:
+ i = ord(i)
+ state, codep = _decode(state, codep, i)
+ if state == _UTF8_REJECT:
+ return False
-# UTF-8 validator
-# python implementation of http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
-
-UTF8_ACCEPT = 0
-UTF8_REJECT=12
-
-_UTF8D = [
- # The first part of the table maps bytes to character classes that
- # to reduce the size of the transition table and create bitmasks.
- 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
- 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
- 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
- 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
- 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,
- 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
- 8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
- 10,3,3,3,3,3,3,3,3,3,3,3,3,4,3,3, 11,6,6,6,5,8,8,8,8,8,8,8,8,8,8,8,
-
- # The second part is a transition table that maps a combination
- # of a state of the automaton and a character class to a state.
- 0,12,24,36,60,96,84,12,12,12,48,72, 12,12,12,12,12,12,12,12,12,12,12,12,
- 12, 0,12,12,12,12,12, 0,12, 0,12,12, 12,24,12,12,12,12,12,24,12,24,12,12,
- 12,12,12,12,12,12,12,24,12,12,12,12, 12,24,12,12,12,12,12,12,12,24,12,12,
- 12,12,12,12,12,12,12,36,12,36,12,12, 12,36,12,12,12,12,12,36,12,36,12,12,
- 12,36,12,12,12,12,12,12,12,12,12,12, ]
-
-def _decode(state, codep, ch):
- tp = _UTF8D[ch]
-
- codep = (ch & 0x3f ) | (codep << 6) if (state != UTF8_ACCEPT) else (0xff >> tp) & (ch)
- state = _UTF8D[256 + state + tp]
-
- return state, codep;
+ return True
def validate_utf8(utfbytes):
"""
@@ -72,17 +92,10 @@
utfbytes: utf byte string to check.
return value: if valid utf8 string, return true. Otherwise, return false.
"""
- state = UTF8_ACCEPT
- codep = 0
- for i in utfbytes:
- if six.PY2:
- i = ord(i)
- state, codep = _decode(state, codep, i)
- if state == UTF8_REJECT:
- return False
-
- return True
-
+ return _validate_utf8(utfbytes)
def extract_err_message(exception):
- return getattr(exception, 'strerror', str(exception))
+ if exception.args:
+ return exception.args[0]
+ else:
+ return None
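
The validator above (wsaccel's Utf8Validator when available, otherwise the pure-Python DFA from Bjoern Hoehrmann's decoder) is exposed through validate_utf8(), which takes a byte string. A quick check, assuming websocket-client 0.37.0 and its six dependency are installed:

    from websocket._utils import validate_utf8

    print(validate_utf8(b"snowman \xe2\x98\x83"))  # True  - valid UTF-8 (U+2603)
    print(validate_utf8(b"\xc0\xaf"))              # False - overlong encoding is rejected
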
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket/tests/test_websocket.py new/websocket_client-0.37.0/websocket/tests/test_websocket.py
--- old/websocket_client-0.32.0/websocket/tests/test_websocket.py 2015-05-22 04:01:33.000000000 +0200
+++ new/websocket_client-0.37.0/websocket/tests/test_websocket.py 2015-10-27 07:48:24.000000000 +0100
@@ -464,12 +464,12 @@
self.assertRaises(ws.WebSocketConnectionClosedException, s.send, "Hello")
self.assertRaises(ws.WebSocketConnectionClosedException, s.recv)
- def testUUID4(self):
- """ WebSocket key should be a UUID4.
+ def testNonce(self):
+ """ WebSocket key should be a random 16-byte nonce.
"""
key = _create_sec_websocket_key()
- u = uuid.UUID(bytes=base64decode(key.encode("utf-8")))
- self.assertEqual(4, u.version)
+ nonce = base64decode(key.encode("utf-8"))
+ self.assertEqual(16, len(nonce))
class WebSocketAppTest(unittest.TestCase):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket_client.egg-info/PKG-INFO new/websocket_client-0.37.0/websocket_client.egg-info/PKG-INFO
--- old/websocket_client-0.32.0/websocket_client.egg-info/PKG-INFO 2015-06-03 02:45:13.000000000 +0200
+++ new/websocket_client-0.37.0/websocket_client.egg-info/PKG-INFO 2016-04-14 02:27:29.000000000 +0200
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: websocket-client
-Version: 0.32.0
+Version: 0.37.0
Summary: WebSocket client for python. hybi13 is supported.
Home-page: https://github.com/liris/websocket-client
Author: liris
@@ -51,7 +51,9 @@
Current implementation of websocket-client is using "CONNECT" method via proxy.
- example::
+ example
+
+ .. code:: python
import websocket
ws = websocket.WebSocket()
@@ -63,28 +65,49 @@
Example
=============
- Low Level API example::
+ Low Level API example
+
+ .. code:: python
from websocket import create_connection
ws = create_connection("ws://echo.websocket.org/")
print "Sending 'Hello, World'..."
ws.send("Hello, World")
print "Sent"
- print "Reeiving..."
+ print "Receiving..."
result = ws.recv()
print "Received '%s'" % result
ws.close()
If you want to customize socket options, set sockopt.
- sockopt example::
+ sockopt example
+
+ .. code:: python
from websocket import create_connection
ws = create_connection("ws://echo.websocket.org/",
sockopt=((socket.IPPROTO_TCP, socket.TCP_NODELAY),))
+ You can also use your own class for the connection.
+
+ custom connection class example
+
+ ..code:: python
+
+ from websocket import create_connection, WebSocket
+ class MyWebSocket(WebSocket):
+ def recv_frame(self):
+ frame = super().recv_frame()
+ print('yay! I got this frame: ', frame)
+ return frame
+
+ ws = create_connection("ws://echo.websocket.org/",
+ sockopt=((socket.IPPROTO_TCP, socket.TCP_NODELAY),), class_=MyWebSocket)
+
+ JavaScript websocket-like API example
- JavaScript websocket-like API example::
+ .. code:: python
import websocket
import thread
@@ -128,17 +151,23 @@
Please set sslopt to {"cert_reqs": ssl.CERT_NONE}.
- WebSocketApp sample::
+ WebSocketApp sample
+
+ .. code:: python
ws = websocket.WebSocketApp("wss://echo.websocket.org")
ws.run_forever(sslopt={"cert_reqs": ssl.CERT_NONE})
- create_connection sample::
+ create_connection sample
+
+ .. code:: python
ws = websocket.create_connection("wss://echo.websocket.org",
sslopt={"cert_reqs": ssl.CERT_NONE})
- WebSocket sample::
+ WebSocket sample
+
+ .. code:: python
ws = websocket.WebSocket(sslopt={"cert_reqs": ssl.CERT_NONE})
ws.connect("wss://echo.websocket.org")
@@ -150,24 +179,30 @@
Please set sslopt to {"check_hostname": False}.
(since v0.18.0)
- WebSocketApp sample::
+ WebSocketApp sample
+
+ .. code:: python
ws = websocket.WebSocketApp("wss://echo.websocket.org")
ws.run_forever(sslopt={"check_hostname": False})
- create_connection sample::
+ create_connection sample
+
+ .. code:: python
ws = websocket.create_connection("wss://echo.websocket.org",
sslopt={"check_hostname": False})
- WebSocket sample::
+ WebSocket sample
+
+ .. code:: python
ws = websocket.WebSocket(sslopt={"check_hostname": False})
ws.connect("wss://echo.websocket.org")
How to enable `SNI <http://en.wikipedia.org/wiki/Server_Name_Indication>`_?
- ------------------
+ ---------------------------------------------------------------------------
SNI support is available for Python 2.7.9+ and 3.2+. It will be enabled automatically whenever possible.
@@ -175,10 +210,12 @@
Sub Protocols.
----------------------------------------
- The server needs to support sub protocols, please set the subprotcol like this.
+ The server needs to support sub protocols, please set the subprotocol like this.
+
+ Subprotocol sample
- Subprotocol sample::
+ .. code:: python
ws = websocket.create_connection("ws://exapmle.com/websocket", subprotocols=["binary", "base64"])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/websocket_client-0.32.0/websocket_client.egg-info/requires.txt new/websocket_client-0.37.0/websocket_client.egg-info/requires.txt
--- old/websocket_client-0.32.0/websocket_client.egg-info/requires.txt 2015-06-03 02:45:13.000000000 +0200
+++ new/websocket_client-0.37.0/websocket_client.egg-info/requires.txt 2016-04-14 02:27:29.000000000 +0200
@@ -1 +1,2 @@
-six
\ No newline at end of file
+six
+backports.ssl_match_hostname
Hello community,
here is the log from the commit of package python-warlock for openSUSE:Factory checked in at 2016-12-08 00:30:29
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-warlock (Old)
and /work/SRC/openSUSE:Factory/.python-warlock.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-warlock"
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-warlock/python-warlock.changes 2014-09-17 17:27:30.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.python-warlock.new/python-warlock.changes 2016-12-08 00:30:30.000000000 +0100
@@ -1,0 +2,5 @@
+Tue Nov 15 12:40:31 UTC 2016 - dmueller(a)suse.com
+
+- update to 1.2.0
+
+-------------------------------------------------------------------
Old:
----
warlock-1.1.0.tar.gz
New:
----
warlock-1.2.0.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-warlock.spec ++++++
--- /var/tmp/diff_new_pack.wzoxlU/_old 2016-12-08 00:30:31.000000000 +0100
+++ /var/tmp/diff_new_pack.wzoxlU/_new 2016-12-08 00:30:31.000000000 +0100
@@ -1,7 +1,7 @@
#
# spec file for package python-warlock
#
-# Copyright (c) 2014 SUSE LINUX Products GmbH, Nuernberg, Germany.
+# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -17,13 +17,13 @@
Name: python-warlock
-Version: 1.1.0
+Version: 1.2.0
Release: 0
Summary: Python object model built on top of JSON schema
License: Apache-2.0
Group: Development/Languages/Python
Url: http://github.com/bcwaldon/warlock
-Source: http://pypi.python.org/packages/source/w/warlock/warlock-%{version}.tar.gz
+Source: https://pypi.io/packages/source/w/warlock/warlock-%{version}.tar.gz
BuildRequires: python-devel
BuildRequires: python-jsonpatch
BuildRequires: python-jsonschema
++++++ warlock-1.1.0.tar.gz -> warlock-1.2.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/warlock-1.1.0/PKG-INFO new/warlock-1.2.0/PKG-INFO
--- old/warlock-1.1.0/PKG-INFO 2013-11-19 16:08:08.000000000 +0100
+++ new/warlock-1.2.0/PKG-INFO 2015-10-12 19:09:45.000000000 +0200
@@ -1,6 +1,6 @@
Metadata-Version: 1.0
Name: warlock
-Version: 1.1.0
+Version: 1.2.0
Summary: Python object model built on JSON schema and JSON patch.
Home-page: http://github.com/bcwaldon/warlock
Author: Brian Waldon
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/warlock-1.1.0/setup.cfg new/warlock-1.2.0/setup.cfg
--- old/warlock-1.1.0/setup.cfg 2013-11-19 16:08:08.000000000 +0100
+++ new/warlock-1.2.0/setup.cfg 2015-10-12 19:09:45.000000000 +0200
@@ -1,3 +1,6 @@
+[bdist_wheel]
+universal = 1
+
[egg_info]
tag_build =
tag_date = 0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/warlock-1.1.0/setup.py new/warlock-1.2.0/setup.py
--- old/warlock-1.1.0/setup.py 2013-11-19 16:04:21.000000000 +0100
+++ new/warlock-1.2.0/setup.py 2015-10-12 19:08:55.000000000 +0200
@@ -24,7 +24,7 @@
setuptools.setup(
name='warlock',
- version='1.1.0',
+ version='1.2.0',
description='Python object model built on JSON schema and JSON patch.',
author='Brian Waldon',
author_email='bcwaldon(a)gmail.com',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/warlock-1.1.0/test/test_core.py new/warlock-1.2.0/test/test_core.py
--- old/warlock-1.1.0/test/test_core.py 2013-11-19 16:00:43.000000000 +0100
+++ new/warlock-1.2.0/test/test_core.py 2015-10-12 18:48:26.000000000 +0200
@@ -15,6 +15,8 @@
import copy
import unittest
+import json
+
import six
import warlock
@@ -39,6 +41,15 @@
}
+nameless_fixture = {
+ 'properties': {
+ 'name': {'type': 'string'},
+ 'population': {'type': 'integer'},
+ },
+ 'additionalProperties': False,
+}
+
+
class TestCore(unittest.TestCase):
def test_create_invalid_object(self):
Country = warlock.model_factory(fixture)
@@ -94,6 +105,19 @@
exc = warlock.InvalidOperation
self.assertRaises(exc, sweden.update, {'population': 'N/A'})
self.assertRaises(exc, sweden.update, {'overloard': 'Bears'})
+
+ def test_naming(self):
+ Country = warlock.model_factory(fixture)
+ self.assertEqual(Country.__name__, 'Country')
+
+ Country2 = warlock.model_factory(fixture, name='Country2')
+ self.assertEqual(Country2.__name__, 'Country2')
+
+ nameless = warlock.model_factory(nameless_fixture)
+ self.assertEqual(nameless.__name__, 'Model')
+
+ nameless2 = warlock.model_factory(nameless_fixture, name='Country3')
+ self.assertEqual(nameless2.__name__, 'Country3')
def test_deepcopy(self):
"""Make sure we aren't leaking references."""
@@ -169,14 +193,17 @@
sweden = Country(name='Sweden', population=9379116)
sweden['name'] = 'Finland'
self.assertEqual(
- sweden.patch,
- '[{"path": "/name", "value": "Finland", "op": "replace"}]')
+ json.loads(sweden.patch),
+ json.loads(
+ '[{"path": "/name", "value": "Finland", "op": "replace"}]'))
def test_patch_drop_attribute(self):
Country = warlock.model_factory(fixture)
sweden = Country(name='Sweden', population=9379116)
del sweden['name']
- self.assertEqual(sweden.patch, '[{"path": "/name", "op": "remove"}]')
+ self.assertEqual(
+ json.loads(sweden.patch),
+ json.loads('[{"path": "/name", "op": "remove"}]'))
def test_patch_reduce_operations(self):
Country = warlock.model_factory(fixture)
@@ -184,13 +211,15 @@
sweden['name'] = 'Finland'
self.assertEqual(
- sweden.patch,
- '[{"path": "/name", "value": "Finland", "op": "replace"}]')
+ json.loads(sweden.patch),
+ json.loads(
+ '[{"path": "/name", "value": "Finland", "op": "replace"}]'))
sweden['name'] = 'Norway'
self.assertEqual(
- sweden.patch,
- '[{"path": "/name", "value": "Norway", "op": "replace"}]')
+ json.loads(sweden.patch),
+ json.loads(
+ '[{"path": "/name", "value": "Norway", "op": "replace"}]'))
def test_patch_multiple_operations(self):
Country = warlock.model_factory(fixture)
@@ -198,7 +227,12 @@
sweden['name'] = 'Finland'
sweden['population'] = 5387000
- self.assertEqual(
- sweden.patch,
+
+ self.assertEqual(len(json.loads(sweden.patch)), 2)
+
+ patches = json.loads(
'[{"path": "/name", "value": "Finland", "op": "replace"}, '
'{"path": "/population", "value": 5387000, "op": "replace"}]')
+
+ for patch in json.loads(sweden.patch):
+ self.assertTrue(patch in patches)
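
The test changes above stop comparing raw JSON-patch strings and compare the parsed structures instead, since json.dumps gives no guarantee about key or operation ordering; parsed comparison is order-insensitive:

    import json

    a = '[{"path": "/name", "value": "Finland", "op": "replace"}]'
    b = '[{"op": "replace", "path": "/name", "value": "Finland"}]'
    assert a != b                          # string comparison is order-sensitive
    assert json.loads(a) == json.loads(b)  # parsed comparison is not
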
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/warlock-1.1.0/warlock/core.py new/warlock-1.2.0/warlock/core.py
--- old/warlock-1.1.0/warlock/core.py 2013-11-19 16:00:43.000000000 +0100
+++ new/warlock-1.2.0/warlock/core.py 2015-10-12 18:48:26.000000000 +0200
@@ -19,10 +19,11 @@
from . import model
-def model_factory(schema, base_class=model.Model):
+def model_factory(schema, base_class=model.Model, name=None):
"""Generate a model class based on the provided JSON Schema
:param schema: dict representing valid JSON schema
+ :param name: A name to give the class, if `name` is not in `schema`
"""
schema = copy.deepcopy(schema)
@@ -31,5 +32,8 @@
self.__dict__['schema'] = schema
base_class.__init__(self, *args, **kwargs)
- Model.__name__ = str(schema['name'])
+ if name is not None:
+ Model.__name__ = name
+ elif 'name' in schema:
+ Model.__name__ = str(schema['name'])
return Model
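
A short sketch of the new name= argument to model_factory(), mirroring the test_naming case added above (assumes warlock 1.2.0 with its jsonschema/jsonpatch dependencies; the schema is the nameless fixture from the tests):

    import warlock

    schema = {
        'properties': {
            'name': {'type': 'string'},
            'population': {'type': 'integer'},
        },
        'additionalProperties': False,
    }

    Anonymous = warlock.model_factory(schema)
    print(Anonymous.__name__)                             # 'Model' (fallback)
    Country = warlock.model_factory(schema, name='Country')
    print(Country.__name__)                               # 'Country'
    sweden = Country(name='Sweden', population=9379116)
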
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/warlock-1.1.0/warlock.egg-info/PKG-INFO new/warlock-1.2.0/warlock.egg-info/PKG-INFO
--- old/warlock-1.1.0/warlock.egg-info/PKG-INFO 2013-11-19 16:08:08.000000000 +0100
+++ new/warlock-1.2.0/warlock.egg-info/PKG-INFO 2015-10-12 19:09:45.000000000 +0200
@@ -1,6 +1,6 @@
Metadata-Version: 1.0
Name: warlock
-Version: 1.1.0
+Version: 1.2.0
Summary: Python object model built on JSON schema and JSON patch.
Home-page: http://github.com/bcwaldon/warlock
Author: Brian Waldon
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/warlock-1.1.0/warlock.egg-info/SOURCES.txt new/warlock-1.2.0/warlock.egg-info/SOURCES.txt
--- old/warlock-1.1.0/warlock.egg-info/SOURCES.txt 2013-11-19 16:08:08.000000000 +0100
+++ new/warlock-1.2.0/warlock.egg-info/SOURCES.txt 2015-10-12 19:09:45.000000000 +0200
@@ -2,6 +2,7 @@
MANIFEST.in
README.md
requirements.txt
+setup.cfg
setup.py
test/test_core.py
warlock/__init__.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/warlock-1.1.0/warlock.egg-info/requires.txt new/warlock-1.2.0/warlock.egg-info/requires.txt
--- old/warlock-1.1.0/warlock.egg-info/requires.txt 2013-11-19 16:08:08.000000000 +0100
+++ new/warlock-1.2.0/warlock.egg-info/requires.txt 2015-10-12 19:09:45.000000000 +0200
@@ -1,3 +1,3 @@
jsonschema>=0.7,<3
jsonpatch>=0.10,<2
-six
\ No newline at end of file
+six
Hello community,
here is the log from the commit of package python-requestsexceptions for openSUSE:Factory checked in at 2016-12-08 00:30:24
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-requestsexceptions (Old)
and /work/SRC/openSUSE:Factory/.python-requestsexceptions.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-requestsexceptions"
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-requestsexceptions/python-requestsexceptions.changes 2016-01-11 19:11:51.000000000 +0100
+++ /work/SRC/openSUSE:Factory/.python-requestsexceptions.new/python-requestsexceptions.changes 2016-12-08 00:30:25.000000000 +0100
@@ -1,0 +2,6 @@
+Tue Nov 15 12:33:34 UTC 2016 - dmueller(a)suse.com
+
+- update to 1.1.3:
+ * Properly indicate python 3 support
+
+-------------------------------------------------------------------
Old:
----
requestsexceptions-1.1.2.tar.gz
New:
----
requestsexceptions-1.1.3.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-requestsexceptions.spec ++++++
--- /var/tmp/diff_new_pack.QwjWS5/_old 2016-12-08 00:30:26.000000000 +0100
+++ /var/tmp/diff_new_pack.QwjWS5/_new 2016-12-08 00:30:26.000000000 +0100
@@ -17,7 +17,7 @@
Name: python-requestsexceptions
-Version: 1.1.2
+Version: 1.1.3
Release: 0
Summary: Import exceptions from potentially bundled packages in requests
License: Apache-2.0
++++++ requestsexceptions-1.1.2.tar.gz -> requestsexceptions-1.1.3.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/requestsexceptions-1.1.2/ChangeLog new/requestsexceptions-1.1.3/ChangeLog
--- old/requestsexceptions-1.1.2/ChangeLog 2016-01-05 00:16:00.000000000 +0100
+++ new/requestsexceptions-1.1.3/ChangeLog 2016-02-07 16:31:23.000000000 +0100
@@ -1,6 +1,11 @@
CHANGES
=======
+1.1.3
+-----
+
+* Properly indicate python 3 support
+
1.1.2
-----
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/requestsexceptions-1.1.2/PKG-INFO new/requestsexceptions-1.1.3/PKG-INFO
--- old/requestsexceptions-1.1.2/PKG-INFO 2016-01-05 00:16:00.000000000 +0100
+++ new/requestsexceptions-1.1.3/PKG-INFO 2016-02-07 16:31:23.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: requestsexceptions
-Version: 1.1.2
+Version: 1.1.3
Summary: Import exceptions from potentially bundled packages in requests.
Home-page: http://www.openstack.org/
Author: OpenStack
@@ -26,4 +26,6 @@
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/requestsexceptions-1.1.2/requestsexceptions.egg-info/PKG-INFO new/requestsexceptions-1.1.3/requestsexceptions.egg-info/PKG-INFO
--- old/requestsexceptions-1.1.2/requestsexceptions.egg-info/PKG-INFO 2016-01-05 00:16:00.000000000 +0100
+++ new/requestsexceptions-1.1.3/requestsexceptions.egg-info/PKG-INFO 2016-02-07 16:31:23.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: requestsexceptions
-Version: 1.1.2
+Version: 1.1.3
Summary: Import exceptions from potentially bundled packages in requests.
Home-page: http://www.openstack.org/
Author: OpenStack
@@ -26,4 +26,6 @@
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/requestsexceptions-1.1.2/requestsexceptions.egg-info/pbr.json new/requestsexceptions-1.1.3/requestsexceptions.egg-info/pbr.json
--- old/requestsexceptions-1.1.2/requestsexceptions.egg-info/pbr.json 2016-01-05 00:16:00.000000000 +0100
+++ new/requestsexceptions-1.1.3/requestsexceptions.egg-info/pbr.json 2016-02-07 16:31:23.000000000 +0100
@@ -1 +1 @@
-{"git_version": "cd9cc98", "is_release": true}
\ No newline at end of file
+{"git_version": "19ee969", "is_release": true}
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/requestsexceptions-1.1.2/setup.cfg new/requestsexceptions-1.1.3/setup.cfg
--- old/requestsexceptions-1.1.2/setup.cfg 2016-01-05 00:16:00.000000000 +0100
+++ new/requestsexceptions-1.1.3/setup.cfg 2016-02-07 16:31:23.000000000 +0100
@@ -15,14 +15,19 @@
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
- Programming Language :: Python :: 2.6
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3.3
+ Programming Language :: Python :: 3.4
[files]
packages =
requestsexceptions
+[wheel]
+universal = 1
+
[egg_info]
-tag_svn_revision = 0
tag_date = 0
+tag_svn_revision = 0
tag_build =
Hello community,
here is the log from the commit of package python-redis for openSUSE:Factory checked in at 2016-12-08 00:30:19
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-redis (Old)
and /work/SRC/openSUSE:Factory/.python-redis.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-redis"
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-redis/python-redis.changes 2014-09-03 08:30:35.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.python-redis.new/python-redis.changes 2016-12-08 00:30:20.000000000 +0100
@@ -1,0 +2,29 @@
+Tue Nov 15 12:35:46 UTC 2016 - dmueller(a)suse.com
+
+- update to 2.10.5:
+ * Allow URL encoded parameters in Redis URLs. Characters like a "/" can
+ now be URL encoded and redis-py will correctly decode them. Thanks
+ * Added support for the WAIT command. Thanks https://github.com/eshizhan
+ * Better shutdown support for the PubSub Worker Thread. It now properly
+ cleans up the connection, unsubscribes from any channels and patterns
+ previously subscribed to and consumes any waiting messages on the socket.
+ * Added the ability to sleep for a brief period in the event of a
+ WatchError occurring. Thanks Joshua Harlow.
+ * Fixed a bug with pipeline error reporting when dealing with characters
+ in error messages that could not be encoded to the connection's
+ character set. Thanks Hendrik Muhs.
+ * Fixed a bug in Sentinel connections that would inadvertently connect
+ to the master when the connection pool resets. Thanks
+ https://github.com/df3n5
+ * Better timeout support in Pubsub get_message. Thanks Andy Isaacson.
+ * Fixed a bug with the HiredisParser that would cause the parser to
+ get stuck in an endless loop if a specific number of bytes were
+ delivered from the socket. This fix also increases performance of
+ parsing large responses from the Redis server.
+ * Added support for ZREVRANGEBYLEX.
+ * ConnectionErrors are now raised if Redis refuses a connection due to
+ the maxclients limit being exceeded. Thanks Roman Karpovich.
+ * max_connections can now be set when instantiating client instances.
+ Thanks Ohad Perry.
+
+-------------------------------------------------------------------
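
A hedged sketch of two of the 2.10.5 items above, URL-encoded parameters in Redis URLs and max_connections on the client constructor (assumes redis-py 2.10.5; host, port, database and password are placeholders):

    import redis
    try:
        from urllib.parse import quote   # Python 3
    except ImportError:
        from urllib import quote         # Python 2

    # A "/" (or other special character) in the password can now be URL encoded:
    password = quote("s3cret/with/slash", safe="")
    r = redis.StrictRedis.from_url("redis://:%s@localhost:6379/0" % password)

    # max_connections can now be passed when instantiating the client:
    r2 = redis.StrictRedis(host="localhost", port=6379, max_connections=20)
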
Old:
----
redis-2.10.3.tar.gz
New:
----
redis-2.10.5.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-redis.spec ++++++
--- /var/tmp/diff_new_pack.eBRrUU/_old 2016-12-08 00:30:21.000000000 +0100
+++ /var/tmp/diff_new_pack.eBRrUU/_new 2016-12-08 00:30:21.000000000 +0100
@@ -1,7 +1,7 @@
#
# spec file for package python-redis
#
-# Copyright (c) 2014 SUSE LINUX Products GmbH, Nuernberg, Germany.
+# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -17,13 +17,13 @@
Name: python-redis
-Version: 2.10.3
+Version: 2.10.5
Release: 0
Url: http://github.com/andymccurdy/redis-py
Summary: Python client for Redis key-value store
License: MIT
Group: Development/Languages/Python
-Source: https://pypi.python.org/packages/source/r/redis/redis-%{version}.tar.gz
+Source: https://pypi.io/packages/source/r/redis/redis-%{version}.tar.gz
BuildRoot: %{_tmppath}/%{name}-%{version}-build
BuildRequires: python-devel
BuildRequires: python-py
++++++ redis-2.10.3.tar.gz -> redis-2.10.5.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/CHANGES new/redis-2.10.5/CHANGES
--- old/redis-2.10.3/CHANGES 2014-08-14 19:18:42.000000000 +0200
+++ new/redis-2.10.5/CHANGES 2015-11-03 01:19:33.000000000 +0100
@@ -1,3 +1,31 @@
+* 2.10.5
+ * Allow URL encoded parameters in Redis URLs. Characters like a "/" can
+ now be URL encoded and redis-py will correctly decode them. Thanks
+ Paul Keene.
+ * Added support for the WAIT command. Thanks https://github.com/eshizhan
+ * Better shutdown support for the PubSub Worker Thread. It now properly
+ cleans up the connection, unsubscribes from any channels and patterns
+ previously subscribed to and consumes any waiting messages on the socket.
+ * Added the ability to sleep for a brief period in the event of a
+ WatchError occurring. Thanks Joshua Harlow.
+ * Fixed a bug with pipeline error reporting when dealing with characters
+ in error messages that could not be encoded to the connection's
+ character set. Thanks Hendrik Muhs.
+ * Fixed a bug in Sentinel connections that would inadvertently connect
+ to the master when the connection pool resets. Thanks
+ https://github.com/df3n5
+ * Better timeout support in Pubsub get_message. Thanks Andy Isaacson.
+ * Fixed a bug with the HiredisParser that would cause the parser to
+ get stuck in an endless loop if a specific number of bytes were
+ delivered from the socket. This fix also increases performance of
+ parsing large responses from the Redis server.
+ * Added support for ZREVRANGEBYLEX.
+ * ConnectionErrors are now raised if Redis refuses a connection due to
+ the maxclients limit being exceeded. Thanks Roman Karpovich.
+ * max_connections can now be set when instantiating client instances.
+ Thanks Ohad Perry.
+* 2.10.4
+ (skipped due to a PyPI snafu)
* 2.10.3
* Fixed a bug with the bytearray support introduced in 2.10.2. Thanks
Josh Owen.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/PKG-INFO new/redis-2.10.5/PKG-INFO
--- old/redis-2.10.3/PKG-INFO 2014-08-14 19:19:16.000000000 +0200
+++ new/redis-2.10.5/PKG-INFO 2015-11-03 01:21:05.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: redis
-Version: 2.10.3
+Version: 2.10.5
Summary: Python client for Redis key-value store
Home-page: http://github.com/andymccurdy/redis-py
Author: Andy McCurdy
@@ -76,7 +76,7 @@
`this comment on issue #151
<https://github.com/andymccurdy/redis-py/issues/151#issuecomment-1545015>`_
for details).
- * **SCAN/SSCAN/HSCAN/ZSCAN**: The *SCAN commands are implemented as they
+ * **SCAN/SSCAN/HSCAN/ZSCAN**: The \*SCAN commands are implemented as they
exist in the Redis documentation. In addition, each command has an equivilant
iterator method. These are purely for convenience so the user doesn't have
to keep track of the cursor while iterating. Use the
@@ -134,7 +134,7 @@
you want to control the socket behavior within an async framework. To
instantiate a client class using your own connection, you need to create
a connection pool, passing your class to the connection_class argument.
- Other keyword parameters your pass to the pool will be passed to the class
+ Other keyword parameters you pass to the pool will be passed to the class
specified during initialization.
.. code-block:: pycon
@@ -621,7 +621,7 @@
>>> sentinel.discover_slaves('mymaster')
[('127.0.0.1', 6380)]
- You can also create Redis client connections from a Sentinel instnace. You can
+ You can also create Redis client connections from a Sentinel instance. You can
connect to either the master (for write operations) or a slave (for read-only
operations).
@@ -651,7 +651,7 @@
Scan Iterators
^^^^^^^^^^^^^^
- The *SCAN commands introduced in Redis 2.8 can be cumbersome to use. While
+ The \*SCAN commands introduced in Redis 2.8 can be cumbersome to use. While
these commands are fully supported, redis-py also exposes the following methods
that return Python iterators for convenience: `scan_iter`, `hscan_iter`,
`sscan_iter` and `zscan_iter`.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/README.rst new/redis-2.10.5/README.rst
--- old/redis-2.10.3/README.rst 2014-06-16 22:42:56.000000000 +0200
+++ new/redis-2.10.5/README.rst 2015-01-04 01:21:38.000000000 +0100
@@ -68,7 +68,7 @@
`this comment on issue #151
<https://github.com/andymccurdy/redis-py/issues/151#issuecomment-1545015>`_
for details).
-* **SCAN/SSCAN/HSCAN/ZSCAN**: The *SCAN commands are implemented as they
+* **SCAN/SSCAN/HSCAN/ZSCAN**: The \*SCAN commands are implemented as they
exist in the Redis documentation. In addition, each command has an equivilant
iterator method. These are purely for convenience so the user doesn't have
to keep track of the cursor while iterating. Use the
@@ -126,7 +126,7 @@
you want to control the socket behavior within an async framework. To
instantiate a client class using your own connection, you need to create
a connection pool, passing your class to the connection_class argument.
-Other keyword parameters your pass to the pool will be passed to the class
+Other keyword parameters you pass to the pool will be passed to the class
specified during initialization.
.. code-block:: pycon
@@ -613,7 +613,7 @@
>>> sentinel.discover_slaves('mymaster')
[('127.0.0.1', 6380)]
-You can also create Redis client connections from a Sentinel instnace. You can
+You can also create Redis client connections from a Sentinel instance. You can
connect to either the master (for write operations) or a slave (for read-only
operations).
@@ -643,7 +643,7 @@
Scan Iterators
^^^^^^^^^^^^^^
-The *SCAN commands introduced in Redis 2.8 can be cumbersome to use. While
+The \*SCAN commands introduced in Redis 2.8 can be cumbersome to use. While
these commands are fully supported, redis-py also exposes the following methods
that return Python iterators for convenience: `scan_iter`, `hscan_iter`,
`sscan_iter` and `zscan_iter`.
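
The README hunks above touch the \*SCAN iterator helpers and the Sentinel support; a short sketch of both, assuming a reachable Redis server and Sentinel (host names, ports and the "mymaster" service name are placeholders):

    import redis
    from redis.sentinel import Sentinel

    r = redis.StrictRedis(host="localhost", port=6379)
    # scan_iter() hides the SCAN cursor bookkeeping behind a plain iterator:
    for key in r.scan_iter(match="user:*", count=100):
        print(key)

    # Sentinel hands out master (read/write) or slave (read-only) clients:
    sentinel = Sentinel([("localhost", 26379)], socket_timeout=0.1)
    master = sentinel.master_for("mymaster", socket_timeout=0.1)
    slave = sentinel.slave_for("mymaster", socket_timeout=0.1)
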
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/redis/__init__.py new/redis-2.10.5/redis/__init__.py
--- old/redis-2.10.3/redis/__init__.py 2014-08-14 19:17:30.000000000 +0200
+++ new/redis-2.10.5/redis/__init__.py 2015-11-03 01:19:40.000000000 +0100
@@ -22,7 +22,7 @@
)
-__version__ = '2.10.3'
+__version__ = '2.10.5'
VERSION = tuple(map(int, __version__.split('.')))
__all__ = [
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/redis/_compat.py new/redis-2.10.5/redis/_compat.py
--- old/redis-2.10.3/redis/_compat.py 2014-06-17 00:34:58.000000000 +0200
+++ new/redis-2.10.5/redis/_compat.py 2015-09-29 00:28:42.000000000 +0200
@@ -3,6 +3,7 @@
if sys.version_info[0] < 3:
+ from urllib import unquote
from urlparse import parse_qs, urlparse
from itertools import imap, izip
from string import letters as ascii_letters
@@ -12,15 +13,40 @@
except ImportError:
from StringIO import StringIO as BytesIO
- iteritems = lambda x: x.iteritems()
- iterkeys = lambda x: x.iterkeys()
- itervalues = lambda x: x.itervalues()
- nativestr = lambda x: \
- x if isinstance(x, str) else x.encode('utf-8', 'replace')
- u = lambda x: x.decode()
- b = lambda x: x
- next = lambda x: x.next()
- byte_to_chr = lambda x: x
+ # special unicode handling for python2 to avoid UnicodeDecodeError
+ def safe_unicode(obj, *args):
+ """ return the unicode representation of obj """
+ try:
+ return unicode(obj, *args)
+ except UnicodeDecodeError:
+ # obj is byte string
+ ascii_text = str(obj).encode('string_escape')
+ return unicode(ascii_text)
+
+ def iteritems(x):
+ return x.iteritems()
+
+ def iterkeys(x):
+ return x.iterkeys()
+
+ def itervalues(x):
+ return x.itervalues()
+
+ def nativestr(x):
+ return x if isinstance(x, str) else x.encode('utf-8', 'replace')
+
+ def u(x):
+ return x.decode()
+
+ def b(x):
+ return x
+
+ def next(x):
+ return x.next()
+
+ def byte_to_chr(x):
+ return x
+
unichr = unichr
xrange = xrange
basestring = basestring
@@ -28,19 +54,32 @@
bytes = str
long = long
else:
- from urllib.parse import parse_qs, urlparse
+ from urllib.parse import parse_qs, unquote, urlparse
from io import BytesIO
from string import ascii_letters
from queue import Queue
- iteritems = lambda x: iter(x.items())
- iterkeys = lambda x: iter(x.keys())
- itervalues = lambda x: iter(x.values())
- byte_to_chr = lambda x: chr(x)
- nativestr = lambda x: \
- x if isinstance(x, str) else x.decode('utf-8', 'replace')
- u = lambda x: x
- b = lambda x: x.encode('latin-1') if not isinstance(x, bytes) else x
+ def iteritems(x):
+ return iter(x.items())
+
+ def iterkeys(x):
+ return iter(x.keys())
+
+ def itervalues(x):
+ return iter(x.values())
+
+ def byte_to_chr(x):
+ return chr(x)
+
+ def nativestr(x):
+ return x if isinstance(x, str) else x.decode('utf-8', 'replace')
+
+ def u(x):
+ return x
+
+ def b(x):
+ return x.encode('latin-1') if not isinstance(x, bytes) else x
+
next = next
unichr = chr
imap = map
@@ -48,6 +87,7 @@
xrange = range
basestring = str
unicode = str
+ safe_unicode = str
bytes = bytes
long = int
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/redis/client.py new/redis-2.10.5/redis/client.py
--- old/redis-2.10.3/redis/client.py 2014-07-21 19:53:42.000000000 +0200
+++ new/redis-2.10.5/redis/client.py 2015-11-03 00:02:54.000000000 +0100
@@ -3,10 +3,12 @@
import datetime
import sys
import warnings
+import time
import threading
import time as mod_time
from redis._compat import (b, basestring, bytes, imap, iteritems, iterkeys,
- itervalues, izip, long, nativestr, unicode)
+ itervalues, izip, long, nativestr, unicode,
+ safe_unicode)
from redis.connection import (ConnectionPool, UnixDomainSocketConnection,
SSLConnection, Token)
from redis.lock import Lock, LuaLock
@@ -57,7 +59,8 @@
def dict_merge(*dicts):
merged = {}
- [merged.update(d) for d in dicts]
+ for d in dicts:
+ merged.update(d)
return merged
@@ -397,7 +400,8 @@
charset=None, errors=None,
decode_responses=False, retry_on_timeout=False,
ssl=False, ssl_keyfile=None, ssl_certfile=None,
- ssl_cert_reqs=None, ssl_ca_certs=None):
+ ssl_cert_reqs=None, ssl_ca_certs=None,
+ max_connections=None):
if not connection_pool:
if charset is not None:
warnings.warn(DeprecationWarning(
@@ -415,7 +419,8 @@
'encoding': encoding,
'encoding_errors': encoding_errors,
'decode_responses': decode_responses,
- 'retry_on_timeout': retry_on_timeout
+ 'retry_on_timeout': retry_on_timeout,
+ 'max_connections': max_connections
}
# based on input, setup appropriate connection args
if unix_socket_path is not None:
@@ -476,6 +481,7 @@
"""
shard_hint = kwargs.pop('shard_hint', None)
value_from_callable = kwargs.pop('value_from_callable', False)
+ watch_delay = kwargs.pop('watch_delay', None)
with self.pipeline(True, shard_hint) as pipe:
while 1:
try:
@@ -485,6 +491,8 @@
exec_value = pipe.execute()
return func_value if value_from_callable else exec_value
except WatchError:
+ if watch_delay is not None and watch_delay > 0:
+ time.sleep(watch_delay)
continue
def lock(self, name, timeout=None, sleep=0.1, blocking_timeout=None,
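A hedged usage sketch for the new watch_delay keyword accepted by transaction() above, assuming a hypothetical counter key named 'balance':

    import redis

    r = redis.StrictRedis()

    def increment_balance(pipe):
        # the pipeline is in WATCH mode here, so reads execute immediately
        current = int(pipe.get('balance') or 0)
        pipe.multi()
        pipe.set('balance', current + 1)

    # retried on WatchError; with 2.10.5 the client sleeps 100 ms
    # between attempts instead of retrying in a tight loop
    r.transaction(increment_balance, 'balance', watch_delay=0.1)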
@@ -762,6 +770,15 @@
"""
return self.execute_command('TIME')
+ def wait(self, num_replicas, timeout):
+ """
+ Redis synchronous replication
+ That returns the number of replicas that processed the query when
+ we finally have at least ``num_replicas``, or when the ``timeout`` was
+ reached.
+ """
+ return self.execute_command('WAIT', num_replicas, timeout)
+
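A minimal sketch of the new wait() wrapper above, assuming a deployment with at least one replica (host and key are placeholders):

    import redis

    r = redis.StrictRedis()

    r.set('some-key', 'some-value')
    # block up to 100 ms until at least 1 replica acknowledged the write;
    # returns the number of replicas that processed it
    acked = r.wait(1, 100)
    print(acked)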
# BASIC KEY COMMANDS
def append(self, key, value):
"""
@@ -1646,6 +1663,22 @@
pieces.extend([Token('LIMIT'), start, num])
return self.execute_command(*pieces)
+ def zrevrangebylex(self, name, max, min, start=None, num=None):
+ """
+ Return the reversed lexicographical range of values from sorted set
+ ``name`` between ``max`` and ``min``.
+
+ If ``start`` and ``num`` are specified, then return a slice of the
+ range.
+ """
+ if (start is not None and num is None) or \
+ (num is not None and start is None):
+ raise RedisError("``start`` and ``num`` must both be specified")
+ pieces = ['ZREVRANGEBYLEX', name, max, min]
+ if start is not None and num is not None:
+ pieces.extend([Token('LIMIT'), start, num])
+ return self.execute_command(*pieces)
+
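A short usage sketch of the new zrevrangebylex() method, mirroring the test case added further down in this patch; the key name is an assumption:

    import redis

    r = redis.StrictRedis()

    # members with equal scores are ordered lexicographically
    r.zadd('myset', a=0, b=0, c=0, d=0)

    # everything from '[c' down to '-' in reverse lexicographic order
    print(r.zrevrangebylex('myset', '[c', '-'))            # c, b, a
    print(r.zrevrangebylex('myset', '+', '-', start=1, num=2))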
def zrangebyscore(self, name, min, max, start=None, num=None,
withscores=False, score_cast_func=float):
"""
@@ -1799,12 +1832,12 @@
"Adds the specified elements to the specified HyperLogLog."
return self.execute_command('PFADD', name, *values)
- def pfcount(self, name):
+ def pfcount(self, *sources):
"""
Return the approximated cardinality of
- the set observed by the HyperLogLog at key.
+ the set observed by the HyperLogLog at key(s).
"""
- return self.execute_command('PFCOUNT', name)
+ return self.execute_command('PFCOUNT', *sources)
def pfmerge(self, dest, *sources):
"Merge N different HyperLogLogs into a single one."
@@ -2142,10 +2175,10 @@
# previously listening to
return command(*args)
- def parse_response(self, block=True):
+ def parse_response(self, block=True, timeout=0):
"Parse the response from a publish/subscribe command"
connection = self.connection
- if not block and not connection.can_read():
+ if not block and not connection.can_read(timeout=timeout):
return None
return self._execute(connection, connection.read_response)
@@ -2216,9 +2249,15 @@
if response is not None:
yield response
- def get_message(self, ignore_subscribe_messages=False):
- "Get the next message if one is available, otherwise None"
- response = self.parse_response(block=False)
+ def get_message(self, ignore_subscribe_messages=False, timeout=0):
+ """
+ Get the next message if one is available, otherwise None.
+
+ If timeout is specified, the system will wait for `timeout` seconds
+ before returning. Timeout should be specified as a floating point
+ number.
+ """
+ response = self.parse_response(block=False, timeout=timeout)
if response:
return self.handle_message(response, ignore_subscribe_messages)
return None
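A hedged sketch of polling with the new get_message() timeout, assuming a channel named 'notifications':

    import redis

    r = redis.StrictRedis()
    p = r.pubsub(ignore_subscribe_messages=True)
    p.subscribe('notifications')

    while True:
        # wait up to 0.5 seconds instead of returning immediately
        message = p.get_message(timeout=0.5)
        if message:
            print(message['channel'], message['data'])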
@@ -2282,30 +2321,39 @@
for pattern, handler in iteritems(self.patterns):
if handler is None:
raise PubSubError("Pattern: '%s' has no handler registered")
- pubsub = self
- class WorkerThread(threading.Thread):
- def __init__(self, *args, **kwargs):
- super(WorkerThread, self).__init__(*args, **kwargs)
- self._running = False
-
- def run(self):
- if self._running:
- return
- self._running = True
- while self._running and pubsub.subscribed:
- pubsub.get_message(ignore_subscribe_messages=True)
- mod_time.sleep(sleep_time)
-
- def stop(self):
- self._running = False
- self.join()
-
- thread = WorkerThread()
+ thread = PubSubWorkerThread(self, sleep_time)
thread.start()
return thread
+class PubSubWorkerThread(threading.Thread):
+ def __init__(self, pubsub, sleep_time):
+ super(PubSubWorkerThread, self).__init__()
+ self.pubsub = pubsub
+ self.sleep_time = sleep_time
+ self._running = False
+
+ def run(self):
+ if self._running:
+ return
+ self._running = True
+ pubsub = self.pubsub
+ sleep_time = self.sleep_time
+ while pubsub.subscribed:
+ pubsub.get_message(ignore_subscribe_messages=True,
+ timeout=sleep_time)
+ pubsub.close()
+ self._running = False
+
+ def stop(self):
+ # stopping simply unsubscribes from all channels and patterns.
+ # the unsubscribe responses that are generated will short circuit
+ # the loop in run(), calling pubsub.close() to clean up the connection
+ self.pubsub.unsubscribe()
+ self.pubsub.punsubscribe()
+
+
class BasePipeline(object):
"""
Pipelines provide a way to transmit multiple commands to the Redis server
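For the PubSubWorkerThread introduced above, a hedged usage sketch via run_in_thread(); channel name, handler and sleep_time are illustrative assumptions:

    import time
    import redis

    r = redis.StrictRedis()
    p = r.pubsub(ignore_subscribe_messages=True)

    def handler(message):
        print(message['data'])

    # run_in_thread() requires a handler for every subscription
    p.subscribe(**{'notifications': handler})

    # sleep_time becomes the get_message() timeout inside the worker loop
    worker = p.run_in_thread(sleep_time=0.1)
    time.sleep(1)
    worker.stop()   # unsubscribes, ending the loop and closing the connection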
@@ -2526,9 +2574,9 @@
raise r
def annotate_exception(self, exception, number, command):
- cmd = unicode(' ').join(imap(unicode, command))
+ cmd = safe_unicode(' ').join(imap(safe_unicode, command))
msg = unicode('Command # %d (%s) of pipeline caused error: %s') % (
- number, cmd, unicode(exception.args[0]))
+ number, cmd, safe_unicode(exception.args[0]))
exception.args = (msg,) + exception.args[1:]
def parse_response(self, connection, command_name, **options):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/redis/connection.py new/redis-2.10.5/redis/connection.py
--- old/redis-2.10.3/redis/connection.py 2014-08-14 19:15:19.000000000 +0200
+++ new/redis-2.10.5/redis/connection.py 2015-11-02 20:25:13.000000000 +0100
@@ -16,7 +16,8 @@
from redis._compat import (b, xrange, imap, byte_to_chr, unicode, bytes, long,
BytesIO, nativestr, basestring, iteritems,
- LifoQueue, Empty, Full, urlparse, parse_qs)
+ LifoQueue, Empty, Full, urlparse, parse_qs,
+ unquote)
from redis.exceptions import (
RedisError,
ConnectionError,
@@ -79,7 +80,9 @@
class BaseParser(object):
EXCEPTION_CLASSES = {
- 'ERR': ResponseError,
+ 'ERR': {
+ 'max number of clients reached': ConnectionError
+ },
'EXECABORT': ExecAbortError,
'LOADING': BusyLoadingError,
'NOSCRIPT': NoScriptError,
@@ -91,7 +94,10 @@
error_code = response.split(' ')[0]
if error_code in self.EXCEPTION_CLASSES:
response = response[len(error_code) + 1:]
- return self.EXCEPTION_CLASSES[error_code](response)
+ exception_class = self.EXCEPTION_CLASSES[error_code]
+ if isinstance(exception_class, dict):
+ exception_class = exception_class.get(response, ResponseError)
+ return exception_class(response)
return ResponseError(response)
@@ -179,8 +185,16 @@
self.bytes_read = 0
def close(self):
- self.purge()
- self._buffer.close()
+ try:
+ self.purge()
+ self._buffer.close()
+ except:
+ # issue #633 suggests the purge/close somehow raised a
+ # BadFileDescriptor error. Perhaps the client ran out of
+ # memory or something else? It's probably OK to ignore
+ # any error being raised from purge/close since we're
+ # removing the reference to the instance below.
+ pass
self._buffer = None
self._sock = None
@@ -345,14 +359,6 @@
self._reader.feed(self._buffer, 0, bufflen)
else:
self._reader.feed(buffer)
- # proactively, but not conclusively, check if more data is in the
- # buffer. if the data received doesn't end with \r\n, there's more.
- if HIREDIS_USE_BYTE_BUFFER:
- if bufflen > 2 and self._buffer[bufflen - 2:bufflen] != SYM_CRLF:
- continue
- else:
- if not buffer.endswith(SYM_CRLF):
- continue
response = self._reader.gets()
# if an older version of hiredis is installed, we need to attempt
# to convert ResponseErrors to their appropriate types.
@@ -542,11 +548,12 @@
e = sys.exc_info()[1]
self.disconnect()
if len(e.args) == 1:
- _errno, errmsg = 'UNKNOWN', e.args[0]
+ errno, errmsg = 'UNKNOWN', e.args[0]
else:
- _errno, errmsg = e.args
+ errno = e.args[0]
+ errmsg = e.args[1]
raise ConnectionError("Error %s while writing to socket. %s." %
- (_errno, errmsg))
+ (errno, errmsg))
except:
self.disconnect()
raise
@@ -555,13 +562,14 @@
"Pack and send a command to the Redis server"
self.send_packed_command(self.pack_command(*args))
- def can_read(self):
+ def can_read(self, timeout=0):
"Poll the socket to see if there's data that can be read."
sock = self._sock
if not sock:
self.connect()
sock = self._sock
- return bool(select([sock], [], [], 0)[0]) or self._parser.can_read()
+ return self._parser.can_read() or \
+ bool(select([sock], [], [], timeout)[0])
def read_response(self):
"Read the response from a previously sent command"
@@ -585,7 +593,7 @@
elif isinstance(value, float):
value = b(repr(value))
elif not isinstance(value, basestring):
- value = str(value)
+ value = unicode(value)
if isinstance(value, unicode):
value = value.encode(self.encoding, self.encoding_errors)
return value
@@ -728,7 +736,7 @@
class ConnectionPool(object):
"Generic connection pool"
@classmethod
- def from_url(cls, url, db=None, **kwargs):
+ def from_url(cls, url, db=None, decode_components=False, **kwargs):
"""
Return a connection pool configured from the given URL.
@@ -752,6 +760,12 @@
If none of these options are specified, db=0 is used.
+ The ``decode_components`` argument allows this function to work with
+ percent-encoded URLs. If this argument is set to ``True`` all ``%xx``
+ escapes will be replaced by their single-character equivalents after
+ the URL has been parsed. This only applies to the ``hostname``,
+ ``path``, and ``password`` components.
+
Any additional querystring arguments and keyword arguments will be
passed along to the ConnectionPool class's initializer. In the case
of conflicting arguments, querystring arguments always win.
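A minimal sketch of from_url() with the new decode_components flag; the URL and password are placeholders:

    import redis

    # %2F in the password is decoded back to '/' before connecting;
    # without decode_components=True it would be used literally
    pool = redis.ConnectionPool.from_url(
        'redis://:secret%2Fpass@localhost:6379/0',
        decode_components=True)
    r = redis.StrictRedis(connection_pool=pool)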
@@ -776,26 +790,35 @@
if value and len(value) > 0:
url_options[name] = value[0]
+ if decode_components:
+ password = unquote(url.password) if url.password else None
+ path = unquote(url.path) if url.path else None
+ hostname = unquote(url.hostname) if url.hostname else None
+ else:
+ password = url.password
+ path = url.path
+ hostname = url.hostname
+
# We only support redis:// and unix:// schemes.
if url.scheme == 'unix':
url_options.update({
- 'password': url.password,
- 'path': url.path,
+ 'password': password,
+ 'path': path,
'connection_class': UnixDomainSocketConnection,
})
else:
url_options.update({
- 'host': url.hostname,
+ 'host': hostname,
'port': int(url.port or 6379),
- 'password': url.password,
+ 'password': password,
})
# If there's a path argument, use it as the db argument if a
# querystring value wasn't specified
- if 'db' not in url_options and url.path:
+ if 'db' not in url_options and path:
try:
- url_options['db'] = int(url.path.replace('/', ''))
+ url_options['db'] = int(path.replace('/', ''))
except (AttributeError, ValueError):
pass
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/redis/sentinel.py new/redis-2.10.5/redis/sentinel.py
--- old/redis-2.10.3/redis/sentinel.py 2014-06-17 00:34:58.000000000 +0200
+++ new/redis-2.10.5/redis/sentinel.py 2014-09-18 18:01:10.000000000 +0200
@@ -129,6 +129,8 @@
self.disconnect()
self.reset()
self.__init__(self.service_name, self.sentinel_manager,
+ is_master=self.is_master,
+ check_connection=self.check_connection,
connection_class=self.connection_class,
max_connections=self.max_connections,
**self.connection_kwargs)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/redis.egg-info/PKG-INFO new/redis-2.10.5/redis.egg-info/PKG-INFO
--- old/redis-2.10.3/redis.egg-info/PKG-INFO 2014-08-14 19:19:15.000000000 +0200
+++ new/redis-2.10.5/redis.egg-info/PKG-INFO 2015-11-03 01:21:05.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: redis
-Version: 2.10.3
+Version: 2.10.5
Summary: Python client for Redis key-value store
Home-page: http://github.com/andymccurdy/redis-py
Author: Andy McCurdy
@@ -76,7 +76,7 @@
`this comment on issue #151
<https://github.com/andymccurdy/redis-py/issues/151#issuecomment-1545015>`_
for details).
- * **SCAN/SSCAN/HSCAN/ZSCAN**: The *SCAN commands are implemented as they
+ * **SCAN/SSCAN/HSCAN/ZSCAN**: The \*SCAN commands are implemented as they
exist in the Redis documentation. In addition, each command has an equivalent
iterator method. These are purely for convenience so the user doesn't have
to keep track of the cursor while iterating. Use the
@@ -134,7 +134,7 @@
you want to control the socket behavior within an async framework. To
instantiate a client class using your own connection, you need to create
a connection pool, passing your class to the connection_class argument.
- Other keyword parameters your pass to the pool will be passed to the class
+ Other keyword parameters you pass to the pool will be passed to the class
specified during initialization.
.. code-block:: pycon
@@ -621,7 +621,7 @@
>>> sentinel.discover_slaves('mymaster')
[('127.0.0.1', 6380)]
- You can also create Redis client connections from a Sentinel instnace. You can
+ You can also create Redis client connections from a Sentinel instance. You can
connect to either the master (for write operations) or a slave (for read-only
operations).
@@ -651,7 +651,7 @@
Scan Iterators
^^^^^^^^^^^^^^
- The *SCAN commands introduced in Redis 2.8 can be cumbersome to use. While
+ The \*SCAN commands introduced in Redis 2.8 can be cumbersome to use. While
these commands are fully supported, redis-py also exposes the following methods
that return Python iterators for convenience: `scan_iter`, `hscan_iter`,
`sscan_iter` and `zscan_iter`.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/redis.egg-info/SOURCES.txt new/redis-2.10.5/redis.egg-info/SOURCES.txt
--- old/redis-2.10.3/redis.egg-info/SOURCES.txt 2014-08-14 19:19:16.000000000 +0200
+++ new/redis-2.10.5/redis.egg-info/SOURCES.txt 2015-11-03 01:21:05.000000000 +0100
@@ -3,6 +3,7 @@
LICENSE
MANIFEST.in
README.rst
+setup.cfg
setup.py
redis/__init__.py
redis/_compat.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/setup.cfg new/redis-2.10.5/setup.cfg
--- old/redis-2.10.3/setup.cfg 2014-08-14 19:19:16.000000000 +0200
+++ new/redis-2.10.5/setup.cfg 2015-11-03 01:21:05.000000000 +0100
@@ -1,3 +1,6 @@
+[bdist_wheel]
+universal = 1
+
[egg_info]
tag_build =
tag_date = 0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/setup.py new/redis-2.10.5/setup.py
--- old/redis-2.10.3/setup.py 2014-06-17 00:34:58.000000000 +0200
+++ new/redis-2.10.5/setup.py 2015-09-29 00:29:14.000000000 +0200
@@ -23,7 +23,9 @@
except ImportError:
from distutils.core import setup
- PyTest = lambda x: x
+
+ def PyTest(x):
+ x
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
long_description = f.read()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/tests/test_commands.py new/redis-2.10.5/tests/test_commands.py
--- old/redis-2.10.3/tests/test_commands.py 2014-07-21 19:53:42.000000000 +0200
+++ new/redis-2.10.5/tests/test_commands.py 2015-11-02 19:08:25.000000000 +0100
@@ -112,7 +112,7 @@
r['a'] = 'foo'
assert isinstance(r.object('refcount', 'a'), int)
assert isinstance(r.object('idletime', 'a'), int)
- assert r.object('encoding', 'a') == b('raw')
+ assert r.object('encoding', 'a') in (b('raw'), b('embstr'))
assert r.object('idletime', 'invalid-key') is None
def test_ping(self, r):
@@ -959,6 +959,17 @@
assert r.zrangebylex('a', '[f', '+') == [b('f'), b('g')]
assert r.zrangebylex('a', '-', '+', start=3, num=2) == [b('d'), b('e')]
+ @skip_if_server_version_lt('2.9.9')
+ def test_zrevrangebylex(self, r):
+ r.zadd('a', a=0, b=0, c=0, d=0, e=0, f=0, g=0)
+ assert r.zrevrangebylex('a', '[c', '-') == [b('c'), b('b'), b('a')]
+ assert r.zrevrangebylex('a', '(c', '-') == [b('b'), b('a')]
+ assert r.zrevrangebylex('a', '(g', '[aaa') == \
+ [b('f'), b('e'), b('d'), b('c'), b('b')]
+ assert r.zrevrangebylex('a', '+', '[f') == [b('g'), b('f')]
+ assert r.zrevrangebylex('a', '+', '-', start=3, num=2) == \
+ [b('d'), b('c')]
+
def test_zrangebyscore(self, r):
r.zadd('a', a1=1, a2=2, a3=3, a4=4, a5=5)
assert r.zrangebyscore('a', 2, 4) == [b('a2'), b('a3'), b('a4')]
@@ -1106,6 +1117,10 @@
members = set([b('1'), b('2'), b('3')])
r.pfadd('a', *members)
assert r.pfcount('a') == len(members)
+ members_b = set([b('2'), b('3'), b('4')])
+ r.pfadd('b', *members_b)
+ assert r.pfcount('b') == len(members_b)
+ assert r.pfcount('a', 'b') == len(members_b.union(members))
@skip_if_server_version_lt('2.8.9')
def test_pfmerge(self, r):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/tests/test_connection_pool.py new/redis-2.10.5/tests/test_connection_pool.py
--- old/redis-2.10.3/tests/test_connection_pool.py 2014-06-17 00:34:58.000000000 +0200
+++ new/redis-2.10.5/tests/test_connection_pool.py 2015-02-10 02:18:07.000000000 +0100
@@ -163,6 +163,17 @@
'password': None,
}
+ def test_quoted_hostname(self):
+ pool = redis.ConnectionPool.from_url('redis://my %2F host %2B%3D+',
+ decode_components=True)
+ assert pool.connection_class == redis.Connection
+ assert pool.connection_kwargs == {
+ 'host': 'my / host +=+',
+ 'port': 6379,
+ 'db': 0,
+ 'password': None,
+ }
+
def test_port(self):
pool = redis.ConnectionPool.from_url('redis://localhost:6380')
assert pool.connection_class == redis.Connection
@@ -183,6 +194,18 @@
'password': 'mypassword',
}
+ def test_quoted_password(self):
+ pool = redis.ConnectionPool.from_url(
+ 'redis://:%2Fmypass%2F%2B word%3D%24+@localhost',
+ decode_components=True)
+ assert pool.connection_class == redis.Connection
+ assert pool.connection_kwargs == {
+ 'host': 'localhost',
+ 'port': 6379,
+ 'db': 0,
+ 'password': '/mypass/+ word=$+',
+ }
+
def test_db_as_argument(self):
pool = redis.ConnectionPool.from_url('redis://localhost', db='1')
assert pool.connection_class == redis.Connection
@@ -259,6 +282,28 @@
'db': 0,
'password': 'mypassword',
}
+
+ def test_quoted_password(self):
+ pool = redis.ConnectionPool.from_url(
+ 'unix://:%2Fmypass%2F%2B word%3D%24+@/socket',
+ decode_components=True)
+ assert pool.connection_class == redis.UnixDomainSocketConnection
+ assert pool.connection_kwargs == {
+ 'path': '/socket',
+ 'db': 0,
+ 'password': '/mypass/+ word=$+',
+ }
+
+ def test_quoted_path(self):
+ pool = redis.ConnectionPool.from_url(
+ 'unix://:mypassword@/my%2Fpath%2Fto%2F..%2F+_%2B%3D%24ocket',
+ decode_components=True)
+ assert pool.connection_class == redis.UnixDomainSocketConnection
+ assert pool.connection_kwargs == {
+ 'path': '/my/path/to/../+_+=$ocket',
+ 'db': 0,
+ 'password': 'mypassword',
+ }
def test_db_as_argument(self):
pool = redis.ConnectionPool.from_url('unix:///socket', db=1)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/tests/test_encoding.py new/redis-2.10.5/tests/test_encoding.py
--- old/redis-2.10.3/tests/test_encoding.py 2014-06-17 00:34:58.000000000 +0200
+++ new/redis-2.10.5/tests/test_encoding.py 2015-06-06 21:44:46.000000000 +0200
@@ -23,6 +23,13 @@
r.rpush('a', *result)
assert r.lrange('a', 0, -1) == result
+ def test_object_value(self, r):
+ unicode_string = unichr(3456) + u('abcd') + unichr(3421)
+ r['unicode-string'] = Exception(unicode_string)
+ cached_val = r['unicode-string']
+ assert isinstance(cached_val, unicode)
+ assert unicode_string == cached_val
+
class TestCommandsAndTokensArentEncoded(object):
@pytest.fixture()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/redis-2.10.3/tests/test_scripting.py new/redis-2.10.5/tests/test_scripting.py
--- old/redis-2.10.3/tests/test_scripting.py 2014-06-16 22:42:56.000000000 +0200
+++ new/redis-2.10.5/tests/test_scripting.py 2015-01-02 20:04:16.000000000 +0100
@@ -10,6 +10,17 @@
value = tonumber(value)
return value * ARGV[1]"""
+msgpack_hello_script = """
+local message = cmsgpack.unpack(ARGV[1])
+local name = message['name']
+return "hello " .. name
+"""
+msgpack_hello_script_broken = """
+local message = cmsgpack.unpack(ARGV[1])
+local names = message['name']
+return "hello " .. name
+"""
+
class TestScripting(object):
@pytest.fixture(autouse=True)
@@ -80,3 +91,25 @@
assert r.script_exists(multiply.sha) == [False]
# [SET worked, GET 'a', result of multiple script]
assert pipe.execute() == [True, b('2'), 6]
+
+ def test_eval_msgpack_pipeline_error_in_lua(self, r):
+ msgpack_hello = r.register_script(msgpack_hello_script)
+ assert not msgpack_hello.sha
+
+ pipe = r.pipeline()
+
+ # avoiding a dependency to msgpack, this is the output of
+ # msgpack.dumps({"name": "joe"})
+ msgpack_message_1 = b'\x81\xa4name\xa3Joe'
+
+ msgpack_hello(args=[msgpack_message_1], client=pipe)
+
+ assert r.script_exists(msgpack_hello.sha) == [True]
+ assert pipe.execute()[0] == b'hello Joe'
+
+ msgpack_hello_broken = r.register_script(msgpack_hello_script_broken)
+
+ msgpack_hello_broken(args=[msgpack_message_1], client=pipe)
+ with pytest.raises(exceptions.ResponseError) as excinfo:
+ pipe.execute()
+ assert excinfo.type == exceptions.ResponseError
Hello community,
here is the log from the commit of package python-python-mimeparse for openSUSE:Factory checked in at 2016-12-08 00:30:13
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-python-mimeparse (Old)
and /work/SRC/openSUSE:Factory/.python-python-mimeparse.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-python-mimeparse"
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-python-mimeparse/python-python-mimeparse.changes 2013-05-02 11:42:20.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.python-python-mimeparse.new/python-python-mimeparse.changes 2016-12-08 00:30:14.000000000 +0100
@@ -1,0 +2,5 @@
+Tue Nov 15 12:45:22 UTC 2016 - dmueller(a)suse.com
+
+- update to 1.5.2
+
+-------------------------------------------------------------------
Old:
----
python-mimeparse-0.1.4.tar.gz
New:
----
python-mimeparse-1.5.2.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-python-mimeparse.spec ++++++
--- /var/tmp/diff_new_pack.u5XtqI/_old 2016-12-08 00:30:15.000000000 +0100
+++ /var/tmp/diff_new_pack.u5XtqI/_new 2016-12-08 00:30:15.000000000 +0100
@@ -1,7 +1,7 @@
#
-# spec file for package python-mimeparse
+# spec file for package python-python-mimeparse
#
-# Copyright (c) 2012 SUSE LINUX Products GmbH, Nuernberg, Germany.
+# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -15,16 +15,18 @@
# Please submit bugfixes or comments via http://bugs.opensuse.org/
#
+
Name: python-python-mimeparse
-Version: 0.1.4
+Version: 1.5.2
Release: 0
Url: http://code.google.com/p/mimeparse/
Summary: Basic functions for parsing and matching mime-type names
License: MIT
Group: Development/Languages/Python
-Source: https://pypi.python.org/packages/source/p/python-mimeparse/python-mimeparse…
+Source: https://pypi.io/packages/source/p/python-mimeparse/python-mimeparse-%{versi…
BuildRoot: %{_tmppath}/%{name}-%{version}-build
BuildRequires: python-devel
+BuildRequires: python-setuptools
Provides: python-mimeparse = 0.1.4
Obsoletes: python-mimeparse < 0.1.4
%if 0%{?suse_version} && 0%{?suse_version} <= 1110
@@ -52,7 +54,7 @@
%files
%defattr(-,root,root,-)
-%doc LICENSE README
+%doc LICENSE README.md
%{python_sitelib}/*
%changelog
++++++ python-mimeparse-0.1.4.tar.gz -> python-mimeparse-1.5.2.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/MANIFEST.in new/python-mimeparse-1.5.2/MANIFEST.in
--- old/python-mimeparse-0.1.4/MANIFEST.in 1970-01-01 01:00:00.000000000 +0100
+++ new/python-mimeparse-1.5.2/MANIFEST.in 2016-04-26 18:54:58.000000000 +0200
@@ -0,0 +1 @@
+include README.md LICENSE mimeparse_test.py testdata.json
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/PKG-INFO new/python-mimeparse-1.5.2/PKG-INFO
--- old/python-mimeparse-0.1.4/PKG-INFO 2012-08-23 22:29:29.000000000 +0200
+++ new/python-mimeparse-1.5.2/PKG-INFO 2016-04-26 19:55:46.000000000 +0200
@@ -1,13 +1,14 @@
Metadata-Version: 1.1
Name: python-mimeparse
-Version: 0.1.4
+Version: 1.5.2
Summary: A module provides basic functions for parsing mime-type names and matching them against a list of media-ranges.
Home-page: https://github.com/dbtsai/python-mimeparse
-Author: David Tsai
+Author: DB Tsai
Author-email: dbtsai(a)dbtsai.com
License: UNKNOWN
-Download-URL: http://pypi.python.org/packages/source/p/python-mimeparse/python-mimeparse-…
-Description:
+Download-URL: http://pypi.python.org/packages/source/p/python-mimeparse/python-mimeparse-…
+Description: # Travis CI Build Status [![Build Status](https://travis-ci.org/dbtsai/python-mimeparse.svg?branch=master)](h…
+
This module provides basic functions for handling mime-types. It can handle
matching mime-types against a list of media-ranges. See section 14.1 of
the HTTP specification [RFC 2616] for a complete explanation.
@@ -21,6 +22,22 @@
- quality_parsed(): Just like quality() except the second parameter must be pre-parsed.
- best_match(): Choose the mime-type with the highest quality ("q") from a list of candidates.
+ Testing
+ =======
+ The format of the JSON test data file is as follows:
+ A top-level JSON object which has a key for each of the functions to be tested. The value corresponding to that key is a list of tests. Each test contains: the argument or arguments to the function being tested, the expected results and an optional description.
+
+ Python
+ ======
+ The Python tests require Python 2.6.
+
+ Run the tests by typing:
+ python mimeparse_test.py
+
+ To make sure that the package works in all the supported environments, you can run tox tests:
+ pip install tox
+ tox
+
Keywords: mime-type
Platform: UNKNOWN
Classifier: Programming Language :: Python
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/README new/python-mimeparse-1.5.2/README
--- old/python-mimeparse-0.1.4/README 2012-08-23 05:38:20.000000000 +0200
+++ new/python-mimeparse-1.5.2/README 1970-01-01 01:00:00.000000000 +0100
@@ -1,19 +0,0 @@
-This module provides basic functions for parsing mime-type names and matching them against a list of media-ranges.
-
-See section 14.1 of RFC 2616 (the HTTP specification) for a complete explanation.
-
-Testing
-=======
-The format of the JSON test data file is as follows:
-A top-level JSON object which has a key for each of the functions to be tested. The value corresponding to that key is a list of tests. Each test contains: the argument or arguments to the function being tested, the expected results and an optional description.
-
-
-Python
-======
-The Python tests require either Python 2.6 or the installation of the SimpleJSON library.
-
-Installing SimpleJson can be done by:
-sudo easy_install simplejson
-
-Run the tests by typing:
-python mimeparse_test.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/README.md new/python-mimeparse-1.5.2/README.md
--- old/python-mimeparse-0.1.4/README.md 1970-01-01 01:00:00.000000000 +0100
+++ new/python-mimeparse-1.5.2/README.md 2016-04-26 18:54:58.000000000 +0200
@@ -0,0 +1,30 @@
+# Travis CI Build Status [![Build Status](https://travis-ci.org/dbtsai/python-mimeparse.svg?branch=master)](h…
+
+This module provides basic functions for handling mime-types. It can handle
+matching mime-types against a list of media-ranges. See section 14.1 of
+the HTTP specification [RFC 2616] for a complete explanation.
+
+ http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
+
+Contents:
+ - parse_mime_type(): Parses a mime-type into its component parts.
+ - parse_media_range(): Media-ranges are mime-types with wild-cards and a "q" quality parameter.
+ - quality(): Determines the quality ("q") of a mime-type when compared against a list of media-ranges.
+ - quality_parsed(): Just like quality() except the second parameter must be pre-parsed.
+ - best_match(): Choose the mime-type with the highest quality ("q") from a list of candidates.
+
+Testing
+=======
+The format of the JSON test data file is as follows:
+A top-level JSON object which has a key for each of the functions to be tested. The value corresponding to that key is a list of tests. Each test contains: the argument or arguments to the function being tested, the expected results and an optional description.
+
+Python
+======
+The Python tests require Python 2.6.
+
+Run the tests by typing:
+python mimeparse_test.py
+
+To make sure that the package works in all the supported environments, you can run tox tests:
+pip install tox
+tox
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/mimeparse.py new/python-mimeparse-1.5.2/mimeparse.py
--- old/python-mimeparse-0.1.4/mimeparse.py 2012-08-23 22:19:36.000000000 +0200
+++ new/python-mimeparse-1.5.2/mimeparse.py 2016-04-26 18:55:28.000000000 +0200
@@ -19,13 +19,17 @@
"""
from functools import reduce
-__version__ = '0.1.4'
+__version__ = '1.5.2'
__author__ = 'Joe Gregorio'
__email__ = 'joe(a)bitworking.org'
__license__ = 'MIT License'
__credits__ = ''
+class MimeTypeParseException(ValueError):
+ pass
+
+
def parse_mime_type(mime_type):
"""Parses a mime-type into its component parts.
@@ -45,7 +49,12 @@
# single '*'. Turn it into a legal wildcard.
if full_type == '*':
full_type = '*/*'
- (type, subtype) = full_type.split('/')
+
+ type_parts = full_type.split('/') if '/' in full_type else None
+ if not type_parts or len(type_parts) > 2:
+ raise MimeTypeParseException("Can't parse type \"{}\"".format(full_type))
+
+ (type, subtype) = type_parts
return (type.strip(), subtype.strip(), params)
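A short sketch of the stricter parsing above; the inputs are taken from the test data added later in this patch:

    import mimeparse

    print(mimeparse.parse_mime_type('application/xhtml;q=0.5'))
    # ('application', 'xhtml', {'q': '0.5'})

    try:
        mimeparse.parse_mime_type('text')   # no '/', cannot be split
    except mimeparse.MimeTypeParseException as exc:
        print(exc)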
@@ -65,15 +74,14 @@
necessary.
"""
(type, subtype, params) = parse_mime_type(range)
- if not 'q' in params or not params['q'] or \
- not float(params['q']) or float(params['q']) > 1\
- or float(params['q']) < 0:
+ if 'q' not in params or not params['q'] or \
+ float(params['q']) > 1 or float(params['q']) < 0:
params['q'] = '1'
return (type, subtype, params)
-def fitness_and_quality_parsed(mime_type, parsed_ranges):
+def quality_and_fitness_parsed(mime_type, parsed_ranges):
"""Find the best match for a mime-type amongst parsed media-ranges.
Find the best match for a given mime-type against a list of media_ranges
@@ -95,7 +103,7 @@
target_subtype == '*')
if type_match and subtype_match:
param_matches = reduce(lambda x, y: x + y, [1 for (key, value) in
- list(target_params.items()) if key != 'q' and
+ target_params.items() if key != 'q' and
key in params and value == params[key]], 0)
fitness = (type == target_type) and 100 or 0
fitness += (subtype == target_subtype) and 10 or 0
@@ -104,7 +112,7 @@
best_fitness = fitness
best_fit_q = params['q']
- return best_fitness, float(best_fit_q)
+ return float(best_fit_q), best_fitness
def quality_parsed(mime_type, parsed_ranges):
@@ -113,10 +121,10 @@
Find the best match for a given mime-type against a list of media_ranges
that have already been parsed by parse_media_range(). Returns the 'q'
quality parameter of the best match, 0 if no match was found. This function
- bahaves the same as quality() except that 'parsed_ranges' must be a list of
+ behaves the same as quality() except that 'parsed_ranges' must be a list of
parsed media ranges. """
- return fitness_and_quality_parsed(mime_type, parsed_ranges)[1]
+ return quality_and_fitness_parsed(mime_type, parsed_ranges)[0]
def quality(mime_type, ranges):
@@ -154,12 +162,12 @@
weighted_matches = []
pos = 0
for mime_type in supported:
- weighted_matches.append((fitness_and_quality_parsed(mime_type,
+ weighted_matches.append((quality_and_fitness_parsed(mime_type,
parsed_header), pos, mime_type))
pos += 1
weighted_matches.sort()
- return weighted_matches[-1][0][1] and weighted_matches[-1][2] or ''
+ return weighted_matches[-1][0][0] and weighted_matches[-1][2] or ''
def _filter_blank(i):
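A hedged example of best_match() with the new quality-first ordering; the supported types and Accept header are taken from the bundled test data:

    import mimeparse

    supported = ['application/xbel+xml', 'text/xml']
    header = 'text/*;q=0.5,*/*; q=0.1'

    # quality is compared first, fitness second, so text/xml wins here
    print(mimeparse.best_match(supported, header))   # 'text/xml'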
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/mimeparse_test.py new/python-mimeparse-1.5.2/mimeparse_test.py
--- old/python-mimeparse-0.1.4/mimeparse_test.py 2012-08-23 05:38:20.000000000 +0200
+++ new/python-mimeparse-1.5.2/mimeparse_test.py 2016-04-26 18:54:58.000000000 +0200
@@ -5,68 +5,68 @@
This module loads a json file and converts the tests specified therein to a set
of PyUnitTestCases. Then it uses PyUnit to run them and report their status.
"""
-__version__ = "0.1"
-__author__ = 'Ade Oshineye'
-__email__ = "ade(a)oshineye.com"
-__credits__ = ""
-
import json
import mimeparse
import unittest
-from functools import partial
-
-
-def test_parse_media_range(args, expected):
- expected = tuple(expected)
- result = mimeparse.parse_media_range(args)
- message = "Expected: '%s' but got %s" % (expected, result)
- assert expected == result, message
-
-
-def test_quality(args, expected):
- result = mimeparse.quality(args[0], args[1])
- message = "Expected: '%s' but got %s" % (expected, result)
- assert expected == result, message
-def test_best_match(args, expected):
- result = mimeparse.best_match(args[0], args[1])
- message = "Expected: '%s' but got %s" % (expected, result)
- assert expected == result, message
-
-
-def test_parse_mime_type(args, expected):
- expected = tuple(expected)
- result = mimeparse.parse_mime_type(args)
- message = "Expected: '%s' but got %s" % (expected, result)
- assert expected == result, message
-
-
-def add_tests(suite, json_object, func_name, test_func):
- test_data = json_object[func_name]
- for test_datum in test_data:
- args, expected = test_datum[0], test_datum[1]
- desc = "%s(%s) with expected result: %s" % (func_name, str(args),
- str(expected))
- if len(test_datum) == 3:
- desc = test_datum[2] + " : " + desc
- func = partial(test_func, *(args, expected))
- testcase = unittest.FunctionTestCase(func, description=desc)
- suite.addTest(testcase)
-
+__version__ = "0.1"
+__author__ = 'Ade Oshineye'
+__email__ = "ade(a)oshineye.com"
+__credits__ = ""
-def run_tests():
- json_object = json.load(open("testdata.json"))
- suite = unittest.TestSuite()
- add_tests(suite, json_object, "parse_media_range", test_parse_media_range)
- add_tests(suite, json_object, "quality", test_quality)
- add_tests(suite, json_object, "best_match", test_best_match)
- add_tests(suite, json_object, "parse_mime_type", test_parse_mime_type)
+class MimeParseTestCase(unittest.TestCase):
- test_runner = unittest.TextTestRunner(verbosity=1)
- test_runner.run(suite)
+ def setUp(self):
+ super(MimeParseTestCase, self).setUp()
+ with open("testdata.json") as f:
+ self.test_data = json.load(f)
+
+ def _test_parse_media_range(self, args, expected):
+ expected = tuple(expected)
+ result = mimeparse.parse_media_range(args)
+ message = "Expected: '%s' but got %s" % (expected, result)
+ self.assertEqual(expected, result, message)
+
+ def _test_quality(self, args, expected):
+ result = mimeparse.quality(args[0], args[1])
+ message = "Expected: '%s' but got %s" % (expected, result)
+ self.assertEqual(expected, result, message)
+
+ def _test_best_match(self, args, expected, description):
+ if expected is None:
+ self.assertRaises(mimeparse.MimeTypeParseException, mimeparse.best_match, args[0], args[1])
+ else:
+ result = mimeparse.best_match(args[0], args[1])
+ message = "Expected: '%s' but got %s. Description for this test: %s" % (expected, result, description)
+ self.assertEqual(expected, result, message)
+
+ def _test_parse_mime_type(self, args, expected):
+ if expected is None:
+ self.assertRaises(mimeparse.MimeTypeParseException, mimeparse.parse_mime_type, args)
+ else:
+ expected = tuple(expected)
+ result = mimeparse.parse_mime_type(args)
+ message = "Expected: '%s' but got %s" % (expected, result)
+ self.assertEqual(expected, result, message)
+
+ def test_parse_media_range(self):
+ for args, expected in self.test_data['parse_media_range']:
+ self._test_parse_media_range(args, expected)
+
+ def test_quality(self):
+ for args, expected in self.test_data['quality']:
+ self._test_quality(args, expected)
+
+ def test_best_match(self):
+ for args, expected, description in self.test_data['best_match']:
+ self._test_best_match(args, expected, description)
+
+ def test_parse_mime_type(self):
+ for args, expected in self.test_data['parse_mime_type']:
+ self._test_parse_mime_type(args, expected)
-if __name__ == "__main__":
- run_tests()
+if __name__ == '__main__':
+ unittest.main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/python_mimeparse.egg-info/PKG-INFO new/python-mimeparse-1.5.2/python_mimeparse.egg-info/PKG-INFO
--- old/python-mimeparse-0.1.4/python_mimeparse.egg-info/PKG-INFO 1970-01-01 01:00:00.000000000 +0100
+++ new/python-mimeparse-1.5.2/python_mimeparse.egg-info/PKG-INFO 2016-04-26 19:55:46.000000000 +0200
@@ -0,0 +1,50 @@
+Metadata-Version: 1.1
+Name: python-mimeparse
+Version: 1.5.2
+Summary: A module provides basic functions for parsing mime-type names and matching them against a list of media-ranges.
+Home-page: https://github.com/dbtsai/python-mimeparse
+Author: DB Tsai
+Author-email: dbtsai(a)dbtsai.com
+License: UNKNOWN
+Download-URL: http://pypi.python.org/packages/source/p/python-mimeparse/python-mimeparse-…
+Description: # Travis CI Build Status [![Build Status](https://travis-ci.org/dbtsai/python-mimeparse.svg?branch=master)](h…
+
+ This module provides basic functions for handling mime-types. It can handle
+ matching mime-types against a list of media-ranges. See section 14.1 of
+ the HTTP specification [RFC 2616] for a complete explanation.
+
+ http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
+
+ Contents:
+ - parse_mime_type(): Parses a mime-type into its component parts.
+ - parse_media_range(): Media-ranges are mime-types with wild-cards and a "q" quality parameter.
+ - quality(): Determines the quality ("q") of a mime-type when compared against a list of media-ranges.
+ - quality_parsed(): Just like quality() except the second parameter must be pre-parsed.
+ - best_match(): Choose the mime-type with the highest quality ("q") from a list of candidates.
+
+ Testing
+ =======
+ The format of the JSON test data file is as follows:
+ A top-level JSON object which has a key for each of the functions to be tested. The value corresponding to that key is a list of tests. Each test contains: the argument or arguments to the function being tested, the expected results and an optional description.
+
+ Python
+ ======
+ The Python tests require Python 2.6.
+
+ Run the tests by typing:
+ python mimeparse_test.py
+
+ To make sure that the package works in all the supported environments, you can run tox tests:
+ pip install tox
+ tox
+
+Keywords: mime-type
+Platform: UNKNOWN
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/python_mimeparse.egg-info/SOURCES.txt new/python-mimeparse-1.5.2/python_mimeparse.egg-info/SOURCES.txt
--- old/python-mimeparse-0.1.4/python_mimeparse.egg-info/SOURCES.txt 1970-01-01 01:00:00.000000000 +0100
+++ new/python-mimeparse-1.5.2/python_mimeparse.egg-info/SOURCES.txt 2016-04-26 19:55:46.000000000 +0200
@@ -0,0 +1,11 @@
+LICENSE
+MANIFEST.in
+README.md
+mimeparse.py
+mimeparse_test.py
+setup.py
+testdata.json
+python_mimeparse.egg-info/PKG-INFO
+python_mimeparse.egg-info/SOURCES.txt
+python_mimeparse.egg-info/dependency_links.txt
+python_mimeparse.egg-info/top_level.txt
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/python_mimeparse.egg-info/dependency_links.txt new/python-mimeparse-1.5.2/python_mimeparse.egg-info/dependency_links.txt
--- old/python-mimeparse-0.1.4/python_mimeparse.egg-info/dependency_links.txt 1970-01-01 01:00:00.000000000 +0100
+++ new/python-mimeparse-1.5.2/python_mimeparse.egg-info/dependency_links.txt 2016-04-26 19:55:46.000000000 +0200
@@ -0,0 +1 @@
+
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/python_mimeparse.egg-info/top_level.txt new/python-mimeparse-1.5.2/python_mimeparse.egg-info/top_level.txt
--- old/python-mimeparse-0.1.4/python_mimeparse.egg-info/top_level.txt 1970-01-01 01:00:00.000000000 +0100
+++ new/python-mimeparse-1.5.2/python_mimeparse.egg-info/top_level.txt 2016-04-26 19:55:46.000000000 +0200
@@ -0,0 +1 @@
+mimeparse
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/setup.cfg new/python-mimeparse-1.5.2/setup.cfg
--- old/python-mimeparse-0.1.4/setup.cfg 2012-08-23 07:15:20.000000000 +0200
+++ new/python-mimeparse-1.5.2/setup.cfg 2016-04-26 19:55:46.000000000 +0200
@@ -1,2 +1,5 @@
-[easy_install]
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/setup.py new/python-mimeparse-1.5.2/setup.py
--- old/python-mimeparse-0.1.4/setup.py 2012-08-23 22:28:43.000000000 +0200
+++ new/python-mimeparse-1.5.2/setup.py 2016-04-26 18:54:58.000000000 +0200
@@ -1,17 +1,26 @@
#!/usr/bin/env python
-from distutils.core import setup
+import os
+import codecs
import mimeparse
+from setuptools import setup
+
+
+def read(fname):
+ path = os.path.join(os.path.dirname(__file__), fname)
+ return codecs.open(path, encoding='utf-8').read()
setup(
name="python-mimeparse",
py_modules=["mimeparse"],
version=mimeparse.__version__,
- description="A module provides basic functions for parsing mime-type names and matching them against a list of media-ranges.",
- author="David Tsai",
+ description=("A module provides basic functions for parsing mime-type "
+ "names and matching them against a list of media-ranges."),
+ author="DB Tsai",
author_email="dbtsai(a)dbtsai.com",
url="https://github.com/dbtsai/python-mimeparse",
- download_url="http://pypi.python.org/packages/source/p/python-mimeparse/python-mimeparse-…",
+ download_url=("http://pypi.python.org/packages/source/p/python-mimeparse/"
+ "python-mimeparse-" + mimeparse.__version__ + ".tar.gz"),
keywords=["mime-type"],
classifiers=[
"Programming Language :: Python",
@@ -23,18 +32,5 @@
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
- long_description="""
-This module provides basic functions for handling mime-types. It can handle
-matching mime-types against a list of media-ranges. See section 14.1 of
-the HTTP specification [RFC 2616] for a complete explanation.
-
- http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
-
-Contents:
- - parse_mime_type(): Parses a mime-type into its component parts.
- - parse_media_range(): Media-ranges are mime-types with wild-cards and a "q" quality parameter.
- - quality(): Determines the quality ("q") of a mime-type when compared against a list of media-ranges.
- - quality_parsed(): Just like quality() except the second parameter must be pre-parsed.
- - best_match(): Choose the mime-type with the highest quality ("q") from a list of candidates.
-"""
+ long_description=read('README.md')
)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/python-mimeparse-0.1.4/testdata.json new/python-mimeparse-1.5.2/testdata.json
--- old/python-mimeparse-0.1.4/testdata.json 2012-08-23 05:38:20.000000000 +0200
+++ new/python-mimeparse-1.5.2/testdata.json 2016-04-26 18:54:58.000000000 +0200
@@ -1,42 +1,47 @@
{
"parse_media_range": [
- ["application/xml;q=1", ["application", "xml", {"q": "1"}]],
- ["application/xml", ["application", "xml", {"q": "1"}]],
- ["application/xml;q=",["application", "xml", {"q": "1"}]],
- ["application/xml ;q=",["application", "xml", {"q": "1"}]],
- ["application/xml ; q=1;b=other",["application", "xml", {"q": "1", "b":"other"}]],
- ["application/xml ; q=2;b=other",["application", "xml", {"q": "1", "b":"other"}]],
- [" *; q=.2",["*", "*", {"q": ".2"}]]
+ ["application/xml;q=1", ["application", "xml", {"q": "1"}]],
+ ["application/xml", ["application", "xml", {"q": "1"}]],
+ ["application/xml;q=",["application", "xml", {"q": "1"}]],
+ ["application/xml ;q=",["application", "xml", {"q": "1"}]],
+ ["application/xml ; q=1;b=other",["application", "xml", {"q": "1", "b":"other"}]],
+ ["application/xml ; q=2;b=other",["application", "xml", {"q": "1", "b":"other"}]],
+ ["application/xml ; q=0",["application", "xml", {"q": "0"}]],
+ [" *; q=.2",["*", "*", {"q": ".2"}]]
],
"quality": [
- [["text/html;level=1", "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], 1],
- [["text/html", "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], 0.7],
- [["text/plain", "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], 0.3],
- [["image/jpeg", "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], 0.5],
- [["text/html;level=2", "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], 0.4],
- [["text/html;level=3", "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], 0.7],
- [["text/plain", "text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2"], 0.2]
+ [["text/html;level=1", "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], 1],
+ [["text/html", "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], 0.7],
+ [["text/plain", "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], 0.3],
+ [["image/jpeg", "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], 0.5],
+ [["text/html;level=2", "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], 0.4],
+ [["text/html;level=3", "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], 0.7],
+ [["text/plain", "text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2"], 0.2]
],
"best_match": [
- [[["application/xbel+xml", "application/xml"], "application/xbel+xml"], "application/xbel+xml", "direct match"],
- [[["application/xbel+xml", "application/xml"], "application/xbel+xml; q=1"], "application/xbel+xml", "direct match with a q parameter"],
- [[["application/xbel+xml", "application/xml"], "application/xml; q=1"], "application/xml", "direct match of our second choice with a q parameter"],
- [[["application/xbel+xml", "application/xml"], "application/*; q=1"], "application/xml", "match using a subtype wildcard"],
- [[["application/xbel+xml", "application/xml"], "*/*", "application/xml"], "application/xml", "match using a type wildcard"],
- [[["application/xbel+xml", "text/xml"], "text/*;q=0.5,*/*; q=0.1"], "text/xml", "match using a type versus a lower weighted subtype"],
- [[["application/xbel+xml", "text/xml"], "text/html,application/atom+xml; q=0.9"], "", "fail to match anything"],
- [[["application/json", "text/html"], "application/json, text/javascript, */*"], "application/json", "common AJAX scenario"],
- [[["application/json", "text/html"], "application/json, text/html;q=0.9"], "application/json", "verify fitness ordering"],
- [[["image/*", "application/xml"], "image/png"], "image/*", "match using a type wildcard"],
- [[["image/*", "application/xml"], "image/*"], "image/*", "match using a wildcard for both requested and supported"],
- [[["text/html", "application/rdf+xml"], "text/html, application/rdf+xml"], "application/rdf+xml", "match should use highest order of supported when there is a tie"],
- [[["application/rdf+xml", "text/html"], "text/html, application/rdf+xml"], "text/html", "match should use highest order of supported when there is a tie"]
+ [[["application/xbel+xml", "application/xml"], "application/xbel+xml"], "application/xbel+xml", "direct match"],
+ [[["application/xbel+xml", "application/xml"], "application/xbel+xml; q=1"], "application/xbel+xml", "direct match with a q parameter"],
+ [[["application/xbel+xml", "application/xml"], "application/xml; q=1"], "application/xml", "direct match of our second choice with a q parameter"],
+ [[["application/xbel+xml", "application/xml"], "application/*; q=1"], "application/xml", "match using a subtype wildcard"],
+ [[["application/xbel+xml", "application/xml"], "*/*", "application/xml"], "application/xml", "match using a type wildcard"],
+ [[["application/xbel+xml", "text/xml"], "text/*;q=0.5,*/*; q=0.1"], "text/xml", "match using a type versus a lower weighted subtype"],
+ [[["application/xbel+xml", "text/xml"], "text/html,application/atom+xml; q=0.9"], "", "fail to match anything"],
+ [[["application/json", "text/html"], "application/json, text/javascript, */*"], "application/json", "common AJAX scenario"],
+ [[["application/json", "text/html"], "application/json, text/html;q=0.9"], "application/json", "verify fitness ordering"],
+ [[["image/*", "application/xml"], "image/png"], "image/*", "match using a type wildcard"],
+ [[["image/*", "application/xml"], "image/*"], "image/*", "match using a wildcard for both requested and supported"],
+ [[["image/jpeg", "text/plain"], "text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"], "image/jpeg", "media type with highest associated quality factor should win, not necessarily most specific"],
+ [[["text/html", "application/rdf+xml"], "text/html, application/rdf+xml"], "application/rdf+xml", "match should use highest order of supported when there is a tie"],
+ [[["application/rdf+xml", "text/html"], "text/html, application/rdf+xml"], "text/html", "match should use highest order of supported when there is a tie"],
+ [[["application/json", "text/html"], "text"], null, "match should use the default if an invalid Accept header is passed"]
],
"parse_mime_type": [
- ["application/xhtml;q=0.5", ["application", "xhtml", {"q": "0.5"}]],
- ["application/xhtml;q=0.5;ver=1.2", ["application", "xhtml", {"q": "0.5", "ver": "1.2"}]]
+ ["application/xhtml;q=0.5", ["application", "xhtml", {"q": "0.5"}]],
+ ["application/xhtml;q=0.5;ver=1.2", ["application", "xhtml", {"q": "0.5", "ver": "1.2"}]],
+ ["text", null],
+ ["text/something/invalid", null]
]
}