openSUSE Commits
Threads by month
- ----- 2025 -----
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2009 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2008 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2007 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2006 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
December 2020
- 1 participants
- 2154 discussions
commit perl-HTTP-Entity-Parser for openSUSE:Factory
by User for buildservice source handling 01 Dec '20
by User for buildservice source handling 01 Dec '20
01 Dec '20
Hello community,
here is the log from the commit of package perl-HTTP-Entity-Parser for openSUSE:Factory checked in at 2020-12-01 14:23:38
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/perl-HTTP-Entity-Parser (Old)
and /work/SRC/openSUSE:Factory/.perl-HTTP-Entity-Parser.new.5913 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "perl-HTTP-Entity-Parser"
Tue Dec 1 14:23:38 2020 rev:4 rq:852064 version:0.25
Changes:
--------
--- /work/SRC/openSUSE:Factory/perl-HTTP-Entity-Parser/perl-HTTP-Entity-Parser.changes 2020-08-10 14:58:54.628029390 +0200
+++ /work/SRC/openSUSE:Factory/.perl-HTTP-Entity-Parser.new.5913/perl-HTTP-Entity-Parser.changes 2020-12-01 14:24:00.373672482 +0100
@@ -1,0 +2,10 @@
+Sun Nov 29 03:08:04 UTC 2020 - Tina Müller <timueller+perl(a)suse.de>
+
+- updated to 0.25
+ see /usr/share/doc/packages/perl-HTTP-Entity-Parser/Changes
+
+ 0.25 2020-11-28T02:35:28Z
+
+ - handle empty Content-Type as if octet-stream blob #14
+
+-------------------------------------------------------------------
Old:
----
HTTP-Entity-Parser-0.24.tar.gz
New:
----
HTTP-Entity-Parser-0.25.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ perl-HTTP-Entity-Parser.spec ++++++
--- /var/tmp/diff_new_pack.wYNYPa/_old 2020-12-01 14:24:00.857673006 +0100
+++ /var/tmp/diff_new_pack.wYNYPa/_new 2020-12-01 14:24:00.861673010 +0100
@@ -17,7 +17,7 @@
Name: perl-HTTP-Entity-Parser
-Version: 0.24
+Version: 0.25
Release: 0
%define cpan_name HTTP-Entity-Parser
Summary: PSGI compliant HTTP Entity Parser
++++++ HTTP-Entity-Parser-0.24.tar.gz -> HTTP-Entity-Parser-0.25.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/HTTP-Entity-Parser-0.24/Changes new/HTTP-Entity-Parser-0.25/Changes
--- old/HTTP-Entity-Parser-0.24/Changes 2020-08-05 07:36:51.000000000 +0200
+++ new/HTTP-Entity-Parser-0.25/Changes 2020-11-28 03:35:29.000000000 +0100
@@ -1,5 +1,9 @@
Revision history for Perl extension HTTP-Entity-Parser
+0.25 2020-11-28T02:35:28Z
+
+ - handle empty Content-Type as if octet-stream blob #14
+
0.24 2020-08-05T05:36:49Z
- encode recursively nested Arrayrefs and Hashrefs in JSON #13
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/HTTP-Entity-Parser-0.24/META.json new/HTTP-Entity-Parser-0.25/META.json
--- old/HTTP-Entity-Parser-0.24/META.json 2020-08-05 07:36:51.000000000 +0200
+++ new/HTTP-Entity-Parser-0.25/META.json 2020-11-28 03:35:29.000000000 +0100
@@ -68,7 +68,7 @@
"provides" : {
"HTTP::Entity::Parser" : {
"file" : "lib/HTTP/Entity/Parser.pm",
- "version" : "0.24"
+ "version" : "0.25"
},
"HTTP::Entity::Parser::JSON" : {
"file" : "lib/HTTP/Entity/Parser/JSON.pm"
@@ -94,11 +94,12 @@
"web" : "https://github.com/kazeburo/HTTP-Entity-Parser"
}
},
- "version" : "0.24",
+ "version" : "0.25",
"x_contributors" : [
"José Joaquín Atria <jjatria(a)gmail.com>",
"Karen Etheridge <ether(a)cpan.org>",
"Shoichi Kaji <skaji(a)cpan.org>",
+ "Tatsuhiko Miyagawa <miyagawa(a)bulknews.net>",
"Todd Rinaldo <toddr(a)cpan.org>",
"commojun <junpooooow(a)gmail.com>",
"jrubinator <jjrs.pam+github(a)gmail.com>",
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/HTTP-Entity-Parser-0.24/META.yml new/HTTP-Entity-Parser-0.25/META.yml
--- old/HTTP-Entity-Parser-0.24/META.yml 2020-08-05 07:36:51.000000000 +0200
+++ new/HTTP-Entity-Parser-0.25/META.yml 2020-11-28 03:35:30.000000000 +0100
@@ -29,7 +29,7 @@
provides:
HTTP::Entity::Parser:
file: lib/HTTP/Entity/Parser.pm
- version: '0.24'
+ version: '0.25'
HTTP::Entity::Parser::JSON:
file: lib/HTTP/Entity/Parser/JSON.pm
HTTP::Entity::Parser::MultiPart:
@@ -52,11 +52,12 @@
bugtracker: https://github.com/kazeburo/HTTP-Entity-Parser/issues
homepage: https://github.com/kazeburo/HTTP-Entity-Parser
repository: git://github.com/kazeburo/HTTP-Entity-Parser.git
-version: '0.24'
+version: '0.25'
x_contributors:
- 'José Joaquín Atria <jjatria(a)gmail.com>'
- 'Karen Etheridge <ether(a)cpan.org>'
- 'Shoichi Kaji <skaji(a)cpan.org>'
+ - 'Tatsuhiko Miyagawa <miyagawa(a)bulknews.net>'
- 'Todd Rinaldo <toddr(a)cpan.org>'
- 'commojun <junpooooow(a)gmail.com>'
- 'jrubinator <jjrs.pam+github(a)gmail.com>'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/HTTP-Entity-Parser-0.24/lib/HTTP/Entity/Parser.pm new/HTTP-Entity-Parser-0.25/lib/HTTP/Entity/Parser.pm
--- old/HTTP-Entity-Parser-0.24/lib/HTTP/Entity/Parser.pm 2020-08-05 07:36:51.000000000 +0200
+++ new/HTTP-Entity-Parser-0.25/lib/HTTP/Entity/Parser.pm 2020-11-28 03:35:29.000000000 +0100
@@ -6,7 +6,7 @@
use Stream::Buffered;
use Module::Load;
-our $VERSION = "0.24";
+our $VERSION = "0.25";
our $BUFFER_LENGTH = 65536;
@@ -44,11 +44,7 @@
my ($self, $env) = @_;
my $buffer_length = $self->[1];
- my $ct = $env->{CONTENT_TYPE};
- if (!$ct) {
- # No Content-Type
- return ([], []);
- }
+ my $ct = $env->{CONTENT_TYPE} || '';
my $parser;
for my $handler (@{$self->[0]}) {
1
0
01 Dec '20
Hello community,
here is the log from the commit of package perl-Graph for openSUSE:Factory checked in at 2020-12-01 14:23:37
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/perl-Graph (Old)
and /work/SRC/openSUSE:Factory/.perl-Graph.new.5913 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "perl-Graph"
Tue Dec 1 14:23:37 2020 rev:18 rq:852063 version:0.9711
Changes:
--------
--- /work/SRC/openSUSE:Factory/perl-Graph/perl-Graph.changes 2020-11-23 18:54:57.077638723 +0100
+++ /work/SRC/openSUSE:Factory/.perl-Graph.new.5913/perl-Graph.changes 2020-12-01 14:23:59.573671616 +0100
@@ -1,0 +2,13 @@
+Sat Nov 28 03:06:17 UTC 2020 - Tina Müller <timueller+perl(a)suse.de>
+
+- updated to 0.9711
+ see /usr/share/doc/packages/perl-Graph/Changes
+
+ 0.9711 2020-11-27
+ - ingest handle multivertexed, multiedged right
+
+ 0.9710 2020-11-27
+ - all_paths method
+ - as_hashes handle multivertexed, multiedged right
+
+-------------------------------------------------------------------
Old:
----
Graph-0.9709.tar.gz
New:
----
Graph-0.9711.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ perl-Graph.spec ++++++
--- /var/tmp/diff_new_pack.ppldMy/_old 2020-12-01 14:24:00.057672140 +0100
+++ /var/tmp/diff_new_pack.ppldMy/_new 2020-12-01 14:24:00.061672144 +0100
@@ -17,7 +17,7 @@
Name: perl-Graph
-Version: 0.9709
+Version: 0.9711
Release: 0
%define cpan_name Graph
Summary: Graph data structures and algorithms
++++++ Graph-0.9709.tar.gz -> Graph-0.9711.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Graph-0.9709/Changes new/Graph-0.9711/Changes
--- old/Graph-0.9709/Changes 2020-11-22 20:16:27.000000000 +0100
+++ new/Graph-0.9711/Changes 2020-11-27 04:51:27.000000000 +0100
@@ -1,3 +1,10 @@
+0.9711 2020-11-27
+- ingest handle multivertexed, multiedged right
+
+0.9710 2020-11-27
+- all_paths method
+- as_hashes handle multivertexed, multiedged right
+
0.9709 2020-11-22
- add path_count option to TransitiveClosure
- get_{edge,vertex}_attributes undef if no such entity, in list context
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Graph-0.9709/META.json new/Graph-0.9711/META.json
--- old/Graph-0.9709/META.json 2020-11-22 20:18:08.000000000 +0100
+++ new/Graph-0.9711/META.json 2020-11-27 04:52:11.000000000 +0100
@@ -67,6 +67,6 @@
"web" : "https://github.com/graphviz-perl/Graph"
}
},
- "version" : "0.9709",
+ "version" : "0.9711",
"x_serialization_backend" : "JSON::PP version 4.00"
}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Graph-0.9709/META.yml new/Graph-0.9711/META.yml
--- old/Graph-0.9709/META.yml 2020-11-22 20:18:08.000000000 +0100
+++ new/Graph-0.9711/META.yml 2020-11-27 04:52:11.000000000 +0100
@@ -29,5 +29,5 @@
resources:
bugtracker: https://github.com/graphviz-perl/Graph/issues
repository: git://github.com/graphviz-perl/Graph.git
-version: '0.9709'
+version: '0.9711'
x_serialization_backend: 'CPAN::Meta::YAML version 0.018'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Graph-0.9709/lib/Graph/TransitiveClosure/Matrix.pm new/Graph-0.9711/lib/Graph/TransitiveClosure/Matrix.pm
--- old/Graph-0.9709/lib/Graph/TransitiveClosure/Matrix.pm 2020-11-21 00:04:27.000000000 +0100
+++ new/Graph-0.9711/lib/Graph/TransitiveClosure/Matrix.pm 2020-11-27 01:06:22.000000000 +0100
@@ -5,6 +5,13 @@
use Graph::AdjacencyMatrix;
use Graph::Matrix;
+use Scalar::Util qw(weaken);
+
+sub _A() { 0 } # adjacency
+sub _D() { 1 } # distance
+sub _P() { 2 } # predecessors
+sub _V() { 3 } # vertices
+sub _G() { 4 } # the original graph (OG)
sub _new {
my ($g, $class, $opt, $want_transitive, $want_reflexive, $want_path, $want_path_vertices, $want_path_count) = @_;
@@ -188,7 +195,8 @@
$pm->[0] = \@pi;
$pm->[1] = \%pi;
}
- bless [ $am, $dm, $pm, \%V ], $class;
+ weaken(my $og = $g);
+ bless [ $am, $dm, $pm, \%V, $og ], $class;
}
sub new {
@@ -220,7 +228,7 @@
sub has_vertices {
my $tc = shift;
for my $v (@_) {
- return 0 unless exists $tc->[3]->{ $v };
+ return 0 unless exists $tc->[ _V ]->{ $v };
}
return 1;
}
@@ -229,7 +237,7 @@
my ($tc, $u, $v) = @_;
return undef unless $tc->has_vertices($u, $v);
return 1 if $u eq $v;
- $tc->[0]->get($u, $v);
+ $tc->[ _A ]->get($u, $v);
}
sub is_transitive {
@@ -238,7 +246,7 @@
} else { # A TC graph.
my ($tc, $u, $v) = @_;
return undef unless $tc->has_vertices($u, $v);
- $tc->[0]->get($u, $v);
+ $tc->[ _A ]->get($u, $v);
}
}
@@ -251,14 +259,14 @@
my ($tc, $u, $v) = @_;
return undef unless $tc->has_vertices($u, $v);
return 0 if $u eq $v;
- $tc->[1]->get($u, $v);
+ $tc->[ _D ]->get($u, $v);
}
sub path_predecessor {
my ($tc, $u, $v) = @_;
return undef if $u eq $v;
return undef unless $tc->has_vertices($u, $v);
- $tc->[2]->get($u, $v);
+ $tc->[ _P ]->get($u, $v);
}
sub path_vertices {
@@ -270,10 +278,23 @@
last unless defined($u = $tc->path_predecessor($u, $v));
push @v, $u;
}
- $tc->[2]->set($u, $v, [ @v ]) if @v;
+ $tc->[ _P ]->set($u, $v, [ @v ]) if @v;
return @v;
}
+sub all_paths {
+ my ($tc, $u, $v) = @_;
+ return if $u eq $v;
+ my @found;
+ push @found, [$u, $v] if $tc->[ _G ]->has_edge($u, $v);
+ push @found,
+ map [$u, @$_],
+ map $tc->all_paths($_, $v),
+ grep $tc->is_reachable($_, $v),
+ grep $_ ne $v, $tc->[ _G ]->successors($u);
+ @found;
+}
+
1;
__END__
=pod
@@ -425,6 +446,10 @@
Return the predecessor of vertex $v in the transitive closure path
going back to vertex $u.
+=item all_paths($u, $v)
+
+Return list of array-refs with all the paths from $u to $v.
+
=back
=head1 RETURN VALUES
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Graph-0.9709/lib/Graph/TransitiveClosure.pm new/Graph-0.9711/lib/Graph/TransitiveClosure.pm
--- old/Graph-0.9709/lib/Graph/TransitiveClosure.pm 2020-11-09 20:27:19.000000000 +0100
+++ new/Graph-0.9711/lib/Graph/TransitiveClosure.pm 2020-11-27 00:12:50.000000000 +0100
@@ -99,6 +99,10 @@
my $tcg = Graph::TransitiveClosure->new($g, path_vertices => 1);
$tcg->path_vertices($u, $v)
+ # see how many paths exist from $u to $v
+ my $tcg = Graph::TransitiveClosure->new($g, path_count => 1);
+ $tcg->path_length($u, $v)
+
# Both path_length and path_vertices.
my $tcg = Graph::TransitiveClosure->new($g, path => 1);
$tcg->path_vertices($u, $v)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Graph-0.9709/lib/Graph.pm new/Graph-0.9711/lib/Graph.pm
--- old/Graph-0.9709/lib/Graph.pm 2020-11-22 20:16:42.000000000 +0100
+++ new/Graph-0.9711/lib/Graph.pm 2020-11-27 04:51:41.000000000 +0100
@@ -13,7 +13,7 @@
use Graph::AdjacencyMap qw(:flags :fields);
-our $VERSION = '0.9709';
+our $VERSION = '0.9711';
require 5.006; # Weak references are absolutely required.
@@ -1736,18 +1736,49 @@
sub as_hashes {
my ($g) = @_;
- my %e;
- $e{ $_->[0] }{ $_->[1] } = $g->get_edge_attributes(@$_) || {} for $g->edges;
- my %n = map +( $_ => $g->get_vertex_attributes($_) || {} ), $g->vertices;
+ my (%n, %e);
+ if ($g->is_multivertexed) {
+ for my $v ($g->vertices) {
+ $n{$v} = {
+ map +($_ => $g->get_vertex_attributes_by_id($v, $_) || {}),
+ $g->get_multivertex_ids($v)
+ };
+ }
+ } else {
+ %n = map +($_ => $g->get_vertex_attributes($_) || {}), $g->vertices;
+ }
+ if ($g->is_multiedged) {
+ for my $e ($g->edges) {
+ $e{ $e->[0] }{ $e->[1] } = {
+ map +($_ => $g->get_edge_attributes_by_id(@$e, $_) || {}),
+ $g->get_multiedge_ids(@$e)
+ };
+ }
+ } else {
+ $e{ $_->[0] }{ $_->[1] } = $g->get_edge_attributes(@$_) || {}
+ for $g->edges;
+ }
( \%n, \%e );
}
sub ingest {
my ($g, $g2) = @_;
for my $v ($g2->vertices) {
- $g->set_vertex_attributes($v, $g2->get_vertex_attributes($v));
- $g->set_edge_attributes(@$_, $g2->get_edge_attributes(@$_))
- for $g2->edges_from($v);
+ if ($g->is_multivertexed) {
+ $g->set_vertex_attributes_by_id($v, $_, $g2->get_vertex_attributes_by_id($v, $_))
+ for $g2->get_multivertex_ids($v);
+ } else {
+ $g->set_vertex_attributes($v, $g2->get_vertex_attributes($v));
+ }
+ if ($g->is_multiedged) {
+ for my $e ($g2->edges_from($v)) {
+ $g->set_edge_attributes_by_id(@$e, $_, $g2->get_edge_attributes_by_id(@$e, $_))
+ for $g2->get_multiedge_ids(@$e);
+ }
+ } else {
+ $g->set_edge_attributes(@$_, $g2->get_edge_attributes(@$_))
+ for $g2->edges_from($v);
+ }
}
$g;
}
@@ -3687,6 +3718,12 @@
$tcm->path_vertices(@_);
}
+sub all_paths {
+ my $g = shift;
+ my $tcm = $g->transitive_closure_matrix;
+ $tcm->all_paths(@_);
+}
+
sub is_reachable {
my $g = shift;
my $tcm = $g->transitive_closure_matrix;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Graph-0.9709/lib/Graph.pod new/Graph-0.9711/lib/Graph.pod
--- old/Graph-0.9709/lib/Graph.pod 2020-11-22 20:11:58.000000000 +0100
+++ new/Graph-0.9711/lib/Graph.pod 2020-11-27 02:47:54.000000000 +0100
@@ -1281,6 +1281,12 @@
a two-level hash mapping the predecessor to its successors, mapped to
the attributes.
+If C<multivertexed> is true, the vertices hash will have the second-level
+values be the multivertex's ID, and the third level will be attributes
+as above.
+
+If C<multiedged> is true, similar will be true for the edges hash.
+
=back
=head2 Connected Graphs and Their Components
@@ -1864,9 +1870,11 @@
Returns the predecessor of vertex $v in the all-pairs shortest paths.
-=back
+=item all_paths
-=over 8
+ my @paths = $apsp->all_paths($u, $v);
+
+Return list of array-refs with all the paths from $u to $v.
=item average_path_length
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Graph-0.9709/t/51_multivertex_attributes.t new/Graph-0.9711/t/51_multivertex_attributes.t
--- old/Graph-0.9709/t/51_multivertex_attributes.t 2020-10-20 05:06:19.000000000 +0200
+++ new/Graph-0.9711/t/51_multivertex_attributes.t 2020-11-27 04:35:52.000000000 +0100
@@ -1,5 +1,5 @@
use strict; use warnings;
-use Test::More tests => 69;
+use Test::More tests => 60;
use Graph;
my $g = Graph->new(multivertexed => 1);
@@ -35,13 +35,9 @@
my @name = $g->get_vertex_attribute_names_by_id("a", "hot");
my @val = $g->get_vertex_attribute_values_by_id("a", "hot");
-is( scalar keys %$attr, 1 );
-is( scalar @name, 1 );
-is( scalar @val, 1 );
-
-is( $attr->{color}, "green" );
-is( $name[0], "color" );
-is( $val[0], "green" );
+is_deeply $attr, { color => "green" };
+is_deeply \@name, [ "color" ];
+is_deeply \@val, [ "green" ];
ok( $g->set_vertex_attribute_by_id("a", "hot", "taste", "rhubarb") );
@@ -58,16 +54,10 @@
@name = sort $g->get_vertex_attribute_names_by_id("a", "hot");
@val = sort $g->get_vertex_attribute_values_by_id("a", "hot");
-is( scalar keys %$attr, 2 );
-is( scalar @name, 2 );
-is( scalar @val, 2 );
-
-is( $attr->{color}, "green" );
-is( $attr->{taste}, "rhubarb" );
-is( $name[0], "color" );
-is( $val[0], "green" );
-is( $name[1], "taste" );
-is( $val[1], "rhubarb" );
+is_deeply $attr, { color => "green", taste => "rhubarb" };
+is_deeply \@name, [ "color", "taste" ];
+is_deeply \@val, [ "green", "rhubarb" ];
+is_deeply(($g->as_hashes)[0], { a => { hot => { color => "green", taste => "rhubarb" } } });
ok( $g->delete_vertex_attribute_by_id("a", "hot", "color" ) );
@@ -86,9 +76,9 @@
@name = $g->get_vertex_attribute_names_by_id("a", "hot");
@val = $g->get_vertex_attribute_values_by_id("a", "hot");
-is( scalar keys %$attr, 0 );
-is( scalar @name, 0 );
-is( scalar @val, 0 );
+is_deeply $attr, undef;
+is_deeply \@name, [];
+is_deeply \@val, [];
is( $g->vertices, 0 ); # No "a" anymore.
@@ -108,9 +98,7 @@
ok($g->set_vertex_attributes_by_id('a', 'hot',
{ 'color' => 'pearl', 'weight' => 'heavy' }));
$attr = $g->get_vertex_attributes_by_id('a', 'hot');
-is(scalar keys %$attr, 2);
-is($attr->{color}, 'pearl');
-is($attr->{weight}, 'heavy');
+is_deeply $attr, { color => "pearl", weight => 'heavy' };
ok( $g->set_vertex_weight_by_id("a", "hot", 42));
is( $g->get_vertex_weight_by_id("a", "hot"), 42);
@@ -122,6 +110,12 @@
my $h = Graph->new(multivertexed => 1);
-eval '$h->set_vertex_attribute("foo", "color", "gold")';
+eval { $h->set_vertex_attribute("foo", "color", "gold") };
like($@, qr/set_vertex_attribute: expected non-multivertexed/);
+$h->ingest($g);
+is_deeply(($h->as_hashes)[0], {
+ a => { hot => { color => 'pearl' } },
+ b => { cool => { weight => 43 } },
+ c => { cool => { weight => 44 } }
+});
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Graph-0.9709/t/53_multiedge_attributes.t new/Graph-0.9711/t/53_multiedge_attributes.t
--- old/Graph-0.9709/t/53_multiedge_attributes.t 2020-10-20 05:06:19.000000000 +0200
+++ new/Graph-0.9711/t/53_multiedge_attributes.t 2020-11-27 04:49:07.000000000 +0100
@@ -1,5 +1,5 @@
use strict; use warnings;
-use Test::More tests => 74;
+use Test::More tests => 65;
use Graph;
my $g = Graph->new(multiedged => 1);
@@ -35,13 +35,9 @@
my @name = $g->get_edge_attribute_names_by_id("a", "b", "hot");
my @val = $g->get_edge_attribute_values_by_id("a", "b", "hot");
-is( scalar keys %$attr, 1 );
-is( scalar @name, 1 );
-is( scalar @val, 1 );
-
-is( $attr->{color}, "green" );
-is( $name[0], "color" );
-is( $val[0], "green" );
+is_deeply $attr, { color => "green" };
+is_deeply \@name, [ "color" ];
+is_deeply \@val, [ "green" ];
ok( $g->set_edge_attribute_by_id("a", "b", "hot", "taste", "rhubarb") );
@@ -58,16 +54,10 @@
@name = sort $g->get_edge_attribute_names_by_id("a", "b", "hot");
@val = sort $g->get_edge_attribute_values_by_id("a", "b", "hot");
-is( scalar keys %$attr, 2 );
-is( scalar @name, 2 );
-is( scalar @val, 2 );
-
-is( $attr->{color}, "green" );
-is( $attr->{taste}, "rhubarb" );
-is( $name[0], "color" );
-is( $val[0], "green" );
-is( $name[1], "taste" );
-is( $val[1], "rhubarb" );
+is_deeply $attr, { color => "green", taste => "rhubarb" };
+is_deeply \@name, [ "color", "taste" ];
+is_deeply \@val, [ "green", "rhubarb" ];
+is_deeply(($g->as_hashes)[1], { a => { b => { hot => { color => "green", taste => "rhubarb" } } } });
ok( $g->delete_edge_attribute_by_id("a", "b", "hot", "color" ) );
@@ -86,9 +76,9 @@
@name = $g->get_edge_attribute_names_by_id("a", "b", "hot");
@val = $g->get_edge_attribute_values_by_id("a", "b", "hot");
-is( scalar keys %$attr, 0 );
-is( scalar @name, 0 );
-is( scalar @val, 0 );
+is_deeply $attr, undef;
+is_deeply \@name, [];
+is_deeply \@val, [];
is( $g->edges, 0 ); # No "a", "b" anymore.
@@ -122,9 +112,7 @@
ok($u->set_edge_attributes_by_id('a', 'b', 'hot',
{ 'color' => 'pearl', 'weight' => 'heavy' }));
$attr = $u->get_edge_attributes_by_id('a', 'b', 'hot');
-is(scalar keys %$attr, 2);
-is($attr->{color}, 'pearl');
-is($attr->{weight}, 'heavy');
+is_deeply $attr, { color => "pearl", weight => 'heavy' };
ok( $g->set_edge_weight_by_id("a", "b", "hot", 42));
is( $g->get_edge_weight_by_id("a", "b", "hot"), 42);
@@ -136,5 +124,13 @@
my $h = Graph->new(multiedged => 1);
-eval '$h->set_edge_attribute("foo", "bar", "color", "gold")';
+eval { $h->set_edge_attribute("foo", "bar", "color", "gold") };
like($@, qr/set_edge_attribute: expected non-multiedged/);
+
+$h->ingest($g);
+my $got = ($h->as_hashes)[1];
+is_deeply $got, {
+ c => { d => { hot => { weight => 45 } } },
+ d => { e => { hot => { weight => 46 } } },
+ e => { f => { hot => { weight => 44 } } }
+} or diag explain $got;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Graph-0.9709/t/72_transitive.t new/Graph-0.9711/t/72_transitive.t
--- old/Graph-0.9709/t/72_transitive.t 2020-11-21 00:04:12.000000000 +0100
+++ new/Graph-0.9711/t/72_transitive.t 2020-11-27 01:29:24.000000000 +0100
@@ -1,5 +1,5 @@
use strict; use warnings;
-use Test::More tests => 243;
+use Test::More tests => 255;
use Graph::Directed;
use Graph::Undirected;
@@ -451,3 +451,32 @@
is $path_counts->path_length($u, $v), $count, "count $u $v";
}
}
+
+{
+ my @example = ( [ 1, 2 ],
+ [ 1, 3 ],
+ [ 1, 4 ], # direct link to two away
+ [ 3, 4 ] );
+ my $g = Graph::Directed->new;
+ $g->add_edge(@$_) for @example;
+ my $tcg = $g->transitive_closure;
+ my @paths = (
+ [ 1, 2, [[1,2]] ],
+ [ 1, 3, [[1,3]] ],
+ [ 1, 4, [[1,3,4], [1,4]] ],
+ [ 2, 1, [] ],
+ [ 2, 3, [] ],
+ [ 2, 4, [] ],
+ [ 3, 1, [] ],
+ [ 3, 2, [] ],
+ [ 3, 4, [[3,4]] ],
+ [ 4, 1, [] ],
+ [ 4, 2, [] ],
+ [ 4, 3, [] ],
+ );
+ foreach my $t (@paths) {
+ my ($u, $v, $paths) = @$t;
+ my $got = [ sort { $a->[1] <=> $b->[1] } $g->all_paths($u, $v) ];
+ is_deeply $got, $paths, "paths $u $v" or diag explain $got;
+ }
+}
1
0
01 Dec '20
Hello community,
here is the log from the commit of package parsec-tool for openSUSE:Factory checked in at 2020-12-01 14:23:29
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/parsec-tool (Old)
and /work/SRC/openSUSE:Factory/.parsec-tool.new.5913 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "parsec-tool"
Tue Dec 1 14:23:29 2020 rev:2 rq:852022 version:0.1.0
Changes:
--------
--- /work/SRC/openSUSE:Factory/parsec-tool/parsec-tool.changes 2020-11-10 13:46:23.543650781 +0100
+++ /work/SRC/openSUSE:Factory/.parsec-tool.new.5913/parsec-tool.changes 2020-12-01 14:23:53.081664593 +0100
@@ -1,0 +2,5 @@
+Thu Nov 26 16:39:03 UTC 2020 - Andreas Schwab <schwab(a)suse.de>
+
+- BuildRequire protobuf-devel for protoc
+
+-------------------------------------------------------------------
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ parsec-tool.spec ++++++
--- /var/tmp/diff_new_pack.Hgog1C/_old 2020-12-01 14:23:53.609665164 +0100
+++ /var/tmp/diff_new_pack.Hgog1C/_new 2020-12-01 14:23:53.613665169 +0100
@@ -1,7 +1,7 @@
#
# spec file for package parsec-tool
#
-# Copyright (c) 2019 SUSE LLC
+# Copyright (c) 2020 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -15,6 +15,7 @@
# Please submit bugfixes or comments via https://bugs.opensuse.org/
#
+
%global rustflags '-Clink-arg=-Wl,-z,relro,-z,now'
%{?systemd_ordering}
Name: parsec-tool
@@ -27,6 +28,7 @@
Source1: vendor.tar.xz
Source2: cargo_config
BuildRequires: cargo
+BuildRequires: protobuf-devel
BuildRequires: rust-packaging
Requires: parsec
ExcludeArch: armv6l armv6hl
@@ -40,6 +42,7 @@
cp %{SOURCE2} .cargo/config
%build
+export PROTOC=/usr/bin/protoc
%cargo_build
%install
1
0
Hello community,
here is the log from the commit of package fprintd for openSUSE:Factory checked in at 2020-12-01 14:23:26
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/fprintd (Old)
and /work/SRC/openSUSE:Factory/.fprintd.new.5913 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "fprintd"
Tue Dec 1 14:23:26 2020 rev:10 rq:851949 version:1.90.4
Changes:
--------
--- /work/SRC/openSUSE:Factory/fprintd/fprintd.changes 2020-03-20 23:59:46.137036048 +0100
+++ /work/SRC/openSUSE:Factory/.fprintd.new.5913/fprintd.changes 2020-12-01 14:23:51.241662603 +0100
@@ -1,0 +2,14 @@
+Sun Nov 29 15:33:57 UTC 2020 - Martin Hauke <mardnh(a)gmx.de>
+
+- Update to version 1.90.4
+ * Authentication is now required to enroll a new print.
+ * Add support for the libfprint early reporting mechanism
+ * Proper hotplug support together with libfprint 1.90.4
+ * Handle STATE_DIRECTORY containing multiple paths
+- Add patch:
+ * 0001-tests-Fix-test-not-failing-on-error.patch
+ https://gitlab.freedesktop.org/libfprint/fprintd/-/merge_requests/70
+ Unittest test_manager_get_devices_on_name_appeared is broken
+ with this patch applied - so revert it for now.
+
+-------------------------------------------------------------------
Old:
----
fprintd-1.90.1.tar.xz
New:
----
0001-tests-Fix-test-not-failing-on-error.patch
fprintd-1.90.4.tar.bz2
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ fprintd.spec ++++++
--- /var/tmp/diff_new_pack.7lsm94/_old 2020-12-01 14:23:51.777663183 +0100
+++ /var/tmp/diff_new_pack.7lsm94/_new 2020-12-01 14:23:51.781663187 +0100
@@ -16,16 +16,20 @@
#
-%define gitlabhash a47c31c844e23e070665a8a85dae0144
+%define gitlabhash 88a627f1873ce8f30e5354593bf4d45953126395
+
Name: fprintd
-Version: 1.90.1
+Version: 1.90.4
Release: 0
Summary: D-Bus service for Fingerprint reader access
License: GPL-2.0-or-later
URL: https://fprint.freedesktop.org/
-Source0: https://gitlab.freedesktop.org/libfprint/fprintd/uploads/%{gitlabhash}/%{na…
+#Git-Clone: https://gitlab.freedesktop.org/libfprint/fprintd.git
+Source0: https://gitlab.freedesktop.org/libfprint/fprintd/-/archive/v%{version}/%{na…
Source1: baselibs.conf
Source2: README.SUSE
+Patch0: 0001-tests-Fix-test-not-failing-on-error.patch
+BuildRequires: gobject-introspection
BuildRequires: gtk-doc >= 1.3
BuildRequires: intltool
BuildRequires: meson >= 0.46.1
@@ -33,9 +37,9 @@
BuildRequires: pkgconfig
BuildRequires: python3-cairo
BuildRequires: python3-dbusmock
-BuildRequires: python3-gobject
BuildRequires: python3-libpamtest
BuildRequires: python3-pydbus
+BuildRequires: typelib-1_0-FPrint-2_0
BuildRequires: pkgconfig(dbus-glib-1)
BuildRequires: pkgconfig(glib-2.0)
BuildRequires: pkgconfig(libfprint-2) >= 1.90.1
@@ -105,7 +109,8 @@
%lang_package
%prep
-%autosetup -p1
+%setup -q -n %{name}-v%{version}-%{gitlabhash}
+%patch0 -p1 -R
cp %{SOURCE2} .
%build
++++++ 0001-tests-Fix-test-not-failing-on-error.patch ++++++
From 6dc699ae6fec2e2ff644b0d7b9c3665d3d302336 Mon Sep 17 00:00:00 2001
From: Benjamin Berg <bberg(a)redhat.com>
Date: Fri, 2 Oct 2020 17:54:20 +0200
Subject: [PATCH] tests: Fix test not failing on error
An assertion that is raised within a callback will not be swallowed by
the C code that called the function. To ensure that errors will be
noticable, pass the result back to the surrounding scope and check it
there.
---
tests/fprintd.py | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/tests/fprintd.py b/tests/fprintd.py
index 0be7776..0bd9ac3 100755
--- a/tests/fprintd.py
+++ b/tests/fprintd.py
@@ -427,27 +427,27 @@ class FPrintdManagerPreStartTests(FPrintdTest):
self.manager.GetDefaultDevice()
def test_manager_get_devices_on_name_appeared(self):
- self._appeared = False
+ self._appeared_res = []
def on_name_appeared(connection, name, name_owner):
- self._appeared = True
- dev_path = connection.call_sync('net.reactivated.Fprint',
+ self._appeared_res.append(connection.call_sync('net.reactivated.Fprint',
'/net/reactivated/Fprint/Manager',
'net.reactivated.Fprint.Manager',
'GetDefaultDevice', None, None,
- Gio.DBusCallFlags.NO_AUTO_START, 500, None)
- self.assertIsNotNone(dev_path)
- self.assertTrue(dev_path.startswith('/net/reactivated/Fprint/Device/'))
+ Gio.DBusCallFlags.NO_AUTO_START, 500, None))
id = Gio.bus_watch_name_on_connection(self.dbus,
'net.reactivated.Fprint', Gio.BusNameWatcherFlags.NONE,
on_name_appeared, None)
self.daemon_start()
- while not self._appeared:
+ while not self._appeared_res:
ctx.iteration(True)
- self.assertTrue(self._appeared)
+ self.assertIsNotNone(self._appeared_res[0])
+ dev_path = self._appeared_res[0][0]
+ self.assertTrue(dev_path.startswith('/net/reactivated/Fprint/Device/'))
+
Gio.bus_unwatch_name(id)
--
2.29.2
++++++ fprintd-1.90.1.tar.xz -> fprintd-1.90.4.tar.bz2 ++++++
++++ 6368 lines of diff (skipped)
1
0
Hello community,
here is the log from the commit of package onedrive for openSUSE:Factory checked in at 2020-12-01 14:23:28
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/onedrive (Old)
and /work/SRC/openSUSE:Factory/.onedrive.new.5913 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "onedrive"
Tue Dec 1 14:23:28 2020 rev:6 rq:852024 version:2.4.8
Changes:
--------
--- /work/SRC/openSUSE:Factory/onedrive/onedrive.changes 2020-11-09 13:59:11.683730747 +0100
+++ /work/SRC/openSUSE:Factory/.onedrive.new.5913/onedrive.changes 2020-12-01 14:23:52.213663654 +0100
@@ -1,0 +2,24 @@
+Tue Dec 01 06:33:45 UTC 2020 - egotthold(a)suse.com
+
+- Update to version 2.4.8:
+ * Release files for 2.4.8 (#1167)
+ * Fix broken link in manual (#1166)
+ * Fix application crash in --monitor mode due to 'Failed to stat file' when setgid is used on a directory (#1157)
+ * Check application_id before setting redirect URL when using specific Azure endpoints (#1165)
+ * Fix logging output when authentication authorisation fails (#1156)
+ * Update #1142 logging output handling (#1153)
+ * Fix changing permissions on pre-existing local directories (#1152)
+ * Corrected typo (#1151)
+ * Use config set option for 'remove_source_files' and 'skip_dir_strict_match' rather than ignore (#1142)
+ * Update sync.d
+ * Update sync.d
+ * Update sync.d
+ * Update sync.d
+ * catch folder creation errors due to file system permissions error
+ * Update error catching and handling
+ * Update onedrive.d
+ * Fix download failure due to incorrect filesystem permissions
+ * Update USAGE.md to clarify 'application_id' usage to utilise older 'skilion' application id (#1136)
+ * Release 2.4.8 Dev Prep (#1134)
+
+-------------------------------------------------------------------
Old:
----
onedrive-2.4.7.tar
New:
----
onedrive-2.4.8.tar
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ onedrive.spec ++++++
--- /var/tmp/diff_new_pack.b8xezo/_old 2020-12-01 14:23:52.721664204 +0100
+++ /var/tmp/diff_new_pack.b8xezo/_new 2020-12-01 14:23:52.721664204 +0100
@@ -28,7 +28,7 @@
%endif
Name: onedrive
-Version: 2.4.7
+Version: 2.4.8
Release: 0
Summary: Client for One Drive Service for Linux
License: GPL-3.0-only
@@ -120,7 +120,7 @@
%files
%defattr(-,root,root)
%license LICENSE
-%doc USAGE.md Office365.md INSTALL.md Docker.md CHANGELOG.md config README.md BusinessSharedFolders.md LICENSE
+%doc USAGE.md Office365.md INSTALL.md Docker.md CHANGELOG.md config README.md BusinessSharedFolders.md LICENSE advanced-usage.md
%config(noreplace) %{_sysconfdir}/%{name}
%config(noreplace) %{_sysconfdir}/logrotate.d/%{name}
%{_bindir}/%{name}
++++++ _service ++++++
--- /var/tmp/diff_new_pack.b8xezo/_old 2020-12-01 14:23:52.745664230 +0100
+++ /var/tmp/diff_new_pack.b8xezo/_new 2020-12-01 14:23:52.745664230 +0100
@@ -3,8 +3,8 @@
<param name="scm">git</param>
<param name="url">https://github.com/abraunegg/onedrive.git</param>
<param name="filename">onedrive</param>
- <param name="revision">v2.4.7</param>
- <param name="version">2.4.7</param>
+ <param name="revision">v2.4.8</param>
+ <param name="version">2.4.8</param>
<param name="changesgenerate">enable</param>
<param name="exclude">**/bad-file-name.tar.xz</param>
</service>
++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.b8xezo/_old 2020-12-01 14:23:52.761664247 +0100
+++ /var/tmp/diff_new_pack.b8xezo/_new 2020-12-01 14:23:52.761664247 +0100
@@ -1,4 +1,4 @@
<servicedata>
<service name="tar_scm">
<param name="url">https://github.com/abraunegg/onedrive.git</param>
- <param name="changesrevision">313af18671636ce477db42eb54256eead8512901</param></service></servicedata>
\ No newline at end of file
+ <param name="changesrevision">545fdcd692e30753df39969d710dda5b692e25ac</param></service></servicedata>
\ No newline at end of file
++++++ onedrive-2.4.7.tar -> onedrive-2.4.8.tar ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/CHANGELOG.md new/onedrive-2.4.8/CHANGELOG.md
--- old/onedrive-2.4.7/CHANGELOG.md 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/CHANGELOG.md 2020-11-30 06:39:36.000000000 +0100
@@ -1,7 +1,23 @@
# Changelog
-
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
+
+## 2.4.8 - 2020-11-30
+### Fixed
+* Fix to use config set option for 'remove_source_files' and 'skip_dir_strict_match' rather than ignore if set
+* Fix download failure and crash due to incorrect local filesystem permissions when using mounted external devices
+* Fix to not change permissions on pre-existing local directories
+* Fix logging output when authentication authorisation fails to not say authorisation was successful
+* Fix to check application_id before setting redirect URL when using specific Azure endpoints
+* Fix application crash in --monitor mode due to 'Failed to stat file' when setgid is used on a directory and data cannot be read
+
+### Added
+* Added advanced-usage.md to document advanced client usage such as multi account configurations and Windows dual-boot
+
+### Updated
+* Updated --verbose logging output for config options when set
+* Updated documentation (man page, USAGE.md, Office365.md, BusinessSharedFolders.md)
+
## 2.4.7 - 2020-11-09
### Fixed
* Fix debugging output for /delta changes available queries
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/Makefile.in new/onedrive-2.4.8/Makefile.in
--- old/onedrive-2.4.7/Makefile.in 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/Makefile.in 2020-11-30 06:39:36.000000000 +0100
@@ -55,7 +55,7 @@
system_unit_files = contrib/systemd/onedrive@.service
user_unit_files = contrib/systemd/onedrive.service
-DOCFILES = README.md config LICENSE CHANGELOG.md docs/Docker.md docs/INSTALL.md docs/Office365.md docs/USAGE.md docs/BusinessSharedFolders.md
+DOCFILES = README.md config LICENSE CHANGELOG.md docs/Docker.md docs/INSTALL.md docs/Office365.md docs/USAGE.md docs/BusinessSharedFolders.md docs/advanced-usage.md
ifneq ("$(wildcard /etc/redhat-release)","")
RHEL = $(shell cat /etc/redhat-release | grep -E "(Red Hat Enterprise Linux Server|CentOS)" | wc -l)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/configure new/onedrive-2.4.8/configure
--- old/onedrive-2.4.7/configure 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/configure 2020-11-30 06:39:36.000000000 +0100
@@ -1,6 +1,6 @@
#! /bin/sh
# Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.69 for onedrive v2.4.7.
+# Generated by GNU Autoconf 2.69 for onedrive v2.4.8.
#
# Report bugs to <https://github.com/abraunegg/onedrive>.
#
@@ -579,8 +579,8 @@
# Identity of this package.
PACKAGE_NAME='onedrive'
PACKAGE_TARNAME='onedrive'
-PACKAGE_VERSION='v2.4.7'
-PACKAGE_STRING='onedrive v2.4.7'
+PACKAGE_VERSION='v2.4.8'
+PACKAGE_STRING='onedrive v2.4.8'
PACKAGE_BUGREPORT='https://github.com/abraunegg/onedrive'
PACKAGE_URL=''
@@ -1219,7 +1219,7 @@
# Omit some internal or obsolete options to make the list less imposing.
# This message is too long to be a string in the A/UX 3.1 sh.
cat <<_ACEOF
-\`configure' configures onedrive v2.4.7 to adapt to many kinds of systems.
+\`configure' configures onedrive v2.4.8 to adapt to many kinds of systems.
Usage: $0 [OPTION]... [VAR=VALUE]...
@@ -1280,7 +1280,7 @@
if test -n "$ac_init_help"; then
case $ac_init_help in
- short | recursive ) echo "Configuration of onedrive v2.4.7:";;
+ short | recursive ) echo "Configuration of onedrive v2.4.8:";;
esac
cat <<\_ACEOF
@@ -1393,7 +1393,7 @@
test -n "$ac_init_help" && exit $ac_status
if $ac_init_version; then
cat <<\_ACEOF
-onedrive configure v2.4.7
+onedrive configure v2.4.8
generated by GNU Autoconf 2.69
Copyright (C) 2012 Free Software Foundation, Inc.
@@ -1410,7 +1410,7 @@
This file contains any messages produced by compilers while
running configure, to aid debugging if configure makes a mistake.
-It was created by onedrive $as_me v2.4.7, which was
+It was created by onedrive $as_me v2.4.8, which was
generated by GNU Autoconf 2.69. Invocation command line was
$ $0 $@
@@ -3159,7 +3159,7 @@
# report actual input values of CONFIG_FILES etc. instead of their
# values after options handling.
ac_log="
-This file was extended by onedrive $as_me v2.4.7, which was
+This file was extended by onedrive $as_me v2.4.8, which was
generated by GNU Autoconf 2.69. Invocation command line was
CONFIG_FILES = $CONFIG_FILES
@@ -3212,7 +3212,7 @@
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
ac_cs_version="\\
-onedrive config.status v2.4.7
+onedrive config.status v2.4.8
configured by $0, generated by GNU Autoconf 2.69,
with options \\"\$ac_cs_config\\"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/configure.ac new/onedrive-2.4.8/configure.ac
--- old/onedrive-2.4.7/configure.ac 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/configure.ac 2020-11-30 06:39:36.000000000 +0100
@@ -9,7 +9,7 @@
dnl - tag the release
AC_PREREQ([2.69])
-AC_INIT([onedrive],[v2.4.7], [https://github.com/abraunegg/onedrive] [onedrive])
+AC_INIT([onedrive],[v2.4.8], [https://github.com/abraunegg/onedrive] [onedrive])
AC_CONFIG_SRCDIR([src/main.d])
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/contrib/gentoo/onedrive-2.4.7.ebuild new/onedrive-2.4.8/contrib/gentoo/onedrive-2.4.7.ebuild
--- old/onedrive-2.4.7/contrib/gentoo/onedrive-2.4.7.ebuild 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/contrib/gentoo/onedrive-2.4.7.ebuild 1970-01-01 01:00:00.000000000 +0100
@@ -1,31 +0,0 @@
-# Copyright 1999-2018 Gentoo Foundation
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=6
-
-DESCRIPTION="Onedrive sync client for Linux"
-HOMEPAGE="https://github.com/abraunegg/onedrive"
-SRC_URI="https://github.com/abraunegg/onedrive/archive/v${PV}.tar.gz -> ${P}.tar.gz"
-
-LICENSE="GPL-3"
-SLOT="0"
-KEYWORDS="~amd64 ~x86"
-IUSE=""
-
-DEPEND="
- >=dev-lang/dmd-2.081.1
- dev-db/sqlite
-"
-
-RDEPEND="${DEPEND}
- net-misc/curl
- "
-src_prepare() {
- default
- # Copy line 38 to 44 as systemd path needs to be created in portage sandbox
- # Update the makefile so that it doesnt use git commands to get the version during build.
- sed -i -e "38h; 44p; 44x" \
- -e "s/version:.*/version:/" \
- -e "\$s/.*/\techo v${PV} > version/" \
- Makefile
-}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/contrib/gentoo/onedrive-2.4.8.ebuild new/onedrive-2.4.8/contrib/gentoo/onedrive-2.4.8.ebuild
--- old/onedrive-2.4.7/contrib/gentoo/onedrive-2.4.8.ebuild 1970-01-01 01:00:00.000000000 +0100
+++ new/onedrive-2.4.8/contrib/gentoo/onedrive-2.4.8.ebuild 2020-11-30 06:39:36.000000000 +0100
@@ -0,0 +1,31 @@
+# Copyright 1999-2018 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=6
+
+DESCRIPTION="Onedrive sync client for Linux"
+HOMEPAGE="https://github.com/abraunegg/onedrive"
+SRC_URI="https://github.com/abraunegg/onedrive/archive/v${PV}.tar.gz -> ${P}.tar.gz"
+
+LICENSE="GPL-3"
+SLOT="0"
+KEYWORDS="~amd64 ~x86"
+IUSE=""
+
+DEPEND="
+ >=dev-lang/dmd-2.081.1
+ dev-db/sqlite
+"
+
+RDEPEND="${DEPEND}
+ net-misc/curl
+ "
+src_prepare() {
+ default
+ # Copy line 38 to 44 as systemd path needs to be created in portage sandbox
+ # Update the makefile so that it doesnt use git commands to get the version during build.
+ sed -i -e "38h; 44p; 44x" \
+ -e "s/version:.*/version:/" \
+ -e "\$s/.*/\techo v${PV} > version/" \
+ Makefile
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/contrib/spec/onedrive.spec.in new/onedrive-2.4.8/contrib/spec/onedrive.spec.in
--- old/onedrive-2.4.7/contrib/spec/onedrive.spec.in 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/contrib/spec/onedrive.spec.in 2020-11-30 06:39:36.000000000 +0100
@@ -6,7 +6,7 @@
%endif
Name: onedrive
-Version: 2.4.7
+Version: 2.4.8
Release: 1%{?dist}
Summary: Microsoft OneDrive Client
Group: System Environment/Network
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/docs/BusinessSharedFolders.md new/onedrive-2.4.8/docs/BusinessSharedFolders.md
--- old/onedrive-2.4.7/docs/BusinessSharedFolders.md 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/docs/BusinessSharedFolders.md 2020-11-30 06:39:36.000000000 +0100
@@ -2,7 +2,8 @@
Syncing OneDrive Business Shared Folders requires additional configuration for your 'onedrive' client:
1. List available shared folders to determine which folder you wish to sync & to validate that you have access to that folder
2. Create a new file called 'business_shared_folders' in your config directory which contains a list of the shared folders you wish to sync
-3. Perform a sync
+3. Test the configuration using '--dry-run'
+4. Sync the OneDrive Business Shared folders as required
## Listing available OneDrive Business Shared Folders
List the available OneDrive Business Shared folders with the following command:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/docs/Office365.md new/onedrive-2.4.8/docs/Office365.md
--- old/onedrive-2.4.7/docs/Office365.md 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/docs/Office365.md 2020-11-30 06:39:36.000000000 +0100
@@ -1,12 +1,24 @@
-# Show how to access a Sharepoint group drive in Office 365 business or education
-## Obtaining the Sharepoint Site Details
+# How to configure OneDrive SharePoint Shared Library sync
+Syncing a OneDrive SharePoint library requires additional configuration for your 'onedrive' client:
1. Login to OneDrive and under 'Shared Libraries' obtain the shared library name
+2. Query that shared library name using the client to obtain the required configuration details
+3. Configure the client's config file with the required 'drive_id'
+4. Test the configuration using '--dry-run'
+5. Sync the SharePoint Library as required
+
+## Listing available OneDrive SharePoint Libraries
+1. Login to the OneDrive web interface and determine which shared library you wish to configure the client for:
+![shared_libraries](./images/SharedLibraries.jpg)
+
+## Query that shared library name using the client to obtain the required configuration details
2. Run the following command using the 'onedrive' client
```text
onedrive --get-O365-drive-id '<your library name>'
```
3. This will return the following:
```text
+Configuration file successfully loaded
+Configuring Global Azure AD Endpoints
Initializing the Synchronization Engine ...
Office 365 Library Name Query: <your library name>
SiteName: <your library name>
@@ -14,10 +26,21 @@
URL: <your site URL>
```
-## Configuring the onedrive client
-Once you have obtained the 'drive_id' above, add to your 'onedrive' configuration file (`~/.config/onedrive/config`) the following:
+## Configure the client's config file with the required 'drive_id'
+4. Once you have obtained the 'drive_id' above, add to your 'onedrive' configuration file (`~/.config/onedrive/config`) the following:
```text
-drive_id = "insert the drive id from above here"
+drive_id = "insert the drive_id value from above here"
```
+The OneDrive client will now be configured to sync this SharePoint shared library to your local system.
+
+**Note:** After changing `drive_id`, you must perform a full re-synchronization by adding `--resync` to your existing command line.
+
+## Test the configuration using '--dry-run'
+5. Test your new configuration using the `--dry-run` option to validate the new configuration
+
+## Sync the SharePoint Library as required
+6. Sync the SharePoint Library to your system with either `--synchronize` or `--monitor` operations
+
-The OneDrive client will now sync this SharePoint shared library to your local system.
+# How to configure multiple OneDrive SharePoint Shared Library sync
+Refer to [./advanced-usage.md](advanced-usage.md) for configuration assistance.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/docs/USAGE.md new/onedrive-2.4.8/docs/USAGE.md
--- old/onedrive-2.4.7/docs/USAGE.md 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/docs/USAGE.md 2020-11-30 06:39:36.000000000 +0100
@@ -509,6 +509,20 @@
# sync_business_shared_folders = "false"
```
+### Configuring the client to use older 'skilion' application identifier
+In some instances it may be desirable to utilise the older 'skilion' application identifier to avoid authorising a new application ID within Microsoft Azure environments.
+To configure this, update the 'config' file with the old Application ID, then this will be used for the authentication process.
+```text
+# skip_dir_strict_match = "false"
+application_id = "22c49a0d-d21c-4792-aed1-8f163c982546"
+# resync = "false"
+# bypass_data_preservation = "false"
+```
+**Note:** The application will now use the older 'skilion' client identifier, however this may increase your chances of getting a OneDrive 429 error.
+
+**Note:** After changing the 'application_id' you will need to restart any 'onedrive' process you have running, and potentially issue a `--logout` to re-auth the client with this updated application ID.
+
+
### How to 'skip' directories from syncing?
There are several mechanisms available to 'skip' a directory from the sync process:
* Utilise 'skip_dir'
@@ -631,7 +645,7 @@
In some cases you may wish to receive GUI notifications when using the client when logged in as a non-root user. In this case, follow the directions below:
1. Login via graphical UI as user you wish to enable the service for
-2. Disable any `onedive@` service files for your username - eg:
+2. Disable any `onedrive@` service files for your username - eg:
```text
sudo systemctl stop onedrive(a)alex.service
sudo systemctl disable onedrive(a)alex.service
@@ -651,42 +665,7 @@
## Additional Configuration
### Using multiple OneDrive accounts
-You can run multiple instances of the application by specifying a different config directory in order to handle multiple OneDrive accounts. For example, if you have a work and a personal account, you can run the onedrive command using the --confdir parameter. Here is an example:
-
-```text
-onedrive --synchronize --verbose --confdir="~/.config/onedrivePersonal" &
-onedrive --synchronize --verbose --confdir="~/.config/onedriveWork" &
-```
-or
-```text
-onedrive --monitor --verbose --confdir="~/.config/onedrivePersonal" &
-onedrive --monitor --verbose --confdir="~/.config/onedriveWork" &
-```
-
-* `--synchronize` does a one-time sync
-* `--monitor` keeps the application running and monitoring for changes both local and remote
-* `&` puts the application in background and leaves the terminal interactive
-
-**Important:** For each configuration, change the 'sync_dir' to a new value, unique for each specific configuration. Leaving this at the default of `sync_dir = "~/OneDrive"` will cause all data from both accounts to be synced to the same folder, then to each other.
-
-### Automatic syncing of both OneDrive accounts
-In order to automatically start syncing your OneDrive accounts, you will need to create a service file for each account. From the applicable 'user systemd folder':
-* RHEL / CentOS: `/usr/lib/systemd/system`
-* Others: `/usr/lib/systemd/user`
-
-```text
-cp onedrive.service onedrive-work.service
-```
-And edit the line beginning with `ExecStart` so that the confdir mirrors the one you used above:
-```text
-ExecStart=/usr/local/bin/onedrive --monitor --confdir="/path/to/config/dir"
-```
-Then you can safely run these commands:
-```text
-systemctl --user enable onedrive-work
-systemctl --user start onedrive-work
-```
-Repeat these steps for each OneDrive account that you wish to use.
+Refer to [./advanced-usage.md](advanced-usage.md) for configuration assistance.
### Access OneDrive service through a proxy
If you have a requirement to run the client through a proxy, there are a couple of ways to achieve this:
@@ -725,7 +704,6 @@
```
## All available commands
-
Output of `onedrive --help`
```text
OneDrive - a client for OneDrive Cloud Services
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/docs/advanced-usage.md new/onedrive-2.4.8/docs/advanced-usage.md
--- old/onedrive-2.4.7/docs/advanced-usage.md 1970-01-01 01:00:00.000000000 +0100
+++ new/onedrive-2.4.8/docs/advanced-usage.md 2020-11-30 06:39:36.000000000 +0100
@@ -0,0 +1,139 @@
+# Advanced Configuration of the OneDrive Free Client
+This document covers the following scenarios:
+* Configuring the client to use multiple OneDrive accounts / configurations
+* Configuring the client for use in dual-boot (Windows / Linux) situations
+
+## Configuring the client to use multiple OneDrive accounts / configurations
+Essentially, each OneDrive account or SharePoint Shared Library which you require to be synced needs to have its own unique configuration, local sync directory and service files. To do this, the following steps are needed:
+1. Create a unique configuration folder for each onedrive client configuration that you need
+2. Copy to this folder a copy of the default configuration file
+3. Update the default configuration file as required, changing the required minimum config options and any additional options as needed to support your multi-account configuration
+4. Authenticate the client using the new configuration directory
+5. Test the configuration using '--display-config' and '--dry-run'
+6. Sync the OneDrive account data as required using `--synchronize` or `--monitor`
+7. Configure a unique systemd service file for this account configuration
+
+### 1. Create a unique configuration folder for each onedrive client configuration that you need
+Make the configuration folder as required for this new configuration, for example:
+```text
+mkdir ~/.config/my-new-config
+```
+
+### 2. Copy to this folder a copy of the default configuration file
+Copy to this folder a copy of the default configuration file by downloading this file from GitHub and saving this file in the directory created above:
+```text
+wget https://raw.githubusercontent.com/abraunegg/onedrive/master/config -O ~/.config/my-new-config/config
+```
+
+### 3. Update the default configuration file
+The following config options *must* be updated to ensure that individual account data is not cross populated with other OneDrive accounts or other configurations:
+* sync_dir
+
+Other options that may require to be updated, depending on the OneDrive account that is being configured:
+* drive_id
+* application_id
+* sync_business_shared_folders
+* skip_dir
+* skip_file
+* Creation of a 'sync_list' file if required
+* Creation of a 'business_shared_folders' file if required
+
+### 4. Authenticate the client
+Authenticate the client using the specific configuration file:
+```text
+onedrive --confdir="~/.config/my-new-config"
+```
+You will be asked to open a specific URL by using your web browser where you will have to login into your Microsoft Account and give the application the permission to access your files. After giving permission to the application, you will be redirected to a blank page. Copy the URI of the blank page into the application.
+```text
+[user@hostname ~]$ onedrive --confdir="~/.config/my-new-config"
+Configuration file successfully loaded
+Configuring Global Azure AD Endpoints
+Authorize this app visiting:
+
+https://.....
+
+Enter the response uri:
+
+```
+
+### 5. Display and Test the configuration
+Test the configuration using '--display-config' and '--dry-run'. By doing so, this allows you to test any configuration that you have currently made, enabling you to fix this configuration before using the configuration.
+
+#### Display the configuration
+```text
+onedrive --confdir="~/.config/my-new-config" --display-config
+```
+
+#### Test the configuration by performing a dry-run
+```text
+onedrive --confdir="~/.config/my-new-config" --synchronize --verbose --dry-run
+```
+
+If both of these operate as per your expectation, the configuration of this client setup is complete and validated. If not, amend your configuration as required.
+
+### 6. Sync the OneDrive account data as required
+Sync the data for the new account configuration as required:
+```text
+onedrive --confdir="~/.config/my-new-config" --synchronize --verbose
+```
+or
+```text
+onedrive --confdir="~/.config/my-new-config" --monitor --verbose
+```
+
+* `--synchronize` does a one-time sync
+* `--monitor` keeps the application running and monitoring for changes both local and remote
+
+### 7. Automatic syncing of new OneDrive configuration
+In order to automatically start syncing your OneDrive accounts, you will need to create a service file for each account. From the applicable 'systemd folder' where the applicable systemd service file exists:
+* RHEL / CentOS: `/usr/lib/systemd/system`
+* Others: `/usr/lib/systemd/user` and `/lib/systemd/system`
+
+**Note:** The `onedrive.service` runs the service as the 'root' user, whereas the `onedrive@.service` runs the service as your user account.
+
+Copy the required service file to a new name:
+```text
+cp onedrive.service onedrive-my-new-config.service
+```
+or
+```text
+cp onedrive@.service onedrive-my-new-config@.service
+```
+
+Edit the line beginning with `ExecStart` so that the confdir mirrors the one you used above:
+```text
+ExecStart=/usr/local/bin/onedrive --monitor --confdir="/full/path/to/config/dir"
+```
+
+Example:
+```text
+ExecStart=/usr/local/bin/onedrive --monitor --confdir="/home/myusername/.config/my-new-config"
+```
+
+Then you can safely run these commands:
+```text
+systemctl --user enable onedrive-my-new-config
+systemctl --user start onedrive-my-new-config
+```
+or
+```text
+systemctl --user enable onedrive-my-new-config(a)myusername.service
+systemctl --user start onedrive-my-new-config(a)myusername.service
+```
+
+Repeat these steps for each OneDrive new account that you wish to use.
+
+## Configuring the client for use in dual-boot (Windows / Linux) situations
+When dual booting Windows and Linux, depending on the Windows OneDrive account configuration, the 'Files On-Demand' option may be enabled when running OneDrive within your Windows environment.
+
+When this option is enabled in Windows, if you are sharing this location between your Windows and Linux systems, all files will be a 0 byte link, and cannot be used under Linux.
+
+To fix the problem of windows turning all files (that should be kept offline) into links, you have to uncheck a specific option in the onedrive settings window. The option in question is `Save space and download files as you use them`.
+
+To find this setting, open the onedrive pop-up window from the taskbar, click "Help & Settings" > "Settings". This opens a new window. Go to the tab "Settings" and look for the section "Files On-Demand".
+
+After unchecking the option and clicking "OK", the Windows OneDrive client should restart itself and start actually downloading your files so they will truly be available on your disk when offline. These files will then be fully accessible under Linux and the Linux OneDrive client.
+
+| OneDrive Personal | Onedrive Business<br>SharePoint |
+|---|---|
+| ![Uncheck-Personal](./images/personal-files-on-demand.png) | ![Uncheck-Business](./images/business-files-on-demand.png) |
Binary files old/onedrive-2.4.7/docs/images/SharedLibraries.jpg and new/onedrive-2.4.8/docs/images/SharedLibraries.jpg differ
Binary files old/onedrive-2.4.7/docs/images/business-files-on-demand.png and new/onedrive-2.4.8/docs/images/business-files-on-demand.png differ
Binary files old/onedrive-2.4.7/docs/images/personal-files-on-demand.png and new/onedrive-2.4.8/docs/images/personal-files-on-demand.png differ
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/onedrive.1.in new/onedrive-2.4.8/onedrive.1.in
--- old/onedrive-2.4.7/onedrive.1.in 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/onedrive.1.in 2020-11-30 06:39:36.000000000 +0100
@@ -241,13 +241,23 @@
Real-Time file monitoring with Inotify
+File upload / download validation to ensure data integrity
+
Resumable uploads
Support OneDrive for Business (part of Office 365)
-Shared folders (OneDrive Personal)
+Shared Folder support for OneDrive Personal and OneDrive Business accounts
+
+SharePoint / Office365 Shared Libraries
+
+Desktop notifications via libnotify
+
+Dry-run capability to test configuration changes
+
+Prevent major OneDrive accidental data deletion after configuration change
-SharePoint / Office 365 Group Drives (refer to README.Office365.md to configure)
+Support for National cloud deployments (Microsoft Cloud for US Government, Microsoft Cloud Germany, Azure and Office 365 operated by 21Vianet in China)
.SH CONFIGURATION
@@ -339,4 +349,8 @@
Further examples and documentation is available in
\f[C]README.md\f[]
-\f[C]README.Office365.md\f[]
+\f[C]docs/USAGE.md\f[]
+\f[C]docs/advanced-usage.md\f[]
+\f[C]docs/BusinessSharedFolders.md\f[]
+\f[C]docs/Office365.md\f[]
+\f[C]docs/national-cloud-deployments.md\f[]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/src/config.d new/onedrive-2.4.8/src/config.d
--- old/onedrive-2.4.7/src/config.d 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/src/config.d 2020-11-30 06:39:36.000000000 +0100
@@ -282,8 +282,6 @@
boolValues["monitor"] = false;
boolValues["synchronize"] = false;
boolValues["force"] = false;
- boolValues["remove_source_files"] = false;
- boolValues["skip_dir_strict_match"] = false;
boolValues["list_business_shared_folders"] = false;
// Application Startup option validation
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/src/main.d new/onedrive-2.4.8/src/main.d
--- old/onedrive-2.4.7/src/main.d 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/src/main.d 2020-11-30 06:39:36.000000000 +0100
@@ -509,6 +509,9 @@
writeln("Config option 'min_notify_changes' = ", cfg.getValueLong("min_notify_changes"));
writeln("Config option 'log_dir' = ", cfg.getValueString("log_dir"));
writeln("Config option 'classify_as_big_delete' = ", cfg.getValueLong("classify_as_big_delete"));
+ writeln("Config option 'upload_only' = ", cfg.getValueBool("upload_only"));
+ writeln("Config option 'no_remote_delete' = ", cfg.getValueBool("no_remote_delete"));
+ writeln("Config option 'remove_source_files' = ", cfg.getValueBool("remove_source_files"));
// Is config option drive_id configured?
if (cfg.getValueString("drive_id") != ""){
@@ -597,10 +600,17 @@
// was the application just authorised?
if (cfg.applicationAuthorizeResponseUri) {
// Application was just authorised
- log.log("\nApplication has been successfully authorised, however no additional command switches were provided.\n");
- log.log("Please use --help for further assistance in regards to running this application.\n");
- // Use exit scopes to shutdown API
- return EXIT_SUCCESS;
+ if (exists(cfg.refreshTokenFilePath)) {
+ // OneDrive refresh token exists
+ log.log("\nApplication has been successfully authorised, however no additional command switches were provided.\n");
+ log.log("Please use --help for further assistance in regards to running this application.\n");
+ // Use exit scopes to shutdown API
+ return EXIT_SUCCESS;
+ } else {
+ // we just authorised, but refresh_token does not exist .. probably an auth error
+ log.log("\nApplication has not been successfully authorised. Please check your URI response entry and try again.\n");
+ return EXIT_FAILURE;
+ }
} else {
// Application was not just authorised
log.log("\n--synchronize or --monitor switches missing from your command line input. Please add one (not both) of these switches to your command line or use --help for further assistance.\n");
@@ -655,6 +665,7 @@
// Attempt to create the sync dir we have been configured with
mkdirRecurse(syncDir);
// Configure the applicable permissions for the folder
+ log.vdebug("Setting directory permissions for: ", syncDir);
syncDir.setAttributes(cfg.returnRequiredDirectoryPermisions());
} catch (std.file.FileException e) {
// Creating the sync directory failed
@@ -765,14 +776,18 @@
// Do we need to configure specific --upload-only options?
if (cfg.getValueBool("upload_only")) {
// --upload-only was passed in or configured
+ log.vdebug("Configuring uploadOnly flag to TRUE as --upload-only passed in or configured");
+ sync.setUploadOnly();
// was --no-remote-delete passed in or configured
if (cfg.getValueBool("no_remote_delete")) {
// Configure the noRemoteDelete flag
+ log.vdebug("Configuring noRemoteDelete flag to TRUE as --no-remote-delete passed in or configured");
sync.setNoRemoteDelete();
}
// was --remove-source-files passed in or configured
if (cfg.getValueBool("remove_source_files")) {
// Configure the localDeleteAfterUpload flag
+ log.vdebug("Configuring localDeleteAfterUpload flag to TRUE as --remove-source-files passed in or configured");
sync.setLocalDeleteAfterUpload();
}
}
@@ -906,6 +921,7 @@
string singleDirectoryPath = cfg.getValueString("single_directory");
mkdirRecurse(singleDirectoryPath);
// Configure the applicable permissions for the folder
+ log.vdebug("Setting directory permissions for: ", singleDirectoryPath);
singleDirectoryPath.setAttributes(cfg.returnRequiredDirectoryPermisions());
}
}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/src/monitor.d new/onedrive-2.4.8/src/monitor.d
--- old/onedrive-2.4.7/src/monitor.d 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/src/monitor.d 2020-11-30 06:39:36.000000000 +0100
@@ -1,7 +1,8 @@
import core.sys.linux.sys.inotify;
import core.stdc.errno;
import core.sys.posix.poll, core.sys.posix.unistd;
-import std.exception, std.file, std.path, std.regex, std.stdio, std.string, std.algorithm.mutation;
+import std.exception, std.file, std.path, std.regex, std.stdio, std.string, std.algorithm;
+import core.stdc.stdlib;
import config;
import selective;
import util;
@@ -132,17 +133,43 @@
}
// passed all potential exclusions
+ // add inotify watch for this path / directory / file
+ log.vdebug("Calling add() for this dirname: ", dirname);
add(dirname);
- try {
- auto pathList = dirEntries(dirname, SpanMode.shallow, false);
- foreach(DirEntry entry; pathList) {
- if (entry.isDir) {
- addRecursive(entry.name);
+
+ // if this is a directory, recursivly add this path
+ if (isDir(dirname)) {
+ // try and get all the directory entities for this path
+ try {
+ auto pathList = dirEntries(dirname, SpanMode.shallow, false);
+ foreach(DirEntry entry; pathList) {
+ if (entry.isDir) {
+ log.vdebug("Calling addRecursive() for this directory: ", entry.name);
+ addRecursive(entry.name);
+ }
+ }
+ // catch any error which is generated
+ } catch (std.file.FileException e) {
+ // Standard filesystem error
+ displayFileSystemErrorMessage(e.msg);
+ return;
+ } catch (Exception e) {
+ // Issue #1154 handling
+ // Need to check for: Failed to stat file in error message
+ if (canFind(e.msg, "Failed to stat file")) {
+ // File system access issue
+ log.error("ERROR: The local file system returned an error with the following message:");
+ log.error(" Error Message: ", e.msg);
+ log.error("ACCESS ERROR: Please check your UID and GID access to this file, as the permissions on this file is preventing this application to read it");
+ log.error("\nFATAL: Exiting application to avoid deleting data due to local file system access issues\n");
+ // Must exit here
+ exit(-1);
+ } else {
+ // some other error
+ displayFileSystemErrorMessage(e.msg);
+ return;
}
}
- } catch (std.file.FileException e) {
- log.vdebug("ERROR: ", e.msg);
- return;
}
}
@@ -173,6 +200,7 @@
// Add path to inotify watch - required regardless if a '.folder' or 'folder'
wdToDirName[wd] = buildNormalizedPath(pathname) ~ "/";
+ log.vdebug("inotify_add_watch successfully added for: ", pathname);
// Do we log that we are monitoring this directory?
if (isDir(pathname)) {
@@ -360,4 +388,12 @@
}
}
}
+
+ // Parse and display error message received from the local file system
+ private void displayFileSystemErrorMessage(string message)
+ {
+ log.error("ERROR: The local file system returned an error with the following message:");
+ auto errorArray = splitLines(message);
+ log.error(" Error Message: ", errorArray[0]);
+ }
}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/src/onedrive.d new/onedrive-2.4.8/src/onedrive.d
--- old/onedrive-2.4.7/src/onedrive.d 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/src/onedrive.d 2020-11-30 06:39:36.000000000 +0100
@@ -15,6 +15,9 @@
private ulong retryAfterValue = 0;
private immutable {
+ // Client ID / Application ID (abraunegg)
+ string clientIdDefault = "d50ca740-c83f-4d1b-b616-12c519384f0c";
+
// Azure Active Directory & Graph Explorer Endpoints
// Global & Defaults
string globalAuthEndpoint = "https://login.microsoftonline.com";
@@ -38,8 +41,8 @@
}
private {
- // Client ID / Application ID (abraunegg)
- string clientId = "d50ca740-c83f-4d1b-b616-12c519384f0c";
+ // Client ID / Application ID
+ string clientId = clientIdDefault;
// Default User Agent configuration
string isvTag = "ISV";
@@ -138,6 +141,14 @@
.debugResponse = true;
}
+ // Update clientId if application_id is set in config file
+ if (cfg.getValueString("application_id") != "") {
+ // an application_id is set in config file
+ log.vdebug("Setting custom application_id to: " , cfg.getValueString("application_id"));
+ clientId = cfg.getValueString("application_id");
+ companyName = "custom_application";
+ }
+
// Configure tenant id value, if 'azure_tenant_id' is configured,
// otherwise use the "common" multiplexer
string tenantId = "common";
@@ -145,7 +156,7 @@
// Use the value entered by the user
tenantId = cfg.getValueString("azure_tenant_id");
}
-
+
// Configure Azure AD endpoints if 'azure_ad_endpoint' is configured
string azureConfigValue = cfg.getValueString("azure_ad_endpoint");
switch(azureConfigValue) {
@@ -164,8 +175,16 @@
log.log("Configuring Azure AD for US Government Endpoints");
// Authentication
authUrl = usl4AuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/v2.0/authorize";
- redirectUrl = usl4AuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/nativeclient";
tokenUrl = usl4AuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/v2.0/token";
+ if (clientId == clientIdDefault) {
+ // application_id == default
+ log.vdebug("USL4 AD Endpoint but default application_id, redirectUrl needs to be aligned to globalAuthEndpoint");
+ redirectUrl = globalAuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/nativeclient";
+ } else {
+ // custom application_id
+ redirectUrl = usl4AuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/nativeclient";
+ }
+
// Drive Queries
driveUrl = usl4GraphEndpoint ~ "/v1.0/me/drive";
driveByIdUrl = usl4GraphEndpoint ~ "/v1.0/drives/";
@@ -182,8 +201,16 @@
log.log("Configuring Azure AD for US Government Endpoints (DOD)");
// Authentication
authUrl = usl5AuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/v2.0/authorize";
- redirectUrl = usl5AuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/nativeclient";
tokenUrl = usl5AuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/v2.0/token";
+ if (clientId == clientIdDefault) {
+ // application_id == default
+ log.vdebug("USL5 AD Endpoint but default application_id, redirectUrl needs to be aligned to globalAuthEndpoint");
+ redirectUrl = globalAuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/nativeclient";
+ } else {
+ // custom application_id
+ redirectUrl = usl5AuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/nativeclient";
+ }
+
// Drive Queries
driveUrl = usl5GraphEndpoint ~ "/v1.0/me/drive";
driveByIdUrl = usl5GraphEndpoint ~ "/v1.0/drives/";
@@ -200,8 +227,16 @@
log.log("Configuring Azure AD Germany");
// Authentication
authUrl = deAuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/v2.0/authorize";
- redirectUrl = deAuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/nativeclient";
tokenUrl = deAuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/v2.0/token";
+ if (clientId == clientIdDefault) {
+ // application_id == default
+ log.vdebug("DE AD Endpoint but default application_id, redirectUrl needs to be aligned to globalAuthEndpoint");
+ redirectUrl = globalAuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/nativeclient";
+ } else {
+ // custom application_id
+ redirectUrl = deAuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/nativeclient";
+ }
+
// Drive Queries
driveUrl = deGraphEndpoint ~ "/v1.0/me/drive";
driveByIdUrl = deGraphEndpoint ~ "/v1.0/drives/";
@@ -218,8 +253,16 @@
log.log("Configuring AD China operated by 21Vianet");
// Authentication
authUrl = cnAuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/v2.0/authorize";
- redirectUrl = cnAuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/nativeclient";
tokenUrl = cnAuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/v2.0/token";
+ if (clientId == clientIdDefault) {
+ // application_id == default
+ log.vdebug("CN AD Endpoint but default application_id, redirectUrl needs to be aligned to globalAuthEndpoint");
+ redirectUrl = globalAuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/nativeclient";
+ } else {
+ // custom application_id
+ redirectUrl = cnAuthEndpoint ~ "/" ~ tenantId ~ "/oauth2/nativeclient";
+ }
+
// Drive Queries
driveUrl = cnGraphEndpoint ~ "/v1.0/me/drive";
driveByIdUrl = cnGraphEndpoint ~ "/v1.0/drives/";
@@ -289,13 +332,6 @@
bool init()
{
- // Update clientId if application_id is set in config file
- if (cfg.getValueString("application_id") != "") {
- // an application_id is set in config file
- clientId = cfg.getValueString("application_id");
- companyName = "custom_application";
- }
-
// detail what we are using for applicaion identification
log.vdebug("clientId = ", clientId);
log.vdebug("companyName = ", companyName);
@@ -493,15 +529,43 @@
{
checkAccessTokenExpired();
scope(failure) {
- if (exists(saveToPath)) remove(saveToPath);
+ if (exists(saveToPath)) {
+ // try and remove the file, catch error
+ try {
+ remove(saveToPath);
+ } catch (FileException e) {
+ // display the error message
+ displayFileSystemErrorMessage(e.msg);
+ }
+ }
}
- // Create the directory
+
+ // Create the required local directory
string newPath = dirName(saveToPath);
- mkdirRecurse(newPath);
- // Configure the applicable permissions for the folder
- newPath.setAttributes(cfg.returnRequiredDirectoryPermisions());
+
+ // Does the path exist locally?
+ if (!exists(newPath)) {
+ try {
+ log.vdebug("Requested path does not exist, creating directory structure: ", newPath);
+ mkdirRecurse(newPath);
+ // Configure the applicable permissions for the folder
+ log.vdebug("Setting directory permissions for: ", newPath);
+ newPath.setAttributes(cfg.returnRequiredDirectoryPermisions());
+ } catch (FileException e) {
+ // display the error message
+ displayFileSystemErrorMessage(e.msg);
+ }
+ }
+
const(char)[] url = driveByIdUrl ~ driveId ~ "/items/" ~ id ~ "/content?AVOverride=1";
+ // Download file
download(url, saveToPath, fileSize);
+ // Does path exist?
+ if (exists(saveToPath)) {
+ // File was downloaded sucessfully - configure the applicable permissions for the file
+ log.vdebug("Setting file permissions for: ", saveToPath);
+ saveToPath.setAttributes(cfg.returnRequiredFilePermisions());
+ }
}
// https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_…
@@ -728,6 +792,7 @@
try {
// try and update the refresh_token file
std.file.write(cfg.refreshTokenFilePath, refreshToken);
+ log.vdebug("Setting file permissions for: ", cfg.refreshTokenFilePath);
cfg.refreshTokenFilePath.setAttributes(cfg.returnRequiredFilePermisions());
} catch (FileException e) {
// display the error message
@@ -818,8 +883,6 @@
// close open file
file.close();
}
- // Configure the applicable permissions for the file
- filename.setAttributes(cfg.returnRequiredFilePermisions());
}
http.method = HTTP.Method.get;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/onedrive-2.4.7/src/sync.d new/onedrive-2.4.8/src/sync.d
--- old/onedrive-2.4.7/src/sync.d 2020-11-09 00:31:48.000000000 +0100
+++ new/onedrive-2.4.8/src/sync.d 2020-11-30 06:39:36.000000000 +0100
@@ -2006,25 +2006,31 @@
// - full path + combination of any above two - /path/name*.txt
// - full path to file - /path/to/file.txt
- // need to compute the full path for this file
- path = itemdb.computePath(item.driveId, item.parentId) ~ "/" ~ item.name;
-
- // The path that needs to be checked needs to include the '/'
- // This due to if the user has specified in skip_file an exclusive path: '/path/file' - that is what must be matched
- if (!startsWith(path, "/")){
- // Add '/' to the path
- path = '/' ~ path;
+ // is the parent id in the database?
+ if (itemdb.idInLocalDatabase(item.driveId, item.parentId)){
+ // need to compute the full path for this file
+ path = itemdb.computePath(item.driveId, item.parentId) ~ "/" ~ item.name;
+
+ // The path that needs to be checked needs to include the '/'
+ // This due to if the user has specified in skip_file an exclusive path: '/path/file' - that is what must be matched
+ if (!startsWith(path, "/")){
+ // Add '/' to the path
+ path = '/' ~ path;
+ }
+
+ log.vdebug("skip_file item to check: ", path);
+ unwanted = selectiveSync.isFileNameExcluded(path);
+ log.vdebug("Result: ", unwanted);
+ if (unwanted) log.vlog("Skipping item - excluded by skip_file config: ", item.name);
+ } else {
+ // parent id is not in the database
+ unwanted = true;
+ log.vlog("Skipping file - parent path not present in local database");
}
-
- log.vdebug("skip_file item to check: ", path);
- unwanted = selectiveSync.isFileNameExcluded(path);
- log.vdebug("Result: ", unwanted);
- if (unwanted) log.vlog("Skipping item - excluded by skip_file config: ", item.name);
}
}
// check the item type
-
if (!unwanted) {
if (isItemFile(driveItem)) {
log.vdebug("The item we are syncing is a file");
@@ -2238,6 +2244,14 @@
}
// What was the item that was saved
log.vdebug("item details: ", item);
+ } else {
+ // flag was tripped, which was it
+ if (downloadFailed) {
+ log.vdebug("Download or creation of local directory failed");
+ }
+ if (malwareDetected) {
+ log.vdebug("OneDrive reported that file contained malware");
+ }
}
}
@@ -2423,10 +2437,23 @@
}
if (!dryRun) {
- // Create the new directory
- mkdirRecurse(path);
- // Configure the applicable permissions for the folder
- path.setAttributes(cfg.returnRequiredDirectoryPermisions());
+ try {
+ // Does the path exist locally?
+ if (!exists(path)) {
+ // Create the new directory
+ log.vdebug("Requested path does not exist, creating directory structure: ", path);
+ mkdirRecurse(path);
+ // Configure the applicable permissions for the folder
+ log.vdebug("Setting directory permissions for: ", path);
+ path.setAttributes(cfg.returnRequiredDirectoryPermisions());
+ }
+ } catch (FileException e) {
+ // display the error message
+ displayFileSystemErrorMessage(e.msg);
+ // flag that this failed
+ downloadFailed = true;
+ return;
+ }
} else {
// we dont create the directory, but we need to track that we 'faked it'
idsFaked ~= [item.driveId, item.id];
@@ -2480,6 +2507,7 @@
// handle changed time
if (newItem.type == ItemType.file && oldItem.mtime != newItem.mtime) {
try {
+ log.vdebug("Calling setTimes() for this file: ", newPath);
setTimes(newPath, newItem.mtime, newItem.mtime);
} catch (FileException e) {
// display the error message
@@ -2565,7 +2593,6 @@
log.vdebug("onedrive.downloadById(item.driveId, item.id, path, fileSize); generated a OneDriveException");
// 408 = Request Time Out
// 429 = Too Many Requests - need to delay
-
if (e.httpStatusCode == 408) {
// 408 error handling - request time out
// https://github.com/abraunegg/onedrive/issues/694
@@ -2637,6 +2664,12 @@
}
}
}
+ } catch (FileException e) {
+ // There was a file system error
+ // display the error message
+ displayFileSystemErrorMessage(e.msg);
+ downloadFailed = true;
+ return;
} catch (std.exception.ErrnoException e) {
// There was a file system error
// display the error message
@@ -2655,6 +2688,7 @@
// downloaded matches either size or hash
log.vdebug("Downloaded file matches reported size and or reported file hash");
try {
+ log.vdebug("Calling setTimes() for this file: ", path);
setTimes(path, item.mtime, item.mtime);
} catch (FileException e) {
// display the error message
1
0
01 Dec '20
Hello community,
here is the log from the commit of package greybird-theme for openSUSE:Factory checked in at 2020-12-01 14:23:24
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/greybird-theme (Old)
and /work/SRC/openSUSE:Factory/.greybird-theme.new.5913 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "greybird-theme"
Tue Dec 1 14:23:24 2020 rev:17 rq:851942 version:3.22.12+git4.454f139
Changes:
--------
--- /work/SRC/openSUSE:Factory/greybird-theme/greybird-theme.changes 2020-02-24 15:55:42.579911018 +0100
+++ /work/SRC/openSUSE:Factory/.greybird-theme.new.5913/greybird-theme.changes 2020-12-01 14:23:37.193647406 +0100
@@ -1,0 +2,8 @@
+Sat Nov 28 13:15:25 UTC 2020 - tux93(a)opensuse.org
+
+- Update to version 3.22.12+git4.454f139:
+ * Rebase parts of the %linked code (Fixes #265)
+ * Fix Thunar CSD when not focused (Fixes #274)
+ * Fix GTK3 dark colors
+
+-------------------------------------------------------------------
Old:
----
Greybird-3.22.11+git4.320a703.tar.xz
New:
----
Greybird-3.22.12+git4.454f139.tar.xz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ greybird-theme.spec ++++++
--- /var/tmp/diff_new_pack.DnyUaq/_old 2020-12-01 14:23:37.877648146 +0100
+++ /var/tmp/diff_new_pack.DnyUaq/_new 2020-12-01 14:23:37.881648150 +0100
@@ -19,7 +19,7 @@
%define _name Greybird
Name: greybird-theme
-Version: 3.22.11+git4.320a703
+Version: 3.22.12+git4.454f139
Release: 0
URL: https://github.com/shimmerproject/Greybird
Summary: A grey theme for GNOME, XFCE, GTK+ 2 and 3
++++++ Greybird-3.22.11+git4.320a703.tar.xz -> Greybird-3.22.12+git4.454f139.tar.xz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Greybird-3.22.11+git4.320a703/light/gtk-3.0/_colors.scss new/Greybird-3.22.12+git4.454f139/light/gtk-3.0/_colors.scss
--- old/Greybird-3.22.11+git4.320a703/light/gtk-3.0/_colors.scss 2020-02-03 15:32:30.000000000 +0100
+++ new/Greybird-3.22.12+git4.454f139/light/gtk-3.0/_colors.scss 2020-11-26 06:55:50.000000000 +0100
@@ -2,9 +2,9 @@
// it gets @if ed depending on $variant
-$base_color: if($variant == 'light', #fcfcfc, #292929);
+$base_color: if($variant == 'light', #fcfcfc, #2d2e30);
$text_color: if($variant == 'light', #212121, white);
-$bg_color: if($variant == 'light', #cecece, #393f3f);
+$bg_color: if($variant == 'light', #cecece, #3b3e3f);
$fg_color: if($variant == 'light', #3c3c3c, #eeeeec);
$selected_fg_color: #ffffff;
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Greybird-3.22.11+git4.320a703/light/gtk-3.0/_common.scss new/Greybird-3.22.12+git4.454f139/light/gtk-3.0/_common.scss
--- old/Greybird-3.22.11+git4.320a703/light/gtk-3.0/_common.scss 2020-02-03 15:32:30.000000000 +0100
+++ new/Greybird-3.22.12+git4.454f139/light/gtk-3.0/_common.scss 2020-11-26 06:55:50.000000000 +0100
@@ -6,6 +6,7 @@
$asset_suffix: if($variant=='dark', '-dark', '');
$backdrop_transition: 200ms ease-out;
$button_transition: all 200ms $ease-out-quad;
+$button_radius: 3px;
* {
padding: 0;
@@ -869,65 +870,105 @@
}
// More inline toolbar buttons
-toolbar.inline-toolbar toolbutton,
-toolbar.inline-toolbar toolbutton:backdrop {
+toolbar.inline-toolbar toolbutton {
> button.flat { @extend %linked_middle; }
- &:first-child > button.flat { @extend %linked:first-child; }
+ &:first-child > button.flat { @extend %linked_left; }
- &:last-child > button.flat { @extend %linked:last-child; }
+ &:last-child > button.flat { @extend %linked_right; }
- &:only-child > button.flat { @extend %linked:only-child; }
+ &:only-child > button.flat { @extend %linked_only_child; }
}
%linked_middle {
- border-radius: 0;
border-right-style: none;
+ border-radius: 0;
+ -gtk-outline-radius: 0;
+}
+
+%linked_left {
+ border-top-left-radius: $button_radius;
+ border-bottom-left-radius: $button_radius;
+ -gtk-outline-top-left-radius: $button_radius;
+ -gtk-outline-bottom-left-radius: $button_radius;
+}
+
+%linked_right {
+ border-right-style: solid;
+ border-top-right-radius: $button_radius;
+ border-bottom-right-radius: $button_radius;
+ -gtk-outline-top-right-radius: $button_radius;
+ -gtk-outline-bottom-right-radius: $button_radius;
+}
+
+%linked_only_child {
+ border-style: solid;
+ border-radius: $button_radius;
+ -gtk-outline-radius: $button_radius;
}
+// .linked assumes Box, which reverses nodes in RTL, so 1st child is always left
%linked {
@extend %linked_middle;
- &:first-child {
- border-top-left-radius: 3px;
- border-bottom-left-radius: 3px;
- }
+ &:first-child { @extend %linked_left; }
+ &:last-child { @extend %linked_right; }
- &:last-child {
- border-top-right-radius: 3px;
- border-bottom-right-radius: 3px;
- border-right-style: solid;
+ &:only-child { @extend %linked_only_child; }
+}
+
+// Other widgets use widget child order, so 1st/last child are at text start/end
+%linked_flippable {
+ @extend %linked_middle;
+
+ &:dir(ltr) {
+ &:first-child { @extend %linked_left; }
+ &:last-child { @extend %linked_right; }
}
- &:only-child {
- border-radius: 3px;
- border-style: solid;
+ &:dir(rtl) {
+ &:first-child { @extend %linked_right; }
+ &:last-child { @extend %linked_left; }
}
+
+ &:only-child { @extend %linked_only_child; }
}
%linked_vertical_middle {
border-style: solid solid none solid;
border-radius: 0;
+ -gtk-outline-radius: 0;
+}
+
+%linked_vertical_top {
+ border-top-left-radius: $button_radius;
+ border-top-right-radius: $button_radius;
+ -gtk-outline-top-left-radius: $button_radius;
+ -gtk-outline-top-right-radius: $button_radius;
+}
+
+%linked_vertical_bottom {
+ border-bottom-style: solid;
+ border-bottom-left-radius: $button_radius;
+ border-bottom-right-radius: $button_radius;
+ -gtk-outline-bottom-left-radius: $button_radius;
+ -gtk-outline-bottom-right-radius: $button_radius;
}
-%linked_vertical{
+%linked_vertical_only_child {
+ border-style: solid;
+ border-radius: $button_radius;
+ -gtk-outline-radius: $button_radius;
+}
+
+%linked_vertical {
@extend %linked_vertical_middle;
- &:first-child {
- border-top-left-radius: 3px;
- border-top-right-radius: 3px;
- }
+ &:first-child { @extend %linked_vertical_top; }
- &:last-child {
- border-bottom-left-radius: 3px;
- border-bottom-right-radius: 3px;
- border-style: solid;
- }
+ &:last-child { @extend %linked_vertical_bottom; }
- &:only-child {
- border-radius: 3px;
- border-style: solid;
- }
+ &:only-child { @extend %linked_vertical_only_child; }
}
%undecorated_button {
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/Greybird-3.22.11+git4.320a703/light/gtk-3.0/_xfce.scss new/Greybird-3.22.12+git4.454f139/light/gtk-3.0/_xfce.scss
--- old/Greybird-3.22.11+git4.320a703/light/gtk-3.0/_xfce.scss 2020-02-03 15:32:30.000000000 +0100
+++ new/Greybird-3.22.12+git4.454f139/light/gtk-3.0/_xfce.scss 2020-11-26 06:55:50.000000000 +0100
@@ -149,7 +149,7 @@
/* Thunar's sidebar top border */
.thunar {
- :backdrop { color: $insensitive_fg_color; }
+ grid :backdrop { color: $insensitive_fg_color; }
toolbar {
border-bottom: none;
++++++ Greybird.obsinfo ++++++
--- /var/tmp/diff_new_pack.DnyUaq/_old 2020-12-01 14:23:39.945650382 +0100
+++ /var/tmp/diff_new_pack.DnyUaq/_new 2020-12-01 14:23:39.945650382 +0100
@@ -1,5 +1,5 @@
name: Greybird
-version: 3.22.11+git4.320a703
-mtime: 1580740350
-commit: 320a703a54056741da500382080affd63de737e6
+version: 3.22.12+git4.454f139
+mtime: 1606370150
+commit: 454f139017058975b800c40a5daf22e36136d322
++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.DnyUaq/_old 2020-12-01 14:23:39.969650409 +0100
+++ /var/tmp/diff_new_pack.DnyUaq/_new 2020-12-01 14:23:39.973650413 +0100
@@ -1,4 +1,4 @@
<servicedata>
<service name="tar_scm">
<param name="url">https://github.com/shimmerproject/Greybird.git</param>
- <param name="changesrevision">320a703a54056741da500382080affd63de737e6</param></service></servicedata>
\ No newline at end of file
+ <param name="changesrevision">454f139017058975b800c40a5daf22e36136d322</param></service></servicedata>
\ No newline at end of file
1
0
Hello community,
here is the log from the commit of package bspwm for openSUSE:Factory checked in at 2020-12-01 14:23:23
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/bspwm (Old)
and /work/SRC/openSUSE:Factory/.bspwm.new.5913 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "bspwm"
Tue Dec 1 14:23:23 2020 rev:10 rq:851948 version:0.9.10
Changes:
--------
--- /work/SRC/openSUSE:Factory/bspwm/bspwm.changes 2020-06-12 21:45:47.500544127 +0200
+++ /work/SRC/openSUSE:Factory/.bspwm.new.5913/bspwm.changes 2020-12-01 14:23:36.129646254 +0100
@@ -1,0 +2,26 @@
+Mon Nov 30 21:45:58 UTC 2020 - Avindra Goolcharan <avindra(a)opensuse.org>
+
+- Update to version 0.9.10:
+ + Additions
+ - New node descriptor: first_ancestor.
+ - New node modifiers: horizontal, vertical.
+ + Changes
+ - The node descriptors next and prev might now return any node.
+ The previous behavior can be emulated by appending .!hidden.window.
+ - The node descriptors pointed, biggest and smallest now return
+ leaves (in particular pointed will now return the id of a
+ pointed receptacle). The previous behavior can be emulated by
+ appending .window.
+ - The query command now handles all the possible descriptor-free
+ constraints (for example, query -N -d .active now works as
+ expected).
+ - The rules can now match against the window's names (WM_NAME).
+ - The configuration script now receives an argument to indicate
+ whether is was executed after a restart or not.
+ - The intermediate consequences passed to the external rules
+ command are now in resolved form to avoid unwanted
+ code execution.
+- Package config to /etc/.skel
+- Ran spec-cleaner
+
+-------------------------------------------------------------------
@@ -4,2 +30,7 @@
-- Bump to version 0.9.9
- * no upstream changelog
+- Update to version 0.9.9:
+ * Fix a memory allocation bug in the implementation of wm --restart.
+ * Honor single_monocle when the hidden flag is toggled.
+- includes 0.9.8:
+ * Fix a potential infinite loop.
+ * Fix two bugs having to do with single_monocle.
+ * Honor removal_adjustment for the spiral automatic insertion scheme.
Old:
----
bspwm-0.9.9.tar.gz
New:
----
bspwm-0.9.10.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ bspwm.spec ++++++
--- /var/tmp/diff_new_pack.bzbKk5/_old 2020-12-01 14:23:36.769646947 +0100
+++ /var/tmp/diff_new_pack.bzbKk5/_new 2020-12-01 14:23:36.773646951 +0100
@@ -18,7 +18,7 @@
Name: bspwm
-Version: 0.9.9
+Version: 0.9.10
Release: 0
Summary: A tiling window manager based on binary space partitioning
License: BSD-2-Clause
@@ -86,24 +86,23 @@
%build
export CPPFLAGS="%{optflags} -fcommon"
-make %{?_smp_mflags} V=1
+%make_build
%install
%make_install PREFIX=%{_prefix} DOCPREFIX=%{_docdir}/%{name}
-install -pm 644 %{SOURCE1} contrib/freedesktop/bspwm.desktop
+install -pm 644 %{SOURCE1} contrib/freedesktop/%{name}.desktop
install -D -p -m 644 examples/bspwmrc \
- %{buildroot}%{_sysconfdir}/skel/.config/bspwm/bspwmrc
+ %{buildroot}%{_sysconfdir}/skel/.bspwmrc
install -D -p -m 644 LICENSE %{buildroot}%{_docdir}/%{name}/LICENSE
%files
-%{_bindir}/bspwm
+%config(noreplace) %{_sysconfdir}/skel/.bspwmrc
+%{_bindir}/%{name}
%{_bindir}/bspc
%{_docdir}/%{name}
-%{_mandir}/man1/bspwm.1%{?ext_man}
+%{_mandir}/man1/%{name}.1%{?ext_man}
%{_mandir}/man1/bspc.1%{?ext_man}
-%{_datadir}/xsessions/bspwm.desktop
-%{_sysconfdir}/skel/.config/bspwm
-%config %{_sysconfdir}/skel/.config/bspwm/bspwmrc
+%{_datadir}/xsessions/%{name}.desktop
%files bash-completion
%{_datadir}/bash-completion
++++++ bspwm-0.9.9.tar.gz -> bspwm-0.9.10.tar.gz ++++++
++++ 1605 lines of diff (skipped)
1
0
Hello community,
here is the log from the commit of package volk for openSUSE:Factory checked in at 2020-12-01 14:23:20
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/volk (Old)
and /work/SRC/openSUSE:Factory/.volk.new.5913 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "volk"
Tue Dec 1 14:23:20 2020 rev:5 rq:851907 version:2.4.0
Changes:
--------
--- /work/SRC/openSUSE:Factory/volk/volk.changes 2020-05-11 13:44:36.781534451 +0200
+++ /work/SRC/openSUSE:Factory/.volk.new.5913/volk.changes 2020-12-01 14:23:33.761643692 +0100
@@ -1,0 +2,25 @@
+Mon Nov 30 11:31:35 UTC 2020 - Martin Hauke <mardnh(a)gmx.de>
+
+- Update to version 2.4.0
+ Documentation
+ * Update README to be more verbose and to improve usefulness.
+ CMake
+ * Enable to not install volk_modtool.
+ * Remove "find_package_handle_standard_args" warning.
+ cpu_features
+ * Use cpu_features v0.6.0 as a private submodule to detect
+ available CPU features.
+ * Fix incorrect feature detection for newer AVX versions.
+ * Circumvent platform specific feature detection.
+ * Enable more architecture specific kernels on more platforms.
+ Kernels
+ * Disable slow and broken SSE4.1 kernel in
+ volk_32fc_x2_dot_prod_32fc.
+ * Adjust min/max for 32f_s32f_convert_8i kernel
+ * Use INT8_* instead of CHAR_*
+- Upstream tarball does not have the cpu_features included so use
+ a service file to generate the tarball.
+- Add patch:
+ * volk-fix-cpu_features-compilation-error.patch
+
+-------------------------------------------------------------------
Old:
----
volk-2.3.0.tar.gz
New:
----
_service
volk-2.4.0.tar.xz
volk-fix-cpu_features-compilation-error.patch
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ volk.spec ++++++
--- /var/tmp/diff_new_pack.RO9X5B/_old 2020-12-01 14:23:34.249644221 +0100
+++ /var/tmp/diff_new_pack.RO9X5B/_new 2020-12-01 14:23:34.253644225 +0100
@@ -17,16 +17,16 @@
%global sonum 2
-%global soname 2_3
-
+%global soname 2_4
Name: volk
-Version: 2.3.0
+Version: 2.4.0
Release: 0
Summary: Vector-Optimized Library of Kernels
License: GPL-3.0-only
Group: Development/Libraries/C and C++
-URL: http://libvolk.org/
-Source: https://github.com/gnuradio/volk/archive/v%{version}.tar.gz#/%{name}-%{vers…
+URL: https://libvolk.org/
+Source: %{name}-%{version}.tar.xz
+Patch0: volk-fix-cpu_features-compilation-error.patch
BuildRequires: cmake
BuildRequires: fdupes
BuildRequires: gcc-c++
@@ -35,6 +35,7 @@
BuildRequires: orc
BuildRequires: python-rpm-macros
BuildRequires: python3-Mako
+Provides: bundled(cpu_features) = 0.6.0
%description
VOLK provides a library of vector-optimized kernels. It is a subproject
@@ -45,9 +46,9 @@
# Formerly part of gnuradio 3.7.x.y
Group: Development/Libraries/C and C++
Requires: libvolk%{soname} = %{version}
+Recommends: volk_modtool
Conflicts: gnuradio-devel < 3.8.0.0
Provides: gnuradio-devel:%{_libdir}/pkgconfig/volk.pc
-Recommends: volk_modtool
%description devel
This package provides the the development files for VOLK.
@@ -70,6 +71,7 @@
%prep
%setup -q
+%patch0 -p1
%build
%cmake
@@ -77,10 +79,15 @@
%install
%cmake_install
-
chmod -x %{buildroot}%{python3_sitearch}/volk_modtool/*py
sed -i -e '1 { \@.*/bin/env.*python.*@ d }' %{buildroot}%{python3_sitearch}/volk_modtool/*py
+# remove stuff from bundled cpu_features
+rm %{buildroot}%{_bindir}/list_cpu_features
+rm -R %{buildroot}%{_includedir}/cpu_features
+rm -R %{buildroot}%{_libdir}/cmake/CpuFeatures
+rm %{buildroot}%{_libdir}/libcpu_features.a
+
%fdupes %{buildroot}
%post -n libvolk%{soname} -p /sbin/ldconfig
++++++ _service ++++++
<services>
<service name="tar_scm" mode="disabled">
<param name="scm">git</param>
<param name="url">https://github.com/gnuradio/volk.git</param>
<param name="revision">v2.4.0</param>
<param name="versionformat">2.4.0</param>
</service>
<service name="recompress" mode="disabled">
<param name="file">*.tar</param>
<param name="compression">xz</param>
</service>
<service name="set_version" mode="disabled"/>
</services>
++++++ volk-fix-cpu_features-compilation-error.patch ++++++
diff --git a/cpu_features/src/utils/list_cpu_features.c b/cpu_features/src/utils/list_cpu_features.c
index c80ffc5..82a8e72 100644
--- a/cpu_features/src/utils/list_cpu_features.c
+++ b/cpu_features/src/utils/list_cpu_features.c
@@ -340,6 +340,7 @@ static Node* GetCacheTypeString(CacheType cache_type) {
case CPU_FEATURE_CACHE_PREFETCH:
return CreateConstantString("prefetch");
}
+ return 0;
}
static void AddCacheInfo(Node* root, const CacheInfo* cache_info) {
1
0
01 Dec '20
Hello community,
here is the log from the commit of package rtl8812au for openSUSE:Factory checked in at 2020-12-01 14:23:16
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/rtl8812au (Old)
and /work/SRC/openSUSE:Factory/.rtl8812au.new.5913 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "rtl8812au"
Tue Dec 1 14:23:16 2020 rev:10 rq:851878 version:5.6.4.2+git20200702.3110ad6
Changes:
--------
--- /work/SRC/openSUSE:Factory/rtl8812au/rtl8812au.changes 2020-07-06 16:37:06.280603439 +0200
+++ /work/SRC/openSUSE:Factory/.rtl8812au.new.5913/rtl8812au.changes 2020-12-01 14:23:32.657642498 +0100
@@ -1,0 +2,6 @@
+Sat Sep 12 21:30:55 UTC 2020 - Victor Kwan <vkwan8(a)uwo.ca>
+
+- Update to 5.6.4.2+git20200702.3110ad6
+ * Works with kernel 5.8rc3.
+
+-------------------------------------------------------------------
Old:
----
rtl8812au-5.6.4.2+git20200318.49e98ff.obscpio
New:
----
rtl8812au-5.6.4.2+git20200702.3110ad6.obscpio
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ rtl8812au.spec ++++++
--- /var/tmp/diff_new_pack.quqCb1/_old 2020-12-01 14:23:33.397643299 +0100
+++ /var/tmp/diff_new_pack.quqCb1/_new 2020-12-01 14:23:33.401643303 +0100
@@ -17,7 +17,7 @@
Name: rtl8812au
-Version: 5.6.4.2+git20200318.49e98ff
+Version: 5.6.4.2+git20200702.3110ad6
Release: 0
Summary: Kernel driver for Realtek 802.11ac 8812au wifi cards
License: GPL-2.0-only
++++++ rtl8812au-5.6.4.2+git20200318.49e98ff.obscpio -> rtl8812au-5.6.4.2+git20200702.3110ad6.obscpio ++++++
/work/SRC/openSUSE:Factory/rtl8812au/rtl8812au-5.6.4.2+git20200318.49e98ff.obscpio /work/SRC/openSUSE:Factory/.rtl8812au.new.5913/rtl8812au-5.6.4.2+git20200702.3110ad6.obscpio differ: char 49, line 1
++++++ rtl8812au.obsinfo ++++++
--- /var/tmp/diff_new_pack.quqCb1/_old 2020-12-01 14:23:33.501643412 +0100
+++ /var/tmp/diff_new_pack.quqCb1/_new 2020-12-01 14:23:33.501643412 +0100
@@ -1,5 +1,5 @@
name: rtl8812au
-version: 5.6.4.2+git20200318.49e98ff
-mtime: 1584541726
-commit: 49e98ff9bfdbe2ddce843808713de383132002e0
+version: 5.6.4.2+git20200702.3110ad6
+mtime: 1593718986
+commit: 3110ad65d0f03532bd97b1017cae67ca86dd34f6
1
0
01 Dec '20
Hello community,
here is the log from the commit of package python-particle for openSUSE:Factory checked in at 2020-12-01 14:23:15
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-particle (Old)
and /work/SRC/openSUSE:Factory/.python-particle.new.5913 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-particle"
Tue Dec 1 14:23:15 2020 rev:5 rq:851920 version:0.14.0
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-particle/python-particle.changes 2020-11-24 22:14:15.579594530 +0100
+++ /work/SRC/openSUSE:Factory/.python-particle.new.5913/python-particle.changes 2020-12-01 14:23:26.373635700 +0100
@@ -1,0 +2,14 @@
+Sun Nov 29 22:00:43 UTC 2020 - Atri Bhattacharya <badshah400(a)gmail.com>
+
+- Update to version 0.14.0:
+ * Particle class: Methods `Particle.to_list` and
+ `Particle.to_dict` enhanced.
+ * Data CSV files:
+ - Version 8 of package data files, with fixed parities for
+ antibaryons with undefined parity.
+ - Tests added to check if every particle is parsed and loaded
+ correctly.
+ * Miscellaneous: - Minor fix on static typing.
+- Update _service file to download tests at tag 0.14.0.
+
+-------------------------------------------------------------------
Old:
----
particle-0.13.1.tar.gz
New:
----
particle-0.14.0.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-particle.spec ++++++
--- /var/tmp/diff_new_pack.tOZVfQ/_old 2020-12-01 14:23:27.161636552 +0100
+++ /var/tmp/diff_new_pack.tOZVfQ/_new 2020-12-01 14:23:27.165636557 +0100
@@ -19,7 +19,7 @@
%global modname particle
%define skip_python2 1
Name: python-particle
-Version: 0.13.1
+Version: 0.14.0
Release: 0
Summary: PDG particle data and identification codes
License: BSD-3-Clause
++++++ _service ++++++
--- /var/tmp/diff_new_pack.tOZVfQ/_old 2020-12-01 14:23:27.205636600 +0100
+++ /var/tmp/diff_new_pack.tOZVfQ/_new 2020-12-01 14:23:27.205636600 +0100
@@ -2,7 +2,7 @@
<service name="tar_scm" mode="disabled">
<param name="url">https://github.com/scikit-hep/particle.git</param>
<param name="scm">git</param>
- <param name="revision">v0.13.1</param>
+ <param name="revision">v0.14.0</param>
<param name="subdir">tests</param>
<param name="version">_none_</param>
<param name="filename">tests</param>
++++++ particle-0.13.1.tar.gz -> particle-0.14.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/PKG-INFO new/particle-0.14.0/PKG-INFO
--- old/particle-0.13.1/PKG-INFO 2020-11-10 18:33:02.348700300 +0100
+++ new/particle-0.14.0/PKG-INFO 2020-11-26 15:32:47.617567500 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: particle
-Version: 0.13.1
+Version: 0.14.0
Summary: Extended PDG particle data and MC identification codes
Home-page: https://github.com/scikit-hep/particle
Author: Eduardo Rodrigues
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/setup.cfg new/particle-0.14.0/setup.cfg
--- old/particle-0.13.1/setup.cfg 2020-11-10 18:33:02.348700300 +0100
+++ new/particle-0.14.0/setup.cfg 2020-11-26 15:32:47.617567500 +0100
@@ -59,15 +59,18 @@
test =
pytest
pandas; python_version>"3.4"
+ tabulate
dev =
pytest
pandas; python_version>"3.4"
+ tabulate
check-manifest>=0.42
black==20.8b1
mypy==0.790
all =
pytest
pandas; python_version>"3.4"
+ tabulate
check-manifest>=0.42
black==20.8b1
mypy==0.790
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/data/conversions.csv new/particle-0.14.0/src/particle/data/conversions.csv
--- old/particle-0.13.1/src/particle/data/conversions.csv 2020-11-10 18:32:56.000000000 +0100
+++ new/particle-0.14.0/src/particle/data/conversions.csv 2020-11-26 15:32:38.000000000 +0100
@@ -1,4 +1,4 @@
-# (c) Scikit-HEP project - Particle package data file - conversions.csv - version 7 - 2020-08-17
+# (c) Scikit-HEP project - Particle package data file - conversions.csv - version 8 - 2020-11-23
PDGID, PYTHIAID, GEANT3ID, PDGNAME, EVTGENNAME, LATEXNAME
1, 1, 303, d, d, d
-1, -1, 304, d~, anti-d, \bar{d}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/data/nuclei2020.csv new/particle-0.14.0/src/particle/data/nuclei2020.csv
--- old/particle-0.13.1/src/particle/data/nuclei2020.csv 2020-11-10 18:32:56.000000000 +0100
+++ new/particle-0.14.0/src/particle/data/nuclei2020.csv 2020-11-26 15:32:38.000000000 +0100
@@ -1,4 +1,4 @@
-# (c) Scikit-HEP project - Particle package data file - nuclei2020.csv - version 7 - 2020-08-17
+# (c) Scikit-HEP project - Particle package data file - nuclei2020.csv - version 8 - 2020-11-23
ID,Mass,MassUpper,MassLower,Width,WidthUpper,WidthLower,I,G,P,C,Anti,Charge,Rank,Status,Name,Quarks,Latex
1000000010,939.565413,6e-06,6e-06,7.485e-25,5e-28,5e-28,1/2,5,1,5,1,0,4,0,n,udd,n
-1000000010,939.565413,6e-06,6e-06,7.485e-25,5e-28,5e-28,1/2,5,-1,5,1,0,4,0,n,UDD,\bar{n}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/data/particle2018.csv new/particle-0.14.0/src/particle/data/particle2018.csv
--- old/particle-0.13.1/src/particle/data/particle2018.csv 2020-11-10 18:32:56.000000000 +0100
+++ new/particle-0.14.0/src/particle/data/particle2018.csv 2020-11-26 15:32:38.000000000 +0100
@@ -1,4 +1,4 @@
-# (c) Scikit-HEP project - Particle package data file - particle2018.csv - version 7 - 2020-08-17
+# (c) Scikit-HEP project - Particle package data file - particle2018.csv - version 8 - 2020-11-23
ID,Mass,MassUpper,MassLower,Width,WidthUpper,WidthLower,I,G,P,C,Anti,Charge,Rank,Status,Name,Quarks,Latex
1,4.7,0.5,0.3,-1,-1,-1,1/2,5,1,5,1,-1,0,0,d,d,d
-1,4.7,0.5,0.3,-1,-1,-1,1/2,5,1,5,1,1,0,0,d,D,\bar{d}
@@ -526,9 +526,9 @@
100553,10023.26,0.31,0.31,0.032,0.0026,0.0026,0,-1,-1,-1,0,0,0,0,Upsilon(2S),bB,\Upsilon(2S)
100555,10268.6,0.5,0.5,-1,-1,-1,0,1,1,1,0,0,0,0,chi(b2)(2P),bB,\chi_{b2}(2P)
103316,1950,15,15,60,20,20,1/2,5,5,5,1,-3,3,1,Xi(1950),dss,\Xi(1950)^{-}
--103316,1950,15,15,60,20,20,1/2,5,-5,5,1,3,3,1,Xi(1950),DSS,\bar{\Xi}(1950)^{+}
+-103316,1950,15,15,60,20,20,1/2,5,5,5,1,3,3,1,Xi(1950),DSS,\bar{\Xi}(1950)^{+}
103326,1950,15,15,60,20,20,1/2,5,5,5,1,0,3,1,Xi(1950),uss,\Xi(1950)^{0}
--103326,1950,15,15,60,20,20,1/2,5,-5,5,1,0,3,1,Xi(1950),USS,\bar{\Xi}(1950)^{0}
+-103326,1950,15,15,60,20,20,1/2,5,5,5,1,0,3,1,Xi(1950),USS,\bar{\Xi}(1950)^{0}
104122,2628.11,0.19,0.19,-1,-1,-1,0,5,-1,5,1,3,3,0,Lambda(c)(2625),udc,\Lambda_{c}(2625)^{+}
-104122,2628.11,0.19,0.19,-1,-1,-1,0,5,1,5,1,-3,3,0,Lambda(c)(2625),UDC,\bar{\Lambda}_{c}(2625)^{-}
104312,2820.22,0.32,0.32,2.54,0.25,0.25,1/2,5,-1,5,1,0,3,0,Xi(c)(2815),dsc,\Xi_{c}(2815)^{0}
@@ -543,15 +543,15 @@
120553,10255.5,0.5,0.5,-1,-1,-1,0,1,1,1,0,0,0,0,chi(b1)(2P),bB,\chi_{b1}(2P)
200553,10355.2,0.5,0.5,0.0203,0.0019,0.0019,0,-1,-1,-1,0,0,0,0,Upsilon(3S),bB,\Upsilon(3S)
203312,1690,10,10,-1,-1,-1,1/2,5,5,5,1,-3,3,1,Xi(1690),dss,\Xi(1690)^{-}
--203312,1690,10,10,-1,-1,-1,1/2,5,-5,5,1,3,3,1,Xi(1690),DSS,\bar{\Xi}(1690)^{+}
+-203312,1690,10,10,-1,-1,-1,1/2,5,5,5,1,3,3,1,Xi(1690),DSS,\bar{\Xi}(1690)^{+}
203316,2025,5,5,20,15,5,1/2,5,5,5,1,-3,3,1,Xi(2030),dss,\Xi(2030)^{-}
--203316,2025,5,5,20,15,5,1/2,5,-5,5,1,3,3,1,Xi(2030),DSS,\bar{\Xi}(2030)^{+}
+-203316,2025,5,5,20,15,5,1/2,5,5,5,1,3,3,1,Xi(2030),DSS,\bar{\Xi}(2030)^{+}
203322,1690,10,10,-1,-1,-1,1/2,5,5,5,1,0,3,1,Xi(1690),uss,\Xi(1690)^{0}
--203322,1690,10,10,-1,-1,-1,1/2,5,-5,5,1,0,3,1,Xi(1690),USS,\bar{\Xi}(1690)^{0}
+-203322,1690,10,10,-1,-1,-1,1/2,5,5,5,1,0,3,1,Xi(1690),USS,\bar{\Xi}(1690)^{0}
203326,2025,5,5,20,15,5,1/2,5,5,5,1,0,3,1,Xi(2030),uss,\Xi(2030)^{0}
--203326,2025,5,5,20,15,5,1/2,5,-5,5,1,0,3,1,Xi(2030),USS,\bar{\Xi}(2030)^{0}
+-203326,2025,5,5,20,15,5,1/2,5,5,5,1,0,3,1,Xi(2030),USS,\bar{\Xi}(2030)^{0}
203338,2252,9,9,55,18,18,0,5,5,5,1,-3,3,0,Omega(2250),sss,\Omega(2250)^{-}
--203338,2252,9,9,55,18,18,0,5,-5,5,1,3,3,0,Omega(2250),SSS,\bar{\Omega}(2250)^{+}
+-203338,2252,9,9,55,18,18,0,5,5,5,1,3,3,0,Omega(2250),SSS,\bar{\Omega}(2250)^{+}
204126,2881.63,0.24,0.24,5.6,0.8,0.6,0,5,1,5,1,3,3,0,Lambda(c)(2880),udc,\Lambda_{c}(2880)^{+}
-204126,2881.63,0.24,0.24,5.6,0.8,0.6,0,5,-1,5,1,-3,3,0,Lambda(c)(2880),UDC,\bar{\Lambda}_{c}(2880)^{-}
300553,10579.4,1.2,1.2,20.5,2.5,2.5,0,-1,-1,-1,0,0,0,0,Upsilon(4S),bB,\Upsilon(4S)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/data/particle2019.csv new/particle-0.14.0/src/particle/data/particle2019.csv
--- old/particle-0.13.1/src/particle/data/particle2019.csv 2020-11-10 18:32:56.000000000 +0100
+++ new/particle-0.14.0/src/particle/data/particle2019.csv 2020-11-26 15:32:38.000000000 +0100
@@ -1,4 +1,4 @@
-# (c) Scikit-HEP project - Particle package data file - particle2019.csv - version 7 - 2020-08-17
+# (c) Scikit-HEP project - Particle package data file - particle2019.csv - version 8 - 2020-11-23
ID,Mass,MassUpper,MassLower,Width,WidthUpper,WidthLower,I,G,P,C,Anti,Charge,Rank,Status,Name,Quarks,Latex
1,4.67,0.5,0.2,-1,-1,-1,1/2,5,1,5,1,-1,0,0,d,d,d
-1,4.67,0.5,0.2,-1,-1,-1,1/2,5,1,5,1,1,0,0,d,D,\bar{d}
@@ -527,9 +527,9 @@
100553,10023.26,0.31,0.31,0.03198,0.0026,0.0026,0,-1,-1,-1,0,0,0,0,Upsilon(2S),bB,\Upsilon(2S)
100555,10268.65,0.2,0.5,-1,-1,-1,0,1,1,1,0,0,0,0,chi(b2)(2P),bB,\chi_{b2}(2P)
103316,1950,15,15,60,20,20,1/2,5,5,5,1,-3,3,1,Xi(1950),dss,\Xi(1950)^{-}
--103316,1950,15,15,60,20,20,1/2,5,-5,5,1,3,3,1,Xi(1950),DSS,\bar{\Xi}(1950)^{+}
+-103316,1950,15,15,60,20,20,1/2,5,5,5,1,3,3,1,Xi(1950),DSS,\bar{\Xi}(1950)^{+}
103326,1950,15,15,60,20,20,1/2,5,5,5,1,0,3,1,Xi(1950),uss,\Xi(1950)^{0}
--103326,1950,15,15,60,20,20,1/2,5,-5,5,1,0,3,1,Xi(1950),USS,\bar{\Xi}(1950)^{0}
+-103326,1950,15,15,60,20,20,1/2,5,5,5,1,0,3,1,Xi(1950),USS,\bar{\Xi}(1950)^{0}
104122,2628.11,0.19,0.19,-1,-1,-1,0,5,-1,5,1,3,3,0,Lambda(c)(2625),udc,\Lambda_{c}(2625)^{+}
-104122,2628.11,0.19,0.19,-1,-1,-1,0,5,1,5,1,-3,3,0,Lambda(c)(2625),UDC,\bar{\Lambda}_{c}(2625)^{-}
104312,2820.26,0.27,0.27,2.54,0.25,0.25,1/2,5,-1,5,1,0,3,0,Xi(c)(2815),dsc,\Xi_{c}(2815)^{0}
@@ -544,15 +544,15 @@
120553,10255.46,0.2,0.5,-1,-1,-1,0,1,1,1,0,0,0,0,chi(b1)(2P),bB,\chi_{b1}(2P)
200553,10355.2,0.5,0.5,0.02032,0.0019,0.0019,0,-1,-1,-1,0,0,0,0,Upsilon(3S),bB,\Upsilon(3S)
203312,1690,10,10,-1,-1,-1,1/2,5,5,5,1,-3,3,1,Xi(1690),dss,\Xi(1690)^{-}
--203312,1690,10,10,-1,-1,-1,1/2,5,-5,5,1,3,3,1,Xi(1690),DSS,\bar{\Xi}(1690)^{+}
+-203312,1690,10,10,-1,-1,-1,1/2,5,5,5,1,3,3,1,Xi(1690),DSS,\bar{\Xi}(1690)^{+}
203316,2025,5,5,20,15,5,1/2,5,5,5,1,-3,3,1,Xi(2030),dss,\Xi(2030)^{-}
--203316,2025,5,5,20,15,5,1/2,5,-5,5,1,3,3,1,Xi(2030),DSS,\bar{\Xi}(2030)^{+}
+-203316,2025,5,5,20,15,5,1/2,5,5,5,1,3,3,1,Xi(2030),DSS,\bar{\Xi}(2030)^{+}
203322,1690,10,10,-1,-1,-1,1/2,5,5,5,1,0,3,1,Xi(1690),uss,\Xi(1690)^{0}
--203322,1690,10,10,-1,-1,-1,1/2,5,-5,5,1,0,3,1,Xi(1690),USS,\bar{\Xi}(1690)^{0}
+-203322,1690,10,10,-1,-1,-1,1/2,5,5,5,1,0,3,1,Xi(1690),USS,\bar{\Xi}(1690)^{0}
203326,2025,5,5,20,15,5,1/2,5,5,5,1,0,3,1,Xi(2030),uss,\Xi(2030)^{0}
--203326,2025,5,5,20,15,5,1/2,5,-5,5,1,0,3,1,Xi(2030),USS,\bar{\Xi}(2030)^{0}
+-203326,2025,5,5,20,15,5,1/2,5,5,5,1,0,3,1,Xi(2030),USS,\bar{\Xi}(2030)^{0}
203338,2252,9,9,55,18,18,0,5,5,5,1,-3,3,0,Omega(2250),sss,\Omega(2250)^{-}
--203338,2252,9,9,55,18,18,0,5,-5,5,1,3,3,0,Omega(2250),SSS,\bar{\Omega}(2250)^{+}
+-203338,2252,9,9,55,18,18,0,5,5,5,1,3,3,0,Omega(2250),SSS,\bar{\Omega}(2250)^{+}
204126,2881.63,0.24,0.24,5.6,0.8,0.6,0,5,1,5,1,3,3,0,Lambda(c)(2880),udc,\Lambda_{c}(2880)^{+}
-204126,2881.63,0.24,0.24,5.6,0.8,0.6,0,5,-1,5,1,-3,3,0,Lambda(c)(2880),UDC,\bar{\Lambda}_{c}(2880)^{-}
300553,10579.4,1.2,1.2,20.5,2.5,2.5,0,-1,-1,-1,0,0,0,0,Upsilon(4S),bB,\Upsilon(4S)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/data/particle2020.csv new/particle-0.14.0/src/particle/data/particle2020.csv
--- old/particle-0.13.1/src/particle/data/particle2020.csv 2020-11-10 18:32:56.000000000 +0100
+++ new/particle-0.14.0/src/particle/data/particle2020.csv 2020-11-26 15:32:38.000000000 +0100
@@ -1,4 +1,4 @@
-# (c) Scikit-HEP project - Particle package data file - particle2020.csv - version 7 - 2020-08-17
+# (c) Scikit-HEP project - Particle package data file - particle2020.csv - version 8 - 2020-11-23
ID,Mass,MassUpper,MassLower,Width,WidthUpper,WidthLower,I,G,P,C,Anti,Charge,Rank,Status,Name,Quarks,Latex
1,4.67,0.5,0.2,-1,-1,-1,1/2,5,1,5,1,-1,0,0,d,d,d
-1,4.67,0.5,0.2,-1,-1,-1,1/2,5,1,5,1,1,0,0,d,D,\bar{d}
@@ -527,9 +527,9 @@
100553,10023.26,0.31,0.31,0.032,0.0026,0.0026,0,-1,-1,-1,0,0,0,0,Upsilon(2S),bB,\Upsilon(2S)
100555,10268.6,0.5,0.5,-1,-1,-1,0,1,1,1,0,0,0,0,chi(b2)(2P),bB,\chi_{b2}(2P)
103316,1950,15,15,60,20,20,1/2,5,5,5,1,-3,3,1,Xi(1950),dss,\Xi(1950)^{-}
--103316,1950,15,15,60,20,20,1/2,5,-5,5,1,3,3,1,Xi(1950),DSS,\bar{\Xi}(1950)^{+}
+-103316,1950,15,15,60,20,20,1/2,5,5,5,1,3,3,1,Xi(1950),DSS,\bar{\Xi}(1950)^{+}
103326,1950,15,15,60,20,20,1/2,5,5,5,1,0,3,1,Xi(1950),uss,\Xi(1950)^{0}
--103326,1950,15,15,60,20,20,1/2,5,-5,5,1,0,3,1,Xi(1950),USS,\bar{\Xi}(1950)^{0}
+-103326,1950,15,15,60,20,20,1/2,5,5,5,1,0,3,1,Xi(1950),USS,\bar{\Xi}(1950)^{0}
104122,2628.11,0.19,0.19,-1,-1,-1,0,5,-1,5,1,3,3,0,Lambda(c)(2625),udc,\Lambda_{c}(2625)^{+}
-104122,2628.11,0.19,0.19,-1,-1,-1,0,5,1,5,1,-3,3,0,Lambda(c)(2625),UDC,\bar{\Lambda}_{c}(2625)^{-}
104312,2820.25,0.25,0.31,2.54,0.25,0.25,1/2,5,-1,5,1,0,3,0,Xi(c)(2815),dsc,\Xi_{c}(2815)^{0}
@@ -544,15 +544,15 @@
120553,10255.5,0.5,0.5,-1,-1,-1,0,1,1,1,0,0,0,0,chi(b1)(2P),bB,\chi_{b1}(2P)
200553,10355.2,0.5,0.5,0.0203,0.0019,0.0019,0,-1,-1,-1,0,0,0,0,Upsilon(3S),bB,\Upsilon(3S)
203312,1690,10,10,-1,-1,-1,1/2,5,5,5,1,-3,3,1,Xi(1690),dss,\Xi(1690)^{-}
--203312,1690,10,10,-1,-1,-1,1/2,5,-5,5,1,3,3,1,Xi(1690),DSS,\bar{\Xi}(1690)^{+}
+-203312,1690,10,10,-1,-1,-1,1/2,5,5,5,1,3,3,1,Xi(1690),DSS,\bar{\Xi}(1690)^{+}
203316,2025,5,5,20,15,5,1/2,5,5,5,1,-3,3,1,Xi(2030),dss,\Xi(2030)^{-}
--203316,2025,5,5,20,15,5,1/2,5,-5,5,1,3,3,1,Xi(2030),DSS,\bar{\Xi}(2030)^{+}
+-203316,2025,5,5,20,15,5,1/2,5,5,5,1,3,3,1,Xi(2030),DSS,\bar{\Xi}(2030)^{+}
203322,1690,10,10,-1,-1,-1,1/2,5,5,5,1,0,3,1,Xi(1690),uss,\Xi(1690)^{0}
--203322,1690,10,10,-1,-1,-1,1/2,5,-5,5,1,0,3,1,Xi(1690),USS,\bar{\Xi}(1690)^{0}
+-203322,1690,10,10,-1,-1,-1,1/2,5,5,5,1,0,3,1,Xi(1690),USS,\bar{\Xi}(1690)^{0}
203326,2025,5,5,20,15,5,1/2,5,5,5,1,0,3,1,Xi(2030),uss,\Xi(2030)^{0}
--203326,2025,5,5,20,15,5,1/2,5,-5,5,1,0,3,1,Xi(2030),USS,\bar{\Xi}(2030)^{0}
+-203326,2025,5,5,20,15,5,1/2,5,5,5,1,0,3,1,Xi(2030),USS,\bar{\Xi}(2030)^{0}
203338,2252,9,9,55,18,18,0,5,5,5,1,-3,3,0,Omega(2250),sss,\Omega(2250)^{-}
--203338,2252,9,9,55,18,18,0,5,-5,5,1,3,3,0,Omega(2250),SSS,\bar{\Omega}(2250)^{+}
+-203338,2252,9,9,55,18,18,0,5,5,5,1,3,3,0,Omega(2250),SSS,\bar{\Omega}(2250)^{+}
204126,2881.63,0.24,0.24,5.6,0.8,0.6,0,5,1,5,1,3,3,0,Lambda(c)(2880),udc,\Lambda_{c}(2880)^{+}
-204126,2881.63,0.24,0.24,5.6,0.8,0.6,0,5,-1,5,1,-3,3,0,Lambda(c)(2880),UDC,\bar{\Lambda}_{c}(2880)^{-}
300553,10579.4,1.2,1.2,20.5,2.5,2.5,0,-1,-1,-1,0,0,0,0,Upsilon(4S),bB,\Upsilon(4S)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/data/pdgid_to_evtgenname.csv new/particle-0.14.0/src/particle/data/pdgid_to_evtgenname.csv
--- old/particle-0.13.1/src/particle/data/pdgid_to_evtgenname.csv 2020-11-10 18:32:56.000000000 +0100
+++ new/particle-0.14.0/src/particle/data/pdgid_to_evtgenname.csv 2020-11-26 15:32:38.000000000 +0100
@@ -1,4 +1,4 @@
-# (c) Scikit-HEP project - Particle package data file - pdgid_to_evtgenname.csv - version 7 - 2020-08-17
+# (c) Scikit-HEP project - Particle package data file - pdgid_to_evtgenname.csv - version 8 - 2020-11-23
PDGID,STR
1,d
-1,anti-d
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/data/pdgid_to_geant3id.csv new/particle-0.14.0/src/particle/data/pdgid_to_geant3id.csv
--- old/particle-0.13.1/src/particle/data/pdgid_to_geant3id.csv 2020-11-10 18:32:56.000000000 +0100
+++ new/particle-0.14.0/src/particle/data/pdgid_to_geant3id.csv 2020-11-26 15:32:38.000000000 +0100
@@ -1,4 +1,4 @@
-# (c) Scikit-HEP project - Particle package data file - pdgid_to_geant3id.csv - version 7 - 2020-08-17
+# (c) Scikit-HEP project - Particle package data file - pdgid_to_geant3id.csv - version 8 - 2020-11-23
PDGID,GEANT3ID
1,303
-1,304
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/data/pdgid_to_latexname.csv new/particle-0.14.0/src/particle/data/pdgid_to_latexname.csv
--- old/particle-0.13.1/src/particle/data/pdgid_to_latexname.csv 2020-11-10 18:32:56.000000000 +0100
+++ new/particle-0.14.0/src/particle/data/pdgid_to_latexname.csv 2020-11-26 15:32:38.000000000 +0100
@@ -1,4 +1,4 @@
-# (c) Scikit-HEP project - Particle package data file - pdgid_to_latexname.csv - version 7 - 2020-08-17
+# (c) Scikit-HEP project - Particle package data file - pdgid_to_latexname.csv - version 8 - 2020-11-23
PDGID,LATEXNAME
1,d
2,u
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/data/pdgid_to_pythiaid.csv new/particle-0.14.0/src/particle/data/pdgid_to_pythiaid.csv
--- old/particle-0.13.1/src/particle/data/pdgid_to_pythiaid.csv 2020-11-10 18:32:56.000000000 +0100
+++ new/particle-0.14.0/src/particle/data/pdgid_to_pythiaid.csv 2020-11-26 15:32:38.000000000 +0100
@@ -1,4 +1,4 @@
-# (c) Scikit-HEP project - Particle package data file - pdgid_to_pythiaid.csv - version 7 - 2020-08-17
+# (c) Scikit-HEP project - Particle package data file - pdgid_to_pythiaid.csv - version 8 - 2020-11-23
PDGID,PYTHIAID
1,1
-1,-1
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/particle/convert.py new/particle-0.14.0/src/particle/particle/convert.py
--- old/particle-0.13.1/src/particle/particle/convert.py 2020-11-10 18:32:56.000000000 +0100
+++ new/particle-0.14.0/src/particle/particle/convert.py 2020-11-26 15:32:38.000000000 +0100
@@ -212,8 +212,14 @@
)
# Parity flips for baryons
+ def is_baryon_with_defined_parity(i, p):
+ # type: (PDGID, Parity) -> bool
+ return is_baryon(i) and p != Parity.u
+
pdg_table_inv["P"] = np.where(
- pdg_table_inv.reset_index()["ID"].map(lambda x: is_baryon(x)),
+ pdg_table_inv.reset_index().apply(
+ lambda x: is_baryon_with_defined_parity(x["ID"], x["P"]), axis=1
+ ),
-pdg_table_inv["P"],
pdg_table_inv["P"],
)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/particle/enums.py new/particle-0.14.0/src/particle/particle/enums.py
--- old/particle-0.13.1/src/particle/particle/enums.py 2020-11-10 18:32:56.000000000 +0100
+++ new/particle-0.14.0/src/particle/particle/enums.py 2020-11-26 15:32:38.000000000 +0100
@@ -35,7 +35,6 @@
"""Enum representing a particle parity."""
p = 1
- o = 0
m = -1
u = 5
@@ -109,7 +108,6 @@
# Mappings that allow the above classes to be produced from text mappings
Parity_mapping = {
"+": Parity.p,
- "0": Parity.o,
"-": Parity.m,
"?": Parity.u,
"": Parity.u,
@@ -140,8 +138,8 @@
}
# Mappings that allow the above classes to be turned into text mappings
-Parity_undo = {Parity.p: "+", Parity.o: "0", Parity.m: "-", Parity.u: "None"}
-Parity_prog = {Parity.p: "p", Parity.o: "0", Parity.m: "m", Parity.u: "u"}
+Parity_undo = {Parity.p: "+", Parity.m: "-", Parity.u: "None"}
+Parity_prog = {Parity.p: "p", Parity.m: "m", Parity.u: "u"}
Charge_undo = {
Charge.pp: "++",
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/particle/particle.py new/particle-0.14.0/src/particle/particle/particle.py
--- old/particle-0.13.1/src/particle/particle/particle.py 2020-11-10 18:32:56.000000000 +0100
+++ new/particle-0.14.0/src/particle/particle/particle.py 2020-11-26 15:32:38.000000000 +0100
@@ -285,8 +285,10 @@
exclude_fields=(), # type: Iterable[str]
n_rows=-1, # type: int
filter_fn=None, # type: Optional[Callable[[Particle], bool]]
+ particle=None, # type: Optional[bool]
+ **search_terms # type: Any
):
- # type: (...) -> List[List[Any]]
+ # type: (...) -> List[List[Union[bool, int, str, float]]]
"""
Render a search (via `findall`) on the internal particle data CSV table
as a `list`, loading the table from the default location if no table has yet been loaded.
@@ -328,6 +330,11 @@
filter_fn: function, optional, default is None
Apply a filter to each particle.
See `findall(...)`` for typical use cases.
+ particle: bool, optional, default is None
+ Pass particle=True/False to only return particles or antiparticles.
+ Option passed on internally to `findall(...)``.
+ search_terms: keyword arguments, optional
+ See `findall(...)`` for typical use cases.
Returns
-------
@@ -342,33 +349,42 @@
--------
Reproduce the whole particle table kept internally:
- >>> Particle.to_list() # doctest: +SKIP
+ >>> query_as_list = Particle.to_list()
Reduce the information on the particle table to the only fields
['pdgid', 'pdg_name'] and render the first 5 particles:
>>> query_as_list = Particle.to_list(exclusive_fields=['pdgid', 'pdg_name'], n_rows=5)
>>> from tabulate import tabulate
- >>> print(tabulate(query_as_list, headers='firstrow')) # doctest: +SKIP
+ >>> print(tabulate(query_as_list, headers='firstrow'))
+ pdgid pdg_name
+ ------- ----------
+ 1 d
+ -1 d
+ 2 u
+ -2 u
+ 3 s
Request the properties of a specific list of particles:
- >>> query_as_list = Particle.to_list(filter_fn=lambda p: p.pdgid.is_lepton and p.charge!=0, exclusive_fields=['pdgid', 'name', 'mass', 'charge'])
-
+ >>> query_as_list = Particle.to_list(filter_fn=lambda p: p.pdgid.is_lepton and p.charge!=0, exclusive_fields=['pdgid', 'name', 'mass', 'charge'], particle=False)
>>> print(tabulate(query_as_list, headers='firstrow', tablefmt="rst", floatfmt=".12g", numalign="decimal"))
======= ====== =============== ========
pdgid name mass charge
======= ====== =============== ========
- 11 e- 0.5109989461 -1
-11 e+ 0.5109989461 1
- 13 mu- 105.6583745 -1
-13 mu+ 105.6583745 1
- 15 tau- 1776.86 -1
-15 tau+ 1776.86 1
- 17 tau'- -1
-17 tau'+ 1
======= ====== =============== ========
+ >>> query_as_list = Particle.to_list(filter_fn=lambda p: p.pdgid.is_lepton, pdg_name='tau', exclusive_fields=['pdgid', 'name', 'mass', 'charge'])
+ >>> print(tabulate(query_as_list, headers='firstrow'))
+ pdgid name mass charge
+ ------- ------ ------- --------
+ 15 tau- 1776.86 -1
+ -15 tau+ 1776.86 1
+
Save it to a file:
>>> with open('particles.txt', "w") as outfile: # doctest: +SKIP
@@ -396,7 +412,7 @@
# Apply a filter, if specified
if filter_fn is not None:
- tbl_all = cls.findall(filter_fn)
+ tbl_all = cls.findall(filter_fn, particle, **search_terms)
# In any case, only keep a given number of rows?
if n_rows >= 0:
@@ -412,11 +428,13 @@
@classmethod
def to_dict(cls, *args, **kwargs):
- # type: (Any, Any) -> Dict[List[str], List[Any]]
+ # type: (Any, Any) -> Dict[str, List[Union[str, float, bool, int]]]
"""
Render a search (via `findall`) on the internal particle data CSV table
as a `dict`, loading the table from the default location if no table has yet been loaded.
+ See `to_list` for details on the full function signature.
+
The returned attributes are those of the class. By default all attributes
are used as fields. Their complete list is:
pdgid
@@ -454,6 +472,11 @@
filter_fn: function, optional, default is None
Apply a filter to each particle.
See `findall(...)`` for typical use cases.
+ particle: bool, optional, default is None
+ Pass particle=True/False to only return particles or antiparticles.
+ Option passed on internally to `findall(...)``.
+ search_terms: keyword arguments, optional
+ See `findall(...)`` for typical use cases.
Returns
-------
@@ -468,7 +491,7 @@
--------
Reproduce the whole particle table kept internally:
- >>> Particle.to_dict() # doctest: +SKIP
+ >>> query_as_dict = Particle.to_dict()
Reduce the information on the particle table to the only fields
['pdgid', 'pdg_name'] and render the first 5 particles:
@@ -476,25 +499,34 @@
>>> query_as_dict = Particle.to_dict(exclusive_fields=['pdgid', 'pdg_name'], n_rows=5)
>>> from tabulate import tabulate # doctest: +SKIP
>>> print(tabulate(query_as_dict, headers='keys')) # doctest: +SKIP
+ pdgid pdg_name
+ ------- ----------
+ 1 d
+ -1 d
+ 2 u
+ -2 u
+ 3 s
Request the properties of a specific list of particles:
- >>> query_as_dict = Particle.to_dict(filter_fn=lambda p: p.pdgid.is_lepton and p.charge!=0, exclusive_fields=['pdgid', 'name', 'mass', 'charge'])
-
+ >>> query_as_dict = Particle.to_dict(filter_fn=lambda p: p.pdgid.is_lepton and p.charge!=0, exclusive_fields=['pdgid', 'name', 'mass', 'charge'], particle=True)
>>> print(tabulate(query_as_dict, headers='keys', tablefmt="rst", floatfmt=".12g", numalign="decimal")) # doctest: +SKIP
======= ====== =============== ========
pdgid name mass charge
======= ====== =============== ========
11 e- 0.5109989461 -1
- -11 e+ 0.5109989461 1
13 mu- 105.6583745 -1
- -13 mu+ 105.6583745 1
15 tau- 1776.86 -1
- -15 tau+ 1776.86 1
17 tau'- -1
- -17 tau'+ 1
======= ====== =============== ========
+ >>> query_as_dict = Particle.to_dict(filter_fn=lambda p: p.pdgid.is_lepton, pdg_name='tau', exclusive_fields=['pdgid', 'name', 'mass', 'charge'])
+ >>> print(tabulate(query_as_dict, headers='keys')) # doctest: +SKIP
+ pdgid name mass charge
+ ------- ------ ------- --------
+ 15 tau- 1776.86 -1
+ -15 tau+ 1776.86 1
+
Save it to a file:
>>> with open('particles.txt', "w") as outfile: # doctest: +SKIP
@@ -502,9 +534,10 @@
"""
query_as_list = cls.to_list(*args, **kwargs)
- return dict(
- zip(query_as_list[0], zip(*query_as_list[1:]))
- ) # dict(zip(keys, values))
+ keys = query_as_list[0]
+ values = query_as_list[1:]
+
+ return {str(key): value for key, value in zip(keys, values)}
@classmethod
def load_table(cls, filename=None, append=False, _name=None):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle/version.py new/particle-0.14.0/src/particle/version.py
--- old/particle-0.13.1/src/particle/version.py 2020-11-10 18:33:01.000000000 +0100
+++ new/particle-0.14.0/src/particle/version.py 2020-11-26 15:32:46.000000000 +0100
@@ -1,4 +1,4 @@
# coding: utf-8
# file generated by setuptools_scm
# don't change, don't track in version control
-version = '0.13.1'
+version = '0.14.0'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle.egg-info/PKG-INFO new/particle-0.14.0/src/particle.egg-info/PKG-INFO
--- old/particle-0.13.1/src/particle.egg-info/PKG-INFO 2020-11-10 18:33:01.000000000 +0100
+++ new/particle-0.14.0/src/particle.egg-info/PKG-INFO 2020-11-26 15:32:47.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: particle
-Version: 0.13.1
+Version: 0.14.0
Summary: Extended PDG particle data and MC identification codes
Home-page: https://github.com/scikit-hep/particle
Author: Eduardo Rodrigues
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/particle-0.13.1/src/particle.egg-info/requires.txt new/particle-0.14.0/src/particle.egg-info/requires.txt
--- old/particle-0.13.1/src/particle.egg-info/requires.txt 2020-11-10 18:33:01.000000000 +0100
+++ new/particle-0.14.0/src/particle.egg-info/requires.txt 2020-11-26 15:32:47.000000000 +0100
@@ -12,6 +12,7 @@
[all]
pytest
+tabulate
check-manifest>=0.42
black==20.8b1
mypy==0.790
@@ -21,6 +22,7 @@
[dev]
pytest
+tabulate
check-manifest>=0.42
black==20.8b1
mypy==0.790
@@ -30,6 +32,7 @@
[test]
pytest
+tabulate
[test:python_version > "3.4"]
pandas
++++++ tests.tar.xz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/tests/particle/test_particle.py new/tests/particle/test_particle.py
--- old/tests/particle/test_particle.py 2020-11-13 18:45:00.000000000 +0100
+++ new/tests/particle/test_particle.py 2020-11-26 15:17:38.000000000 +0100
@@ -248,29 +248,31 @@
"""
The parity quantum number is stored in the (curated) data CSV files.
For baryons the (intrinsic) parity flips sign for the antiparticle.
+ As for baryons with undefined parity, that of the antibaryon
+ is equally undefined, of course.
"""
pdgid = lambda p: p.pdgid
- pdgids_baryons = [
+ pdgids_baryons_defined_P = [
pdgid(b)
for b in Particle.findall(
lambda p: p.P != Parity.u and p.pdgid.is_baryon and p.pdgid > 0
)
]
- pdgids_antibaryons = [
+
+ pdgids_baryons_undefined_P = [
pdgid(b)
for b in Particle.findall(
- lambda p: p.P != Parity.u and p.pdgid.is_baryon and p.pdgid < 0
+ lambda p: p.P == Parity.u and p.pdgid.is_baryon and p.pdgid > 0
)
]
- for pdgid in pdgids_baryons:
- # Only consider checks on existing baryon-antibaryon pairs in the "DB"
- if not (-pdgid in pdgids_antibaryons):
- continue
-
+ for pdgid in pdgids_baryons_defined_P:
assert Particle.from_pdgid(pdgid).P == -Particle.from_pdgid(-pdgid).P
+ for pdgid in pdgids_baryons_undefined_P:
+ assert Particle.from_pdgid(pdgid).P == Particle.from_pdgid(-pdgid).P
+
def test_C_consistency():
"""
@@ -346,6 +348,21 @@
assert Particle.all() is not None
+def test_all_particles_are_loaded():
+ Particle.load_table(data.open_text(data, "particle2018.csv"))
+ assert len(Particle.all()) == 605
+ Particle.load_table(data.open_text(data, "particle2019.csv"))
+ assert len(Particle.all()) == 610
+ Particle.load_table(data.open_text(data, "particle2020.csv"))
+ assert len(Particle.all()) == 610
+
+ Particle.load_table(data.open_text(data, "nuclei2020.csv"))
+ assert len(Particle.all()) == 5880
+
+ # Load default table to restore global state
+ Particle.load_table()
+
+
checklist_html_name = (
(22, "γ"), # photon
(1, "d"), # d quark
1
0