openSUSE Commits
July 2021
Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package python-web.py for openSUSE:Factory checked in at 2021-07-01 07:05:41
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-web.py (Old)
and /work/SRC/openSUSE:Factory/.python-web.py.new.2625 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-web.py"
Thu Jul 1 07:05:41 2021 rev:14 rq:903207 version:0.62
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-web.py/python-web.py.changes 2020-11-10 13:53:27.962848553 +0100
+++ /work/SRC/openSUSE:Factory/.python-web.py.new.2625/python-web.py.changes 2021-07-01 07:05:56.299334657 +0200
@@ -1,0 +2,5 @@
+Wed Jun 30 10:44:53 UTC 2021 - Matej Cepl <mcepl(a)suse.com>
+
+- Skip ApplicationTest.test_routing test (reported upstream as gh#webpy/webpy#712)
+
+-------------------------------------------------------------------
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-web.py.spec ++++++
--- /var/tmp/diff_new_pack.r4XnRz/_old 2021-07-01 07:05:56.927329752 +0200
+++ /var/tmp/diff_new_pack.r4XnRz/_new 2021-07-01 07:05:56.931329720 +0200
@@ -1,7 +1,7 @@
#
# spec file for package python-web.py
#
-# Copyright (c) 2020 SUSE LLC
+# Copyright (c) 2021 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -23,7 +23,7 @@
Version: 0.62
Release: 0
Summary: web.py: makes web apps
-License: SUSE-Public-Domain AND BSD-3-Clause
+License: BSD-3-Clause AND SUSE-Public-Domain
URL: https://webpy.org/
Source: https://files.pythonhosted.org/packages/source/w/web.py/web.py-%{version}.t…
BuildRequires: %{python_module setuptools}
@@ -51,7 +51,8 @@
%python_expand %fdupes %{buildroot}%{$python_sitelib}
%check
-%pytest
+# gh#webpy/webpy#712
+%pytest -k 'not test_routing'
%files %{python_files}
%{python_sitelib}/*
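
The spec change above deselects the single failing test via pytest's -k expression. As a minimal illustration (not part of the package build), the same deselection can be done programmatically; this assumes the web.py sources and their test suite are available in the current directory:

# Sketch only: programmatic equivalent of `%pytest -k 'not test_routing'`.
# Runs the suite but skips any test whose name matches "test_routing"
# (the failure reported upstream as gh#webpy/webpy#712).
import sys
import pytest

sys.exit(pytest.main(["-k", "not test_routing"]))
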
Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package python-uamqp for openSUSE:Factory checked in at 2021-07-01 07:05:40
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-uamqp (Old)
and /work/SRC/openSUSE:Factory/.python-uamqp.new.2625 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-uamqp"
Thu Jul 1 07:05:40 2021 rev:13 rq:903197 version:1.4.1
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-uamqp/python-uamqp.changes 2021-05-07 17:15:15.248970767 +0200
+++ /work/SRC/openSUSE:Factory/.python-uamqp.new.2625/python-uamqp.changes 2021-07-01 07:05:53.739354654 +0200
@@ -1,0 +2,8 @@
+Tue Jun 29 12:15:05 UTC 2021 - John Paul Adrian Glaubitz <adrian.glaubitz(a)suse.com>
+
+- New upstream release
+ + Version 1.4.1
+ + For detailed information about changes see the
+ HISTORY.rst file provided with this package
+
+-------------------------------------------------------------------
Old:
----
uamqp-1.4.0.tar.gz
New:
----
uamqp-1.4.1.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-uamqp.spec ++++++
--- /var/tmp/diff_new_pack.LhYlDZ/_old 2021-07-01 07:05:54.335349999 +0200
+++ /var/tmp/diff_new_pack.LhYlDZ/_new 2021-07-01 07:05:54.339349968 +0200
@@ -21,7 +21,7 @@
%define skip_python2 1
%endif
Name: python-uamqp
-Version: 1.4.0
+Version: 1.4.1
Release: 0
Summary: AMQP 10 Client Library for Python
License: MIT
++++++ uamqp-1.4.0.tar.gz -> uamqp-1.4.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/uamqp-1.4.0/HISTORY.rst new/uamqp-1.4.1/HISTORY.rst
--- old/uamqp-1.4.0/HISTORY.rst 2021-05-03 15:12:29.000000000 +0200
+++ new/uamqp-1.4.1/HISTORY.rst 2021-06-28 18:25:08.000000000 +0200
@@ -3,6 +3,12 @@
Release History
===============
+1.4.1 (2021-06-28)
++++++++++++++++++++
+
+- Fixed bug that JWTTokenAuth and JWTTokenAsync do not initialize token for token types other than b'jwt'.
+- Fixed bug that attibutes `creation_time`, `absolute_expiry_time` and `group_sequence` on `MessageProperties` should be compatible with integer types on Python 2.7.
+
1.4.0 (2021-05-03)
+++++++++++++++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/uamqp-1.4.0/PKG-INFO new/uamqp-1.4.1/PKG-INFO
--- old/uamqp-1.4.0/PKG-INFO 2021-05-03 15:12:50.000000000 +0200
+++ new/uamqp-1.4.1/PKG-INFO 2021-06-28 18:25:29.000000000 +0200
@@ -1,6 +1,6 @@
Metadata-Version: 1.2
Name: uamqp
-Version: 1.4.0
+Version: 1.4.1
Summary: AMQP 1.0 Client Library for Python
Home-page: https://github.com/Azure/azure-uamqp-python
Author: Microsoft Corporation
@@ -127,6 +127,12 @@
Release History
===============
+ 1.4.1 (2021-06-28)
+ +++++++++++++++++++
+
+ - Fixed bug that JWTTokenAuth and JWTTokenAsync do not initialize token for token types other than b'jwt'.
+ - Fixed bug that attibutes `creation_time`, `absolute_expiry_time` and `group_sequence` on `MessageProperties` should be compatible with integer types on Python 2.7.
+
1.4.0 (2021-05-03)
+++++++++++++++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/uamqp-1.4.0/uamqp/__init__.py new/uamqp-1.4.1/uamqp/__init__.py
--- old/uamqp-1.4.0/uamqp/__init__.py 2021-05-03 15:12:29.000000000 +0200
+++ new/uamqp-1.4.1/uamqp/__init__.py 2021-06-28 18:25:09.000000000 +0200
@@ -35,7 +35,7 @@
pass # Async not supported.
-__version__ = "1.4.0"
+__version__ = "1.4.1"
_logger = logging.getLogger(__name__)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/uamqp-1.4.0/uamqp/authentication/cbs_auth.py new/uamqp-1.4.1/uamqp/authentication/cbs_auth.py
--- old/uamqp-1.4.0/uamqp/authentication/cbs_auth.py 2021-05-03 15:12:29.000000000 +0200
+++ new/uamqp-1.4.1/uamqp/authentication/cbs_auth.py 2021-06-28 18:25:09.000000000 +0200
@@ -62,9 +62,6 @@
self._connection = connection
self._session = Session(connection, **kwargs)
- if self.token_type == b'jwt': # Initialize the jwt token
- self.update_token()
-
try:
self._cbs_auth = c_uamqp.CBSTokenAuth(
self.audience,
@@ -421,6 +418,10 @@
self.sasl = _SASL()
self.set_io(self.hostname, port, http_proxy, transport_type)
+ def create_authenticator(self, connection, debug=False, **kwargs):
+ self.update_token()
+ return super(JWTTokenAuth, self).create_authenticator(connection, debug, **kwargs)
+
def update_token(self):
access_token = self.get_token()
self.expires_at = access_token.expires_on
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/uamqp-1.4.0/uamqp/authentication/cbs_auth_async.py new/uamqp-1.4.1/uamqp/authentication/cbs_auth_async.py
--- old/uamqp-1.4.0/uamqp/authentication/cbs_auth_async.py 2021-05-03 15:12:29.000000000 +0200
+++ new/uamqp-1.4.1/uamqp/authentication/cbs_auth_async.py 2021-06-28 18:25:09.000000000 +0200
@@ -54,9 +54,6 @@
self._connection = connection
self._session = SessionAsync(connection, loop=self.loop, **kwargs)
- if self.token_type == b'jwt': # Async initialize the jwt token
- await self.update_token()
-
try:
self._cbs_auth = c_uamqp.CBSTokenAuth(
self.audience,
@@ -278,6 +275,10 @@
self.sasl = _SASL()
self.set_io(self.hostname, port, http_proxy, transport_type)
+ async def create_authenticator_async(self, connection, debug=False, loop=None, **kwargs):
+ await self.update_token()
+ return await super(JWTTokenAsync, self).create_authenticator_async(connection, debug, loop, **kwargs)
+
async def update_token(self):
access_token = await self.get_token()
self.expires_at = access_token.expires_on
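
The two hunks above (cbs_auth.py and cbs_auth_async.py) move token initialization out of __init__, where it only ran for token_type == b'jwt', into create_authenticator()/create_authenticator_async(), so the token is initialized for every token type. A minimal, self-contained sketch of that pattern follows; the class names and return values are illustrative placeholders, not the real uamqp API:

# Illustrative sketch of the "refresh on authenticator creation" pattern; not uamqp code.
class _TokenAuthBase(object):
    def __init__(self, token_type):
        self.token_type = token_type
        self.token = None

    def create_authenticator(self, connection, debug=False, **kwargs):
        # In uamqp this is where the CBS authenticator would be constructed.
        return {"connection": connection, "token": self.token}


class _JWTTokenAuth(_TokenAuthBase):
    def update_token(self):
        self.token = b"fresh-token"  # stand-in for fetching a real access token

    def create_authenticator(self, connection, debug=False, **kwargs):
        self.update_token()  # now runs for every token type, not only b'jwt'
        return super(_JWTTokenAuth, self).create_authenticator(connection, debug, **kwargs)


auth = _JWTTokenAuth(token_type=b"servicebus.windows.net:sastoken")
assert auth.create_authenticator("conn")["token"] == b"fresh-token"
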
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/uamqp-1.4.0/uamqp/message.py new/uamqp-1.4.1/uamqp/message.py
--- old/uamqp-1.4.0/uamqp/message.py 2021-05-03 15:12:29.000000000 +0200
+++ new/uamqp-1.4.1/uamqp/message.py 2021-06-28 18:25:09.000000000 +0200
@@ -1003,7 +1003,7 @@
@absolute_expiry_time.setter
def absolute_expiry_time(self, value):
- if value is not None and not isinstance(value, int):
+ if value is not None and not isinstance(value, six.integer_types):
raise TypeError("absolute_expiry_time must be an integer.")
self._absolute_expiry_time = value
@@ -1013,7 +1013,7 @@
@creation_time.setter
def creation_time(self, value):
- if value is not None and not isinstance(value, int):
+ if value is not None and not isinstance(value, six.integer_types):
raise TypeError("creation_time must be an integer.")
self._creation_time = value
@@ -1035,7 +1035,7 @@
@group_sequence.setter
def group_sequence(self, value):
- if value is not None and not isinstance(value, int):
+ if value is not None and not isinstance(value, six.integer_types):
raise TypeError("group_sequence must be an integer.")
self._group_sequence = value
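
The three setter hunks above widen the type check from int to six.integer_types so that Python 2.7 long values are accepted as well as int. A standalone sketch of that check (the helper name is illustrative, not part of uamqp):

# Illustrative helper mirroring the MessageProperties setter pattern above.
import six  # six.integer_types is (int, long) on Python 2 and (int,) on Python 3

def _require_integer(value, name):
    if value is not None and not isinstance(value, six.integer_types):
        raise TypeError("%s must be an integer." % name)
    return value

_require_integer(10 ** 20, "creation_time")  # accepted on 2.7 (as long) and on 3.x (as int)
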
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/uamqp-1.4.0/uamqp.egg-info/PKG-INFO new/uamqp-1.4.1/uamqp.egg-info/PKG-INFO
--- old/uamqp-1.4.0/uamqp.egg-info/PKG-INFO 2021-05-03 15:12:50.000000000 +0200
+++ new/uamqp-1.4.1/uamqp.egg-info/PKG-INFO 2021-06-28 18:25:29.000000000 +0200
@@ -1,6 +1,6 @@
Metadata-Version: 1.2
Name: uamqp
-Version: 1.4.0
+Version: 1.4.1
Summary: AMQP 1.0 Client Library for Python
Home-page: https://github.com/Azure/azure-uamqp-python
Author: Microsoft Corporation
@@ -127,6 +127,12 @@
Release History
===============
+ 1.4.1 (2021-06-28)
+ +++++++++++++++++++
+
+ - Fixed bug that JWTTokenAuth and JWTTokenAsync do not initialize token for token types other than b'jwt'.
+ - Fixed bug that attibutes `creation_time`, `absolute_expiry_time` and `group_sequence` on `MessageProperties` should be compatible with integer types on Python 2.7.
+
1.4.0 (2021-05-03)
+++++++++++++++++++
Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package python-msal for openSUSE:Factory checked in at 2021-07-01 07:05:40
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-msal (Old)
and /work/SRC/openSUSE:Factory/.python-msal.new.2625 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-msal"
Thu Jul 1 07:05:40 2021 rev:8 rq:903198 version:1.12.0
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-msal/python-msal.changes 2021-04-12 15:49:43.449261928 +0200
+++ /work/SRC/openSUSE:Factory/.python-msal.new.2625/python-msal.changes 2021-07-01 07:05:55.447341313 +0200
@@ -1,0 +2,28 @@
+Tue Jun 29 12:40:07 UTC 2021 - John Paul Adrian Glaubitz <adrian.glaubitz(a)suse.com>
+
+- Update to version 1.12.0
+ + New feature: MSAL Python supports ConfidentialClientApplication(..., azure_region=...).
+ If your app is deployed in Azure, you can use this new feature to pin a region.
+ (#295, #358)
+ + New feature: Historically MSAL Python attempts to acquire a Refresh Token (RT) by
+ default. Since this version, MSAL Python supports ConfidentialClientApplication(...,
+ excluse_scopes=["offline_access"]) to opt out of RT (#207, #361)
+ + Improvement: acquire_token_interactive(...) can also trigger browser when
+ running inside WSL (8d86917)
+ + Adjustment: get_accounts(...) would automatically combine equivalent accounts,
+ so that your account selector widget could be easier to use (#349)
+ + Document: MSAL Python has long been accepting acquire_token_interactive(..., prompt="create"),
+ now we officially documented it. (#356, #360)
+- from version 1.11.0
+ + Enhancement: ConfidentialClientApplication also supports
+ acquire_token_by_username_password() now. (#294, #344)
+ + Enhancement: PublicClientApplication's acquire_token_interactive() also supports WSL Ubuntu
+ 18.04 (#332, #333)
+ + Enhancement: Enable a retry once behavior on connection error. (But this is only available
+ from the default http client. If your app supplies your customized http_client via MSAL
+ constructors, it is your http_client's job to decide whether retry.) (#326)
+ + Enhancement: MSAL improves the internal telemetry mechanism. (#137, #175, #329, #345)
+ + Bugfix: Better compatibility on handling SAML token when using
+ acquire_token_by_username_password() with ADFS. (#336)
+
+-------------------------------------------------------------------
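
As a minimal usage sketch of the two new constructor options called out in this update, azure_region (to pin a regional endpoint for acquire_token_for_client) and the opt-out list, which is spelled exclude_scopes in application.py: the client id, secret and scope below are placeholders, and a real run needs valid Azure AD credentials and network access.

# Placeholder credentials; shown only to illustrate the 1.11/1.12 options described above.
import msal

app = msal.ConfidentialClientApplication(
    "00000000-0000-0000-0000-000000000000",  # hypothetical client id
    client_credential="hypothetical-secret",
    azure_region="westus",                   # or ClientApplication.ATTEMPT_REGION_DISCOVERY; None keeps region behavior off
    exclude_scopes=["offline_access"],       # opt out of the Refresh Token
)
result = app.acquire_token_for_client(scopes=["https://graph.microsoft.com/.default"])
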
Old:
----
msal-1.10.0.tar.gz
New:
----
msal-1.12.0.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-msal.spec ++++++
--- /var/tmp/diff_new_pack.RSVMx7/_old 2021-07-01 07:05:55.879337938 +0200
+++ /var/tmp/diff_new_pack.RSVMx7/_new 2021-07-01 07:05:55.879337938 +0200
@@ -21,7 +21,7 @@
%define skip_python2 1
%endif
Name: python-msal
-Version: 1.10.0
+Version: 1.12.0
Release: 0
Summary: Microsoft Authentication Library (MSAL) for Python
License: MIT
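
The 1.11.0 changelog item above about retrying once on connection error corresponds to the default http client mounting a requests HTTPAdapter with max_retries=1 (visible later in the application.py diff). A small standalone sketch of that configuration; apps that pass their own http_client to the MSAL constructors keep full control over retries:

# Sketch of the "retry once" default session setup; not a drop-in for MSAL internals.
import requests

session = requests.Session()
adapter = requests.adapters.HTTPAdapter(max_retries=1)
session.mount("http://", adapter)
session.mount("https://", adapter)
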
++++++ msal-1.10.0.tar.gz -> msal-1.12.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/LICENSE new/msal-1.12.0/LICENSE
--- old/msal-1.10.0/LICENSE 1970-01-01 01:00:00.000000000 +0100
+++ new/msal-1.12.0/LICENSE 2021-05-19 22:28:05.000000000 +0200
@@ -0,0 +1,24 @@
+The MIT License (MIT)
+
+Copyright (c) Microsoft Corporation.
+All rights reserved.
+
+This code is licensed under the MIT License.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files(the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions :
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/PKG-INFO new/msal-1.12.0/PKG-INFO
--- old/msal-1.10.0/PKG-INFO 2021-03-08 21:46:32.805985200 +0100
+++ new/msal-1.12.0/PKG-INFO 2021-05-19 22:28:13.778312400 +0200
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: msal
-Version: 1.10.0
+Version: 1.12.0
Summary: The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect.
Home-page: https://github.com/AzureAD/microsoft-authentication-library-for-python
Author: Microsoft Corporation
@@ -21,8 +21,14 @@
Quick links:
- | [Getting Started](https://docs.microsoft.com/azure/active-directory/develop/quicksta… | [Docs](https://github.com/AzureAD/microsoft-authentication-library-for-pytho… | [Samples](https://aka.ms/aaddevsamplesv2) | [Support](README.md#community-help-and-support)
- | --- | --- | --- | --- |
+ | [Getting Started](https://docs.microsoft.com/azure/active-directory/develop/quicksta… | [Docs](https://github.com/AzureAD/microsoft-authentication-library-for-pytho… | [Samples](https://aka.ms/aaddevsamplesv2) | [Support](README.md#community-help-and-support) | [Feedback](https://forms.office.com/r/TMjZkDbzjY) |
+ | --- | --- | --- | --- | --- |
+
+ ## Scenarios supported
+
+ Click on the following thumbnail to visit a large map with clickable links to proper samples.
+
+ [![Map effect won't work inside github's markdown file, so we have to use a thumbnail here to lure audience to a real static website](docs/thumbnail.png)](https://msal-python.readthedocs.io/en/latest/)
## Installation
@@ -135,6 +141,9 @@
Here is the latest Q&A on Stack Overflow for MSAL:
[http://stackoverflow.com/questions/tagged/msal](http://stackoverflow.com/qu…
+ ## Submit Feedback
+ We'd like your thoughts on this library. Please complete [this short survey.](https://forms.office.com/r/TMjZkDbzjY)
+
## Security Reporting
If you find a security issue with our libraries or services please report it to [secure@microsoft.com](mailto:secure@microsoft.com) with as much detail as possible. Your submission may be eligible for a bounty through the [Microsoft Bounty](http://aka.ms/bugbounty) program. Please do not post security issues to GitHub Issues or any other public site. We will contact you shortly upon receiving the information. We encourage you to get notifications of when security incidents occur by visiting [this page](https://technet.microsoft.com/security/dd252948) and subscribing to Security Advisory Alerts.
@@ -153,11 +162,11 @@
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Description-Content-Type: text/markdown
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/README.md new/msal-1.12.0/README.md
--- old/msal-1.10.0/README.md 2021-03-08 21:46:19.000000000 +0100
+++ new/msal-1.12.0/README.md 2021-05-19 22:28:05.000000000 +0200
@@ -12,8 +12,14 @@
Quick links:
-| [Getting Started](https://docs.microsoft.com/azure/active-directory/develop/quicksta… | [Docs](https://github.com/AzureAD/microsoft-authentication-library-for-pytho… | [Samples](https://aka.ms/aaddevsamplesv2) | [Support](README.md#community-help-and-support)
-| --- | --- | --- | --- |
+| [Getting Started](https://docs.microsoft.com/azure/active-directory/develop/quicksta… | [Docs](https://github.com/AzureAD/microsoft-authentication-library-for-pytho… | [Samples](https://aka.ms/aaddevsamplesv2) | [Support](README.md#community-help-and-support) | [Feedback](https://forms.office.com/r/TMjZkDbzjY) |
+| --- | --- | --- | --- | --- |
+
+## Scenarios supported
+
+Click on the following thumbnail to visit a large map with clickable links to proper samples.
+
+[![Map effect won't work inside github's markdown file, so we have to use a thumbnail here to lure audience to a real static website](docs/thumbnail.png)](https://msal-python.readthedocs.io/en/latest/)
## Installation
@@ -126,6 +132,9 @@
Here is the latest Q&A on Stack Overflow for MSAL:
[http://stackoverflow.com/questions/tagged/msal](http://stackoverflow.com/qu…
+## Submit Feedback
+We'd like your thoughts on this library. Please complete [this short survey.](https://forms.office.com/r/TMjZkDbzjY)
+
## Security Reporting
If you find a security issue with our libraries or services please report it to [secure@microsoft.com](mailto:secure@microsoft.com) with as much detail as possible. Your submission may be eligible for a bounty through the [Microsoft Bounty](http://aka.ms/bugbounty) program. Please do not post security issues to GitHub Issues or any other public site. We will contact you shortly upon receiving the information. We encourage you to get notifications of when security incidents occur by visiting [this page](https://technet.microsoft.com/security/dd252948) and subscribing to Security Advisory Alerts.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/msal/__init__.py new/msal-1.12.0/msal/__init__.py
--- old/msal-1.10.0/msal/__init__.py 2021-03-08 21:46:19.000000000 +0100
+++ new/msal-1.12.0/msal/__init__.py 2021-05-19 22:28:05.000000000 +0200
@@ -31,5 +31,6 @@
ConfidentialClientApplication,
PublicClientApplication,
)
+from .oauth2cli.oidc import Prompt
from .token_cache import TokenCache, SerializableTokenCache
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/msal/application.py new/msal-1.12.0/msal/application.py
--- old/msal-1.10.0/msal/application.py 2021-03-08 21:46:19.000000000 +0100
+++ new/msal-1.12.0/msal/application.py 2021-05-19 22:28:05.000000000 +0200
@@ -8,7 +8,7 @@
import logging
import sys
import warnings
-import uuid
+from threading import Lock
import requests
@@ -18,52 +18,15 @@
from .wstrust_request import send_request as wst_send_request
from .wstrust_response import *
from .token_cache import TokenCache
+import msal.telemetry
+from .region import _detect_region
# The __init__.py will import this. Not the other way around.
-__version__ = "1.10.0"
+__version__ = "1.12.0"
logger = logging.getLogger(__name__)
-def decorate_scope(
- scopes, client_id,
- reserved_scope=frozenset(['openid', 'profile', 'offline_access'])):
- if not isinstance(scopes, (list, set, tuple)):
- raise ValueError("The input scopes should be a list, tuple, or set")
- scope_set = set(scopes) # Input scopes is typically a list. Copy it to a set.
- if scope_set & reserved_scope:
- # These scopes are reserved for the API to provide good experience.
- # We could make the developer pass these and then if they do they will
- # come back asking why they don't see refresh token or user information.
- raise ValueError(
- "API does not accept {} value as user-provided scopes".format(
- reserved_scope))
- if client_id in scope_set:
- if len(scope_set) > 1:
- # We make developers pass their client id, so that they can express
- # the intent that they want the token for themselves (their own
- # app).
- # If we do not restrict them to passing only client id then they
- # could write code where they expect an id token but end up getting
- # access_token.
- raise ValueError("Client Id can only be provided as a single scope")
- decorated = set(reserved_scope) # Make a writable copy
- else:
- decorated = scope_set | reserved_scope
- return list(decorated)
-
-CLIENT_REQUEST_ID = 'client-request-id'
-CLIENT_CURRENT_TELEMETRY = 'x-client-current-telemetry'
-
-def _get_new_correlation_id():
- correlation_id = str(uuid.uuid4())
- logger.debug("Generates correlation_id: %s", correlation_id)
- return correlation_id
-
-
-def _build_current_telemetry_request_header(public_api_id, force_refresh=False):
- return "1|{},{}|".format(public_api_id, "1" if force_refresh else "0")
-
def extract_certs(public_cert_content):
# Parses raw public certificate file contents and returns a list of strings
@@ -119,6 +82,8 @@
GET_ACCOUNTS_ID = "902"
REMOVE_ACCOUNT_ID = "903"
+ ATTEMPT_REGION_DISCOVERY = True # "TryAutoDetect"
+
def __init__(
self, client_id,
client_credential=None, authority=None, validate_authority=True,
@@ -126,12 +91,18 @@
http_client=None,
verify=True, proxies=None, timeout=None,
client_claims=None, app_name=None, app_version=None,
- client_capabilities=None):
+ client_capabilities=None,
+ azure_region=None, # Note: We choose to add this param in this base class,
+ # despite it is currently only needed by ConfidentialClientApplication.
+ # This way, it holds the same positional param place for PCA,
+ # when we would eventually want to add this feature to PCA in future.
+ exclude_scopes=None,
+ ):
"""Create an instance of application.
:param str client_id: Your app has a client_id after you register it on AAD.
- :param str client_credential:
+ :param Union[str, dict] client_credential:
For :class:`PublicClientApplication`, you simply use `None` here.
For :class:`ConfidentialClientApplication`,
it can be a string containing client secret,
@@ -187,7 +158,12 @@
By default, an in-memory cache will be created and used.
:param http_client: (optional)
Your implementation of abstract class HttpClient <msal.oauth2cli.http.http_client>
- Defaults to a requests session instance
+ Defaults to a requests session instance.
+ Since MSAL 1.11.0, the default session would be configured
+ to attempt one retry on connection error.
+ If you are providing your own http_client,
+ it will be your http_client's duty to decide whether to perform retry.
+
:param verify: (optional)
It will be passed to the
`verify parameter in the underlying requests library
@@ -226,11 +202,75 @@
MSAL will combine them into
`claims parameter <https://openid.net/specs/openid-connect-core-1_0-final.html#ClaimsParameter…
which you will later provide via one of the acquire-token request.
+
+ :param str azure_region:
+ Added since MSAL Python 1.12.0.
+
+ As of 2021 May, regional service is only available for
+ ``acquire_token_for_client()`` sent by any of the following scenarios::
+
+ 1. An app powered by a capable MSAL
+ (MSAL Python 1.12+ will be provisioned)
+
+ 2. An app with managed identity, which is formerly known as MSI.
+ (However MSAL Python does not support managed identity,
+ so this one does not apply.)
+
+ 3. An app authenticated by
+ `Subject Name/Issuer (SNI) <https://github.com/AzureAD/microsoft-authentication-library-for-python/issu…>`_.
+
+ 4. An app which already onboard to the region's allow-list.
+
+ MSAL's default value is None, which means region behavior remains off.
+ If enabled, the `acquire_token_for_client()`-relevant traffic
+ would remain inside that region.
+
+ App developer can opt in to a regional endpoint,
+ by provide its region name, such as "westus", "eastus2".
+ You can find a full list of regions by running
+ ``az account list-locations -o table``, or referencing to
+ `this doc <https://docs.microsoft.com/en-us/dotnet/api/microsoft.azure.management.reso…>`_.
+
+ An app running inside Azure Functions and Azure VM can use a special keyword
+ ``ClientApplication.ATTEMPT_REGION_DISCOVERY`` to auto-detect region.
+
+ .. note::
+
+ Setting ``azure_region`` to non-``None`` for an app running
+ outside of Azure Function/VM could hang indefinitely.
+
+ You should consider opting in/out region behavior on-demand,
+ by loading ``azure_region=None`` or ``azure_region="westus"``
+ or ``azure_region=True`` (which means opt-in and auto-detect)
+ from your per-deployment configuration, and then do
+ ``app = ConfidentialClientApplication(..., azure_region=azure_region)``.
+
+ Alternatively, you can configure a short timeout,
+ or provide a custom http_client which has a short timeout.
+ That way, the latency would be under your control,
+ but still less performant than opting out of region feature.
+ :param list[str] exclude_scopes: (optional)
+ Historically MSAL hardcodes `offline_access` scope,
+ which would allow your app to have prolonged access to user's data.
+ If that is unnecessary or undesirable for your app,
+ now you can use this parameter to supply an exclusion list of scopes,
+ such as ``exclude_scopes = ["offline_access"]``.
"""
self.client_id = client_id
self.client_credential = client_credential
self.client_claims = client_claims
self._client_capabilities = client_capabilities
+
+ if exclude_scopes and not isinstance(exclude_scopes, list):
+ raise ValueError(
+ "Invalid exclude_scopes={}. It need to be a list of strings.".format(
+ repr(exclude_scopes)))
+ self._exclude_scopes = frozenset(exclude_scopes or [])
+ if "openid" in self._exclude_scopes:
+ raise ValueError(
+ 'Invalid exclude_scopes={}. You can not opt out "openid" scope'.format(
+ repr(exclude_scopes)))
+
if http_client:
self.http_client = http_client
else:
@@ -241,15 +281,101 @@
# But you can patch that (https://github.com/psf/requests/issues/3341)
self.http_client.request = functools.partial(
self.http_client.request, timeout=timeout)
+
+ # Enable a minimal retry. Better than nothing.
+ # https://github.com/psf/requests/blob/v2.25.1/requests/adapters.py#L94-L108
+ a = requests.adapters.HTTPAdapter(max_retries=1)
+ self.http_client.mount("http://", a)
+ self.http_client.mount("https://", a)
+
self.app_name = app_name
self.app_version = app_version
- self.authority = Authority(
+
+ # Here the self.authority will not be the same type as authority in input
+ try:
+ self.authority = Authority(
authority or "https://login.microsoftonline.com/common/",
self.http_client, validate_authority=validate_authority)
- # Here the self.authority is not the same type as authority in input
+ except ValueError: # Those are explicit authority validation errors
+ raise
+ except Exception: # The rest are typically connection errors
+ if validate_authority and azure_region:
+ # Since caller opts in to use region, here we tolerate connection
+ # errors happened during authority validation at non-region endpoint
+ self.authority = Authority(
+ authority or "https://login.microsoftonline.com/common/",
+ self.http_client, validate_authority=False)
+ else:
+ raise
+
self.token_cache = token_cache or TokenCache()
- self.client = self._build_client(client_credential, self.authority)
+ self._region_configured = azure_region
+ self._region_detected = None
+ self.client, self._regional_client = self._build_client(
+ client_credential, self.authority)
self.authority_groups = None
+ self._telemetry_buffer = {}
+ self._telemetry_lock = Lock()
+
+ def _decorate_scope(
+ self, scopes,
+ reserved_scope=frozenset(['openid', 'profile', 'offline_access'])):
+ if not isinstance(scopes, (list, set, tuple)):
+ raise ValueError("The input scopes should be a list, tuple, or set")
+ scope_set = set(scopes) # Input scopes is typically a list. Copy it to a set.
+ if scope_set & reserved_scope:
+ # These scopes are reserved for the API to provide good experience.
+ # We could make the developer pass these and then if they do they will
+ # come back asking why they don't see refresh token or user information.
+ raise ValueError(
+ "API does not accept {} value as user-provided scopes".format(
+ reserved_scope))
+ if self.client_id in scope_set:
+ if len(scope_set) > 1:
+ # We make developers pass their client id, so that they can express
+ # the intent that they want the token for themselves (their own
+ # app).
+ # If we do not restrict them to passing only client id then they
+ # could write code where they expect an id token but end up getting
+ # access_token.
+ raise ValueError("Client Id can only be provided as a single scope")
+ decorated = set(reserved_scope) # Make a writable copy
+ else:
+ decorated = scope_set | reserved_scope
+ decorated -= self._exclude_scopes
+ return list(decorated)
+
+ def _build_telemetry_context(
+ self, api_id, correlation_id=None, refresh_reason=None):
+ return msal.telemetry._TelemetryContext(
+ self._telemetry_buffer, self._telemetry_lock, api_id,
+ correlation_id=correlation_id, refresh_reason=refresh_reason)
+
+ def _get_regional_authority(self, central_authority):
+ is_region_specified = bool(self._region_configured
+ and self._region_configured != self.ATTEMPT_REGION_DISCOVERY)
+ self._region_detected = self._region_detected or _detect_region(
+ self.http_client if self._region_configured is not None else None)
+ if (is_region_specified and self._region_configured != self._region_detected):
+ logger.warning('Region configured ({}) != region detected ({})'.format(
+ repr(self._region_configured), repr(self._region_detected)))
+ region_to_use = (
+ self._region_configured if is_region_specified else self._region_detected)
+ if region_to_use:
+ logger.info('Region to be used: {}'.format(repr(region_to_use)))
+ regional_host = ("{}.login.microsoft.com".format(region_to_use)
+ if central_authority.instance in (
+ # The list came from https://github.com/AzureAD/microsoft-authentication-library-for-python/pull…
+ "login.microsoftonline.com",
+ "login.windows.net",
+ "sts.windows.net",
+ )
+ else "{}.{}".format(region_to_use, central_authority.instance))
+ return Authority(
+ "https://{}/{}".format(regional_host, central_authority.tenant),
+ self.http_client,
+ validate_authority=False) # The central_authority has already been validated
+ return None
def _build_client(self, client_credential, authority):
client_assertion = None
@@ -289,15 +415,15 @@
client_assertion_type = Client.CLIENT_ASSERTION_TYPE_JWT
else:
default_body['client_secret'] = client_credential
- server_configuration = {
+ central_configuration = {
"authorization_endpoint": authority.authorization_endpoint,
"token_endpoint": authority.token_endpoint,
"device_authorization_endpoint":
authority.device_authorization_endpoint or
urljoin(authority.token_endpoint, "devicecode"),
}
- return Client(
- server_configuration,
+ central_client = Client(
+ central_configuration,
self.client_id,
http_client=self.http_client,
default_headers=default_headers,
@@ -309,6 +435,31 @@
on_removing_rt=self.token_cache.remove_rt,
on_updating_rt=self.token_cache.update_rt)
+ regional_client = None
+ if client_credential: # Currently regional endpoint only serves some CCA flows
+ regional_authority = self._get_regional_authority(authority)
+ if regional_authority:
+ regional_configuration = {
+ "authorization_endpoint": regional_authority.authorization_endpoint,
+ "token_endpoint": regional_authority.token_endpoint,
+ "device_authorization_endpoint":
+ regional_authority.device_authorization_endpoint or
+ urljoin(regional_authority.token_endpoint, "devicecode"),
+ }
+ regional_client = Client(
+ regional_configuration,
+ self.client_id,
+ http_client=self.http_client,
+ default_headers=default_headers,
+ default_body=default_body,
+ client_assertion=client_assertion,
+ client_assertion_type=client_assertion_type,
+ on_obtaining_tokens=lambda event: self.token_cache.add(dict(
+ event, environment=authority.instance)),
+ on_removing_rt=self.token_cache.remove_rt,
+ on_updating_rt=self.token_cache.update_rt)
+ return central_client, regional_client
+
def initiate_auth_code_flow(
self,
scopes, # type: list[str]
@@ -325,7 +476,7 @@
you can use :func:`~acquire_token_by_auth_code_flow()`
to complete the authentication/authorization.
- :param list scope:
+ :param list scopes:
It is a list of case-sensitive strings.
:param str redirect_uri:
Optional. If not specified, server will use the pre-registered one.
@@ -373,7 +524,7 @@
flow = client.initiate_auth_code_flow(
redirect_uri=redirect_uri, state=state, login_hint=login_hint,
prompt=prompt,
- scope=decorate_scope(scopes, self.client_id),
+ scope=self._decorate_scope(scopes),
domain_hint=domain_hint,
claims=_merge_claims_challenge_and_capabilities(
self._client_capabilities, claims_challenge),
@@ -455,7 +606,7 @@
response_type=response_type,
redirect_uri=redirect_uri, state=state, login_hint=login_hint,
prompt=prompt,
- scope=decorate_scope(scopes, self.client_id),
+ scope=self._decorate_scope(scopes),
nonce=nonce,
domain_hint=domain_hint,
claims=_merge_claims_challenge_and_capabilities(
@@ -513,21 +664,21 @@
return redirect(url_for("index"))
"""
self._validate_ssh_cert_input_data(kwargs.get("data", {}))
- return _clean_up(self.client.obtain_token_by_auth_code_flow(
+ telemetry_context = self._build_telemetry_context(
+ self.ACQUIRE_TOKEN_BY_AUTHORIZATION_CODE_ID)
+ response =_clean_up(self.client.obtain_token_by_auth_code_flow(
auth_code_flow,
auth_response,
- scope=decorate_scope(scopes, self.client_id) if scopes else None,
- headers={
- CLIENT_REQUEST_ID: _get_new_correlation_id(),
- CLIENT_CURRENT_TELEMETRY: _build_current_telemetry_request_header(
- self.ACQUIRE_TOKEN_BY_AUTHORIZATION_CODE_ID),
- },
+ scope=self._decorate_scope(scopes) if scopes else None,
+ headers=telemetry_context.generate_headers(),
data=dict(
kwargs.pop("data", {}),
claims=_merge_claims_challenge_and_capabilities(
self._client_capabilities,
auth_code_flow.pop("claims_challenge", None))),
**kwargs))
+ telemetry_context.update_telemetry(response)
+ return response
def acquire_token_by_authorization_code(
self,
@@ -586,20 +737,20 @@
"Change your acquire_token_by_authorization_code() "
"to acquire_token_by_auth_code_flow()", DeprecationWarning)
with warnings.catch_warnings(record=True):
- return _clean_up(self.client.obtain_token_by_authorization_code(
+ telemetry_context = self._build_telemetry_context(
+ self.ACQUIRE_TOKEN_BY_AUTHORIZATION_CODE_ID)
+ response = _clean_up(self.client.obtain_token_by_authorization_code(
code, redirect_uri=redirect_uri,
- scope=decorate_scope(scopes, self.client_id),
- headers={
- CLIENT_REQUEST_ID: _get_new_correlation_id(),
- CLIENT_CURRENT_TELEMETRY: _build_current_telemetry_request_header(
- self.ACQUIRE_TOKEN_BY_AUTHORIZATION_CODE_ID),
- },
+ scope=self._decorate_scope(scopes),
+ headers=telemetry_context.generate_headers(),
data=dict(
kwargs.pop("data", {}),
claims=_merge_claims_challenge_and_capabilities(
self._client_capabilities, claims_challenge)),
nonce=nonce,
**kwargs))
+ telemetry_context.update_telemetry(response)
+ return response
def get_accounts(self, username=None):
"""Get a list of accounts which previously signed in, i.e. exists in cache.
@@ -625,6 +776,13 @@
lowercase_username = username.lower()
accounts = [a for a in accounts
if a["username"].lower() == lowercase_username]
+ if not accounts:
+ logger.warning((
+ "get_accounts(username='{}') finds no account. "
+ "If tokens were acquired without 'profile' scope, "
+ "they would contain no username for filtering. "
+ "Consider calling get_accounts(username=None) instead."
+ ).format(username))
# Does not further filter by existing RTs here. It probably won't matter.
# Because in most cases Accounts and RTs co-exist.
# Even in the rare case when an RT is revoked and then removed,
@@ -633,10 +791,25 @@
return accounts
def _find_msal_accounts(self, environment):
- return [a for a in self.token_cache.find(
- TokenCache.CredentialType.ACCOUNT, query={"environment": environment})
+ grouped_accounts = {
+ a.get("home_account_id"): # Grouped by home tenant's id
+ { # These are minimal amount of non-tenant-specific account info
+ "home_account_id": a.get("home_account_id"),
+ "environment": a.get("environment"),
+ "username": a.get("username"),
+
+ # The following fields for backward compatibility, for now
+ "authority_type": a.get("authority_type"),
+ "local_account_id": a.get("local_account_id"), # Tenant-specific
+ "realm": a.get("realm"), # Tenant-specific
+ }
+ for a in self.token_cache.find(
+ TokenCache.CredentialType.ACCOUNT,
+ query={"environment": environment})
if a["authority_type"] in (
- TokenCache.AuthorityType.ADFS, TokenCache.AuthorityType.MSSTS)]
+ TokenCache.AuthorityType.ADFS, TokenCache.AuthorityType.MSSTS)
+ }
+ return list(grouped_accounts.values())
def _get_authority_aliases(self, instance):
if not self.authority_groups:
@@ -728,7 +901,7 @@
- None when cache lookup does not yield a token.
"""
result = self.acquire_token_silent_with_error(
- scopes, account, authority, force_refresh,
+ scopes, account, authority=authority, force_refresh=force_refresh,
claims_challenge=claims_challenge, **kwargs)
return result if result and "error" not in result else None
@@ -773,7 +946,7 @@
"""
assert isinstance(scopes, list), "Invalid parameter type"
self._validate_ssh_cert_input_data(kwargs.get("data", {}))
- correlation_id = _get_new_correlation_id()
+ correlation_id = msal.telemetry._get_new_correlation_id()
if authority:
warnings.warn("We haven't decided how/if this method will accept authority parameter")
# the_authority = Authority(
@@ -844,9 +1017,11 @@
target=scopes,
query=query)
now = time.time()
+ refresh_reason = msal.telemetry.AT_ABSENT
for entry in matches:
expires_in = int(entry["expires_on"]) - now
if expires_in < 5*60: # Then consider it expired
+ refresh_reason = msal.telemetry.AT_EXPIRED
continue # Removal is not necessary, it will be overwritten
logger.debug("Cache hit an AT")
access_token_from_cache = { # Mimic a real response
@@ -855,13 +1030,18 @@
"expires_in": int(expires_in), # OAuth2 specs defines it as int
}
if "refresh_on" in entry and int(entry["refresh_on"]) < now: # aging
+ refresh_reason = msal.telemetry.AT_AGING
break # With a fallback in hand, we break here to go refresh
+ self._build_telemetry_context(-1).hit_an_access_token()
return access_token_from_cache # It is still good as new
+ else:
+ refresh_reason = msal.telemetry.FORCE_REFRESH # TODO: It could also mean claims_challenge
+ assert refresh_reason, "It should have been established at this point"
try:
- result = self._acquire_token_silent_by_finding_rt_belongs_to_me_or_my_family(
- authority, decorate_scope(scopes, self.client_id), account,
- force_refresh=force_refresh, claims_challenge=claims_challenge, **kwargs)
- result = _clean_up(result)
+ result = _clean_up(self._acquire_token_silent_by_finding_rt_belongs_to_me_or_my_family(
+ authority, self._decorate_scope(scopes), account,
+ refresh_reason=refresh_reason, claims_challenge=claims_challenge,
+ **kwargs))
if (result and "error" not in result) or (not access_token_from_cache):
return result
except: # The exact HTTP exception is transportation-layer dependent
@@ -915,15 +1095,19 @@
def _acquire_token_silent_by_finding_specific_refresh_token(
self, authority, scopes, query,
rt_remover=None, break_condition=lambda response: False,
- force_refresh=False, correlation_id=None, claims_challenge=None, **kwargs):
+ refresh_reason=None, correlation_id=None, claims_challenge=None,
+ **kwargs):
matches = self.token_cache.find(
self.token_cache.CredentialType.REFRESH_TOKEN,
# target=scopes, # AAD RTs are scope-independent
query=query)
logger.debug("Found %d RTs matching %s", len(matches), query)
- client = self._build_client(self.client_credential, authority)
+ client, _ = self._build_client(self.client_credential, authority)
response = None # A distinguishable value to mean cache is empty
+ telemetry_context = self._build_telemetry_context(
+ self.ACQUIRE_TOKEN_SILENT_ID,
+ correlation_id=correlation_id, refresh_reason=refresh_reason)
for entry in sorted( # Since unfit RTs would not be aggressively removed,
# we start from newer RTs which are more likely fit.
matches,
@@ -941,16 +1125,13 @@
skip_account_creation=True, # To honor a concurrent remove_account()
)),
scope=scopes,
- headers={
- CLIENT_REQUEST_ID: correlation_id or _get_new_correlation_id(),
- CLIENT_CURRENT_TELEMETRY: _build_current_telemetry_request_header(
- self.ACQUIRE_TOKEN_SILENT_ID, force_refresh=force_refresh),
- },
+ headers=telemetry_context.generate_headers(),
data=dict(
kwargs.pop("data", {}),
claims=_merge_claims_challenge_and_capabilities(
self._client_capabilities, claims_challenge)),
**kwargs)
+ telemetry_context.update_telemetry(response)
if "error" not in response:
return response
logger.debug("Refresh failed. {error}: {error_description}".format(
@@ -999,18 +1180,110 @@
* A dict contains no "error" key means migration was successful.
"""
self._validate_ssh_cert_input_data(kwargs.get("data", {}))
- return _clean_up(self.client.obtain_token_by_refresh_token(
+ telemetry_context = self._build_telemetry_context(
+ self.ACQUIRE_TOKEN_BY_REFRESH_TOKEN,
+ refresh_reason=msal.telemetry.FORCE_REFRESH)
+ response = _clean_up(self.client.obtain_token_by_refresh_token(
refresh_token,
- scope=decorate_scope(scopes, self.client_id),
- headers={
- CLIENT_REQUEST_ID: _get_new_correlation_id(),
- CLIENT_CURRENT_TELEMETRY: _build_current_telemetry_request_header(
- self.ACQUIRE_TOKEN_BY_REFRESH_TOKEN),
- },
+ scope=self._decorate_scope(scopes),
+ headers=telemetry_context.generate_headers(),
rt_getter=lambda rt: rt,
on_updating_rt=False,
on_removing_rt=lambda rt_item: None, # No OP
**kwargs))
+ telemetry_context.update_telemetry(response)
+ return response
+
+ def acquire_token_by_username_password(
+ self, username, password, scopes, claims_challenge=None, **kwargs):
+ """Gets a token for a given resource via user credentials.
+
+ See this page for constraints of Username Password Flow.
+ https://github.com/AzureAD/microsoft-authentication-library-for-python/wiki…
+
+ :param str username: Typically a UPN in the form of an email address.
+ :param str password: The password.
+ :param list[str] scopes:
+ Scopes requested to access a protected API (a resource).
+ :param claims_challenge:
+ The claims_challenge parameter requests specific claims requested by the resource provider
+ in the form of a claims_challenge directive in the www-authenticate header to be
+ returned from the UserInfo Endpoint and/or in the ID Token and/or Access Token.
+ It is a string of a JSON object which contains lists of claims being requested from these locations.
+
+ :return: A dict representing the json response from AAD:
+
+ - A successful response would contain "access_token" key,
+ - an error response would contain "error" and usually "error_description".
+ """
+ scopes = self._decorate_scope(scopes)
+ telemetry_context = self._build_telemetry_context(
+ self.ACQUIRE_TOKEN_BY_USERNAME_PASSWORD_ID)
+ headers = telemetry_context.generate_headers()
+ data = dict(
+ kwargs.pop("data", {}),
+ claims=_merge_claims_challenge_and_capabilities(
+ self._client_capabilities, claims_challenge))
+ if not self.authority.is_adfs:
+ user_realm_result = self.authority.user_realm_discovery(
+ username, correlation_id=headers[msal.telemetry.CLIENT_REQUEST_ID])
+ if user_realm_result.get("account_type") == "Federated":
+ response = _clean_up(self._acquire_token_by_username_password_federated(
+ user_realm_result, username, password, scopes=scopes,
+ data=data,
+ headers=headers, **kwargs))
+ telemetry_context.update_telemetry(response)
+ return response
+ response = _clean_up(self.client.obtain_token_by_username_password(
+ username, password, scope=scopes,
+ headers=headers,
+ data=data,
+ **kwargs))
+ telemetry_context.update_telemetry(response)
+ return response
+
+ def _acquire_token_by_username_password_federated(
+ self, user_realm_result, username, password, scopes=None, **kwargs):
+ wstrust_endpoint = {}
+ if user_realm_result.get("federation_metadata_url"):
+ wstrust_endpoint = mex_send_request(
+ user_realm_result["federation_metadata_url"],
+ self.http_client)
+ if wstrust_endpoint is None:
+ raise ValueError("Unable to find wstrust endpoint from MEX. "
+ "This typically happens when attempting MSA accounts. "
+ "More details available here. "
+ "https://github.com/AzureAD/microsoft-authentication-library-for-python/wiki…")
+ logger.debug("wstrust_endpoint = %s", wstrust_endpoint)
+ wstrust_result = wst_send_request(
+ username, password,
+ user_realm_result.get("cloud_audience_urn", "urn:federation:MicrosoftOnline"),
+ wstrust_endpoint.get("address",
+ # Fallback to an AAD supplied endpoint
+ user_realm_result.get("federation_active_auth_url")),
+ wstrust_endpoint.get("action"), self.http_client)
+ if not ("token" in wstrust_result and "type" in wstrust_result):
+ raise RuntimeError("Unsuccessful RSTR. %s" % wstrust_result)
+ GRANT_TYPE_SAML1_1 = 'urn:ietf:params:oauth:grant-type:saml1_1-bearer'
+ grant_type = {
+ SAML_TOKEN_TYPE_V1: GRANT_TYPE_SAML1_1,
+ SAML_TOKEN_TYPE_V2: self.client.GRANT_TYPE_SAML2,
+ WSS_SAML_TOKEN_PROFILE_V1_1: GRANT_TYPE_SAML1_1,
+ WSS_SAML_TOKEN_PROFILE_V2: self.client.GRANT_TYPE_SAML2
+ }.get(wstrust_result.get("type"))
+ if not grant_type:
+ raise RuntimeError(
+ "RSTR returned unknown token type: %s", wstrust_result.get("type"))
+ self.client.grant_assertion_encoders.setdefault( # Register a non-standard type
+ grant_type, self.client.encode_saml_assertion)
+ return self.client.obtain_token_by_assertion(
+ wstrust_result["token"], grant_type, scope=scopes,
+ on_obtaining_tokens=lambda event: self.token_cache.add(dict(
+ event,
+ environment=self.authority.instance,
+ username=username, # Useful in case IDT contains no such info
+ )),
+ **kwargs)
class PublicClientApplication(ClientApplication): # browser app or mobile app
@@ -1039,7 +1312,7 @@
Prerequisite: In Azure Portal, configure the Redirect URI of your
"Mobile and Desktop application" as ``http://localhost``.
- :param list scope:
+ :param list scopes:
It is a list of case-sensitive strings.
:param str prompt:
By default, no prompt value will be sent, not even "none".
@@ -1080,15 +1353,16 @@
:return:
- A dict containing no "error" key,
- and typically contains an "access_token" key,
- if cache lookup succeeded.
+ and typically contains an "access_token" key.
- A dict containing an "error" key, when token refresh failed.
"""
self._validate_ssh_cert_input_data(kwargs.get("data", {}))
claims = _merge_claims_challenge_and_capabilities(
self._client_capabilities, claims_challenge)
- return _clean_up(self.client.obtain_token_by_browser(
- scope=decorate_scope(scopes, self.client_id) if scopes else None,
+ telemetry_context = self._build_telemetry_context(
+ self.ACQUIRE_TOKEN_INTERACTIVE)
+ response = _clean_up(self.client.obtain_token_by_browser(
+ scope=self._decorate_scope(scopes) if scopes else None,
extra_scope_to_consent=extra_scopes_to_consent,
redirect_uri="http://localhost:{port}".format(
# Hardcode the host, for now. AAD portal rejects 127.0.0.1 anyway
@@ -1101,12 +1375,10 @@
"domain_hint": domain_hint,
},
data=dict(kwargs.pop("data", {}), claims=claims),
- headers={
- CLIENT_REQUEST_ID: _get_new_correlation_id(),
- CLIENT_CURRENT_TELEMETRY: _build_current_telemetry_request_header(
- self.ACQUIRE_TOKEN_INTERACTIVE),
- },
+ headers=telemetry_context.generate_headers(),
**kwargs))
+ telemetry_context.update_telemetry(response)
+ return response
def initiate_device_flow(self, scopes=None, **kwargs):
"""Initiate a Device Flow instance,
@@ -1119,13 +1391,10 @@
- A successful response would contain "user_code" key, among others
- an error response would contain some other readable key/value pairs.
"""
- correlation_id = _get_new_correlation_id()
+ correlation_id = msal.telemetry._get_new_correlation_id()
flow = self.client.initiate_device_flow(
- scope=decorate_scope(scopes or [], self.client_id),
- headers={
- CLIENT_REQUEST_ID: correlation_id,
- # CLIENT_CURRENT_TELEMETRY is not currently required
- },
+ scope=self._decorate_scope(scopes or []),
+ headers={msal.telemetry.CLIENT_REQUEST_ID: correlation_id},
**kwargs)
flow[self.DEVICE_FLOW_CORRELATION_ID] = correlation_id
return flow
@@ -1149,7 +1418,10 @@
- A successful response would contain "access_token" key,
- an error response would contain "error" and usually "error_description".
"""
- return _clean_up(self.client.obtain_token_by_device_flow(
+ telemetry_context = self._build_telemetry_context(
+ self.ACQUIRE_TOKEN_BY_DEVICE_FLOW_ID,
+ correlation_id=flow.get(self.DEVICE_FLOW_CORRELATION_ID))
+ response = _clean_up(self.client.obtain_token_by_device_flow(
flow,
data=dict(
kwargs.pop("data", {}),
@@ -1159,96 +1431,10 @@
claims=_merge_claims_challenge_and_capabilities(
self._client_capabilities, claims_challenge),
),
- headers={
- CLIENT_REQUEST_ID:
- flow.get(self.DEVICE_FLOW_CORRELATION_ID) or _get_new_correlation_id(),
- CLIENT_CURRENT_TELEMETRY: _build_current_telemetry_request_header(
- self.ACQUIRE_TOKEN_BY_DEVICE_FLOW_ID),
- },
+ headers=telemetry_context.generate_headers(),
**kwargs))
-
- def acquire_token_by_username_password(
- self, username, password, scopes, claims_challenge=None, **kwargs):
- """Gets a token for a given resource via user credentials.
-
- See this page for constraints of Username Password Flow.
- https://github.com/AzureAD/microsoft-authentication-library-for-python/wiki…
-
- :param str username: Typically a UPN in the form of an email address.
- :param str password: The password.
- :param list[str] scopes:
- Scopes requested to access a protected API (a resource).
- :param claims_challenge:
- The claims_challenge parameter requests specific claims requested by the resource provider
- in the form of a claims_challenge directive in the www-authenticate header to be
- returned from the UserInfo Endpoint and/or in the ID Token and/or Access Token.
- It is a string of a JSON object which contains lists of claims being requested from these locations.
-
- :return: A dict representing the json response from AAD:
-
- - A successful response would contain "access_token" key,
- - an error response would contain "error" and usually "error_description".
- """
- scopes = decorate_scope(scopes, self.client_id)
- headers = {
- CLIENT_REQUEST_ID: _get_new_correlation_id(),
- CLIENT_CURRENT_TELEMETRY: _build_current_telemetry_request_header(
- self.ACQUIRE_TOKEN_BY_USERNAME_PASSWORD_ID),
- }
- data = dict(
- kwargs.pop("data", {}),
- claims=_merge_claims_challenge_and_capabilities(
- self._client_capabilities, claims_challenge))
- if not self.authority.is_adfs:
- user_realm_result = self.authority.user_realm_discovery(
- username, correlation_id=headers[CLIENT_REQUEST_ID])
- if user_realm_result.get("account_type") == "Federated":
- return _clean_up(self._acquire_token_by_username_password_federated(
- user_realm_result, username, password, scopes=scopes,
- data=data,
- headers=headers, **kwargs))
- return _clean_up(self.client.obtain_token_by_username_password(
- username, password, scope=scopes,
- headers=headers,
- data=data,
- **kwargs))
-
- def _acquire_token_by_username_password_federated(
- self, user_realm_result, username, password, scopes=None, **kwargs):
- wstrust_endpoint = {}
- if user_realm_result.get("federation_metadata_url"):
- wstrust_endpoint = mex_send_request(
- user_realm_result["federation_metadata_url"],
- self.http_client)
- if wstrust_endpoint is None:
- raise ValueError("Unable to find wstrust endpoint from MEX. "
- "This typically happens when attempting MSA accounts. "
- "More details available here. "
- "https://github.com/AzureAD/microsoft-authentication-library-for-python/wiki…")
- logger.debug("wstrust_endpoint = %s", wstrust_endpoint)
- wstrust_result = wst_send_request(
- username, password,
- user_realm_result.get("cloud_audience_urn", "urn:federation:MicrosoftOnline"),
- wstrust_endpoint.get("address",
- # Fallback to an AAD supplied endpoint
- user_realm_result.get("federation_active_auth_url")),
- wstrust_endpoint.get("action"), self.http_client)
- if not ("token" in wstrust_result and "type" in wstrust_result):
- raise RuntimeError("Unsuccessful RSTR. %s" % wstrust_result)
- GRANT_TYPE_SAML1_1 = 'urn:ietf:params:oauth:grant-type:saml1_1-bearer'
- grant_type = {
- SAML_TOKEN_TYPE_V1: GRANT_TYPE_SAML1_1,
- SAML_TOKEN_TYPE_V2: self.client.GRANT_TYPE_SAML2,
- WSS_SAML_TOKEN_PROFILE_V1_1: GRANT_TYPE_SAML1_1,
- WSS_SAML_TOKEN_PROFILE_V2: self.client.GRANT_TYPE_SAML2
- }.get(wstrust_result.get("type"))
- if not grant_type:
- raise RuntimeError(
- "RSTR returned unknown token type: %s", wstrust_result.get("type"))
- self.client.grant_assertion_encoders.setdefault( # Register a non-standard type
- grant_type, self.client.encode_saml_assertion)
- return self.client.obtain_token_by_assertion(
- wstrust_result["token"], grant_type, scope=scopes, **kwargs)
+ telemetry_context.update_telemetry(response)
+ return response
class ConfidentialClientApplication(ClientApplication): # server-side web app
@@ -1271,18 +1457,19 @@
"""
# TBD: force_refresh behavior
self._validate_ssh_cert_input_data(kwargs.get("data", {}))
- return _clean_up(self.client.obtain_token_for_client(
+ telemetry_context = self._build_telemetry_context(
+ self.ACQUIRE_TOKEN_FOR_CLIENT_ID)
+ client = self._regional_client or self.client
+ response = _clean_up(client.obtain_token_for_client(
scope=scopes, # This grant flow requires no scope decoration
- headers={
- CLIENT_REQUEST_ID: _get_new_correlation_id(),
- CLIENT_CURRENT_TELEMETRY: _build_current_telemetry_request_header(
- self.ACQUIRE_TOKEN_FOR_CLIENT_ID),
- },
+ headers=telemetry_context.generate_headers(),
data=dict(
kwargs.pop("data", {}),
claims=_merge_claims_challenge_and_capabilities(
self._client_capabilities, claims_challenge)),
**kwargs))
+ telemetry_context.update_telemetry(response)
+ return response
def acquire_token_on_behalf_of(self, user_assertion, scopes, claims_challenge=None, **kwargs):
"""Acquires token using on-behalf-of (OBO) flow.
@@ -1310,12 +1497,14 @@
- A successful response would contain "access_token" key,
- an error response would contain "error" and usually "error_description".
"""
+ telemetry_context = self._build_telemetry_context(
+ self.ACQUIRE_TOKEN_ON_BEHALF_OF_ID)
# The implementation is NOT based on Token Exchange
# https://tools.ietf.org/html/draft-ietf-oauth-token-exchange-16
- return _clean_up(self.client.obtain_token_by_assertion( # bases on assertion RFC 7521
+ response = _clean_up(self.client.obtain_token_by_assertion( # bases on assertion RFC 7521
user_assertion,
self.client.GRANT_TYPE_JWT, # IDTs and AAD ATs are all JWTs
- scope=decorate_scope(scopes, self.client_id), # Decoration is used for:
+ scope=self._decorate_scope(scopes), # Decoration is used for:
# 1. Explicitly requesting an RT, without relying on AAD default
# behavior, even though it currently still issues an RT.
# 2. Requesting an IDT (which would otherwise be unavailable)
@@ -1326,9 +1515,7 @@
requested_token_use="on_behalf_of",
claims=_merge_claims_challenge_and_capabilities(
self._client_capabilities, claims_challenge)),
- headers={
- CLIENT_REQUEST_ID: _get_new_correlation_id(),
- CLIENT_CURRENT_TELEMETRY: _build_current_telemetry_request_header(
- self.ACQUIRE_TOKEN_ON_BEHALF_OF_ID),
- },
+ headers=telemetry_context.generate_headers(),
**kwargs))
+ telemetry_context.update_telemetry(response)
+ return response
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/msal/oauth2cli/authcode.py new/msal-1.12.0/msal/oauth2cli/authcode.py
--- old/msal-1.10.0/msal/oauth2cli/authcode.py 2021-03-08 21:46:19.000000000 +0100
+++ new/msal-1.12.0/msal/oauth2cli/authcode.py 2021-05-19 22:28:05.000000000 +0200
@@ -33,9 +33,34 @@
).get("code")
+def is_wsl():
+ # "Official" way of detecting WSL: https://github.com/Microsoft/WSL/issues/423#issuecomment-221627364
+ # Run `uname -a` to get 'release' without python
+ # - WSL 1: '4.4.0-19041-Microsoft'
+ # - WSL 2: '4.19.128-microsoft-standard'
+ import platform
+ uname = platform.uname()
+ platform_name = getattr(uname, 'system', uname[0]).lower()
+ release = getattr(uname, 'release', uname[2]).lower()
+ return platform_name == 'linux' and 'microsoft' in release
+
+
def _browse(auth_uri): # throws ImportError, possibly webbrowser.Error in future
import webbrowser # Lazy import. Some distro may not have this.
- return webbrowser.open(auth_uri) # Use default browser. Customizable by $BROWSER
+ browser_opened = webbrowser.open(auth_uri) # Use default browser. Customizable by $BROWSER
+
+ # In WSL which doesn't have www-browser, try launching browser with PowerShell
+ if not browser_opened and is_wsl():
+ try:
+ import subprocess
+ # https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.cor…
+ # Ampersand (&) should be quoted
+ exit_code = subprocess.call(
+ ['powershell.exe', '-NoProfile', '-Command', 'Start-Process "{}"'.format(auth_uri)])
+ browser_opened = exit_code == 0
+ except FileNotFoundError: # WSL might be too old
+ pass
+ return browser_opened
def _qs2kv(qs):
@@ -245,4 +270,3 @@
timeout=60,
state=flow["state"], # Optional
), indent=4))
-
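
The authcode.py change above adds a WSL-specific fallback: when the default webbrowser handler reports failure and the kernel release identifies a Microsoft kernel, the URL is handed to powershell.exe so Windows can open its default browser. A minimal standalone sketch of that detection-plus-fallback idea (Python 3 only, an illustration rather than the MSAL API itself):

    import platform
    import subprocess
    import webbrowser

    def open_url(url):
        if webbrowser.open(url):  # default browser, customizable via $BROWSER
            return True
        uname = platform.uname()
        in_wsl = uname.system.lower() == "linux" and "microsoft" in uname.release.lower()
        if in_wsl:
            try:
                # Start-Process lets Windows pick the default handler for the URL
                return subprocess.call(
                    ["powershell.exe", "-NoProfile", "-Command",
                     'Start-Process "{}"'.format(url)]) == 0
            except OSError:  # powershell.exe not on PATH, e.g. a very old WSL
                pass
        return False
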
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/msal/oauth2cli/oauth2.py new/msal-1.12.0/msal/oauth2cli/oauth2.py
--- old/msal-1.10.0/msal/oauth2cli/oauth2.py 2021-03-08 21:46:19.000000000 +0100
+++ new/msal-1.12.0/msal/oauth2cli/oauth2.py 2021-05-19 22:28:05.000000000 +0200
@@ -770,7 +770,6 @@
rt_getter=lambda token_item: token_item["refresh_token"],
on_removing_rt=None,
on_updating_rt=None,
- on_obtaining_tokens=None,
**kwargs):
# type: (Union[str, dict], Union[str, list, set, tuple], Callable) -> dict
"""This is an overload which will trigger token storage callbacks.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/msal/oauth2cli/oidc.py new/msal-1.12.0/msal/oauth2cli/oidc.py
--- old/msal-1.10.0/msal/oauth2cli/oidc.py 2021-03-08 21:46:19.000000000 +0100
+++ new/msal-1.12.0/msal/oauth2cli/oidc.py 2021-05-19 22:28:05.000000000 +0200
@@ -83,6 +83,19 @@
return hashlib.sha256(nonce.encode("ascii")).hexdigest()
+class Prompt(object):
+ """This class defines the constant strings for prompt parameter.
+
+ The values are based on
+ https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest
+ """
+ NONE = "none"
+ LOGIN = "login"
+ CONSENT = "consent"
+ SELECT_ACCOUNT = "select_account"
+ CREATE = "create" # Defined in https://openid.net/specs/openid-connect-prompt-create-1_0.html#PromptParame…
+
+
class Client(oauth2.Client):
"""OpenID Connect is a layer on top of the OAuth2.
@@ -217,6 +230,8 @@
`OIDC <https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest>`_.
:param string prompt: Defined in
`OIDC <https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest>`_.
+ You can find the valid string values defined in :class:`oidc.Prompt`.
+
:param int max_age: Defined in
`OIDC <https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest>`_.
:param string ui_locales: Defined in
@@ -232,7 +247,7 @@
for descriptions on other parameters and return value.
"""
filtered_params = {k:v for k, v in dict(
- prompt=prompt,
+ prompt=" ".join(prompt) if isinstance(prompt, (list, tuple)) else prompt,
display=display,
max_age=max_age,
ui_locales=ui_locales,
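
With the oidc.py change above, the prompt argument may be either a single string or a list/tuple of the new Prompt constants; a sequence is space-joined before being sent, which is the delimited form the OIDC spec expects. The normalization in isolation:

    def normalize_prompt(prompt):
        # ["login", "consent"] -> "login consent"; plain strings pass through
        return " ".join(prompt) if isinstance(prompt, (list, tuple)) else prompt

    assert normalize_prompt("select_account") == "select_account"
    assert normalize_prompt(["login", "consent"]) == "login consent"
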
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/msal/region.py new/msal-1.12.0/msal/region.py
--- old/msal-1.10.0/msal/region.py 1970-01-01 01:00:00.000000000 +0100
+++ new/msal-1.12.0/msal/region.py 2021-05-19 22:28:05.000000000 +0200
@@ -0,0 +1,47 @@
+import os
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+def _detect_region(http_client=None):
+ region = _detect_region_of_azure_function() # It is cheap, so we do it always
+ if http_client and not region:
+ return _detect_region_of_azure_vm(http_client) # It could hang for minutes
+ return region
+
+
+def _detect_region_of_azure_function():
+ return os.environ.get("REGION_NAME")
+
+
+def _detect_region_of_azure_vm(http_client):
+ url = (
+ "http://169.254.169.254/metadata/instance"
+
+ # Utilize the "route parameters" feature to obtain region as a string
+ # https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-me…
+ "/compute/location?format=text"
+
+ # Location info is available since API version 2017-04-02
+ # https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-me…
+ "&api-version=2021-01-01"
+ )
+ logger.info(
+ "Connecting to IMDS {}. "
+ "It may take a while if you are running outside of Azure. "
+ "You should consider opting in/out region behavior on-demand, "
+ 'by loading a boolean flag "is_deployed_in_azure" '
+ 'from your per-deployment config and then do '
+ '"app = ConfidentialClientApplication(..., '
+ 'azure_region=is_deployed_in_azure)"'.format(url))
+ try:
+ # https://docs.microsoft.com/en-us/azure/virtual-machines/windows/instance-me…
+ resp = http_client.get(url, headers={"Metadata": "true"})
+ except:
+ logger.info(
+ "IMDS {} unavailable. Perhaps not running in Azure VM?".format(url))
+ return None
+ else:
+ return resp.text.strip()
+
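
The new region.py checks the REGION_NAME environment variable first (cheap, populated for Azure Functions) and only then queries the VM instance-metadata service, which can hang when the code is not running in Azure. A rough standalone equivalent, using requests in place of the http_client the library injects and adding a short timeout (both are assumptions made for illustration only):

    import os
    import requests  # stand-in for the library's injected http_client

    IMDS_URL = ("http://169.254.169.254/metadata/instance"
                "/compute/location?format=text&api-version=2021-01-01")

    def detect_region(timeout=2):
        region = os.environ.get("REGION_NAME")  # Azure Functions / App Service
        if region:
            return region
        try:
            resp = requests.get(IMDS_URL, headers={"Metadata": "true"}, timeout=timeout)
            return resp.text.strip()
        except requests.RequestException:
            return None  # most likely not running in an Azure VM
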
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/msal/telemetry.py new/msal-1.12.0/msal/telemetry.py
--- old/msal-1.10.0/msal/telemetry.py 1970-01-01 01:00:00.000000000 +0100
+++ new/msal-1.12.0/msal/telemetry.py 2021-05-19 22:28:05.000000000 +0200
@@ -0,0 +1,78 @@
+import uuid
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+CLIENT_REQUEST_ID = 'client-request-id'
+CLIENT_CURRENT_TELEMETRY = "x-client-current-telemetry"
+CLIENT_LAST_TELEMETRY = "x-client-last-telemetry"
+NON_SILENT_CALL = 0
+FORCE_REFRESH = 1
+AT_ABSENT = 2
+AT_EXPIRED = 3
+AT_AGING = 4
+RESERVED = 5
+
+
+def _get_new_correlation_id():
+ return str(uuid.uuid4())
+
+
+class _TelemetryContext(object):
+ """It is used for handling the telemetry context for current OAuth2 "exchange"."""
+ # https://identitydivision.visualstudio.com/DevEx/_git/AuthLibrariesApiReview…
+ _SUCCEEDED = "succeeded"
+ _FAILED = "failed"
+ _FAILURE_SIZE = "failure_size"
+ _CURRENT_HEADER_SIZE_LIMIT = 100
+ _LAST_HEADER_SIZE_LIMIT = 350
+
+ def __init__(self, buffer, lock, api_id, correlation_id=None, refresh_reason=None):
+ self._buffer = buffer
+ self._lock = lock
+ self._api_id = api_id
+ self._correlation_id = correlation_id or _get_new_correlation_id()
+ self._refresh_reason = refresh_reason or NON_SILENT_CALL
+ logger.debug("Generate or reuse correlation_id: %s", self._correlation_id)
+
+ def generate_headers(self):
+ with self._lock:
+ current = "4|{api_id},{cache_refresh}|".format(
+ api_id=self._api_id, cache_refresh=self._refresh_reason)
+ if len(current) > self._CURRENT_HEADER_SIZE_LIMIT:
+ logger.warning(
+ "Telemetry header greater than {} will be truncated by AAD".format(
+ self._CURRENT_HEADER_SIZE_LIMIT))
+ failures = self._buffer.get(self._FAILED, [])
+ return {
+ CLIENT_REQUEST_ID: self._correlation_id,
+ CLIENT_CURRENT_TELEMETRY: current,
+ CLIENT_LAST_TELEMETRY: "4|{succeeded}|{failed_requests}|{errors}|".format(
+ succeeded=self._buffer.get(self._SUCCEEDED, 0),
+ failed_requests=",".join("{a},{c}".format(**f) for f in failures),
+ errors=",".join(f["e"] for f in failures),
+ )
+ }
+
+ def hit_an_access_token(self):
+ with self._lock:
+ self._buffer[self._SUCCEEDED] = self._buffer.get(self._SUCCEEDED, 0) + 1
+
+ def update_telemetry(self, auth_result):
+ if auth_result:
+ with self._lock:
+ if "error" in auth_result:
+ self._record_failure(auth_result["error"])
+ else: # Telemetry sent successfully. Reset buffer
+ self._buffer.clear() # This won't work: self._buffer = {}
+
+ def _record_failure(self, error):
+ simulation = len(",{api_id},{correlation_id},{error}".format(
+ api_id=self._api_id, correlation_id=self._correlation_id, error=error))
+ if self._buffer.get(self._FAILURE_SIZE, 0) + simulation < self._LAST_HEADER_SIZE_LIMIT:
+ self._buffer[self._FAILURE_SIZE] = self._buffer.get(
+ self._FAILURE_SIZE, 0) + simulation
+ self._buffer.setdefault(self._FAILED, []).append({
+ "a": self._api_id, "c": self._correlation_id, "e": error})
+
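
telemetry.py is new in 1.12.0: a _TelemetryContext is built per request from a shared buffer dict and a lock, generate_headers() emits the client-request-id and x-client-*-telemetry headers, and update_telemetry() either buffers a failure for the next request or clears the buffer once a response came back cleanly. A rough usage sketch under that reading (this is an internal helper, and the api_id below is a made-up placeholder for class constants such as ACQUIRE_TOKEN_FOR_CLIENT_ID):

    import threading

    shared_buffer, shared_lock = {}, threading.Lock()  # shared by all requests of an app

    ctx = _TelemetryContext(shared_buffer, shared_lock, api_id=123)  # 123 is a placeholder
    headers = ctx.generate_headers()        # attach these to the outgoing token request
    response = {"error": "invalid_grant"}   # hypothetical failed AAD response
    ctx.update_telemetry(response)          # failure is reported in the next request's headers
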
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/msal/token_cache.py new/msal-1.12.0/msal/token_cache.py
--- old/msal-1.10.0/msal/token_cache.py 2021-03-08 21:46:19.000000000 +0100
+++ new/msal-1.12.0/msal/token_cache.py 2021-05-19 22:28:05.000000000 +0200
@@ -108,11 +108,13 @@
if sensitive in dictionary:
dictionary[sensitive] = "********"
wipe(event.get("data", {}),
- ("password", "client_secret", "refresh_token", "assertion", "username"))
+ ("password", "client_secret", "refresh_token", "assertion"))
try:
return self.__add(event, now=now)
finally:
- wipe(event.get("response", {}), ("access_token", "refresh_token"))
+ wipe(event.get("response", {}), ( # These claims were useful during __add()
+ "access_token", "refresh_token", "id_token", "username"))
+ wipe(event, ["username"]) # Needed for federated ROPC
logger.debug("event=%s", json.dumps(
# We examined and concluded that this log won't have Log Injection risk,
# because the event payload is already in JSON so CR/LF will be escaped.
@@ -145,7 +147,7 @@
client_info["uid"] = id_token_claims.get("sub")
home_account_id = id_token_claims.get("sub")
- target = ' '.join(event.get("scope", [])) # Per schema, we don't sort it
+ target = ' '.join(event.get("scope") or []) # Per schema, we don't sort it
with self._lock:
now = int(time.time() if now is None else now)
@@ -184,6 +186,8 @@
"oid", id_token_claims.get("sub")),
"username": id_token_claims.get("preferred_username") # AAD
or id_token_claims.get("upn") # ADFS 2019
+ or data.get("username") # Falls back to ROPC username
+ or event.get("username") # Falls back to Federated ROPC username
or "", # The schema does not like null
"authority_type":
self.AuthorityType.ADFS if realm == "adfs"
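
The token_cache.py change widens the set of response fields scrubbed once the cache write has finished and lets the cached account's username fall back to the ROPC username. The scrubbing idiom itself is just an in-place dict walk, shown here in isolation with hypothetical data:

    def wipe(dictionary, sensitive_keys):
        # blank out secrets in place once they are no longer needed
        for key in sensitive_keys:
            if key in dictionary:
                dictionary[key] = "********"

    event = {"response": {"access_token": "eyJ...", "token_type": "Bearer"}}
    wipe(event["response"], ("access_token", "refresh_token", "id_token", "username"))
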
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/msal/wstrust_response.py new/msal-1.12.0/msal/wstrust_response.py
--- old/msal-1.10.0/msal/wstrust_response.py 2021-03-08 21:46:19.000000000 +0100
+++ new/msal-1.12.0/msal/wstrust_response.py 2021-05-19 22:28:05.000000000 +0200
@@ -88,5 +88,7 @@
token_types = findall_content(rstr, "TokenType")
tokens = findall_content(rstr, "RequestedSecurityToken")
if token_types and tokens:
- return {"token": tokens[0].encode('us-ascii'), "type": token_types[0]}
+ # Historically, we use "us-ascii" encoding, but it should be "utf-8"
+ # https://stackoverflow.com/questions/36658000/what-is-encoding-used-for-saml…
+ return {"token": tokens[0].encode('utf-8'), "type": token_types[0]}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/msal.egg-info/PKG-INFO new/msal-1.12.0/msal.egg-info/PKG-INFO
--- old/msal-1.10.0/msal.egg-info/PKG-INFO 2021-03-08 21:46:32.000000000 +0100
+++ new/msal-1.12.0/msal.egg-info/PKG-INFO 2021-05-19 22:28:13.000000000 +0200
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: msal
-Version: 1.10.0
+Version: 1.12.0
Summary: The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect.
Home-page: https://github.com/AzureAD/microsoft-authentication-library-for-python
Author: Microsoft Corporation
@@ -21,8 +21,14 @@
Quick links:
- | [Getting Started](https://docs.microsoft.com/azure/active-directory/develop/quicksta… | [Docs](https://github.com/AzureAD/microsoft-authentication-library-for-pytho… | [Samples](https://aka.ms/aaddevsamplesv2) | [Support](README.md#community-help-and-support)
- | --- | --- | --- | --- |
+ | [Getting Started](https://docs.microsoft.com/azure/active-directory/develop/quicksta… | [Docs](https://github.com/AzureAD/microsoft-authentication-library-for-pytho… | [Samples](https://aka.ms/aaddevsamplesv2) | [Support](README.md#community-help-and-support) | [Feedback](https://forms.office.com/r/TMjZkDbzjY) |
+ | --- | --- | --- | --- | --- |
+
+ ## Scenarios supported
+
+ Click on the following thumbnail to visit a large map with clickable links to proper samples.
+
+ [![Map effect won't work inside github's markdown file, so we have to use a thumbnail here to lure audience to a real static website](docs/thumbnail.png)](https://msal-python.readthedocs.io/en/latest/)
## Installation
@@ -135,6 +141,9 @@
Here is the latest Q&A on Stack Overflow for MSAL:
[http://stackoverflow.com/questions/tagged/msal](http://stackoverflow.com/qu…
+ ## Submit Feedback
+ We'd like your thoughts on this library. Please complete [this short survey.](https://forms.office.com/r/TMjZkDbzjY)
+
## Security Reporting
If you find a security issue with our libraries or services please report it to [secure@microsoft.com](mailto:secure@microsoft.com) with as much detail as possible. Your submission may be eligible for a bounty through the [Microsoft Bounty](http://aka.ms/bugbounty) program. Please do not post security issues to GitHub Issues or any other public site. We will contact you shortly upon receiving the information. We encourage you to get notifications of when security incidents occur by visiting [this page](https://technet.microsoft.com/security/dd252948) and subscribing to Security Advisory Alerts.
@@ -153,11 +162,11 @@
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Description-Content-Type: text/markdown
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/msal.egg-info/SOURCES.txt new/msal-1.12.0/msal.egg-info/SOURCES.txt
--- old/msal-1.10.0/msal.egg-info/SOURCES.txt 2021-03-08 21:46:32.000000000 +0100
+++ new/msal-1.12.0/msal.egg-info/SOURCES.txt 2021-05-19 22:28:13.000000000 +0200
@@ -1,3 +1,4 @@
+LICENSE
README.md
setup.cfg
setup.py
@@ -6,6 +7,8 @@
msal/authority.py
msal/exceptions.py
msal/mex.py
+msal/region.py
+msal/telemetry.py
msal/token_cache.py
msal/wstrust_request.py
msal/wstrust_response.py
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/msal.egg-info/requires.txt new/msal-1.12.0/msal.egg-info/requires.txt
--- old/msal-1.10.0/msal.egg-info/requires.txt 2021-03-08 21:46:32.000000000 +0100
+++ new/msal-1.12.0/msal.egg-info/requires.txt 2021-05-19 22:28:13.000000000 +0200
@@ -1,3 +1,6 @@
requests<3,>=2.0.0
PyJWT[crypto]<3,>=1.0.0
cryptography<4,>=0.6
+
+[:python_version < "3.3"]
+mock
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/msal-1.10.0/setup.py new/msal-1.12.0/setup.py
--- old/msal-1.10.0/setup.py 2021-03-08 21:46:19.000000000 +0100
+++ new/msal-1.12.0/setup.py 2021-05-19 22:28:06.000000000 +0200
@@ -58,11 +58,11 @@
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.3',
- 'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
],
@@ -84,6 +84,7 @@
# We will go with "<4" for now, which is also what our another dependency,
# pyjwt, currently use.
+ "mock;python_version<'3.3'",
]
)
Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package oci-cli for openSUSE:Factory checked in at 2021-07-01 07:05:39
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/oci-cli (Old)
and /work/SRC/openSUSE:Factory/.oci-cli.new.2625 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "oci-cli"
Thu Jul 1 07:05:39 2021 rev:7 rq:903196 version:2.25.4
Changes:
--------
--- /work/SRC/openSUSE:Factory/oci-cli/oci-cli.changes 2021-05-23 00:06:20.058559711 +0200
+++ /work/SRC/openSUSE:Factory/.oci-cli.new.2625/oci-cli.changes 2021-07-01 07:05:51.899369028 +0200
@@ -1,0 +2,136 @@
+Tue Jun 29 10:38:35 UTC 2021 - John Paul Adrian Glaubitz <adrian.glaubitz(a)suse.com>
+
+- Update to version 2.25.4
+  * Users can authenticate the CLI with only environment variables;
+    a config file does not need to exist
+  * The following environment variables need to be set: OCI_CLI_USER, OCI_CLI_TENANCY,
+    OCI_CLI_FINGERPRINT, OCI_CLI_KEY_FILE, OCI_CLI_REGION
+  * Optional variable for the passphrase: OCI_CLI_PASSPHRASE
+ * Support for VMBM Pluggable Database feature as a part of the Database Service
+ * ``oci db pluggable-database``
+ * Support for cross-tenancy volume clone in Block Storage service
+ * ``oci bv volume create --source-volume-id``
+ * ``oci bv boot-volume create --source-volume-id``
+ * Changed allowed versions of cryptography package to a range from 3.2.1 to 3.4.7
+ * Following updates in installer scripts:
+ * Use dnf, if available, to install python
+ * Updated check for ubuntu/debian systems to use ID_LIKE/ID
+ instead of NAME in /etc/os-release
+  * Fixed: oci setup bootstrap was causing an ``AttributeError: module 'oci' has no attribute 'identity'``
+ - from version 2.25.3
+ * Support for migrating an OKE cluster not integrated with your
+ VCN to a VCN-Native cluster in Container Engine
+ * ``oci ce cluster cluster-migrate-to-native-vcn``
+ * ``oci ce cluster cluster-migrate-to-native-vcn-status``
+ * Support for filtering of applications based on spark version in Data Flow service
+ * ``oci data-flow application list --spark-version``
+ * Support for registration and management of target databases in Data Safe service.
+ * ``oci data-safe target-database create``
+ * Support for Elastic Storage feature for Exadata Infrastructure
+ resources for ExaCC in Database service.
+ * ``oci db exadata-infrastructure create --compute-count``
+ * ``oci db exadata-infrastructure update --additional-storage-count``
+ * ``oci db exadata-infrastructure add --exadata-infrastructure-id``
+ * New parameter --parameters-config has been added to the below commands
+ in Management Dashboard service
+ * ``oci management-dashboard dashboard create --parameters-config``
+ * ``oci management-dashboard dashboard update``
+ * ``oci management-dashboard saved-search create``
+ * ``oci management-dashboard saved-search update``
+ * PyYAML version requirement relaxed from PyYAML==5.4.1 to PyYAML>=5.4,<6
+ * Default thread count for multipart upload/download using ``oci os object put | get`` is 10.
+ * Multipart download is now default for ``oci os object get``.
+ Please use ``--no-multipart`` to disable multipart download.
+ * Changed multipart download chunk size to maximum instead of 1Mb
+  * Fixed: parameter --compartment-id appeared twice in change compartment
+    for rover node in the Rover service
+ * ``oci rover node change-compartment --compartment-id``
+  * Fixed: with multipart download, the progress bar was only showing 50% even
+    though the full file had been downloaded.
+ - from version 2.25.2
+ * Support for Java Management Service
+ * ``oci jms``
+ * Support to update iscsi Login State for a Volume Attachment in Compute service
+ * ``oci compute volume-attachment update --iscsi-login-state``
+ * Support for 'host-name' and 'is-database-instance-level-metrics' query parameters
+ in Operations Insights service
+ * ``oci opsi database-insights summarize-database-insight-resource-capacity-trend``
+ * ``oci opsi database-insights summarize-database-insight-resource-forecast-trend``
+ * ``oci opsi database-insights summarize-database-insight-resource-statistics``
+ * ``oci opsi database-insights summarize-database-insight-resource-usage``
+ * ``oci opsi database-insights summarize-database-insight-resource-usage-trend``
+ * ``oci opsi database-insights summarize-database-insight-resource-utilization-insight``
+ * ``oci opsi database-insights summarize-sql-insights``
+ * ``oci opsi database-insights summarize-sql-statistics``
+ * ``oci opsi database-insights summarize-sql-statistics-time-series``
+ * Support for listing database configurations in Operations Insights service
+ * ``oci opsi database-insights list-database-configurations``
+ * Added support for a new type of Source called Import for use with the
+ Export tool in Application Migration service
+ * ``oci application-migration source create-source-import-source-details``
+ * ``oci application-migration source update-source-import-source-details``
+ - from version 2.25.1
+ * Support for configuration of autonomous database KMS keys in the Database service
+ * ``oci db autonomous-database configure-autonomous-database-vault-key``
+ * Support for creating database software images from an existing database home in the Database service
+ * ``oci db database-software-image create --source-db-home-id``
+ * Support for creating database software images with any supported RUs in the Database service
+ * ``oci db database-software-image create --database-version [optional], --patch-set[optional]``
+ * ``--is-desupported-version`` flag is added to take customer acknowledgment
+ for creating database-software images with release older than N-3
+ * ``oci db database create --is-desupported-version``
+ * ``oci db database create-from-backup --is-desupported-version``
+ * ``oci db database create-from-database --is-desupported-version``
+ * ``oci db db-home create --is-desupported-version``
+ * Support for listing all NSGs associated with a given VLAN in the Networking service
+ * ``oci network nsg list --compartment-id [optional]``
+ * Services are now dynamically imported for autocomplete, speeding up completion time
+ * Previous release had a bug causing some CLI commands to error with 'KeyError'.
+ Please see `github issue #415 <https://github.com/oracle/oci-cli/issues/415>`_ for more details
+ - from version 2.25.0
+  * [Breaking] For CLI installations running on Python 3.5 or lower, a warning message to upgrade
+    Python to v3.6+ and reinstall the CLI before August 1st, 2021 will be shown.
+ * To disable the warning message, set environment variable ``OCI_CLI_ALLOW_PYTHON2=True``
+ * O is a wrapper for oci-cli, providing an alternate interface with shortcuts to all
+ commands, parameters, resource IDs, and output fields.
+ * O can be found on `GitHub <https://github.com/oracle/oci-cli/blob/master/scripts/examples/project_o>`__.
+ * Support for Generic Artifacts Service (``oci artifacts``)
+ * Support for Bastion Service (``oci bastion``)
+ * Support to provide visualization to view the Automatic Workload Repository (AWR) data
+ for external database in Database Management Service
+ * ``oci database-management get-awr-db-report``
+ * ``oci database-management get-awr-db-sql-report``
+ * ``oci database-management list-awr-db-snapshots``
+ * ``oci database-management list-awr-dbs``
+ * ``oci database-management summarize-awr-db-cpu-usages``
+ * ``oci database-management summarize-awr-db-metrics``
+ * ``oci database-management summarize-awr-db-parameter-changes``
+ * ``oci database-management summarize-awr-db-parameters``
+ * ``oci database-management summarize-awr-db-snapshot-ranges``
+ * ``oci database-management summarize-awr-db-sysstats``
+ * ``oci database-management summarize-awr-db-top-wait-events``
+ * ``oci database-management summarize-awr-db-wait-event-buckets``
+ * ``oci database-management summarize-awr-db-wait-events``
+ * Support for VM.Standard.E3.Flex Flexible Compute Shape with customizable OCPUs
+ and memory for Data Science Notebooks
+ * ``oci data-science notebook-session create --configuration-details``
+ * ``oci data-science notebook-session update --configuration-details``
+ * Support for HCX Enterprise Add-on for Oracle Cloud VMware Solution
+ * ``oci ocvs sddc cancel-downgrade-hcx``
+ * ``oci ocvs sddc downgrade-hcx``
+ * ``oci ocvs sddc refresh-hcx-license-status``
+ * ``oci ocvs sddc upgrade-hcx``
+ * ``current-sku`` parameter in ``oci ocvs esxi-host create`` is now optional
+ * ``initial-sku`` parameter in ``oci ocvs sddc create`` is now optional
+ * Support for Secrets Read By Name as part of Secrets in Vault Service
+ * ``oci secrets secret-bundle get-secret-bundle-by-name``
+ * Support for ``isDynamic`` field in the response for ``oci limits definition list``
+ * Service modules are now dynamically imported at runtime, speeding up CLI invocations
+- Refresh patches for new version
+ + oc_relax-python-depends.patch
+- Replace %autosetup with %patch and %setup again in %prep section
+  + Preferred as it allows enabling and disabling patches individually
+- Update BuildRequires and Requires from setup.py
+- Use find command to search for Python sources for sed regexp-replace
+
+-------------------------------------------------------------------
@@ -5,7 +141,15 @@
- * Support for Object storage configuration source in the
- Resource Manager service
- * Support for spark-submit compatible options in the Data Flow service
- * Fixed: Wait for state parameter for the following commands were
- returning an error on valid states
- * Fixed list commands in Artifacts service
-- Refresh oc_relax-python-depends.patch
+ * Support for database maintenance run patchMode feature as a part
+ of the Database Service
+ * ``oci db maintenance-run``
+ * PyYAML was upgraded to version 5.4.1 to address a vulnerability identified
+ on GitHub as CVE-2020-14343
+ * Py was upgraded to version 1.10.0 to address a vulnerability identified on GitHub
+ as CVE-2020-29651. Py isn't used in our run-time system but as part of our
+ documentation build process.
+- Refresh patches for new version
+ + oc_relax-python-depends.patch
+- Replace %patch and %setup with %autosetup in %prep section
+- Replace `python3 setup.py build` with %python3_build in %build section
+- Replace `python3 setup.py install` with %python3_install in %install section
+- Switch Source URL to use %{url} macro
+- Update BuildRequires and Requires from setup.py
Old:
----
oci-cli-2.24.5.tar.gz
New:
----
oci-cli-2.25.4.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ oci-cli.spec ++++++
--- /var/tmp/diff_new_pack.5fWKeT/_old 2021-07-01 07:05:52.431364872 +0200
+++ /var/tmp/diff_new_pack.5fWKeT/_new 2021-07-01 07:05:52.435364841 +0200
@@ -28,7 +28,7 @@
%bcond_with test
%endif
Name: oci-cli%{psuffix}
-Version: 2.24.5
+Version: 2.25.4
Release: 0
Summary: Oracle Cloud Infrastructure CLI
License: Apache-2.0
@@ -44,10 +44,10 @@
BuildRequires: python3-arrow >= 0.14.7
BuildRequires: python3-certifi
BuildRequires: python3-click >= 6.7
-BuildRequires: python3-cryptography >= 3.3.2
+BuildRequires: python3-cryptography >= 3.2.1
BuildRequires: python3-devel
BuildRequires: python3-jmespath >= 0.9.4
-BuildRequires: python3-oci-sdk >= 2.38.3
+BuildRequires: python3-oci-sdk >= 2.40.1
BuildRequires: python3-pyOpenSSL >= 18.0.0
BuildRequires: python3-python-dateutil >= 2.5.3
BuildRequires: python3-pytz >= 2016.10
@@ -83,9 +83,9 @@
Requires: python3-arrow >= 0.14.7
Requires: python3-certifi
Requires: python3-click >= 6.7
-Requires: python3-cryptography >= 3.3.2
+Requires: python3-cryptography >= 3.2.1
Requires: python3-jmespath >= 0.10.0
-Requires: python3-oci-sdk >= 2.38.3
+Requires: python3-oci-sdk >= 2.40.1
Requires: python3-pyOpenSSL >= 18.0.0
Requires: python3-python-dateutil >= 2.5.3
Requires: python3-pytz >= 2016.10
@@ -103,10 +103,12 @@
functionality.
%prep
-%autosetup -p1
+%setup -q -n oci-cli-%{version}
+%patch0 -p1
+%patch1 -p1
# Fix includes
-sed -i 's/from oci._vendor //' src/oci_cli/*.py services/container_engine/src/oci_cli_container_engine/*.py services/object_storage/src/oci_cli_object_storage/object_storage_transfer_manager/*.py services/dts/src/oci_cli_dts/physical_appliance_control_plane/client/*.py services/dts/src/oci_cli_dts/*.py tests/*.py
-sed -i 's/oci\._vendor\.//' src/oci_cli/*.py services/dts/src/oci_cli_dts/*.py services/container_engine/src/oci_cli_container_engine/*.py tests/*.py tests/vcr_mods/*.py
+find . -name "*.py" -exec sed -i 's/from oci\._vendor //' \{\} +
+find . -name "*.py" -exec sed -i 's/oci\._vendor\.//' \{\} +
%build
%python3_build
++++++ oc_relax-python-depends.patch ++++++
--- /var/tmp/diff_new_pack.5fWKeT/_old 2021-07-01 07:05:52.459364653 +0200
+++ /var/tmp/diff_new_pack.5fWKeT/_new 2021-07-01 07:05:52.459364653 +0200
@@ -1,14 +1,14 @@
-diff -Nur oci-cli-2.24.5/requirements.txt new/requirements.txt
---- oci-cli-2.24.5/requirements.txt 2021-05-19 01:47:14.000000000 +0200
-+++ new/requirements.txt 2021-05-20 18:19:50.801629082 +0200
+diff -Nru oci-cli-2.25.4.orig/requirements.txt oci-cli-2.25.4/requirements.txt
+--- oci-cli-2.25.4.orig/requirements.txt 2021-06-22 23:53:53.000000000 +0200
++++ oci-cli-2.25.4/requirements.txt 2021-06-29 11:03:08.743404407 +0200
@@ -2,41 +2,41 @@
# (https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-form…)
# you may need to use the --extra-index-url option instead.
-appdirs==1.4.3
-arrow==0.17.0
-+appdirs~=1.4.3
-+arrow~=0.17.0
++appdirs>=1.4.3
++arrow>=0.17.0
certifi
cffi>=1.9.1
-click==6.7
@@ -20,7 +20,7 @@
-jmespath==0.10.0
-ndg-httpsclient==0.4.2
-mock==2.0.0
--oci==2.38.3
+-oci==2.40.1
-packaging==20.2
-pluggy==0.13.0
-py==1.10.0
@@ -30,31 +30,32 @@
-pyparsing==2.2.0
-pytest==3.2.3
-pytest-cov==2.5.1
-+click~=6.7
-+configparser~=4.0.2
-+coverage~=4.5.2
-+cryptography~=3.3.2
-+httpsig-cffi~=15.0.0
-+Jinja2~=2.11.3
-+jmespath~=0.10.0
-+ndg-httpsclient~=0.4.2
-+mock~=2.0.0
-+oci~=2.38.3
-+packaging~=20.2
-+pluggy~=0.13.0
-+py~=1.10.0
-+pyasn1~=0.2.3
-+pyOpenSSL~=19.1.0
-+pycparser~=2.20
-+pyparsing~=2.2.0
-+pytest~=3.2.3
-+pytest-cov~=2.5.1
- python-dateutil>=2.5.3,<3.0.0
+-python-dateutil>=2.5.3,<3.0.0
++click>=6.7
++configparser>=4.0.2
++coverage>=4.5.2
++cryptography>=3.3.2
++httpsig-cffi>=15.0.0
++Jinja2>=2.11.3
++jmespath>=0.10.0
++ndg-httpsclient>=0.4.2
++mock>=2.0.0
++oci>=2.40.1
++packaging>=20.2
++pluggy>=0.13.0
++py>=1.10.0
++pyasn1>=0.2.3
++pyOpenSSL>=19.1.0
++pycparser>=2.20
++pyparsing>=2.2.0
++pytest>=3.2.3
++pytest-cov>=2.5.1
++python-dateutil>=2.5.3
pytz>=2016.10
-requests==2.21.0
-retrying==1.3.3
-+requests~=2.21.0
-+retrying~=1.3.3
++requests>=2.21.0
++retrying>=1.3.3
setuptools<45
-six==1.14.0
-sphinx==1.6.4
@@ -65,54 +66,55 @@
-virtualenv==16.7.10
-pytest-xdist==1.22.2
-pytest-forked==1.0.2
--PyYAML==5.4.1
-+six~=1.14.0
-+sphinx~=1.6.4
-+sphinx-rtd-theme~=0.2.5b1
-+terminaltables~=3.1.0
-+tox~=3.23.0
-+vcrpy~=1.13.0
-+virtualenv~=16.7.10
-+pytest-xdist~=1.22.2
-+pytest-forked~=1.0.2
-+PyYAML~=5.4.1
-diff -Nur oci-cli-2.24.5/setup.py new/setup.py
---- oci-cli-2.24.5/setup.py 2021-05-19 01:47:14.000000000 +0200
-+++ new/setup.py 2021-05-20 18:19:50.801629082 +0200
+-PyYAML>=5.4,<6
++six>=1.14.0
++sphinx>=1.6.4
++sphinx-rtd-theme>=0.2.5b1
++terminaltables>=3.1.0
++tox>=3.23.0
++vcrpy>=1.13.0
++virtualenv>=16.7.10
++pytest-xdist>=1.22.2
++pytest-forked>=1.0.2
++PyYAML>=5.4
+diff -Nru oci-cli-2.25.4.orig/setup.py oci-cli-2.25.4/setup.py
+--- oci-cli-2.25.4.orig/setup.py 2021-06-22 23:53:53.000000000 +0200
++++ oci-cli-2.25.4/setup.py 2021-06-29 11:03:26.806320907 +0200
@@ -30,24 +30,24 @@
readme = f.read()
requires = [
-- 'oci==2.38.3',
+- 'oci==2.40.1',
- 'arrow==0.17.0',
-+ 'oci~=2.38.3',
-+ 'arrow~=0.17.0',
++ 'oci>=2.40.1',
++ 'arrow>=0.17.0',
'certifi',
- 'click==6.7',
- 'configparser==4.0.2',
-- 'cryptography==3.3.2',
+- 'cryptography>=3.2.1,<=3.4.7',
- 'jmespath==0.10.0',
-+ 'click~=6.7',
-+ 'configparser~=4.0.2',
-+ 'cryptography~=3.3.2',
-+ 'jmespath~=0.10.0',
- 'python-dateutil>=2.5.3,<3.0.0',
+- 'python-dateutil>=2.5.3,<3.0.0',
++ 'click>=6.7',
++ 'configparser>=4.0.2',
++ 'cryptography>=3.2.1',
++ 'jmespath>=0.10.0',
++ 'python-dateutil>=2.5.3',
'pytz>=2016.10',
- 'retrying==1.3.3',
- 'six==1.14.0',
- 'terminaltables==3.1.0',
- 'pyOpenSSL==19.1.0',
-- 'PyYAML==5.4.1'
-+ 'retrying~=1.3.3',
-+ 'six~=1.14.0',
-+ 'terminaltables~=3.1.0',
-+ 'pyOpenSSL~=19.1.0',
-+ 'PyYAML~=5.4.1'
+- 'PyYAML>=5.4,<6'
++ 'retrying>=1.3.3',
++ 'six>=1.14.0',
++ 'terminaltables>=3.1.0',
++ 'pyOpenSSL>=19.1.0',
++ 'PyYAML>=5.4'
]
extras = {
- 'db': ['cx_Oracle==7.0']
-+ 'db': ['cx_Oracle~=7.0']
++ 'db': ['cx_Oracle>=7.0']
}
fips_libcrypto_file = os.getenv("OCI_CLI_FIPS_LIBCRYPTO_FILE")
++++++ oci-cli-2.24.5.tar.gz -> oci-cli-2.25.4.tar.gz ++++++
++++ 14527 lines of diff (skipped)
Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package arpwatch for openSUSE:Factory checked in at 2021-07-01 07:05:38
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/arpwatch (Old)
and /work/SRC/openSUSE:Factory/.arpwatch.new.2625 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "arpwatch"
Thu Jul 1 07:05:38 2021 rev:34 rq:903193 version:2.1a15
Changes:
--------
--- /work/SRC/openSUSE:Factory/arpwatch/arpwatch.changes 2021-02-09 21:17:20.942848142 +0100
+++ /work/SRC/openSUSE:Factory/.arpwatch.new.2625/arpwatch.changes 2021-07-01 07:05:50.555379526 +0200
@@ -1,0 +2,7 @@
+Tue Jun 29 07:10:40 UTC 2021 - Johannes Segitz <jsegitz(a)suse.com>
+
+- Fixed local privilege escalation from runtime user to root
+ (bsc#1186240, CVE-2021-25321)
+- Added service_* calls for the @.service file
+
+-------------------------------------------------------------------
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ arpwatch.spec ++++++
--- /var/tmp/diff_new_pack.fkSmAl/_old 2021-07-01 07:05:51.091375339 +0200
+++ /var/tmp/diff_new_pack.fkSmAl/_new 2021-07-01 07:05:51.095375308 +0200
@@ -103,17 +103,21 @@
%pre
%service_add_pre arpwatch.service
+%service_add_pre arpwatch@.service
%preun
%service_del_preun arpwatch.service
+%service_del_preun arpwatch@.service
%post
%fillup_only
%tmpfiles_create %{_tmpfilesdir}/arpwatch.conf
%service_add_post arpwatch.service
+%service_add_post arpwatch@.service
%postun
%service_del_postun arpwatch.service
+%service_del_postun arpwatch@.service
%files
%{_unitdir}/arpwatch.service
++++++ arpwatch-2.1a11-drop-privs.dif ++++++
--- /var/tmp/diff_new_pack.fkSmAl/_old 2021-07-01 07:05:51.135374995 +0200
+++ /var/tmp/diff_new_pack.fkSmAl/_new 2021-07-01 07:05:51.135374995 +0200
@@ -1,5 +1,7 @@
---- a/arpwatch.c
-+++ b/arpwatch.c
+Index: arpwatch-2.1a15/arpwatch.c
+===================================================================
+--- arpwatch-2.1a15.orig/arpwatch.c
++++ arpwatch-2.1a15/arpwatch.c
@@ -64,6 +64,8 @@ struct rtentry;
#include <string.h>
#include <syslog.h>
@@ -9,7 +11,7 @@
#include <pcap.h>
-@@ -152,6 +154,52 @@ int sanity_toring(struct toring_header *
+@@ -152,6 +154,66 @@ int sanity_toring(struct toring_header *
#endif
__dead void usage(void) __attribute__((volatile));
@@ -34,14 +36,28 @@
+ }
+ lastslash[0]='\0';
+
++ if (!safe_base_path(arpfiledir)) {
++ syslog(LOG_ERR, "Fatal: directory structure %s not safe, can't operate here. Please make root owner of underlying directories and remove write access for other", arpfiledir);
++ exit(1);
++ }
+
++ // ensure we have a safe place to operate
++ if (lchown( arpfiledir, 0, 0) != 0 ) {
++ syslog(LOG_ERR, "Fatal: could not chown %s to root).", arpfiledir);
++ exit(1);
++ }
++ // change permissions of the file if it exists
++ if (!access(arpfile, F_OK) && lchown ( arpfile, pw->pw_uid, -1) != 0) {
++ syslog(LOG_ERR, "Fatal: could not chown %s to %d).", arpfile, pw->pw_uid);
++ exit(1);
++ }
+ /* files arp.dat.eth0- and arp.dat.eth0.new that are created
+ as backup/lastversion and for temporary storage are
-+ deleted before created again. */
-+ if ( chown ( arpfile, pw->pw_uid, pw->pw_gid) != 0 ||
-+ chown ( arpfiledir, pw->pw_uid, pw->pw_gid) != 0 ) {
-+ syslog(LOG_ERR, "Fatal: could not chown %s and %s to %d,%d).",
-+ arpfiledir,arpfile, pw->pw_uid, pw->pw_gid);
++ deleted before being created again, therefore the user needs
++ to control this directory. */
++ if ( lchown ( arpfiledir, pw->pw_uid, -1) != 0 ) {
++ syslog(LOG_ERR, "Fatal: could not chown %s to %d).",
++ arpfiledir, pw->pw_uid);
+ exit(1);
+ }
+ free(arpfiledir);
@@ -62,7 +78,7 @@
int
main(int argc, char **argv)
{
-@@ -164,6 +212,7 @@ main(int argc, char **argv)
+@@ -164,6 +226,7 @@ main(int argc, char **argv)
register char *interface, *rfilename;
struct bpf_program code;
char errbuf[PCAP_ERRBUF_SIZE];
@@ -70,7 +86,7 @@
if (argv[0] == NULL)
prog = "arpwatch";
-@@ -181,7 +230,7 @@ main(int argc, char **argv)
+@@ -181,7 +244,7 @@ main(int argc, char **argv)
interface = NULL;
rfilename = NULL;
pd = NULL;
@@ -79,7 +95,7 @@
switch (op) {
case 'd':
-@@ -213,6 +262,16 @@ main(int argc, char **argv)
+@@ -213,6 +276,16 @@ main(int argc, char **argv)
rfilename = optarg;
break;
@@ -96,7 +112,7 @@
default:
usage();
}
-@@ -295,8 +354,11 @@ main(int argc, char **argv)
+@@ -295,8 +368,11 @@ main(int argc, char **argv)
* Revert to non-privileged user after opening sockets
* (not needed on most systems).
*/
@@ -110,7 +126,7 @@
/* Must be ethernet or fddi or tokenring */
linktype = pcap_datalink(pd);
-@@ -842,6 +904,6 @@ usage(void)
+@@ -842,6 +918,6 @@ usage(void)
(void)fprintf(stderr, "Version %s\n", version);
(void)fprintf(stderr, "usage: %s [-dN] [-f datafile] [-i interface]"
@@ -118,3 +134,75 @@
+ " [-n net[/width]] [-r file] [-u username]\n", prog);
exit(1);
}
+Index: arpwatch-2.1a15/util.c
+===================================================================
+--- arpwatch-2.1a15.orig/util.c
++++ arpwatch-2.1a15/util.c
+@@ -29,6 +29,7 @@ static const char rcsid[] =
+
+ #include <sys/types.h>
+ #include <sys/file.h>
++#include <sys/stat.h>
+
+ #include <fcntl.h>
+ #ifdef HAVE_MEMORY_H
+@@ -186,3 +187,47 @@ savestr(register const char *str)
+ strsize -= i;
+ return (cp);
+ }
++
++int safe_base_path(char *arpfiledir) {
++ // check directories below arpfiledir for safe ownership/permissions
++ char *path_component;
++ char *lastslash = NULL;
++ int safe_path = 1;
++
++ if (!arpfiledir) {
++ syslog(LOG_ERR, "Fatal: safe_base_path invalid invocation.");
++ exit(1);
++ }
++
++ path_component = malloc(strlen(arpfiledir)+1);
++ if(path_component == NULL) {
++ syslog(LOG_ERR, "Fatal: malloc().");
++ exit(1);
++ }
++ strcpy(path_component, arpfiledir);
++
++ while ((lastslash = strrchr(path_component, '/'))) {
++ struct stat stats;
++
++ lastslash[0]='\0';
++ if ( lstat(path_component, &stats) ) {
++ /* on the last iteration the string will be empty and this fails,
++ which is okay, if / is unsafe all is lost anyway and we can
++ skip the check. Otherwise fail safe if lstat doesn't work */
++ if (strlen(path_component))
++ safe_path = 0;
++ } else {
++ if ( stats.st_uid != 0 ||
++ stats.st_gid != 0 ||
++ stats.st_mode & S_IWOTH
++ ) {
++ /* this is not a safe path to operate on with privileges because
++ it isn't owned by root:root or others can write there */
++ safe_path = 0;
++ }
++ }
++ }
++
++ free(path_component);
++ return safe_path;
++}
+Index: arpwatch-2.1a15/util.h
+===================================================================
+--- arpwatch-2.1a15.orig/util.h
++++ arpwatch-2.1a15/util.h
+@@ -5,6 +5,7 @@ int dump(void);
+ void dumpone(u_int32_t, u_char *, time_t, char *);
+ int readdata(void);
+ char *savestr(const char *);
++int safe_base_path(char *);
+
+ extern char *arpdir;
+ extern char *newarpfile;
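
The core of the fix is safe_base_path(): before arpwatch touches ownership of anything, every parent directory of the arp data file must be owned by root:root and must not be world-writable, otherwise the daemon refuses to operate there with privileges (the bsc#1186240 / CVE-2021-25321 hardening). The same rule, sketched in Python purely to illustrate the check the C code performs:

    import os
    import stat

    def is_safe_base_path(path):
        # every ancestor directory must be root:root and not world-writable
        path = os.path.abspath(path)
        while path != "/":
            path = os.path.dirname(path)
            try:
                st = os.lstat(path)
            except OSError:
                return False  # fail safe if a component cannot be inspected
            if st.st_uid != 0 or st.st_gid != 0 or (st.st_mode & stat.S_IWOTH):
                return False
        return True
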
Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package matio for openSUSE:Factory checked in at 2021-07-01 07:05:38
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/matio (Old)
and /work/SRC/openSUSE:Factory/.matio.new.2625 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "matio"
Thu Jul 1 07:05:38 2021 rev:26 rq:903190 version:1.5.21
Changes:
--------
--- /work/SRC/openSUSE:Factory/matio/matio.changes 2021-03-29 18:22:05.346281378 +0200
+++ /work/SRC/openSUSE:Factory/.matio.new.2625/matio.changes 2021-07-01 07:05:49.671386431 +0200
@@ -1,0 +2,9 @@
+Tue Jun 1 00:55:31 UTC 2021 - Ferdinand Thiessen <rpm(a)fthiessen.de>
+
+- Update to version 1.5.21
+ * Fixed: Avoid usage of strlen when creating MAT_T_UTF8 encoded
+ character array
+ * Fixed reading of compression flag from HDF5 MAT file
+- Use https for URL and SourceURL
+
+-------------------------------------------------------------------
Old:
----
matio-1.5.20.7z
New:
----
matio-1.5.21.7z
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ matio.spec ++++++
--- /var/tmp/diff_new_pack.LHWXna/_old 2021-07-01 07:05:50.363381026 +0200
+++ /var/tmp/diff_new_pack.LHWXna/_new 2021-07-01 07:05:50.367380995 +0200
@@ -19,13 +19,13 @@
%define libname lib%{name}
%define major 11
Name: matio
-Version: 1.5.20
+Version: 1.5.21
Release: 0
Summary: Library for reading and writing MATLAB MAT files
License: BSD-2-Clause
Group: Productivity/Scientific/Other
-URL: http://sourceforge.net/projects/matio
-Source0: http://downloads.sourceforge.net/matio/%{name}-%{version}.7z
+URL: https://sourceforge.net/projects/matio/
+Source0: https://downloads.sourceforge.net/matio/%{name}-%{version}.7z
# We need hdf5 1.10.2 to allow creation of files backwards compatible with hdf5 1.8
BuildRequires: hdf5-devel >= 1.10.2
BuildRequires: pkgconfig
Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package pleaser for openSUSE:Factory checked in at 2021-07-01 07:05:37
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/pleaser (Old)
and /work/SRC/openSUSE:Factory/.pleaser.new.2625 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "pleaser"
Thu Jul 1 07:05:37 2021 rev:2 rq:903183 version:0.4.1~git0.11a9aa8
Changes:
--------
--- /work/SRC/openSUSE:Factory/pleaser/pleaser.changes 2021-06-28 15:33:44.299194065 +0200
+++ /work/SRC/openSUSE:Factory/.pleaser.new.2625/pleaser.changes 2021-07-01 07:05:48.787393337 +0200
@@ -1,0 +2,7 @@
+Wed Jun 23 07:40:04 UTC 2021 - Jan Engelhardt <jengelh(a)inai.de>
+
+- Ensure neutrality of description.
+- Reduce PreReq on "permissions" to the actually used
+ requirement types.
+
+-------------------------------------------------------------------
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ pleaser.spec ++++++
--- /var/tmp/diff_new_pack.8cFl2Z/_old 2021-07-01 07:05:49.319389181 +0200
+++ /var/tmp/diff_new_pack.8cFl2Z/_new 2021-07-01 07:05:49.323389150 +0200
@@ -24,7 +24,7 @@
Version: 0.4.1~git0.11a9aa8
Release: 1%{?dist}
Group: Productivity/Security
-Summary: Polite regex-first sudo alternative
+Summary: Alternative to sudo (root command execution) with regex support
License: (0BSD OR MIT OR Apache-2.0) AND (Apache-2.0 OR MIT) AND (Apache-2.0 OR Apache-2.0 WITH LLVM-exception OR MIT) AND (Apache-2.0 OR MIT OR Zlib) AND (MIT OR Unlicense) AND Apache-2.0 AND MIT AND GPL-3.0-or-later
URL: https://gitlab.com/edneville/please/-/archive/v%{version}/please-v%{version…
Source0: please-%{version}.tar.xz
@@ -43,12 +43,14 @@
Requires: pam
-PreReq: permissions
+Requires(post): permissions
+Requires(verify):permissions
%description
-please, a secure, fast, reliable, regex alternative to sudo.
-pleaseedit, a secure method to permit editing of files without
-elevation. Fast, reliable, safe. Pick three.
+please is a regex-capable alternative to sudo, a command for allowing
+users to execute some subsequent commands as the root (or another) user.
+pleaseedit is a method to permit editing of files without
+elevation.
%global rustflags '-Clink-arg=-Wl,-z,relro,-z,now'
@@ -97,7 +99,7 @@
session include common-session
EOF
-%files -n %{name}
+%files
%doc README.md
%license LICENSE
%verify(not mode) %attr(4755,root,root) %{_bindir}/please
Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package obs-service-product_converter for openSUSE:Factory checked in at 2021-07-01 07:05:36
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/obs-service-product_converter (Old)
and /work/SRC/openSUSE:Factory/.obs-service-product_converter.new.2625 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "obs-service-product_converter"
Thu Jul 1 07:05:36 2021 rev:15 rq:903181 version:1.4.6
Changes:
--------
--- /work/SRC/openSUSE:Factory/obs-service-product_converter/obs-service-product_converter.changes 2020-11-13 19:00:39.858221138 +0100
+++ /work/SRC/openSUSE:Factory/.obs-service-product_converter.new.2625/obs-service-product_converter.changes 2021-07-01 07:05:47.919400117 +0200
@@ -1,0 +2,9 @@
+Wed Jun 30 06:16:44 UTC 2021 - Adrian Schröter <adrian(a)suse.de>
+
+- 1.4.6
+ * add a field for build.opensuse.org default repository name for
+ zypper (bsc#1187425)
+ * add an automatic field for default OBS download URL
+ * release spec file cleanup
+
+-------------------------------------------------------------------
Old:
----
obs-service-product_converter-1.4.5.obscpio
New:
----
obs-service-product_converter-1.4.6.obscpio
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ obs-service-product_converter.spec ++++++
--- /var/tmp/diff_new_pack.32IrYr/_old 2021-07-01 07:05:48.407396305 +0200
+++ /var/tmp/diff_new_pack.32IrYr/_new 2021-07-01 07:05:48.411396274 +0200
@@ -1,7 +1,7 @@
#
# spec file for package obs-service-product_converter
#
-# Copyright (c) 2020 SUSE LLC
+# Copyright (c) 2021 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -19,7 +19,7 @@
%define service product_converter
Name: obs-service-%service
-Version: 1.4.5
+Version: 1.4.6
Release: 0
Summary: An OBS source service: create product media build descriptions
License: GPL-2.0-or-later
++++++ _service ++++++
--- /var/tmp/diff_new_pack.32IrYr/_old 2021-07-01 07:05:48.439396055 +0200
+++ /var/tmp/diff_new_pack.32IrYr/_new 2021-07-01 07:05:48.443396024 +0200
@@ -1,11 +1,11 @@
<services>
- <service name="obs_scm" mode="disabled">
+ <service name="obs_scm" mode="manual">
<param name="url">https://github.com/openSUSE/obs-service-product_converter.git</param>
<param name="scm">git</param>
- <param name="version">1.4.5</param>
- <param name="revision">1.4.5</param>
+ <param name="version">1.4.6</param>
+ <param name="revision">1.4.6</param>
</service>
- <service name="set_version" mode="disabled"/>
+ <service name="set_version" mode="manual"/>
<service name="tar" mode="buildtime"/>
<service name="recompress" mode="buildtime">
<param name="file">*.tar</param>
++++++ obs-service-product_converter-1.4.5.obscpio -> obs-service-product_converter-1.4.6.obscpio ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/obs-service-product_converter-1.4.5/BSProductXML.pm new/obs-service-product_converter-1.4.6/BSProductXML.pm
--- old/obs-service-product_converter-1.4.5/BSProductXML.pm 2020-11-10 17:37:22.000000000 +0100
+++ new/obs-service-product_converter-1.4.6/BSProductXML.pm 2021-06-30 09:32:39.000000000 +0200
@@ -206,6 +206,8 @@
'defaultlang',
'datadir',
'descriptiondir',
+ 'default_obs_repository_name',
+ 'default_obs_download_url',
[ 'releasepackage' => 'name', 'flag', 'version', 'release' ],
'distribution',
[[ 'obsoletepackage' => '_content' ]],
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/obs-service-product_converter-1.4.5/create_single_product new/obs-service-product_converter-1.4.6/create_single_product
--- old/obs-service-product_converter-1.4.5/create_single_product 2020-11-10 17:37:22.000000000 +0100
+++ new/obs-service-product_converter-1.4.6/create_single_product 2021-06-30 09:32:39.000000000 +0200
@@ -758,7 +758,7 @@
foreach my $flavor ( @{$prodRef->{mediasets}->{media}} ){
next if !defined($flavor->{'flavor'}) || $flavor->{'flavor'} eq '' || $seen{$flavor->{flavor}};
$seen{$flavor->{flavor}} = 1;
- my $readmedir = "\$RPM_BUILD_ROOT/%{_defaultdocdir}/$product->{releasepkgname}-$flavor->{flavor}";
+ my $readmedir = "%{buildroot}%{_defaultdocdir}/$product->{releasepkgname}-$flavor->{flavor}";
$reame_file .= "mkdir -p $readmedir\n";
$reame_file .= "cat >$readmedir/README << EOF\n";
$reame_file .= "This package only exists for providing the product flavor \'$flavor->{flavor}\'.\n";
@@ -922,7 +922,6 @@
$str.=$productprovides;
$str.="\n___PRODUCT_DEPENDENCIES___\n";
$str.="AutoReqProv: on\n";
- $str.="BuildRoot: %{_tmppath}/%{name}-%{version}-build\n";
if (defined($product->{register}->{updates}->{distrotarget}[0])) {
$str.="# this package should only be available for the \"basearchs\" of a product\nExclusiveArch:";
foreach my $dt ( @{$product->{register}->{updates}->{distrotarget}} ) {
@@ -941,13 +940,11 @@
$str.="___CREATE_PRODUCT_FILES___\n";
# we could do this by default for base products
# $str.="___CREATE_OS_RELEASE_FILE___\n";
- $str.="\n%clean\n";
- $str.="rm -rf %buildroot\n";
$str.="\n%files\n";
$str.="%defattr(644,root,root,755)\n";
- $str.="%dir /etc/products.d\n";
- $str.="/etc/products.d/*.prod\n";
-# $str.="/etc/os-release\n";
+ $str.="%dir %{_sysconfdir}/products.d\n";
+ $str.="%{_sysconfdir}/products.d/*.prod\n";
+# $str.="%{_sysconfdir}/os-release\n";
$str.="\n%changelog\n";
} else {
@@ -1029,7 +1026,12 @@
foreach my $flavor ( @{$prodRef->{mediasets}->{media}} ){
next if !defined($flavor->{'flavor'}) || $flavor->{'flavor'} eq '' || $seen{$flavor->{flavor}};
$seen{$flavor->{flavor}} = 1;
- $product_flavors.="%package $flavor->{flavor}\n";
+ # not using $product->{releasepkgname} since it will always be initialized
+ if (defined($prodRef->{'products'}->{'product'}[0]->{'releasepkgname'}) && $prodRef->{'products'}->{'product'}[0]->{'releasepkgname'} ne '') {
+ $product_flavors.="%package -n $prodRef->{'products'}->{'product'}[0]->{'releasepkgname'}-$flavor->{flavor}\n";
+ } else {
+ $product_flavors.="%package $flavor->{flavor}\n";
+ }
$product_flavors.="License: BSD-3-Clause\n";
$product_flavors.="Group: System/Fhs\n";
if ((defined($prodRef->{'project'}->{'name'})) && ("$prodRef->{'project'}->{'name'}" ne "")){
@@ -1080,22 +1082,22 @@
$greeting = $summary->{'_content'} if ( ! $summary->{'language'} );
}
- my $content="mkdir -p %{buildroot}/%{_sysconfdir}
-echo -e 'Welcome to ".$greeting." %{?betaversion:%{betaversion} }- Kernel \\r (\\l).\n\n' > %{buildroot}/etc/issue
-echo \"Welcome to ".$greeting." %{?betaversion:%{betaversion} }- Kernel %%r (%%t).\" > %{buildroot}/etc/issue.net
-echo \"#".$greeting." %{?betaversion:%{betaversion} }(%{_target_cpu})\" > %{buildroot}/etc/SuSE-release
-echo \"VERSION = %{version}\" >> %{buildroot}/etc/SuSE-release\n";
+ my $content="mkdir -p %{buildroot}%{_sysconfdir}
+echo -e 'Welcome to ".$greeting." %{?betaversion:%{betaversion} }- Kernel \\r (\\l).\n\n' > %{buildroot}%{_sysconfdir}/issue
+echo \"Welcome to ".$greeting." %{?betaversion:%{betaversion} }- Kernel %%r (%%t).\" > %{buildroot}%{_sysconfdir}/issue.net
+echo \"#".$greeting." %{?betaversion:%{betaversion} }(%{_target_cpu})\" > %{buildroot}%{_sysconfdir}/SuSE-release
+echo \"VERSION = %{version}\" >> %{buildroot}%{_sysconfdir}/SuSE-release\n";
$content.="PATCHLEVEL = ".$product->{'patchlevel'}."\n" if (defined($product->{'patchlevel'}));
- $content.="mkdir -p %{buildroot}/%{_sysconfdir}
-echo \"Have a lot of fun...\" > %{buildroot}/etc/motd
-# Bug 404141 - /etc/YaST/control.xml should be owned by some package
-mkdir -p %{buildroot}/etc/YaST2/
-install -m 644 /CD1/control.xml %{buildroot}/etc/YaST2/
-install -m 644 -D /CD1/EULA.txt %{buildroot}/%{_docdir}/%{name}/%{product}-EULA.txt
+ $content.="mkdir -p %{buildroot}%{_sysconfdir}
+echo \"Have a lot of fun...\" > %{buildroot}%{_sysconfdir}/motd
+# Bug 404141 - %{_sysconfdir}/YaST/control.xml should be owned by some package
+mkdir -p %{buildroot}%{_sysconfdir}/YaST2/
+install -m 644 /CD1/control.xml %{buildroot}%{_sysconfdir}/YaST2/
+install -m 644 -D /CD1/EULA.txt %{buildroot}%{_docdir}/%{name}/%{product}-EULA.txt
";
}
- $content="mkdir -p %{buildroot}/etc/products.d";
+ $content="mkdir -p %{buildroot}%{_sysconfdir}/products.d";
return $content;
}
@@ -1140,8 +1142,8 @@
sub createOsReleaseFile ($) {
my ($prodRef, $product) = @_;
my $bugtracker=getUrl($product,"i586","bugtracker");
- my $rfile = "\$RPM_BUILD_ROOT/etc/os-release";
- my $os_release_file = "mkdir -p \$RPM_BUILD_ROOT/etc\n";
+ my $rfile = "%{buildroot}%{_sysconfdir}/os-release";
+ my $os_release_file = "mkdir -p %{buildroot}%{_sysconfdir}\n";
$os_release_file .= "cat >$rfile << EOF\n";
my $name = $product->{'name'};
# shall we strip ^SUSE_ here or better modify the name in SLE 12?
@@ -1153,10 +1155,10 @@
} else {
$os_release_file .= "VERSION=\"%{version}";
}
- $os_release_file .= "%{?betaversion: }%{?betaversion}\"\n";
+ $os_release_file .= "%{?betaversion: %{betaversion}}\"\n";
$os_release_file .= "VERSION_ID=\"%{version}\"\n";
- $os_release_file .= "PRETTY_NAME=\"".$product->{'summary'}[0]->{_content}."%{?betaversion: (}%{?betaversion}%{?betaversion:)}\"\n" if $product->{'summary'};
+ $os_release_file .= "PRETTY_NAME=\"".$product->{'summary'}[0]->{_content}."%{?betaversion: (%{betaversion})}\"\n" if $product->{'summary'};
$os_release_file .= "BUG_REPORT_URL=\"".$bugtracker."\"\n" if $bugtracker;
$os_release_file .= "ID=\"".lc($name)."\"\n";
$os_release_file .= "ID_LIKE=\"suse\"\n"; # We assume that the system is SuSE like when you use this converter
@@ -1173,14 +1175,14 @@
# not wanted there
delete $zypp_product->{'releasepkgname'};
my $d;
- my $pfile = "\$RPM_BUILD_ROOT/etc/products.d/$product->{name}.prod";
+ my $pfile = "%{buildroot}%{_sysconfdir}/products.d/$product->{name}.prod";
if (defined($product->{'endoflife'})&& $product->{'endoflife'} ne ""
&& !($product->{'endoflife'} =~ /^[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]$/)) {
die("400 endoflife not in ISO 8601 format (YYYY-MM-DD)");
}
my $mediaStyle = getMediaStyle($prodRef);
- $zypp_product_file = "mkdir -p \$RPM_BUILD_ROOT/etc/products.d\n";
+ $zypp_product_file = "mkdir -p %{buildroot}%{_sysconfdir}/products.d\n";
$zypp_product->{'arch'} = '%{_target_cpu}'; # write product architecture during rpm build
$zypp_product->{'schemeversion'} = "0";
my $cpe = getCpeId($prodRef, $product);
@@ -1313,6 +1315,11 @@
if ( not defined($product->{'installconfig'}->{'releasepackage'}->{'version'}) );
$product->{'installconfig'}->{'releasepackage'}->{'release'} = '%{release}'
if ( not defined($product->{'installconfig'}->{'releasepackage'}->{'release'}) );
+if (defined($product->{'installconfig'}->{'default_obs_repository_name'})) {
+ # only for newer product files where a default name has been configured
+ $product->{'installconfig'}->{'default_obs_download_url'} = '%{_download_url}'
+ if ( not defined($product->{'installconfig'}->{'default_obs_download_url'}) );
+};
#
# Create a kiwi configuration for each distribution flavor
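For readers following the spec template changes above: the %{?macro:body} construct used in the VERSION/PRETTY_NAME lines expands its body only when the macro is defined, so folding %{?betaversion: (}%{?betaversion}%{?betaversion:)} into the single %{?betaversion: (%{betaversion})} keeps the same output with one conditional. A quick, hedged way to check the expansion locally (assuming rpm is installed; "openSUSE" is only a placeholder for the real summary text):

    # with betaversion defined -> PRETTY_NAME="openSUSE (Beta1)"
    rpm --define 'betaversion Beta1' \
        --eval 'PRETTY_NAME="openSUSE%{?betaversion: (%{betaversion})}"'
    # with betaversion undefined -> PRETTY_NAME="openSUSE"
    rpm --eval 'PRETTY_NAME="openSUSE%{?betaversion: (%{betaversion})}"'
    # the path macros swapped in above resolve to the usual locations
    rpm --eval '%{_sysconfdir}'    # typically /etc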
++++++ obs-service-product_converter.obsinfo ++++++
--- /var/tmp/diff_new_pack.32IrYr/_old 2021-07-01 07:05:48.563395086 +0200
+++ /var/tmp/diff_new_pack.32IrYr/_new 2021-07-01 07:05:48.567395056 +0200
@@ -1,5 +1,5 @@
name: obs-service-product_converter
-version: 1.4.5
-mtime: 1605026242
-commit: fd03b71053a57ae8c5cda73588b04231cda7706d
+version: 1.4.6
+mtime: 1625038359
+commit: 629300c500a764806c83f3b4e77b1b1f93dc5355
Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package drumstick for openSUSE:Factory checked in at 2021-07-01 07:05:35
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/drumstick (Old)
and /work/SRC/openSUSE:Factory/.drumstick.new.2625 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "drumstick"
Thu Jul 1 07:05:35 2021 rev:10 rq:903175 version:2.3.0
Changes:
--------
--- /work/SRC/openSUSE:Factory/drumstick/drumstick.changes 2021-05-20 19:26:07.261681478 +0200
+++ /work/SRC/openSUSE:Factory/.drumstick.new.2625/drumstick.changes 2021-07-01 07:05:46.379412147 +0200
@@ -1,0 +2,9 @@
+Wed Jun 30 06:25:38 UTC 2021 - Christophe Giboudeaux <christophe(a)krop.fr>
+
+- Update to 2.3.0. Changes since 2.2.0:
+ * experimental cmake support for building with Qt6
+ * New options: USE_PULSEAUDIO, USE_FLUIDSYNTH, USE_NETWORK
+ * Revised CMake buildsystem and documents
+ * Widgets: Fixed touch events - checked pressure capability
+
+-------------------------------------------------------------------
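The options listed in the 2.3.0 entry come from the revised CMake buildsystem; assuming they are ordinary CMake cache switches, a minimal sketch of toggling them when configuring a local build (option names taken from the changelog above, everything else is generic CMake usage, not taken from the package):

    cmake -B build -DCMAKE_BUILD_TYPE=Release \
          -DUSE_PULSEAUDIO=ON -DUSE_FLUIDSYNTH=ON -DUSE_NETWORK=OFF
    cmake --build build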
Old:
----
drumstick-2.2.0.tar.bz2
New:
----
drumstick-2.3.0.tar.bz2
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ drumstick.spec ++++++
--- /var/tmp/diff_new_pack.9FHm9L/_old 2021-07-01 07:05:46.899408085 +0200
+++ /var/tmp/diff_new_pack.9FHm9L/_new 2021-07-01 07:05:46.903408053 +0200
@@ -18,7 +18,7 @@
Name: drumstick
-Version: 2.2.0
+Version: 2.3.0
Release: 0
Summary: MIDI Sequencer C++ Library Bindings
License: GPL-2.0-or-later AND GPL-3.0-or-later
++++++ drumstick-2.2.0.tar.bz2 -> drumstick-2.3.0.tar.bz2 ++++++
++++ 12641 lines of diff (skipped)
Script 'mail_helper' called by obssrc
Hello community,
here is the log from the commit of package bonnie++ for openSUSE:Factory checked in at 2021-07-01 07:05:34
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/bonnie++ (Old)
and /work/SRC/openSUSE:Factory/.bonnie++.new.2625 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "bonnie++"
Thu Jul 1 07:05:34 2021 rev:26 rq:903168 version:1.98
Changes:
--------
--- /work/SRC/openSUSE:Factory/bonnie++/bonnie++.changes 2020-09-14 12:31:04.065203466 +0200
+++ /work/SRC/openSUSE:Factory/.bonnie++.new.2625/bonnie++.changes 2021-07-01 07:05:45.343420240 +0200
@@ -1,0 +2,6 @@
+Fri Jun 25 23:37:25 UTC 2021 - Fernando Mattioli <mattioli.fernando(a)gmail.com>
+
+- Add patch:
+ * bonnie++-1.98-bon_csv2html.patch (fixes gcc11 build).
+
+-------------------------------------------------------------------
New:
----
bonnie++-1.98-bon_csv2html.patch
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ bonnie++.spec ++++++
--- /var/tmp/diff_new_pack.AXNQBa/_old 2021-07-01 07:05:45.979415271 +0200
+++ /var/tmp/diff_new_pack.AXNQBa/_new 2021-07-01 07:05:45.983415241 +0200
@@ -1,7 +1,7 @@
#
# spec file for package bonnie++
#
-# Copyright (c) 2020 SUSE LLC
+# Copyright (c) 2021 SUSE LLC
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -25,6 +25,7 @@
Source: https://www.coker.com.au/bonnie++/bonnie++-%{version}.tgz
# PATCH-FIX-UPSTREAM bonnie++-1.96-makefile.patch -- fixes make install
Patch0: bonnie++-1.96-makefile.patch
+Patch1: bonnie++-1.98-bon_csv2html.patch
BuildRequires: gcc-c++
%description
@@ -34,6 +35,7 @@
%prep
%setup -q
%patch0
+%patch1 -p1
%build
%configure \
++++++ bonnie++-1.98-bon_csv2html.patch ++++++
bon_csv2html.cpp: renames the global variable "data" to "v_data" to avoid the "reference to data is ambiguous" error.
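For context only (this note is not part of the patch): the file combines "using namespace std;" with a global vector named "data", and C++17 adds std::data, so unqualified uses of the name become ambiguous once the compiler defaults to C++17 (as GCC 11 does). A minimal, self-contained reproduction of the error, independent of bonnie++:

    // repro.cpp -- compile with: g++ -std=c++17 repro.cpp
    #include <vector>
    using namespace std;        // makes std::data (C++17) visible unqualified

    vector<int> data;           // global with the same name

    int main()
    {
        // error: reference to 'data' is ambiguous (::data vs. std::data)
        return (int)data.size();
    }
    // Renaming the global (to v_data, as the patch does) or writing ::data
    // resolves the lookup ambiguity.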
Index: bonnie++-1.98/bon_csv2html.cpp
===================================================================
--- bonnie++-1.98.orig/bon_csv2html.cpp
+++ bonnie++-1.98/bon_csv2html.cpp
@@ -10,7 +10,7 @@
using namespace std;
typedef vector<PCCHAR> STR_VEC;
-vector<STR_VEC> data;
+vector<STR_VEC> v_data;
typedef PCCHAR * PPCCHAR;
PPCCHAR * props;
@@ -87,8 +87,8 @@ int main(int argc, char **argv)
read_in(buf);
}
- props = new PPCCHAR[data.size()];
- for(i = 0; i < data.size(); i++)
+ props = new PPCCHAR[v_data.size()];
+ for(i = 0; i < v_data.size(); i++)
{
props[i] = new PCCHAR[MAX_ITEMS];
props[i][0] = NULL;
@@ -109,7 +109,7 @@ int main(int argc, char **argv)
}
calc_vals();
int mid_width = header();
- for(i = 0; i < data.size(); i++)
+ for(i = 0; i < v_data.size(); i++)
{
// First print the average speed line
printf("<tr>");
@@ -171,23 +171,23 @@ int compar(const void *a, const void *b)
void calc_vals()
{
- ITEM *arr = new ITEM[data.size()];
+ ITEM *arr = new ITEM[v_data.size()];
for(unsigned int column_ind = 0; column_ind < MAX_ITEMS; column_ind++)
{
switch(vals[column_ind])
{
case eNoCols:
{
- for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
+ for(unsigned int row_ind = 0; row_ind < v_data.size(); row_ind++)
{
if(column_ind == COL_CONCURRENCY)
{
- if(data[row_ind][column_ind] && strcmp("1", data[row_ind][column_ind]))
+ if(v_data[row_ind][column_ind] && strcmp("1", v_data[row_ind][column_ind]))
col_used[column_ind] = true;
}
else
{
- if(data[row_ind][column_ind] && strlen(data[row_ind][column_ind]))
+ if(v_data[row_ind][column_ind] && strlen(v_data[row_ind][column_ind]))
col_used[column_ind] = true;
}
}
@@ -195,22 +195,22 @@ void calc_vals()
break;
case eCPU:
{
- for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
+ for(unsigned int row_ind = 0; row_ind < v_data.size(); row_ind++)
{
double work, cpu;
arr[row_ind].val = 0.0;
- if(data[row_ind].size() > column_ind
- && sscanf(data[row_ind][column_ind - 1], "%lf", &work) == 1
- && sscanf(data[row_ind][column_ind], "%lf", &cpu) == 1)
+ if(v_data[row_ind].size() > column_ind
+ && sscanf(v_data[row_ind][column_ind - 1], "%lf", &work) == 1
+ && sscanf(v_data[row_ind][column_ind], "%lf", &cpu) == 1)
{
arr[row_ind].val = cpu / work;
}
arr[row_ind].pos = row_ind;
}
- qsort(arr, data.size(), sizeof(ITEM), compar);
+ qsort(arr, v_data.size(), sizeof(ITEM), compar);
int col_count = -1;
double min_col = -1.0, max_col = -1.0;
- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
+ for(unsigned int sort_ind = 0; sort_ind < v_data.size(); sort_ind++)
{
// if item is different from previous or if the first row
// (sort_ind == 0) then increment col count
@@ -239,7 +239,7 @@ void calc_vals()
min_col /= mult;
}
double range_col = max_col - min_col;
- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
+ for(unsigned int sort_ind = 0; sort_ind < v_data.size(); sort_ind++)
{
if(arr[sort_ind].col_ind > -1)
{
@@ -250,7 +250,7 @@ void calc_vals()
}
else
{
- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
+ for(unsigned int sort_ind = 0; sort_ind < v_data.size(); sort_ind++)
{
if(vals[column_ind] == eLatency)
{
@@ -263,25 +263,25 @@ void calc_vals()
case eSpeed:
case eLatency:
{
- for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
+ for(unsigned int row_ind = 0; row_ind < v_data.size(); row_ind++)
{
arr[row_ind].val = 0.0;
- if(data[row_ind].size() <= column_ind
- || sscanf(data[row_ind][column_ind], "%lf", &arr[row_ind].val) == 0)
+ if(v_data[row_ind].size() <= column_ind
+ || sscanf(v_data[row_ind][column_ind], "%lf", &arr[row_ind].val) == 0)
arr[row_ind].val = 0.0;
if(vals[column_ind] == eLatency && arr[row_ind].val != 0.0)
{
- if(strstr(data[row_ind][column_ind], "ms"))
+ if(strstr(v_data[row_ind][column_ind], "ms"))
arr[row_ind].val *= 1000.0;
- else if(!strstr(data[row_ind][column_ind], "us"))
+ else if(!strstr(v_data[row_ind][column_ind], "us"))
arr[row_ind].val *= 1000000.0; // is !us && !ms then secs!
}
arr[row_ind].pos = row_ind;
}
- qsort(arr, data.size(), sizeof(ITEM), compar);
+ qsort(arr, v_data.size(), sizeof(ITEM), compar);
int col_count = -1;
double min_col = -1.0, max_col = -1.0;
- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
+ for(unsigned int sort_ind = 0; sort_ind < v_data.size(); sort_ind++)
{
// if item is different from previous or if the first row
// (sort_ind == 0) then increment col count
@@ -310,7 +310,7 @@ void calc_vals()
min_col /= mult;
}
double range_col = max_col - min_col;
- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
+ for(unsigned int sort_ind = 0; sort_ind < v_data.size(); sort_ind++)
{
if(arr[sort_ind].col_ind > -1)
{
@@ -332,7 +332,7 @@ void calc_vals()
}
else
{
- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
+ for(unsigned int sort_ind = 0; sort_ind < v_data.size(); sort_ind++)
{
if(vals[column_ind] == eLatency)
{
@@ -481,16 +481,16 @@ void read_in(CPCCHAR buf)
free((void *)arr[0]);
return;
}
- data.push_back(arr);
+ v_data.push_back(arr);
}
void print_item(int num, int item, CPCCHAR extra)
{
PCCHAR line_data;
char buf[1024];
- if(int(data[num].size()) > item)
+ if(int(v_data[num].size()) > item)
{
- line_data = data[num][item];
+ line_data = v_data[num][item];
switch(item)
{
case COL_PUT_BLOCK: