Fix connection timeouts in URL downloader 27/1527/1
author Chamarty <ravi.chamarty@riftio.com>
Fri, 7 Apr 2017 15:37:31 +0000 (11:37 -0400)
committer Chamarty <ravi.chamarty@riftio.com>
Fri, 7 Apr 2017 15:37:31 +0000 (11:37 -0400)
Change-Id: I269868a69f16c3317cc343c4ec2a7fbe2e6a9387
Signed-off-by: Chamarty <ravi.chamarty@riftio.com>
common/python/rift/downloader/url.py
rwlaunchpad/plugins/rwpkgmgr/test/CMakeLists.txt
rwlaunchpad/plugins/rwpkgmgr/test/utest_publisher_dts.py

common/python/rift/downloader/url.py
index 2768894..7ffb999 100644 (file)
@@ -23,7 +23,6 @@ import os
 import tempfile
 import threading
 import time
-import uuid
 import zlib
 
 import requests
@@ -34,13 +33,8 @@ from requests.packages.urllib3.util.retry import Retry
 from requests.packages.urllib3.exceptions import InsecureRequestWarning
 requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
 
-import gi
-gi.require_version("RwPkgMgmtYang", "1.0")
-
-from gi.repository import RwPkgMgmtYang
 from . import base
 
-
 class UrlDownloader(base.AbstractDownloader):
     """Handles downloads of URL with some basic retry strategy.
     """
@@ -106,7 +100,9 @@ class UrlDownloader(base.AbstractDownloader):
 
     def _create_session(self):
         session = requests.Session()
-        retries = Retry(total=5, backoff_factor=1)
+        # Three connection attempts (the initial try plus two retries) are enough;
+        # we can't wait forever, and the user needs a timely status update.
+        retries = Retry(total=2, backoff_factor=1)
         session.mount("http://", HTTPAdapter(max_retries=retries))
         session.mount("https://", HTTPAdapter(max_retries=retries))
 
@@ -153,8 +149,8 @@ class UrlDownloader(base.AbstractDownloader):
 
             try:
                 os.remove(self.filepath)
-            except Exception as e:
-                self.log.exception(e)
+            except Exception:
+                pass  # best-effort cleanup; the file may not have been created
 
     def download(self):
         """Start the download
@@ -186,13 +182,16 @@ class UrlDownloader(base.AbstractDownloader):
 
     def _download(self):
 
-        url_options = {"verify": False}
+        url_options = {"verify": False, "timeout": 1}  # cap each connect/read attempt at 1 second
 
         if self.auth is not None:
             url_options["auth"] = self.auth
 
         response = self.session.head(self.url, **url_options)
 
+        if response.status_code != requests.codes.ok:
+            response.raise_for_status()
+
         # Prepare the meta data
         self.meta.update_data_with_head(response.headers)
         self.meta.start_download()
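
Note: the two changes above work together to bound how long a download can hang on an unreachable host. Retry(total=2, backoff_factor=1) allows the initial attempt plus two retries with short backoff pauses, and timeout=1 caps each connect/read attempt at one second. The following standalone sketch (not part of the patch; the URL is only a placeholder) reproduces that behaviour with the same requests/urllib3 calls used by UrlDownloader:

import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry

session = requests.Session()
# Initial attempt plus two retries, with exponential backoff between retries.
retries = Retry(total=2, backoff_factor=1)
session.mount("http://", HTTPAdapter(max_retries=retries))
session.mount("https://", HTTPAdapter(max_retries=retries))

try:
    # timeout=1 caps each connect/read attempt at one second.
    response = session.head("http://10.1.2.3/some/file", verify=False, timeout=1)
    response.raise_for_status()
except requests.exceptions.RequestException as exc:
    # ConnectionError, Timeout and HTTP errors all derive from RequestException,
    # so the failure surfaces within a few seconds instead of blocking.
    print("Download failed quickly:", exc)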
rwlaunchpad/plugins/rwpkgmgr/test/CMakeLists.txt
index a42e8e9..8f090a2 100644 (file)
@@ -43,6 +43,11 @@ rift_py3test(utest_publisher_dts.test_url_download
   ${CMAKE_CURRENT_SOURCE_DIR}/utest_publisher_dts.py TestCase.test_url_download
   )
 
+rift_py3test(utest_publisher_dts.test_url_download_unreachable_ip
+  TEST_ARGS
+  ${CMAKE_CURRENT_SOURCE_DIR}/utest_publisher_dts.py TestCase.test_url_download_unreachable_ip
+  )
+
 rift_py3test(utest_publisher_dts.test_cancelled
   TEST_ARGS
   ${CMAKE_CURRENT_SOURCE_DIR}/utest_publisher_dts.py TestCase.test_cancelled
@@ -53,6 +58,7 @@ add_custom_target(utest_publisher_dts.py
     utest_publisher_dts.test_download_publisher
     utest_publisher_dts.test_publish
     utest_publisher_dts.test_url_download
+    utest_publisher_dts.test_url_download_unreachable_ip
     utest_publisher_dts.test_cancelled
   )
 
rwlaunchpad/plugins/rwpkgmgr/test/utest_publisher_dts.py
index a02e5c6..518b927 100755 (executable)
@@ -145,7 +145,7 @@ class TestCase(rift.test.dts.AbstractDTSTest):
         assert download_id is not None
        
-        # Waiting for 5 secs to be sure that the file is downloaded
-        yield from asyncio.sleep(5, loop=self.loop)
+        # Waiting for 10 secs to be sure that the file is downloaded
+        yield from asyncio.sleep(10, loop=self.loop)
         xpath = "/download-jobs/job[download-id='{}']".format(
             download_id)
         result = yield from self.read_xpath(xpath)
@@ -153,6 +153,32 @@ class TestCase(rift.test.dts.AbstractDTSTest):
         assert result.status == "COMPLETED"
         assert len(self.job_handler.tasks) == 0
 
+    @rift.test.dts.async_test
+    def test_url_download_unreachable_ip(self):
+        """
+        Integration Test:
+            Ensure that an unreachable IP address does not block the download forever
+        """
+        yield from self.job_handler.register()
+
+        proxy = mock.MagicMock()
+
+        # Here, we are assuming that there is no HTTP server at 10.1.2.3
+        url = "http://10.1.2.3/common/unittests/plantuml.jar"
+        url_downloader = downloader.PackageFileDownloader(url, "1", "/", "VNFD", proxy)
+
+        download_id = yield from self.job_handler.register_downloader(url_downloader)
+        assert download_id is not None
+       
+        # Waiting for 10 secs to be sure all reconnect attempts have been exhausted
+        yield from asyncio.sleep(10, loop=self.loop)
+        xpath = "/download-jobs/job[download-id='{}']".format(
+            download_id)
+        result = yield from self.read_xpath(xpath)
+        self.log.debug("Test result before complete check - %s", result)
+        assert result.status == "FAILED"
+        assert len(self.job_handler.tasks) == 0
+
 
     @rift.test.dts.async_test
     def test_cancelled(self):
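
A possible follow-up for these tests (a sketch only, not part of this change): instead of a fixed asyncio.sleep, a helper method on TestCase could poll the job until it reaches a terminal status, using the read_xpath and loop helpers the class already provides. The 10-second ceiling and 1-second poll interval below are assumptions chosen to match the sleeps above:

@asyncio.coroutine
def wait_for_job_status(self, download_id, timeout=10, interval=1):
    """Poll the download job until it is COMPLETED or FAILED, or the timeout expires."""
    xpath = "/download-jobs/job[download-id='{}']".format(download_id)
    waited = 0
    while waited < timeout:
        result = yield from self.read_xpath(xpath)
        if result.status in ("COMPLETED", "FAILED"):
            return result
        yield from asyncio.sleep(interval, loop=self.loop)
        waited += interval
    # Return the last observed state so the caller's assertion message is useful.
    return (yield from self.read_xpath(xpath))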