czds - ICANN Centralized Zone Data Service Tool
git clone git://git.acid.vegas/czds.git

commit 6585a7b081932b27613202d05c56f2221c857d99
parent aea3ae6a6decb0804a4a2db13865215bae590957
Author: acidvegas <acid.vegas@acid.vegas>
Date: Fri, 21 Mar 2025 21:22:57 -0400

version updated

Diffstat:
M czds/__init__.py |   2 +-
M czds/client.py   | 146 ++++++++++++++++++++++++++++++++++++++++++++++++-------------------------
M setup.py         |   2 +-

3 files changed, 90 insertions(+), 60 deletions(-)

diff --git a/czds/__init__.py b/czds/__init__.py
@@ -5,7 +5,7 @@
 from .client import CZDS
 
 
-__version__ = '1.2.8'
+__version__ = '1.2.9'
 __author__  = 'acidvegas'
 __email__   = 'acid.vegas@acid.vegas'
 __github__  = 'https://github.com/acidvegas/czds'
 \ No newline at end of file
diff --git a/czds/client.py b/czds/client.py
@@ -36,8 +36,11 @@ class CZDS:
         
         self.username = username
         self.password = password
-        self.session  = aiohttp.ClientSession()
-        self.headers  = None
+
+        # Configure longer timeouts and proper SSL settings
+        timeout = aiohttp.ClientTimeout(total=None, connect=60, sock_connect=60, sock_read=60)
+        self.session = aiohttp.ClientSession(timeout=timeout)
+        self.headers = None
 
         logging.info('Initialized CZDS client')
 
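The session setup above removes the overall request deadline while capping connection setup and socket reads at 60 seconds each. A minimal standalone sketch of that configuration, assuming only that aiohttp is installed (the URL is a placeholder, not a CZDS endpoint):

    import asyncio
    import aiohttp

    async def main():
        # No total deadline (zone files can be very large), but connection
        # setup and each socket read are capped at 60 seconds.
        timeout = aiohttp.ClientTimeout(total=None, connect=60, sock_connect=60, sock_read=60)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get('https://example.com/') as response:
                print(response.status)

    asyncio.run(main())
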
@@ -186,73 +189,100 @@ class CZDS:
         
         async def _download():
             tld = url.split('/')[-1].split('.')[0]  # Extract TLD from URL
-            logging.info(f'Starting download of {tld} zone file')
+            max_retries = 3
+            retry_delay = 5  # seconds
             
-            try:
-                async with self.session.get(url, headers=self.headers) as response:
-                    if response.status != 200:
-                        error_msg = f'Failed to download {tld}: {response.status} {await response.text()}'
-                        logging.error(error_msg)
-                        raise Exception(error_msg)
-
-                    # Get expected file size from headers
-                    expected_size = int(response.headers.get('Content-Length', 0))
-                    if not expected_size:
-                        logging.warning(f'No Content-Length header for {tld}')
-
-                    if not (content_disposition := response.headers.get('Content-Disposition')):
-                        error_msg = f'Missing Content-Disposition header for {tld}'
-                        logging.error(error_msg)
-                        raise ValueError(error_msg)
-
-                    filename = content_disposition.split('filename=')[-1].strip('"')
-                    filepath = os.path.join(output_directory, filename)
-
-                    async with aiofiles.open(filepath, 'wb') as file:
-                        total_size = 0
-                        last_progress = 0
-                        async for chunk in response.content.iter_chunked(8192):
-                            await file.write(chunk)
-                            total_size += len(chunk)
-                            if expected_size:
-                                progress = int((total_size / expected_size) * 100)
-                                if progress >= last_progress + 5:  # Log every 5% increase
-                                    logging.info(f'Downloading {tld}: {progress}% ({total_size:,}/{expected_size:,} bytes)')
-                                    last_progress = progress
+            for attempt in range(max_retries):
+                try:
+                    logging.info(f'Starting download of {tld} zone file{" (attempt " + str(attempt + 1) + ")" if attempt > 0 else ""}')
+                    
+                    async with self.session.get(url, headers=self.headers, timeout=aiohttp.ClientTimeout(total=3600)) as response:
+                        if response.status != 200:
+                            error_msg = f'Failed to download {tld}: {response.status} {await response.text()}'
+                            logging.error(error_msg)
+                            if attempt + 1 < max_retries:
+                                logging.info(f'Retrying {tld} in {retry_delay} seconds...')
+                                await asyncio.sleep(retry_delay)
+                                continue
+                            raise Exception(error_msg)
+
+                        # Get expected file size from headers
+                        expected_size = int(response.headers.get('Content-Length', 0))
+                        if not expected_size:
+                            logging.warning(f'No Content-Length header for {tld}')
+
+                        if not (content_disposition := response.headers.get('Content-Disposition')):
+                            error_msg = f'Missing Content-Disposition header for {tld}'
+                            logging.error(error_msg)
+                            raise ValueError(error_msg)
+
+                        filename = content_disposition.split('filename=')[-1].strip('"')
+                        filepath = os.path.join(output_directory, filename)
+
+                        async with aiofiles.open(filepath, 'wb') as file:
+                            total_size = 0
+                            last_progress = 0
+                            try:
+                                async for chunk in response.content.iter_chunked(8192):
+                                    await file.write(chunk)
+                                    total_size += len(chunk)
+                                    if expected_size:
+                                        progress = int((total_size / expected_size) * 100)
+                                        if progress >= last_progress + 5:
+                                            logging.info(f'Downloading {tld}: {progress}% ({total_size:,}/{expected_size:,} bytes)')
+                                            last_progress = progress
+                            except (asyncio.TimeoutError, aiohttp.ClientError) as e:
+                                logging.error(f'Connection error while downloading {tld}: {str(e)}')
+                                if attempt + 1 < max_retries:
+                                    logging.info(f'Retrying {tld} in {retry_delay} seconds...')
+                                    await asyncio.sleep(retry_delay)
+                                    continue
+                                raise
 
                         # Verify file size
                         if expected_size and total_size != expected_size:
                             error_msg = f'Incomplete download for {tld}: Got {total_size} bytes, expected {expected_size} bytes'
                             logging.error(error_msg)
-                            os.remove(filepath)  # Clean up incomplete file
+                            os.remove(filepath)
+                            if attempt + 1 < max_retries:
+                                logging.info(f'Retrying {tld} in {retry_delay} seconds...')
+                                await asyncio.sleep(retry_delay)
+                                continue
                             raise Exception(error_msg)
                         
                         size_mb = total_size / (1024 * 1024)
                         logging.info(f'Successfully downloaded {tld} zone file ({size_mb:.2f} MB)')
 
-                    if decompress:
-                        try:
-                            # Verify gzip integrity before decompressing
-                            with gzip.open(filepath, 'rb') as test_gzip:
-                                test_gzip.read(1)  # Try reading first byte to verify gzip integrity
-                            
-                            await self.gzip_decompress(filepath, cleanup)
-                            filepath = filepath[:-3]  # Remove .gz extension
-                            logging.info(f'Decompressed {tld} zone file')
-                        except (gzip.BadGzipFile, OSError) as e:
-                            error_msg = f'Failed to decompress {tld}: {str(e)}'
-                            logging.error(error_msg)
-                            os.remove(filepath)  # Clean up corrupted file
-                            raise Exception(error_msg)
-
-                    return filepath
-
-            except Exception as e:
-                logging.error(f'Error downloading {tld}: {str(e)}')
-                # Clean up any partial downloads
-                if 'filepath' in locals() and os.path.exists(filepath):
-                    os.remove(filepath)
-                raise
+                        if decompress:
+                            try:
+                                with gzip.open(filepath, 'rb') as test_gzip:
+                                    test_gzip.read(1)
+                                
+                                await self.gzip_decompress(filepath, cleanup)
+                                filepath = filepath[:-3]
+                                logging.info(f'Decompressed {tld} zone file')
+                            except (gzip.BadGzipFile, OSError) as e:
+                                error_msg = f'Failed to decompress {tld}: {str(e)}'
+                                logging.error(error_msg)
+                                os.remove(filepath)
+                                raise Exception(error_msg)
+
+                        return filepath
+
+                except (aiohttp.ClientError, asyncio.TimeoutError) as e:
+                    if attempt + 1 >= max_retries:
+                        logging.error(f'Failed to download {tld} after {max_retries} attempts: {str(e)}')
+                        if 'filepath' in locals() and os.path.exists(filepath):
+                            os.remove(filepath)
+                        raise
+                    logging.warning(f'Download attempt {attempt + 1} failed for {tld}: {str(e)}')
+                    await asyncio.sleep(retry_delay)
+
+                except Exception as e:
+                    logging.error(f'Error downloading {tld}: {str(e)}')
+                    if 'filepath' in locals() and os.path.exists(filepath):
+                        os.remove(filepath)
+                    raise
 
         if semaphore:
             async with semaphore:
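The reworked download path retries transient failures up to three times with a fixed five-second delay and applies a separate one-hour per-request timeout. A stripped-down sketch of that retry pattern, with a generic fetch coroutine standing in for the zone-file download (the function name and return type are illustrative, not part of the client):

    import asyncio
    import aiohttp

    async def fetch_with_retries(session: aiohttp.ClientSession, url: str,
                                 max_retries: int = 3, retry_delay: int = 5) -> bytes:
        # Attempt the request up to max_retries times, sleeping retry_delay
        # seconds between attempts and re-raising the final failure.
        for attempt in range(max_retries):
            try:
                async with session.get(url, timeout=aiohttp.ClientTimeout(total=3600)) as response:
                    response.raise_for_status()
                    return await response.read()
            except (aiohttp.ClientError, asyncio.TimeoutError):
                if attempt + 1 >= max_retries:
                    raise
                await asyncio.sleep(retry_delay)
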
diff --git a/setup.py b/setup.py
@@ -11,7 +11,7 @@ with open('README.md', 'r', encoding='utf-8') as fh:
 
 setup(
 	name='czds-api',
-	version='1.2.8',
+	version='1.2.9',
 	author='acidvegas',
 	author_email='acid.vegas@acid.vegas',
 	description='ICANN API for the Centralized Zones Data Service',
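
The decompress step in client.py above reads a single byte through gzip.open() before inflating, so a truncated or corrupted download is caught and removed rather than producing a broken zone file. A small standalone version of that check, with a hypothetical file path and a plain stdlib decompression loop in place of the client's gzip_decompress helper:

    import gzip
    import os

    def verify_and_decompress(filepath: str) -> str:
        # Read one byte first to confirm the gzip stream is intact.
        try:
            with gzip.open(filepath, 'rb') as test_gzip:
                test_gzip.read(1)
        except (gzip.BadGzipFile, OSError):
            os.remove(filepath)  # drop the corrupted download
            raise
        # Stream-decompress next to the original file.
        output = filepath[:-3]  # strip the .gz extension
        with gzip.open(filepath, 'rb') as src, open(output, 'wb') as dst:
            while chunk := src.read(1024 * 1024):
                dst.write(chunk)
        return output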