

# import os
# import time
# import re
# import requests
# import cloudinary
# from cloudinary import uploader, api
# from datetime import datetime
# from dateutil import parser
# from watchdog.observers import Observer
# from watchdog.events import FileSystemEventHandler

# # Configure Cloudinary with your credentials
# cloudinary.config(
#     cloud_name='di7xjvhdc',
#     api_key='365434738567961',
#     api_secret='0GAWSyHUKbFhwqbNBUkVQIYnqLM'
# )

# local_folder_path = 'public/uploads'
# cloudinary_folder_path = 'public/uploads'

# # Timeout mechanism
# TIMEOUT = 2 * 60  # 10 minutes in seconds
# last_sync_time = None
# last_local_modification = None

# # List to store files pending upload when offline
# pending_uploads = []

# def is_online():
#     try:
#         requests.get('http://www.google.com', timeout=5)
#         print("Online check: Online")
#         return True
#     except requests.RequestException:
#         print("Online check: Offline")
#         return False

# def get_all_local_files(base_path):
#     all_files = []
#     for root, dirs, files in os.walk(base_path):
#         for file in files:
#             all_files.append(os.path.join(root, file))
#     return all_files

# def get_cloudinary_files(cloudinary_path):
#     try:
#         response = api.resources(type='upload', prefix=cloudinary_path, max_results=500)  # Max results per request
#         return [item['public_id'] for item in response['resources']]
#     except Exception as e:
#         print(f'Error retrieving files from Cloudinary: {e}')
#         return []

# def file_needs_upload(local_file_path, cloudinary_files):
#     relative_path = os.path.relpath(local_file_path, local_folder_path)
#     cloudinary_file_path = os.path.join(cloudinary_folder_path, relative_path).replace("\\", "/")

#     return cloudinary_file_path not in cloudinary_files

# def upload_files_to_cloudinary(local_path, cloudinary_path, cloudinary_files):
#     local_files = get_all_local_files(local_path)
#     for local_file_path in local_files:
#         if file_needs_upload(local_file_path, cloudinary_files):
#             relative_path = os.path.relpath(local_file_path, local_path)
#             cloudinary_file_path = os.path.join(cloudinary_path, relative_path).replace("\\", "/")
#             resource_type = 'raw' if any(local_file_path.lower().endswith(ext) for ext in ['.xlsx', '.xls', '.csv', '.txt', '.pdf']) else 'image'
#             try:
#                 uploader.upload(local_file_path, public_id=cloudinary_file_path, resource_type=resource_type)
#                 print(f'Uploaded {relative_path} to Cloudinary as {resource_type}')
#             except Exception as e:
#                 print(f'Error uploading {relative_path}: {e}')

# def upload_folders_to_cloudinary(local_path, cloudinary_path, cloudinary_files):
#     for root, dirs, files in os.walk(local_path):
#         for dir in dirs:
#             local_dir_path = os.path.join(root, dir)
#             if file_needs_upload(local_dir_path, cloudinary_files):
#                 relative_path = os.path.relpath(local_dir_path, local_path)
#                 cloudinary_dir_path = os.path.join(cloudinary_path, relative_path).replace("\\", "/")
#                 try:
#                     api.create_folder(cloudinary_dir_path)
#                     print(f'Created folder {relative_path} on Cloudinary')
#                 except cloudinary.exceptions.RateLimited as e:
#                     print(f'Rate limit exceeded. Waiting for rate limit reset...')
#                     wait_time = extract_wait_time(e.message)
#                     time.sleep(wait_time)
#                     upload_folders_to_cloudinary(local_path, cloudinary_path, cloudinary_files)  # Retry recursively
#                 except cloudinary.exceptions.Error as e:
#                     if e.http_code == 400:
#                         # Folder already exists
#                         print(f'Folder {relative_path} already exists on Cloudinary: {e}')
#                         pass
#                     else:
#                         print(f'Error creating folder {relative_path} on Cloudinary: {e}')

# def extract_wait_time(error_message):
#     match = re.search(r'Try again on (.*) UTC', error_message)
#     if match:
#         time_str = match.group(1)
#         reset_time = datetime.strptime(time_str, '%Y-%m-%d %H:%M:%S')
#         current_time = datetime.utcnow()
#         wait_time = (reset_time - current_time).total_seconds()
#         return max(wait_time, 0)
#     else:
#         return 60  # Default wait time in case of failure to extract

# def sync_folders():
#     global last_sync_time, last_local_modification
#     current_time = time.time()

#     # Check if any files have been modified since last sync
#     current_local_modification = max(os.path.getmtime(f) for f in get_all_local_files(local_folder_path))
#     if last_local_modification and current_local_modification == last_local_modification:
#         return

#     last_local_modification = current_local_modification

#     if is_online():
#         cloudinary_files = get_cloudinary_files(cloudinary_folder_path)
#         upload_folders_to_cloudinary(local_folder_path, cloudinary_folder_path, cloudinary_files)
#         upload_files_to_cloudinary(local_folder_path, cloudinary_folder_path, cloudinary_files)
        
#         # Process pending uploads
#         for local_file_path in pending_uploads:
#             relative_path = os.path.relpath(local_file_path, local_folder_path)
#             cloudinary_file_path = os.path.join(cloudinary_folder_path, relative_path).replace("\\", "/")
#             resource_type = 'raw' if any(local_file_path.lower().endswith(ext) for ext in ['.xlsx', '.xls', '.csv', '.txt', '.pdf']) else 'image'
#             try:
#                 uploader.upload(local_file_path, public_id=cloudinary_file_path, resource_type=resource_type)
#                 print(f'Uploaded pending file {relative_path} to Cloudinary as {resource_type}')
#             except Exception as e:
#                 print(f'Error uploading pending file {relative_path}: {e}')
        
#         # Clear the pending uploads list after successful upload
#         pending_uploads.clear()

#         last_sync_time = current_time  # Update last sync time
#     else:
#         print("Currently offline. Monitoring for file changes.")

# class FileEventHandler(FileSystemEventHandler):
#     def __init__(self, sync_function):
#         self.sync_function = sync_function

#     def on_any_event(self, event):
#         if event.is_directory:
#             return
#         self.sync_function()

# def main():
#     event_handler = FileEventHandler(sync_folders)
#     observer = Observer()
#     observer.schedule(event_handler, local_folder_path, recursive=True)
#     observer.start()

#     try:
#         while True:
#             if is_online():
#                 sync_folders()
#             else:
#                 # Collect pending uploads while offline
#                 local_files = get_all_local_files(local_folder_path)
#                 for local_file_path in local_files:
#                     if local_file_path not in pending_uploads:
#                         pending_uploads.append(local_file_path)
#                 print("Offline. Collected files for later upload.")
#             time.sleep(30)  # Wait for 30 seconds before checking again
#     except KeyboardInterrupt:
#         observer.stop()
#         observer.join()

# if __name__ == "__main__":
#     main()








# import os
# import time
# import re
# import requests
# import cloudinary
# from cloudinary import uploader, api
# from datetime import datetime
# from watchdog.observers import Observer
# from watchdog.events import FileSystemEventHandler

# # Configure Cloudinary with your credentials
# cloudinary.config(
#     cloud_name='di7xjvhdc',
#     api_key='365434738567961',
#     api_secret='0GAWSyHUKbFhwqbNBUkVQIYnqLM'
# )

# local_folder_path = 'public/uploads'
# cloudinary_folder_path = 'public/uploads'

# # Timeout mechanism
# TIMEOUT = 2 * 60  # 10 minutes in seconds
# last_sync_time = None
# last_local_modification = None

# # List to store files pending upload when offline
# pending_uploads = []
# pending_deletions = []

# def is_online():
#     try:
#         requests.get('http://www.google.com', timeout=5)
#         print("Online check: Online")
#         return True
#     except requests.RequestException:
#         print("Online check: Offline")
#         return False

# def get_all_local_files(base_path):
#     all_files = []
#     for root, dirs, files in os.walk(base_path):
#         for file in files:
#             all_files.append(os.path.join(root, file))
#     return all_files

# def get_all_local_folders(base_path):
#     all_folders = []
#     for root, dirs, files in os.walk(base_path):
#         for dir in dirs:
#             all_folders.append(os.path.join(root, dir))
#     return all_folders

# def get_cloudinary_files(cloudinary_path):
#     try:
#         response = api.resources(type='upload', prefix=cloudinary_path, max_results=500)
#         return [item['public_id'] for item in response['resources']]
#     except Exception as e:
#         print(f'Error retrieving files from Cloudinary: {e}')
#         return []

# def get_cloudinary_folders(cloudinary_path):
#     try:
#         response = api.root_folders()
#         folders = response['folders']
#         return [folder['path'] for folder in folders]
#     except Exception as e:
#         print(f'Error retrieving folders from Cloudinary: {e}')
#         return []

# def file_needs_upload(local_file_path, cloudinary_files):
#     relative_path = os.path.relpath(local_file_path, local_folder_path)
#     cloudinary_file_path = os.path.join(cloudinary_folder_path, relative_path).replace("\\", "/")

#     return cloudinary_file_path not in cloudinary_files

# def file_needs_deletion(cloudinary_file_path, local_files):
#     relative_path = os.path.relpath(cloudinary_file_path, cloudinary_folder_path)
#     local_file_path = os.path.join(local_folder_path, relative_path)

#     return not os.path.exists(local_file_path)

# def upload_files_to_cloudinary(local_path, cloudinary_path, cloudinary_files):
#     local_files = get_all_local_files(local_path)
#     for local_file_path in local_files:
#         if file_needs_upload(local_file_path, cloudinary_files):
#             relative_path = os.path.relpath(local_file_path, local_path)
#             cloudinary_file_path = os.path.join(cloudinary_path, relative_path).replace("\\", "/")
#             resource_type = 'raw' if any(local_file_path.lower().endswith(ext) for ext in ['.xlsx', '.xls', '.csv', '.txt', '.pdf']) else 'image'
#             try:
#                 uploader.upload(local_file_path, public_id=cloudinary_file_path, resource_type=resource_type)
#                 print(f'Uploaded {relative_path} to Cloudinary as {resource_type}')
#             except Exception as e:
#                 print(f'Error uploading {relative_path}: {e}')

# def upload_folders_to_cloudinary(local_path, cloudinary_path, cloudinary_files):
#     for root, dirs, files in os.walk(local_path):
#         for dir in dirs:
#             local_dir_path = os.path.join(root, dir)
#             if file_needs_upload(local_dir_path, cloudinary_files):
#                 relative_path = os.path.relpath(local_dir_path, local_path)
#                 cloudinary_dir_path = os.path.join(cloudinary_path, relative_path).replace("\\", "/")
#                 try:
#                     api.create_folder(cloudinary_dir_path)
#                     print(f'Created folder {relative_path} on Cloudinary')
#                 except cloudinary.exceptions.RateLimited as e:
#                     print(f'Rate limit exceeded. Waiting for rate limit reset...')
#                     wait_time = extract_wait_time(e.message)
#                     time.sleep(wait_time)
#                     upload_folders_to_cloudinary(local_path, cloudinary_path, cloudinary_files)  # Retry recursively
#                 except cloudinary.exceptions.Error as e:
#                     if e.http_code == 400:
#                         # Folder already exists
#                         print(f'Folder {relative_path} already exists on Cloudinary: {e}')
#                     else:
#                         print(f'Error creating folder {relative_path} on Cloudinary: {e}')

# def extract_wait_time(error_message):
#     match = re.search(r'Try again on (.*) UTC', error_message)
#     if match:
#         time_str = match.group(1)
#         reset_time = datetime.strptime(time_str, '%Y-%m-%d %H:%M:%S')
#         current_time = datetime.utcnow()
#         wait_time = (reset_time - current_time).total_seconds()
#         return max(wait_time, 0)
#     else:
#         return 60  # Default wait time in case of failure to extract

# def delete_files_from_cloudinary(cloudinary_files, local_files):
#     for cloudinary_file_path in cloudinary_files:
#         if file_needs_deletion(cloudinary_file_path, local_files):
#             try:
#                 uploader.destroy(cloudinary_file_path)
#                 print(f'Deleted {cloudinary_file_path} from Cloudinary')
#             except Exception as e:
#                 print(f'Error deleting {cloudinary_file_path} from Cloudinary: {e}')

# def sync_folders():
#     global last_sync_time, last_local_modification
#     current_time = time.time()

#     # Check if any files have been modified since last sync
#     current_local_modification = max(os.path.getmtime(f) for f in get_all_local_files(local_folder_path))
#     if last_local_modification and current_local_modification == last_local_modification:
#         return

#     last_local_modification = current_local_modification

#     if is_online():
#         cloudinary_files = get_cloudinary_files(cloudinary_folder_path)
#         upload_folders_to_cloudinary(local_folder_path, cloudinary_folder_path, cloudinary_files)
#         upload_files_to_cloudinary(local_folder_path, cloudinary_folder_path, cloudinary_files)
        
#         # Process pending uploads
#         for local_file_path in pending_uploads:
#             relative_path = os.path.relpath(local_file_path, local_folder_path)
#             cloudinary_file_path = os.path.join(cloudinary_folder_path, relative_path).replace("\\", "/")
#             resource_type = 'raw' if any(local_file_path.lower().endswith(ext) for ext in ['.xlsx', '.xls', '.csv', '.txt', '.pdf']) else 'image'
#             try:
#                 uploader.upload(local_file_path, public_id=cloudinary_file_path, resource_type=resource_type)
#                 print(f'Uploaded pending file {relative_path} to Cloudinary as {resource_type}')
#             except Exception as e:
#                 print(f'Error uploading pending file {relative_path}: {e}')
        
#         # Clear the pending uploads list after successful upload
#         pending_uploads.clear()

#         # Delete files from Cloudinary that are not present locally
#         delete_files_from_cloudinary(cloudinary_files, get_all_local_files(local_folder_path))

#         last_sync_time = current_time  # Update last sync time
#     else:
#         print("Currently offline. Monitoring for file changes.")

# class FileEventHandler(FileSystemEventHandler):
#     def __init__(self, sync_function):
#         self.sync_function = sync_function

#     def on_any_event(self, event):
#         if event.is_directory:
#             return
#         self.sync_function()

# def main():
#     event_handler = FileEventHandler(sync_folders)
#     observer = Observer()
#     observer.schedule(event_handler, local_folder_path, recursive=True)
#     observer.start()

#     try:
#         while True:
#             if is_online():
#                 sync_folders()
#             else:
#                 # Collect pending uploads while offline
#                 local_files = get_all_local_files(local_folder_path)
#                 for local_file_path in local_files:
#                     if local_file_path not in pending_uploads:
#                         pending_uploads.append(local_file_path)
#                 print("Offline. Collected files for later upload.")
#             time.sleep(30)  # Wait for 30 seconds before checking again
#     except KeyboardInterrupt:
#         observer.stop()
#         observer.join()

# if __name__ == "__main__":
#     main()

























import os
import time
import re
import requests
import cloudinary
from cloudinary import uploader, api
from datetime import datetime
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler

# Configure Cloudinary with your credentials.
# SECURITY(review): the api_secret is hard-coded in source; these credentials
# should move to environment variables and the exposed secret be rotated.
cloudinary.config(
    cloud_name='di7xjvhdc',
    api_key='365434738567961',
    api_secret='0GAWSyHUKbFhwqbNBUkVQIYnqLM'
)

# Local directory to mirror, and the Cloudinary folder prefix it maps onto.
local_folder_path = 'public/uploads'
cloudinary_folder_path = 'public/uploads'
# Append-only audit log for deletions/restorations.
log_file_path = 'sync_log.txt'

# Timeout mechanism
TIMEOUT = 2 * 60  # 2 minutes in seconds (NOTE(review): not referenced anywhere in this file)
last_sync_time = None  # time.time() of the last completed online sync pass
last_local_modification = None  # newest local-file mtime seen by the last sync pass

# List to store files pending upload and deletion when offline
pending_uploads = []
pending_deletions = []  # NOTE(review): never read or written in this file

def is_online():
    """Best-effort connectivity probe.

    Issues a GET to google.com with a 5-second timeout; any response at all
    counts as online, and any requests-level failure as offline. The result
    is also echoed to stdout for visibility.
    """
    try:
        requests.get('http://www.google.com', timeout=5)
    except requests.RequestException:
        print("Online check: Offline")
        return False
    print("Online check: Online")
    return True

def get_all_local_files(base_path):
    """Recursively collect the path of every file under *base_path*.

    Returns a list of joined paths (directories themselves are not
    included); an empty or nonexistent *base_path* yields an empty list.
    """
    return [
        os.path.join(dirpath, filename)
        for dirpath, _dirnames, filenames in os.walk(base_path)
        for filename in filenames
    ]

def get_cloudinary_files(cloudinary_path):
    """Return the public_ids of all uploaded resources under *cloudinary_path*.

    Follows Cloudinary's cursor-based pagination (``next_cursor``) so folders
    with more than 500 resources are listed completely — a single
    ``max_results=500`` call silently truncates larger folders.

    Returns an empty list on any API error; callers treat an empty listing
    as "nothing known remotely" (which only causes redundant re-uploads,
    never deletions).
    """
    public_ids = []
    try:
        next_cursor = None
        while True:
            kwargs = {'type': 'upload', 'prefix': cloudinary_path, 'max_results': 500}
            if next_cursor:
                kwargs['next_cursor'] = next_cursor
            response = api.resources(**kwargs)
            public_ids.extend(item['public_id'] for item in response['resources'])
            next_cursor = response.get('next_cursor')
            if not next_cursor:
                break
        return public_ids
    except Exception as e:
        print(f'Error retrieving files from Cloudinary: {e}')
        return []

def file_needs_upload(local_file_path, cloudinary_files):
    """Return True when *local_file_path* has no counterpart on Cloudinary.

    The expected public_id is derived by re-rooting the path from
    local_folder_path onto cloudinary_folder_path, with forward slashes
    (Cloudinary public_ids are POSIX-style).
    """
    relative = os.path.relpath(local_file_path, local_folder_path)
    expected_public_id = os.path.join(cloudinary_folder_path, relative).replace("\\", "/")
    return expected_public_id not in cloudinary_files

def file_needs_deletion(cloudinary_file_path, local_files):
    """Return True when a Cloudinary resource has no local counterpart.

    The remote public_id is re-rooted from cloudinary_folder_path onto
    local_folder_path and checked directly on disk. Note: *local_files* is
    accepted for symmetry with the caller's signature but is not consulted.
    """
    relative = os.path.relpath(cloudinary_file_path, cloudinary_folder_path)
    expected_local_path = os.path.join(local_folder_path, relative)
    return not os.path.exists(expected_local_path)

def upload_files_to_cloudinary(local_path, cloudinary_path, cloudinary_files):
    """Upload every file under *local_path* that is missing from Cloudinary.

    *cloudinary_files* is the listing of known remote public_ids; it is
    converted to a set once so the per-file membership test inside
    file_needs_upload is O(1) instead of O(n) on a list.

    Upload errors are printed and skipped so one bad file does not abort
    the whole pass.
    """
    # Spreadsheet/text formats must be uploaded as 'raw'; everything else
    # is treated as 'image' (Cloudinary's default media type here).
    raw_extensions = ('.xlsx', '.xls', '.csv', '.txt', '.pdf')
    known_ids = set(cloudinary_files)
    for local_file_path in get_all_local_files(local_path):
        if not file_needs_upload(local_file_path, known_ids):
            continue
        relative_path = os.path.relpath(local_file_path, local_path)
        cloudinary_file_path = os.path.join(cloudinary_path, relative_path).replace("\\", "/")
        resource_type = 'raw' if local_file_path.lower().endswith(raw_extensions) else 'image'
        try:
            uploader.upload(local_file_path, public_id=cloudinary_file_path, resource_type=resource_type)
            print(f'Uploaded {relative_path} to Cloudinary as {resource_type}')
        except Exception as e:
            print(f'Error uploading {relative_path}: {e}')

def delete_files_from_cloudinary(cloudinary_files, local_files):
    """Delete remote resources that no longer exist locally.

    public_ids are created with their file extension intact (see the upload
    path), so the same extension rule used at upload time recovers the
    resource_type. Without it, ``uploader.destroy`` defaults to
    ``resource_type='image'`` and resources uploaded as 'raw'
    (.xlsx, .pdf, ...) would never actually be removed.

    Each deletion is appended to the audit log; errors are printed and the
    loop continues with the next resource.
    """
    raw_extensions = ('.xlsx', '.xls', '.csv', '.txt', '.pdf')
    for cloudinary_file_path in cloudinary_files:
        if not file_needs_deletion(cloudinary_file_path, local_files):
            continue
        resource_type = 'raw' if cloudinary_file_path.lower().endswith(raw_extensions) else 'image'
        try:
            uploader.destroy(cloudinary_file_path, resource_type=resource_type)
            log_deletion(cloudinary_file_path)
            print(f'Deleted {cloudinary_file_path} from Cloudinary')
        except Exception as e:
            print(f'Error deleting {cloudinary_file_path} from Cloudinary: {e}')

def log_deletion(file_path):
    """Append a timestamped deletion record for *file_path* to the sync log."""
    entry = f'{datetime.now()} - Deleted: {file_path}\n'
    with open(log_file_path, 'a') as log_file:
        log_file.write(entry)

def log_restoration(file_path):
    """Append a timestamped restoration record for *file_path* to the sync log."""
    entry = f'{datetime.now()} - Restored: {file_path}\n'
    with open(log_file_path, 'a') as log_file:
        log_file.write(entry)

def restore_file(local_file_path, cloudinary_file_path, resource_type):
    """Re-upload *local_file_path* to Cloudinary under *cloudinary_file_path*.

    Records the restoration in the sync log on success. Any failure — in
    the upload or in the logging — is printed and swallowed.
    NOTE(review): not called anywhere in this file; presumably kept as a
    manual-recovery helper — confirm before removing.
    """
    try:
        uploader.upload(
            local_file_path,
            public_id=cloudinary_file_path,
            resource_type=resource_type,
        )
        log_restoration(cloudinary_file_path)
        print(f'Restored {cloudinary_file_path} to Cloudinary as {resource_type}')
    except Exception as e:
        print(f'Error restoring {cloudinary_file_path}: {e}')

def sync_folders():
    """Run one sync pass: upload new/pending local files, delete remote orphans.

    The pass is skipped when no local file mtime has changed since the last
    pass. NOTE(review): a pure deletion does not change any surviving
    file's mtime, so a delete-only change may not be synced until some
    other file changes — confirm this is acceptable.
    """
    global last_sync_time, last_local_modification
    current_time = time.time()

    # Newest mtime across the tree. default=0.0 keeps an empty (or
    # missing) upload folder from raising ValueError on max() of an
    # empty sequence.
    current_local_modification = max(
        (os.path.getmtime(f) for f in get_all_local_files(local_folder_path)),
        default=0.0,
    )
    if last_local_modification and current_local_modification == last_local_modification:
        return

    last_local_modification = current_local_modification

    if not is_online():
        print("Currently offline. Monitoring for file changes.")
        return

    cloudinary_files = get_cloudinary_files(cloudinary_folder_path)
    upload_files_to_cloudinary(local_folder_path, cloudinary_folder_path, cloudinary_files)

    # Process uploads queued while offline. Files whose upload fails again
    # stay queued for the next pass instead of being dropped.
    raw_extensions = ('.xlsx', '.xls', '.csv', '.txt', '.pdf')
    still_pending = []
    for local_file_path in pending_uploads:
        relative_path = os.path.relpath(local_file_path, local_folder_path)
        cloudinary_file_path = os.path.join(cloudinary_folder_path, relative_path).replace("\\", "/")
        resource_type = 'raw' if local_file_path.lower().endswith(raw_extensions) else 'image'
        try:
            uploader.upload(local_file_path, public_id=cloudinary_file_path, resource_type=resource_type)
            print(f'Uploaded pending file {relative_path} to Cloudinary as {resource_type}')
        except Exception as e:
            print(f'Error uploading pending file {relative_path}: {e}')
            still_pending.append(local_file_path)
    # In-place assignment so other references to the module-level list
    # (e.g. in main's offline collector) keep seeing the same object.
    pending_uploads[:] = still_pending

    # Delete files from Cloudinary that are not present locally.
    delete_files_from_cloudinary(cloudinary_files, get_all_local_files(local_folder_path))

    last_sync_time = current_time  # Update last sync time

class FileEventHandler(FileSystemEventHandler):
    """Watchdog handler that triggers a sync pass on any file-level event.

    NOTE(review): watchdog dispatches events on the observer thread while
    main() also calls the same sync function from the main thread; the sync
    function does not appear to be guarded against concurrent execution —
    confirm before relying on rapid event bursts.
    """

    def __init__(self, sync_function):
        # Keep base-class initialization intact (the original skipped it).
        super().__init__()
        self.sync_function = sync_function

    def on_any_event(self, event):
        # Directory events (folder creation, etc.) are ignored; only file
        # events trigger a sync pass.
        if event.is_directory:
            return
        self.sync_function()

def main():
    """Start the filesystem watcher and run the periodic sync loop forever.

    Runs until interrupted. The watchdog observer is stopped and joined on
    every exit path — the original only cleaned up on KeyboardInterrupt,
    leaving the observer thread running after any other exception.
    """
    event_handler = FileEventHandler(sync_folders)
    observer = Observer()
    observer.schedule(event_handler, local_folder_path, recursive=True)
    observer.start()

    try:
        while True:
            if is_online():
                sync_folders()
            else:
                # Offline: remember every current local file so it can be
                # uploaded once connectivity returns (deduplicated).
                for local_file_path in get_all_local_files(local_folder_path):
                    if local_file_path not in pending_uploads:
                        pending_uploads.append(local_file_path)
                print("Offline. Collected files for later upload.")
            time.sleep(30)  # Wait for 30 seconds before checking again
    except KeyboardInterrupt:
        pass  # Ctrl+C: fall through to cleanup
    finally:
        observer.stop()
        observer.join()

if __name__ == "__main__":
    main()
