import logging
import os
import re
import shutil
import stat
import subprocess

import requests
from requests_file import FileAdapter


# Google Drive interposes a confirmation step before serving large
# files; the token we need to proceed arrives in a cookie
def get_confirm_token(response):
    for key, value in response.cookies.items():
        if key.startswith('download_warning'):
            return value
    return None
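
# Illustrative example (cookie name/value are made up): a large Drive
# download may set a cookie such as
#   download_warning_12345678901234="t"
# get_confirm_token returns "t", and fetch_files below retries the
# request with confirm=t to acknowledge the warning page.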


# sets up a local bare repo that acts like a remote
def initialize_local_repo(local_repo_path):
    logging.info(f"Creating local_repo_path: {local_repo_path}")
    try:
        os.makedirs(local_repo_path)
    except OSError:
        logging.info(f"Directory exists: {local_repo_path}")

    subprocess_helper("git init --bare", local_repo_path)

    # Make our bare repository servable over dumb HTTP
    hook_path = os.path.join(local_repo_path, 'hooks', 'post-update')
    os.rename(
        os.path.join(local_repo_path, 'hooks', 'post-update.sample'),
        hook_path
    )
    os.chmod(hook_path, os.stat(hook_path).st_mode | stat.S_IEXEC)
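
# Note: git's stock post-update.sample hook just runs
# `git update-server-info`, which regenerates info/refs after each push
# so clients without the smart protocol can still discover the refs.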


# clone a working repo from the "remote", which lives in the user's drive
def clone_local_origin_repo(origin_repo_path, temp_download_repo):
    logging.info(f"Creating temp_download_repo: {temp_download_repo}")
    try:
        os.makedirs(temp_download_repo)
    except OSError:
        logging.info(f"Directory exists: {temp_download_repo}")

    cmd = f"git clone file://{origin_repo_path} {temp_download_repo}"
    subprocess_helper(cmd, temp_download_repo)
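
# Spelling the origin as a file:// URL makes git use its regular
# transport machinery rather than the hardlink-based optimization it
# applies to plain local paths.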


# this is needed to unarchive various formats (e.g. zip, tgz, etc.)
def determine_file_extension(url, response):
    file_type = response.headers.get('content-type')
    content_disposition = response.headers.get('content-disposition')
    ext = None
    if content_disposition:
        fname = re.findall(r"filename\*?=([^;]+)", content_disposition)
        fname = fname[0].strip().strip('"')
        # keep everything past the first dot so compound suffixes
        # like tar.gz survive intact
        ext = ".".join(fname.split(".")[1:])
    elif file_type and "/zip" in file_type:
        ext = "zip"
    else:
        url = url.split("/")[-1]
        if "?" in url:
            url = url[0:url.find('?')]
        if "." in url:
            ext = ".".join(url.split(".")[1:])

    if not ext:
        m = f"Could not determine the file extension for unarchiving: {url}"
        raise Exception(m)
    return ext
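
# Illustrative inputs and the extension each resolves to (values made up):
#   content-disposition: attachment; filename="materials.zip"  -> "zip"
#   content-type: application/zip, no disposition header        -> "zip"
#   https://example.com/course.tar.gz?dl=1                      -> "tar.gz"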


# the downloaded content is in the response -- unarchive and save to disk
def save_response_content(url, response, temp_download_repo):
    try:
        ext = determine_file_extension(url, response)
        CHUNK_SIZE = 32768
        temp_download_file = f"{temp_download_repo}/download.{ext}"
        with open(temp_download_file, "wb") as f:
            for chunk in response.iter_content(CHUNK_SIZE):
                # filter out keep-alive chunks
                if chunk:
                    f.write(chunk)

        shutil.unpack_archive(temp_download_file, temp_download_repo)

        os.remove(temp_download_file)
    except Exception as e:
        m = f"Problem handling file download: {str(e)}"
        raise Exception(m) from e
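
# shutil.unpack_archive infers the archive format from the filename
# suffix, which is why determine_file_extension must produce a genuine
# extension (zip, tar, tar.gz, ...) for the temp file's name.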


# grab the archive file from the url
def fetch_files(url, id=-1):
    session = requests.Session()
    session.mount('file://', FileAdapter())  # lets pytest fetch file:// URLs
    response = session.get(url, params={'id': id}, stream=True)
    token = get_confirm_token(response)
    if token:
        params = {'id': id, 'confirm': token}
        response = session.get(url, params=params, stream=True)

    return response
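
# A minimal usage sketch (URL and file id are hypothetical):
#   url = "https://docs.google.com/uc?export=download"
#   response = fetch_files(url, id="1aBcD")
#   save_response_content(url, response, "/tmp/temp_download_repo")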


# this drives the file handling -- called from zip_puller by all the
# handle_files implementations for GoogleDrive, Dropbox, and standard
# web URLs
def handle_files_helper(args):
    try:
        origin_repo = args["repo_parent_dir"] + args["origin_dir"]
        temp_download_repo = args["repo_parent_dir"] + args["download_dir"]
        if os.path.exists(temp_download_repo):
            shutil.rmtree(temp_download_repo)

        if not os.path.exists(origin_repo):
            initialize_local_repo(origin_repo)

        clone_local_origin_repo(origin_repo, temp_download_repo)
        save_response_content(args["repo"], args["response"], temp_download_repo)
        subprocess_helper("git add .", temp_download_repo)
        subprocess_helper(
            "git -c [email protected] -c user.name=nbgitpuller "
            "commit -m test --allow-empty",
            temp_download_repo
        )
        subprocess_helper("git push origin master", temp_download_repo)
        unzipped_dirs = os.listdir(temp_download_repo)

        dir_names = list(filter(lambda d: ".git" not in d, unzipped_dirs))
        return {"unzip_dir": dir_names[0], "origin_repo_path": origin_repo}
    except Exception as e:
        logging.exception(e)
        raise ValueError(e) from e
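
# A sketch of the args dict this expects (paths and URL are illustrative):
#   args = {
#       "repo_parent_dir": "/home/jovyan/",
#       "origin_dir": "origin_repo",
#       "download_dir": "temp_download_repo",
#       "repo": "https://example.com/materials.zip",
#       "response": fetch_files("https://example.com/materials.zip"),
#   }
#   handle_files_helper(args)  # -> {"unzip_dir": ..., "origin_repo_path": ...}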


# executes git commands for us
def subprocess_helper(cmd, cwd):
    try:
        subprocess.run(
            cmd.split(" "),
            capture_output=True,
            text=True,
            check=True,
            cwd=cwd
        )
    except subprocess.CalledProcessError as e:
        # include git's stderr so failures are diagnosable
        m = f"Problem executing git command: {cmd}\n{e.stderr}"
        raise Exception(m) from e