Commit d9c2af5

Merge pull request #15 from script-money/fix/windows
🐛 can't upload at windows
2 parents: 5d0eadb + 29f4958

File tree

  src/config.py
  src/generate.py
  src/upload.py

3 files changed: 42 additions, 16 deletions


src/config.py

Lines changed: 3 additions & 1 deletion

@@ -3,6 +3,8 @@
 from dotenv import load_dotenv
 from pathlib import Path

+from httpx._types import ProxiesTypes
+
 load_dotenv()
 ROOT_DIR = Path(__file__).parent.parent

@@ -62,7 +64,7 @@ class Quality(Enum):
 # upload----------------------------------------------------------------------------------------------
 UPLOAD_METADATA = False  # set False if don't want to upload metadata
 PIN_FILES = False  # if want to upload permanently, set to True
-PROXIES = {
+PROXIES: ProxiesTypes = {
     "http://": "http://127.0.0.1:7890",
     "https://": "http://127.0.0.1:7890",
 }  # if in China, you need set proxy to access IPFS node
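
Note: the new ProxiesTypes annotation documents that PROXIES is meant to be handed straight to an httpx client. A minimal sketch of that usage, assuming an httpx release that still accepts the proxies= keyword (newer versions use proxy=/mounts= instead) and that config.py is importable as config:

    # Sketch only, not part of the commit: route IPFS API traffic through the proxy.
    from httpx import AsyncClient

    from config import PROXIES  # the mapping annotated above

    # Requests to "http://" and "https://" URLs are tunnelled via 127.0.0.1:7890.
    client = AsyncClient(proxies=PROXIES)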

src/generate.py

Lines changed: 2 additions & 2 deletions

@@ -30,10 +30,10 @@ def random_attr():
         List: [{"value": value, "trait_type": prop}]
     """
     attributes = []
-    select_folder = choice(FOLDERS, p=WEIGHTS)
+    select_folder: str = choice(FOLDERS, p=WEIGHTS)
     for prop in props:
         k = random.random()
-        ratio_arr = df_pac.query(
+        ratio_arr: np.ndarray = df_pac.query(
             f"(folder == '{select_folder}') & (prop == '{prop}')"
         ).ratio.values
         cum_arr = np.cumsum(ratio_arr) - k
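
The annotated lines feed a cumulative-sum (inverse-CDF) draw: k is uniform in [0, 1), and the code after this hunk (not shown here) picks the first trait value whose cumulative ratio reaches k. A standalone sketch of that idea with made-up ratios (in the repo they come from df_pac.query(...).ratio.values):

    # Sketch of the weighted pick implied by cum_arr = np.cumsum(ratio_arr) - k.
    import random
    import numpy as np

    ratio_arr = np.array([0.5, 0.3, 0.2])  # hypothetical per-value ratios, sum to 1
    k = random.random()                    # uniform draw in [0, 1)
    cum_arr = np.cumsum(ratio_arr) - k
    picked = int(np.argmax(cum_arr >= 0))  # first index where the cumulative sum reaches k
    print(picked)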

src/upload.py

Lines changed: 37 additions & 13 deletions

@@ -1,6 +1,7 @@
 import asyncio
 import os
-from httpx import AsyncClient, Limits, ReadTimeout, Client
+from typing import Optional, cast
+from httpx import AsyncClient, Limits, ReadTimeout, Client, ConnectError, Response
 import json
 import pandas as pd
 import random
@@ -18,7 +19,9 @@
 from final_check import RENAME_DF, START_ID


-async def upload_task(files_path_chunk: list[str], wait_seconds: int) -> list[dict]:
+async def upload_task(
+    files_path_chunk: list[str], wait_seconds: int
+) -> Optional[list[dict]]:
     """
     upload task for asyncio, a task process 10 files

@@ -39,10 +42,12 @@ async def upload_task(files_path_chunk: list[str], wait_seconds: int) -> list[dict]:
         for file_path in files_path_chunk
     ]
     result = await asyncio.gather(*tasks)
+    if all(map(lambda i: i is None, result)):
+        return None
     return result


-async def upload_single_async(client: AsyncClient, file_path: str) -> dict:
+async def upload_single_async(client: AsyncClient, file_path: str) -> Optional[dict]:
     """
     upload folder to ipfs

@@ -56,26 +61,36 @@ async def upload_single_async(client: AsyncClient, file_path: str) -> dict:
     retry = 0
     while retry < 5:
         try:
-            response = await client.post(
+            response: Response = await client.post(
                 f"https://ipfs.infura.io:5001/api/v0/add",
                 params={
                     "pin": "true" if PIN_FILES else "false"
                 },  # pin=true if want to pin files
-                auth=(PROJECT_ID, PROJECT_SECRET),
+                auth=(PROJECT_ID, PROJECT_SECRET),  # type: ignore
                 files={"file": open(file_path, "rb")},
             )
-            res_json = response.json()
+            if response.status_code == 401:
+                print("Project ID and scecret is invalid")
+                exit()
+            res_json: dict = response.json()
             if res_json["Name"] != "":
                 return res_json
         except Exception as e:
             if isinstance(e, ReadTimeout):
                 print(f"upload {file_path.split('-')[0]} timeout, retry {retry}")
+            elif isinstance(e, ConnectError):
+                print(f"can't connect to ipfs, please check network or proxy setting")
+                exit()
+            else:
+                print(f"upload {file_path.split('-')[0]} error, exit")
+                exit()
             retry += 1
+    return None


 def upload_folder(
     folder_name: str, content_type: str = "image/png"
-) -> tuple[str, list[dict]]:
+) -> tuple[Optional[str], Optional[list[dict]]]:
     """
     upload folder to ipfs

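
The `# type: ignore` on the auth tuple is presumably needed because PROJECT_ID and PROJECT_SECRET are loaded from the environment and typed Optional[str], while httpx's basic-auth tuple expects plain strings. A hypothetical alternative that avoids the ignore by failing fast (not what the commit does):

    # Hypothetical: validate the Infura credentials once so a type checker narrows them to str.
    import os

    PROJECT_ID = os.environ.get("PROJECT_ID")
    PROJECT_SECRET = os.environ.get("PROJECT_SECRET")
    if PROJECT_ID is None or PROJECT_SECRET is None:
        raise SystemExit("PROJECT_ID / PROJECT_SECRET must be set in .env")
    # From here on, auth=(PROJECT_ID, PROJECT_SECRET) type-checks without an ignore comment.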
@@ -84,10 +99,10 @@ def upload_folder(
         content_type (str, optional): mime file type. Defaults to "image/png".

     Returns:
-        tuple[str, list[dict]]: (folder_hash, images_dict_list)
+        tuple[Optional[str], Optional[list[dict]]]: (folder_hash, images_dict_list)
     """
     files = []
-    extension = content_type.split(os.sep)[-1]
+    extension = content_type.split("/")[-1]

     files = [
         (file, open(os.path.join(folder_name, file), "rb"))
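
This hunk (together with the matching change in upload_files below) is the Windows fix named in the commit title: the file extension was derived by splitting the MIME type on os.sep, which is "\" on Windows, so "image/png" was never split, the extension filter matched nothing, and uploads failed there. Splitting on a literal "/" behaves the same on every platform:

    # Why the old code only worked on POSIX systems.
    import os

    content_type = "image/png"
    print(content_type.split(os.sep)[-1])  # Windows: "image/png" (wrong); Linux/macOS: "png"
    print(content_type.split("/")[-1])     # all platforms: "png"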
@@ -105,14 +120,15 @@ def upload_folder(
             "wrap-with-directory": "true",
         },
         files=files,  # files should be List[filename, bytes]
-        auth=(PROJECT_ID, PROJECT_SECRET),
+        auth=(PROJECT_ID, PROJECT_SECRET),  # type: ignore
     )
     upload_folder_res_list = response.text.strip().split("\n")
     if (
         upload_folder_res_list[0]
         == "basic auth failure: invalid project id or project secret"
     ):
         assert False, "invalid project id or project secret"
+    folder_hash: Optional[str] = ""
     try:
         folder_hash = json.loads(
             [i for i in upload_folder_res_list if json.loads(i)["Name"] == ""][0]
@@ -136,7 +152,7 @@ def upload_files(folder_name: str, content_type: str = "image/png") -> list[dict]:
     Returns:
         list[dict]: ipfs info list, example: [{ 'Name': str, 'Hash': str, 'Size': str }]
     """
-    extension = content_type.split(os.sep)[-1]
+    extension = content_type.split("/")[-1]
     file_paths = [
         os.path.join(folder_name, file_path)
         for file_path in list(
@@ -151,10 +167,16 @@ def upload_files(folder_name: str, content_type: str = "image/png") -> list[dict]:

     def complete_batch_callback(images_ipfs_data):
         results.append(images_ipfs_data.result())
+        if results[0] == None:
+            print("No upload info return")
+            exit()
         print(f"complete {len(results)/len(chunks):.2%}")

     loop = asyncio.get_event_loop()
-    print(f"Total {len(file_count)} files to upload, estimate time: {len(chunks)+10}s")
+    if file_count == 0:
+        print(f"no any images in folder {IMAGES}")
+        exit()
+    print(f"Total {file_count} files to upload, estimate time: {len(chunks)+10}s")
     for epoch, path_chunk in enumerate(chunks):
         task = loop.create_task(upload_task(path_chunk, epoch))
         tasks.append(task)
@@ -185,8 +207,10 @@ def generate_metadata(
     Returns:
         tuple[int, int]: (start_id, end_id)
     """
+    index: Optional[int]
     for idx, row in df.iterrows():
         path = row["path"]
+        idx = cast(int, idx)
         index = idx + start_id
         image_dict = next(
             filter(
@@ -227,7 +251,7 @@ def read_images_from_local() -> list[dict]:
         list[dict]: images ipfs info
     """
     with open("image_ipfs_data.backup", "r") as f:
-        result = json.loads(f.read())
+        result: list[dict] = json.loads(f.read())
     print(f"read {len(result)} ipfs data from local")
     return result

