Merge pull request #833 from googlefonts/pull-fix
manage-traffic-jam: improvements
m4rc1e authored Feb 19, 2024
2 parents edbb47f + 9d85f8c commit d5fbcd9
Showing 3 changed files with 81 additions and 43 deletions.
20 changes: 18 additions & 2 deletions Lib/gftools/push/servers.py
@@ -145,7 +145,15 @@ def update_all(self, last_checked: str):
         for family_data in families_data:
             family_name = family_data["family"]
             last_modified = family_data["lastModified"]
-            if last_modified >= last_checked:
+
+            cached_family_version = self.family_versions.get(family_name)
+            existing_family_version = self.families.get(family_name)
+            # always ensure we repull family data if the family_version api
+            # is different
+            if cached_family_version and existing_family_version:
+                if cached_family_version != existing_family_version.version:
+                    self.update(family_name)
+            elif last_modified >= last_checked:
                 self.update(family_name)

     def update(self, family_name):
@@ -194,7 +202,11 @@ def compare_item(self, item: Items):
     def save(self, fp: "str | Path"):
         from copy import deepcopy

-        data = deepcopy(self).to_json()
+        cp = deepcopy(self)
+        # do not save family_versions data. We want to request this each time
+        if hasattr(cp, "family_versions"):
+            delattr(cp, "family_versions")
+        data = cp.to_json()
         json.dump(data, open(fp, "w", encoding="utf8"), indent=4)

     @classmethod
@@ -210,6 +222,10 @@ def from_dict(cls, data):
             server = getattr(inst, server_name)

             for item_type, item_value in data[server_name].items():
+                # if family_versions data is saved, skip it so we get requested
+                # data instead
+                if item_type in ["family_versions", "traffic_jam"]:
+                    continue
                 if item_type == "families":
                     server.families = {k: Family(**v) for k, v in item_value.items()}
                 elif item_type == "designers":
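Read on its own, the update_all change means a family is re-pulled whenever the family_versions API reports a different version from the one already stored, and the old lastModified comparison only acts as a fallback. A minimal standalone sketch of that decision (the names and data shapes below are assumptions for illustration, not the gftools API):

from dataclasses import dataclass
from typing import Dict, Optional


@dataclass
class StoredFamily:
    version: str


def needs_update(
    family_name: str,
    last_modified: str,
    last_checked: str,
    family_versions: Dict[str, str],    # fresh data from the family_versions API
    families: Dict[str, StoredFamily],  # data already held for the server
) -> bool:
    api_version: Optional[str] = family_versions.get(family_name)
    stored = families.get(family_name)
    # always re-pull when the live version API disagrees with the stored version
    if api_version and stored:
        return api_version != stored.version
    # otherwise fall back to the lastModified comparison
    return last_modified >= last_checked


# The API reports v2 but v1 is stored, so the family is re-pulled even though
# lastModified predates last_checked.
print(needs_update("Foo Sans", "2024-01-01", "2024-02-01",
                   {"Foo Sans": "v2"}, {"Foo Sans": StoredFamily("v1")}))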
101 changes: 61 additions & 40 deletions Lib/gftools/push/trafficjam.py
@@ -12,6 +12,7 @@

 from gftools.push.items import Axis, Designer, Family, FamilyMeta
 from gftools.push.utils import google_path_to_repo_path, repo_path_to_google_path
+import json

 log = logging.getLogger("gftools.push")

@@ -107,6 +108,7 @@ def from_string(string: str):  # type: ignore[misc]
     projectV2(number: 74) {
       id
       title
+      updatedAt
       items(first: 100, after: "%s") {
         totalCount
         edges {
@@ -434,56 +436,75 @@ def from_server_file(
         return results

     @classmethod
-    def from_traffic_jam(cls):
+    def from_traffic_jam(cls, fp=None):
         log.info("Getting push items from traffic jam board")
         from gftools.gfgithub import GitHubClient

         g = GitHubClient("google", "fonts")
         last_item = ""
         data = g._run_graphql(GOOGLE_FONTS_TRAFFIC_JAM_QUERY % last_item, {})
-        board_items = data["data"]["organization"]["projectV2"]["items"]["nodes"]
-
-        # paginate through items in board
-        last_item = data["data"]["organization"]["projectV2"]["items"]["edges"][-1][
-            "cursor"
-        ]
-        item_count = data["data"]["organization"]["projectV2"]["items"]["totalCount"]
-        while len(board_items) < item_count:
-            data = None
-            while not data:
-                try:
-                    data = g._run_graphql(
-                        GOOGLE_FONTS_TRAFFIC_JAM_QUERY % last_item, {}
-                    )
-                except:
-                    data = None
-            board_items += data["data"]["organization"]["projectV2"]["items"]["nodes"]
+        board_items = None
+        # use cached items if board hasn't been updated
+        if fp and fp.exists():
+            existing = json.load(open(fp, encoding="utf8"))
+            last_update = existing["updatedAt"]
+            current_update = data["data"]["organization"]["projectV2"]["updatedAt"]
+            if last_update == current_update:
+                board_items = existing["board_items"]
+
+        if not board_items:
+            board_items = data["data"]["organization"]["projectV2"]["items"]["nodes"]
+
+            # paginate through items in board
             last_item = data["data"]["organization"]["projectV2"]["items"]["edges"][-1][
                 "cursor"
             ]
-            log.info(f"Getting items up to {last_item}")
-        for item in board_items:
-            if item["type"] != "PULL_REQUEST":
-                raise ValueError(
-                    "Traffic Jam contains issues! All items must be pull requests. "
-                    "Please remove the issues and rerun the tool, "
-                    "https://github.com/orgs/google/projects/74/views/1"
-                )
+            item_count = data["data"]["organization"]["projectV2"]["items"]["totalCount"]
+            while len(board_items) < item_count:
+                data = None
+                while not data:
+                    try:
+                        data = g._run_graphql(
+                            GOOGLE_FONTS_TRAFFIC_JAM_QUERY % last_item, {}
+                        )
+                    except:
+                        data = None
+                board_items += data["data"]["organization"]["projectV2"]["items"]["nodes"]
+                last_item = data["data"]["organization"]["projectV2"]["items"]["edges"][-1][
+                    "cursor"
+                ]
+                log.info(f"Getting items up to {last_item}")
+            for item in board_items:
+                if item["type"] != "PULL_REQUEST":
+                    raise ValueError(
+                        "Traffic Jam contains issues! All items must be pull requests. "
+                        "Please remove the issues and rerun the tool, "
+                        "https://github.com/orgs/google/projects/74/views/1"
+                    )
-        # sort items by pr number
-        board_items.sort(key=lambda k: k["content"]["url"])
-
-        # get files for prs which have more than 100 changed files
-        for item in board_items:
-            changed_files = item["content"]["files"]["totalCount"]
-            if changed_files <= 100:
-                continue
-            pr_number = item["content"]["number"]
-            pr_url = item["content"]["url"]
-            log.warn(
-                f"{pr_url} has {changed_files} changed files. Attempting to fetch them."
-            )
-            files = g.pr_files(pr_number)
-            item["content"]["files"]["nodes"] = [{"path": f["filename"]} for f in files]
+            # sort items by pr number
+            board_items.sort(key=lambda k: k["content"]["url"])
+
+            # get files for prs which have more than 100 changed files
+            for item in board_items:
+                changed_files = item["content"]["files"]["totalCount"]
+                if changed_files <= 100:
+                    continue
+                pr_number = item["content"]["number"]
+                pr_url = item["content"]["url"]
+                log.warn(
+                    f"{pr_url} has {changed_files} changed files. Attempting to fetch them."
+                )
+                files = g.pr_files(pr_number)
+                item["content"]["files"]["nodes"] = [{"path": f["filename"]} for f in files]

+        # save
+        if fp:
+            dat = {
+                "updatedAt": data["data"]["organization"]["projectV2"]["updatedAt"],
+                "board_items": board_items
+            }
+            with open(fp, "w", encoding="utf-8") as doc:
+                json.dump(dat, doc, indent=4)
+
         results = cls()
         for item in board_items:
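Taken together, the trafficjam.py changes add a small cache protocol around the board fetch: the GraphQL query now also returns the project's updatedAt stamp, and if a local cache file carries the same stamp the saved board_items are reused instead of re-paginating; otherwise the board is fetched and the cache rewritten. A rough sketch of that round trip, with a hypothetical fetch_board() standing in for the GraphQL calls:

import json
from pathlib import Path
from typing import List, Tuple


def fetch_board() -> Tuple[str, List[dict]]:
    # stand-in for the GraphQL request; returns the board's updatedAt stamp and its items
    return "2024-02-19T10:00:00Z", [{"type": "PULL_REQUEST", "content": {"url": "..."}}]


def load_board(fp: Path) -> List[dict]:
    updated_at, first_page = fetch_board()  # the first request also carries updatedAt
    board_items = None
    # reuse the cached items if the board hasn't changed since they were saved
    if fp.exists():
        existing = json.loads(fp.read_text(encoding="utf8"))
        if existing["updatedAt"] == updated_at:
            board_items = existing["board_items"]
    if board_items is None:
        board_items = first_page  # the real method keeps paginating from here
        fp.write_text(
            json.dumps({"updatedAt": updated_at, "board_items": board_items}, indent=4),
            encoding="utf8",
        )
    return board_items


print(len(load_board(Path("/tmp/.gf_traffic_jam_data.json"))))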
3 changes: 2 additions & 1 deletion Lib/gftools/scripts/manage_traffic_jam.py
@@ -274,7 +274,8 @@ def main(args=None):

     os.chdir(args.fonts_repo)

-    push_items = PushItems.from_traffic_jam()
+    traffic_jam_data = (Path("~") / ".gf_traffic_jam_data.json").expanduser()
+    push_items = PushItems.from_traffic_jam(traffic_jam_data)
     if not args.show_open_prs:
         push_items = PushItems(i for i in push_items if i.merged == True)
     if "lists" in args.filter:
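With the script change the cache file lives in the user's home directory. A quick illustration of the path expansion (the filename comes from the diff above; the snippet itself is only illustrative):

from pathlib import Path

# expanduser() replaces the leading "~" with the real home directory,
# e.g. /home/alice/.gf_traffic_jam_data.json
traffic_jam_data = (Path("~") / ".gf_traffic_jam_data.json").expanduser()
print(traffic_jam_data)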
