
Commit 2fea286

[Fix] as per the new DB structure
1 parent f5cfe73 commit 2fea286

File tree: 4 files changed, +114 -122 lines

.gitignore (+1 -1)

@@ -1,5 +1,5 @@
 dmp_2/__pycache__/*
 .env
 env/*
-
+venv
 __pycache__/*

app.py (+51 -40)

@@ -127,88 +127,99 @@ def get_issues():
             type: string
     """
     try:
-        dmp_issue = SupabaseInterface().get_instance().client.table('dmp_issues').select('*').execute().data
+        # Fetch all issues with their details
+        dmp_issues = SupabaseInterface().get_instance().client.table('dmp_issues').select('*').execute().data
 
-        updated_issues = []
-
-        for i in dmp_issue:
-            val = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('dmp_issue_url', i['repo_url']).execute().data
-            if val != []:
-                i['issues'] = val[0]  # append first obj, i.e. all rows refer to the same issue
-                i['org_id'] = val[0]['org_id']
-                i['org_name'] = val[0]['org_name']
-
-                updated_issues.append(i)
-
-        # Create a defaultdict of lists
+        # Create a defaultdict of lists to group issues by 'org_id'
         grouped_data = defaultdict(list)
-        # Group data by 'org_name'
-        for item in updated_issues:
-            grouped_data[item['org_name']].append(item)
+        for issue in dmp_issues:
+            # Fetch organization details for the issue
+            org_details = SupabaseInterface().get_instance().client.table('dmp_orgs').select('*').eq('id', issue['org_id']).execute().data
+            if org_details:
+                issue['org_name'] = org_details[0]['name']
+
+            grouped_data[issue['org_id']].append(issue)
 
+        # Prepare response in the required format
         response = []
-        for org_name, items in grouped_data.items():
+        for org_id, items in grouped_data.items():
             issues = [
                 {
-                    "html_url": item['issues']['html_issue_url'],
-                    "id": item['issues']['comment_id'],
-                    "issue_number": item['issues']['issue_number'],
-                    "name": item['issues']['title']
+                    "id": item['issue_number'],
+                    "name": item['title']
                 }
                 for item in items
             ]
 
             response.append({
-                "issues": issues,
-                "org_id": items[0]['org_id'],
-                "org_name": org_name
+                "org_id": org_id,
+                "org_name": items[0]['org_name'],  # Assuming all items in the group have the same org_name
+                "issues": issues
            })
 
-        return jsonify(response)
+        return jsonify({"issues": response})
 
     except Exception as e:
         error_traceback = traceback.format_exc()
-        return jsonify({'error': str(e), 'traceback': error_traceback}), 200
-
+        return jsonify({'error': str(e), 'traceback': error_traceback}), 500
+
 @app.route('/issues/<owner>', methods=['GET'])
 @cross_origin(supports_credentials=True)
 @require_secret_key
 def get_issues_by_owner(owner):
     """
-    Fetch issues by owner.
+    Fetch organization details by owner's GitHub URL.
     ---
     parameters:
       - name: owner
         in: path
         type: string
         required: true
-        description: The owner of the issues
+        description: The owner of the GitHub URL (e.g., organization owner)
     responses:
       200:
-        description: Issues fetched successfully
+        description: Organization details fetched successfully
         schema:
-          type: array
-          items:
-            type: object
+          type: object
+          properties:
+            name:
+              type: string
+              description: Name of the organization
+            description:
+              type: string
+              description: Description of the organization
+      404:
+        description: Organization not found
+        schema:
+          type: object
+          properties:
+            error:
+              type: string
+              description: Error message
       500:
-        description: Error fetching issues
+        description: Error fetching organization details
         schema:
           type: object
          properties:
            error:
              type: string
+             description: Error message
     """
     try:
-        response = SupabaseInterface().get_instance().client.table('dmp_issue_updates').select('*').eq('org_name', owner).order('comment_updated_at', desc=True).execute()
+        # Construct the GitHub URL based on the owner parameter
+        org_link = f"https://github.com/{owner}"
+
+        # Fetch organization details from dmp_orgs table
+        response = SupabaseInterface().get_instance().client.table('dmp_orgs').select('name', 'description').eq('link', org_link).execute()
 
         if not response.data:
-            return jsonify({'error': "No data found"}), 200
-        data = response.data[0]
-        return jsonify({"name": data['org_name'], "description": data['org_description']})
+            return jsonify({'error': "Organization not found"}), 404
+
+        return jsonify(response.data)
 
     except Exception as e:
         error_traceback = traceback.format_exc()
-        return jsonify({'error': str(e), 'traceback': error_traceback}), 200
-
+        return jsonify({'error': str(e), 'traceback': error_traceback}), 500
 
 
 @app.route('/issues/<owner>/<issue>', methods=['GET'])
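
The reworked /issues handler groups rows from dmp_issues by org_id and resolves each organization's name from dmp_orgs. Below is a minimal standalone sketch of that grouping step, with hardcoded sample rows standing in for the Supabase query results (all table contents here are illustrative, not real data):

# Sketch of the new grouping logic in get_issues(); in-memory sample rows
# replace the 'dmp_issues' and 'dmp_orgs' Supabase queries.
from collections import defaultdict

dmp_issues = [  # hypothetical rows from 'dmp_issues'
    {"issue_number": 1, "title": "Fix login flow", "org_id": 10},
    {"issue_number": 2, "title": "Add API docs", "org_id": 10},
]
orgs_by_id = {10: {"name": "example-org"}}  # hypothetical rows from 'dmp_orgs'

grouped_data = defaultdict(list)
for issue in dmp_issues:
    # Attach the organization name, mirroring the per-issue dmp_orgs lookup
    org = orgs_by_id.get(issue["org_id"])
    if org:
        issue["org_name"] = org["name"]
    grouped_data[issue["org_id"]].append(issue)

response = []
for org_id, items in grouped_data.items():
    issues = [{"id": i["issue_number"], "name": i["title"]} for i in items]
    response.append({"org_id": org_id, "org_name": items[0]["org_name"], "issues": issues})

print({"issues": response})
# {'issues': [{'org_id': 10, 'org_name': 'example-org', 'issues': [
#     {'id': 1, 'name': 'Fix login flow'}, {'id': 2, 'name': 'Add API docs'}]}]}

Note that the committed version issues one dmp_orgs query per issue; batching the lookup (e.g. a single query filtered on the collected org_ids) would cut the round-trips.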

v2_app.py (+27 -26)

@@ -4,38 +4,46 @@
 from utils import require_secret_key
 from db import SupabaseInterface
 from utils import determine_week
-from v2_utils import calculate_overall_progress, define_mentors_data, week_data_formatter
+from v2_utils import calculate_overall_progress, define_link_data, week_data_formatter
 
 v2 = Blueprint('v2', __name__)
 
 
 @v2.route('/issues/<owner>/<issue>', methods=['GET'])
 @require_secret_key
 def get_issues_by_owner_id_v2(owner, issue):
-    try:
+    try:
         SUPABASE_DB = SupabaseInterface().get_instance()
-        response = SUPABASE_DB.client.table('dmp_issue_updates').select('*').eq('owner', owner).eq('issue_number', issue).execute()
+        # Fetch issue updates based on owner and issue number
+
+        url = f"https://github.com/{owner}"
+        dmp_issue_id = SUPABASE_DB.client.table('dmp_issues').select('*').like('issue_url', f'%{url}%').eq('issue_number', issue).execute()
+        if not dmp_issue_id.data:
+            return jsonify({'error': "No data found"}), 500
+
+        dmp_issue_id = dmp_issue_id.data[0]
+        response = SUPABASE_DB.client.table('dmp_issue_updates').select('*').eq('dmp_id', dmp_issue_id['id']).execute()
+
         if not response.data:
-            return jsonify({'error': "No data found"}), 200
+            return jsonify({'error': "No data found"}), 500
+
         data = response.data
 
         final_data = []
         w_learn_url,w_goal_url,avg,cont_details,plain_text_body,plain_text_wurl = None,None,None,None,None,None
-
+
         for val in data:
-            issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'],val['repo'])
+            # issue_url = "https://api.github.com/repos/{}/{}/issues/comments".format(val['owner'],val['repo'])
             # week_avg ,cont_name,cont_id,w_goal,w_learn,weekby_avgs,org_link = find_week_avg(issue_url)
             # mentors_data = find_mentors(val['issue_url']) if val['issue_url'] else {'mentors': [], 'mentor_usernames': []}
 
             if val['body_text']:
                 if "Weekly Goals" in val['body_text'] and not w_goal_url:
                     w_goal_url = val['body_text']
                     plain_text_body = markdown2.markdown(val['body_text'])
-
                     tasks = re.findall(r'\[(x| )\]', plain_text_body)
                     total_tasks = len(tasks)
                     completed_tasks = tasks.count('x')
-
                     avg = round((completed_tasks/total_tasks)*100) if total_tasks!=0 else 0
 
                 if "Weekly Learnings" in val['body_text'] and not w_learn_url:
@@ -46,28 +54,27 @@ def get_issues_by_owner_id_v2(owner, issue):
             # mentors = mentors_data['mentors']
             # ment_usernames = mentors_data['mentor_usernames']
             if not cont_details:
-                cont_details = SUPABASE_DB.client.table('dmp_issues').select('*').eq('repo_url',val['dmp_issue_url']).execute().data
-
-
+                cont_details = dmp_issue_id['contributor_username']
             week_data = week_data_formatter(plain_text_body,"Goals")
+
             res = {
                 "name": owner,
-                "description": val['description'],
-                "mentor": define_mentors_data(val['mentor_name']),
-                "mentor_id": val['mentor_id'],
-                "contributor": define_mentors_data(cont_details[0]['contributor_name']),
+                "description": dmp_issue_id['description'],
+                "mentor": define_link_data(dmp_issue_id['mentor_username']),
+                "mentor_id": dmp_issue_id['mentor_username'],
+                "contributor": define_link_data(cont_details),
                 # "contributor_id": cont_details[0]['contributor_id'],
-                "org": define_mentors_data(val['owner'])[0] if val['owner'] else [],
+                "org": define_link_data(dmp_issue_id['mentor_username'])[0] if dmp_issue_id['mentor_username'] else [],
                 "weekly_goals_html": w_goal_url,
                 "weekly_learnings_html": w_learn_url,
                 "overall_progress": calculate_overall_progress(week_data, 12),
-                "issue_url": val['html_issue_url'],
+                "issue_url": dmp_issue_id['issue_url'],
                 "pr_details": None,
                 "weekly_goals": week_data,
-                "weekly_learns": week_data_formatter(plain_text_wurl,"Learnings")
+                "weekly_learnings": week_data_formatter(plain_text_wurl,"Learnings")
             }
 
-            pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('repo', val['repo']).eq('issue_number_title',issue).execute()
+            pr_Data = SUPABASE_DB.client.table('dmp_pr_updates').select('*').eq('dmp_id', dmp_issue_id['id']).eq('title',issue).execute()
             transformed = {"pr_details": []}
             if pr_Data.data:
                 for pr in pr_Data.data:
@@ -80,13 +87,7 @@ def get_issues_by_owner_id_v2(owner, issue):
                     })
 
             res['pr_details'] = transformed['pr_details']
-
-            # Adding each week as a separate key
-            # for week in weekby_avgs:
-            #     res.update(week)
-
-            # final_data.append(res)
-
+
         return jsonify(res),200
 
     except Exception as e:
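
The progress figure in this handler comes from counting markdown checkboxes in the comment body. A small self-contained sketch of that step, using an illustrative body_text in place of a dmp_issue_updates row:

# markdown2 renders the comment body to HTML; the '[x]'/'[ ]' markers survive
# as plain text, so a regex can count completed vs. total tasks.
import re
import markdown2

body_text = """Weekly Goals
- [x] Set up the dev environment
- [x] Read the contribution guide
- [ ] Open a first PR
"""  # illustrative sample, not real data

plain_text_body = markdown2.markdown(body_text)
tasks = re.findall(r'\[(x| )\]', plain_text_body)
total_tasks = len(tasks)
completed_tasks = tasks.count('x')
avg = round((completed_tasks / total_tasks) * 100) if total_tasks != 0 else 0
print(avg)  # 67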

v2_utils.py (+35 -55)

@@ -1,93 +1,72 @@
 import logging,re,markdown2
 
 # Func to create name and link for all mentors and contributors
-def define_mentors_data(mentors):
+def define_link_data(usernames):
     try:
         res = []
-
-        if type(mentors) == list:
-            for ment in mentors:
+        if type(usernames) == list:
+            for username in usernames:
                 val = {}
-                val['name'] = ment
-                val['link'] = "https://github.com/" + ment
+                val['name'] = username
+                val['link'] = "https://github.com/" + username
                 res.append(val)
-        if type(mentors) == str:
+        if type(usernames) == str:
+            if usernames[0] == "@":
+                usernames = usernames[1:]
             val = {}
-            val['name'] = mentors
-            val['link'] = "https://github.com/" + mentors
+            val['name'] = usernames
+            val['link'] = "https://github.com/" + usernames
             res.append(val)
 
         return res
 
     except Exception as e:
-        logging.info(f"{e}---define_mentors")
+        logging.info(f"{e}---define_link_data")
         return []
 
 
-
-def week_data_formatter(html_content,type):
+
+def week_data_formatter(html_content, type):
+
     try:
-        # Find all weeks
-        week_matches = re.findall(r'<h2>(Week \d+)</h2>', html_content)
-        tasks_per_week = re.findall(r'<h2>Week \d+</h2>\s*<ul>(.*?)</ul>', html_content, re.DOTALL)
-
+        # Use regex to find week titles (e.g., Week 1, Week 2) and their corresponding task lists
+        week_matches = re.findall(r'(Week \d+)', html_content)
+        tasks_per_week = re.split(r'Week \d+', html_content)[1:]  # Split the content by weeks and skip the first empty split
 
         weekly_updates = []
-        total_tasks = 0
-
 
         if type == "Learnings":
             for i, week in enumerate(week_matches):
-
-                try:
-                    task_list_html = tasks_per_week[i]
-                except Exception as e:
-                    task_list_html = ""
-
+                task_list_html = tasks_per_week[i] if i < len(tasks_per_week) else ""
                 weekly_updates.append({
-                    'week': i+1,
-                    'content':task_list_html
+                    'week': i + 1,
+                    'content': task_list_html.strip()
                 })
-
             return weekly_updates
-
-        else:
+
+        else:
             for i, week in enumerate(week_matches):
-                try:
-                    task_list_html = tasks_per_week[i]
-                except Exception as e:
-                    task_list_html = ""
+                task_list_html = tasks_per_week[i] if i < len(tasks_per_week) else ""
 
-                tasks = re.findall(r'\[(x| )\] (.*?)</li>', task_list_html, re.DOTALL)
+                # Adjust regex to capture tasks regardless of the tags around them
+                tasks = re.findall(r'\[(x|X| )\]\s*(.*?)</?li>', task_list_html, re.DOTALL)
 
                 total_tasks = len(tasks)
-                completed_tasks = sum(1 for task in tasks if task[0] == 'x')
-                task_list = [{"content":i[1],"checked":True if i[0]=='x' else False} for i in tasks]
-
+                completed_tasks = sum(1 for task in tasks if task[0] in ['x', 'X'])
+                task_list = [{"content": task[1].strip(), "checked": task[0] in ['x', 'X']} for task in tasks]
 
                 avg = round((completed_tasks / total_tasks) * 100) if total_tasks != 0 else 0
 
                 weekly_updates.append({
-                    'week': i+1,
-                    # 'total_tasks': total_tasks,
-                    # 'completed_tasks': completed_tasks,
+                    'week': i + 1,
                     'progress': avg,
-                    'tasks':task_list
+                    'tasks': task_list
                 })
-
-
-
-            response = {
-                'number_of_weeks': len(week_matches),
-                'weekly_updates': weekly_updates
-            }
-
-            #FIND OVERALL PROGRESS
-
 
             return weekly_updates
-
-
+
     except Exception as e:
+        print(f"Error: {e}")
         return []
 
 
@@ -98,7 +77,7 @@ def calculate_overall_progress(weekly_updates, default_weeks=12):
 
         # Sum the progress of each provided week
         for week in weekly_updates:
-            total_progress += week['progress']
+            total_progress += week.get('progress', 0)
 
         # Add zero progress for the remaining weeks to reach the default weeks
         total_weeks = default_weeks
@@ -110,5 +89,6 @@ def calculate_overall_progress(weekly_updates, default_weeks=12):
 
         return round(overall_progress, 2)
     except Exception as e:
+        print(f"Error: {e}")
         return 0
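
For reference, the expected behavior of the renamed helpers, assuming they are imported from v2_utils and fed an illustrative HTML snippet (sample values, not real data):

from v2_utils import define_link_data, week_data_formatter, calculate_overall_progress

# '@' prefixes are now stripped before the GitHub profile link is built.
print(define_link_data("@octocat"))
# [{'name': 'octocat', 'link': 'https://github.com/octocat'}]

html = "Week 1<ul><li>[x] task one</li><li>[ ] task two</li></ul>"
weeks = week_data_formatter(html, "Goals")
# [{'week': 1, 'progress': 50, 'tasks': [{'content': 'task one', 'checked': True},
#                                        {'content': 'task two', 'checked': False}]}]

# One week at 50% averaged over the default 12 weeks: 50 / 12 ≈ 4.17
print(calculate_overall_progress(weeks, 12))  # 4.17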
