Skip to content

Commit

Permalink
Merge pull request #64 from ClericPy/dev
Browse files Browse the repository at this point in the history
1.8.0
  • Loading branch information
ClericPy authored Jun 15, 2020
2 parents 339a904 + b4d94b0 commit e787046
Show file tree
Hide file tree
Showing 10 changed files with 383 additions and 300 deletions.
2 changes: 1 addition & 1 deletion watchdogs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,6 @@
from .config import Config
from .main import init_app

__version__ = '1.7.2'
__version__ = '1.8.0'
__all__ = ['Config', 'init_app']
logging.getLogger('watchdogs').addHandler(logging.NullHandler())
10 changes: 7 additions & 3 deletions watchdogs/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -433,7 +433,7 @@ async def rss(request: Request,
logger.error(f'latest_result is list: {latest_result}')
link: str = latest_result.get('url') or task['origin_url']
description: str = latest_result.get('text') or ''
title: str = f'{task["name"]}#{description[:80]}'
title: str = f'{task["name"]}#{latest_result.get("title", description[:80])}'
item: dict = {
'title': title,
'link': link,
Expand Down Expand Up @@ -479,9 +479,10 @@ async def lite(request: Request,
now = datetime.now()
for task in tasks:
result = loads(task['latest_result'] or '{}')
# for cache...
# set / get cache from task
task['url'] = task.get('url') or result.get('url') or task['origin_url']
task['text'] = task.get('text') or result.get('text') or ''
task['text'] = task.get('text') or result.get('title') or result.get(
'text') or ''
task['timeago'] = timeago(
(now - task['last_change_time']).total_seconds(),
1,
Expand All @@ -504,4 +505,7 @@ async def lite(request: Request,
else:
last_page_url = ''
context['last_page_url'] = last_page_url
quoted_tag = quote_plus(tag)
rss_sign = Config.get_sign('/rss', f'tag={quoted_tag}')[1]
context['rss_url'] = f'/rss?tag={quoted_tag}&sign={rss_sign}'
return templates.TemplateResponse("lite.html", context=context)
168 changes: 117 additions & 51 deletions watchdogs/callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,65 @@
from traceback import format_exc
from typing import Dict, Type

from torequests.utils import ttime

from .utils import ensure_await_result


class CallbackHandlerBase(ABC):
    """Registry and dispatch base for Callback subclasses.

    On construction it collects every subclass of ``Callback`` into a
    name-keyed registry; instances are only created on first use
    (lazy initialisation in :meth:`get_callback`).
    """

    # shared logger for all handler implementations
    logger = getLogger('watchdogs')

    def __init__(self):
        # lazy init object
        self.callbacks_dict: Dict[str, Type[Callback]] = {}
        for subclass in Callback.__subclasses__():
            try:
                # every callback must declare a non-None name
                assert subclass.name is not None
                # fall back to the class docstring as its description
                subclass.doc = subclass.doc or subclass.__doc__
                self.callbacks_dict[subclass.name] = subclass
            except Exception as err:
                self.logger.error(f'{subclass} registers failed: {err!r}')
        # summary mapping: callback name -> description
        self.workers = {
            cb.name: cb.doc for cb in self.callbacks_dict.values()
        }

    @abstractmethod
    async def callback(self, task):
        # subclasses decide how a task is dispatched
        pass

    def get_callback(self, name):
        """Return the callback instance registered under *name*, or None."""
        entry = self.callbacks_dict.get(name)
        if not entry:
            # no such callback registered
            return None
        if isinstance(entry, Callback):
            # already instantiated on an earlier call
            return entry
        # lazy init: replace the class by a singleton instance
        instance = entry()
        self.callbacks_dict[name] = instance
        return instance


class CallbackHandler(CallbackHandlerBase):
    """Default dispatcher: route a task to the Callback whose name matches
    the prefix of ``task.custom_info`` (the part before the first ``:``)."""

    # NOTE: the redundant `__init__` that only called super().__init__()
    # was removed; the inherited constructor is used directly.

    async def callback(self, task):
        """Dispatch *task* to its callback and log the outcome.

        Falls back to the callback registered under '' when the named one
        is missing; silently ignores the task when neither exists.
        Exceptions raised by the callback are logged, never propagated.
        """
        custom_info: str = task.custom_info.strip()
        # callback name is everything before the first ':'
        name = custom_info.split(':', 1)[0]
        cb = self.get_callback(name) or self.get_callback('')
        if not cb:
            # not found callback, ignore
            return
        try:
            # ensure_await_result lets callbacks be sync or async
            call_result = await ensure_await_result(cb.callback(task))
            # fixed: colon was misplaced after the result ('... result: ')
            self.logger.info(
                f'{cb.name or "default"} callback({custom_info}) for task {task.name}: {call_result}'
            )
        except Exception:
            self.logger.error(
                f'{cb.name or "default"} callback({custom_info}) for task {task.name} error:\n{format_exc()}'
            )


class Callback(ABC):
"""
Constraint: Callback object should has this attribute:
Expand Down Expand Up @@ -58,66 +114,76 @@ async def callback(self, task):
if not key or not key.strip():
continue
key = key.strip()
r = await self.req.post(
f'https://sc.ftqq.com/{key}.send',
data={
'text': title,
'desp': body
})
r = await self.req.post(f'https://sc.ftqq.com/{key}.send',
data={
'text': title,
'desp': body
})
self.logger.info(f'ServerChanCallback ({key}): {r.text}')
oks.append((key, bool(r)))
return f'{len(oks)} sended, {oks}'


class CallbackHandlerBase(ABC):
    """Base registry/dispatcher for Callback subclasses.

    NOTE(review): this definition also appears earlier in this diff view;
    presumably the merged file keeps only one copy — verify against the
    repository.
    """

    # shared logger for all handler implementations
    logger = getLogger('watchdogs')

    def __init__(self):
        # lazy init object
        # Map callback name -> Callback subclass; an entry is replaced by
        # an instance on first use (see get_callback below).
        self.callbacks_dict: Dict[str, Type[Callback]] = {}
        for cls in Callback.__subclasses__():
            try:
                # every callback must declare a non-None name
                assert cls.name is not None
                # fall back to the class docstring as its description
                cls.doc = cls.doc or cls.__doc__
                self.callbacks_dict[cls.name] = cls
            except Exception as err:
                self.logger.error(f'{cls} registers failed: {err!r}')
        # summary mapping: callback name -> description
        self.workers = {cb.name: cb.doc for cb in self.callbacks_dict.values()}

    @abstractmethod
    async def callback(self, task):
        # subclasses decide how a task is dispatched to its callback
        pass

    def get_callback(self, name):
        # Return the callback instance registered under `name`, or None.
        obj = self.callbacks_dict.get(name)
        if not obj:
            # not found callback
            return None
        if not isinstance(obj, Callback):
            # here for lazy init
            obj = obj()
            self.callbacks_dict[name] = obj
        return obj
class DingTalkCallback(Callback):
    """
    DingDing robot notify toolkit. Will auto check msg type as text / card.

    1. Create a group.
    2. Create a robot which contains word ":"
    3. Set the task.custom_info as: dingding:{access_token}

    Doc: https://ding-doc.dingtalk.com/doc#/serverapi2/qf2nxq/e9d991e2
    """
    # NOTE(review): the scraped diff interleaved removed lines of the old
    # CallbackHandler into this class; reconstructed here from the
    # added-side lines of the hunk — verify against the repository.
    name = "dingding"

    def __init__(self):
        # lazy import so the module loads even if torequests is unused
        from torequests.dummy import Requests
        self.req = Requests()

    def make_data(self, task):
        """Build the webhook payload for *task*.

        Returns an actionCard (markdown, with a "Read More" link) when a
        url or cover image is available, otherwise a plain text message.
        """
        latest_result = loads(task.latest_result or '{}')
        title = latest_result.get('title') or ''
        url = latest_result.get('url') or task.origin_url
        text = latest_result.get('text') or ''
        cover = latest_result.get('cover') or ''
        if cover:
            # show the cover image above the text (markdown)
            text = f'![cover]({cover})\n{text}'
        if url or cover:
            # markdown
            title = f'# {task.name}: {title}\n> {ttime()}'
            return {
                "actionCard": {
                    "title": title,
                    "text": f'{title}\n\n{text}',
                    "singleTitle": "Read More",
                    "singleURL": url
                },
                "msgtype": "actionCard"
            }
        return {
            "msgtype": "text",
            "text": {
                "content": f"{task.name}: {title}\n{text}"
            }
        }

    async def callback(self, task):
        """Post the task's latest result to every DingTalk robot token in
        ``task.custom_info`` ('dingding:token1 token2 ...').

        Raises ValueError when no token follows the ':'.
        """
        name, arg = task.custom_info.split(':', 1)
        if not arg:
            raise ValueError(
                f'{task.name}: custom_info `{task.custom_info}` missing args after `:`'
            )
        data = self.make_data(task)
        oks = []
        # tokens are whitespace-separated; de-duplicate before sending
        for access_token in set(arg.strip().split()):
            if not access_token or not access_token.strip():
                continue
            access_token = access_token.strip()
            r = await self.req.post(
                f'https://oapi.dingtalk.com/robot/send?access_token={access_token}',
                json=data)
            self.logger.info(
                f'{self.__class__.__name__} ({access_token}): {r.text}')
            oks.append((access_token, bool(r)))
        return f'{len(oks)} sended, {oks}'
8 changes: 6 additions & 2 deletions watchdogs/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,11 +76,14 @@ async def auth_checker(request: Request, call_next):
# try checking sign
given_sign, valid_sign = Config.get_sign(path, query_string)
if given_sign == valid_sign:
# sign checking pass
return await call_next(request)
# try check cookie
if not Config.watchdog_auth or Config.watchdog_auth == request.cookies.get(
'watchdog_auth', ''):
# no watchdog_auth or cookie is valid
# valid cookie, or no watchdog_auth checker
return await call_next(request)
# not pass either checker, refused
if query_has_sign:
# request with sign will not redirect
return JSONResponse(
Expand All @@ -90,6 +93,7 @@ async def auth_checker(request: Request, call_next):
},
)
else:
# bad cookie, reset the watchdog_auth cookie as null
resp = RedirectResponse(
f'/auth?redirect={quote_plus(request.scope["path"])}', 302)
resp.set_cookie('watchdog_auth', '')
Expand Down Expand Up @@ -181,7 +185,7 @@ class Config:
custom_tabs: List[Dict] = []
COLLATION: str = None
cookie_max_age = 86400 * 7
default_page_size = 15
default_page_size = 20

@classmethod
def add_custom_tabs(cls, label, url, name=None, desc=None):
Expand Down
10 changes: 7 additions & 3 deletions watchdogs/crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ async def crawl(task: Task):
result_list = None
else:
if len(crawl_result) == 1:
# chain result for __request__ which fetch a new request
# crawl_result schema: {rule_name: list_or_dict}
formated_result = get_watchdog_result(
item=crawl_result.popitem()[1])
if formated_result == {'text': 'text not found'}:
Expand All @@ -138,7 +138,7 @@ async def crawl(task: Task):
# use force crawl one web UI for more log
logger.info(f'{task.name} Crawl success: {result_list}'[:150])
else:
error = 'Invalid crawl_result schema: {rule_name: [{"text": "xxx", "url": "xxx"}]}, but given %r' % crawl_result
error = 'Invalid crawl_result against schema {rule_name: [{"text": "Required", "url": "Optional", "__key__": "Optional"}]}, given is %r' % crawl_result
logger.error(f'{task.name}: {error}')
result_list = [{"text": error}]
return task, error, result_list
Expand Down Expand Up @@ -212,10 +212,14 @@ async def _crawl_once(task_name: Optional[str] = None, chunk_size: int = 20):
# compare latest_result and new list
# later first, just like the saved result_list sortings
old_latest_result = loads(task.latest_result)
# try to use the __key__
old_latest_result_key = old_latest_result.get(
'__key__', old_latest_result)
# list of dict
to_insert_result_list = []
for result in result_list:
if result == old_latest_result:
result_key = result.get('__key__', result)
if result_key == old_latest_result_key:
break
to_insert_result_list.append(result)
if to_insert_result_list:
Expand Down
17 changes: 13 additions & 4 deletions watchdogs/static/js/watchdogs.js
Original file line number Diff line number Diff line change
Expand Up @@ -434,7 +434,8 @@ var Main = {
},
get_latest_result(latest_result, max_length = 80) {
try {
return JSON.parse(latest_result).text.slice(0, max_length)
let item = JSON.parse(latest_result)
return item.title || item.text.slice(0, max_length)
} catch (error) {
return latest_result
}
Expand All @@ -455,7 +456,7 @@ var Main = {
'</td><td><a target="_blank" ' +
href +
">" +
this.escape_html(result.text) +
this.escape_html(result.title || result.text) +
"</a></td></tr>"
})
text += "</table>"
Expand Down Expand Up @@ -671,7 +672,7 @@ var Main = {
dangerouslyUseHTMLString: true,
closeOnClickModal: true,
closeOnPressEscape: true,
customClass: 'work_hours_doc',
customClass: "work_hours_doc",
})
},
check_error_task({ row, rowIndex }) {
Expand Down Expand Up @@ -763,7 +764,7 @@ var vue_app = Vue.extend(Main)
var app = new vue_app({
delimiters: ["${", "}"],
}).$mount("#app")
app.load_tasks()
// app.load_tasks()
// init app vars
;(() => {
// init_vars
Expand All @@ -773,4 +774,12 @@ app.load_tasks()
app[name] = args[name]
})
node.parentNode.removeChild(node)
// auto load
var io = new IntersectionObserver((entries) => {
if (entries[0].intersectionRatio <= 0) return
if (app.has_more) {
app.load_tasks()
}
})
io.observe(document.getElementById("auto_load"))
})()
Loading

0 comments on commit e787046

Please sign in to comment.