mirror of https://github.com/searxng/searxng.git
showing publishedDate for news

commit b88146d669 (parent a559bad488)
@@ -2,6 +2,7 @@
 from urllib import urlencode
 from json import loads
+from datetime import datetime, timedelta

 categories = ['news']
@@ -31,7 +32,15 @@ def response(resp):
         return []

     for result in search_res['responseData']['results']:
+        # S.149 (159), library.pdf
+        # datetime.strptime("Mon, 10 Mar 2014 16:26:15 -0700", "%a, %d %b %Y %H:%M:%S %z")
+        # publishedDate = parse(result['publishedDate'])
+        publishedDate = datetime.strptime(str.join(' ',result['publishedDate'].split(None)[0:5]), "%a, %d %b %Y %H:%M:%S")
+        #utc_offset = timedelta(result['publishedDate'].split(None)[5]) # local = utc + offset
+        #publishedDate = publishedDate + utc_offset
+
         results.append({'url': result['unescapedUrl'],
                         'title': result['titleNoFormatting'],
+                        'publishedDate': publishedDate,
                         'content': result['content']})

     return results
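The commented-out lines in this hunk hint at applying the trailing "-0700" offset, which strptime in Python 2 cannot parse via %z. A minimal sketch (not part of the commit) of how that offset could be applied by hand to get a UTC datetime; the helper name parse_rfc822_date is made up for illustration:

from datetime import datetime, timedelta

def parse_rfc822_date(value):
    # value looks like "Mon, 10 Mar 2014 16:26:15 -0700"
    parts = value.split(None)
    # parse everything except the trailing offset
    local = datetime.strptime(' '.join(parts[0:5]), '%a, %d %b %Y %H:%M:%S')
    offset = parts[5]  # e.g. "-0700"
    sign = -1 if offset.startswith('-') else 1
    delta = timedelta(hours=int(offset[1:3]), minutes=int(offset[3:5]))
    # local = utc + offset  =>  utc = local - offset
    return local - sign * delta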
@@ -4,6 +4,7 @@ from urllib import urlencode
 from lxml import html
 from searx.engines.xpath import extract_text, extract_url
 from searx.engines.yahoo import parse_url
+from datetime import datetime

 categories = ['news']
 search_url = 'http://news.search.yahoo.com/search?{query}&b={offset}'
@@ -11,6 +12,7 @@ results_xpath = '//div[@class="res"]'
 url_xpath = './/h3/a/@href'
 title_xpath = './/h3/a'
 content_xpath = './/div[@class="abstr"]'
+publishedDate_xpath = './/span[@class="timestamp"]'
 suggestion_xpath = '//div[@id="satat"]//a'

 paging = True
@@ -37,7 +39,10 @@ def response(resp):
         url = parse_url(extract_url(result.xpath(url_xpath), search_url))
         title = extract_text(result.xpath(title_xpath)[0])
         content = extract_text(result.xpath(content_xpath)[0])
-        results.append({'url': url, 'title': title, 'content': content})
+        # Feb 20 04:02am
+        publishedDate = datetime.strptime(extract_text(result.xpath(publishedDate_xpath)[0]),"%b %d %H:%M%p")
+        #publishedDate.replace(year=2014)
+        results.append({'url': url, 'title': title, 'content': content,'publishedDate':publishedDate})

     if not suggestion_xpath:
         return results
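Two caveats with the timestamp parsing in this hunk: with "%H" in the format string, strptime ignores "%p", so afternoon values such as "04:02pm" still parse as hour 4; and the feed omits the year, so strptime defaults it to 1900 (datetime.replace() also returns a new object rather than mutating, so the commented-out replace(year=2014) would be a no-op as written). A hedged sketch, not the commit's code, using a 12-hour "%I" and copying the year from the current date; the helper name is hypothetical:

from datetime import datetime

def parse_yahoo_timestamp(text, now=None):
    # text looks like "Feb 20 04:02am"
    now = now or datetime.now()
    # %I (12-hour clock) lets %p take effect; the feed gives no year,
    # so copy it from "now" -- note replace() returns a new datetime.
    parsed = datetime.strptime(text, '%b %d %I:%M%p')
    return parsed.replace(year=now.year)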
@@ -6,6 +6,7 @@

 <div>
 <h3 class="result_title"><a href="{{ result.url }}">{{ result.title|safe }}</a></h3>
+{% if result.publishedDate %}<p class="published_date">{{ result.publishedDate }}</p>{% endif %}
 <p class="content">{% if result.content %}{{ result.content|safe }}<br />{% endif %}</p>
 <p class="url">{{ result.pretty_url }}</p>
 </div>
@@ -26,6 +26,7 @@ import json
 import cStringIO
 import os

+from datetime import datetime, timedelta
 from itertools import chain
 from flask import (
     Flask, request, render_template, url_for, Response, make_response,
@@ -156,6 +157,17 @@ def index():
             if engine in favicons:
                 result['favicon'] = engine

+        # TODO, check if timezone is calculated right
+        if 'publishedDate' in result:
+            if result['publishedDate'].date() == datetime.now().date():
+                timedifference = datetime.now()-result['publishedDate']
+                if timedifference.seconds < 60*60:
+                    result['publishedDate'] = '{0:d} minutes ago'.format(timedifference.seconds/60)
+                else:
+                    result['publishedDate'] = '{0:d} hours ago'.format(timedifference.seconds/60/60)
+            else:
+                result['publishedDate'] = result['publishedDate'].strftime('%d.%m.%Y')
+
     if search.request_data.get('format') == 'json':
         return Response(json.dumps({'query': search.query,
                                     'results': search.results}),
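The block above formats same-day results as "N minutes/hours ago". One subtlety: timedelta.seconds only covers the sub-day part of the difference, so a publishedDate ahead of the local clock (the timezone TODO) can yield odd values. A small sketch of the same formatting as a standalone helper using total_seconds() instead; the function name is invented for illustration, and both datetimes are assumed naive and in the same timezone:

from datetime import datetime

def humanize_published_date(published, now=None):
    # Same-day results become "N minutes/hours ago", older ones a date.
    now = now or datetime.now()
    if published.date() == now.date():
        minutes = int((now - published).total_seconds() // 60)
        if minutes < 60:
            return '{0:d} minutes ago'.format(minutes)
        return '{0:d} hours ago'.format(minutes // 60)
    return published.strftime('%d.%m.%Y')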