mirror of
				https://github.com/searxng/searxng.git
				synced 2025-11-04 03:27:06 -05:00 
			
		
		
		
	add google videos
This commit is contained in:
		
							parent
							
								
									081f51db4e
								
							
						
					
					
						commit
						856dfc3018
					
				
							
								
								
									
										83
									
								
								searx/engines/google_videos.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										83
									
								
								searx/engines/google_videos.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,83 @@
 | 
			
		||||
"""
 | 
			
		||||
 Google (Videos)
 | 
			
		||||
 | 
			
		||||
 @website     https://www.google.com
 | 
			
		||||
 @provide-api yes (https://developers.google.com/custom-search/)
 | 
			
		||||
 | 
			
		||||
 @using-api   no
 | 
			
		||||
 @results     HTML
 | 
			
		||||
 @stable      no
 | 
			
		||||
 @parse       url, title, content
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
from datetime import date, timedelta
 | 
			
		||||
from json import loads
 | 
			
		||||
from lxml import html
 | 
			
		||||
from searx.engines.xpath import extract_text
 | 
			
		||||
from searx.url_utils import urlencode
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# engine dependent config
categories = ['videos']
paging = True
safesearch = True
time_range_support = True
number_of_results = 10  # results per page assumed by the paging offset below

# {query} and {search_options} are filled in by request()
search_url = 'https://www.google.com/search'\
    '?{query}'\
    '&tbm=vid'\
    '&{search_options}'
# predefined ranges use Google's qdr shorthand (d/w/m)
time_range_attr = "qdr:{range}"
# custom date range: cd_min/cd_max take %m/%d/%Y dates
# (fixed: the original template was missing the colon after cd_max,
# producing a malformed tbs value such as "cd_max01/01/2020")
time_range_custom_attr = "cdr:1,cd_min:{start},cd_max:{end}"
time_range_dict = {'day': 'd',
                   'week': 'w',
                   'month': 'm'}


# do search-request
def request(query, params):
    """Build the Google Videos search request.

    Sets params['url'] from `search_url`, encoding the query, the page
    offset, an optional time-range filter (tbs) and the safe-search flag.
    Returns the mutated params dict, as the searx engine API expects.
    """
    search_options = {
        'ijn': params['pageno'] - 1,
        'start': (params['pageno'] - 1) * number_of_results
    }

    if params['time_range'] in time_range_dict:
        # day/week/month map directly onto Google's qdr shorthand
        search_options['tbs'] = time_range_attr.format(range=time_range_dict[params['time_range']])
    elif params['time_range'] == 'year':
        # Google has no qdr:y for videos; emulate it with a custom
        # date range covering the last 365 days
        now = date.today()
        then = now - timedelta(days=365)
        start = then.strftime('%m/%d/%Y')
        end = now.strftime('%m/%d/%Y')
        search_options['tbs'] = time_range_custom_attr.format(start=start, end=end)

    if safesearch and params['safesearch']:
        search_options['safe'] = 'on'

    params['url'] = search_url.format(query=urlencode({'q': query}),
                                      search_options=urlencode(search_options))

    return params
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# get response from search-request
def response(resp):
    """Parse a Google Videos result page into searx result dicts.

    Each result carries url, title, content, an (empty) thumbnail and
    the videos.html template marker.
    """
    dom = html.fromstring(resp.text)
    results = []

    # each organic hit lives in a <div class="g"> container
    for hit in dom.xpath('//div[@class="g"]'):
        title = extract_text(hit.xpath('.//h3/a'))
        link = hit.xpath('.//h3/a/@href')[0]
        snippet = extract_text(hit.xpath('.//span[@class="st"]'))

        results.append({
            'url': link,
            'title': title,
            'content': snippet,
            'thumbnail': '',  # Google's video page offers no easy thumbnail here
            'template': 'videos.html',
        })

    return results
 | 
			
		||||
@ -266,6 +266,10 @@ engines:
 | 
			
		||||
    engine : google_news
 | 
			
		||||
    shortcut : gon
 | 
			
		||||
 | 
			
		||||
  - name : google videos
 | 
			
		||||
    engine : google_videos
 | 
			
		||||
    shortcut : gov
 | 
			
		||||
 | 
			
		||||
  - name : google scholar
 | 
			
		||||
    engine : xpath
 | 
			
		||||
    paging : True
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										60
									
								
								tests/unit/engines/test_google_videos.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										60
									
								
								tests/unit/engines/test_google_videos.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,60 @@
 | 
			
		||||
from collections import defaultdict
 | 
			
		||||
import mock
 | 
			
		||||
from searx.engines import google_videos
 | 
			
		||||
from searx.testing import SearxTestCase
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TestGoogleVideosEngine(SearxTestCase):
    """Unit tests for the google_videos engine."""

    def test_request(self):
        # minimal engine parameters; defaultdict supplies missing keys
        params = defaultdict(dict)
        params['pageno'] = 1
        params['safesearch'] = 1
        params['time_range'] = ''

        req = google_videos.request('test_query', params)
        self.assertIn('url', req)
        self.assertIn('test_query', req['url'])

        # with safesearch disabled the safe flag must not appear
        params['safesearch'] = 0
        req = google_videos.request('test_query', params)
        self.assertNotIn('safe', req['url'])

    def test_response(self):
        # anything without a usable .text attribute must raise
        for bad in (None, [], '', '[]'):
            self.assertRaises(AttributeError, google_videos.response, bad)

        html = r"""
        <div>
            <div>
                <div class="g">
                    <div>
                        <h3><a href="url_1">Title 1</h3>
                    </div>
                    <div>
                        <span class="st">Content 1</span>
                    </div>
                </div>
                <div class="g">
                    <div>
                        <h3><a href="url_2">Title 2</h3>
                    </div>
                    <div>
                        <span class="st">Content 2</span>
                    </div>
                </div>
            </div>
        </div>
        """
        results = google_videos.response(mock.Mock(text=html))

        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)
        expected = [(u'url_1', u'Title 1', u'Content 1'),
                    (u'url_2', u'Title 2', u'Content 2')]
        for got, (url, title, content) in zip(results, expected):
            self.assertEqual(got['url'], url)
            self.assertEqual(got['title'], title)
            self.assertEqual(got['content'], content)
 | 
			
		||||
		Loading…
	
	
			
			x
			
			
		
	
		Reference in New Issue
	
	Block a user