File: test_ia_search.py

package info
python-internetarchive 5.4.0-1
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid, trixie
  • size: 1,000 kB
  • sloc: python: 7,445; xml: 180; makefile: 180
file content (42 lines, 1,490 bytes)
import responses

from internetarchive.utils import json
from tests.conftest import PROTOCOL, IaRequestsMock, ia_call, load_test_data_file


def test_ia_search_itemlist(capsys):
    test_scrape_response = load_test_data_file('scrape_response.json')

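    # Mock the scrape endpoint so the `ia search ... --itemlist` call below never reaches archive.org.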
    with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
        url = f'{PROTOCOL}//archive.org/services/search/v1/scrape'
        p1 = {
            'q': 'collection:attentionkmartshoppers',
            'count': '10000'
        }
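        # Drop the cursor so the mocked scrape response reads as the final page of results.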
        _j = json.loads(test_scrape_response)
        del _j['cursor']
        _r = json.dumps(_j)
        rsps.add(responses.POST, url,
                 body=_r,
                 match=[responses.matchers.query_param_matcher(p1)])
        ia_call(['ia', 'search', 'collection:attentionkmartshoppers', '--itemlist'])

    out, err = capsys.readouterr()
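    # --itemlist prints one identifier per line; the fixture yields 100 unique ones.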
    assert len(set(out.split())) == 100


def test_ia_search_num_found(capsys):
    with IaRequestsMock(assert_all_requests_are_fired=False) as rsps:
        url = f'{PROTOCOL}//archive.org/services/search/v1/scrape'
        p = {
            'q': 'collection:nasa',
            'total_only': 'true',
            'count': '10000'
        }
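        # With total_only=true the scrape endpoint returns just the total; --num-found should print it.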
        rsps.add(responses.POST, url,
                 body='{"items":[],"count":0,"total":50}',
                 match=[responses.matchers.query_param_matcher(p)])

        ia_call(['ia', 'search', 'collection:nasa', '--num-found'])
    out, err = capsys.readouterr()
    assert out == '50\n'
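
A note on the fixture used above: scrape_response.json is not shown on this page. Judging only from how the tests consume it (a top-level 'cursor' key that can be deleted, and 100 unique identifiers expected on stdout), a stand-in with the same shape could be sketched as below. The identifiers and cursor value are illustrative assumptions, not the packaged fixture data.

import json

# Hypothetical stand-in for the scrape_response.json fixture. It mirrors the
# shape the tests rely on: an "items" list whose entries carry an
# "identifier", plus "count", "total", and a "cursor" (the key that
# test_ia_search_itemlist deletes to mark the response as the last page).
fake_scrape_response = json.dumps({
    'items': [{'identifier': f'item-{i:03d}'} for i in range(100)],
    'count': 100,
    'total': 100,
    'cursor': 'fake-cursor-value',
})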