
I wrote a program that sends a GET request to the API and writes the streamed response to a new file every 1 million lines, but it raises the StreamConsumedError shown below. Please let me know if there is any solution.

StreamConsumedError                       Traceback (most recent call last)
<ipython-input-...> in <module>
      3     decoded_lines = ''
      4     with open(path.format(i), 'w') as f:
----> 5         for line in r.iter_lines():
      6             count += 1
      7             if count == i * 1000000:

~/opt/anaconda3/lib/python3.7/site-packages/requests/models.py in iter_lines(self, chunk_size, decode_unicode, delimiter)
    792         pending = None
    793 
--> 794         for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
    795 
    796             if pending is not None:

~/opt/anaconda3/lib/python3.7/site-packages/requests/models.py in iter_content(self, chunk_size, decode_unicode)
    767 
    768         if self._content_consumed and isinstance(self._content, bool):
--> 769             raise StreamConsumedError()
    770         elif chunk_size is not None and not isinstance(chunk_size, int):
    771             raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))

StreamConsumedError:

import requests

base = 'https://api.nazuki-oto.com/historical/tweets?type=stream&estid={}'
endpoint = 'kargrNyc'
header = {'Accept-Encoding': 'deflate, gzip'}
url = base.format(endpoint)
s = requests.Session()
r = s.get(url, auth=('id', 'pass'), headers=header, stream=True)
for i in range(1, 7):
    count = 0
    decoded_lines = ''
    with open(path.format(i), 'w') as f:
        for line in r.iter_lines():
            count += 1
            if count == i * 1000000:
                decoded_lines += line.decode('utf-8')
                break
            elif count > (i - 1) * 1000000:
                decoded_lines += line.decode('utf-8') + '\n'
            else:
                pass
        f.write(decoded_lines)
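
For reference, my current guess is that the error happens because r.iter_lines() is entered again after the stream has already been consumed, so below is a rough one-pass sketch of what I think I need. The output file pattern 'tweets_{}.txt' is a placeholder I made up for 'path', and the URL and credentials are just copied from the snippet above; I have not confirmed this works against the actual API. Is this the right direction?

import requests

# Rough one-pass sketch, not a confirmed solution.
path = 'tweets_{}.txt'  # hypothetical output path pattern; 'path' is not shown in my snippet above
base = 'https://api.nazuki-oto.com/historical/tweets?type=stream&estid={}'
url = base.format('kargrNyc')
header = {'Accept-Encoding': 'deflate, gzip'}
lines_per_file = 1000000

with requests.Session() as s:
    r = s.get(url, auth=('id', 'pass'), headers=header, stream=True)
    r.raise_for_status()

    f = None
    try:
        # Iterate the streamed response exactly once and rotate the output
        # file every 1,000,000 lines instead of calling iter_lines() again.
        for count, line in enumerate(r.iter_lines()):
            if count % lines_per_file == 0:
                if f is not None:
                    f.close()
                f = open(path.format(count // lines_per_file + 1), 'w')
            f.write(line.decode('utf-8') + '\n')
    finally:
        if f is not None:
            f.close()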