#====================================
#          network_thread
#   Copyright 2011 - Nathan Osman
#
#  Manages network requests to and
# from the SE API and processes them
#       in a separate thread.
#
#   StackApplet is released under
#         the MIT license
#====================================

import Queue
import threading
import time

import urllib2
import zlib

# We need gobject for cross-thread signalling
import gobject
gobject.threads_init()
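# (threads_init() must be called before the worker
# thread starts; gobject.idle_add() is then used to
# run callbacks on the main loop from that thread.)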

# Now let's see if we can import a JSON
# module. We try each of these two modules
try:
	import json
except ImportError:
	import simplejson as json

# Constants
API_VERSION = "2.2"
API_KEY     = "VN8jyf7k8STlkO3lx*iw6w(("

class network_thread(threading.Thread):
	
	def __init__(self, error_handler):
		
		# Initialize the thread object
		threading.Thread.__init__(self)
		
		self.error_handler = error_handler
		
		# The priority queue of pending requests;
		# items with a lower priority value are
		# fetched first.
		self.request_queue = Queue.PriorityQueue()
		
		# Start it
		self.start()
	
	def issue_request(self, url, callback, data, decompress=True, priority=2):
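		# url:        the address to fetch
		# callback:   invoked on the main loop as callback(result, data)
		#             once the request has finished
		# data:       opaque value passed through to the callback
		# decompress: when True, the response is gunzipped and
		#             JSON-decoded before the callback runs
		# priority:   requests with a lower value are issued first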
		
		# Add the request to the priority queue
		self.request_queue.put([priority, [url, callback, data, decompress]])
	
	def issue_api_request(self, site, method, callback, data, additional_params=''):
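		# Builds a Stack Exchange API URL for the given site and
		# method and queues it. The method string is appended
		# directly after the API version, so it should begin with
		# a slash - for example, method "/info" yields a URL like:
		#   http://api.stackexchange.com/2.2/info?key=...&site=<site>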
		
		url = "http://api.stackexchange.com/" + API_VERSION + method + "?key=" + API_KEY + "&site=" + site
		
		if additional_params != '':
			url += '&' + additional_params
		
		self.issue_request(url, callback, data)
	
	def run(self):
	
		# Now we run in circles waiting
		# for each request to come in.
		while True:
			
			# Get the next item
			item_block = self.request_queue.get(True)
			
			item = item_block[1]
			
			if item is None:
				break
			
			# Note that we need to ensure that there are at
			# least 100ms between requests, so we simply insert
			# a small sleep here.
			time.sleep(0.1)
			
			try:
			
				# Issue the request
				request = urllib2.Request(item[0])
				request.add_header('Accept-Encoding', 'gzip,deflate')
			
				opener = urllib2.build_opener()
				gzipped_stream = opener.open(request)
			
				# If the request needs decompression, assume
				# that it also needs JSON decoding. (Passing
				# 16 + MAX_WBITS tells zlib to expect a gzip header.)
				if item[3]:
					raw_data = zlib.decompress(gzipped_stream.read(), 16 + zlib.MAX_WBITS)
					json_data = json.loads(raw_data)
				else:
					json_data = gzipped_stream.read()
			
				# Pause briefly, then schedule the callback on
				# the main loop so that it runs in the GUI thread.
				time.sleep(1)
				gobject.idle_add(item[1], json_data, item[2])
			
			except urllib2.URLError:
				
				# There was an error accessing the URL - a
				# number of different things can cause this.
				gobject.idle_add(self.error_handler, item[2], "Network error.")
			
			except zlib.error:
			
				# There was an error decompressing the response.
				gobject.idle_add(self.error_handler, item[2], "GZip error.")
				
			except Exception:
				
				# Catch anything else so that the thread keeps running.
				gobject.idle_add(self.error_handler, item[2], "Unknown error.")
