author    | Ryan Huber <rhuber@gmail.com> | 2014-10-23 17:23:34 -0700
committer | Ryan Huber <rhuber@gmail.com> | 2014-10-23 17:23:34 -0700
commit    | 03169865fd0fe62c2db6f78b9ea92f9429416b6a (patch)
tree      | 5a2e8b924f04ee759f5aa6c624789ec3eadab6c7 /wee_slack.py
parent    | b756bbb3c8a1768d67aebf043c24f617d745dd40 (diff)
download  | wee-slack-03169865fd0fe62c2db6f78b9ea92f9429416b6a.tar.gz
add in some debug
Diffstat (limited to 'wee_slack.py')
-rw-r--r-- | wee_slack.py | 65
1 file changed, 31 insertions, 34 deletions
diff --git a/wee_slack.py b/wee_slack.py
index e196325..33ff7ac 100644
--- a/wee_slack.py
+++ b/wee_slack.py
@@ -703,19 +703,19 @@ def create_slack_websocket(data):
 
 #NOTE: switched to async/curl because sync slowed down the UI
 def async_slack_api_request(request, data):
-    t = time.time() + random.random()
-    request += "?t=%s" % t
-    data["token"] = slack_api_token
-    data = urllib.urlencode(data)
-    post = {"maxconnects": "1", "post": "1", "postfields": data}
-    url = 'https://%s/api/%s' % (domain, request)
-    queue.append(['url:%s' % (url), post, 20000, 'url_processor_cb', str(data)])
+    t = time.time() + random.random()
+    request += "?t=%s" % t
+    data["token"] = slack_api_token
+    data = urllib.urlencode(data)
+    post = {"maxconnects": "1", "post": "1", "postfields": data}
+    url = 'https://%s/api/%s' % (domain, request)
+    queue.append(['url:%s' % (url), post, 20000, 'url_processor_cb', str(data)])
 
 queue = []
 url_processor_lock=False
 
 #funny, right?
-big_data = {}
+big_data = ''
 
 def async_queue_cb(data, remaining_calls):
     global url_processor_lock
@@ -734,36 +734,33 @@ def async_queue_cb(data, remaining_calls):
     return w.WEECHAT_RC_OK
 
 
 def url_processor_cb(data, command, return_code, out, err):
-    global url_processor_lock
+    global url_processor_lock, big_data
+    dbg(command)
     if return_code == 0:
         url_processor_lock=False
-        #dbg(return_code)
-        query = urlparse.parse_qs(data)
-        if query.has_key("channel"):
-            channel = channels.find(query["channel"][0]).name
-    global big_data
-    identifier = sha.sha(str(data) + command).hexdigest()
-    if not big_data.has_key(identifier):
-        big_data[identifier] = ''
-    big_data[identifier] += out
-    try:
-        my_json = json.loads(big_data[identifier])
-    except:
-        my_json = False
-    if my_json:
-#        try:
-        #dbg('%-10s %-10s success %s %s' % (channel, len(big_data[identifier]), big_data[identifier][:5], big_data[identifier][-5:] ))
-#        dbg('%s %s success' % (big_data.keys(), identifier))
         query = urlparse.parse_qs(data)
         if query.has_key("channel"):
-            channel = query["channel"][0]
-        message_json = json.loads(big_data[identifier])
-        del big_data[identifier]
-        if message_json.has_key("messages"):
-            messages = message_json["messages"].reverse()
-            for message in message_json["messages"]:
-                message["channel"] = channels.find(channel)
-                process_message(message)
+            channel = channels.find(query["channel"][0]).name
+            if command.find("history") > -1:
+                dbg("getting history for channel %s" % (channel))
+    big_data += out
+    try:
+        my_json = json.loads(big_data)
+    except:
+        my_json = False
+    if my_json:
+        query = urlparse.parse_qs(data)
+        if query.has_key("channel"):
+            channel = query["channel"][0]
+        message_json = json.loads(big_data)
+        big_data = ''
+        if message_json.has_key("messages"):
+            messages = message_json["messages"].reverse()
+            for message in message_json["messages"]:
+                message["channel"] = channels.find(channel)
+                process_message(message)
+    else:
+        print data
     return w.WEECHAT_RC_OK
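
For readers skimming the change: the callback stops keying partial responses by a sha identifier and instead appends each chunk of curl output to a single big_data string, retrying json.loads until the buffer forms a complete document, then clearing it. Below is a minimal, standalone sketch of that buffer-then-parse pattern under those assumptions; the names feed_chunk and handle_payload are illustrative only and are not part of wee-slack.

import json

big_data = ''  # accumulates partial curl output, mirroring the new global in wee_slack.py

def handle_payload(payload):
    # Stand-in for wee-slack's per-message processing (process_message et al.).
    print("got %d messages" % len(payload.get("messages", [])))

def feed_chunk(chunk):
    # Append one chunk of output; parse and reset only once the buffer is valid JSON.
    global big_data
    big_data += chunk
    try:
        payload = json.loads(big_data)
    except ValueError:
        return  # incomplete JSON so far; wait for the next callback
    big_data = ''
    handle_payload(payload)

# Example: a history response arriving split across two callbacks.
feed_chunk('{"messages": [{"text": "hi"}')
feed_chunk(', {"text": "there"}]}')

One trade-off of the single string buffer (versus the old per-identifier dict) is that interleaved output from two concurrent requests would corrupt the buffer, which is presumably why the queue is drained one request at a time under url_processor_lock.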