HCE Project Python language Distributed Tasks Manager Application, Distributed Crawler Application and client API bindings.  2.0.0-chaika
Hierarchical Cluster Engine Python language binding
ftest_dc_fetchers_twitter.py
1 """
2 HCE project, Python bindings, Crawler application.
3 Fetcher module tests.
4 
5 @package: dc
6 @file ftest_dc_fetchers.py
7 @author bgv <bgv.hce@gmail.com>
8 @link: http://hierarchical-cluster-engine.com/
9 @copyright: Copyright &copy; 2015 IOIX Ukraine
10 @license: http://hierarchical-cluster-engine.com/license/
11 @since: 1.4.3
12 """

import logging
import os
import json
import jsonpickle

from app.Utils import varDump
from dc_crawler.Fetcher import BaseFetcher


# create logger
logger = logging.getLogger('ftest_dc_fetchers')
logger.setLevel(logging.DEBUG)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter('%(asctime)s - %(thread)ld - %(threadName)s - %(module)s ' + \
                              '- %(funcName)s - %(levelname)s - %(message)s')
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
logger.addHandler(ch)


if __name__ == "__main__":
  res = None

  print "CurDir:\n", os.path.dirname(os.path.realpath(__file__))
  loadHeaders = True
  headersDict = {}
  if loadHeaders:
    hdrs = None
    with open("../../ini/crawler-task_headers.txt", 'r') as f:
      hdrs = ''.join(f.readlines()) + "\n"
    for header in hdrs.splitlines():
      print header
      if not header or header[0] == '#':
        continue
      try:
        key, value = header[:header.index(':')].strip(), header[header.index(':') + len(':'):].strip()
      except Exception:
        print "header error: %s" % header
        os.abort()
      headersDict[key] = value

  print "headersDict:\n", varDump(headersDict)
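  # NOTE (illustrative, not taken from the project sources): the loop above assumes
  # crawler-task_headers.txt holds one "Name: value" pair per line, with lines
  # starting with '#' treated as comments, e.g.:
  #   User-Agent: Mozilla/5.0 (X11; Linux x86_64)
  #   Accept: text/html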
  #import sys
  #sys.exit()

  #url = 'http://127.0.0.1/'
  #url = 'http://127.0.0.1/index0.html'
  #url = 'about:blank'
  #url = 'https://www.google.co.jp/search?q=&gws_rd=cr###window.IFRAME_KWSRC="http://127.0.0.1/keywords_big.txt";'
  #url = 'https://www.google.com/search?q=test&gws_rd=cr'
  url = 'https://www.google.co.jp/search?q=&gws_rd=cr###window.IFRAME_KWSRC="http://127.0.0.1/keywords.txt";'
  httpTimeout = 70000
  # httpTimeout is in milliseconds; tm is the timeout in seconds passed to the fetcher
  tm = int(httpTimeout) / 1000.0
  if isinstance(httpTimeout, float):
    tm += float('0' + str(httpTimeout).strip()[str(httpTimeout).strip().find('.'):])
  allowRedirects = 1
  proxies = None
  authorization = None
  postData = None
  process_content_types = ["text/html"]
  maxResourceSize = 1024 * 1024
  maxHttpRedirects = 3
  fetchType = BaseFetcher.TYP_NORMAL
  localFilters = None
  urlObjDepth = 0

  #Dynamic fetcher test
  #headersDict = {'--disable-web-security':'', '--allow-running-insecure-content':''}
  macroCode = {"name":"tests",
               "sets":[{"name":"set1", "items":['', '', '', ''], "repeat":1, "delay":0}],
               "result_type":2,
               "result_content_type":"text/json",
               "result_fetcher_type":1}

  macroCode['sets'][0]['items'][0] = '1'
  macroCode['sets'][0]['items'][1] = \
    "\
    var s=window.document.createElement('script');\
    s.src='http://127.0.0.1/macro_test4.js';\
    s.type='text/javascript';\
    window.document.head.appendChild(s);\
    return [window.jQuery===undefined, window.MACRO_PREPARE===undefined, window.MACRO_COLLECT===undefined];\
    "
  macroCode['sets'][0]['items'][2] = '30'
  macroCode['sets'][0]['items'][3] = \
    "\
    if(window.MACRO_COLLECT===undefined){\
      return [window.jQuery===undefined, window.MACRO_COLLECT===undefined];\
    }else{\
      return [window.jQuery===undefined, window.MACRO_COLLECT([window.IFRAME_NAME, window.IFRAME_URLS])];\
    }\
    "
  fetchType = BaseFetcher.TYP_DYNAMIC
  #change current dir for webdriver executable run with path ./
  os.chdir("../../bin/")

  try:
    #Fetch with the fetcher selected above via fetchType (NORMAL request-lib based or DYNAMIC webdriver based)
    res = BaseFetcher.get_fetcher(fetchType).open(url, timeout=tm, headers=headersDict,
                                                  allow_redirects=allowRedirects, proxies=proxies,
                                                  auth=authorization, data=postData, log=logger,
                                                  allowed_content_types=process_content_types,
                                                  max_resource_size=maxResourceSize,
                                                  max_redirects=maxHttpRedirects,
                                                  filters=localFilters, depth=urlObjDepth, macro=macroCode)

  except Exception, err:
    #logger.debug("Exception:\n%s", varDump(err))
    print "Exception:\n", varDump(err)

  #rd = varDump(res)
  rd = json.dumps(json.loads(jsonpickle.encode(res)), indent=2)
  #logger.debug("Result:\n%s", varDump(res))
  print "Result:\n", rd
