forked from Jerry1014/FundCrawler
-
Notifications
You must be signed in to change notification settings - Fork 0
/
MonkeyTest.py
53 lines (42 loc) · 1.58 KB
/
MonkeyTest.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
# -*- coding:UTF-8 -*-
"""
猴子测试嘛,就是瞎测两下的意思
"""
import time
import unittest
class MyTestCaseForGetFundList(unittest.TestCase):
    """Smoke test: fetch the fund list from the web and print every entry."""

    def test(self):
        # Project-local import kept function-scoped, matching the file's style.
        from FundListProvider import GetFundListFromWebForTest

        provider = GetFundListFromWebForTest()
        # A generator is directly iterable; the plain for-loop replaces the
        # original's manual next()/StopIteration loop with identical behavior.
        for a_fund in provider.get_fund_list():
            print(a_fund)
class MyTestCaseForCrawlingWebpage(unittest.TestCase):
    """Smoke test for the page-fetching worker process.

    Starts the worker, feeds it two URLs, waits for it to finish, then
    prints whatever it produced.
    """

    def test_for_get_page_context(self):
        # Project-local imports kept function-scoped, matching the file's style.
        from multiprocessing import Queue, Event
        from CrawlingCore import GetPageByWebWithAnotherProcessAndMultiThreading

        task_queue = Queue()
        result_queue = Queue()
        finished_event = Event()

        worker = GetPageByWebWithAnotherProcessAndMultiThreading(
            task_queue, result_queue, finished_event)
        worker.start()

        # Each task is (url, opaque identification tuple).
        for url in ('http://baidu.com', 'http://www.10jqka.com.cn/'):
            task_queue.put((url, ('just', 'for', 'test')))

        # Setting the event tells the worker "no more input"; the worker is
        # presumably expected to clear it when done — poll until then.
        finished_event.set()
        while finished_event.is_set():
            time.sleep(1)

        # Drain and display everything the worker produced.
        while not result_queue.empty():
            print(result_queue.get())
class MyTestCaseForCrawling(unittest.TestCase):
    """Smoke test for the file-writing coroutine in CrawlingFund."""

    def test_write_to_file(self):
        # Project-local import kept function-scoped, matching the file's style.
        import CrawlingFund

        content_and_filenames = [('11', '11.txt'), ('22', '22.txt')]

        writer = CrawlingFund.write_to_file(False)
        writer.send(None)  # prime the coroutine before sending real values
        for item in content_and_filenames:
            writer.send(item)
if __name__ == '__main__':
    # Discover and run every TestCase in this module when executed directly.
    unittest.main()