Version 1.0 uses file operations to keep writing random test data to code/0.in, pipes the file into the two executables, and compares their outputs, stopping as soon as the outputs differ; at that point the contents of 0.in are the failing test case.
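
As a rough sketch of the idea (not the original Version 1.0 script), the loop can be written in a few lines of Python. Here ./a and ./b stand in for the two executables under test, and gen_case is a hypothetical generator that you would replace with the input format of the actual problem:

import random
import subprocess


def gen_case(path='code/0.in'):
    # Hypothetical generator: two random integers on one line.
    # Replace with whatever input format the tested programs expect.
    with open(path, 'w') as f:
        f.write('%d %d\n' % (random.randint(1, 100), random.randint(1, 100)))


def run(exe, path='code/0.in'):
    # Pipe the test file into the executable and capture its stdout.
    with open(path) as f:
        return subprocess.run([exe], stdin=f, capture_output=True, text=True).stdout


if __name__ == '__main__':
    while True:
        gen_case()
        if run('./a') != run('./b'):
            # The outputs differ, so code/0.in now holds the counterexample.
            print('Mismatch found, see code/0.in')
            break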

Read more »

A Python assignment: draw a school-anniversary poster with turtle.

import turtle


def draw_line(angle, length):
    # Draw a straight segment of the given length along the given heading.
    turtle.pendown()
    turtle.seth(angle)
    turtle.forward(length)


def locate(x, y, extend=None):
    # Jump (pen up) to a point measured from the emblem's top-left anchor
    # at (-100, 200); use the extra Turtle instance if one is supplied.
    startx, starty = -100, 200
    if extend:
        extend.penup()
        extend.goto(startx + x, starty - y)
        extend.pendown()
    else:
        turtle.penup()
        turtle.goto(startx + x, starty - y)
        turtle.pendown()


def draw_xq():
    # Draw the anniversary emblem.
    turtle.pencolor('white')
    turtle.pensize(1)
    turtle.penup()
    locate(96, 1)
    draw_line(270, 15)
    locate(120, 28)
    turtle.fillcolor('white')
    turtle.begin_fill()
    turtle.seth(120)
    turtle.circle(27, 120)
    draw_line(180, 53)
    draw_line(270, 155)
    draw_line(0, 150)
    draw_line(90, 155)
    draw_line(180, 53)
    turtle.end_fill()
    locate(10, 53)
    turtle.pensize(6)
    turtle.pencolor('red')
    draw_line(0, 170)
    locate(20, 82)
    turtle.pensize(8)
    draw_line(0, 30)
    locate(170, 82)
    draw_line(180, 30)
    locate(96, 185)
    turtle.pensize(50)
    draw_line(90, 70)
    turtle.pensize(12)
    turtle.pencolor('white')
    locate(60, 125)
    draw_line(0, 70)
    locate(60, 178)
    draw_line(0, 70)
    turtle.pensize(1)
    locate(10, 188)
    turtle.begin_fill()
    draw_line(270, 18)
    draw_line(240, 24)
    draw_line(0, 56)
    draw_line(240, 9)
    draw_line(0, 88)
    draw_line(120, 9)
    draw_line(0, 56)
    draw_line(120, 24)
    draw_line(90, 18)
    draw_line(180, 169)
    turtle.end_fill()
    turtle.pensize(4)
    turtle.pencolor('red')
    locate(8, 208)
    draw_line(0, 58)
    draw_line(60, 9)
    draw_line(0, 53)
    draw_line(300, 9)
    draw_line(0, 58)
    turtle.hideturtle()
    font = turtle.Turtle()
    locate(43, 265, font)
    font.pencolor('white')
    font.write('1909-2019', font=('Arial', 16, 'bold'))
    font.hideturtle()


def draw_word():
    # Write the anniversary text with a dedicated Turtle.
    font = turtle.Turtle()
    locate(-186, 350, font)
    font.pencolor('white')
    font.write('河南理工大学建校110周年', font=('Arial', 36, 'bold'))
    locate(-180, 380, font)
    font.write('HENAN POLYTECHNIC UNIVERSITY 110TH ANNIVERSARY', font=('Arial', 16, 'bold'))
    font.color(200 / 255, 166 / 255, 92 / 255)
    locate(300, 100, font)
    font.write('好学力行', font=('楷体', 24, 'bold'))
    locate(-250, 200, font)
    font.write('明德任责', font=('楷体', 24, 'bold'))
    font.hideturtle()


def main():
    width, height = 800, 600
    turtle.screensize(bg='red')
    turtle.setup(width, height, 400, 100)
    turtle.speed(10)
    draw_xq()
    draw_word()
    turtle.hideturtle()
    turtle.done()


if __name__ == '__main__':
    main()

The result:

* The site being scraped has moved; the Host constant in the script now points to the new table-of-contents page.
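
As context for the code below: the script fetches the chapter index from Host, creates a folder named after the novel, then downloads every chapter in parallel with a ThreadPoolExecutor, saving each one as a numbered .txt file; on a network error a request is retried up to ten times with a randomly chosen User-Agent.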

from concurrent.futures import ThreadPoolExecutor
from bs4 import BeautifulSoup
import requests
import random
import time
import os
import re

# Host = 'https://www.biquke.com/bq/37/37868/'
Host = 'https://www.biquke.com/bq/75/75429/'

# A small pool of User-Agent strings; each request picks one at random.
header = [
    {'User-Agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 '
                   '(KHTML, like Gecko) Chrome/46.0.2490.76 Mobile Safari/537.36'},
    {'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 '
                   '(KHTML, like Gecko) Version/5.1 Safari/534.50'},
    {'User-Agent': 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)'},
    {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1'},
    {'User-Agent': 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)'}
]
name = ''  # novel title, set by get_menu() and used as the output folder


def geturl(url, page):
    # Download one chapter and save it as '<page> - <title>.txt',
    # retrying up to ten times on network errors.
    repeat = 0
    while True:
        try:
            req = requests.get(Host + url, headers=header[random.randint(0, 4)], timeout=200)
            req.close()
            result = BeautifulSoup(req.content, "html.parser")
            title = result.find('h1').getText()
            result = result.find('div', {'id': 'content'}).getText()
            # Strip characters that are not allowed in file names.
            title = re.sub('[\\/:*?"<>|\r\n]+', '', title)
            print(title)
            with open('./%s/%03d' % (name, page) + ' - ' + title + '.txt', 'w', encoding='utf-8') as file:
                file.write(result)
            break
        except IOError:
            print('Retry #%d' % repeat)
            repeat += 1
            if repeat > 10:
                print(Host + url + ' failed!')
                break
            continue


def get_menu():
    # Fetch the table of contents, create the output folder and
    # download every chapter with a thread pool.
    req = requests.get(Host, headers=header[random.randint(0, 4)])
    html = BeautifulSoup(req.content, "html.parser")
    global name
    name = str(html.find('h1'))[4:-5]  # strip the surrounding <h1></h1> tags
    if not os.path.exists(name):
        os.makedirs(name)
    html = html.find_all('dd')
    with ThreadPoolExecutor(1024) as pool:
        url = []
        index = []
        for i, j in enumerate(html):
            url.append(j.find('a')['href'])
            index.append(i)
        pool.map(geturl, url, index)


if __name__ == '__main__':
    start = time.time()
    get_menu()
    print('Elapsed: %ss' % (time.time() - start))