#include <HttpClient.h>
#include <BRegex.h>
#include <CopyJob.h>
#include <ReadJob.h>

/* Default (empty) header list used when the caller does not supply any. */
t_headers no_headers;

HttpClient::HttpClient(const String & _url, Handle * _out, int * _http_code_p,
                       const String & _fake_host, t_headers _headers) :
    url(_url), out(_out), http_code_p(_http_code_p), fake_host(_fake_host),
    headers(_headers), host(""), uri("") {
    /* Split the URL into host and uri, then start a non-blocking connect on port 80. */
    DecodeURL();
    Client.SetNonBlock();
    Client.Connect(host, 80);
    if (Client.IsConnected()) {
        SetBurst();
    } else {
        /* Connection still in progress: wake this task up once the socket is writable. */
        WaitFor(&Client, W4_WRITING);
    }
}
HttpClient::~HttpClient() {
}
int HttpClient::Do() throw (GeneralException) {
    t_headers::iterator i;
    String t;

    /* Simple state machine: `current` (kept between calls, assumed to start at 0
       via the Task base class) records how far the request has progressed.
       The case labels deliberately fall through once a state's work is done. */
    switch (current) {
    case 0:
        /* Complete the asynchronous connect started in the constructor. */
        if (Client.IsConnecting()) {
            Client.FinalizeConnect();
        }
        if (!Client.IsConnected()) {
            /* The connection failed; give up. */
            return TASK_DONE;
        }
        current = 1;
    case 1:
        /* Build the request and hand it to a CopyJob that writes it to the socket. */
        b << "GET " + uri + " HTTP/1.1\r\n"
             "Host: " + (fake_host == "" ? host : fake_host) + "\r\n"
             "Connection: close\r\n";
        for (i = headers.begin(); i != headers.end(); i++) {
            b << *i + "\r\n";
        }
        b << "\r\n";
        c = new CopyJob(&b, &Client);
        WaitFor(c);
        current = 2;
        Suspend(TASK_ON_HOLD);
    case 2:
        /* The request has been sent; read the server's response back into the buffer. */
        delete c;
        c = new ReadJob(&Client, &b);
        WaitFor(c);
        current = 3;
        Suspend(TASK_ON_HOLD);
    case 3:
        /* Log the response headers line by line, up to the blank line that ends them. */
        delete c;
        do {
            b >> t;
            printm(M_INFO, "Got response: " + t + "\n");
        } while (t.strlen());
    }
    /* Status-line parsing and forwarding of the body to `out` are not implemented here. */
    return TASK_DONE;
}
String HttpClient::GetStatus() {
    /* No human-readable status is reported for this task yet. */
    return "";
}
void HttpClient::DecodeURL() throw (GeneralException) {
    int p;
    static const Regex isURLValid("^http://[^/]");

    if (!isURLValid.Match(url))
        throw GeneralException("Invalid URL.");

    /* Strip the "http://" prefix, then split the remainder at the first '/'
       into the host part and the request URI. */
    String tmp = url.extract(7);
    p = tmp.strchr('/');
    if (p < 0) {
        /* No path given: the whole string is the host; default the URI to "/". */
        host = tmp;
        uri = "/";
        return;
    }
    host = tmp.extract(0, p - 1);
    uri = tmp.extract(p);
}
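
/* Usage sketch (not part of the original file): how a caller might create this
   task. Only the constructor signature and `no_headers` come from this file;
   the output Handle and the way the surrounding Task framework keeps calling
   Do() until it returns TASK_DONE are assumptions about the rest of the
   application, so the snippet is left disabled. */
#if 0
void fetch_example(Handle * out) {
    int http_code = 0;
    /* The task registers its own WaitFor() calls; the framework's task manager
       is expected to drive it to completion. */
    new HttpClient("http://example.com/index.html", out, &http_code, "", no_headers);
}
#endif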