My crawler is built on WebMagic. Because I was crawling too frequently, my IP unfortunately seems to have been blacklisted, which led me to proxy servers.
Update 2024-03-14: Xdaili appears to be down. (lll¬ω¬)
1. Bought Xdaili's dynamic forwarding plan for 20 RMB.
2. Using it in WebMagic
(1) First, the scheduled-task entry point. I put it in a controller, with scheduling handled by the xxl-job framework.
The key call is httpClientDownloader.setProxyProvider(CrowProxyProvider.from(new Proxy("forward.xdaili.cn", 80)));
followed by .setDownloader(httpClientDownloader) when creating the Spider.
package com.crow.web.controller;
import com.crow.config.ResponseMessage;
import com.crow.webmagic.downloader.CrowProxyProvider;
import com.crow.webmagic.pageprocessor.NewMotoPageProcessor;
import com.crow.webmagic.pageprocessor.YiChePageProcessor;
import com.crow.webmagic.pipeline.MTUOSpiderPipeline;
import com.crow.webmagic.pipeline.NewMotoSpiderPipeline;
import com.crow.webmagic.pipeline.YicheSpiderPipeline;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.downloader.HttpClientDownloader;
import us.codecraft.webmagic.proxy.Proxy;
import static com.crow.config.ConstantConfig.YICHE_BRAND_URL;
/**
 * Starts a crawl via URL; it runs only once. Meant for debugging and for manually
 * triggering a crawl when the scheduled job has a problem.
 * @return {}
 * @author guancj
 * @date 2020/4/1 9:25
 */
@RestController
public class StartUpController {
@Autowired
YicheSpiderPipeline spiderPipeline;
@GetMapping
public ResponseMessage index() {
HttpClientDownloader httpClientDownloader = new HttpClientDownloader();
//set the dynamic-forwarding proxy via the custom ProxyProvider
httpClientDownloader.setProxyProvider(CrowProxyProvider.from(new Proxy("forward.xdaili.cn", 80)));
Spider.create(new YiChePageProcessor())
.setDownloader(httpClientDownloader)
.addUrl(YICHE_BRAND_URL)
.addPipeline(spiderPipeline)
.thread(5)
.run();
return ResponseMessage.ok("crawl finished");
}
}
(2) The code above uses a few custom classes; their code is listed below.
The CrowProxyProvider class
package com.crow.webmagic.downloader;
import com.crow.utils.IPCheckUtil;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Task;
import us.codecraft.webmagic.proxy.Proxy;
import us.codecraft.webmagic.proxy.ProxyProvider;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Custom ProxyProvider that adds a validity check for each proxy.
 */
public class CrowProxyProvider implements ProxyProvider {
    private final List<Proxy> proxies;
    private final AtomicInteger pointer;
    public CrowProxyProvider(List<Proxy> proxies) {
        this(proxies, new AtomicInteger(-1));
    }
    private CrowProxyProvider(List<Proxy> proxies, AtomicInteger pointer) {
        this.proxies = proxies;
        this.pointer = pointer;
    }
    /**
     * Builds a provider from the given proxies, keeping only those that pass the validity check.
     */
    public static CrowProxyProvider from(Proxy... proxies) {
        ArrayList<Proxy> proxiesTemp = new ArrayList<>(proxies.length);
        for (Proxy proxy : proxies) {
            if (IPCheckUtil.checkValidIP(proxy.getHost(), proxy.getPort())) {
                proxiesTemp.add(proxy);
            }
        }
        proxiesTemp.trimToSize();
        return new CrowProxyProvider(Collections.unmodifiableList(proxiesTemp));
    }
    @Override
    public void returnProxy(Proxy proxy, Page page, Task task) {
    }
    @Override
    public Proxy getProxy(Task task) {
        return this.proxies.get(this.incrForLoop());
    }
    // Round-robin index over the proxy list
    private int incrForLoop() {
        int p = this.pointer.incrementAndGet();
        int size = this.proxies.size();
        if (p < size) {
            return p;
        } else {
            while (!this.pointer.compareAndSet(p, p % size)) {
                p = this.pointer.get();
            }
            return p % size;
        }
    }
}
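If you want to sanity-check the provider outside a full crawl, here is a minimal sketch. The 127.0.0.1:8888 proxy is a made-up placeholder (only entries that pass IPCheckUtil survive from()), and the Task argument can be null because this implementation ignores it.
import com.crow.webmagic.downloader.CrowProxyProvider;
import us.codecraft.webmagic.proxy.Proxy;
public class ProxyProviderDemo {
    public static void main(String[] args) {
        // The second entry is a dummy placeholder and will usually be filtered out by the validity check.
        CrowProxyProvider provider = CrowProxyProvider.from(
                new Proxy("forward.xdaili.cn", 80),
                new Proxy("127.0.0.1", 8888));
        // getProxy rotates round-robin over the surviving proxies.
        Proxy p = provider.getProxy(null);
        System.out.println(p.getHost() + ":" + p.getPort());
    }
}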
The IPCheckUtil utility class
package com.crow.utils;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.URL;
public class IPCheckUtil {
/**
 * Checks whether a proxy is usable by requesting http://www.ip138.com through it.
 * @param ip   proxy IP address
 * @param port proxy port
 * @return whether this proxy is valid
 */
public static boolean checkValidIP(String ip, Integer port) {
    HttpURLConnection connection = null;
    try {
        URL url = new URL("http://www.ip138.com");
        // The proxy server under test
        InetSocketAddress proxyAddr = new InetSocketAddress(ip, port);
        Proxy proxy = new Proxy(Proxy.Type.HTTP, proxyAddr);
        connection = (HttpURLConnection) url.openConnection(proxy);
        connection.setReadTimeout(4000);
        connection.setConnectTimeout(4000);
        connection.setRequestMethod("GET");
        return connection.getResponseCode() == 200;
    } catch (Exception e) {
        return false;
    } finally {
        // Always release the connection, whatever the outcome
        if (connection != null) {
            connection.disconnect();
        }
    }
}
}
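To try the check on its own, a quick sketch (the host and port are placeholders; substitute a real proxy):
import com.crow.utils.IPCheckUtil;
public class IPCheckDemo {
    public static void main(String[] args) {
        // Placeholder proxy address; the check requests http://www.ip138.com through it with 4s timeouts.
        boolean usable = IPCheckUtil.checkValidIP("127.0.0.1", 8888);
        System.out.println("proxy usable: " + usable);
    }
}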
(3) Next, configure the actual Xdaili account credentials.
I do this in the WebMagic PageProcessor class; the full code is below.
The key line is: .addHeader("Proxy-Authorization", ProxyGeneratedUtil.authHeader(ORDER_NUM, SECRET, (int) (new Date().getTime() / 1000)))//set the proxy auth header
package com.crow.webmagic.pageprocessor;
import com.alibaba.fastjson.JSONObject;
import com.crow.utils.ProxyGeneratedUtil;
import com.crow.utils.UserAgentUtil;
import com.crow.web.entity.VehicleBrandList;
import com.crow.web.entity.VehicleSeries;
import com.crow.web.entity.VehicleSeriesListVO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.processor.PageProcessor;
import us.codecraft.webmagic.selector.JsonPathSelector;
import us.codecraft.webmagic.selector.Selectable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
import static com.crow.config.ConstantConfig.IMG_UNDEFINED;
import static com.crow.config.ConstantConfig.VEHEICLE_KEY_CAR;
/**
 * Data crawling
 * @return {}
 * @author guancj
 * @date 2020/5/9 17:36
 */
@Component
public class SelfPageProcessor implements PageProcessor {//customize this class with your own extraction logic
private static final Logger log = LoggerFactory.getLogger(SelfPageProcessor.class);
private static final String ORDER_NUM = "XXXXXXXXX";//Xdaili order number
private static final String SECRET = "12345678901234567890";//Xdaili secret
//Site-level crawl settings: charset, crawl interval, retry count, proxy header, User-Agent, etc.
private Site site = Site.me()
.addHeader("Proxy-Authorization", ProxyGeneratedUtil.authHeader(ORDER_NUM, SECRET, (int) (new Date().getTime() / 1000)))//proxy auth header for the forwarding gateway
.setDisableCookieManagement(true)
.setCharset("UTF-8")
.setTimeOut(30000)
.setRetryTimes(3)
.setSleepTime(new Random().nextInt(20) * 100)
.setUserAgent(UserAgentUtil.getRandomUserAgent());
public void process(Page page) {
    try {
        log.info("spider start--->");
        log.info("visited URL: {}", page.getUrl().toString());
        // ...extraction logic omitted here...
        log.info("spider end--->");
    } catch (Exception e) {
        log.error("page processing failed", e);
    }
}
public Site getSite() {
return site;
}
}
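One detail worth noting: in the class above the Proxy-Authorization header is signed once, when the site field initializer runs, so the signature timestamp stays fixed for the lifetime of the bean. If the forwarding gateway expects a reasonably fresh timestamp (an assumption I have not verified against Xdaili's docs), one option is to re-sign inside getSite(); as far as I can tell Site keeps headers in a map keyed by name, so adding the header again simply replaces the previous value:
public Site getSite() {
    // Re-sign on each call so the timestamp in the signature stays current.
    // Assumption: the gateway rejects stale signatures; keep the original one-time signing if it does not.
    site.addHeader("Proxy-Authorization",
            ProxyGeneratedUtil.authHeader(ORDER_NUM, SECRET, (int) (System.currentTimeMillis() / 1000)));
    return site;
}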
(4) Here is the code for the helper classes used above.
The ProxyGeneratedUtil class
package com.crow.utils;
public class ProxyGeneratedUtil {
public static String authHeader(String orderno, String secret, int timestamp) {
    // Build the string to be signed
    String plainText = String.format("orderno=%s,secret=%s,timestamp=%d", orderno, secret, timestamp);
    // Compute the MD5 signature
    String sign = org.apache.commons.codec.digest.DigestUtils.md5Hex(plainText).toUpperCase();
    // Assemble the value of the Proxy-Authorization request header
    String authHeader = String.format("sign=%s&orderno=%s&timestamp=%d", sign, orderno, timestamp);
    return authHeader;
}
}
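As a quick illustration of the header's shape, a sketch using dummy credentials (neither value below is a real account):
import com.crow.utils.ProxyGeneratedUtil;
public class AuthHeaderDemo {
    public static void main(String[] args) {
        // Dummy order number and secret, for illustration only.
        String header = ProxyGeneratedUtil.authHeader(
                "ORDER123", "12345678901234567890",
                (int) (System.currentTimeMillis() / 1000));
        // Prints something like: sign=<32-char upper-case MD5>&orderno=ORDER123&timestamp=<unix seconds>
        System.out.println(header);
    }
}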