Spring MVC: supporting both XML and JSON at the same time.
By: Roy.Liu    Last updated: 2019-10-05
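As a quick reference for the title topic: below is a minimal sketch of a Spring MVC controller that returns the same object as either JSON or XML through content negotiation. It assumes Jackson for JSON and the jackson-dataformat-xml module on the classpath for XML; the controller, path, and payload class are hypothetical examples, not code from this project.

import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class UserController {

    // Hypothetical payload; with jackson-dataformat-xml available, Spring MVC
    // can serialize it to XML as well as JSON.
    public static class User {
        public String name = "myname";
        public int age = 20;
    }

    // The client's Accept header (application/json or application/xml) decides
    // which representation Spring MVC writes back.
    @GetMapping(value = "/user", produces = { MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE })
    public User user() {
        return new User();
    }
}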
http://www.v2ex.com/t/84938
http://jms-exception.iteye.com/blog/1189177
config svn ad
import java.io.File;
import java.io.IOException;

import org.apache.http.HttpEntity;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.entity.mime.content.FileBody;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

// Requires httpclient and httpmime 4.3+ on the classpath.
public static String post(String pathToOurFile, String urlServer) throws ClientProtocolException, IOException {
    // e.g. pathToOurFile = "abc.txt", urlServer = "http://192.168.1.2/upload.action"
    CloseableHttpClient httpclient = HttpClients.createDefault();
    HttpPost httppost = new HttpPost(urlServer);

    MultipartEntityBuilder builder = MultipartEntityBuilder.create();
    // Browser-compatible mode keeps the multipart headers simple for servlet containers.
    builder.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);

    // Plain text form fields.
    builder.addTextBody("name", "myname");
    builder.addTextBody("passwd", "123456");
    // A small in-memory binary part named "file". To upload the actual file instead, use:
    // builder.addPart("my_file", new FileBody(new File(pathToOurFile)));
    builder.addBinaryBody("file", "mytest".getBytes(), ContentType.DEFAULT_TEXT, "mytest");

    HttpEntity mpEntity = builder.build();
    httppost.setEntity(mpEntity);

    CloseableHttpResponse response = httpclient.execute(httppost);
    try {
        System.out.println(response.getStatusLine());
        String body = EntityUtils.toString(response.getEntity(), "utf-8");
        System.out.println(body);
        return body;
    } finally {
        response.close();
        httpclient.close();
    }
}
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.util.Collection;

import javax.servlet.ServletException;
import javax.servlet.annotation.MultipartConfig;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.Part;

/**
 * Servlet implementation class FileUpload.
 * Receives the multipart request sent by the post() client above.
 */
@MultipartConfig(maxFileSize = 1024 * 1024 * 10) // limit each uploaded file to 10 MB
@WebServlet("/FileUpload")
public class FileUpload extends HttpServlet {
    private static final long serialVersionUID = 1L;

    /**
     * @see HttpServlet#HttpServlet()
     */
    public FileUpload() {
        super();
    }

    /**
     * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        doPost(request, response);
    }

    /**
     * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        // With @MultipartConfig present, plain text fields are also available via getParameter().
        System.out.println(request.getParameter("name"));
        // System.out.println(request.getParameter("passwd"));
        // Part parts1 = request.getPart("my_file");
        // System.out.println(parts1);

        // Iterate over every part of the multipart request and dump its content.
        Collection<Part> parts = request.getParts();
        for (Part part : parts) {
            System.out.println(part.getName());
            try {
                InputStream in = part.getInputStream();
                System.out.println(converts(in)); // print the content of this part
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

        PrintWriter out = response.getWriter();
        out.write("aaaa");
        out.flush();
    }

    /**
     * Read an InputStream into a String, line by line.
     * If the result is garbled, use new InputStreamReader(is, "GBK") instead.
     */
    public String converts(InputStream is) {
        StringBuilder sb = new StringBuilder();
        try {
            BufferedReader br = new BufferedReader(new InputStreamReader(is));
            String readline;
            while ((readline = br.readLine()) != null) {
                sb.append(readline);
            }
            br.close();
        } catch (IOException ie) {
            System.out.println("converts failed.");
        }
        return sb.toString();
    }
}
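To exercise the two pieces together, the post() helper above can be pointed at the FileUpload servlet. A small usage sketch, assuming the servlet is deployed in a webapp named "demo" on a local Tomcat (the host, port, and context path are assumptions):

public static void main(String[] args) throws Exception {
    // "abc.txt" is only passed through; the current post() sends an in-memory "mytest" part.
    String result = post("abc.txt", "http://localhost:8080/demo/FileUpload");
    System.out.println(result); // should print "aaaa", the servlet's response body
}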
Spring Cloud:
https://blog.csdn.net/zhongzunfa/article/details/79481984
https://blog.csdn.net/java_yes/article/details/80703623
http://www.cnblogs.com/cralor/p/9223994.html
https://windmt.com/2018/04/14/spring-cloud-1-services-governance/
https://github.com/zhaoyibo/spring-cloud-study
https://stackoverflow.com/questions/35517713/unable-to-access-spring-boot-actuator-actuator-endpoint
################
Install JDK 1.8 on CentOS:
https://www.cnblogs.com/xqzt/p/4934451.html
Install Solr 7.4 on Linux:
http://www.cnblogs.com/tony-zt/p/9260017.html
https://blog.csdn.net/jiadajing267/article/details/78692702
https://blog.csdn.net/csdn_huzeliang/article/details/80876200
export CLASSPATH=.
nohup java -server -Xmx6G -Xms6G -Xmn2G -XX:+DisableExplicitGC -XX:SurvivorRatio=1 -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+UseCMSCompactAtFullCollection -XX:CMSMaxAbortablePrecleanTime=500 -XX:+CMSPermGenSweepingEnabled -XX:+CMSClassUnloadingEnabled -XX:+PrintClassHistogram -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintHeapAtGC -Xloggc:gc.log -Djava.ext.dirs=lib com.test.server.HttpChunkedServer 8000 >server.out 2>&1 &
yum install -y pcre pcre-devel
yum install -y zlib zlib-devel
#yum install -y openssl openssl-devel
./configure --prefix=/home/summer/nginx --with-stream
make
make install
./configure --prefix=/home/summer/projects/nginx/release --add-module=/home/summer/projects/nginx/nginx-upstream-fair-master --with-poll_module --with-threads --with-file-aio --with-http_ssl_module --with-http_sub_module --with-http_dav_module --with-http_flv_module --with-http_mp4_module --with-http_gunzip_module --with-http_gzip_static_module --with-http_stub_status_module --with-stream --with-stream=dynamic --with-stream_ssl_module --with-stream_realip_module
2019-10-05 18:01:51.148 [mosquitto-hadoop-save-thread-0] ERROR com.upoint.device.utils.ConnectHadoop - ===hdfs create folder error: Cannot create directory /gdpi/public/0xff04/20191005/18. Name node is in safe mode.
Resources are low on NN. Please add or free up more resources then turn off safe mode manually. NOTE: If you turn off safe mode before adding resources, the NN will immediately return to safe mode. Use "hdfs dfsadmin -safemode leave" to turn safe mode off.
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkNameNodeSafeMode(FSNamesystem.java:1335)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:3874)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:984)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:634)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2217)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2213)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2211)

2019-10-05 18:01:51.149 [mosquitto-hadoop-save-thread-0] ERROR com.upoint.device.utils.ConnectHadoop - ===hdfs add file error: Cannot create file/user/tomcat/9811d29858ea4e709bea7d50cb3f885b.txt. Name node is in safe mode.
Resources are low on NN. Please add or free up more resources then turn off safe mode manually. NOTE: If you turn off safe mode before adding resources, the NN will immediately return to safe mode. Use "hdfs dfsadmin -safemode leave" to turn safe mode off.
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkNameNodeSafeMode(FSNamesystem.java:1335)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2474)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2363)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:624)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:398)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2217)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2213)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2211)
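The stack traces above come from the NameNode being in safe mode because its disk resources are low; the durable fix is to free or add space on the NameNode host, after which safe mode can be left with "hdfs dfsadmin -safemode leave" (as the message says). On the client side, a writer such as the ConnectHadoop utility can check for safe mode before attempting mkdirs/create. A minimal sketch, assuming Hadoop 2.x client libraries and a core-site.xml on the classpath; the 10-second poll interval is arbitrary:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;

public class SafeModeGuard {

    /** Block until the NameNode has left safe mode, polling every 10 seconds. */
    public static void waitUntilWritable(Configuration conf) throws Exception {
        FileSystem fs = FileSystem.get(conf);
        if (!(fs instanceof DistributedFileSystem)) {
            return; // local or other file system: nothing to check
        }
        DistributedFileSystem dfs = (DistributedFileSystem) fs;
        // SAFEMODE_GET only queries the state; it does not change it.
        while (dfs.setSafeMode(SafeModeAction.SAFEMODE_GET)) {
            System.out.println("NameNode is in safe mode, waiting...");
            Thread.sleep(10_000L);
        }
    }
}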