Sending data to Flume: the client SDK and the HTTP source
https://flume.apache.org/FlumeDeveloperGuide.html#client-sdk
Using Flume's HTTP source - CSDN blog https://blog.csdn.net/liuxiao723846/article/details/78131732
A custom Flume sink that writes to a file - 伊米伊念 - cnblogs https://www.cnblogs.com/sunyaxue/p/6645753.html
Managing the jar packages of custom Flume components - CSDN blog https://blog.csdn.net/u012373815/article/details/54352177
package com.mycom;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Transaction;
import org.apache.flume.conf.Configurable;
import org.apache.flume.sink.AbstractSink;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MyFlumeSink extends AbstractSink implements Configurable {

    private static final Logger logger = LoggerFactory.getLogger(MyFlumeSink.class);
    private static final String PROP_KEY_ROOTPATH = "fileName";
    private String fileName;

    @Override
    public void configure(Context context) {
        // Read the target file path from the agent configuration (a1.sinks.k1.fileName=...).
        fileName = context.getString(PROP_KEY_ROOTPATH);
    }

    @Override
    public Status process() throws EventDeliveryException {
        Channel ch = getChannel();
        Transaction txn = ch.getTransaction();
        Event event = null;
        txn.begin();
        // NOTE: this busy-waits until an event arrives; while the channel is empty the
        // runner thread never leaves process(), which causes the shutdown problem shown
        // later in the log.
        while (true) {
            event = ch.take();
            if (event != null) {
                break;
            }
        }
        try {
            logger.debug("Get event.");
            String body = new String(event.getBody());
            System.out.println("event.getBody()-----" + body);
            String res = body + ":" + System.currentTimeMillis() + "\r\n";
            File file = new File(fileName);
            // Append the event body plus a timestamp to the configured file.
            try (FileOutputStream fos = new FileOutputStream(file, true)) {
                fos.write(res.getBytes());
            } catch (IOException e) {
                e.printStackTrace();
            }
            txn.commit();
            return Status.READY;
        } catch (Throwable th) {
            txn.rollback();
            if (th instanceof Error) {
                throw (Error) th;
            } else {
                throw new EventDeliveryException(th);
            }
        } finally {
            txn.close();
        }
    }
}
- [root@d1 apache-flume-1.8.0-bin]# cat conf/http_test.conf
- a1.sources=r1
- a1.sinks=k1
- a1.channels=c1
- a1.sources.r1.type=http
- a1.sources.r1.bind=0.0.0.0
- a1.sources.r1.port=50000
- a1.sources.r1.channels=c1
- a1.sinks.k1.type=logger
- a1.sinks.k1.channel=c1
- a1.sinks.k1.type = com.mycom.MyFlumeSink
- a1.sinks.k1.fileName=/home/mysinks.txt
- a1.channels.c1.type=memory
- #a1.channels.c1.capacity=1000
- #a1.channels.c1.transactionCapacity=100
- a1.channels.c1.capacity=8
- a1.channels.c1.transactionCapacity=4
- [root@d1 apache-flume-1.8.0-bin]#
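To exercise this pipeline without a browser or PHP client, an event can be POSTed straight to the HTTP source. The following is a minimal sketch, not part of the original project: the class name and payload are made up, and it assumes the agent above is running locally on port 50000 with the default JSONHandler, which expects a JSON array of {headers, body} objects.

package com.mycom;

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

// Hypothetical test client: POST one event to the Flume HTTP source on port 50000.
public class HttpSourceTestClient {

    public static void main(String[] args) throws Exception {
        // JSONHandler expects a JSON array of events, each with "headers" and "body".
        String payload = "[{\"headers\":{\"host\":\"test\"},\"body\":\"hello httpSource\"}]";

        HttpURLConnection conn =
                (HttpURLConnection) new URL("http://127.0.0.1:50000").openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");

        try (OutputStream out = conn.getOutputStream()) {
            out.write(payload.getBytes(StandardCharsets.UTF_8));
        }
        // 200 means the events were committed to the channel.
        System.out.println("HTTP status: " + conn.getResponseCode());
        conn.disconnect();
    }
}

With the custom sink configured above, each accepted event should append a line of the form <body>:<timestamp> to /home/mysinks.txt.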
[root@d1 home]# cat ~/.bash_profile
# .bash_profile
# Get the aliases and functions
if [ -f ~/.bashrc ]; then
. ~/.bashrc
fi
# User specific environment and startup programs
PATH=$PATH:$HOME/bin
export PATH
JAVA_HOME=/usr/local/jdk; export JAVA_HOME
HADOOP_PREFIX=/home/hadoop-2.9.1; export HADOOP_PREFIX
HADOOP_CONF_DIR=$HADOOP_PREFIX/etc/hadoop; export HADOOP_CONF_DIR
HADOOP_HOME=/home/hadoop-2.9.1; export HADOOP_HOME
HADOOP_YARN_HOME=$HADOOP_PREFIX; export HADOOP_YARN_HOME
[root@d1 home]#
WeChat Mini Program primer (part 2): sending JSON with wx.request - 微风&细雨 - cnblogs https://www.cnblogs.com/weifengxiyu/p/6515317.html
bin/flume-ng agent -c /home/apache-flume-1.8.0-bin/conf/ -f /home/apache-flume-1.8.0-bin/conf/http_test.conf -n a1 -Dflume.root.logger=INFO,console -C /home/MyBgJavaLan/target/MyAid-1.0.0-jar-with-dependencies.jar
[INFO]
[INFO] --- maven-jar-plugin:2.3.2:jar (default-jar) @ MyAid ---
[INFO] Building jar: /home/MyBgJavaLan/target/MyAid-1.0.0.jar
[INFO]
[INFO] --- maven-assembly-plugin:2.4:single (make-assembly) @ MyAid ---
[INFO] artifact net.minidev:json-smart: checking for updates from central
[INFO] artifact net.minidev:json-smart: checking for updates from dynamodb-local-oregon
[INFO] artifact net.minidev:json-smart: checking for updates from apache.snapshots.https
[INFO] artifact net.minidev:json-smart: checking for updates from repository.jboss.org
[INFO] Building jar: /home/MyBgJavaLan/target/MyAid-1.0.0-jar-with-dependencies.jar
[INFO] ------------------------------------------------------------------------
[INFO] BUILD SUCCESS
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 58.121s
[INFO] Finished at: Tue Aug 07 17:33:01 CST 2018
[INFO] Final Memory: 39M/749M
[INFO] ------------------------------------------------------------------------
[root@d1 MyBgJavaLan]# mvn clean;mvn compile;mvn package;
Remaining problem: when the agent is asked to stop, the sink runner thread never exits, because process() busy-waits on ch.take() while the channel is empty (see the sketch after the log below):
[DEBUG] 2018-08-07 17:39:59,103 method:org.apache.flume.SinkRunner.stop(SinkRunner.java:104)
Waiting for runner thread to exit
[DEBUG] 2018-08-07 17:39:59,604 method:org.apache.flume.SinkRunner.stop(SinkRunner.java:104)
Waiting for runner thread to exit
[DEBUG] 2018-08-07 17:40:00,104 method:org.apache.flume.SinkRunner.stop(SinkRunner.java:104)
Waiting for runner thread to exit
[DEBUG] 2018-08-07 17:40:00,604 method:org.apache.flume.SinkRunner.stop(SinkRunner.java:104)
Waiting for runner thread to exit
[DEBUG] 2018-08-07 17:40:01,105 method:org.apache.flume.SinkRunner.stop(SinkRunner.java:104)
Waiting for runner thread to exit
[DEBUG] 2018-08-07 17:40:01,605 method:org.apache.flume.SinkRunner.stop(SinkRunner.java:104)
Waiting for runner thread to exit
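The runner cannot exit because process() never returns while the channel is empty: ch.take() yields null, the while(true) loop spins, and SinkRunner.stop() keeps waiting for the thread. Below is a minimal sketch of the usual fix (the class name is hypothetical, the rest mirrors the sink above): return Status.BACKOFF when there is nothing to take, so the runner regains control and can shut down cleanly.

package com.mycom;

import java.io.FileOutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Transaction;
import org.apache.flume.conf.Configurable;
import org.apache.flume.sink.AbstractSink;

// Sketch only: the same file-appending sink, but without the busy-wait.
public class MyBackoffFileSink extends AbstractSink implements Configurable {

    private String fileName;

    @Override
    public void configure(Context context) {
        fileName = context.getString("fileName");
    }

    @Override
    public Status process() throws EventDeliveryException {
        Channel ch = getChannel();
        Transaction txn = ch.getTransaction();
        txn.begin();
        try {
            Event event = ch.take();
            if (event == null) {
                // Nothing in the channel: commit the empty transaction and tell the
                // SinkRunner to back off, so the thread can be stopped cleanly.
                txn.commit();
                return Status.BACKOFF;
            }
            String res = new String(event.getBody(), StandardCharsets.UTF_8)
                    + ":" + System.currentTimeMillis() + "\r\n";
            try (FileOutputStream fos = new FileOutputStream(fileName, true)) {
                fos.write(res.getBytes(StandardCharsets.UTF_8));
            }
            txn.commit();
            return Status.READY;
        } catch (Throwable th) {
            txn.rollback();
            if (th instanceof Error) {
                throw (Error) th;
            }
            throw new EventDeliveryException(th);
        } finally {
            txn.close();
        }
    }
}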
package com.mycom;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Transaction;
import org.apache.flume.conf.Configurable;
import org.apache.flume.sink.AbstractSink;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

public class MyFlumeSink extends AbstractSink implements Configurable {

    private static final Logger logger = LoggerFactory.getLogger(MyFlumeSink.class);
    private static final String PROP_KEY_ROOTPATH = "fileName";
    private String fileName;

    @Override
    public void configure(Context context) {
        fileName = context.getString(PROP_KEY_ROOTPATH);
    }

    @Override
    public Status process() throws EventDeliveryException {
        Channel ch = getChannel();
        Transaction txn = ch.getTransaction();
        Event event = null;
        txn.begin();
        // Same busy-wait as the first version (see the Status.BACKOFF note above).
        while (true) {
            event = ch.take();
            if (event != null) {
                break;
            }
        }
        try {
            logger.debug("Get event.");
            // Parsing JSON in Java: https://www.cnblogs.com/boy1025/p/4551593.html
            // Alibaba fastjson (JSONObject.parseObject) would also work; Gson is used here.
            // event.getBody() is a byte[]; calling toString() on it only yields something
            // like "[B@1a2b3c", so decode it explicitly instead.
            String body = new String(event.getBody(), StandardCharsets.UTF_8);
            JsonParser parse = new JsonParser();                 // JSON parser
            JsonObject json = (JsonObject) parse.parse(body);    // parse the event body
            String bodyReal = json.get("body").getAsString();    // extract the "body" field
            System.out.println("event.getBody()-----" + bodyReal);
            String res = bodyReal + ":" + System.currentTimeMillis() + "\r\n";
            File file = new File(fileName);
            try (FileOutputStream fos = new FileOutputStream(file, true)) {
                fos.write(res.getBytes());
            } catch (IOException e) {
                e.printStackTrace();
            }
            txn.commit();
            return Status.READY;
        } catch (Throwable th) {
            txn.rollback();
            if (th instanceof Error) {
                throw (Error) th;
            } else {
                throw new EventDeliveryException(th);
            }
        } finally {
            txn.close();
        }
    }
}
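The important detail in this version is the body decoding: event.getBody() is a byte[], and toString() on a byte[] only prints an object reference, so the bytes must be decoded with new String(bytes, UTF-8) before handing them to Gson. A quick standalone check of that parsing step (hypothetical class name and sample payload, not part of the original project):

package com.mycom;

import java.nio.charset.StandardCharsets;

import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

// Hypothetical standalone check of the parsing logic in MyFlumeSink.process().
public class BodyParseCheck {

    public static void main(String[] args) {
        // Simulate what the sink receives: the event body as raw bytes.
        byte[] rawBody = "{\"body\":\"hello from httpSource\"}".getBytes(StandardCharsets.UTF_8);

        System.out.println(rawBody.toString());                      // prints something like [B@6d06d69c
        String body = new String(rawBody, StandardCharsets.UTF_8);   // prints the actual JSON
        System.out.println(body);

        JsonObject json = (JsonObject) new JsonParser().parse(body);
        System.out.println(json.get("body").getAsString());          // hello from httpSource
    }
}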
package com.mycom;

import java.nio.charset.Charset;
import java.text.SimpleDateFormat;

import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.api.RpcClient;
import org.apache.flume.api.RpcClientFactory;
import org.apache.flume.event.EventBuilder;

// http://flume.apache.org/FlumeDeveloperGuide.html
public class MyAppFlume {

    public static void main(String[] args) {
        MyRpcClientFacade client = new MyRpcClientFacade();
        // Initialize the client with the remote Flume agent's host and port.
        // client.init("hadoop1", 41414);
        // client.init("192.168.3.101", 41414);
        client.init("0.0.0.0", 41414);

        // Send events to the remote Flume agent. That agent should be configured to
        // listen with an AvroSource.
        String sampleData = "Hello Flume!";
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd--HH-mm-ss");
        long cTm = System.currentTimeMillis();
        String df = sdf.format(cTm);
        System.out.println(df);

        String str_ = "";
        for (int i = 0; i < 14; i++) {
            str_ = str_ + i + df + "加油!!" + sampleData;
        }
        System.out.println(str_);

        // for (int i = 0; i < 20; i++) {
        //     String str = i + "------------" + str_ + df + "加油!!" + sampleData;
        //     System.out.println(str);
        //     client.sendDataToFlume(str);
        // }
        String str = "对StringBuilder抛出ArrayIndexOutOfBoundsException的探究 - CSDN博客 https://blog.csdn.net/liu_005/article/details/73699604"
                + "------------" + str_ + df + "加油!!" + sampleData;
        System.out.println(str);
        client.sendDataToFlume(str);
        client.cleanUp();
    }
}

class MyRpcClientFacade {

    private RpcClient client;
    private String hostname;
    private int port;

    public void init(String hostname, int port) {
        // Set up the RPC connection (Avro by default).
        this.hostname = hostname;
        this.port = port;
        this.client = RpcClientFactory.getDefaultInstance(hostname, port);
        // Use the following method to create a Thrift client instead of the line above:
        // this.client = RpcClientFactory.getThriftInstance(hostname, port);
    }

    public void sendDataToFlume(String data) {
        // Create a Flume Event object that encapsulates the sample data.
        Event event = EventBuilder.withBody(data, Charset.forName("UTF-8"));
        System.out.println("--->");
        System.out.println(data);
        System.out.println(event);
        System.out.println("<--->");
        /* RpcClient interface:
         *   int getBatchSize();
         *   void append(Event var1) throws EventDeliveryException;
         *   void appendBatch(List<Event> var1) throws EventDeliveryException;
         *   boolean isActive();
         *   void close() throws FlumeException;
         */
        // Send the event.
        try {
            this.client.append(event);
            System.out.println("<----------client.append(event)--------------->");
        } catch (EventDeliveryException e) {
            e.printStackTrace();
            System.out.println(e);
            // Clean up and recreate the client.
            client.close();
            client = null;
            client = RpcClientFactory.getDefaultInstance(hostname, port);
        }
    }

    public void cleanUp() {
        // Close the RPC connection.
        client.close();
    }
}
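Note that RpcClientFactory.getDefaultInstance creates an Avro RPC client, so this program talks to an avro-type source on port 41414, not to the HTTP source on port 50000 configured earlier; the agent would need such a source for it to receive anything. When many events are sent, the RpcClient interface quoted in the comment above also offers appendBatch. A hedged sketch of using it (class name, host, and port are illustrative, not from the original project):

package com.mycom;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.api.RpcClient;
import org.apache.flume.api.RpcClientFactory;
import org.apache.flume.event.EventBuilder;

// Illustrative batch sender using the same client SDK as MyAppFlume.
public class MyBatchAppFlume {

    public static void main(String[] args) {
        RpcClient client = RpcClientFactory.getDefaultInstance("127.0.0.1", 41414);
        try {
            List<Event> batch = new ArrayList<Event>();
            for (int i = 0; i < 10; i++) {
                batch.add(EventBuilder.withBody(("batch event " + i).getBytes(StandardCharsets.UTF_8)));
            }
            // One RPC round trip for the whole list instead of one per event.
            client.appendBatch(batch);
        } catch (EventDeliveryException e) {
            e.printStackTrace();
        } finally {
            client.close();
        }
    }
}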
[Hands-on]
Distributed logging, version a
The PHP sender is to be phased out in favor of a Go gateway.
protected function data2unifiedlog($data)
{
    /*
    Target format expected by the Flume HTTP source (JSONHandler):
    [{
        "headers" : {
            "timestamp" : "434324343",
            "host" : "random_host.example.com"
        },
        "body" : "random_body"
    },
    {
        "headers" : {
            "namenode" : "namenode.example.com",
            "datanode" : "random_datanode.example.com"
        },
        "body" : "really_random_body"
    }]
    */
    $ch = curl_init();
    $now = date('y-m-d h:i:s', time()) . gethostname();

    // Initial hard-coded test payload (overwritten below).
    $post_data_json = '[{
        "headers" : {
            "timestamp" : "434324343",
            "host" : "random_host.example.com"
        },
        "body" : \'str86677' . $now . '\'}]';

    // Build the event body as a JSON string by hand; json_encode() would handle
    // quoting and escaping of the values more safely.
    $s = '{"hostname":"' . gethostname() . '","timestamp":"' . time() . '"';
    foreach ($data as $k => $v) {
        $s = $s . ',"' . $k . '":"' . $v . '"';
    }
    $s = $s . '}';

    $post_data_json = '[{
        "headers" : {
            "timestamp" : "434324343",
            "host" : "random_host.example.com"
        },
        "body" : \'' . $s . '\'}]';

    $curlopt_url = 'http://101.201.41.72:50000';
    curl_setopt($ch, CURLOPT_URL, $curlopt_url);
    curl_setopt($ch, CURLOPT_HEADER, TRUE);
    curl_setopt($ch, CURLOPT_HTTPHEADER, array('Content-Type:application/json'));
    curl_setopt($ch, CURLOPT_POSTFIELDS, $post_data_json);
    curl_exec($ch);
    curl_close($ch);
}

The sink output then contains lines like the following:
str8667718-11-02 04:23:28a
str8667718-11-02 04:23:28b
str8667718-11-02 04:23:28a
str8667718-11-02 04:23:28c
str8667718-11-02 04:23:28b
str8667718-11-02 04:23:29a
str8667718-11-02 04:23:29a
str8667718-11-02 04:23:29c
str8667718-11-02 04:23:29b
str8667718-11-02 04:23:29c
str8667718-11-02 04:23:29a
str8667718-11-02 04:23:29c
str8667718-11-02 04:23:29c
str8667718-11-02 04:23:29a
str8667718-11-02 04:23:29b
str8667718-11-02 04:23:29c
str8667718-11-02 04:23:30b
str8667718-11-02 04:23:30b
str8667718-11-02 04:23:30c
str8667718-11-02 04:23:30a
str8667718-11-02 04:23:30c
str8667718-11-02 04:23:30a
Hostname: the trailing a/b/c in each line is the gethostname() value of the sending host.
a1.sources=r1
a1.sinks=k1
a1.channels=c1

a1.sources.r1.type=http
a1.sources.r1.bind=0.0.0.0
a1.sources.r1.port=50000
a1.sources.r1.channels=c1

a1.sinks.k1.channel=c1
#a1.sinks.k1.type = com.product.FlumeApp
a1.sinks.k1.type = file_roll
a1.sinks.k1.sink.directory = /data/UnifiedLog/log
a1.sinks.k1.batchSize=1
#a1.sinks.k1.pathManager=%y%m%d%H%M%S
a1.sinks.k1.pathManager.extension=log
a1.sinks.k1.pathManager.prefix=webTrack
a1.sinks.k1.rollInterval=0
a1.sinks.k1.sink.serializer = text

a1.channels.c1.type=memory
a1.channels.c1.capacity=1000
a1.channels.c1.transactionCapacity=100
#a1.channels.c1.capacity=8
#a1.channels.c1.transactionCapacity=4
Startup:
export FlumeHome=/data/UnifiedLog/flume; $FlumeHome/bin/flume-ng agent -c $FlumeHome/conf/ -f $FlumeHome/conf/httpSourceApp.conf -n a1 -Dflume.root.logger=INFO,console -Xms10240m -Xmx10240m ;
Process:
root 23619 1.4 7.9 16559800 2608576 pts/3 Sl+ 15:31 1:05 /usr/java/jdk1.8.0_101/bin/java -Xmx20m -Dflume.root.logger=INFO,console -Xms10240m -Xmx10240m -cp /data/UnifiedLog/flume/conf:/data/UnifiedLog/flume/lib/*:/lib/* -Djava.library.path= org.apache.flume.node.Application -f /data/UnifiedLog/flume/conf/httpSourceApp.conf -n a1
Port:
tcp 0 0 0.0.0.0:50000 0.0.0.0:* LISTEN 23619/java
Log directory:
8.0K -rw-r--r-- 1 root root 5.3K Nov 2 16:32 1541143920191-102
8.0K -rw-r--r-- 1 root root 8.0K Nov 2 16:32 1541143920191-103
8.0K -rw-r--r-- 1 root root 8.0K Nov 2 16:32 1541143920191-104
8.0K -rw-r--r-- 1 root root 8.0K Nov 2 16:32 1541143920191-105
8.0K -rw-r--r-- 1 root root 8.0K Nov 2 16:32 1541143920191-106
8.0K -rw-r--r-- 1 root root 5.3K Nov 2 16:32 1541143920191-107
8.0K -rw-r--r-- 1 root root 8.0K Nov 2 16:32 1541143920191-108
8.0K -rw-r--r-- 1 root root 5.3K Nov 2 16:32 1541143920191-109
8.0K -rw-r--r-- 1 root root 8.0K Nov 2 16:32 1541143920191-110
8.0K -rw-r--r-- 1 root root 8.0K Nov 2 16:32 1541143920191-111
8.0K -rw-r--r-- 1 root root 8.0K Nov 2 16:32 1541143920191-112
8.0K -rw-r--r-- 1 root root 5.3K Nov 2 16:32 1541143920191-113
8.0K -rw-r--r-- 1 root root 5.3K Nov 2 16:32 1541143920191-114
8.0K -rw-r--r-- 1 root root 8.0K Nov 2 16:32 1541143920191-115
8.0K -rw-r--r-- 1 root root 5.3K Nov 2 16:32 1541143920191-116
8.0K -rw-r--r-- 1 root root 8.0K Nov 2 16:32 1541143920191-117
8.0K -rw-r--r-- 1 root root 8.0K Nov 2 16:32 1541143920191-118
8.0K -rw-r--r-- 1 root root 5.3K Nov 2 16:32 1541143920191-119
4.0K -rw-r--r-- 1 root root 2.7K Nov 2 16:32 1541143920191-120
[Hands-on]
https://github.com/apache/flume/blob/c5168c902634e8ea1f25ec578ed0b7055b246d68/flume-ng-core/src/test/java/org/apache/flume/source/http/TestHTTPSource.java
@Test
public void testSimpleUTF16() throws IOException, InterruptedException {
    StringEntity input = new StringEntity("[{\"headers\":{\"a\": \"b\"},\"body\": \"random_body\"},"
            + "{\"headers\":{\"e\": \"f\"},\"body\": \"random_body2\"}]", "UTF-16");
    input.setContentType("application/json; charset=utf-16");
    postRequest.setEntity(input);
    HttpResponse response = httpClient.execute(postRequest);
    Assert.assertEquals(HttpServletResponse.SC_OK,
            response.getStatusLine().getStatusCode());
    Transaction tx = httpChannel.getTransaction();
    tx.begin();
    Event e = httpChannel.take();
    Assert.assertNotNull(e);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    Assert.assertEquals("random_body", new String(e.getBody(), "UTF-16"));
    e = httpChannel.take();
    Assert.assertNotNull(e);
    Assert.assertEquals("f", e.getHeaders().get("e"));
    Assert.assertEquals("random_body2", new String(e.getBody(), "UTF-16"));
    tx.commit();
    tx.close();
}

@Test
public void testInvalid() throws Exception {
    StringEntity input = new StringEntity("[{\"a\": \"b\",[\"d\":\"e\"],\"body\": \"random_body\"},"
            + "{\"e\": \"f\",\"body\": \"random_body2\"}]");
    input.setContentType("application/json");
    postRequest.setEntity(input);
    HttpResponse response = httpClient.execute(postRequest);
    Assert.assertEquals(HttpServletResponse.SC_BAD_REQUEST,
            response.getStatusLine().getStatusCode());
    SourceCounter sc = (SourceCounter) Whitebox.getInternalState(httpSource, "sourceCounter");
    Assert.assertEquals(1, sc.getEventReadFail());
}
Looking up the test cases on GitHub shows the request format the HTTP source expects:
Header: Content-Type: application/json
Body: [{"headers" : {"a":"b", "c":"d"},"body": "random_body"}, {"headers" : {"e": "f"},"body": "random_body2"}]
https://stackoverflow.com/questions/34765810/flume-httpsource-refuses-json-body-due-to-malform
Class JSONHandler
java.lang.Object
  org.apache.flume.source.http.JSONHandler
All Implemented Interfaces: Configurable, HTTPSourceHandler

public class JSONHandler extends Object implements HTTPSourceHandler

JSONHandler for HTTPSource that accepts an array of events. This handler throws exception if the deserialization fails because of bad format or any other reason. Each event must be encoded as a map with two key-value pairs.
1. headers - the key for this key-value pair is "headers". The value for this key is another map, which represent the event headers. These headers are inserted into the Flume event as is.
2. body - The body is a string which represents the body of the event. The key for this key-value pair is "body". All key-value pairs are considered to be headers.
An example:
[{"headers" : {"a":"b", "c":"d"},"body": "random_body"}, {"headers" : {"e": "f"},"body": "random_body2"}]
would be interpreted as the following two flume events:
* Event with body: "random_body" (in UTF-8/UTF-16/UTF-32 encoded bytes) and headers : (a:b, c:d)
* Event with body: "random_body2" (in UTF-8/UTF-16/UTF-32 encoded bytes) and headers : (e:f)
The charset of the body is read from the request and used. If no charset is set in the request, then the charset is assumed to be JSON's default - UTF-8. The JSON handler supports UTF-8, UTF-16 and UTF-32.
To set the charset, the request must have content type specified as "application/json; charset=UTF-8" (replace UTF-8 with UTF-16 or UTF-32 as required).
One way to create an event in the format expected by this handler, is to use JSONEvent and use Gson to create the JSON string using the Gson.toJson(java.lang.Object, java.lang.reflect.Type) method. The type token to pass as the 2nd argument of this method for list of events can be created by:
Type type = new TypeToken<List<JSONEvent>>() {}.getType();
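Following that javadoc, the payload can be generated instead of hand-written by serializing a list of JSONEvent objects with Gson. The sketch below (class name is hypothetical; the header values are just examples) builds the same two-event array shown above, and the resulting string could be POSTed with a test client like the one shown after the first agent configuration.

package com.mycom;

import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.flume.event.JSONEvent;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

// Sketch: build the JSON array expected by JSONHandler from JSONEvent objects.
public class JsonPayloadBuilder {

    public static void main(String[] args) {
        List<JSONEvent> events = new ArrayList<JSONEvent>();

        JSONEvent e1 = new JSONEvent();
        Map<String, String> h1 = new HashMap<String, String>();
        h1.put("a", "b");
        h1.put("c", "d");
        e1.setHeaders(h1);
        e1.setBody("random_body".getBytes(StandardCharsets.UTF_8));
        events.add(e1);

        JSONEvent e2 = new JSONEvent();
        Map<String, String> h2 = new HashMap<String, String>();
        h2.put("e", "f");
        e2.setHeaders(h2);
        e2.setBody("random_body2".getBytes(StandardCharsets.UTF_8));
        events.add(e2);

        // The type token recommended by the JSONHandler javadoc.
        Type type = new TypeToken<List<JSONEvent>>() {}.getType();
        String payload = new Gson().toJson(events, type);
        // Expected shape: [{"headers":{"a":"b","c":"d"},"body":"random_body"}, ...]
        System.out.println(payload);
    }
}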