I found that the device's wall-clock time (`System.currentTimeMillis()`) often runs faster than the network time, and the longer the device stays up, the larger the error grows. So I changed it
from
```java
// -----------------------------------------------------------------------------------
// get current time and write it to the request packet
long requestTime = System.currentTimeMillis();
long requestTicks = SystemClock.elapsedRealtime();
writeTimeStamp(buffer, INDEX_TRANSMIT_TIME, requestTime);
socket = new DatagramSocket();
socket.setSoTimeout(timeoutInMillis);
socket.send(request);
// -----------------------------------------------------------------------------------
// read the response
long t[] = new long[RESPONSE_INDEX_SIZE];
DatagramPacket response = new DatagramPacket(buffer, buffer.length);
socket.receive(response);
long responseTicks = SystemClock.elapsedRealtime();
t[RESPONSE_INDEX_RESPONSE_TICKS] = responseTicks;
// -----------------------------------------------------------------------------------
// extract the results
// See here for the algorithm used:
// https://en.wikipedia.org/wiki/Network_Time_Protocol#Clock_synchronization_algorithm
long originateTime = readTimeStamp(buffer, INDEX_ORIGINATE_TIME); // T0
long receiveTime = readTimeStamp(buffer, INDEX_RECEIVE_TIME); // T1
long transmitTime = readTimeStamp(buffer, INDEX_TRANSMIT_TIME); // T2
long responseTime = requestTime + (responseTicks - requestTicks); // T3
t[RESPONSE_INDEX_ORIGINATE_TIME] = originateTime;
t[RESPONSE_INDEX_RECEIVE_TIME] = receiveTime;
t[RESPONSE_INDEX_TRANSMIT_TIME] = transmitTime;
t[RESPONSE_INDEX_RESPONSE_TIME] = responseTime;
```
to
```java
// long requestTime = System.currentTimeMillis();
long requestTicks = SystemClock.elapsedRealtime(); // monotonic ticks since boot; not affected by wall-clock changes
long originateTime = requestTicks; // T0
// writeTimeStampWithOut(buffer, INDEX_ORIGINATE_TIME, requestTicks);
socket = new DatagramSocket();
socket.setSoTimeout(timeoutInMillis);
socket.send(request);
// -----------------------------------------------------------------------------------
// read the response
long t[] = new long[RESPONSE_INDEX_SIZE];
DatagramPacket response = new DatagramPacket(buffer, buffer.length);
socket.receive(response);
long responseTicks = SystemClock.elapsedRealtime(); // T3, also captured in monotonic ticks
t[RESPONSE_INDEX_RESPONSE_TICKS] = responseTicks;
// -----------------------------------------------------------------------------------
// extract the results
// See here for the algorithm used:
// https://en.wikipedia.org/wiki/Network_Time_Protocol#Clock_synchronization_algorithm
long receiveTime = readTimeStamp(buffer, INDEX_RECEIVE_TIME); // T1
long transmitTime = readTimeStamp(buffer, INDEX_TRANSMIT_TIME); // T2
long responseTime = responseTicks; // T3
t[RESPONSE_INDEX_ORIGINATE_TIME] = originateTime;
t[RESPONSE_INDEX_RECEIVE_TIME] = receiveTime;
t[RESPONSE_INDEX_TRANSMIT_TIME] = transmitTime;
t[RESPONSE_INDEX_RESPONSE_TIME] = responseTime;
```
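For reference, here is a minimal sketch of how the four values collected in `t[]` can be combined after this change. It is not part of the patch above; it just applies the standard clock-offset formula from the Wikipedia article linked in the code, and it assumes the same `RESPONSE_INDEX_*` constants. Because T0 and T3 are now `elapsedRealtime()` ticks while T1 and T2 are the server's wall-clock timestamps, the offset works out to "server wall clock minus device ticks":

```java
import android.os.SystemClock;

// Sketch only, not the library's actual API. T0/T3 are device ticks,
// T1/T2 are server wall-clock millis, as set up in the code above.
static long computeClockOffset(long[] t) {
    long originateTime = t[RESPONSE_INDEX_ORIGINATE_TIME]; // T0, device ticks
    long receiveTime   = t[RESPONSE_INDEX_RECEIVE_TIME];   // T1, server clock
    long transmitTime  = t[RESPONSE_INDEX_TRANSMIT_TIME];  // T2, server clock
    long responseTime  = t[RESPONSE_INDEX_RESPONSE_TIME];  // T3, device ticks

    // Standard NTP formula: ((T1 - T0) + (T2 - T3)) / 2. With the mixed
    // time bases this equals "server wall clock minus device ticks".
    return ((receiveTime - originateTime) + (transmitTime - responseTime)) / 2;
}

// The round-trip delay is a pure duration, so the mixed bases still work:
// (T3 - T0) is measured in ticks, (T2 - T1) in server millis.
static long computeRoundTripDelay(long[] t) {
    return (t[RESPONSE_INDEX_RESPONSE_TIME] - t[RESPONSE_INDEX_ORIGINATE_TIME])
            - (t[RESPONSE_INDEX_TRANSMIT_TIME] - t[RESPONSE_INDEX_RECEIVE_TIME]);
}

// Current wall-clock time at any later moment, without ever reading
// System.currentTimeMillis():
static long now(long clockOffset) {
    return SystemClock.elapsedRealtime() + clockOffset;
}
```

Since `elapsedRealtime()` is monotonic, keeps counting through deep sleep, and only resets on reboot, an offset computed once stays valid until the device restarts, no matter how the user or the carrier adjusts the wall clock.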