@@ -113,12 +113,12 @@ public List<Range> getIndexRowRanges(String column, Range indexRange) {
if (col != null) {

try {
LOG.debug("Searching tab=" + indexTable + " column=" + column + " range=" + indexRange);
LOG.debug("Searching tab={} column={} range={}", indexTable, column, indexRange);
Connector conn = getConnector();
scan = conn.createScanner(indexTable, auths);
scan.setRange(indexRange);
Text cf = new Text(col);
LOG.debug("Using Column Family=" + toString());
LOG.debug("Using Column Family={}", toString());
scan.fetchColumnFamily(cf);

for (Map.Entry<Key, Value> entry : scan) {
@@ -135,7 +135,7 @@ public List<Range> getIndexRowRanges(String column, Range indexRange) {
if (rowIds.isEmpty()) {
LOG.debug("Found 0 index matches");
} else {
LOG.debug("Found " + rowIds.size() + " index matches");
LOG.debug("Found {} index matches", rowIds.size());
}

return rowIds;
@@ -149,7 +149,7 @@ public List<Range> getIndexRowRanges(String column, Range indexRange) {
}

// assume the index is bad and do a full scan
LOG.debug("Index lookup failed for table " + indexTable);
LOG.debug("Index lookup failed for table {}", indexTable);
return null;
}

2 changes: 1 addition & 1 deletion beeline/src/java/org/apache/hive/beeline/SQLCompleter.java
@@ -55,7 +55,7 @@ public static Set<String> getSQLCompleters(BeeLine beeLine, boolean skipmeta)
try {
keywords += "," + beeLine.getDatabaseConnection().getDatabaseMetaData().getSQLKeywords();
} catch (Exception e) {
LOG.debug("fail to get SQL key words from database metadata due to the exception: " + e, e);
LOG.debug("fail to get SQL key words from database metadata due to the exception: {}", e);
}
try {
keywords += "," + beeLine.getDatabaseConnection().getDatabaseMetaData().getStringFunctions();
@@ -116,7 +116,7 @@ protected void execSql(String sqlScriptFile) throws IOException {
// We can be pretty sure that an entire line can be processed as a single command since
// we always add a line separator at the end while calling dbCommandParser.buildCommand.
beeLine.getOpts().setEntireLineAsCommand(true);
LOG.debug("Going to run command <" + builder.buildToLog() + ">");
LOG.debug("Going to run command <{}>", builder.buildToLog());
int status = beeLine.begin(builder.buildToRun(), null, false);
if (status != 0) {
throw new IOException("Schema script failed, errorcode " + status);
6 changes: 3 additions & 3 deletions common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -975,15 +975,15 @@ public static boolean distCpWithSnapshot(String oldSnapshot, String newSnapshot,
*/
public static boolean moveToTrash(FileSystem fs, Path f, Configuration conf, boolean purge)
throws IOException {
LOG.debug("deleting " + f);
LOG.debug("Deleting {}", f);
boolean result = false;
try {
if(purge) {
LOG.debug("purge is set to true. Not moving to Trash " + f);
LOG.debug("Purge is set to true. Not moving to Trash {}", f);
} else {
result = Trash.moveToAppropriateTrash(fs, f, conf);
if (result) {
LOG.trace("Moved to trash: " + f);
LOG.trace("Moved to trash: {}", f);
return true;
}
}
@@ -205,7 +205,7 @@ public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, Str
List<String> enabledProtocols = new ArrayList<String>();
for (String protocol : sslServerSocket.getEnabledProtocols()) {
if (sslVersionBlacklistLocal.contains(protocol.toLowerCase())) {
LOG.debug("Disabling SSL Protocol: " + protocol);
LOG.debug("Disabling SSL Protocol: {}", protocol);
} else {
enabledProtocols.add(protocol);
}
2 changes: 1 addition & 1 deletion common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -6606,7 +6606,7 @@ private void initialize(Class<?> cls) {
if (msUri == null || msUri.isEmpty()) {
msUri = this.get("metastore.thrift.uris");
}
LOG.debug("Found metastore URI of " + msUri);
LOG.debug("Found metastore URI of {}", msUri);
if(HiveConfUtil.isEmbeddedMetaStore(msUri)){
setLoadMetastoreConfig(true);
}
4 changes: 2 additions & 2 deletions common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
@@ -210,8 +210,8 @@ public static void updateJobCredentialProviders(Configuration jobConf) {

if (StringUtils.isNotBlank(jobKeyStoreLocation)) {
jobConf.set(Constants.HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG, jobKeyStoreLocation);
LOG.debug("Setting job conf credstore location to " + jobKeyStoreLocation
+ " previous location was " + oldKeyStoreLocation);
LOG.debug("Setting job conf credstore location to {} previous location was {}",
jobKeyStoreLocation, oldKeyStoreLocation);
}

updateCredentialProviderPasswordForJobs(jobConf);
@@ -124,7 +124,7 @@ public static String getBuildVersion(){
}

public static void main(String[] args) {
LOG.debug("version: "+ version);
LOG.debug("version: {}", version);
System.out.println("Hive " + getVersion());
System.out.println("Git " + getUrl() + " -r " + getRevision());
System.out.println("Compiled by " + getUser() + " on " + getDate());
26 changes: 10 additions & 16 deletions common/src/java/org/apache/hive/http/JMXJsonServlet.java
@@ -214,7 +214,7 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) {
private void listBeans(JsonGenerator jg, ObjectName qry, String attribute,
HttpServletResponse response)
throws IOException {
LOG.debug("Listing beans for "+qry);
LOG.debug("Listing beans for {}", qry);
Set<ObjectName> names = null;
names = mBeanServer.queryNames(qry, null);

@@ -241,40 +241,34 @@ private void listBeans(JsonGenerator jg, ObjectName qry, String attribute,
} catch (AttributeNotFoundException e) {
// If the modelerType attribute was not found, the class name is used
// instead.
LOG.error("getting attribute " + prs + " of " + oname
+ " threw an exception", e);
LOG.error("getting attribute {} of {} threw an exception", prs, oname, e);
Reviewer comment (Member): I am wondering if the exception serialization remains the same after this change since we no longer use the API that accepts a Throwable.

Reviewer comment (Member): In addition it is strange that we use two placeholders {} but we pass three arguments.

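A note on the two comments above: SLF4J's documented convention (since 1.6.0) is that a trailing Throwable needs no placeholder. When the arguments outnumber the {} markers and the last one is a Throwable, that Throwable is logged as the exception, stack trace and all, just as with the older two-argument error(String, Throwable) call. A minimal sketch of the behavior, with illustrative names that are not taken from the PR:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Illustrative sketch only; not part of the PR.
public class TrailingThrowableDemo {
    private static final Logger LOG = LoggerFactory.getLogger(TrailingThrowableDemo.class);

    public static void main(String[] args) {
        String prs = "modelerType";                      // example attribute name
        String oname = "java.lang:type=Runtime";         // example MBean name
        Exception e = new IllegalStateException("boom"); // stand-in for the caught exception

        // Two placeholders, three arguments: prs and oname fill the {} markers,
        // and the trailing Throwable is logged with its full stack trace.
        LOG.error("getting attribute {} of {} threw an exception", prs, oname, e);

        // Pre-PR concatenation style, shown for comparison; the logged output matches.
        LOG.error("getting attribute " + prs + " of " + oname + " threw an exception", e);
    }
}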
} catch (MBeanException e) {
// The code inside the attribute getter threw an exception so log it,
// and fall back on the class name
LOG.error("getting attribute " + prs + " of " + oname
+ " threw an exception", e);
LOG.error("getting attribute {} of {} threw an exception", prs, oname, e);
} catch (RuntimeException e) {
// For some reason even with an MBeanException available to them
// Runtime exceptions can still find their way through, so treat them
// the same as MBeanException
LOG.error("getting attribute " + prs + " of " + oname
+ " threw an exception", e);
LOG.error("getting attribute {} of {} threw an exception", prs, oname, e);
} catch ( ReflectionException e ) {
// This happens when the code inside the JMX bean (setter?? from the
// java docs) threw an exception, so log it and fall back on the
// class name
LOG.error("getting attribute " + prs + " of " + oname
+ " threw an exception", e);
LOG.error("getting attribute {} of {} threw an exception", prs, oname, e);
}
} catch (InstanceNotFoundException e) {
//Ignored for some reason the bean was not found so don't output it
continue;
} catch ( IntrospectionException e ) {
// This is an internal error, something odd happened with reflection so
// log it and don't output the bean.
LOG.error("Problem while trying to process JMX query: " + qry
+ " with MBean " + oname, e);
LOG.error("Problem while trying to process JMX query: {} with MBean {}", qry, oname, e);
continue;
} catch ( ReflectionException e ) {
// This happens when the code inside the JMX bean threw an exception, so
// log it and don't output the bean.
LOG.error("Problem while trying to process JMX query: " + qry
+ " with MBean " + oname, e);
LOG.error("Problem while trying to process JMX query: {} with MBean {}", qry, oname, e);
continue;
}

@@ -325,15 +319,15 @@ private void writeAttribute(JsonGenerator jg, ObjectName oname, MBeanAttributeIn
// UnsupportedOperationExceptions happen in the normal course of business,
// so no need to log them as errors all the time.
if (e.getCause() instanceof UnsupportedOperationException) {
LOG.debug("getting attribute "+attName+" of "+oname+" is unsupported");
LOG.debug("getting attribute {} of {} is unsupported", attName, oname);
} else {
LOG.error("getting attribute "+attName+" of "+oname+" threw an exception", e);
LOG.error("getting attribute {} of {} threw an exception", attName, oname, e);
}
return;
} catch (RuntimeErrorException e) {
// RuntimeErrorException happens when an unexpected failure occurs in getAttribute
// for example https://issues.apache.org/jira/browse/DAEMON-120
LOG.debug("getting attribute "+attName+" of "+oname+" threw an exception", e);
LOG.debug("getting attribute {} of {} threw an exception", attName, oname, e);
return;
} catch (AttributeNotFoundException e) {
//Ignored the attribute was not found, which should never happen because the bean
@@ -255,7 +255,7 @@ static void setupKeyRange(Scan scan, List<IndexSearchCondition> conditions, bool
scan.setStopRow(stopRow);

if (LOG.isDebugEnabled()) {
LOG.debug(Bytes.toStringBinary(startRow) + " ~ " + Bytes.toStringBinary(stopRow));
LOG.debug("{} ~ {}", Bytes.toStringBinary(startRow), Bytes.toStringBinary(stopRow));
}
}

@@ -211,7 +211,7 @@ private Scan createFilterScan(JobConf jobConf, int iKey, int iTimestamp, boolean
// Re-assess this when negotiation is honored and the duplicate evaluation is removed.
// THIS IGNORES RESIDUAL PARSING FROM HBaseStorageHandler#decomposePredicate
if (residualPredicate != null) {
LOG.debug("Ignoring residual predicate " + residualPredicate.getExprString());
LOG.debug("Ignoring residual predicate {}", residualPredicate.getExprString());
}

Map<String, List<IndexSearchCondition>> split = HiveHBaseInputFormatUtil.decompose(conditions);
@@ -85,7 +85,7 @@ default Table loadHmsTable() throws TException, InterruptedException {
try {
return metaClients().run(client -> client.getTable(database(), table()));
} catch (NoSuchObjectException nte) {
LOG.trace("Table not found {}", database() + "." + table(), nte);
LOG.trace("Table not found {}.{}", database(), table(), nte);
Reviewer comment (Member): Two placeholders but three arguments.

return null;
}
}
@@ -142,8 +142,8 @@ public void initialize(Configuration configuration, Properties tableProperties,
}

if (log.isDebugEnabled()) {
log.debug("JdbcSerDe initialized with\n" + "\t columns: " + Arrays.toString(hiveColumnNames) + "\n\t types: "
+ Arrays.toString(hiveColumnTypes));
log.debug("JdbcSerDe initialized with\n\t columns: {}\n\t types: {}",
Arrays.toString(hiveColumnNames), Arrays.toString(hiveColumnTypes));
}
}

@@ -104,11 +104,10 @@ public static void importCredentialsFromCurrentSubject(KuduClient client) {
// 'client'. This is necessary if we want to support a job which
// reads from one cluster and writes to another.
if (!tok.getService().equals(service)) {
LOG.debug("Not importing credentials for service " + service +
"(expecting service " + service + ")");
LOG.debug("Not importing credentials for service {} (expecting service {})", service, service);
Reviewer comment (Member): Passing the same object two times does not make much sense, but I guess that is outside the scope of the PR.

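If the goal of this message is to show which token was skipped, a hypothetical variant (not what the PR does) would log the skipped token's own service rather than repeating the expected one:

// Hypothetical rewrite: tok.getService() is the skipped token's service, service the expected one.
LOG.debug("Not importing credentials for service {} (expecting service {})",
    tok.getService(), service);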
continue;
}
LOG.debug("Importing credentials for service " + service);
LOG.debug("Importing credentials for service {}", service);
client.importAuthenticationCredentials(tok.getPassword());
return;
}
@@ -77,7 +77,7 @@ public synchronized void close() throws IOException {
try {
din.close();
} catch (Exception err) {
LOG.error("Error closing input stream:" + err.getMessage(), err);
LOG.error("Error closing input stream: {}", err.getMessage(), err);
Reviewer comment (Member): How about something simpler:

LOG.error("Error closing input stream:", err);

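For comparison, both forms below attach the stack trace under the SLF4J trailing-Throwable convention; the simpler form also avoids repeating the message, which already appears on the first line of the printed trace (a sketch, not a change made by the PR):

LOG.error("Error closing input stream: {}", err.getMessage(), err); // message appears twice
LOG.error("Error closing input stream:", err);                      // message only in the stack trace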
caughtException = err;
}
// Don't close the socket - the stream already does that if needed.
@@ -86,7 +86,7 @@ public synchronized void close() throws IOException {
try {
client.close();
} catch (Exception err) {
LOG.error("Error closing client:" + err.getMessage(), err);
LOG.error("Error closing client: {}", err.getMessage(), err);
caughtException = (caughtException == null ? err : caughtException);
}
}
@@ -246,7 +246,7 @@ public void handleEvent(ReaderEvent event) {
}
// Reader is using a blocking socket .. interrupt it.
if (LOG.isDebugEnabled()) {
LOG.debug("Interrupting reader thread due to reader event with error " + event.getMessage());
LOG.debug("Interrupting reader thread due to reader event with error {}", event.getMessage());
}
readerThread.interrupt();
try {
@@ -185,7 +185,7 @@ private void terminateRequest() {

@Override
public void setResponse(TerminateFragmentResponseProto response) {
LOG.debug("Received terminate response for " + taskAttemptId);
LOG.debug("Received terminate response for {}", taskAttemptId);
}

@Override
@@ -311,7 +311,7 @@ private void registerClient() {
LlapTaskUmbilicalExternalClient prevVal =
umbilicalServer.umbilicalProtocol.registeredClients.putIfAbsent(requestInfo.taskAttemptId, this);
if (prevVal != null) {
LOG.warn("Unexpected - fragment " + requestInfo.taskAttemptId + " is already registered!");
LOG.warn("Unexpected - fragment {} is already registered!", requestInfo.taskAttemptId);
}
umbilicalServer.llapTaskUmbilicalServer.addTokenForJob(tokenIdentifier, sessionToken);
}
@@ -390,7 +390,7 @@ public void run() {

for (LlapTaskUmbilicalExternalClient timedOutTask : timedOutTasks) {
String taskAttemptId = timedOutTask.requestInfo.taskAttemptId;
LOG.info("Running taskAttemptId " + taskAttemptId + " timed out");
LOG.info("Running taskAttemptId {} timed out", taskAttemptId);
timedOutTask.unregisterClient();
timedOutTask.responder.heartbeatTimeout(taskAttemptId);
}
@@ -447,7 +447,7 @@ public TezHeartbeatResponse heartbeat(TezHeartbeatRequest request) throws IOExce
LlapTaskUmbilicalExternalClient client = registeredClients.get(taskAttemptIdString);
if (client == null) {
// Heartbeat is from a task that we are not currently tracking.
LOG.info("Unexpected heartbeat from " + taskAttemptIdString);
LOG.info("Unexpected heartbeat from {}", taskAttemptIdString);
response.setShouldDie(); // Do any of the other fields need to be set?
return response;
}
@@ -470,26 +470,25 @@ public TezHeartbeatResponse heartbeat(TezHeartbeatRequest request) throws IOExce

List<TezEvent> inEvents = request.getEvents();
if (LOG.isDebugEnabled()) {
LOG.debug("Heartbeat from " + taskAttemptIdString +
" events: " + (inEvents != null ? inEvents.size() : -1));
LOG.debug("Heartbeat from {} events: {}", taskAttemptIdString, (inEvents != null ? inEvents.size() : -1));
}
for (TezEvent tezEvent : ListUtils.emptyIfNull(inEvents)) {
EventType eventType = tezEvent.getEventType();
switch (eventType) {
case TASK_ATTEMPT_COMPLETED_EVENT:
LOG.debug("Task completed event for " + taskAttemptIdString);
LOG.debug("Task completed event for {}", taskAttemptIdString);
shouldUnregisterClient = true;
break;
case TASK_ATTEMPT_FAILED_EVENT:
LOG.debug("Task failed event for " + taskAttemptIdString);
LOG.debug("Task failed event for {}", taskAttemptIdString);
shouldUnregisterClient = true;
break;
case TASK_STATUS_UPDATE_EVENT:
// If we want to handle counters
LOG.debug("Task update event for " + taskAttemptIdString);
LOG.debug("Task update event for {}", taskAttemptIdString);
break;
default:
LOG.warn("Unhandled event type " + eventType);
LOG.warn("Unhandled event type {}", eventType);
break;
}
}
@@ -514,7 +513,7 @@ public TezHeartbeatResponse heartbeat(TezHeartbeatRequest request) throws IOExce
public void nodeHeartbeat(Text hostname, Text uniqueId, int port, TezAttemptArray aw,
BooleanArray guaranteed) throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("Node heartbeat from " + hostname + ":" + port + ", " + uniqueId);
LOG.debug("Node heartbeat from {}:{}, {}", hostname, port, uniqueId);
}
// External client currently cannot use guaranteed.
updateHeartbeatInfo(hostname.toString(), uniqueId.toString(), port, aw);
@@ -532,7 +531,7 @@ public void taskKilled(TezTaskAttemptID taskAttemptId) throws IOException {
client.retrySubmission();
} else {
try {
LOG.error("Task killed - " + taskAttemptIdString);
LOG.error("Task killed - {}", taskAttemptIdString);
client.unregisterClient();
if (client.responder != null) {
client.responder.taskKilled(taskAttemptId);
@@ -542,7 +541,7 @@ public void taskKilled(TezTaskAttemptID taskAttemptId) throws IOException {
}
}
} else {
LOG.info("Received task killed notification for task which is not currently being tracked: " + taskAttemptId);
LOG.info("Received task killed notification for task which is not currently being tracked: {}", taskAttemptId);
}
}

@@ -572,7 +571,7 @@ private void updateHeartbeatInfo(String taskAttemptId) {
}

if (updateCount == 0) {
LOG.warn("No tasks found for heartbeat from taskAttemptId " + taskAttemptId);
LOG.warn("No tasks found for heartbeat from taskAttemptId {}", taskAttemptId);
}
}

@@ -598,11 +597,11 @@ private void updateHeartbeatInfo(
}
}
if (!error.isEmpty()) {
LOG.info("The tasks we expected to be on the node are not there: " + error);
LOG.info("The tasks we expected to be on the node are not there: {}", error);
}

if (updateCount == 0) {
LOG.info("No tasks found for heartbeat from hostname " + hostname + ", port " + port);
LOG.info("No tasks found for heartbeat from hostname {}, port {}", hostname, port);
}
}

@@ -144,12 +144,11 @@ public FixedServiceInstance(String host) {
if (NetUtils.isLocalAddress(inetAddress)) {
InetSocketAddress socketAddress = new InetSocketAddress(0);
socketAddress = NetUtils.getConnectAddress(socketAddress);
LOG.info("Adding host identified as local: " + host + " as "
+ socketAddress.getHostName());
LOG.info("Adding host identified as local: {} as {}", host, socketAddress.getHostName());
host = socketAddress.getHostName();
}
} catch (UnknownHostException e) {
LOG.warn("Ignoring resolution issues for host: " + host, e);
LOG.warn("Ignoring resolution issues for host: {}", host, e);
Reviewer comment (Member): One placeholder, two arguments.

}
}
this.host = host;
Expand Down