 os.environ["HADOOP_CONF_DIR"] = "/opt/hadoop/conf/etc/analytix/hadoop.analytix"
 os.environ["JAVA_HOME"] = "/etc/alternatives/jre"
 os.environ["HADOOP_PREFIX"] = "/usr/hdp/hadoop"
-import pydoop.hdfs
+try:
+    import pydoop.hdfs
+except ImportError:
+    pass
 # ########################################################################### #


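Wrapping the pydoop import keeps the script importable on nodes without the
Hadoop Python bindings; anything that needs HDFS can then fall back to local
files. A minimal sketch of that pattern, assuming an availability flag is
wanted (HAS_PYDOOP, store_document, and the paths are hypothetical, not part
of this commit):

try:
    import pydoop.hdfs                 # present only on Hadoop client nodes
    HAS_PYDOOP = True
except ImportError:
    HAS_PYDOOP = False

def store_document(text, hdfsPath, localPath):
    # write to HDFS when pydoop is available, else fall back to local disk
    if HAS_PYDOOP:
        pydoop.hdfs.dump(text, hdfsPath)
    else:
        with open(localPath, "wt") as fileObj:
            fileObj.write(text)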
@@ -295,7 +298,7 @@ def evhc_template_cfg():



-def evhc_grafana_jobs(startTIS, limitTIS):
+def evhc_grafana_jobs(startTIS, limitTIS, mustClauses=None):
299302 """function to fetch HammerCloud HTCondor job records via Grafana"""
300303 # ############################################################# #
301304 # fill global HTCondor list with job records from ElasticSearch #
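The new optional mustClauses parameter lets a caller append extra match
clauses to the query's bool/must list (see the extend() call further down)
and so narrow the job selection without editing the function. A hypothetical
call; the timestamps and the site clause are illustrative, not from this
commit:

# fetch one day of records, additionally restricted to a single site
evhc_grafana_jobs(1577836800, 1577923200,
                  mustClauses=[{'match_phrase': {'data.Site': 'T1_DE_KIT'}}])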
@@ -313,19 +316,42 @@ def evhc_grafana_jobs(startTIS, limitTIS):

     # prepare Lucene ElasticSearch query:
     # ===================================
-    queryString = ("{\"search_type\":\"query_then_fetch\",\"index\":[\"monit" +
-                   "_prod_condor_raw_metric_v002-*\"]}\n{\"query\":{\"bool\"" +
-                   ":{\"must\":[{\"match_phrase\":{\"data.metadata.spider_so" +
-                   "urce\":\"condor_history\"}},{\"match_phrase\":{\"data.CR" +
-                   "AB_UserHN\":\"sciaba\"}}],\"filter\":{\"range\":{\"data." +
-                   "RecordTime\":{\"gte\":%d,\"lt\":%d,\"format\":\"epoch_se" +
-                   "cond\"}}}}},\"_source\":{\"includes\":[\"data.GlobalJobI" +
-                   "d\",\"data.Site\",\"data.Status\",\"data.NumRestarts\"," +
-                   "\"data.RemoveReason\",\"data.Chirp_CRAB3_Job_ExitCode\"," +
-                   "\"data.ExitCode\",\"data.CRAB_Workflow\",\"data.CRAB_Id" +
-                   "\",\"data.CRAB_Retry\",\"data.RecordTime\"]},\"size\":81" +
-                   "92,\"search_after\":[%%d],\"sort\":[{\"data.RecordTime\"" +
-                   ":\"asc\"}]}\n") % (startTIS, limitTIS)
+    queryType = {
+        "search_type": "query_then_fetch",
+        "index": ["monit_prod_condor_raw_metric_v002-*"]
+    }
+    source = {
+        'includes': ['data.GlobalJobId', 'data.Site', 'data.Status',
+                     'data.NumRestarts', 'data.RemoveReason',
+                     'data.Chirp_CRAB3_Job_ExitCode', 'data.ExitCode',
+                     'data.CRAB_Workflow', 'data.CRAB_Id', 'data.CRAB_Retry',
+                     'data.RecordTime']
+    }
+    query = {
+        'bool': {
+            'must': [
+                {'match_phrase': {'data.metadata.spider_source':
+                                      'condor_history'}},
+                {'match_phrase': {'data.CRAB_UserHN': 'sciaba'}}
+            ],
+            'filter': {
+                'range': {
+                    'data.RecordTime': {
+                        'gte': int(startTIS),
+                        'lt': int(limitTIS),
+                        'format': 'epoch_second'}
+                }
+            }
+        },
+    }
+    query['bool']['must'].extend(mustClauses or [])
+    totalQuery = {
+        'query': query,
+        '_source': source,
+        'size': 8192,
+        'search_after': [None],    # Filled later
+        'sort': [{'data.RecordTime': 'asc'}]
+    }

     # prepare regular expression for HammerCloud CRAB workflow name match:
     # ====================================================================
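For reference, the body assembled in the next hunk has Elasticsearch
multi-search (ndjson) framing: a header object naming the index, then the
query object, each serialized onto its own newline-terminated line, which is
exactly what json.dumps(queryType) + '\n' + json.dumps(totalQuery) + '\n'
produces. A standalone sketch of that framing with an illustrative query:

import json

# header line: target index and search type
header = {"search_type": "query_then_fetch",
          "index": ["monit_prod_condor_raw_metric_v002-*"]}
# body line: any regular Elasticsearch request body
body = {"query": {"match_all": {}}, "size": 1}
# ndjson framing: one JSON document per line, each newline-terminated
payload = json.dumps(header) + "\n" + json.dumps(body) + "\n"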
@@ -341,9 +367,11 @@ def evhc_grafana_jobs(startTIS, limitTIS):

     #
     # fetch chunk job records from ElasticSearch:
+    totalQuery['search_after'][0] = int(afterTImS)
+    queryString = json.dumps(queryType) + '\n' + json.dumps(totalQuery) + '\n'
     try:
         requestObj = urllib.request.Request(URL_GRAFANA,
-                         data=(queryString % afterTImS).encode("utf-8"),
+                         data=queryString.encode("utf-8"),
                          headers=HDR_GRAFANA, method="POST")
         responseObj = urllib.request.urlopen(requestObj, timeout=60)
         #
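totalQuery['search_after'] holds a one-element placeholder that is
overwritten with the last RecordTime seen before each POST; together with
the ascending sort and the 8192 chunk size, this is the standard
Elasticsearch search_after cursor for paging past a single response. A
minimal sketch of that loop, reusing queryType, totalQuery, URL_GRAFANA and
HDR_GRAFANA from the patch; the enclosing loop and the response layout are
not shown in this diff, so those details are assumptions:

import json
import urllib.request

afterTImS = 0                            # cursor: last RecordTime handled
while True:
    totalQuery['search_after'][0] = int(afterTImS)
    queryString = json.dumps(queryType) + '\n' + json.dumps(totalQuery) + '\n'
    requestObj = urllib.request.Request(URL_GRAFANA,
                                        data=queryString.encode("utf-8"),
                                        headers=HDR_GRAFANA, method="POST")
    with urllib.request.urlopen(requestObj, timeout=60) as responseObj:
        result = json.loads(responseObj.read().decode("utf-8"))
    hits = result['responses'][0]['hits']['hits']   # assumed _msearch layout
    if not hits:
        break                            # no records left in the time window
    afterTImS = hits[-1]['sort'][0]      # advance cursor past the last hit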