44import threading
55import time
66from subprocess import call
7- from typing import List
7+ from typing import Dict , List
88import azure .batch .models as batch_models
99import aztk .spark
1010from aztk import error
@@ -247,7 +247,14 @@ def print_jobs(jobs: List[aztk.spark.models.Job]):
247247 )
248248
249249
250- def print_job (client , job : aztk .spark .models .Job ):
def get_applications(client, job: aztk.spark.models.Job):
    """Return the job's Spark applications as a dict keyed by application name.

    Prefers the applications already attached to ``job``; when the job carries
    none (falsy ``job.applications``), falls back to querying the service via
    ``client.list_applications`` with the job's id.
    """
    attached = job.applications
    if not attached:
        # Nothing cached on the job object — ask the client directly.
        return client.list_applications(job.id)
    return {app.name: app for app in attached}
255+
256+
257+ def print_job (client , job : aztk .spark .models .Job , apps : bool ):
251258 print_format = '{:<36}| {:<15}'
252259
253260 log .info ("" )
@@ -267,11 +274,12 @@ def print_job(client, job: aztk.spark.models.Job):
267274 log .info (print_format .format ("Cluster" , "Provisioning" ))
268275 log .info ("" )
269276
270- if job .applications :
271- application_summary (job .applications )
272- else :
273- application_summary (client .list_applications (job .id ))
277+ applications = get_applications (client , job )
278+ application_summary (applications )
274279 log .info ("" )
280+ if apps :
281+ print_applications (applications )
282+ log .info ("" )
275283
276284
277285def node_state_count (cluster : aztk .spark .models .Cluster ):
@@ -296,22 +304,22 @@ def print_cluster_summary(cluster: aztk.spark.models.Cluster):
296304 log .info ("" )
297305
298306
299- def application_summary (applications ):
307+ def application_summary (applications : Dict [ str , aztk . spark . models . Application ] ):
300308 states = {"scheduling" : 0 }
301309 for state in batch_models .TaskState :
302310 states [state .name ] = 0
303311
304312 warn_scheduling = False
305313
306- for application in applications :
307- if type ( application ) == str :
314+ for name , application in applications . items () :
315+ if application is None :
308316 states ["scheduling" ] += 1
309317 warn_scheduling = True
310318 else :
311319 states [application .state ] += 1
312320
313321 print_format = '{:<17} {:<14}'
314- log .info ("Applications " )
322+ log .info ("Application States " )
315323 log .info ("-" * 42 )
316324 for state in states :
317325 if states [state ] > 0 :
@@ -320,15 +328,15 @@ def application_summary(applications):
320328 if warn_scheduling :
321329 log .warning ("\n No Spark applications will be scheduled until the master is selected." )
322330
323- def print_applications (applications ):
331+ def print_applications (applications : Dict [ str , aztk . spark . models . Application ] ):
324332 print_format = '{:<36}| {:<15}| {:<16} | {:^9} |'
325333 print_format_underline = '{:-<36}|{:-<16}|{:-<18}|{:-<11}|'
326334 log .info (print_format .format ("Applications" , "State" , "Transition Time" , "Exit Code" ))
327335 log .info (print_format_underline .format ('' , '' , '' , '' ))
328336
329337 warn_scheduling = False
330- for name in applications :
331- if applications [ name ] is None :
338+ for name , application in applications . items () :
339+ if application is None :
332340 log .info (
333341 print_format .format (
334342 name ,
@@ -339,10 +347,9 @@ def print_applications(applications):
339347 )
340348 warn_scheduling = True
341349 else :
342- application = applications [name ]
343350 log .info (
344351 print_format .format (
345- application . name ,
352+ name ,
346353 application .state ,
347354 utc_to_local (application .state_transition_time ),
348355 application .exit_code if application .exit_code is not None else "-"
0 commit comments