This wiki is obsolete, see the NorduGrid web pages for up to date information.
ARC Compute Clients/Python examples
Introduction
The Python interface to arclib is generated with SWIG. To be able to use the Python bindings you have to add the directory containing the bindings to your Python search path. There are several ways to do this. One way, on Linux, is to set the PYTHONPATH environment variable to that directory, as shown below:
export PYTHONPATH=$PYTHONPATH:<arc-install-path>/lib/python2.4/site-packages
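Alternatively, the search path can be extended from inside the script itself instead of through the environment. A minimal sketch, assuming the same installation directory as above (here <arc-install-path> is a placeholder for your actual ARC installation prefix):

#!/usr/bin/python
# Extend the module search path at runtime instead of setting PYTHONPATH.
# Replace <arc-install-path> with your actual ARC installation prefix.
import sys
sys.path.append('<arc-install-path>/lib/python2.4/site-packages')
import arc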
When the arclib SWIG files have been updated by an 'svn update', the arclib Python interface can be rebuilt by running make, and optionally make install, in the python directory of the SVN code.
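For example, assuming the SVN working copy is checked out under <arc-source> (a placeholder; the actual location and build setup depend on your checkout), the rebuild could look like this:

cd <arc-source>/python
make
make install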
Submission
#!/usr/bin/python
import arc, sys

joblist = "jobs.xml"
usercfg = arc.UserConfig()

# Logging...
logger = arc.Logger(arc.Logger_getRootLogger(), "arcsub.py")
logcout = arc.LogStream(sys.stdout)
arc.Logger_getRootLogger().addDestination(logcout)
arc.Logger_getRootLogger().setThreshold(arc.DEBUG)

# Query the A-REX endpoint for submission targets.
targen = arc.TargetGenerator(usercfg, ['ARC1:https://knowarc1.grid.niif.hu:60000/arex'], [])
targen.GetTargets(0, 1)

# Describe a simple /bin/echo job.
job = arc.JobDescription()
job.Application.Executable.Name = '/bin/echo'
job.Application.Executable.Argument.append('Hello')
job.Application.Executable.Argument.append('World')
job.Application.Output = 'std.out'

# std.out will not be deleted when the job has finished.
job_output = arc.FileType()
job_output.Name = 'std.out'
job.DataStaging.File.append(job_output)

targets = targen.FoundTargets()

info = arc.XMLNode(arc.NS(), 'Jobs')
for target in targets:
    submitter = target.GetSubmitter(usercfg)
    submitted = submitter.Submit(job, joblist)
    if submitted:
        print "Job ID: " + submitted.fullstr()
        break
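If the script is saved as, for example, arcsub.py (the file name is arbitrary), it can be run directly. The ID of the submitted job is printed and, through the joblist argument to Submit, recorded in jobs.xml, which the scripts below use to locate active jobs:

python arcsub.py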
Job status
#!/usr/bin/python
import arc, sys

usercfg = arc.UserConfig("")
joblist = "jobs.xml"

# Logging...
logger = arc.Logger(arc.Logger_getRootLogger(), "arcstat.py")
logcout = arc.LogStream(sys.stdout)
arc.Logger_getRootLogger().addDestination(logcout)
arc.Logger_getRootLogger().setThreshold(arc.DEBUG)

jobmaster = arc.JobSupervisor(usercfg, [sys.argv[1]], [], joblist)
jobcontrollers = jobmaster.GetJobControllers()

for job in jobcontrollers:
    job.Stat([], True)
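Assuming the script above is saved as arcstat.py, the job ID printed at submission is passed as the first command-line argument (<job-id> is a placeholder for an actual job ID):

python arcstat.py <job-id>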
Job retrieving
#!/usr/bin/python
import arc, sys

# User configuration file.
# Initialise a default user configuration.
usercfg = arc.UserConfig("")

# List of job ids to process.
jobids = sys.argv[1:]

# List of clusters to process.
clusters = []

# Job list containing active jobs.
joblist = "jobs.xml"

# Process only jobs with the following status codes.
# If list is empty all jobs will be processed.
status = []

# Directory where the job directory will be created.
downloaddir = ""

# Keep the files on the server.
keep = False

# Logging...
logger = arc.Logger(arc.Logger_getRootLogger(), "arcget.py")
logcout = arc.LogStream(sys.stdout)
arc.Logger_getRootLogger().addDestination(logcout)
arc.Logger_getRootLogger().setThreshold(arc.DEBUG)

jobmaster = arc.JobSupervisor(usercfg, jobids, clusters, joblist)
jobcontrollers = jobmaster.GetJobControllers()

for job in jobcontrollers:
    job.Get(status, downloaddir, keep)
Job cleaning
#!/usr/bin/python
import arc, sys

# User configuration file.
# Initialise a default user configuration.
usercfg = arc.UserConfig("")

# List of job ids to process.
jobids = sys.argv[1:]

# List of clusters to process.
clusters = []

# Job list containing active jobs.
joblist = "jobs.xml"

# Process only jobs with the following status codes.
# If list is empty all jobs will be processed.
status = []

# Force cleaning.
force = False

# Logging...
logger = arc.Logger(arc.Logger_getRootLogger(), "arcclean.py")
logcout = arc.LogStream(sys.stdout)
arc.Logger_getRootLogger().addDestination(logcout)
arc.Logger_getRootLogger().setThreshold(arc.DEBUG)

jobmaster = arc.JobSupervisor(usercfg, jobids, clusters, joblist)
jobcontrollers = jobmaster.GetJobControllers()

for job in jobcontrollers:
    job.Clean(status, force)
Job termination
#!/usr/bin/python
import arc, sys

# User configuration file.
# Initialise a default user configuration.
usercfg = arc.UserConfig("")

# List of job ids to process.
jobids = sys.argv[1:]

# List of clusters to process.
clusters = []

# Job list containing active jobs.
joblist = "jobs.xml"

# Process only jobs with the following status codes.
# If list is empty all jobs will be processed.
status = []

# Force killing.
force = False

# Logging...
logger = arc.Logger(arc.Logger_getRootLogger(), "arckill.py")
logcout = arc.LogStream(sys.stdout)
arc.Logger_getRootLogger().addDestination(logcout)
arc.Logger_getRootLogger().setThreshold(arc.DEBUG)

jobmaster = arc.JobSupervisor(usercfg, jobids, clusters, joblist)
jobcontrollers = jobmaster.GetJobControllers()

for job in jobcontrollers:
    job.Kill(status, force)