extras-buildsys/builder Builder.py, 1.15, 1.16 BuilderMock.py, 1.9, 1.10

Daniel Williams (dcbw) fedora-extras-commits at redhat.com
Mon May 29 20:26:39 UTC 2006


Author: dcbw

Update of /cvs/fedora/extras-buildsys/builder
In directory cvs-int.fedora.redhat.com:/tmp/cvs-serv877/builder

Modified Files:
	Builder.py BuilderMock.py 
Log Message:
2006-05-29  Dan Williams  <dcbw at redhat.com>

    * Make most everything work now; both Passive and Active builders now get
        the SRPM from the server.  The SRPM's URL is no longer passed to the
        builder in the initial job request, but comes along later once the SRPM
        is actually available to the builder (which might be after an upload to
        the builder in the case of Passive builders).
    * Pylint cleanups
    * Clean up logging on builders by consolidating newline handling in one place
    * Use the Targets command to pass back to the server any upload URL the
        builder might have (for Passive builders)
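
A minimal sketch (not part of the commit itself) of the deferred-SRPM flow
described above: an arch job now starts in 'init' state and only moves to
'downloading' once the server sends a separate JobSRPM command carrying the
URL.  Method and command names mirror the diff below; the command stub and
the print call are simplified stand-ins for plague's real classes.

    class PlgCommandJobSRPM:
        # Simplified stand-in for the real command object
        def __init__(self, archjob_id, srpm_url):
            self._archjob_id = archjob_id
            self._srpm_url = srpm_url
        def archjob_id(self):
            return self._archjob_id
        def srpm_url(self):
            return self._srpm_url

    class ArchJob:
        def __init__(self, uniqid):
            self._uniqid = uniqid
            self._status = 'init'      # waits here until the SRPM URL arrives
        def notify_srpm_url(self, srpm_url):
            if self._status != 'init':
                return
            self._status = 'downloading'   # real code kicks off the download here
            print("%s: downloading %s" % (self._uniqid, srpm_url))

    class Builder:
        def __init__(self):
            self._all_jobs = {}
        def _handle_job_srpm_command(self, cmd):
            # Mirrors the handler added in Builder.py 1.16
            try:
                self._all_jobs[cmd.archjob_id()].notify_srpm_url(cmd.srpm_url())
            except KeyError:
                pass

    builder = Builder()
    builder._all_jobs['abc123'] = ArchJob('abc123')
    builder._handle_job_srpm_command(
            PlgCommandJobSRPM('abc123', 'http://server/foo-1.0-1.src.rpm'))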




Index: Builder.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/builder/Builder.py,v
retrieving revision 1.15
retrieving revision 1.16
diff -u -r1.15 -r1.16
--- Builder.py	20 May 2006 05:10:02 -0000	1.15
+++ Builder.py	29 May 2006 20:26:32 -0000	1.16
@@ -94,6 +94,7 @@
         self._server_ip = None
         self._get_server_ip()
 
+        self._upload_url = ""
         self._certs = None
         self._use_ssl = cfg.get_bool("SSL", "use_ssl")
         if self._use_ssl:
@@ -218,7 +219,7 @@
 
         # always send a target list
         next_seq = self._seq_gen.next()
-        cmd = Commands.PlgCommandTargets(self.supported_targets(), next_seq)
+        cmd = Commands.PlgCommandTargets(self.supported_targets(), self._upload_url, next_seq)
         defcmds.append(cmd)
 
         # always send free & max slots
@@ -250,6 +251,8 @@
             reply = self._handle_job_files_request(cmd)
             if reply:
                 self._queued_cmds.append(reply)
+        elif isinstance(cmd, Commands.PlgCommandJobSRPM):
+            self._handle_job_srpm_command(cmd)
 
     def _process_server_commands(self, cmd_list):
         """Process the server's command stream."""
@@ -281,7 +284,7 @@
 
     def _start_new_job(self, cmd):
         target_dict = cmd.target_dict()
-        srpm_url = cmd.srpm_url()
+        jobname = cmd.job_name()
         uniqid = cmd.archjob_id()
         target_str = Config.make_target_string(target_dict['distro'], target_dict['target'], target_dict['arch'], target_dict['repo'])
 
@@ -289,29 +292,27 @@
         (free_slots, max_slots) = self.slots()
         if free_slots <= 0:
             msg = "Error: Tried to build '%s' on target %s when already building" \
-                        " maximum (%d) jobs" % (srpm_url, target_str, max_slots)
+                        " maximum (%d) jobs" % (jobname, target_str, max_slots)
             self._log(msg)
             return (-1, msg)
 
         target_cfg = self._get_target_cfg(target_dict)
         if not target_cfg:
-            msg = "Error: Tried to build '%s' on target %s which isn't supported" % (srpm_url, target_str)
+            msg = "Error: Tried to build '%s' on target %s which isn't supported" % (jobname, target_str)
             self._log(msg)
             return (-1, msg)
 
-        archjob = None
         try:
-            archjob = self._new_job_for_arch(target_cfg, target_dict['arch'], srpm_url, uniqid)
+            archjob = self._new_job_for_arch(target_cfg, target_dict['arch'], jobname, uniqid)
             self._all_jobs[uniqid] = archjob
             self._building_jobs_lock.acquire()
             self._building_jobs.append(archjob)
             self._building_jobs_lock.release()
-            filename = os.path.basename(srpm_url)
-            msg = "%s: started %s on %s arch %s at time %d" % (uniqid, filename,
+            msg = "%s: started %s on %s arch %s at time %d" % (uniqid, jobname,
                         target_str, target_dict['arch'], archjob.starttime())
             archjob.start()
         except (OSError, TypeError), exc:
-            msg = "Failed request for %s on %s: '%s'" % (srpm_url,
+            msg = "Failed request for %s on %s: '%s'" % (jobname,
                     target_str, exc)
 
         self._log(msg)
@@ -355,6 +356,14 @@
         except KeyError:
             pass
 
+    def _handle_job_srpm_command(self, cmd):
+        try:
+            uniqid = cmd.archjob_id()
+            archjob = self._all_jobs[uniqid]
+            archjob.notify_srpm_url(cmd.srpm_url())
+        except KeyError:
+            pass
+
 
 class PassiveBuilder(Builder, threading.Thread):
     """
@@ -382,9 +391,7 @@
         """Startup HTTP and XML-RPC servers which the build server uses
         to talk to us."""
         hostname = get_hostname(self._cfg, bind_all=True)
-        xmlrpc_port = self._cfg.get_int("Passive", "xmlrpc_port")
-        
-        self._log("Binding to address '%s:%d'\n" % (hostname, xmlrpc_port))
+        external_hostname = get_hostname(self._cfg)
 
         try:
             self._http_server = HTTPServer.PlgHTTPServerManager((hostname, self._fileserver_port),
@@ -392,8 +399,15 @@
         except socket.error, exc:
             raise socket.error(exc[0], "Couldn't create server for %s:%s: '%s'" % (hostname,
                     self._fileserver_port, exc[1]))
-        self._http_server.set_POST_handler('/upload', self.upload_callback)
 
+        # Set up upload handlers and addresses
+        upload_loc = "/upload"
+        host = self._prefix_url(external_hostname)
+        self._http_server.set_POST_handler(upload_loc, self.upload_callback)
+        self._upload_url = "%s:%d%s" % (host, self._fileserver_port, upload_loc)
+
+        xmlrpc_port = self._cfg.get_int("Passive", "xmlrpc_port")
+        self._log("Binding to address '%s:%d'\n" % (external_hostname, xmlrpc_port))
         try:
             if self._use_ssl:
                 self._xmlrpc_server = AuthedSSLXMLRPCServer((hostname, xmlrpc_port), None, self._certs)
@@ -428,8 +442,8 @@
             if item.name == 'archjob_id':
                 try:
                     jobid = urllib.unquote(str(item.value))
-                    # Ensure archjob_id is only as long as a sha1 hash
-                    if len(jobid) is not 40:
+                    # ensure we know about this job already
+                    if not jobid in self._all_jobs.keys():
                         jobid = None
                 except ValueError:
                     pass
@@ -524,6 +538,7 @@
         msg = "Failed"
         for fpath in files:
             if fpath.startswith(work_dir):
+                result = FileTransfer.FT_RESULT_SUCCESS
                 continue
             last_part = fpath[len(work_dir):]
             new_path = os.path.join(work_dir, last_part)
@@ -531,7 +546,7 @@
                 shutil.move(fpath, new_path)
                 result = FileTransfer.FT_RESULT_SUCCESS
             except IOError, exc:
-                msg = str(exc)
+                msg = "Failed moving %s to %s: '%s'" % (fpath, new_path, str(exc))
                 break
 
         if result == FileTransfer.FT_RESULT_SUCCESS:
@@ -569,6 +584,7 @@
         self.done = False
         self.failed = False
         self.response = None
+        self.err_msg = ''
         threading.Thread.__init__(self)
 
     def run(self):
@@ -578,9 +594,10 @@
             self.response = self._server.request(cmd_stream)
         except (socket.error, socket.timeout, OpenSSL.SSL.SysCallError,
                 OpenSSL.SSL.Error, xmlrpclib.ProtocolError), exc:
+            self.err_msg = "Builder Error (%s) in request(): network problem '%s'" % (self._address, exc)
             self.failed = True
         except xmlrpclib.Fault, exc:
-            print "Builder Error (%s) in request(): server replied '%s'" % (self._address, exc)
+            self.err_msg = "Builder Error (%s) in request(): server replied '%s'" % (self._address, exc)
             self.failed = True
         self.done = True
 
@@ -605,6 +622,7 @@
     def download_srpm(self, archjob_id, url, target_dir, dl_callback, cb_data=None):
         """Download an SRPM from the build server.  Only used by BuilderMock
         objects."""
+        self._log("%s: Starting download of %s.\n" % (archjob_id, url))
         downloader = FileDownloader.FileDownloader(url, target_dir, ['.src.rpm'],
                 self._certs)
         downloader.set_callback(dl_callback, url)
@@ -655,9 +673,12 @@
                 break
             time.sleep(0.5)
 
-        if req.done and not req.failed:
-            self._queued_cmds = []
-            return req.response
+        if req.done:
+            if not req.failed:
+                self._queued_cmds = []
+                return req.response
+            else:
+                self._log(req.err_msg)
         return None
 
     def run(self):


Index: BuilderMock.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/builder/BuilderMock.py,v
retrieving revision 1.9
retrieving revision 1.10
diff -u -r1.9 -r1.10
--- BuilderMock.py	20 May 2006 05:07:56 -0000	1.9
+++ BuilderMock.py	29 May 2006 20:26:32 -0000	1.10
@@ -40,9 +40,10 @@
     """puts things together for an arch - baseclass for handling builds for 
        other arches"""
 
-    def __init__(self, controller, target_cfg, buildarch, srpm_url, uniqid):
+    def __init__(self, controller, target_cfg, buildarch, jobname, uniqid):
         self._controller = controller
         self._buildarch = buildarch
+        self._jobname = jobname
         self._starttime = time.time()
         self._endtime = 0
         self._mockstarttime = 0
@@ -55,8 +56,8 @@
         self._childpid = 0
         self._target_cfg = target_cfg
         self._builder_cfg = target_cfg.parent_cfg()
-        self._srpm_url = srpm_url
-        self._srpm_tries = 0
+        self._srpm_path = None
+        self._srpm_wait_start = 0
         self._log_fd = None
         self._mock_config = None
         self._done_status = ''
@@ -100,7 +101,7 @@
     def _handle_death(self):
         self._die = False
         self._done_status = 'killed'
-        self._log("Killing build process...\n")
+        self._log("Killing build process...")
 
         if self._downloader:
             self._downloader.cancel()
@@ -116,30 +117,32 @@
                     # Kill all members of the child's process group
                     os.kill(child_pgroup, 9)
                 except OSError, exc:
-                    self._log("ERROR: Couldn't kill child process group %d: %s\n" % (child_pgroup, exc))
+                    self._log("ERROR: Couldn't kill child process group %d: %s" % (child_pgroup, exc))
                 else:
                     # Ensure child process is reaped
-                    self._log("Waiting for mock process %d to exit...\n" % self._childpid)
+                    self._log("Waiting for mock process %d to exit..." % self._childpid)
                     try:
                         (pid, status) = os.waitpid(self._childpid, 0)
                     except OSError:
                         pass
-                    self._log("Mock process %d exited.\n" % self._childpid)
+                    self._log("Mock process %d exited." % self._childpid)
                 self._childpid = 0
 
             # Start cleanup up the job
             self._start_cleanup()
 
-        self._log("Killed.\n");
+        self._log("Killed.");
 
-    def _log(self, msg):
+    def _log(self, msg, newline=True):
         if msg and self._log_fd:
+            if newline:
+                msg = msg + "\n"
             self._log_fd.write(msg)
             self._log_fd.flush()
             os.fsync(self._log_fd.fileno())
             if self._builder_cfg.get_bool("General", "debug"):
-                s = "%s: " % self._uniqid
-                sys.stdout.write(s + msg)
+                logtext = "%s: %s" % (self._uniqid, msg)
+                sys.stdout.write(logtext)
                 sys.stdout.flush()
 
     def _copy_mock_output_to_log(self):
@@ -155,7 +158,7 @@
             self._mock_log = None
 
     def _start_build(self):
-        self._log("Starting step 'building' with command:\n")
+        self._log("Starting step 'building' with command:")
         if not os.path.exists(self._result_dir):
             os.makedirs(self._result_dir)
 
@@ -177,7 +180,7 @@
         args.append("--statedir=%s" % self._state_dir)
         args.append("--uniqueext=%s" % self._uniqid)
         args.append(self._srpm_path)
-        self._log("   %s\n" % string.join(args))
+        self._log("   %s" % string.join(args))
 
         self._mock_log = os.path.join(self._result_dir, "mock-output.log")
         self._childpid = ExecUtils.exec_with_redirect(cmd, args, None, self._mock_log, self._mock_log)
@@ -185,7 +188,7 @@
         self._status = 'prepping'
 
     def _start_cleanup(self):
-        self._log("Cleaning up the buildroot...\n")
+        self._log("Cleaning up the buildroot...")
 
         args = []
         builder_cmd = os.path.abspath(self._builder_cfg.get_str("General", "builder_cmd"))
@@ -201,7 +204,7 @@
         args.append("-r")
         args.append(self._buildroot)
 
-        self._log("   %s\n" % string.join(args))
+        self._log("   %s" % string.join(args))
         self._childpid = ExecUtils.exec_with_redirect(cmd, args, None, None, None)
         self._status = 'cleanup'
 
@@ -274,32 +277,62 @@
         f.close()
         return contents
 
-    def dl_callback(self, dl_status, cb_data, err_msg=None):
-        if not self.is_done_status():
-            url = cb_data
-            if dl_status == FileTransfer.FT_RESULT_SUCCESS:
-                self._status = 'downloaded'
-                self._log("Retrieved %s.\n" % url)
-            elif dl_status == FileTransfer.FT_RESULT_FAILED:
-                self._done_status = 'failed'
-                self._log("ERROR: Failed to retrieve '%s' because: %s\n" % (url, err_msg))
-                self._post_cleanup()
-            elif dl_status == FileTransfer.FT_RESULT_CANCELED:
-                # Ignore cancelation
-                pass
+    def _srpm_download_cb(self, dl_status, cb_data, err_msg=None):
+        # We might have been cancelled
+        if self.is_done_status():
+            return
+
+        url = cb_data
+        if dl_status == FileTransfer.FT_RESULT_SUCCESS:
+            self._status = 'downloaded'
+            self._log("Retrieved %s." % url)
+        elif dl_status == FileTransfer.FT_RESULT_FAILED:
+            self._log("ERROR: Failed to retrieve '%s' because: %s" % (url, err_msg))
+            self._post_cleanup('failed')
+        elif dl_status == FileTransfer.FT_RESULT_CANCELED:
+            # Ignore cancelation
+            pass
         self._downloader = None
 
     def _status_init(self):
-        self._log("Starting download of %s.\n" % self._srpm_url)
-        self._status = 'downloading'
+        if not self._srpm_wait_start:
+            self._srpm_wait_start = time.time()
+
+        # Kill a job that's waiting for its SRPM URL
+        # after 30 minutes because it's likely orphaned
+        if time.time() > (self._srpm_wait_start + (60 * 30)):
+            self._log("Job waited too long waiting for its SRPM URL. Killing it...")
+            self._post_cleanup('failed')
+
+    def notify_srpm_url(self, srpm_url):
+        """Called by our controlling Builder object to tell us that the server
+        has sent our SRPM URL."""
+        if self._status != "init":
+            return
+
+        err_msg = None
         try:
-            target_dir = os.path.dirname(self._srpm_path)
-            self._downloader = self._controller.download_srpm(self._uniqid,
-                    self._srpm_url, target_dir, self.dl_callback, self._srpm_url)
+            # Validate the SRPM URL and the SRPM's filename
+            srpm_filename = FileDownloader.get_base_filename_from_url(srpm_url, ['.src.rpm'])
+            self._srpm_path = os.path.join(self._source_dir, srpm_filename)
+            success = True
         except FileDownloader.FileNameException, exc:
-            self._done_status = 'failed'
-            self._log("ERROR: Failed to begin SRPM download.  Error: '%s'  URL: %s\n" % (exc, self._srpm_url))
-            self._post_cleanup()
+            err_msg = "ERROR: SRPM file name was invalid.  Message: '%s'" % exc
+
+        if not err_msg:
+            # Ask our controlling builder to download it (if an Active builder)
+            # or to move it for us (if a Passive builder)
+            self._status = 'downloading'
+            try:
+                target_dir = os.path.dirname(self._srpm_path)
+                self._downloader = self._controller.download_srpm(self._uniqid,
+                        srpm_url, target_dir, self._srpm_download_cb, srpm_url)
+            except FileDownloader.FileNameException, exc:
+                err_msg = "ERROR: Failed to begin SRPM download.  Error: '%s'  URL: %s" % (exc, srpm_url)
+
+        if err_msg:
+            self._log(err_msg)
+            self._post_cleanup('failed')
 
     def _status_downloading(self):
         pass
@@ -309,18 +342,18 @@
         # server tells us the repo is unlocked.
         if not self._repo_locked:
             self._start_build()
-        else:
-            # Only show this message once
-            if self._repo_wait_start <= 0:
-                self._log("Waiting for repository to unlock before starting the build...\n")
-                self._repo_wait_start = time.time()
-
-            # Kill a job in 'downloaded' state after 30 minutes because
-            # it's likely orphaned
-            if time.time() > (self._repo_wait_start + (60 * 30)):
-                self._done_status = 'failed'
-                self._log("Job waited too long for repo to unlock. Killing it...\n")
-                self._post_cleanup()
+            return
+
+        # Only show this message once
+        if self._repo_wait_start <= 0:
+            self._log("Waiting for repository to unlock before starting the build...")
+            self._repo_wait_start = time.time()
+
+        # Kill a job in 'downloaded' state after 30 minutes because
+        # it's likely orphaned
+        if time.time() > (self._repo_wait_start + (60 * 30)):
+            self._log("Job waited too long for repo to unlock. Killing it...")
+            self._post_cleanup('failed')
 
     def _watch_mock(self, good_exit, bad_exit):
         (aux_pid, status) = os.waitpid(self._childpid, os.WNOHANG)
@@ -349,9 +382,8 @@
             # something is wrong if mock takes more than 15s to write the status file
             if time.time() > self._mockstarttime + 15:
                 self._mockstarttime = 0
-                self._done_status = 'failed'
-                self._log("ERROR: Timed out waiting for the mock status file!  %s\n" % mockstatusfile)
-                self._post_cleanup()
+                self._log("ERROR: Timed out waiting for the mock status file!  %s" % mockstatusfile)
+                self._post_cleanup('failed')
         else:
             if not self._mock_config and self._mock_is_prepping():
                 self._mock_config = self._read_mock_config()
@@ -364,8 +396,8 @@
     def _status_cleanup(self):
         (aux_pid, status) = os.waitpid(self._childpid, os.WNOHANG)
         if aux_pid:
-            self._childpid = 0
             # Mock exited
+            self._childpid = 0
             if self._mock_config:
                 if self._mock_config.has_key('rootdir'):
                     mock_root_dir = os.path.abspath(os.path.join(self._mock_config['rootdir'], "../"))
@@ -384,7 +416,7 @@
         # Ensure child process is reaped if it's still around
         if self._childpid:
             try:
-                self._log("Waiting for child process %d to exit.\n" % self._childpid)
+                self._log("Waiting for child process %d to exit." % self._childpid)
                 (pid, status) = os.waitpid(self._childpid, 0)
             except OSError:
                 self._childpid = 0
@@ -395,21 +427,25 @@
 
         self._post_cleanup()
 
-    def _post_cleanup(self):
+    def _post_cleanup(self, done_status=None):
+        if done_status:
+            self._done_status = done_status
+
+        # If we're killed, we don't care about uploading logs to the build server
         if self._done_status is not 'killed':
             self._status = "uploading"
             self._files = self._find_files()
             self._uploader = self._controller.upload_files(self._uniqid, self._files,
-                    self.ul_callback, None)
+                    self._upload_cb, None)
         else:
             self._status = self._done_status        
 
-    def ul_callback(self, status, cb_data, msg):
+    def _upload_cb(self, status, cb_data, msg):
         if status == FileTransfer.FT_RESULT_SUCCESS:
             pass
         elif status == FileTransfer.FT_RESULT_FAILED:
             self._done_status = 'failed'
-            self._log("Job failed because files could not be uploaded: %s\n" % msg)
+            self._log("Job failed because files could not be uploaded: %s" % msg)
         self._status = self._done_status
         self._uploader = None
 
@@ -417,14 +453,14 @@
         pass
 
     def _job_done(self):
-        self._log("-----------------------\n")
+        self._log("-----------------------")
         if self._status == 'done':
-            self._log("Job completed successfully.\n")
+            self._log("Job completed successfully.")
         elif self._status == 'failed':
-            self._log("Job failed due to build errors!  Please see build logs.\n")
+            self._log("Job failed due to build errors!  Please see build logs.")
         elif self._status == 'killed':
-            self._log("Job failed because it was killed.\n")
-        self._log("\n\n")
+            self._log("Job failed because it was killed.")
+        self._log("\n")
 
         if self._log_fd:
             self._log_fd.close()
@@ -433,21 +469,12 @@
     def run(self):
         # Print out a nice message at the start of the job
         target_str = Config.make_target_string_from_dict(self._target_cfg.target_dict())
-        time_str = time.asctime(time.localtime(self._starttime))
-        self._log("""Starting job:
-   Time: %s
-   Target: %s
-   UID: %s
-   Architecture: %s
-   SRPM: %s\n\n""" % (time_str, target_str, self._uniqid, self._buildarch, self._srpm_url))
-
-        try:
-            srpm_filename = FileDownloader.get_base_filename_from_url(self._srpm_url, ['.src.rpm'])
-            self._srpm_path = os.path.join(self._source_dir, srpm_filename)
-        except FileDownloader.FileNameException, exc:
-            self._done_status = 'failed'
-            self._log("ERROR: SRPM file name was invalid.  Message: '%s'\n" % exc)
-            self._post_cleanup()
+        self._log("Starting job:")
+        self._log("   Name:   %s" % self._jobname)
+        self._log("   UID:    %s" % self._uniqid)
+        self._log("   Arch:   %s" % self._buildarch)
+        self._log("   Time:   %s" % time.asctime(time.localtime(self._starttime)))
+        self._log("   Target: %s" % target_str)
 
         # Main build job work loop
         while not self.is_done_status():
@@ -458,9 +485,8 @@
             try:
                 func = getattr(self, "_status_%s" % self._status)
             except AttributeError:
-                self._done_status = 'failed'
-                self._log("ERROR: internal builder inconsistency, didn't recognize status '%s'.\n" % self._status)
-                self._post_cleanup()
+                self._log("ERROR: internal builder inconsistency, didn't recognize status '%s'." % self._status)
+                self._post_cleanup('failed')
             else:
                 func()
             time.sleep(3)
@@ -476,8 +502,8 @@
         files_in_dir = os.listdir(self._result_dir)
         file_list = []
         self._log("\n")
-        self._log("Output File List:\n")
-        self._log("-----------------\n")
+        self._log("Output File List:")
+        self._log("-----------------")
         log_files = []
         rpms = []
         # sort into logs first, rpms later
@@ -493,10 +519,10 @@
         i = 1
         num_files = len(file_list)
         for fpath in file_list:
-            self._log("  File (%d of %d): %s\n" % (i, num_files,
+            self._log("  File (%d of %d): %s" % (i, num_files,
                     os.path.basename(fpath)))
             i = i + 1
-        self._log("-----------------\n")
+        self._log("-----------------")
         return file_list
 
     def status(self):
@@ -520,34 +546,34 @@
 class InvalidTargetError(exceptions.Exception): pass
 
 class i386Arch(BuilderMock):
-    def __init__(self, controller, target_cfg, buildarch, srpm_url, uniqid):
+    def __init__(self, controller, target_cfg, buildarch, jobname, uniqid):
         self.arch_command = '/usr/bin/setarch i686'
-        BuilderMock.__init__(self, controller, target_cfg, buildarch, srpm_url, uniqid)
+        BuilderMock.__init__(self, controller, target_cfg, buildarch, jobname, uniqid)
 
 class x86_64Arch(BuilderMock):
-    def __init__(self, controller, target_cfg, buildarch, srpm_url, uniqid):
+    def __init__(self, controller, target_cfg, buildarch, jobname, uniqid):
         self.arch_command = ''
-        BuilderMock.__init__(self, controller, target_cfg, buildarch, srpm_url, uniqid)
+        BuilderMock.__init__(self, controller, target_cfg, buildarch, jobname, uniqid)
 
 class PPCArch(BuilderMock):
-    def __init__(self, controller, target_cfg, buildarch, srpm_url, uniqid):
+    def __init__(self, controller, target_cfg, buildarch, jobname, uniqid):
         self.arch_command = '/usr/bin/setarch ppc32'
-        BuilderMock.__init__(self, controller, target_cfg, buildarch, srpm_url, uniqid)
+        BuilderMock.__init__(self, controller, target_cfg, buildarch, jobname, uniqid)
 
 class PPC64Arch(BuilderMock):
-    def __init__(self, controller, target_cfg, buildarch, srpm_url, uniqid):
+    def __init__(self, controller, target_cfg, buildarch, jobname, uniqid):
         self.arch_command = ''
-        BuilderMock.__init__(self, controller, target_cfg, buildarch, srpm_url, uniqid)
+        BuilderMock.__init__(self, controller, target_cfg, buildarch, jobname, uniqid)
 
 class SparcArch(BuilderMock):
-    def __init__(self, controller, target_cfg, buildarch, srpm_url, uniqid):
+    def __init__(self, controller, target_cfg, buildarch, jobname, uniqid):
         self.arch_command = '/usr/bin/sparc32'
-        BuilderMock.__init__(self, controller, target_cfg, buildarch, srpm_url, uniqid)
+        BuilderMock.__init__(self, controller, target_cfg, buildarch, jobname, uniqid)
 
 class Sparc64Arch(BuilderMock):
-    def __init__(self, controller, target_cfg, buildarch, srpm_url, uniqid):
+    def __init__(self, controller, target_cfg, buildarch, jobname, uniqid):
         self.arch_command = '/usr/bin/sparc64'
-        BuilderMock.__init__(self, controller, target_cfg, buildarch, srpm_url, uniqid)
+        BuilderMock.__init__(self, controller, target_cfg, buildarch, jobname, uniqid)
 
 
 BuilderClassDict = {



