
[Pulp-list] package refactor pushed



I just pushed the big refactor that merges:

Package
PackageVersion

to:

Package

http://git.fedorahosted.org/git/?p=pulp.git;a=commit;h=17aceb0131cf3ffd388232c1b0af904942d291e5

There are 2 unit test failures I'm working on resolving. Things I have not done yet but will do ASAP:

* test the refactor's effects on the webservices
* test the refactor's effects on the CLI
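
A quick sketch of what callers see after the merge (this is not part of the
patch; the NEVRA values below are only illustrative, the signatures come
from the diff):

    import pulp.util
    from pulp.api.package import PackageApi

    config = pulp.util.loadConfig("../../etc/pulp.ini")
    papi = PackageApi(config)

    # Package now carries the full NEVRA plus description, checksum and
    # filename (the old PackageVersion fields), so create() takes them all.
    p = papi.create(name="pulp-test-package", epoch="0", version="0.3.1",
            release="1.fc11", arch="x86_64", description="test package",
            checksum_type="sha256",
            checksum="6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f",
            filename="pulp-test-package-0.3.1-1.fc11.x86_64.rpm")

    # package() is now a search: pass any combination of fields and you get
    # back a pymongo cursor of the matching package documents.
    found = papi.package(filename="pulp-test-package-0.3.1-1.fc11.x86_64.rpm",
            checksum_type="sha256",
            checksum="6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f")
    print "found %s matching packages" % found.count()

Note the renames that go with it: consumerswithpackage() on ConsumerApi is
now consumers_with_package_name(), and packageversion_by_ivera() is now
package_by_ivera() on PackageApi.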

Feel free to yell about anything I've massively broken,
Mike
--
Mike McCune
mmccune AT redhat.com
Red Hat Engineering       | Portland, OR
Systems Management        | 650.254.4248
diff --git a/playpen/mongodb/display_pkgs.py b/playpen/mongodb/display_pkgs.py
new file mode 100755
index 0000000..f43b354
--- /dev/null
+++ b/playpen/mongodb/display_pkgs.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+import time
+from pymongo import Connection
+from pymongo.son_manipulator import AutoReference, NamespaceInjector
+from optparse import OptionParser
+
+import pulp.util
+from pulp.api.package_version import PackageApi
+
+if __name__ == "__main__":
+    
+    package_id = "pulp-test-package"
+    checksum = "6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f"
+    checksum_type = "sha256"
+    filename = "pulp-test-package-0.3.1-1.fc11.x86_64.rpm"
+
+
+    config = pulp.util.loadConfig("../../etc/pulp.ini")
+    pApi = PackageApi(config)
+
+    found = pApi.package(filename=filename, checksum_type=checksum_type, checksum=checksum)
+    print "Lookup for %s, %s, %s yielded %s" % (filename, checksum_type, checksum, found)
+
+    db = pApi.objectdb
+    print "db = %s" % (db)
+    found = db.find({"filename":filename})
+    print "Search for all PV's with %s: %s" % (filename, found)
+    for f in found:
+        print f
+    found = db.find()
+    print "%s PV objects found with an open search" % (found.count())
+
+    found = pApi.package()
+    print "search with empty searchDict returned %s results" % (found.count())
+
diff --git a/src/juicer/controllers/packages.py b/src/juicer/controllers/packages.py
index 3ebff9f..4a5a1f4 100644
--- a/src/juicer/controllers/packages.py
+++ b/src/juicer/controllers/packages.py
@@ -29,8 +29,8 @@ from pulp.api.package import PackageApi
 # DELETE -  Delete all packages
 # 
 # /packages/<name>
-# GET    -  All package versions for that package name
-# DELETE -  All package versions for that package name
+# GET    -  All package for that package name
+# DELETE -  All package for that package name
 # 
 # /packages/<name>/<version>/<release>/<epoch>/<arch>
 # GET    -  Package version details for that package version
@@ -85,5 +85,5 @@ class Versions(JSONController):
 
     @JSONController.error_handler
     def GET(self, name, version, release, epoch, arch):
-        pv = API.packageversion_by_ivera(name, version, epoch, release, arch)
+        pv = API.package_by_ivera(name, version, epoch, release, arch)
         return self.output(pv)
diff --git a/src/pulp/api/base.py b/src/pulp/api/base.py
index c57e1bd..dfcadd5 100644
--- a/src/pulp/api/base.py
+++ b/src/pulp/api/base.py
@@ -26,7 +26,8 @@ class BaseApi(object):
 
         # Mongo DB
         self.connection = pymongo.Connection()
-        self.db = self.connection._database
+        #self.db = self.connection._database
+        self.db = self.connection._database_model_changes
         # Inject the collection's namespace into each object
         self.db.add_son_manipulator(NamespaceInjector())
         # Provides auto-referencing/auto-dereferencing ability
diff --git a/src/pulp/api/consumer.py b/src/pulp/api/consumer.py
index 70890d8..4a18ebd 100644
--- a/src/pulp/api/consumer.py
+++ b/src/pulp/api/consumer.py
@@ -12,14 +12,16 @@
 # Red Hat trademarks are not licensed under GPLv2. No permission is
 # granted to use or replicate Red Hat trademarks that are incorporated
 # in this software or its documentation.
-
-import pymongo
 import logging
-# Pulp
+import pymongo
+import re
+
 from pulp import model
 from pulp.api.base import BaseApi
-from pulp.util import chunks
 from pulp.pexceptions import PulpException
+from pulp.util import chunks
+
+# Pulp
 
 log = logging.getLogger('pulp.api.consumer')
 
@@ -70,11 +72,13 @@ class ConsumerApi(BaseApi):
         """
         return self.objectdb.find_one({'id': id})
     
-    def consumerswithpackage(self, packageid):
+    def consumers_with_package_name(self, name):
         """
-        List consumers using passed in packageid
+        List consumers using passed in name
         """
-        return list(self.objectdb.find({"packageids":  packageid}))
+        
+        regex = re.compile(".*%s" % name)
+        return list(self.objectdb.find({"packageids": regex}))
 
     def bind(self, id, repoid):
         """
diff --git a/src/pulp/api/package.py b/src/pulp/api/package.py
index ac32d72..fba9d50 100644
--- a/src/pulp/api/package.py
+++ b/src/pulp/api/package.py
@@ -23,39 +23,111 @@ class PackageApi(BaseApi):
 
     def __init__(self, config):
         BaseApi.__init__(self, config)
+        self.objectdb.ensure_index([('name', pymongo.DESCENDING), 
+            ('epoch', pymongo.DESCENDING), 
+            ('version', pymongo.DESCENDING),
+            ('release', pymongo.DESCENDING),
+            ('arch', pymongo.DESCENDING), 
+            ('filename', pymongo.DESCENDING),
+            ('checksum', pymongo.DESCENDING)], 
+            unique=True, background=True)
+        
 
     def _get_unique_indexes(self):
         return []
 
     def _get_indexes(self):
-        return ["packageid"]
+        return ["name", "filename", "checksum", "epoch", "version", "release",
+                "arch", "description"]
         
     def _getcollection(self):
         return self.db.packages
         
-    def create(self, id, name):
+        
+    def create(self, name, epoch, version, release, arch, description, 
+            checksum_type, checksum, filename):
         """
         Create a new Package object and return it
         """
-        p = model.Package(id, name)
+        p = model.Package(name, epoch, version, release, arch, description,
+                checksum_type, checksum, filename)
         self.insert(p)
         return p
-        
-    def package(self, id, filter=None):
+
+    def delete(self, object):
         """
-        Return a single Package object
+        Delete package version object based on "_id" key
         """
-        return self.objectdb.find_one({'packageid': id})
+        self.objectdb.remove({"_id":object["_id"]})
+
+    def package(self, name=None, epoch=None, version=None, release=None, arch=None, 
+            filename=None, checksum_type=None, checksum=None):
+        """
+        Return a list of all package version objects matching search terms
+        """
+        searchDict = {}
+        if name:
+            searchDict['name'] = name
+        if epoch:
+            searchDict['epoch'] = epoch
+        if version:
+            searchDict['version'] = version
+        if release:
+            searchDict['release'] = release
+        if arch:
+            searchDict['arch'] = arch
+        if filename:
+            searchDict['filename'] = filename
+        if checksum_type and checksum:
+            searchDict['checksum.%s' % checksum_type] = checksum
+        return self.objectdb.find(searchDict)
 
     def packages(self):
         """
         List all packages.  Can be quite large
         """
         return list(self.objectdb.find())
-        
+
+    def package_by_ivera(self, name, version, epoch, release, arch):
+        """
+        Returns the package version identified by the given package and VERA.
+        """
+        return self.objectdb.find_one({'packageid' : package_id, 'version' : version,
+                                       'epoch' : epoch, 'release' : release, 'arch' : arch,})
+                                       
     def package_descriptions(self):
         '''
         List of all package names and descriptions (will not contain package
         version information).
         '''
-        return list(self.objectdb.find({}, {'packageid' : True, 'description' : True,}))
+        return list(self.objectdb.find({}, {'name' : True, 'description' : True,}))
+                                       
+        
+    ###### OLD API ########
+    # TODO: Remove
+    #def create(self, id, name):
+        #"""
+        #Create a new Package object and return it
+        #"""
+        #p = model.Package(id, name)
+        #self.objectdb.insert(p)
+        #return p
+        
+    #def package(self, id, filter=None):
+        #"""
+        #Return a single Package object
+        #"""
+        #return self.objectdb.find_one({'packageid': id})
+
+    #def packages(self):
+        #"""
+        #List all packages.  Can be quite large
+        #"""
+        #return list(self.objectdb.find())
+        
+    #def package_descriptions(self):
+        #'''
+        #List of all package names and descriptions (will not contain package
+        #version information).
+        #'''
+        #return list(self.objectdb.find({}, {'packageid' : True, 'description' : True,}))
diff --git a/src/pulp/api/package_version.py b/src/pulp/api/package_version.py
index d6f5286..faf312c 100644
--- a/src/pulp/api/package_version.py
+++ b/src/pulp/api/package_version.py
@@ -23,29 +23,62 @@ class PackageVersionApi(BaseApi):
 
     def __init__(self, config):
         BaseApi.__init__(self, config)
+        self.objectdb.ensure_index([('name', pymongo.DESCENDING), 
+            ('epoch', pymongo.DESCENDING), 
+            ('version', pymongo.DESCENDING),
+            ('release', pymongo.DESCENDING),
+            ('arch', pymongo.DESCENDING), 
+            ('filename', pymongo.DESCENDING),
+            ('checksum', pymongo.DESCENDING)], 
+            unique=True, background=True)
 
     def _get_unique_indexes(self):
         return []
 
     def _get_indexes(self):
-        return ["packageid"]
+        return ["name", "filename", "checksum", "epoch", "version", "release",
+                "arch", "description"]
 
     def _getcollection(self):
         return self.db.packageversions
 
-    def create(self, packageid, epoch, version, release, arch):
+    def create(self, name, epoch, version, release, arch, description, 
+            checksum_type, checksum, filename):
         """
         Create a new PackageVersion object and return it
         """
-        pv = model.PackageVersion(packageid, epoch, version, release, arch)
+        pv = model.PackageVersion(name, epoch, version, release, arch, description,
+                checksum_type, checksum, filename)
         self.insert(pv)
         return pv
-        
-    def packageversion(self, id, filter=None):
+
+    def delete(self, object):
+        """
+        Delete package version object based on "_id" key
+        """
+        self.objectdb.remove({"_id":object["_id"]})
+
+    def packageversion(self, name=None, epoch=None, version=None, release=None, arch=None, 
+            filename=None, checksum_type=None, checksum=None):
         """
-        Return a single PackageVersion object
+        Return a list of all package version objects matching search terms
         """
-        return self.objectdb.find_one({'id': id})
+        searchDict = {}
+        if name:
+            searchDict['name'] = name
+        if epoch:
+            searchDict['epoch'] = epoch
+        if version:
+            searchDict['version'] = version
+        if release:
+            searchDict['release'] = release
+        if arch:
+            searchDict['arch'] = arch
+        if filename:
+            searchDict['filename'] = filename
+        if checksum_type and checksum:
+            searchDict['checksum.%s' % checksum_type] = checksum
+        return self.objectdb.find(searchDict)
 
     def packageversions(self):
         """
@@ -59,3 +92,11 @@ class PackageVersionApi(BaseApi):
         """
         return self.objectdb.find_one({'packageid' : package_id, 'version' : version,
                                        'epoch' : epoch, 'release' : release, 'arch' : arch,})
+                                       
+    def package_descriptions(self):
+        '''
+        List of all package names and descriptions (will not contain package
+        version information).
+        '''
+        return list(self.objectdb.find({}, {'packageid' : True, 'description' : True,}))
+                                       
diff --git a/src/pulp/api/repo.py b/src/pulp/api/repo.py
index 6d39d39..d4787f2 100644
--- a/src/pulp/api/repo.py
+++ b/src/pulp/api/repo.py
@@ -30,7 +30,6 @@ from pulp import model
 from pulp import repo_sync, upload
 from pulp.api.base import BaseApi
 from pulp.api.package import PackageApi
-from pulp.api.package_version import PackageVersionApi
 from pulp.api.package_group import PackageGroupApi
 from pulp.api.package_group_category import PackageGroupCategoryApi
 from pulp.pexceptions import PulpException
@@ -46,13 +45,19 @@ class RepoApi(BaseApi):
     def __init__(self, config):
         BaseApi.__init__(self, config)
 
+        #self.packageApi = PackageApi(config)
         self.packageApi = PackageApi(config)
-        self.packageVersionApi = PackageVersionApi(config)
         self.packageGroupApi = PackageGroupApi(config)
         self.packageGroupCategoryApi = PackageGroupCategoryApi(config)
 
         # TODO: Extract this to a config
         self.localStoragePath = config.get('paths', 'local_storage')
+   
+    def _get_indexes(self):
+        return ["packages", "packagegroups", "packagegroupcategories"]
+
+    def _get_unique_indexes(self):
+        return ["id"]
 
     def _getcollection(self):
         return self.db.repos
@@ -69,13 +74,112 @@ class RepoApi(BaseApi):
         """
         return self.objectdb.find_one({'id': id})
         
-    def packages(self, id):
+    def packages(self, id, name=None):
         """
         Return list of Package objects in this Repo
         """
         repo = self.repository(id)
-        return repo['packages']
+        if (repo == None):
+            raise PulpException("No Repo with id: %s found" % id)
+        if (name == None):
+            return repo['packages']
+        else:
+            matches = []
+            packages = repo['packages']
+            for packageid in packages.keys():
+                if (packageid.index(name) >= 0):
+                    matches.append(packages[packageid])
+            return matches
     
+    def add_package(self, repoid, p):
+        """
+        Adds the passed in package to this repo
+        """
+        repo = self.repository(repoid)
+        if (repo == None):
+            raise PulpException("No Repo with id: %s found" % repoid)
+        # TODO:  We might want to restrict Packages we add to only
+        #        allow 1 NEVRA per repo and require filename to be unique
+        packages = repo['packages']
+        if (packages.has_key(p['id'])):
+            # No need to update repo, this Package is already under this repo
+            return
+        # Note:  A DBRef() for the objects '_id' is what's added in mongo
+        #        This is a reference to the Package collection's object
+        packages[p['id']] = p
+        self.update(repo)
+
+    def remove_package(self, repoid, p):
+        repo = self.repository(repoid)
+        if (repo == None):
+            raise PulpException("No Repo with id: %s found" % repoid)
+        del repo["packages"][p['id']]
+        self.update(repo)
+
+    def remove_packagegroup(self, repoid, groupid):
+        """
+        Remove a packagegroup from a repo
+        """
+        repo = self.repository(repoid)
+        if (repo == None):
+            raise PulpException("No Repo with id: %s found" % repoid)
+        if repo['packagegroups'].has_key(groupid):
+            del repo['packagegroups'][groupid]
+        self.update(repo)
+
+    def update_packagegroup(self, repoid, pg):
+        """
+        Save the passed in PackageGroup to this repo
+        """
+        repo = self.repository(repoid)
+        if (repo == None):
+            raise PulpException("No Repo with id: %s found" % repoid)
+        repo['packagegroups'][pg['id']] = pg
+        self.update(repo)
+
+    def update_packagegroups(self, repoid, pglist):
+        """
+        Save the list of passed in PackageGroup objects to this repo
+        """
+        repo = self.repository(repoid)
+        if (repo == None):
+            raise PulpException("No Repo with id: %s found" % repoid)
+        for item in pglist:
+            repo['packagegroups'][item['id']] = item
+        self.update(repo)
+
+    def translate_packagegroup(self, obj):
+        """
+        Translate a SON Document to an object that yum.comps.Comps can work with
+        """
+        # Main reason for doing this is that yum.comps expects the passed in 
+        # object to support dot notation references, the returned SON document
+        # does not support this, so yum.comps isn't able to read the info 
+        #TODO: More work is needed in this method before output of groups will work
+        pg = model.PackageGroup(obj['id'], obj['name'], obj['description'], 
+                user_visible=obj['user_visible'], display_order=obj['display_order'],
+                default=obj['default'], langonly=obj['langonly'])
+        pg.groupid = obj['id']  
+        pg.translated_name = {}
+        for key in obj['translated_name']:
+            pg.translated_name[key] = obj['translated_name'][key]
+        pg.translated_description = {}
+        for key in obj['translated_description']:
+            pg.translated_description[key] = obj['translated_description']
+        pg.mandatory_packages = {}
+        for pkgname in obj['mandatory_package_names']:
+            pg.mandatory_packages[pkgname] = 1 
+        pg.optional_packages = {}
+        for pkgname in obj['optional_package_names']:
+            pg.optional_packages[pkgname] = 1
+        pg.default_packages = {}
+        for pkgname in obj['default_package_names']:
+            pg.default_packages[pkgname] = 1
+        pg.conditional_packages = {}
+        for key in obj['conditional_package_names']:
+            pg.conditional_packages[key] = obj['conditional_package_names'][key]
+        return pg
+
     def packagegroups(self, id):
         """
         Return list of PackageGroup objects in this Repo
@@ -83,13 +187,98 @@ class RepoApi(BaseApi):
         repo = self.repository(id)
         return repo['packagegroups']
     
+    def packagegroup(self, repoid, groupid):
+        """
+        Return a PackageGroup from this Repo
+        """
+        repo = self.repository(repoid)
+        if not repo['packagegroups'].has_key(groupid):
+            return None
+        return repo['packagegroups'][groupid]
+
+    def remove_packagegroupcategory(self, repoid, categoryid):
+        """
+        Remove a packagegroupcategory from a repo
+        """
+        repo = self.repository(repoid)
+        if (repo == None):
+            raise PulpException("No Repo with id: %s found" % repoid)
+        if repo['packagegroupcategories'].has_key(categoryid):
+            del repo['packagegroupcategories'][categoryid]
+        self.update(repo)
+    
+    def update_packagegroupcategory(self, repoid, pgc):
+        """
+        Save the passed in PackageGroupCategory to this repo
+        """
+        repo = self.repository(repoid)
+        if (repo == None):
+            raise PulpException("No Repo with id: %s found" % repoid)
+        repo['packagegroupcategories'][pgc['id']] = pgc
+        self.update(repo)
+    
+    def update_packagegroupcategories(self, repoid, pgclist):
+        """
+        Save the list of passed in PackageGroupCategory objects to this repo
+        """
+        repo = self.repository(repoid)
+        if (repo == None):
+            raise PulpException("No Repo with id: %s found" % repoid)
+        for item in pgclist:
+            repo['packagegroupcategories'][item['id']] = item
+        self.update(repo)
+
+    def translate_packagegroupcategory(self, obj):
+        """
+        Translate a SON Document to an object that yum.comps.Comps can work with
+        """
+        #TODO: More work is needed in this method before output of categories will work
+        pgc = model.PackageGroupCategory(obj['id'], obj['name'], obj['description'], 
+                display_order=obj['display_order'])
+        pgc.categoryid = obj['id']
+        pgc.translated_name = {}
+        for key in obj['translated_name']:
+            pgc.translated_name[key] = obj['translated_name'][key]
+        pgc.translated_description = {}
+        for key in obj['translated_description']:
+            pgc.translated_description[key] = obj['translated_description'][key]
+        pgc._groups = {}
+        for groupid in obj['packagegroupids']:
+            pgc._groups[groupid] = groupid
+        return pgc
+
+    def packagegroups(self, id):
+        """
+        Return list of PackageGroup objects in this Repo
+        """
+        repo = self.repository(id)
+        return repo['packagegroups']
+
+    def packagegroup(self, repoid, groupid):
+        """
+        Return a PackageGroup from this Repo
+        """
+        repo = self.repository(repoid)
+        if not repo['packagegroups'].has_key(groupid):
+            return None
+        return repo['packagegroups'][groupid]
+
     def packagegroupcategories(self, id):
         """
         Return list of PackageGroupCategory objects in this Repo
         """
         repo = self.repository(id)
         return repo['packagegroupcategories']
-    
+
+    def packagegroupcategory(self, repoid, categoryid):
+        """
+        Return a PackageGroupCategory object from this Repo
+        """
+        repo = self.repository(repoid)
+        if not repo['packagegroupcategories'].has_key(categoryid):
+            return None
+        return repo['packagegroupcategories'][categoryid]
+
     def create(self, id, name, arch, feed):
         """
         Create a new Repository object and return it
diff --git a/src/pulp/model.py b/src/pulp/model.py
index a77a87b..38acee3 100644
--- a/src/pulp/model.py
+++ b/src/pulp/model.py
@@ -29,6 +29,7 @@ class Base(dict):
 class Repo(Base):
     def __init__(self, id, name, arch, source):
         self._id = id
+        self.id = id
         self.source = source
         self.repo_source = RepoSource(source)
         self.id = id
@@ -63,35 +64,31 @@ class RepoSource(Base):
         self.type = parts[0]
         self.url = source.replace((self.type + ":"), "")
 
-
 class Package(Base):
-    def __init__(self, packageid, description):
-        #TODO: move 'description' to PackageVersion
-        #TODO: Consider getting rid of 'package', we might not need it
-        self._id = packageid
-        self.packageid = packageid
-        self.description = description
-        self.versions = []
-
-class PackageVersion(Base):
-    def __init__(self, packageid, epoch, version, release, arch):
-        self._id = str((packageid, epoch, version, release, arch))
-        self.packageid = packageid
+    def __init__(self, name, epoch, version, release, arch, description, 
+            checksum_type, checksum, filename):
+        self._id = str((name, epoch, version, release, arch))
+        self.id = self._id
+        self.name = name
         self.epoch = epoch
         self.version = version
         self.release = release
         self.arch = arch
-        #TODO: add support for 'filename' and 'checksum' to constructor, apis, and tests
-        #self.filename = ""
-        #self.checksum = {}
+        self.description = description
+        self.filename = filename
+        self.checksum = {checksum_type: checksum}
+        # Add gpg keys
         self.requires = []
         self.provides = []
 
 class PackageGroup(Base):
-    def __init__(self, groupid, name, description, user_visible=False, 
+    """
+    Class represents a yum.comps.Group
+    """
+    def __init__(self, id, name, description, user_visible=False, 
             display_order=1024, default=True, langonly=None):
-        self._id = groupid
-        self.groupid = groupid
+        self._id = id
+        self.id = id
         self.name = name
         self.description = description
         self.user_visible = user_visible
@@ -106,9 +103,10 @@ class PackageGroup(Base):
         self.translated_description = {}
 
 class PackageGroupCategory(Base):
-    def __init__(self, categoryid, name, description, display_order=99):
-        self._id = categoryid
-        self.categoryid = categoryid
+
+    def __init__(self, id, name, description, display_order=99):
+        self._id = id
+        self.id = id
         self.name = name
         self.description = description
         self.display_order = display_order
diff --git a/src/pulp/repo_sync.py b/src/pulp/repo_sync.py
index f5264ea..5d51403 100644
--- a/src/pulp/repo_sync.py
+++ b/src/pulp/repo_sync.py
@@ -17,6 +17,7 @@
 import gzip
 import logging
 import os
+import time
 import traceback
 from urlparse import urlparse
 
@@ -27,7 +28,6 @@ from grinder.RepoFetch import YumRepoGrinder
 from grinder.RHNSync import RHNSync
 from pulp import model
 from pulp.api.package import PackageApi
-from pulp.api.package_version import PackageVersionApi
 from pulp.api.package_group import PackageGroupApi
 from pulp.api.package_group_category import PackageGroupCategoryApi
 from pulp.pexceptions import PulpException
@@ -55,99 +55,91 @@ class BaseSynchronizer(object):
     def __init__(self, config):
         self.config = config
         self.package_api = PackageApi(config)
-        self.package_version_api = PackageVersionApi(config)
         self.package_group_category_api = PackageGroupCategoryApi(config)
         self.package_group_api = PackageGroupApi(config)
 
     def add_packages_from_dir(self, dir, repo):
-
         dir_list = os.listdir(dir)
         package_count = 0
+        startTime = time.time()
         for fname in dir_list:
             self.import_package(dir + fname, repo)
             package_count = package_count + 1
-        log.debug("read [%s] packages" % package_count)
-        self._read_comps_xml(dir, repo)
+        endTime = time.time()
+        log.debug("Repo: %s read [%s] packages took %s seconds" % 
+                (repo['id'], package_count, endTime - startTime))
+        # TODO: Parse repomd.xml and lookup name for groups element
+        compsfile = None
+        compspath = os.path.join(dir, 'repodata/comps.xml')
+        if os.path.isfile(compspath):
+            compsfile = open(compspath, "r")
+        else:
+            compspath = os.path.join(dir, 'repodata/comps.xml.gz')
+            if os.path.isfile(compspath):
+                compsfile = gzip.open(compspath, 'r')
+        if compsfile:
+            repo['comps_xml_path'] = compspath
+            self.import_groups_data(compsfile, repo)
+            log.debug("Loaded comps info from %s" % (compspath))
 
     def import_package(self, pkg_path, repo):
-        packages = repo['packages']
         if (pkg_path.endswith(".rpm")):
             try:
+                file_name = os.path.basename(pkg_path)
                 info = pulp.util.get_rpm_information(pkg_path)
-                p = self.package_api.package(info['name'])
-                if not p:
-                    p = self.package_api.create(info['name'], info['description'])
-
-                pv = self.package_version_api.packageversion_by_ivera(p['packageid'],
-                                                                      info['version'],
-                                                                      info['epoch'],
-                                                                      info['release'],
-                                                                      info['arch'],)
-                if not pv:
-                    pv = self.package_version_api.create(p["packageid"], info['epoch'], 
-                                              info['version'], info['release'], info['arch'])
+                if not repo["packages"].has_key(info['name']):
+                    repo["packages"][info['name']] = []
+                hashtype = "sha256"
+                checksum = pulp.util.getFileChecksum(hashtype=hashtype, 
+                        filename=pkg_path)
+                found = self.package_api.package(name=info['name'], 
+                        epoch=info['epoch'], version=info['version'], 
+                        release=info['release'], arch=info['arch'],filename=file_name, 
+                        checksum_type=hashtype, checksum=checksum)
+                if found.count() == 1:
+                    pv = found[0]
+                else:
+                    pv = self.package_api.create(info['name'], info['epoch'],
+                        info['version'], info['release'], info['arch'], info['description'],
+                        "sha256", checksum, file_name)
                     for dep in info['requires']:
                         pv.requires.append(dep)
                     for dep in info['provides']:
                         pv.provides.append(dep)
-                    self.package_version_api.update(pv)
-
-                p["versions"].append(pv)
-                self.package_api.update(p)
-                packages[p["packageid"]] = p
-                log.debug("Repo <%s> added package <%s> with %s versions" %
-                          (repo["id"], p["packageid"], len(p["versions"])))
+                    self.package_api.update(pv)
+                #TODO:  Ensure we don't add duplicate pv's to the 'packages' list
+                repo['packages'][info['name']].append(pv)
             except Exception, e:
-                log.debug("Exception = %s" % (traceback.format_exc()))
+                log.debug("%s" % (traceback.format_exc()))
                 log.error("error reading package %s" % (pkg_path))
 
-    def _read_comps_xml(self, dir, repo):
+    def import_groups_data(self, compsfile, repo):
         """
         Reads a comps.xml or comps.xml.gz under repodata from dir
         Loads PackageGroup and Category info our db
         """
-
-        compspath = os.path.join(dir, 'repodata/comps.xml')
-        compsxml = None
-        if os.path.isfile(compspath):
-            compsxml = open(compspath, "r")
-        else:
-            compspath = os.path.join(dir, 'repodata/comps.xml.gz')
-            if os.path.isfile(compspath):
-                compsxml = gzip.open(compspath, 'r')
-    
-        if not compsxml:
-            log.info("Not able to find a comps.xml(.gz) to read")
-            return False
-
-        log.info("Reading comps info from %s" % (compspath))
-        repo['comps_xml_path'] = compspath
         try:
             comps = yum.comps.Comps()
-            comps.add(compsxml)
+            comps.add(compsfile)
             for c in comps.categories:
-                ctg = self.package_group_category_api.create(c.categoryid, c.name,
-                                                          c.description, c.display_order)
+                ctg = model.PackageGroupCategory(c.categoryid, c.name,
+                    c.description, c.display_order)
                 groupids = [grp for grp in c.groups]
-                ctg.packagegroupids.extend(groupids)
-                ctg.translated_name = c.translated_name
-                ctg.translated_description = c.translated_description
-                self.package_group_category_api.update(ctg)
-                repo['packagegroupcategories'][ctg.categoryid] = ctg
-
+                ctg['packagegroupids'].extend(groupids)
+                ctg['translated_name'] = c.translated_name
+                ctg['translated_description'] = c.translated_description
+                repo['packagegroupcategories'][ctg['id']] = ctg
             for g in comps.groups:
-                grp = self.package_group_api.create(g.groupid, g.name, g.description,
-                                              g.user_visible, g.display_order, g.default, g.langonly)
+                grp = model.PackageGroup(g.groupid, g.name, g.description,
+                    g.user_visible, g.display_order, g.default, g.langonly)
                 grp.mandatory_package_names.extend(g.mandatory_packages.keys())
                 grp.optional_package_names.extend(g.optional_packages.keys())
                 grp.default_package_names.extend(g.default_packages.keys())
                 grp.conditional_package_names = g.conditional_packages
                 grp.translated_name = g.translated_name
                 grp.translated_description = g.translated_description
-                self.package_group_api.update(grp)
-                repo['packagegroups'][grp.groupid] = grp
-            log.info("Comps info added from %s" % (compspath))
-        except yum.comps.CompsException:
+                repo['packagegroups'][grp['id']] = grp
+        except yum.Errors.CompsException:
             log.error("Unable to parse comps info for %s" % (compspath))
             return False
         return True
diff --git a/src/pulp/util.py b/src/pulp/util.py
index aa33ca3..d19e01b 100644
--- a/src/pulp/util.py
+++ b/src/pulp/util.py
@@ -28,6 +28,7 @@ try:
 except:
     print "Please install python-hashlib"
     sys.exit(1)
+import base64
 
 log = logging.getLogger("pulp.util")
 
diff --git a/src/pulptools/connection.py b/src/pulptools/connection.py
index 710b4d5..fd1dc48 100644
--- a/src/pulptools/connection.py
+++ b/src/pulptools/connection.py
@@ -221,18 +221,10 @@ class PackageConnection(PulpConnection):
         method = "/packages/%s/" % packageid
         return self.conn.request_delete(method)
 
-
-class PackageVersionConnection(PulpConnection):
-
-    def clean(self):
-        method = "/packages/"
-        return self.conn.request_delete(method)
-
-    def packageversion_by_ivera(self, name, version, release, epoch, arch):
+    def package_by_ivera(self, name, version, release, epoch, arch):
         method = "/packages/%s/%s/%s/%s/%s/" % (name, version, release, epoch, arch)
         return self.conn.request_get(method)
 
-
 class PackageGroupConnection(PulpConnection):
 
     def clean(self):
diff --git a/test/common/large_load.py b/test/common/large_load.py
index 413cd0d..5ab9fc6 100644
--- a/test/common/large_load.py
+++ b/test/common/large_load.py
@@ -25,13 +25,15 @@ import fileinput
 import random
 
 
-sys.path.append("../src")
+srcdir = os.path.abspath(os.path.dirname(__file__)) + "/../../src"
+sys.path.insert(0, srcdir)
+
 from pulp.api.repo import RepoApi
 from pulp.api.package import PackageApi
 from pulp.api.consumer import ConsumerApi
 from pulp.model import Package
 from pulp.model import Consumer
-from pulp.util import randomString
+from pulp.util import random_string
 import pulp.util
 
 TEST_PACKAGE_ID = 'random-package'
@@ -87,9 +89,9 @@ class LargeLoad(unittest.TestCase):
             # self.capi.update(c)
             if (i % 100 == 0):
                 print "created [%s] consumers" % i
-                p = Package(TEST_PACKAGE_ID, 'random package to be found')
+                p = Package(repo["id"], TEST_PACKAGE_ID, 'random package to be found')
                 c.packageids.append(p.id)
-                # self.capi.update(c)
+                #self.capi.update(c)
             last_desc = c.description
             last_id = c.id
             consumers.append(c)
@@ -117,6 +119,7 @@ class LargeLoad(unittest.TestCase):
         cwithp = ll.capi.consumerswithpackage(TEST_PACKAGE_ID)
         print "Found [%s] consumers with packageid: [%s]" % (len(cwithp), TEST_PACKAGE_ID)
 
+
 parser = optparse.OptionParser()
 parser.add_option('--dirlist', dest='dirlist', 
                  action='store', help='File containing list of directories containing the repos you wish to use for this test')
diff --git a/test/rhn/test_rhn_sync.py b/test/rhn/test_rhn_sync.py
index 649f07f..86531b3 100644
--- a/test/rhn/test_rhn_sync.py
+++ b/test/rhn/test_rhn_sync.py
@@ -23,7 +23,6 @@ import unittest
 # Pulp
 import pulp.api.repo
 import pulp.api.package
-import pulp.api.package_version
 
 srcdir = os.path.abspath(os.path.dirname(__file__)) + '/../common'
 sys.path.append(srcdir)
@@ -38,8 +37,6 @@ class TestRhnSync(unittest.TestCase):
     def clean(self):
         self.rapi.clean()
         self.papi.clean()
-        self.pvapi.clean()
-
         dest_dir = '%s/%s/' % (self.config.get('paths', 'local_storage'), REPO_ID)
         if os.path.exists(dest_dir):
             shutil.rmtree(dest_dir)
@@ -49,7 +46,6 @@ class TestRhnSync(unittest.TestCase):
 
         self.rapi = pulp.api.repo.RepoApi(self.config)
         self.papi = pulp.api.package.PackageApi(self.config)
-        self.pvapi = pulp.api.package_version.PackageVersionApi(self.config)
 
         self.clean()
         
diff --git a/test/unit/data/rhel-i386-server-5/comps.xml b/test/unit/data/rhel-i386-server-5/comps.xml
new file mode 100644
index 0000000..922a106
--- /dev/null
diff --git a/test/unit/test_api.py b/test/unit/test_api.py
index 3853ab6..c06849a 100644
--- a/test/unit/test_api.py
+++ b/test/unit/test_api.py
@@ -39,7 +39,6 @@ from pulp.api.consumer import ConsumerApi
 from pulp.api.package import PackageApi
 from pulp.api.package_group import PackageGroupApi
 from pulp.api.package_group_category import PackageGroupCategoryApi
-from pulp.api.package_version import PackageVersionApi
 from pulp.api.repo import RepoApi
 
 from pulp.model import Package
@@ -70,7 +69,6 @@ class TestApi(unittest.TestCase):
         self.rapi.clean()
         self.papi.clean()
         self.capi.clean()
-        self.pvapi.clean()
         self.pgapi.clean()
         self.pgcapi.clean()
         
@@ -79,13 +77,13 @@ class TestApi(unittest.TestCase):
         self.rapi = RepoApi(self.config)
         self.papi = PackageApi(self.config)
         self.capi = ConsumerApi(self.config)
-        self.pvapi = PackageVersionApi(self.config)
         self.pgapi = PackageGroupApi(self.config)
         self.pgcapi = PackageGroupCategoryApi(self.config)
         self.clean()
         
     def tearDown(self):
-        self.clean()
+        # self.clean()
+        pass
         
     def test_create(self):
         repo = self.rapi.create('some-id','some name', 
@@ -170,58 +168,51 @@ class TestApi(unittest.TestCase):
     def test_repo_packages(self):
         repo = self.rapi.create('some-id','some name', \
             'i386', 'yum:http://example.com')
-        package = Package('test_repo_packages','test package')
-        repo['packages'][package["packageid"]] = package
-        self.rapi.update(repo)
+        p = self.create_package('test_repo_packages')
+        self.rapi.add_package(repo["id"], p)
+        for i in range(10):
+            package = self.create_package(random_string())
+            self.rapi.add_package(repo["id"], package)
         
         found = self.rapi.repository('some-id')
         packages = found['packages']
         assert(packages != None)
-        assert(packages['test_repo_packages'] != None)
+        assert(packages[p['id']] != None)
     
     def test_repo_package_groups(self):
         repo = self.rapi.create('some-id','some name', \
             'i386', 'yum:http://example.com')
         pkggroup = PackageGroup('test-group-id', 'test-group-name', 
                 'test-group-description')
-        package = Package('test_repo_packages','test package')
-        pkggroup.default_package_names.append(package["packageid"])
-        repo['packagegroups'][pkggroup["groupid"]] = pkggroup
-        repo['packages'][package["packageid"]] = package
+        package = self.create_package('test_repo_packages')
+        pkggroup.default_package_names.append(package["id"])
+        repo['packagegroups'][pkggroup["id"]] = pkggroup
+        repo['packages'][package["id"]] = package
+        
         self.rapi.update(repo)
         
         found = self.rapi.repository('some-id')
-        packages = found['packages']
-        assert(packages != None)
-        assert(packages['test_repo_packages'] != None)
         assert(found['packagegroups'] != None)
-        print "test_repo_package_groups found['packagegroups'] = %s" % (found['packagegroups'])
-        assert(pkggroup.groupid in found['packagegroups'])
+        assert(pkggroup['id'] in found['packagegroups'])
     
     def test_repo_package_group_categories(self):
-        repo = self.rapi.create('some-id','some name', \
+        repo = self.rapi.create('some-id_pkg_group_categories','some name', \
             'i386', 'yum:http://example.com')
-        package = Package('test_repo_packages','test package')
         pkggroup = PackageGroup('test-group-id', 'test-group-name', 
                 'test-group-description')
-        pkggroup.default_package_names.append(package["packageid"])
+        pkggroup.default_package_names.append("test-package-name")
         ctg = PackageGroupCategory('test-group-cat-id', 'test-group-cat-name',
                 'test-group-cat-description')
         ctg.packagegroupids = pkggroup.id
-        repo['packagegroupcategories'][ctg.categoryid] = ctg
-        repo['packagegroups'][pkggroup.groupid] = pkggroup
-        repo['packages'][package["packageid"]] = package
+        repo['packagegroupcategories'][ctg.id] = ctg
+        repo['packagegroups'][pkggroup.id] = pkggroup
         self.rapi.update(repo)
         
-        found = self.rapi.repository('some-id')
-        packages = found['packages']
-        assert(packages != None)
-        assert(packages['test_repo_packages'] != None)
+        found = self.rapi.repository('some-id_pkg_group_categories')
         assert(found['packagegroups'] != None)
-        print "test_repo_package_groups found['packagegroups'] = %s" % (found['packagegroups'])
-        assert(pkggroup.groupid in found['packagegroups'])
+        assert(pkggroup['id'] in found['packagegroups'])
         assert(found['packagegroupcategories'] != None)
-        assert(ctg.categoryid in found['packagegroupcategories'])
+        assert(ctg['id'] in found['packagegroupcategories'])
     
     def test_consumer_create(self):
         c = self.capi.create('test-consumer', 'some consumer desc')
@@ -254,17 +245,22 @@ class TestApi(unittest.TestCase):
             
     def test_consumerwithpackage(self):
         c = self.capi.create('test-consumer', 'some consumer desc')
-        package = Package('test_consumerwithpackage','test package search')
-        c['packageids'].append(package["packageid"])
+        repo = self.rapi.create('some-id', 'some name',
+                'i386', 'yum:http://example.com')
+        test_pkg_name = "test_consumerwithpackage"
+        #TODO: The consumer model/api needs to be updated, it's not setup to handle
+        #       tracking a package
+        package = self.create_package(test_pkg_name)
+        c['packageids'].append(package["id"])
         for i in range(10):
-            package = Package(random_string(), random_string())
-            c['packageids'].append(package["packageid"])
+            package = self.create_package(random_string())
+            c['packageids'].append(package["id"])
         self.capi.update(c)
         
-        found = self.capi.consumerswithpackage('some-invalid-id')
+        found = self.capi.consumers_with_package_name('some-invalid-id')
         assert(len(found) == 0)
 
-        found = self.capi.consumerswithpackage('test_consumerwithpackage')
+        found = self.capi.consumers_with_package_name('test_consumerwithpackage')
         assert(len(found) > 0)
         
     def test_json(self):
@@ -276,7 +272,7 @@ class TestApi(unittest.TestCase):
         assert(parsed != None)
         print parsed
     
-    def test_sync_two_repos_share_common_package(self):
+    def test_sync_two_repos_same_nevra_different_checksum(self):
         """
         Sync 2 repos that have a package with same NEVRA 
         but different checksum
@@ -292,10 +288,9 @@ class TestApi(unittest.TestCase):
         repo_a = self.rapi.create(repo_name_a,'some name', 'x86_64', 
                                   'local:file://%s' % datadir_a)
         repo_b = self.rapi.create(repo_name_b,'some name', 'x86_64', 
-                                  'local:file://%s' % datadir_b)
-        self.rapi.sync(repo_a.id)
-        self.rapi.sync(repo_b.id)
-
+                                'local:file://%s' % datadir_b)
+        self.rapi.sync(repo_a["id"])
+        self.rapi.sync(repo_b["id"])
         # Look up each repo from API
         found_a = self.rapi.repository(repo_a.id)
         found_b = self.rapi.repository(repo_b.id)
@@ -305,17 +300,16 @@ class TestApi(unittest.TestCase):
         assert (found_b["packages"].has_key(test_pkg_name))
 
         # Grab the associated package version (there should only be 1)
-        # Ensure that the package versions have different md5sums, but all other
+        # Ensure that the package versions have different checksums, but all other
         # keys are identical
-        assert (len(found_a["packages"][test_pkg_name]["versions"]) == 1)
-        assert (len(found_b["packages"][test_pkg_name]["versions"]) == 1)
-        pkgVerA = found_a["packages"][test_pkg_name]["versions"][0]
-        pkgVerB = found_a["packages"][test_pkg_name]["versions"][0]
-        for key in ['epoch', 'version', 'release', 'arch']:
+        assert (len(found_a["packages"][test_pkg_name]) == 1)
+        assert (len(found_b["packages"][test_pkg_name]) == 1)
+        pkgVerA = found_a["packages"][test_pkg_name][0]
+        pkgVerB = found_b["packages"][test_pkg_name][0]
+        for key in ['epoch', 'version', 'release', 'arch', 'filename', 'name']:
             assert (pkgVerA[key] == pkgVerB[key])
-
         #TODO:
-        # Add test to compare checksum when it's implemented in PackageVersion
+        # Add test to compare checksum when it's implemented in Package
         # verify the checksums are different
 
     def test_sync_two_repos_share_common_package(self):
@@ -323,12 +317,12 @@ class TestApi(unittest.TestCase):
         Sync 2 repos that share a common package, same NEVRA
         same checksum
         """
-        test_pkg_name = "pulp-test-package-same-nevra"
+        test_pkg_name = "pulp-test-package"
         my_dir = os.path.abspath(os.path.dirname(__file__))
         repo_name_a = "test_two_repos_share_common_pkg_repo_A"
         repo_name_b = "test_two_repos_share_common_pkg_repo_B"
-        datadir_a = my_dir + "/data/sameNEVRA_differentChecksums/A/repo/"
-        datadir_b = my_dir + "/data/sameNEVRA_differentChecksums/B/repo/"
+        datadir_a = my_dir + "/data/sameNEVRA_sameChecksums/A/repo/"
+        datadir_b = my_dir + "/data/sameNEVRA_sameChecksums/B/repo/"
         # Create & Sync Repos
         repo_a = self.rapi.create(repo_name_a,'some name', 'x86_64', 
                                 'local:file://%s' % datadir_a)
@@ -346,14 +340,14 @@ class TestApi(unittest.TestCase):
         # Ensure that the package versions have different md5sums, but all other
         # keys are identical
 
-        # BELOW TEST Needs more changes to model/sync code before it can pass
-        #assert (len(found_a["packages"][test_pkg_name]["versions"]) == 1)
-        #assert (len(found_b["packages"][test_pkg_name]["versions"]) == 1)
-        #pkgVerA = found_a["packages"][test_pkg_name]["versions"][0]
-        #pkgVerB = found_a["packages"][test_pkg_name]["versions"][0]
-        # Ensure that the 2 PackageVersions instances actually point 
+        assert (len(found_a["packages"][test_pkg_name]) == 1)
+        assert (len(found_b["packages"][test_pkg_name]) == 1)
+        pkgVerA = found_a["packages"][test_pkg_name][0]
+        pkgVerB = found_b["packages"][test_pkg_name][0]
+        # Ensure that the 2 Package instances actually point 
         # to the same single instance
-        #assert(pkgVerA['_id'] == pkgVerB['_id'])
+        assert(repo_a['_id'] != repo_b['_id'])
+        assert(pkgVerA['_id'] == pkgVerB['_id'])
     
     def test_sync(self):
         repo = self.rapi.create('some-id','some name', 'i386', 
@@ -372,6 +366,7 @@ class TestApi(unittest.TestCase):
         assert(len(dirList) > 0)
         found = self.rapi.repository(repo['id'])
         packages = found['packages']
+        print "packages = ", packages
         assert(packages != None)
         assert(len(packages) > 0)
         
@@ -388,27 +383,85 @@ class TestApi(unittest.TestCase):
         assert(len(packages) > 0)
         print packages
         p = packages.values()[0]
-        assert(p['versions'] != None)
+        assert(p != None)
         # versions = p['versions']
         
-    def test_package_versions(self):
-        p = self.papi.create('some-package-id', 'some package desc')
-        pv = self.pvapi.create(p.packageid, 0, '1.2.3', '1', 'i386')
-        p.versions.append(pv)
-        self.papi.update(p)
-        
-        found = self.papi.package(p.packageid)
-        versions = found['versions']
-        assert(versions != None)
-        assert(versions[0]['packageid'] == p.packageid)
-        print found
+    def create_package(self, name): 
+        test_pkg_name = name
+        test_epoch = "1"
+        test_version = "1.2.3"
+        test_release = "1.el5"
+        test_arch = "x86_64"
+        test_description = "test description text"
+        test_checksum_type = "sha256"
+        test_checksum = "9d05cc3dbdc94150966f66d76488a3ed34811226735e56dc3e7a721de194b42e"
+        test_filename = "test-filename-1.2.3-1.el5.x86_64.rpm"
+        p = self.papi.create(name=test_pkg_name, epoch=test_epoch, version=test_version, 
+                release=test_release, arch=test_arch, description=test_description, 
+                checksum_type="sha256", checksum=test_checksum, filename=test_filename)
+        return p
         
-    def test_packages(self):
-        p = self.papi.create('some-package-id', 'some package desc')
-        packages = self.papi.packages()
-        print "packages: %s" % packages
-        assert(len(packages) > 0)
-    
+    def test_package_versions(self):
+        repo = self.rapi.create('some-id','some name',
+            'i386', 'yum:http://example.com')
+        repo = self.rapi.repository(repo["id"])
+        test_pkg_name = "test_package_versions_name"
+        test_epoch = "1"
+        test_version = "1.2.3"
+        test_release = "1.el5"
+        test_arch = "x86_64"
+        test_description = "test description text"
+        test_checksum_type = "sha256"
+        test_checksum = "9d05cc3dbdc94150966f66d76488a3ed34811226735e56dc3e7a721de194b42e"
+        test_filename = "test-filename-1.2.3-1.el5.x86_64.rpm"
+        p = self.papi.create(name=test_pkg_name, epoch=test_epoch, version=test_version, 
+                release=test_release, arch=test_arch, description=test_description, 
+                checksum_type="sha256", checksum=test_checksum, filename=test_filename)
+        # Add this package version to the repo
+        self.rapi.add_package(repo["id"], p)
+        # Lookup repo and confirm new package version was added
+        repo = self.rapi.repository(repo["id"])
+        self.assertTrue(repo["packages"].has_key(p['id']))
+        packageid = p['id']
+        self.assertTrue(len(repo["packages"][p['id']]) != None)
+        saved_pkg = repo["packages"][packageid]
+        self.assertTrue(saved_pkg['name'] == test_pkg_name)
+        self.assertTrue(saved_pkg['epoch'] == test_epoch)
+        self.assertTrue(saved_pkg['version'] == test_version)
+        self.assertTrue(saved_pkg['release'] == test_release)
+        self.assertTrue(saved_pkg['arch'] == test_arch)
+        self.assertTrue(saved_pkg['description'] == test_description)
+        self.assertTrue(saved_pkg['checksum'].has_key(test_checksum_type))
+        self.assertTrue(saved_pkg['checksum'][test_checksum_type] == test_checksum)
+        self.assertTrue(saved_pkg['filename'] == test_filename)
+        # Verify we can find this package version through repo api calls
+        pkgs = self.rapi.packages(repo['id'])
+        self.assertTrue(pkgs.has_key(packageid))
+        self.assertTrue(pkgs[packageid] != None)
+        self.assertTrue(pkgs[packageid]['filename'] == test_filename)
+        pkgs = self.rapi.packages(repo['id'], test_pkg_name)
+        self.assertTrue(len(pkgs) == 1)
+        self.assertTrue(pkgs[0]['filename'] == test_filename)
+
+        # Remove package version from repo
+        self.rapi.remove_package(repo['id'], p)
+        repo = self.rapi.repository(repo['id'])
+        self.assertTrue(not repo["packages"].has_key(test_pkg_name))
+        # Verify package version from repo
+        found = self.papi.package(name=test_pkg_name, epoch=test_epoch, 
+                version=test_version, release=test_release, arch=test_arch, 
+                filename=test_filename, checksum_type=test_checksum_type,
+                checksum=test_checksum)
+        self.assertTrue(found.count() == 1)
+        # Remove from Package collection
+        self.papi.delete(found[0])
+        # Verify it's deleted
+        found = self.papi.package(name=test_pkg_name, epoch=test_epoch, 
+                version=test_version, release=test_release, arch=test_arch, 
+                filename=test_filename, checksum_type=test_checksum_type,
+                checksum=test_checksum)
+        self.assertTrue(found.count() == 0)
+
     def test_package_groups(self):
         pkggroup = self.pgapi.create('test-pkg-group-id', 'test-pkg-group-name', 
                 'test-pkg-group-description')
diff --git a/test/unit/test_comps.py b/test/unit/test_comps.py
new file mode 100644
index 0000000..a868805
--- /dev/null
+++ b/test/unit/test_comps.py
@@ -0,0 +1,192 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2010 Red Hat, Inc.
+#
+#
+# This software is licensed to you under the GNU General Public License,
+# version 2 (GPLv2). There is NO WARRANTY for this software, express or
+# implied, including the implied warranties of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
+# along with this software; if not, see
+# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
+#
+# Red Hat trademarks are not licensed under GPLv2. No permission is
+# granted to use or replicate Red Hat trademarks that are incorporated
+# in this software or its documentation.
+#
+import sys
+import os
+srcdir = os.path.abspath(os.path.dirname(__file__)) + "/../../src"
+sys.path.append(srcdir)
+import unittest
+import logging
+
+import yum
+
+import pulp.util
+import pulp.model
+from pulp.api.repo import RepoApi
+from pulp.repo_sync import BaseSynchronizer
+
+class TestComps(unittest.TestCase):
+
+    def setUp(self):
+        config_file = os.path.join(srcdir, "../etc/pulp/pulp.ini")
+        self.config = pulp.util.loadConfig(config_file)
+        self.rapi = RepoApi(self.config)
+        self.rapi.clean()
+
+    def tearDown(self):
+        self.rapi.clean()
+
+
+    def test_import_groups_data(self):
+        repo = self.rapi.create('test_import_groups_data_id',
+                'test_import_groups_data_id', 'i386', 
+                'yum:http://example.com/')
+        # Parse existing comps.xml
+        compspath = "./data/rhel-i386-server-5/comps.xml"
+        compsfile = open(compspath)
+        base = BaseSynchronizer(self.config)
+        base.import_groups_data(compsfile, repo)
+        # 'repo' object should now contain groups/categories
+        # we need to save it to the db so we can query from it
+        self.rapi.update(repo)
+        # Testing for expected values
+        found = self.rapi.packagegroup(repo['id'], "web-server")
+        self.assertTrue(found != None)
+        self.assertTrue("httpd" in found['mandatory_package_names'])
+        self.assertTrue("mod_auth_kerb" in found['optional_package_names'])
+        self.assertTrue("mod_auth_mysql" in found['optional_package_names'])
+        self.assertTrue("crypto-utils" in found['default_package_names'])
+        self.assertTrue("distcache" in found['default_package_names'])
+        # PackageGroupCategory, look up expected values,
+        found = self.rapi.packagegroupcategory(repo['id'], "BAD_VALUE_NOT_IN_CATEGORY")
+        self.assertTrue(found == None)
+        found = self.rapi.packagegroupcategory(repo['id'], "development")
+        self.assertTrue(found != None)
+
+    def test_basic_comps(self):
+        repo = self.rapi.create('test_comps_id','test_comps_name', 
+            'i386', 'yum:http://example.com/')
+        grp = pulp.model.PackageGroup("groupid1", "groupname1", 
+            "description", "user_visible", "display_order", "default"
+            "langonly")
+        grp['mandatory_package_names'] = ["mandatory_package_name1"]
+        grp['optional_package_names'] = ["optional_package_name1"]
+        grp['default_package_names'] = ["default_package_name1"]
+        grp['conditional_package_names'] = {"pkg1":"value pkg1"}
+        grp['translated_name'] = {"a":"value"}
+        grp['translated_description'] = {"b":"value"}
+        self.rapi.update_packagegroup(repo['id'], grp)
+        found = self.rapi.packagegroup(repo['id'], grp['id'])
+        self.assertTrue(found != None)
+        self.assertTrue(found['name'] == 'groupname1')
+        self.assertTrue("mandatory_package_name1" in found['mandatory_package_names'])
+
+        ctg = pulp.model.PackageGroupCategory("categoryid1", 
+                    "categoryname", "description", "display_order")
+        ctg['packagegroupids'] = ["groupid1"]
+        ctg['translated_name'] = {"a":"name"}
+        ctg['translated_description'] = {"b":"description"}
+        self.rapi.update_packagegroupcategory(repo["id"], ctg)
+        found = self.rapi.packagegroupcategory(repo["id"], ctg["id"]) 
+        self.assertTrue(found != None)
+        self.assertTrue(found["name"] == "categoryname")
+        self.assertTrue("groupid1" in found["packagegroupids"])
+
+    def broken_intend_this_to_run_full_read_write_out_to_xml(self):
+        """
+        Test full cycle of Groups/Categories, import a comps.xml, parse it
+        modify the entries, then write them out to XML
+        """
+        #TODO: Writing to XML is broken 
+        # Parse existing comps.xml
+        compsPath = "./data/rhel-i386-server-5/comps.xml"
+        comps = yum.comps.Comps()
+        comps.add(compsPath)
+        self.assertTrue(len(comps.get_groups()) != 0)
+        self.assertTrue(len(comps.get_categories()) != 0)
+        # Create Groups/Categories from parsed data
+        repo = self.rapi.create('test_comps_id','test_comps_name', 
+                'i386', 'yum:http://example.com/')
+        found = self.rapi.packagegroups(repo['id'])
+        self.assertTrue(len(found) == 0)
+        found = self.rapi.packagegroupcategories(repo['id'])
+        self.assertTrue(len(found) == 0)
+
+        grp_list = []
+        groupids = []
+        for g in comps.get_groups():
+            grp = pulp.model.PackageGroup(g.groupid, g.name, 
+                    g.description, g.user_visible, g.display_order, g.default, 
+                    g.langonly)
+            grp['mandatory_package_names'].extend(g.mandatory_packages.keys())
+            grp['optional_package_names'].extend(g.optional_packages.keys())
+            grp['default_package_names'].extend(g.default_packages.keys())
+            grp['conditional_package_names'] = g.conditional_packages
+            grp['translated_name'] = g.translated_name
+            grp['translated_description'] = g.translated_description
+            grp_list.append(grp)
+            groupids.append(grp['id'])
+        self.rapi.update_packagegroups(repo['id'], grp_list)
+        ctg_list = []
+        categoryids = []
+        for c in comps.get_categories():
+            ctg = pulp.model.PackageGroupCategory(c.categoryid, 
+                    c.name, c.description, c.display_order)
+            groupids = [grp for grp in c.groups]
+            ctg['packagegroupids'].extend(groupids)
+            ctg['translated_name'] = c.translated_name
+            ctg['translated_description'] = c.translated_description
+            ctg_list.append(ctg)
+            categoryids.append(ctg['id'])
+        self.rapi.update_packagegroupcategories(repo['id'], ctg_list)
+        # Lookup data from API calls
+        found = self.rapi.packagegroups(repo['id'])
+        self.assertTrue(len(found) > 0)
+        found = self.rapi.packagegroupcategories(repo['id'])
+        self.assertTrue(len(found) > 0)
+        # PackageGroup, look up expected values, 
+        # good values come from known data in rhel-5 comps.xml
+        found = self.rapi.packagegroup(repo['id'], "BAD_VALUE_NOT_IN_GROUP")
+        self.assertTrue(found == None)
+        found = self.rapi.packagegroup(repo['id'], "web-server")
+        self.assertTrue(found != None)
+        self.assertTrue("httpd" in found['mandatory_package_names'])
+        self.assertTrue("mod_auth_kerb" in found['optional_package_names'])
+        self.assertTrue("mod_auth_mysql" in found['optional_package_names'])
+        self.assertTrue("crypto-utils" in found['default_package_names'])
+        self.assertTrue("distcache" in found['default_package_names'])
+        # PackageGroupCategory, look up expected values,
+        found = self.rapi.packagegroupcategory(repo['id'], "BAD_VALUE_NOT_IN_CATEGORY")
+        self.assertTrue(found == None)
+        found = self.rapi.packagegroupcategory(repo['id'], "development")
+        self.assertTrue(found != None)
+        # Test Removal
+        self.rapi.remove_packagegroup(repo['id'], "web-server")
+        found = self.rapi.packagegroup(repo['id'], "web-server")
+        self.assertTrue(found == None)
+        self.rapi.remove_packagegroupcategory(repo['id'], "development")
+        found = self.rapi.packagegroupcategory(repo['id'], "development")
+        self.assertTrue(found == None)
+
+        newComps = yum.comps.Comps()
+        # Look up categories from a repo
+        ctgs = self.rapi.packagegroupcategories(repo["id"])
+        grps = self.rapi.packagegroups(repo["id"])
+
+        for cid in ctgs:
+            category = self.rapi.translate_packagegroupcategory(ctgs[cid])
+            newComps.add_category(category)
+        for gid in grps:
+            pkggrp = self.rapi.translate_packagegroup(grps[gid])
+            newComps.add_group(pkggrp)
+        # Write back to xml
+        xml = newComps.xml()
+        print "Generated XML = %s" % (xml)
+        self.assertTrue(True)
+
+
+
+
diff --git a/test/ws/test_ws_api.py b/test/ws/test_ws_api.py
index d1c9b03..2f0237b 100644
--- a/test/ws/test_ws_api.py
+++ b/test/ws/test_ws_api.py
@@ -23,7 +23,6 @@ sys.path.append(os.path.join(cdir, '../unit'))
 from pulptools.connection import RepoConnection
 from pulptools.connection import ConsumerConnection
 from pulptools.connection import PackageConnection
-from pulptools.connection import PackageVersionConnection
 from pulptools.connection import PackageGroupConnection
 from pulptools.connection import PackageGroupCategoryConnection
 
@@ -40,8 +39,23 @@ class RemoteTestApi(TestApi):
         self.pvapi = PackageVersionConnection(**d)
         self.pgapi = PackageGroupConnection(**d)
         self.pgcapi = PackageGroupCategoryConnection(**d)
+        
+    def test_bulk_create(self):
+        pass
+
+    def test_json(self):
+        pass
+
+    def test_sync_two_repos_share_common_package(self):
+        pass
+
+    def test_sync_two_repos_share_common_package(self):
+        pass
+
+    def test_sync(self):
+        pass
 
-    def test_package_versions(self):
+    def test_local_sync(self):
         pass
 
     def test_packages(self):
