[Gelistirici] pisi-1.1

S.Çağlar Onur caglar at pardus.org.tr
Fri, 1 Feb 2008 21:35:10 EET


Greetings;

A lot of the changes made to the pisi-1.1 branch or to the package have not been reflected in the released package. When the farm updated pisi (it was using the svn version), the following diff appeared between it and the package:

Faik, to put an end to this confusion, could you please release a _clean_ package from the branch? Part of this diff is Gürer's util.py patch; the farm has been using that patch for a long time and we have not run into any problems with it, so if possible I would also like to see it merged into the branch.

buildfarm pisi # svn diff
Index: pisi/actionsapi/autotools.py
===================================================================
--- pisi/actionsapi/autotools.py        (revision 16919)
+++ pisi/actionsapi/autotools.py        (working copy)
@@ -60,7 +60,7 @@

         args = './configure \
                 --prefix=/%s \
-                --build=%s \
+                --host=%s \
                 --mandir=/%s \
                 --infodir=/%s \
                 --datadir=/%s \
@@ -85,7 +85,7 @@
             raise ConfigureError(_('Configure failed.'))
     else:
         raise ConfigureError(_('No configure script found.'))
-
+
 def compile(parameters = ''):
     #FIXME: Only one package uses this until now, hmmm
     system('%s %s %s' % (get.CC(), get.CFLAGS(), parameters))
Index: pisi/actionsapi/kde.py
===================================================================
--- pisi/actionsapi/kde.py      (revision 16919)
+++ pisi/actionsapi/kde.py      (working copy)
@@ -23,7 +23,6 @@
 import pisi.actionsapi
 import pisi.actionsapi.get as get
 from pisi.actionsapi.shelltools import system
-from pisi.actionsapi.pisitoolsfunctions import *
 from pisi.actionsapi.shelltools import can_access_file

 class ConfigureError(pisi.actionsapi.Error):
@@ -51,7 +50,7 @@
     if can_access_file('configure'):
         args = './configure \
                 --prefix=%s \
-                --build=%s \
+                --host=%s \
                 --with-x \
                 --enable-mitshm \
                 --with-xinerama \
@@ -80,12 +79,3 @@
             raise InstallError(_('Install failed.'))
     else:
         raise InstallError(_('No Makefile found.'))
-
-def domo(sourceFile, locale, destinationFile ):
-    '''inserts the mo files in the list of files into /usr/kde/VERSION/share/locale/LOCALE/LC_MESSAGES'''
-
-    '''example call: kde.domo("po/tr.po", "tr", "pam_login.mo")'''
-
-    system('msgfmt %s' % sourceFile)
-    makedirs(join_path(get.installDIR(), get.kdeDIR(), 'share/locale', locale, 'LC_MESSAGES'))
-    move('messages.mo', join_path(get.installDIR(), get.kdeDIR(), 'share/locale', locale, 'LC_MESSAGES', destinationFile))
Index: pisi/index.py
===================================================================
--- pisi/index.py       (revision 16919)
+++ pisi/index.py       (working copy)
@@ -101,7 +101,10 @@
                 if fn == 'distribution.xml':
                     self.add_distro(os.path.join(root, fn))

-        obsoletes_list = map(str, self.distribution.obsoletes)
+        try:
+            obsoletes_list = map(str, self.distribution.obsoletes)
+        except AttributeError:
+            obsoletes_list = []

         for pkg in util.filter_latest_packages(packages):
             pkg_name = util.parse_package_name(os.path.basename(pkg))[0]
@@ -126,7 +129,8 @@
         for comp in self.components:
             ctx.componentdb.update_component(comp, repo, txn)
         ctx.packagedb.remove_repo(repo, txn=txn)
-        ctx.packagedb.add_obsoletes(self.distribution.obsoletes, repo, txn=txn)
+        if self.distribution is not None:
+            ctx.packagedb.add_obsoletes(self.distribution.obsoletes, repo, txn=txn)
         for pkg in self.packages:
             ctx.packagedb.add_package(pkg, repo, txn=txn)
             update_progress()
Index: pisi/util.py
===================================================================
--- pisi/util.py        (revision 16919)
+++ pisi/util.py        (working copy)
@@ -326,66 +326,96 @@
         fp.write(line)
     fp.close()

-def calculate_hash(path):
-    """Return a (path, hash) tuple for given path."""
-    if os.path.islink(path):
-        try:
-            # For symlinks, path string is hashed instead of the content
-            value = sha1_data(os.readlink(path))
-        except FileError:
-            ctx.ui.info(_("Including external link '%s'") % path)
-            value = None
-    elif os.path.isdir(path):
-        ctx.ui.info(_("Including directory '%s'") % path)
-        value = None
-    else:
-        if path.endswith('.a'):
-            # .a file content changes with each compile due to timestamps
-            # We pad them with zeroes, thus hash will be stable
-            clean_ar_timestamps(path)
-        value = sha1_file(path)
+# FIXME: this should be done in a much much simpler way
+# as it stands, it seems to be a kludge to solve
+# an unrelated problem
+def get_file_hashes(top, excludePrefix=None, removePrefix=None):
+    """Iterate over given path and return a list of file hashes.

-    return (path, value)
+    Generator function iterates over a toplevel path and returns the
+    (filePath, sha1Hash) tuples for all files. If excludePrefixes list
+    is given as a parameter, function will exclude the filePaths
+    matching those prefixes. The removePrefix string parameter will be
+    used to remove prefix from filePath while matching excludes, if
+    given.
+    """

-def get_file_hashes(top, excludePrefix=None, removePrefix=None):
-    """Yield (path, hash) tuples for given directory tree."""
-    def is_included(path):
-        if excludePrefix:
-            temp = remove_prefix(removePrefix, path)
-            if len(filter(lambda x: temp.startswith(x), excludePrefix)) > 0:
-                return False
-        return True
-
-    # single file/symlink case
-    if not os.path.isdir(top) or os.path.islink(top):
-        if is_included(top):
-            yield calculate_hash(top)
+    def sha1_sum(f, data=False):
+        if not data and f.endswith('.a'):
+            #workaround for .a issue..
+            #don't skip .a files,
+            #but pad their timestamps with '0'..
+            clean_ar_timestamps(f)
+
+        func = None
+
+        if data:
+            func = sha1_data
+        else:
+            func = sha1_file
+
+        try:
+            return func(f)
+        except FileError, e:
+            if os.path.islink(f):
+                ctx.ui.info(_("Including external link '%s'") % f)
+            elif os.path.isdir(f):
+                ctx.ui.info(_("Including directory '%s'") % f)
+            else:
+                raise e
+            return None
+
+    def has_excluded_prefix(filename):
+        if excludePrefix and removePrefix:
+            tempfnam = remove_prefix(removePrefix, filename)
+            for p in excludePrefix:
+                if tempfnam.startswith(p):
+                    return 1
+        return 0
+
+    # handle single file
+    if os.path.isfile(top):
+        yield (top, sha1_sum(top))
         return
-
-    for root, dirs, files in os.walk(top):
-        # Hash files and file symlinks
-        for name in files:
-            path = os.path.join(root, name)
-            if is_included(path):
-                yield calculate_hash(path)
-
-        # Hash symlink dirs
-        # os.walk doesn't enter them, we don't want to follow them either
-        # but their name and hashes must be reported
-        # Discussed in bug #339
-        for name in dirs:
-            path = os.path.join(root, name)
-            if os.path.islink(path):
-                if is_included(path):
-                    yield calculate_hash(path)
-
-        # Hash empty dir
-        # Discussed in bug #340
-        if len(files) == 0 and len(dirs) == 0:
-            if is_included(root):
-                yield calculate_hash(root)

+    # handle single symlink declaration here.
+    if os.path.islink(top):
+        yield (top, sha1_sum(os.readlink(top), True))
+        return

+    for root, dirs, files in os.walk(top, topdown=False):
+        #bug 339
+        if os.path.islink(root) and not has_excluded_prefix(root):
+            #yield the symlink..
+            #bug 373
+            yield (root, sha1_sum(os.readlink(root), True))
+            excludePrefix.append(remove_prefix(removePrefix, root) + "/")
+            continue
+
+        #bug 397
+        for directory in dirs:
+            d = join_path(root, directory)
+            if os.path.islink(d) and not has_excluded_prefix(d):
+                yield (d, sha1_sum(os.readlink(d), True))
+                excludePrefix.append(remove_prefix(removePrefix, d) + "/")
+
+        #bug 340
+        if os.path.isdir(root) and not has_excluded_prefix(root):
+            parent, r, d, f = root, '', '', ''
+            for r, d, f in os.walk(parent, topdown=False): pass
+            if not f and not d:
+                yield (parent, sha1_sum(parent))
+
+        for fname in files:
+            f = join_path(root, fname)
+            if has_excluded_prefix(f):
+                continue
+            #bug 373
+            elif os.path.islink(f):
+                yield (f, sha1_sum(os.readlink(f), True))
+            else:
+                yield (f, sha1_sum(f))
+
 def copy_dir(src, dest):
     """Copy source dir to destination dir recursively."""
     shutil.copytree(src, dest)
@@ -419,9 +449,14 @@

 def sha1_data(data):
     """Calculate sha1 hash of given data."""
-    m = sha.new()
-    m.update(data)
-    return m.hexdigest()
+    try:
+        m = sha.new()
+        m.update(data)
+        return m.hexdigest()
+    except KeyboardInterrupt:
+        raise
+    except Exception: #FIXME: what exception could we catch here, replace with that.
+        raise Error(_("Cannot calculate SHA1 hash of given data"))

 def uncompress(patchFile, compressType="gz", targetDir=None):
     """Uncompress the file and return the new path."""
@@ -632,17 +667,8 @@
         name, version = parse_package_name(os.path.basename(path[:-len(ctx.const.package_suffix)]))

         if latest.has_key(name):
-            l_version = pisi.version.Version(latest[name][2])
-            r_version = pisi.version.Version(version)
-
-            # Bug 6352
-            # If two has build nos than only look to build nos
-            if l_version.build and r_version.build:
-                if l_version.build < r_version.build:
-                    latest[name] = (root, name, version)
-            else:
-                if l_version < r_version:
-                    latest[name] = (root, name, version)
+            if pisi.version.Version(latest[name][2]) < pisi.version.Version(version):
+                latest[name] = (root, name, version)
         else:
             if version:
                 latest[name] = (root, name, version)
Index: pisi/configfile.py
===================================================================
--- pisi/configfile.py  (revision 16919)
+++ pisi/configfile.py  (working copy)
@@ -25,8 +25,8 @@
 #host = i686-pc-linux-gnu
 #generateDebug = False
 #jobs = "-j1"
-#CFLAGS= -mtune=i686 -march=i686 -O2 -pipe -fomit-frame-pointer
-#CXXFLAGS= -mtune=i686 -march=i686 -O2 -pipe -fomit-frame-pointer
+#CFLAGS= -mtune=i686 -O2 -pipe -fomit-frame-pointer
+#CXXFLAGS= -mtune=i686 -O2 -pipe -fomit-frame-pointer
 #LDFLAGS=
 #buildno=True     # necessary for generating build nos
 #buildhelper = None / ccache / icecream
@@ -40,7 +40,7 @@
 #packages_dir = /var/cache/pisi/packages
 #compiled_packages_dir = "/var/cache/pisi/packages"
 #index_dir = /var/cache/pisi/index
-#tmp_dir = /var/pisi
+#tmp_dir = /var/tmp/pisi
 #kde_dir = /usr/kde/3.5
 #qt_dir = /usr/qt/3


Regards
-- 
S.Çağlar Onur <caglar at pardus.org.tr>
http://cekirdek.pardus.org.tr/~caglar/

Linux is like living in a teepee. No Windows, no Gates and an Apache in house!
