# -*- coding: utf-8 -*-
#
# Main fork Pisi: Copyright (C) 2005 - 2011, Tubitak/UEKAE
#
# Copyright (C) 2016 - 2018, Suleyman POYRAZ (Zaryob)
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#

"""misc. utility functions, including process and file utils"""

# standard python modules

import fcntl
import fnmatch
import hashlib
import operator
import os
import platform
import re
import shutil
import struct
import sys
import termios
import unicodedata

from functools import reduce
import gettext
__trans = gettext.translation('inary', fallback=True)
_ = __trans.gettext

try:
    import subprocess
except ImportError:
    raise Exception(_("Module 'subprocess' could not be imported."))

class Singleton(type):
    def __init__(cls, name, bases, dict):
        super(Singleton, cls).__init__(name, bases, dict)
        cls.instance = None

    def __call__(cls, *args, **kw):
        if cls.instance is None:
            cls.instance = super(Singleton, cls).__call__(*args, **kw)

        return cls.instance
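
# Illustrative usage (not part of the original module): any class that uses
# this metaclass always hands back the same instance.
#
#     class Settings(metaclass=Singleton):
#         pass
#
#     assert Settings() is Settings()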

# inary modules
import inary
import inary.errors
import inary.context as ctx

class Error(inary.errors.Error):
    pass

class FileError(Error):
    pass

class FilePermissionDeniedError(Error):
    pass

def locked(func):
    """
    Decorator for synchronizing privileged functions
    """

    def wrapper(*__args, **__kw):
        try:
            lock = open(join_path(ctx.config.lock_dir(), 'inary'), 'w')
        except IOError:
            raise inary.errors.PrivilegeError(_("You have to be root for this operation."))

        try:
            fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
            ctx.locked = True
        except IOError:
            if not ctx.locked:
                raise inary.errors.AnotherInstanceError(
                    _("Another instance of Inary is running. Only one instance is allowed."))

        try:
            inary.db.invalidate_caches()
            ctx.ui.info(_('Invalidating database caches...'), verbose=True)
            ret = func(*__args, **__kw)
            ctx.ui.info(_('Updating database caches...'), verbose=True)
            inary.db.update_caches()
            return ret
        finally:
            ctx.locked = False
            lock.close()
    return wrapper
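
# Illustrative usage ('upgrade_packages' is a hypothetical name): the decorator
# takes the lock file under ctx.config.lock_dir() before running the wrapped call.
#
#     @locked
#     def upgrade_packages(names):
#         ...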

#########################
# string/list/functional#
#########################

whitespace = ' \t\n\r\v\f'
ascii_lowercase = 'abcdefghijklmnopqrstuvwxyz'
ascii_uppercase = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
ascii_letters = ascii_lowercase + ascii_uppercase
digits = '0123456789'
hexdigits = digits + 'abcdef' + 'ABCDEF'
octdigits = '01234567'
punctuation = r"""!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"""
printable = digits + ascii_letters + punctuation + whitespace

def every(pred, seq):
    return reduce(operator.and_, list(map(pred, seq)), True)

def any(pred, seq):
    return reduce(operator.or_, list(map(pred, seq)), False)

def unzip(seq):
    return list(zip(*seq))

def concat(l):
    """Concatenate a list of lists."""
    return reduce(operator.concat, l)


def multisplit(str, chars):
    """Split str with any of the chars."""
    l = [str]
    for c in chars:
        l = concat([x.split(c) for x in l])
    return l
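
# Example (illustrative): splitting on either separator character
#     multisplit("1.2-3", ".-")  ->  ['1', '2', '3']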

def same(l):
    """Check if all elements of a sequence are equal."""
    if len(l) == 0:
        return True
    else:
        last = l.pop()
        for x in l:
            if x != last:
                return False
        return True

def flatten_list(l):
    """Flatten a list of lists."""
    # Fastest solution is list comprehension
    # See: http://stackoverflow.com/questions/952914/making-a-flat-list-out-of-list-of-lists-in-python
    return [item for sublist in l for item in sublist]

def unique_list(l):
    """Creates a unique list by deleting duplicate items"""
    return list(set(l))

def strlist(l):
    """Concatenate string reps of l's elements."""
    return "".join([str(x) + ' ' for x in l])

def prefix(a, b):
    """Check if sequence a is a prefix of sequence b."""
    if len(a) > len(b):
        return False
    for i in range(0, len(a)):
        if a[i] != b[i]:
            return False
    return True

def remove_prefix(a, b):
    """Remove prefix a from sequence b."""
    assert prefix(a, b)
    return b[len(a):]
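
# Examples (illustrative):
#     prefix([1, 2], [1, 2, 3])          ->  True
#     remove_prefix(['a'], ['a', 'b'])   ->  ['b']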

def suffix(a, b):
    """Check if sequence a is a suffix of sequence b."""
    if len(a) > len(b):
        return False
    for i in range(1, len(a) + 1):
        if a[-i] != b[-i]:
            return False
    return True

def remove_suffix(a, b):
    """Remove suffix a from sequence b."""
    assert suffix(a, b)
    return b[:-len(a)]


def human_readable_size(size=0):
    symbols, depth = [' B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'], 0

    while size > 1000 and depth < 8:
        size = float(size / 1024)
        depth += 1

    return size, symbols[depth]
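
# Example (illustrative): human_readable_size(20000) -> (19.53125, 'KB')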


def human_readable_rate(size=0):
    x = human_readable_size(size)
    return x[0], x[1] + '/s'

def format_by_columns(strings, sep_width=2):
    longest_str_len = len(max(strings, key=len))
    term_rows, term_columns = get_terminal_size()

    def get_columns(max_count):
        if longest_str_len > term_columns:
            return [longest_str_len]

        columns = []
        for name in strings:
            table_width = sum(columns) + len(name) + len(columns) * sep_width
            if table_width > term_columns:
                break

            columns.append(len(name))
            if len(columns) == max_count:
                break

        return columns

    def check_size(columns):
        total_sep_width = (len(columns) - 1) * sep_width

        for n, name in enumerate(strings):
            col = n % len(columns)
            if len(name) > columns[col]:
                columns[col] = len(name)

            if len(columns) > 1:
                width = sum(columns) + total_sep_width
                if width > term_columns:
                    return False

        return True

    columns = get_columns(term_columns)

    while not check_size(columns):
        columns = get_columns(len(columns) - 1)

    sep = " " * sep_width
    lines = []
    current_line = []
    for n, name in enumerate(strings):
        col = n % len(columns)
        current_line.append(name.ljust(columns[col]))

        if col == len(columns) - 1:
            lines.append(sep.join(current_line))
            current_line = []

    if current_line:
        lines.append(sep.join(current_line))

    return "\n".join(lines)

##############################
# Process Releated Functions #
##############################

def search_executable(executable):
    """Search for the executable in user's paths and return it."""
    for _path in os.environ["PATH"].split(":"):
        full_path = os.path.join(_path, executable)
        if os.path.exists(full_path) and os.access(full_path, os.X_OK):
            return full_path
    return None

def run_batch(cmd, ui_debug=True):
    """Run command and report return value and output."""
    ctx.ui.info(_('Running ') + cmd, verbose=True)
    p = subprocess.Popen(cmd, shell=True,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    if ui_debug:
        ctx.ui.debug(_('return value for "{0}" is {1}').format(cmd, p.returncode))
    return p.returncode, out.decode('utf-8'), err.decode('utf-8')


# TODO: it might be worthwhile to try to remove the
# use of ctx.stdout, and use run_batch()'s return
# values instead. but this is good enough :)
def run_logged(cmd):
    """Run command and get the return value."""
    ctx.ui.info(_('Running ') + cmd, verbose=True)

    if ctx.stdout:
        stdout = ctx.stdout
    else:
        if ctx.get_option('debug'):
            stdout = None
        else:
            stdout = subprocess.PIPE

    if ctx.stderr:
        stderr = ctx.stderr
    else:
        if ctx.get_option('debug'):
            stderr = None
        else:
            stderr = subprocess.STDOUT

    p = subprocess.Popen(cmd, shell=True, stdout=stdout, stderr=stderr)
    p.communicate()

    ctx.ui.debug(_('return value for "{0}" is {1}').format(cmd, p.returncode))

    return p.returncode

######################
# Terminal functions #
######################

def get_terminal_size():
    try:
        ret = fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ, "1234")
    except IOError:
        rows = int(os.environ.get("LINES", 25))
        cols = int(os.environ.get("COLUMNS", 80))
        return rows, cols

    return struct.unpack("hh", ret)

def xterm_title(message):
    """Set message as console window title."""
    if "TERM" in os.environ and sys.stderr.isatty():
        terminalType = os.environ["TERM"]
        for term in ["xterm", "Eterm", "aterm", "rxvt", "screen", "kterm", "rxvt-unicode"]:
            if terminalType.startswith(term):
                sys.stderr.write("\x1b]2;" + str(message) + "\x07")
                sys.stderr.flush()
                break

def xterm_title_reset():
    """Reset console window title."""
    if "TERM" in os.environ:
        xterm_title("")

#############################
#   ncurses like functions  #
#############################
def initscr():
    """Clear and create a window"""
    printw("\x1b[s\x1bc")

def endsrc():
    """Clear and restore screen"""
    printw("\x1bc\x1b[u")

def move(x,y):
    """Move"""
    printw("\x1b[{};{}f".format(y,x))

def printw(msg=''):
    """Print clone"""
    sys.stdout.write(msg)
    sys.stdout.flush()

def mvprintw(x,y,msg=''):
    """Move and print"""
    move(x,y)
    printw(msg)

def noecho(enabled=True):
    """Hide the terminal cursor (or show it again when enabled is False)."""
    if not ctx.get_option('no_color'):
        if enabled:
            printw("\x1b[?25l")
        else:
            printw("\x1b[?25h")

def attron(attribute):
    """Attribute enable"""
    # ANSI escape sequence for each supported attribute/colour name.
    attributes = {
        "A_NORMAL": "\x1b[;0m",
        "A_UNDERLINE": "\x1b[4m",
        "A_REVERSE": "\x1b[7m",
        "A_BLINK": "\x1b[5m",
        "A_DIM": "\x1b[2m",
        "A_BOLD": "\x1b[1m",
        "A_INVIS": "\x1b[8m",
        "C_BLACK": "\x1b[30m",
        "C_RED": "\x1b[31m",
        "C_GREEN": "\x1b[32m",
        "C_YELLOW": "\x1b[33m",
        "C_BLUE": "\x1b[34m",
        "C_MAGENTA": "\x1b[35m",
        "C_CYAN": "\x1b[36m",
        "C_WHITE": "\x1b[37m",
        "B_BLACK": "\x1b[40m",
        "B_RED": "\x1b[41m",
        "B_GREEN": "\x1b[42m",
        "B_YELLOW": "\x1b[43m",
        "B_BLUE": "\x1b[44m",
        "B_MAGENTA": "\x1b[45m",
        "B_CYAN": "\x1b[46m",
        "B_WHITE": "\x1b[47m",
    }
    if attribute in attributes:
        sys.stdout.write(attributes[attribute])
    sys.stdout.flush()

def drawbox(x1, y1, x2, y2):
    """Draw box"""
    mvprintw(x1, y1, "╔")
    mvprintw(x1, y2, "╚")
    mvprintw(x2, y1, "╗")
    mvprintw(x2, y2, "╝")
    for i in range(x1 + 1, x2):
        mvprintw(i, y1, "═")
        mvprintw(i, y2, "═")
    for i in range(y1 + 1, y2):
        mvprintw(x1, i, "║")
        mvprintw(x2, i, "║")

#############################
# Path Processing Functions #
#############################

def splitpath(a):
    """split path into components and return as a list
    os.path.split doesn't do what I want like removing trailing /"""
    comps = a.split(os.path.sep)
    if comps[len(comps) - 1] == '':
        comps.pop()
    return comps
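
# Example (illustrative, POSIX separator): splitpath("/usr/local/") -> ['', 'usr', 'local']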


def makepath(comps, relative=False, sep=os.path.sep):
    """Reconstruct a path from components."""
    path = reduce(lambda x, y: x + sep + y, comps, '')
    if relative:
        return path[len(sep):]
    else:
        return path


def parentpath(a, sep=os.path.sep):
    # remove trailing '/'
    a = a.rstrip(sep)
    return a[:a.rfind(sep)]

def parenturi(a):
    return parentpath(a, '/')

def subpath(a, b):
    """Find if path a is before b in the directory tree."""
    return prefix(splitpath(a), splitpath(b))

def removepathprefix(prefix, path):
    """Remove path prefix a from b, finding the pathname rooted at a."""
    comps = remove_prefix(splitpath(prefix), splitpath(path))
    if len(comps) > 0:
        return join_path(*tuple(comps))
    else:
        return ""

def join_path(a, *p):
    """Join two or more pathname components.
    Python os.path.join cannot handle '/' at the start of latter components.
    """
    path = a
    for b in p:
        b = b.lstrip('/')
        if path == '' or path.endswith('/'):
            path += b
        else:
            path += '/' + b
    return path
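
# Example (illustrative): join_path("/usr", "/bin", "env") -> "/usr/bin/env"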

####################################
# File/Directory Related Functions #
####################################

def check_file(_file, mode=os.F_OK):
    """Shorthand to check if a file exists."""
    if not os.access(_file, mode):
        raise FileError(_("File {} not found.").format(_file))
    return True

def ensure_dirs(path):
    """Make sure the given directory path exists."""
    if not os.path.exists(path):
        os.makedirs(path)

def clean_dir(path):
    """Remove all content of a directory."""
    if os.path.exists(path):
        shutil.rmtree(path)

def delete_file(path):
    """Remove the given regular file if it exists."""
    if os.path.isfile(path):
        os.remove(path)

def creation_time(_file):
    """Return the creation time of the given file."""
    if check_file(_file):
        import time
        st = os.stat(_file)
        return time.localtime(st.st_ctime)

def dir_size(_dir):
    """Calculate the size of files under a directory."""
    # It's really hard to give an approximate value for a package's
    # installed size. Getting a sum of all files' sizes is far from
    # accurate. Using the 'du' command (like Debian does) could be a
    # better solution :(.
    # Not really, du calculates size on disk; this is much better.

    if os.path.exists(_dir) and (not os.path.isdir(_dir) and not os.path.islink(_dir)):
        # so, this is not a directory but a file
        return os.path.getsize(_dir)

    if os.path.islink(_dir):
        return int(len(read_link(_dir)))

    def sizes():
        for root, dirs, files in os.walk(_dir):
            yield sum(
                [os.path.getsize(join_path(root, name)) for name in files if not os.path.islink(join_path(root, name))])

    return sum(sizes())


def copy_file(src, dest):
    """Copy source file to the destination file."""
    check_file(src)
    ensure_dirs(os.path.dirname(dest))
    shutil.copyfile(src, dest)


def copy_file_stat(src, dest):
    """Copy source file to the destination file with all stat info."""
    check_file(src)
    ensure_dirs(os.path.dirname(dest))
    shutil.copy2(src, dest)

def free_space(directory=None):
    """Returns the free space (x Byte) in the device. """
    if not directory:
        # Defaults to /
        directory = ctx.config.values.general.destinationdirectory
    _stat = os.statvfs(directory)
    free_space = _stat.f_bfree * _stat.f_bsize

    return free_space

def read_link(link):
    """Return the normalized path which is pointed by the symbolic link."""
    # tarfile module normalizes the paths pointed by symbolic links. This
    # causes problems as the file hashes and sizes are calculated before
    # this normalization.
    return os.path.normpath(os.readlink(link))

def is_ar_file(file_path):
    with open(file_path, 'rb') as f:
        return f.read(8) == b'!<arch>\n'

def clean_ar_timestamps(ar_file):
    """Zero all timestamps in the ar files."""
    if not is_ar_file(ar_file):
        return
    content = open(ar_file).readlines()
    fp = open(ar_file, 'w')
    for line in content:
        pos = line.rfind(chr(32) + chr(96))
        if pos > -1 and line[pos - 57:pos + 2].find(chr(47)) > -1:
            line = line[:pos - 41] + '0000000000' + line[pos - 31:]
        fp.write(line)
    fp.close()

def calculate_hash(path):
    """Return a (path, hash) tuple for given path."""
    if os.path.islink(path):
        # For symlinks, path string is hashed instead of the content
        value = sha1_data(read_link(path))
        if not os.path.exists(path):
            ctx.ui.info(_("Including external link \"{}\"").format(path))
    elif os.path.isdir(path):
        ctx.ui.info(_("Including directory \"{}\"").format(path))
        value = None
    else:
        if path.endswith('.a'):
            # .a file content changes with each compile due to timestamps
            # We pad them with zeroes, thus hash will be stable
            clean_ar_timestamps(path)
        value = sha1_file(path)

    return path, value

def get_file_hashes(top, excludePrefix=None, removePrefix=None):
    """Yield (path, hash) tuples for given directory tree.

    Generator function iterates over a toplevel path and returns the
    (filePath, sha1Hash) tuples for all files. If excludePrefixes list
    is given as a parameter, function will exclude the filePaths
    matching those prefixes. The removePrefix string parameter will be
    used to remove prefix from filePath while matching excludes, if
    given.
    """

    def is_included(path):
        if excludePrefix:
            temp = remove_prefix(removePrefix, path)
            while temp != "/":
                if len([x for x in excludePrefix if fnmatch.fnmatch(temp, x)]) > 0:
                    return False
                temp = os.path.dirname(temp)
        return True

    # single file/symlink case
    if not os.path.isdir(top) or os.path.islink(top):
        if is_included(top):
            yield calculate_hash(top)
        return

    for root, dirs, files in os.walk(top):
        # Hash files and file symlinks
        for name in files:
            path = os.path.join(root, name)
            if is_included(path):
                yield calculate_hash(path)

        # Hash symlink dirs
        # os.walk doesn't enter them, we don't want to follow them either
        # but their name and hashes must be reported
        # Discussed in bug #339
        for name in dirs:
            path = os.path.join(root, name)
            if os.path.islink(path):
                if is_included(path):
                    yield calculate_hash(path)

        # Hash empty dir
        # Discussed in bug #340
        if len(files) == 0 and len(dirs) == 0:
            if is_included(root):
                yield calculate_hash(root)


def check_file_hash(filename, hash):
    """Check the file's integrity with a given hash."""
    return sha1_file(filename) == hash


def sha1_file(filename):
    """Calculate sha1 hash of file."""
    # Broken links can cause problem!
    try:
        m = hashlib.sha1()
        f = open(filename, 'rb')
        while True:
            # 256 KB seems ideal for speed/memory tradeoff
            # It wont get much faster with bigger blocks, but
            # heap peak grows
            block = f.read(256 * 1024)
            if len(block) == 0:
                # end of file
                break
            m.update(block)
            # Simple trick to keep total heap even lower
            # Delete the previous block, so while next one is read
            # we wont have two allocated blocks with same size
            del block
        return m.hexdigest()
    except IOError as e:
        if e.errno == 13:
            # Permission denied, the file doesn't have read permissions, skip
            raise FilePermissionDeniedError(_("You don't have the necessary read permissions."))
        else:
            raise FileError(_("Cannot calculate SHA1 hash of \"{}\"").format(filename))


def sha1_data(data):
    """Calculate sha1 hash of given data."""
    m = hashlib.sha1()
    m.update(data.encode('utf-8'))
    return m.hexdigest()
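
# Example: sha1_data("") -> 'da39a3ee5e6b4b0d3255bfef95601890afd80709'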

def uncompress(patchFile, compressType="gz", targetDir=""):
    """Uncompress the file and return the new path."""
    formats = ("gz", "gzip", "bz2", "bzip2", "lzma", "xz")
    if compressType not in formats:
        raise Error(_("Compression type is not valid: '{}'").format(compressType))

    archive = inary.archive.Archive(patchFile, compressType)
    try:
        archive.unpack(targetDir)
    except Exception as msg:
        raise Error(_("Error while decompressing \"{0}\": {1}").format(patchFile, msg))

    # FIXME: Get file path from Archive instance
    filePath = join_path(targetDir, os.path.basename(patchFile))

    # remove suffix from file cause its uncompressed now
    extensions = {"gzip": "gz", "bzip2": "bz2"}
    extension = extensions.get(compressType, compressType)
    return filePath.split(".{}".format(extension))[0]

def check_patch_level(workdir, path):
    level = 0
    while path:
        if os.path.isfile("{0}/{1}".format(workdir, path)):
            return level
        if path.find("/") == -1:
            return None
        level += 1
        path = path[path.find("/") + 1:]
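
# Illustrative example: if a patch header references "a/src/main.c" and
# "<workdir>/src/main.c" exists on disk, this returns 1; if no stripped
# component matches an existing file, it returns None.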


def do_patch(sourceDir, patchFile, level=0, name=None, reverse=False):
    """Apply given patch to the sourceDir."""
    cwd = os.getcwd()
    if os.path.exists(sourceDir):
        os.chdir(sourceDir)
    else:
        raise Error(_("ERROR: WorkDir ({}) does not exist\n").format(sourceDir))

    check_file(patchFile)

    if level is None:
        with open(patchFile) as patchfile:
            lines = patchfile.readlines()
            try:
                paths_m = [l.strip().split()[1] for l in lines if l.startswith("---") and "/" in l]
                try:
                    paths_p = [l.strip().split()[1] for l in lines if l.startswith("+++")]
                except IndexError:
                    paths_p = []
            except IndexError:
                pass
            else:
                if not paths_p:
                    paths_p = paths_m[:]
                    try:
                        paths_m = [l.strip().split()[1] for l in lines if l.startswith("***") and "/" in l]
                    except IndexError:
                        pass

                for path_p, path_m in zip(paths_p, paths_m):
                    if "/dev/null" in path_m and paths_p.index(path_p) != len(paths_p) - 1:
                        continue
                    level = check_patch_level(sourceDir, path_p)
                    if level is None and len(paths_m) - 1 == paths_m.index(path_m):
                        level = check_patch_level(sourceDir, path_m)
                    if level is not None:
                        ctx.ui.info(_("Detected patch level={0} for {1}").format(
                            level, os.path.basename(patchFile)), verbose=True)
                        break

    if level is None:
        level = 0

    if name is None:
        name = os.path.basename(patchFile)

    if ctx.get_option('use_quilt'):
        patchesDir = join_path(sourceDir, ctx.const.quilt_dir_suffix)
        # Make sure sourceDir/patches directory exists and if not create one!
        if not os.path.exists(patchesDir):
            os.makedirs(patchesDir)
        # Import original patch into quilt tree
        (ret, out, err) = run_batch(
            'quilt import {0} -p {1} -P {2} \"{3}\"'.format(("-R" if reverse else ""), level, name, patchFile))
        # run quilt push to apply original patch into tree
        (ret, out, err) = run_batch('quilt push')
    else:
        # run GNU patch to apply original patch into tree
        (ret, out, err) = run_batch(
            "patch --remove-empty-files --no-backup-if-mismatch {0} -p{1} -i \"{2}\"".format(("-R" if reverse else ""),
                                                                                             level, patchFile))

    if ret:
        if out is None and err is None:
            # Which means stderr and stdout directed so they are None
            raise Error(_("ERROR: patch (\"{}\") failed.").format(patchFile))
        else:
            raise Error(_("ERROR: patch (\"{0}\") failed: {1}").format(patchFile, out))

    os.chdir(cwd)

def strip_file(filepath, fileinfo, outpath):
    """Strip an elf file from debug symbols."""
    def run_strip(f, flags=""):
        p = os.popen("strip {0} {1}".format(flags, f))
        ret = p.close()
        if ret:
            ctx.ui.warning(_("\'strip\' command failed for file \"{}\"!").format(f))

    def run_chrpath(f):
        """ remove rpath info from binary """
        p = os.popen("chrpath -d {}".format(f))
        ret = p.close()
        if ret:
            ctx.ui.warning(_("\'chrpath\' command failed for file \"{}\"!").format(f))

    def save_elf_debug(f, o):
        """copy debug info into file.debug file"""
        p = os.popen("objcopy --only-keep-debug {0} {1}{2}".format(f, o, ctx.const.debug_file_suffix))
        ret = p.close()
        if ret:
            ctx.ui.warning(_("\'objcopy\' (keep-debug) command failed for file \"{}\"!").format(f))

        """mark binary/shared objects to use file.debug"""
        p = os.popen("objcopy --add-gnu-debuglink={0}{1} {2}".format(o, ctx.const.debug_file_suffix, f))
        ret = p.close()
        if ret:
            ctx.ui.warning(_("\'objcopy\' (add-debuglink) command failed for file \"{}\"!").format(f))


    if "current ar archive" in fileinfo:
        run_strip(filepath, "--strip-debug")
        return True

    elif re.search("SB\s+executable", fileinfo):
        if ctx.config.values.build.generatedebug:
            ensure_dirs(os.path.dirname(outpath))
            save_elf_debug(filepath, outpath)
        run_strip(filepath)
        # FIXME: removing RPATH also causes problems, for details see gelistirici mailing list - caglar10ur
        # run_chrpath(filepath)
        return True

    elif re.search("SB\s+shared object", fileinfo):
        if ctx.config.values.build.generatedebug:
            ensure_dirs(os.path.dirname(outpath))
            save_elf_debug(filepath, outpath)
        run_strip(filepath, "--strip-unneeded")
        # run_chrpath(filepath)
        # FIXME: warn for TEXTREL
        return True

    return False

def partition_freespace(directory):
    """Return free space of given directory's partition."""
    st = os.statvfs(directory)
    return st.f_frsize * st.f_bfree


########################################
# Package/Repository Related Functions #
########################################

def package_filename(name, version, release, distro_id=None, arch=None):
    """Return a filename for a package with the given information. """

    if distro_id is None:
        distro_id = ctx.config.values.general.distribution_id

    if arch is None:
        arch = ctx.config.values.general.architecture

    fn = "-".join((name, version, release, distro_id, arch))
    fn += ctx.const.package_suffix

    return fn
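
# Illustrative example (values are hypothetical):
#     package_filename("tasma", "1.0.3", "5", "p11", "x86_64")
#     -> "tasma-1.0.3-5-p11-x86_64" + ctx.const.package_suffix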

def parse_package_name_legacy(package_name):
    """Separate package name and version string for package formats <= 1.1.

    example: tasma-1.0.3-5-2 -> (tasma, 1.0.3-5-2)
    """
    # We should handle package names like 855resolution
    name = []
    for part in package_name.split("-"):
        if name != [] and part[0] in digits:
            break
        else:
            name.append(part)
    name = "-".join(name)
    version = package_name[len(name) + 1:]

    return name, version

def parse_package_name(package_name):
    """Separate package name and version string.

    example: tasma-1.0.3-5-p11-x86_64 -> (tasma, 1.0.3-5)
    """

    # Strip extension if exists
    if package_name.endswith(ctx.const.package_suffix):
        package_name = remove_suffix(ctx.const.package_suffix, package_name)

    try:
        name, version, release, distro_id, arch = package_name.rsplit("-", 4)

        # Arch field cannot start with a digit. If a digit is found,
        # the package might have an old format. Raise here to call
        # the legacy function.
        if not arch or arch[0] in digits:
            raise ValueError

    except ValueError:
        try:
            return parse_package_name_legacy(package_name)
        except:
            raise Error(_("Invalid package name: \"{}\"").format(package_name))

    return name, "{0}-{1}".format(version, release)

def parse_package_dir_path(package_name):
    name = parse_package_name(package_name)[0]
    sub_package_suffixes = ["devel", "32bit", "doc", "docs", "pages", "static",
                            "dbginfo", "32bit-dbginfo", "userspace"]
    last = name.split("-").pop()
    if last in sub_package_suffixes:
        name = name[:-1 - len(last)]
    prefix_dir = name[0:4].lower() if name.startswith("lib") and len(name) > 3 else name.lower()[0]
    return "{0}/{1}".format(prefix_dir, name.lower())


def parse_delta_package_name_legacy(package_name):
    """Separate delta package name and release infos for package formats <= 1.1.

    example: tasma-5-7.delta.inary -> (tasma, 5, 7)
    """
    name, build = parse_package_name(package_name)
    build = build[:-len(ctx.const.delta_package_suffix)]
    buildFrom, buildTo = build.split("-")

    return name, buildFrom, buildTo

def parse_delta_package_name(package_name):
    """Separate delta package name and release infos

    example: tasma-5-7-p11-x86_64.delta.inary -> (tasma, 5, 7)
    """

    # Strip extension if exists
    if package_name.endswith(ctx.const.delta_package_suffix):
        package_name = remove_suffix(ctx.const.delta_package_suffix,
                                     package_name)

    try:
        name, source_release, target_release, distro_id, arch = \
            package_name.rsplit("-", 4)

        # Arch field cannot start with a digit. If a digit is found,
        # the package might have an old format. Raise here to call
        # the legacy function.
        if not arch or arch[0] in digits:
            raise ValueError

    except ValueError:
        try:
            return parse_delta_package_name_legacy(package_name)
        except:
            raise Error(_("Invalid delta package name: \"{}\"").format(package_name))

    return name, source_release, target_release

def split_package_filename(filename):
    """Split fields in package filename.

    example: tasma-1.0.3-5-p11-x86_64.inary -> (tasma, 1.0.3, 5, p11, x86_64)
    """

    # Strip extension if exists
    if filename.endswith(ctx.const.package_suffix):
        filename = remove_suffix(ctx.const.package_suffix, filename)

    try:
        name, version, release, distro_id, arch = filename.rsplit("-", 4)

        # Arch field cannot start with a digit. If a digit is found,
        # the package might have an old format.
        if not arch or arch[0] in digits:
            raise ValueError

    except ValueError:
        name, version = parse_package_name_legacy(filename)
        version, release, build = split_version(version)
        distro_id = arch = None

    return name, version, release, distro_id, arch

def split_delta_package_filename(filename):
    """Split fields in delta package filename.

    example: tasma-5-7-p11-x86_64.delta.inary -> (tasma, 5, 7, p11, x86-64)
    """

    # Strip extension if exists
    if filename.endswith(ctx.const.delta_package_suffix):
        filename = remove_suffix(ctx.const.delta_package_suffix, filename)

    try:
        name, source_release, target_release, distro_id, arch = \
            filename.rsplit("-", 4)

        # Arch field cannot start with a digit. If a digit is found,
        # the package might have an old format.
        if not arch or arch[0] in digits:
            raise ValueError

    except ValueError:
        # Old formats not supported
        name = parse_delta_package_name_legacy(filename)[0]
        source_release = target_release = None

    return name, source_release, target_release, distro_id, arch

def split_version(package_version):
    """Split version, release and build parts of a package version

    example: 1.0.3-5-2 -> (1.0.3, 5, 2)
    """
    version, sep, release_and_build = package_version.partition("-")
    release, sep, build = release_and_build.partition("-")
    return version, release, build

def filter_latest_packages(package_paths):
    """ For a given inary package paths list where there may also be multiple versions
        of the same package, filters only the latest versioned ones """

    import inary.version

    latest = {}
    for path in package_paths:

        name, version = parse_package_name(os.path.basename(path[:-len(ctx.const.package_suffix)]))

        if name in latest:
            l_version, l_release, l_build = split_version(latest[name][1])
            r_version, r_release, r_build = split_version(version)

            try:
                l_release = int(l_release)
                r_release = int(r_release)

                l_build = int(l_build) if l_build else None
                r_build = int(r_build) if r_build else None

            except ValueError:
                continue

            if l_build and r_build:
                if l_build > r_build:
                    continue

            elif l_release > r_release:
                continue

            elif l_release == r_release:
                l_version = inary.version.make_version(l_version)
                r_version = inary.version.make_version(r_version)

                if l_version > r_version:
                    continue

        if version:
            latest[name] = (path, version)

    return [x[0] for x in list(latest.values())]

def colorize(msg, color):
    """Colorize the given message for console output"""
    if color in ctx.const.colors and not ctx.get_option('no_color'):
        return str(ctx.const.colors[color] + msg + ctx.const.colors['default'])
    else:
        return str(msg)

def config_changed(config_file):
    fpath = join_path(ctx.config.dest_dir(), config_file.path)
    if os.path.exists(fpath) and not os.path.isdir(fpath):
        if os.path.islink(fpath):
            f = os.readlink(fpath)
            if os.path.exists(f) and sha1_data(f) != config_file.hash:
                return True
        else:
            if sha1_file(fpath) != config_file.hash:
                return True
    return False

# recursively remove empty dirs starting from dirpath
def rmdirs(dirpath):
    if os.path.isdir(dirpath) and not os.listdir(dirpath):
        ctx.ui.info(_("Removing empty dir: \"{}\"").format(dirpath), verbose=True)
        os.rmdir(dirpath)
        rmdirs(os.path.dirname(dirpath))

# Python regex sucks
# http://mail.python.org/pipermail/python-list/2009-January/523704.html
def letters():
    start = end = None
    result = []
    for index in range(sys.maxunicode + 1):
        c = chr(index)
        if unicodedata.category(c)[0] == 'L':
            if start is None:
                start = end = c
            else:
                end = c
        elif start:
            if start == end:
                result.append(start)
            else:
                result.append(start + "-" + end)
            start = None
    return ''.join(result)

def get_kernel_option(option):
    """Get a dictionary of args for the given kernel command line option"""

    args = {}

    try:
        cmdline = open("/proc/cmdline").read().split()
    except IOError:
        return args

    for cmd in cmdline:
        if "=" in cmd:
            optName, optArgs = cmd.split("=", 1)
        else:
            optName = cmd
            optArgs = ""

        if optName == option:
            for arg in optArgs.split(","):
                if ":" in arg:
                    k, v = arg.split(":", 1)
                    args[k] = v
                else:
                    args[arg] = ""

    return args
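
# Illustrative example: with "video=LVDS-1:1024x768,VGA-1:off" on the kernel
# command line, get_kernel_option("video") would return
# {'LVDS-1': '1024x768', 'VGA-1': 'off'}.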

def get_cpu_count():
    """
    This function part of portage
    Copyright 2015 Gentoo Foundation
1209
    Distributed under the terms of the GNU General Public License v2
1210 1211 1212 1213 1214 1215 1216 1217 1218

    Using:
    Try to obtain the number of CPUs available.
    @return: Number of CPUs or None if unable to obtain.
    """
    try:
        import multiprocessing
        return multiprocessing.cpu_count()
    except (ImportError, NotImplementedError):
        return None

def get_vm_info():
    vm_info = {}

    if platform.system() == 'Linux':
        try:
            proc = subprocess.Popen(["free"],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
        except OSError:
            return vm_info
        output = proc.communicate()[0].decode('utf-8')
        if proc.wait() == os.EX_OK:
            for line in output.splitlines():
                line = line.split()
                if len(line) < 2:
                    continue
                if line[0] == "Mem:":
                    try:
                        vm_info["ram.total"] = int(line[1]) * 1024
                    except ValueError:
                        pass
                    if len(line) > 3:
                        try:
                            vm_info["ram.free"] = int(line[3]) * 1024
                        except ValueError:
                            pass
                elif line[0] == "Swap:":
                    try:
                        vm_info["swap.total"] = int(line[1]) * 1024
                    except ValueError:
                        pass
                    if len(line) > 3:
                        try:
                            vm_info["swap.free"] = int(line[3]) * 1024
                        except ValueError:
                            pass
    else:
        try:
            proc = subprocess.Popen(["sysctl", "-a"],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
        except OSError:
            pass
        else:
            output = proc.communicate()[0].decode('utf-8')
            if proc.wait() == os.EX_OK:
                for line in output.splitlines():
                    line = line.split(":", 1)
                    if len(line) != 2:
                        continue