Merge branch 'develop' into 'master'

1.4.2

See merge request sulinos/devel/inary!43
......@@ -116,3 +116,5 @@ config/inary.conf
MANIFEST
.config
DESTDIR=/
all: build
all: clean build
clean:
`find | grep pycache | sed 's/^/rm -rf /g'`
......
......@@ -63,6 +63,9 @@ def _cleanup():
ctx.enable_keyboard_interrupts()
# we need umask 0x022 (0x027 may broke so we need force 0x022)
os.umask(18)
# Hack for inary to work with non-patched Python. inary needs
# lots of work for not doing this.
importlib.reload(sys)
......
......@@ -106,8 +106,8 @@ def installDIR():
def lsbINFO():
"""Returns a dictionary filled through /etc/lsb-release."""
return dict([(l.split("=")[0], l.split("=")[1].strip("'\""))
for l in open("/etc/lsb-release").read().strip().split("\n") if "=" in l])
return dict([(line.split("=")[0], line.split("=")[1].strip("'\""))
for line in open("/etc/lsb-release").read().strip().split("\n") if "=" in line])
def kernelVERSION():
......
......@@ -249,8 +249,8 @@ def installModuleHeaders(extraHeaders=None, distro=""):
# First create the skel
find_cmd = "find . -path %s -prune -o -type f \( -name %s \) -print" % \
(
" -prune -o -path ".join(["'./%s/*'" % l for l in pruned]),
" -o -name ".join(["'%s'" % k for k in wanted])
" -prune -o -path ".join(["'./%s/*'" % prune for prune in pruned]),
" -o -name ".join(["'%s'" % want for want in wanted])
) + " | cpio -pVd --preserve-modification-time %s" % destination
shelltools.system(find_cmd)
......
......@@ -114,7 +114,12 @@ def auto_dodoc():
def install(parameters=''):
"""does ruby setup.rb install"""
if system(
'ruby -w setup.rb --prefix=/{0}/{1} --destdir={1} {2}'.format(get.defaultprefixDIR(), get_gemdir(), get.installDIR(), parameters)):
'ruby -w setup.rb --prefix=/{0}/{1} --destdir={1} {2}'.format(
get.defaultprefixDIR(),
get_gemdir(),
get.installDIR(),
parameters
)):
raise InstallError(_('Install failed.'))
auto_dodoc()
......
......@@ -123,9 +123,8 @@ def unlink(pattern):
ctx.ui.error(
_('ActionsAPI [unlink]: Permission denied: \"{}\"').format(filePath))
elif isDirectory(filePath):
ctx.ui.warning(_(
'ActionsAPI [unlink]: \"{}\" is not a file, use \'unlinkDir\' or \'removeDir\' to remove directories.').format(
filePath))
ctx.ui.warning(
_('ActionsAPI [unlink]: \"{}\" is not a file, use \'unlinkDir\' or \'removeDir\' to remove directories.').format(filePath))
else:
ctx.ui.error(
......
......@@ -186,8 +186,9 @@ def handleConfigFiles():
ctx.ui.info(
_('Creating \"/etc/texmf/{}.d\"').format(dirname))
dodir("/etc/texmf/{}.d".format(dirname))
ctx.ui.info(_('Moving (and symlinking) \"/usr/share/texmf/{0}\" to \"/etc/texmf/{1}.d\"').format(configFile,
dirname))
ctx.ui.info(
_('Moving (and symlinking) \"/usr/share/texmf/{0}\" to \"/etc/texmf/{1}.d\"').format(configFile,
dirname))
domove("/usr/share/texmf/{0}/{1}".format(dirname,
configFile), "/etc/texmf/{}.d".format(dirname))
dosym("/etc/texmf/{0}.d/{1}".format(dirname, configFile),
......
......@@ -449,13 +449,11 @@ class ArchiveTar(ArchiveBase):
if not os.path.isdir(tarinfo.name) and not os.path.islink(tarinfo.name):
try:
os.unlink(tarinfo.name)
except:
except Exception:
# TODO: review this block
pass
self.tar.extract(tarinfo)
except IOError as e:
os.remove(tarinfo.name)
self.tar.extract(tarinfo)
except OSError as e:
# Handle the case where an upper directory cannot
# be created because of a conflict with an existing
......
......@@ -163,6 +163,7 @@ class Install(AtomicOperation):
self.store_old_paths = None
self.old_path = None
self.trigger = inary.trigger.Trigger()
self.ask_reinstall=False
def install(self, ask_reinstall=True):
......@@ -338,13 +339,13 @@ class Install(AtomicOperation):
def postinstall(self):
# Chowning for additional files
# for _file in self.package.get_files().list:
# fpath = util.join_path(ctx.config.dest_dir(), _file.path)
# if os.path.islink(fpath):
# ctx.ui.info(_("Added symlink '{}' ").format(fpath), verbose=True)
# else:
# ctx.ui.info(_("Chowning in postinstall {0} ({1}:{2})").format(_file.path, _file.uid, _file.gid), verbose=True)
# os.chown(fpath, int(_file.uid), int(_file.gid))
for _file in self.package.get_files().list:
fpath = util.join_path(ctx.config.dest_dir(), _file.path)
if os.path.islink(fpath):
if os.path.lexists(fpath) and os.path.exists(fpath):
ctx.ui.info(_("Added symlink '{}' ").format(fpath), verbose=True)
else:
ctx.ui.warning(_("Broken or missing symlink '{}'").format(fpath))
if 'postOps' in self.metadata.package.isA:
if ctx.config.get_option(
......@@ -437,12 +438,19 @@ class Install(AtomicOperation):
new_paths.append(f.path)
for old_file in self.old_files.list:
if old_file.path in new_paths:
continue
old_file_path = os.path.join(
ctx.config.dest_dir(), old_file.path)
if old_file.path in new_paths:
continue
if old_file_path not in new_paths:
if os.path.islink(old_file_path):
os.unlink(old_file_path)
continue
try:
old_file_stat = os.lstat(old_file_path)
except OSError:
......@@ -523,7 +531,7 @@ class Install(AtomicOperation):
try:
self.package.extract_file_synced(
postops, ctx.config.tmp_dir())
except:
except Exception:
pass
def store_inary_files(self):
......@@ -539,7 +547,7 @@ class Install(AtomicOperation):
try:
self.package.extract_file_synced(
postops, self.package.pkg_dir())
except:
except Exception:
pass
def update_databases(self):
......
......@@ -43,6 +43,7 @@ NB: We support only local files (e.g., /a/b/c) and http:// URIs at the moment
super(AddRepo, self).__init__(args)
self.repodb = inary.db.repodb.RepoDB()
name = ("add-repo", "ar")
def options(self):
......@@ -66,31 +67,9 @@ NB: We support only local files (e.g., /a/b/c) and http:// URIs at the moment
if len(self.args) == 2:
self.init()
name, indexuri = self.args
self.just_add = False
if ctx.get_option('no_fetch'):
if ctx.ui.confirm(_('Add \"{}\" repository without updating the database?\nBy confirming '
'this you are also adding the repository to your system without '
'checking the distribution of the repository.\n'
'Would you like to continue?').format(name)):
self.just_add = True
if indexuri.endswith(".xml.xz") or indexuri.endswith(".xml"):
repository.add_repo(name, indexuri, ctx.get_option('at'))
if not self.just_add:
try:
repository.update_repos([name])
except (inary.errors.Error, IOError) as e:
ctx.ui.info(
_("Error: {0} repository could not be reached: \n{1}").format(
name, e), color="red")
self.warn_and_remove(
_("Removing {0} from system.").format(name), name)
else:
ctx.ui.warning(
_("Couldn't trust \"{0}\" repository. It is deactivated.").format(name))
repository.set_repo_activity(name, False)
else:
raise Exception(
_("Extension of repository URI must be \".xml.xz\" or \".xml\"."))
......
......@@ -71,7 +71,7 @@ Usage: check-relation
installed.sort()
need_reinstall = []
broken_packages=[]
broken_packages = []
for pkg in installed:
for p in self.installdb.get_package(pkg).runtimeDependencies():
......@@ -79,7 +79,7 @@ Usage: check-relation
if not self.installdb.has_package(str(p.package)):
need_reinstall.append(p.package)
sys.stderr.write(
_("Missing: - {} : Needed by: - {}").format(p.package,pkg)+"\n")
_("Missing: - {} : Needed by: - {}").format(p.package, pkg)+"\n")
if self.options.force:
for pkg in installed:
......
......@@ -60,8 +60,8 @@ class Command(object):
@staticmethod
def commands_string():
s = ''
l = sorted([x.name[0] for x in Command.cmd])
for name in l:
lst = sorted([x.name[0] for x in Command.cmd])
for name in lst:
commandcls = Command.cmd_dict[name]
trans = gettext.translation('inary', fallback=True)
summary = trans.gettext(commandcls.__doc__).split('\n')[0]
......
......@@ -53,7 +53,8 @@ You can also give the name of a component.
group.add_option("--ignore-sysconf", action="store_true",
default=False, help=_("Skip sysconf operations after installation."))
group.add_option("--force-sysconf", action="store_true",
default=False, help=_("Force sysconf operations after installation. Applies all sysconf operations"))
default=False, help=_("Force sysconf operations after installation."
"Applies all sysconf operations"))
self.parser.add_option_group(group)
......
......@@ -53,7 +53,8 @@ You can also give the name of a component.
group.add_option("--ignore-sysconf", action="store_true",
default=False, help=_("Skip sysconf operations after installation."))
group.add_option("--force-sysconf", action="store_true",
default=False, help=_("Force sysconf operations after installation. Applies all sysconf operations"))
default=False, help=_("Force sysconf operations after installation. "
"Applies all sysconf operations"))
self.parser.add_option_group(group)
......
......@@ -68,8 +68,7 @@ Lists previous operations.""")
history.takeback(operation)
def print_history(self):
ordered_history = []
ordered_history.append(_("Inary Transaction History: "))
ordered_history = [_("Inary Transaction History: ")]
for operation in self.historydb.get_last(ctx.get_option('last')):
msg_oprt = util.colorize(_("Operation "), 'yellow') \
......
......@@ -61,7 +61,8 @@ expanded to package names.
group.add_option("--ignore-sysconf", action="store_true",
default=False, help=_("Skip sysconf operations after installation."))
group.add_option("--force-sysconf", action="store_true",
default=False, help=_("Force sysconf operations after installation. Applies all sysconf operations"))
default=False, help=_("Force sysconf operations after installation."
"Applies all sysconf operations"))
group.add_option("-c", "--component", action="append",
default=None, help=_("Install component's and recursive components' packages."))
group.add_option("-r", "--repository", action="store",
......
......@@ -80,21 +80,21 @@ all repositories.
component = ctx.get_option('component')
if component:
try:
l = self.componentdb.get_packages(
packages = self.componentdb.get_packages(
component, repo=repo, walk=True)
except BaseException:
return
else:
l = self.packagedb.list_packages(repo)
packages = self.packagedb.list_packages(repo)
installed_list = inary.db.installdb.InstallDB().list_installed()
# maxlen is defined dynamically from the longest package name (#9021)
if l:
maxlen = max([len(_p) for _p in l])
if packages:
maxlen = max([len(_p) for _p in packages])
l.sort()
for p in l:
packages.sort()
for p in packages:
if ctx.config.get_option('uninstalled') and p in installed_list:
continue
......
......@@ -50,11 +50,11 @@ repositories.
self.init(database=True, write=False)
l = self.componentdb.list_components(ctx.get_option('repository'))
if l:
maxlen = max([len(_p) for _p in l])
l.sort()
for p in l:
components = self.componentdb.list_components(ctx.get_option('repository'))
if components:
maxlen = max([len(_p) for _p in components])
components.sort()
for p in components:
component = self.componentdb.get_component(p)
if self.options.long:
ctx.ui.info(str(component))
......
......@@ -71,8 +71,8 @@ packages from all repositories.
int(ctx.config.get_option('last')))
else:
since = None
l = self.packagedb.list_newest(repo, since, self.historydb)
if not l:
newests = self.packagedb.list_newest(repo, since, self.historydb)
if not newests:
return
if since:
ctx.ui.info(
......@@ -82,10 +82,10 @@ packages from all repositories.
ctx.ui.info(_("Packages added to \'{}\':").format(repo))
# maxlen is defined dynamically from the longest package name (#9021)
maxlen = max([len(_p) for _p in l])
maxlen = max([len(_p) for _p in newests])
l.sort()
for p in l:
newests.sort()
for p in newests:
pkgsum = self.packagedb.get_summary(p)
lenp = len(p)
p += ' ' * max(0, maxlen - lenp)
......
......@@ -48,15 +48,15 @@ Gives a brief list of sources published in the repositories.
self.init(database=True, write=False)
l = self.sourcedb.list_sources()
sources = self.sourcedb.list_sources()
if l:
if sources:
maxlen = max([len(_p) for _p in l])
installed_list = inary.db.sourcedb.SourceDB().list_sources()
l.sort()
sources.sort()
for p in l:
for p in sources:
sf, repo = self.sourcedb.get_spec_repo(p)
if self.options.long:
ctx.ui.info(_('[Repository: ') + repo + ']')
......
......@@ -47,7 +47,8 @@ expanded to package names.
group.add_option("--ignore-sysconf", action="store_true",
default=False, help=_("Skip sysconf operations after installation."))
group.add_option("--force-sysconf", action="store_true",
default=False, help=_("Force sysconf operations after installation. Applies all sysconf operations"))
default=False, help=_("Force sysconf operations after installation."
"Applies all sysconf operations"))
group.add_option("--purge", action="store_true",
default=False, help=_("Removes everything including changed config files of the package."))
group.add_option("-c", "--component", action="append",
......
......@@ -48,7 +48,8 @@ Remove all orphaned packages from the system.
group.add_option("--ignore-sysconf", action="store_true",
default=False, help=_("Skip sysconf operations after installation."))
group.add_option("--force-sysconf", action="store_true",
default=False, help=_("Force sysconf operations after installation. Applies all sysconf operations"))
default=False, help=_("Force sysconf operations after installation."
"Applies all sysconf operations"))
self.parser.add_option_group(group)
......
......@@ -54,8 +54,6 @@ expanded to package names.
super(Upgrade, self).options(group)
group.add_option("--security-only", action="store_true",
default=False, help=_("Security related package upgrades only."))
group.add_option("-b", "--bypass-update-repo", action="store_true",
default=False, help=_("Do not update repositories."))
group.add_option("--ignore-file-conflicts", action="store_true",
default=False, help=_("Ignore file conflicts."))
group.add_option("--ignore-package-conflicts", action="store_true",
......@@ -63,7 +61,8 @@ expanded to package names.
group.add_option("--ignore-sysconf", action="store_true",
default=False, help=_("Skip sysconf operations after installation."))
group.add_option("--force-sysconf", action="store_true",
default=False, help=_("Force sysconf operations after installation. Applies all sysconf operations"))
default=False, help=_("Force sysconf operations after installation. "
"Applies all sysconf operations"))
group.add_option("--preserve-permanent", action="store_true",
default=False,
help=_("Preserves permanent tagged files on upgrade action "
......@@ -97,13 +96,6 @@ expanded to package names.
else:
self.init()
if not ctx.get_option('bypass_update_repo'):
ctx.ui.info(_('Updating repositories.'), color='green')
repos = inary.db.repodb.RepoDB().list_repos(only_active=True)
repository.update_repos(repos)
else:
ctx.ui.info(_('Will not update repositories.'))
reposit = ctx.get_option('repository')
components = ctx.get_option('component')
packages = []
......
......@@ -176,7 +176,6 @@ class Index(xmlfile.XmlFile, metaclass=autoxml.autoxml):
pool.terminate()
pool.join()
ctx.ui.info("")
raise
latest_packages = []
......
......@@ -70,8 +70,10 @@ class PGraph:
if self.installdb.has_package(dep):
self.packages.append(dep)
else:
if self.installdb.has_package(pkg) and not self.reinstall:
return
if self.installdb.has_package(pkg):
if self.packagedb.get_package(pkg).release == self.installdb.get_package(pkg).release:
if not self.reinstall:
return
if pkg not in self.packages:
self.packages.append(pkg)
for dep in self.packagedb.get_package(pkg).runtimeDependencies():
......
......@@ -37,7 +37,7 @@ def flush_caches():
def update_caches():
# Updates ondisk caches
# Updates on disk caches
for db in [packagedb.PackageDB(), sourcedb.SourceDB(), componentdb.ComponentDB(),
installdb.InstallDB(), groupdb.GroupDB()]:
if db.is_initialized():
......@@ -47,6 +47,9 @@ def update_caches():
def regenerate_caches():
flush_caches()
# Force cache regeneration
for db in [packagedb.PackageDB(), sourcedb.SourceDB(),
try:
for db in [packagedb.PackageDB(), sourcedb.SourceDB(),
componentdb.ComponentDB(), groupdb.GroupDB()]:
db.cache_regenerate()
db.cache_regenerate()
except Exception: # TODO: warning message needed
pass
......@@ -139,7 +139,7 @@ class HistoryDB(lazydb.LazyDB):
yield hist.operation
def get_last_repo_update(self, last=1):
repoupdates = [l for l in self.__logs if l.endswith("repoupdate.xml")]
repoupdates = [log for log in self.__logs if log.endswith("repoupdate.xml")]
repoupdates.reverse()
if last != 1 and len(repoupdates) <= last:
......
......@@ -175,7 +175,7 @@ class RepoDB(lazydb.LazyDB):
index_path = os.path.splitext(index_path)[0]
if not os.path.exists(index_path):
#ctx.ui.warning(_("{} repository needs to be updated").format(repo_name))
# ctx.ui.warning(_("{} repository needs to be updated").format(repo_name))
return xmlext.newDocument("INARY")
try:
......@@ -291,8 +291,13 @@ class RepoDB(lazydb.LazyDB):
if not compatible:
self.deactivate_repo(name)
raise IncompatibleRepoError(
_("Repository \"{}\" is not compatible with your distribution. Repository is disabled.\nYour distribution is {} release {}\nRepository distribution is {} release {}\n\nIf you want add this repository please use \"--ignore-check\" parameter with this command.").format(name,
ctx.config.values.general.distribution,
ctx.config.values.general.distribution_release,
dist_name,
dist_release))
_("Repository \"{}\" is not compatible with your distribution. Repository is disabled."
"Your distribution is {} release {}"
"Repository distribution is {} release {}\n"
"If you want add this repository please use \"--ignore-check\" parameter with this command.").format(
name,
ctx.config.values.general.distribution,
ctx.config.values.general.distribution_release,
dist_name,
dist_release)
)
......@@ -269,7 +269,12 @@ class Fetcher:
c.close()
def _get_wget(self):
return os.system("busybox wget -c --user-agent \"{}\" \"{}\" -O \"{}\" 2>&1".format(self.useragent, self.url.get_uri(), self.partial_file))
return os.system("busybox wget -c --user-agent \"{}\" \"{}\" -O \"{}\" 2>&1".format(
self.useragent,
self.url.get_uri(),
self.partial_file
)
)
def _get_requests(self):
from requests import get
......@@ -397,7 +402,8 @@ class Fetcher:
return True
else:
ctx.ui.debug(
_("Server doesn't support partial downloads. Previously downloaded part of the file will be over-written."))
_("Server doesn't support partial downloads."
"Previously downloaded part of the file will be over-written."))
os.remove(self.partial_file)
return False
......
......@@ -9,7 +9,9 @@
#
# Please read the COPYING file.
#
def sort_bubble(array=[], reverse=False):
def sort_bubble(array=None, reverse=False):
if array is None:
array = []
mlen = len(array)
cout_i = 0
while cout_i < mlen:
......@@ -76,7 +78,9 @@ def sort_min_max(x, reverse=False):
return array
def sort_auto(array=[], reverse=False):
def sort_auto(array=None, reverse=False):
if array is None:
array = []
if len(array) <= 10:
return sort_bubble(array, reverse)
elif len(array) <= 500:
......
......@@ -210,22 +210,34 @@ class Builder:
_("Source \"{}\" not found in any active repository.").format(name))
def __init__(self, specuri):
self.emerge=False
if "://" in specuri:
self.emerge=True
self.componentdb = inary.db.componentdb.ComponentDB()
self.installdb = inary.db.installdb.InstallDB()
self.specuri=specuri
self.specdiruri = os.path.dirname(self.specuri)
if len(self.specdiruri) > 0 and self.emerge:
self.pkgname = os.path.basename(self.specdiruri)
self.destdir = util.join_path(ctx.config.tmp_dir(), self.pkgname)
else:
self.destdir=None
# process args
if not isinstance(specuri, inary.uri.URI):
specuri = inary.uri.URI(specuri)
if self.emerge:
self.fetch_pspecfile()
# read spec file, we'll need it :)
self.set_spec_file(specuri)
self.spec = self.set_spec_file(specuri)
if specuri.is_remote_file():
self.specdir = self.fetch_files()
else:
self.specdir = os.path.dirname(self.specuri.get_uri())
# Don't wait until creating .inary file for complaining about versioning
# scheme errors
self.package_rfp = None
......@@ -245,7 +257,10 @@ class Builder:
self.target_package_format = ctx.get_option("package_format") \
or inary.package.Package.default_format
self.read_translations(self.specdir)
try:
self.read_translations(self.specdir)
except Exception:
ctx.ui.output(_("Translation cannot readed.")+"\n")
self.sourceArchives = inary.archive.SourceArchives(self.spec)
......@@ -270,15 +285,20 @@ class Builder:
self.has_ccache = False
self.has_icecream = False
self.variable_buffer = {}
self.destdir=os.getcwd()
def set_spec_file(self, specuri):
if not specuri.is_remote_file():
# FIXME: doesn't work for file://
specuri = inary.uri.URI(os.path.realpath(specuri.get_uri()))
self.specuri = specuri
spec = Specfile.SpecFile()
spec.read(self.specuri, ctx.config.tmp_dir())
self.spec = spec
if self.emerge:
spec.read("{}/{}".format(self.destdir,ctx.const.pspec_file),self.specuri)
else:
spec.read(self.specuri, ctx.config.tmp_dir())
return spec
def read_translations(self, specdir):
self.spec.read_translations(util.join_path(specdir,
......@@ -356,7 +376,12 @@ class Builder:
self.check_patches()
self.check_build_dependencies()
self.fetch_component()
try:
self.fetch_component()
except:
ctx.ui.output(_("Component cannot readed.")+"\n")
self.fetch_source_archives()
util.clean_dir(self.pkg_install_dir())
......@@ -468,9 +493,6 @@ class Builder:
os.environ["CCACHE_DIR"] = "/tmp/.ccache"
def fetch_files(self):
self.specdiruri = os.path.dirname(self.specuri.get_uri())
pkgname = os.path.basename(self.specdiruri)
self.destdir = util.join_path(ctx.config.tmp_dir(), pkgname)
# self.location = os.path.dirname(self.url.uri)
self.fetch_actionsfile()
......@@ -479,7 +501,6 @@ class Builder:
self.fetch_patches()
self.fetch_additionalFiles()
self.fetch_postops()
return self.destdir
def fetch_pspecfile(self):
......@@ -523,9 +544,8 @@ class Builder:
def fetch_postops(self):
for postops in ctx.const.postops:
postops_script = util.join_path(self.specdiruri, postops)
if util.check_file(postops_script, noerr=True):
self.download(postops_script, util.join_path(self.specdir))
ctx.ui.info(_("PostOps Script Fetched."))
if util.check_file(postops_script, noerr=True) or "://" in postops_script:
self.download(postops_script, util.join_path(self.destdir))
@staticmethod
def download(uri, transferdir):
......@@ -703,11 +723,11 @@ class Builder:
"""Returns the real path of WorkDir for an unpacked archive."""
dirname = self.get_action_variable("WorkDir", "")
src_list=os.listdir(self.pkg_work_dir())
src_list = os.listdir(self.pkg_work_dir())
if "inaryBuildState" in src_list:
src_list.remove("inaryBuildState")
if dirname == "":
if len(src_list)==1:
if len(src_list) == 1:
dirname = src_list[0]
else:
dirname = self.spec.source.name + "-" + self.spec.getSourceVersion()
......@@ -731,6 +751,11 @@ class Builder:
# we'll need our working directory after actionscript
# finished its work in the archive source directory.
curDir = os.getcwd()
self.specdiruri = os.path.dirname(self.specuri.get_uri())
pkgname = os.path.basename(self.specdiruri)
self.destdir = util.join_path(ctx.config.tmp_dir(), pkgname)
if os.path.exists(self.destdir):
curDir=self.destdir
src_dir = self.pkg_src_dir()
self.set_environment_vars()
os.environ['WORK_DIR'] = src_dir
......@@ -1057,7 +1082,10 @@ package might be a good solution."))
fileinfo = witcher(filepath).name
except ValueError:
ctx.ui.warning(
_("File \"{}\" might be a broken symlink. Check it before publishing package.".format(filepath)))
_("File \"{}\" might be a broken symlink. Check it before publishing package.".format(
filepath)
)
)
fileinfo = "broken symlink"
ctx.ui.info(
_("\'magic\' return of \"{0}\" is \"{1}\"").format(
......@@ -1066,8 +1094,11 @@ package might be a good solution."))
result = util.run_batch(
"file {}".format(filepath), ui_debug=False)
if result[0]:
ctx.ui.error(_("\'file\' command failed with return code {0} for file: \"{1}\"").format(result[0], filepath) +
_("Output:\n{}").format(result[1]))
ctx.ui.error(_("\'file\' command failed with return code {0} for file: \"{1}\"").format(
result[0],
filepath) +
_("Output:\n{}").format(result[1])
)
fileinfo = str(result[1])
ctx.ui.info(
......@@ -1090,7 +1121,10 @@ package might be a good solution."))
doc_ptrn = re.compile(ctx.const.doc_package_end)
self.fetch_component() # bug 856
try:
self.fetch_component()
except:
ctx.ui.output(_("Component cannot readed.")+"\n")
ctx.ui.status(
_("Running file actions: \"{}\"").format(
self.spec.source.name), push_screen=True)
......
......@@ -46,13 +46,17 @@ def create_delta_packages_from_obj(old_packages, new_package_obj, specdir):
if old_pkg_info.name != new_pkg_info.name:
ctx.ui.warning(
_("The file \"{0}\" belongs to a different package other than '{1}'. Skipping it...").format(old_package,
new_pkg_info.name))
_("The file \"{0}\" belongs to a different package other than '{1}'. Skipping it...").format(
old_package,
new_pkg_info.name
)
)
continue
if old_pkg_info.release == new_pkg_info.release:
ctx.ui.warning(
_("Package \"{}\" has the same release number with the new package. Skipping it...").format(old_package))
_("Package \"{}\" has the same release number with the new package. Skipping it...").format(old_package)
)
continue
delta_name = "-".join((old_pkg_info.name,
......@@ -71,9 +75,8 @@ def create_delta_packages_from_obj(old_packages, new_package_obj, specdir):
files_delta = find_delta(old_pkg_files, new_pkg_files)
if len(files_delta) == len(new_pkg_files.list):
ctx.ui.warning(_(
"All files in the package \"{}\" are different from the files in the new package. Skipping it...").format(
old_package))
ctx.ui.warning(
_("All files in the package \"{}\" are different from the files in the new package. Skipping it...").format(old_package))
continue
delta_pkg = inary.package.Package(
......@@ -84,7 +87,7 @@ def create_delta_packages_from_obj(old_packages, new_package_obj, specdir):
for postops in ctx.const.postops:
try:
delta_pkg.add_to_package(ctx.const.postops)
except:
except Exception:
pass
# add xmls and files
......
......@@ -21,6 +21,7 @@ import inary.data
import inary.ui as ui
import inary.util as util
import inary.context as ctx
import inary.data.pgraph as pgraph
import inary.operations as operations
import inary.atomicoperations as atomicoperations
......@@ -79,7 +80,6 @@ installed in the respective order to satisfy dependencies:
atomicoperations.install_single_name(x)
# ctx.ui.notify(ui.packagestogo, order = order_build)
for x in order_build:
package_names = operations.build.build(x).new_packages
inary.operations.install.install_pkg_files(
......@@ -95,11 +95,12 @@ installed in the respective order to satisfy dependencies:
def plan_emerge(A):
sourcedb = inary.db.sourcedb.SourceDB()
installdb = inary.db.installdb.InstallDB()
# try to construct a inary graph of packages to
# install / reinstall
G_f = inary.data.pgraph.Digraph()
G_f = pgraph.PGraph(sourcedb, installdb)
def get_spec(name):
if sourcedb.has_spec(name):
......@@ -113,8 +114,9 @@ def plan_emerge(A):
def add_src(src):
if not str(src.name) in G_f.vertices():
G_f.add_vertex(str(src.name), (src.version, src.release))
# TODO replace this shitty way with a function
G_f.packages.append(src.name)
def pkgtosrc(pkg):
return sourcedb.pkgtosrc(pkg)
......@@ -140,11 +142,8 @@ def plan_emerge(A):
install_list.add(dep.package)
return
srcdep = pkgtosrc(dep.package)
if srcdep not in G_f.vertices():
Bp.add(srcdep)
add_src(get_src(srcdep))
if not src.name == srcdep: # firefox - firefox-devel thing
G_f.add_edge(src.name, srcdep)
print(dep.package)
G_f.packages.append(dep.package)
for builddep in src.buildDependencies:
process_dep(builddep)
......
......@@ -256,7 +256,8 @@ def show_changed_configs(package_dict, opt):
for package in package_dict:
if package_dict[package]:
if ctx.ui.confirm(util.colorize(
_("[?] Would you like to see changes in config files of \"{0}\" package").format(package), color='brightyellow')):
_("[?] Would you like to see changes in config files of \"{0}\" package").format(package),
color='brightyellow')):
for file in package_dict[package]:
new_file = util.join_path(
ctx.config.history_dir(), opt, package, ctx.config.dest_dir(), file)
......
......@@ -211,13 +211,19 @@ def install_pkg_files(package_URIs, reinstall=False):
for x in list(d_t.keys()):
pkg = d_t[x]
if pkg.distributionRelease > ctx.config.values.general.distribution_release:
raise Exception(_('Package \"{0}\" is not compatible with your distribution release \'{1}\' \'{2}\'.').format(
x, ctx.config.values.general.distribution,
ctx.config.values.general.distribution_release))
raise Exception(
_('Package \"{0}\" is not compatible with your distribution release \'{1}\' \'{2}\'.').format(
x,
ctx.config.values.general.distribution,
ctx.config.values.general.distribution_release)
)
if pkg.architecture != ctx.config.values.general.architecture:
raise Exception(
_('Package \"{0}\" (\'{1}\') is not compatible with your \'{2}\' architecture.').format(x, pkg.architecture,
ctx.config.values.general.architecture))
_('Package \"{0}\" (\'{1}\') is not compatible with your \'{2}\' architecture.').format(
x,
pkg.architecture,
ctx.config.values.general.architecture)
)
def satisfiesDep(dep):
# is dependency satisfied among available packages
......
......@@ -140,7 +140,6 @@ def plan_remove(A):
Bp = set()
for x in B:
G_f.add_package_revdep(x)
#IDEA: Optimize
if ctx.config.values.general.allow_docs:
doc_package = x + ctx.const.doc_package_end
if packagedb.has_package(doc_package):
......
......@@ -84,15 +84,11 @@ def set_repo_activity(name, active):
def update_repos(repos, force=False):
inary.db.historydb.HistoryDB().create_history("repoupdate")
updated = False
try:
for repo in repos:
updated |= __update_repo(repo, force)
finally:
if updated:
ctx.ui.info(_('Regenerating database caches...'), verbose=True)
for repo in repos:
updated = __update_repo(repo, force)
if updated :
inary.db.regenerate_caches()
@util.locked
def update_repo(repo, force=False):
inary.db.historydb.HistoryDB().create_history("repoupdate")
......
......@@ -184,7 +184,6 @@ def upgrade(A=None, repo=None):
return True
ctx.ui.debug('A = {}'.format(str(A)))
if not ctx.config.get_option('ignore_dependency'):
order = plan_upgrade(A, replaces=replaces)
else:
......@@ -224,7 +223,11 @@ def upgrade(A=None, repo=None):
paths = []
extra_paths = {}
lndig = math.floor(math.log(len(order), 10)) + 1
try:
lndig = math.floor(math.log(len(order), 10)) + 1
except ValueError:
lndig = 1
for x in order:
ctx.ui.info(_("Downloading") +
str(" [ {:>" +
......@@ -280,13 +283,10 @@ def plan_upgrade(A, force_replaced=True, replaces=None):
# install / reinstall
G_f = pgraph.PGraph() # construct G_f
installdb = G_f.get_installdb()
packagedb = G_f.get_packagedb()
A = set(A)
# Force upgrading of installed but replaced packages or else they will be removed (they are obsoleted also).
A = set(A) # Force upgrading of installed but replaced packages or else they will be removed (they are obsoleted also).
# This is not wanted for a replaced driver package (eg. nvidia-X).
#
# FIXME: this is also not nice. this would not be needed if replaced packages are not written as obsoleted also.
......@@ -379,6 +379,7 @@ def plan_upgrade(A, force_replaced=True, replaces=None):
order = G_f.topological_sort()
order.reverse()
return order
......
......@@ -112,7 +112,8 @@ class Package:
# Bug 3465
if ctx.get_option('reinstall'):
raise Error(_(
"There was a problem while fetching \"{}\".\nThe package may have been upgraded. Please try to upgrade the package.").format(
"There was a problem while fetching \"{}\"."
"The package may have been upgraded. Please try to upgrade the package.").format(
url))
raise
else:
......
......@@ -815,7 +815,7 @@ class autoxml(oo.autosuper, oo.autoprop):
return []
def decode(node, errs, where):
l = []
lst = []
nodes = xmlext.getAllNodes(node, path)
# print node, tag + '/' + comp_tag, nodes
if len(nodes) == 0 and req == mandatory:
......@@ -825,14 +825,14 @@ class autoxml(oo.autosuper, oo.autoprop):
for node in nodes:
dummy = xmlext.newNode(node, "Dummy")
xmlext.addNode(dummy, '', node)
l.append(decode_item(
lst.append(decode_item(
dummy, errs, where + str("[{}]".format(ix))))
ix += 1
return l
return lst
def encode(node, l, errs):
if l:
for item in l:
def encode(node, lst, errs):
if lst:
for item in lst:
if list_tagpath:
listnode = xmlext.addNode(
node, list_tagpath, branch=False)
......@@ -846,17 +846,17 @@ class autoxml(oo.autosuper, oo.autoprop):
_('Mandatory list "{0}" under "{1}" node is empty.').format(
path, node.name()))
def errors(l, where):
def errors(lst, where):
errs = []
ix = 1
for node in l:
for node in lst:
errs.extend(errors_item(node, where + '[{}]'.format(ix)))
ix += 1
return errs
def format(l, f, errs):
l.sort()
for node in l:
def format(lst, f, errs):
lst.sort()
for node in lst:
format_item(node, f, errs)
f.add_literal_data(' ')
......
......@@ -111,6 +111,7 @@ def proceed(force=False):
t("exebin", "/bin", "chmod +x -R /bin")
t("libexec", "/usr/libexec", "chmod +x -R /usr/libexec")
t("ca-certficates", "/etc/ssl/certs", "update-ca-certificates --fresh")
t("profile.env", "/etc/env.d", "env-update")
t("cracklib", "/usr/share/cracklib/",
"create-cracklib-dict /usr/share/cracklib/*")
sys.stdout.write("\n")
......
......@@ -47,8 +47,8 @@ class Trigger:
else:
cmd_extra = " > /dev/null"
ret_val = os.system(
'bash --noprofile --norc -c \'source postoperations.sh ; if declare -F {0} &>/dev/null ; then {0} ; fi\''.format(func) +
cmd_extra)
'bash --noprofile --norc -c \'source postoperations.sh ; if declare -F {0} &>/dev/null ; then {0} ; fi\''.format(func) + cmd_extra
)
os.chdir(curDir)
if (ret_val != 0):
return False
......@@ -67,9 +67,10 @@ class Trigger:
"brightyellow")))
else:
cmd_extra = " > /dev/null"
ret_val = os.system(
'python3 -c \'import postoperations\nif(hasattr(postoperations,"{0}")):\n postoperations.{0}()\''.format(func) +
cmd_extra)
'python3 -c \'import postoperations\nif(hasattr(postoperations,"{0}")):\n postoperations.{0}()\''.format(func) + cmd_extra
)
os.chdir(curDir)
if (ret_val != 0):
return False
......
......@@ -14,20 +14,22 @@
"""misc. utility functions, including process and file utils"""
# Inary Modules
from inary.util.strings import *
from inary.util.process import *
from inary.util.path import *
from inary.util.package import *
from inary.util.misc import *
from inary.util.kernel import *
from inary.util.filesystem_terminal import *
from inary.util.files import *
from inary.util.curses import *
import fcntl
import inary
import inary.errors
import inary.context as ctx
import inary.errors
from inary.util.curses import *
from inary.util.files import *
from inary.util.filesystem import *
from inary.util.kernel import *
from inary.util.misc import *
from inary.util.package import *
from inary.util.path import *
from inary.util.process import *
# Inary Modules
from inary.util.strings import *
from inary.util.terminal import *
from inary.util.terminal import *
# Gettext Library
import gettext
......
......@@ -60,7 +60,7 @@ def mvprintw(x, y, msg=''):
def noecho(enabled=True):
    """Show or hide the terminal cursor via ANSI escape codes.

    Args:
        enabled: when True hide the cursor ("\\x1b[?25l"), when False
            show it again ("\\x1b[?25h").

    No-op when the 'no_color' option is set (plain-output mode).
    """
    # Diff artifact removed: both "if(enabled):" and "if enabled:" lines
    # were present, which is not valid Python. Kept the modern form.
    if not ctx.get_option('no_color'):
        if enabled:
            printw("\x1b[?25l")
        else:
            printw("\x1b[?25h")
......@@ -68,51 +68,51 @@ def noecho(enabled=True):
def attron(attribute):
    """Attribute enable.

    Write the ANSI SGR escape sequence for *attribute* to stdout and
    flush. Unknown attribute names write nothing but still flush,
    matching the original elif-chain behavior.
    """
    # Dispatch table replaces the long duplicated if/elif chain from the
    # diff (each condition appeared twice: "if(x):" and "if x:").
    _codes = {
        "A_NORMAL": "\x1b[;0m",
        "A_UNDERLINE": "\x1b[4m",
        "A_REVERSE": "\x1b[7m",
        "A_BLINK": "\x1b[5m",
        "A_DIM": "\x1b[2m",
        "A_BOLD": "\x1b[1m",
        "A_INVIS": "\x1b[8m",
        "C_BLACK": "\x1b[30m",
        "C_RED": "\x1b[31m",
        "C_GREEN": "\x1b[32m",
        "C_YELLOW": "\x1b[33m",
        "C_BLUE": "\x1b[34m",
        "C_MAGENTA": "\x1b[35m",
        # BUG FIX: original wrote the malformed sequence "\x1b374m";
        # white foreground is SGR 37.
        "C_CYAN": "\x1b[36m",
        "C_WHITE": "\x1b[37m",
        "B_BLACK": "\x1b[40m",
        "B_RED": "\x1b[41m",
        "B_GREEN": "\x1b[42m",
        "B_YELLOW": "\x1b[43m",
        "B_BLUE": "\x1b[44m",
        "B_MAGENTA": "\x1b[45m",
        "B_CYAN": "\x1b[46m",
        "B_WHITE": "\x1b[47m",
    }
    code = _codes.get(attribute)
    if code is not None:
        sys.stdout.write(code)
    sys.stdout.flush()
......
......@@ -14,20 +14,20 @@
"""misc. utility functions, including process and file utils"""
from inary.errors import FileError, FilePermissionDeniedError, Error
from inary.util.strings import remove_prefix
from inary.util.process import run_batch
from inary.util.path import join_path
import fnmatch
import hashlib
import shutil
import os
import re
import shutil
# Inary Modules
import inary
import inary.errors
import inary.context as ctx
import inary.errors
from inary.errors import FileError, FilePermissionDeniedError, Error
from inary.util.path import join_path
from inary.util.process import run_batch
from inary.util.strings import remove_prefix
# Gettext Library
import gettext
......@@ -315,11 +315,11 @@ def do_patch(sourceDir, patchFile, level=0, name=None, reverse=False):
with open(patchFile) as patchfile:
lines = patchfile.readlines()
try:
paths_m = [l.strip().split()[1]
for l in lines if l.startswith("---") and "/" in l]
paths_m = [line.strip().split()[1]
for line in lines if line.startswith("---") and "/" in line]
try:
paths_p = [l.strip().split()[1]
for l in lines if l.startswith("+++")]
paths_p = [line.strip().split()[1]
for line in lines if line.startswith("+++")]
except IndexError:
paths_p = []
except IndexError:
......@@ -328,8 +328,8 @@ def do_patch(sourceDir, patchFile, level=0, name=None, reverse=False):
if not paths_p:
paths_p = paths_m[:]
try:
paths_m = [l.strip().split()[1]
for l in lines if l.startswith("***") and "/" in l]
paths_m = [line.strip().split()[1]
for line in lines if line.startswith("***") and "/" in line]
except IndexError:
pass
......@@ -341,7 +341,7 @@ def do_patch(sourceDir, patchFile, level=0, name=None, reverse=False):
if level is None and len(
paths_m) - 1 == paths_m.index(path_m):
level = check_patch_level(sourceDir, path_m)
if not level is None:
if level is not None:
ctx.ui.info(
_("Detected patch level={0} for {1}").format(
level, os.path.basename(patchFile)), verbose=True)
......
# -*- coding: utf-8 -*-
#
# Main fork Pisi: Copyright (C) 2005 - 2011, Tubitak/UEKAE
#
# Copyright (C) 2016 - 2020, Suleyman POYRAZ (Zaryob)
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
"""misc. utility functions, including filesystem utils"""
# Inary Modules
import os
import inary.context as ctx
# Gettext Library
import gettext
__trans = gettext.translation('inary', fallback=True)
_ = __trans.gettext
########################
# Filesystem functions #
########################
def fs_sync():
    """Flush kernel filesystem buffers, honoring the fs_sync option."""
    # Early return keeps the happy path unindented; nothing happens when
    # general.fs_sync is disabled in the configuration.
    if not ctx.config.values.general.fs_sync:
        return
    ctx.ui.debug(
        _("Filesystem syncing (It wouldn't be run whether nosync set with kernel parameters)"))
    os.sync()
......@@ -18,8 +18,6 @@ import os
# Inary Modules
import platform
import inary
import inary.errors
import inary.context as ctx
# Gettext Library
......@@ -75,13 +73,18 @@ def get_cpu_count():
def getenv(key):
    """Return the value of environment variable *key*, or "" when unset.

    Args:
        key: name of the environment variable.

    Returns:
        The variable's value as a string, or the empty string if the
        variable is not set.
    """
    # BUG FIX: original ended with "return os.getenv[key]" — subscripting
    # the os.getenv function raises TypeError. Call it instead. The
    # duplicated diff lines ("== None" / "is None") are also collapsed.
    value = os.getenv(key)
    if value is None:
        return ""
    return value
def get_vm_info():
vm_info = {}
try:
import subprocess
except ImportError:
ctx.ui.error(_("A problem occurred caused by your python installation."
"No \"subprocess\" module caught. Probably your python installation corrupted."))
if platform.system() == 'Linux':
try:
......
......@@ -16,9 +16,8 @@
# Inary Modules
from inary.util.strings import *
import inary
import inary.errors
import inary.context as ctx
from inary.errors import Error
# Gettext Library
import gettext
......
......@@ -14,15 +14,11 @@
"""misc. utility functions, including process and file utils"""
from inary.util.strings import *
import struct
from functools import reduce
import os
# Inary Modules
import inary
import inary.errors
import inary.context as ctx
from inary.util.strings import *
# Gettext Library
import gettext
......
......@@ -15,13 +15,11 @@
"""misc. utility functions, including process and file utils"""
# Inary Modules
from inary.util.filesystem_terminal import get_terminal_size
from inary.util.terminal import get_terminal_size
import operator
from inary.util.filesystem_terminal import *
from inary.util.terminal import *
from functools import reduce
import inary
import inary.errors
import inary.context as ctx
# Gettext Library
import gettext
......@@ -56,49 +54,49 @@ def unzip(seq):
return list(zip(*seq))
def concat(lst):
    """Concatenate a list of lists into a single list.

    Raises TypeError on an empty input (reduce with no initial value),
    matching the original behavior.
    """
    # Diff artifact removed: both the old "def concat(l):" and new
    # "def concat(lst):" signatures were present.
    return reduce(operator.concat, lst)
def multisplit(m_str, chars):
    """Split *m_str* on every character in *chars*.

    Args:
        m_str: the string to split.
        chars: an iterable of single-character separators.

    Returns:
        A list of fragments; with empty *chars* it is just [m_str].
    """
    # Duplicated old/new diff lines collapsed. A flat comprehension
    # replaces the call to the sibling concat() helper — same result,
    # one fewer internal dependency.
    ms = [m_str]
    for c in chars:
        ms = [piece for x in ms for piece in x.split(c)]
    return ms
def same(lst):
    """Check if all elements of a sequence are equal.

    Returns True for an empty sequence.
    """
    # BUG FIX: the original used lst.pop(), destructively removing the
    # last element from the caller's list. Compare without mutating.
    # (Duplicated old/new diff lines also collapsed.)
    if not lst:
        return True
    first = lst[0]
    return all(x == first for x in lst)
def flatten_list(lst):
    """Flatten a list of lists into a single list.

    Only one level is flattened; elements of the sublists are kept as-is.
    """
    # Diff artifact removed (old/new def and return lines were both
    # present). List comprehension is the fastest flat-map here.
    return [item for sublist in lst for item in sublist]
def unique_list(lst):
    """Create a list of unique items by deleting duplicates.

    Note: set() does not preserve the original element order; the
    result's ordering is unspecified, as in the original.
    """
    # Collapsed the duplicated diff lines and the redundant
    # intermediate set -> list -> comprehension-copy chain.
    return list(set(lst))
def strlist(lst):
    """Concatenate string representations of lst's elements.

    Each element is str()-ed and followed by a single space, including
    a trailing space after the last element (original behavior).
    """
    # Diff artifact removed: both old and new def/return lines were present.
    return "".join([str(x) + ' ' for x in lst])
def prefix(a, b):
......@@ -149,7 +147,10 @@ def human_readable_rate(size=0):
def format_by_columns(strings, sep_width=2):
longest_str_len = len(max(strings, key=len))
if(len(strings)>0):
longest_str_len = len(max(strings, key=len))
else:
longest_str_len = 0
term_columns = get_terminal_size()[1]
def get_columns(max_count):
......
......@@ -20,9 +20,6 @@ import termios
import fcntl
import sys
import os
import inary
import inary.errors
import inary.context as ctx
# Gettext Library
import gettext
......@@ -30,17 +27,6 @@ __trans = gettext.translation('inary', fallback=True)
_ = __trans.gettext
########################
# Filesystem functions #
########################
def fs_sync():
    """Flush kernel filesystem buffers when the fs_sync option is on."""
    # NOTE(review): this copy lives in the terminal module and this diff
    # removes it in favor of inary/util/filesystem.py; kept verbatim.
    if ctx.config.values.general.fs_sync:
        ctx.ui.debug(
            _("Filesystem syncing (It wouldn't be run whether nosync set with kernel parameters)"))
        os.sync()
######################
# Terminal functions #
######################
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
......@@ -29,7 +29,7 @@ _ = __trans.gettext
try:
import ciksemel
except:
except Exception:
#FIXME: Gorunusu guzel olsa bile kodda anlamsizlik yaratiyor
warn = inary.util.colorize(_("WARNING:\n"),"blinkingred")+ \
inary.util.colorize(_("\tCiksemel XML Parser not found!!!\n"
......
......@@ -104,9 +104,9 @@ class BuildPo(build):
# Update PO files
# FIXME: enable this block
# for item in glob.glob1("po", "*.po"):
# print("Updating .. ", item)
# os.system("msgmerge --update --no-wrap --sort-by-file po/{0} po/{1}.pot".format(item, PROJECT))
for item in glob.glob1("po", "*.po"):
print("Updating .. ", item)
os.system("msgmerge --update --no-wrap --sort-by-file po/{0} po/{1}.pot".format(item, PROJECT))
# Cleanup
os.unlink(files)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment