
Viewing File: /usr/lib/python3.9/site-packages/dnf/cli/commands/clean.py

# clean.py
# Clean CLI command.
#
# Copyright (C) 2014-2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.  You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.  Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#

from __future__ import absolute_import
from __future__ import unicode_literals
from dnf.cli import commands
from dnf.i18n import _, P_
from dnf.yum import misc

import dnf.cli
import dnf.exceptions
import dnf.lock
import dnf.logging
import dnf.repo
import logging
import os
import re
import time

logger = logging.getLogger("dnf")

# Dict mapping cmdline arguments to actual data types to be cleaned up
_CACHE_TYPES = {
    'metadata': ['metadata', 'dbcache', 'expire-cache'],
    'packages': ['packages'],
    'dbcache': ['dbcache'],
    'expire-cache': ['expire-cache'],
    'all': ['metadata', 'packages', 'dbcache'],
}
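# For example, the command line "dnf clean metadata expire-cache" expands via
# this table to the set {'metadata', 'dbcache', 'expire-cache'} in run() below.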


def _tree(dirpath):
    """Traverse dirpath recursively and yield relative filenames."""
    for root, dirs, files in os.walk(dirpath):
        base = os.path.relpath(root, dirpath)
        for f in files:
            path = os.path.join(base, f)
            yield os.path.normpath(path)


def _filter(files, patterns):
    """Yield those filenames that match any of the patterns."""
    return (f for f in files for p in patterns if re.match(p, f))


def _clean(dirpath, files):
    """Remove the given filenames from dirpath."""
    count = 0
    for f in files:
        path = os.path.join(dirpath, f)
        logger.log(dnf.logging.DDEBUG, _('Removing file %s'), path)
        misc.unlink_f(path)
        count += 1
    return count


def _cached_repos(files):
    """Return the repo IDs that have some cached metadata around."""
    metapat = dnf.repo.CACHE_FILES['metadata']
    matches = (re.match(metapat, f) for f in files)
    return set(m.group('repoid') for m in matches if m)


class CleanCommand(commands.Command):
    """A class containing methods needed by the cli to execute the
    clean command.
    """

    aliases = ('clean',)
    summary = _('remove cached data')

    @staticmethod
    def set_argparser(parser):
        parser.add_argument('type', nargs='+',
                            choices=_CACHE_TYPES.keys(),
                            help=_('Metadata type to clean'))

    def run(self):
        cachedir = self.base.conf.cachedir
        md_lock = dnf.lock.build_metadata_lock(cachedir, True)
        download_lock = dnf.lock.build_download_lock(cachedir, True)
        rpmdb_lock = dnf.lock.build_rpmdb_lock(self.base.conf.persistdir, True)
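        # All three locks (metadata, downloads, rpmdb) must be held before any
        # cache files are touched; on LockError the loop below either waits
        # three seconds and retries, or re-raises when exit_on_lock is set.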
        while True:
            try:
                with md_lock, download_lock, rpmdb_lock:
                    types = set(t for c in self.opts.type for t in _CACHE_TYPES[c])
                    files = list(_tree(cachedir))
                    logger.debug(_('Cleaning data: ' + ' '.join(types)))

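                    # 'expire-cache' deletes nothing: it only records the
                    # cached repo IDs so their metadata is treated as expired.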
                    if 'expire-cache' in types:
                        expired = _cached_repos(files)
                        self.base._repo_persistor.expired_to_add.update(expired)
                        types.remove('expire-cache')
                        logger.info(_('Cache was expired'))

                    patterns = [dnf.repo.CACHE_FILES[t] for t in types]
                    count = _clean(cachedir, _filter(files, patterns))
                    logger.info(P_('%d file removed', '%d files removed', count) % count)
                    return
            except dnf.exceptions.LockError as e:
                if not self.base.conf.exit_on_lock:
                    msg = _('Waiting for process with pid %d to finish.') % (e.pid)
                    logger.info(msg)
                    time.sleep(3)
                else:
                    raise e
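
# Illustrative invocations (the argument names come from _CACHE_TYPES above):
#   dnf clean expire-cache    # mark cached repo metadata as expired
#   dnf clean metadata        # also clears dbcache and expires the cache
#   dnf clean all             # metadata, packages and dbcache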