-- Copyright (C) 2003-2005 David Roundy
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2, or (at your option)
-- any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; see the file COPYING. If not, write to
-- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
-- Boston, MA 02110-1301, USA.

{-# LANGUAGE CPP #-}

module Darcs.Commands.Optimize ( optimize ) where

import Control.Applicative ( (<$>) )
import Control.Exception ( finally )
import Control.Monad ( when, unless )
import Data.Maybe ( isJust )
import Data.List ( sort )
import System.Directory ( getDirectoryContents, doesDirectoryExist,
                          doesFileExist, renameFile, getModificationTime )
import System.IO.Unsafe ( unsafeInterleaveIO )
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy as BL

import Storage.Hashed.Darcs ( decodeDarcsSize )

import Darcs.Patch.PatchInfoAnd ( info, extractHash )
import Darcs.Commands ( DarcsCommand(..), nodefaults )
import Darcs.Arguments ( DarcsFlag( UpgradeFormat, UseHashedInventory,
                                    Compress, UnCompress, NoCompress,
                                    Reorder, Relink, OptimizePristine,
                                    OptimizeHTTP ),
                         reorderPatches, uncompressNocompress,
                         relink, sibling, flagsToSiblings,
                         upgradeFormat, workingRepoDir, umaskOption,
                         optimizePristine, optimizeHTTP )
import Darcs.Repository.Prefs ( getPreflist )
import Darcs.Repository ( Repository, withRepoLock, RepoJob(..), withGutsOf,
                          readRepo, optimizeInventory,
                          tentativelyReplacePatches, cleanRepository,
                          amInRepository, finalizeRepositoryChanges,
                          replacePristine )
import Darcs.Repository.Old ( oldRepoFailMsg )
import Darcs.Witnesses.Ordered ( (+<+), reverseRL, mapRL, (:>)(..),
                                 mapFL, bunchFL, lengthRL )
import Darcs.Patch.Info ( isTag )
import Darcs.Patch ( RepoPatch )
import Darcs.Patch.Set ( PatchSet(..), newset2RL, newset2FL, progressPatchSet )
import Darcs.Patch.Apply ( ApplyState )
import ByteStringUtils ( gzReadFilePS )
import Darcs.Patch.Depends ( splitOnTag )
import Darcs.Lock ( maybeRelink, gzWriteAtomicFilePS, writeAtomicFilePS )
import Darcs.RepoPath ( toFilePath )
import Darcs.Utils ( withCurrentDirectory )
import Progress ( debugMessage )
import Darcs.Global ( darcsdir )

-- imports for optimize --upgrade; to be tidied
import System.Directory ( createDirectoryIfMissing, removeFile )
import System.FilePath.Posix ( takeExtension, (</>), (<.>), takeFileName )
import Progress ( beginTedious, endTedious, tediousSize )
import Darcs.Flags ( compression )
import Darcs.Lock ( rmRecursive )
import Darcs.ProgressPatches ( progressFL )
import Darcs.Repository.Cache ( hashedDir, HashedDir( HashedPristineDir ) )
import Darcs.Repository.Format ( identifyRepoFormat, createRepoFormat,
                                 writeRepoFormat, formatHas,
                                 RepoProperty( HashedInventory ) )
import qualified Darcs.Repository.HashedRepo as HashedRepo
import Darcs.Repository.Prefs ( getCaches )
import Darcs.Repository.State ( readRecorded )
import Darcs.Utils ( catchall )
import Storage.Hashed.Tree ( Tree, TreeItem(..), list, expand, emptyTree )
import Storage.Hashed.AnchoredPath ( anchorPath )
import Storage.Hashed.Plain ( readPlainTree )
import Storage.Hashed.Darcs ( writeDarcsHashed )

import Codec.Archive.Tar ( write )
import Codec.Archive.Tar.Entry ( fileEntry, toTarPath )
import Codec.Compression.GZip ( compress )

#include "gadts.h"

optimizeDescription :: String
optimizeDescription = "Optimize the repository."

optimizeHelp :: String
optimizeHelp =
 "The `darcs optimize' command modifies the current repository in an\n" ++
 "attempt to reduce its resource requirements. By default a single\n" ++
 "fast, safe optimization is performed; additional optimization\n" ++
 "techniques can be enabled by passing options to `darcs optimize'.\n" ++
 "\n" ++ optimizeHelpInventory ++
 -- "\n" ++ optimize_help_reorder ++
 "\n" ++ optimizeHelpRelink ++
 -- uncompression is least useful, so it is last.
 "\n" ++ optimizeHelpCompression ++
 "\n" ++
 "There is one more optimization which CAN NOT be performed by this\n" ++
 "command. Every time you record a patch, a new inventory file is\n" ++
 "written to _darcs/inventories/, and old inventories are never reaped.\n" ++
 "\n" ++
 "If _darcs/inventories/ is consuming a relatively large amount of\n" ++
 "space, you can safely reclaim it by using `darcs get' to make a\n" ++
 "complete copy of the repo. When doing so, don't forget to copy over\n" ++
 "any unsaved changes you have made to the working tree or to\n" ++
 "unversioned files in _darcs/prefs/ (such as _darcs/prefs/author).\n"

-- | The @darcs optimize@ command. Which optimizations actually run is
-- selected entirely by the flags handed to 'optimizeCmd'.
optimize :: DarcsCommand
optimize = DarcsCommand
    { commandProgramName = "darcs"
    , commandName = "optimize"
    , commandHelp = optimizeHelp
    , commandDescription = optimizeDescription
    , commandExtraArgs = 0
    , commandExtraArgHelp = []
    , commandCommand = optimizeCmd
    , commandPrereq = amInRepository
    , commandGetArgPossibilities = return []
    , commandArgdefaults = nodefaults
    , commandAdvancedOptions = [ uncompressNocompress, umaskOption ]
    , commandBasicOptions = [ workingRepoDir
                            , reorderPatches
                            , sibling
                            , relink
                            , upgradeFormat
                            , optimizePristine
                            , optimizeHTTP
                            ]
    }

-- | Entry point for @darcs optimize@. A format upgrade (if requested)
-- happens before taking the repository lock; all other optimizations
-- run inside 'withRepoLock'.
optimizeCmd :: [DarcsFlag] -> [String] -> IO ()
optimizeCmd origopts _ = do
    when (UpgradeFormat `elem` origopts) optimizeUpgradeFormat
    withRepoLock opts $ RepoJob $ \repository -> do
        cleanRepository repository -- garbage collect pristine.hashed directory
        when (OptimizeHTTP `elem` origopts) $ doOptimizeHTTP repository
        if OptimizePristine `elem` opts
           then doOptimizePristine repository
           else do when (Reorder `elem` opts) $ doReorder opts repository
                   doOptimizeInventory repository
                   when (Compress `elem` opts || UnCompress `elem` opts) $
                       optimizeCompression opts
                   when (Relink `elem` opts) $ doRelink opts
        putStrLn "Done optimizing!"
  where
    -- --uncompress implies --dont-compress for the helpers below.
    opts = if UnCompress `elem` origopts
              then NoCompress : origopts
              else origopts

optimizeHelpInventory :: String
optimizeHelpInventory =
 "The default optimization moves recent patches (those not included in\n" ++
 "the latest tag) to the `front', reducing the amount that a typical\n" ++
 "remote command needs to download. It should also reduce the CPU time\n" ++
 "needed for some operations.\n"

-- | Rewrite the repository inventory via 'optimizeInventory' (the
-- default optimization described in 'optimizeHelpInventory').
doOptimizeInventory :: (RepoPatch p, ApplyState p ~ Tree)
                    => Repository p C(r u t) -> IO ()
doOptimizeInventory repository = do
    debugMessage "Writing out a nice copy of the inventory."
    optimizeInventory repository
    debugMessage "Done writing out a nice copy of the inventory."

optimizeHelpCompression :: String
optimizeHelpCompression =
 "By default patches are compressed with zlib (RFC 1951) to reduce\n" ++
 "storage (and download) size. In exceptional circumstances, it may be\n" ++
 "preferable to avoid compression. In this case the `--dont-compress'\n" ++
 "option can be used (e.g. with `darcs record') to avoid compression.\n" ++
 "\n" ++
 "The `darcs optimize --uncompress' and `darcs optimize --compress'\n" ++
 "commands can be used to ensure existing patches in the current\n" ++
 "repository are respectively uncompressed or compressed. Note that\n" ++
 "repositories in the legacy `old-fashioned-inventory' format have a .gz\n" ++
 "extension on patch files even when uncompressed.\n"

-- | Rewrite every file under _darcs/patches and _darcs/inventories,
-- compressed when 'Compress' is among the flags, plain otherwise.
optimizeCompression :: [DarcsFlag] -> IO ()
optimizeCompression opts = do
    putStrLn "Optimizing (un)compression of patches..."
    do_compress (darcsdir ++ "/patches")
    putStrLn "Optimizing (un)compression of inventories..."
    do_compress (darcsdir ++ "/inventories")
  where
    -- Recurse into directories; for plain files, read (transparently
    -- gunzipping) and atomically rewrite in the requested form.
    do_compress f = do
        isd <- doesDirectoryExist f
        if isd
           then withCurrentDirectory f $ do
                    fs <- filter notdot `fmap` getDirectoryContents "."
                    mapM_ do_compress fs
           else if Compress `elem` opts
                   then gzReadFilePS f >>= gzWriteAtomicFilePS f
                   else gzReadFilePS f >>= writeAtomicFilePS f
    -- Skip dotfiles (including "." and "..").
    notdot ('.':_) = False
    notdot _ = True

optimizeHelpRelink :: String
optimizeHelpRelink =
 "The `darcs optimize --relink' command hard-links patches that the\n" ++
 "current repository has in common with its peers. Peers are those\n" ++
 "repositories listed in _darcs/prefs/sources, or defined with the\n" ++
 "`--sibling' option (which can be used multiple times).\n" ++
 "\n" ++
 "Darcs uses hard-links automatically, so this command is rarely needed.\n" ++
 "It is most useful if you used `cp -r' instead of `darcs get' to copy a\n" ++
 "repository, or if you pulled the same patch from a remote repository\n" ++
 "into multiple local repositories.\n"

-- | In a hashed repository (i.e. when _darcs/hashed_inventory exists),
-- regenerate the pristine tree from the recorded state if the pristine
-- line of the inventory still carries a decodable darcs size prefix.
doOptimizePristine :: (RepoPatch p, ApplyState p ~ Tree)
                   => Repository p C(r u t) -> IO ()
doOptimizePristine repo = do
    hashed <- doesFileExist $ darcsdir </> "hashed_inventory"
    when hashed $ do
        inv <- BS.readFile (darcsdir </> "hashed_inventory")
        let linesInv = BS.split '\n' inv
        case linesInv of
          [] -> return ()
          (pris_line:_) ->
              -- Drop the "pristine:" prefix (9 bytes) before decoding.
              let size = decodeDarcsSize $ BS.drop 9 pris_line
              in when (isJust size) $ do
                     putStrLn "Optimizing hashed pristine..."
                     readRecorded repo >>= replacePristine repo
                     cleanRepository repo

-- | Hard-link files under _darcs/patches against each sibling
-- repository (from --sibling flags and _darcs/prefs/defaultrepo).
doRelink :: [DarcsFlag] -> IO ()
doRelink opts = do
    let some_siblings = flagsToSiblings opts
    defrepolist <- getPreflist "defaultrepo"
    let siblings = map toFilePath some_siblings ++ defrepolist
    if null siblings
       then putStrLn "No siblings -- no relinking done."
       else do debugMessage "Relinking patches..."
               patch_tree <- expand =<< readPlainTree "_darcs/patches"
               let patches = [ anchorPath "" p | (p, File _) <- list patch_tree ]
               maybeRelinkFiles siblings patches "_darcs/patches"
               debugMessage "Done relinking."

-- | Try to relink each of @dst@ (relative to @dir@) against the
-- candidate source repositories in @src@.
maybeRelinkFiles :: [String] -> [String] -> String -> IO ()
maybeRelinkFiles src dst dir =
    mapM_ (maybeRelinkFile src) (map ((dir ++ "/") ++) dst)

-- | Attempt 'maybeRelink' against each candidate in turn, stopping at
-- the first success.
maybeRelinkFile :: [String] -> String -> IO ()
maybeRelinkFile [] _ = return ()
maybeRelinkFile (h:t) f = do
    done <- maybeRelink (h ++ "/" ++ f) f
    unless done $ maybeRelinkFile t f

-- FIXME: someone needs to grovel through the source and determine
-- just how optimizeInventory differs from doReorder. The following
-- is purely speculation. --twb, 2009-04
-- optimize_help_reorder :: String
-- optimize_help_reorder =
--  "The `darcs optimize --reorder' command is a more comprehensive version\n" ++
--  "of the default optimization. It reorders patches with respect to ALL\n" ++
--  "tags, rather than just the latest tag.\n"

-- | Rewrite the repository with its patches reordered per
-- 'chooseOrder', replacing the whole patch sequence tentatively and
-- then finalizing under 'withGutsOf'.
doReorder :: (RepoPatch p, ApplyState p ~ Tree)
          => [DarcsFlag] -> Repository p C(r u r) -> IO ()
doReorder opts repository = do
    debugMessage "Reordering the inventory."
    PatchSet ps _ <- chooseOrder `fmap` readRepo repository
    -- Warning: A do-notation statement discarded a result of type Repository p r u r.
    withGutsOf repository $ do
        _ <- tentativelyReplacePatches repository (compression opts) $
                 reverseRL ps
        finalizeRepositoryChanges repository
    debugMessage "Done reordering the inventory."

-- | Move the patches not covered by the latest tag to the front of the
-- patch set; if there is no tag, return the set unchanged.
chooseOrder :: forall p C(s x) . RepoPatch p
            => PatchSet p C(s x) -> PatchSet p C(s x)
chooseOrder ps =
    case filter isTag $ mapRL info $ newset2RL ps of
      [] -> ps
      (lt:_) -> case splitOnTag lt ps of
                  PatchSet xs ts :> r -> PatchSet (r +<+ xs) ts

-- | Upgrade an old-fashioned repository to the hashed format; a no-op
-- (with a message) if the repository is already hashed.
optimizeUpgradeFormat :: IO ()
optimizeUpgradeFormat = do
    debugMessage "Upgrading to hashed..."
    rf <- identifyRepoFormat "."
    debugMessage "Found our format"
    if formatHas HashedInventory rf
       then putStrLn "No action taken because this repository already is hashed."
       else do putStrLn "Checking repository in case of corruption..."
               withRepoLock [] $ RepoJob $ \repository ->
                   do actuallyUpgradeFormat repository

-- | Perform the actual old-fashioned -> hashed conversion: rewrite the
-- inventory, rebuild pristine by applying every patch, mark the new
-- format, then delete the legacy files.
actuallyUpgradeFormat :: (RepoPatch p, ApplyState p ~ Tree)
                      => Repository p C(r u t) -> IO ()
actuallyUpgradeFormat repository = do
    -- convert patches/inventory
    patches <- readRepo repository
    let k = "Hashing patch"
    beginTedious k
    tediousSize k (lengthRL $ newset2RL patches)
    let patches' = progressPatchSet k patches
    cache <- getCaches [] "."
    let compr = compression [] -- default compression
    HashedRepo.writeTentativeInventory cache compr patches'
    endTedious k
    -- convert pristine by applying patches; the faster alternative
    -- would be to copy pristine, but the apply method is more reliable
    let patchesToApply = progressFL "Applying patch" $ newset2FL patches'
    createDirectoryIfMissing False $ darcsdir </> hashedDir HashedPristineDir
    -- Warning: A do-notation statement discarded a result of type Storage.Hashed.Hash.Hash.
    _ <- writeDarcsHashed emptyTree "_darcs/pristine.hashed"
    sequence_ $ mapFL HashedRepo.applyToTentativePristine $
        bunchFL 100 patchesToApply
    -- now make it official
    HashedRepo.finalizeTentativeChanges repository compr
    writeRepoFormat (createRepoFormat [UseHashedInventory]) (darcsdir </> "format")
    -- clean out old-fashioned junk
    debugMessage "Cleaning out old-fashioned repository files..."
    removeFile $ darcsdir </> "inventory"
    removeFile $ darcsdir </> "tentative_inventory"
    rmRecursive (darcsdir </> "pristine")
        `catchall` rmRecursive (darcsdir </> "current")
    rmGzsIn (darcsdir </> "patches")
    rmGzsIn (darcsdir </> "inventories")
    let checkpointDir = darcsdir </> "checkpoints"
    hasCheckPoints <- doesDirectoryExist checkpointDir
    when hasCheckPoints $ rmRecursive checkpointDir
    putStrLn "Done upgrading!"
  where
    -- Remove every .gz file directly inside dir.
    rmGzsIn dir = withCurrentDirectory dir $ do
        gzs <- filter ((== ".gz") . takeExtension) `fmap` getDirectoryContents "."
        mapM_ removeFile gzs

-- | Build the _darcs/packs tarballs (basic.tar.gz with pristine plus
-- hashed_inventory, patches.tar.gz with patches and inventories) used
-- to speed up `darcs get' over HTTP. Partial files and the temporary
-- meta-filelist files are cleaned up via 'finally' even on failure.
doOptimizeHTTP :: (RepoPatch p, ApplyState p ~ Tree)
               => Repository p C(r u t) -> IO ()
doOptimizeHTTP repo = flip finally (mapM_ removeFileIfExists
    [ darcsdir </> "meta-filelist-inventories"
    , darcsdir </> "meta-filelist-pristine"
    , basicTar <.> "part"
    , patchesTar <.> "part"
    ]) $ do
    rf <- identifyRepoFormat "."
    unless (formatHas HashedInventory rf) $ fail oldRepoFailMsg
    createDirectoryIfMissing False packsDir
    -- pack patchesTar
    ps <- mapRL hashedPatchFileName . newset2RL <$> readRepo repo
    is <- map ((darcsdir </> "inventories") </>) <$> HashedRepo.listInventories
    writeFile (darcsdir </> "meta-filelist-inventories") . unlines $
        map takeFileName is
    -- Write to a ".part" file first, then rename, so readers never see
    -- a half-written tarball.
    BL.writeFile (patchesTar <.> "part") . compress . write =<<
        mapM fileEntry' ((darcsdir </> "meta-filelist-inventories") :
                         ps ++ reverse is)
    renameFile (patchesTar <.> "part") patchesTar
    -- pack basicTar
    pr <- sortByMTime =<< dirContents "pristine.hashed"
    writeFile (darcsdir </> "meta-filelist-pristine") . unlines $
        map takeFileName pr
    BL.writeFile (basicTar <.> "part") . compress . write =<<
        mapM fileEntry' ([ darcsdir </> "meta-filelist-pristine"
                         , darcsdir </> "hashed_inventory"
                         ] ++ reverse pr)
    renameFile (basicTar <.> "part") basicTar
  where
    packsDir = darcsdir </> "packs"
    basicTar = packsDir </> "basic.tar.gz"
    patchesTar = packsDir </> "patches.tar.gz"
    -- Lazily read a (possibly gzipped) file into a tar entry; lazy IO
    -- keeps the whole repository from being resident at once.
    fileEntry' x = unsafeInterleaveIO $ do
        content <- BL.fromChunks . return <$> gzReadFilePS x
        tp <- either fail return $ toTarPath False x
        return $ fileEntry tp content
    dirContents d = dirContents' d $ const True
    -- Entries of _darcs/<d> passing predicate f, dotfiles excluded,
    -- each prefixed with the _darcs-relative directory.
    dirContents' d f =
        map ((darcsdir </> d) </>) .
            filter (\x -> head x /= '.' && f x) <$>
                getDirectoryContents (darcsdir </> d)
    hashedPatchFileName x = case extractHash x of
        Left _ -> fail "unexpected unhashed patch"
        Right h -> darcsdir </> "patches" </> h
    -- Oldest first: decorate with mtime, sort, strip the decoration.
    sortByMTime xs =
        map snd . sort <$>
            mapM (\x -> (\t -> (t, x)) <$> getModificationTime x) xs
    removeFileIfExists x = do
        ex <- doesFileExist x
        when ex $ removeFile x