{-# OPTIONS_GHC -cpp -fglasgow-exts #-}
{-# LANGUAGE CPP #-}

#include "gadts.h"

-- | Hashed-object cache handling for darcs repositories: locating, fetching,
-- speculatively prefetching, writing and cleaning hashed files across a list
-- of cache locations (the repository itself, shared cache directories, and
-- remote sources).
module Darcs.Repository.Cache
    ( cacheHash, okayHash, takeHash,
      Cache(..), CacheType(..), CacheLoc(..), WritableOrNot(..),
      HashedDir(..), hashedDir,
      unionCaches, unionRemoteCaches,
      cleanCaches, cleanCachesWithHint,
      fetchFileUsingCache,
      speculateFileUsingCache, speculateFilesUsingCache,
      writeFileUsingCache,
      peekInCache,
      repo2cache,
      writable, isthisrepo, hashedFilePath, allHashedDirs,
      compareByLocality
    ) where

import Control.Monad ( liftM, when, guard, unless, filterM, forM_ )
import Data.List ( nub )
import Data.Maybe ( catMaybes, listToMaybe )
import System.Directory ( removeFile, doesFileExist, doesDirectoryExist,
                          getDirectoryContents, getPermissions,
                          createDirectoryIfMissing )
import qualified System.Directory as SD ( writable )
import System.Posix.Files ( linkCount, getSymbolicLinkStatus, createLink )
import System.IO ( hPutStrLn, stderr )

import Crypt.SHA256 ( sha256sum )
import ByteStringUtils ( gzWriteFilePS, linesPS )
import qualified Data.ByteString as B ( length, drop, ByteString )
import qualified Data.ByteString.Char8 as BC ( unpack )
import SHA1 ( sha1PS )
import Darcs.External ( gzFetchFilePS, fetchFilePS, speculateFileOrUrl,
                        copyFileOrUrl, Cachable( Cachable ) )
import Darcs.Flags ( Compression( .. ) )
import Darcs.Global ( darcsdir )
import Darcs.Lock ( writeAtomicFilePS, gzWriteAtomicFilePS )
import Progress ( progressList, debugMessage, debugFail )
import Darcs.URL ( isFile, isUrl, isSsh )
import Darcs.Utils ( withCurrentDirectory, catchall )

-- | The three classes of hashed objects a repository stores.
data HashedDir = HashedPristineDir
               | HashedPatchesDir
               | HashedInventoriesDir

-- | On-disk directory name for each kind of hashed object.
hashedDir :: HashedDir -> String
hashedDir HashedPristineDir = "pristine.hashed"
hashedDir HashedPatchesDir = "patches"
hashedDir HashedInventoriesDir = "inventories"

allHashedDirs :: [HashedDir]
allHashedDirs = [ HashedPristineDir
                , HashedPatchesDir
                , HashedInventoriesDir
                ]

data WritableOrNot = Writable | NotWritable deriving ( Show )
data CacheType = Repo | Directory deriving ( Eq, Show )

-- | One cache location: its kind, whether we may write to it, and its
-- path or URL.
data CacheLoc = Cache !CacheType !WritableOrNot !String

newtype Cache = Ca [CacheLoc] -- abstract type for hiding cache
-- Note: equality deliberately ignores writability, so 'nub' in
-- 'unionCaches' collapses entries that differ only in that flag.
instance Eq CacheLoc where
    (Cache Repo _ a) == (Cache Repo _ b) = a == b
    (Cache Directory _ a) == (Cache Directory _ b) = a == b
    _ == _ = False

-- The rendering matches the syntax accepted in _darcs/prefs/sources.
instance Show CacheLoc where
    show (Cache Repo Writable a) = "thisrepo:" ++ a
    show (Cache Repo NotWritable a) = "repo:" ++ a
    show (Cache Directory Writable a) = "cache:" ++ a
    show (Cache Directory NotWritable a) = "readonly:" ++ a

instance Show Cache where
    show (Ca cs) = unlines $ map show cs

-- | Concatenate two cache lists, dropping duplicates; the first
-- argument's entries keep priority.
unionCaches :: Cache -> Cache -> Cache
unionCaches (Ca a) (Ca b) = Ca (nub (a ++ b))
-- | unionRemoteCaches merges caches. It tries to do better than just blindly
-- copying remote cache entries:
--
-- * If remote repository is accessed through network, do not copy any cache
--   entries from it. Taking local entries does not make sense and using
--   network entries can lead to darcs hang when it tries to get to
--   unaccessible host.
--
-- * If remote repository is local, copy all network cache entries. For local
--   cache entries if the cache directory exists and is writable it is added
--   as writable cache, if it exists but is not writable it is added as
--   read-only cache.
--
-- This approach should save us from bogus cache entries. One case it does
-- not work very well is when you fetch from partial repository over network.
-- Hopefully this is not a common case.
unionRemoteCaches :: Cache -> Cache -> String -> IO Cache
unionRemoteCaches local (Ca remote) repourl
    | isFile repourl = do f <- filtered
                          return $ local `unionCaches` Ca f
    | otherwise = return local
    where
      -- Per-entry failures just drop that entry instead of aborting.
      filtered = liftM catMaybes $
                 mapM (\x -> fn x `catchall` return Nothing) remote
      fn :: CacheLoc -> IO (Maybe CacheLoc)
      -- The remote's own repository entry is meaningless here.
      fn (Cache Repo Writable _) = return Nothing
      fn c@(Cache t _ url)
          | isFile url = do
              ex <- doesDirectoryExist url
              if ex
                then do p <- getPermissions url
                        return $ Just $
                            if writable c && SD.writable p
                              then c
                              else Cache t NotWritable url
                else return Nothing
          | otherwise = return $ Just c

-- | Compares two caches, a remote cache is greater than a local one.
-- The order of the comparison is given by: local < http < ssh
compareByLocality :: CacheLoc -> CacheLoc -> Ordering
compareByLocality (Cache _ _ x) (Cache _ _ y)
    | isLocal x && isRemote y = LT
    | isRemote x && isLocal y = GT
    | isUrl x && isSsh y = LT
    | isSsh x && isUrl y = GT
    | otherwise = EQ
    where isRemote r = isUrl r || isSsh r
          isLocal = isFile

-- | Wrap a repository path/URL as a single read-only cache entry.
repo2cache :: String -> Cache
repo2cache r = Ca [Cache Repo NotWritable r]
-- | 'cacheHash' computes the cache hash (i.e. filename) of a packed string.
-- For contents up to 10^10 - 1 bytes this is a 75-character name
-- (zero-padded length, a dash, then the SHA-256); for anything longer it is
-- the bare 64-character SHA-256.
cacheHash :: B.ByteString -> String
cacheHash ps = case show (B.length ps) of
                 x | l > 10 -> sha256sum ps
                   | otherwise ->
                       replicate (10 - l) '0' ++ x ++ '-' : sha256sum ps
                   where l = length x

-- | Accept the three hash-name lengths darcs has used: 40 (SHA-1),
-- 64 (SHA-256), 75 (size-prefixed SHA-256).
okayHash :: String -> Bool
okayHash s = length s == 40 || length s == 64 || length s == 75

-- | Split a leading hash line off a packed string; 'Nothing' if the first
-- line is not a plausible hash.
takeHash :: B.ByteString -> Maybe (String, B.ByteString)
takeHash ps = do h <- listToMaybe $ linesPS ps
                 let v = BC.unpack h
                 guard $ okayHash v
                 Just (v, B.drop (B.length h) ps)

-- | Verify that @s@ has the hash @h@, dispatching on the hash format by its
-- length (see 'okayHash').
checkHash :: String -> B.ByteString -> Bool
checkHash h s
    | length h == 40 = sha1PS s == h
    | length h == 64 = sha256sum s == h
    | length h == 75 =
        B.length s == read (take 10 h) && sha256sum s == drop 11 h
    | otherwise = False

fetchFileUsingCache :: Cache -> HashedDir -> String
                    -> IO (String, B.ByteString)
fetchFileUsingCache = fetchFileUsingCachePrivate Anywhere

writable :: CacheLoc -> Bool
writable (Cache _ NotWritable _) = False
writable (Cache _ Writable _) = True

isthisrepo :: CacheLoc -> Bool
isthisrepo (Cache Repo Writable _) = True
isthisrepo _ = False

-- | @hashedFilePath cachelocation subdir hash@ returns the physical filename
-- of hash @hash@ in the @subdir@ section of @cachelocation@.
hashedFilePath :: CacheLoc -> HashedDir -> String -> String
hashedFilePath (Cache Directory _ d) s f =
    d ++ "/" ++ (hashedDir s) ++ "/" ++ f
hashedFilePath (Cache Repo _ r) s f =
    r ++ "/" ++ darcsdir ++ "/" ++ (hashedDir s) ++ "/" ++ f

-- | @peekInCache cache subdir hash@ tells whether @cache@ contains an object
-- with hash @hash@ in a writable position.
-- Florent: why do we want it to be in a writable position?
peekInCache :: Cache -> HashedDir -> String -> IO Bool
peekInCache (Ca cache) subdir f = cacheHasIt cache `catchall` return False
    where
      cacheHasIt [] = return False
      cacheHasIt (c:cs)
          | not $ writable c = cacheHasIt cs
          | otherwise = do ex <- doesFileExist $ fn c
                           if ex then return True
                                 else cacheHasIt cs
      fn c = hashedFilePath c subdir f

-- | @speculateFileUsingCache cache subdirectory name@ takes note that
-- the file @name@ is likely to be useful soon: pipelined downloads
-- will add it to the (low-priority) queue, for the rest it is a noop.
speculateFileUsingCache :: Cache -> HashedDir -> String -> IO ()
speculateFileUsingCache c sd h = do debugMessage $ "Speculating on " ++ h
                                    copyFileUsingCache OnlySpeculate c sd h
-- | Note that the files are likely to be useful soon: pipelined downloads
-- will add them to the (low-priority) queue, for the rest it is a noop.
speculateFilesUsingCache :: Cache -> HashedDir -> [String] -> IO ()
speculateFilesUsingCache _ _ [] = return ()
speculateFilesUsingCache cache sd hs = do
    --debugMessage $ "Thinking about speculating on "++unwords hs
    hs' <- filterM (fmap not . peekInCache cache sd) hs
    unless (null hs') $ do
        debugMessage $ "Speculating on " ++ unwords hs'
        copyFilesUsingCache OnlySpeculate cache sd hs'

-- | Whether 'copyFileUsingCache' should actually copy, or merely queue a
-- speculative (pipelined) download.
data OrOnlySpeculate = ActuallyCopy
                     | OnlySpeculate
                       deriving ( Eq )

-- | Copy (or, for 'OnlySpeculate', request) the object @f@ into the chosen
-- writable cache location, sourcing it from the first non-writable entry.
-- All failures are swallowed: a missing cache copy costs performance, not
-- correctness.
copyFileUsingCache :: OrOnlySpeculate -> Cache -> HashedDir -> String
                   -> IO ()
copyFileUsingCache oos (Ca cache) subdir f =
    (do debugMessage $ "I'm doing copyFileUsingCache on " ++
                       (hashedDir subdir) ++ "/" ++ f
        -- Deliberately partial: if cacheLoc yields Nothing (no writable
        -- location), the pattern-match failure is absorbed by the trailing
        -- catchall below, making the whole call a noop.
        Just stickItHere <- cacheLoc cache
        createDirectoryIfMissing False
            (reverse $ dropWhile (/= '/') $ reverse stickItHere)
        sfuc cache stickItHere)
    `catchall` return ()
    where
      -- Pick a writable location lacking the file; prefers a later one,
      -- falling back to the first found.  Bails out if any writable
      -- location already has it.
      cacheLoc [] = return Nothing
      cacheLoc (c:cs)
          | not $ writable c = cacheLoc cs
          | otherwise = do
              ex <- doesFileExist $ fn c
              if ex
                then fail "Bug in darcs: This exception should be caught in speculateFileUsingCache"
                else do othercache <- cacheLoc cs
                        case othercache of
                          Just x -> return $ Just x
                          Nothing -> return $ Just (fn c)
      -- Fetch from the first non-writable (source) entry into @out@.
      sfuc [] _ = return ()
      sfuc (c:cs) out
          | not $ writable c =
              if oos == OnlySpeculate
                then speculateFileOrUrl (fn c) out
                else copyFileOrUrl [] (fn c) out Cachable
          | otherwise = sfuc cs out
      fn c = hashedFilePath c subdir f

copyFilesUsingCache :: OrOnlySpeculate -> Cache -> HashedDir -> [String]
                    -> IO ()
copyFilesUsingCache oos cache subdir hs =
    do forM_ hs $ copyFileUsingCache oos cache subdir

-- | Whether fetching may consult remote sources, or only local ones.
data FromWhere = LocalOnly
               | Anywhere
                 deriving ( Eq )

-- | Fetch the object @f@, trying each cache entry in turn; on success from
-- a writable location, hard-link or copy it into the other caches.
-- Returns the filename it was found under together with its contents.
fetchFileUsingCachePrivate :: FromWhere -> Cache -> HashedDir -> String
                           -> IO (String, B.ByteString)
fetchFileUsingCachePrivate fromWhere (Ca cache) subdir f =
    (do when (fromWhere == Anywhere) $
            copyFileUsingCache ActuallyCopy (Ca cache) subdir f
        ffuc cache)
    `catchall` debugFail ("Couldn't fetch `" ++ f ++ "'\nin subdir " ++
                          (hashedDir subdir) ++ " from sources:\n\n" ++
                          show (Ca cache))
    where
      ffuc (c:cs)
          -- Read-only source: fetch directly, retrying uncompressed if the
          -- gzipped fetch fails the hash check; fall through to the
          -- remaining entries on any failure.
          | not (writable c) && (Anywhere == fromWhere || isFile (fn c)) =
              (do debugMessage $
                      "In fetchFileUsingCachePrivate I'm going manually"
                  debugMessage $ " getting " ++ f
                  debugMessage $ " from " ++ fn c
                  x <- gzFetchFilePS (fn c) Cachable
                  if not $ checkHash f x
                    then do x' <- fetchFilePS (fn c) Cachable
                            when (not $ checkHash f x') $ do
                                hPutStrLn stderr $ "Hash failure in " ++ fn c
                                fail $ "Hash failure in " ++ fn c
                            return (fn c, x')
                    else return (fn c, x)) -- FIXME: create links in caches
              `catchall` ffuc cs
          -- Writable location: use a local copy if sound (removing a
          -- corrupt one), else fetch from the remaining entries and try to
          -- hard-link (or at worst re-write) the result into this cache.
          | writable c =
              (do x1 <- gzFetchFilePS (fn c) Cachable
                  x <- if not $ checkHash f x1
                         then do
                           x2 <- fetchFilePS (fn c) Cachable
                           when (not $ checkHash f x2) $ do
                               hPutStrLn stderr $ "Hash failure in " ++ fn c
                               removeFile $ fn c
                               fail $ "Hash failure in " ++ fn c
                           return x2
                         else return x1
                  mapM_ (tryLinking (fn c)) cs
                  return (fn c, x))
              `catchall`
              do (fname, x) <- ffuc cs
                 (do createCache c subdir
                     createLink fname (fn c)
                     return (fn c, x))
                  `catchall`
                  do gzWriteFilePS (fn c) x `catchall` return ()
                     return (fname, x)
          | otherwise = ffuc cs
      ffuc [] = debugFail $ "No sources from which to fetch file `" ++ f ++
                            "'\n" ++ show (Ca cache)
      -- Propagate a freshly-fetched object into other writable directory
      -- caches via hard links; best-effort only.
      tryLinking ff c@(Cache Directory Writable d) = do
          createDirectoryIfMissing False (d ++ "/" ++ (hashedDir subdir))
          createLink ff (fn c) `catchall` return ()
      tryLinking _ _ = return ()
      fn c = hashedFilePath c subdir f

-- | Ensure the @subdir@ section of a directory cache exists.
createCache :: CacheLoc -> HashedDir -> IO ()
createCache (Cache Directory _ d) subdir =
    createDirectoryIfMissing True (d ++ "/" ++ (hashedDir subdir))
createCache _ _ = return ()

-- | @write compression filename content@ writes @content@ to the file
-- @filename@ according to the policy given by @compression@.
write :: Compression -> String -> B.ByteString -> IO ()
write NoCompression = writeAtomicFilePS
write GzipCompression = gzWriteAtomicFilePS
-- | @writeFileUsingCache cache compression subdir contents@ writes the
-- string @contents@ to the directory subdir, except if it is already in the
-- cache, in which case it is a noop.  Warning (?) this means that in case of
-- a hash collision, writing using writeFileUsingCache is a noop.  The
-- returned value is the filename that was given to the string.
writeFileUsingCache :: Cache -> Compression -> HashedDir -> B.ByteString
                    -> IO String
writeFileUsingCache (Ca cache) compr subdir ps =
    -- A successful local fetch means the object already exists: noop.
    (fetchFileUsingCachePrivate LocalOnly (Ca cache) subdir hash >>
     return hash)
    `catchall` wfuc cache
    `catchall` debugFail ("Couldn't write `" ++ hash ++ "'\nin subdir " ++
                          (hashedDir subdir) ++ " to sources:\n\n" ++
                          show (Ca cache))
    where
      hash = cacheHash ps
      -- Write into the first writable cache location.
      wfuc (c:cs)
          | not $ writable c = wfuc cs
          | otherwise = do
              createCache c subdir
              write compr (fn c) ps -- FIXME: create links in caches
              return hash
      wfuc [] = debugFail $ "No location to write file `" ++
                            (hashedDir subdir) ++ "/" ++ hash ++ "'"
      fn c = hashedFilePath c subdir hash

cleanCaches :: Cache -> HashedDir -> IO ()
cleanCaches c d = cleanCachesWithHint' c d Nothing

cleanCachesWithHint :: Cache -> HashedDir -> [String] -> IO ()
cleanCachesWithHint c d h = cleanCachesWithHint' c d (Just h)

-- | Remove unreferenced objects from every writable directory cache,
-- restricting attention to the hinted names when a hint is given.
cleanCachesWithHint' :: Cache -> HashedDir -> Maybe [String] -> IO ()
cleanCachesWithHint' (Ca cs) subdir hint = mapM_ cleanCache cs
    where
      cleanCache (Cache Directory Writable d) =
          (withCurrentDirectory (d ++ "/" ++ (hashedDir subdir)) $ do
               fs' <- getDirectoryContents "."
               let fs = case hint of
                          Just h -> h
                          Nothing -> fs'
               mapM_ clean $
                   progressList ("Cleaning cache " ++ d ++ "/" ++
                                 (hashedDir subdir)) $
                   filter okayHash fs)
          `catchall` return ()
      cleanCache _ = return ()
      -- A link count of 2 or more means some repository still hard-links
      -- this object; deletion is best-effort.
      clean f = do
          lc <- linkCount `liftM` getSymbolicLinkStatus f
          when (lc < 2) $ removeFile f `catchall` return ()