From 039e3ffdfad86fa4e39ca777273be65a7be893db Mon Sep 17 00:00:00 2001
From: Joey Hess
Date: Fri, 16 Nov 2012 23:16:18 -0400
Subject: webdav now checks presence of and receives chunked content

Note that receiving encrypted chunked content currently involves buffering.
(So does doing so with the directory special remote.)
---
 Remote/Directory.hs      |  27 +----------
 Remote/Helper/Chunked.hs |  28 +++++++++++-
 Remote/WebDAV.hs         | 113 ++++++++++++++++++++++++++++++++++++-----------
 3 files changed, 116 insertions(+), 52 deletions(-)

(limited to 'Remote')

diff --git a/Remote/Directory.hs b/Remote/Directory.hs
index 6bf725379..794a8c468 100644
--- a/Remote/Directory.hs
+++ b/Remote/Directory.hs
@@ -89,7 +89,7 @@ withCheckedFiles check (Just _) d k a = go $ locations d k
 		let chunkcount = f ++ chunkCount
 		ifM (check chunkcount)
 			( do
-				chunks <- getChunks f <$> readFile chunkcount
+				chunks <- listChunks f <$> readFile chunkcount
 				ifM (all id <$> mapM check chunks)
 					( a chunks , return False )
 			, go fs
@@ -155,29 +155,6 @@ storeSplit' meterupdate chunksize (d:dests) bs c = do
 					feed (sz - s) ls h
 				else return (l:ls)
 
-{- Write a L.ByteString to a file, updating a progress meter
- - after each chunk of the L.ByteString, typically every 64 kb or so. -}
-meteredWriteFile :: MeterUpdate -> FilePath -> L.ByteString -> IO ()
-meteredWriteFile meterupdate dest b =
-	meteredWriteFile' meterupdate dest (L.toChunks b) feeder
-	where
-		feeder chunks = return ([], chunks)
-
-{- Writes a series of S.ByteString chunks to a file, updating a progress
- - meter after each chunk. The feeder is called to get more chunks. -}
-meteredWriteFile' :: MeterUpdate -> FilePath -> s -> (s -> IO (s, [S.ByteString])) -> IO ()
-meteredWriteFile' meterupdate dest startstate feeder =
-	E.bracket (openFile dest WriteMode) hClose (feed startstate [])
-	where
-		feed state [] h = do
-			(state', cs) <- feeder state
-			unless (null cs) $
-				feed state' cs h
-		feed state (c:cs) h = do
-			S.hPut h c
-			meterupdate $ toInteger $ S.length c
-			feed state cs h
-
 storeHelper :: FilePath -> ChunkSize -> Key -> ([FilePath] -> IO [FilePath]) -> Annex Bool
 storeHelper d chunksize key storer = check <&&> go
 	where
@@ -203,7 +180,7 @@ retrieve :: FilePath -> ChunkSize -> Key -> AssociatedFile -> FilePath -> Annex
 retrieve d chunksize k _ f = metered Nothing k $ \meterupdate ->
 	liftIO $ withStoredFiles chunksize d k $ \files ->
 		catchBoolIO $ do
-			meteredWriteFile' meterupdate f files feeder
+			meteredWriteFileChunks meterupdate f files feeder
 			return True
 	where
 		feeder [] = return ([], [])
diff --git a/Remote/Helper/Chunked.hs b/Remote/Helper/Chunked.hs
index 59117eaca..dd6e3eb0d 100644
--- a/Remote/Helper/Chunked.hs
+++ b/Remote/Helper/Chunked.hs
@@ -13,6 +13,7 @@ import Types.Remote
 
 import qualified Data.Map as M
 import qualified Data.ByteString.Lazy as L
+import qualified Data.ByteString as S
 import Data.Int
 import qualified Control.Exception as E
 
@@ -45,8 +46,8 @@ chunkCount = ".chunkcount"
 
 {- Parses the String from the chunkCount file, and returns the files that
  - are used to store the chunks. -}
-getChunks :: FilePath -> String -> [FilePath]
-getChunks basedest chunkcount = take count $ map (basedest ++) chunkStream
+listChunks :: FilePath -> String -> [FilePath]
+listChunks basedest chunkcount = take count $ map (basedest ++) chunkStream
 	where
 		count = fromMaybe 0 $ readish chunkcount
 
@@ -119,3 +120,26 @@ storeChunked chunksize dests storer content =
 				let (chunk, b') = L.splitAt sz b
 				storer d chunk
 				storechunks sz (d:useddests) ds b'
+
+{- Write a L.ByteString to a file, updating a progress meter
+ - after each chunk of the L.ByteString, typically every 64 kb or so. -}
+meteredWriteFile :: MeterUpdate -> FilePath -> L.ByteString -> IO ()
+meteredWriteFile meterupdate dest b =
+	meteredWriteFileChunks meterupdate dest (L.toChunks b) feeder
+	where
+		feeder chunks = return ([], chunks)
+
+{- Writes a series of S.ByteString chunks to a file, updating a progress
+ - meter after each chunk. The feeder is called to get more chunks. -}
+meteredWriteFileChunks :: MeterUpdate -> FilePath -> s -> (s -> IO (s, [S.ByteString])) -> IO ()
+meteredWriteFileChunks meterupdate dest startstate feeder =
+	E.bracket (openFile dest WriteMode) hClose (feed startstate [])
+	where
+		feed state [] h = do
+			(state', cs) <- feeder state
+			unless (null cs) $
+				feed state' cs h
+		feed state (c:cs) h = do
+			S.hPut h c
+			meterupdate $ toInteger $ S.length c
+			feed state cs h
diff --git a/Remote/WebDAV.hs b/Remote/WebDAV.hs
index c1e221295..b69d51f23 100644
--- a/Remote/WebDAV.hs
+++ b/Remote/WebDAV.hs
@@ -20,6 +20,7 @@ import Network.URI (normalizePathSegments)
 import qualified Control.Exception as E
 import Network.HTTP.Conduit (HttpException(..))
 import Network.HTTP.Types
+import System.IO.Error
 
 import Common.Annex
 import Types.Remote
@@ -108,25 +109,45 @@ storeHelper r urlbase user pass b = catchBoolIO $ do
 	storehttp url v = putContentAndProps url user pass
 		(noProps, (contentType, v))
 
-retrieve :: Remote -> Key -> AssociatedFile -> FilePath -> Annex Bool
-retrieve r k _f d = retrieveHelper r k (L.writeFile d)
-
 retrieveCheap :: Remote -> Key -> FilePath -> Annex Bool
 retrieveCheap _ _ _ = return False
 
-retrieveEncrypted :: Remote -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
-retrieveEncrypted r (cipher, enck) _ d = retrieveHelper r enck $ \b -> do
-	withDecryptedContent cipher (return b) (L.writeFile d)
+retrieve :: Remote -> Key -> AssociatedFile -> FilePath -> Annex Bool
+retrieve r k _f d = metered Nothing k $ \meterupdate ->
+	davAction r False $ \(baseurl, user, pass) -> liftIO $ catchBoolIO $
+		withStoredFiles r k baseurl user pass onerr $ \urls -> do
+			meteredWriteFileChunks meterupdate d urls $
+				feeder user pass
+			return True
+	where
+		onerr _ = return False
 
-retrieveHelper :: Remote -> Key -> (L.ByteString -> IO ()) -> Annex Bool
-retrieveHelper r k saver = davAction r False $ \(baseurl, user, pass) -> liftIO $ do
-	let url = davLocation baseurl k
-	maybe (return False) save
-		=<< catchMaybeHttp (getPropsAndContent url user pass)
+		feeder _ _ [] = return ([], [])
+		feeder user pass (url:urls) = do
+			mb <- davGetUrlContent url user pass
+			case mb of
+				Nothing -> throwDownloadFailed
+				Just b -> return (urls, L.toChunks b)
+
+throwDownloadFailed :: IO a
+throwDownloadFailed = ioError $ mkIOError userErrorType "download failed" Nothing Nothing
+
+retrieveEncrypted :: Remote -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
+retrieveEncrypted r (cipher, enck) k d = metered Nothing k $ \meterupdate ->
+	davAction r False $ \(baseurl, user, pass) -> liftIO $ catchBoolIO $
+		withStoredFiles r enck baseurl user pass onerr $ \urls -> do
+			withDecryptedContent cipher (L.concat <$> feeder user pass urls []) $
+				meteredWriteFile meterupdate d
+			return True
 	where
-		save (_, (_, b)) = do
-			saver b
-			return True
+		onerr _ = return False
+
+		feeder _ _ [] c = return $ reverse c
+		feeder user pass (url:urls) c = do
+			mb <- davGetUrlContent url user pass
+			case mb of
+				Nothing -> throwDownloadFailed
+				Just b -> feeder user pass urls (b:c)
 
 remove :: Remote -> Key -> Annex Bool
 remove r k = davAction r False $ \(baseurl, user, pass) -> liftIO $ do
@@ -136,20 +157,48 @@ remove r k = davAction r False $ \(baseurl, user, pass) -> liftIO $ do
 	isJust <$> catchMaybeHttp (deleteContent url user pass)
 
 checkPresent :: Remote -> Key -> Annex (Either String Bool)
-checkPresent r k = davAction r noconn $ \(baseurl, user, pass) -> do
-	showAction $ "checking " ++ name r
-	let url = davLocation baseurl k
-	v <- liftIO $ catchHttp $ getProps url user pass
-	case v of
-		Right _ -> return $ Right True
-		Left (Left (StatusCodeException status _))
-			| statusCode status == statusCode notFound404 -> return $ Right False
-			| otherwise -> return $ Left $ show $ statusMessage status
-		Left (Left httpexception) -> return $ Left $ show httpexception
-		Left (Right ioexception) -> return $ Left $ show ioexception
+checkPresent r k = davAction r noconn go
 	where
 		noconn = Left $ error $ name r ++ " not configured"
 
+		go (baseurl, user, pass) = do
+			showAction $ "checking " ++ name r
+			liftIO $ withStoredFiles r k baseurl user pass onerr check
+			where
+				check [] = return $ Right True
+				check (url:urls) = do
+					v <- davUrlExists url user pass
+					if v == Right True
+						then check urls
+						else return v
+
+				{- Failed to read the chunkcount file; see if it's missing,
+				 - or if there's a problem accessing it,
+				 - or perhaps this was an intermittent error. -}
+				onerr url = do
+					v <- davUrlExists url user pass
+					if v == Right True
+						then return $ Left $ "failed to read " ++ url
+						else return v
+
+withStoredFiles
+	:: Remote
+	-> Key
+	-> DavUrl
+	-> DavUser
+	-> DavPass
+	-> (DavUrl -> IO a)
+	-> ([DavUrl] -> IO a)
+	-> IO a
+withStoredFiles r k baseurl user pass onerr a
+	| isJust $ chunkSize $ config r = do
+		let chunkcount = url ++ chunkCount
+		maybe (onerr chunkcount) (a . listChunks url . L8.toString)
+			=<< davGetUrlContent chunkcount user pass
+	| otherwise = a [url]
+	where
+		url = davLocation baseurl k
+
 davAction :: Remote -> a -> ((DavUrl, DavUser, DavPass) -> Annex a) -> Annex a
 davAction r unconfigured action = case config r of
 	Nothing -> return unconfigured
@@ -173,6 +222,20 @@ davLocation baseurl k = davUrl baseurl $ annexLocation k hashDirLower
 davUrl :: DavUrl -> FilePath -> DavUrl
 davUrl baseurl file = baseurl </> file
 
+davUrlExists :: DavUrl -> DavUser -> DavPass -> IO (Either String Bool)
+davUrlExists url user pass = decode <$> catchHttp (getProps url user pass)
+	where
+		decode (Right _) = Right True
+		decode (Left (Left (StatusCodeException status _)))
+			| statusCode status == statusCode notFound404 = Right False
+			| otherwise = Left $ show $ statusMessage status
+		decode (Left (Left httpexception)) = Left $ show httpexception
+		decode (Left (Right ioexception)) = Left $ show ioexception
+
+davGetUrlContent :: DavUrl -> DavUser -> DavPass -> IO (Maybe L.ByteString)
+davGetUrlContent url user pass = fmap (snd . snd) <$>
+	catchMaybeHttp (getPropsAndContent url user pass)
+
 {- Creates a directory in WebDAV, if not already present; also creating
  - any missing parent directories. -}
 davMkdir :: DavUrl -> DavUser -> DavPass -> IO ()
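
For readers tracing the new code paths: the feeder interface behind meteredWriteFileChunks is what lets the unencrypted retrieve stream one chunk at a time, while the encrypted retrieve collects every chunk and concatenates them before decryption, which is the buffering the commit message mentions. Below is a minimal stand-alone sketch of that feeder pattern, not git-annex code: the MeterUpdate alias, fetchChunk, and the chunk names are illustrative stand-ins for the real git-annex and DAV functions.

-- Stand-alone sketch of the feeder pattern used by meteredWriteFileChunks.
-- fetchChunk, the chunk names, and this MeterUpdate alias are hypothetical
-- stand-ins, not the APIs from the patch above.
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import Control.Exception (bracket)
import Control.Monad (unless)
import System.IO

type MeterUpdate = Integer -> IO ()

-- Same shape as the helper added to Remote/Helper/Chunked.hs: the feeder is
-- polled for the next chunks and the meter is bumped after each write, so
-- only one chunk is held in memory at a time.
meteredWriteFileChunks :: MeterUpdate -> FilePath -> s -> (s -> IO (s, [S.ByteString])) -> IO ()
meteredWriteFileChunks meterupdate dest startstate feeder =
    bracket (openFile dest WriteMode) hClose (feed startstate [])
  where
    feed state [] h = do
        (state', cs) <- feeder state
        unless (null cs) $
            feed state' cs h
    feed state (c:cs) h = do
        S.hPut h c
        meterupdate $ toInteger $ S.length c
        feed state cs h

-- Hypothetical chunk download, standing in for davGetUrlContent.
fetchChunk :: String -> IO S.ByteString
fetchChunk url = return $ S8.pack $ "<contents of " ++ url ++ ">"

main :: IO ()
main = meteredWriteFileChunks meter "out.dat" ["chunk1", "chunk2", "chunk3"] feeder
  where
    meter n = putStrLn $ "wrote " ++ show n ++ " bytes"
    -- The feeder hands back the remaining state plus the next chunks to write.
    feeder [] = return ([], [])
    feeder (u:us) = do
        b <- fetchChunk u
        return (us, [b])

The design point is that the feeder returns the remaining work alongside the next chunks, so the writer never needs the whole object; the encrypted path cannot use this because the cipher is fed the full content, so its feeder accumulates every chunk first.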