From 5e38a5fba69a7b13036e2910e74e2c72d5c9da30 Mon Sep 17 00:00:00 2001
From: Joey Hess
Date: Thu, 5 Mar 2015 14:46:08 -0400
Subject: addurl: Added --raw option, which bypasses special handling of quvi,
 bittorrent etc urls.

---
 Command/AddUrl.hs     |  8 ++++++--
 Command/ImportFeed.hs | 39 +++++++++++++++++++++++----------------
 2 files changed, 29 insertions(+), 18 deletions(-)

(limited to 'Command')

diff --git a/Command/AddUrl.hs b/Command/AddUrl.hs
index 426bc372c..f8a4ca59b 100644
--- a/Command/AddUrl.hs
+++ b/Command/AddUrl.hs
@@ -38,7 +38,7 @@ import qualified Utility.Quvi as Quvi
 #endif
 
 cmd :: [Command]
-cmd = [notBareRepo $ withOptions [fileOption, pathdepthOption, relaxedOption] $
+cmd = [notBareRepo $ withOptions [fileOption, pathdepthOption, relaxedOption, rawOption] $
 	command "addurl" (paramRepeating paramUrl) seek
 		SectionCommon "add urls to annex"]
 
@@ -51,14 +51,18 @@ pathdepthOption = fieldOption [] "pathdepth" paramNumber "path components to use
 relaxedOption :: Option
 relaxedOption = flagOption [] "relaxed" "skip size check"
 
+rawOption :: Option
+rawOption = flagOption [] "raw" "disable special handling for torrents, quvi, etc"
+
 seek :: CommandSeek
 seek us = do
 	optfile <- getOptionField fileOption return
 	relaxed <- getOptionFlag relaxedOption
+	raw <- getOptionFlag rawOption
 	pathdepth <- getOptionField pathdepthOption (return . maybe Nothing readish)
 	forM_ us $ \u -> do
 		r <- Remote.claimingUrl u
-		if Remote.uuid r == webUUID
+		if Remote.uuid r == webUUID || raw
 			then void $ commandAction $ startWeb relaxed optfile pathdepth u
 			else do
 				pathmax <- liftIO $ fileNameLengthLimit "."
diff --git a/Command/ImportFeed.hs b/Command/ImportFeed.hs
index ed035fa85..231c921c3 100644
--- a/Command/ImportFeed.hs
+++ b/Command/ImportFeed.hs
@@ -28,7 +28,7 @@ import Types.UrlContents
 import Logs.Web
 import qualified Utility.Format
 import Utility.Tmp
-import Command.AddUrl (addUrlFile, downloadRemoteFile, relaxedOption)
+import Command.AddUrl (addUrlFile, downloadRemoteFile, relaxedOption, rawOption)
 import Annex.Perms
 import Annex.UUID
 import Backend.URL (fromUrl)
@@ -42,7 +42,7 @@ import Logs.MetaData
 import Annex.MetaData
 
 cmd :: [Command]
-cmd = [notBareRepo $ withOptions [templateOption, relaxedOption] $
+cmd = [notBareRepo $ withOptions [templateOption, relaxedOption, rawOption] $
 	command "importfeed" (paramRepeating paramUrl) seek
 		SectionCommon "import files from podcast feeds"]
 
@@ -53,23 +53,30 @@ seek :: CommandSeek
 seek ps = do
 	tmpl <- getOptionField templateOption return
 	relaxed <- getOptionFlag relaxedOption
+	raw <- getOptionFlag rawOption
+	let opts = Opts { relaxedOpt = relaxed, rawOpt = raw }
 	cache <- getCache tmpl
-	withStrings (start relaxed cache) ps
+	withStrings (start opts cache) ps
 
-start :: Bool -> Cache -> URLString -> CommandStart
-start relaxed cache url = do
+data Opts = Opts
+	{ relaxedOpt :: Bool
+	, rawOpt :: Bool
+	}
+
+start :: Opts -> Cache -> URLString -> CommandStart
+start opts cache url = do
 	showStart "importfeed" url
-	next $ perform relaxed cache url
+	next $ perform opts cache url
 
-perform :: Bool -> Cache -> URLString -> CommandPerform
-perform relaxed cache url = do
+perform :: Opts -> Cache -> URLString -> CommandPerform
+perform opts cache url = do
 	v <- findDownloads url
 	case v of
 		[] -> do
 			feedProblem url "bad feed content"
 			next $ return True
 		l -> do
-			ok <- and <$> mapM (performDownload relaxed cache) l
+			ok <- and <$> mapM (performDownload opts cache) l
 			unless ok $
 				feedProblem url "problem downloading item"
 			next $ cleanup url True
@@ -138,15 +145,15 @@ downloadFeed url = do
 		, return Nothing
 		)
 
-performDownload :: Bool -> Cache -> ToDownload -> Annex Bool
-performDownload relaxed cache todownload = case location todownload of
+performDownload :: Opts -> Cache -> ToDownload -> Annex Bool
+performDownload opts cache todownload = case location todownload of
 	Enclosure url -> checkknown url $
 		rundownload url (takeExtension url) $ \f -> do
 			r <- Remote.claimingUrl url
-			if Remote.uuid r == webUUID
+			if Remote.uuid r == webUUID || rawOpt opts
 				then do
 					urlinfo <- Url.withUrlOptions (Url.getUrlInfo url)
-					maybeToList <$> addUrlFile relaxed url urlinfo f
+					maybeToList <$> addUrlFile (relaxedOpt opts) url urlinfo f
 				else do
 					res <- tryNonAsync $ maybe
 						(error $ "unable to checkUrl of " ++ Remote.name r)
@@ -156,10 +163,10 @@ performDownload relaxed cache todownload = case location todownload of
 						Left _ -> return []
 						Right (UrlContents sz _) ->
 							maybeToList <$>
-								downloadRemoteFile r relaxed url f sz
+								downloadRemoteFile r (relaxedOpt opts) url f sz
 						Right (UrlMulti l) -> do
 							kl <- forM l $ \(url', sz, subf) ->
-								downloadRemoteFile r relaxed url' (f </> fromSafeFilePath subf) sz
+								downloadRemoteFile r (relaxedOpt opts) url' (f </> fromSafeFilePath subf) sz
 							return $ if all isJust kl
 								then catMaybes kl
 								else []
@@ -177,7 +184,7 @@ performDownload relaxed cache todownload = case location todownload of
 			let videourl = Quvi.linkUrl link
 			checkknown videourl $
 				rundownload videourl ("." ++ Quvi.linkSuffix link) $ \f ->
-					maybeToList <$> addUrlFileQuvi relaxed quviurl videourl f
+					maybeToList <$> addUrlFileQuvi (relaxedOpt opts) quviurl videourl f
 #else
 	return False
 #endif
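
Editor's sketch, not part of the patch: a minimal self-contained Haskell toy model of the dispatch change above. With --raw set, a url goes down the plain web path even when a special remote (bittorrent, quvi, etc) would otherwise claim it. Handler, claimingHandler, and dispatch below are illustrative stand-ins for Remote.claimingUrl and the patch's "Remote.uuid r == webUUID || raw" test; they are not git-annex APIs.

-- Toy model of the --raw dispatch logic (an assumption-level sketch,
-- not git-annex code): raw forces the plain web handler even when a
-- special remote would otherwise claim the url.
import Data.List (isSuffixOf)

data Handler = Web | Torrent
    deriving (Show, Eq)

-- Stand-in for Remote.claimingUrl: pick a handler from the url alone.
claimingHandler :: String -> Handler
claimingHandler u
    | ".torrent" `isSuffixOf` u = Torrent
    | otherwise                 = Web

-- Stand-in for the patch's test: Remote.uuid r == webUUID || raw.
dispatch :: Bool -> String -> Handler
dispatch raw u
    | raw       = Web
    | otherwise = claimingHandler u

main :: IO ()
main = do
    print (dispatch False "http://example.com/file.torrent") -- Torrent
    print (dispatch True  "http://example.com/file.torrent") -- Web (bypassed)

Since the patch wires the same flag into both commands, the user-visible effect would be that "git annex addurl --raw" and "git annex importfeed --raw" add plain urls without the torrent/quvi special handling.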