summary | refs | log | tree | commit | diff
path: root/Command
diff options
context:
space:
mode:
author Joey Hess <joeyh@joeyh.name> 2015-01-22 14:52:52 -0400
committer Joey Hess <joeyh@joeyh.name> 2015-01-22 14:52:52 -0400
commit 07211ad1d80d2be9693c5f37f1bf104c6ce6baa0 (patch)
tree 43fedb306be22459b548589afa08efd5c64f636a /Command
parent c222a65270a95a59307377f81522a46d95db6e9a (diff)
addurl: When a Content-Disposition header suggests a filename to use, addurl will consider using it, if it's reasonable and doesn't conflict with an existing file. (--file overrides this)
Diffstat (limited to 'Command')
-rw-r--r-- Command/AddUrl.hs    | 100 (changed lines)
-rw-r--r-- Command/ImportFeed.hs |   4 (changed lines)
2 files changed, 55 insertions, 49 deletions
diff --git a/Command/AddUrl.hs b/Command/AddUrl.hs
index 9e3aa31fb..67f883d69 100644
--- a/Command/AddUrl.hs
+++ b/Command/AddUrl.hs
@@ -120,17 +120,16 @@ downloadRemoteFile r relaxed uri file sz = do
loguri = setDownloader uri OtherDownloader
startWeb :: Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
-startWeb relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI s
+startWeb relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI urlstring
where
- (s', downloader) = getDownloader s
- bad = fromMaybe (error $ "bad url " ++ s') $
- parseURI $ escapeURIString isUnescapedInURI s'
- choosefile = flip fromMaybe optfile
+ (urlstring, downloader) = getDownloader s
+ bad = fromMaybe (error $ "bad url " ++ urlstring) $
+ parseURI $ escapeURIString isUnescapedInURI urlstring
go url = case downloader of
QuviDownloader -> usequvi
_ ->
#ifdef WITH_QUVI
- ifM (quviSupported s')
+ ifM (quviSupported urlstring)
( usequvi
, regulardownload url
)
@@ -139,30 +138,44 @@ startWeb relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI s
#endif
regulardownload url = do
pathmax <- liftIO $ fileNameLengthLimit "."
- let file = choosefile $ url2file url pathdepth pathmax
+ urlinfo <- if relaxed
+ then pure $ Url.UrlInfo True Nothing Nothing
+ else Url.withUrlOptions (Url.getUrlInfo urlstring)
+ file <- case optfile of
+ Just f -> pure f
+ Nothing -> case Url.urlSuggestedFile urlinfo of
+ Nothing -> pure $ url2file url pathdepth pathmax
+ Just sf -> do
+ let f = truncateFilePath pathmax $
+ sanitizeFilePath sf
+ ifM (liftIO $ doesFileExist f <||> doesDirectoryExist f)
+ ( pure $ url2file url pathdepth pathmax
+ , pure f
+ )
showStart "addurl" file
- next $ performWeb relaxed s' file
+ next $ performWeb relaxed urlstring file urlinfo
#ifdef WITH_QUVI
- badquvi = error $ "quvi does not know how to download url " ++ s'
+ badquvi = error $ "quvi does not know how to download url " ++ urlstring
usequvi = do
page <- fromMaybe badquvi
- <$> withQuviOptions Quvi.forceQuery [Quvi.quiet, Quvi.httponly] s'
+ <$> withQuviOptions Quvi.forceQuery [Quvi.quiet, Quvi.httponly] urlstring
let link = fromMaybe badquvi $ headMaybe $ Quvi.pageLinks page
pathmax <- liftIO $ fileNameLengthLimit "."
- let file = choosefile $ truncateFilePath pathmax $ sanitizeFilePath $
- Quvi.pageTitle page ++ "." ++ Quvi.linkSuffix link
+ let file = flip fromMaybe optfile $
+ truncateFilePath pathmax $ sanitizeFilePath $
+ Quvi.pageTitle page ++ "." ++ Quvi.linkSuffix link
showStart "addurl" file
- next $ performQuvi relaxed s' (Quvi.linkUrl link) file
+ next $ performQuvi relaxed urlstring (Quvi.linkUrl link) file
#else
usequvi = error "not built with quvi support"
#endif
-performWeb :: Bool -> URLString -> FilePath -> CommandPerform
-performWeb relaxed url file = ifAnnexed file addurl geturl
+performWeb :: Bool -> URLString -> FilePath -> Url.UrlInfo -> CommandPerform
+performWeb relaxed url file urlinfo = ifAnnexed file addurl geturl
where
- geturl = next $ isJust <$> addUrlFile relaxed url file
- addurl = addUrlChecked relaxed url webUUID checkexistssize
- checkexistssize = Url.withUrlOptions . Url.check url . keySize
+ geturl = next $ isJust <$> addUrlFile relaxed url urlinfo file
+ addurl = addUrlChecked relaxed url webUUID $ \k -> return $
+ (Url.urlExists urlinfo, Url.urlSize urlinfo == keySize k)
#ifdef WITH_QUVI
performQuvi :: Bool -> URLString -> URLString -> FilePath -> CommandPerform
@@ -189,7 +202,8 @@ addUrlFileQuvi relaxed quviurl videourl file = do
- retained, because the size of a video stream
- might change and we want to be able to download
- it later. -}
- sizedkey <- addSizeUrlKey videourl key
+ urlinfo <- Url.withUrlOptions (Url.getUrlInfo videourl)
+ let sizedkey = addSizeUrlKey urlinfo key
prepGetViaTmpChecked sizedkey Nothing $ do
tmp <- fromRepo $ gitAnnexTmpObjectLocation key
showOutput
@@ -225,17 +239,17 @@ addUrlChecked relaxed url u checkexistssize key
stop
)
-addUrlFile :: Bool -> URLString -> FilePath -> Annex (Maybe Key)
-addUrlFile relaxed url file = do
+addUrlFile :: Bool -> URLString -> Url.UrlInfo -> FilePath -> Annex (Maybe Key)
+addUrlFile relaxed url urlinfo file = do
liftIO $ createDirectoryIfMissing True (parentDir file)
ifM (Annex.getState Annex.fast <||> pure relaxed)
- ( nodownload relaxed url file
- , downloadWeb url file
+ ( nodownload relaxed url urlinfo file
+ , downloadWeb url urlinfo file
)
-downloadWeb :: URLString -> FilePath -> Annex (Maybe Key)
-downloadWeb url file = do
- dummykey <- addSizeUrlKey url =<< Backend.URL.fromUrl url Nothing
+downloadWeb :: URLString -> Url.UrlInfo -> FilePath -> Annex (Maybe Key)
+downloadWeb url urlinfo file = do
+ dummykey <- addSizeUrlKey urlinfo <$> Backend.URL.fromUrl url Nothing
let downloader f _ = do
showOutput
downloadUrl [url] f
@@ -272,15 +286,9 @@ downloadWith downloader dummykey u url file =
liftIO $ createDirectoryIfMissing True (parentDir tmp)
downloader tmp p
-{- Hits the url to get the size, if available.
- -
- - This is needed to avoid exceeding the diskreserve when downloading,
- - and so the assistant can display a pretty progress bar.
- -}
-addSizeUrlKey :: URLString -> Key -> Annex Key
-addSizeUrlKey url key = do
- size <- snd <$> Url.withUrlOptions (Url.exists url)
- return $ key { keySize = size }
+{- Adds the url size to the Key. -}
+addSizeUrlKey :: Url.UrlInfo -> Key -> Key
+addSizeUrlKey urlinfo key = key { keySize = Url.urlSize urlinfo }
cleanup :: UUID -> URLString -> FilePath -> Key -> Maybe FilePath -> Annex ()
cleanup u url file key mtmp = do
@@ -295,19 +303,15 @@ cleanup u url file key mtmp = do
Annex.Queue.flush
maybe noop (moveAnnex key) mtmp
-nodownload :: Bool -> URLString -> FilePath -> Annex (Maybe Key)
-nodownload relaxed url file = do
- (exists, size) <- if relaxed
- then pure (True, Nothing)
- else Url.withUrlOptions (Url.exists url)
- if exists
- then do
- key <- Backend.URL.fromUrl url size
- cleanup webUUID url file key Nothing
- return (Just key)
- else do
- warning $ "unable to access url: " ++ url
- return Nothing
+nodownload :: Bool -> URLString -> Url.UrlInfo -> FilePath -> Annex (Maybe Key)
+nodownload relaxed url urlinfo file
+ | Url.urlExists urlinfo = do
+ key <- Backend.URL.fromUrl url (Url.urlSize urlinfo)
+ cleanup webUUID url file key Nothing
+ return (Just key)
+ | otherwise = do
+ warning $ "unable to access url: " ++ url
+ return Nothing
url2file :: URI -> Maybe Int -> Int -> FilePath
url2file url pathdepth pathmax = case pathdepth of
diff --git a/Command/ImportFeed.hs b/Command/ImportFeed.hs
index b9d78d713..ed035fa85 100644
--- a/Command/ImportFeed.hs
+++ b/Command/ImportFeed.hs
@@ -144,7 +144,9 @@ performDownload relaxed cache todownload = case location todownload of
rundownload url (takeExtension url) $ \f -> do
r <- Remote.claimingUrl url
if Remote.uuid r == webUUID
- then maybeToList <$> addUrlFile relaxed url f
+ then do
+ urlinfo <- Url.withUrlOptions (Url.getUrlInfo url)
+ maybeToList <$> addUrlFile relaxed url urlinfo f
else do
res <- tryNonAsync $ maybe
(error $ "unable to checkUrl of " ++ Remote.name r)