author    Joey Hess <joey@kitenet.net>  2014-02-24 22:00:25 -0400
committer Joey Hess <joey@kitenet.net>  2014-02-24 22:00:25 -0400
commit    9f9f1decca4a06d81ce97b64ef1a06fda3b8efad (patch)
tree      1f207862430497549281d510837dfcd9782f69af /Command
parent    ba6f7e1e38063e4b338d6a7537b575411193b2b6 (diff)
add UrlOptions sum type
Diffstat (limited to 'Command')
-rw-r--r--  Command/AddUrl.hs     | 9 +++------
-rw-r--r--  Command/ImportFeed.hs | 4 ++--
2 files changed, 5 insertions(+), 8 deletions(-)
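
The UrlOptions type itself is added outside this directory, so its definition is not part of this diff (the diffstat above is limited to 'Command'). Judging from the call sites changed below, it bundles the user agent, extra HTTP headers, and extra command-line options that callers previously fetched separately via Url.getUserAgent / Url.withUserAgent and getHttpHeadersOptions. A minimal sketch with assumed field names, not the actual definition:

data UrlOptions = UrlOptions
	{ userAgent :: Maybe UserAgent   -- what Url.withUserAgent used to thread through
	, reqHeaders :: [String]         -- the headers previously returned by getHttpHeadersOptions
	, reqParams :: [CommandParam]    -- the extra options previously returned by getHttpHeadersOptions
	}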
diff --git a/Command/AddUrl.hs b/Command/AddUrl.hs
index da4da414f..f45303416 100644
--- a/Command/AddUrl.hs
+++ b/Command/AddUrl.hs
@@ -134,8 +134,7 @@ perform relaxed url file = ifAnnexed file addurl geturl
setUrlPresent key url
next $ return True
| otherwise = do
- (headers, options) <- getHttpHeadersOptions
- (exists, samesize) <- Url.withUserAgent $ Url.check url headers options (keySize key)
+ (exists, samesize) <- Url.withUrlOptions $ Url.check url (keySize key)
if exists && samesize
then do
setUrlPresent key url
@@ -192,8 +191,7 @@ download url file = do
-}
addSizeUrlKey :: URLString -> Key -> Annex Key
addSizeUrlKey url key = do
- (headers, options) <- getHttpHeadersOptions
- size <- snd <$> Url.withUserAgent (Url.exists url headers options)
+ size <- snd <$> Url.withUrlOptions (Url.exists url)
return $ key { keySize = size }
cleanup :: URLString -> FilePath -> Key -> Maybe FilePath -> Annex Bool
@@ -212,10 +210,9 @@ cleanup url file key mtmp = do
nodownload :: Bool -> URLString -> FilePath -> Annex Bool
nodownload relaxed url file = do
- (headers, options) <- getHttpHeadersOptions
(exists, size) <- if relaxed
then pure (True, Nothing)
- else Url.withUserAgent $ Url.exists url headers options
+ else Url.withUrlOptions (Url.exists url)
if exists
then do
key <- Backend.URL.fromUrl url size
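
Every caller in this file follows the same pattern: instead of fetching the headers/options pair and the user agent itself, it wraps its Utility.Url action in Url.withUrlOptions. A plausible shape for that helper, assuming a getUrlOptions action that builds the UrlOptions once from the annex configuration (a sketch, not the committed implementation):

withUrlOptions :: (UrlOptions -> IO a) -> Annex a
withUrlOptions a = liftIO . a =<< getUrlOptions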
diff --git a/Command/ImportFeed.hs b/Command/ImportFeed.hs
index dfa89b344..005d42d20 100644
--- a/Command/ImportFeed.hs
+++ b/Command/ImportFeed.hs
@@ -121,10 +121,10 @@ findDownloads u = go =<< downloadFeed u
downloadFeed :: URLString -> Annex (Maybe Feed)
downloadFeed url = do
showOutput
- ua <- Url.getUserAgent
+ uo <- Url.getUrlOptions
liftIO $ withTmpFile "feed" $ \f h -> do
fileEncoding h
- ifM (Url.download url [] [] f ua)
+ ifM (Url.download url f uo)
( parseFeedString <$> hGetContentsStrict h
, return Nothing
)
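
The changed calls imply that the Utility.Url functions now take a single UrlOptions argument in place of the old user agent, headers, and options parameters. Roughly the signatures these call sites assume (inferred from this diff, not shown in it):

Url.check    :: URLString -> Maybe Integer -> UrlOptions -> IO (Bool, Bool)
Url.exists   :: URLString -> UrlOptions -> IO (Bool, Maybe Integer)
Url.download :: URLString -> FilePath -> UrlOptions -> IO Bool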