diff options
author | Joey Hess <joey@kitenet.net> | 2013-09-28 14:35:21 -0400 |
---|---|---|
committer | Joey Hess <joey@kitenet.net> | 2013-09-28 14:35:21 -0400 |
commit | a05cefbd7cdfc75109d8f55c4cb699352745841c (patch) | |
tree | a3d10d759b00a2c00340d352827fe9d287bed07c /Command | |
parent | 309750f7588d7c9a6eadbdd30b630250f766311f (diff) |
Send a git-annex user-agent when downloading urls.
Overridable with --user-agent option.
Not yet done for S3 or WebDAV due to limitations of libraries used --
neither allows a user-agent header to be specified.
This commit sponsored by Michael Zehrer.
Diffstat (limited to 'Command')
-rw-r--r-- | Command/AddUrl.hs | 8 | ||||
-rw-r--r-- | Command/ImportFeed.hs | 5 |
2 files changed, 7 insertions, 6 deletions
diff --git a/Command/AddUrl.hs b/Command/AddUrl.hs index e767a45e0..951bbdbe8 100644 --- a/Command/AddUrl.hs +++ b/Command/AddUrl.hs @@ -17,8 +17,8 @@ import Backend import qualified Command.Add import qualified Annex import qualified Annex.Queue +import qualified Annex.Url as Url import qualified Backend.URL -import qualified Utility.Url as Url import Annex.Content import Logs.Web import qualified Option @@ -123,7 +123,7 @@ perform relaxed url file = ifAnnexed file addurl geturl next $ return True | otherwise = do headers <- getHttpHeaders - ifM (liftIO $ Url.check url headers $ keySize key) + ifM (Url.withUserAgent $ Url.check url headers $ keySize key) ( do setUrlPresent key url next $ return True @@ -174,7 +174,7 @@ download url file = do size <- ifM (liftIO $ isJust <$> checkDaemon pidfile) ( do headers <- getHttpHeaders - liftIO $ snd <$> Url.exists url headers + snd <$> Url.withUserAgent (Url.exists url headers) , return Nothing ) Backend.URL.fromUrl url size @@ -203,7 +203,7 @@ nodownload relaxed url file = do headers <- getHttpHeaders (exists, size) <- if relaxed then pure (True, Nothing) - else liftIO $ Url.exists url headers + else Url.withUserAgent $ Url.exists url headers if exists then do key <- Backend.URL.fromUrl url size diff --git a/Command/ImportFeed.hs b/Command/ImportFeed.hs index d2f806402..7f54643c9 100644 --- a/Command/ImportFeed.hs +++ b/Command/ImportFeed.hs @@ -17,7 +17,7 @@ import Data.Time.Clock import Common.Annex import qualified Annex import Command -import qualified Utility.Url as Url +import qualified Annex.Url as Url import Logs.Web import qualified Option import qualified Utility.Format @@ -102,9 +102,10 @@ findEnclosures url = extract <$> downloadFeed url downloadFeed :: URLString -> Annex (Maybe Feed) downloadFeed url = do showOutput + ua <- Url.getUserAgent liftIO $ withTmpFile "feed" $ \f h -> do fileEncoding h - ifM (Url.download url [] [] f) + ifM (Url.download url [] [] f ua) ( liftIO $ parseFeedString <$> 
hGetContentsStrict h , return Nothing ) |