-rw-r--r--  Makefile                               |  1
-rw-r--r--  Utility/Url.hs                         | 87
-rw-r--r--  debian/changelog                       |  1
-rw-r--r--  debian/control                         |  1
-rwxr-xr-x  debian/rules                           |  4
-rw-r--r--  doc/install/fromscratch.mdwn           |  1
-rw-r--r--  doc/todo/wishlist:_addurl_https:.mdwn  |  3
7 files changed, 31 insertions, 67 deletions
diff --git a/Makefile b/Makefile
index e1f9724a3..41ebc7dc6 100644
--- a/Makefile
+++ b/Makefile
@@ -6,7 +6,6 @@ BASEFLAGS=-Wall -outputdir $(GIT_ANNEX_TMP_BUILD_DIR) -IUtility
# you can turn off some of these features.
#
# If you're using an old version of yesod, enable -DWITH_OLD_YESOD
-# Or with an old version of the uri library, enable -DWITH_OLD_URI
FEATURES?=$(GIT_ANNEX_LOCAL_FEATURES) -DWITH_ASSISTANT -DWITH_S3 -DWITH_WEBDAV -DWITH_WEBAPP -DWITH_PAIRING -DWITH_XMPP -DWITH_DNS
bins=git-annex
diff --git a/Utility/Url.hs b/Utility/Url.hs
index 67efdb558..8b924a699 100644
--- a/Utility/Url.hs
+++ b/Utility/Url.hs
@@ -16,8 +16,6 @@ module Utility.Url (
) where
import Common
-import qualified Network.Browser as Browser
-import Network.HTTP
import Network.URI
import Data.Either
@@ -38,20 +36,34 @@ check url headers expected_size = handle <$> exists url headers
- also returning its size if available. -}
exists :: URLString -> Headers -> IO (Bool, Maybe Integer)
exists url headers = case parseURI url of
- Nothing -> return (False, Nothing)
Just u
| uriScheme u == "file:" -> do
s <- catchMaybeIO $ getFileStatus (uriPath u)
- return $ case s of
- Nothing -> (False, Nothing)
- Just stat -> (True, Just $ fromIntegral $ fileSize stat)
+ case s of
+ Just stat -> return (True, Just $ fromIntegral $ fileSize stat)
+ Nothing -> dne
| otherwise -> do
- r <- request u headers HEAD
- case rspCode r of
- (2,_,_) -> return (True, size r)
- _ -> return (False, Nothing)
+ output <- readProcess "curl" curlparams
+ case lastMaybe (lines output) of
+ Just ('2':_:_) -> return (True, extractsize output)
+ _ -> dne
+ Nothing -> dne
where
- size = liftM Prelude.read . lookupHeader HdrContentLength . rspHeaders
+ dne = return (False, Nothing)
+
+ curlparams =
+ [ "-s"
+ , "--head"
+ , "-L"
+ , url
+ , "-w", "%{http_code}"
+ ] ++ concatMap (\h -> ["-H", h]) headers
+
+ extractsize s = case lastMaybe $ filter ("Content-Length:" `isPrefixOf`) (lines s) of
+ Just l -> case lastMaybe $ words l of
+ Just sz -> readish sz
+ _ -> Nothing
+ _ -> Nothing
{- Used to download large files, such as the contents of keys.
-
@@ -80,54 +92,5 @@ download url headers options file
{- Downloads a small file. -}
get :: URLString -> Headers -> IO String
-get url headers =
- case parseURI url of
- Nothing -> error "url parse error"
- Just u -> do
- r <- request u headers GET
- case rspCode r of
- (2,_,_) -> return $ rspBody r
- _ -> error $ rspReason r
-
-{- Makes a http request of an url. For example, HEAD can be used to
- - check if the url exists, or GET used to get the url content (best for
- - small urls).
- -
- - This does its own redirect following because Browser's is buggy for HEAD
- - requests.
- -}
-request :: URI -> Headers -> RequestMethod -> IO (Response String)
-request url headers requesttype = go 5 url
- where
- go :: Int -> URI -> IO (Response String)
- go 0 _ = error "Too many redirects "
- go n u = do
- rsp <- Browser.browse $ do
- Browser.setErrHandler ignore
- Browser.setOutHandler ignore
- Browser.setAllowRedirects False
- let req = mkRequest requesttype u :: Request_String
- snd <$> Browser.request (addheaders req)
- case rspCode rsp of
- (3,0,x) | x /= 5 -> redir (n - 1) u rsp
- _ -> return rsp
- ignore = const noop
- redir n u rsp = case retrieveHeaders HdrLocation rsp of
- [] -> return rsp
- (Header _ newu:_) ->
- case parseURIReference newu of
- Nothing -> return rsp
- Just newURI -> go n newURI_abs
- where
-#if defined VERSION_network
-#if ! MIN_VERSION_network(2,4,0)
-#define WITH_OLD_URI
-#endif
-#endif
-#ifdef WITH_OLD_URI
- newURI_abs = fromMaybe newURI (newURI `relativeTo` u)
-#else
- newURI_abs = newURI `relativeTo` u
-#endif
- addheaders req = setHeaders req (rqHeaders req ++ userheaders)
- userheaders = rights $ map parseHeader headers
+get url headers = readProcess "curl" $
+ ["-s", "-L", url] ++ concatMap (\h -> ["-H", h]) headers
diff --git a/debian/changelog b/debian/changelog
index 411f12a8c..df3021d69 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -3,6 +3,7 @@ git-annex (3.20130125) UNRELEASED; urgency=low
* webapp: Now allows restarting any threads that crash.
* Adjust debian package to only build-depend on DAV on architectures
where it is available.
+ * addurl --fast: Use curl, rather than haskell HTTP library, to support https.
-- Joey Hess <joeyh@debian.org> Sat, 26 Jan 2013 15:48:40 +1100
diff --git a/debian/control b/debian/control
index cf25d65df..2610ca53b 100644
--- a/debian/control
+++ b/debian/control
@@ -10,7 +10,6 @@ Build-Depends:
libghc-pcre-light-dev,
libghc-sha-dev,
libghc-dataenc-dev,
- libghc-http-dev,
libghc-utf8-string-dev,
libghc-hs3-dev (>= 0.5.6),
libghc-dav-dev (>= 0.3) [amd64 i386 kfreebsd-amd64 kfreebsd-i386 sparc],
diff --git a/debian/rules b/debian/rules
index 8cd2b5aa5..1dd64872d 100755
--- a/debian/rules
+++ b/debian/rules
@@ -1,9 +1,9 @@
#!/usr/bin/make -f
ifeq (install ok installed,$(shell dpkg-query -W -f '$${Status}' libghc-yesod-dev 2>/dev/null))
-export FEATURES=-DWITH_ASSISTANT -DWITH_S3 -DWITH_HOST -DWITH_OLD_URI -DWITH_PAIRING -DWITH_XMPP -DWITH_WEBAPP -DWITH_OLD_YESOD
+export FEATURES=-DWITH_ASSISTANT -DWITH_S3 -DWITH_HOST -DWITH_PAIRING -DWITH_XMPP -DWITH_WEBAPP -DWITH_OLD_YESOD
else
-export FEATURES=-DWITH_ASSISTANT -DWITH_S3 -DWITH_HOST -DWITH_OLD_URI -DWITH_PAIRING -DWITH_XMPP
+export FEATURES=-DWITH_ASSISTANT -DWITH_S3 -DWITH_HOST -DWITH_PAIRING -DWITH_XMPP
endif
ifeq (install ok installed,$(shell dpkg-query -W -f '$${Status}' libghc-dav-dev 2>/dev/null))
export FEATURES:=${FEATURES} -DWITH_WEBDAV
diff --git a/doc/install/fromscratch.mdwn b/doc/install/fromscratch.mdwn
index 91a0d7f61..8fdb8dcda 100644
--- a/doc/install/fromscratch.mdwn
+++ b/doc/install/fromscratch.mdwn
@@ -13,7 +13,6 @@ quite a lot.
* [lifted-base](http://hackage.haskell.org/package/lifted-base)
* [TestPack](http://hackage.haskell.org/cgi-bin/hackage-scripts/package/testpack)
* [QuickCheck 2](http://hackage.haskell.org/package/QuickCheck)
- * [HTTP](http://hackage.haskell.org/package/HTTP)
* [json](http://hackage.haskell.org/package/json)
* [IfElse](http://hackage.haskell.org/package/IfElse)
* [bloomfilter](http://hackage.haskell.org/package/bloomfilter)
diff --git a/doc/todo/wishlist:_addurl_https:.mdwn b/doc/todo/wishlist:_addurl_https:.mdwn
index bfb1c9283..0a62eda6d 100644
--- a/doc/todo/wishlist:_addurl_https:.mdwn
+++ b/doc/todo/wishlist:_addurl_https:.mdwn
@@ -6,3 +6,6 @@ To give an example, here is a PDF file:
If you switch the https: to http: it redirects you back to https:.
As more sites provide https: for non-secret traffic, this becomes more of an issue.
+
+> I've gotten rid of the use of the HTTP library, now it just uses curl.
+> [[done]] --[[Joey]]
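[Editor's note: as a quick illustration of the fix, not part of the commit: with curl doing the transfer, an https: url like the one described in this todo can be fetched the same way the new get does. A minimal sketch, assuming curl is in PATH and using an illustrative URL.]

    import System.Process (readProcess)

    -- Mirrors the new curl-backed get: -s for quiet output, -L to follow
    -- the http: -> https: redirects this todo describes.
    fetchSmall :: String -> [String] -> IO String
    fetchSmall url headers = readProcess "curl"
        (["-s", "-L", url] ++ concatMap (\h -> ["-H", h]) headers) ""

    main :: IO ()
    main = fetchSmall "https://example.com/" [] >>= putStr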