-rw-r--r--  Command/SendKey.hs                          1
-rw-r--r--  Command/TransferInfo.hs                     9
-rw-r--r--  Logs/Transfer.hs                           10
-rw-r--r--  Messages.hs                                 7
-rw-r--r--  Meters.hs                                  40
-rw-r--r--  Remote/Bup.hs                               1
-rw-r--r--  Remote/Directory.hs                        17
-rw-r--r--  Remote/Git.hs                               9
-rw-r--r--  Remote/Glacier.hs                           2
-rw-r--r--  Remote/Helper/Chunked.hs                    2
-rw-r--r--  Remote/Helper/Encryptable.hs                1
-rw-r--r--  Remote/Hook.hs                              1
-rw-r--r--  Remote/Rsync.hs                             1
-rw-r--r--  Remote/S3.hs                                2
-rw-r--r--  Remote/Web.hs                               1
-rw-r--r--  Remote/WebDAV.hs                            2
-rw-r--r--  Types.hs                                    4
-rw-r--r--  Types/Meters.hs                            12
-rw-r--r--  Types/Remote.hs                             2
-rw-r--r--  Utility/Metered.hs                        116
-rw-r--r--  Utility/Observed.hs                        43
-rw-r--r--  Utility/Rsync.hs                           12
-rw-r--r--  debian/changelog                            1
-rw-r--r--  doc/bugs/No_progress_bars_with_S3.mdwn      5
24 files changed, 172 insertions, 129 deletions
diff --git a/Command/SendKey.hs b/Command/SendKey.hs
index 3e46d9dd0..0a07dcece 100644
--- a/Command/SendKey.hs
+++ b/Command/SendKey.hs
@@ -13,6 +13,7 @@ import Annex.Content
import Utility.Rsync
import Logs.Transfer
import qualified Fields
+import Utility.Metered
def :: [Command]
def = [noCommit $ command "sendkey" paramKey seek
diff --git a/Command/TransferInfo.hs b/Command/TransferInfo.hs
index aacc69bb1..4bebdebcd 100644
--- a/Command/TransferInfo.hs
+++ b/Command/TransferInfo.hs
@@ -13,6 +13,7 @@ import Annex.Content
import Logs.Transfer
import Types.Key
import qualified Fields
+import Utility.Metered
def :: [Command]
def = [noCommit $ command "transferinfo" paramKey seek SectionPlumbing
@@ -50,10 +51,14 @@ start (k:[]) = do
(update, tfile, _) <- mkProgressUpdater t info
liftIO $ mapM_ void
[ tryIO $ forever $ do
- bytes <- readish <$> getLine
- maybe (error "transferinfo protocol error") update bytes
+ bytes <- readUpdate
+ maybe (error "transferinfo protocol error")
+ (update . toBytesProcessed) bytes
, tryIO $ removeFile tfile
, exitSuccess
]
stop
start _ = error "wrong number of parameters"
+
+readUpdate :: IO (Maybe Integer)
+readUpdate = readish <$> getLine
diff --git a/Logs/Transfer.hs b/Logs/Transfer.hs
index e73933172..52ed03e5c 100644
--- a/Logs/Transfer.hs
+++ b/Logs/Transfer.hs
@@ -13,6 +13,7 @@ import Annex.Exception
import qualified Git
import Types.Remote
import Types.Key
+import Utility.Metered
import Utility.Percentage
import Utility.QuickCheck
@@ -165,12 +166,13 @@ mkProgressUpdater t info = do
mvar <- liftIO $ newMVar 0
return (liftIO . updater tfile mvar, tfile, mvar)
where
- updater tfile mvar bytes = modifyMVar_ mvar $ \oldbytes -> do
- if (bytes - oldbytes >= mindelta)
+ updater tfile mvar b = modifyMVar_ mvar $ \oldbytes -> do
+ let newbytes = fromBytesProcessed b
+ if (newbytes - oldbytes >= mindelta)
then do
- let info' = info { bytesComplete = Just bytes }
+ let info' = info { bytesComplete = Just newbytes }
_ <- tryIO $ writeTransferInfoFile info' tfile
- return bytes
+ return newbytes
else return oldbytes
{- The minimum change in bytesComplete that is worth
- updating a transfer info file for is 1% of the total
diff --git a/Messages.hs b/Messages.hs
index d79c91aa0..13b786a31 100644
--- a/Messages.hs
+++ b/Messages.hs
@@ -43,14 +43,15 @@ import System.Log.Logger
import System.Log.Formatter
import System.Log.Handler (setFormatter, LogHandler)
import System.Log.Handler.Simple
+import qualified Data.Set as S
import Common
import Types
import Types.Messages
+import qualified Messages.JSON as JSON
import Types.Key
import qualified Annex
-import qualified Messages.JSON as JSON
-import qualified Data.Set as S
+import Utility.Metered
showStart :: String -> String -> Annex ()
showStart command file = handle (JSON.start command $ Just file) $
@@ -86,7 +87,7 @@ meteredBytes combinemeterupdate size a = withOutputType go
meter <- liftIO $ newMeter progress "B" 25 (renderNums binaryOpts 1)
showOutput
r <- a $ \n -> liftIO $ do
- incrP progress n
+ setP progress $ fromBytesProcessed n
displayMeter stdout meter
maybe noop (\m -> m n) combinemeterupdate
liftIO $ clearMeter stdout meter
diff --git a/Meters.hs b/Meters.hs
deleted file mode 100644
index 378e570a2..000000000
--- a/Meters.hs
+++ /dev/null
@@ -1,40 +0,0 @@
-{- git-annex meters
- -
- - Copyright 2012 Joey Hess <joey@kitenet.net>
- -
- - Licensed under the GNU GPL version 3 or higher.
- -}
-
-module Meters where
-
-import Common
-import Types.Meters
-import Utility.Observed
-
-import qualified Data.ByteString.Lazy as L
-import qualified Data.ByteString as S
-
-{- Sends the content of a file to an action, updating the meter as it's
- - consumed. -}
-withMeteredFile :: FilePath -> MeterUpdate -> (L.ByteString -> IO a) -> IO a
-withMeteredFile f meterupdate a = withBinaryFile f ReadMode $ \h ->
- hGetContentsObserved h (meterupdate . toInteger) >>= a
-
-{- Sends the content of a file to a Handle, updating the meter as it's
- - written. -}
-streamMeteredFile :: FilePath -> MeterUpdate -> Handle -> IO ()
-streamMeteredFile f meterupdate h = withMeteredFile f meterupdate $ L.hPut h
-
-{- Writes a ByteString to a Handle, updating a meter as it's written. -}
-meteredWrite :: MeterUpdate -> Handle -> L.ByteString -> IO ()
-meteredWrite meterupdate h = go . L.toChunks
- where
- go [] = return ()
- go (c:cs) = do
- S.hPut h c
- meterupdate $ toInteger $ S.length c
- go cs
-
-meteredWriteFile :: MeterUpdate -> FilePath -> L.ByteString -> IO ()
-meteredWriteFile meterupdate f b = withBinaryFile f WriteMode $ \h ->
- meteredWrite meterupdate h b
diff --git a/Remote/Bup.hs b/Remote/Bup.hs
index d168f0715..1c69d0a1c 100644
--- a/Remote/Bup.hs
+++ b/Remote/Bup.hs
@@ -29,6 +29,7 @@ import Data.ByteString.Lazy.UTF8 (fromString)
import Data.Digest.Pure.SHA
import Utility.UserInfo
import Annex.Content
+import Utility.Metered
type BupRepo = String
diff --git a/Remote/Directory.hs b/Remote/Directory.hs
index be533d038..8c5fa795e 100644
--- a/Remote/Directory.hs
+++ b/Remote/Directory.hs
@@ -24,7 +24,7 @@ import Remote.Helper.Encryptable
import Remote.Helper.Chunked
import Crypto
import Annex.Content
-import Meters
+import Utility.Metered
remote :: RemoteType
remote = RemoteType {
@@ -154,17 +154,20 @@ storeSplit' :: MeterUpdate -> Int64 -> [FilePath] -> [S.ByteString] -> [FilePath
storeSplit' _ _ [] _ _ = error "ran out of dests"
storeSplit' _ _ _ [] c = return $ reverse c
storeSplit' meterupdate chunksize (d:dests) bs c = do
- bs' <- E.bracket (openFile d WriteMode) hClose (feed chunksize bs)
+ bs' <- E.bracket (openFile d WriteMode) hClose $
+ feed zeroBytesProcessed chunksize bs
storeSplit' meterupdate chunksize dests bs' (d:c)
where
- feed _ [] _ = return []
- feed sz (l:ls) h = do
- let s = fromIntegral $ S.length l
+ feed _ _ [] _ = return []
+ feed bytes sz (l:ls) h = do
+ let len = S.length l
+ let s = fromIntegral len
if s <= sz || sz == chunksize
then do
S.hPut h l
- meterupdate $ toInteger s
- feed (sz - s) ls h
+ let bytes' = addBytesProcessed bytes len
+ meterupdate bytes'
+ feed bytes' (sz - s) ls h
else return (l:ls)
storeHelper :: FilePath -> ChunkSize -> Key -> ([FilePath] -> IO [FilePath]) -> Annex Bool
diff --git a/Remote/Git.hs b/Remote/Git.hs
index 207655b4e..31396003b 100644
--- a/Remote/Git.hs
+++ b/Remote/Git.hs
@@ -40,6 +40,7 @@ import Init
import Types.Key
import qualified Fields
import Logs.Location
+import Utility.Metered
import Control.Concurrent
import Control.Concurrent.MSampleVar
@@ -309,7 +310,7 @@ copyFromRemote r key file dest
: maybe [] (\f -> [(Fields.associatedFile, f)]) file
Just (cmd, params) <- git_annex_shell (repo r) "transferinfo"
[Param $ key2file key] fields
- v <- liftIO $ newEmptySV
+ v <- liftIO $ (newEmptySV :: IO (MSampleVar Integer))
tid <- liftIO $ forkIO $ void $ tryIO $ do
bytes <- readSV v
p <- createProcess $
@@ -325,7 +326,7 @@ copyFromRemote r key file dest
send bytes
forever $
send =<< readSV v
- let feeder = writeSV v
+ let feeder = writeSV v . fromBytesProcessed
bracketIO noop (const $ tryIO $ killThread tid) (a feeder)
copyFromRemoteCheap :: Remote -> Key -> FilePath -> Annex Bool
@@ -391,13 +392,13 @@ rsyncOrCopyFile rsyncparams src dest p =
dorsync = rsyncHelper (Just p) $
rsyncparams ++ [Param src, Param dest]
docopy = liftIO $ bracket
- (forkIO $ watchfilesize 0)
+ (forkIO $ watchfilesize zeroBytesProcessed)
(void . tryIO . killThread)
(const $ copyFileExternal src dest)
watchfilesize oldsz = do
threadDelay 500000 -- 0.5 seconds
v <- catchMaybeIO $
- fromIntegral . fileSize
+ toBytesProcessed . fileSize
<$> getFileStatus dest
case v of
Just sz
diff --git a/Remote/Glacier.hs b/Remote/Glacier.hs
index ea5df31e5..088c62fb3 100644
--- a/Remote/Glacier.hs
+++ b/Remote/Glacier.hs
@@ -22,7 +22,7 @@ import Remote.Helper.Encryptable
import qualified Remote.Helper.AWS as AWS
import Crypto
import Creds
-import Meters
+import Utility.Metered
import qualified Annex
import Annex.Content
diff --git a/Remote/Helper/Chunked.hs b/Remote/Helper/Chunked.hs
index 11b86042e..46678de70 100644
--- a/Remote/Helper/Chunked.hs
+++ b/Remote/Helper/Chunked.hs
@@ -10,7 +10,7 @@ module Remote.Helper.Chunked where
import Common.Annex
import Utility.DataUnits
import Types.Remote
-import Meters
+import Utility.Metered
import qualified Data.Map as M
import qualified Data.ByteString.Lazy as L
diff --git a/Remote/Helper/Encryptable.hs b/Remote/Helper/Encryptable.hs
index 242fcfe8a..4f0404f2a 100644
--- a/Remote/Helper/Encryptable.hs
+++ b/Remote/Helper/Encryptable.hs
@@ -15,6 +15,7 @@ import Crypto
import qualified Annex
import Config.Cost
import Utility.Base64
+import Utility.Metered
{- Encryption setup for a remote. The user must specify whether to use
- an encryption key, or not encrypt. An encrypted cipher is created, or is
diff --git a/Remote/Hook.hs b/Remote/Hook.hs
index 97691d075..46ee8000f 100644
--- a/Remote/Hook.hs
+++ b/Remote/Hook.hs
@@ -21,6 +21,7 @@ import Annex.Content
import Remote.Helper.Special
import Remote.Helper.Encryptable
import Crypto
+import Utility.Metered
remote :: RemoteType
remote = RemoteType {
diff --git a/Remote/Rsync.hs b/Remote/Rsync.hs
index 1425601ad..9563b43e8 100644
--- a/Remote/Rsync.hs
+++ b/Remote/Rsync.hs
@@ -22,6 +22,7 @@ import Remote.Helper.Encryptable
import Crypto
import Utility.Rsync
import Utility.CopyFile
+import Utility.Metered
import Annex.Perms
type RsyncUrl = String
diff --git a/Remote/S3.hs b/Remote/S3.hs
index 0ca86f1ff..017886694 100644
--- a/Remote/S3.hs
+++ b/Remote/S3.hs
@@ -27,7 +27,7 @@ import Remote.Helper.Encryptable
import qualified Remote.Helper.AWS as AWS
import Crypto
import Creds
-import Meters
+import Utility.Metered
import Annex.Content
remote :: RemoteType
diff --git a/Remote/Web.hs b/Remote/Web.hs
index b0d12002c..5af3c5228 100644
--- a/Remote/Web.hs
+++ b/Remote/Web.hs
@@ -17,6 +17,7 @@ import Config.Cost
import Logs.Web
import qualified Utility.Url as Url
import Types.Key
+import Utility.Metered
import qualified Data.Map as M
diff --git a/Remote/WebDAV.hs b/Remote/WebDAV.hs
index 3b729fe83..db5535494 100644
--- a/Remote/WebDAV.hs
+++ b/Remote/WebDAV.hs
@@ -30,7 +30,7 @@ import Remote.Helper.Encryptable
import Remote.Helper.Chunked
import Crypto
import Creds
-import Meters
+import Utility.Metered
import Annex.Content
type DavUrl = String
diff --git a/Types.hs b/Types.hs
index d19ac3de1..ea0fa6b89 100644
--- a/Types.hs
+++ b/Types.hs
@@ -14,8 +14,7 @@ module Types (
RemoteGitConfig(..),
Remote,
RemoteType,
- Option,
- MeterUpdate
+ Option
) where
import Annex
@@ -25,7 +24,6 @@ import Types.Key
import Types.UUID
import Types.Remote
import Types.Option
-import Types.Meters
type Backend = BackendA Annex
type Remote = RemoteA Annex
diff --git a/Types/Meters.hs b/Types/Meters.hs
deleted file mode 100644
index ef304d1ae..000000000
--- a/Types/Meters.hs
+++ /dev/null
@@ -1,12 +0,0 @@
-{- git-annex meter types
- -
- - Copyright 2012 Joey Hess <joey@kitenet.net>
- -
- - Licensed under the GNU GPL version 3 or higher.
- -}
-
-module Types.Meters where
-
-{- An action that can be run repeatedly, feeding it the number of
- - bytes sent or retrieved so far. -}
-type MeterUpdate = (Integer -> IO ())
diff --git a/Types/Remote.hs b/Types/Remote.hs
index 64a77109c..e6536757c 100644
--- a/Types/Remote.hs
+++ b/Types/Remote.hs
@@ -15,9 +15,9 @@ import Data.Ord
import qualified Git
import Types.Key
import Types.UUID
-import Types.Meters
import Types.GitConfig
import Config.Cost
+import Utility.Metered
type RemoteConfigKey = String
type RemoteConfig = M.Map RemoteConfigKey String
diff --git a/Utility/Metered.hs b/Utility/Metered.hs
new file mode 100644
index 000000000..f33ad443a
--- /dev/null
+++ b/Utility/Metered.hs
@@ -0,0 +1,116 @@
+{- Metered IO
+ -
+ - Copyright 2012, 2013 Joey Hess <joey@kitenet.net>
+ -
+ - Licensed under the GNU GPL version 3 or higher.
+ -}
+
+{-# LANGUAGE TypeSynonymInstances #-}
+
+module Utility.Metered where
+
+import Common
+
+import qualified Data.ByteString.Lazy as L
+import qualified Data.ByteString as S
+import System.IO.Unsafe
+import Foreign.Storable (Storable(sizeOf))
+import System.Posix.Types
+
+{- An action that can be run repeatedly, updating it on the bytes processed.
+ -
+ - Note that each call receives the total number of bytes processed, so
+ - far, *not* an incremental amount since the last call. -}
+type MeterUpdate = (BytesProcessed -> IO ())
+
+{- Total number of bytes processed so far. -}
+newtype BytesProcessed = BytesProcessed Integer
+ deriving (Eq, Ord)
+
+class AsBytesProcessed a where
+ toBytesProcessed :: a -> BytesProcessed
+ fromBytesProcessed :: BytesProcessed -> a
+
+instance AsBytesProcessed Integer where
+ toBytesProcessed i = BytesProcessed i
+ fromBytesProcessed (BytesProcessed i) = i
+
+instance AsBytesProcessed Int where
+ toBytesProcessed i = BytesProcessed $ toInteger i
+ fromBytesProcessed (BytesProcessed i) = fromInteger i
+
+instance AsBytesProcessed FileOffset where
+ toBytesProcessed sz = BytesProcessed $ toInteger sz
+ fromBytesProcessed (BytesProcessed sz) = fromInteger sz
+
+addBytesProcessed :: AsBytesProcessed v => BytesProcessed -> v -> BytesProcessed
+addBytesProcessed (BytesProcessed i) v =
+ let (BytesProcessed n) = toBytesProcessed v
+ in BytesProcessed $! i + n
+
+zeroBytesProcessed :: BytesProcessed
+zeroBytesProcessed = BytesProcessed 0
+
+{- Sends the content of a file to an action, updating the meter as it's
+ - consumed. -}
+withMeteredFile :: FilePath -> MeterUpdate -> (L.ByteString -> IO a) -> IO a
+withMeteredFile f meterupdate a = withBinaryFile f ReadMode $ \h ->
+ hGetContentsMetered h meterupdate >>= a
+
+{- Sends the content of a file to a Handle, updating the meter as it's
+ - written. -}
+streamMeteredFile :: FilePath -> MeterUpdate -> Handle -> IO ()
+streamMeteredFile f meterupdate h = withMeteredFile f meterupdate $ L.hPut h
+
+{- Writes a ByteString to a Handle, updating a meter as it's written. -}
+meteredWrite :: MeterUpdate -> Handle -> L.ByteString -> IO ()
+meteredWrite meterupdate h = go zeroBytesProcessed . L.toChunks
+ where
+ go _ [] = return ()
+ go sofar (c:cs) = do
+ S.hPut h c
+ let sofar' = addBytesProcessed sofar $ S.length c
+ meterupdate sofar'
+ go sofar' cs
+
+meteredWriteFile :: MeterUpdate -> FilePath -> L.ByteString -> IO ()
+meteredWriteFile meterupdate f b = withBinaryFile f WriteMode $ \h ->
+ meteredWrite meterupdate h b
+
+{- This is like L.hGetContents, but after each chunk is read, a meter
+ - is updated based on the size of the chunk.
+ -
+ - Note that the meter update is run in unsafeInterleaveIO, which means that
+ - it can be run at any time. It's even possible for updates to run out
+ - of order, as different parts of the ByteString are consumed.
+ -
+ - All the usual caveats about using unsafeInterleaveIO apply to the
+ - meter updates, so use caution.
+ -}
+hGetContentsMetered :: Handle -> MeterUpdate -> IO L.ByteString
+hGetContentsMetered h meterupdate = lazyRead zeroBytesProcessed
+ where
+ lazyRead sofar = unsafeInterleaveIO $ loop sofar
+
+ loop sofar = do
+ c <- S.hGetSome h defaultChunkSize
+ if S.null c
+ then do
+ hClose h
+ return $ L.empty
+ else do
+ let sofar' = addBytesProcessed sofar $
+ S.length c
+ meterupdate sofar'
+ {- unsafeInterleaveIO causes this to be
+ - deferred until the data is read from the
+ - ByteString. -}
+ cs <- lazyRead sofar'
+ return $ L.append (L.fromChunks [c]) cs
+
+{- Same default chunk size Lazy ByteStrings use. -}
+defaultChunkSize :: Int
+defaultChunkSize = 32 * k - chunkOverhead
+ where
+ k = 1024
+ chunkOverhead = 2 * sizeOf (undefined :: Int) -- GHC specific
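
To illustrate how the new interface above is meant to be called, here is a minimal sketch (not code from this commit; the copyWithProgress name and the file-path arguments are hypothetical, while MeterUpdate, fromBytesProcessed, and meteredWriteFile are the definitions added in Utility/Metered.hs):

-- Sketch: copy a file while reporting the running total after each chunk.
-- MeterUpdate now carries a cumulative BytesProcessed, not a per-chunk delta.
import qualified Data.ByteString.Lazy as L
import Utility.Metered

copyWithProgress :: FilePath -> FilePath -> IO ()
copyWithProgress src dest = do
        b <- L.readFile src
        meteredWriteFile report dest b
  where
        report :: MeterUpdate
        report sofar = putStrLn $
                "bytes written so far: " ++ show (fromBytesProcessed sofar :: Integer)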
diff --git a/Utility/Observed.hs b/Utility/Observed.hs
deleted file mode 100644
index 3ee973429..000000000
--- a/Utility/Observed.hs
+++ /dev/null
@@ -1,43 +0,0 @@
-module Utility.Observed where
-
-import qualified Data.ByteString.Lazy as L
-import qualified Data.ByteString as S
-import System.IO
-import System.IO.Unsafe
-import Foreign.Storable (Storable(sizeOf))
-
-{- This is like L.hGetContents, but after each chunk is read, an action
- - is run to observe the size of the chunk.
- -
- - Note that the observer is run in unsafeInterleaveIO, which means that
- - it can be run at any time. It's even possible for observers to run out
- - of order, as different parts of the ByteString are consumed.
- -
- - All the usual caveats about using unsafeInterleaveIO apply to the observers,
- - so use caution.
- -}
-hGetContentsObserved :: Handle -> (Int -> IO ()) -> IO L.ByteString
-hGetContentsObserved h observe = lazyRead
- where
- lazyRead = unsafeInterleaveIO loop
-
- loop = do
- c <- S.hGetSome h defaultChunkSize
- if S.null c
- then do
- hClose h
- return $ L.empty
- else do
- observe $ S.length c
- {- unsafeInterleaveIO causes this to be
- - deferred until the data is read from the
- - ByteString. -}
- cs <- lazyRead
- return $ L.append (L.fromChunks [c]) cs
-
-{- Same default chunk size Lazy ByteStrings use. -}
-defaultChunkSize :: Int
-defaultChunkSize = 32 * k - chunkOverhead
- where
- k = 1024
- chunkOverhead = 2 * sizeOf (undefined :: Int) -- GHC specific
diff --git a/Utility/Rsync.hs b/Utility/Rsync.hs
index e03824239..afb3dcbc8 100644
--- a/Utility/Rsync.hs
+++ b/Utility/Rsync.hs
@@ -8,6 +8,7 @@
module Utility.Rsync where
import Common
+import Utility.Metered
import Data.Char
@@ -44,14 +45,13 @@ rsyncServerParams =
rsync :: [CommandParam] -> IO Bool
rsync = boolSystem "rsync"
-{- Runs rsync, but intercepts its progress output and feeds bytes
- - complete values into the callback. The progress output is also output
- - to stdout.
+{- Runs rsync, but intercepts its progress output and updates a meter.
+ - The progress output is also output to stdout.
-
- The params must enable rsync's --progress mode for this to work.
-}
-rsyncProgress :: (Integer -> IO ()) -> [CommandParam] -> IO Bool
-rsyncProgress callback params = do
+rsyncProgress :: MeterUpdate -> [CommandParam] -> IO Bool
+rsyncProgress meterupdate params = do
r <- withHandle StdoutHandle createProcessSuccess p (feedprogress 0 [])
{- For an unknown reason, piping rsync's output like this does
- causes it to run a second ssh process, which it neglects to wait
@@ -72,7 +72,7 @@ rsyncProgress callback params = do
Nothing -> feedprogress prev buf' h
(Just bytes) -> do
when (bytes /= prev) $
- callback bytes
+ meterupdate $ toBytesProcessed bytes
feedprogress bytes buf' h
{- Checks if an rsync url involves the remote shell (ssh or rsh).
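
A caller of the new rsyncProgress signature only needs to supply a MeterUpdate and remember the --progress flag; a hedged sketch follows (the sendWithMeter name and argument list are assumptions, not code from this commit; Param is the CommandParam constructor already used elsewhere in this diff, re-exported via Common):

-- Sketch: run rsync while feeding cumulative byte counts to a meter.
-- Without --progress among the parameters, rsyncProgress has nothing to parse.
import Common
import Utility.Rsync
import Utility.Metered

sendWithMeter :: MeterUpdate -> FilePath -> String -> IO Bool
sendWithMeter meterupdate src dest = rsyncProgress meterupdate
        [ Param "--progress"
        , Param src
        , Param dest
        ]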
diff --git a/debian/changelog b/debian/changelog
index 38c329dcd..96cca734d 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -4,6 +4,7 @@ git-annex (4.20130324) UNRELEASED; urgency=low
* Per-command usage messages.
* webapp: Fix a race that sometimes caused alerts or other notifications
to be missed if they occurred while a page was loading.
+ * webapp: Progress bar fixes for many types of special remotes.
-- Joey Hess <joeyh@debian.org> Mon, 25 Mar 2013 10:21:46 -0400
diff --git a/doc/bugs/No_progress_bars_with_S3.mdwn b/doc/bugs/No_progress_bars_with_S3.mdwn
index 907b3cb11..afa7ba5ee 100644
--- a/doc/bugs/No_progress_bars_with_S3.mdwn
+++ b/doc/bugs/No_progress_bars_with_S3.mdwn
@@ -19,3 +19,8 @@ I expect a changing status bar and percentage. Instead I see no changes when an
When uploading local data to an S3 remote, I see no progress bars. The progress bar area on active uploads stays the same grey as the bar on queued uploads. The status does not change from "0% of...". The uploads are completing, but this makes it very difficult to judge their activity.
The only remotes I currently have setup are S3 special remotes, so I cannot say whether progress bars are working for uploads to other remote types.
+
+> [[done]], this turned out to be a confusion in the progress code;
+> parts were expecting a full number of bytes since the start, while
+> other parts were sending the number of bytes in a chunk. Result was
+> progress bars stuck at 0% often. --[[Joey]]
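
To make the confusion described in that note concrete, here is a small sketch (reportChunk and reportTotal are hypothetical names; BytesProcessed, addBytesProcessed, and MeterUpdate come from the new Utility.Metered). The old-style callback received only the size of the chunk just handled, while the new MeterUpdate always receives the running total, which is what the progress display expects:

-- Sketch of the two conventions the bug note describes.
import Utility.Metered

-- Old style: report only the size of the chunk just processed.
reportChunk :: (Integer -> IO ()) -> Integer -> IO ()
reportChunk callback chunklen = callback chunklen

-- New style: thread a running total and report that instead.
reportTotal :: MeterUpdate -> BytesProcessed -> Integer -> IO BytesProcessed
reportTotal meterupdate sofar chunklen = do
        let sofar' = addBytesProcessed sofar chunklen
        meterupdate sofar'
        return sofar'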