author	Joey Hess <joey@kitenet.net>	2014-10-22 17:14:38 -0400
committer	Joey Hess <joey@kitenet.net>	2014-10-22 17:14:38 -0400
commit	33e7dd2e0b756270cb51d1ed574cbe4b8173c7cd (patch)
tree	0e9ff04c04c33cd1ba45171983d1b9f4d92cac60
parent	2d7b57270e628994483495159d2be715c8f9531b (diff)
parent	49475bb89542e92c6f466425f29cd0640a8e80f4 (diff)
Merge branch 'master' into s3-aws
Conflicts:
	Remote/S3.hs
-rw-r--r--Annex/Branch.hs6
-rw-r--r--Annex/Branch/Transitions.hs3
-rw-r--r--Annex/CatFile.hs6
-rw-r--r--Annex/CheckIgnore.hs2
-rw-r--r--Annex/Content.hs4
-rw-r--r--Annex/Direct.hs2
-rw-r--r--Annex/Environment.hs22
-rw-r--r--Annex/FileMatcher.hs2
-rw-r--r--Annex/ReplaceFile.hs2
-rw-r--r--Annex/Ssh.hs4
-rw-r--r--Annex/TaggedPush.hs10
-rw-r--r--Annex/Transfer.hs2
-rw-r--r--Annex/View.hs8
-rw-r--r--Annex/View/ViewedFile.hs2
-rw-r--r--Assistant.hs32
-rw-r--r--Assistant/Alert.hs2
-rw-r--r--Assistant/Alert/Utility.hs2
-rw-r--r--Assistant/DaemonStatus.hs2
-rw-r--r--Assistant/DeleteRemote.hs2
-rw-r--r--Assistant/MakeRemote.hs2
-rw-r--r--Assistant/NetMessager.hs4
-rw-r--r--Assistant/Ssh.hs6
-rw-r--r--Assistant/Threads/Committer.hs6
-rw-r--r--Assistant/Threads/Cronner.hs8
-rw-r--r--Assistant/Threads/SanityChecker.hs2
-rw-r--r--Assistant/Threads/UpgradeWatcher.hs4
-rw-r--r--Assistant/Threads/Upgrader.hs2
-rw-r--r--Assistant/Threads/Watcher.hs8
-rw-r--r--Assistant/Threads/WebApp.hs3
-rw-r--r--Assistant/Threads/XMPPClient.hs2
-rw-r--r--Assistant/Threads/XMPPPusher.hs4
-rw-r--r--Assistant/TransferQueue.hs2
-rw-r--r--Assistant/Types/NetMessager.hs2
-rw-r--r--Assistant/XMPP.hs2
-rw-r--r--Assistant/XMPP/Git.hs10
-rw-r--r--Backend/Hash.hs2
-rw-r--r--Build/EvilLinker.hs10
-rw-r--r--Build/EvilSplicer.hs22
-rw-r--r--Build/NullSoftInstaller.hs2
-rw-r--r--Build/OSXMkLibs.hs2
-rw-r--r--Build/Standalone.hs2
-rw-r--r--Checks.hs2
-rw-r--r--CmdLine.hs3
-rw-r--r--CmdLine/GitAnnex.hs152
-rw-r--r--CmdLine/GitAnnexShell.hs20
-rw-r--r--CmdLine/Seek.hs4
-rw-r--r--CmdLine/Usage.hs2
-rw-r--r--Command/Add.hs14
-rw-r--r--Command/AddUnused.hs4
-rw-r--r--Command/AddUrl.hs12
-rw-r--r--Command/Assistant.hs4
-rw-r--r--Command/Commit.hs4
-rw-r--r--Command/ConfigList.hs6
-rw-r--r--Command/Copy.hs6
-rw-r--r--Command/Dead.hs4
-rw-r--r--Command/Describe.hs4
-rw-r--r--Command/Direct.hs4
-rw-r--r--Command/Drop.hs4
-rw-r--r--Command/DropKey.hs4
-rw-r--r--Command/DropUnused.hs4
-rw-r--r--Command/EnableRemote.hs6
-rw-r--r--Command/ExamineKey.hs4
-rw-r--r--Command/Find.hs4
-rw-r--r--Command/FindRef.hs4
-rw-r--r--Command/Fix.hs4
-rw-r--r--Command/Forget.hs4
-rw-r--r--Command/FromKey.hs4
-rw-r--r--Command/Fsck.hs8
-rw-r--r--Command/FuzzTest.hs10
-rw-r--r--Command/GCryptSetup.hs6
-rw-r--r--Command/Get.hs6
-rw-r--r--Command/Group.hs4
-rw-r--r--Command/Help.hs22
-rw-r--r--Command/Import.hs10
-rw-r--r--Command/ImportFeed.hs8
-rw-r--r--Command/InAnnex.hs4
-rw-r--r--Command/Indirect.hs6
-rw-r--r--Command/Info.hs141
-rw-r--r--Command/Init.hs4
-rw-r--r--Command/InitRemote.hs20
-rw-r--r--Command/List.hs20
-rw-r--r--Command/Lock.hs4
-rw-r--r--Command/Log.hs4
-rw-r--r--Command/LookupKey.hs4
-rw-r--r--Command/Map.hs17
-rw-r--r--Command/Merge.hs4
-rw-r--r--Command/MetaData.hs4
-rw-r--r--Command/Migrate.hs6
-rw-r--r--Command/Mirror.hs6
-rw-r--r--Command/Move.hs8
-rw-r--r--Command/NotifyChanges.hs6
-rw-r--r--Command/NumCopies.hs27
-rw-r--r--Command/PreCommit.hs6
-rw-r--r--Command/ReKey.hs4
-rw-r--r--Command/RecvKey.hs8
-rw-r--r--Command/Reinit.hs4
-rw-r--r--Command/Reinject.hs4
-rw-r--r--Command/RemoteDaemon.hs4
-rw-r--r--Command/Repair.hs6
-rw-r--r--Command/ResolveMerge.hs6
-rw-r--r--Command/RmUrl.hs4
-rw-r--r--Command/Schedule.hs6
-rw-r--r--Command/Semitrust.hs4
-rw-r--r--Command/SendKey.hs4
-rw-r--r--Command/Status.hs4
-rw-r--r--Command/Sync.hs6
-rw-r--r--Command/Test.hs4
-rw-r--r--Command/TestRemote.hs4
-rw-r--r--Command/TransferInfo.hs4
-rw-r--r--Command/TransferKey.hs4
-rw-r--r--Command/TransferKeys.hs6
-rw-r--r--Command/Trust.hs8
-rw-r--r--Command/Unannex.hs4
-rw-r--r--Command/Ungroup.hs4
-rw-r--r--Command/Uninit.hs6
-rw-r--r--Command/Unlock.hs4
-rw-r--r--Command/Untrust.hs4
-rw-r--r--Command/Unused.hs4
-rw-r--r--Command/Upgrade.hs4
-rw-r--r--Command/VAdd.hs4
-rw-r--r--Command/VCycle.hs4
-rw-r--r--Command/VFilter.hs4
-rw-r--r--Command/VPop.hs4
-rw-r--r--Command/Version.hs4
-rw-r--r--Command/Vicfg.hs32
-rw-r--r--Command/View.hs6
-rw-r--r--Command/Wanted.hs6
-rw-r--r--Command/Watch.hs4
-rw-r--r--Command/WebApp.hs6
-rw-r--r--Command/Whereis.hs4
-rw-r--r--Command/XMPPGit.hs10
-rw-r--r--Config/Cost.hs2
-rw-r--r--Config/Files.hs2
-rw-r--r--Creds.hs86
-rw-r--r--Git/CatFile.hs2
-rw-r--r--Git/Command.hs4
-rw-r--r--Git/Config.hs2
-rw-r--r--Git/CurrentRepo.hs10
-rw-r--r--Git/DiffTree.hs2
-rw-r--r--Git/GCrypt.hs6
-rw-r--r--Git/Index.hs4
-rw-r--r--Git/LsTree.hs2
-rw-r--r--Git/Remote.hs2
-rw-r--r--Git/Repair.hs19
-rw-r--r--Git/Version.hs2
-rw-r--r--Limit.hs6
-rw-r--r--Locations.hs4
-rw-r--r--Logs.hs16
-rw-r--r--Logs/FsckResults.hs2
-rw-r--r--Logs/MapLog.hs2
-rw-r--r--Logs/MetaData.hs2
-rw-r--r--Logs/Schedule.hs2
-rw-r--r--Logs/SingleValue.hs4
-rw-r--r--Logs/Transitions.hs6
-rw-r--r--Logs/Trust.hs3
-rw-r--r--Logs/Web.hs2
-rw-r--r--Remote.hs8
-rw-r--r--Remote/Bup.hs3
-rw-r--r--Remote/Ddar.hs3
-rw-r--r--Remote/Directory.hs5
-rw-r--r--Remote/External.hs7
-rw-r--r--Remote/GCrypt.hs13
-rw-r--r--Remote/Git.hs3
-rw-r--r--Remote/Glacier.hs21
-rw-r--r--Remote/Helper/Chunked.hs15
-rw-r--r--Remote/Helper/Encryptable.hs63
-rw-r--r--Remote/Helper/Git.hs5
-rw-r--r--Remote/Helper/Special.hs12
-rw-r--r--Remote/Hook.hs7
-rw-r--r--Remote/Rsync.hs5
-rw-r--r--Remote/S3.hs29
-rw-r--r--Remote/Tahoe.hs7
-rw-r--r--Remote/Web.hs5
-rw-r--r--Remote/WebDAV.hs8
-rw-r--r--RemoteDaemon/Transport/Ssh.hs2
-rw-r--r--RemoteDaemon/Types.hs2
-rw-r--r--Test.hs38
-rw-r--r--Types/Crypto.hs8
-rw-r--r--Types/Key.hs2
-rw-r--r--Types/MetaData.hs2
-rw-r--r--Types/Remote.hs4
-rw-r--r--Types/ScheduledActivity.hs2
-rw-r--r--Types/StandardGroups.hs2
-rw-r--r--Types/TrustLevel.hs4
-rw-r--r--Utility/Batch.hs2
-rw-r--r--Utility/CoProcess.hs4
-rw-r--r--Utility/CopyFile.hs4
-rw-r--r--Utility/Daemon.hs2
-rw-r--r--Utility/DataUnits.hs2
-rw-r--r--Utility/Directory.hs4
-rw-r--r--Utility/Env.hs29
-rw-r--r--Utility/ExternalSHA.hs2
-rw-r--r--Utility/FileSystemEncoding.hs2
-rw-r--r--Utility/Format.hs2
-rw-r--r--Utility/Gpg.hs8
-rw-r--r--Utility/HumanTime.hs6
-rw-r--r--Utility/InodeCache.hs2
-rw-r--r--Utility/Lsof.hs2
-rw-r--r--Utility/Matcher.hs4
-rw-r--r--Utility/Path.hs8
-rw-r--r--Utility/Quvi.hs2
-rw-r--r--Utility/Rsync.hs2
-rw-r--r--Utility/SRV.hs2
-rw-r--r--Utility/Scheduled.hs26
-rw-r--r--Utility/SshConfig.hs6
-rw-r--r--Utility/TList.hs2
-rw-r--r--Utility/WebApp.hs2
-rw-r--r--Utility/Yesod.hs4
-rwxr-xr-xdebian/cabal-wrapper20
-rw-r--r--debian/changelog63
-rw-r--r--debian/control25
-rw-r--r--debian/copyright4
-rwxr-xr-xdebian/rules3
-rw-r--r--doc/bugs/Build_error_with_Yesod_1.4.mdwn287
-rw-r--r--doc/bugs/Build_error_with_Yesod_1.4/comment_1_42fe9d62c9dcc55deea35d16b67177e6._comment8
-rw-r--r--doc/bugs/Build_error_with_Yesod_1.4/comment_2_05f6fb19f2527f6dd72ab0e2f87c021a._comment8
-rw-r--r--doc/bugs/Can__39__t_add_a_git_repo_to_git_annex:___34__Invalid_path_repo__47__.git__47__X__34___for_many_X/comment_10_2c8e8a4f35b392b1cb4dc8104786312d._comment17
-rw-r--r--doc/bugs/Drop_files_with_the_same_checksum..mdwn33
-rw-r--r--doc/bugs/Issue_fewer_S3_GET_requests.mdwn9
-rw-r--r--doc/bugs/Req:_Upgrade_to_Yesod_1.4__63___https:__47____47__github.com__47__NixOS__47__nixpkgs__47__pull__47__4391.mdwn24
-rw-r--r--doc/bugs/S3_upload_not_using_multipart/comment_7_f620888512cd78628f82ec9e5eed4ad1._comment21
-rw-r--r--doc/bugs/S3_upload_not_using_multipart/comment_8_4d9242cde0d2348452438659a8aa8d6d._comment8
-rw-r--r--doc/bugs/Upload_to_S3_fails_.mdwn2
-rw-r--r--doc/bugs/Upload_to_S3_fails_/comment_10_b7e912bac673bdffa5775b71d5d39937._comment8
-rw-r--r--doc/bugs/Upload_to_S3_fails_/comment_1_398c014921f9af957fb5e9a92ed0ef4d._comment10
-rw-r--r--doc/bugs/Upload_to_S3_fails_/comment_2_f33ce058c9460cf7d151e739bff0440a._comment10
-rw-r--r--doc/bugs/Upload_to_S3_fails_/comment_3_cd1e768fe1e67daf08b5afd460620922._comment8
-rw-r--r--doc/bugs/Upload_to_S3_fails_/comment_4_0cdd2e8d6e83c03de717ecd3253e753d._comment8
-rw-r--r--doc/bugs/Upload_to_S3_fails_/comment_5_020c055f6c06860dda27c1debb123742._comment43
-rw-r--r--doc/bugs/Upload_to_S3_fails_/comment_6_8bc023fca8cedfc517856cdcd20b7f10._comment10
-rw-r--r--doc/bugs/Upload_to_S3_fails_/comment_7_32685258748a7cdd177e7af2105f128e._comment10
-rw-r--r--doc/bugs/Upload_to_S3_fails_/comment_8_841fd94d0f599c71a76fd22b07944366._comment8
-rw-r--r--doc/bugs/Upload_to_S3_fails_/comment_9_dd837a1cb2146224b9c000cbeea4f3b3._comment8
-rw-r--r--doc/bugs/WebDAV_error_when_connecting_to_box.com:_The_resource_you_tried_to_create_already_exists.mdwn36
-rw-r--r--doc/bugs/WebDAV_error_when_connecting_to_box.com:_The_resource_you_tried_to_create_already_exists/comment_1_ac40ddc26bff27dafdbc457837695a92._comment8
-rw-r--r--doc/bugs/_WebApp_crashed:_getAddrInfo:_does_not_exist___40__Name_or_service_not_known__41_____91__2014-07-23_16:41:45_CEST__93___WebApp:_warning_WebApp_crashed:_getAddrInfo:_does_not_exist___40__Name_or_service_not_known__41__.mdwn2
-rw-r--r--doc/bugs/__34__error:_invalid_object__34____44___after_add__59___cannot_commit/comment_11_7776659e257a97c9a3855c8ad008207a._comment10
-rw-r--r--doc/bugs/annex_get_fails_from_read-only_filesystem.mdwn27
-rw-r--r--doc/bugs/annex_get_fails_from_read-only_filesystem/comment_1_d8ab07429195c06ec4fae199ca9e0764._comment10
-rw-r--r--doc/bugs/annex_get_fails_from_read-only_filesystem/comment_2_03c16df9d6c14e1529c5dc8b5fc49691._comment8
-rw-r--r--doc/bugs/annex_get_fails_from_read-only_filesystem/comment_3_c505a9df0ef63bb7cac28af9502a953d._comment10
-rw-r--r--doc/bugs/box.com/comment_1_d904a08519424cb9f599b2154d1ef953._comment10
-rw-r--r--doc/bugs/cabal_install_fails:_Could_not_find_module___8216__Network.URI__8217__.mdwn216
-rw-r--r--doc/bugs/cannot_add_local_readonly_repo_through_the_webapp.mdwn98
-rw-r--r--doc/bugs/fatal:_Cannot_handle_files_this_big.mdwn96
-rw-r--r--doc/bugs/get_from_glacier_fails_too_early.mdwn72
-rw-r--r--doc/bugs/get_from_glacier_fails_too_early/comment_1_5a4e37fef629e07dce6b83ae311d1b03._comment14
-rw-r--r--doc/bugs/get_from_glacier_fails_too_early/comment_2_da065d367d0a3c91e4957f588f36dc67._comment9
-rw-r--r--doc/bugs/get_from_glacier_fails_too_early/comment_3_1b49cd66a612bb46da5b73c83ab14688._comment11
-rw-r--r--doc/bugs/get_from_glacier_fails_too_early/comment_4_a20b46a5e9c1c72a484962f3539d3b3e._comment7
-rw-r--r--doc/bugs/get_from_glacier_fails_too_early/comment_5_488bb44796e6a4e16f7bfc1f229233e7._comment8
-rw-r--r--doc/bugs/get_from_glacier_fails_too_early/comment_6_9c8f262b3d8b37f2e68108337acbd303._comment48
-rw-r--r--doc/bugs/get_from_glacier_fails_too_early/comment_7_c96b71759fe0d2af450e321ca57edb46._comment12
-rw-r--r--doc/bugs/git_annex_add_adds_unlocked_files.mdwn21
-rw-r--r--doc/bugs/git_annex_add_adds_unlocked_files/comment_2_4b46116eabe61946ae65b293d7bbacb7._comment12
-rw-r--r--doc/bugs/git_annex_add_adds_unlocked_files/comment_2_d53d0710d6ad9f0fdc8a29a98647e94b._comment55
-rw-r--r--doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__.mdwn2
-rw-r--r--doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__/comment_3_7502f88ae1c46e070e7fdbd9b9c1b54d._comment22
-rw-r--r--doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__/comment_4_9f67b14c9ac81f159439c5dff7354b8f._comment10
-rw-r--r--doc/bugs/git_annex_sync_--content_not_syncing_all_objects/comment_6_4540c31acd63626fbad9bde487ec3005._comment8
-rw-r--r--doc/bugs/git_clone_ignores_annex.mdwn25
-rw-r--r--doc/bugs/git_clone_ignores_annex/comment_1_18ba05c51f82ddadd2558f6cd789e394._comment10
-rw-r--r--doc/bugs/hS3_prevents_build.mdwn3
-rw-r--r--doc/bugs/incremental_fsck_should_not_use_sticky_bit/comment_7_f53d0542c9da38e0f6339df8c49c87db._comment8
-rw-r--r--doc/bugs/modified_permissions_persist_after_unlock__44___commit.mdwn40
-rw-r--r--doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_1_875ca12936d4b4505f2e280a454fe558._comment16
-rw-r--r--doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_2_59f68098fa6edb2fe8902b120fda0280._comment94
-rw-r--r--doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_3_22df91abd8c025000e67bdcef891de3b._comment8
-rw-r--r--doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_4_ecf84eeb4feddafcfa7ba7d4a2f164b1._comment13
-rw-r--r--doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_5_2ea1d78ec8a652a53391969e43bcb6f0._comment39
-rw-r--r--doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_6_2a3ad3f95ee03c79404e3784c9ce1a4b._comment8
-rw-r--r--doc/bugs/present_files__47__directories_are_dropped_after_a_sync.mdwn5
-rw-r--r--doc/bugs/present_files__47__directories_are_dropped_after_a_sync/comment_1_9d7591faf99ce48b1e5753c80306ae8b._comment10
-rw-r--r--doc/bugs/present_files__47__directories_are_dropped_after_a_sync/comment_2_7316cba69b9dc0415fea1389238edf25._comment14
-rw-r--r--doc/bugs/problems_with_enableremote_on_gcrypt_remote___40__hosted_with_gitolite__41__.mdwn105
-rw-r--r--doc/bugs/problems_with_enableremote_on_gcrypt_remote___40__hosted_with_gitolite__41__/comment_1_72a97bc3ccb00c623baee874609bb4ca._comment21
-rw-r--r--doc/bugs/rsync_remote_is_not_working.mdwn26
-rw-r--r--doc/bugs/rsync_remote_is_not_working/comment_1_8998edf856a411de1f90b27568628feb._comment20
-rw-r--r--doc/bugs/runs_of_of_memory_adding_2_million_files/comment_10_a201485bf41514fde7c61a4dcbb5064f._comment8
-rw-r--r--doc/bugs/runs_of_of_memory_adding_2_million_files/comment_9_27a31463bcf28b5c684bb483b46a3baf._comment8
-rw-r--r--doc/bugs/vicfg_and_description_often_not_propagated.mdwn2
-rw-r--r--doc/bugs/vicfg_and_description_often_not_propagated/comment_2_d56aed617e0791aa17d9f37c8d3fd317._comment12
-rw-r--r--doc/chunking.mdwn12
-rw-r--r--doc/design/metadata/comment_7_04cd255a516c8520a7bc1a8fad253533._comment8
-rw-r--r--doc/design/metadata/comment_8_0a7e55e7626f72f63966fa1e1d2cf100._comment8
-rw-r--r--doc/design/metadata/comment_9_f0bb62c885a925e0da5ae8ce3c5e9003._comment10
-rw-r--r--doc/design/requests_routing/simroutes.hs6
-rw-r--r--doc/devblog/day_-4__forgetting/comment_9_d9121a5172f02df63364f19eae87d011._comment8
-rw-r--r--doc/devblog/day_221__another_fine_day_of_bugfixing.mdwn10
-rw-r--r--doc/devblog/day_222_preparing_for_debian_release.mdwn12
-rw-r--r--doc/devblog/day_223__partial_commit_problem.mdwn26
-rw-r--r--doc/devblog/day_224-226__long_rainy_slog.mdwn14
-rw-r--r--doc/devblog/day_227__info.mdwn33
-rw-r--r--doc/direct_mode/comment_15_599b2285d24ae1244a1945d572b2c397._comment8
-rw-r--r--doc/forum/ARM_build_on_Zyxel_NAS.mdwn15
-rw-r--r--doc/forum/ARM_build_on_Zyxel_NAS/comment_1_38f38755c0afd76a2b968836fec395e8._comment11
-rw-r--r--doc/forum/ARM_build_on_Zyxel_NAS/comment_2_44c8f1af0cbe9ad51794e6d8d16be627._comment9
-rw-r--r--doc/forum/ARM_build_on_Zyxel_NAS/comment_3_b4f6e5ac672e8ece36cceb74ff3315dd._comment8
-rw-r--r--doc/forum/Android_version_does_not_sync/comment_3_2a4efec37015ea44509e7ed16b36a72d._comment13
-rw-r--r--doc/forum/Attempting_to_repair_fails_with_everincreasing_deltas/comment_3_5a09f65c77dce3c62236c13aa90a1191._comment10
-rw-r--r--doc/forum/Broken_symlinks_remain_after_drop.mdwn7
-rw-r--r--doc/forum/Broken_symlinks_remain_after_drop/comment_1_d4a59b9e58d43d7a3d437e521dd5c4e1._comment12
-rw-r--r--doc/forum/Broken_symlinks_remain_after_drop/comment_2_399ba969a17a41a022c69a1f7c480857._comment8
-rw-r--r--doc/forum/Changing_files_during_git_annex_runs.mdwn12
-rw-r--r--doc/forum/Changing_files_during_git_annex_runs/comment_1_8067077c49dafbe2afa7d182b3314df4._comment14
-rw-r--r--doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work.mdwn47
-rw-r--r--doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work/comment_1_ec390a7d521c697eb6b17e8db1dc9d1d._comment10
-rw-r--r--doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work/comment_2_14a584567ef42d5b7955ee970200e74d._comment10
-rw-r--r--doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work/comment_3_6cbd7329f1f11edf8dd90df27d45158f._comment10
-rw-r--r--doc/forum/Default_annex.largefiles.mdwn1
-rw-r--r--doc/forum/Default_annex.largefiles/comment_1_74a3ad2388e41f1ff17f64a00485a35a._comment8
-rw-r--r--doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__.mdwn14
-rw-r--r--doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_1_67ac7e8b53a4374baf640d32dac79030._comment8
-rw-r--r--doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_2_eb6df2bfcb3892ae22050a8c5f67ee90._comment8
-rw-r--r--doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_3_15f36487383a631f16e041e2885c44ec._comment10
-rw-r--r--doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_4_9293831aff5b6cef490f65d03638d34d._comment12
-rw-r--r--doc/forum/Equivalent_to_git_bundle__63__.mdwn10
-rw-r--r--doc/forum/Equivalent_to_git_bundle__63__/comment_1_e42936a9bc36fbee69f48e32df303dee._comment9
-rw-r--r--doc/forum/Equivalent_to_git_bundle__63__/comment_2_2b8b5c237d8572fdd27202f3502bea96._comment13
-rw-r--r--doc/forum/Git-Annex_Android_sync_files_are_missing_on_Linuxx.mdwn4
-rw-r--r--doc/forum/Git-Annex_Android_sync_files_are_missing_on_Linuxx/comment_1_72d7811990e78fba0b7fc2e1c7ee515f._comment15
-rw-r--r--doc/forum/Git_annex_assistant_can__39__t_find_rsync_nor_git-annex_on_server.mdwn14
-rw-r--r--doc/forum/Git_annex_assistant_can__39__t_find_rsync_nor_git-annex_on_server/comment_1_75c599cc26e7d3645f69173861d4f8be._comment9
-rw-r--r--doc/forum/Git_annex_assistant_can__39__t_find_rsync_nor_git-annex_on_server/comment_2_496e2f3a61b609ebb28ab55e5c30022b._comment12
-rw-r--r--doc/forum/Git_annex_hangs.mdwn2
-rw-r--r--doc/forum/Git_annex_hangs/comment_1_e6b854d4625ae3015aea9c5de71a28ef._comment8
-rw-r--r--doc/forum/Git_annex_hangs/comment_2_4f848771e60c38321a97361b0d1b33dd._comment8
-rw-r--r--doc/forum/Git_annex_hangs/comment_3_a07abdd1dc21a69ad6be0526edaeffc1._comment13
-rw-r--r--doc/forum/Git_annex_hangs/comment_4_2ba5992c32753ed03ddd5c12264e9acf._comment8
-rw-r--r--doc/forum/Git_annex_hangs/comment_5_5fd749f92343079b3916a4d32ddf39c7._comment12
-rw-r--r--doc/forum/How_To_Permanently_Delete_a_File__63__.mdwn13
-rw-r--r--doc/forum/How_To_Permanently_Delete_a_File__63__/comment_1_7f2cefb0991789be5a960eb9c0a9df3f._comment22
-rw-r--r--doc/forum/How_To_Permanently_Delete_a_File__63__/comment_2_d13b456c5b3990082c16e78a50f5db91._comment14
-rw-r--r--doc/forum/How_To_Permanently_Delete_a_File__63__/comment_3_854c17ff8cb38486c4bef618d1e94919._comment24
-rw-r--r--doc/forum/How_To_Permanently_Delete_a_File__63__/comment_4_9572ad02bbf6845b1ab6d7c612c12a2a._comment19
-rw-r--r--doc/forum/How_To_Permanently_Delete_a_File__63__/comment_5_9c28faabb7d7bd1e83d551e2938d3532._comment14
-rw-r--r--doc/forum/How_to_list_all_existing_metadata_types__63__.mdwn15
-rw-r--r--doc/forum/How_to_list_all_existing_metadata_types__63__/comment_1_a8c30f697f32a3807661a59482d79b18._comment19
-rw-r--r--doc/forum/How_to_work_with_transfer_repos_manually__63__.mdwn18
-rw-r--r--doc/forum/How_to_work_with_transfer_repos_manually__63__/comment_1_3dec369405e6b6a4a6e5121546c03712._comment11
-rw-r--r--doc/forum/How_to_work_with_transfer_repos_manually__63__/comment_1_b8f3c09b470d99578a4a17064498dd39._comment12
-rw-r--r--doc/forum/How_to_work_with_transfer_repos_manually__63__/comment_3_be2c594bc1d162cfb1acc3a01fc284f2._comment7
-rw-r--r--doc/forum/Is_there_a_way_to_get_back_to_clean-state_after_unworking_annex_assistant_configuration_attempt__63__.mdwn28
-rw-r--r--doc/forum/Is_there_a_way_to_get_back_to_clean-state_after_unworking_annex_assistant_configuration_attempt__63__/comment_1_d77fbbbe3a7438a1e79f175df1f69ef3._comment17
-rw-r--r--doc/forum/Modification_time_of_files_retained_in_synchronized_remote_copies__63__/comment_1_2b13584998108af0522b898c5d396ba4._comment8
-rw-r--r--doc/forum/Move_unsynced_file_in_direct_mode/comment_1_12a797cba753168dfde9e6339c00f481._comment10
-rw-r--r--doc/forum/Move_unsynced_file_in_direct_mode/comment_2_f3aec24668c35780a033f2b035df10ee._comment20
-rw-r--r--doc/forum/Preserving_extended_attributes.mdwn5
-rw-r--r--doc/forum/Removing_git-annex_repo/comment_1_58fcceb96647a8c7f33d188ae908f3bd._comment8
-rw-r--r--doc/forum/SSH_remote_transfers_queued_but_no_movement/comment_1_fea4e2317f850d6166480cddba088ae5._comment10
-rw-r--r--doc/forum/Stale_keys_and_.cache_files_left_in_.git__47__annex__47__objects/comment_1_2aa80b317863a99e676a375d907d0e84._comment13
-rw-r--r--doc/forum/This_account_is_restricted_by_rssh._Allowed_commands:_scp_rsync__160__.mdwn5
-rw-r--r--doc/forum/This_account_is_restricted_by_rssh._Allowed_commands:_scp_rsync__160__/comment_1_68e911629da672473bd6188407a68be2._comment13
-rw-r--r--doc/forum/Using_the_Git-Annex_Assistant_as_a_Backup_and_Syncing_Service/comment_1_7070f6e7e05fba7686d8620d62906a83._comment12
-rw-r--r--doc/forum/XMPP_problem_behind_router.mdwn3
-rw-r--r--doc/forum/XMPP_problem_behind_router/comment_1_25a7f8dc5cf14cda4d76b2f8c6ca77d5._comment8
-rw-r--r--doc/forum/XMPP_problem_behind_router/comment_2_3186ebe32c30764b9fd53625dd3e4eda._comment8
-rw-r--r--doc/forum/XMPP_problem_behind_router/comment_3_7fa8fe8cb92993c935ba2dbfb2aef728._comment8
-rw-r--r--doc/forum/add_only_binary_files__63__.mdwn1
-rw-r--r--doc/forum/add_only_binary_files__63__/comment_1_7ce3be5bafd62ce5ed78bcd9323039cc._comment16
-rw-r--r--doc/forum/annex_merge_creates___34__synced__47____42____34___branches/comment_4_79219e920a6beb4bd3265571f59f51cb._comment36
-rw-r--r--doc/forum/assistant_created_encrypted__backup_remote:_Howto_restore__63__/comment_5_c1d247fa128c0a0fc899284f5f95002c._comment8
-rw-r--r--doc/forum/assistant_created_encrypted__backup_remote:_Howto_restore__63__/comment_6_cf877a3502802492cd2bc3012cb2d779._comment12
-rw-r--r--doc/forum/big_overhead/comment_12_475d5af95adcfcd3a51e10f270205eb7._comment71
-rw-r--r--doc/forum/big_overhead/comment_13_1c8cc992f04fc63179094c494bd25025._comment10
-rw-r--r--doc/forum/big_overhead/comment_14_cbfb3d557915258e72c65a4e84df77a9._comment8
-rw-r--r--doc/forum/big_overhead/comment_15_b973529bae549bcbaaae792f0403989b._comment10
-rw-r--r--doc/forum/drop__47__whereis_not_showing_gcrypted_special_ssh_remote/comment_2_2c14d88e55ea7d4edc90ce0091025f32._comment10
-rw-r--r--doc/forum/files_being_dropped_undesirably.mdwn47
-rw-r--r--doc/forum/files_being_dropped_undesirably/comment_1_d03f8ed7d3f3da58612bf238c1790fb4._comment11
-rw-r--r--doc/forum/files_being_dropped_undesirably/comment_2_7d885abebfec789348639494b1bb1829._comment24
-rw-r--r--doc/forum/files_being_dropped_undesirably/comment_3_7c70b58f89408304055eefb1b166ef2e._comment10
-rw-r--r--doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved.mdwn1
-rw-r--r--doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_1_81111f59caea9f70cb9d597381e42c96._comment8
-rw-r--r--doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_2_8ca9156d21d9f3db0d83d6aa9b69caa0._comment17
-rw-r--r--doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_3_9da6ca0250ab0dcfc9a012df75e2e711._comment12
-rw-r--r--doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_4_55c52c45f3aaddfb63a1f53efe2ee582._comment8
-rw-r--r--doc/forum/git_annex_ls___47___metadata_in_git_annex_whereis/comment_3_24c54ed70220974b98700bf717d1e770._comment29
-rw-r--r--doc/forum/git_annex_sync:_only_git-annex.mdwn3
-rw-r--r--doc/forum/git_annex_sync:_only_git-annex/comment_1_2be68ed36a1e6bfc896d5aea9463d3c7._comment14
-rw-r--r--doc/forum/git_annex_sync:_only_git-annex/comment_2_50e137e4d278dfd0103a41aff0cfa3a9._comment10
-rw-r--r--doc/forum/git_annex_sync:_only_git-annex/comment_3_7753f8276478e0e05c10dba2b84bbc49._comment12
-rw-r--r--doc/forum/git_annex_sync:_only_git-annex/comment_4_cc3cebf5bc403b490e31e63af964a823._comment12
-rw-r--r--doc/forum/git_annex_whereis_--json_output_with_two_variables_with_same_name.mdwn21
-rw-r--r--doc/forum/git_annex_whereis_--json_output_with_two_variables_with_same_name/comment_1_3bfde59729b904aa1ef815427dd35ae6._comment8
-rw-r--r--doc/forum/git_annex_whereis_--json_output_with_two_variables_with_same_name/comment_2_d08a955a11953cc783f09bfba180dbd6._comment8
-rw-r--r--doc/forum/help_running_git-annex_on_top_of_existing_repo/comment_7_15d918ededb5b8375b0ca13d0b3523ff._comment8
-rw-r--r--doc/forum/help_running_git-annex_on_top_of_existing_repo/comment_8_dcc3f2c6d55006776610e8d770b61d12._comment9
-rw-r--r--doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit.mdwn9
-rw-r--r--doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_1_a0551431a57ccab2463f2a6d43553337._comment15
-rw-r--r--doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_2_e96e8cf6e08e3a21bfcefbc202e78fe2._comment8
-rw-r--r--doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_3_2ad4c1a4bfe00c22444ab878c84a8830._comment47
-rw-r--r--doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_4_44639388349a9ea5eabda9ebf79817b3._comment8
-rw-r--r--doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_5_339123ab87b69b11d6e999ad6eaf6df5._comment8
-rw-r--r--doc/forum/lsof_resource_use_problems.mdwn42
-rw-r--r--doc/forum/lsof_resource_use_problems/comment_1_a5e5d410545fa7f93f08936ec6aeee42._comment10
-rw-r--r--doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository.mdwn19
-rw-r--r--doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_1_419b27cb1c71bce021ef9f2e471aa92e._comment10
-rw-r--r--doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_2_dae4c7a42080dd89150159b2946839b1._comment8
-rw-r--r--doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_3_9d9fa65559ba4bb0e4676289b5a65684._comment8
-rw-r--r--doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_4_4e89b3590cc33b2565cd173ef7c85013._comment11
-rw-r--r--doc/forum/unannex_--fast_+_uninit_leaves_files_in_.git__47__annex__47__objects__63__.mdwn47
-rw-r--r--doc/forum/unannex_--fast_+_uninit_leaves_files_in_.git__47__annex__47__objects__63__/comment_1_4aaf93801119b36a01e452c7bb0fc7e9._comment18
-rw-r--r--doc/forum/using_git-annex_with_lightroom.mdwn6
-rw-r--r--doc/forum/using_git-annex_with_lightroom/comment_1_ec977efd277f0644767a4fc7064e4baf._comment8
-rw-r--r--doc/git-annex.mdwn19
-rw-r--r--doc/install/Docker.mdwn7
-rw-r--r--doc/install/OSX/comment_9_f11f726d1fee3c4c91f3c984e792037d._comment8
-rw-r--r--doc/news/version_5.20140709.mdwn11
-rw-r--r--doc/news/version_5.20140717.mdwn28
-rw-r--r--doc/news/version_5.20140817.mdwn42
-rw-r--r--doc/news/version_5.20140831.mdwn13
-rw-r--r--doc/news/version_5.20140919.mdwn16
-rw-r--r--doc/news/version_5.20140926.mdwn5
-rw-r--r--doc/news/version_5.20140927.mdwn6
-rw-r--r--doc/news/version_5.20141013.mdwn7
-rw-r--r--doc/publicrepos.mdwn7
-rw-r--r--doc/related_software.mdwn2
-rw-r--r--doc/special_remotes/rsync.mdwn4
-rw-r--r--doc/thanks.mdwn8
-rw-r--r--doc/tips/Synology_NAS_and_git_annex/comment_2_5e723ccf026fe970ad31207f9f036b69._comment30
-rw-r--r--doc/tips/Synology_NAS_and_git_annex/comment_3_8beb2b4b79c7787a92689aaad3bfc452._comment10
-rw-r--r--doc/tips/deleting_unwanted_files.mdwn40
-rw-r--r--doc/tips/dumb_metadata_extraction_from_xbmc.mdwn2
-rw-r--r--doc/tips/dumb_metadata_extraction_from_xbmc/git-annex-xbmc-playcount.pl241
-rw-r--r--doc/tips/file_manager_integration.mdwn2
-rw-r--r--doc/tips/file_manager_integration/comment_3_e7096737268cf66fce2709e9e4937f51._comment8
-rw-r--r--doc/tips/googledriveannex/comment_5_b547ee81946e14975f082f22ccbea035._comment29
-rw-r--r--doc/tips/googledriveannex/comment_6_3a693129a0928b327c7ac4ef45c96acb._comment10
-rw-r--r--doc/tips/googledriveannex/comment_7_3a645a0cd1e4c939b7a4b8a97a0e9b03._comment26
-rw-r--r--doc/tips/googledriveannex/comment_8_7df56c426b27f12dfde09edf345cb76b._comment8
-rw-r--r--doc/tips/using_the_web_as_a_special_remote/comment_8_3f32d536f51d5e9908953caf5736b0a0._comment16
-rw-r--r--doc/tips/using_the_web_as_a_special_remote/comment_9_b420b1f320d620a9909cce5086c549bf._comment8
-rw-r--r--doc/todo/does_not_preserve_timestamps.mdwn16
-rw-r--r--doc/todo/does_not_preserve_timestamps/comment_1_caf5e5cb17f4d05fff8c2fab661cd93f._comment8
-rw-r--r--doc/todo/does_not_preserve_timestamps/comment_2_c337fca1474b5b78f61ad6f421138ae4._comment12
-rw-r--r--doc/todo/does_not_preserve_timestamps/comment_3_9a3eeddc46e5a420575f00cb47caf703._comment8
-rw-r--r--doc/todo/does_not_preserve_timestamps/comment_4_99b064259fc2e3c6eb83c3da3b2d3bac._comment10
-rw-r--r--doc/todo/does_not_preserve_timestamps/comment_5_c95c8b9bd617830604500213c962fc7a._comment9
-rw-r--r--doc/todo/does_not_preserve_timestamps/comment_6_b99e00d0bc4258c4cb28b544b19ea3b8._comment12
-rw-r--r--doc/todo/read-only_removable_drives.mdwn10
-rw-r--r--doc/todo/read-only_removable_drives/comment_2_08fced29b86b21f63bb0868747227e08._comment12
-rw-r--r--doc/todo/read-only_removable_drives/comment_3_2675e211c7bd248b7f7c1bbc6fd46679._comment10
-rw-r--r--doc/todo/read-only_removable_drives/comment_4_9e9bc6dd5fa8c4cf7f2511b771bd1bc7._comment8
-rw-r--r--doc/todo/read-only_removable_drives/comment_5_a693c5744bfc6c33f5605aa9d9c0bfe0._comment8
-rw-r--r--doc/todo/read-only_removable_drives/comment_6_737e3d315f29a4fc61597ce4f9ec6206._comment20
-rw-r--r--doc/todo/read-only_removable_drives/comment_7_16c8652d38ae57db4ed1860a4733a18b._comment8
-rw-r--r--doc/todo/show_readonly_removable_drives_in_the_webapp.mdwn15
-rw-r--r--doc/todo/show_readonly_removable_drives_in_the_webapp/comment_1_c41140289f9b062e96cfd5d9d5382155._comment15
-rw-r--r--doc/todo/vicfg_comment_gotcha.mdwn20
-rw-r--r--doc/todo/webapp_nudge_when_less_than_numcopies_clones.mdwn7
-rw-r--r--doc/todo/whishlist:_temporary_relinking_to_remotes.mdwn30
-rw-r--r--doc/todo/wishlist:_git_annex_diff/comment_2_2e8324f47b66dce385263e258e94da16._comment32
-rw-r--r--doc/upgrades/insecure_embedded_creds.mdwn42
-rw-r--r--git-annex.cabal23
-rw-r--r--git-annex.hs39
-rw-r--r--standalone/android/Makefile6
-rwxr-xr-xstandalone/android/buildchroot2
-rwxr-xr-xstandalone/android/buildchroot-inchroot11
-rwxr-xr-xstandalone/android/buildchroot-inchroot-asuser6
-rw-r--r--standalone/android/cabal.config208
-rw-r--r--standalone/android/haskell-patches/dns_use-android-net.dns1-command-instead-of-resolv.conf.patch47
-rw-r--r--standalone/android/haskell-patches/entropy_cross-build.patch25
-rw-r--r--standalone/android/haskell-patches/gnuidn_fix-build-with-new-base.patch50
-rw-r--r--standalone/android/haskell-patches/network_2.4.1.0_0003-configure-misdetects-accept4.patch20
-rw-r--r--standalone/android/haskell-patches/shakespeare-text_remove-TH.patch153
-rw-r--r--standalone/android/haskell-patches/unix-time_hack-for-Bionic.patch37
-rw-r--r--standalone/android/haskell-patches/x509-system_support-Android-cert-store.patch29
-rwxr-xr-xstandalone/android/install-haskell-packages70
-rw-r--r--standalone/android/term.patch4
-rw-r--r--standalone/no-th/haskell-patches/DAV_build-without-TH.patch30
-rw-r--r--standalone/no-th/haskell-patches/hamlet_hack_TH.patch205
-rw-r--r--standalone/no-th/haskell-patches/lens_no-TH.patch52
-rw-r--r--standalone/no-th/haskell-patches/optparse-applicative_remove-ANN.patch33
-rw-r--r--standalone/no-th/haskell-patches/persistent-template_stub-out.patch14
-rw-r--r--standalone/no-th/haskell-patches/persistent_1.1.5.1_0001-disable-TH.patch14
-rw-r--r--standalone/no-th/haskell-patches/process-conduit_avoid-TH.patch16
-rw-r--r--standalone/no-th/haskell-patches/shakespeare-css_remove_TH.patch366
-rw-r--r--standalone/no-th/haskell-patches/shakespeare-js_hack_TH.patch316
-rw-r--r--standalone/no-th/haskell-patches/shakespeare_remove-TH.patch1161
-rw-r--r--standalone/no-th/haskell-patches/vector_hack-to-build-with-new-ghc.patch35
-rw-r--r--standalone/no-th/haskell-patches/yesod-core_expand_TH.patch61
-rw-r--r--standalone/no-th/haskell-patches/yesod-form_spliced-TH.patch151
-rw-r--r--standalone/no-th/haskell-patches/yesod-persistent_do-not-really-build.patch14
-rw-r--r--standalone/no-th/haskell-patches/yesod_hack-TH.patch19
485 files changed, 7087 insertions, 2390 deletions
diff --git a/Annex/Branch.hs b/Annex/Branch.hs
index a03d6ddf3..c567db554 100644
--- a/Annex/Branch.hs
+++ b/Annex/Branch.hs
@@ -454,7 +454,7 @@ handleTransitions jl localts refs = do
ignoreRefs untransitionedrefs
return True
where
- getreftransition ref = do
+ getreftransition ref = do
ts <- parseTransitionsStrictly "remote" . decodeBS
<$> catFile ref transitionsLog
return (ref, ts)
@@ -470,7 +470,7 @@ ignoreRefs rs = do
getIgnoredRefs :: Annex (S.Set Git.Ref)
getIgnoredRefs = S.fromList . mapMaybe Git.Sha.extractSha . lines <$> content
where
- content = do
+ content = do
f <- fromRepo gitAnnexIgnoredRefs
liftIO $ catchDefaultIO "" $ readFile f
@@ -498,7 +498,7 @@ performTransitionsLocked jl ts neednewlocalbranch transitionedrefs = do
ref <- getBranch
commitIndex jl ref message (nub $ fullname:transitionedrefs)
where
- message
+ message
| neednewlocalbranch && null transitionedrefs = "new branch for transition " ++ tdesc
| otherwise = "continuing transition " ++ tdesc
tdesc = show $ map describeTransition $ transitionList ts
diff --git a/Annex/Branch/Transitions.hs b/Annex/Branch/Transitions.hs
index f5833c0bc..9d306fe80 100644
--- a/Annex/Branch/Transitions.hs
+++ b/Annex/Branch/Transitions.hs
@@ -19,6 +19,7 @@ import Types.TrustLevel
import Types.UUID
import qualified Data.Map as M
+import Data.Default
data FileTransition
= ChangeFile String
@@ -60,4 +61,4 @@ dropDeadFromPresenceLog :: TrustMap -> [Presence.LogLine] -> [Presence.LogLine]
dropDeadFromPresenceLog trustmap = filter $ notDead trustmap (toUUID . Presence.info)
notDead :: TrustMap -> (v -> UUID) -> v -> Bool
-notDead trustmap a v = M.findWithDefault SemiTrusted (a v) trustmap /= DeadTrusted
+notDead trustmap a v = M.findWithDefault def (a v) trustmap /= DeadTrusted
diff --git a/Annex/CatFile.hs b/Annex/CatFile.hs
index 8b4d746e1..8a6f10def 100644
--- a/Annex/CatFile.hs
+++ b/Annex/CatFile.hs
@@ -100,10 +100,10 @@ catKey' modeguaranteed sha mode
catLink :: Bool -> Sha -> Annex String
catLink modeguaranteed sha = fromInternalGitPath . decodeBS <$> get
where
- -- If the mode is not guaranteed to be correct, avoid
+ -- If the mode is not guaranteed to be correct, avoid
-- buffering the whole file content, which might be large.
-- 8192 is enough if it really is a symlink.
- get
+ get
| modeguaranteed = catObject sha
| otherwise = L.take 8192 <$> catObject sha
@@ -120,7 +120,7 @@ catKeyChecked :: Bool -> Ref -> Annex (Maybe Key)
catKeyChecked needhead ref@(Ref r) =
catKey' False ref =<< findmode <$> catTree treeref
where
- pathparts = split "/" r
+ pathparts = split "/" r
dir = intercalate "/" $ take (length pathparts - 1) pathparts
file = fromMaybe "" $ lastMaybe pathparts
treeref = Ref $ if needhead then "HEAD" ++ dir ++ "/" else dir ++ "/"
diff --git a/Annex/CheckIgnore.hs b/Annex/CheckIgnore.hs
index d45e652bc..f2ed93543 100644
--- a/Annex/CheckIgnore.hs
+++ b/Annex/CheckIgnore.hs
@@ -18,7 +18,7 @@ import qualified Annex
checkIgnored :: FilePath -> Annex Bool
checkIgnored file = go =<< checkIgnoreHandle
where
- go Nothing = return False
+ go Nothing = return False
go (Just h) = liftIO $ Git.checkIgnored h file
checkIgnoreHandle :: Annex (Maybe Git.CheckIgnoreHandle)
diff --git a/Annex/Content.hs b/Annex/Content.hs
index c0c79ae56..37090d3bb 100644
--- a/Annex/Content.hs
+++ b/Annex/Content.hs
@@ -456,7 +456,7 @@ removeAnnex (ContentLock key) = withObjectLoc key remove removedirect
secureErase :: FilePath -> Annex ()
secureErase file = maybe noop go =<< annexSecureEraseCommand <$> Annex.getGitConfig
where
- go basecmd = void $ liftIO $
+ go basecmd = void $ liftIO $
boolSystem "sh" [Param "-c", Param $ gencmd basecmd]
gencmd = massReplace [ ("%file", shellEscape file) ]
@@ -555,7 +555,7 @@ saveState nocommit = doSideAction $ do
downloadUrl :: [Url.URLString] -> FilePath -> Annex Bool
downloadUrl urls file = go =<< annexWebDownloadCommand <$> Annex.getGitConfig
where
- go Nothing = Url.withUrlOptions $ \uo ->
+ go Nothing = Url.withUrlOptions $ \uo ->
anyM (\u -> Url.download u file uo) urls
go (Just basecmd) = liftIO $ anyM (downloadcmd basecmd) urls
downloadcmd basecmd url =
diff --git a/Annex/Direct.hs b/Annex/Direct.hs
index 76a6f27dc..9489b74f2 100644
--- a/Annex/Direct.hs
+++ b/Annex/Direct.hs
@@ -347,7 +347,7 @@ toDirectGen k f = do
(dloc:_) -> return $ Just $ fromdirect dloc
)
where
- fromindirect loc = do
+ fromindirect loc = do
{- Move content from annex to direct file. -}
updateInodeCache k loc
void $ addAssociatedFile k f
diff --git a/Annex/Environment.hs b/Annex/Environment.hs
index bc97c17b7..1ddd2b238 100644
--- a/Annex/Environment.hs
+++ b/Annex/Environment.hs
@@ -13,10 +13,7 @@ import Common.Annex
import Utility.UserInfo
import qualified Git.Config
import Config
-
-#ifndef mingw32_HOST_OS
import Utility.Env
-#endif
{- Checks that the system's environment allows git to function.
- Git requires a GECOS username, or suitable git configuration, or
@@ -35,31 +32,26 @@ checkEnvironment = do
liftIO checkEnvironmentIO
checkEnvironmentIO :: IO ()
-checkEnvironmentIO =
-#ifdef mingw32_HOST_OS
- noop
-#else
- whenM (null <$> myUserGecos) $ do
- username <- myUserName
- ensureEnv "GIT_AUTHOR_NAME" username
- ensureEnv "GIT_COMMITTER_NAME" username
+checkEnvironmentIO = whenM (null <$> myUserGecos) $ do
+ username <- myUserName
+ ensureEnv "GIT_AUTHOR_NAME" username
+ ensureEnv "GIT_COMMITTER_NAME" username
where
#ifndef __ANDROID__
- -- existing environment is not overwritten
- ensureEnv var val = void $ setEnv var val False
+ -- existing environment is not overwritten
+ ensureEnv var val = setEnv var val False
#else
-- Environment setting is broken on Android, so this is dealt with
-- in runshell instead.
ensureEnv _ _ = noop
#endif
-#endif
{- Runs an action that commits to the repository, and if it fails,
- sets user.email and user.name to a dummy value and tries the action again. -}
ensureCommit :: Annex a -> Annex a
ensureCommit a = either retry return =<< tryNonAsync a
where
- retry _ = do
+ retry _ = do
name <- liftIO myUserName
setConfig (ConfigKey "user.name") name
setConfig (ConfigKey "user.email") name
diff --git a/Annex/FileMatcher.hs b/Annex/FileMatcher.hs
index da6a5e0e9..856c68122 100644
--- a/Annex/FileMatcher.hs
+++ b/Annex/FileMatcher.hs
@@ -106,7 +106,7 @@ tokenizeMatcher = filter (not . null ) . concatMap splitparens . words
largeFilesMatcher :: Annex (FileMatcher Annex)
largeFilesMatcher = go =<< annexLargeFiles <$> Annex.getGitConfig
where
- go Nothing = return matchAll
+ go Nothing = return matchAll
go (Just expr) = do
gm <- groupMap
rc <- readRemoteLog
diff --git a/Annex/ReplaceFile.hs b/Annex/ReplaceFile.hs
index 9700d4b60..0355ddd51 100644
--- a/Annex/ReplaceFile.hs
+++ b/Annex/ReplaceFile.hs
@@ -33,7 +33,7 @@ replaceFileOr file action rollback = do
tmpfile <- liftIO $ setup tmpdir
go tmpfile `catchNonAsync` (const $ rollback tmpfile)
where
- setup tmpdir = do
+ setup tmpdir = do
(tmpfile, h) <- openTempFileWithDefaultPermissions tmpdir "tmp"
hClose h
return tmpfile
diff --git a/Annex/Ssh.hs b/Annex/Ssh.hs
index ad636b4aa..3b7bd7d69 100644
--- a/Annex/Ssh.hs
+++ b/Annex/Ssh.hs
@@ -78,10 +78,10 @@ bestSocketPath abssocketfile = do
then Just socketfile
else Nothing
where
- -- ssh appends a 16 char extension to the socket when setting it
+ -- ssh appends a 16 char extension to the socket when setting it
-- up, which needs to be taken into account when checking
-- that a valid socket was constructed.
- sshgarbage = replicate (1+16) 'X'
+ sshgarbage = replicate (1+16) 'X'
sshConnectionCachingParams :: FilePath -> [CommandParam]
sshConnectionCachingParams socketfile =
diff --git a/Annex/TaggedPush.hs b/Annex/TaggedPush.hs
index 35fdf333c..a31758022 100644
--- a/Annex/TaggedPush.hs
+++ b/Annex/TaggedPush.hs
@@ -49,13 +49,13 @@ fromTaggedBranch b = case split "/" $ Git.fromRef b of
taggedPush :: UUID -> Maybe String -> Git.Ref -> Remote -> Git.Repo -> IO Bool
taggedPush u info branch remote = Git.Command.runBool
- [ Param "push"
- , Param $ Remote.name remote
+ [ Param "push"
+ , Param $ Remote.name remote
{- Using forcePush here is safe because we "own" the tagged branch
- we're pushing; it has no other writers. Ensures it is pushed
- even if it has been rewritten by a transition. -}
- , Param $ Git.Branch.forcePush $ refspec Annex.Branch.name
- , Param $ refspec branch
- ]
+ , Param $ Git.Branch.forcePush $ refspec Annex.Branch.name
+ , Param $ refspec branch
+ ]
where
refspec b = Git.fromRef b ++ ":" ++ Git.fromRef (toTaggedBranch u info b)
diff --git a/Annex/Transfer.hs b/Annex/Transfer.hs
index d33d3073b..fb89869f8 100644
--- a/Annex/Transfer.hs
+++ b/Annex/Transfer.hs
@@ -69,7 +69,7 @@ runTransfer' ignorelock t file shouldretry a = do
return False
else do
ok <- retry info metervar $
- bracketIO (return fd) (cleanup tfile) (const $ a meter)
+ bracketIO (return fd) (cleanup tfile) (const $ a meter)
unless ok $ recordFailedTransfer t info
return ok
where
diff --git a/Annex/View.hs b/Annex/View.hs
index a1d873f50..4cbf274aa 100644
--- a/Annex/View.hs
+++ b/Annex/View.hs
@@ -102,7 +102,7 @@ refineView origview = checksize . calc Unchanged origview
let (components', viewchanges) = runWriter $
mapM (\c -> updateViewComponent c field vf) (viewComponents view)
viewchange = if field `elem` map viewField (viewComponents origview)
- then maximum viewchanges
+ then maximum viewchanges
else Narrowing
in (view { viewComponents = components' }, viewchange)
| otherwise =
@@ -207,7 +207,7 @@ viewComponentMatcher :: ViewComponent -> (MetaData -> Maybe [MetaValue])
viewComponentMatcher viewcomponent = \metadata ->
matcher (currentMetaDataValues metafield metadata)
where
- metafield = viewField viewcomponent
+ metafield = viewField viewcomponent
matcher = case viewFilter viewcomponent of
FilterValues s -> \values -> setmatches $
S.intersection s values
@@ -236,8 +236,8 @@ toViewPath = concatMap escapeslash . fromMetaValue
fromViewPath :: FilePath -> MetaValue
fromViewPath = toMetaValue . deescapeslash []
where
- deescapeslash s [] = reverse s
- deescapeslash s (c:cs)
+ deescapeslash s [] = reverse s
+ deescapeslash s (c:cs)
| c == pseudoSlash = case cs of
(c':cs')
| c' == pseudoSlash -> deescapeslash (pseudoSlash:s) cs'
diff --git a/Annex/View/ViewedFile.hs b/Annex/View/ViewedFile.hs
index 25ac16a34..ef901f700 100644
--- a/Annex/View/ViewedFile.hs
+++ b/Annex/View/ViewedFile.hs
@@ -58,7 +58,7 @@ viewedFileReuse = takeFileName
dirFromViewedFile :: ViewedFile -> FilePath
dirFromViewedFile = joinPath . drop 1 . sep [] ""
where
- sep l _ [] = reverse l
+ sep l _ [] = reverse l
sep l curr (c:cs)
| c == '%' = sep (reverse curr:l) "" cs
| c == '\\' = case cs of
diff --git a/Assistant.hs b/Assistant.hs
index 82f157241..2ba778d80 100644
--- a/Assistant.hs
+++ b/Assistant.hs
@@ -119,7 +119,7 @@ startDaemon assistant foreground startdelay cannotrun listenhost startbrowser =
)
#endif
where
- desc
+ desc
| assistant = "assistant"
| otherwise = "watch"
start daemonize webappwaiter = withThreadState $ \st -> do
@@ -147,7 +147,7 @@ startDaemon assistant foreground startdelay cannotrun listenhost startbrowser =
let threads = if isJust cannotrun
then webappthread
else webappthread ++
- [ watch $ commitThread
+ [ watch commitThread
#ifdef WITH_WEBAPP
#ifdef WITH_PAIRING
, assist $ pairListenerThread urlrenderer
@@ -158,29 +158,29 @@ startDaemon assistant foreground startdelay cannotrun listenhost startbrowser =
, assist $ xmppReceivePackThread urlrenderer
#endif
#endif
- , assist $ pushThread
- , assist $ pushRetryThread
- , assist $ mergeThread
- , assist $ transferWatcherThread
- , assist $ transferPollerThread
- , assist $ transfererThread
- , assist $ remoteControlThread
- , assist $ daemonStatusThread
+ , assist pushThread
+ , assist pushRetryThread
+ , assist mergeThread
+ , assist transferWatcherThread
+ , assist transferPollerThread
+ , assist transfererThread
+ , assist remoteControlThread
+ , assist daemonStatusThread
, assist $ sanityCheckerDailyThread urlrenderer
- , assist $ sanityCheckerHourlyThread
+ , assist sanityCheckerHourlyThread
, assist $ problemFixerThread urlrenderer
#ifdef WITH_CLIBS
, assist $ mountWatcherThread urlrenderer
#endif
- , assist $ netWatcherThread
+ , assist netWatcherThread
, assist $ upgraderThread urlrenderer
, assist $ upgradeWatcherThread urlrenderer
- , assist $ netWatcherFallbackThread
+ , assist netWatcherFallbackThread
, assist $ transferScannerThread urlrenderer
, assist $ cronnerThread urlrenderer
- , assist $ configMonitorThread
- , assist $ glacierThread
- , watch $ watchThread
+ , assist configMonitorThread
+ , assist glacierThread
+ , watch watchThread
-- must come last so that all threads that wait
-- on it have already started waiting
, watch $ sanityCheckerStartupThread startdelay
diff --git a/Assistant/Alert.hs b/Assistant/Alert.hs
index 745694f59..a41baa85f 100644
--- a/Assistant/Alert.hs
+++ b/Assistant/Alert.hs
@@ -145,7 +145,7 @@ syncResultAlert' succeeded failed = makeAlertFiller (not $ null succeeded) $
, alertHeader = Just $ tenseWords msg
}
where
- msg
+ msg
| null succeeded = ["Failed to sync with", showRemotes failed]
| null failed = ["Synced with", showRemotes succeeded]
| otherwise =
diff --git a/Assistant/Alert/Utility.hs b/Assistant/Alert/Utility.hs
index be631e999..ea1280dac 100644
--- a/Assistant/Alert/Utility.hs
+++ b/Assistant/Alert/Utility.hs
@@ -119,7 +119,7 @@ mergeAlert i al m = maybe updatePrune updateCombine (alertCombiner al)
where
bloat = M.size m' - maxAlerts
pruneold l =
- let (f, rest) = partition (\(_, a) -> isFiller a) l
+ let (f, rest) = partition (\(_, a) -> isFiller a) l
in drop bloat f ++ rest
updatePrune = pruneBloat $ M.filterWithKey pruneSame $
M.insertWith' const i al m
diff --git a/Assistant/DaemonStatus.hs b/Assistant/DaemonStatus.hs
index 35f8fc856..3edc2c174 100644
--- a/Assistant/DaemonStatus.hs
+++ b/Assistant/DaemonStatus.hs
@@ -65,7 +65,7 @@ calcSyncRemotes = do
, syncingToCloudRemote = any iscloud syncdata
}
where
- iscloud r = not (Remote.readonly r) && Remote.availability r == Remote.GloballyAvailable
+ iscloud r = not (Remote.readonly r) && Remote.availability r == Remote.GloballyAvailable
{- Updates the syncRemotes list from the list of all remotes in Annex state. -}
updateSyncRemotes :: Assistant ()
diff --git a/Assistant/DeleteRemote.hs b/Assistant/DeleteRemote.hs
index cc05786e4..a900753a7 100644
--- a/Assistant/DeleteRemote.hs
+++ b/Assistant/DeleteRemote.hs
@@ -62,7 +62,7 @@ removableRemote urlrenderer uuid = do
<$> liftAnnex (Remote.remoteFromUUID uuid)
mapM_ (queueremaining r) keys
where
- queueremaining r k =
+ queueremaining r k =
queueTransferWhenSmall "remaining object in unwanted remote"
Nothing (Transfer Download uuid k) r
{- Scanning for keys can take a long time; do not tie up
diff --git a/Assistant/MakeRemote.hs b/Assistant/MakeRemote.hs
index 967a4d41d..d244a7729 100644
--- a/Assistant/MakeRemote.hs
+++ b/Assistant/MakeRemote.hs
@@ -48,7 +48,7 @@ makeRsyncRemote :: RemoteName -> String -> Annex String
makeRsyncRemote name location = makeRemote name location $ const $ void $
go =<< Command.InitRemote.findExisting name
where
- go Nothing = setupSpecialRemote name Rsync.remote config Nothing
+ go Nothing = setupSpecialRemote name Rsync.remote config Nothing
(Nothing, Command.InitRemote.newConfig name)
go (Just (u, c)) = setupSpecialRemote name Rsync.remote config Nothing
(Just u, c)
diff --git a/Assistant/NetMessager.hs b/Assistant/NetMessager.hs
index acb18b648..f042b4e4e 100644
--- a/Assistant/NetMessager.hs
+++ b/Assistant/NetMessager.hs
@@ -80,7 +80,7 @@ checkImportantNetMessages (storedclient, sentclient) = go <<~ netMessager
queuePushInitiation :: NetMessage -> Assistant ()
queuePushInitiation msg@(Pushing clientid stage) = do
tv <- getPushInitiationQueue side
- liftIO $ atomically $ do
+ liftIO $ atomically $ do
r <- tryTakeTMVar tv
case r of
Nothing -> putTMVar tv [msg]
@@ -88,7 +88,7 @@ queuePushInitiation msg@(Pushing clientid stage) = do
let !l' = msg : filter differentclient l
putTMVar tv l'
where
- side = pushDestinationSide stage
+ side = pushDestinationSide stage
differentclient (Pushing cid _) = cid /= clientid
differentclient _ = True
queuePushInitiation _ = noop
diff --git a/Assistant/Ssh.hs b/Assistant/Ssh.hs
index e1a78cd00..7b82f4624 100644
--- a/Assistant/Ssh.hs
+++ b/Assistant/Ssh.hs
@@ -92,7 +92,7 @@ parseSshUrl u
, sshCapabilities = []
}
where
- (user, host) = if '@' `elem` userhost
+ (user, host) = if '@' `elem` userhost
then separate (== '@') userhost
else ("", userhost)
fromrsync s
@@ -260,7 +260,7 @@ setupSshKeyPair sshkeypair sshdata = do
fixSshKeyPairIdentitiesOnly :: IO ()
fixSshKeyPairIdentitiesOnly = changeUserSshConfig $ unlines . go [] . lines
where
- go c [] = reverse c
+ go c [] = reverse c
go c (l:[])
| all (`isInfixOf` l) indicators = go (fixedline l:l:c) []
| otherwise = go (l:c) []
@@ -268,7 +268,7 @@ fixSshKeyPairIdentitiesOnly = changeUserSshConfig $ unlines . go [] . lines
| all (`isInfixOf` l) indicators && not ("IdentitiesOnly" `isInfixOf` next) =
go (fixedline l:l:c) (next:rest)
| otherwise = go (l:c) (next:rest)
- indicators = ["IdentityFile", "key.git-annex"]
+ indicators = ["IdentityFile", "key.git-annex"]
fixedline tmpl = takeWhile isSpace tmpl ++ "IdentitiesOnly yes"
{- Add StrictHostKeyChecking to any ssh config stanzas that were written
diff --git a/Assistant/Threads/Committer.hs b/Assistant/Threads/Committer.hs
index 4a47a9e2c..47c2aa4aa 100644
--- a/Assistant/Threads/Committer.hs
+++ b/Assistant/Threads/Committer.hs
@@ -164,8 +164,8 @@ waitChangeTime a = waitchanges 0
-}
aftermaxcommit oldchanges = loop (30 :: Int)
where
- loop 0 = continue oldchanges
- loop n = do
+ loop 0 = continue oldchanges
+ loop n = do
liftAnnex noop -- ensure Annex state is free
liftIO $ threadDelaySeconds (Seconds 1)
changes <- getAnyChanges
@@ -301,7 +301,7 @@ handleAdds havelsof delayadd cs = returnWhen (null incomplete) $ do
add change@(InProcessAddChange { keySource = ks }) =
catchDefaultIO Nothing <~> doadd
where
- doadd = sanitycheck ks $ do
+ doadd = sanitycheck ks $ do
(mkey, mcache) <- liftAnnex $ do
showStart "add" $ keyFilename ks
Command.Add.ingest $ Just ks
diff --git a/Assistant/Threads/Cronner.hs b/Assistant/Threads/Cronner.hs
index 0fe7f58f4..6dc6f4c6b 100644
--- a/Assistant/Threads/Cronner.hs
+++ b/Assistant/Threads/Cronner.hs
@@ -87,7 +87,7 @@ cronnerThread urlrenderer = namedThreadUnchecked "Cronner" $ do
liftIO $ waitNotification h
debug ["reloading changed activities"]
go h amap' nmap'
- startactivities as lastruntimes = forM as $ \activity ->
+ startactivities as lastruntimes = forM as $ \activity ->
case connectActivityUUID activity of
Nothing -> do
runner <- asIO2 (sleepingActivityThread urlrenderer)
@@ -108,8 +108,8 @@ cronnerThread urlrenderer = namedThreadUnchecked "Cronner" $ do
sleepingActivityThread :: UrlRenderer -> ScheduledActivity -> Maybe LocalTime -> Assistant ()
sleepingActivityThread urlrenderer activity lasttime = go lasttime =<< getnexttime lasttime
where
- getnexttime = liftIO . nextTime schedule
- go _ Nothing = debug ["no scheduled events left for", desc]
+ getnexttime = liftIO . nextTime schedule
+ go _ Nothing = debug ["no scheduled events left for", desc]
go l (Just (NextTimeExactly t)) = waitrun l t Nothing
go l (Just (NextTimeWindow windowstart windowend)) =
waitrun l windowstart (Just windowend)
@@ -129,7 +129,7 @@ sleepingActivityThread urlrenderer activity lasttime = go lasttime =<< getnextti
go l =<< getnexttime l
else run nowt
where
- tolate nowt tz = case mmaxt of
+ tolate nowt tz = case mmaxt of
Just maxt -> nowt > maxt
-- allow the job to start 10 minutes late
Nothing ->diffUTCTime
diff --git a/Assistant/Threads/SanityChecker.hs b/Assistant/Threads/SanityChecker.hs
index 3371f212f..9fd963a69 100644
--- a/Assistant/Threads/SanityChecker.hs
+++ b/Assistant/Threads/SanityChecker.hs
@@ -258,7 +258,7 @@ checkOldUnused :: UrlRenderer -> Assistant ()
checkOldUnused urlrenderer = go =<< annexExpireUnused <$> liftAnnex Annex.getGitConfig
where
go (Just Nothing) = noop
- go (Just (Just expireunused)) = expireUnused (Just expireunused)
+ go (Just (Just expireunused)) = expireUnused (Just expireunused)
go Nothing = maybe noop prompt =<< describeUnusedWhenBig
prompt msg =
diff --git a/Assistant/Threads/UpgradeWatcher.hs b/Assistant/Threads/UpgradeWatcher.hs
index ffad09d3d..431e6f339 100644
--- a/Assistant/Threads/UpgradeWatcher.hs
+++ b/Assistant/Threads/UpgradeWatcher.hs
@@ -51,9 +51,9 @@ upgradeWatcherThread urlrenderer = namedThread "UpgradeWatcher" $ do
let depth = length (splitPath dir) + 1
let nosubdirs f = length (splitPath f) == depth
void $ liftIO $ watchDir dir nosubdirs False hooks (startup mvar)
- -- Ignore bogus events generated during the startup scan.
+ -- Ignore bogus events generated during the startup scan.
-- We ask the watcher to not generate them, but just to be safe..
- startup mvar scanner = do
+ startup mvar scanner = do
r <- scanner
void $ swapMVar mvar Started
return r
diff --git a/Assistant/Threads/Upgrader.hs b/Assistant/Threads/Upgrader.hs
index 637c82a7d..100c15414 100644
--- a/Assistant/Threads/Upgrader.hs
+++ b/Assistant/Threads/Upgrader.hs
@@ -39,7 +39,7 @@ upgraderThread urlrenderer = namedThread "Upgrader" $
h <- liftIO . newNotificationHandle False . networkConnectedNotifier =<< getDaemonStatus
go h =<< liftIO getCurrentTime
where
- {- Wait for a network connection event. Then see if it's been
+ {- Wait for a network connection event. Then see if it's been
- half a day since the last upgrade check. If so, proceed with
- check. -}
go h lastchecked = do
diff --git a/Assistant/Threads/Watcher.hs b/Assistant/Threads/Watcher.hs
index fe9a95471..2e69e1640 100644
--- a/Assistant/Threads/Watcher.hs
+++ b/Assistant/Threads/Watcher.hs
@@ -72,7 +72,7 @@ needLsof = error $ unlines
{- A special exception that can be thrown to pause or resume the watcher. -}
data WatcherControl = PauseWatcher | ResumeWatcher
- deriving (Show, Eq, Typeable)
+ deriving (Show, Eq, Typeable)
instance E.Exception WatcherControl
@@ -192,7 +192,7 @@ runHandler handler file filestatus = void $ do
liftAnnex Annex.Queue.flushWhenFull
recordChange change
where
- normalize f
+ normalize f
| "./" `isPrefixOf` file = drop 2 f
| otherwise = f
@@ -246,7 +246,7 @@ onAddDirect symlinkssupported matcher file fs = do
debug ["add direct", file]
add matcher file
where
- {- On a filesystem without symlinks, we'll get changes for regular
+ {- On a filesystem without symlinks, we'll get changes for regular
- files that git uses to stand-in for symlinks. Detect when
- this happens, and stage the symlink, rather than annexing the
- file. -}
@@ -276,7 +276,7 @@ onAddSymlink isdirect file filestatus = unlessIgnored file $ do
onAddSymlink' :: Maybe String -> Maybe Key -> Bool -> Handler
onAddSymlink' linktarget mk isdirect file filestatus = go mk
where
- go (Just key) = do
+ go (Just key) = do
when isdirect $
liftAnnex $ void $ addAssociatedFile key file
link <- liftAnnex $ inRepo $ gitAnnexLink file key
diff --git a/Assistant/Threads/WebApp.hs b/Assistant/Threads/WebApp.hs
index 416c07874..d01096c7a 100644
--- a/Assistant/Threads/WebApp.hs
+++ b/Assistant/Threads/WebApp.hs
@@ -6,6 +6,7 @@
-}
{-# LANGUAGE TemplateHaskell, MultiParamTypeClasses #-}
+{-# LANGUAGE ViewPatterns, OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
@@ -97,7 +98,7 @@ webAppThread assistantdata urlrenderer noannex cannotrun postfirstrun listenhost
urlfile <- getAnnex' $ fromRepo gitAnnexUrlFile
go tlssettings addr webapp htmlshim (Just urlfile)
where
- -- The webapp thread does not wait for the startupSanityCheckThread
+ -- The webapp thread does not wait for the startupSanityCheckThread
-- to finish, so that the user interface remains responsive while
-- that's going on.
thread = namedThreadUnchecked "WebApp"
diff --git a/Assistant/Threads/XMPPClient.hs b/Assistant/Threads/XMPPClient.hs
index 2f70b508f..8ce99eac6 100644
--- a/Assistant/Threads/XMPPClient.hs
+++ b/Assistant/Threads/XMPPClient.hs
@@ -131,7 +131,7 @@ xmppClient urlrenderer d creds xmppuuid =
{- XEP-0199 says that the server will respond with either
- a ping response or an error message. Either will
- cause traffic, so good enough. -}
- pingstanza = xmppPing selfjid
+ pingstanza = xmppPing selfjid
handlemsg selfjid (PresenceMessage p) = do
void $ inAssistant $
diff --git a/Assistant/Threads/XMPPPusher.hs b/Assistant/Threads/XMPPPusher.hs
index 30c91c7f0..35c76ebf1 100644
--- a/Assistant/Threads/XMPPPusher.hs
+++ b/Assistant/Threads/XMPPPusher.hs
@@ -34,7 +34,7 @@ xmppReceivePackThread = pusherThread "XMPPReceivePack" ReceivePack
pusherThread :: String -> PushSide -> UrlRenderer -> NamedThread
pusherThread threadname side urlrenderer = namedThread threadname $ go Nothing
where
- go lastpushedto = do
+ go lastpushedto = do
msg <- waitPushInitiation side $ selectNextPush lastpushedto
debug ["started running push", logNetMessage msg]
@@ -78,4 +78,4 @@ selectNextPush lastpushedto l = go [] l
(Pushing clientid _)
| Just clientid /= lastpushedto -> (m, rejected ++ ms)
_ -> go (m:rejected) ms
- go [] [] = undefined
+ go [] [] = undefined
diff --git a/Assistant/TransferQueue.hs b/Assistant/TransferQueue.hs
index 93c982224..d138e16ef 100644
--- a/Assistant/TransferQueue.hs
+++ b/Assistant/TransferQueue.hs
@@ -92,7 +92,7 @@ queueTransfersMatching matching reason schedule k f direction
filterM (wantSend True (Just k) f . Remote.uuid) $
filter (\r -> not (inset s r || Remote.readonly r)) rs
where
- locs = S.fromList <$> Remote.keyLocations k
+ locs = S.fromList <$> Remote.keyLocations k
inset s r = S.member (Remote.uuid r) s
gentransfer r = Transfer
{ transferDirection = direction
diff --git a/Assistant/Types/NetMessager.hs b/Assistant/Types/NetMessager.hs
index 5ae987a61..f5ad85b4a 100644
--- a/Assistant/Types/NetMessager.hs
+++ b/Assistant/Types/NetMessager.hs
@@ -85,7 +85,7 @@ logNetMessage (Pushing c stage) = show $ Pushing (logClientID c) $
SendPackOutput n _ -> SendPackOutput n elided
s -> s
where
- elided = T.encodeUtf8 $ T.pack "<elided>"
+ elided = T.encodeUtf8 $ T.pack "<elided>"
logNetMessage (PairingNotification stage c uuid) =
show $ PairingNotification stage (logClientID c) uuid
logNetMessage m = show m
diff --git a/Assistant/XMPP.hs b/Assistant/XMPP.hs
index e74705021..cc0343abf 100644
--- a/Assistant/XMPP.hs
+++ b/Assistant/XMPP.hs
@@ -195,7 +195,7 @@ decodeMessage m = decode =<< gitAnnexTagInfo m
<*> a i
gen c i = c . toUUID <$> headMaybe (words (T.unpack (tagValue i)))
seqgen c i = do
- packet <- decodeTagContent $ tagElement i
+ packet <- decodeTagContent $ tagElement i
let seqnum = fromMaybe 0 $ readish $ T.unpack $ tagValue i
return $ c seqnum packet
shasgen c i = do
diff --git a/Assistant/XMPP/Git.hs b/Assistant/XMPP/Git.hs
index 19050c7d0..868fe6609 100644
--- a/Assistant/XMPP/Git.hs
+++ b/Assistant/XMPP/Git.hs
@@ -152,7 +152,7 @@ xmppPush cid gitpush = do
fromxmpp outh controlh = withPushMessagesInSequence cid SendPack handlemsg
where
- handlemsg (Just (Pushing _ (ReceivePackOutput _ b))) =
+ handlemsg (Just (Pushing _ (ReceivePackOutput _ b))) =
liftIO $ writeChunk outh b
handlemsg (Just (Pushing _ (ReceivePackDone exitcode))) =
liftIO $ do
@@ -266,7 +266,7 @@ xmppReceivePack cid = do
relaytoxmpp seqnum' outh
relayfromxmpp inh = withPushMessagesInSequence cid ReceivePack handlemsg
where
- handlemsg (Just (Pushing _ (SendPackOutput _ b))) =
+ handlemsg (Just (Pushing _ (SendPackOutput _ b))) =
liftIO $ writeChunk inh b
handlemsg (Just _) = noop
handlemsg Nothing = do
@@ -337,7 +337,7 @@ handlePushNotice (Pushing cid (CanPush theiruuid shas)) =
, go
)
where
- go = do
+ go = do
u <- liftAnnex getUUID
sendNetMessage $ Pushing cid (PushRequest u)
haveall l = liftAnnex $ not <$> anyM donthave l
@@ -359,9 +359,9 @@ writeChunk h b = do
withPushMessagesInSequence :: ClientID -> PushSide -> (Maybe NetMessage -> Assistant ()) -> Assistant ()
withPushMessagesInSequence cid side a = loop 0
where
- loop seqnum = do
+ loop seqnum = do
m <- timeout xmppTimeout <~> waitInbox cid side
- let go s = a m >> loop s
+ let go s = a m >> loop s
let next = seqnum + 1
case extractSequence =<< m of
Just seqnum'
diff --git a/Backend/Hash.hs b/Backend/Hash.hs
index 62d0a0fca..7c47a4abc 100644
--- a/Backend/Hash.hs
+++ b/Backend/Hash.hs
@@ -144,7 +144,7 @@ trivialMigrate oldkey newbackend
hashFile :: Hash -> FilePath -> Integer -> Annex String
hashFile hash file filesize = liftIO $ go hash
where
- go (SHAHash hashsize) = case shaHasher hashsize filesize of
+ go (SHAHash hashsize) = case shaHasher hashsize filesize of
Left sha -> sha <$> L.readFile file
Right command ->
either error return
diff --git a/Build/EvilLinker.hs b/Build/EvilLinker.hs
index cf0f771e5..e2921cc8c 100644
--- a/Build/EvilLinker.hs
+++ b/Build/EvilLinker.hs
@@ -58,13 +58,13 @@ parseGccLink = do
collect2params <- restOfLine
return $ CmdParams (path ++ collectcmd) (escapeDosPaths collect2params) cenv
where
- collectcmd = "collect2.exe"
- collectgccenv = "COLLECT_GCC"
+ collectcmd = "collect2.exe"
+ collectgccenv = "COLLECT_GCC"
collectltoenv = "COLLECT_LTO_WRAPPER"
pathenv = "COMPILER_PATH"
libpathenv = "LIBRARY_PATH"
- optenv = "COLLECT_GCC_OPTIONS"
- collectenv = do
+ optenv = "COLLECT_GCC_OPTIONS"
+ collectenv = do
void $ many1 $ do
notFollowedBy $ string collectgccenv
restOfLine
@@ -148,7 +148,7 @@ runAtFile p s f extraparams = do
removeFile f
return out
where
- c = case parse p "" s of
+ c = case parse p "" s of
Left e -> error $
(show e) ++
"\n<<<\n" ++ s ++ "\n>>>"
diff --git a/Build/EvilSplicer.hs b/Build/EvilSplicer.hs
index 648d631b5..fc41c624f 100644
--- a/Build/EvilSplicer.hs
+++ b/Build/EvilSplicer.hs
@@ -86,7 +86,7 @@ number = read <$> many1 digit
coordsParser :: Parser (Coord, Coord)
coordsParser = (try singleline <|> try weird <|> multiline) <?> "Coords"
where
- singleline = do
+ singleline = do
line <- number
void $ char ':'
startcol <- number
@@ -151,7 +151,7 @@ spliceParser = do
(unlines codelines)
splicetype
where
- tosplicetype "declarations" = SpliceDeclaration
+ tosplicetype "declarations" = SpliceDeclaration
tosplicetype "expression" = SpliceExpression
tosplicetype s = error $ "unknown splice type: " ++ s
@@ -177,7 +177,7 @@ spliceParser = do
splicesExtractor :: Parser [Splice]
splicesExtractor = rights <$> many extract
where
- extract = try (Right <$> spliceParser) <|> (Left <$> compilerJunkLine)
+ extract = try (Right <$> spliceParser) <|> (Left <$> compilerJunkLine)
compilerJunkLine = restOfLine
{- Modifies the source file, expanding the splices, which all must
@@ -214,8 +214,8 @@ applySplices destdir imports splices@(first:_) = do
hPutStr h newcontent
hClose h
where
- expand lls [] = lls
- expand lls (s:rest)
+ expand lls [] = lls
+ expand lls (s:rest)
| isExpressionSplice s = expand (expandExpressionSplice s lls) rest
| otherwise = expand (expandDeclarationSplice s lls) rest
@@ -291,12 +291,12 @@ expandExpressionSplice sp lls = concat [before, spliced:padding, end]
-- ie: bar $(splice)
| otherwise = s ++ " $ "
where
- s' = filter (not . isSpace) s
+ s' = filter (not . isSpace) s
findindent = length . takeWhile isSpace
addindent n = unlines . map (i ++) . lines
where
- i = take n $ repeat ' '
+ i = take n $ repeat ' '
{- Tweaks code output by GHC in splices to actually build. Yipes. -}
mangleCode :: String -> String
@@ -315,7 +315,7 @@ mangleCode = flip_colon
. remove_package_version
. emptylambda
where
- {- Lambdas are often output without parens around them.
+ {- Lambdas are often output without parens around them.
- This breaks when the lambda is immediately applied to a
- parameter.
-
@@ -409,7 +409,7 @@ mangleCode = flip_colon
restofline = manyTill (noneOf "\n") newline
- {- For some reason, GHC sometimes doesn't like the multiline
+ {- For some reason, GHC sometimes doesn't like the multiline
- strings it creates. It seems to get hung up on \{ at the
- start of a new line sometimes, wanting it to not be escaped.
-
@@ -646,7 +646,7 @@ parsecAndReplace p s = case parse find "" s of
Left _e -> s
Right l -> concatMap (either return id) l
where
- find :: Parser [Either Char String]
+ find :: Parser [Either Char String]
find = many $ try (Right <$> p) <|> (Left <$> anyChar)
main :: IO ()
@@ -654,7 +654,7 @@ main = go =<< getArgs
where
go (destdir:log:header:[]) = run destdir log (Just header)
go (destdir:log:[]) = run destdir log Nothing
- go _ = error "usage: EvilSplicer destdir logfile [headerfile]"
+ go _ = error "usage: EvilSplicer destdir logfile [headerfile]"
run destdir log mheader = do
r <- parseFromFile splicesExtractor log
diff --git a/Build/NullSoftInstaller.hs b/Build/NullSoftInstaller.hs
index b8fc82605..22d3caf36 100644
--- a/Build/NullSoftInstaller.hs
+++ b/Build/NullSoftInstaller.hs
@@ -103,7 +103,7 @@ makeInstaller gitannex license extrabins launchers = nsis $ do
name "git-annex"
outFile $ str installer
{- Installing into the same directory as git avoids needing to modify
- - path myself, since the git installer already does it. -}
+ - path myself, since the git installer already does it. -}
installDir gitInstallDir
requestExecutionLevel Admin
diff --git a/Build/OSXMkLibs.hs b/Build/OSXMkLibs.hs
index 5640e4d36..ef668bb4a 100644
--- a/Build/OSXMkLibs.hs
+++ b/Build/OSXMkLibs.hs
@@ -112,7 +112,7 @@ expand_rpath libs replacement_libs cmd
return $ map (replacem m) libs
| otherwise = return libs
where
- probe c = "DYLD_PRINT_RPATHS=1 " ++ c ++ " --getting-rpath-dummy-option 2>&1 | grep RPATH"
+ probe c = "DYLD_PRINT_RPATHS=1 " ++ c ++ " --getting-rpath-dummy-option 2>&1 | grep RPATH"
parse s = case words s of
("RPATH":"successful":"expansion":"of":old:"to:":new:[]) ->
Just (old, new)
diff --git a/Build/Standalone.hs b/Build/Standalone.hs
index 110163acf..da030933d 100644
--- a/Build/Standalone.hs
+++ b/Build/Standalone.hs
@@ -40,7 +40,7 @@ main :: IO ()
main = getArgs >>= go
where
go [] = error "specify topdir"
- go (topdir:_) = do
+ go (topdir:_) = do
let dir = progDir topdir
createDirectoryIfMissing True dir
installed <- forM bundledPrograms $ installProg dir
diff --git a/Checks.hs b/Checks.hs
index 7a9cd1e38..831c0a009 100644
--- a/Checks.hs
+++ b/Checks.hs
@@ -35,7 +35,7 @@ noDaemonRunning :: Command -> Command
noDaemonRunning = addCheck $ whenM (isJust <$> daemonpid) $
error "You cannot run this command while git-annex watch or git-annex assistant is running."
where
- daemonpid = liftIO . checkDaemon =<< fromRepo gitAnnexPidFile
+ daemonpid = liftIO . checkDaemon =<< fromRepo gitAnnexPidFile
dontCheck :: CommandCheck -> Command -> Command
dontCheck check cmd = mutateCheck cmd $ \c -> filter (/= check) c
diff --git a/CmdLine.hs b/CmdLine.hs
index 606390130..41968a091 100644
--- a/CmdLine.hs
+++ b/CmdLine.hs
@@ -6,7 +6,6 @@
-}
{-# LANGUAGE CPP #-}
-{-# LANGUAGE BangPatterns #-}
module CmdLine (
dispatch,
@@ -58,7 +57,7 @@ dispatch fuzzyok allargs allcmds commonoptions fields header getgitrepo = do
shutdown $ cmdnocommit cmd
go _flags params (Left e) = do
when fuzzy $
- autocorrect =<< Git.Config.global
+ autocorrect =<< Git.Config.global
maybe (throw e) (\a -> a params) (cmdnorepo cmd)
err msg = msg ++ "\n\n" ++ usage header allcmds
cmd = Prelude.head cmds
diff --git a/CmdLine/GitAnnex.hs b/CmdLine/GitAnnex.hs
index d27866030..5f5d4a151 100644
--- a/CmdLine/GitAnnex.hs
+++ b/CmdLine/GitAnnex.hs
@@ -107,91 +107,91 @@ import System.Remote.Monitoring
cmds :: [Command]
cmds = concat
- [ Command.Add.def
- , Command.Get.def
- , Command.Drop.def
- , Command.Move.def
- , Command.Copy.def
- , Command.Unlock.def
- , Command.Lock.def
- , Command.Sync.def
- , Command.Mirror.def
- , Command.AddUrl.def
+ [ Command.Add.cmd
+ , Command.Get.cmd
+ , Command.Drop.cmd
+ , Command.Move.cmd
+ , Command.Copy.cmd
+ , Command.Unlock.cmd
+ , Command.Lock.cmd
+ , Command.Sync.cmd
+ , Command.Mirror.cmd
+ , Command.AddUrl.cmd
#ifdef WITH_FEED
- , Command.ImportFeed.def
+ , Command.ImportFeed.cmd
#endif
- , Command.RmUrl.def
- , Command.Import.def
- , Command.Init.def
- , Command.Describe.def
- , Command.InitRemote.def
- , Command.EnableRemote.def
- , Command.Reinject.def
- , Command.Unannex.def
- , Command.Uninit.def
- , Command.Reinit.def
- , Command.PreCommit.def
- , Command.NumCopies.def
- , Command.Trust.def
- , Command.Untrust.def
- , Command.Semitrust.def
- , Command.Dead.def
- , Command.Group.def
- , Command.Wanted.def
- , Command.Schedule.def
- , Command.Ungroup.def
- , Command.Vicfg.def
- , Command.LookupKey.def
- , Command.ExamineKey.def
- , Command.FromKey.def
- , Command.DropKey.def
- , Command.TransferKey.def
- , Command.TransferKeys.def
- , Command.ReKey.def
- , Command.MetaData.def
- , Command.View.def
- , Command.VAdd.def
- , Command.VFilter.def
- , Command.VPop.def
- , Command.VCycle.def
- , Command.Fix.def
- , Command.Fsck.def
- , Command.Repair.def
- , Command.Unused.def
- , Command.DropUnused.def
- , Command.AddUnused.def
- , Command.Find.def
- , Command.FindRef.def
- , Command.Whereis.def
- , Command.List.def
- , Command.Log.def
- , Command.Merge.def
- , Command.ResolveMerge.def
- , Command.Info.def
- , Command.Status.def
- , Command.Migrate.def
- , Command.Map.def
- , Command.Direct.def
- , Command.Indirect.def
- , Command.Upgrade.def
- , Command.Forget.def
- , Command.Version.def
- , Command.Help.def
+ , Command.RmUrl.cmd
+ , Command.Import.cmd
+ , Command.Init.cmd
+ , Command.Describe.cmd
+ , Command.InitRemote.cmd
+ , Command.EnableRemote.cmd
+ , Command.Reinject.cmd
+ , Command.Unannex.cmd
+ , Command.Uninit.cmd
+ , Command.Reinit.cmd
+ , Command.PreCommit.cmd
+ , Command.NumCopies.cmd
+ , Command.Trust.cmd
+ , Command.Untrust.cmd
+ , Command.Semitrust.cmd
+ , Command.Dead.cmd
+ , Command.Group.cmd
+ , Command.Wanted.cmd
+ , Command.Schedule.cmd
+ , Command.Ungroup.cmd
+ , Command.Vicfg.cmd
+ , Command.LookupKey.cmd
+ , Command.ExamineKey.cmd
+ , Command.FromKey.cmd
+ , Command.DropKey.cmd
+ , Command.TransferKey.cmd
+ , Command.TransferKeys.cmd
+ , Command.ReKey.cmd
+ , Command.MetaData.cmd
+ , Command.View.cmd
+ , Command.VAdd.cmd
+ , Command.VFilter.cmd
+ , Command.VPop.cmd
+ , Command.VCycle.cmd
+ , Command.Fix.cmd
+ , Command.Fsck.cmd
+ , Command.Repair.cmd
+ , Command.Unused.cmd
+ , Command.DropUnused.cmd
+ , Command.AddUnused.cmd
+ , Command.Find.cmd
+ , Command.FindRef.cmd
+ , Command.Whereis.cmd
+ , Command.List.cmd
+ , Command.Log.cmd
+ , Command.Merge.cmd
+ , Command.ResolveMerge.cmd
+ , Command.Info.cmd
+ , Command.Status.cmd
+ , Command.Migrate.cmd
+ , Command.Map.cmd
+ , Command.Direct.cmd
+ , Command.Indirect.cmd
+ , Command.Upgrade.cmd
+ , Command.Forget.cmd
+ , Command.Version.cmd
+ , Command.Help.cmd
#ifdef WITH_ASSISTANT
- , Command.Watch.def
- , Command.Assistant.def
+ , Command.Watch.cmd
+ , Command.Assistant.cmd
#ifdef WITH_WEBAPP
- , Command.WebApp.def
+ , Command.WebApp.cmd
#endif
#ifdef WITH_XMPP
- , Command.XMPPGit.def
+ , Command.XMPPGit.cmd
#endif
- , Command.RemoteDaemon.def
+ , Command.RemoteDaemon.cmd
#endif
- , Command.Test.def
+ , Command.Test.cmd
#ifdef WITH_TESTSUITE
- , Command.FuzzTest.def
- , Command.TestRemote.def
+ , Command.FuzzTest.cmd
+ , Command.TestRemote.cmd
#endif
]
diff --git a/CmdLine/GitAnnexShell.hs b/CmdLine/GitAnnexShell.hs
index 6c212b24d..21284f400 100644
--- a/CmdLine/GitAnnexShell.hs
+++ b/CmdLine/GitAnnexShell.hs
@@ -34,19 +34,19 @@ import qualified Command.GCryptSetup
cmds_readonly :: [Command]
cmds_readonly = concat
- [ gitAnnexShellCheck Command.ConfigList.def
- , gitAnnexShellCheck Command.InAnnex.def
- , gitAnnexShellCheck Command.SendKey.def
- , gitAnnexShellCheck Command.TransferInfo.def
- , gitAnnexShellCheck Command.NotifyChanges.def
+ [ gitAnnexShellCheck Command.ConfigList.cmd
+ , gitAnnexShellCheck Command.InAnnex.cmd
+ , gitAnnexShellCheck Command.SendKey.cmd
+ , gitAnnexShellCheck Command.TransferInfo.cmd
+ , gitAnnexShellCheck Command.NotifyChanges.cmd
]
cmds_notreadonly :: [Command]
cmds_notreadonly = concat
- [ gitAnnexShellCheck Command.RecvKey.def
- , gitAnnexShellCheck Command.DropKey.def
- , gitAnnexShellCheck Command.Commit.def
- , Command.GCryptSetup.def
+ [ gitAnnexShellCheck Command.RecvKey.cmd
+ , gitAnnexShellCheck Command.DropKey.cmd
+ , gitAnnexShellCheck Command.Commit.cmd
+ , Command.GCryptSetup.cmd
]
cmds :: [Command]
@@ -66,7 +66,7 @@ options = commonOptions ++
check u = unexpectedUUID expected u
checkGCryptUUID expected = check =<< getGCryptUUID True =<< gitRepo
where
- check (Just u) | u == toUUID expected = noop
+ check (Just u) | u == toUUID expected = noop
check Nothing = unexpected expected "uninitialized repository"
check (Just u) = unexpectedUUID expected u
unexpectedUUID expected u = unexpected expected $ "UUID " ++ fromUUID u
diff --git a/CmdLine/Seek.hs b/CmdLine/Seek.hs
index 397a48118..238ed4291 100644
--- a/CmdLine/Seek.hs
+++ b/CmdLine/Seek.hs
@@ -107,7 +107,7 @@ withFilesUnlocked' :: ([FilePath] -> Git.Repo -> IO ([FilePath], IO Bool)) -> (F
withFilesUnlocked' typechanged a params = seekActions $
prepFiltered a unlockedfiles
where
- check f = liftIO (notSymlink f) <&&>
+ check f = liftIO (notSymlink f) <&&>
(isJust <$> catKeyFile f <||> isJust <$> catKeyFileHEAD f)
unlockedfiles = filterM check =<< seekHelper typechanged params
@@ -165,7 +165,7 @@ withKeyOptions keyop fallbackop params = do
Just k -> go auto $ return [k]
_ -> error "Can only specify one of file names, --all, --unused, or --key"
where
- go True _ = error "Cannot use --auto with --all or --unused or --key"
+ go True _ = error "Cannot use --auto with --all or --unused or --key"
go False a = do
matcher <- Limit.getMatcher
seekActions $ map (process matcher) <$> a
diff --git a/CmdLine/Usage.hs b/CmdLine/Usage.hs
index 6e0a1ca80..1998a5f54 100644
--- a/CmdLine/Usage.hs
+++ b/CmdLine/Usage.hs
@@ -103,6 +103,8 @@ paramSize :: String
paramSize = "SIZE"
paramAddress :: String
paramAddress = "ADDRESS"
+paramItem :: String
+paramItem = "ITEM"
paramKeyValue :: String
paramKeyValue = "K=V"
paramNothing :: String
diff --git a/Command/Add.hs b/Command/Add.hs
index e2b6d04fe..519dad6e4 100644
--- a/Command/Add.hs
+++ b/Command/Add.hs
@@ -34,8 +34,8 @@ import Utility.Tmp
import Control.Exception (IOException)
-def :: [Command]
-def = [notBareRepo $ withOptions [includeDotFilesOption] $
+cmd :: [Command]
+cmd = [notBareRepo $ withOptions [includeDotFilesOption] $
command "add" paramPaths seek SectionCommon
"add files to annex"]
@@ -125,7 +125,7 @@ lockDown' file = ifM crippledFileSystem
- This is not done in direct mode, because files there need to
- remain writable at all times.
-}
- go tmp = do
+ go tmp = do
unlessM isDirect $
freezeContent file
withTSDelta $ \delta -> liftIO $ do
@@ -134,7 +134,7 @@ lockDown' file = ifM crippledFileSystem
hClose h
nukeFile tmpfile
withhardlink delta tmpfile `catchIO` const (nohardlink delta)
- nohardlink delta = do
+ nohardlink delta = do
cache <- genInodeCache file delta
return KeySource
{ keyFilename = file
@@ -177,14 +177,14 @@ ingest (Just source) = withTSDelta $ \delta -> do
(undo (keyFilename source) key)
maybe noop (genMetaData key (keyFilename source)) ms
liftIO $ nukeFile $ keyFilename source
- return $ (Just key, mcache)
+ return (Just key, mcache)
goindirect _ _ _ = failure "failed to generate a key"
godirect (Just (key, _)) (Just cache) ms = do
addInodeCache key cache
maybe noop (genMetaData key (keyFilename source)) ms
finishIngestDirect key source
- return $ (Just key, Just cache)
+ return (Just key, Just cache)
godirect _ _ _ = failure "failed to generate a key"
failure msg = do
@@ -207,7 +207,7 @@ finishIngestDirect key source = do
perform :: FilePath -> CommandPerform
perform file = lockDown file >>= ingest >>= go
where
- go (Just key, cache) = next $ cleanup file key cache True
+ go (Just key, cache) = next $ cleanup file key cache True
go (Nothing, _) = stop
{- On error, put the file back so it doesn't seem to have vanished.
diff --git a/Command/AddUnused.hs b/Command/AddUnused.hs
index 91427e819..69dbefc17 100644
--- a/Command/AddUnused.hs
+++ b/Command/AddUnused.hs
@@ -14,8 +14,8 @@ import qualified Command.Add
import Command.Unused (withUnusedMaps, UnusedMaps(..), startUnused)
import Types.Key
-def :: [Command]
-def = [notDirect $ command "addunused" (paramRepeating paramNumRange)
+cmd :: [Command]
+cmd = [notDirect $ command "addunused" (paramRepeating paramNumRange)
seek SectionMaintenance "add back unused files"]
seek :: CommandSeek
diff --git a/Command/AddUrl.hs b/Command/AddUrl.hs
index c21ce928f..81da67639 100644
--- a/Command/AddUrl.hs
+++ b/Command/AddUrl.hs
@@ -32,8 +32,8 @@ import Annex.Quvi
import qualified Utility.Quvi as Quvi
#endif
-def :: [Command]
-def = [notBareRepo $ withOptions [fileOption, pathdepthOption, relaxedOption] $
+cmd :: [Command]
+cmd = [notBareRepo $ withOptions [fileOption, pathdepthOption, relaxedOption] $
command "addurl" (paramRepeating paramUrl) seek
SectionCommon "add urls to annex"]
@@ -56,7 +56,7 @@ seek ps = do
start :: Bool -> Maybe FilePath -> Maybe Int -> String -> CommandStart
start relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI s
where
- (s', downloader) = getDownloader s
+ (s', downloader) = getDownloader s
bad = fromMaybe (error $ "bad url " ++ s') $
parseURI $ escapeURIString isUnescapedInURI s'
choosefile = flip fromMaybe optfile
@@ -95,8 +95,8 @@ start relaxed optfile pathdepth s = go $ fromMaybe bad $ parseURI s
performQuvi :: Bool -> URLString -> URLString -> FilePath -> CommandPerform
performQuvi relaxed pageurl videourl file = ifAnnexed file addurl geturl
where
- quviurl = setDownloader pageurl QuviDownloader
- addurl key = next $ cleanup quviurl file key Nothing
+ quviurl = setDownloader pageurl QuviDownloader
+ addurl key = next $ cleanup quviurl file key Nothing
geturl = next $ isJust <$> addUrlFileQuvi relaxed quviurl videourl file
#endif
@@ -189,7 +189,7 @@ download url file = do
, return Nothing
)
where
- runtransfer dummykey tmp = Transfer.notifyTransfer Transfer.Download (Just file) $
+ runtransfer dummykey tmp = Transfer.notifyTransfer Transfer.Download (Just file) $
Transfer.download webUUID dummykey (Just file) Transfer.forwardRetry $ const $ do
liftIO $ createDirectoryIfMissing True (parentDir tmp)
downloadUrl [url] tmp
diff --git a/Command/Assistant.hs b/Command/Assistant.hs
index 8316a9948..8341a5694 100644
--- a/Command/Assistant.hs
+++ b/Command/Assistant.hs
@@ -18,8 +18,8 @@ import Assistant.Install
import System.Environment
-def :: [Command]
-def = [noRepo checkAutoStart $ dontCheck repoExists $ withOptions options $
+cmd :: [Command]
+cmd = [noRepo checkAutoStart $ dontCheck repoExists $ withOptions options $
notBareRepo $ command "assistant" paramNothing seek SectionCommon
"automatically handle changes"]
diff --git a/Command/Commit.hs b/Command/Commit.hs
index f5f13d248..1f2478ee5 100644
--- a/Command/Commit.hs
+++ b/Command/Commit.hs
@@ -12,8 +12,8 @@ import Command
import qualified Annex.Branch
import qualified Git
-def :: [Command]
-def = [command "commit" paramNothing seek
+cmd :: [Command]
+cmd = [command "commit" paramNothing seek
SectionPlumbing "commits any staged changes to the git-annex branch"]
seek :: CommandSeek
diff --git a/Command/ConfigList.hs b/Command/ConfigList.hs
index 219685c21..7d8f1ea70 100644
--- a/Command/ConfigList.hs
+++ b/Command/ConfigList.hs
@@ -15,8 +15,8 @@ import qualified Annex.Branch
import qualified Git.Config
import Remote.GCrypt (coreGCryptId)
-def :: [Command]
-def = [noCommit $ command "configlist" paramNothing seek
+cmd :: [Command]
+cmd = [noCommit $ command "configlist" paramNothing seek
SectionPlumbing "outputs relevant git configuration"]
seek :: CommandSeek
@@ -29,7 +29,7 @@ start = do
showConfig coreGCryptId =<< fromRepo (Git.Config.get coreGCryptId "")
stop
where
- showConfig k v = liftIO $ putStrLn $ k ++ "=" ++ v
+ showConfig k v = liftIO $ putStrLn $ k ++ "=" ++ v
{- The repository may not yet have a UUID; automatically initialize it
- when there's a git-annex branch available. -}
diff --git a/Command/Copy.hs b/Command/Copy.hs
index ae254aae2..23fa83a35 100644
--- a/Command/Copy.hs
+++ b/Command/Copy.hs
@@ -14,8 +14,8 @@ import qualified Remote
import Annex.Wanted
import Config.NumCopies
-def :: [Command]
-def = [withOptions Command.Move.moveOptions $ command "copy" paramPaths seek
+cmd :: [Command]
+cmd = [withOptions Command.Move.moveOptions $ command "copy" paramPaths seek
SectionCommon "copy content of files to/from another repository"]
seek :: CommandSeek
@@ -23,7 +23,7 @@ seek ps = do
to <- getOptionField toOption Remote.byNameWithUUID
from <- getOptionField fromOption Remote.byNameWithUUID
withKeyOptions
- (Command.Move.startKey to from False)
+ (Command.Move.startKey to from False)
(withFilesInGit $ whenAnnexed $ start to from)
ps
diff --git a/Command/Dead.hs b/Command/Dead.hs
index f9e5c2e27..c19812b73 100644
--- a/Command/Dead.hs
+++ b/Command/Dead.hs
@@ -11,8 +11,8 @@ import Command
import Types.TrustLevel
import Command.Trust (trustCommand)
-def :: [Command]
-def = [command "dead" (paramRepeating paramRemote) seek
+cmd :: [Command]
+cmd = [command "dead" (paramRepeating paramRemote) seek
SectionSetup "hide a lost repository"]
seek :: CommandSeek
diff --git a/Command/Describe.hs b/Command/Describe.hs
index 601b3fcc9..39a762c06 100644
--- a/Command/Describe.hs
+++ b/Command/Describe.hs
@@ -12,8 +12,8 @@ import Command
import qualified Remote
import Logs.UUID
-def :: [Command]
-def = [command "describe" (paramPair paramRemote paramDesc) seek
+cmd :: [Command]
+cmd = [command "describe" (paramPair paramRemote paramDesc) seek
SectionSetup "change description of a repository"]
seek :: CommandSeek
diff --git a/Command/Direct.hs b/Command/Direct.hs
index c64ef6e56..3493e103d 100644
--- a/Command/Direct.hs
+++ b/Command/Direct.hs
@@ -15,8 +15,8 @@ import qualified Git.Branch
import Config
import Annex.Direct
-def :: [Command]
-def = [notBareRepo $ noDaemonRunning $
+cmd :: [Command]
+cmd = [notBareRepo $ noDaemonRunning $
command "direct" paramNothing seek
SectionSetup "switch repository to direct mode"]
diff --git a/Command/Drop.hs b/Command/Drop.hs
index cf63d2bc7..9460c47b4 100644
--- a/Command/Drop.hs
+++ b/Command/Drop.hs
@@ -22,8 +22,8 @@ import Annex.Notification
import qualified Data.Set as S
-def :: [Command]
-def = [withOptions [dropFromOption] $ command "drop" paramPaths seek
+cmd :: [Command]
+cmd = [withOptions [dropFromOption] $ command "drop" paramPaths seek
SectionCommon "indicate content of files not currently wanted"]
dropFromOption :: Option
diff --git a/Command/DropKey.hs b/Command/DropKey.hs
index 8ca41bdb6..ca20a1a64 100644
--- a/Command/DropKey.hs
+++ b/Command/DropKey.hs
@@ -13,8 +13,8 @@ import qualified Annex
import Logs.Location
import Annex.Content
-def :: [Command]
-def = [noCommit $ command "dropkey" (paramRepeating paramKey) seek
+cmd :: [Command]
+cmd = [noCommit $ command "dropkey" (paramRepeating paramKey) seek
SectionPlumbing "drops annexed content for specified keys"]
seek :: CommandSeek
diff --git a/Command/DropUnused.hs b/Command/DropUnused.hs
index ce49795c9..b9bc2bef6 100644
--- a/Command/DropUnused.hs
+++ b/Command/DropUnused.hs
@@ -16,8 +16,8 @@ import qualified Git
import Command.Unused (withUnusedMaps, UnusedMaps(..), startUnused)
import Config.NumCopies
-def :: [Command]
-def = [withOptions [Command.Drop.dropFromOption] $
+cmd :: [Command]
+cmd = [withOptions [Command.Drop.dropFromOption] $
command "dropunused" (paramRepeating paramNumRange)
seek SectionMaintenance "drop unused file content"]
diff --git a/Command/EnableRemote.hs b/Command/EnableRemote.hs
index 42ab43374..909f1ea2f 100644
--- a/Command/EnableRemote.hs
+++ b/Command/EnableRemote.hs
@@ -15,8 +15,8 @@ import qualified Command.InitRemote as InitRemote
import qualified Data.Map as M
-def :: [Command]
-def = [command "enableremote"
+cmd :: [Command]
+cmd = [command "enableremote"
(paramPair paramName $ paramOptional $ paramRepeating paramKeyValue)
seek SectionSetup "enables use of an existing special remote"]
@@ -29,7 +29,7 @@ start (name:ws) = go =<< InitRemote.findExisting name
where
config = Logs.Remote.keyValToConfig ws
- go Nothing = unknownNameError "Unknown special remote name."
+ go Nothing = unknownNameError "Unknown special remote name."
go (Just (u, c)) = do
let fullconfig = config `M.union` c
t <- InitRemote.findType fullconfig
diff --git a/Command/ExamineKey.hs b/Command/ExamineKey.hs
index dd2bec507..94f84c5b5 100644
--- a/Command/ExamineKey.hs
+++ b/Command/ExamineKey.hs
@@ -13,8 +13,8 @@ import qualified Utility.Format
import Command.Find (formatOption, getFormat, showFormatted, keyVars)
import Types.Key
-def :: [Command]
-def = [noCommit $ noMessages $ withOptions [formatOption, jsonOption] $
+cmd :: [Command]
+cmd = [noCommit $ noMessages $ withOptions [formatOption, jsonOption] $
command "examinekey" (paramRepeating paramKey) seek
SectionPlumbing "prints information from a key"]
diff --git a/Command/Find.hs b/Command/Find.hs
index c800933f9..5ca2191db 100644
--- a/Command/Find.hs
+++ b/Command/Find.hs
@@ -18,8 +18,8 @@ import qualified Utility.Format
import Utility.DataUnits
import Types.Key
-def :: [Command]
-def = [mkCommand $ command "find" paramPaths seek SectionQuery "lists available files"]
+cmd :: [Command]
+cmd = [mkCommand $ command "find" paramPaths seek SectionQuery "lists available files"]
mkCommand :: Command -> Command
mkCommand = noCommit . noMessages . withOptions [formatOption, print0Option, jsonOption]
diff --git a/Command/FindRef.hs b/Command/FindRef.hs
index 26007f7c0..a552e64e4 100644
--- a/Command/FindRef.hs
+++ b/Command/FindRef.hs
@@ -10,8 +10,8 @@ module Command.FindRef where
import Command
import qualified Command.Find as Find
-def :: [Command]
-def = [Find.mkCommand $ command "findref" paramRef seek SectionPlumbing
+cmd :: [Command]
+cmd = [Find.mkCommand $ command "findref" paramRef seek SectionPlumbing
"lists files in a git ref"]
seek :: CommandSeek
diff --git a/Command/Fix.hs b/Command/Fix.hs
index 0c2bf5942..774ef8583 100644
--- a/Command/Fix.hs
+++ b/Command/Fix.hs
@@ -18,8 +18,8 @@ import Utility.Touch
#endif
#endif
-def :: [Command]
-def = [notDirect $ noCommit $ command "fix" paramPaths seek
+cmd :: [Command]
+cmd = [notDirect $ noCommit $ command "fix" paramPaths seek
SectionMaintenance "fix up symlinks to point to annexed content"]
seek :: CommandSeek
diff --git a/Command/Forget.hs b/Command/Forget.hs
index dbcce6cc3..3ea64d5c9 100644
--- a/Command/Forget.hs
+++ b/Command/Forget.hs
@@ -15,8 +15,8 @@ import qualified Annex
import Data.Time.Clock.POSIX
-def :: [Command]
-def = [withOptions forgetOptions $ command "forget" paramNothing seek
+cmd :: [Command]
+cmd = [withOptions forgetOptions $ command "forget" paramNothing seek
SectionMaintenance "prune git-annex branch history"]
forgetOptions :: [Option]
diff --git a/Command/FromKey.hs b/Command/FromKey.hs
index 7eb62fa4e..3b20749fe 100644
--- a/Command/FromKey.hs
+++ b/Command/FromKey.hs
@@ -13,8 +13,8 @@ import qualified Annex.Queue
import Annex.Content
import Types.Key
-def :: [Command]
-def = [notDirect $ notBareRepo $
+cmd :: [Command]
+cmd = [notDirect $ notBareRepo $
command "fromkey" (paramPair paramKey paramPath) seek
SectionPlumbing "adds a file using a specific key"]
diff --git a/Command/Fsck.hs b/Command/Fsck.hs
index a17662d62..46c1620f1 100644
--- a/Command/Fsck.hs
+++ b/Command/Fsck.hs
@@ -39,8 +39,8 @@ import Data.Time
import System.Posix.Types (EpochTime)
import System.Locale
-def :: [Command]
-def = [withOptions fsckOptions $ command "fsck" paramPaths seek
+cmd :: [Command]
+cmd = [withOptions fsckOptions $ command "fsck" paramPaths seek
SectionMaintenance "check for problems"]
fsckFromOption :: Option
@@ -282,7 +282,7 @@ verifyDirectMode key file = do
- the key's metadata, if available.
-
- Not checked in direct mode, because files can be changed directly.
- -}
+ -}
checkKeySize :: Key -> Annex Bool
checkKeySize key = ifM isDirect
( return True
@@ -329,7 +329,7 @@ checkKeySizeOr bad key file = case Types.Key.keySize key of
checkBackend :: Backend -> Key -> Maybe FilePath -> Annex Bool
checkBackend backend key mfile = go =<< isDirect
where
- go False = do
+ go False = do
content <- calcRepo $ gitAnnexLocation key
checkBackendOr badContent backend key content
go True = maybe nocheck checkdirect mfile
diff --git a/Command/FuzzTest.hs b/Command/FuzzTest.hs
index 7075aeddc..87bee963f 100644
--- a/Command/FuzzTest.hs
+++ b/Command/FuzzTest.hs
@@ -20,8 +20,8 @@ import System.Random (getStdRandom, random, randomR)
import Test.QuickCheck
import Control.Concurrent
-def :: [Command]
-def = [ notBareRepo $ command "fuzztest" paramNothing seek SectionTesting
+cmd :: [Command]
+cmd = [ notBareRepo $ command "fuzztest" paramNothing seek SectionTesting
"generates fuzz test files"]
seek :: CommandSeek
@@ -47,7 +47,7 @@ guardTest = unlessM (fromMaybe False . Git.Config.isTrue <$> getConfig key "") $
, "Refusing to run fuzz tests, since " ++ keyname ++ " is not set!"
]
where
- key = annexConfig "eat-my-repository"
+ key = annexConfig "eat-my-repository"
(ConfigKey keyname) = key
@@ -257,7 +257,7 @@ existingDir = do
newFile :: IO (Maybe FuzzFile)
newFile = go (100 :: Int)
where
- go 0 = return Nothing
+ go 0 = return Nothing
go n = do
f <- genFuzzFile
ifM (doesnotexist (toFilePath f))
@@ -268,7 +268,7 @@ newFile = go (100 :: Int)
newDir :: FilePath -> IO (Maybe FuzzDir)
newDir parent = go (100 :: Int)
where
- go 0 = return Nothing
+ go 0 = return Nothing
go n = do
(FuzzDir d) <- genFuzzDir
ifM (doesnotexist (parent </> d))
diff --git a/Command/GCryptSetup.hs b/Command/GCryptSetup.hs
index 2448467fd..77aadb22d 100644
--- a/Command/GCryptSetup.hs
+++ b/Command/GCryptSetup.hs
@@ -13,8 +13,8 @@ import Annex.UUID
import qualified Remote.GCrypt
import qualified Git
-def :: [Command]
-def = [dontCheck repoExists $ noCommit $
+cmd :: [Command]
+cmd = [dontCheck repoExists $ noCommit $
command "gcryptsetup" paramValue seek
SectionPlumbing "sets up gcrypt repository"]
@@ -30,7 +30,7 @@ start gcryptid = next $ next $ do
g <- gitRepo
gu <- Remote.GCrypt.getGCryptUUID True g
let newgu = genUUIDInNameSpace gCryptNameSpace gcryptid
- if gu == Nothing || gu == Just newgu
+ if isNothing gu || gu == Just newgu
then if Git.repoIsLocalBare g
then do
void $ Remote.GCrypt.setupRepo gcryptid g
diff --git a/Command/Get.hs b/Command/Get.hs
index d0be20018..a49c7c409 100644
--- a/Command/Get.hs
+++ b/Command/Get.hs
@@ -16,8 +16,8 @@ import Config.NumCopies
import Annex.Wanted
import qualified Command.Move
-def :: [Command]
-def = [withOptions getOptions $ command "get" paramPaths seek
+cmd :: [Command]
+cmd = [withOptions getOptions $ command "get" paramPaths seek
SectionCommon "make content of annexed files available"]
getOptions :: [Option]
@@ -48,7 +48,7 @@ start' expensivecheck from key afile = stopUnless (not <$> inAnnex key) $
stopUnless (Command.Move.fromOk src key) $
go $ Command.Move.fromPerform src False key afile
where
- go a = do
+ go a = do
showStart' "get" key afile
next a
diff --git a/Command/Group.hs b/Command/Group.hs
index 2b5cd2ec4..e1420be88 100644
--- a/Command/Group.hs
+++ b/Command/Group.hs
@@ -15,8 +15,8 @@ import Types.Group
import qualified Data.Set as S
-def :: [Command]
-def = [command "group" (paramPair paramRemote paramDesc) seek
+cmd :: [Command]
+cmd = [command "group" (paramPair paramRemote paramDesc) seek
SectionSetup "add a repository to a group"]
seek :: CommandSeek
diff --git a/Command/Help.hs b/Command/Help.hs
index 7998ed796..fc1206e03 100644
--- a/Command/Help.hs
+++ b/Command/Help.hs
@@ -21,8 +21,8 @@ import qualified Command.Fsck
import System.Console.GetOpt
-def :: [Command]
-def = [noCommit $ noRepo startNoRepo $ dontCheck repoExists $
+cmd :: [Command]
+cmd = [noCommit $ noRepo startNoRepo $ dontCheck repoExists $
command "help" paramNothing seek SectionQuery "display help"]
seek :: CommandSeek
@@ -47,15 +47,15 @@ showGeneralHelp :: IO ()
showGeneralHelp = putStrLn $ unlines
[ "The most frequently used git-annex commands are:"
, unlines $ map cmdline $ concat
- [ Command.Init.def
- , Command.Add.def
- , Command.Drop.def
- , Command.Get.def
- , Command.Move.def
- , Command.Copy.def
- , Command.Sync.def
- , Command.Whereis.def
- , Command.Fsck.def
+ [ Command.Init.cmd
+ , Command.Add.cmd
+ , Command.Drop.cmd
+ , Command.Get.cmd
+ , Command.Move.cmd
+ , Command.Copy.cmd
+ , Command.Sync.cmd
+ , Command.Whereis.cmd
+ , Command.Fsck.cmd
]
, "Run 'git-annex' for a complete command list."
, "Run 'git-annex command --help' for help on a specific command."
diff --git a/Command/Import.hs b/Command/Import.hs
index 97e3f7652..b20e63853 100644
--- a/Command/Import.hs
+++ b/Command/Import.hs
@@ -16,8 +16,8 @@ import Backend
import Remote
import Types.KeySource
-def :: [Command]
-def = [withOptions opts $ notBareRepo $ command "import" paramPaths seek
+cmd :: [Command]
+cmd = [withOptions opts $ notBareRepo $ command "import" paramPaths seek
SectionCommon "move and add files from outside git working copy"]
opts :: [Option]
@@ -50,8 +50,8 @@ getDuplicateMode = gen
<*> getflag cleanDuplicatesOption
<*> getflag skipDuplicatesOption
where
- getflag = Annex.getFlag . optionName
- gen False False False False = Default
+ getflag = Annex.getFlag . optionName
+ gen False False False False = Default
gen True False False False = Duplicate
gen False True False False = DeDuplicate
gen False False True False = CleanDuplicates
@@ -96,7 +96,7 @@ start mode (srcfile, destfile) =
handleexisting Nothing = noop
handleexisting (Just s)
| isDirectory s = notoverwriting "(is a directory)"
- | otherwise = ifM (Annex.getState Annex.force) $
+ | otherwise = ifM (Annex.getState Annex.force)
( liftIO $ nukeFile destfile
, notoverwriting "(use --force to override)"
)
diff --git a/Command/ImportFeed.hs b/Command/ImportFeed.hs
index 1fdba46a1..ecfee1db8 100644
--- a/Command/ImportFeed.hs
+++ b/Command/ImportFeed.hs
@@ -37,8 +37,8 @@ import Types.MetaData
import Logs.MetaData
import Annex.MetaData
-def :: [Command]
-def = [notBareRepo $ withOptions [templateOption, relaxedOption] $
+cmd :: [Command]
+cmd = [notBareRepo $ withOptions [templateOption, relaxedOption] $
command "importfeed" (paramRepeating paramUrl) seek
SectionCommon "import files from podcast feeds"]
@@ -153,7 +153,7 @@ performDownload relaxed cache todownload = case location todownload of
rundownload videourl ("." ++ Quvi.linkSuffix link) $
addUrlFileQuvi relaxed quviurl videourl
where
- forced = Annex.getState Annex.force
+ forced = Annex.getState Annex.force
{- Avoids downloading any urls that are already known to be
- associated with a file in the annex, unless forced. -}
@@ -192,7 +192,7 @@ performDownload relaxed cache todownload = case location todownload of
, return $ Just f
)
where
- f = if n < 2
+ f = if n < 2
then file
else
let (d, base) = splitFileName file
diff --git a/Command/InAnnex.hs b/Command/InAnnex.hs
index 11cbdb73d..db48a1422 100644
--- a/Command/InAnnex.hs
+++ b/Command/InAnnex.hs
@@ -11,8 +11,8 @@ import Common.Annex
import Command
import Annex.Content
-def :: [Command]
-def = [noCommit $ command "inannex" (paramRepeating paramKey) seek
+cmd :: [Command]
+cmd = [noCommit $ command "inannex" (paramRepeating paramKey) seek
SectionPlumbing "checks if keys are present in the annex"]
seek :: CommandSeek
diff --git a/Command/Indirect.hs b/Command/Indirect.hs
index e146f13b7..a363981be 100644
--- a/Command/Indirect.hs
+++ b/Command/Indirect.hs
@@ -22,8 +22,8 @@ import Annex.CatFile
import Annex.Init
import qualified Command.Add
-def :: [Command]
-def = [notBareRepo $ noDaemonRunning $
+cmd :: [Command]
+cmd = [notBareRepo $ noDaemonRunning $
command "indirect" paramNothing seek
SectionSetup "switch repository to indirect mode"]
@@ -94,7 +94,7 @@ perform = do
warnlocked
showEndOk
- warnlocked :: SomeException -> Annex ()
+ warnlocked :: SomeException -> Annex ()
warnlocked e = do
warning $ show e
warning "leaving this file as-is; correct this problem and run git annex add on it"
diff --git a/Command/Info.hs b/Command/Info.hs
index 63bc92bbe..96b7eb6d7 100644
--- a/Command/Info.hs
+++ b/Command/Info.hs
@@ -1,6 +1,6 @@
{- git-annex command
-
- - Copyright 2011 Joey Hess <joey@kitenet.net>
+ - Copyright 2011-2014 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
@@ -16,14 +16,16 @@ import Data.Tuple
import Data.Ord
import Common.Annex
-import qualified Remote
import qualified Command.Unused
import qualified Git
import qualified Annex
+import qualified Remote
+import qualified Types.Remote as Remote
import Command
import Utility.DataUnits
import Utility.DiskFree
import Annex.Content
+import Annex.Link
import Types.Key
import Logs.UUID
import Logs.Trust
@@ -65,42 +67,67 @@ data StatInfo = StatInfo
, referencedData :: Maybe KeyData
, numCopiesStats :: Maybe NumCopiesStats
}
+
+emptyStatInfo :: StatInfo
+emptyStatInfo = StatInfo Nothing Nothing Nothing
-- a state monad for running Stats in
type StatState = StateT StatInfo Annex
-def :: [Command]
-def = [noCommit $ dontCheck repoExists $ withOptions [jsonOption] $
- command "info" paramPaths seek SectionQuery
- "shows general information about the annex"]
+cmd :: [Command]
+cmd = [noCommit $ dontCheck repoExists $ withOptions [jsonOption] $
+ command "info" (paramOptional $ paramRepeating paramItem) seek SectionQuery
+ "shows information about the specified item or the repository as a whole"]
seek :: CommandSeek
seek = withWords start
-start :: [FilePath] -> CommandStart
+start :: [String] -> CommandStart
start [] = do
globalInfo
stop
start ps = do
- mapM_ localInfo =<< filterM isdir ps
+ mapM_ itemInfo ps
stop
- where
- isdir = liftIO . catchBoolIO . (isDirectory <$$> getFileStatus)
globalInfo :: Annex ()
globalInfo = do
stats <- selStats global_fast_stats global_slow_stats
showCustom "info" $ do
- evalStateT (mapM_ showStat stats) (StatInfo Nothing Nothing Nothing)
+ evalStateT (mapM_ showStat stats) emptyStatInfo
return True
-localInfo :: FilePath -> Annex ()
-localInfo dir = showCustom (unwords ["info", dir]) $ do
- stats <- selStats (tostats local_fast_stats) (tostats local_slow_stats)
- evalStateT (mapM_ showStat stats) =<< getLocalStatInfo dir
+itemInfo :: String -> Annex ()
+itemInfo p = ifM (isdir p)
+ ( dirInfo p
+ , do
+ v <- Remote.byName' p
+ case v of
+ Right r -> remoteInfo r
+ Left _ -> maybe noinfo (fileInfo p) =<< isAnnexLink p
+ )
+ where
+ isdir = liftIO . catchBoolIO . (isDirectory <$$> getFileStatus)
+ noinfo = error $ p ++ " is not a directory or an annexed file or a remote"
+
+dirInfo :: FilePath -> Annex ()
+dirInfo dir = showCustom (unwords ["info", dir]) $ do
+ stats <- selStats (tostats dir_fast_stats) (tostats dir_slow_stats)
+ evalStateT (mapM_ showStat stats) =<< getDirStatInfo dir
return True
where
- tostats = map (\s -> s dir)
+ tostats = map (\s -> s dir)
+
+fileInfo :: FilePath -> Key -> Annex ()
+fileInfo file k = showCustom (unwords ["info", file]) $ do
+ evalStateT (mapM_ showStat (file_stats file k)) emptyStatInfo
+ return True
+
+remoteInfo :: Remote -> Annex ()
+remoteInfo r = showCustom (unwords ["info", Remote.name r]) $ do
+ info <- map (\(k, v) -> simpleStat k (pure v)) <$> Remote.getInfo r
+ evalStateT (mapM_ showStat (remote_stats r ++ info)) emptyStatInfo
+ return True
selStats :: [Stat] -> [Stat] -> Annex [Stat]
selStats fast_stats slow_stats = do
@@ -132,22 +159,42 @@ global_slow_stats =
, bloom_info
, backend_usage
]
-local_fast_stats :: [FilePath -> Stat]
-local_fast_stats =
- [ local_dir
+dir_fast_stats :: [FilePath -> Stat]
+dir_fast_stats =
+ [ dir_name
, const local_annex_keys
, const local_annex_size
, const known_annex_files
, const known_annex_size
]
-local_slow_stats :: [FilePath -> Stat]
-local_slow_stats =
+dir_slow_stats :: [FilePath -> Stat]
+dir_slow_stats =
[ const numcopies_stats
]
+file_stats :: FilePath -> Key -> [Stat]
+file_stats f k =
+ [ file_name f
+ , key_size k
+ , key_name k
+ ]
+
+remote_stats :: Remote -> [Stat]
+remote_stats r = map (\s -> s r)
+ [ remote_name
+ , remote_description
+ , remote_uuid
+ , remote_cost
+ , remote_type
+ ]
+
stat :: String -> (String -> StatState String) -> Stat
stat desc a = return $ Just (desc, a desc)
+-- The json simply contains the same string that is displayed.
+simpleStat :: String -> StatState String -> Stat
+simpleStat desc getval = stat desc $ json id getval
+
nostat :: Stat
nostat = return Nothing
@@ -168,7 +215,7 @@ showStat s = maybe noop calc =<< s
lift . showRaw =<< a
repository_mode :: Stat
-repository_mode = stat "repository mode" $ json id $ lift $
+repository_mode = simpleStat "repository mode" $ lift $
ifM isDirect
( return "direct", return "indirect" )
@@ -181,15 +228,37 @@ remote_list level = stat n $ nojson $ lift $ do
where
n = showTrustLevel level ++ " repositories"
-local_dir :: FilePath -> Stat
-local_dir dir = stat "directory" $ json id $ return dir
+dir_name :: FilePath -> Stat
+dir_name dir = simpleStat "directory" $ pure dir
+
+file_name :: FilePath -> Stat
+file_name file = simpleStat "file" $ pure file
+
+remote_name :: Remote -> Stat
+remote_name r = simpleStat "remote" $ pure (Remote.name r)
+
+remote_description :: Remote -> Stat
+remote_description r = simpleStat "description" $ lift $
+ Remote.prettyUUID (Remote.uuid r)
+
+remote_uuid :: Remote -> Stat
+remote_uuid r = simpleStat "uuid" $ pure $
+ fromUUID $ Remote.uuid r
+
+remote_cost :: Remote -> Stat
+remote_cost r = simpleStat "cost" $ pure $
+ show $ Remote.cost r
+
+remote_type :: Remote -> Stat
+remote_type r = simpleStat "type" $ pure $
+ Remote.typename $ Remote.remotetype r
local_annex_keys :: Stat
local_annex_keys = stat "local annex keys" $ json show $
countKeys <$> cachedPresentData
local_annex_size :: Stat
-local_annex_size = stat "local annex size" $ json id $
+local_annex_size = simpleStat "local annex size" $
showSizeKeys <$> cachedPresentData
known_annex_files :: Stat
@@ -197,7 +266,7 @@ known_annex_files = stat "annexed files in working tree" $ json show $
countKeys <$> cachedReferencedData
known_annex_size :: Stat
-known_annex_size = stat "size of annexed files in working tree" $ json id $
+known_annex_size = simpleStat "size of annexed files in working tree" $
showSizeKeys <$> cachedReferencedData
tmp_size :: Stat
@@ -206,8 +275,14 @@ tmp_size = staleSize "temporary object directory size" gitAnnexTmpObjectDir
bad_data_size :: Stat
bad_data_size = staleSize "bad keys size" gitAnnexBadDir
+key_size :: Key -> Stat
+key_size k = simpleStat "size" $ pure $ showSizeKeys $ foldKeys [k]
+
+key_name :: Key -> Stat
+key_name k = simpleStat "key" $ pure $ key2file k
+
bloom_info :: Stat
-bloom_info = stat "bloom filter size" $ json id $ do
+bloom_info = simpleStat "bloom filter size" $ do
localkeys <- countKeys <$> cachedPresentData
capacity <- fromIntegral <$> lift Command.Unused.bloomCapacity
let note = aside $
@@ -240,7 +315,7 @@ transfer_list = stat "transfers in progress" $ nojson $ lift $ do
]
disk_size :: Stat
-disk_size = stat "available local disk space" $ json id $ lift $
+disk_size = simpleStat "available local disk space" $ lift $
calcfree
<$> (annexDiskReserve <$> Annex.getGitConfig)
<*> inRepo (getDiskFree . gitAnnexDir)
@@ -264,7 +339,7 @@ backend_usage = stat "backend usage" $ nojson $
where
calc x y = multiLine $
map (\(n, b) -> b ++ ": " ++ show n) $
- reverse $ sort $ map swap $ M.toList $
+ sortBy (flip compare) $ map swap $ M.toList $
M.unionWith (+) x y
numcopies_stats :: Stat
@@ -273,7 +348,7 @@ numcopies_stats = stat "numcopies stats" $ nojson $
where
calc = multiLine
. map (\(variance, count) -> show variance ++ ": " ++ show count)
- . reverse . sortBy (comparing snd) . M.toList
+ . sortBy (flip (comparing snd)) . M.toList
cachedPresentData :: StatState KeyData
cachedPresentData = do
@@ -296,12 +371,12 @@ cachedReferencedData = do
put s { referencedData = Just v }
return v
--- currently only available for local info
+-- currently only available for directory info
cachedNumCopiesStats :: StatState (Maybe NumCopiesStats)
cachedNumCopiesStats = numCopiesStats <$> get
-getLocalStatInfo :: FilePath -> Annex StatInfo
-getLocalStatInfo dir = do
+getDirStatInfo :: FilePath -> Annex StatInfo
+getDirStatInfo dir = do
fast <- Annex.getState Annex.fast
matcher <- Limit.getMatcher
(presentdata, referenceddata, numcopiesstats) <-
diff --git a/Command/Init.hs b/Command/Init.hs
index e8d9af167..b921c0657 100644
--- a/Command/Init.hs
+++ b/Command/Init.hs
@@ -11,8 +11,8 @@ import Common.Annex
import Command
import Annex.Init
-def :: [Command]
-def = [dontCheck repoExists $
+cmd :: [Command]
+cmd = [dontCheck repoExists $
command "init" paramDesc seek SectionSetup "initialize git-annex"]
seek :: CommandSeek
diff --git a/Command/InitRemote.hs b/Command/InitRemote.hs
index dc54023cc..51ea15373 100644
--- a/Command/InitRemote.hs
+++ b/Command/InitRemote.hs
@@ -19,8 +19,8 @@ import Logs.Trust
import Data.Ord
-def :: [Command]
-def = [command "initremote"
+cmd :: [Command]
+cmd = [command "initremote"
(paramPair paramName $ paramOptional $ paramRepeating paramKeyValue)
seek SectionSetup "creates a special (non-git) remote"]
@@ -33,11 +33,15 @@ start (name:ws) = ifM (isJust <$> findExisting name)
( error $ "There is already a special remote named \"" ++ name ++
"\". (Use enableremote to enable an existing special remote.)"
, do
- let c = newConfig name
- t <- findType config
-
- showStart "initremote" name
- next $ perform t name $ M.union config c
+ ifM (isJust <$> Remote.byNameOnly name)
+ ( error $ "There is already a remote named \"" ++ name ++ "\""
+ , do
+ let c = newConfig name
+ t <- findType config
+
+ showStart "initremote" name
+ next $ perform t name $ M.union config c
+ )
)
where
config = Logs.Remote.keyValToConfig ws
@@ -63,7 +67,7 @@ findExisting name = do
return $ headMaybe matches
newConfig :: String -> R.RemoteConfig
-newConfig name = M.singleton nameKey name
+newConfig = M.singleton nameKey
findByName :: String -> M.Map UUID R.RemoteConfig -> [(UUID, R.RemoteConfig)]
findByName n = filter (matching . snd) . M.toList
diff --git a/Command/List.hs b/Command/List.hs
index d038d6deb..98cb82311 100644
--- a/Command/List.hs
+++ b/Command/List.hs
@@ -23,8 +23,8 @@ import Annex.UUID
import qualified Annex
import Git.Types (RemoteName)
-def :: [Command]
-def = [noCommit $ withOptions [allrepos] $ command "list" paramPaths seek
+cmd :: [Command]
+cmd = [noCommit $ withOptions [allrepos] $ command "list" paramPaths seek
SectionQuery "show which remotes contain files"]
allrepos :: Option
@@ -71,15 +71,15 @@ type Present = Bool
header :: [(RemoteName, TrustLevel)] -> String
header remotes = unlines (zipWith formatheader [0..] remotes) ++ pipes (length remotes)
where
- formatheader n (remotename, trustlevel) = pipes n ++ remotename ++ trust trustlevel
- pipes = flip replicate '|'
- trust UnTrusted = " (untrusted)"
- trust _ = ""
+ formatheader n (remotename, trustlevel) = pipes n ++ remotename ++ trust trustlevel
+ pipes = flip replicate '|'
+ trust UnTrusted = " (untrusted)"
+ trust _ = ""
format :: [(TrustLevel, Present)] -> FilePath -> String
format remotes file = thereMap ++ " " ++ file
where
- thereMap = concatMap there remotes
- there (UnTrusted, True) = "x"
- there (_, True) = "X"
- there (_, False) = "_"
+ thereMap = concatMap there remotes
+ there (UnTrusted, True) = "x"
+ there (_, True) = "X"
+ there (_, False) = "_"
diff --git a/Command/Lock.hs b/Command/Lock.hs
index e6733dcb1..f227ab380 100644
--- a/Command/Lock.hs
+++ b/Command/Lock.hs
@@ -12,8 +12,8 @@ import Command
import qualified Annex.Queue
import qualified Annex
-def :: [Command]
-def = [notDirect $ command "lock" paramPaths seek SectionCommon
+cmd :: [Command]
+cmd = [notDirect $ command "lock" paramPaths seek SectionCommon
"undo unlock command"]
seek :: CommandSeek
diff --git a/Command/Log.hs b/Command/Log.hs
index b0109f117..11fd51eb8 100644
--- a/Command/Log.hs
+++ b/Command/Log.hs
@@ -34,8 +34,8 @@ data RefChange = RefChange
type Outputter = Bool -> POSIXTime -> [UUID] -> Annex ()
-def :: [Command]
-def = [withOptions options $
+cmd :: [Command]
+cmd = [withOptions options $
command "log" paramPaths seek SectionQuery "shows location log"]
options :: [Option]
diff --git a/Command/LookupKey.hs b/Command/LookupKey.hs
index 814c5d2d7..202233233 100644
--- a/Command/LookupKey.hs
+++ b/Command/LookupKey.hs
@@ -12,8 +12,8 @@ import Command
import Annex.CatFile
import Types.Key
-def :: [Command]
-def = [notBareRepo $ noCommit $ noMessages $
+cmd :: [Command]
+cmd = [notBareRepo $ noCommit $ noMessages $
command "lookupkey" (paramRepeating paramFile) seek
SectionPlumbing "looks up key used for file"]
diff --git a/Command/Map.hs b/Command/Map.hs
index b1d28113b..e15fd9c33 100644
--- a/Command/Map.hs
+++ b/Command/Map.hs
@@ -25,8 +25,8 @@ import qualified Utility.Dot as Dot
-- a link from the first repository to the second (its remote)
data Link = Link Git.Repo Git.Repo
-def :: [Command]
-def = [dontCheck repoExists $
+cmd :: [Command]
+cmd = [dontCheck repoExists $
command "map" paramNothing seek SectionQuery
"generate map of repositories"]
@@ -194,11 +194,11 @@ tryScan r
| Git.repoIsUrl r = return Nothing
| otherwise = liftIO $ safely $ Git.Config.read r
where
- pipedconfig cmd params = liftIO $ safely $
+ pipedconfig pcmd params = liftIO $ safely $
withHandle StdoutHandle createProcessSuccess p $
Git.Config.hRead r
where
- p = proc cmd $ toCommand params
+ p = proc pcmd $ toCommand params
configlist = Ssh.onRemote r (pipedconfig, return Nothing) "configlist" [] []
manualconfiglist = do
@@ -206,14 +206,15 @@ tryScan r
sshparams <- Ssh.toRepo r gc [Param sshcmd]
liftIO $ pipedconfig "ssh" sshparams
where
- sshcmd = cddir ++ " && " ++
- "git config --null --list"
+ sshcmd = "sh -c " ++ shellEscape
+ (cddir ++ " && " ++ "git config --null --list")
dir = Git.repoPath r
cddir
| "/~" `isPrefixOf` dir =
let (userhome, reldir) = span (/= '/') (drop 1 dir)
- in "cd " ++ userhome ++ " && cd " ++ shellEscape (drop 1 reldir)
- | otherwise = "cd " ++ shellEscape dir
+ in "cd " ++ userhome ++ " && " ++ cdto (drop 1 reldir)
+ | otherwise = cdto dir
+ cdto p = "if ! cd " ++ shellEscape p ++ " 2>/dev/null; then cd " ++ shellEscape p ++ ".git; fi"
-- First, try sshing and running git config manually,
-- only fall back to git-annex-shell configlist if that
diff --git a/Command/Merge.hs b/Command/Merge.hs
index 51a8b9c52..eeb151c27 100644
--- a/Command/Merge.hs
+++ b/Command/Merge.hs
@@ -13,8 +13,8 @@ import qualified Annex.Branch
import qualified Git.Branch
import Command.Sync (prepMerge, mergeLocal)
-def :: [Command]
-def = [command "merge" paramNothing seek SectionMaintenance
+cmd :: [Command]
+cmd = [command "merge" paramNothing seek SectionMaintenance
"automatically merge changes from remotes"]
seek :: CommandSeek
diff --git a/Command/MetaData.hs b/Command/MetaData.hs
index 38f9b8522..50b9b1f9a 100644
--- a/Command/MetaData.hs
+++ b/Command/MetaData.hs
@@ -16,8 +16,8 @@ import Logs.MetaData
import qualified Data.Set as S
import Data.Time.Clock.POSIX
-def :: [Command]
-def = [withOptions metaDataOptions $
+cmd :: [Command]
+cmd = [withOptions metaDataOptions $
command "metadata" paramPaths seek
SectionMetaData "sets metadata of a file"]
diff --git a/Command/Migrate.hs b/Command/Migrate.hs
index cea9e9426..19fd89c7a 100644
--- a/Command/Migrate.hs
+++ b/Command/Migrate.hs
@@ -17,8 +17,8 @@ import Annex.Content
import qualified Command.ReKey
import qualified Command.Fsck
-def :: [Command]
-def = [notDirect $
+cmd :: [Command]
+cmd = [notDirect $
command "migrate" paramPaths seek
SectionUtility "switch data to different backend"]
@@ -65,7 +65,7 @@ upgradableKey backend key = isNothing (Types.Key.keySize key) || backendupgradab
perform :: FilePath -> Key -> Backend -> Backend -> CommandPerform
perform file oldkey oldbackend newbackend = go =<< genkey
where
- go Nothing = stop
+ go Nothing = stop
go (Just (newkey, knowngoodcontent))
| knowngoodcontent = finish newkey
| otherwise = stopUnless checkcontent $ finish newkey
diff --git a/Command/Mirror.hs b/Command/Mirror.hs
index 4e9a85009..ec9ef92c3 100644
--- a/Command/Mirror.hs
+++ b/Command/Mirror.hs
@@ -17,8 +17,8 @@ import Annex.Content
import qualified Annex
import Config.NumCopies
-def :: [Command]
-def = [withOptions (fromToOptions ++ keyOptions) $
+cmd :: [Command]
+cmd = [withOptions (fromToOptions ++ keyOptions) $
command "mirror" paramPaths seek
SectionCommon "mirror content of files to/from another repository"]
@@ -32,7 +32,7 @@ seek ps = do
ps
start :: Maybe Remote -> Maybe Remote -> FilePath -> Key -> CommandStart
-start to from file key = startKey to from (Just file) key
+start to from file = startKey to from (Just file)
startKey :: Maybe Remote -> Maybe Remote -> Maybe FilePath -> Key -> CommandStart
startKey to from afile key = do
diff --git a/Command/Move.hs b/Command/Move.hs
index c3d641edd..edb7ede7b 100644
--- a/Command/Move.hs
+++ b/Command/Move.hs
@@ -17,8 +17,8 @@ import Annex.UUID
import Annex.Transfer
import Logs.Presence
-def :: [Command]
-def = [withOptions moveOptions $ command "move" paramPaths seek
+cmd :: [Command]
+cmd = [withOptions moveOptions $ command "move" paramPaths seek
SectionCommon "move content of files to/from another repository"]
moveOptions :: [Option]
@@ -34,7 +34,7 @@ seek ps = do
ps
start :: Maybe Remote -> Maybe Remote -> Bool -> FilePath -> Key -> CommandStart
-start to from move file key = start' to from move (Just file) key
+start to from move = start' to from move . Just
startKey :: Maybe Remote -> Maybe Remote -> Bool -> Key -> CommandStart
startKey to from move = start' to from move Nothing
@@ -91,7 +91,7 @@ expectedPresent dest key = do
return $ dest `elem` remotes
toPerform :: Remote -> Bool -> Key -> AssociatedFile -> Bool -> Either String Bool -> CommandPerform
-toPerform dest move key afile fastcheck isthere = do
+toPerform dest move key afile fastcheck isthere =
case isthere of
Left err -> do
showNote err
diff --git a/Command/NotifyChanges.hs b/Command/NotifyChanges.hs
index d0df05551..36997666d 100644
--- a/Command/NotifyChanges.hs
+++ b/Command/NotifyChanges.hs
@@ -19,8 +19,8 @@ import Control.Concurrent
import Control.Concurrent.Async
import Control.Concurrent.STM
-def :: [Command]
-def = [noCommit $ command "notifychanges" paramNothing seek SectionPlumbing
+cmd :: [Command]
+cmd = [noCommit $ command "notifychanges" paramNothing seek SectionPlumbing
"sends notification when git refs are changed"]
seek :: CommandSeek
@@ -51,7 +51,7 @@ start = do
-- No messages need to be received from the caller,
-- but when it closes the connection, notice and terminate.
- let receiver = forever $ void $ getLine
+ let receiver = forever $ void getLine
void $ liftIO $ concurrently sender receiver
stop
diff --git a/Command/NumCopies.hs b/Command/NumCopies.hs
index b7323ae35..773e10b6a 100644
--- a/Command/NumCopies.hs
+++ b/Command/NumCopies.hs
@@ -13,8 +13,8 @@ import Command
import Config.NumCopies
import Types.Messages
-def :: [Command]
-def = [command "numcopies" paramNumber seek
+cmd :: [Command]
+cmd = [command "numcopies" paramNumber seek
SectionSetup "configure desired number of copies"]
seek :: CommandSeek
@@ -22,16 +22,15 @@ seek = withWords start
start :: [String] -> CommandStart
start [] = startGet
-start [s] = do
- case readish s of
- Nothing -> error $ "Bad number: " ++ s
- Just n
- | n > 0 -> startSet n
- | n == 0 -> ifM (Annex.getState Annex.force)
- ( startSet n
- , error "Setting numcopies to 0 is very unsafe. You will lose data! If you really want to do that, specify --force."
- )
- | otherwise -> error "Number cannot be negative!"
+start [s] = case readish s of
+ Nothing -> error $ "Bad number: " ++ s
+ Just n
+ | n > 0 -> startSet n
+ | n == 0 -> ifM (Annex.getState Annex.force)
+ ( startSet n
+ , error "Setting numcopies to 0 is very unsafe. You will lose data! If you really want to do that, specify --force."
+ )
+ | otherwise -> error "Number cannot be negative!"
start _ = error "Specify a single number."
startGet :: CommandStart
@@ -39,9 +38,9 @@ startGet = next $ next $ do
Annex.setOutput QuietOutput
v <- getGlobalNumCopies
case v of
- Just n -> liftIO $ putStrLn $ show $ fromNumCopies n
+ Just n -> liftIO $ print $ fromNumCopies n
Nothing -> do
- liftIO $ putStrLn $ "global numcopies is not set"
+ liftIO $ putStrLn "global numcopies is not set"
old <- deprecatedNumCopies
case old of
Nothing -> liftIO $ putStrLn "(default is 1)"
diff --git a/Command/PreCommit.hs b/Command/PreCommit.hs
index 355e2766e..aaaa51fbd 100644
--- a/Command/PreCommit.hs
+++ b/Command/PreCommit.hs
@@ -26,8 +26,8 @@ import Types.MetaData
import qualified Data.Set as S
-def :: [Command]
-def = [command "pre-commit" paramPaths seek SectionPlumbing
+cmd :: [Command]
+cmd = [command "pre-commit" paramPaths seek SectionPlumbing
"run by git pre-commit hook"]
seek :: CommandSeek
@@ -59,7 +59,7 @@ startIndirect f = next $ do
next $ return True
startDirect :: [String] -> CommandStart
-startDirect _ = next $ next $ preCommitDirect
+startDirect _ = next $ next preCommitDirect
addViewMetaData :: View -> ViewedFile -> Key -> CommandStart
addViewMetaData v f k = do
diff --git a/Command/ReKey.hs b/Command/ReKey.hs
index 2919a09e9..a203ab8d5 100644
--- a/Command/ReKey.hs
+++ b/Command/ReKey.hs
@@ -17,8 +17,8 @@ import Logs.Web
import Logs.Location
import Utility.CopyFile
-def :: [Command]
-def = [notDirect $ command "rekey"
+cmd :: [Command]
+cmd = [notDirect $ command "rekey"
(paramOptional $ paramRepeating $ paramPair paramPath paramKey)
seek SectionPlumbing "change keys used for files"]
diff --git a/Command/RecvKey.hs b/Command/RecvKey.hs
index d5971d6cf..8a806875b 100644
--- a/Command/RecvKey.hs
+++ b/Command/RecvKey.hs
@@ -20,8 +20,8 @@ import qualified Types.Key
import qualified Types.Backend
import qualified Backend
-def :: [Command]
-def = [noCommit $ command "recvkey" paramKey seek
+cmd :: [Command]
+cmd = [noCommit $ command "recvkey" paramKey seek
SectionPlumbing "runs rsync in server mode to receive content"]
seek :: CommandSeek
@@ -63,7 +63,7 @@ start key = fieldTransfer Download key $ \_p ->
Nothing -> return True
Just size -> do
size' <- fromIntegral . fileSize
- <$> liftIO (getFileStatus tmp)
+ <$> liftIO (getFileStatus tmp)
return $ size == size'
if oksize
then case Backend.maybeLookupBackendName (Types.Key.keyBackendName key) of
@@ -76,7 +76,7 @@ start key = fieldTransfer Download key $ \_p ->
warning "recvkey: received key with wrong size; discarding"
return False
where
- runfsck check = ifM (check key tmp)
+ runfsck check = ifM (check key tmp)
( return True
, do
warning "recvkey: received key from direct mode repository seems to have changed as it was transferred; discarding"
diff --git a/Command/Reinit.hs b/Command/Reinit.hs
index 0fc1e8314..6de7b9932 100644
--- a/Command/Reinit.hs
+++ b/Command/Reinit.hs
@@ -14,8 +14,8 @@ import Annex.UUID
import Types.UUID
import qualified Remote
-def :: [Command]
-def = [dontCheck repoExists $
+cmd :: [Command]
+cmd = [dontCheck repoExists $
command "reinit" (paramUUID ++ " or " ++ paramDesc) seek SectionUtility ""]
seek :: CommandSeek
diff --git a/Command/Reinject.hs b/Command/Reinject.hs
index a516fe93c..a968f6f56 100644
--- a/Command/Reinject.hs
+++ b/Command/Reinject.hs
@@ -14,8 +14,8 @@ import Annex.Content
import qualified Command.Fsck
import qualified Backend
-def :: [Command]
-def = [command "reinject" (paramPair "SRC" "DEST") seek
+cmd :: [Command]
+cmd = [command "reinject" (paramPair "SRC" "DEST") seek
SectionUtility "sets content of annexed file"]
seek :: CommandSeek
diff --git a/Command/RemoteDaemon.hs b/Command/RemoteDaemon.hs
index 61c3a7d84..9f4cc884d 100644
--- a/Command/RemoteDaemon.hs
+++ b/Command/RemoteDaemon.hs
@@ -11,8 +11,8 @@ import Common.Annex
import Command
import RemoteDaemon.Core
-def :: [Command]
-def = [noCommit $ command "remotedaemon" paramNothing seek SectionPlumbing
+cmd :: [Command]
+cmd = [noCommit $ command "remotedaemon" paramNothing seek SectionPlumbing
"detects when remotes have changed, and fetches from them"]
seek :: CommandSeek
diff --git a/Command/Repair.hs b/Command/Repair.hs
index 56925d83d..8eb937ce5 100644
--- a/Command/Repair.hs
+++ b/Command/Repair.hs
@@ -16,8 +16,8 @@ import qualified Git.Ref
import Git.Types
import Annex.Version
-def :: [Command]
-def = [noCommit $ dontCheck repoExists $
+cmd :: [Command]
+cmd = [noCommit $ dontCheck repoExists $
command "repair" paramNothing seek SectionMaintenance "recover broken git repository"]
seek :: CommandSeek
@@ -68,7 +68,7 @@ repairAnnexBranch modifiedbranches
)
)
where
- okindex = Annex.Branch.withIndex $ inRepo $ Git.Repair.checkIndex
+ okindex = Annex.Branch.withIndex $ inRepo Git.Repair.checkIndex
commitindex = do
Annex.Branch.forceCommit "committing index after git repository repair"
liftIO $ putStrLn "Successfully recovered the git-annex branch using .git/annex/index"
diff --git a/Command/ResolveMerge.hs b/Command/ResolveMerge.hs
index a50e2aa9d..145db37df 100644
--- a/Command/ResolveMerge.hs
+++ b/Command/ResolveMerge.hs
@@ -14,12 +14,12 @@ import Git.Sha
import qualified Git.Branch
import Annex.AutoMerge
-def :: [Command]
-def = [command "resolvemerge" paramNothing seek SectionPlumbing
+cmd :: [Command]
+cmd = [command "resolvemerge" paramNothing seek SectionPlumbing
"resolve merge conflicts"]
seek :: CommandSeek
-seek ps = withNothing start ps
+seek = withNothing start
start :: CommandStart
start = do
diff --git a/Command/RmUrl.hs b/Command/RmUrl.hs
index e961575a3..1582d0f3f 100644
--- a/Command/RmUrl.hs
+++ b/Command/RmUrl.hs
@@ -11,8 +11,8 @@ import Common.Annex
import Command
import Logs.Web
-def :: [Command]
-def = [notBareRepo $
+cmd :: [Command]
+cmd = [notBareRepo $
command "rmurl" (paramPair paramFile paramUrl) seek
SectionCommon "record file is not available at url"]
diff --git a/Command/Schedule.hs b/Command/Schedule.hs
index a088dbef8..ce8b67da0 100644
--- a/Command/Schedule.hs
+++ b/Command/Schedule.hs
@@ -17,8 +17,8 @@ import Types.Messages
import qualified Data.Set as S
-def :: [Command]
-def = [command "schedule" (paramPair paramRemote (paramOptional paramExpression)) seek
+cmd :: [Command]
+cmd = [command "schedule" (paramPair paramRemote (paramOptional paramExpression)) seek
SectionSetup "get or set scheduled jobs"]
seek :: CommandSeek
@@ -27,7 +27,7 @@ seek = withWords start
start :: [String] -> CommandStart
start = parse
where
- parse (name:[]) = go name performGet
+ parse (name:[]) = go name performGet
parse (name:expr:[]) = go name $ \uuid -> do
showStart "schedile" name
performSet expr uuid
diff --git a/Command/Semitrust.hs b/Command/Semitrust.hs
index edba27346..146ec2192 100644
--- a/Command/Semitrust.hs
+++ b/Command/Semitrust.hs
@@ -11,8 +11,8 @@ import Command
import Types.TrustLevel
import Command.Trust (trustCommand)
-def :: [Command]
-def = [command "semitrust" (paramRepeating paramRemote) seek
+cmd :: [Command]
+cmd = [command "semitrust" (paramRepeating paramRemote) seek
SectionSetup "return repository to default trust level"]
seek :: CommandSeek
diff --git a/Command/SendKey.hs b/Command/SendKey.hs
index 13e585fc6..90eca20bb 100644
--- a/Command/SendKey.hs
+++ b/Command/SendKey.hs
@@ -16,8 +16,8 @@ import Annex.Transfer
import qualified CmdLine.GitAnnexShell.Fields as Fields
import Utility.Metered
-def :: [Command]
-def = [noCommit $ command "sendkey" paramKey seek
+cmd :: [Command]
+cmd = [noCommit $ command "sendkey" paramKey seek
SectionPlumbing "runs rsync in server mode to send content"]
seek :: CommandSeek
diff --git a/Command/Status.hs b/Command/Status.hs
index 9d184c33b..0d3efa840 100644
--- a/Command/Status.hs
+++ b/Command/Status.hs
@@ -16,8 +16,8 @@ import qualified Git.LsFiles as LsFiles
import qualified Git.Ref
import qualified Git
-def :: [Command]
-def = [notBareRepo $ noCommit $ noMessages $ withOptions [jsonOption] $
+cmd :: [Command]
+cmd = [notBareRepo $ noCommit $ noMessages $ withOptions [jsonOption] $
command "status" paramPaths seek SectionCommon
"show the working tree status"]
diff --git a/Command/Sync.hs b/Command/Sync.hs
index 6a6a254b3..a89737647 100644
--- a/Command/Sync.hs
+++ b/Command/Sync.hs
@@ -35,8 +35,8 @@ import Annex.Ssh
import Control.Concurrent.MVar
-def :: [Command]
-def = [withOptions syncOptions $
+cmd :: [Command]
+cmd = [withOptions syncOptions $
command "sync" (paramOptional (paramRepeating paramRemote))
seek SectionCommon "synchronize local repository with remotes"]
@@ -356,7 +356,7 @@ syncFile rs f k = do
handleDropsFrom locs' rs "unwanted" True k (Just f)
Nothing callCommandAction
where
- wantget have = allM id
+ wantget have = allM id
[ pure (not $ null have)
, not <$> inAnnex k
, wantGet True (Just k) (Just f)
diff --git a/Command/Test.hs b/Command/Test.hs
index 08e9d1b6e..4d481369d 100644
--- a/Command/Test.hs
+++ b/Command/Test.hs
@@ -11,8 +11,8 @@ import Common
import Command
import Messages
-def :: [Command]
-def = [ noRepo startIO $ dontCheck repoExists $
+cmd :: [Command]
+cmd = [ noRepo startIO $ dontCheck repoExists $
command "test" paramNothing seek SectionTesting
"run built-in test suite"]
diff --git a/Command/TestRemote.hs b/Command/TestRemote.hs
index 247a243e4..f0735e087 100644
--- a/Command/TestRemote.hs
+++ b/Command/TestRemote.hs
@@ -36,8 +36,8 @@ import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import qualified Data.Map as M
-def :: [Command]
-def = [ withOptions [sizeOption] $
+cmd :: [Command]
+cmd = [ withOptions [sizeOption] $
command "testremote" paramRemote seek SectionTesting
"test transfers to/from a remote"]
diff --git a/Command/TransferInfo.hs b/Command/TransferInfo.hs
index 8ab577a81..ae7fbf033 100644
--- a/Command/TransferInfo.hs
+++ b/Command/TransferInfo.hs
@@ -15,8 +15,8 @@ import Types.Key
import qualified CmdLine.GitAnnexShell.Fields as Fields
import Utility.Metered
-def :: [Command]
-def = [noCommit $ command "transferinfo" paramKey seek SectionPlumbing
+cmd :: [Command]
+cmd = [noCommit $ command "transferinfo" paramKey seek SectionPlumbing
"updates sender on number of bytes of content received"]
seek :: CommandSeek
diff --git a/Command/TransferKey.hs b/Command/TransferKey.hs
index 13bfd825e..469e01322 100644
--- a/Command/TransferKey.hs
+++ b/Command/TransferKey.hs
@@ -15,8 +15,8 @@ import Annex.Transfer
import qualified Remote
import Types.Remote
-def :: [Command]
-def = [withOptions transferKeyOptions $
+cmd :: [Command]
+cmd = [withOptions transferKeyOptions $
noCommit $ command "transferkey" paramKey seek SectionPlumbing
"transfers a key from or to a remote"]
diff --git a/Command/TransferKeys.hs b/Command/TransferKeys.hs
index fba0e6593..346e413e6 100644
--- a/Command/TransferKeys.hs
+++ b/Command/TransferKeys.hs
@@ -21,8 +21,8 @@ import Git.Types (RemoteName)
data TransferRequest = TransferRequest Direction Remote Key AssociatedFile
-def :: [Command]
-def = [command "transferkeys" paramNothing seek
+cmd :: [Command]
+cmd = [command "transferkeys" paramNothing seek
SectionPlumbing "transfers keys"]
seek :: CommandSeek
@@ -57,7 +57,7 @@ runRequests readh writeh a = do
fileEncoding writeh
go =<< readrequests
where
- go (d:rn:k:f:rest) = do
+ go (d:rn:k:f:rest) = do
case (deserialize d, deserialize rn, deserialize k, deserialize f) of
(Just direction, Just remotename, Just key, Just file) -> do
mremote <- Remote.byName' remotename
diff --git a/Command/Trust.hs b/Command/Trust.hs
index c0f013699..f02fcf617 100644
--- a/Command/Trust.hs
+++ b/Command/Trust.hs
@@ -16,19 +16,19 @@ import Logs.Group
import qualified Data.Set as S
-def :: [Command]
-def = [command "trust" (paramRepeating paramRemote) seek
+cmd :: [Command]
+cmd = [command "trust" (paramRepeating paramRemote) seek
SectionSetup "trust a repository"]
seek :: CommandSeek
seek = trustCommand "trust" Trusted
trustCommand :: String -> TrustLevel -> CommandSeek
-trustCommand cmd level = withWords start
+trustCommand c level = withWords start
where
start ws = do
let name = unwords ws
- showStart cmd name
+ showStart c name
u <- Remote.nameToUUID name
next $ perform u
perform uuid = do
diff --git a/Command/Unannex.hs b/Command/Unannex.hs
index c105eb9ce..e8cf70f51 100644
--- a/Command/Unannex.hs
+++ b/Command/Unannex.hs
@@ -22,8 +22,8 @@ import qualified Git.DiffTree as DiffTree
import Utility.CopyFile
import Command.PreCommit (lockPreCommitHook)
-def :: [Command]
-def = [command "unannex" paramPaths seek SectionUtility
+cmd :: [Command]
+cmd = [command "unannex" paramPaths seek SectionUtility
"undo accidential add command"]
seek :: CommandSeek
diff --git a/Command/Ungroup.hs b/Command/Ungroup.hs
index a88e3f7c8..a26bd34a9 100644
--- a/Command/Ungroup.hs
+++ b/Command/Ungroup.hs
@@ -15,8 +15,8 @@ import Types.Group
import qualified Data.Set as S
-def :: [Command]
-def = [command "ungroup" (paramPair paramRemote paramDesc) seek
+cmd :: [Command]
+cmd = [command "ungroup" (paramPair paramRemote paramDesc) seek
SectionSetup "remove a repository from a group"]
seek :: CommandSeek
diff --git a/Command/Uninit.hs b/Command/Uninit.hs
index 3f57782fc..ea4a3a9f6 100644
--- a/Command/Uninit.hs
+++ b/Command/Uninit.hs
@@ -21,8 +21,8 @@ import Utility.FileMode
import System.IO.HVFS
import System.IO.HVFS.Utils
-def :: [Command]
-def = [addCheck check $ command "uninit" paramPaths seek
+cmd :: [Command]
+cmd = [addCheck check $ command "uninit" paramPaths seek
SectionUtility "de-initialize git-annex and clean out repository"]
check :: Annex ()
@@ -100,7 +100,7 @@ prepareRemoveAnnexDir annexdir =
removeUnannexed :: [Key] -> Annex [Key]
removeUnannexed = go []
where
- go c [] = return c
+ go c [] = return c
go c (k:ks) = ifM (inAnnexCheck k $ liftIO . enoughlinks)
( do
lockContent k removeAnnex
diff --git a/Command/Unlock.hs b/Command/Unlock.hs
index 02704e805..bed618104 100644
--- a/Command/Unlock.hs
+++ b/Command/Unlock.hs
@@ -12,8 +12,8 @@ import Command
import Annex.Content
import Utility.CopyFile
-def :: [Command]
-def =
+cmd :: [Command]
+cmd =
[ c "unlock" "unlock files for modification"
, c "edit" "same as unlock"
]
diff --git a/Command/Untrust.hs b/Command/Untrust.hs
index 4c1035dcd..ecd0ae4cf 100644
--- a/Command/Untrust.hs
+++ b/Command/Untrust.hs
@@ -11,8 +11,8 @@ import Command
import Types.TrustLevel
import Command.Trust (trustCommand)
-def :: [Command]
-def = [command "untrust" (paramRepeating paramRemote) seek
+cmd :: [Command]
+cmd = [command "untrust" (paramRepeating paramRemote) seek
SectionSetup "do not trust a repository"]
seek :: CommandSeek
diff --git a/Command/Unused.hs b/Command/Unused.hs
index c2179447d..1859856af 100644
--- a/Command/Unused.hs
+++ b/Command/Unused.hs
@@ -35,8 +35,8 @@ import Git.FilePath
import Logs.View (is_branchView)
import Utility.Bloom
-def :: [Command]
-def = [withOptions [unusedFromOption] $ command "unused" paramNothing seek
+cmd :: [Command]
+cmd = [withOptions [unusedFromOption] $ command "unused" paramNothing seek
SectionMaintenance "look for unused file content"]
unusedFromOption :: Option
diff --git a/Command/Upgrade.hs b/Command/Upgrade.hs
index 80876290a..7e03ec3ee 100644
--- a/Command/Upgrade.hs
+++ b/Command/Upgrade.hs
@@ -11,8 +11,8 @@ import Common.Annex
import Command
import Upgrade
-def :: [Command]
-def = [dontCheck repoExists $ -- because an old version may not seem to exist
+cmd :: [Command]
+cmd = [dontCheck repoExists $ -- because an old version may not seem to exist
command "upgrade" paramNothing seek
SectionMaintenance "upgrade repository layout"]
diff --git a/Command/VAdd.hs b/Command/VAdd.hs
index e3726a051..33614ae59 100644
--- a/Command/VAdd.hs
+++ b/Command/VAdd.hs
@@ -12,8 +12,8 @@ import Command
import Annex.View
import Command.View (checkoutViewBranch)
-def :: [Command]
-def = [notBareRepo $ notDirect $ command "vadd" (paramRepeating "FIELD=GLOB")
+cmd :: [Command]
+cmd = [notBareRepo $ notDirect $ command "vadd" (paramRepeating "FIELD=GLOB")
seek SectionMetaData "add subdirs to current view"]
seek :: CommandSeek
diff --git a/Command/VCycle.hs b/Command/VCycle.hs
index f7da47fa2..eead9e022 100644
--- a/Command/VCycle.hs
+++ b/Command/VCycle.hs
@@ -14,8 +14,8 @@ import Types.View
import Logs.View
import Command.View (checkoutViewBranch)
-def :: [Command]
-def = [notBareRepo $ notDirect $
+cmd :: [Command]
+cmd = [notBareRepo $ notDirect $
command "vcycle" paramNothing seek SectionUtility
"switch view to next layout"]
diff --git a/Command/VFilter.hs b/Command/VFilter.hs
index bd17aca45..320f28568 100644
--- a/Command/VFilter.hs
+++ b/Command/VFilter.hs
@@ -12,8 +12,8 @@ import Command
import Annex.View
import Command.View (paramView, checkoutViewBranch)
-def :: [Command]
-def = [notBareRepo $ notDirect $
+cmd :: [Command]
+cmd = [notBareRepo $ notDirect $
command "vfilter" paramView seek SectionMetaData "filter current view"]
seek :: CommandSeek
diff --git a/Command/VPop.hs b/Command/VPop.hs
index 706a522f8..5046b54b5 100644
--- a/Command/VPop.hs
+++ b/Command/VPop.hs
@@ -16,8 +16,8 @@ import Types.View
import Logs.View
import Command.View (checkoutViewBranch)
-def :: [Command]
-def = [notBareRepo $ notDirect $
+cmd :: [Command]
+cmd = [notBareRepo $ notDirect $
command "vpop" (paramOptional paramNumber) seek SectionMetaData
"switch back to previous view"]
diff --git a/Command/Version.hs b/Command/Version.hs
index 526b752f0..255fd8188 100644
--- a/Command/Version.hs
+++ b/Command/Version.hs
@@ -17,8 +17,8 @@ import qualified Types.Remote as R
import qualified Remote
import qualified Backend
-def :: [Command]
-def = [noCommit $ noRepo startNoRepo $ dontCheck repoExists $
+cmd :: [Command]
+cmd = [noCommit $ noRepo startNoRepo $ dontCheck repoExists $
command "version" paramNothing seek SectionQuery "show version info"]
seek :: CommandSeek
diff --git a/Command/Vicfg.hs b/Command/Vicfg.hs
index 1f1695536..faa2d3f05 100644
--- a/Command/Vicfg.hs
+++ b/Command/Vicfg.hs
@@ -5,6 +5,8 @@
- Licensed under the GNU GPL version 3 or higher.
-}
+{-# LANGUAGE RankNTypes #-}
+
module Command.Vicfg where
import qualified Data.Map as M
@@ -12,6 +14,7 @@ import qualified Data.Set as S
import System.Environment (getEnv)
import Data.Tuple (swap)
import Data.Char (isSpace)
+import Data.Default
import Common.Annex
import Command
@@ -26,8 +29,8 @@ import Types.StandardGroups
import Types.ScheduledActivity
import Remote
-def :: [Command]
-def = [command "vicfg" paramNothing seek
+cmd :: [Command]
+cmd = [command "vicfg" paramNothing seek
SectionSetup "edit git-annex's configuration"]
seek :: CommandSeek
@@ -49,7 +52,7 @@ vicfg curcfg f = do
-- Allow EDITOR to be processed by the shell, so it can contain options.
unlessM (liftIO $ boolSystem "sh" [Param "-c", Param $ unwords [vi, shellEscape f]]) $
error $ vi ++ " exited nonzero; aborting"
- r <- parseCfg curcfg <$> liftIO (readFileStrict f)
+ r <- parseCfg (defCfg curcfg) <$> liftIO (readFileStrict f)
liftIO $ nukeFile f
case r of
Left s -> do
@@ -85,6 +88,21 @@ setCfg curcfg newcfg = do
mapM_ (uncurry groupPreferredContentSet) $ M.toList $ cfgGroupPreferredContentMap diff
mapM_ (uncurry scheduleSet) $ M.toList $ cfgScheduleMap diff
+{- Default config has all the keys from the input config, but with their
+ - default values. -}
+defCfg :: Cfg -> Cfg
+defCfg curcfg = Cfg
+ { cfgTrustMap = mapdef $ cfgTrustMap curcfg
+ , cfgGroupMap = mapdef $ cfgGroupMap curcfg
+ , cfgPreferredContentMap = mapdef $ cfgPreferredContentMap curcfg
+ , cfgRequiredContentMap = mapdef $ cfgRequiredContentMap curcfg
+ , cfgGroupPreferredContentMap = mapdef $ cfgGroupPreferredContentMap curcfg
+ , cfgScheduleMap = mapdef $ cfgScheduleMap curcfg
+ }
+ where
+ mapdef :: forall k v. Default v => M.Map k v -> M.Map k v
+ mapdef = M.map (const def)
+
diffCfg :: Cfg -> Cfg -> Cfg
diffCfg curcfg newcfg = Cfg
{ cfgTrustMap = diff cfgTrustMap
@@ -124,7 +142,7 @@ genCfg cfg descs = unlines $ intercalate [""]
, com "(Valid trust levels: " ++ trustlevels ++ ")"
]
(\(t, u) -> line "trust" u $ showTrustLevel t)
- (\u -> lcom $ line "trust" u $ showTrustLevel SemiTrusted)
+ (\u -> lcom $ line "trust" u $ showTrustLevel def)
where
trustlevels = unwords $ map showTrustLevel [Trusted .. DeadTrusted]
@@ -136,7 +154,7 @@ genCfg cfg descs = unlines $ intercalate [""]
(\(s, u) -> line "group" u $ unwords $ S.toList s)
(\u -> lcom $ line "group" u "")
where
- grouplist = unwords $ map fromStandardGroup [minBound..]
+ grouplist = unwords $ map fromStandardGroup [minBound..]
preferredcontent = settings cfg descs cfgPreferredContentMap
[ com "Repository preferred contents"
@@ -157,7 +175,7 @@ genCfg cfg descs = unlines $ intercalate [""]
(\(s, g) -> gline g s)
(\g -> gline g "")
where
- gline g value = [ unwords ["groupwanted", g, "=", value] ]
+ gline g value = [ unwords ["groupwanted", g, "=", value] ]
allgroups = S.unions $ stdgroups : M.elems (cfgGroupMap cfg)
stdgroups = S.fromList $ map fromStandardGroup [minBound..maxBound]
@@ -203,7 +221,7 @@ lcom = map (\l -> if "#" `isPrefixOf` l then l else '#' : l)
{- If there's a parse error, returns a new version of the file,
- with the problem lines noted. -}
parseCfg :: Cfg -> String -> Either String Cfg
-parseCfg curcfg = go [] curcfg . lines
+parseCfg defcfg = go [] defcfg . lines
where
go c cfg []
| null (mapMaybe fst c) = Right cfg
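A minimal, self-contained sketch of the defCfg idea from the Command/Vicfg.hs hunks above: every value in the current config is reset to its Default instance, and the edited file is parsed against that copy, so entries the user left untouched fall back to defaults instead of re-asserting their current values. The Cfg type and its single field below are simplified stand-ins, not the real git-annex types.

{-# LANGUAGE RankNTypes #-}
-- Sketch only: a one-field stand-in for git-annex's Cfg record.
import Data.Default (Default, def)
import qualified Data.Map as M

newtype Cfg = Cfg { cfgTrustMap :: M.Map String Int }
    deriving (Show)

-- Same shape as the defCfg added above: keep the keys, reset the values.
defCfg :: Cfg -> Cfg
defCfg curcfg = Cfg { cfgTrustMap = mapdef (cfgTrustMap curcfg) }
  where
    mapdef :: forall k v. Default v => M.Map k v -> M.Map k v
    mapdef = M.map (const def)

main :: IO ()
main = print $ defCfg $ Cfg $ M.fromList [("repo-a", 2), ("repo-b", 1)]
-- prints: Cfg {cfgTrustMap = fromList [("repo-a",0),("repo-b",0)]}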
diff --git a/Command/View.hs b/Command/View.hs
index 93b045c39..bfe030e23 100644
--- a/Command/View.hs
+++ b/Command/View.hs
@@ -17,8 +17,8 @@ import Types.View
import Annex.View
import Logs.View
-def :: [Command]
-def = [notBareRepo $ notDirect $
+cmd :: [Command]
+cmd = [notBareRepo $ notDirect $
command "view" paramView seek SectionMetaData "enter a view branch"]
seek :: CommandSeek
@@ -42,7 +42,7 @@ perform view = do
next $ checkoutViewBranch view applyView
paramView :: String
-paramView = paramPair (paramRepeating "TAG") (paramRepeating "FIELD=VALUE")
+paramView = paramRepeating "FIELD=VALUE"
mkView :: [String] -> Annex View
mkView params = go =<< inRepo Git.Branch.current
diff --git a/Command/Wanted.hs b/Command/Wanted.hs
index bae450d26..3f721e368 100644
--- a/Command/Wanted.hs
+++ b/Command/Wanted.hs
@@ -16,8 +16,8 @@ import Types.Messages
import qualified Data.Map as M
-def :: [Command]
-def = [command "wanted" (paramPair paramRemote (paramOptional paramExpression)) seek
+cmd :: [Command]
+cmd = [command "wanted" (paramPair paramRemote (paramOptional paramExpression)) seek
SectionSetup "get or set preferred content expression"]
seek :: CommandSeek
@@ -26,7 +26,7 @@ seek = withWords start
start :: [String] -> CommandStart
start = parse
where
- parse (name:[]) = go name performGet
+ parse (name:[]) = go name performGet
parse (name:expr:[]) = go name $ \uuid -> do
showStart "wanted" name
performSet expr uuid
diff --git a/Command/Watch.hs b/Command/Watch.hs
index 79079337c..2d25b54c3 100644
--- a/Command/Watch.hs
+++ b/Command/Watch.hs
@@ -12,8 +12,8 @@ import Assistant
import Command
import Utility.HumanTime
-def :: [Command]
-def = [notBareRepo $ withOptions [foregroundOption, stopOption] $
+cmd :: [Command]
+cmd = [notBareRepo $ withOptions [foregroundOption, stopOption] $
command "watch" paramNothing seek SectionCommon "watch for changes"]
seek :: CommandSeek
diff --git a/Command/WebApp.hs b/Command/WebApp.hs
index e329582e3..3a074218f 100644
--- a/Command/WebApp.hs
+++ b/Command/WebApp.hs
@@ -37,8 +37,8 @@ import Control.Concurrent.STM
import Network.Socket (HostName)
import System.Environment (getArgs)
-def :: [Command]
-def = [ withOptions [listenOption] $
+cmd :: [Command]
+cmd = [ withOptions [listenOption] $
noCommit $ noRepo startNoRepo $ dontCheck repoExists $ notBareRepo $
command "webapp" paramNothing seek SectionCommon "launch webapp"]
@@ -213,7 +213,7 @@ openBrowser mcmd htmlshim realurl outh errh = do
#endif
where
p = case mcmd of
- Just cmd -> proc cmd [htmlshim]
+ Just c -> proc c [htmlshim]
Nothing ->
#ifndef mingw32_HOST_OS
browserProc url
diff --git a/Command/Whereis.hs b/Command/Whereis.hs
index d2c27eb9b..582aaffc2 100644
--- a/Command/Whereis.hs
+++ b/Command/Whereis.hs
@@ -14,8 +14,8 @@ import Command
import Remote
import Logs.Trust
-def :: [Command]
-def = [noCommit $ withOptions (jsonOption : keyOptions) $
+cmd :: [Command]
+cmd = [noCommit $ withOptions (jsonOption : keyOptions) $
command "whereis" paramPaths seek SectionQuery
"lists repositories that have file content"]
diff --git a/Command/XMPPGit.hs b/Command/XMPPGit.hs
index 47c2d7ff2..ab238c85e 100644
--- a/Command/XMPPGit.hs
+++ b/Command/XMPPGit.hs
@@ -11,8 +11,8 @@ import Common.Annex
import Command
import Assistant.XMPP.Git
-def :: [Command]
-def = [noCommit $ noRepo startNoRepo $ dontCheck repoExists $
+cmd :: [Command]
+cmd = [noCommit $ noRepo startNoRepo $ dontCheck repoExists $
command "xmppgit" paramNothing seek
SectionPlumbing "git to XMPP relay"]
@@ -37,9 +37,9 @@ gitRemoteHelper = do
respond []
where
expect s = do
- cmd <- getLine
- unless (cmd == s) $
- error $ "git-remote-helpers protocol error: expected: " ++ s ++ ", but got: " ++ cmd
+ gitcmd <- getLine
+ unless (gitcmd == s) $
+ error $ "git-remote-helpers protocol error: expected: " ++ s ++ ", but got: " ++ gitcmd
respond l = do
mapM_ putStrLn l
putStrLn ""
diff --git a/Config/Cost.hs b/Config/Cost.hs
index 2d94a6b15..44a26f064 100644
--- a/Config/Cost.hs
+++ b/Config/Cost.hs
@@ -52,7 +52,7 @@ insertCostAfter l pos
| otherwise =
firstsegment ++ [costBetween item nextitem ] ++ lastsegment
where
- nextpos = pos + 1
+ nextpos = pos + 1
maxpos = length l - 1
item = l !! pos
diff --git a/Config/Files.hs b/Config/Files.hs
index 30ed0a3cf..8d5c1fd12 100644
--- a/Config/Files.hs
+++ b/Config/Files.hs
@@ -66,4 +66,4 @@ readProgramFile = do
)
)
where
- cmd = "git-annex"
+ cmd = "git-annex"
diff --git a/Creds.hs b/Creds.hs
index 73d631ff7..1f5c83570 100644
--- a/Creds.hs
+++ b/Creds.hs
@@ -15,6 +15,7 @@ module Creds (
writeCacheCreds,
readCacheCreds,
removeCreds,
+ includeCredsInfo,
) where
import Common.Annex
@@ -23,7 +24,7 @@ import Annex.Perms
import Utility.FileMode
import Crypto
import Types.Remote (RemoteConfig, RemoteConfigKey)
-import Remote.Helper.Encryptable (remoteCipher, remoteCipher', embedCreds)
+import Remote.Helper.Encryptable (remoteCipher, remoteCipher', embedCreds, EncryptionIsSetup, extractCipher)
import Utility.Env (getEnv)
import qualified Data.ByteString.Lazy.Char8 as L
@@ -39,16 +40,23 @@ data CredPairStorage = CredPairStorage
}
{- Stores creds in a remote's configuration, if the remote allows
- - that. Otherwise, caches them locally.
- - The creds are found in storage if not provided. -}
-setRemoteCredPair :: RemoteConfig -> CredPairStorage -> Maybe CredPair -> Annex RemoteConfig
-setRemoteCredPair c storage Nothing =
- maybe (return c) (setRemoteCredPair c storage . Just)
+ - that. Also caches them locally.
+ -
+ - The creds are found from the CredPairStorage storage if not provided,
+ - so may be provided by an environment variable etc.
+ -
+ - The remote's configuration should have already had a cipher stored in it
+ - if that's going to be done, so that the creds can be encrypted using the
+ - cipher. The EncryptionIsSetup phantom type ensures that is the case.
+ -}
+setRemoteCredPair :: EncryptionIsSetup -> RemoteConfig -> CredPairStorage -> Maybe CredPair -> Annex RemoteConfig
+setRemoteCredPair encsetup c storage Nothing =
+ maybe (return c) (setRemoteCredPair encsetup c storage . Just)
=<< getRemoteCredPair c storage
-setRemoteCredPair c storage (Just creds)
+setRemoteCredPair _ c storage (Just creds)
| embedCreds c = case credPairRemoteKey storage of
Nothing -> localcache
- Just key -> storeconfig key =<< remoteCipher c
+ Just key -> storeconfig key =<< remoteCipher =<< localcache
| otherwise = localcache
where
localcache = do
@@ -86,23 +94,31 @@ getRemoteCredPair c storage = maybe fromcache (return . Just) =<< fromenv
fromconfig = case credPairRemoteKey storage of
Just key -> do
mcipher <- remoteCipher' c
- case (mcipher, M.lookup key c) of
- (_, Nothing) -> return Nothing
- (Just (_cipher, SharedCipher {}), Just bcreds) ->
- -- When using a shared cipher, the
- -- creds are not stored encrypted.
- fromcreds $ fromB64 bcreds
- (Just (cipher, _), Just enccreds) -> do
- creds <- liftIO $ decrypt cipher
- (feedBytes $ L.pack $ fromB64 enccreds)
- (readBytes $ return . L.unpack)
- fromcreds creds
- (Nothing, Just bcreds) ->
+ case (M.lookup key c, mcipher) of
+ (Nothing, _) -> return Nothing
+ (Just enccreds, Just (cipher, storablecipher)) ->
+ fromenccreds enccreds cipher storablecipher
+ (Just bcreds, Nothing) ->
fromcreds $ fromB64 bcreds
Nothing -> return Nothing
+ fromenccreds enccreds cipher storablecipher = do
+ mcreds <- liftIO $ catchMaybeIO $ decrypt cipher
+ (feedBytes $ L.pack $ fromB64 enccreds)
+ (readBytes $ return . L.unpack)
+ case mcreds of
+ Just creds -> fromcreds creds
+ Nothing -> do
+ -- Work around un-encrypted creds storage
+ -- bug in old S3 and glacier remotes.
+ -- Not a problem for shared cipher.
+ case storablecipher of
+ SharedCipher {} -> showLongNote "gpg error above was caused by an old git-annex bug in credentials storage. Working around it.."
+ _ -> error "*** Insecure credentials storage detected for this remote! See https://git-annex.branchable.com/upgrades/insecure_embedded_creds/"
+ fromcreds $ fromB64 enccreds
fromcreds creds = case decodeCredPair creds of
Just credpair -> do
writeCacheCredPair credpair storage
+
return $ Just credpair
_ -> error "bad creds"
@@ -131,10 +147,16 @@ readCacheCredPair storage = maybe Nothing decodeCredPair
<$> readCacheCreds (credPairFile storage)
readCacheCreds :: FilePath -> Annex (Maybe Creds)
-readCacheCreds file = do
+readCacheCreds f = liftIO . catchMaybeIO . readFile =<< cacheCredsFile f
+
+cacheCredsFile :: FilePath -> Annex FilePath
+cacheCredsFile basefile = do
d <- fromRepo gitAnnexCredsDir
- let f = d </> file
- liftIO $ catchMaybeIO $ readFile f
+ return $ d </> basefile
+
+existsCacheCredPair :: CredPairStorage -> Annex Bool
+existsCacheCredPair storage =
+ liftIO . doesFileExist =<< cacheCredsFile (credPairFile storage)
encodeCredPair :: CredPair -> Creds
encodeCredPair (l, p) = unlines [l, p]
@@ -149,3 +171,21 @@ removeCreds file = do
d <- fromRepo gitAnnexCredsDir
let f = d </> file
liftIO $ nukeFile f
+
+includeCredsInfo :: RemoteConfig -> CredPairStorage -> [(String, String)] -> Annex [(String, String)]
+includeCredsInfo c storage info = do
+ v <- liftIO $ getEnvCredPair storage
+ case v of
+ Just _ -> do
+ let (uenv, penv) = credPairEnvironment storage
+ ret $ "from environment variables (" ++ unwords [uenv, penv] ++ ")"
+ Nothing -> case (\ck -> M.lookup ck c) =<< credPairRemoteKey storage of
+ Nothing -> ifM (existsCacheCredPair storage)
+ ( ret "stored locally"
+ , ret "not available"
+ )
+ Just _ -> case extractCipher c of
+ Just (EncryptedCipher _ _ _) -> ret "embedded in git repository (gpg encrypted)"
+ _ -> ret "embedded in git repository (not encrypted)"
+ where
+ ret s = return $ ("creds", s) : info
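The comments in the setRemoteCredPair hunk above describe a witness-value pattern: the function now takes an EncryptionIsSetup argument that callers can only obtain from encryptionSetup (or from the explicit encryptionAlreadySetup escape hatch), so credentials cannot be embedded in a remote's config before a cipher is in place to encrypt them. A small standalone sketch of that pattern, using simplified stand-in types rather than the real git-annex ones:

-- Sketch of the EncryptionIsSetup witness idea; all types are stand-ins.
import qualified Data.Map as M

type RemoteConfig = M.Map String String
type CredPair = (String, String)

-- In a real module the constructors would not be exported, so the only
-- ways to get a witness are encryptionSetup or an explicit escape hatch.
data EncryptionIsSetup = EncryptionIsSetup | NoEncryption

-- Pretend setup step: store a cipher in the config and return the witness.
encryptionSetup :: RemoteConfig -> (RemoteConfig, EncryptionIsSetup)
encryptionSetup c = (M.insert "cipher" "dummy-cipher" c, EncryptionIsSetup)

-- Storing creds demands the witness, so it cannot run before setup.
setRemoteCredPair :: EncryptionIsSetup -> RemoteConfig -> CredPair -> RemoteConfig
setRemoteCredPair _witness c (login, password) =
    M.insert "creds" (login ++ ":" ++ password) c

main :: IO ()
main = do
    let (c', witness) = encryptionSetup M.empty
    print $ setRemoteCredPair witness c' ("joey", "secret")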
diff --git a/Git/CatFile.hs b/Git/CatFile.hs
index 8e64fc558..d0bcef4fb 100644
--- a/Git/CatFile.hs
+++ b/Git/CatFile.hs
@@ -94,7 +94,7 @@ catTree :: CatFileHandle -> Ref -> IO [(FilePath, FileMode)]
catTree h treeref = go <$> catObjectDetails h treeref
where
go (Just (b, _, TreeObject)) = parsetree [] b
- go _ = []
+ go _ = []
parsetree c b = case L.break (== 0) b of
(modefile, rest)
diff --git a/Git/Command.hs b/Git/Command.hs
index 30d2dcbf9..c61cc9fe8 100644
--- a/Git/Command.hs
+++ b/Git/Command.hs
@@ -79,7 +79,7 @@ pipeWriteRead params writer repo = assertLocal repo $
writeReadProcessEnv "git" (toCommand $ gitCommandLine params repo)
(gitEnv repo) writer (Just adjusthandle)
where
- adjusthandle h = do
+ adjusthandle h = do
fileEncoding h
hSetNewlineMode h noNewlineTranslation
@@ -117,7 +117,7 @@ gitCoProcessStart restartable params repo = CoProcess.start numrestarts "git"
(toCommand $ gitCommandLine params repo)
(gitEnv repo)
where
- {- If a long-running git command like cat-file --batch
+ {- If a long-running git command like cat-file --batch
- crashes, it will likely start up again ok. If it keeps crashing
- 10 times, something is badly wrong. -}
numrestarts = if restartable then 10 else 0
diff --git a/Git/Config.hs b/Git/Config.hs
index 171c3e6c6..32c0dd1cc 100644
--- a/Git/Config.hs
+++ b/Git/Config.hs
@@ -167,7 +167,7 @@ coreBare = "core.bare"
fromPipe :: Repo -> String -> [CommandParam] -> IO (Either SomeException (Repo, String))
fromPipe r cmd params = try $
withHandle StdoutHandle createProcessSuccess p $ \h -> do
- fileEncoding h
+ fileEncoding h
val <- hGetContentsStrict h
r' <- store val r
return (r', val)
diff --git a/Git/CurrentRepo.hs b/Git/CurrentRepo.hs
index 23ebbbcad..f611f7a34 100644
--- a/Git/CurrentRepo.hs
+++ b/Git/CurrentRepo.hs
@@ -5,17 +5,13 @@
- Licensed under the GNU GPL version 3 or higher.
-}
-{-# LANGUAGE CPP #-}
-
module Git.CurrentRepo where
import Common
import Git.Types
import Git.Construct
import qualified Git.Config
-#ifndef mingw32_HOST_OS
import Utility.Env
-#endif
{- Gets the current git repository.
-
@@ -42,17 +38,13 @@ get = do
setCurrentDirectory d
return $ addworktree wt r
where
-#ifndef mingw32_HOST_OS
pathenv s = do
v <- getEnv s
case v of
Just d -> do
- void $ unsetEnv s
+ unsetEnv s
Just <$> absPath d
Nothing -> return Nothing
-#else
- pathenv _ = return Nothing
-#endif
configure Nothing (Just r) = Git.Config.read r
configure (Just d) _ = do
diff --git a/Git/DiffTree.hs b/Git/DiffTree.hs
index 59de60871..489afa86c 100644
--- a/Git/DiffTree.hs
+++ b/Git/DiffTree.hs
@@ -53,7 +53,7 @@ diffIndex ref = diffIndex' ref [Param "--cached"]
diffWorkTree :: Ref -> Repo -> IO ([DiffTreeItem], IO Bool)
diffWorkTree ref repo =
ifM (Git.Ref.headExists repo)
- ( diffIndex' ref [] repo
+ ( diffIndex' ref [] repo
, return ([], return True)
)
diff --git a/Git/GCrypt.hs b/Git/GCrypt.hs
index c2a5a98fe..db067e25c 100644
--- a/Git/GCrypt.hs
+++ b/Git/GCrypt.hs
@@ -38,12 +38,12 @@ isEncrypted _ = False
encryptedRemote :: Repo -> Repo -> IO Repo
encryptedRemote baserepo = go
where
- go Repo { location = Url url }
+ go Repo { location = Url url }
| urlPrefix `isPrefixOf` u =
fromRemoteLocation (drop plen u) baserepo
| otherwise = notencrypted
where
- u = show url
+ u = show url
plen = length urlPrefix
go _ = notencrypted
notencrypted = error "not a gcrypt encrypted repository"
@@ -92,7 +92,7 @@ getParticiantList globalconfigrepo repo remotename = KeyIds $ parse $ firstJust
]
where
defaultkey = "gcrypt.participants"
- parse (Just "simple") = []
+ parse (Just "simple") = []
parse (Just l) = words l
parse Nothing = []
diff --git a/Git/Index.hs b/Git/Index.hs
index d712245a8..c42ac42f8 100644
--- a/Git/Index.hs
+++ b/Git/Index.hs
@@ -21,8 +21,8 @@ import Utility.Env
override :: FilePath -> IO (IO ())
override index = do
res <- getEnv var
- void $ setEnv var index True
- return $ void $ reset res
+ setEnv var index True
+ return $ reset res
where
var = "GIT_INDEX_FILE"
reset (Just v) = setEnv var v True
diff --git a/Git/LsTree.hs b/Git/LsTree.hs
index 6d3ca4813..ca5e323e0 100644
--- a/Git/LsTree.hs
+++ b/Git/LsTree.hs
@@ -44,7 +44,7 @@ lsTreeParams t = [ Params "ls-tree --full-tree -z -r --", File $ fromRef t ]
lsTreeFiles :: Ref -> [FilePath] -> Repo -> IO [TreeItem]
lsTreeFiles t fs repo = map parseLsTree <$> pipeNullSplitStrict ps repo
where
- ps = [Params "ls-tree --full-tree -z --", File $ fromRef t] ++ map File fs
+ ps = [Params "ls-tree --full-tree -z --", File $ fromRef t] ++ map File fs
{- Parses a line of ls-tree output.
- (The --long format is not currently supported.) -}
diff --git a/Git/Remote.hs b/Git/Remote.hs
index 7573c87ee..7e8e5f817 100644
--- a/Git/Remote.hs
+++ b/Git/Remote.hs
@@ -70,7 +70,7 @@ remoteLocationIsSshUrl _ = False
parseRemoteLocation :: String -> Repo -> RemoteLocation
parseRemoteLocation s repo = ret $ calcloc s
where
- ret v
+ ret v
#ifdef mingw32_HOST_OS
| dosstyle v = RemotePath (dospath v)
#endif
diff --git a/Git/Repair.hs b/Git/Repair.hs
index 43f0a56fa..77a592b4e 100644
--- a/Git/Repair.hs
+++ b/Git/Repair.hs
@@ -135,11 +135,16 @@ retrieveMissingObjects missing referencerepo r
pullremotes tmpr rmts fetchrefs (FsckFoundMissing stillmissing t)
, pullremotes tmpr rmts fetchrefs ms
)
- fetchfrom fetchurl ps = runBool $
- [ Param "fetch"
- , Param fetchurl
- , Params "--force --update-head-ok --quiet"
- ] ++ ps
+ fetchfrom fetchurl ps fetchr = runBool ps' fetchr'
+ where
+ ps' =
+ [ Param "fetch"
+ , Param fetchurl
+ , Params "--force --update-head-ok --quiet"
+ ] ++ ps
+ fetchr' = fetchr { gitGlobalOpts = gitGlobalOpts fetchr ++ nogc }
+ nogc = [ Param "-c", Param "gc.auto=0" ]
+
-- fetch refs and tags
fetchrefstags = [ Param "+refs/heads/*:refs/heads/*", Param "--tags"]
-- Fetch all available refs (more likely to fail,
@@ -222,7 +227,7 @@ badBranches missing r = filterM isbad =<< getAllRefs r
getAllRefs :: Repo -> IO [Ref]
getAllRefs r = map toref <$> dirContentsRecursive refdir
where
- refdir = localGitDir r </> "refs"
+ refdir = localGitDir r </> "refs"
toref = Ref . relPathDirToFile (localGitDir r)
explodePackedRefsFile :: Repo -> IO ()
@@ -411,7 +416,7 @@ displayList items header
putStrLn header
putStr $ unlines $ map (\i -> "\t" ++ i) truncateditems
where
- numitems = length items
+ numitems = length items
truncateditems
| numitems > 10 = take 10 items ++ ["(and " ++ show (numitems - 10) ++ " more)"]
| otherwise = items
diff --git a/Git/Version.hs b/Git/Version.hs
index 5ad1d5959..5c61f859e 100644
--- a/Git/Version.hs
+++ b/Git/Version.hs
@@ -21,7 +21,7 @@ instance Show GitVersion where
installed :: IO GitVersion
installed = normalize . extract <$> readProcess "git" ["--version"]
where
- extract s = case lines s of
+ extract s = case lines s of
[] -> ""
(l:_) -> unwords $ drop 2 $ words l
diff --git a/Limit.hs b/Limit.hs
index 89dd9d33e..5d58e77f0 100644
--- a/Limit.hs
+++ b/Limit.hs
@@ -82,7 +82,7 @@ addExclude = addLimit . limitExclude
limitExclude :: MkLimit Annex
limitExclude glob = Right $ const $ return . not . matchGlobFile glob
-matchGlobFile :: String -> (MatchInfo -> Bool)
+matchGlobFile :: String -> MatchInfo -> Bool
matchGlobFile glob = go
where
cglob = compileGlob glob CaseSensative -- memoized
@@ -234,7 +234,7 @@ limitSize vs s = case readSize dataUnits s of
Nothing -> Left "bad size"
Just sz -> Right $ go sz
where
- go sz _ (MatchingFile fi) = lookupFileKey fi >>= check fi sz
+ go sz _ (MatchingFile fi) = lookupFileKey fi >>= check fi sz
go sz _ (MatchingKey key) = checkkey sz key
checkkey sz key = return $ keySize key `vs` Just sz
check _ sz (Just key) = checkkey sz key
@@ -254,7 +254,7 @@ limitMetaData s = case parseMetaData s of
let cglob = compileGlob (fromMetaValue v) CaseInsensative
in Right $ const $ checkKey (check f cglob)
where
- check f cglob k = not . S.null
+ check f cglob k = not . S.null
. S.filter (matchGlob cglob . fromMetaValue)
. metaDataValues f <$> getCurrentMetaData k
diff --git a/Locations.hs b/Locations.hs
index 0369c7a1c..bcf793bda 100644
--- a/Locations.hs
+++ b/Locations.hs
@@ -148,7 +148,7 @@ gitAnnexLink file key r = do
loc <- gitAnnexLocation' key r False
return $ relPathDirToFile (parentDir absfile) loc
where
- whoops = error $ "unable to normalize " ++ file
+ whoops = error $ "unable to normalize " ++ file
{- File used to lock a key's content. -}
gitAnnexContentLock :: Key -> Git.Repo -> GitConfig -> IO FilePath
@@ -356,7 +356,7 @@ isLinkToAnnex s = (pathSeparator:objectDir) `isInfixOf` s
preSanitizeKeyName :: String -> String
preSanitizeKeyName = concatMap escape
where
- escape c
+ escape c
| isAsciiUpper c || isAsciiLower c || isDigit c = [c]
| c `elem` ".-_ " = [c] -- common, assumed safe
| c `elem` "/%:" = [c] -- handled by keyFile
diff --git a/Logs.hs b/Logs.hs
index ff7b7dcf0..d18339361 100644
--- a/Logs.hs
+++ b/Logs.hs
@@ -90,11 +90,11 @@ locationLogFile key = hashDirLower key ++ keyFile key ++ ".log"
locationLogFileKey :: FilePath -> Maybe Key
locationLogFileKey path
| ["remote", "web"] `isPrefixOf` splitDirectories dir = Nothing
- | ext == ".log" = fileKey base
- | otherwise = Nothing
+ | ext == ".log" = fileKey base
+ | otherwise = Nothing
where
(dir, file) = splitFileName path
- (base, ext) = splitAt (length file - 4) file
+ (base, ext) = splitAt (length file - 4) file
{- The filename of the url log for a given key. -}
urlLogFile :: Key -> FilePath
@@ -117,7 +117,7 @@ urlLogFileKey path
| ext == urlLogExt = fileKey base
| otherwise = Nothing
where
- file = takeFileName path
+ file = takeFileName path
(base, ext) = splitAt (length file - extlen) file
extlen = length urlLogExt
@@ -144,7 +144,7 @@ chunkLogFileKey path
| ext == chunkLogExt = fileKey base
| otherwise = Nothing
where
- file = takeFileName path
+ file = takeFileName path
(base, ext) = splitAt (length file - extlen) file
extlen = length chunkLogExt
@@ -173,13 +173,13 @@ prop_logs_sane dummykey = and
, expect gotNewUUIDBasedLog (getLogVariety $ remoteStateLogFile dummykey)
, expect gotChunkLog (getLogVariety $ chunkLogFile dummykey)
, expect gotOtherLog (getLogVariety $ metaDataLogFile dummykey)
- , expect gotOtherLog (getLogVariety $ numcopiesLog)
+ , expect gotOtherLog (getLogVariety numcopiesLog)
]
where
- expect = maybe False
+ expect = maybe False
gotUUIDBasedLog UUIDBasedLog = True
gotUUIDBasedLog _ = False
- gotNewUUIDBasedLog NewUUIDBasedLog = True
+ gotNewUUIDBasedLog NewUUIDBasedLog = True
gotNewUUIDBasedLog _ = False
gotChunkLog (ChunkLog k) = k == dummykey
gotChunkLog _ = False
diff --git a/Logs/FsckResults.hs b/Logs/FsckResults.hs
index 619dd586c..23367a3d3 100644
--- a/Logs/FsckResults.hs
+++ b/Logs/FsckResults.hs
@@ -28,7 +28,7 @@ writeFsckResults u fsckresults = do
| S.null s -> nukeFile logfile
| otherwise -> store s t logfile
where
- store s t logfile = do
+ store s t logfile = do
createDirectoryIfMissing True (parentDir logfile)
liftIO $ viaTmp writeFile logfile $ serialize s t
serialize s t =
diff --git a/Logs/MapLog.hs b/Logs/MapLog.hs
index 1725ef953..dd3cc0696 100644
--- a/Logs/MapLog.hs
+++ b/Logs/MapLog.hs
@@ -15,7 +15,7 @@ import qualified Data.Map as M
import Data.Time.Clock.POSIX
import Data.Time
import System.Locale
-
+
import Common
data TimeStamp = Unknown | Date POSIXTime
diff --git a/Logs/MetaData.hs b/Logs/MetaData.hs
index 250317090..d63a87470 100644
--- a/Logs/MetaData.hs
+++ b/Logs/MetaData.hs
@@ -67,7 +67,7 @@ getCurrentMetaData k = do
return $ currentMetaData $ unionMetaData loggedmeta
(lastchanged ls loggedmeta)
where
- lastchanged [] _ = emptyMetaData
+ lastchanged [] _ = emptyMetaData
lastchanged ls (MetaData currentlyset) =
let m = foldl' (flip M.union) M.empty (map genlastchanged ls)
in MetaData $
diff --git a/Logs/Schedule.hs b/Logs/Schedule.hs
index 1d78467bb..540667059 100644
--- a/Logs/Schedule.hs
+++ b/Logs/Schedule.hs
@@ -35,7 +35,7 @@ scheduleSet uuid@(UUID _) activities = do
Annex.Branch.change scheduleLog $
showLog id . changeLog ts uuid val . parseLog Just
where
- val = fromScheduledActivities activities
+ val = fromScheduledActivities activities
scheduleSet NoUUID _ = error "unknown UUID; cannot modify"
scheduleMap :: Annex (M.Map UUID [ScheduledActivity])
diff --git a/Logs/SingleValue.hs b/Logs/SingleValue.hs
index cbebdc8e5..bb774b6f4 100644
--- a/Logs/SingleValue.hs
+++ b/Logs/SingleValue.hs
@@ -60,6 +60,6 @@ getLog = newestValue <$$> readLog
setLog :: (SingleValueSerializable v) => FilePath -> v -> Annex ()
setLog f v = do
- now <- liftIO getPOSIXTime
- let ent = LogEntry now v
+ now <- liftIO getPOSIXTime
+ let ent = LogEntry now v
Annex.Branch.change f $ \_old -> showLog (S.singleton ent)
diff --git a/Logs/Transitions.hs b/Logs/Transitions.hs
index 64e9d3344..15ea32401 100644
--- a/Logs/Transitions.hs
+++ b/Logs/Transitions.hs
@@ -53,7 +53,7 @@ showTransitions = unlines . map showTransitionLine . S.elems
parseTransitions :: String -> Maybe Transitions
parseTransitions = check . map parseTransitionLine . lines
where
- check l
+ check l
| all isJust l = Just $ S.fromList $ catMaybes l
| otherwise = Nothing
@@ -68,8 +68,8 @@ showTransitionLine (TransitionLine ts t) = unwords [show t, show ts]
parseTransitionLine :: String -> Maybe TransitionLine
parseTransitionLine s = TransitionLine <$> pdate ds <*> readish ts
where
- ws = words s
- ts = Prelude.head ws
+ ws = words s
+ ts = Prelude.head ws
ds = unwords $ Prelude.tail ws
pdate = utcTimeToPOSIXSeconds <$$> parseTime defaultTimeLocale "%s%Qs"
diff --git a/Logs/Trust.hs b/Logs/Trust.hs
index 047a728f4..b880f44de 100644
--- a/Logs/Trust.hs
+++ b/Logs/Trust.hs
@@ -19,6 +19,7 @@ module Logs.Trust (
) where
import qualified Data.Map as M
+import Data.Default
import Common.Annex
import Types.TrustLevel
@@ -38,7 +39,7 @@ trustGet level = M.keys . M.filter (== level) <$> trustMap
{- Returns the TrustLevel of a given repo UUID. -}
lookupTrust :: UUID -> Annex TrustLevel
-lookupTrust u = (fromMaybe SemiTrusted . M.lookup u) <$> trustMap
+lookupTrust u = (fromMaybe def . M.lookup u) <$> trustMap
{- Partitions a list of UUIDs to those matching a TrustLevel and not. -}
trustPartition :: TrustLevel -> [UUID] -> Annex ([UUID], [UUID])
diff --git a/Logs/Web.hs b/Logs/Web.hs
index ede600ec2..1d16e10b3 100644
--- a/Logs/Web.hs
+++ b/Logs/Web.hs
@@ -76,7 +76,7 @@ knownUrls = do
return $ concat r
where
geturls Nothing = return []
- geturls (Just logsha) = getLog . L.unpack <$> catObject logsha
+ geturls (Just logsha) = getLog . L.unpack <$> catObject logsha
data Downloader = DefaultDownloader | QuviDownloader
diff --git a/Remote.hs b/Remote.hs
index 8a8eb64df..37dfafa1f 100644
--- a/Remote.hs
+++ b/Remote.hs
@@ -101,14 +101,14 @@ byName (Just n) = either error Just <$> byName' n
byNameWithUUID :: Maybe RemoteName -> Annex (Maybe Remote)
byNameWithUUID = checkuuid <=< byName
where
- checkuuid Nothing = return Nothing
+ checkuuid Nothing = return Nothing
checkuuid (Just r)
- | uuid r == NoUUID =
+ | uuid r == NoUUID = error $
if remoteAnnexIgnore (gitconfig r)
- then error $ noRemoteUUIDMsg r ++
+ then noRemoteUUIDMsg r ++
" (" ++ show (remoteConfig (repo r) "ignore") ++
" is set)"
- else error $ noRemoteUUIDMsg r
+ else noRemoteUUIDMsg r
| otherwise = return $ Just r
byName' :: RemoteName -> Annex (Either String Remote)
diff --git a/Remote/Bup.hs b/Remote/Bup.hs
index 0de0e2946..4f2ddf35a 100644
--- a/Remote/Bup.hs
+++ b/Remote/Bup.hs
@@ -73,6 +73,7 @@ gen r u c gc = do
, availability = if bupLocal buprepo then LocallyAvailable else GloballyAvailable
, readonly = False
, mkUnavailable = return Nothing
+ , getInfo = return [("repo", buprepo)]
}
return $ Just $ specialRemote' specialcfg c
(simplyPrepare $ store this buprepo)
@@ -94,7 +95,7 @@ bupSetup mu _ c = do
-- verify configuration is sane
let buprepo = fromMaybe (error "Specify buprepo=") $
M.lookup "buprepo" c
- c' <- encryptionSetup c
+ (c', _encsetup) <- encryptionSetup c
-- bup init will create the repository.
-- (If the repository already exists, bup init again appears safe.)
diff --git a/Remote/Ddar.hs b/Remote/Ddar.hs
index fc226ddff..d73919bfd 100644
--- a/Remote/Ddar.hs
+++ b/Remote/Ddar.hs
@@ -70,6 +70,7 @@ gen r u c gc = do
, availability = if ddarLocal ddarrepo then LocallyAvailable else GloballyAvailable
, readonly = False
, mkUnavailable = return Nothing
+ , getInfo = return [("repo", ddarrepo)]
}
ddarrepo = fromMaybe (error "missing ddarrepo") $ remoteAnnexDdarRepo gc
specialcfg = (specialRemoteCfg c)
@@ -84,7 +85,7 @@ ddarSetup mu _ c = do
-- verify configuration is sane
let ddarrepo = fromMaybe (error "Specify ddarrepo=") $
M.lookup "ddarrepo" c
- c' <- encryptionSetup c
+ (c', _encsetup) <- encryptionSetup c
-- The ddarrepo is stored in git config, as well as this repo's
-- persistant state, so it can vary between hosts.
diff --git a/Remote/Directory.hs b/Remote/Directory.hs
index 3137c9534..2e9e013ab 100644
--- a/Remote/Directory.hs
+++ b/Remote/Directory.hs
@@ -67,7 +67,8 @@ gen r u c gc = do
availability = LocallyAvailable,
remotetype = remote,
mkUnavailable = gen r u c $
- gc { remoteAnnexDirectory = Just "/dev/null" }
+ gc { remoteAnnexDirectory = Just "/dev/null" },
+ getInfo = return [("directory", dir)]
}
where
dir = fromMaybe (error "missing directory") $ remoteAnnexDirectory gc
@@ -81,7 +82,7 @@ directorySetup mu _ c = do
absdir <- liftIO $ absPath dir
liftIO $ unlessM (doesDirectoryExist absdir) $
error $ "Directory does not exist: " ++ absdir
- c' <- encryptionSetup c
+ (c', _encsetup) <- encryptionSetup c
-- The directory is stored in git config, not in this remote's
-- persistant state, so it can vary between hosts.
diff --git a/Remote/External.hs b/Remote/External.hs
index 6ba0e2f3a..e907ab0cf 100644
--- a/Remote/External.hs
+++ b/Remote/External.hs
@@ -68,6 +68,7 @@ gen r u c gc = do
remotetype = remote,
mkUnavailable = gen r u c $
gc { remoteAnnexExternalType = Just "!dne!" }
+ , getInfo = return [("externaltype", externaltype)]
}
where
externaltype = fromMaybe (error "missing externaltype") (remoteAnnexExternalType gc)
@@ -77,7 +78,7 @@ externalSetup mu _ c = do
u <- maybe (liftIO genUUID) return mu
let externaltype = fromMaybe (error "Specify externaltype=") $
M.lookup "externaltype" c
- c' <- encryptionSetup c
+ (c', _encsetup) <- encryptionSetup c
external <- newExternal externaltype u c'
handleRequest external INITREMOTE Nothing $ \resp -> case resp of
@@ -169,7 +170,7 @@ handleRequest' lck external req mp responsehandler
go
| otherwise = go
where
- go = do
+ go = do
sendMessage lck external req
loop
loop = receiveMessage lck external responsehandler
@@ -191,7 +192,7 @@ handleRequest' lck external req mp responsehandler
send $ VALUE value
handleRemoteRequest (SETCREDS setting login password) = do
c <- liftIO $ atomically $ readTMVar $ externalConfig external
- c' <- setRemoteCredPair c (credstorage setting) $
+ c' <- setRemoteCredPair encryptionAlreadySetup c (credstorage setting) $
Just (login, password)
void $ liftIO $ atomically $ swapTMVar (externalConfig external) c'
handleRemoteRequest (GETCREDS setting) = do
diff --git a/Remote/GCrypt.hs b/Remote/GCrypt.hs
index a95f21669..995c3e838 100644
--- a/Remote/GCrypt.hs
+++ b/Remote/GCrypt.hs
@@ -121,6 +121,7 @@ gen' r u c gc = do
, availability = availabilityCalc r
, remotetype = remote
, mkUnavailable = return Nothing
+ , getInfo = return $ gitRepoInfo r
}
return $ Just $ specialRemote' specialcfg c
(simplyPrepare $ store this rsyncopts)
@@ -147,7 +148,7 @@ rsyncTransport r
| ":" `isInfixOf` loc = sshtransport $ separate (== ':') loc
| otherwise = othertransport
where
- loc = Git.repoLocation r
+ loc = Git.repoLocation r
sshtransport (host, path) = do
let rsyncpath = if "/~/" `isPrefixOf` path
then drop 3 path
@@ -166,9 +167,9 @@ gCryptSetup :: Maybe UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConf
gCryptSetup mu _ c = go $ M.lookup "gitrepo" c
where
remotename = fromJust (M.lookup "name" c)
- go Nothing = error "Specify gitrepo="
+ go Nothing = error "Specify gitrepo="
go (Just gitrepo) = do
- c' <- encryptionSetup c
+ (c', _encsetup) <- encryptionSetup c
inRepo $ Git.Command.run
[ Params "remote add"
, Param remotename
@@ -234,7 +235,7 @@ setupRepo gcryptid r
- create the objectDir on the remote,
- which is needed for direct rsync of objects to work.
-}
- rsyncsetup = Remote.Rsync.withRsyncScratchDir $ \tmp -> do
+ rsyncsetup = Remote.Rsync.withRsyncScratchDir $ \tmp -> do
liftIO $ createDirectoryIfMissing True $ tmp </> objectDir
(rsynctransport, rsyncurl, _) <- rsyncTransport r
let tmpconfig = tmp </> "config"
@@ -266,7 +267,7 @@ isShell r = case method of
AccessShell -> True
_ -> False
where
- method = toAccessMethod $ fromMaybe "" $
+ method = toAccessMethod $ fromMaybe "" $
remoteAnnexGCrypt $ gitconfig r
shellOrRsync :: Remote -> Annex a -> Annex a -> Annex a
@@ -352,7 +353,7 @@ checkKey r rsyncopts k
| Git.repoIsSsh (repo r) = shellOrRsync r checkshell checkrsync
| otherwise = unsupportedUrl
where
- checkrsync = Remote.Rsync.checkKey (repo r) rsyncopts k
+ checkrsync = Remote.Rsync.checkKey (repo r) rsyncopts k
checkshell = Ssh.inAnnex (repo r) k
{- Annexed objects are hashed using lower-case directories for max
diff --git a/Remote/Git.hs b/Remote/Git.hs
index 6397c1a2e..50c34a2bb 100644
--- a/Remote/Git.hs
+++ b/Remote/Git.hs
@@ -159,6 +159,7 @@ gen r u c gc
, availability = availabilityCalc r
, remotetype = remote
, mkUnavailable = unavailable r u c gc
+ , getInfo = return $ gitRepoInfo r
}
unavailable :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> Annex (Maybe Remote)
@@ -305,7 +306,7 @@ inAnnex rmt key
| Git.repoIsUrl r = checkremote
| otherwise = checklocal
where
- r = repo rmt
+ r = repo rmt
checkhttp = do
showChecking r
ifM (Url.withUrlOptions $ \uo -> anyM (\u -> Url.checkBoth u (keySize key) uo) (keyUrls rmt key))
diff --git a/Remote/Glacier.hs b/Remote/Glacier.hs
index 18038a79c..99003f29a 100644
--- a/Remote/Glacier.hs
+++ b/Remote/Glacier.hs
@@ -66,7 +66,9 @@ gen r u c gc = new <$> remoteCost gc veryExpensiveRemoteCost
readonly = False,
availability = GloballyAvailable,
remotetype = remote,
- mkUnavailable = return Nothing
+ mkUnavailable = return Nothing,
+ getInfo = includeCredsInfo c (AWS.creds u) $
+ [ ("glacier vault", getVault c) ]
}
specialcfg = (specialRemoteCfg c)
-- Disabled until jobList gets support for chunks.
@@ -76,12 +78,12 @@ gen r u c gc = new <$> remoteCost gc veryExpensiveRemoteCost
glacierSetup :: Maybe UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConfig, UUID)
glacierSetup mu mcreds c = do
u <- maybe (liftIO genUUID) return mu
- c' <- setRemoteCredPair c (AWS.creds u) mcreds
- glacierSetup' (isJust mu) u c'
-glacierSetup' :: Bool -> UUID -> RemoteConfig -> Annex (RemoteConfig, UUID)
-glacierSetup' enabling u c = do
- c' <- encryptionSetup c
- let fullconfig = c' `M.union` defaults
+ glacierSetup' (isJust mu) u mcreds c
+glacierSetup' :: Bool -> UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConfig, UUID)
+glacierSetup' enabling u mcreds c = do
+ (c', encsetup) <- encryptionSetup c
+ c'' <- setRemoteCredPair encsetup c' (AWS.creds u) mcreds
+ let fullconfig = c'' `M.union` defaults
unless enabling $
genVault fullconfig u
gitConfigSpecialRemote u fullconfig "glacier" "true"
@@ -141,7 +143,10 @@ retrieve r k sink = go =<< glacierEnv c u
]
go Nothing = error "cannot retrieve from glacier"
go (Just e) = do
- let cmd = (proc "glacier" (toCommand params)) { env = Just e }
+ let cmd = (proc "glacier" (toCommand params))
+ { env = Just e
+ , std_out = CreatePipe
+ }
(_, Just h, _, pid) <- liftIO $ createProcess cmd
-- Glacier cannot store empty files, so if the output is
-- empty, the content is not available yet.
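A note on the hunk above: createProcess only hands back a stdout Handle when
std_out = CreatePipe is requested, so the pattern match on (Just h) depends on
that field being set. A minimal, self-contained sketch of the same pattern
(illustrative only; "echo" stands in here for the glacier-cli invocation):

	import System.Process
	import System.IO

	main :: IO ()
	main = do
		-- Request a pipe for stdout; without CreatePipe the Handle is Nothing.
		let cmd = (proc "echo" ["hello"]) { std_out = CreatePipe }
		(_, Just h, _, pid) <- createProcess cmd
		out <- hGetContents h
		putStr out
		_ <- waitForProcess pid
		return ()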
diff --git a/Remote/Helper/Chunked.hs b/Remote/Helper/Chunked.hs
index 271978658..806fab542 100644
--- a/Remote/Helper/Chunked.hs
+++ b/Remote/Helper/Chunked.hs
@@ -8,6 +8,7 @@
module Remote.Helper.Chunked (
ChunkSize,
ChunkConfig(..),
+ describeChunkConfig,
getChunkConfig,
storeChunks,
removeChunks,
@@ -34,6 +35,14 @@ data ChunkConfig
| LegacyChunks ChunkSize
deriving (Show)
+describeChunkConfig :: ChunkConfig -> String
+describeChunkConfig NoChunks = "none"
+describeChunkConfig (UnpaddedChunks sz) = describeChunkSize sz ++ " chunks"
+describeChunkConfig (LegacyChunks sz) = describeChunkSize sz ++ " chunks (old style)"
+
+describeChunkSize :: ChunkSize -> String
+describeChunkSize sz = roughSize storageUnits False (fromIntegral sz)
+
noChunks :: ChunkConfig -> Bool
noChunks NoChunks = True
noChunks _ = False
@@ -123,7 +132,7 @@ storeChunks u chunkconfig k f p storer checker =
loop bytesprocessed (chunk, bs) chunkkeys
| L.null chunk && numchunks > 0 = do
- -- Once all chunks are successfully
+ -- Once all chunks are successfully
-- stored, update the chunk log.
chunksStored u k (FixedSizeChunks chunksize) numchunks
return True
@@ -138,7 +147,7 @@ storeChunks u chunkconfig k f p storer checker =
)
where
numchunks = numChunks chunkkeys
- {- The MeterUpdate that is passed to the action
+ {- The MeterUpdate that is passed to the action
- storing a chunk is offset, so that it reflects
- the total bytes that have already been stored
- in previous chunks. -}
@@ -290,7 +299,7 @@ retrieveChunks retriever u chunkconfig encryptor basek dest basep sink
hSeek h AbsoluteSeek startpoint
return h
- {- Progress meter updating is a bit tricky: If the Retriever
+ {- Progress meter updating is a bit tricky: If the Retriever
- populates a file, it is responsible for updating progress
- as the file is being retrieved.
-
diff --git a/Remote/Helper/Encryptable.hs b/Remote/Helper/Encryptable.hs
index 69216a793..4903cffb4 100644
--- a/Remote/Helper/Encryptable.hs
+++ b/Remote/Helper/Encryptable.hs
@@ -5,7 +5,19 @@
- Licensed under the GNU GPL version 3 or higher.
-}
-module Remote.Helper.Encryptable where
+module Remote.Helper.Encryptable (
+ EncryptionIsSetup,
+ encryptionSetup,
+ noEncryptionUsed,
+ encryptionAlreadySetup,
+ remoteCipher,
+ remoteCipher',
+ embedCreds,
+ cipherKey,
+ storeCipher,
+ extractCipher,
+ describeEncryption,
+) where
import qualified Data.Map as M
@@ -16,11 +28,26 @@ import Types.Crypto
import qualified Annex
import Utility.Base64
+-- Used to ensure that encryption has been set up before trying to
+-- eg, store creds in the remote config that would need to use the
+-- encryption setup.
+data EncryptionIsSetup = EncryptionIsSetup | NoEncryption
+
+-- Remotes that don't use encryption can use this instead of
+-- encryptionSetup.
+noEncryptionUsed :: EncryptionIsSetup
+noEncryptionUsed = NoEncryption
+
+-- Using this avoids the type-safe check, so you'd better be sure
+-- of what you're doing.
+encryptionAlreadySetup :: EncryptionIsSetup
+encryptionAlreadySetup = EncryptionIsSetup
+
{- Encryption setup for a remote. The user must specify whether to use
- an encryption key, or not encrypt. An encrypted cipher is created, or is
- updated to be accessible to an additional encryption key. Or the user
- could opt to use a shared cipher, which is stored unencrypted. -}
-encryptionSetup :: RemoteConfig -> Annex RemoteConfig
+encryptionSetup :: RemoteConfig -> Annex (RemoteConfig, EncryptionIsSetup)
encryptionSetup c = maybe genCipher updateCipher $ extractCipher c
where
-- The type of encryption
@@ -28,11 +55,11 @@ encryptionSetup c = maybe genCipher updateCipher $ extractCipher c
-- Generate a new cipher, depending on the chosen encryption scheme
genCipher = case encryption of
_ | M.member "cipher" c || M.member "cipherkeys" c -> cannotchange
- Just "none" -> return c
+ Just "none" -> return (c, NoEncryption)
Just "shared" -> use "encryption setup" . genSharedCipher
=<< highRandomQuality
-- hybrid encryption is the default when a keyid is
- -- specified but no encryption
+ -- specified but no encryption
_ | maybe (M.member "keyid" c) (== "hybrid") encryption ->
use "encryption setup" . genEncryptedCipher key Hybrid
=<< highRandomQuality
@@ -48,7 +75,7 @@ encryptionSetup c = maybe genCipher updateCipher $ extractCipher c
cannotchange = error "Cannot set encryption type of existing remotes."
-- Update an existing cipher if possible.
updateCipher v = case v of
- SharedCipher _ | maybe True (== "shared") encryption -> return c'
+ SharedCipher _ | maybe True (== "shared") encryption -> return (c', EncryptionIsSetup)
EncryptedCipher _ variant _
| maybe True (== if variant == Hybrid then "hybrid" else "pubkey") encryption ->
use "encryption update" $ updateEncryptedCipher newkeys v
@@ -57,22 +84,22 @@ encryptionSetup c = maybe genCipher updateCipher $ extractCipher c
showNote m
cipher <- liftIO a
showNote $ describeCipher cipher
- return $ storeCipher c' cipher
+ return (storeCipher c' cipher, EncryptionIsSetup)
highRandomQuality =
(&&) (maybe True ( /= "false") $ M.lookup "highRandomQuality" c)
<$> fmap not (Annex.getState Annex.fast)
c' = foldr M.delete c
- -- git-annex used to remove 'encryption' as well, since
- -- it was redundant; we now need to keep it for
- -- public-key encryption, hence we leave it on newer
- -- remotes (while being backward-compatible).
+ -- git-annex used to remove 'encryption' as well, since
+ -- it was redundant; we now need to keep it for
+ -- public-key encryption, hence we leave it on newer
+ -- remotes (while being backward-compatible).
[ "keyid", "keyid+", "keyid-", "highRandomQuality" ]
-{- Gets encryption Cipher. The decrypted Ciphers are cached in the Annex
- - state. -}
remoteCipher :: RemoteConfig -> Annex (Maybe Cipher)
remoteCipher = fmap fst <$$> remoteCipher'
+{- Gets encryption Cipher. The decrypted Ciphers are cached in the Annex
+ - state. -}
remoteCipher' :: RemoteConfig -> Annex (Maybe (Cipher, StorableCipher))
remoteCipher' c = go $ extractCipher c
where
@@ -131,3 +158,15 @@ extractCipher c = case (M.lookup "cipher" c,
_ -> Nothing
where
readkeys = KeyIds . split ","
+
+describeEncryption :: RemoteConfig -> String
+describeEncryption c = case extractCipher c of
+ Nothing -> "not encrypted"
+ (Just (SharedCipher _)) -> "encrypted (encryption key stored in git repository)"
+ (Just (EncryptedCipher _ v (KeyIds { keyIds = ks }))) -> unwords $ catMaybes
+ [ Just "encrypted (to gpg keys:"
+ , Just (unwords ks ++ ")")
+ , case v of
+ PubKey -> Nothing
+ Hybrid -> Just "(hybrid mode)"
+ ]
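The EncryptionIsSetup value exported above is a witness: functions that embed
creds can require it as an argument, which makes it impossible to store creds
before the cipher exists (the ordering mistake behind CVE-2014-6274). A hedged,
self-contained sketch of the pattern, with placeholder Config and Creds types
standing in for git-annex's real RemoteConfig and CredPair:

	import qualified Data.Map as M

	type Config = M.Map String String
	type Creds = (String, String)

	-- Witness that encryption setup already ran (or was deliberately skipped).
	data EncryptionIsSetup = EncryptionIsSetup | NoEncryption

	encryptionSetup :: Config -> IO (Config, EncryptionIsSetup)
	encryptionSetup c = return (M.insert "cipher" "<generated>" c, EncryptionIsSetup)

	-- Storing creds demands the witness, so it cannot run before encryptionSetup.
	setRemoteCredPair :: EncryptionIsSetup -> Config -> Creds -> IO Config
	setRemoteCredPair _witness c (login, password) =
		return (M.insert "creds" (login ++ ":" ++ password) c)

	remoteSetup :: Config -> Creds -> IO Config
	remoteSetup c creds = do
		(c', encsetup) <- encryptionSetup c
		setRemoteCredPair encsetup c' creds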
diff --git a/Remote/Helper/Git.hs b/Remote/Helper/Git.hs
index b405fd358..156d7ac28 100644
--- a/Remote/Helper/Git.hs
+++ b/Remote/Helper/Git.hs
@@ -30,3 +30,8 @@ guardUsable :: Git.Repo -> Annex a -> Annex a -> Annex a
guardUsable r fallback a
| Git.repoIsLocalUnknown r = fallback
| otherwise = a
+
+gitRepoInfo :: Git.Repo -> [(String, String)]
+gitRepoInfo r =
+ [ ("repository location", Git.repoLocation r)
+ ]
diff --git a/Remote/Helper/Special.hs b/Remote/Helper/Special.hs
index ba9ff4fb4..181d7548f 100644
--- a/Remote/Helper/Special.hs
+++ b/Remote/Helper/Special.hs
@@ -87,7 +87,7 @@ checkPrepare checker helper k a = ifM (checker k)
-- Use to acquire a resource when preparing a helper.
resourcePrepare :: (Key -> (r -> Annex Bool) -> Annex Bool) -> (r -> helper) -> Preparer helper
resourcePrepare withr helper k a = withr k $ \r ->
- a (Just (helper r))
+ a (Just (helper r))
-- A Storer that expects to be provided with a file containing
-- the content of the key to store.
@@ -168,6 +168,12 @@ specialRemote' cfg c preparestorer prepareretriever prepareremover preparecheckp
(cost baser)
(const $ cost baser + encryptedRemoteCostAdj)
(extractCipher c)
+ , getInfo = do
+ l <- getInfo baser
+ return $ l ++
+ [ ("encryption", describeEncryption c)
+ , ("chunking", describeChunkConfig (chunkConfig cfg))
+ ]
}
cip = cipherKey c
gpgopts = getGpgEncParams encr
@@ -196,7 +202,7 @@ specialRemote' cfg c preparestorer prepareretriever prepareremover preparecheckp
retrieveKeyFileGen k dest p enc =
safely $ prepareretriever k $ safely . go
where
- go (Just retriever) = displayprogress p k $ \p' ->
+ go (Just retriever) = displayprogress p k $ \p' ->
retrieveChunks retriever (uuid baser) chunkconfig
enck k dest p' (sink dest enc)
go Nothing = return False
@@ -210,7 +216,7 @@ specialRemote' cfg c preparestorer prepareretriever prepareremover preparecheckp
checkPresentGen k enc = preparecheckpresent k go
where
- go (Just checker) = checkPresentChunks checker (uuid baser) chunkconfig enck k
+ go (Just checker) = checkPresentChunks checker (uuid baser) chunkconfig enck k
go Nothing = cantCheck baser
enck = maybe id snd enc
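With the getInfo addition in the hunk above, a special remote's info becomes the
base remote's fields plus two computed ones. A hedged illustration of the
resulting list for a hypothetical encrypted, chunked rsync special remote (the
exact strings come from describeEncryption and describeChunkConfig, so they are
indicative rather than exact):

	info :: [(String, String)]
	info =
		[ ("url", "ssh://example.com/annex")   -- from the base remote's getInfo
		, ("encryption", "encrypted (encryption key stored in git repository)")
		, ("chunking", "1 megabyte chunks")
		]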
diff --git a/Remote/Hook.hs b/Remote/Hook.hs
index 8e6ac439d..f7c428e99 100644
--- a/Remote/Hook.hs
+++ b/Remote/Hook.hs
@@ -60,7 +60,8 @@ gen r u c gc = do
availability = GloballyAvailable,
remotetype = remote,
mkUnavailable = gen r u c $
- gc { remoteAnnexHookType = Just "!dne!" }
+ gc { remoteAnnexHookType = Just "!dne!" },
+ getInfo = return [("hooktype", hooktype)]
}
where
hooktype = fromMaybe (error "missing hooktype") $ remoteAnnexHookType gc
@@ -70,7 +71,7 @@ hookSetup mu _ c = do
u <- maybe (liftIO genUUID) return mu
let hooktype = fromMaybe (error "Specify hooktype=") $
M.lookup "hooktype" c
- c' <- encryptionSetup c
+ (c', _encsetup) <- encryptionSetup c
gitConfigSpecialRemote u c' "hooktype" hooktype
return (c', u)
@@ -138,7 +139,7 @@ checkKey r h k = do
v <- lookupHook h action
liftIO $ check v
where
- action = "checkpresent"
+ action = "checkpresent"
findkey s = key2file k `elem` lines s
check Nothing = error $ action ++ " hook misconfigured"
check (Just hook) = do
diff --git a/Remote/Rsync.hs b/Remote/Rsync.hs
index 698d733e6..a87d05a33 100644
--- a/Remote/Rsync.hs
+++ b/Remote/Rsync.hs
@@ -83,6 +83,7 @@ gen r u c gc = do
, availability = if islocal then LocallyAvailable else GloballyAvailable
, remotetype = remote
, mkUnavailable = return Nothing
+ , getInfo = return [("url", url)]
}
where
specialcfg = (specialRemoteCfg c)
@@ -138,7 +139,7 @@ rsyncSetup mu _ c = do
-- verify configuration is sane
let url = fromMaybe (error "Specify rsyncurl=") $
M.lookup "rsyncurl" c
- c' <- encryptionSetup c
+ (c', _encsetup) <- encryptionSetup c
-- The rsyncurl is stored in git config, not only in this remote's
-- persistent state, so it can vary between hosts.
@@ -175,7 +176,7 @@ store o k src meterupdate = withRsyncScratchDir $ \tmp -> do
]
else return False
where
- {- If the key being sent is encrypted or chunked, the file
+ {- If the key being sent is encrypted or chunked, the file
- containing its content is a temp file, and so can be
- renamed into place. Otherwise, the file is the annexed
- object file, and has to be copied or hard linked into place. -}
diff --git a/Remote/S3.hs b/Remote/S3.hs
index 7c49937ce..fe0b4992a 100644
--- a/Remote/S3.hs
+++ b/Remote/S3.hs
@@ -5,9 +5,7 @@
- Licensed under the GNU GPL version 3 or higher.
-}
-{-# LANGUAGE TypeFamilies #-}
-
-module Remote.S3 (remote, iaHost, configIA, isIAHost, iaItemUrl) where
+module Remote.S3 (remote, iaHost, configIA, isIA, iaItemUrl) where
import qualified Aws as AWS
import qualified Aws.Core as AWS
@@ -83,16 +81,21 @@ gen r u c gc = do
readonly = False,
availability = GloballyAvailable,
remotetype = remote,
- mkUnavailable = gen r u (M.insert "host" "!dne!" c) gc
+ mkUnavailable = gen r u (M.insert "host" "!dne!" c) gc,
+ getInfo = includeCredsInfo c (AWS.creds u) $ catMaybes
+ [ Just ("bucket", fromMaybe "unknown" (getBucket c))
+ , if isIA c
+ then Just ("internet archive item", iaItemUrl $ fromMaybe "unknown" $ getBucket c)
+ else Nothing
+ ]
}
s3Setup :: Maybe UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConfig, UUID)
s3Setup mu mcreds c = do
u <- maybe (liftIO genUUID) return mu
- c' <- setRemoteCredPair c (AWS.creds u) mcreds
- s3Setup' u c'
-s3Setup' :: UUID -> RemoteConfig -> Annex (RemoteConfig, UUID)
-s3Setup' u c = if configIA c then archiveorg else defaulthost
+ s3Setup' u mcreds c
+s3Setup' :: UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConfig, UUID)
+s3Setup' u mcreds c = if isIA c then archiveorg else defaulthost
where
remotename = fromJust (M.lookup "name" c)
defbucket = remotename ++ "-" ++ fromUUID u
@@ -109,25 +112,27 @@ s3Setup' u c = if configIA c then archiveorg else defaulthost
return (fullconfig, u)
defaulthost = do
- c' <- encryptionSetup c
- let fullconfig = c' `M.union` defaults
+ (c', encsetup) <- encryptionSetup c
+ c'' <- setRemoteCredPair encsetup c' (AWS.creds u) mcreds
+ let fullconfig = c'' `M.union` defaults
genBucket fullconfig u
use fullconfig
archiveorg = do
showNote "Internet Archive mode"
+ c' <- setRemoteCredPair noEncryptionUsed c (AWS.creds u) mcreds
-- Ensure user enters a valid bucket name, since
-- this determines the name of the archive.org item.
let validbucket = replace " " "-" $ map toLower $
fromMaybe (error "specify bucket=") $
- getBucketName c
+ getBucketName c'
let archiveconfig =
-- IA accepts x-amz-* as an alias for x-archive-*
M.mapKeys (replace "x-archive-" "x-amz-") $
-- encryption does not make sense here
M.insert "encryption" "none" $
M.insert "bucket" validbucket $
- M.union c $
+ M.union c' $
-- special constraints on key names
M.insert "mungekeys" "ia" defaults
info <- extractS3Info archiveconfig
diff --git a/Remote/Tahoe.hs b/Remote/Tahoe.hs
index bde8ee9d7..7dd231c06 100644
--- a/Remote/Tahoe.hs
+++ b/Remote/Tahoe.hs
@@ -84,7 +84,8 @@ gen r u c gc = do
readonly = False,
availability = GloballyAvailable,
remotetype = remote,
- mkUnavailable = return Nothing
+ mkUnavailable = return Nothing,
+ getInfo = return []
}
tahoeSetup :: Maybe UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConfig, UUID)
@@ -167,7 +168,7 @@ writeSharedConvergenceSecret configdir scs =
getSharedConvergenceSecret :: TahoeConfigDir -> IO SharedConvergenceSecret
getSharedConvergenceSecret configdir = go (60 :: Int)
where
- f = convergenceFile configdir
+ f = convergenceFile configdir
go n
| n == 0 = error $ "tahoe did not write " ++ f ++ " after 1 minute. Perhaps the daemon failed to start?"
| otherwise = do
@@ -190,7 +191,7 @@ startTahoeDaemon configdir = void $ boolTahoe configdir "start" []
withTahoeConfigDir :: TahoeHandle -> (TahoeConfigDir -> IO a) -> IO a
withTahoeConfigDir (TahoeHandle configdir v) a = go =<< atomically needsstart
where
- go True = do
+ go True = do
startTahoeDaemon configdir
a configdir
go False = a configdir
diff --git a/Remote/Web.hs b/Remote/Web.hs
index 04b453277..4d4b43c41 100644
--- a/Remote/Web.hs
+++ b/Remote/Web.hs
@@ -62,7 +62,8 @@ gen r _ c gc =
readonly = True,
availability = GloballyAvailable,
remotetype = remote,
- mkUnavailable = return Nothing
+ mkUnavailable = return Nothing,
+ getInfo = return []
}
downloadKey :: Key -> AssociatedFile -> FilePath -> MeterUpdate -> Annex Bool
@@ -120,7 +121,7 @@ checkKey' key us = firsthit us (Right False) $ \u -> do
Url.withUrlOptions $ catchMsgIO .
Url.checkBoth u' (keySize key)
where
- firsthit [] miss _ = return miss
+ firsthit [] miss _ = return miss
firsthit (u:rest) _ a = do
r <- a u
case r of
diff --git a/Remote/WebDAV.hs b/Remote/WebDAV.hs
index d427d67a9..932ed81e0 100644
--- a/Remote/WebDAV.hs
+++ b/Remote/WebDAV.hs
@@ -71,7 +71,9 @@ gen r u c gc = new <$> remoteCost gc expensiveRemoteCost
readonly = False,
availability = GloballyAvailable,
remotetype = remote,
- mkUnavailable = gen r u (M.insert "url" "http://!dne!/" c) gc
+ mkUnavailable = gen r u (M.insert "url" "http://!dne!/" c) gc,
+ getInfo = includeCredsInfo c (davCreds u) $
+ [("url", fromMaybe "unknown" (M.lookup "url" c))]
}
chunkconfig = getChunkConfig c
@@ -81,11 +83,11 @@ webdavSetup mu mcreds c = do
url <- case M.lookup "url" c of
Nothing -> error "Specify url="
Just url -> return url
- c' <- encryptionSetup c
+ (c', encsetup) <- encryptionSetup c
creds <- maybe (getCreds c' u) (return . Just) mcreds
testDav url creds
gitConfigSpecialRemote u c' "webdav" "true"
- c'' <- setRemoteCredPair c' (davCreds u) creds
+ c'' <- setRemoteCredPair encsetup c' (davCreds u) creds
return (c'', u)
-- Opens a http connection to the DAV server, which will be reused
diff --git a/RemoteDaemon/Transport/Ssh.hs b/RemoteDaemon/Transport/Ssh.hs
index db6b6127c..afedf559e 100644
--- a/RemoteDaemon/Transport/Ssh.hs
+++ b/RemoteDaemon/Transport/Ssh.hs
@@ -119,5 +119,5 @@ robustly backoff a = caught =<< catchDefaultIO ConnectionClosed a
| b2 > maxbackoff = maxbackoff
| otherwise = b2
where
- b2 = backoff * 2
+ b2 = backoff * 2
maxbackoff = 3600 -- one hour
diff --git a/RemoteDaemon/Types.hs b/RemoteDaemon/Types.hs
index 0a7269534..7413f5851 100644
--- a/RemoteDaemon/Types.hs
+++ b/RemoteDaemon/Types.hs
@@ -20,7 +20,7 @@ import Control.Concurrent
-- The URI of a remote is used to uniquely identify it (names change..)
newtype RemoteURI = RemoteURI URI
- deriving (Show)
+ deriving (Show)
-- A Transport for a particular git remote consumes some messages
-- from a Chan, and emits others to another Chan.
diff --git a/Test.hs b/Test.hs
index 825c01879..a0c56a3ab 100644
--- a/Test.hs
+++ b/Test.hs
@@ -73,8 +73,8 @@ import qualified Utility.Scheduled
import qualified Utility.HumanTime
import qualified Utility.ThreadScheduler
import qualified Command.Uninit
-#ifndef mingw32_HOST_OS
import qualified CmdLine.GitAnnex as GitAnnex
+#ifndef mingw32_HOST_OS
import qualified Remote.Helper.Encryptable
import qualified Types.Crypto
import qualified Utility.Gpg
@@ -120,9 +120,9 @@ main ps = do
error msg
v -> handleParseResult v
#else
- handleParseResult $ execParserPure pprefs pinfo args
+ handleParseResult $ execParserPure pprefs pinfo args
#endif
- progdesc = "git-annex test"
+ progdesc = "git-annex test"
ingredients :: [Ingredient]
ingredients =
@@ -822,7 +822,7 @@ test_conflict_resolution_movein_regression testenv = withtmpclonerepo testenv Fa
- be missing the content of the file that had
- been put in it. -}
forM_ [r1, r2] $ \r -> indir testenv r $ do
- git_annex testenv "get" [] @? "unable to get all files after merge conflict resolution in " ++ rname r
+ git_annex testenv "get" [] @? "unable to get all files after merge conflict resolution in " ++ rname r
{- Simple case of conflict resolution; 2 different versions of annexed
- file. -}
@@ -943,12 +943,12 @@ test_remove_conflict_resolution testenv = do
length v == 1
@? (what ++ " too many variant files in: " ++ show v)
- {- Check merge confalict resolution when a file is annexed in one repo,
- - and checked directly into git in the other repo.
- -
- - This test requires indirect mode to set it up, but tests both direct and
- - indirect mode.
- -}
+{- Check merge conflict resolution when a file is annexed in one repo,
+ - and checked directly into git in the other repo.
+ -
+ - This test requires indirect mode to set it up, but tests both direct and
+ - indirect mode.
+ -}
test_nonannexed_file_conflict_resolution :: TestEnv -> Assertion
test_nonannexed_file_conflict_resolution testenv = do
check True False
@@ -957,7 +957,7 @@ test_nonannexed_file_conflict_resolution testenv = do
check False True
where
check inr1 switchdirect = withtmpclonerepo testenv False $ \r1 ->
- withtmpclonerepo testenv False $ \r2 -> do
+ withtmpclonerepo testenv False $ \r2 ->
whenM (isInDirect r1 <&&> isInDirect r2) $ do
indir testenv r1 $ do
disconnectOrigin
@@ -1007,7 +1007,7 @@ test_nonannexed_symlink_conflict_resolution testenv = do
check False True
where
check inr1 switchdirect = withtmpclonerepo testenv False $ \r1 ->
- withtmpclonerepo testenv False $ \r2 -> do
+ withtmpclonerepo testenv False $ \r2 ->
whenM (checkRepo (Types.coreSymlinks <$> Annex.getGitConfig) r1
<&&> isInDirect r1 <&&> isInDirect r2) $ do
indir testenv r1 $ do
@@ -1094,9 +1094,9 @@ test_uncommitted_conflict_resolution testenv = do
- lost track of whether a file was a symlink.
-}
test_conflict_resolution_symlink_bit :: TestEnv -> Assertion
-test_conflict_resolution_symlink_bit testenv = do
+test_conflict_resolution_symlink_bit testenv =
withtmpclonerepo testenv False $ \r1 ->
- withtmpclonerepo testenv False $ \r2 -> do
+ withtmpclonerepo testenv False $ \r2 ->
withtmpclonerepo testenv False $ \r3 -> do
indir testenv r1 $ do
writeFile conflictor "conflictor"
@@ -1152,7 +1152,7 @@ test_uninit_inbranch testenv = intmpclonerepoInDirect testenv $ do
not <$> git_annex testenv "uninit" [] @? "uninit failed to fail when git-annex branch was checked out"
test_upgrade :: TestEnv -> Assertion
-test_upgrade testenv = intmpclonerepo testenv $ do
+test_upgrade testenv = intmpclonerepo testenv $
git_annex testenv "upgrade" [] @? "upgrade from same version failed"
test_whereis :: TestEnv -> Assertion
@@ -1346,7 +1346,6 @@ test_add_subdirs testenv = intmpclonerepo testenv $ do
-- (when the OS allows) so test coverage collection works.
git_annex :: TestEnv -> String -> [String] -> IO Bool
git_annex testenv command params = do
-#ifndef mingw32_HOST_OS
forM_ (M.toList testenv) $ \(var, val) ->
Utility.Env.setEnv var val True
@@ -1357,11 +1356,6 @@ git_annex testenv command params = do
Left _ -> return False
where
run = GitAnnex.run (command:"-q":params)
-#else
- Utility.SafeCommand.boolSystemEnv "git-annex"
- (map Param $ command : params)
- (Just $ M.toList testenv)
-#endif
{- Runs git-annex and returns its output. -}
git_annex_output :: TestEnv -> String -> [String] -> IO String
@@ -1404,7 +1398,7 @@ intmpclonerepoInDirect testenv a = intmpclonerepo testenv $
, a
)
where
- isdirect = annexeval $ do
+ isdirect = annexeval $ do
Annex.Init.initialize Nothing
Config.isDirect
diff --git a/Types/Crypto.hs b/Types/Crypto.hs
index 1a9a7774a..48d03ce12 100644
--- a/Types/Crypto.hs
+++ b/Types/Crypto.hs
@@ -59,10 +59,10 @@ readMac "HMACSHA512" = Just HmacSha512
readMac _ = Nothing
calcMac
- :: Mac -- ^ MAC
- -> L.ByteString -- ^ secret key
- -> L.ByteString -- ^ message
- -> String -- ^ MAC'ed message, in hexadecimals
+ :: Mac -- ^ MAC
+ -> L.ByteString -- ^ secret key
+ -> L.ByteString -- ^ message
+ -> String -- ^ MAC'ed message, in hexadecimal
calcMac mac = case mac of
HmacSha1 -> showDigest $* hmacSha1
HmacSha224 -> showDigest $* hmacSha224
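For reference, a hedged usage sketch of calcMac with the clarified signature
(string literals packed with Data.ByteString.Lazy.Char8; purely illustrative):

	import qualified Data.ByteString.Lazy.Char8 as L8

	-- HMAC-SHA1 of "message" keyed with "secret", rendered as lowercase hex.
	example :: String
	example = calcMac HmacSha1 (L8.pack "secret") (L8.pack "message")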
diff --git a/Types/Key.hs b/Types/Key.hs
index 5bb41e15f..da9ff494a 100644
--- a/Types/Key.hs
+++ b/Types/Key.hs
@@ -133,7 +133,7 @@ prop_idempotent_key_decode f
| normalfieldorder = maybe True (\k -> key2file k == f) (file2key f)
| otherwise = True
where
- -- file2key will accept the fields in any order, so don't
+ -- file2key will accept the fields in any order, so don't
-- try the test unless the fields are in the normal order
normalfieldorder = fields `isPrefixOf` "smSC"
fields = map (f !!) $ filter (< length f) $ map succ $
diff --git a/Types/MetaData.hs b/Types/MetaData.hs
index 8df56734d..f19e0b439 100644
--- a/Types/MetaData.hs
+++ b/Types/MetaData.hs
@@ -290,4 +290,4 @@ prop_metadata_serialize f v m = and
, deserialize (serialize m') == Just m'
]
where
- m' = removeEmptyFields m
+ m' = removeEmptyFields m
diff --git a/Types/Remote.hs b/Types/Remote.hs
index e166d7090..795121763 100644
--- a/Types/Remote.hs
+++ b/Types/Remote.hs
@@ -98,7 +98,9 @@ data RemoteA a = Remote {
remotetype :: RemoteTypeA a,
-- For testing, makes a version of this remote that is not
-- available for use. All its actions should fail.
- mkUnavailable :: a (Maybe (RemoteA a))
+ mkUnavailable :: a (Maybe (RemoteA a)),
+ -- Information about the remote, for git annex info to display.
+ getInfo :: a [(String, String)]
}
instance Show (RemoteA a) where
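A hedged sketch of consuming the new getInfo field; printRemoteInfo below is
hypothetical (the real consumer is git annex info), and works for any monad
with a MonadIO instance, such as git-annex's Annex monad:

	import Control.Monad (forM_)
	import Control.Monad.IO.Class (MonadIO, liftIO)

	printRemoteInfo :: MonadIO m => RemoteA m -> m ()
	printRemoteInfo r = do
		l <- getInfo r
		liftIO $ forM_ l $ \(field, val) ->
			putStrLn ("\t" ++ field ++ ": " ++ val)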
diff --git a/Types/ScheduledActivity.hs b/Types/ScheduledActivity.hs
index b683409ce..5cdbe29e8 100644
--- a/Types/ScheduledActivity.hs
+++ b/Types/ScheduledActivity.hs
@@ -17,7 +17,7 @@ import Data.Either
data ScheduledActivity
= ScheduledSelfFsck Schedule Duration
| ScheduledRemoteFsck UUID Schedule Duration
- deriving (Eq, Read, Show, Ord)
+ deriving (Eq, Read, Show, Ord)
{- Activities that run on a remote, within a time window, so
- should be run when the remote gets connected. -}
diff --git a/Types/StandardGroups.hs b/Types/StandardGroups.hs
index 37ba6e9c6..66c1dd5ef 100644
--- a/Types/StandardGroups.hs
+++ b/Types/StandardGroups.hs
@@ -96,7 +96,7 @@ standardPreferredContent UnwantedGroup = "exclude=*"
notArchived :: String
notArchived = "not (copies=archive:1 or copies=smallarchive:1)"
-
+
{- Most repositories want any content that is only on untrusted
- or dead repositories, or that otherwise does not have enough copies.
- Does not look at .gitattributes since that is quite a lot slower.
diff --git a/Types/TrustLevel.hs b/Types/TrustLevel.hs
index a72dbb8c6..4af71294a 100644
--- a/Types/TrustLevel.hs
+++ b/Types/TrustLevel.hs
@@ -14,6 +14,7 @@ module Types.TrustLevel (
) where
import qualified Data.Map as M
+import Data.Default
import Types.UUID
@@ -22,6 +23,9 @@ import Types.UUID
data TrustLevel = Trusted | SemiTrusted | UnTrusted | DeadTrusted
deriving (Eq, Enum, Ord, Bounded)
+instance Default TrustLevel where
+ def = SemiTrusted
+
type TrustMap = M.Map UUID TrustLevel
readTrustLevel :: String -> Maybe TrustLevel
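A hedged sketch of what the new Default instance enables: a TrustMap lookup
that falls back to SemiTrusted for unknown repositories (lookupTrust here is
illustrative, not necessarily the name used elsewhere in git-annex):

	import Data.Default (def)
	import qualified Data.Map as M

	lookupTrust :: TrustMap -> UUID -> TrustLevel
	lookupTrust m u = M.findWithDefault def u m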
diff --git a/Utility/Batch.hs b/Utility/Batch.hs
index d6dadae67..ff81318fb 100644
--- a/Utility/Batch.hs
+++ b/Utility/Batch.hs
@@ -32,7 +32,7 @@ batch :: IO a -> IO a
#if defined(linux_HOST_OS) || defined(__ANDROID__)
batch a = wait =<< batchthread
where
- batchthread = asyncBound $ do
+ batchthread = asyncBound $ do
setProcessPriority 0 maxNice
a
#else
diff --git a/Utility/CoProcess.hs b/Utility/CoProcess.hs
index 332c09d49..97826ec1e 100644
--- a/Utility/CoProcess.hs
+++ b/Utility/CoProcess.hs
@@ -65,7 +65,7 @@ query ch send receive = do
restartable s (receive $ coProcessFrom s)
return
where
- restartable s a cont
+ restartable s a cont
| coProcessNumRestarts (coProcessSpec s) > 0 =
maybe restart cont =<< catchMaybeIO a
| otherwise = cont =<< a
@@ -87,7 +87,7 @@ rawMode ch = do
raw $ coProcessTo s
return ch
where
- raw h = do
+ raw h = do
fileEncoding h
#ifdef mingw32_HOST_OS
hSetNewlineMode h noNewlineTranslation
diff --git a/Utility/CopyFile.hs b/Utility/CopyFile.hs
index 6601d0a80..503ab842a 100644
--- a/Utility/CopyFile.hs
+++ b/Utility/CopyFile.hs
@@ -47,10 +47,10 @@ createLinkOrCopy :: FilePath -> FilePath -> IO Bool
#ifndef mingw32_HOST_OS
createLinkOrCopy src dest = go `catchIO` const fallback
where
- go = do
+ go = do
createLink src dest
return True
- fallback = copyFileExternal CopyAllMetaData src dest
+ fallback = copyFileExternal CopyAllMetaData src dest
#else
createLinkOrCopy = copyFileExternal CopyAllMetaData
#endif
diff --git a/Utility/Daemon.hs b/Utility/Daemon.hs
index 2f0f84179..0615149e5 100644
--- a/Utility/Daemon.hs
+++ b/Utility/Daemon.hs
@@ -175,7 +175,7 @@ winLockFile pid pidfile = do
cleanstale
return $ prefix ++ show pid ++ suffix
where
- prefix = pidfile ++ "."
+ prefix = pidfile ++ "."
suffix = ".lck"
cleanstale = mapM_ (void . tryIO . removeFile) =<<
(filter iswinlockfile <$> dirContents (parentDir pidfile))
diff --git a/Utility/DataUnits.hs b/Utility/DataUnits.hs
index 7575af21f..e035b2f86 100644
--- a/Utility/DataUnits.hs
+++ b/Utility/DataUnits.hs
@@ -120,7 +120,7 @@ roughSize units short i
showUnit x (Unit size abbrev name) = s ++ " " ++ unit
where
- v = (fromInteger x :: Double) / fromInteger size
+ v = (fromInteger x :: Double) / fromInteger size
s = showImprecise 2 v
unit
| short = abbrev
diff --git a/Utility/Directory.hs b/Utility/Directory.hs
index a4429d5b9..e4e4b80a7 100644
--- a/Utility/Directory.hs
+++ b/Utility/Directory.hs
@@ -56,7 +56,7 @@ dirContentsRecursive = dirContentsRecursiveSkipping (const False) True
dirContentsRecursiveSkipping :: (FilePath -> Bool) -> Bool -> FilePath -> IO [FilePath]
dirContentsRecursiveSkipping skipdir followsubdirsymlinks topdir = go [topdir]
where
- go [] = return []
+ go [] = return []
go (dir:dirs)
| skipdir (takeFileName dir) = go dirs
| otherwise = unsafeInterleaveIO $ do
@@ -87,7 +87,7 @@ dirContentsRecursiveSkipping skipdir followsubdirsymlinks topdir = go [topdir]
dirTreeRecursiveSkipping :: (FilePath -> Bool) -> FilePath -> IO [FilePath]
dirTreeRecursiveSkipping skipdir topdir = go [] [topdir]
where
- go c [] = return c
+ go c [] = return c
go c (dir:dirs)
| skipdir (takeFileName dir) = go c dirs
| otherwise = unsafeInterleaveIO $ do
diff --git a/Utility/Env.hs b/Utility/Env.hs
index 6763c24e1..ff6644fbf 100644
--- a/Utility/Env.hs
+++ b/Utility/Env.hs
@@ -14,6 +14,7 @@ import Utility.Exception
import Control.Applicative
import Data.Maybe
import qualified System.Environment as E
+import qualified System.SetEnv
#else
import qualified System.Posix.Env as PE
#endif
@@ -39,27 +40,27 @@ getEnvironment = PE.getEnvironment
getEnvironment = E.getEnvironment
#endif
-{- Returns True if it could successfully set the environment variable.
+{- Sets an environment variable. To overwrite an existing variable,
+ - overwrite must be True.
-
- - There is, apparently, no way to do this in Windows. Instead,
- - environment varuables must be provided when running a new process. -}
-setEnv :: String -> String -> Bool -> IO Bool
+ - On Windows, setting a variable to "" unsets it. -}
+setEnv :: String -> String -> Bool -> IO ()
#ifndef mingw32_HOST_OS
-setEnv var val overwrite = do
- PE.setEnv var val overwrite
- return True
+setEnv var val overwrite = PE.setEnv var val overwrite
#else
-setEnv _ _ _ = return False
+setEnv var val True = System.SetEnv.setEnv var val
+setEnv var val False = do
+ r <- getEnv var
+ case r of
+ Nothing -> setEnv var val True
+ Just _ -> return ()
#endif
-{- Returns True if it could successfully unset the environment variable. -}
-unsetEnv :: String -> IO Bool
+unsetEnv :: String -> IO ()
#ifndef mingw32_HOST_OS
-unsetEnv var = do
- PE.unsetEnv var
- return True
+unsetEnv = PE.unsetEnv
#else
-unsetEnv _ = return False
+unsetEnv = System.SetEnv.unsetEnv
#endif
{- Adds the environment variable to the input environment. If already
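The signature change from IO Bool to IO () ripples into callers: wrappers like
void $ setEnv ... can drop the void, as the Utility.Gpg and Utility.Lsof hunks
above already do. A hedged usage sketch of the new API (variable names are
illustrative):

	import Utility.Env (setEnv, unsetEnv)

	example :: IO ()
	example = do
		-- Overwrite any existing value.
		setEnv "GIT_ANNEX_TEST" "1" True
		-- Only set when not already present.
		setEnv "TMPDIR" "/tmp" False
		unsetEnv "GIT_ANNEX_TEST"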
diff --git a/Utility/ExternalSHA.hs b/Utility/ExternalSHA.hs
index 595acd8cf..858d04e6a 100644
--- a/Utility/ExternalSHA.hs
+++ b/Utility/ExternalSHA.hs
@@ -57,7 +57,7 @@ externalSHA command shasize file = do
Left $ "Unexpected character in output of " ++ command ++ "\"" ++ sha ++ "\""
| otherwise = Right sha'
where
- sha' = map toLower sha
+ sha' = map toLower sha
expectedSHALength :: Int -> Int
expectedSHALength 1 = 40
diff --git a/Utility/FileSystemEncoding.hs b/Utility/FileSystemEncoding.hs
index b81fdc532..fa4b39aa3 100644
--- a/Utility/FileSystemEncoding.hs
+++ b/Utility/FileSystemEncoding.hs
@@ -111,7 +111,7 @@ truncateFilePath :: Int -> FilePath -> FilePath
#ifndef mingw32_HOST_OS
truncateFilePath n = go . reverse
where
- go f =
+ go f =
let bytes = decodeW8 f
in if length bytes <= n
then reverse f
diff --git a/Utility/Format.hs b/Utility/Format.hs
index 2a5ae5c34..78620f9b9 100644
--- a/Utility/Format.hs
+++ b/Utility/Format.hs
@@ -117,7 +117,7 @@ decode_c s = unescape ("", s)
handle (x:'x':n1:n2:rest)
| isescape x && allhex = (fromhex, rest)
where
- allhex = isHexDigit n1 && isHexDigit n2
+ allhex = isHexDigit n1 && isHexDigit n2
fromhex = [chr $ readhex [n1, n2]]
readhex h = Prelude.read $ "0x" ++ h :: Int
handle (x:n1:n2:n3:rest)
diff --git a/Utility/Gpg.hs b/Utility/Gpg.hs
index f9b60f276..50f78a1de 100644
--- a/Utility/Gpg.hs
+++ b/Utility/Gpg.hs
@@ -166,7 +166,7 @@ secretKeys :: IO (M.Map KeyId UserId)
secretKeys = catchDefaultIO M.empty makemap
where
makemap = M.fromList . parse . lines <$> readStrict params
- params = [Params "--with-colons --list-secret-keys --fixed-list-mode"]
+ params = [Params "--with-colons --list-secret-keys --fixed-list-mode"]
parse = extract [] Nothing . map (split ":")
extract c (Just keyid) (("uid":_:_:_:_:_:_:_:_:userid:_):rest) =
extract ((keyid, decode_c userid):c) Nothing rest
@@ -196,7 +196,7 @@ genSecretKey keytype passphrase userid keysize =
withHandle StdinHandle createProcessSuccess (proc gpgcmd params) feeder
where
params = ["--batch", "--gen-key"]
- feeder h = do
+ feeder h = do
hPutStr h $ unlines $ catMaybes
[ Just $ "Key-Type: " ++
case keytype of
@@ -232,7 +232,7 @@ genRandom highQuality size = checksize <$> readStrict
randomquality :: Int
randomquality = if highQuality then 2 else 1
- {- The size is the number of bytes of entropy desired; the data is
+ {- The size is the number of bytes of entropy desired; the data is
- base64 encoded, so needs 8 bits to represent every 6 bits of
- entropy. -}
expectedlength = size * 8 `div` 6
@@ -334,7 +334,7 @@ testHarness a = do
setup = do
base <- getTemporaryDirectory
dir <- mktmpdir $ base </> "gpgtmpXXXXXX"
- void $ setEnv var dir True
+ setEnv var dir True
-- For some reason, recent gpg needs a trustdb to be set up.
_ <- pipeStrict [Params "--trust-model auto --update-trustdb"] []
_ <- pipeStrict [Params "--import -q"] $ unlines
diff --git a/Utility/HumanTime.hs b/Utility/HumanTime.hs
index 2aef1b09c..3c23f31f7 100644
--- a/Utility/HumanTime.hs
+++ b/Utility/HumanTime.hs
@@ -27,7 +27,7 @@ import Control.Applicative
import qualified Data.Map as M
newtype Duration = Duration { durationSeconds :: Integer }
- deriving (Eq, Ord, Read, Show)
+ deriving (Eq, Ord, Read, Show)
durationSince :: UTCTime -> IO Duration
durationSince pasttime = do
@@ -47,8 +47,8 @@ daysToDuration i = Duration $ i * dsecs
parseDuration :: String -> Maybe Duration
parseDuration = Duration <$$> go 0
where
- go n [] = return n
- go n s = do
+ go n [] = return n
+ go n s = do
num <- readish s :: Maybe Integer
case dropWhile isDigit s of
(c:rest) -> do
diff --git a/Utility/InodeCache.hs b/Utility/InodeCache.hs
index 91359457a..328b77595 100644
--- a/Utility/InodeCache.hs
+++ b/Utility/InodeCache.hs
@@ -182,7 +182,7 @@ checkSentinalFile s = do
SentinalStatus (not unchanged) tsdelta
where
#ifdef mingw32_HOST_OS
- unchanged = oldinode == newinode && oldsize == newsize
+ unchanged = oldinode == newinode && oldsize == newsize
tsdelta = TSDelta $ do
-- Run when generating an InodeCache,
-- to get the current delta.
diff --git a/Utility/Lsof.hs b/Utility/Lsof.hs
index ee4036b16..e44d13197 100644
--- a/Utility/Lsof.hs
+++ b/Utility/Lsof.hs
@@ -32,7 +32,7 @@ setup = do
when (isAbsolute cmd) $ do
path <- getSearchPath
let path' = takeDirectory cmd : path
- void $ setEnv "PATH" (intercalate [searchPathSeparator] path') True
+ setEnv "PATH" (intercalate [searchPathSeparator] path') True
{- Checks each of the files in a directory to find open files.
- Note that this will find hard links to files elsewhere that are open. -}
diff --git a/Utility/Matcher.hs b/Utility/Matcher.hs
index 76f8903f5..3356bdd07 100644
--- a/Utility/Matcher.hs
+++ b/Utility/Matcher.hs
@@ -90,7 +90,7 @@ tokenGroups :: [Token op] -> [TokenGroup op]
tokenGroups [] = []
tokenGroups (t:ts) = go t
where
- go Open =
+ go Open =
let (gr, rest) = findClose ts
in gr : tokenGroups rest
go Close = tokenGroups ts -- not picky about missing Close
@@ -101,7 +101,7 @@ findClose l =
let (g, rest) = go [] l
in (Group (reverse g), rest)
where
- go c [] = (c, []) -- not picky about extra Close
+ go c [] = (c, []) -- not picky about extra Close
go c (t:ts) = dispatch t
where
dispatch Close = (c, ts)
diff --git a/Utility/Path.hs b/Utility/Path.hs
index 99c9438bf..9035cbc49 100644
--- a/Utility/Path.hs
+++ b/Utility/Path.hs
@@ -235,11 +235,11 @@ toCygPath p
| null drive = recombine parts
| otherwise = recombine $ "/cygdrive" : driveletter drive : parts
where
- (drive, p') = splitDrive p
+ (drive, p') = splitDrive p
parts = splitDirectories p'
- driveletter = map toLower . takeWhile (/= ':')
+ driveletter = map toLower . takeWhile (/= ':')
recombine = fixtrailing . Posix.joinPath
- fixtrailing s
+ fixtrailing s
| hasTrailingPathSeparator p = Posix.addTrailingPathSeparator s
| otherwise = s
#endif
@@ -272,7 +272,7 @@ fileNameLengthLimit dir = do
sanitizeFilePath :: String -> FilePath
sanitizeFilePath = map sanitize
where
- sanitize c
+ sanitize c
| c == '.' = c
| isSpace c || isPunctuation c || isSymbol c || isControl c || c == '/' = '_'
| otherwise = c
diff --git a/Utility/Quvi.hs b/Utility/Quvi.hs
index 228ff7809..cf3a23cfd 100644
--- a/Utility/Quvi.hs
+++ b/Utility/Quvi.hs
@@ -113,7 +113,7 @@ supported Quvi04 url = boolSystem "quvi"
supported Quvi09 url = (firstlevel <&&> secondlevel)
`catchNonAsync` (\_ -> return False)
where
- firstlevel = case uriAuthority =<< parseURIRelaxed url of
+ firstlevel = case uriAuthority =<< parseURIRelaxed url of
Nothing -> return False
Just auth -> do
let domain = map toLower $ uriRegName auth
diff --git a/Utility/Rsync.hs b/Utility/Rsync.hs
index d0a89b2b0..8dee6093c 100644
--- a/Utility/Rsync.hs
+++ b/Utility/Rsync.hs
@@ -57,7 +57,7 @@ rsync = boolSystem "rsync" . rsyncParamsFixup
rsyncParamsFixup :: [CommandParam] -> [CommandParam]
rsyncParamsFixup = map fixup
where
- fixup (File f) = File (toCygPath f)
+ fixup (File f) = File (toCygPath f)
fixup p = p
{- Runs rsync, but intercepts its progress output and updates a meter.
diff --git a/Utility/SRV.hs b/Utility/SRV.hs
index f1671758e..1b86aeb76 100644
--- a/Utility/SRV.hs
+++ b/Utility/SRV.hs
@@ -74,7 +74,7 @@ lookupSRV (SRV srv) = do
maybe [] use r
#endif
where
- use = orderHosts . map tohosts
+ use = orderHosts . map tohosts
tohosts (priority, weight, port, hostname) =
( (priority, weight)
, (B8.toString hostname, PortNumber $ fromIntegral port)
diff --git a/Utility/Scheduled.hs b/Utility/Scheduled.hs
index 305410c54..4fa3a29f1 100644
--- a/Utility/Scheduled.hs
+++ b/Utility/Scheduled.hs
@@ -44,7 +44,7 @@ import Data.Char
{- Some sort of scheduled event. -}
data Schedule = Schedule Recurrance ScheduledTime
- deriving (Eq, Read, Show, Ord)
+ deriving (Eq, Read, Show, Ord)
data Recurrance
= Daily
@@ -54,7 +54,7 @@ data Recurrance
| Divisible Int Recurrance
-- ^ Days, Weeks, or Months of the year evenly divisible by a number.
-- (Divisible Year is years evenly divisible by a number.)
- deriving (Eq, Read, Show, Ord)
+ deriving (Eq, Read, Show, Ord)
type WeekDay = Int
type MonthDay = Int
@@ -63,7 +63,7 @@ type YearDay = Int
data ScheduledTime
= AnyTime
| SpecificTime Hour Minute
- deriving (Eq, Read, Show, Ord)
+ deriving (Eq, Read, Show, Ord)
type Hour = Int
type Minute = Int
@@ -73,7 +73,7 @@ type Minute = Int
data NextTime
= NextTimeExactly LocalTime
| NextTimeWindow LocalTime LocalTime
- deriving (Eq, Read, Show)
+ deriving (Eq, Read, Show)
startTime :: NextTime -> LocalTime
startTime (NextTimeExactly t) = t
@@ -96,9 +96,9 @@ calcNextTime schedule@(Schedule recurrance scheduledtime) lasttime currenttime
NextTimeExactly t -> window (localDay t) (localDay t)
| otherwise = NextTimeExactly . startTime <$> findfromtoday False
where
- findfromtoday anytime = findfrom recurrance afterday today
+ findfromtoday anytime = findfrom recurrance afterday today
where
- today = localDay currenttime
+ today = localDay currenttime
afterday = sameaslastrun || toolatetoday
toolatetoday = not anytime && localTimeOfDay currenttime >= nexttime
sameaslastrun = lastrun == Just today
@@ -163,8 +163,8 @@ calcNextTime schedule@(Schedule recurrance scheduledtime) lasttime currenttime
Divisible n r'@(Yearly _) -> handlediv n r' ynum Nothing
Divisible _ r'@(Divisible _ _) -> findfrom r' afterday candidate
where
- skip n = findfrom r False (addDays n candidate)
- handlediv n r' getval mmax
+ skip n = findfrom r False (addDays n candidate)
+ handlediv n r' getval mmax
| n > 0 && maybe True (n <=) mmax =
findfromwhere r' (divisible n . getval) afterday candidate
| otherwise = Nothing
@@ -267,7 +267,7 @@ toRecurrance s = case words s of
constructor u
| "s" `isSuffixOf` u = constructor $ reverse $ drop 1 $ reverse u
| otherwise = Nothing
- withday sd u = do
+ withday sd u = do
c <- constructor u
d <- readish sd
Just $ c (Just d)
@@ -285,7 +285,7 @@ fromScheduledTime AnyTime = "any time"
fromScheduledTime (SpecificTime h m) =
show h' ++ (if m > 0 then ":" ++ pad 2 (show m) else "") ++ " " ++ ampm
where
- pad n s = take (n - length s) (repeat '0') ++ s
+ pad n s = take (n - length s) (repeat '0') ++ s
(h', ampm)
| h == 0 = (12, "AM")
| h < 12 = (h, "AM")
@@ -304,10 +304,10 @@ toScheduledTime v = case words v of
(s:[]) -> go s id
_ -> Nothing
where
- h0 h
+ h0 h
| h == 12 = 0
| otherwise = h
- go :: String -> (Int -> Int) -> Maybe ScheduledTime
+ go :: String -> (Int -> Int) -> Maybe ScheduledTime
go s adjust =
let (h, m) = separate (== ':') s
in SpecificTime
@@ -363,7 +363,7 @@ instance Arbitrary Recurrance where
]
]
where
- arbday = oneof
+ arbday = oneof
[ Just <$> nonNegative arbitrary
, pure Nothing
]
diff --git a/Utility/SshConfig.hs b/Utility/SshConfig.hs
index 529e5c990..e45d09acd 100644
--- a/Utility/SshConfig.hs
+++ b/Utility/SshConfig.hs
@@ -56,7 +56,7 @@ parseSshConfig = go [] . lines
| iscomment l = hoststanza host c ((Left $ mkcomment l):hc) ls
| otherwise = case splitline l of
(indent, k, v)
- | isHost k -> hoststanza v
+ | isHost k -> hoststanza v
(HostConfig host (reverse hc):c) [] ls
| otherwise -> hoststanza host c
((Right $ SshSetting indent k v):hc) ls
@@ -87,7 +87,7 @@ genSshConfig = unlines . concatMap gen
findHostConfigKey :: SshConfig -> Key -> Maybe Value
findHostConfigKey (HostConfig _ cs) wantk = go (rights cs) (map toLower wantk)
where
- go [] _ = Nothing
+ go [] _ = Nothing
go ((SshSetting _ k v):rest) wantk'
| map toLower k == wantk' = Just v
| otherwise = go rest wantk'
@@ -98,7 +98,7 @@ addToHostConfig :: SshConfig -> Key -> Value -> SshConfig
addToHostConfig (HostConfig host cs) k v =
HostConfig host $ Right (SshSetting indent k v) : cs
where
- {- The indent is taken from any existing SshSetting
+ {- The indent is taken from any existing SshSetting
- in the HostConfig (largest indent wins). -}
indent = fromMaybe "\t" $ headMaybe $ reverse $
sortBy (comparing length) $ map getindent cs
diff --git a/Utility/TList.hs b/Utility/TList.hs
index 4b91b767f..5532cdce5 100644
--- a/Utility/TList.hs
+++ b/Utility/TList.hs
@@ -57,7 +57,7 @@ modifyTList tlist a = do
unless (emptyDList dl') $
putTMVar tlist dl'
where
- emptyDList = D.list True (\_ _ -> False)
+ emptyDList = D.list True (\_ _ -> False)
consTList :: TList a -> a -> STM ()
consTList tlist v = modifyTList tlist $ \dl -> D.cons v dl
diff --git a/Utility/WebApp.hs b/Utility/WebApp.hs
index 6bcfce919..6c42e103b 100644
--- a/Utility/WebApp.hs
+++ b/Utility/WebApp.hs
@@ -117,7 +117,7 @@ getSocket h = do
when (isJust h) $
error "getSocket with HostName not supported on this OS"
addr <- inet_addr "127.0.0.1"
- sock <- socket AF_INET Stream defaultProtocol
+ sock <- socket AF_INET Stream defaultProtocol
preparesocket sock
bindSocket sock (SockAddrInet aNY_PORT addr)
use sock
diff --git a/Utility/Yesod.hs b/Utility/Yesod.hs
index 6d38ba4ed..afe10a111 100644
--- a/Utility/Yesod.hs
+++ b/Utility/Yesod.hs
@@ -28,7 +28,11 @@ import Yesod as Y
#else
import Yesod as Y hiding (Html)
#endif
+#if MIN_VERSION_yesod_form(1,3,8)
+import Yesod.Form.Bootstrap3 as Y hiding (bfs)
+#else
import Assistant.WebApp.Bootstrap3 as Y hiding (bfs)
+#endif
#ifndef __NO_TH__
import Yesod.Default.Util
import Language.Haskell.TH.Syntax (Q, Exp)
diff --git a/debian/cabal-wrapper b/debian/cabal-wrapper
new file mode 100755
index 000000000..1326aead7
--- /dev/null
+++ b/debian/cabal-wrapper
@@ -0,0 +1,20 @@
+#!/bin/sh
+# It would be more usual to use:
+# export CABAL=./Setup
+# But Setup currently has a very bad dependency resolver, and very bad
+# debugging output, and tends to eat all memory and die on small buildds.
+#
+# This should be revisited once Debian has a newer ghc than 7.6.3,
+# and hopefully gets the improved dependency resolver from cabal.
+set -e
+
+# Avoid cabal writing to HOME, and avoid local cabal settings
+# influencing the build.
+HOME=$(mktemp -d)
+export HOME
+
+# Temporary workaround for #763078
+PATH=/usr/lib/llvm-3.4/bin:$PATH
+export PATH
+
+cabal "$@"
diff --git a/debian/changelog b/debian/changelog
index d0bff041d..c658ce186 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,5 +1,62 @@
-git-annex (5.20140916) UNRELEASED; urgency=medium
-
+git-annex (5.20141014) UNRELEASED; urgency=medium
+
+ * vicfg: Deleting configurations now resets to the default, where
+ before it had no effect.
+ * Remove hurd stuff from cabal file, since hackage currently rejects
+ it, and the test suite fails on hurd.
+ * initremote: Don't allow creating a special remote that has the same
+ name as an existing git remote.
+ * Windows: Use haskell setenv library to clean up several ugly workarounds
+ for inability to manipulate the environment on windows. This includes
+ making git-annex not re-exec itself on start on windows, and making the
+ test suite on Windows run tests without forking.
+ * glacier: Fix pipe setup when calling glacier-cli to retrieve an object.
+ * info: When run on a single annexed file, displays some info about the
+ file, including its key and size.
+ * info: When passed the name or uuid of a remote, displays info about that
+ remote. Remotes that support encryption, chunking, or embedded
+ creds will include that in their info.
+ * enableremote: When the remote has creds, update the local creds cache
+ file. Before, the old version of the creds could be left there, and
+ would continue to be used.
+
+ -- Joey Hess <joeyh@debian.org> Tue, 14 Oct 2014 14:09:24 -0400
+
+git-annex (5.20141013) unstable; urgency=medium
+
+ * Adjust cabal file to support building w/o assistant on the hurd.
+ * Support building with yesod 1.4.
+ * S3: Fix embedcreds=yes handling for the Internet Archive.
+ * map: Handle .git prefixed remote repos. Closes: #614759
+ * repair: Prevent auto gc from happening when fetching from a remote.
+
+ -- Joey Hess <joeyh@debian.org> Mon, 13 Oct 2014 10:13:06 -0400
+
+git-annex (5.20140927) unstable; urgency=medium
+
+ * Really depend (not just build-depend) on new enough git for --no-gpg-sign
+ to work. Closes: #763057
+ * Add temporary workaround for bug #763078 which broke building on armel
+ and armhf.
+
+ -- Joey Hess <joeyh@debian.org> Sat, 27 Sep 2014 14:25:09 -0400
+
+git-annex (5.20140926) unstable; urgency=high
+
+ * Depend on new enough git for --no-gpg-sign to work. Closes: #762446
+ * Work around failure to build on mips by using cabal, not Setup,
+ to build in debian/rules.
+
+ -- Joey Hess <joeyh@debian.org> Fri, 26 Sep 2014 15:09:02 -0400
+
+git-annex (5.20140919) unstable; urgency=high
+
+ * Security fix for S3 and glacier when using embedcreds=yes with
+ encryption=pubkey or encryption=hybrid. CVE-2014-6274
+ The creds embedded in the git repo were *not* encrypted.
+ git-annex enableremote will warn when used on a remote that has
+ this problem. For details, see:
+ https://git-annex.branchable.com/upgrades/insecure_embedded_creds/
* assistant: Detect when repository has been deleted or moved, and
automatically shut down the assistant. Closes: #761261
* Windows: Avoid crashing trying to list gpg secret keys, for gcrypt
@@ -9,7 +66,7 @@ git-annex (5.20140916) UNRELEASED; urgency=medium
* add: In direct mode, adding an annex symlink will check it into git,
as was already done in indirect mode.
- -- Joey Hess <joeyh@debian.org> Mon, 15 Sep 2014 14:39:17 -0400
+ -- Joey Hess <joeyh@debian.org> Fri, 19 Sep 2014 12:53:42 -0400
git-annex (5.20140915) unstable; urgency=medium
diff --git a/debian/control b/debian/control
index fdd9e00be..659ec05fa 100644
--- a/debian/control
+++ b/debian/control
@@ -4,6 +4,7 @@ Priority: optional
Build-Depends:
debhelper (>= 9),
ghc (>= 7.4),
+ cabal-install,
libghc-mtl-dev (>= 2.1.1),
libghc-missingh-dev,
libghc-data-default-dev,
@@ -69,12 +70,13 @@ Build-Depends:
lsof [!kfreebsd-i386 !kfreebsd-amd64 !hurd-any],
ikiwiki,
perlmagick,
- git (>= 1:1.8.4),
+ git (>= 1:2.0),
rsync,
wget,
curl,
openssh-client,
git-remote-gcrypt (>= 0.20130908-6),
+ llvm-3.4 [armel armhf],
Maintainer: Joey Hess <joeyh@debian.org>
Standards-Version: 3.9.5
Vcs-Git: git://git.kitenet.net/git-annex
@@ -85,7 +87,7 @@ Package: git-annex
Architecture: any
Section: utils
Depends: ${misc:Depends}, ${shlibs:Depends},
- git (>= 1:1.8.4),
+ git (>= 1:2.0),
rsync,
wget,
curl,
@@ -108,10 +110,15 @@ Description: manage files with git, without checking their contents into git
dealing with files larger than git can currently easily handle, whether due
to limitations in memory, time, or disk space.
.
- Even without file content tracking, being able to manage files with git,
- move files around and delete files with versioned directory trees, and use
- branches and distributed clones, are all very handy reasons to use git. And
- annexed files can co-exist in the same git repository with regularly
- versioned files, which is convenient for maintaining documents, Makefiles,
- etc that are associated with annexed files but that benefit from full
- revision control.
+ It can store large files in many places, from local hard drives, to a
+ large number of cloud storage services, including S3, WebDAV,
+ and rsync, with a dozen cloud storage providers usable via plugins.
+ Files can be stored encrypted with gpg, so that the cloud storage
+ provider cannot see your data. git-annex keeps track of where each file
+ is stored, so it knows how many copies are available, and has many
+ facilities to ensure your data is preserved.
+ .
+ git-annex can also be used to keep a folder in sync between computers,
+ noticing when files are changed, and automatically committing them
+ to git and transferring them to other computers. The git-annex webapp
+ makes it easy to set up and use git-annex this way.
diff --git a/debian/copyright b/debian/copyright
index 19d61f228..84aec847f 100644
--- a/debian/copyright
+++ b/debian/copyright
@@ -28,6 +28,10 @@ Files: Utility/Gpg.hs Utility/DirWatcher*
Copyright: © 2010-2014 Joey Hess <joey@kitenet.net>
License: GPL-3+
+Files: Assistant/WebApp/Bootstrap3.hs
+Copyright: 2010 Michael Snoyman
+License: BSD-2-clause
+
Files: doc/logo* */favicon.ico standalone/osx/git-annex.app/Contents/Resources/git-annex.icns standalone/android/icons/*
Copyright: 2007 Henrik Nyh <http://henrik.nyh.se/>
2010 Joey Hess <joey@kitenet.net>
diff --git a/debian/rules b/debian/rules
index 3a0511fa6..7c8f8a560 100755
--- a/debian/rules
+++ b/debian/rules
@@ -1,7 +1,6 @@
#!/usr/bin/make -f
-# Avoid using cabal, as it writes to $HOME
-export CABAL=./Setup
+export CABAL=debian/cabal-wrapper
# Do use the changelog's version number, rather than making one up.
export RELEASE_BUILD=1
diff --git a/doc/bugs/Build_error_with_Yesod_1.4.mdwn b/doc/bugs/Build_error_with_Yesod_1.4.mdwn
new file mode 100644
index 000000000..dbee2654e
--- /dev/null
+++ b/doc/bugs/Build_error_with_Yesod_1.4.mdwn
@@ -0,0 +1,287 @@
+### Please describe the problem.
+I have problems building with yesod 1.4
+
+### What steps will reproduce the problem?
+Building git annex in a clean sandbox.
+
+### What version of git-annex are you using? On what operating system?
+5.20140927 on OS X, i.e. trying to upgrade the homebrew recipe to the most recent version of git-annex.
+
+### Please provide any additional information below.
+Error messages below are discussed in the following SO-thread:
+https://stackoverflow.com/questions/26225991/illegal-view-pattern-frompathpiece-just-dyn-abdd-when-using-parameters-on
+
+
+[[!format sh """
+# If you can, paste a complete transcript of the problem occurring here.
+# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log
+[310 of 470] Compiling Assistant.WebApp.Types ( Assistant/WebApp/Types.hs, dist/dist-sandbox-52ca649e/build/git-annex/git-annex-tmp/Assistant/WebApp/Types.o )
+
+Assistant/WebApp/Types.hs:40:1:
+ Illegal view pattern: fromPathPiece -> Just dyn_aceZO
+ Use ViewPatterns to enable view patterns
+
+Assistant/WebApp/Types.hs:40:1:
+ Illegal view pattern: fromPathPiece -> Just dyn_aceZW
+ Use ViewPatterns to enable view patterns
+
+[... the same "Illegal view pattern: fromPathPiece -> Just dyn_*" /
+ "Use ViewPatterns to enable view patterns" error repeats for dozens more
+ bindings at Assistant/WebApp/Types.hs:40:1 ...]
+cabal: Error: some packages failed to install:
+git-annex-5.20140927
+
+
+# End of transcript or log.
+"""]]
+
+> You're not building the most recent version of git-annex; this was
+> already fixed in version 5.20141013. [[done]] --[[Joey]]
diff --git a/doc/bugs/Build_error_with_Yesod_1.4/comment_1_42fe9d62c9dcc55deea35d16b67177e6._comment b/doc/bugs/Build_error_with_Yesod_1.4/comment_1_42fe9d62c9dcc55deea35d16b67177e6._comment
new file mode 100644
index 000000000..145b5b1ea
--- /dev/null
+++ b/doc/bugs/Build_error_with_Yesod_1.4/comment_1_42fe9d62c9dcc55deea35d16b67177e6._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnmF_9CAtfqdZkC4e-_dCX-rK5bqh4RWkw"
+ nickname="Carl"
+ subject="Not on hackage"
+ date="2014-10-15T15:34:02Z"
+ content="""
+I stand corrected, but it seems this release is not on hackage?
+"""]]
diff --git a/doc/bugs/Build_error_with_Yesod_1.4/comment_2_05f6fb19f2527f6dd72ab0e2f87c021a._comment b/doc/bugs/Build_error_with_Yesod_1.4/comment_2_05f6fb19f2527f6dd72ab0e2f87c021a._comment
new file mode 100644
index 000000000..4f671d0b4
--- /dev/null
+++ b/doc/bugs/Build_error_with_Yesod_1.4/comment_2_05f6fb19f2527f6dd72ab0e2f87c021a._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.111"
+ subject="comment 2"
+ date="2014-10-15T17:30:45Z"
+ content="""
+Hmm, yeah, it seems the upload to hackage failed, because hackage still rejects cabal files that mention os(gnu), even though it's legal cabal syntax. Sigh. Fixed now.
+"""]]
diff --git a/doc/bugs/Can__39__t_add_a_git_repo_to_git_annex:___34__Invalid_path_repo__47__.git__47__X__34___for_many_X/comment_10_2c8e8a4f35b392b1cb4dc8104786312d._comment b/doc/bugs/Can__39__t_add_a_git_repo_to_git_annex:___34__Invalid_path_repo__47__.git__47__X__34___for_many_X/comment_10_2c8e8a4f35b392b1cb4dc8104786312d._comment
new file mode 100644
index 000000000..b95b3ed68
--- /dev/null
+++ b/doc/bugs/Can__39__t_add_a_git_repo_to_git_annex:___34__Invalid_path_repo__47__.git__47__X__34___for_many_X/comment_10_2c8e8a4f35b392b1cb4dc8104786312d._comment
@@ -0,0 +1,17 @@
+[[!comment format=mdwn
+ username="JerSou"
+ ip="82.228.88.32"
+ subject="comment 10"
+ date="2014-09-25T19:27:43Z"
+ content="""
+I thought of a workaround (though I don't think I'll ultimately use it).
+For each git repo:
+
+    mv .git .gitToAnnex
+    ln -s .gitToAnnex .git
+    echo .gitToAnnex >> .gitignore
+
+"""]]
diff --git a/doc/bugs/Drop_files_with_the_same_checksum..mdwn b/doc/bugs/Drop_files_with_the_same_checksum..mdwn
new file mode 100644
index 000000000..0dcae20c7
--- /dev/null
+++ b/doc/bugs/Drop_files_with_the_same_checksum..mdwn
@@ -0,0 +1,33 @@
+### Please describe the problem.
+When two identical files are annexed and one of them is dropped, both files are gone (one dangling symlink is left). This may be intentional (the checksums are the same after all), but then is there a way to drop one of the files?
+
+### What steps will reproduce the problem?
+
+ mkdir annex
+ cd annex
+ git init
+ git annex init
+ mkdir a b
+ dd if=/dev/urandom of=a/data.bin count=2048
+ cp a/data.bin b
+ git annex add a/data.bin b/data.bin
+ git commit -m "Added raw data."
+ git annex drop --force a/data.bin
+ file b/data.bin
+
+### What version of git-annex are you using? On what operating system?
+
+git-annex version: 5.20140831+b1
+build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV Inotify DBus DesktopNotify XMPP DNS Feeds Quvi TDFA CryptoHash
+key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
+remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
+local repository version: 5
+supported repository version: 5
+
+Distributor ID: Debian
+Description: Debian GNU/Linux testing (jessie)
+Release: testing
+Codename: jessie
+
+> If you don't want git-annex to de-duplicate files you can use a backend
+> such as WORM. Here it's behaving as expected, so [[done]]. --[[Joey]]
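+
+For example (a sketch, not part of the original reply), files can be added with the non-checksumming WORM backend, or already-annexed files migrated to it, so that identical content gets distinct keys:
+
+    git annex add --backend=WORM b/data.bin
+    # or switch a file that is already annexed:
+    git annex migrate --backend=WORM b/data.bin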
diff --git a/doc/bugs/Issue_fewer_S3_GET_requests.mdwn b/doc/bugs/Issue_fewer_S3_GET_requests.mdwn
new file mode 100644
index 000000000..8bbcfa179
--- /dev/null
+++ b/doc/bugs/Issue_fewer_S3_GET_requests.mdwn
@@ -0,0 +1,9 @@
+It appears that git-annex issues one GET request to S3 / Google cloud for every file it tries to copy, if you don't pass --fast. (I could be wrong; I'm basing this on the fact that each "checking <remote name>" takes about the same amount of time, and that it's slow enough to be hitting the network.)
+
+Amazon lets you GET 1000 objects in one GET request, and afaict a request that returns 1000 objects costs just as much as a request that returns 1 object. The cost of GET'ing every file in my annex is nontrivial -- Google charges $0.01 per 1,000 GETs, and my repo has 130k objects, so that's $1.30, compared to a monthly storage cost of under $10. This means that if I want to back up my files more than, say, once a week, I need to write a script that parses the JSON output of git annex whereis and uploads with --fast only the files that aren't present in the cloud. It also means that I have to trust the output of whereis.
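+
+For reference, here's a rough, untested sketch of that kind of script. It assumes the remote's UUID is known, that `jq` is available, and that the `--json` output of `whereis` contains a `whereis` list with `uuid` fields; check the actual output before relying on it:
+
+    # Hypothetical helper: copy to the cloud remote only the files whose
+    # whereis output does not already list that remote's UUID.
+    S3_UUID=00000000-0000-0000-0000-000000000000   # placeholder for the remote's UUID
+    git annex whereis --json |
+      jq -r --arg uuid "$S3_UUID" \
+        'select([.whereis[].uuid] | index($uuid) | not) | .file' |
+      xargs -r -d '\n' git annex copy --fast --to s3   # "s3" = the remote's name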
+
+All those GETs also slow down the non-fast copy, and this also applies to other kinds of remotes.
+
+There are a number of ways this could be implemented. One would be to have a command that updates the whereis data from the remote, plus a parameter to copy (maybe it already exists) that works like --fast but skips files that are already present (maybe this is what --fast already does, but a quick check suggests it doesn't). Because of the way git annex names files, I think it would be hard to coalesce GETs during a copy command, but it could be done.
+
+Anyway, please don't consider this a high-priority request; I can get by as-is, and I <3 git annex.
diff --git a/doc/bugs/Req:_Upgrade_to_Yesod_1.4__63___https:__47____47__github.com__47__NixOS__47__nixpkgs__47__pull__47__4391.mdwn b/doc/bugs/Req:_Upgrade_to_Yesod_1.4__63___https:__47____47__github.com__47__NixOS__47__nixpkgs__47__pull__47__4391.mdwn
new file mode 100644
index 000000000..6e0117202
--- /dev/null
+++ b/doc/bugs/Req:_Upgrade_to_Yesod_1.4__63___https:__47____47__github.com__47__NixOS__47__nixpkgs__47__pull__47__4391.mdwn
@@ -0,0 +1,24 @@
+### Please describe the problem.
+
+Not a super big "problem" but I'm blocked upgrading Nix packages to Yesod 1.4 because of git-annex breakage.
+
+### What steps will reproduce the problem?
+
+Try to build with Yesod 1.4
+
+### What version of git-annex are you using? On what operating system?
+
+Latest
+
+### Please provide any additional information below.
+
+[[!format sh """
+# If you can, paste a complete transcript of the problem occurring here.
+# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log
+
+
+# End of transcript or log.
+"""]]
+
+> [[fixed|done]], although I have not made a release yet.
+> It's a 1 line change anyhow, just adding ViewPatterns. --[[Joey]]
diff --git a/doc/bugs/S3_upload_not_using_multipart/comment_7_f620888512cd78628f82ec9e5eed4ad1._comment b/doc/bugs/S3_upload_not_using_multipart/comment_7_f620888512cd78628f82ec9e5eed4ad1._comment
new file mode 100644
index 000000000..ec47aa2be
--- /dev/null
+++ b/doc/bugs/S3_upload_not_using_multipart/comment_7_f620888512cd78628f82ec9e5eed4ad1._comment
@@ -0,0 +1,21 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawl9sYlePmv1xK-VvjBdN-5doOa_Xw-jH4U"
+ nickname="Richard"
+ subject="comment 7"
+ date="2014-09-29T08:07:55Z"
+ content="""
+As I found the latest comment confusing, here's the full quote:
+
+ Depending on the size of the data you are uploading, Amazon S3 offers the following options:
+
+ Upload objects in a single operation—With a single PUT operation you can upload objects up to 5 GB in size.
+
+ Upload objects in parts—Using the Multipart upload API you can upload large objects, up to 5 TB.
+
+ The Multipart Upload API is designed to improve the upload experience for larger objects. You can upload objects in parts.
+ These object parts can be uploaded independently, in any order, and in parallel.
+ You can use a Multipart Upload for objects from 5 MB to 5 TB in size.
+
+ We encourage Amazon S3 customers to use Multipart Upload for objects greater than 100 MB.
+
+"""]]
diff --git a/doc/bugs/S3_upload_not_using_multipart/comment_8_4d9242cde0d2348452438659a8aa8d6d._comment b/doc/bugs/S3_upload_not_using_multipart/comment_8_4d9242cde0d2348452438659a8aa8d6d._comment
new file mode 100644
index 000000000..a427c504e
--- /dev/null
+++ b/doc/bugs/S3_upload_not_using_multipart/comment_8_4d9242cde0d2348452438659a8aa8d6d._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawl9sYlePmv1xK-VvjBdN-5doOa_Xw-jH4U"
+ nickname="Richard"
+ subject="comment 8"
+ date="2014-09-29T08:09:33Z"
+ content="""
+PS: Chunking spams the S3 remote with individual objects whereas multipart uploads do not. Just something to keep in mind in case you turn on chunking for S3.
+"""]]
diff --git a/doc/bugs/Upload_to_S3_fails_.mdwn b/doc/bugs/Upload_to_S3_fails_.mdwn
index 2264c42e4..de74bfd7f 100644
--- a/doc/bugs/Upload_to_S3_fails_.mdwn
+++ b/doc/bugs/Upload_to_S3_fails_.mdwn
@@ -55,3 +55,5 @@ If I fire up the web app and open the log, the end looks like this:
3% 858.1KB/s 6h45mmux_client_request_session: read from master failed: Broken pipe
"""]]
+
+[[!tag moreinfo]]
diff --git a/doc/bugs/Upload_to_S3_fails_/comment_10_b7e912bac673bdffa5775b71d5d39937._comment b/doc/bugs/Upload_to_S3_fails_/comment_10_b7e912bac673bdffa5775b71d5d39937._comment
new file mode 100644
index 000000000..ecc34d487
--- /dev/null
+++ b/doc/bugs/Upload_to_S3_fails_/comment_10_b7e912bac673bdffa5775b71d5d39937._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawkvSZ1AFJdY_1FeutZr_KWeqtzjZta1PNE"
+ nickname="Thedward"
+ subject="comment 10"
+ date="2014-10-21T21:25:57Z"
+ content="""
+The only files that succeeded were small text files. The other files — 3-200MiB — all failed.
+"""]]
diff --git a/doc/bugs/Upload_to_S3_fails_/comment_1_398c014921f9af957fb5e9a92ed0ef4d._comment b/doc/bugs/Upload_to_S3_fails_/comment_1_398c014921f9af957fb5e9a92ed0ef4d._comment
new file mode 100644
index 000000000..a18ca1d05
--- /dev/null
+++ b/doc/bugs/Upload_to_S3_fails_/comment_1_398c014921f9af957fb5e9a92ed0ef4d._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="108.236.230.124"
+ subject="comment 1"
+ date="2014-09-18T18:49:43Z"
+ content="""
+This is using the old hS3 library. So, each chunk is sent using a new http connection. It seems that the connection must be getting closed by S3 partway through the upload of a chunk.
+
+It may be that the new aws library somehow avoids this problem. So, a git-annex built with the `s3-aws` branch merged in may help with this bug. OTOH, that new branch makes a single http connection be reused for all the chunks in a file, so it might also make things worse.
+"""]]
diff --git a/doc/bugs/Upload_to_S3_fails_/comment_2_f33ce058c9460cf7d151e739bff0440a._comment b/doc/bugs/Upload_to_S3_fails_/comment_2_f33ce058c9460cf7d151e739bff0440a._comment
new file mode 100644
index 000000000..dcf719bba
--- /dev/null
+++ b/doc/bugs/Upload_to_S3_fails_/comment_2_f33ce058c9460cf7d151e739bff0440a._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="108.236.230.124"
+ subject="comment 2"
+ date="2014-09-18T18:52:17Z"
+ content="""
+If you're using the new chunking system, git-annex should support resuming the upload to S3. Next time you try to send the file, it should find the chunks that were successfully sent, and resume at the chunk where it failed.
+
+Supporting this even for encrypted uploads was a major benefit of the new chunking system, so I hope it works...?
+"""]]
diff --git a/doc/bugs/Upload_to_S3_fails_/comment_3_cd1e768fe1e67daf08b5afd460620922._comment b/doc/bugs/Upload_to_S3_fails_/comment_3_cd1e768fe1e67daf08b5afd460620922._comment
new file mode 100644
index 000000000..5efb78685
--- /dev/null
+++ b/doc/bugs/Upload_to_S3_fails_/comment_3_cd1e768fe1e67daf08b5afd460620922._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="annexuser"
+ ip="71.198.212.13"
+ subject="comment 3"
+ date="2014-09-19T04:43:42Z"
+ content="""
+Was the version I listed above not using the new chunking from the `s3-aws` branch? How do I determine if my version of git-annex was built with the new or old chunking?
+"""]]
diff --git a/doc/bugs/Upload_to_S3_fails_/comment_4_0cdd2e8d6e83c03de717ecd3253e753d._comment b/doc/bugs/Upload_to_S3_fails_/comment_4_0cdd2e8d6e83c03de717ecd3253e753d._comment
new file mode 100644
index 000000000..216aea575
--- /dev/null
+++ b/doc/bugs/Upload_to_S3_fails_/comment_4_0cdd2e8d6e83c03de717ecd3253e753d._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="67.223.1.203"
+ subject="comment 4"
+ date="2014-09-19T18:33:17Z"
+ content="""
+Your version supports both new and old style chunking. Which is used depends on how the S3 remote was configured when it was set up. It can't really be changed w/o re-setting up the remote. You can check which is used by `git show git-annex:remote.log`, find the line for the UUID of the remote, and see if it has chunk= (new chunking) or chunksize= (old chunking).
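+
+Concretely, the check is something like this (a sketch; fill in the remote's UUID):
+
+    git show git-annex:remote.log | grep <uuid-of-the-remote>
+
+A `chunk=` field on that line means new-style chunking; `chunksize=` means old-style.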
+"""]]
diff --git a/doc/bugs/Upload_to_S3_fails_/comment_5_020c055f6c06860dda27c1debb123742._comment b/doc/bugs/Upload_to_S3_fails_/comment_5_020c055f6c06860dda27c1debb123742._comment
new file mode 100644
index 000000000..368d2853f
--- /dev/null
+++ b/doc/bugs/Upload_to_S3_fails_/comment_5_020c055f6c06860dda27c1debb123742._comment
@@ -0,0 +1,43 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawkvSZ1AFJdY_1FeutZr_KWeqtzjZta1PNE"
+ nickname="Thedward"
+ subject="comment 5"
+ date="2014-10-21T04:10:16Z"
+ content="""
+
+I am experiencing similar behavior on Ubuntu Trusty (x86_64) using a prebuilt Linux release:
+
+ Linux hostname 3.13.0-36-generic #63-Ubuntu SMP Wed Sep 3 21:30:07 UTC 2014 x86_64 x86_64 x86_64 GNU/Linux
+
+ Distributor ID: Ubuntu
+ Description: Ubuntu 14.04.1 LTS
+ Release: 14.04
+ Codename: trusty
+
+ git-annex version: 5.20141016-g26b38fd
+ build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV Inotify DBus DesktopNotify XMPP DNS Feeds Quvi TDFA CryptoHash
+ key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
+ remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
+
+Copying files to S3 consistently fails both from the command line and via the assistant:
+
+ [2014-10-20 22:34:32 CDT] read: git [\"--git-dir=/home/user/git-annex/.git\",\"--work-tree=/home/user/git-annex\",\"-c\",\"core.bare=false\",\"show-ref\",\"git-annex\"]
+ [2014-10-20 22:34:32 CDT] read: git [\"--git-dir=/home/user/git-annex/.git\",\"--work-tree=/home/user/git-annex\",\"-c\",\"core.bare=false\",\"show-ref\",\"--hash\",\"refs/heads/git-annex\"]
+ [2014-10-20 22:34:32 CDT] read: git [\"--git-dir=/home/user/git-annex/.git\",\"--work-tree=/home/user/git-annex\",\"-c\",\"core.bare=false\",\"log\",\"refs/heads/git-annex..78e9b6b85f3b453d8ed4f66f63ff09e03ce13d06\",\"-n1\",\"--pretty=%H\"]
+ [2014-10-20 22:34:32 CDT] read: git [\"--git-dir=/home/user/git-annex/.git\",\"--work-tree=/home/user/git-annex\",\"-c\",\"core.bare=false\",\"log\",\"refs/heads/git-annex..658720ba59a2fefee89c908b972971ca901f84dc\",\"-n1\",\"--pretty=%H\"]
+ [2014-10-20 22:34:32 CDT] chat: git [\"--git-dir=/home/user/git-annex/.git\",\"--work-tree=/home/user/git-annex\",\"-c\",\"core.bare=false\",\"cat-file\",\"--batch\"]
+ [2014-10-20 22:34:32 CDT] read: git [\"--git-dir=/home/user/git-annex/.git\",\"--work-tree=/home/user/git-annex\",\"-c\",\"core.bare=false\",\"ls-files\",\"--cached\",\"-z\",\"--\",\"storage/data.bin\"]
+ [2014-10-20 22:34:32 CDT] chat: git [\"--git-dir=/home/user/git-annex/.git\",\"--work-tree=/home/user/git-annex\",\"-c\",\"core.bare=false\",\"cat-file\",\"--batch\"]
+ copy storage/data.bin (gpg) (checking S3git-annex...) (to S3git-annex...)
+ 0% 0.0 B/s 0s[2014-10-20 22:34:33 CDT] chat: gpg [\"--batch\",\"--no-tty\",\"--use-agent\",\"--quiet\",\"--trust-model\",\"always\",\"--batch\",\"--passphrase-fd\",\"14\",\"--symmetric\",\"--force-mdc\",\"--no-textmode\"]
+ 8% 512.0KB/s 21s[2014-10-20 22:34:35 CDT] chat: gpg [\"--batch\",\"--no-tty\",\"--use-agent\",\"--quiet\",\"--trust-model\",\"always\",\"--batch\",\"--passphrase-fd\",\"14\",\"--symmetric\",\"--force-mdc\",\"--no-textmode\"]
+ 8% 528.0KB/s 21s
+ ErrorClosed
+ failed
+ git-annex: copy: 1 failed
+
+Two files (out of several hundred) have succeeded.
+
+Any ideas?
+
+"""]]
diff --git a/doc/bugs/Upload_to_S3_fails_/comment_6_8bc023fca8cedfc517856cdcd20b7f10._comment b/doc/bugs/Upload_to_S3_fails_/comment_6_8bc023fca8cedfc517856cdcd20b7f10._comment
new file mode 100644
index 000000000..ed21bb8fb
--- /dev/null
+++ b/doc/bugs/Upload_to_S3_fails_/comment_6_8bc023fca8cedfc517856cdcd20b7f10._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.96"
+ subject="comment 6"
+ date="2014-10-21T16:31:02Z"
+ content="""
+I need to know if the S3 remote is configured to use the new style chunking feature, and what size chunks it is configured to use. I have already explained how to check that in this thread.
+
+I also need to know if retrying the upload after it fails lets it resume where it left off.
+"""]]
diff --git a/doc/bugs/Upload_to_S3_fails_/comment_7_32685258748a7cdd177e7af2105f128e._comment b/doc/bugs/Upload_to_S3_fails_/comment_7_32685258748a7cdd177e7af2105f128e._comment
new file mode 100644
index 000000000..4d6655224
--- /dev/null
+++ b/doc/bugs/Upload_to_S3_fails_/comment_7_32685258748a7cdd177e7af2105f128e._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawkvSZ1AFJdY_1FeutZr_KWeqtzjZta1PNE"
+ nickname="Thedward"
+ subject="comment 7"
+ date="2014-10-21T17:35:16Z"
+ content="""
+It is running the new style chunking (chunk=1MiB).
+
+It does not appear to resume when it tries again. If I try copying a file to the remote from the command line, it always starts at 0% and dies at some point before 100% even if it has tried to copy that file before.
+"""]]
diff --git a/doc/bugs/Upload_to_S3_fails_/comment_8_841fd94d0f599c71a76fd22b07944366._comment b/doc/bugs/Upload_to_S3_fails_/comment_8_841fd94d0f599c71a76fd22b07944366._comment
new file mode 100644
index 000000000..61e201c9d
--- /dev/null
+++ b/doc/bugs/Upload_to_S3_fails_/comment_8_841fd94d0f599c71a76fd22b07944366._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawkvSZ1AFJdY_1FeutZr_KWeqtzjZta1PNE"
+ nickname="Thedward"
+ subject="comment 8"
+ date="2014-10-21T17:47:35Z"
+ content="""
+Additional info: I created both of the related git-annex repositories yesterday via the webapp. Then imported a number of files to each one. Then connected them via xmpp. Then created the S3 remote via the webapp so they could actually share files. I am using an IAM identity for S3 instead of my root access key; it has full S3 access (and data IS showing up in the bucket, so it's not a *simple* permissions problem).
+"""]]
diff --git a/doc/bugs/Upload_to_S3_fails_/comment_9_dd837a1cb2146224b9c000cbeea4f3b3._comment b/doc/bugs/Upload_to_S3_fails_/comment_9_dd837a1cb2146224b9c000cbeea4f3b3._comment
new file mode 100644
index 000000000..4cfda38c2
--- /dev/null
+++ b/doc/bugs/Upload_to_S3_fails_/comment_9_dd837a1cb2146224b9c000cbeea4f3b3._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.96"
+ subject="comment 9"
+ date="2014-10-21T20:22:52Z"
+ content="""
+How big is the file that it fails to copy?
+"""]]
diff --git a/doc/bugs/WebDAV_error_when_connecting_to_box.com:_The_resource_you_tried_to_create_already_exists.mdwn b/doc/bugs/WebDAV_error_when_connecting_to_box.com:_The_resource_you_tried_to_create_already_exists.mdwn
new file mode 100644
index 000000000..857ca333a
--- /dev/null
+++ b/doc/bugs/WebDAV_error_when_connecting_to_box.com:_The_resource_you_tried_to_create_already_exists.mdwn
@@ -0,0 +1,36 @@
+### Please describe the problem.
+
+"git-annex enableremote box.com" fails with "git-annex: WebDAV test failed". The server returns error message "The resource you tried to create already exists" (see below).
+
+### What steps will reproduce the problem?
+
+1. I initialize the box.com special remote on my desktop. The path at box.com is "gas/annex".
+
+2. I enable the box.com special remote on my laptop. I get the error described above.
+
+### What version of git-annex are you using? On what operating system?
+
+ $ git annex version
+ git-annex version: 5.20140831-g62e6ad8
+ build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV Inotify DBus DesktopNotify XMPP DNS Feeds Quvi TDFA CryptoHash
+ key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
+ remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
+ $ uname -a
+ Linux tkf-acer 3.12.9-2-ARCH #1 SMP PREEMPT Fri Jan 31 10:22:54 CET 2014 x86_64 GNU/Linux
+
+
+### Please provide any additional information below.
+
+I ran the following with the appropriate WEBDAV_USERNAME and WEBDAV_PASSWORD environment variables set.
+
+ me@desktop$ git-annex initremote box type=webdav url=https://dav.box.com/dav/gas/annex chunk=50mb encryption=shared
+
+ me@laptop$ git-annex enableremote box.com
+ enableremote box.com (testing WebDAV server...)
+
+ git-annex: WebDAV test failed: StatusCodeException (Status {statusCode = 405, statusMessage = "Method Not Allowed"}) [("Server","nginx"),("Date","Sat, 27 Sep 2014 09:36:42 GMT"),("Content-Type","application/xml; charset=utf-8"),("Content-Length","247"),("Connection","keep-alive"),("Vary","Host"),("Allow","OPTIONS, GET, HEAD, DELETE, PROPFIND, PUT, PROPPATCH, COPY, MOVE, REPORT, LOCK, UNLOCK"),("X-Response-Body-Start","<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<d:error xmlns:d=\"DAV:\" xmlns:s=\"http://sabredav.org/ns\">\n <s:exception>Sabre_DAV_Exception_MethodNotAllowed</s:exception>\n <s:message>The resource you tried to create already exists</s:message>\n</d:error>\n")] (CJ {expose = []}): user error
+ failed
+ git-annex: enableremote: 1 failed
+
+> You are using an old version of git-annex; this bug was fixed in
+> version 5.20140919. [[done]] --[[Joey]]
diff --git a/doc/bugs/WebDAV_error_when_connecting_to_box.com:_The_resource_you_tried_to_create_already_exists/comment_1_ac40ddc26bff27dafdbc457837695a92._comment b/doc/bugs/WebDAV_error_when_connecting_to_box.com:_The_resource_you_tried_to_create_already_exists/comment_1_ac40ddc26bff27dafdbc457837695a92._comment
new file mode 100644
index 000000000..89ab74c69
--- /dev/null
+++ b/doc/bugs/WebDAV_error_when_connecting_to_box.com:_The_resource_you_tried_to_create_already_exists/comment_1_ac40ddc26bff27dafdbc457837695a92._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawk0GR7KgDF6PAzHTkLZCCkjAvJVB7ceXTY"
+ nickname="Takafumi"
+ subject="comment 1"
+ date="2014-09-27T19:46:08Z"
+ content="""
+I updated git-annex and it works. Thank you very much.
+"""]]
diff --git a/doc/bugs/_WebApp_crashed:_getAddrInfo:_does_not_exist___40__Name_or_service_not_known__41_____91__2014-07-23_16:41:45_CEST__93___WebApp:_warning_WebApp_crashed:_getAddrInfo:_does_not_exist___40__Name_or_service_not_known__41__.mdwn b/doc/bugs/_WebApp_crashed:_getAddrInfo:_does_not_exist___40__Name_or_service_not_known__41_____91__2014-07-23_16:41:45_CEST__93___WebApp:_warning_WebApp_crashed:_getAddrInfo:_does_not_exist___40__Name_or_service_not_known__41__.mdwn
index 77d2beb10..69732a743 100644
--- a/doc/bugs/_WebApp_crashed:_getAddrInfo:_does_not_exist___40__Name_or_service_not_known__41_____91__2014-07-23_16:41:45_CEST__93___WebApp:_warning_WebApp_crashed:_getAddrInfo:_does_not_exist___40__Name_or_service_not_known__41__.mdwn
+++ b/doc/bugs/_WebApp_crashed:_getAddrInfo:_does_not_exist___40__Name_or_service_not_known__41_____91__2014-07-23_16:41:45_CEST__93___WebApp:_warning_WebApp_crashed:_getAddrInfo:_does_not_exist___40__Name_or_service_not_known__41__.mdwn
@@ -42,3 +42,5 @@ WebApp crashed: getAddrInfo: does not exist (Name or service not known)
# End of transcript or log.
"""]]
+
+> [[done]] --[[Joey]]
diff --git a/doc/bugs/__34__error:_invalid_object__34____44___after_add__59___cannot_commit/comment_11_7776659e257a97c9a3855c8ad008207a._comment b/doc/bugs/__34__error:_invalid_object__34____44___after_add__59___cannot_commit/comment_11_7776659e257a97c9a3855c8ad008207a._comment
new file mode 100644
index 000000000..4632a5df3
--- /dev/null
+++ b/doc/bugs/__34__error:_invalid_object__34____44___after_add__59___cannot_commit/comment_11_7776659e257a97c9a3855c8ad008207a._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 11"
+ date="2014-10-02T16:09:37Z"
+ content="""
+It seems to me that the problem must be with .git/annex/index.
+
+I would be interested in looking at this git repository, if there's a way to get a copy (no .git/annex/objects needed).
+"""]]
diff --git a/doc/bugs/annex_get_fails_from_read-only_filesystem.mdwn b/doc/bugs/annex_get_fails_from_read-only_filesystem.mdwn
new file mode 100644
index 000000000..18b446ee1
--- /dev/null
+++ b/doc/bugs/annex_get_fails_from_read-only_filesystem.mdwn
@@ -0,0 +1,27 @@
+### Please describe the problem.
+
+annex get does not work from read-only file systems...
+
+### What steps will reproduce the problem?
+
+ $ git annex get --from=...
+ error: could not lock config file /.../Annex/.git/config: Read-only file system
+ get ... (from ...) error: could not lock config file .../Annex/.git/config: Read-only file system
+ git [Param "config",Param "annex.version",Param "5"] failed
+ failed
+
+### What version of git-annex are you using? On what operating system?
+
+annex.version = 3 in the remote
+
+ $ git annex version
+ git-annex version: 5.20140927
+ build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV Inotify DBus DesktopNotify XMPP DNS Feeds Quvi TDFA CryptoHash
+ key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
+ remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
+ local repository version: 5
+ supported repository version: 5
+ upgrade supported from repository versions: 0 1 2 4
+
+[[!tag confirmed]]
+[[!meta title="read-only filesystem on remote prevents auto-upgrade from v3 to v5, and prevents using a remote"]]
diff --git a/doc/bugs/annex_get_fails_from_read-only_filesystem/comment_1_d8ab07429195c06ec4fae199ca9e0764._comment b/doc/bugs/annex_get_fails_from_read-only_filesystem/comment_1_d8ab07429195c06ec4fae199ca9e0764._comment
new file mode 100644
index 000000000..9fadf817f
--- /dev/null
+++ b/doc/bugs/annex_get_fails_from_read-only_filesystem/comment_1_d8ab07429195c06ec4fae199ca9e0764._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-06T15:11:18Z"
+ content="""
+There might be a general problem with using git-annex against a read-only filesystem, but the specific case here is a read-only filesystem containing a repository in an old format which git-annex needs to upgrade to the current format to use. So it's pretty reasonable that the (automatic) upgrade fails, since it's not being allowed to write to the repository to upgrade it.
+
+Now, if that repository is an indirect mode repo, there is really no change between version 3 and version 5, so it might do to let git-annex ignore the failure to write out the config, and treat that repo as if it's a v5 repo. It seems easier in most cases to mount the media read-write for git-annex to do the upgrade, though.
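+
+For example, a sketch of that remount (the mount point and remote name here are hypothetical):
+
+    sudo mount -o remount,rw /media/usbdrive
+    git annex get --from usbdrive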
+"""]]
diff --git a/doc/bugs/annex_get_fails_from_read-only_filesystem/comment_2_03c16df9d6c14e1529c5dc8b5fc49691._comment b/doc/bugs/annex_get_fails_from_read-only_filesystem/comment_2_03c16df9d6c14e1529c5dc8b5fc49691._comment
new file mode 100644
index 000000000..4bd0bce59
--- /dev/null
+++ b/doc/bugs/annex_get_fails_from_read-only_filesystem/comment_2_03c16df9d6c14e1529c5dc8b5fc49691._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://id.koumbit.net/anarcat"
+ ip="72.0.72.144"
+ subject="comment 2"
+ date="2014-10-06T15:18:20Z"
+ content="""
+i've seen problems like this not related to upgrades at all, in [[todo/read-only removable drives]]. furthermore, it seems to me that failure to upgrade a repository shouldn't be fatal and we should be able to recover and get files anyways, in the spirit of [[backwards compatibility|future_proofing]]. --[[anarcat]]
+"""]]
diff --git a/doc/bugs/annex_get_fails_from_read-only_filesystem/comment_3_c505a9df0ef63bb7cac28af9502a953d._comment b/doc/bugs/annex_get_fails_from_read-only_filesystem/comment_3_c505a9df0ef63bb7cac28af9502a953d._comment
new file mode 100644
index 000000000..3fd24eb67
--- /dev/null
+++ b/doc/bugs/annex_get_fails_from_read-only_filesystem/comment_3_c505a9df0ef63bb7cac28af9502a953d._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 3"
+ date="2014-10-06T15:59:10Z"
+ content="""
+From a code complication POV, it's useful for git-annex to only support one version of repository at a time.
+
+As far as backwards compatibility goes, I don't anticipate ever removing the upgrade code from git-annex. It still supports upgrading v0 repos, which probably only I ever used!
+"""]]
diff --git a/doc/bugs/box.com/comment_1_d904a08519424cb9f599b2154d1ef953._comment b/doc/bugs/box.com/comment_1_d904a08519424cb9f599b2154d1ef953._comment
new file mode 100644
index 000000000..d230e52aa
--- /dev/null
+++ b/doc/bugs/box.com/comment_1_d904a08519424cb9f599b2154d1ef953._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="108.236.230.124"
+ subject="comment 1"
+ date="2014-09-18T19:28:56Z"
+ content="""
+Ok, I managed to reproduce this. In my case, there was no \"Bad creds\" message because the broken creds data it used happened to contain a newline, but same problem; the creds stored by the webapp are not used properly when re-enabling the box.com remote elsewhere. Same problem would affect other special remotes using embedded creds and shared encryption.
+
+Seems to be a bug introduced in [[!commit fbdeeeed5fa276d94be587c8916d725eddcaf546]]. Despite what the commit says, the embedded creds did get encrypted with the shared gpg key. I have reverted that commit to fix this problem.
+"""]]
diff --git a/doc/bugs/cabal_install_fails:_Could_not_find_module___8216__Network.URI__8217__.mdwn b/doc/bugs/cabal_install_fails:_Could_not_find_module___8216__Network.URI__8217__.mdwn
new file mode 100644
index 000000000..2f9b5de3e
--- /dev/null
+++ b/doc/bugs/cabal_install_fails:_Could_not_find_module___8216__Network.URI__8217__.mdwn
@@ -0,0 +1,216 @@
+### Please describe the problem.
+
+Can't install git-annex from current git master. cabal install also fails. Both fail with the same error.
+
+### What steps will reproduce the problem?
+
+cabal install git-annex --bindir=$HOME/bin
+
+### What version of git-annex are you using? On what operating system?
+
+[[!format sh """
+$ uname -a
+Linux arch 3.16.3-1-ARCH #1 SMP PREEMPT Wed Sep 17 21:54:13 CEST 2014 x86_64 GNU/Linux
+
+$ cabal --version
+cabal-install version 1.20.0.3
+using version 1.20.0.0 of the Cabal library
+"""]]
+
+### Please provide any additional information below.
+
+[[!format sh """
+# If you can, paste a complete transcript of the problem occurring here.
+# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log
+
+$ cabal install git-annex --bindir=$HOME/bin
+Resolving dependencies...
+Configuring DAV-1.0.2...
+Configuring gnuidn-0.2.1...
+Configuring gsasl-0.3.5...
+Configuring hS3-0.5.8...
+Failed to install gsasl-0.3.5
+Build log ( /home/dirk/.cabal/logs/gsasl-0.3.5.log ):
+Configuring gsasl-0.3.5...
+setup-Simple-Cabal-1.18.1.3-x86_64-linux-ghc-7.8.3: The pkg-config package
+libgsasl version >=1.1 is required but it could not be found.
+Failed to install gnuidn-0.2.1
+Build log ( /home/dirk/.cabal/logs/gnuidn-0.2.1.log ):
+Configuring gnuidn-0.2.1...
+setup-Simple-Cabal-1.18.1.3-x86_64-linux-ghc-7.8.3: The program c2hs is
+required but it could not be found.
+Building hS3-0.5.8...
+Building DAV-1.0.2...
+Failed to install hS3-0.5.8
+Build log ( /home/dirk/.cabal/logs/hS3-0.5.8.log ):
+Configuring hS3-0.5.8...
+Building hS3-0.5.8...
+Preprocessing library hS3-0.5.8...
+
+Network/AWS/S3Object.hs:26:8:
+ Could not find module ‘Network.URI’
+ It is a member of the hidden package ‘network-uri-2.6.0.1’.
+ Perhaps you need to add ‘network-uri’ to the build-depends in your .cabal file.
+ Use -v to see a list of the files searched for.
+Failed to install DAV-1.0.2
+Build log ( /home/dirk/.cabal/logs/DAV-1.0.2.log ):
+Configuring DAV-1.0.2...
+Building DAV-1.0.2...
+Preprocessing library DAV-1.0.2...
+[1 of 2] Compiling Network.Protocol.HTTP.DAV.TH ( Network/Protocol/HTTP/DAV/TH.hs, dist/build/Network/Protocol/HTTP/DAV/TH.o )
+Loading package ghc-prim ... linking ... done.
+Loading package integer-gmp ... linking ... done.
+Loading package base ... linking ... done.
+Loading package array-0.5.0.0 ... linking ... done.
+Loading package deepseq-1.3.0.2 ... linking ... done.
+Loading package containers-0.5.5.1 ... linking ... done.
+Loading package bytestring-0.10.4.0 ... linking ... done.
+Loading package transformers-0.3.0.0 ... linking ... done.
+Loading package mtl-2.1.3.1 ... linking ... done.
+Loading package text-1.2.0.0 ... linking ... done.
+Loading package parsec-3.1.6 ... linking ... done.
+Loading package hashable-1.2.2.0 ... linking ... done.
+Loading package scientific-0.3.3.1 ... linking ... done.
+Loading package attoparsec-0.12.1.2 ... linking ... done.
+Loading package dlist-0.7.1 ... linking ... done.
+Loading package old-locale-1.0.0.6 ... linking ... done.
+Loading package syb-0.4.2 ... linking ... done.
+Loading package pretty-1.1.1.1 ... linking ... done.
+Loading package template-haskell ... linking ... done.
+Loading package time-1.4.2 ... linking ... done.
+Loading package unordered-containers-0.2.5.0 ... linking ... done.
+Loading package primitive-0.5.3.0 ... linking ... done.
+Loading package vector-0.10.11.0 ... linking ... done.
+Loading package aeson-0.8.0.0 ... linking ... done.
+Loading package blaze-builder-0.3.3.4 ... linking ... done.
+Loading package blaze-markup-0.6.1.1 ... linking ... done.
+Loading package blaze-html-0.7.0.3 ... linking ... done.
+Loading package filepath-1.3.0.2 ... linking ... done.
+Loading package unix-2.7.0.1 ... linking ... done.
+Loading package directory-1.2.1.0 ... linking ... done.
+Loading package exceptions-0.6.1 ... linking ... done.
+Loading package process-1.2.0.0 ... linking ... done.
+Loading package system-filepath-0.4.12 ... linking ... done.
+Loading package system-fileio-0.3.14 ... linking ... done.
+Loading package shakespeare-2.0.1.1 ... linking ... done.
+Loading package stm-2.4.3 ... linking ... done.
+Loading package transformers-base-0.4.3 ... linking ... done.
+Loading package monad-control-0.3.3.0 ... linking ... done.
+Loading package lifted-base-0.2.3.0 ... linking ... done.
+Loading package mmorph-1.0.4 ... linking ... done.
+Loading package resourcet-1.1.2.3 ... linking ... done.
+Loading package nats-0.2 ... linking ... done.
+Loading package semigroups-0.15.3 ... linking ... done.
+Loading package void-0.6.1 ... linking ... done.
+Loading package conduit-1.2.0.2 ... linking ... done.
+Loading package attoparsec-conduit-1.1.0 ... linking ... done.
+Loading package blaze-builder-conduit-1.1.0 ... linking ... done.
+Loading package network-2.6.0.2 ... linking ... done.
+Loading package random-1.1 ... linking ... done.
+Loading package zlib-0.5.4.1 ... linking ... done.
+Loading package streaming-commons-0.1.5 ... linking ... done.
+Loading package conduit-extra-1.1.3.4 ... linking ... done.
+Loading package data-default-class-0.0.1 ... linking ... done.
+Loading package data-default-instances-base-0.0.1 ... linking ... done.
+Loading package data-default-instances-containers-0.0.1 ... linking ... done.
+Loading package data-default-instances-dlist-0.0.1 ... linking ... done.
+Loading package data-default-instances-old-locale-0.0.1 ... linking ... done.
+Loading package data-default-0.5.3 ... linking ... done.
+Loading package xml-types-0.3.4 ... linking ... done.
+Loading package xml-conduit-1.2.2 ... linking ... done.
+Loading package xml-hamlet-0.4.0.9 ... linking ... done.
+Loading package transformers-compat-0.3.3.4 ... linking ... done.
+Loading package contravariant-1.2 ... linking ... done.
+Loading package tagged-0.7.2 ... linking ... done.
+Loading package distributive-0.4.4 ... linking ... done.
+Loading package comonad-4.2.2 ... linking ... done.
+Loading package semigroupoids-4.2 ... linking ... done.
+Loading package bifunctors-4.1.1.1 ... linking ... done.
+Loading package prelude-extras-0.4 ... linking ... done.
+Loading package profunctors-4.2.0.1 ... linking ... done.
+Loading package free-4.9 ... linking ... done.
+Loading package parallel-3.2.0.4 ... linking ... done.
+Loading package reflection-1.5.1 ... linking ... done.
+Loading package split-0.2.2 ... linking ... done.
+Loading package lens-4.4.0.2 ... linking ... done.
+Loading package byteable-0.1.1 ... linking ... done.
+Loading package securemem-0.1.3 ... linking ... done.
+Loading package crypto-cipher-types-0.0.9 ... linking ... done.
+Loading package cipher-aes-0.2.8 ... linking ... done.
+Loading package crypto-random-0.0.8 ... linking ... done.
+Loading package cprng-aes-0.5.2 ... linking ... done.
+Loading package cereal-0.4.0.1 ... linking ... done.
+Loading package socks-0.5.4 ... linking ... done.
+Loading package asn1-types-0.2.3 ... linking ... done.
+Loading package asn1-encoding-0.8.1.3 ... linking ... done.
+Loading package cipher-des-0.0.6 ... linking ... done.
+Loading package cipher-rc4-0.1.4 ... linking ... done.
+Loading package crypto-numbers-0.2.3 ... linking ... done.
+Loading package crypto-pubkey-types-0.4.2.2 ... linking ... done.
+Loading package cryptohash-0.11.6 ... linking ... done.
+Loading package crypto-pubkey-0.2.4 ... linking ... done.
+Loading package asn1-parse-0.8.1 ... linking ... done.
+Loading package base64-bytestring-1.0.0.1 ... linking ... done.
+Loading package pem-0.2.2 ... linking ... done.
+Loading package x509-1.4.12 ... linking ... done.
+Loading package x509-store-1.4.4 ... linking ... done.
+Loading package x509-validation-1.5.0 ... linking ... done.
+Loading package tls-1.2.9 ... linking ... done.
+Loading package x509-system-1.4.5 ... linking ... done.
+Loading package connection-0.2.3 ... linking ... done.
+Loading package case-insensitive-1.2.0.1 ... linking ... done.
+Loading package cookie-0.4.1.3 ... linking ... done.
+Loading package http-types-0.8.5 ... linking ... done.
+Loading package mime-types-0.1.0.4 ... linking ... done.
+Loading package network-uri-2.6.0.1 ... linking ... done.
+Loading package utf8-string-0.3.8 ... linking ... done.
+Loading package publicsuffixlist-0.1 ... linking ... done.
+Loading package http-client-0.4.0 ... linking ... done.
+Loading package http-client-tls-0.2.2 ... linking ... done.
+Loading package MonadRandom-0.3 ... linking ... done.
+Loading package either-4.3.1 ... linking ... done.
+Loading package safe-0.3.8 ... linking ... done.
+Loading package errors-1.4.7 ... linking ... done.
+[2 of 2] Compiling Network.Protocol.HTTP.DAV ( Network/Protocol/HTTP/DAV.hs, dist/build/Network/Protocol/HTTP/DAV.o )
+
+Network/Protocol/HTTP/DAV.hs:80:1: Warning:
+ The import of ‘unauthorized401’
+ from module ‘Network.HTTP.Types’ is redundant
+
+Network/Protocol/HTTP/DAV.hs:92:95: Warning:
+ ‘DAVT’ is an instance of MonadPlus but not Alternative - this will become an error in GHC 7.10, under the Applicative-Monad Proposal.
+
+Network/Protocol/HTTP/DAV.hs:213:1: Warning:
+ Defined but not used: ‘supportsCalDAV’
+
+Network/Protocol/HTTP/DAV.hs:88:10: Warning:
+ Orphan instance: instance Default DAVContext
+
+Network/Protocol/HTTP/DAV.hs:95:10: Warning:
+ Orphan instance: instance MonadMask m => MonadMask (EitherT e m)
+In-place registering DAV-1.0.2...
+Preprocessing executable 'hdav' for DAV-1.0.2...
+
+hdav.hs:33:8:
+ Could not find module ‘Network.URI’
+ It is a member of the hidden package ‘network-uri-2.6.0.1’.
+ Perhaps you need to add ‘network-uri’ to the build-depends in your .cabal file.
+ Use -v to see a list of the files searched for.
+cabal: Error: some packages failed to install:
+DAV-1.0.2 failed during the building phase. The exception was:
+ExitFailure 1
+git-annex-5.20140919 depends on DAV-1.0.2 which failed to install.
+gnuidn-0.2.1 failed during the configure step. The exception was:
+ExitFailure 1
+gsasl-0.3.5 failed during the configure step. The exception was:
+ExitFailure 1
+hS3-0.5.8 failed during the building phase. The exception was:
+ExitFailure 1
+network-protocol-xmpp-0.4.6 depends on gnuidn-0.2.1 which failed to install.
+
+# End of transcript or log.
+"""]]
+
+> This is a bug in hS3, not in git-annex. hS3 needs to be updated
+> per the example at <http://hackage.haskell.org/package/network>.
+> Email sent to hS3 author; [[done]]. --[[Joey]]
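+
+For reference, the recipe on that page amounts to a cabal flag along these lines (a sketch of the usual network/network-uri split, not the exact hS3 patch):
+
+    flag network-uri
+      description: Get Network.URI from the network-uri package
+      default: True
+
+    library
+      if flag(network-uri)
+        build-depends: network >= 2.6, network-uri >= 2.6
+      else
+        build-depends: network < 2.6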
diff --git a/doc/bugs/cannot_add_local_readonly_repo_through_the_webapp.mdwn b/doc/bugs/cannot_add_local_readonly_repo_through_the_webapp.mdwn
new file mode 100644
index 000000000..9b1f726cf
--- /dev/null
+++ b/doc/bugs/cannot_add_local_readonly_repo_through_the_webapp.mdwn
@@ -0,0 +1,98 @@
+### Please describe the problem.
+
+A readonly repository that I can add fine on the commandline (and sync content from) cannot be added through the webapp.
+
+### What steps will reproduce the problem?
+
+Say I have a readonly (owned by root) repository in `~/test/a` and I create a `~/test/b` (owned by my user). In the webapp, when I try to add `/home/anarcat/test/a` as a "local repository" (`Add another local repository`) to the `~/test/b` repo, it fails when I enter that path, with "Cannot write a repository there." I obviously can't sync content from there then.
+
+This works on the commandline, although with warnings.
+
+### What version of git-annex are you using? On what operating system?
+
+Version: 5.20140927
+Build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV Inotify DBus DesktopNotify XMPP DNS Feeds Quvi TDFA CryptoHash
+
+Debian Jessie.
+
+### Please provide any additional information below.
+
+Here's the transcript of the commandline equivalent:
+
+~~~
+anarcat@marcos:test$ git init a
+Dépôt Git vide initialisé dans /home/anarcat/test/a/.git/
+anarcat@marcos:test$ git init b
+Dépôt Git vide initialisé dans /home/anarcat/test/b/.git/
+anarcat@marcos:test$ cd a
+anarcat@marcos:a$ git annex init
+init ok
+(Recording state in git...)
+anarcat@marcos:a$ echo hellow world > README
+anarcat@marcos:a$ git annex add README
+add README ok
+(Recording state in git...)
+anarcat@marcos:a$ git commit -m"test repo a"
+[master (commit racine) 3ece2a1] test repo a
+ 1 file changed, 1 insertion(+)
+ create mode 120000 README
+anarcat@marcos:a$ cd ../ ^C
+anarcat@marcos:a$ sudo chown -R root .
+[sudo] password for anarcat:
+Sorry, try again.
+[sudo] password for anarcat:
+anarcat@marcos:a$ cd ../b
+anarcat@marcos:b$ git annex init
+init ok
+(Recording state in git...)
+anarcat@marcos:b$ git remote add a ../a
+anarcat@marcos:b$ git annex sync a
+commit ok
+pull a
+warning: no common commits
+remote: Décompte des objets: 13, fait.
+remote: Compression des objets: 100% (9/9), fait.
+remote: Total 13 (delta 1), reused 0 (delta 0)
+Dépaquetage des objets: 100% (13/13), fait.
+Depuis ../a
+ * [nouvelle branche] git-annex -> a/git-annex
+ * [nouvelle branche] master -> a/master
+
+
+merge: refs/remotes/a/synced/master - not something we can merge
+failed
+(merging a/git-annex into git-annex...)
+(Recording state in git...)
+push a
+Décompte des objets: 8, fait.
+Delta compression using up to 2 threads.
+Compression des objets: 100% (6/6), fait.
+Écriture des objets: 100% (8/8), 819 bytes | 0 bytes/s, fait.
+Total 8 (delta 1), reused 0 (delta 0)
+remote: error: insufficient permission for adding an object to repository database objects
+remote: fatal: failed to write object
+error: unpack failed: unpack-objects abnormal exit
+To ../a
+ ! [remote rejected] git-annex -> synced/git-annex (unpacker error)
+ ! [remote rejected] master -> synced/master (unpacker error)
+error: impossible de pousser des références vers '../a'
+
+ Pushing to a failed.
+
+ (non-fast-forward problems can be solved by setting receive.denyNonFastforwards to false in the remote's git config)
+failed
+git-annex: sync: 2 failed
+anarcat@marcos:b$ ls
+README
+anarcat@marcos:b$ git annex copy --from a
+copy README (from a...) ok
+(Recording state in git...)
+anarcat@marcos:b$ ls -al
+total 16K
+drwxr-xr-x 3 anarcat anarcat 4096 oct. 20 15:36 .
+drwxr-xr-x 4 anarcat anarcat 4096 oct. 20 15:35 ..
+drwxr-xr-x 9 anarcat anarcat 4096 oct. 20 15:36 .git
+lrwxrwxrwx 1 anarcat anarcat 180 oct. 20 15:36 README -> .git/annex/objects/wz/Zq/SHA256E-s13--8c083c6897455257dfbace7a9012d92ca8ebfb6e6ebe8acddc6dfa8fc81226ed/SHA256E-s13--8c083c6897455257dfbace7a9012d92ca8ebfb6e6ebe8acddc6dfa8fc81226ed
+~~~
+
+This is part of the [[todo/read-only_removable_drives/]] series. --[[anarcat]]
diff --git a/doc/bugs/fatal:_Cannot_handle_files_this_big.mdwn b/doc/bugs/fatal:_Cannot_handle_files_this_big.mdwn
new file mode 100644
index 000000000..7272bfc29
--- /dev/null
+++ b/doc/bugs/fatal:_Cannot_handle_files_this_big.mdwn
@@ -0,0 +1,96 @@
+### Please describe the problem.
+
+Syncing a 20GB video file causes this error. I have no problems with 8GB files.
+
+### What steps will reproduce the problem?
+
+See additional info
+
+### What version of git-annex are you using? On what operating system?
+
+git-annex version: 5.20140920-gb0c4300
+build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV DNS Feeds Quvi TDFA CryptoHash
+key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
+remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
+local repository version: 5
+supported repository version: 5
+upgrade supported from repository versions: 2 3 4
+
+git version 1.9.4.msysgit.2
+
+Windows 7 64bit
+
+### Please provide any additional information below.
+
+[[!format sh """
+
+Z:\>git clone L:\repositories\bigFilesTest.git-annex
+Cloning into 'bigFilesTest.git-annex'...
+done.
+
+Z:\>cd bigFilesTest.git-annex
+
+Z:\bigFilesTest.git-annex>git annex init "cloned"
+init cloned
+ Detected a filesystem without fifo support.
+
+ Disabling ssh connection caching.
+
+ Detected a crippled filesystem.
+
+ Enabling direct mode.
+ok
+(Recording state in git...)
+
+Z:\bigFilesTest.git-annex>git annex add test20GBVideo.mkv
+add test20GBVideo.mkv ok
+(Recording state in git...)
+
+Z:\bigFilesTest.git-annex>git annex sync --debug
+[2014-10-18 15:39:02 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","show-ref","git-annex"]
+[2014-10-18 15:39:02 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","show-ref","--hash","refs/heads/git-annex"]
+[2014-10-18 15:39:02 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","log","refs/heads/git-annex..54de336a3423f7f8f72f897effd29f952534c24e","-n1","--pretty=%H"]
+[2014-10-18 15:39:02 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","log","refs/heads/git-annex..53cfcf38b40247b3992b6007336b2c915a945ad4","-n1","--pretty=%H"]
+[2014-10-18 15:39:02 Mitteleuropäische Sommerzeit] chat: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","cat-file","--batch"]
+[2014-10-18 15:39:02 Mitteleuropäische Sommerzeit] read: git ["config","--null","--list"]
+commit [2014-10-18 15:39:02 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","ls-files","--stage","-z","--others","--exclude-standard","--","Z:\\bigFilesTest.git-annex"]
+[2014-10-18 15:39:02 Mitteleuropäische Sommerzeit] chat: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","cat-file","--batch"]
+(Recording state in git...)
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] call: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","add","-f","test20GBVideo.mkv"]
+fatal: Cannot handle files this big
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","show-ref","--head"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","diff-index","-z","--raw","--no-renames","-l0","--cached","HEAD"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","symbolic-ref","HEAD"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","show-ref","--hash","refs/heads/annex/direct/master"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","write-tree"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","rev-parse","b12e8477242df97be13c1395db143f860ce8e895:"]
+ok
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","symbolic-ref","HEAD"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","show-ref","refs/heads/annex/direct/master"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] call: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","show-ref","--verify","-q","refs/heads/synced/master"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","log","refs/heads/annex/direct/master..refs/heads/synced/master","-n1","--pretty=%H"]
+pull origin
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] call: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","fetch","origin"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] call: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","show-ref","--verify","-q","refs/remotes/origin/annex/direct/master"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] call: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","show-ref","--verify","-q","refs/remotes/origin/synced/master"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","log","refs/heads/synced/master..refs/remotes/origin/synced/master","-n1","--pretty=%H"]
+ok
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","show-ref","git-annex"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","show-ref","--hash","refs/heads/git-annex"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","log","refs/heads/git-annex..54de336a3423f7f8f72f897effd29f952534c24e","-n1","--pretty=%H"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","log","refs/heads/git-annex..53cfcf38b40247b3992b6007336b2c915a945ad4","-n1","--pretty=%H"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] call: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","branch","-f","synced/master"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] call: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","branch","-f","master"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] call: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","show-ref","--verify","-q","refs/remotes/origin/synced/master"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","log","refs/remotes/origin/synced/master..refs/heads/synced/master","-n1","--pretty=%H"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] call: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","show-ref","--verify","-q","refs/remotes/origin/git-annex"]
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","log","refs/remotes/origin/git-annex..git-annex","-n1","--pretty=%H"]
+push origin
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] call: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","push","origin","+git-annex:synced/git-annex","annex/direct/master:synced/master"]
+Everything up-to-date
+[2014-10-18 15:39:03 Mitteleuropäische Sommerzeit] read: git ["--git-dir=Z:\\bigFilesTest.git-annex\\.git","--work-tree=Z:\\bigFilesTest.git-annex","-c","core.bare=false","push","origin","master"]
+ok
+
+
+
+"""]]
diff --git a/doc/bugs/get_from_glacier_fails_too_early.mdwn b/doc/bugs/get_from_glacier_fails_too_early.mdwn
new file mode 100644
index 000000000..489240330
--- /dev/null
+++ b/doc/bugs/get_from_glacier_fails_too_early.mdwn
@@ -0,0 +1,72 @@
+### Please describe the problem.
+
+In order to test the integrity of my file backup on glacier,
+I initiated a get of a single file from glacier via:
+
+ $ git annex get --from=glacier localdir/myfile.jpg
+
+A check with
+
+ $ glacier job list
+
+confirmed that a job was in progress.
+
+Then, after a couple of hours' wait, the job was complete:
+[[!format sh """
+[ben@voyagerS9 annex]$ glacier job list
+i/d 2014-10-16T20:25:23.068Z glacier-bbbbbbbb-bbbb-bbbb-bbbb-MYVAULTbbbbb
+a/d 2014-10-16T20:30:13.086Z glacier-bbbbbbbb-bbbb-bbbb-bbbb-MYVAULTbbbbb GPGHMACSHA1--cccccccccccc
+"""]]
+
+So I enter the get command again:
+[[!format sh """
+[ben@voyagerS9 annex]$ git annex get --from=glacier localdir/myfile.jpg
+get localdir/myfile.jpg (from glacier...) (gpg)
+failed
+git-annex: get: 1 failed
+[ben@voyagerS9 annex]$
+"""]]
+
+The command fails immediately after I enter the gpg passphrase, releasing the shell.
+But in the background glacier-cli is still running; it downloads the file from Amazon
+and then dumps the gpg-encrypted file content into the terminal
+(4 MB of binary garbage on the screen).
+
+git annex should not fail so early; it should wait until the data starts arriving and pipe it into gpg.
+
+### What version of git-annex are you using? On what operating system?
+Arch Linux git-annex-bin package.
+[[!format sh """
+[ben@voyagerS9 annex]$ git annex version
+git-annex version: 5.20140920-gb0c4300
+build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV Inotify DBus DesktopNotify XMPP DNS Feeds Quvi TDFA CryptoHash
+key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
+remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
+local repository version: 5
+supported repository version: 5
+upgrade supported from repository versions: 0 1 2 4
+[ben@voyagerS9 annex]$ gpg --version
+gpg (GnuPG) 2.0.26
+libgcrypt 1.6.2
+"""]]
+
+### Possibly related information about the annexed repo and its history
+The file was uploaded sometime earlier this year with a different version of git-annex: an older source package for Arch Linux, built with Haskell packages from the Arch haskell repos.
+
+The special glacier remote was initially set up with an old gpg key (hybrid encryption), which is still in my keychain but has expired. I replaced the key with a new one by running:
+
+ $ git annex enableremote glacier keyid+=NEWKEY keyid-=OLDKEY
+
+I don't know why, but my AWS credentials no longer seem to be embedded in the git repo. A glacier upload (copy --to=) only succeeds with the AWS credential environment variables explicitly set.
+
+I tried
+
+ $ git annex enableremote embedcreds=yes
+
+with no noticeable change.
+I had changed the AWS credentials a while ago.
+
+Tomorrow I will try to download a just recently uploaded file with the current credentials and keys.
+
+> [[done]]; I am now confident that I understand this failure on retrieval,
+> and that I've fixed it. --[[Joey]]
diff --git a/doc/bugs/get_from_glacier_fails_too_early/comment_1_5a4e37fef629e07dce6b83ae311d1b03._comment b/doc/bugs/get_from_glacier_fails_too_early/comment_1_5a4e37fef629e07dce6b83ae311d1b03._comment
new file mode 100644
index 000000000..736457f2a
--- /dev/null
+++ b/doc/bugs/get_from_glacier_fails_too_early/comment_1_5a4e37fef629e07dce6b83ae311d1b03._comment
@@ -0,0 +1,14 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 1"""
+ date="2014-10-20T18:54:43Z"
+ content="""
+Wow, the code seems to neglect to actually set up a pipe from glacier-cli's
+stdout. It seems this broke quite a while ago, in
+[[!commit fb19d56476bb6eb5aa4d794a10199adb267d5870]] and nobody noticed.
+
+I have committed what should be a fix, but it's pretty hard for me to test
+this. Can you please either test the current daily autobuild for linux
+amd64 (should be ready about 15 minutes after I post this comment), or
+build git-annex from master and test?
+"""]]
diff --git a/doc/bugs/get_from_glacier_fails_too_early/comment_2_da065d367d0a3c91e4957f588f36dc67._comment b/doc/bugs/get_from_glacier_fails_too_early/comment_2_da065d367d0a3c91e4957f588f36dc67._comment
new file mode 100644
index 000000000..2afe60195
--- /dev/null
+++ b/doc/bugs/get_from_glacier_fails_too_early/comment_2_da065d367d0a3c91e4957f588f36dc67._comment
@@ -0,0 +1,9 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""creds"""
+ date="2014-10-20T19:09:28Z"
+ content="""
+Since you are using gpg encryption, your repository may have
+[[upgrades/insecure_embedded_creds]]. Strongly suggest you check if it
+does.
+"""]]
diff --git a/doc/bugs/get_from_glacier_fails_too_early/comment_3_1b49cd66a612bb46da5b73c83ab14688._comment b/doc/bugs/get_from_glacier_fails_too_early/comment_3_1b49cd66a612bb46da5b73c83ab14688._comment
new file mode 100644
index 000000000..ccdd94848
--- /dev/null
+++ b/doc/bugs/get_from_glacier_fails_too_early/comment_3_1b49cd66a612bb46da5b73c83ab14688._comment
@@ -0,0 +1,11 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawkck-Tokgfh_1Fwh6pkl69xPA_dYUgA4Tg"
+ nickname="Benjamin"
+ subject="autobuild"
+ date="2014-10-20T22:39:25Z"
+ content="""
+Okay, where do I get the newest build? I cannot find a link to a packaged file at http://downloads.kitenet.net/git-annex/autobuild/amd64/
+
+I'd rather not build it from master myself, as all the Haskell dependencies are not well supported on Arch Linux. That's why I switched from the git-annex package to the git-annex-bin package in AUR in the first place.
+
+"""]]
diff --git a/doc/bugs/get_from_glacier_fails_too_early/comment_4_a20b46a5e9c1c72a484962f3539d3b3e._comment b/doc/bugs/get_from_glacier_fails_too_early/comment_4_a20b46a5e9c1c72a484962f3539d3b3e._comment
new file mode 100644
index 000000000..c46042d4e
--- /dev/null
+++ b/doc/bugs/get_from_glacier_fails_too_early/comment_4_a20b46a5e9c1c72a484962f3539d3b3e._comment
@@ -0,0 +1,7 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 4"""
+ date="2014-10-21T16:31:33Z"
+ content="""
+http://git-annex.branchable.com/install/Linux_standalone/
+"""]]
diff --git a/doc/bugs/get_from_glacier_fails_too_early/comment_5_488bb44796e6a4e16f7bfc1f229233e7._comment b/doc/bugs/get_from_glacier_fails_too_early/comment_5_488bb44796e6a4e16f7bfc1f229233e7._comment
new file mode 100644
index 000000000..c67f60288
--- /dev/null
+++ b/doc/bugs/get_from_glacier_fails_too_early/comment_5_488bb44796e6a4e16f7bfc1f229233e7._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.96"
+ subject="comment 5"
+ date="2014-10-21T19:59:06Z"
+ content="""
+Recent autobuilds will also print out some useful info when you run `git annex info glacier`, including where it's getting the AWS credentials from.
+"""]]
diff --git a/doc/bugs/get_from_glacier_fails_too_early/comment_6_9c8f262b3d8b37f2e68108337acbd303._comment b/doc/bugs/get_from_glacier_fails_too_early/comment_6_9c8f262b3d8b37f2e68108337acbd303._comment
new file mode 100644
index 000000000..48e9f5335
--- /dev/null
+++ b/doc/bugs/get_from_glacier_fails_too_early/comment_6_9c8f262b3d8b37f2e68108337acbd303._comment
@@ -0,0 +1,48 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawkck-Tokgfh_1Fwh6pkl69xPA_dYUgA4Tg"
+ nickname="Benjamin"
+ subject="autobuild test"
+ date="2014-10-21T22:09:47Z"
+ content="""
+Okay, I managed to package the autobuild for my Arch system and installed it. Here is what I get when retrieving the finished glacier retrieval jobs that were started yesterday:
+
+Without AWS credentials as environment variables, the call fails:
+[[!format sh \"\"\"
+[ben@voyagerS9 annextest]$ git annex get --from=glacier mydir/myfile1
+get mydir/myfile (from glacier...) (gpg)
+['/usr/local/bin/glacier', '--region=us-east-1', 'archive', 'retrieve', '-o-', 'glacier-myvault', 'GPGHMACSHA1--4286b1a121892c9e64de436725478b0bc5038e67']
+glacier: archive 'GPGHMACSHA1--4286b1a121892c9e64de436725478b0bc5038e67' not found
+failed
+git-annex: get: 1 failed
+\"\"\"]]
+
+I patched the glacier-cli Python source so that it prints out its command arguments (argv).
+The archive _does_ exist. Executing the glacier-cli command manually succeeds. So does calling
+git-annex with the AWS credentials exported into the environment:
+
+[[!format sh \"\"\"
+[ben@voyagerS9 annextest]$ git annex get --from=glacier mydir/myfile2
+get mydir/myfile2 (from glacier...) (gpg)
+['/usr/local/bin/glacier', '--region=us-east-1', 'archive', 'retrieve', '-o-', 'glacier-myvault', 'GPGHMACSHA1--c3827c03d48b4829c7cc584778652c66e2784b0f']
+ok
+(Recording state in git...)
+\"\"\"]]
+
+So I guess one bug is fixed, although I think the error message is wrong.
+
+Regarding AWS credentials, I have had no success in updating the credentials or finding out which, if any, are embedded:
+[[!format sh \"\"\"
+[ben@voyagerS9 annextest]$ git annex info glacier
+remote: glacier
+description: [glacier]
+uuid: b4dcf525-40c7-4f04-86cc-3850d1260680
+cost: 1050.0
+type: glacier
+glacier vault: glacier-myvault
+encryption: encrypted (to gpg keys: MYKEY)
+chunking: none
+\"\"\"]]
+
+When I check out the git-annex branch and look at remote.log, I see fields for cipher, cipherkeys, datacenter, embedcreds=yes, name, s3creds, type, vault, and timestamp.
+The s3creds field does not look like my current AWS credentials, at least not in plaintext.
+"""]]
diff --git a/doc/bugs/get_from_glacier_fails_too_early/comment_7_c96b71759fe0d2af450e321ca57edb46._comment b/doc/bugs/get_from_glacier_fails_too_early/comment_7_c96b71759fe0d2af450e321ca57edb46._comment
new file mode 100644
index 000000000..0cdef26b5
--- /dev/null
+++ b/doc/bugs/get_from_glacier_fails_too_early/comment_7_c96b71759fe0d2af450e321ca57edb46._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.96"
+ subject="comment 7"
+ date="2014-10-22T18:30:29Z"
+ content="""
+I forgot to include the creds in `git annex info` for glacier; fixed that now.
+
+It seems that changing the creds with `enableremote` did embed them into your git repository, but it neglected to update the .git/annex/creds/$remoteuuid file that caches the creds locally. So I think that your old creds are still cached there, and still being used, and this explains why the file is not found in glacier; the wrong creds are being used to access it! You can work around this by deleting the .git/annex/creds/$remoteuuid file corresponding to the uuid of the glacier remote. (You can also look at that file and compare it with what the creds are supposed to be.) I have fixed git-annex enableremote to update that creds file.
+
+Also, it looks like you did not fall afoul of the [[upgrades/insecure_embedded_creds]] problem! If you had, this new version of git-annex would be complaining that it had detected that problem. If you want to double-check that, the s3creds= value is base64 encoded, and when run through `base64 -d`, it should yield a gpg encrypted file. If your repo did have that problem, it would instead decode to the creds in clear text.
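+
+A minimal way to do both checks, as a sketch ($remoteuuid stands for the glacier
+remote's uuid, and $s3creds for the s3creds= value copied from remote.log):
+
+    # the locally cached creds that git-annex is actually using
+    cat .git/annex/creds/$remoteuuid
+    # what is embedded in the repo, on the git-annex branch
+    git cat-file -p git-annex:remote.log
+    # the embedded value should decode to a gpg encrypted blob, not clear text
+    echo $s3creds | base64 -d | file -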
+"""]]
diff --git a/doc/bugs/git_annex_add_adds_unlocked_files.mdwn b/doc/bugs/git_annex_add_adds_unlocked_files.mdwn
new file mode 100644
index 000000000..4496f3469
--- /dev/null
+++ b/doc/bugs/git_annex_add_adds_unlocked_files.mdwn
@@ -0,0 +1,21 @@
+### Please describe the problem.
+
+git annex add . should ignore unlocked files
+
+### What steps will reproduce the problem?
+SEE NEXT COMMENT
+
+### What version of git-annex are you using? On what operating system?
+
+
+### Please provide any additional information below.
+
+[[!format sh """
+# If you can, paste a complete transcript of the problem occurring here.
+# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log
+
+
+# End of transcript or log.
+"""]]
+
+> [[done]] --[[Joey]]
diff --git a/doc/bugs/git_annex_add_adds_unlocked_files/comment_2_4b46116eabe61946ae65b293d7bbacb7._comment b/doc/bugs/git_annex_add_adds_unlocked_files/comment_2_4b46116eabe61946ae65b293d7bbacb7._comment
new file mode 100644
index 000000000..5eb35789b
--- /dev/null
+++ b/doc/bugs/git_annex_add_adds_unlocked_files/comment_2_4b46116eabe61946ae65b293d7bbacb7._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="behaving as intended"
+ date="2014-10-09T20:30:26Z"
+ content="""
+git-annex add is supposed to add unlocked files. See the documentation for the unlock command on the man page. Typical workflow is to unlock a file, edit it, add the changes, and commit it.
+
+Your example has 2 files with content \"foo\" and 1 file with content \"foobar\", which require 2 objects to be stored by git-annex, so that's what it stores.
+
+I suggest you get a bit more familiar with git-annex before filing bugs on it.
+"""]]
diff --git a/doc/bugs/git_annex_add_adds_unlocked_files/comment_2_d53d0710d6ad9f0fdc8a29a98647e94b._comment b/doc/bugs/git_annex_add_adds_unlocked_files/comment_2_d53d0710d6ad9f0fdc8a29a98647e94b._comment
new file mode 100644
index 000000000..904cc9ebb
--- /dev/null
+++ b/doc/bugs/git_annex_add_adds_unlocked_files/comment_2_d53d0710d6ad9f0fdc8a29a98647e94b._comment
@@ -0,0 +1,55 @@
+[[!comment format=sh
+ username="https://www.google.com/accounts/o8/id?id=AItOawn0hu_TPhLcUM1Ivvn7iIoZ_iD3g_5WDcs"
+ nickname="Greg"
+ subject="comment 2"
+ date="2014-10-06T19:20:26Z"
+ content="""
+ubuntu@hostname:~$ cd annex
+ubuntu@hostname:~/annex$ git init
+Initialized empty Git repository in /home/ubuntu/annex/.git/
+ubuntu@hostname:~/annex$ git annex init
+init ok
+ubuntu@hostname:~/annex$ echo foo > test.txt
+ubuntu@hostname:~/annex$ git annex add .
+add test.txt (checksum...) ok
+(Recording state in git...)
+ubuntu@hostname:~/annex$ git commit -a -m first
+[master (root-commit) fe54856] first
+ 1 file changed, 1 insertion(+)
+ create mode 120000 test.txt
+ubuntu@hostname:~/annex$ git annex unlock test.txt
+unlock test.txt (copying...) ok
+ubuntu@hostname:~/annex$ echo foobar > test.txt
+ubuntu@hostname:~/annex$ echo foo > test2.txt
+ubuntu@hostname:~/annex$ git annex add .
+add test2.txt (checksum...) ok
+add test.txt (checksum...) ok
+(Recording state in git...)
+ubuntu@hostname:~/annex$ git commit -a -m second
+[master 1776b25] second
+ 2 files changed, 2 insertions(+), 1 deletion(-)
+ create mode 120000 test2.txt
+ubuntu@hostname:~/annex$ tree -d ./git/annex
+./git/annex [error opening dir]
+
+0 directories
+ubuntu@hostname:~/annex$ tree -d .git/annex
+.git/annex
+├── journal
+├── objects
+│   ├── 8Z
+│   │   └── 1J
+│   │   └── SHA256-s4--b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c
+│   └── q2
+│   └── Xj
+│   └── SHA256-s7--aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f
+└── tmp
+
+9 directories
+ubuntu@hostname:~/annex$ ls
+test2.txt test.txt
+ubuntu@hostname:~/annex$
+
+
+I'm expecting 3 SHAs in .git/annex, but I only see two.
+"""]]
diff --git a/doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__.mdwn b/doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__.mdwn
index 5407db36a..e61d44883 100644
--- a/doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__.mdwn
+++ b/doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__.mdwn
@@ -36,3 +36,5 @@ git-annex: repair: 1 failed
# End of transcript or log.
"""]]
+
+> Provisionally [[done]]; see comment. --[[Joey]]
diff --git a/doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__/comment_3_7502f88ae1c46e070e7fdbd9b9c1b54d._comment b/doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__/comment_3_7502f88ae1c46e070e7fdbd9b9c1b54d._comment
new file mode 100644
index 000000000..db40450d0
--- /dev/null
+++ b/doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__/comment_3_7502f88ae1c46e070e7fdbd9b9c1b54d._comment
@@ -0,0 +1,22 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="relevant excerpt "
+ date="2014-10-12T18:08:55Z"
+ content="""
+<pre>
+[2014-08-20 09:05:45 MSK] call: git [\"--git-dir=/tmp/tmprepo.0/.git\",\"--work-tree=/tmp/tmprepo.0\",\"fetch\",\"ssh://crabman@git-annex-192.168.1.246-crabman_annex/~/annex/\",\"--force\",\"--update-head-ok\",\"--quiet\",\"+*:*\"]
+Auto packing the repository in background for optimum performance.
+See \"git help gc\" for manual housekeeping.
+[2014-08-20 09:05:50 MSK] call: rsync [\"-qr\",\"/tmp/tmprepo.0/.git/objects/\",\"/home/crabman/annex/.git/objects/\"]
+[2014-08-20 09:14:58 MSK] read: git [\"--git-dir=/home/crabman/annex/.git\",\"--work-tree=/home/crabman/annex\",\"-c\",\"core.bare=false\",\"show\",\"584a7836d05e6733224a53e5882547eeb87d43db\"]
+[2014-08-20 09:14:59 MSK] read: git [\"--git-dir=/home/crabman/annex/.git\",\"--work-tree=/home/crabman/annex\",\"-c\",\"core.bare=false\",\"show\",\"62fee7cc3ec6ea4c56ba42015ab9bf8f0f808dee\"]
+[2014-08-20 09:14:59 MSK] read: git [\"--git-dir=/home/crabman/annex/.git\",\"--work-tree=/home/crabman/annex\",\"-c\",\"core.bare=false\",\"show\",\"c7a698397328c71a33bbc2852fda8d09d52c4f38\"]
+Running git fsck ...
+Trying to recover missing objects from remote 192.168.1.246_annex.
+Unpacking all pack files.
+Trying to recover missing objects from remote 192.168.1.246_annex.
+
+git-annex: /tmp/tmprepo.0/.git/gc.pid: removeLink: does not exist (No such file or directory)
+</pre>
+"""]]
diff --git a/doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__/comment_4_9f67b14c9ac81f159439c5dff7354b8f._comment b/doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__/comment_4_9f67b14c9ac81f159439c5dff7354b8f._comment
new file mode 100644
index 000000000..8d15f5c57
--- /dev/null
+++ b/doc/bugs/git_annex_repair_fails_-___47__tmp__47__tmprepo.1__47__.git__47__gc.pid:_removeLink:_does_not_exist___40__No_such_file_or_directory__41__/comment_4_9f67b14c9ac81f159439c5dff7354b8f._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 4"
+ date="2014-10-12T18:27:39Z"
+ content="""
+So the auto gc was triggered by git fetch. I have made the repair code prevent that from happening. I expect that will solve the problem, so I am marking this bug as closed.
+
+However, I don't really understand what could have caused it to try to remove the gc.pid file. I tried creating such a pid file manually after the fetch, and it didn't try to remove it.
+"""]]
diff --git a/doc/bugs/git_annex_sync_--content_not_syncing_all_objects/comment_6_4540c31acd63626fbad9bde487ec3005._comment b/doc/bugs/git_annex_sync_--content_not_syncing_all_objects/comment_6_4540c31acd63626fbad9bde487ec3005._comment
new file mode 100644
index 000000000..54bce5df8
--- /dev/null
+++ b/doc/bugs/git_annex_sync_--content_not_syncing_all_objects/comment_6_4540c31acd63626fbad9bde487ec3005._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="stp"
+ ip="91.34.113.105"
+ subject="Any update"
+ date="2014-10-01T12:46:34Z"
+ content="""
+Any update?
+"""]]
diff --git a/doc/bugs/git_clone_ignores_annex.mdwn b/doc/bugs/git_clone_ignores_annex.mdwn
new file mode 100644
index 000000000..450a069ad
--- /dev/null
+++ b/doc/bugs/git_clone_ignores_annex.mdwn
@@ -0,0 +1,25 @@
+### Please describe the problem.
+
+More of a feature request than a bug. It would be nice if, when creating a local clone with git clone, this ran automatically:
+
+    ln -s ../../annex/.git/annex .git/annex
+
+to hook up the annex. Just a minor thing, but it'd be nice.
+
+### What steps will reproduce the problem?
+
+
+### What version of git-annex are you using? On what operating system?
+
+
+### Please provide any additional information below.
+
+[[!format sh """
+# If you can, paste a complete transcript of the problem occurring here.
+# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log
+
+
+# End of transcript or log.
+"""]]
+
+> [[done]] --[[Joey]]
diff --git a/doc/bugs/git_clone_ignores_annex/comment_1_18ba05c51f82ddadd2558f6cd789e394._comment b/doc/bugs/git_clone_ignores_annex/comment_1_18ba05c51f82ddadd2558f6cd789e394._comment
new file mode 100644
index 000000000..e0b69f81a
--- /dev/null
+++ b/doc/bugs/git_clone_ignores_annex/comment_1_18ba05c51f82ddadd2558f6cd789e394._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-06T15:42:26Z"
+ content="""
+Making that symlink is extremely unsafe! git-annex will see two repositories. So if a file is present in the annex, with only one actual copy existing, and you try to drop it, git-annex will go check the other repository, find the file there, assume this means there's an extra copy, and so proceed with the drop, which deletes the only existing copy of your file. So if you do this, you will likely eventually lose data.
+
+However, recent versions of git-annex will detect if you clone a git repository with `--shared` and automatically hard link files into the annex when getting them into that repository. They also mark the shared clone as untrusted, to avoid the above problem. This is a much better solution.
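+
+For example, a sketch (paths and the file name are placeholders):
+
+    git clone --shared /path/to/repo clone
+    cd clone
+    git annex init
+    git annex get somefile   # hard links the content from the source repo's annex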
+"""]]
diff --git a/doc/bugs/hS3_prevents_build.mdwn b/doc/bugs/hS3_prevents_build.mdwn
new file mode 100644
index 000000000..1a064b90b
--- /dev/null
+++ b/doc/bugs/hS3_prevents_build.mdwn
@@ -0,0 +1,3 @@
+The `hS3` dependency doesn't work with the `network` / `network-uri` split, which causes a build failure for `git-annex` in a fresh sandbox with its current version bounds. Either more gymnastics are needed to constrain `network` to accommodate `hS3`, or the `s3-aws` branch could be merged in if it's ready. Building `git-annex` with a `< 2.6` constraint on `network` does succeed.
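+
+For example, a constrained build in a fresh sandbox (illustrative invocation):
+
+    cabal sandbox init
+    cabal install --constraint='network < 2.6' git-annex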
+
+> not a bug in git-annex, but in a dependency it uses, so [[done]]. (I already told the hS3 author about this, which is a very easy fix there, and he promised to fix it soon.) --[[Joey]]
diff --git a/doc/bugs/incremental_fsck_should_not_use_sticky_bit/comment_7_f53d0542c9da38e0f6339df8c49c87db._comment b/doc/bugs/incremental_fsck_should_not_use_sticky_bit/comment_7_f53d0542c9da38e0f6339df8c49c87db._comment
new file mode 100644
index 000000000..6d6631bf0
--- /dev/null
+++ b/doc/bugs/incremental_fsck_should_not_use_sticky_bit/comment_7_f53d0542c9da38e0f6339df8c49c87db._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="stp"
+ ip="24.134.205.34"
+ subject="Any update"
+ date="2014-10-01T12:48:06Z"
+ content="""
+Any update?
+"""]]
diff --git a/doc/bugs/modified_permissions_persist_after_unlock__44___commit.mdwn b/doc/bugs/modified_permissions_persist_after_unlock__44___commit.mdwn
new file mode 100644
index 000000000..493443dd3
--- /dev/null
+++ b/doc/bugs/modified_permissions_persist_after_unlock__44___commit.mdwn
@@ -0,0 +1,40 @@
+### Please describe the problem.
+
+Modifying an annexed file with unlock then commit leaves the link with permissions 777 and git status reports a typechange, which makes checkout impossible. Resolves by running git unlock on the file.
+
+### What steps will reproduce the problem?
+
+echo foo > test.txt
+git annex add test.txt
+git commit -a -m "first"
+git annex unlock test.txt
+echo foobar > test.txt
+git commit -a -m "second"
+
+git status (notice typechange message)
+
+git unlock test.txt (corrects and retains both versions)
+
+### What version of git-annex are you using? On what operating system?
+
+git-annex version: 3.20120406
+local repository version: 3
+default repository version: 3
+supported repository versions: 3
+upgrade supported from repository versions: 0 1 2
+
+git version 1.7.9.5
+
+
+### Please provide any additional information below.
+
+[[!format sh """
+# If you can, paste a complete transcript of the problem occurring here.
+# If the problem is with the git-annex assistant, paste in .git/annex/daemon.log
+
+
+# End of transcript or log.
+"""]]
+
+[[!tag confirmed]]
+[[!meta title="git commit of unlocked file leaves typechange staged in index"]]
diff --git a/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_1_875ca12936d4b4505f2e280a454fe558._comment b/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_1_875ca12936d4b4505f2e280a454fe558._comment
new file mode 100644
index 000000000..535c20e4d
--- /dev/null
+++ b/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_1_875ca12936d4b4505f2e280a454fe558._comment
@@ -0,0 +1,16 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-06T15:47:39Z"
+ content="""
+I am unable to reproduce any problem with the steps you gave. I don't see any typechange message, and would not expect one. Perhaps your repository is lacking a .git/hooks/pre-commit script to run git-annex when you use `git commit -a`?
+
+It's not clear to me what problem you experienced, beyond the typechange message that I don't see.
+
+> git unlock test.txt (corrects and retains both versions)
+
+I don't understand that line at all. `git unlock` is not a valid git command, and what does \"corrects and retains both versions\" mean?
+
+Please provide an actual transcript of the problem, rather than the unclear description.
+"""]]
diff --git a/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_2_59f68098fa6edb2fe8902b120fda0280._comment b/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_2_59f68098fa6edb2fe8902b120fda0280._comment
new file mode 100644
index 000000000..7938ad9ad
--- /dev/null
+++ b/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_2_59f68098fa6edb2fe8902b120fda0280._comment
@@ -0,0 +1,94 @@
+[[!comment format=sh
+ username="https://www.google.com/accounts/o8/id?id=AItOawn0hu_TPhLcUM1Ivvn7iIoZ_iD3g_5WDcs"
+ nickname="Greg"
+ subject="comment 2"
+ date="2014-10-06T17:06:27Z"
+ content="""
+ubuntu@ip-10-170-13-124:~$ mkdir annex
+ubuntu@ip-10-170-13-124:~$ cd annex
+ubuntu@ip-10-170-13-124:~/annex$ ls
+ubuntu@ip-10-170-13-124:~/annex$ git init .
+Initialized empty Git repository in /home/ubuntu/annex/.git/
+ubuntu@ip-10-170-13-124:~/annex$ git annex init \"test annex\"
+init test annex ok
+ubuntu@ip-10-170-13-124:~/annex$ echo \"foo\" > test.txt
+ubuntu@ip-10-170-13-124:~/annex$ ls
+test.txt
+ubuntu@ip-10-170-13-124:~/annex$ ls -al
+total 16
+drwxrwxr-x 3 ubuntu ubuntu 4096 Oct 6 16:43 .
+drwxr-xr-x 7 ubuntu ubuntu 4096 Oct 6 16:42 ..
+drwxrwxr-x 9 ubuntu ubuntu 4096 Oct 6 16:42 .git
+-rw-rw-r-- 1 ubuntu ubuntu 4 Oct 6 16:43 test.txt
+ubuntu@ip-10-170-13-124:~/annex$ git annex add test.txt
+add test.txt (checksum...) ok
+(Recording state in git...)
+ubuntu@ip-10-170-13-124:~/annex$ ls -al
+total 16
+drwxrwxr-x 3 ubuntu ubuntu 4096 Oct 6 16:48 .
+drwxr-xr-x 7 ubuntu ubuntu 4096 Oct 6 16:42 ..
+drwxrwxr-x 9 ubuntu ubuntu 4096 Oct 6 16:48 .git
+lrwxrwxrwx 1 ubuntu ubuntu 176 Oct 6 16:43 test.txt -> .git/annex/objects/8Z/1J/SHA256-s4--b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c/SHA256-s4--b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c
+ubuntu@ip-10-170-13-124:~/annex$ git commit test.txt
+Aborting commit due to empty commit message.
+ubuntu@ip-10-170-13-124:~/annex$ git commit test.txt -m first
+[master (root-commit) 38a8e18] first
+ Committer: Ubuntu <ubuntu@ip-10-170-13-124.us-west-1.compute.internal>
+Your name and email address were configured automatically based
+on your username and hostname. Please check that they are accurate.
+You can suppress this message by setting them explicitly:
+
+ git config --global user.name \"Your Name\"
+ git config --global user.email you@example.com
+
+After doing this, you may fix the identity used for this commit with:
+
+ git commit --amend --reset-author
+
+ 1 file changed, 1 insertion(+)
+ create mode 120000 test.txt
+ubuntu@ip-10-170-13-124:~/annex$ ls -al
+total 16
+drwxrwxr-x 3 ubuntu ubuntu 4096 Oct 6 16:48 .
+drwxr-xr-x 7 ubuntu ubuntu 4096 Oct 6 16:42 ..
+drwxrwxr-x 9 ubuntu ubuntu 4096 Oct 6 16:49 .git
+lrwxrwxrwx 1 ubuntu ubuntu 176 Oct 6 16:43 test.txt -> .git/annex/objects/8Z/1J/SHA256-s4--b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c/SHA256-s4--b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c
+ubuntu@ip-10-170-13-124:~/annex$ git annex unlock test.txt
+unlock test.txt (copying...) ok
+ubuntu@ip-10-170-13-124:~/annex$ cat test.txt
+foo
+ubuntu@ip-10-170-13-124:~/annex$ echo foobar > test.txt
+ubuntu@ip-10-170-13-124:~/annex$ git commit test.txt -m second
+add test.txt (checksum...) ok
+ok
+(Recording state in git...)
+[master f265461] second
+ Committer: Ubuntu <ubuntu@ip-10-170-13-124.us-west-1.compute.internal>
+Your name and email address were configured automatically based
+on your username and hostname. Please check that they are accurate.
+You can suppress this message by setting them explicitly:
+
+ git config --global user.name \"Your Name\"
+ git config --global user.email you@example.com
+
+After doing this, you may fix the identity used for this commit with:
+
+ git commit --amend --reset-author
+
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+ubuntu@ip-10-170-13-124:~/annex$ git status
+# On branch master
+# Changes to be committed:
+# (use \"git reset HEAD <file>...\" to unstage)
+#
+# typechange: test.txt
+#
+# Changes not staged for commit:
+# (use \"git add <file>...\" to update what will be committed)
+# (use \"git checkout -- <file>...\" to discard changes in working directory)
+#
+# typechange: test.txt
+#
+ubuntu@ip-10-170-13-124:~/annex$
+
+"""]]
diff --git a/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_3_22df91abd8c025000e67bdcef891de3b._comment b/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_3_22df91abd8c025000e67bdcef891de3b._comment
new file mode 100644
index 000000000..4b0270178
--- /dev/null
+++ b/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_3_22df91abd8c025000e67bdcef891de3b._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawn0hu_TPhLcUM1Ivvn7iIoZ_iD3g_5WDcs"
+ nickname="Greg"
+ subject="comment 3"
+ date="2014-10-06T17:07:46Z"
+ content="""
+Looks like it's doing it when you specifically commit a filename.
+"""]]
diff --git a/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_4_ecf84eeb4feddafcfa7ba7d4a2f164b1._comment b/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_4_ecf84eeb4feddafcfa7ba7d4a2f164b1._comment
new file mode 100644
index 000000000..4ccf2a3a9
--- /dev/null
+++ b/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_4_ecf84eeb4feddafcfa7ba7d4a2f164b1._comment
@@ -0,0 +1,13 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 4"
+ date="2014-10-09T20:43:34Z"
+ content="""
+Ah, ok. git's index has the file listed as not being a symlink, because `git commit $file` stages it in the index that way. Running `git reset --hard` will fix git's index.
+
+This problem is avoided if you `git annex add $file` before committing, which is generally a good idea
+for other reasons, including avoiding staging a potentially huge file's contents in the git index in the first place.
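+
+For example, a sketch of that workflow:
+
+    git annex unlock file
+    # ... edit file ...
+    git annex add file
+    git commit -m 'update file'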
+
+git-annex's pre-commit hook should probably update the git index for the committed files, replacing the staged full file contents with the git-annex symlink. That would avoid this problem.
+"""]]
diff --git a/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_5_2ea1d78ec8a652a53391969e43bcb6f0._comment b/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_5_2ea1d78ec8a652a53391969e43bcb6f0._comment
new file mode 100644
index 000000000..72495fe66
--- /dev/null
+++ b/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_5_2ea1d78ec8a652a53391969e43bcb6f0._comment
@@ -0,0 +1,39 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 5"
+ date="2014-10-10T00:01:54Z"
+ content="""
+Actually, the pre-commit hook does stage the annexed symlink into the index. But it seems that `git commit $file` causes the pre-commit hook's changes to the index to be partially ignored, in a way that `git commit -a` does not.
+
+While the pre-commit hook is running, `git commit -a` sets `GIT_INDEX_FILE=index.lock`, while `git commit $file` instead sets `GIT_INDEX_FILE=next-index-$pid.lock`. git's builtin/commit.c refers to this latter file as the \"false index\". Full comment from git:
+
+<pre>
+ /*
+ * A partial commit.
+ *
+ * (0) find the set of affected paths;
+ * (1) get lock on the real index file;
+ * (2) update the_index with the given paths;
+ * (3) write the_index out to the real index (still locked);
+ * (4) get lock on the false index file;
+ * (5) reset the_index from HEAD;
+ * (6) update the_index the same way as (2);
+ * (7) write the_index out to the false index file;
+ * (8) return the name of the false index file (still locked);
+ *
+ * The caller should run hooks on the locked false index, and
+ * create commit from it. Then
+ * (A) if all goes well, commit the real index;
+ * (B) on failure, rollback the real index;
+ * In either case, rollback the false index.
+ */
+
+</pre>
+
+So, the pre-commit hook is run on the false index, which has been reset to HEAD. The changes it stages are committed, but do not affect the real index. If I read that comment right, the commit from the false index is then supposed to be committed on the real index, but it seems in this case the real index does not get updated to reflect the changes.
+
+This seems to be a bug in git. I reproduced it without git-annex, and sent a bug report to the git mailing list.
+
+Depending on what happens, this might just get fixed in git. Or, I might need to make git-annex detect this case (by looking at what `GIT_INDEX_FILE` is set to) and have the pre-commit hook cancel the commit.
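+
+A minimal way to observe the difference, as a sketch: add a line like this near
+the top of the repository's .git/hooks/pre-commit hook:
+
+    echo pre-commit GIT_INDEX_FILE=$GIT_INDEX_FILE >&2
+
+Running `git commit -a` versus `git commit $file` then shows the two different
+index files described above.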
+"""]]
diff --git a/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_6_2a3ad3f95ee03c79404e3784c9ce1a4b._comment b/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_6_2a3ad3f95ee03c79404e3784c9ce1a4b._comment
new file mode 100644
index 000000000..ef86e4880
--- /dev/null
+++ b/doc/bugs/modified_permissions_persist_after_unlock__44___commit/comment_6_2a3ad3f95ee03c79404e3784c9ce1a4b._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 6"
+ date="2014-10-10T17:26:04Z"
+ content="""
+upstream bug: <http://www.mail-archive.com/git@vger.kernel.org/msg59587.html>
+"""]]
diff --git a/doc/bugs/present_files__47__directories_are_dropped_after_a_sync.mdwn b/doc/bugs/present_files__47__directories_are_dropped_after_a_sync.mdwn
index 432ab9050..69c029cd9 100644
--- a/doc/bugs/present_files__47__directories_are_dropped_after_a_sync.mdwn
+++ b/doc/bugs/present_files__47__directories_are_dropped_after_a_sync.mdwn
@@ -1,6 +1,6 @@
### Please describe the problem.
-This is a followup from the discussion on https://git-annex.branchable.com/forum/Standard_groups__47__preferred_contents/ where I unfortunately did not get a complete answer.
+This is a followup from the discussion on <https://git-annex.branchable.com/forum/Standard_groups__47__preferred_contents/> where I unfortunately did not get a complete answer.
I don't know if it is really a bug but at least it does not work as I would expect and the documentation provides no clear discussion on that.
Now to the problem:
@@ -36,3 +36,6 @@ Similarly for directories:
key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
+
+[[!meta title="manual mode preferred content expression does not want newer versions of present files"]]
+[[!tag confirmed]]
diff --git a/doc/bugs/present_files__47__directories_are_dropped_after_a_sync/comment_1_9d7591faf99ce48b1e5753c80306ae8b._comment b/doc/bugs/present_files__47__directories_are_dropped_after_a_sync/comment_1_9d7591faf99ce48b1e5753c80306ae8b._comment
new file mode 100644
index 000000000..bb8133354
--- /dev/null
+++ b/doc/bugs/present_files__47__directories_are_dropped_after_a_sync/comment_1_9d7591faf99ce48b1e5753c80306ae8b._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlM_DRhi_5pJrTA0HbApHR25iAgy-NBXTY"
+ nickname="Tor Arne"
+ subject="comment 1"
+ date="2014-10-01T22:25:24Z"
+ content="""
+Have you found a solution for this? This seems useful if you're only interested in a subset of files/directories on, e.g., your laptop, but you'd want the files that are fetched (present) and that you are interested in to be kept up to date (in sync) with other computers?
+
+Btw, the link to the previous discussion didn't work for me.
+"""]]
diff --git a/doc/bugs/present_files__47__directories_are_dropped_after_a_sync/comment_2_7316cba69b9dc0415fea1389238edf25._comment b/doc/bugs/present_files__47__directories_are_dropped_after_a_sync/comment_2_7316cba69b9dc0415fea1389238edf25._comment
new file mode 100644
index 000000000..194f7a3d9
--- /dev/null
+++ b/doc/bugs/present_files__47__directories_are_dropped_after_a_sync/comment_2_7316cba69b9dc0415fea1389238edf25._comment
@@ -0,0 +1,14 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.96"
+ subject="comment 2"
+ date="2014-10-21T20:20:25Z"
+ content="""
+The problem is that there's no way for preferred content expressions to specify that a file is wanted just because some old version of the file is (or was) present.
+
+It's not clear to me how that could be added to the preferred content expressions in an efficient way.
+
+It might be possible to hack `git annex sync --content` and the assistant to look at incoming merges, and queue downloads of newer versions of files before merging.
+
+Also being discussed at <https://github.com/datalad/datalad/issues/6>.
+"""]]
diff --git a/doc/bugs/problems_with_enableremote_on_gcrypt_remote___40__hosted_with_gitolite__41__.mdwn b/doc/bugs/problems_with_enableremote_on_gcrypt_remote___40__hosted_with_gitolite__41__.mdwn
new file mode 100644
index 000000000..5dcf732a6
--- /dev/null
+++ b/doc/bugs/problems_with_enableremote_on_gcrypt_remote___40__hosted_with_gitolite__41__.mdwn
@@ -0,0 +1,105 @@
+### Please describe the problem.
+
+I cannot enable an existing gcrypt special remote after successfully cloning the git repository; I get this error: "git-annex: uuid mismatch ..." at the end of the enableremote command (see transcript for details).
+
+Maybe it's my fault, but I cannot understand what I'm doing wrong.
+
+### What steps will reproduce the problem?
+
+1. cloned the encrypted repository with: "git clone gcrypt::git.myserver.net:myrepo TEST-myrepo.annex"
+
+2. enabled the special remote with: "git annex enableremote backup type=gcrypt encryption=hybrid gitrepo=git.myserver.net:myrepo"
+
+### What version of git-annex are you using? On what operating system?
+
+[[!format sh """
+git-annex version: 5.20140927~bpo70+2
+build flags: Assistant Pairing S3 Inotify XMPP Feeds Quvi TDFA
+key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SHA256 SHA1 SHA512 SHA224 SHA384 WORM URL
+remote types: git gcrypt S3 bup directory rsync web tahoe glacier ddar hook external
+local repository version: 5
+supported repository version: 5
+upgrade supported from repository versions: 0 1 2 4
+"""]]
+
+### Please provide any additional information below.
+
+[[!format sh """
+
+# transcript of commands and results
+
+(cloning)
+g@renaissance:~$ git clone gcrypt::git.myserver.net:DMS-myrepo TEST-myrepo.annex
+Cloning into 'TEST-myrepo.annex'...
+gcrypt: Development version -- Repository format MAY CHANGE
+gcrypt: Decrypting manifest
+gpg: Signature made Thu 16 Oct 2014 12:58:33 CEST
+[...]
+gcrypt: Remote ID is :id:8sucFsBZIGQKXFv5ecSW
+Receiving objects: 100% (3531/3531), 245.40 KiB | 0 bytes/s, done.
+Resolving deltas: 100% (1382/1382), done.
+[...]
+Receiving objects: 100% (636/636), 66.78 KiB | 0 bytes/s, done.
+Resolving deltas: 100% (209/209), done.
+Checking connectivity... done.
+
+
+(annex info)
+g@renaissance:~/TEST-myrepo.annex$ git annex info
+repository mode: indirect
+trusted repositories: (merging origin/git-annex origin/synced/git-annex into git-annex...)
+(Recording state in git...)
+0
+semitrusted repositories: 5
+ -- here
+ 00000000-0000-0000-0000-000000000001 -- web
+ 622362eb-3882-4429-829b-1ec0f299f5a7 -- [omissis]
+ 69b848ef-dd29-43e4-ae1b-73ec6a01f2f6 -- [omissis]
+ ffc5c5d1-6166-4753-a2e4-88727d0f8c7b -- backup
+untrusted repositories: 1
+ b185b2ed-c024-43ac-9049-3bc12a87dacc -- [omissis]
+transfers in progress: none
+available local disk space: 51.53 gigabytes (+1 megabyte reserved)
+local annex keys: 0
+local annex size: 0 bytes
+annexed files in working tree: 212
+size of annexed files in working tree: 210.56 megabytes
+bloom filter size: 16 mebibytes (0% full)
+backend usage:
+ SHA256E: 212
+
+
+(list of remotes)
+g@renaissance:~/TEST-myrepo.annex$ git annex enableremote
+git-annex: Specify the name of the special remote to enable.
+Known special remotes: backup
+
+
+(enabling remote)
+g@renaissance:~/TEST-myrepo.annex$ git annex enableremote backup type=gcrypt encryption=hybrid gitrepo=git.myserver.net:myrepo
+enableremote backup (encryption update) (hybrid cipher with gpg key [omissis]) gcrypt: Development version -- Repository format MAY CHANGE
+gcrypt: Decrypting manifest
+gpg: Signature made Thu 16 Oct 2014 12:58:33 CEST
+[omissis]
+gcrypt: Remote ID is :id:8sucFsBZIGQKXFv5ecSW
+From gcrypt::git.myserver.net:myrepo
+ * [new branch] synced/master -> backup/synced/master
+ * [new branch] master -> backup/master
+ * [new branch] synced/git-annex -> backup/synced/git-annex
+ * [new branch] git-annex -> backup/git-annex
+gcrypt: Development version -- Repository format MAY CHANGE
+gcrypt: Decrypting manifest
+gpg: Signature made Thu 16 Oct 2014 12:58:33 CEST
+[omissis]
+Counting objects: 3, done.
+Compressing objects: 100% (2/2), done.
+Total 3 (delta 0), reused 1 (delta 0)
+gcrypt: Encrypting to: -r [omissis]
+gcrypt: Requesting manifest signature
+gpg: [omissis]: skipped: public key already present
+To gcrypt::git.myserver.net:myserver
+ 1195dda..3254af7 git-annex -> git-annex
+git-annex: uuid mismatch (UUID "78104a6f-16a9-504b-8e8a-d8a3c59351e8",Just (UUID "984e0333-3327-5f21-87a1-35d30f37f337"),":id:8sucFsBZIGQKXFv5ecSW")
+
+# End of transcript or log.
+"""]]
diff --git a/doc/bugs/problems_with_enableremote_on_gcrypt_remote___40__hosted_with_gitolite__41__/comment_1_72a97bc3ccb00c623baee874609bb4ca._comment b/doc/bugs/problems_with_enableremote_on_gcrypt_remote___40__hosted_with_gitolite__41__/comment_1_72a97bc3ccb00c623baee874609bb4ca._comment
new file mode 100644
index 000000000..70339747e
--- /dev/null
+++ b/doc/bugs/problems_with_enableremote_on_gcrypt_remote___40__hosted_with_gitolite__41__/comment_1_72a97bc3ccb00c623baee874609bb4ca._comment
@@ -0,0 +1,21 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlZ-6dtxJY4cP7shhvV8E6YyuV0Rak8it4"
+ nickname="Giovanni"
+ subject="I messed up that repo"
+ date="2014-10-16T13:31:15Z"
+ content="""
+I'm sure I messed up the repository at some point.
+
+The remote repository has a duplicated (I hope just duplicated and not triplicated) UUID: both ffc5c5d1-6166-4753-a2e4-88727d0f8c7b and 984e0333-3327-5f21-87a1-35d30f37f337.
+
+On one of my working remotes I already ran \"git annex dead 984e0333-3327-5f21-87a1-35d30f37f337\" and synced the special (bare) remote, **but** when trying to make a new clone and add the special remote with enableremote I always get the same \"UUID mismatch\" error, listing the UUID that was marked dead.
+
+Please, is there a way to get rid of the mess I made?!? :-)
+I'm tempted to manually add \"annex-uuid = ffc5c5d1-6166-4753-a2e4-88727d0f8c7b\" to the repo's \".git/config\", but I fear I'm going to mess things up further.
+
+Sorry for reporting this as a bug... actually it was my fault.
+
+best regards
+Giovanni
+
+"""]]
diff --git a/doc/bugs/rsync_remote_is_not_working.mdwn b/doc/bugs/rsync_remote_is_not_working.mdwn
new file mode 100644
index 000000000..a4b15b3b3
--- /dev/null
+++ b/doc/bugs/rsync_remote_is_not_working.mdwn
@@ -0,0 +1,26 @@
+Host: Mac OS with git-annex 5.20140919-g0f7caf5
+
+Remote: Linux
+
+* with git-annex 5.20140920-gb0c4300
+* using user&password login
+
+On Host:
+
+1. create a repo with git init && git annex init && git annex direct
+1. add an rsync repo in the git-annex webapp, type "small archive", with shared encryption (same result using the command line)
+1. copy some new files to the repo, expect the files to appear in the remote repo (check with du)
+1. Web app says "synced with remote-name", but the remote repo is completely empty
+1. run git annex copy --to $remotename; now the remote repo is filled with files
+1. but the sizes are really small; it seems that the actual files are not being transferred
+1. convert the repo to an indirect repo: git annex indirect
+1. re-run git annex copy, now the repo size on the remote seems about right
+1. now start git annex assistant, copy some new files, expect new files to be synced
+1. actual: the remote becomes completely empty, the existing files are removed!
+
+Another small issue:
+
+* The add remote interface stops at the "check remote" prompt for a long time without completing
+* After killing the webapp process, re-running the webapp, and adding the remote again, it worked very quickly
+* But future interaction with the remote still requires a password, both on the command line and in the webapp
+
diff --git a/doc/bugs/rsync_remote_is_not_working/comment_1_8998edf856a411de1f90b27568628feb._comment b/doc/bugs/rsync_remote_is_not_working/comment_1_8998edf856a411de1f90b27568628feb._comment
new file mode 100644
index 000000000..50f0c06cc
--- /dev/null
+++ b/doc/bugs/rsync_remote_is_not_working/comment_1_8998edf856a411de1f90b27568628feb._comment
@@ -0,0 +1,20 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.96"
+ subject="comment 1"
+ date="2014-10-21T20:07:40Z"
+ content="""
+A \"small archive\" only wants to contain files that are located inside archive/ directories.
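+
+A quick way to check or change that, as a sketch (myremote stands for the rsync
+remote's name):
+
+    git annex wanted myremote               # show its preferred content expression
+    git annex wanted myremote 'include=*'   # make it want all files instead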
+
+That seems to explain everything you reported except for:
+
+> 6. but the sizes are really small, seems that the actual files are not being transferred
+
+Maybe the remote is configured to use chunking? What happens if you run `git annex fsck --from $remotename` after copying a file to it? Any problem detected?
+
+> The add remote interface stops at \"check remote\" prompt for a long time without completing
+
+Please explain exactly what you did in the webapp. What did you click on, and what did you enter? I need enough detail to be able to reproduce the problem.
+
+(Also, in the future, one problem per bug report turns out to be a lot less confusing, and to have better results all around. True here and really anywhere.)
+"""]]
diff --git a/doc/bugs/runs_of_of_memory_adding_2_million_files/comment_10_a201485bf41514fde7c61a4dcbb5064f._comment b/doc/bugs/runs_of_of_memory_adding_2_million_files/comment_10_a201485bf41514fde7c61a4dcbb5064f._comment
new file mode 100644
index 000000000..9d5c8aab6
--- /dev/null
+++ b/doc/bugs/runs_of_of_memory_adding_2_million_files/comment_10_a201485bf41514fde7c61a4dcbb5064f._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 10"
+ date="2014-10-02T15:35:15Z"
+ content="""
+This got fixed in version 5.20140707.
+"""]]
diff --git a/doc/bugs/runs_of_of_memory_adding_2_million_files/comment_9_27a31463bcf28b5c684bb483b46a3baf._comment b/doc/bugs/runs_of_of_memory_adding_2_million_files/comment_9_27a31463bcf28b5c684bb483b46a3baf._comment
new file mode 100644
index 000000000..fe421a450
--- /dev/null
+++ b/doc/bugs/runs_of_of_memory_adding_2_million_files/comment_9_27a31463bcf28b5c684bb483b46a3baf._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmyYyXrtGKiR3Pu2OjdVsETXf4ePmECW54"
+ nickname="Andrey"
+ subject="comment 9"
+ date="2014-09-29T10:48:36Z"
+ content="""
+Joeyh, in what version was it fixed? I really need it for Ubuntu 14.04.
+"""]]
diff --git a/doc/bugs/vicfg_and_description_often_not_propagated.mdwn b/doc/bugs/vicfg_and_description_often_not_propagated.mdwn
index d42ba43a9..7e6dd9269 100644
--- a/doc/bugs/vicfg_and_description_often_not_propagated.mdwn
+++ b/doc/bugs/vicfg_and_description_often_not_propagated.mdwn
@@ -150,3 +150,5 @@ wanted a6febfa0-9fe5-4a65-95bb-dc255d87c2e2 = standard
#schedule a6febfa0-9fe5-4a65-95bb-dc255d87c2e2 =
# End of transcript or log.
"""]]
+
+[[!tag moreinfo]]
diff --git a/doc/bugs/vicfg_and_description_often_not_propagated/comment_2_d56aed617e0791aa17d9f37c8d3fd317._comment b/doc/bugs/vicfg_and_description_often_not_propagated/comment_2_d56aed617e0791aa17d9f37c8d3fd317._comment
new file mode 100644
index 000000000..cb485362b
--- /dev/null
+++ b/doc/bugs/vicfg_and_description_often_not_propagated/comment_2_d56aed617e0791aa17d9f37c8d3fd317._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.96"
+ subject="comment 2"
+ date="2014-10-21T20:02:40Z"
+ content="""
+Can you please provide more information, like showing the commits made to the git-annex branch when the configuration was reverted?
+
+Also, might some of the clocks of computers where you're using git-annex be set wrong?
+
+I have tagged this report moreinfo because I don't have enough information to do anything else.
+"""]]
diff --git a/doc/chunking.mdwn b/doc/chunking.mdwn
index 119a85c77..71b330d64 100644
--- a/doc/chunking.mdwn
+++ b/doc/chunking.mdwn
@@ -28,4 +28,16 @@ To change the chunk size, pass a `chunk=nnMiB` parameter to
`git annex enableremote`. This only affects the chunk size used when
storing new content.
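+
+For example (illustrative; "myremote" is a placeholder remote name):
+
+    git annex enableremote myremote chunk=50MiB
+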
+# old-style chunking
+
+Note that older versions of git-annex used a different chunking method, which
+was configured by passing `chunksize=nnMiB` when initializing a remote.
+
+The old-style chunking had a number of problems, including being less
+efficient, and not allowing resumes of encrypted uploads.
+
+It's not possible to switch a remote using that old chunking method to the
+new one, but git-annex continues to support old-style chunking so that
+such remotes keep working.
+
See also: [[design document|design/assistant/chunks]]
diff --git a/doc/design/metadata/comment_7_04cd255a516c8520a7bc1a8fad253533._comment b/doc/design/metadata/comment_7_04cd255a516c8520a7bc1a8fad253533._comment
new file mode 100644
index 000000000..12e5042fb
--- /dev/null
+++ b/doc/design/metadata/comment_7_04cd255a516c8520a7bc1a8fad253533._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlM_DRhi_5pJrTA0HbApHR25iAgy-NBXTY"
+ nickname="Tor Arne"
+ subject="comment 7"
+ date="2014-10-01T22:43:40Z"
+ content="""
+I have the same question as Toby: is there a particular reason the whole timestamp is not stored?
+"""]]
diff --git a/doc/design/metadata/comment_8_0a7e55e7626f72f63966fa1e1d2cf100._comment b/doc/design/metadata/comment_8_0a7e55e7626f72f63966fa1e1d2cf100._comment
new file mode 100644
index 000000000..965b1932e
--- /dev/null
+++ b/doc/design/metadata/comment_8_0a7e55e7626f72f63966fa1e1d2cf100._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlM_DRhi_5pJrTA0HbApHR25iAgy-NBXTY"
+ nickname="Tor Arne"
+ subject="Can tags/metadata be used for preferred content?"
+ date="2014-10-01T22:45:36Z"
+ content="""
+I would love to be able to \"tag\" something as archived instead of moving it into a special folder, coupled with a FinderSync extension on OS X Yosemite for a right-click menu. This would allow me to also \"view\" the archive and bring things out of there by \"untagging\" them, if I understand the feature correctly?
+"""]]
diff --git a/doc/design/metadata/comment_9_f0bb62c885a925e0da5ae8ce3c5e9003._comment b/doc/design/metadata/comment_9_f0bb62c885a925e0da5ae8ce3c5e9003._comment
new file mode 100644
index 000000000..fac3bf135
--- /dev/null
+++ b/doc/design/metadata/comment_9_f0bb62c885a925e0da5ae8ce3c5e9003._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlM_DRhi_5pJrTA0HbApHR25iAgy-NBXTY"
+ nickname="Tor Arne"
+ subject="comment 9"
+ date="2014-10-01T23:35:39Z"
+ content="""
+Sorry for the noise; I see that tags _can_ be used for preferred content, excellent!
+
+But it seems metadata is tied to a key, not to a specific file/path. If I have 10 different files all with the same content (for some reason, say a simple txt file, Gemspec, or something), and I want to tag one of them as important, it doesn't mean they all are :o
+"""]]
diff --git a/doc/design/requests_routing/simroutes.hs b/doc/design/requests_routing/simroutes.hs
index d91125935..391816040 100644
--- a/doc/design/requests_routing/simroutes.hs
+++ b/doc/design/requests_routing/simroutes.hs
@@ -182,7 +182,7 @@ merge (ImmobileNode ir) t@(TransferNode { transferrepo = tr }) =
, satisfiedRequests = satisfiedRequests' `S.union` checkSatisfied wantFiles' haveFiles'
}
where
- wantFiles' = foldr addRequest (wantFiles r1) (wantFiles r2)
+ wantFiles' = foldr addRequest (wantFiles r1) (wantFiles r2)
haveFiles' = S.foldr (addFile wantFiles' satisfiedRequests') (haveFiles r1) (haveFiles r2)
satisfiedRequests' = satisfiedRequests r1 `S.union` satisfiedRequests r2
@@ -229,7 +229,7 @@ emptyImmobile = ImmobileNode (NodeRepo [] S.empty S.empty)
mkTransfer :: (RandomGen g) => [NodeName] -> Rand g TransferNode
mkTransfer immobiles = do
- -- Transfer nodes are given random routes. May be simplistic.
+ -- Transfer nodes are given random routes. May be simplistic.
-- Also, some immobile nodes will not be serviced by any transfer nodes.
numpossiblelocs <- getRandomR transferDestinationsRange
possiblelocs <- sequence (replicate numpossiblelocs (randomfrom immobiles))
@@ -283,7 +283,7 @@ summarize _initial@(Network origis _) _final@(Network is _ts) = format
--, ("Immobile nodes at end", show is)
]
where
- findoriginreqs = filter (\r -> requestTTL r == originTTL)
+ findoriginreqs = filter (\r -> requestTTL r == originTTL)
findunsatisfied r =
let wantedfs = S.fromList $ map requestedFile (findoriginreqs (wantFiles r))
in S.difference wantedfs (haveFiles r)
diff --git a/doc/devblog/day_-4__forgetting/comment_9_d9121a5172f02df63364f19eae87d011._comment b/doc/devblog/day_-4__forgetting/comment_9_d9121a5172f02df63364f19eae87d011._comment
new file mode 100644
index 000000000..a104c2dd2
--- /dev/null
+++ b/doc/devblog/day_-4__forgetting/comment_9_d9121a5172f02df63364f19eae87d011._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="stp"
+ ip="24.134.205.34"
+ subject="Any update"
+ date="2014-10-01T12:47:47Z"
+ content="""
+Any update?
+"""]]
diff --git a/doc/devblog/day_221__another_fine_day_of_bugfixing.mdwn b/doc/devblog/day_221__another_fine_day_of_bugfixing.mdwn
new file mode 100644
index 000000000..0c26f5735
--- /dev/null
+++ b/doc/devblog/day_221__another_fine_day_of_bugfixing.mdwn
@@ -0,0 +1,10 @@
+Working through the forum posts and bugs. Backlog is down to 95.
+
+Discovered the first known security hole in git-annex!
+Turns out that S3 and Glacier remotes that were configured with embedcreds=yes and encryption=pubkey or encryption=hybrid
+didn't actually encrypt the AWS credentials that get embedded into the git
+repo. This doesn't affect any repos set up by the assistant.
+
+I've fixed the problem and am going to make a release soon.
+If your repo is affected, see
+[[upgrades/insecure_embedded_creds]] for what to do about it.
diff --git a/doc/devblog/day_222_preparing_for_debian_release.mdwn b/doc/devblog/day_222_preparing_for_debian_release.mdwn
new file mode 100644
index 000000000..62acc02b2
--- /dev/null
+++ b/doc/devblog/day_222_preparing_for_debian_release.mdwn
@@ -0,0 +1,12 @@
+Made two releases of git-annex, yesterday and today, which turned out to
+contain only Debian changes. So no need for other users to upgrade.
+
+This included fixing the build on the mips and arm architectures.
+The mips build was running out of memory, and I was able to work around
+that. Then the arm builds broke today, because of a recent change to the
+version of llvm that has completely trashed ghc. Luckily, I was able
+to work around that too.
+
+Hopefully that will get last week's security fix into Debian testing,
+and otherwise have git-annex in Debian in good shape for the upcoming
+freeze.
diff --git a/doc/devblog/day_223__partial_commit_problem.mdwn b/doc/devblog/day_223__partial_commit_problem.mdwn
new file mode 100644
index 000000000..b2d1ec4e0
--- /dev/null
+++ b/doc/devblog/day_223__partial_commit_problem.mdwn
@@ -0,0 +1,26 @@
+`git commit $some_unlocked_file` seems like a reasonably common thing for
+someone to do, so it's surprising to find that it's a [[little bit broken|/bugs/modified_permissions_persist_after_unlock__44___commit]],
+leaving the file staged in the index after (correctly) committing the
+annexed symlink.
+
+This is caused by a bug in git and/or by git-annex abusing the
+git post-commit hook to do something it shouldn't do, although it's not
+unique in using the post-commit hook this way. I'm talking this over with
+Junio, and the fix will depend on the result of that conversation. It might
+involve git-annex detecting this case and canceling the commit, asking the
+user to `git annex add` the file first. Or it might involve a new git hook,
+although I have not had good luck getting hooks added to git before.
+
+----
+
+Meanwhile, today I did some other bug fixing. Fixed the Internet Archive
+support for embedcreds=yes. Made `git annex map` work for remote repos
+in a directory with an implicit ".git" prefix. And fixed a
+strange problem where the repository repair code caused a `git gc` to run
+and then tripped over its pid file.
+
+I seem to have enough fixes to make another release pretty soon.
+Especially since the current release of git-annex doesn't build with yesod
+1.4.
+
+Backlog: 94 messages
diff --git a/doc/devblog/day_224-226__long_rainy_slog.mdwn b/doc/devblog/day_224-226__long_rainy_slog.mdwn
new file mode 100644
index 000000000..9c26b134b
--- /dev/null
+++ b/doc/devblog/day_224-226__long_rainy_slog.mdwn
@@ -0,0 +1,14 @@
+3 days spent redoing the Android autobuilder! The new version of
+yesod-routes generates TH splices that break the EvilSplicer. So after
+updating everything to new versions for the Nth time, I instead went back
+to older versions. The autobuilder now uses Debian jessie, instead of
+wheezy. And all haskell packages are pinned to use the same version
+as in jessie, rather than the newest versions. Since jessie is quite near
+to being frozen, this should make the autobuilder much less prone to
+getting broken by new versions of haskell packages that need patches for
+Android.
+
+I happened to stumble over <http://hackage.haskell.org/package/setenv>
+while doing that. This supports setting and unsetting environment variables
+on Windows, which I had not previously known how to do from Haskell. Cleaned up
+several ugly corners of the Windows port using it.
diff --git a/doc/devblog/day_227__info.mdwn b/doc/devblog/day_227__info.mdwn
new file mode 100644
index 000000000..0b5950fe5
--- /dev/null
+++ b/doc/devblog/day_227__info.mdwn
@@ -0,0 +1,33 @@
+Today, I expanded `git annex info` so it can also be used on annexed files
+and on remotes. Looking at the info for an individual remote is quite
+useful, especially for answering questions like: Does the remote have
+embedded creds? Are they encrypted? Does it use chunking? Is that old style
+chunking?
+
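+For example, the first block of output below came from running something
+like this, giving it the name of a remote:
+
+    git annex info rsync.net
+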
+<pre>
+remote: rsync.net
+description: rsync.net demo remote
+uuid: 15b42f18-ebf2-11e1-bea1-f71f1515f9f1
+cost: 250.0
+type: rsync
+url: xxx@usw-s002.rsync.net:foo
+encryption: encrypted (to gpg keys: 7321FC22AC211D23 C910D9222512E3C7)
+chunking: 1 MB chunks
+</pre>
+
+<pre>
+remote: ia3
+description: test [ia3]
+uuid: 12817311-a189-4de3-b806-5f339d304230
+cost: 200.0
+type: S3
+creds: embedded in git repository (not encrypted)
+bucket: joeyh-test-17oct-3
+internet archive item: http://archive.org/details/joeyh-test-17oct-3
+encryption: not encrypted
+chunking: none
+</pre>
+
+Should be quite useful info for debugging too..
+
+Yesterday, I fixed a bug that prevented retrieving files from Glacier.
diff --git a/doc/direct_mode/comment_15_599b2285d24ae1244a1945d572b2c397._comment b/doc/direct_mode/comment_15_599b2285d24ae1244a1945d572b2c397._comment
new file mode 100644
index 000000000..d8c5e825f
--- /dev/null
+++ b/doc/direct_mode/comment_15_599b2285d24ae1244a1945d572b2c397._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmjjrCHEIa4vpDIJoBuJsrF3y8wZQElVHw"
+ nickname="Siyuan"
+ subject="Non-direct mode for Windows"
+ date="2014-10-08T15:55:04Z"
+ content="""
+Why is Windows restricted to direct mode? NTFS has symbolic links too. Are they fundamentally different from POSIX symlinks, such that it cannot be done?
+"""]]
diff --git a/doc/forum/ARM_build_on_Zyxel_NAS.mdwn b/doc/forum/ARM_build_on_Zyxel_NAS.mdwn
new file mode 100644
index 000000000..4fe46bedb
--- /dev/null
+++ b/doc/forum/ARM_build_on_Zyxel_NAS.mdwn
@@ -0,0 +1,15 @@
+I am trying to run the linux standalone ARM build on my Zyxel NAS, but I get the following error
+
+`FATAL: kernel too old`
+
+The system runs the following:
+
+    uname -a
+    Linux nas 2.6.31.8 #2 Thu Dec 19 14:31:05 CST 2013 armv5tel GNU/Linux
+
+Help would be much appreciated.
diff --git a/doc/forum/ARM_build_on_Zyxel_NAS/comment_1_38f38755c0afd76a2b968836fec395e8._comment b/doc/forum/ARM_build_on_Zyxel_NAS/comment_1_38f38755c0afd76a2b968836fec395e8._comment
new file mode 100644
index 000000000..02d2716ac
--- /dev/null
+++ b/doc/forum/ARM_build_on_Zyxel_NAS/comment_1_38f38755c0afd76a2b968836fec395e8._comment
@@ -0,0 +1,11 @@
+[[!comment format=mdwn
+ username="Joey"
+ subject="""comment 1"""
+ date="2014-10-20T15:11:49Z"
+ content="""
+The git-annex arm build is built using the libc from Debian stable,
+which needs a newer version of the Linux kernel than is on your device.
+
+It would be possible to build git-annex against an older libc, but
+not easily, which is why I don't.
+"""]]
diff --git a/doc/forum/ARM_build_on_Zyxel_NAS/comment_2_44c8f1af0cbe9ad51794e6d8d16be627._comment b/doc/forum/ARM_build_on_Zyxel_NAS/comment_2_44c8f1af0cbe9ad51794e6d8d16be627._comment
new file mode 100644
index 000000000..9cec2bf69
--- /dev/null
+++ b/doc/forum/ARM_build_on_Zyxel_NAS/comment_2_44c8f1af0cbe9ad51794e6d8d16be627._comment
@@ -0,0 +1,9 @@
+[[!comment format=mdwn
+ username="musella"
+ ip="84.73.42.152"
+ subject="comment 2"
+ date="2014-10-21T23:35:35Z"
+ content="""
+what is the minimal kernel version that I would need?
+
+"""]]
diff --git a/doc/forum/ARM_build_on_Zyxel_NAS/comment_3_b4f6e5ac672e8ece36cceb74ff3315dd._comment b/doc/forum/ARM_build_on_Zyxel_NAS/comment_3_b4f6e5ac672e8ece36cceb74ff3315dd._comment
new file mode 100644
index 000000000..2ff40efe6
--- /dev/null
+++ b/doc/forum/ARM_build_on_Zyxel_NAS/comment_3_b4f6e5ac672e8ece36cceb74ff3315dd._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.96"
+ subject="comment 3"
+ date="2014-10-22T16:22:24Z"
+ content="""
+I know kernel 3.2 would work. I don't know what the minimum kernel supported by glibc 2.13 is.
+"""]]
diff --git a/doc/forum/Android_version_does_not_sync/comment_3_2a4efec37015ea44509e7ed16b36a72d._comment b/doc/forum/Android_version_does_not_sync/comment_3_2a4efec37015ea44509e7ed16b36a72d._comment
new file mode 100644
index 000000000..71c5b91d1
--- /dev/null
+++ b/doc/forum/Android_version_does_not_sync/comment_3_2a4efec37015ea44509e7ed16b36a72d._comment
@@ -0,0 +1,13 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnjX_O_VChwfDjcKlcRHbc2Aah8aYQlqts"
+ nickname="Ludovic"
+ subject="comment 3"
+ date="2014-10-05T11:01:39Z"
+ content="""
+I have the same problem.
+I checked again, removed the previous annex/ directory and created a new annex repository from the webapp. No annex/.git/index file is created.
+
+I then added a remote ssh server. The synchronisation fails with the error in the log: \"/storage/emulated/legacy/annex/.git/index: copyFile: does not exist (No such file or directory).\"
+
+Running \"git annex sync\" solved the problem.
+"""]]
diff --git a/doc/forum/Attempting_to_repair_fails_with_everincreasing_deltas/comment_3_5a09f65c77dce3c62236c13aa90a1191._comment b/doc/forum/Attempting_to_repair_fails_with_everincreasing_deltas/comment_3_5a09f65c77dce3c62236c13aa90a1191._comment
new file mode 100644
index 000000000..1b4c2499a
--- /dev/null
+++ b/doc/forum/Attempting_to_repair_fails_with_everincreasing_deltas/comment_3_5a09f65c77dce3c62236c13aa90a1191._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 3"
+ date="2014-10-12T17:42:28Z"
+ content="""
+Sorry I didn't get to this earlier. I'm glad you managed to solve the problem, but there is something I am curious about: You say you had 2 folders set as full backups. Were these git repositories in their own right, or were they something else, eg directory special remotes?
+
+I ask because, if there were git repositories then the repair should have been able to pull the missing git objects from them, and fix your repository. Unless the broken disk somehow kept corrupting it, I suppose..
+"""]]
diff --git a/doc/forum/Broken_symlinks_remain_after_drop.mdwn b/doc/forum/Broken_symlinks_remain_after_drop.mdwn
new file mode 100644
index 000000000..bc2404901
--- /dev/null
+++ b/doc/forum/Broken_symlinks_remain_after_drop.mdwn
@@ -0,0 +1,7 @@
+This is a newb question. I don't know whether this is a bug or the way git-annex is intended to function.
+
+I have two annex repos connected to each other. My idea was to have the first repository add files, which would then be moved to the second repository for storage. After moving, repo1 would be empty again, clean of any symlinks.
+
+But after I run 'git-annex move * --to repo2', broken symlinks remain in repo1. I don't want any broken/unused symlinks to remain in repo1 for object data it doesn't currently have (even if those files remain in the repository itself).
+
+Is there a way I can clean/remove broken symlinks to object data when those objects aren't present, so the directory only contains symlinks when the repo currently has the object data for those files?
diff --git a/doc/forum/Broken_symlinks_remain_after_drop/comment_1_d4a59b9e58d43d7a3d437e521dd5c4e1._comment b/doc/forum/Broken_symlinks_remain_after_drop/comment_1_d4a59b9e58d43d7a3d437e521dd5c4e1._comment
new file mode 100644
index 000000000..33a9cb058
--- /dev/null
+++ b/doc/forum/Broken_symlinks_remain_after_drop/comment_1_d4a59b9e58d43d7a3d437e521dd5c4e1._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-06T15:30:54Z"
+ content="""
+git-annex is behaving as expected here. The broken symlink allows you to run `git annex get` on it to get the file content back into the repository, or `git mv` to rename the file even though its content is not present, etc.
+
+You can probably accomplish what you want by using git branches. You want a branch for repo2 that has all the files, and a branch for repo1 that has only the files in repo1. git-annex doesn't maintain such branches for you, but you can probably come up with a way to create such branches (`git annex find` will be useful when scripting up a solution).
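+
+A rough, untested sketch of the branch idea (the branch name is arbitrary):
+
+    git checkout -b present-here
+    git annex find --not --in=here | xargs -r -d '\n' git rm --quiet
+    git commit -m "remove symlinks for content not present in this repo"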
+
+Or you can adopt the approach the git-annex assistant uses for archived files -- an archive/ directory, where files are moved when they're no longer wanted in the local system, so that their symlinks don't clutter up the view, while still being easily accessible when the time comes to pull something out of the archive.
+"""]]
diff --git a/doc/forum/Broken_symlinks_remain_after_drop/comment_2_399ba969a17a41a022c69a1f7c480857._comment b/doc/forum/Broken_symlinks_remain_after_drop/comment_2_399ba969a17a41a022c69a1f7c480857._comment
new file mode 100644
index 000000000..f9225b066
--- /dev/null
+++ b/doc/forum/Broken_symlinks_remain_after_drop/comment_2_399ba969a17a41a022c69a1f7c480857._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="ghen1"
+ ip="66.41.70.34"
+ subject="comment 2"
+ date="2014-10-09T02:20:14Z"
+ content="""
+Thank you for your reply and suggestions. How could I maintain an archive folder on a local repo without those changes (moved files) being reflected in the other repos? Would this require branching as well? How does the assistant do it?
+"""]]
diff --git a/doc/forum/Changing_files_during_git_annex_runs.mdwn b/doc/forum/Changing_files_during_git_annex_runs.mdwn
new file mode 100644
index 000000000..7ff362d1c
--- /dev/null
+++ b/doc/forum/Changing_files_during_git_annex_runs.mdwn
@@ -0,0 +1,12 @@
+Hello,
+
+I have my music git annexed, direct mode. It's about 30k files of 429GB size. Some actions take considerable time (sync, add and of course transfer to/from other repos). During this time I don't hear music, because my player changes files. :-(
+
+When is it a problem if a file changes during git annex operations?
+
+git annex get gives a wrong checksum I guess and you need to re-transfer later.
+
+What about git annex add?
+
+Thx!
+Florian
diff --git a/doc/forum/Changing_files_during_git_annex_runs/comment_1_8067077c49dafbe2afa7d182b3314df4._comment b/doc/forum/Changing_files_during_git_annex_runs/comment_1_8067077c49dafbe2afa7d182b3314df4._comment
new file mode 100644
index 000000000..312100b11
--- /dev/null
+++ b/doc/forum/Changing_files_during_git_annex_runs/comment_1_8067077c49dafbe2afa7d182b3314df4._comment
@@ -0,0 +1,14 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.111"
+ subject="comment 1"
+ date="2014-10-13T21:33:35Z"
+ content="""
+git-annex will detect things like files being changed at the same time that `git annex add` is run. It doesn't currently do a full lsof check to make sure nothing has a file open when `git annex add` is run (the assistant does do such extra checks).
+
+I don't see why syncing or transferring files to other repos should prevent your music player or whatever from using the files that you have locally present.
+
+If you're using direct mode and your music player is modifying the contents of files in the repository, then that will prevent other repositories from downloading older versions of those files, since the old version is no longer present in the direct mode repository. There is the possibility of some inefficient attempts to transfer a file that would fail because it got modified in between. git-annex detects this, but it can still waste bandwidth.
+
+(If my music player was modifying files all the time, I'd give it a good talking to; that's not the job of a music player. But, I keep my music in an indirect mode repo, so any music player would find it hard to modify annexed files anyway.)
+"""]]
diff --git a/doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work.mdwn b/doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work.mdwn
new file mode 100644
index 000000000..2865441e7
--- /dev/null
+++ b/doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work.mdwn
@@ -0,0 +1,47 @@
+Posting this here because I am not sure if it is a bug or if I am missing something.
+
+I have a ~10 GB file in git-annex. I can't get it to go to S3 whatever I do.
+
+I added a S3 special remote:
+
+ git annex initremote s3-mybucket type=S3 chunk=1MiB keyid=ABCD1234 bucket=mybucket
+
+Then, I tried copying files to the remote. Small files worked, but big files don't:
+
+ $ git annex copy bigfile.tgz --to s3-mybucket
+ copy bigfile.tgz (gpg)
+ You need a passphrase to unlock the secret key for
+ user: "user"
+ 2048-bit RSA key, ID ABCD1234, created 2014-10-13 (main key ID ABCD1234)
+
+ (checking s3-mybucket...) (to s3-mybucket...)
+
+ Your proposed upload exceeds the maximum allowed size
+ failed
+ git-annex: copy: 1 failed
+
+I tried some stuff like this too:
+
+ git annex enableremote s3-mybucket chunk=100MiB
+ git annex enableremote s3-mybucket chunksize=100MiB
+
+It didn't work. Same result.
+
+ $ git annex version
+ git-annex version: 5.20140717
+ build flags: Assistant Webapp Webapp-secure Pairing Testsuite S3 WebDAV FsEvents XMPP DNS Feeds Quvi TDFA CryptoHash
+ key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
+ remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
+ local repository version: 5
+ supported repository version: 5
+ upgrade supported from repository versions: 0 1 2 4
+
+The chunk size did seem to be set properly:
+
+ $ git checkout git-annex
+ $ cat remote.log
+ xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxx bucket=mybucket chunk=100MiB chunksize=100MiB cipher=....
+
+I'm on OSX 10.9.4 and I installed git-annex via homebrew.
+
+Any ideas?
diff --git a/doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work/comment_1_ec390a7d521c697eb6b17e8db1dc9d1d._comment b/doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work/comment_1_ec390a7d521c697eb6b17e8db1dc9d1d._comment
new file mode 100644
index 000000000..7b8a0e1f5
--- /dev/null
+++ b/doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work/comment_1_ec390a7d521c697eb6b17e8db1dc9d1d._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.111"
+ subject="comment 1"
+ date="2014-10-15T15:25:14Z"
+ content="""
+You're using an old version of git-annex from before the recent complete rewrite of the chunking code. That old version did not support chunking for S3. QED. Upgrade.
+
+Note that your configuration has both the chunksize= and chunk= set. This is not a good idea, since they enable different types of chunking that are not compatible. If I were you, I'd delete that special remote and make a new one after upgrading, and be careful to only set `chunk=` in that new one.
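+
+For example, something along these lines after upgrading (the new remote and
+bucket names are placeholders; pick whatever encryption= setting you want):
+
+    git annex initremote s3-mybucket2 type=S3 encryption=hybrid keyid=ABCD1234 bucket=mybucket-2 chunk=1MiB
+    git annex copy bigfile.tgz --to s3-mybucket2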
+"""]]
diff --git a/doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work/comment_2_14a584567ef42d5b7955ee970200e74d._comment b/doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work/comment_2_14a584567ef42d5b7955ee970200e74d._comment
new file mode 100644
index 000000000..744b9f182
--- /dev/null
+++ b/doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work/comment_2_14a584567ef42d5b7955ee970200e74d._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://digiuser.livejournal.com/"
+ ip="67.161.4.185"
+ subject="Homebrew"
+ date="2014-10-15T15:37:34Z"
+ content="""
+Oh, ok, thank you. I am using the latest version available from homebrew. Is there a way to get homebrew to install the latest version of git-annex?
+
+Thanks.
+"""]]
diff --git a/doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work/comment_3_6cbd7329f1f11edf8dd90df27d45158f._comment b/doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work/comment_3_6cbd7329f1f11edf8dd90df27d45158f._comment
new file mode 100644
index 000000000..ecba0fc83
--- /dev/null
+++ b/doc/forum/Copying_to_S3_does_not_work_-_chunking_does_not_work/comment_3_6cbd7329f1f11edf8dd90df27d45158f._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.111"
+ subject="comment 3"
+ date="2014-10-15T17:25:27Z"
+ content="""
+I think that if you have installed all the build deps for git-annex using homebrew, you should be able to just install git-annex from source.
+
+Or, ask the homebrewers to update the package there..
+"""]]
diff --git a/doc/forum/Default_annex.largefiles.mdwn b/doc/forum/Default_annex.largefiles.mdwn
new file mode 100644
index 000000000..9da3220e6
--- /dev/null
+++ b/doc/forum/Default_annex.largefiles.mdwn
@@ -0,0 +1 @@
+I'm new to git annex, so if this has been discussed before, please forgive me. The documentation of annex.largefiles seems to say that all files are added to the annex by default. However, when I tried it, several of my smaller files were not added to the annex. I admit that I haven't tried changing this value yet. Has the default changed? I'm using the package from Arch AUR git-annex-bin. Possibly this version has a different default?
diff --git a/doc/forum/Default_annex.largefiles/comment_1_74a3ad2388e41f1ff17f64a00485a35a._comment b/doc/forum/Default_annex.largefiles/comment_1_74a3ad2388e41f1ff17f64a00485a35a._comment
new file mode 100644
index 000000000..d57e4f71d
--- /dev/null
+++ b/doc/forum/Default_annex.largefiles/comment_1_74a3ad2388e41f1ff17f64a00485a35a._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 1"""
+ date="2014-10-20T15:26:35Z"
+ content="""
+The default is to treat all files as "large", so any file it sees
+should be added, except for those that are `.gitignored`.
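+
+If you want to rule that setting out, you could check whether annex.largefiles
+has been configured anywhere (in git config or .gitattributes) and explicitly
+make it match everything; a sketch:
+
+    git config --get annex.largefiles
+    git config annex.largefiles "include=*"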
+"""]]
diff --git a/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__.mdwn b/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__.mdwn
new file mode 100644
index 000000000..47c327f1b
--- /dev/null
+++ b/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__.mdwn
@@ -0,0 +1,14 @@
+Hi, I'm using the webapp and created a repository on my local computer. Then I created another remote repository (an encrypted remote with gcrypt); this remote repository is selected as type "full backup".
+
+I've added some files to the local repository, then changed some of them and watched the sync happen. Then I deleted some files, and these also get synced to the remote.
+
+Now, how can I recover those files from the foreign repo, using the webapp or the command line? I could not find any solution.
+
+I tried:
+
+    git log --diff-filter=D --summary
+
+and then
+
+    git checkout 488408bfcd58eced685d9e3ca5daf55250850f5d -- .
+
+to recover the file listed in this remote but got the following response:
+
+    fatal: This operation must be run in a work tree
+
+What am I missing, and how does the "Restore" part work when using a "full backup" remote repository?
diff --git a/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_1_67ac7e8b53a4374baf640d32dac79030._comment b/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_1_67ac7e8b53a4374baf640d32dac79030._comment
new file mode 100644
index 000000000..21adc4ef9
--- /dev/null
+++ b/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_1_67ac7e8b53a4374baf640d32dac79030._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.144"
+ subject="comment 1"
+ date="2014-09-25T15:52:04Z"
+ content="""
+Yes, you need to use git to either revert the repository to a previous version that had the file, or perhaps just revert the commit where the file was deleted. Either way, this requires letting git modify files in the repository, which is prevented by direct mode. So, if you can `git annex indirect` to switch to indirect mode, your git commands will work then.
+"""]]
diff --git a/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_2_eb6df2bfcb3892ae22050a8c5f67ee90._comment b/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_2_eb6df2bfcb3892ae22050a8c5f67ee90._comment
new file mode 100644
index 000000000..5963d4b74
--- /dev/null
+++ b/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_2_eb6df2bfcb3892ae22050a8c5f67ee90._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmK0703vNSIQsP1mGf-4MAPnsBZiSc6yVo"
+ nickname="Emre"
+ subject="comment 2"
+ date="2014-09-25T19:25:06Z"
+ content="""
+Thanks Joeyh, of course I guess checking stuff in git would then lose my timestamps, as in the previous post. I recommend, as a feature request, making file recovery a bit easier if possible. I'm not a git expert and definitely won't use command line stuff for doing this in the long run, unless there are some more intuitive commands like \"git annex list-old-versions\" or \"git annex show-deleted\" and then \"git annex restore filename version_no\" etc.
+"""]]
diff --git a/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_3_15f36487383a631f16e041e2885c44ec._comment b/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_3_15f36487383a631f16e041e2885c44ec._comment
new file mode 100644
index 000000000..99dd7e81b
--- /dev/null
+++ b/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_3_15f36487383a631f16e041e2885c44ec._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 3"
+ date="2014-10-02T15:40:40Z"
+ content="""
+Using git does not affect the timestamps or other metadata of files stored by git-annex, which git knows nothing about. It will perhaps change the timestamps of the symlinks that git changes. If you really wanted to avoid that, you could `git clone` the repository and do all the git commands in the clone of the repository, without touching the original repo.
+
+Whether commands like `git checkout` and `git revert` are intuitive depends on how intuitive you find git, I suppose. It sure seems more intuitive to me to reuse git commands that work just fine, rather than adding a whole new set of commands.
+"""]]
diff --git a/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_4_9293831aff5b6cef490f65d03638d34d._comment b/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_4_9293831aff5b6cef490f65d03638d34d._comment
new file mode 100644
index 000000000..176c5d915
--- /dev/null
+++ b/doc/forum/Direct_Mode_-_Restore_file_from_Full_Backup_Repository__63__/comment_4_9293831aff5b6cef490f65d03638d34d._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmK0703vNSIQsP1mGf-4MAPnsBZiSc6yVo"
+ nickname="Emre"
+ subject="comment 4"
+ date="2014-10-02T21:57:02Z"
+ content="""
+If git-annex's target user community is people with some level of git know-how, then you may be right. But I'm not one of them. The only time I use git is to clone a repo from the internet when I want to compile the latest source code. Nothing more. Reverting etc. are alien to me.
+
+Besides, what happens if there are multiple files in a commit but you want to revert only one file? This may be a stupid question if git is able to filter files when reverting, but accept my apologies if so, since I do not know git.
+
+I simply want to use git-annex to sync files, keeping some versions of the files in case needed.
+"""]]
diff --git a/doc/forum/Equivalent_to_git_bundle__63__.mdwn b/doc/forum/Equivalent_to_git_bundle__63__.mdwn
new file mode 100644
index 000000000..d16b91cad
--- /dev/null
+++ b/doc/forum/Equivalent_to_git_bundle__63__.mdwn
@@ -0,0 +1,10 @@
+Hi,
+
+git provides a neat way to create archives of git repos (or parts thereof): git bundle.
+
+git bundle obviously works with git annex as well, BUT those bundles don't include the actual content (in other words, only the symlinks are bundled up).
+
+Is there a way to get the git bundle functionality with git annex?
+
+THX & Cheers,
+Toby.
diff --git a/doc/forum/Equivalent_to_git_bundle__63__/comment_1_e42936a9bc36fbee69f48e32df303dee._comment b/doc/forum/Equivalent_to_git_bundle__63__/comment_1_e42936a9bc36fbee69f48e32df303dee._comment
new file mode 100644
index 000000000..af3cf569b
--- /dev/null
+++ b/doc/forum/Equivalent_to_git_bundle__63__/comment_1_e42936a9bc36fbee69f48e32df303dee._comment
@@ -0,0 +1,9 @@
+[[!comment format=mdwn
+ username="Bram"
+ ip="81.20.68.186"
+ subject="tar"
+ date="2014-10-03T13:44:50Z"
+ content="""
+I would say you're just looking for 'tar cf' or 'tar czf' of the root directory of your repository.
+Unless the (delta) compression that 'git bundle' would perform is a must-have...
+"""]]
diff --git a/doc/forum/Equivalent_to_git_bundle__63__/comment_2_2b8b5c237d8572fdd27202f3502bea96._comment b/doc/forum/Equivalent_to_git_bundle__63__/comment_2_2b8b5c237d8572fdd27202f3502bea96._comment
new file mode 100644
index 000000000..167353df3
--- /dev/null
+++ b/doc/forum/Equivalent_to_git_bundle__63__/comment_2_2b8b5c237d8572fdd27202f3502bea96._comment
@@ -0,0 +1,13 @@
+[[!comment format=mdwn
+ username="tdussa"
+ ip="217.84.78.25"
+ subject="tar -- not really"
+ date="2014-10-03T20:40:45Z"
+ content="""
+Hi,
+
+THX for your suggestion. Unfortunately, git bundle is able to carve out particular slices of a repo, which a simple tar obviously cannot do. This functionality is much desired.
+
+Cheers,
+Toby.
+"""]]
diff --git a/doc/forum/Git-Annex_Android_sync_files_are_missing_on_Linuxx.mdwn b/doc/forum/Git-Annex_Android_sync_files_are_missing_on_Linuxx.mdwn
new file mode 100644
index 000000000..4f144d51e
--- /dev/null
+++ b/doc/forum/Git-Annex_Android_sync_files_are_missing_on_Linuxx.mdwn
@@ -0,0 +1,4 @@
+Hi
+I created shares between Linux and Android (using nightly 4.4). I used the Assistant on both. It seems like the files from Android to Linux are all missing, or at least the symlinks are broken. However files from Linux to Android are fine.
+
+thanks
diff --git a/doc/forum/Git-Annex_Android_sync_files_are_missing_on_Linuxx/comment_1_72d7811990e78fba0b7fc2e1c7ee515f._comment b/doc/forum/Git-Annex_Android_sync_files_are_missing_on_Linuxx/comment_1_72d7811990e78fba0b7fc2e1c7ee515f._comment
new file mode 100644
index 000000000..9a0f2880f
--- /dev/null
+++ b/doc/forum/Git-Annex_Android_sync_files_are_missing_on_Linuxx/comment_1_72d7811990e78fba0b7fc2e1c7ee515f._comment
@@ -0,0 +1,15 @@
+[[!comment format=mdwn
+ username="htun"
+ ip="5.104.224.15"
+ subject="comment 1"
+ date="2014-10-17T04:57:08Z"
+ content="""
+
+
+Here is what the file's symlink points to:
+.git/annex/objects/MF/kG/SHA256E-s1589297--0ec542aabec66b1877699ef27549fe355339224680d7923078d03f375028fca1.apk/SHA256E-s1589297--0ec542aabec66b1877699ef27549fe355339224680d7923078d03f375028fca1.apk
+
+
+and here is what is actually in the objects directory: SHA256E-s1589297--0ec542aabec66b1877699ef27549fe355339224680d7923078d03f375028fca1.apk/SHA256E-s1589297--0ec542aabec66b1877699ef27549fe355339224680d7923078d03f375028fca1.apk.map
+
+"""]]
diff --git a/doc/forum/Git_annex_assistant_can__39__t_find_rsync_nor_git-annex_on_server.mdwn b/doc/forum/Git_annex_assistant_can__39__t_find_rsync_nor_git-annex_on_server.mdwn
new file mode 100644
index 000000000..66d224b3a
--- /dev/null
+++ b/doc/forum/Git_annex_assistant_can__39__t_find_rsync_nor_git-annex_on_server.mdwn
@@ -0,0 +1,14 @@
+Hi, I'm trying to setup git annex assistant (my first time).
+When I add the server (in "transfer" mode, if that matters) I get the following error:
+
+ "Neither rsync nor git-annex are installed on the server. Perhaps you should go install them?"
+
+I manually verified that both rsync and git/git-annex are installed and available from PATH in the "annex" account and all seems to be ok.
+
+Can you suggest a way to get a more specific information on the source of the error?
+
+My first guess was that this is due to the fact that rsync and git-annex are installed in "non-standard locations". My server runs NixOS (http://nixos.org), which has a completely different directory hierarchy convention from a traditional linux/unix OS (that is, no /usr/bin, /usr/lib, etc.). However, I tried to "cheat" by manually adding symbolic links into /usr/bin, but this didn't work either, so I might be looking in the wrong direction.
+
+Any suggestion appreciated, thank you in advance,
+
+Marco
diff --git a/doc/forum/Git_annex_assistant_can__39__t_find_rsync_nor_git-annex_on_server/comment_1_75c599cc26e7d3645f69173861d4f8be._comment b/doc/forum/Git_annex_assistant_can__39__t_find_rsync_nor_git-annex_on_server/comment_1_75c599cc26e7d3645f69173861d4f8be._comment
new file mode 100644
index 000000000..20b83372e
--- /dev/null
+++ b/doc/forum/Git_annex_assistant_can__39__t_find_rsync_nor_git-annex_on_server/comment_1_75c599cc26e7d3645f69173861d4f8be._comment
@@ -0,0 +1,9 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawn26A25mnLHRtWAP587-NPwEFKzolmENL4"
+ nickname="Marco"
+ subject="Also with standalone git-annex"
+ date="2014-09-24T14:14:43Z"
+ content="""
+Update: I also tried to install the standalone distribution in the home of the annex user on the server as shown in the video (BTW, nice illustration!), but I get the same error.
+(On the client side I installed the osx app instead.)
+"""]]
diff --git a/doc/forum/Git_annex_assistant_can__39__t_find_rsync_nor_git-annex_on_server/comment_2_496e2f3a61b609ebb28ab55e5c30022b._comment b/doc/forum/Git_annex_assistant_can__39__t_find_rsync_nor_git-annex_on_server/comment_2_496e2f3a61b609ebb28ab55e5c30022b._comment
new file mode 100644
index 000000000..f0e63837c
--- /dev/null
+++ b/doc/forum/Git_annex_assistant_can__39__t_find_rsync_nor_git-annex_on_server/comment_2_496e2f3a61b609ebb28ab55e5c30022b._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.144"
+ subject="comment 2"
+ date="2014-09-25T15:42:41Z"
+ content="""
+You need to be able to `ssh yourserver which rsync` and have it succeed. That's what git-annex uses to probe if rsync etc is present.
+
+Note that, since that does not start a login shell, bash doesn't source ~/.bash* at all, or even /etc/profile. So none of the ways people add nonstandard directories to PATH will work.
+
+So, use this to check the PATH that is available on the system: `ssh yourserver 'echo $PATH'`
+"""]]
diff --git a/doc/forum/Git_annex_hangs.mdwn b/doc/forum/Git_annex_hangs.mdwn
new file mode 100644
index 000000000..a7bb35905
--- /dev/null
+++ b/doc/forum/Git_annex_hangs.mdwn
@@ -0,0 +1,2 @@
+http://stackoverflow.com/questions/26305691/git-annex-hangs
+Does anyone know what might be causing this?
diff --git a/doc/forum/Git_annex_hangs/comment_1_e6b854d4625ae3015aea9c5de71a28ef._comment b/doc/forum/Git_annex_hangs/comment_1_e6b854d4625ae3015aea9c5de71a28ef._comment
new file mode 100644
index 000000000..34df8fb97
--- /dev/null
+++ b/doc/forum/Git_annex_hangs/comment_1_e6b854d4625ae3015aea9c5de71a28ef._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-10T18:51:57Z"
+ content="""
+I have followed up there, but the basic answer is, pass --debug to see what it's doing.
+"""]]
diff --git a/doc/forum/Git_annex_hangs/comment_2_4f848771e60c38321a97361b0d1b33dd._comment b/doc/forum/Git_annex_hangs/comment_2_4f848771e60c38321a97361b0d1b33dd._comment
new file mode 100644
index 000000000..cc1b23c08
--- /dev/null
+++ b/doc/forum/Git_annex_hangs/comment_2_4f848771e60c38321a97361b0d1b33dd._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawktbkKjilg70XC9XBFpIgVhtfLYH-0UMHY"
+ nickname="Tad"
+ subject="comment 2"
+ date="2014-10-10T19:08:49Z"
+ content="""
+--debug is useful. Thanks! I tried from scratch and added the backend after creating the file, and that worked. I will use --debug in the future.
+"""]]
diff --git a/doc/forum/Git_annex_hangs/comment_3_a07abdd1dc21a69ad6be0526edaeffc1._comment b/doc/forum/Git_annex_hangs/comment_3_a07abdd1dc21a69ad6be0526edaeffc1._comment
new file mode 100644
index 000000000..452f175a5
--- /dev/null
+++ b/doc/forum/Git_annex_hangs/comment_3_a07abdd1dc21a69ad6be0526edaeffc1._comment
@@ -0,0 +1,13 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 3"
+ date="2014-10-12T20:14:36Z"
+ content="""
+Probably the same problem reported here:
+<https://github.com/datalad/datalad/issues/12>
+
+I guess this is a mismatch between the version of git that git-annex was built with and the version it's using. In particular, git check-attr behavior varies between git older than 1.7.7 and newer, and git-annex picks the version at build time.
+
+So, I think this is a broken Ubuntu PPA, and if that's the current one, the maintainer of the PPA needs to be contacted to update the git-annex to match the git version, or depend on an appropriate git version.
+"""]]
diff --git a/doc/forum/Git_annex_hangs/comment_4_2ba5992c32753ed03ddd5c12264e9acf._comment b/doc/forum/Git_annex_hangs/comment_4_2ba5992c32753ed03ddd5c12264e9acf._comment
new file mode 100644
index 000000000..d46dea1cb
--- /dev/null
+++ b/doc/forum/Git_annex_hangs/comment_4_2ba5992c32753ed03ddd5c12264e9acf._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnjssENdbjzvblxOm4Qr8x2C-BdIV_k_y4"
+ nickname="Tadeusz"
+ subject="comment 4"
+ date="2014-10-15T15:25:27Z"
+ content="""
+I will look into PPA. I hope it won't be this slow:) How long would it take for git annex to add/modify/sync files (~100) taking up 100GB or so? Days? Hours? Minutes?
+"""]]
diff --git a/doc/forum/Git_annex_hangs/comment_5_5fd749f92343079b3916a4d32ddf39c7._comment b/doc/forum/Git_annex_hangs/comment_5_5fd749f92343079b3916a4d32ddf39c7._comment
new file mode 100644
index 000000000..892f2abf2
--- /dev/null
+++ b/doc/forum/Git_annex_hangs/comment_5_5fd749f92343079b3916a4d32ddf39c7._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 5"""
+ date="2014-10-20T15:28:01Z"
+ content="""
+Adding the files will take as long as it takes to read and hash
+the contents of those files from disk. If that's too slow, `--backend=WORM`
+will bypass the hashing, so it will take seconds.
+
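+For example, a minimal way to use that (note that WORM keys are based on the
+filename, size, and modification time rather than a checksum):
+
+    git annex add --backend=WORM .
+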
+Time required to sync files depends on the bandwidth to wherever it's
+syncing with, obviously.
+"""]]
diff --git a/doc/forum/How_To_Permanently_Delete_a_File__63__.mdwn b/doc/forum/How_To_Permanently_Delete_a_File__63__.mdwn
new file mode 100644
index 000000000..fd654079f
--- /dev/null
+++ b/doc/forum/How_To_Permanently_Delete_a_File__63__.mdwn
@@ -0,0 +1,13 @@
+Hi,
+
+We have several large git annex repos where all of the files are on remotes, and we want to go through and clean up the repositories by deleting some subset of files.
+
+What is the fastest way to permanently delete files from a git annex repository with remotes?
+
+I guess I can do ``git annex drop --numcopies=0 <file>; git rm <file>``. Does that actually delete the file permanently?
+
+Is there a faster way?
+
+Thanks,
+
+Mike
diff --git a/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_1_7f2cefb0991789be5a960eb9c0a9df3f._comment b/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_1_7f2cefb0991789be5a960eb9c0a9df3f._comment
new file mode 100644
index 000000000..c0eb14174
--- /dev/null
+++ b/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_1_7f2cefb0991789be5a960eb9c0a9df3f._comment
@@ -0,0 +1,22 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmwjQzWgiD7_I3zw-_91rMRf_6qoThupis"
+ nickname="Mike"
+ subject="comment 1"
+ date="2014-10-09T17:53:04Z"
+ content="""
+I experimented with this by making an empty directory with two empty files and one file with some content. I added them all, then ran ``git annex drop --numcopies=0 <file>; git rm <file>`` on one of the empty files.
+
+Interestingly, what happened is that git annex deleted the empty file from .git/annex/objects, but left the directory structure. In this case the link pointed to:
+
+.git/annex/objects/pX/ZJ/SHA256E-s0--e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855/SHA256E-s0--e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
+
+After the drop command what was left was the following empty directory:
+
+.git/annex/objects/pX/ZJ/SHA256E-s0--e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855/
+
+Also interestingly and terrifyingly, because there were two empty files both pointing to the same object, the ``git annex drop`` command deleted the file in the objects directory, and now the second link points to nothing. The file is gone.
+
+This means that if you have a git annex repository and you have two copies of a file, and you think to yourself, \"oh, let me just delete one, I don't need two\", and you use the method above, you will permanently and irrevocably delete both files. Not good.
+
+Any better ideas on how to do this?
+"""]]
diff --git a/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_2_d13b456c5b3990082c16e78a50f5db91._comment b/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_2_d13b456c5b3990082c16e78a50f5db91._comment
new file mode 100644
index 000000000..959ac282f
--- /dev/null
+++ b/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_2_d13b456c5b3990082c16e78a50f5db91._comment
@@ -0,0 +1,14 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmwjQzWgiD7_I3zw-_91rMRf_6qoThupis"
+ nickname="Mike"
+ subject="comment 2"
+ date="2014-10-09T18:02:59Z"
+ content="""
+Tried another approach:
+
+``git annex unannex <file>; rm <file>``
+
+This does not delete the original, and it only works if you do ``git annex get <file>`` first. It won't update the remote unless you cd into that remote and run ``git annex sync`` there. After that there is the illusion that the file is gone, but its content is still in .git/annex/objects. In my test case I could vim into the file in question in the objects directory and it was still there.
+
+So ``git annex drop`` deletes both copies of duplicate files and so is too dangerous to use, and ``git annex unannex`` doesn't delete the file anywhere. I am a little stuck here; what do I do?
+"""]]
diff --git a/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_3_854c17ff8cb38486c4bef618d1e94919._comment b/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_3_854c17ff8cb38486c4bef618d1e94919._comment
new file mode 100644
index 000000000..404d12ae4
--- /dev/null
+++ b/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_3_854c17ff8cb38486c4bef618d1e94919._comment
@@ -0,0 +1,24 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmwjQzWgiD7_I3zw-_91rMRf_6qoThupis"
+ nickname="Mike"
+ subject="comment 3"
+ date="2014-10-09T18:16:34Z"
+ content="""
+OK, I should have read more before writing.
+
+It seems like the procedure is described here:
+http://git-annex.branchable.com/walkthrough/unused_data/
+
+The process is:
+1. rm files or directories
+2. git annex sync in all remotes (that is a pain, I wish I only had to do it once)
+3. git annex unused
+4. git annex dropunused
+5. Repeat steps 3 and 4 on any repository where the data is stored
+
+That does work for me, it is just slightly cumbersome. If there is another way or if I am missing something, please let me know.
+
+Thanks,
+
+Mike
+"""]]
diff --git a/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_4_9572ad02bbf6845b1ab6d7c612c12a2a._comment b/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_4_9572ad02bbf6845b1ab6d7c612c12a2a._comment
new file mode 100644
index 000000000..6e8ddc595
--- /dev/null
+++ b/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_4_9572ad02bbf6845b1ab6d7c612c12a2a._comment
@@ -0,0 +1,19 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 4"
+ date="2014-10-09T18:23:58Z"
+ content="""
+This post is misplaced, it is not a tip about how to use git-annex, but a question. I will be moving it to the forum after posting this comment.
+
+The right answer is probably to run: `git annex drop $file`, with no --numcopies, no --force, etc. Just let git-annex do its job; it will check the remotes to ensure that enough copies of the file exist to make it safe to drop the content of the file from the local repository.
+(Note that --numcopies=0 is very unsafe; you're asking git-annex to delete even the last copy of your data without checking when you do that.)
+
+If your goal is to get rid of every copy of this file from every repository that has a copy, I suggest just `git rm $file; git commit`, followed by running `git annex unused` in the various repositories to clean them up.
+
+There is a faster way, which is to run `git annex drop --from $remote` for each remote that has the file. If you want to get rid of every copy of the file, for sure, you could add a --force to that.
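+
+Putting that together, a rough sketch (the remote name is a placeholder;
+`git annex whereis $file` will list which remotes actually have a copy):
+
+    git annex drop --force $file
+    git annex drop --force --from myremote $file
+    git rm $file
+    git commit -m "remove $file"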
+
+git-annex deduplicates data, so it's completely expected that if two files have the same content, dropping one will remove the content of the other.
+
+I cannot reproduce any .git/annex/objects/foo empty directories being left behind by git-annex after doing that. Perhaps you are not using a current version of git-annex?
+"""]]
diff --git a/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_5_9c28faabb7d7bd1e83d551e2938d3532._comment b/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_5_9c28faabb7d7bd1e83d551e2938d3532._comment
new file mode 100644
index 000000000..5f6b0bf08
--- /dev/null
+++ b/doc/forum/How_To_Permanently_Delete_a_File__63__/comment_5_9c28faabb7d7bd1e83d551e2938d3532._comment
@@ -0,0 +1,14 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmwjQzWgiD7_I3zw-_91rMRf_6qoThupis"
+ nickname="Mike"
+ subject="comment 5"
+ date="2014-10-13T14:51:47Z"
+ content="""
+Sorry about the misplacement, that was a complete accident.
+
+What I am trying to do is to delete files as quickly as possible from every repository. In this case we are using git annex to move non-critical data from our main RAID drive to an external drive while still maintaining the full directory structure on the RAID drive. This is very valuable because we sometimes won't need the data for months or years, but then we may suddenly need a few files, and git annex makes it very easy to get them back. But we are talking about many terabytes and thousands and thousands of files here, and sometimes we just want to completely get rid of that data, it just takes up too much drive space. I wanted to make it as easy and safe as possible for people to just delete files from every repository, hence the question.
+
+I am nervous about using ``git annex drop --force`` because it seems to me that if there are two identical copies of a file in a repository, that command will kill the content of both... or does that only happen with ``git annex drop --numcopies=0``?
+
+I think the best solution for me seems to be the ``git rm <file>; git annex unused; git annex dropunused; git annex sync`` series of commands. It would just be nice if it were possible to achieve the same results in every repository with a simple command such as ``git annex rm --all <file>``. I recognise that this would be a dangerous command, but frankly I feel like in linux, everyone should be aware just how dangerous ``rm`` is in every context :-)
+"""]]
diff --git a/doc/forum/How_to_list_all_existing_metadata_types__63__.mdwn b/doc/forum/How_to_list_all_existing_metadata_types__63__.mdwn
new file mode 100644
index 000000000..43b3c1cfc
--- /dev/null
+++ b/doc/forum/How_to_list_all_existing_metadata_types__63__.mdwn
@@ -0,0 +1,15 @@
+Is there any way to list all of the existing tag and metadata field types? What I mean is, I have files tagged with several different tags, files with several metadata fields; is there any way to list all the tag and field names being used (not all the files WITH those tags)?
+
+For example, something like:
+
+ git annex metadata --listfields
+ lastchanged
+ month
+ month-lastchanged
+ year
+ year-lastchanged
+
+ git annex metadata --listtags
+ Public
+ Personal
+ Work
diff --git a/doc/forum/How_to_list_all_existing_metadata_types__63__/comment_1_a8c30f697f32a3807661a59482d79b18._comment b/doc/forum/How_to_list_all_existing_metadata_types__63__/comment_1_a8c30f697f32a3807661a59482d79b18._comment
new file mode 100644
index 000000000..3405ea4d6
--- /dev/null
+++ b/doc/forum/How_to_list_all_existing_metadata_types__63__/comment_1_a8c30f697f32a3807661a59482d79b18._comment
@@ -0,0 +1,19 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-09T19:57:54Z"
+ content="""
+git-annex doesn't currently have a way to generate those lists itself, but you could use `git annex metadata --json` to get the metadata of all files, and pipe that json into a parser to get the data you want.
+
+The output could also be parsed in non-json mode. For example, this will list the tags:
+
+ git annex metadata | grep '^ tag=' | cut -d '=' -f 2 | sort | uniq
+
+Note that it's possible for metadata to contain newlines, so parsing the json is a more reliable approach.
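+
+For example, if you have jq installed, something like this should list every
+field name in use (untested; it assumes each json record has a "fields" map):
+
+    git annex metadata --json | jq -r '.fields | keys[]' | sort -u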
+
+Another nice way to see all the tags is to switch to a view of all tags:
+
+ git annex view 'tag=*'
+ ls
+"""]]
diff --git a/doc/forum/How_to_work_with_transfer_repos_manually__63__.mdwn b/doc/forum/How_to_work_with_transfer_repos_manually__63__.mdwn
new file mode 100644
index 000000000..8ec42dba4
--- /dev/null
+++ b/doc/forum/How_to_work_with_transfer_repos_manually__63__.mdwn
@@ -0,0 +1,18 @@
+Hello,
+
+I have 3 repos: desktop, external and server. desktop and external are sometimes connected directly; otherwise they should sync using the server. I want to do it manually without the assistant, since I'd like to learn it that way before I let the assistant do the work.
+
+client and desktop are "wanted standard" and "group client". server is "group transfer".
+
+desktop and external have each other and server in their remotes. server has no remotes.
+
+Is this setup fine that way?
+
+How to use it with the transfer repo?
+
+"git annex sync && git annex copy --to server --auto" after changing files?
+"git annex sync && git annex copy --from server --auto" to update?
+
+Will the files on the server automatically be dropped? Or does the server need to have an active role, i.e. be called via ssh?
+
+Thanks!
diff --git a/doc/forum/How_to_work_with_transfer_repos_manually__63__/comment_1_3dec369405e6b6a4a6e5121546c03712._comment b/doc/forum/How_to_work_with_transfer_repos_manually__63__/comment_1_3dec369405e6b6a4a6e5121546c03712._comment
new file mode 100644
index 000000000..c6d3f9dae
--- /dev/null
+++ b/doc/forum/How_to_work_with_transfer_repos_manually__63__/comment_1_3dec369405e6b6a4a6e5121546c03712._comment
@@ -0,0 +1,11 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 1"""
+ date="2014-10-20T15:29:41Z"
+ content="""
+If you use `git annex sync --content`, it will do a full sync,
+including uploading any necessary files to the transfer repo,
+downloading any files that are on the transfer repo, and
+dropping files from the transfer repo once they've been
+transferred to the client repos.
+"""]]
diff --git a/doc/forum/How_to_work_with_transfer_repos_manually__63__/comment_1_b8f3c09b470d99578a4a17064498dd39._comment b/doc/forum/How_to_work_with_transfer_repos_manually__63__/comment_1_b8f3c09b470d99578a4a17064498dd39._comment
new file mode 100644
index 000000000..51a473ed1
--- /dev/null
+++ b/doc/forum/How_to_work_with_transfer_repos_manually__63__/comment_1_b8f3c09b470d99578a4a17064498dd39._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawk9SYh6N-JUMkYkW4aOk55zC3Vr9KonDV4"
+ nickname="Florian"
+ subject="comment 1"
+ date="2014-10-20T10:51:16Z"
+ content="""
+Ok, git annex copy --to server --auto and git annex get on the receiver did the trick.
+
+But how can I drop files from the transfer server that both clients have? I tried git annex drop . or git annex drop --auto . but it just did nothing.
+
+Thx!
+"""]]
diff --git a/doc/forum/How_to_work_with_transfer_repos_manually__63__/comment_3_be2c594bc1d162cfb1acc3a01fc284f2._comment b/doc/forum/How_to_work_with_transfer_repos_manually__63__/comment_3_be2c594bc1d162cfb1acc3a01fc284f2._comment
new file mode 100644
index 000000000..393cd4497
--- /dev/null
+++ b/doc/forum/How_to_work_with_transfer_repos_manually__63__/comment_3_be2c594bc1d162cfb1acc3a01fc284f2._comment
@@ -0,0 +1,7 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 3"""
+ date="2014-10-20T18:53:31Z"
+ content="""
+git annex drop --auto --from server
+"""]]
diff --git a/doc/forum/Is_there_a_way_to_get_back_to_clean-state_after_unworking_annex_assistant_configuration_attempt__63__.mdwn b/doc/forum/Is_there_a_way_to_get_back_to_clean-state_after_unworking_annex_assistant_configuration_attempt__63__.mdwn
new file mode 100644
index 000000000..cf5a5c8be
--- /dev/null
+++ b/doc/forum/Is_there_a_way_to_get_back_to_clean-state_after_unworking_annex_assistant_configuration_attempt__63__.mdwn
@@ -0,0 +1,28 @@
+I am trying to synchronize "Linux Workstation" and "Android Tablet" with annex assistant through RSync remote and my google account.
+
+I keep failing horribly :-) I followed http://git-annex.branchable.com/assistant/remote_sharing_walkthrough/,
+but I seem to do the steps in the wrong order, because I never managed to materialize the files on my Android device.
+
+Steps went thusly:
+
+1. L) install annex on linux and run assistant and select repository dir
+2. L) add rsync remote, set folder to android_docs
+3. L) add jabber account -> I see progress bars and it seems to sync just fine
+4. A) install annex on android, specify repository
+5. A) add the "share with other devices repository"
+6. A) login with my google account
+7. A) sync fails because I am on failing wifi network
+8. A) try to add RSync remote, accidentally specify annex folder
+9. A) realize mistake, try to remove rsync repo
+10. A) hangs on "cleaning step"
+11. A) battery dies
+12. A) charge battery, start android, move to functional wifi network
+13. A) start annex assistant, hangs in terminal (don't remember error message :-/)
+14. A) remove repo folder from android, remove annex,download apk again, reinstall
+15. A) run annex, select folder, add the "share with other devices repository", login with google account
+16. L) See fail syncing on linux "fatal: Could not read from remote repository. Please make sure you have the correct access rights and the repository exists."
+17. A) android seems to have finished syncing, asks for remote repository
+18. A) specify my rsync account with correct folder "android_docs"
+19. A) Could not resolve hostname git-annex-$servername-$username_$port_$directory
+
+I am starting to think that randomly creating and removing repositories is not the way forward, so I'd like to get to a clean state and redo my setup :)
diff --git a/doc/forum/Is_there_a_way_to_get_back_to_clean-state_after_unworking_annex_assistant_configuration_attempt__63__/comment_1_d77fbbbe3a7438a1e79f175df1f69ef3._comment b/doc/forum/Is_there_a_way_to_get_back_to_clean-state_after_unworking_annex_assistant_configuration_attempt__63__/comment_1_d77fbbbe3a7438a1e79f175df1f69ef3._comment
new file mode 100644
index 000000000..8dc0692e6
--- /dev/null
+++ b/doc/forum/Is_there_a_way_to_get_back_to_clean-state_after_unworking_annex_assistant_configuration_attempt__63__/comment_1_d77fbbbe3a7438a1e79f175df1f69ef3._comment
@@ -0,0 +1,17 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawldUCypSR21BObbzC0Uf8NVd1vZnjYlXZc"
+ nickname="Adam"
+ subject="Ok, found it :-)"
+ date="2014-10-04T10:11:55Z"
+ content="""
+I think I found a reliable, although a bit excessive, way to get back to a clean state:
+
+1. stop all the running daemons
+2. on android, remove folder git-annex.home
+3. on linux, remove ~/.config/git-annex and ~/.ssh/git-annex
+4. on both attempted repositories I removed .git folder
+5. I removed everything I have stored on rsync.net
+
+Then I re-did the steps on a good wifi network and plugged both devices into a power source :-)
+It seems the important step is to wait for annex to share the remote repo with the other device (took ~5 minutes).
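+
+For reference, the linux-side cleanup boiled down to roughly the following commands (the paths are from my setup, adjust as needed):
+
+    git annex assistant --stop        # run in each repository, to stop the daemon
+    rm -rf ~/.config/git-annex ~/.ssh/git-annex
+    rm -rf ~/annex/.git               # the repository I had attempted to set up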
+"""]]
diff --git a/doc/forum/Modification_time_of_files_retained_in_synchronized_remote_copies__63__/comment_1_2b13584998108af0522b898c5d396ba4._comment b/doc/forum/Modification_time_of_files_retained_in_synchronized_remote_copies__63__/comment_1_2b13584998108af0522b898c5d396ba4._comment
new file mode 100644
index 000000000..1d5bf00a5
--- /dev/null
+++ b/doc/forum/Modification_time_of_files_retained_in_synchronized_remote_copies__63__/comment_1_2b13584998108af0522b898c5d396ba4._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmK0703vNSIQsP1mGf-4MAPnsBZiSc6yVo"
+ nickname="Emre"
+ subject="comment 1"
+ date="2014-09-19T21:31:23Z"
+ content="""
+I just saw your post and agree with you. Maybe we should create a bug report. I would also need to keep my files' original timestamps (I may not mind about permissions though).
+"""]]
diff --git a/doc/forum/Move_unsynced_file_in_direct_mode/comment_1_12a797cba753168dfde9e6339c00f481._comment b/doc/forum/Move_unsynced_file_in_direct_mode/comment_1_12a797cba753168dfde9e6339c00f481._comment
new file mode 100644
index 000000000..73a164da2
--- /dev/null
+++ b/doc/forum/Move_unsynced_file_in_direct_mode/comment_1_12a797cba753168dfde9e6339c00f481._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="108.236.230.124"
+ subject="comment 1"
+ date="2014-09-18T18:27:37Z"
+ content="""
+Well, you can run `git annex assistant` or `git annex watch` and it will automatically notice the moved file and commit it. I think this is what you were trying to do when you set annex.autocommit to true (which is the default, so that accomplished nothing).
+
+But your example does show a bug: `git annex add` should add the dangling symlink to git in direct mode, as it already does in indirect mode. Fixed in [[!commit 44e7d6e1fe6e13091adbd572f66412e3601df3c5]].
+"""]]
diff --git a/doc/forum/Move_unsynced_file_in_direct_mode/comment_2_f3aec24668c35780a033f2b035df10ee._comment b/doc/forum/Move_unsynced_file_in_direct_mode/comment_2_f3aec24668c35780a033f2b035df10ee._comment
new file mode 100644
index 000000000..971e70cdc
--- /dev/null
+++ b/doc/forum/Move_unsynced_file_in_direct_mode/comment_2_f3aec24668c35780a033f2b035df10ee._comment
@@ -0,0 +1,20 @@
+[[!comment format=mdwn
+ username="xn"
+ ip="71.59.214.243"
+ subject="comment 2"
+ date="2014-09-18T19:03:30Z"
+ content="""
+Thanks for tracking down that bug and for clearing up my confusion about `annex.autocommit`, Joey.
+
+I didn't realize `annex.autocommit=true` is only used by `git annex assistant` and `git annex watch`. I thought that running `git annex sync` with `annex.autocommit=true` would also commit the change.
+
+A few small changes to `git-annex(1)` could clarify:
+
+ sync [remote ...]
+ ...
+ The sync process involves first committing all local *staged* changes...
+
+ annex.autocommit
+ Set to false to prevent git-annex assistant and *git-annex watch* from automatically committing changes to files in the repository.
+
+"""]]
diff --git a/doc/forum/Preserving_extended_attributes.mdwn b/doc/forum/Preserving_extended_attributes.mdwn
new file mode 100644
index 000000000..5533daeea
--- /dev/null
+++ b/doc/forum/Preserving_extended_attributes.mdwn
@@ -0,0 +1,5 @@
+Hey,
+
+I was wondering if it is currently possible to let the assistant (or git-annex in general) preserve extended attributes. I didn't find any options hinting at this, although it should be possible at least in theory by using the metadata system of git-annex...
+
+Considering that some applications use extended attributes to store custom metadata (like tags etc.), I think it would be valuable to have such an option...
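+
+In case it helps to illustrate what I mean, something along these lines could probably already be scripted on top of the existing metadata support (the attribute and field names here are just made up for the example):
+
+    # copy one extended attribute into git-annex metadata
+    tags=$(getfattr --only-values -n user.xdg.tags somefile.jpg 2>/dev/null)
+    git annex metadata --set "xattr_user_xdg_tags=$tags" somefile.jpg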
diff --git a/doc/forum/Removing_git-annex_repo/comment_1_58fcceb96647a8c7f33d188ae908f3bd._comment b/doc/forum/Removing_git-annex_repo/comment_1_58fcceb96647a8c7f33d188ae908f3bd._comment
new file mode 100644
index 000000000..d03ca6b4e
--- /dev/null
+++ b/doc/forum/Removing_git-annex_repo/comment_1_58fcceb96647a8c7f33d188ae908f3bd._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.144"
+ subject="comment 1"
+ date="2014-09-25T15:44:16Z"
+ content="""
+chmod u+w -R ~/annex/.git
+"""]]
diff --git a/doc/forum/SSH_remote_transfers_queued_but_no_movement/comment_1_fea4e2317f850d6166480cddba088ae5._comment b/doc/forum/SSH_remote_transfers_queued_but_no_movement/comment_1_fea4e2317f850d6166480cddba088ae5._comment
new file mode 100644
index 000000000..d9bf5a97f
--- /dev/null
+++ b/doc/forum/SSH_remote_transfers_queued_but_no_movement/comment_1_fea4e2317f850d6166480cddba088ae5._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="108.236.230.124"
+ subject="comment 1"
+ date="2014-09-18T19:41:39Z"
+ content="""
+First, take a look at the output of `ps fax` .. you should see a `git-annex assistant` process and near it there ought to be a `git annex transferkeys` process. See if that process has any children under it, like perhaps a rsync. If so, it might just be stalled talking to the host for some reason.
+
+The best way to debug it further is probably to run `git annex copy --to $remote` at the command line, passing the name of your remote repository. See if it also stalls there. If so, add a --debug and you can see the actual rsync commands it's using, and perhaps work out the problem from there.
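+
+Something along these lines (the remote name and file are placeholders):
+
+    ps fax | less                                    # look for git-annex assistant, transferkeys, and any rsync children
+    git annex copy --debug --to myremote somefile    # see if it stalls here too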
+"""]]
diff --git a/doc/forum/Stale_keys_and_.cache_files_left_in_.git__47__annex__47__objects/comment_1_2aa80b317863a99e676a375d907d0e84._comment b/doc/forum/Stale_keys_and_.cache_files_left_in_.git__47__annex__47__objects/comment_1_2aa80b317863a99e676a375d907d0e84._comment
new file mode 100644
index 000000000..34cbb011a
--- /dev/null
+++ b/doc/forum/Stale_keys_and_.cache_files_left_in_.git__47__annex__47__objects/comment_1_2aa80b317863a99e676a375d907d0e84._comment
@@ -0,0 +1,13 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-02T16:23:35Z"
+ content="""
+I cannot see a way that eg, `dropunused` could leave empty object directories. The few functions that remove content from a repository call `cleanObjectLoc` to prune the empty directories that result. Of course, the empty directories can be safely removed.
+
+IIRC older versions of direct mode might have left stale .cache and .map files behind. They can be deleted if you're not using direct mode.
+
+The only way I can see that a dash could come before the 'm' and the mtime is if the mtime were negative. I don't know how that would happen, but the code has certainly never put a dash there, and
+WORM-s123-m-123456789--name is not a valid git-annex key; git-annex cannot parse it.
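+
+If you want to clean up the leftover empty directories by hand, something like this (run from the top of the repository) should do it:
+
+    find .git/annex/objects -depth -type d -empty -delete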
+"""]]
diff --git a/doc/forum/This_account_is_restricted_by_rssh._Allowed_commands:_scp_rsync__160__.mdwn b/doc/forum/This_account_is_restricted_by_rssh._Allowed_commands:_scp_rsync__160__.mdwn
new file mode 100644
index 000000000..932c78a9a
--- /dev/null
+++ b/doc/forum/This_account_is_restricted_by_rssh._Allowed_commands:_scp_rsync__160__.mdwn
@@ -0,0 +1,5 @@
+Hello,
+
+I'm using the git-annex assistant (on Mac Mavericks) and I'm trying to create a new remote rsync repo. I have the details and everything, but I can't do it over ssh; I need the call to use rsync, otherwise I get: **This account is restricted by rssh. Allowed commands: scp rsync**. Can this be changed manually? How can I create a remote rsync repo?
+
+Thanks
diff --git a/doc/forum/This_account_is_restricted_by_rssh._Allowed_commands:_scp_rsync__160__/comment_1_68e911629da672473bd6188407a68be2._comment b/doc/forum/This_account_is_restricted_by_rssh._Allowed_commands:_scp_rsync__160__/comment_1_68e911629da672473bd6188407a68be2._comment
new file mode 100644
index 000000000..b6fa2d1a1
--- /dev/null
+++ b/doc/forum/This_account_is_restricted_by_rssh._Allowed_commands:_scp_rsync__160__/comment_1_68e911629da672473bd6188407a68be2._comment
@@ -0,0 +1,13 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-02T15:33:58Z"
+ content="""
+You can set it up manually at the command line, and once it's set up, the assistant will be able to transfer files to it using rsync. See [[special_remotes/rsync]] for some setup examples. Note that the rsync server will need to be configured, somehow, to let you log in without giving a password -- one typical way this might be set up is using .ssh/authorized_keys on the rsync server.
+
+The assistant doesn't try to set up rsync special remotes because this is not very common, and there's no one way it can use to set up an ssh key so it can log in without a password
+that will work across different rsync server setups. It does support setting up rsync.net just because that's one I know how to handle.
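+
+For example, a manual setup might look roughly like this (the remote name, host, and path are placeholders; see the linked page for details):
+
+    git annex initremote myrsync type=rsync rsyncurl=user@rsyncserver:/path/to/store encryption=shared
+    git annex copy --to myrsync .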
+
+
+"""]]
diff --git a/doc/forum/Using_the_Git-Annex_Assistant_as_a_Backup_and_Syncing_Service/comment_1_7070f6e7e05fba7686d8620d62906a83._comment b/doc/forum/Using_the_Git-Annex_Assistant_as_a_Backup_and_Syncing_Service/comment_1_7070f6e7e05fba7686d8620d62906a83._comment
new file mode 100644
index 000000000..07cc63434
--- /dev/null
+++ b/doc/forum/Using_the_Git-Annex_Assistant_as_a_Backup_and_Syncing_Service/comment_1_7070f6e7e05fba7686d8620d62906a83._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-02T15:54:44Z"
+ content="""
+rsync.net alone is not sufficient, because it's only used for storing the (encrypted) contents of files, not for syncing the git repository, or finding out when the other computer has made a change to the repository.
+
+You can use rsync.net plus xmpp. However, xmpp is not encrypted when it passes through the xmpp server.
+
+A fully encrypted option is to install git-annex on a server accessible by ssh, and then use the assistant to set up an encrypted git repository on the server. No xmpp needed in this configuration.
+"""]]
diff --git a/doc/forum/XMPP_problem_behind_router.mdwn b/doc/forum/XMPP_problem_behind_router.mdwn
new file mode 100644
index 000000000..5eec76a07
--- /dev/null
+++ b/doc/forum/XMPP_problem_behind_router.mdwn
@@ -0,0 +1,3 @@
+I'm trying to configure a jabber account for use with git-annex, but it seems that something's wrong as soon as I try to go through my router (wired or wireless). Compared to directly connecting to my modem, wireshark shows a lot of TCP retransmissions, eventually resulting in "Unable to connect to the Jabber server. Maybe you entered the wrong password? (Error message: host gmail.com:5222 failed: connect: timeout (Connection timed out))" in the webapp.
+
+I've tried to configure the account both in the webapp and manually in the .git/annex/creds/xmpp file, but it doesn't seem to make a difference. It's able to connect if I directly connect it to my modem, so I'm fairly sure it's not a problem at my computer or with the credentials. It doesn't appear to be a problem at the firewall on the router, but I could certainly be missing something. Are there some other tests I could try to narrow down the problem?
diff --git a/doc/forum/XMPP_problem_behind_router/comment_1_25a7f8dc5cf14cda4d76b2f8c6ca77d5._comment b/doc/forum/XMPP_problem_behind_router/comment_1_25a7f8dc5cf14cda4d76b2f8c6ca77d5._comment
new file mode 100644
index 000000000..04b1965c0
--- /dev/null
+++ b/doc/forum/XMPP_problem_behind_router/comment_1_25a7f8dc5cf14cda4d76b2f8c6ca77d5._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://me.yahoo.com/a/wbh0dY54mcPwTpeOweuPQ8JiZrH3hg--#9b726"
+ nickname="Joe"
+ subject="comment 1"
+ date="2014-09-23T01:15:23Z"
+ content="""
+I forgot to mention that XMPP via other clients (e.g., empathy) works fine.
+"""]]
diff --git a/doc/forum/XMPP_problem_behind_router/comment_2_3186ebe32c30764b9fd53625dd3e4eda._comment b/doc/forum/XMPP_problem_behind_router/comment_2_3186ebe32c30764b9fd53625dd3e4eda._comment
new file mode 100644
index 000000000..63387ae3d
--- /dev/null
+++ b/doc/forum/XMPP_problem_behind_router/comment_2_3186ebe32c30764b9fd53625dd3e4eda._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.144"
+ subject="comment 2"
+ date="2014-09-25T15:48:39Z"
+ content="""
+I don't think that google's XMPP server is at gmail.com. This suggests that the SRV record lookup to get from the email domain to the xmpp server failed somehow. So, I'd look for an issue on the router's DNS server, or perhaps reconfigure DNS to bypass that server.
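+
+One quick way to check whether SRV lookups work through the router is something like:
+
+    dig +short SRV _xmpp-client._tcp.gmail.com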
+"""]]
diff --git a/doc/forum/XMPP_problem_behind_router/comment_3_7fa8fe8cb92993c935ba2dbfb2aef728._comment b/doc/forum/XMPP_problem_behind_router/comment_3_7fa8fe8cb92993c935ba2dbfb2aef728._comment
new file mode 100644
index 000000000..d8ee076f6
--- /dev/null
+++ b/doc/forum/XMPP_problem_behind_router/comment_3_7fa8fe8cb92993c935ba2dbfb2aef728._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://me.yahoo.com/a/wbh0dY54mcPwTpeOweuPQ8JiZrH3hg--#9b726"
+ nickname="Joe"
+ subject="comment 3"
+ date="2014-09-29T01:38:14Z"
+ content="""
+Thanks, that seemed to be the problem! My router's dnsmasq had the 'filterwin2k' option set, which apparently [blocks queries for SRV records](http://wiki.openwrt.org/doc/howto/dhcp.dnsmasq#sip-phones.and.dnsmasq). This may be fixed in newer OpenWRT versions, but my device is no longer supported.
+"""]]
diff --git a/doc/forum/add_only_binary_files__63__.mdwn b/doc/forum/add_only_binary_files__63__.mdwn
new file mode 100644
index 000000000..4b40db5dd
--- /dev/null
+++ b/doc/forum/add_only_binary_files__63__.mdwn
@@ -0,0 +1 @@
+Is there a way to only add binary files with git annex add command?
diff --git a/doc/forum/add_only_binary_files__63__/comment_1_7ce3be5bafd62ce5ed78bcd9323039cc._comment b/doc/forum/add_only_binary_files__63__/comment_1_7ce3be5bafd62ce5ed78bcd9323039cc._comment
new file mode 100644
index 000000000..e53cb1f0f
--- /dev/null
+++ b/doc/forum/add_only_binary_files__63__/comment_1_7ce3be5bafd62ce5ed78bcd9323039cc._comment
@@ -0,0 +1,16 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-06T15:36:54Z"
+ content="""
+If you can configure `annex.largefiles` to match only binary files, then `git annex add` will respect it, and only add those files.
+
+For example, if you were working on a game written in C, and wanted to use git-annex only for the game art, but not the source code, you could configure it:
+
+ git config annex.largefiles 'largerthan=100kb and not (include=*.c or include=*.h)'
+
+This doesn't currently support looking at the file content to determine eg, its MIME type. That's been suggested as an added feature before.
+
+More simply, if you `git add` the non-binary files yourself first, `git annex add` will skip over those files and only add the other files.
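+
+For example (hypothetical file patterns):
+
+    git add '*.c' '*.h' Makefile
+    git annex add .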
+"""]]
diff --git a/doc/forum/annex_merge_creates___34__synced__47____42____34___branches/comment_4_79219e920a6beb4bd3265571f59f51cb._comment b/doc/forum/annex_merge_creates___34__synced__47____42____34___branches/comment_4_79219e920a6beb4bd3265571f59f51cb._comment
new file mode 100644
index 000000000..8e0764948
--- /dev/null
+++ b/doc/forum/annex_merge_creates___34__synced__47____42____34___branches/comment_4_79219e920a6beb4bd3265571f59f51cb._comment
@@ -0,0 +1,36 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlog_5wIICaMcrKTexlFNA6IO6UTp323aE"
+ nickname="Torkaly"
+ subject="comment 4"
+ date="2014-09-19T18:22:36Z"
+ content="""
+delete the *synced/master* branch:
+```
+$ git branch -d synced/master
+Deleted branch synced/master (was 20ec8b3).
+```
+
+then call *annex merge*:
+```
+$ git annex merge
+merge git-annex ok
+```
+
+check branches:
+```
+$ git branch -a
+ git-annex
+* master
+ synced/master
+ remotes/origin/git-annex
+ remotes/origin/master
+```
+and there is the *synced/master* branch again.
+
+But that's not my problem. My problem was: how to use annex with a central repository.
+I did that by deleting all remote synced/* branches. And now I'm updating the git-annex branch by `git fetch`ing and
+`git annex merge`ing again.
+
+PS: the MD for code blocks is broken
+
+"""]]
diff --git a/doc/forum/assistant_created_encrypted__backup_remote:_Howto_restore__63__/comment_5_c1d247fa128c0a0fc899284f5f95002c._comment b/doc/forum/assistant_created_encrypted__backup_remote:_Howto_restore__63__/comment_5_c1d247fa128c0a0fc899284f5f95002c._comment
new file mode 100644
index 000000000..474bd6673
--- /dev/null
+++ b/doc/forum/assistant_created_encrypted__backup_remote:_Howto_restore__63__/comment_5_c1d247fa128c0a0fc899284f5f95002c._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmK0703vNSIQsP1mGf-4MAPnsBZiSc6yVo"
+ nickname="Emre"
+ subject="Understanding"
+ date="2014-09-19T21:24:19Z"
+ content="""
+If the box does not have the encryption keys in this \"shared encryption\" scenario, and if you had only your computer and this remote repo, does that mean losing your computer (ie your git repository) would also mean losing access to that encrypted content? So an encrypted remote, even if marked as full backup, is not actually a backup unless you have a 3rd computer that has the same git repo, in case you lose your original computer or accidentally wipe it...
+"""]]
diff --git a/doc/forum/assistant_created_encrypted__backup_remote:_Howto_restore__63__/comment_6_cf877a3502802492cd2bc3012cb2d779._comment b/doc/forum/assistant_created_encrypted__backup_remote:_Howto_restore__63__/comment_6_cf877a3502802492cd2bc3012cb2d779._comment
new file mode 100644
index 000000000..3765a67cc
--- /dev/null
+++ b/doc/forum/assistant_created_encrypted__backup_remote:_Howto_restore__63__/comment_6_cf877a3502802492cd2bc3012cb2d779._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.144"
+ subject="comment 6"
+ date="2014-09-25T15:59:34Z"
+ content="""
+Right.
+
+So, I think I should go change the description displayed by the webapp to \"full backup (file contents only)\" and \"full backup (entire git repository)\" or so. It's a little hard to word it precisely without making it hard to understand.
+
+Or, the webapp could display a nudge to make a clone when no other clones of the git repository exist. I think that's probably more valuable, so [[todo_added|todo/webapp_nudge_when_less_than_numcopies_clones]].
+"""]]
diff --git a/doc/forum/big_overhead/comment_12_475d5af95adcfcd3a51e10f270205eb7._comment b/doc/forum/big_overhead/comment_12_475d5af95adcfcd3a51e10f270205eb7._comment
new file mode 100644
index 000000000..53e15b764
--- /dev/null
+++ b/doc/forum/big_overhead/comment_12_475d5af95adcfcd3a51e10f270205eb7._comment
@@ -0,0 +1,71 @@
+[[!comment format=mdwn
+ username="rasmus"
+ ip="146.185.23.178"
+ subject="comment 12"
+ date="2014-09-19T00:43:56Z"
+ content="""
+Hi Joey,
+
+Thanks for giving the thread a more appropriate title and thanks for the helpful messages.
+
+Let me start with the easy points:
+
+
+* Looking at my log file of installed packages, I have never used `etckeeper` on my system. So unless it could have entered through `annex`, I think we can rule that one out.
+* According to `git log` the repos are from January 2014, when I restarted my repos.
+
+
+ commit 029a8e76ab5f66aa4390987130985550a1ccd69c
+ Author: Rasmus <w530@domain.eu>
+ Date: Thu Jan 23 21:06:13 2014 +0100
+
+ created repository
+
+
+* When I start git repos I typically just use \"init\" so I don't think I did the 2012 commits.
+* I checked out one of the 74mb files. When I do `file test.blob` it shows `test.blob: GPG symmetrically encrypted data (CAST5 cipher)`. But none of my normal passwords worked. Could such a gpg'ed file be from local network connections where the assistant asks for a passphrase? I'm pretty sure that my transfer repo has only been using `gcrypt`, and I believe I \"restarted\" my repos because I switched to `gcrypt` repos. Also, my transfer repo is 10GB as well, which sounds big for a transfer repo.
+
+I performed a similar \"analysis\" on the `conf.annex` repo which should contain mostly no binary files (some 16x16 pngs etc).
+
+`conf.annex` has 727 unreachable objects and 3477 commits in total. Of these unreachable objects, 338 are commits. Here's an example of a larger commit message of an unreachable commit.
+
+ commit 601c10f9512e8d3502d9dd52ef409560ebb5b7e0
+ Author: root <root@localhost>
+ Date: Mon Dec 31 19:00:01 2012 -0400
+
+ Initial commit
+
+ diff --git a/6fbbea493cdec9d912d256374199cc4c012022d35524c8789a7aceeb953442a5 b/6fbbea493cdec9d912d256374199cc4c012022d35524c8789a7aceeb953442a5
+ new file mode 100644
+ index 0000000..ea5fcc3
+ Binary files /dev/null and b/6fbbea493cdec9d912d256374199cc4c012022d35524c8789a7aceeb953442a5 differ
+ diff --git a/91bd0c092128cf2e60e1a608c31e92caf1f9c1595f83f2890ef17c0e4881aa0a b/91bd0c092128cf2e60e1a608c31e92caf1f9c1595f83f2890ef17c0e4881aa0a
+ new file mode 100644
+ index 0000000..a86c1a9
+ Binary files /dev/null and b/91bd0c092128cf2e60e1a608c31e92caf1f9c1595f83f2890ef17c0e4881aa0a differ
+ diff --git a/9da3fcfc1635c674012c35d90c21adce3c35440e629d64fe117fe349a6b3e194 b/9da3fcfc1635c674012c35d90c21adce3c35440e629d64fe117fe349a6b3e194
+ new file mode 100644
+ index 0000000..ef1d71c
+ Binary files /dev/null and b/9da3fcfc1635c674012c35d90c21adce3c35440e629d64fe117fe349a6b3e194 differ
+ diff --git a/ad4ae79c29b3756f7e41257db7454f3c319112d06385a8bc12d28209a82f2594 b/ad4ae79c29b3756f7e41257db7454f3c319112d06385a8bc12d28209a82f2594
+ new file mode 100644
+ index 0000000..61d3e5b
+ Binary files /dev/null and b/ad4ae79c29b3756f7e41257db7454f3c319112d06385a8bc12d28209a82f2594 differ
+ diff --git a/bd0e9cb492077e0c090bc62892c8de438c51a956c8215b2c68de7caa7e2431cc b/bd0e9cb492077e0c090bc62892c8de438c51a956c8215b2c68de7caa7e2431cc
+ new file mode 100644
+ index 0000000..92e9bd7
+ Binary files /dev/null and b/bd0e9cb492077e0c090bc62892c8de438c51a956c8215b2c68de7caa7e2431cc differ
+
+Across all commits 6006 objects are mentioned, but only 371 are unique.
+
+I checked out one blob and again `file` reports `GPG symmetrically encrypted data (CAST5 cipher)`. Interestingly, for `conf.annex` I get this line when trying to decrypt:
+
+ gpg: DBG: cleared passphrase cached with ID: SBF83A0F822D0F664
+
+
+For `doc.annex` I get
+
+ gpg: DBG: cleared passphrase cached with ID: S32DEAD1E8DD06A4D
+
+And on my other computer I see a third ID. I'm not sure if this means anything when files are symmetrically encrypted, though.
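+
+In case anyone wants to reproduce this kind of analysis, something like the following should work (the object id is a placeholder):
+
+    git fsck --unreachable | awk '/blob/ { print $3 }' | head
+    git cat-file blob OBJECTID > test.blob
+    file test.blob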
+"""]]
diff --git a/doc/forum/big_overhead/comment_13_1c8cc992f04fc63179094c494bd25025._comment b/doc/forum/big_overhead/comment_13_1c8cc992f04fc63179094c494bd25025._comment
new file mode 100644
index 000000000..229ef256a
--- /dev/null
+++ b/doc/forum/big_overhead/comment_13_1c8cc992f04fc63179094c494bd25025._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="108.236.230.124"
+ subject="I know what it is now"
+ date="2014-09-19T02:43:22Z"
+ content="""
+These objects are the ones written by git-remote-gcrypt when pushing to a remote. That's why the weird dates, root pseudo-commit, crazy filenames, and big gpg encrypted blobs. All countermeasures that git-remote-gcrypt uses to keep your encrypted git remote safe and not leak information about what's in it.
+
+So, this is a bug in git-remote-gcrypt. It needs to clean these objects up after pushing them! (Also after failed pushes.)
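+
+Until that's fixed, running garbage collection by hand should reclaim the space, assuming those unreachable objects really are just leftovers from gcrypt pushes:
+
+    git gc --prune=now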
+"""]]
diff --git a/doc/forum/big_overhead/comment_14_cbfb3d557915258e72c65a4e84df77a9._comment b/doc/forum/big_overhead/comment_14_cbfb3d557915258e72c65a4e84df77a9._comment
new file mode 100644
index 000000000..87632c909
--- /dev/null
+++ b/doc/forum/big_overhead/comment_14_cbfb3d557915258e72c65a4e84df77a9._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="108.236.230.124"
+ subject="comment 14"
+ date="2014-09-19T02:59:36Z"
+ content="""
+<https://github.com/bluss/git-remote-gcrypt/issues/16>
+"""]]
diff --git a/doc/forum/big_overhead/comment_15_b973529bae549bcbaaae792f0403989b._comment b/doc/forum/big_overhead/comment_15_b973529bae549bcbaaae792f0403989b._comment
new file mode 100644
index 000000000..a875cc34e
--- /dev/null
+++ b/doc/forum/big_overhead/comment_15_b973529bae549bcbaaae792f0403989b._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="rasmus"
+ ip="217.130.110.20"
+ subject="comment 15"
+ date="2014-09-19T06:29:58Z"
+ content="""
+Brilliant! Thanks for taking the time to analyze the issue and taking the bug to `gcrypt`.
+
+[I'm surprised that a different key than my git-annex key is used and that it's a symmetric key, but I will explore the technology on my own].
+"""]]
diff --git a/doc/forum/drop__47__whereis_not_showing_gcrypted_special_ssh_remote/comment_2_2c14d88e55ea7d4edc90ce0091025f32._comment b/doc/forum/drop__47__whereis_not_showing_gcrypted_special_ssh_remote/comment_2_2c14d88e55ea7d4edc90ce0091025f32._comment
new file mode 100644
index 000000000..a181940d2
--- /dev/null
+++ b/doc/forum/drop__47__whereis_not_showing_gcrypted_special_ssh_remote/comment_2_2c14d88e55ea7d4edc90ce0091025f32._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 2"
+ date="2014-10-02T15:48:02Z"
+ content="""
+Sorry that it took me so long to get back to you.
+
+The git remote that corresponds to your server should have an annex-uuid setting. (Eg, remote.server.annex-uuid.) Look up the uuid. Then look in `git-annex info`. I'd expect the uuid to be listed there. It might be that somehow two repositories got the same uuid set (probability says no, but things find ways to break). If that did happen, I think it would replicate what you've reported, and in that case, the server's uuid would be listed by `git annex info`, but perhaps it would have the name of the usb drive.
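+
+Concretely, something along these lines (using the example remote name from above):
+
+    uuid=$(git config remote.server.annex-uuid)
+    git annex info | grep $uuid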
+"""]]
diff --git a/doc/forum/files_being_dropped_undesirably.mdwn b/doc/forum/files_being_dropped_undesirably.mdwn
new file mode 100644
index 000000000..2a4d1bf7c
--- /dev/null
+++ b/doc/forum/files_being_dropped_undesirably.mdwn
@@ -0,0 +1,47 @@
+I currently am using 3 repositories for my personal annex, and on each of them I'm running the assistant (they are normal git annex repositories). However all my files are migrating to my work desktop. My other two repositories seem to keep dropping them.
+
+Last night on my laptop I did a "git annex get *" to pull _all_ the files onto it. I saw in the .git/annex/daemon.log that each file was being dropped as soon as it was gotten. The output of "git annex get" showed files being transferred across, and the .git/annex/daemon.log showed files being dropped straight away. Currently I'd like to keep all my files on all my repositories (and perhaps later I'll revise that).
+
+Could someone please help me understand why annex is dropping my files, and what I could do to keep them on all my repositories?
+
+Here is the output of a get for a single file:
+
+ ~/Documents/personal-annex $ git annex get 2014/09/15/IMG_1123.JPG
+ get 2014/09/15/IMG_1123.JPG (from pea15.local_Documents_annexpersonal...)
+ SHA256E-s1841221--deeaa13935907ad606f941397bb57432c1eccfd5c361b8c16d2b19bfbe8437a6.JPG
+ 1,841,221 100% 11.40MB/s 0:00:00 (xfr#1, to-chk=0/1)
+ ok
+ (Recording state in git...)
+
+
+Here is the corresponding daemon.log output:
+
+ [2014-10-09 09:11:34 AEDT] Committer: Committing changes to git
+ [2ok
+ (Recording state in git...)
+ (Recording state in git...)
+ (Recording state in git...)
+ drop 2014/09/15/IMG_1123.JPG 01(checking pea15.local_Documents_annexpersonal...) 4-10-09 09:11:34 AEDT] Pusher: Syncing with pea15.local_Documents_annexpersonal
+ [2014-10-09 09:11:35 AEDT] Committer: Committing changes to git
+ To ssh://geoffc@git-annex-pea-15-geoffc_Documents.2Fannex.2Dpersonal/~/Documents/annex-personal/
+ 04742c0..d1a5a36 git-annex -> synced/git-annex
+ [2014-10-09 09:11:38 AEDT] Pusher: Syncing with pea15.local_Documents_annexpersonal
+ Everything up-to-date
+
+
+And here is a snippet from my .git/config:
+
+ [annex]
+ uuid = 57c4e6d1-0c6b-4c49-a235-4119d3864c14
+ version = 5
+ direct = true
+ #diskreserve = 2 gigabyte
+ autoupgrade = ask
+ debug = false
+ expireunused = false
+ autocommit = true
+ [remote "pea15.local_Documents_annexpersonal"]
+ url = ssh://geoffc@git-annex-pea-15-geoffc_Documents.2Fannex.2Dpersonal/~/Documents/annex-personal/
+ fetch = +refs/heads/*:refs/remotes/pea15.local_Documents_annexpersonal/*
+ annex-uuid = 2ef6bbfe-662f-48ba-aa52-8e2f82bcfb15
+ annex-cost = 175.0
diff --git a/doc/forum/files_being_dropped_undesirably/comment_1_d03f8ed7d3f3da58612bf238c1790fb4._comment b/doc/forum/files_being_dropped_undesirably/comment_1_d03f8ed7d3f3da58612bf238c1790fb4._comment
new file mode 100644
index 000000000..887b12ac9
--- /dev/null
+++ b/doc/forum/files_being_dropped_undesirably/comment_1_d03f8ed7d3f3da58612bf238c1790fb4._comment
@@ -0,0 +1,11 @@
+[[!comment format=mdwn
+ username="Bram"
+ ip="81.20.68.186"
+ subject="Group and wanted expression"
+ date="2014-10-09T12:09:58Z"
+ content="""
+What are the group and wanted expressions for your repository?
+
+ git annex group .
+ git annex wanted .
+"""]]
diff --git a/doc/forum/files_being_dropped_undesirably/comment_2_7d885abebfec789348639494b1bb1829._comment b/doc/forum/files_being_dropped_undesirably/comment_2_7d885abebfec789348639494b1bb1829._comment
new file mode 100644
index 000000000..7bdb9243c
--- /dev/null
+++ b/doc/forum/files_being_dropped_undesirably/comment_2_7d885abebfec789348639494b1bb1829._comment
@@ -0,0 +1,24 @@
+[[!comment format=mdwn
+ username="go8ose"
+ ip="203.26.118.202"
+ subject="Group and wanted expression"
+ date="2014-10-09T22:19:01Z"
+ content="""
+Here is the group and wanted output on my laptop (that is dropping all the files):
+
+ ~/Documents/personal-annex $ git annex group .
+ unwanted
+ ~/Documents/personal-annex $ git annex wanted .
+ standard
+
+Here is the output on my desktop (that seems to be keeping all the files):
+
+ ~/Documents/annex-personal $ git annex group .
+
+ ~/Documents/annex-personal $ git annex wanted .
+
+
+
+
+Reading the manpage suggests I might want to change this. However, I haven't seen a list of the standard predefined groups in the man page, nor an explanation of how each predefined group behaves. Is that documented somewhere else?
+"""]]
diff --git a/doc/forum/files_being_dropped_undesirably/comment_3_7c70b58f89408304055eefb1b166ef2e._comment b/doc/forum/files_being_dropped_undesirably/comment_3_7c70b58f89408304055eefb1b166ef2e._comment
new file mode 100644
index 000000000..00d511596
--- /dev/null
+++ b/doc/forum/files_being_dropped_undesirably/comment_3_7c70b58f89408304055eefb1b166ef2e._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="go8ose"
+ ip="203.26.118.202"
+ subject="comment 3"
+ date="2014-10-09T22:31:49Z"
+ content="""
+To answer my question about documentation on the 'groups' and 'wanted' features, I've found [[preferred_content]] and [[preferred_content/standard_groups]].
+
+In my case I've now set up my laptop to use group 'client' for the '.' repository. Similarly I've set 'client' on my desktop. I found my home server was already set to 'backup'. I still don't understand \"git annex\" well enough to know what's going on. I thought with these settings that running \"git annex get --auto\" on my laptop would have fetched all the files, but it didn't. A \"git annex get *\" is fetching all the files though, so I'm achieving what I want.
+"""]]
diff --git a/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved.mdwn b/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved.mdwn
new file mode 100644
index 000000000..a4386add7
--- /dev/null
+++ b/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved.mdwn
@@ -0,0 +1 @@
+The settings don't seem to be saving. If I edit it, then immediately open the file again, the settings are what they were before. This happens no matter what repo I do it from.
diff --git a/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_1_81111f59caea9f70cb9d597381e42c96._comment b/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_1_81111f59caea9f70cb9d597381e42c96._comment
new file mode 100644
index 000000000..27a719853
--- /dev/null
+++ b/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_1_81111f59caea9f70cb9d597381e42c96._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-12T17:26:35Z"
+ content="""
+Please paste the line that you've configured, and then show how it looks when you open vicfg again.
+"""]]
diff --git a/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_2_8ca9156d21d9f3db0d83d6aa9b69caa0._comment b/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_2_8ca9156d21d9f3db0d83d6aa9b69caa0._comment
new file mode 100644
index 000000000..467d45740
--- /dev/null
+++ b/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_2_8ca9156d21d9f3db0d83d6aa9b69caa0._comment
@@ -0,0 +1,17 @@
+[[!comment format=mdwn
+ username="ghen1"
+ ip="66.41.70.34"
+ subject="comment 2"
+ date="2014-10-12T21:03:01Z"
+ content="""
+The custom settings I had made before (the rest are still commented out):
+
+    trust a8372263-6eba-47e2-9604-3e2c9bbb6d42 = trusted
+    wanted 3e2bcb1e-39ad-4863-a9ce-a18a262644c1 = present or (not include=/Books/*)
+    wanted a8372263-6eba-47e2-9604-3e2c9bbb6d42 = present or include=/Books/*
+
+I am trying to comment them out again.
+
+Based on your answer, I guessed that these settings don't just reset to default when they are commented out, so I tried changing the above trusted setting to semitrusted, and sure enough it worked.
+
+So it seems once these settings are made they can't be commented out again, and any invalid settings are ignored?
+"""]]
diff --git a/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_3_9da6ca0250ab0dcfc9a012df75e2e711._comment b/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_3_9da6ca0250ab0dcfc9a012df75e2e711._comment
new file mode 100644
index 000000000..375ee1254
--- /dev/null
+++ b/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_3_9da6ca0250ab0dcfc9a012df75e2e711._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 3"
+ date="2014-10-12T23:26:27Z"
+ content="""
+vicfg won't accept invalid input; if you make a typo in the syntax, it'll land you back in the editor with the problem line marked.
+
+But right, commenting out a line does not reset it to the default. Basically, once one of these settings is changed, there is no longer a default to go back to. You have to leave the line uncommented and change the value to what you want it to be. Ie, \"= semitrusted\" for the trust levels, and \"= \" for the wanted expressions.
+
+I think that vicfg should avoid this [[todo/vicfg_comment_gotcha]] and have filed that as a todo item.
+"""]]
diff --git a/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_4_55c52c45f3aaddfb63a1f53efe2ee582._comment b/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_4_55c52c45f3aaddfb63a1f53efe2ee582._comment
new file mode 100644
index 000000000..0e73c4869
--- /dev/null
+++ b/doc/forum/git-annex_vicfg_preferred_content_settings_are_not_being_saved/comment_4_55c52c45f3aaddfb63a1f53efe2ee582._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 4"""
+ date="2014-10-20T15:40:38Z"
+ content="""
+Right. In the meantime, I've fixed the behavior of vicfg when deleting
+lines, so it resets them to the default automatically.
+"""]]
diff --git a/doc/forum/git_annex_ls___47___metadata_in_git_annex_whereis/comment_3_24c54ed70220974b98700bf717d1e770._comment b/doc/forum/git_annex_ls___47___metadata_in_git_annex_whereis/comment_3_24c54ed70220974b98700bf717d1e770._comment
new file mode 100644
index 000000000..67ff59d32
--- /dev/null
+++ b/doc/forum/git_annex_ls___47___metadata_in_git_annex_whereis/comment_3_24c54ed70220974b98700bf717d1e770._comment
@@ -0,0 +1,29 @@
+[[!comment format=mdwn
+ username="sudoman"
+ ip="216.15.125.93"
+ subject="ls symlink workaround; idea for a solution"
+ date="2014-09-29T18:58:23Z"
+ content="""
+As a workaround, you could make a bash alias for `ls -l` -> `ls -lL`. The problem with this is that links to other links are fully dereferenced.
+
+What looks like this in a non-git-annex directory with `ls -lh`:
+
+ total 3.8M
+ -rw-r--r-- 1 sudoman sudoman 3.8M Sep 29 13:56 42x3551_02.pdf
+ lrwxrwxrwx 1 sudoman sudoman 14 Sep 29 14:00 tmp -> 42x3551_02.pdf
+
+looks like this in an indirect git annex repo with `ls -lhL`:
+
+ total 7.5M
+ -r--r--r-- 1 sudoman sudoman 3.8M Sep 29 13:56 42x3551_02.pdf
+ -r--r--r-- 1 sudoman sudoman 3.8M Sep 29 13:56 tmp
+
+
+The ls alias is a bit hackish, but for some purposes it's an improvement.
+
+rsync may work as desired when using a command like `rsync -l --safe-links` (I haven't tried it; users might want to experiment by adding `--exclude` to that command).
+
+A potential solution for ls (and cp) could be the inclusion of a patched version under `git annex util ls`. Writing shim programs using `LD_PRELOAD` instead of patching may drastically reduce the amount of code needing future security updates.
+
+"""]]
diff --git a/doc/forum/git_annex_sync:_only_git-annex.mdwn b/doc/forum/git_annex_sync:_only_git-annex.mdwn
new file mode 100644
index 000000000..84ae140cd
--- /dev/null
+++ b/doc/forum/git_annex_sync:_only_git-annex.mdwn
@@ -0,0 +1,3 @@
+Hi,
+
+I have an already existing git repository with a branch (*master*) and I added git annex to it (*git annex init*). Now I want to synchronise the file tracking information with annex through *git annex sync*, but keep the master branch unsynchronised (I want to push/pull it manually, as there are not only annexed files but also code). What is the best approach for my setup?
diff --git a/doc/forum/git_annex_sync:_only_git-annex/comment_1_2be68ed36a1e6bfc896d5aea9463d3c7._comment b/doc/forum/git_annex_sync:_only_git-annex/comment_1_2be68ed36a1e6bfc896d5aea9463d3c7._comment
new file mode 100644
index 000000000..bf90184aa
--- /dev/null
+++ b/doc/forum/git_annex_sync:_only_git-annex/comment_1_2be68ed36a1e6bfc896d5aea9463d3c7._comment
@@ -0,0 +1,14 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 1"""
+ date="2014-10-20T15:19:13Z"
+ content="""
+Sounds like you should just use normal `git push`/`git pull` commands.
+Works fine with git-annex.
+
+Just be sure to include the `git-annex` branch in your pushes.
+Eg, `git push origin master git-annex`
+
+You'll probably want to run `git annex merge` after pulling, to merge the
+local and remote git-annex branches.
+"""]]
diff --git a/doc/forum/git_annex_sync:_only_git-annex/comment_2_50e137e4d278dfd0103a41aff0cfa3a9._comment b/doc/forum/git_annex_sync:_only_git-annex/comment_2_50e137e4d278dfd0103a41aff0cfa3a9._comment
new file mode 100644
index 000000000..10f3792b3
--- /dev/null
+++ b/doc/forum/git_annex_sync:_only_git-annex/comment_2_50e137e4d278dfd0103a41aff0cfa3a9._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlog_5wIICaMcrKTexlFNA6IO6UTp323aE"
+ nickname="Torkaly"
+ subject="comment 2"
+ date="2014-10-22T08:56:43Z"
+ content="""
+Thank you for your response.
+
+So annex looks like it's not really designed to work with an existing git repository, but only standalone?!
+"""]]
diff --git a/doc/forum/git_annex_sync:_only_git-annex/comment_3_7753f8276478e0e05c10dba2b84bbc49._comment b/doc/forum/git_annex_sync:_only_git-annex/comment_3_7753f8276478e0e05c10dba2b84bbc49._comment
new file mode 100644
index 000000000..49a61c0b7
--- /dev/null
+++ b/doc/forum/git_annex_sync:_only_git-annex/comment_3_7753f8276478e0e05c10dba2b84bbc49._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.96"
+ subject="comment 3"
+ date="2014-10-22T16:18:16Z"
+ content="""
+I struggle to see how you could draw that conclusion from what I said.
+
+git-annex will work fine in an existing git repository. You can mix regular git commands like `git add`, `git push`, `git pull`, `git merge` with git-annex commands like `git annex add`, `git annex copy --to origin`, `git annex get`, `git annex merge`, in the same repository.
+
+The `git annex sync` command effectively runs `git commit; git pull; git annex merge; git push; git annex copy --to origin; git annex get`. If you don't want to run all those commands at once, you don't want to run `git annex sync`. That will not prevent you from using git-annex in any way.
+"""]]
diff --git a/doc/forum/git_annex_whereis_--json_output_with_two_variables_with_same_name.mdwn b/doc/forum/git_annex_whereis_--json_output_with_two_variables_with_same_name.mdwn
new file mode 100644
index 000000000..0cc7ae530
--- /dev/null
+++ b/doc/forum/git_annex_whereis_--json_output_with_two_variables_with_same_name.mdwn
@@ -0,0 +1,21 @@
+Hi Joey,
+
+I'm trying to extract data from `git annex whereis --json` output, but discovered that in the json, the output has two values with the same name.
+For instance:
+
+    {
+      "command":"whereis",
+      "file":"filename.webm",
+      "note":"3 copies",
+      "whereis":[
+        {"uuid":"1b7d69fe-22e2-11e4-bc47-279f5115dfde","description":"chasqui","here":false},
+        {"uuid":"e5cc9824-450b-4340-b30f-c2d92c6a52f7","description":"coco","here":false},
+        {"uuid":"f84a1327-febb-4199-a106-9c3fd2288826","description":"[dpadua]","here":false}],
+      "note":"\t1b7d69fe-22e2-11e4-bc47-279f5115dfde -- chasqui\n \te5cc9824-450b-4340-b30f-c2d92c6a52f7 -- coco\n \tf84a1327-febb-4199-a106-9c3fd2288826 -- [dpadua]\n",
+      "untrusted":[],
+      "success":true
+    }
+
+When I read the json from another program, it gets only the second value (and I wanted the first, the number of copies). I'm using git-annex version 5.20140831+b1
+
+
diff --git a/doc/forum/git_annex_whereis_--json_output_with_two_variables_with_same_name/comment_1_3bfde59729b904aa1ef815427dd35ae6._comment b/doc/forum/git_annex_whereis_--json_output_with_two_variables_with_same_name/comment_1_3bfde59729b904aa1ef815427dd35ae6._comment
new file mode 100644
index 000000000..a027b5276
--- /dev/null
+++ b/doc/forum/git_annex_whereis_--json_output_with_two_variables_with_same_name/comment_1_3bfde59729b904aa1ef815427dd35ae6._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-10T17:09:32Z"
+ content="""
+The \"3 copies\" string is not meant to be machine-parsable anyway. Why don't you just look at the whereis object in the json, and count the number of items in its list, which will give you the number of copies.
+"""]]
diff --git a/doc/forum/git_annex_whereis_--json_output_with_two_variables_with_same_name/comment_2_d08a955a11953cc783f09bfba180dbd6._comment b/doc/forum/git_annex_whereis_--json_output_with_two_variables_with_same_name/comment_2_d08a955a11953cc783f09bfba180dbd6._comment
new file mode 100644
index 000000000..02bd6de1f
--- /dev/null
+++ b/doc/forum/git_annex_whereis_--json_output_with_two_variables_with_same_name/comment_2_d08a955a11953cc783f09bfba180dbd6._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawkR2dZjxqujBXcitvsP_PeWG8A-LfLU_tg"
+ nickname="Fernão"
+ subject="comment 2"
+ date="2014-10-10T18:18:04Z"
+ content="""
+nice, it worked =)
+"""]]
diff --git a/doc/forum/help_running_git-annex_on_top_of_existing_repo/comment_7_15d918ededb5b8375b0ca13d0b3523ff._comment b/doc/forum/help_running_git-annex_on_top_of_existing_repo/comment_7_15d918ededb5b8375b0ca13d0b3523ff._comment
new file mode 100644
index 000000000..1ca6b80ad
--- /dev/null
+++ b/doc/forum/help_running_git-annex_on_top_of_existing_repo/comment_7_15d918ededb5b8375b0ca13d0b3523ff._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlog_5wIICaMcrKTexlFNA6IO6UTp323aE"
+ nickname="Torkaly"
+ subject="comment 7"
+ date="2014-10-17T11:00:06Z"
+ content="""
+Is there a way to sync just the *git-annex* branch with the *git annex sync* command? We have an already existing git repository and want to push/pull *master* manually.
+"""]]
diff --git a/doc/forum/help_running_git-annex_on_top_of_existing_repo/comment_8_dcc3f2c6d55006776610e8d770b61d12._comment b/doc/forum/help_running_git-annex_on_top_of_existing_repo/comment_8_dcc3f2c6d55006776610e8d770b61d12._comment
new file mode 100644
index 000000000..35983e802
--- /dev/null
+++ b/doc/forum/help_running_git-annex_on_top_of_existing_repo/comment_8_dcc3f2c6d55006776610e8d770b61d12._comment
@@ -0,0 +1,9 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 8"""
+ date="2014-10-20T15:25:13Z"
+ content="""
+@Torkaly just `git pull` as usual, and then run `git annex merge`
+to auto-merge the git-annex branches. Then `git push origin git-annex`
+to push the git-annex branch.
+"""]]
diff --git a/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit.mdwn b/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit.mdwn
new file mode 100644
index 000000000..960f563ce
--- /dev/null
+++ b/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit.mdwn
@@ -0,0 +1,9 @@
+Been using Unison for a while to sync a folder across three different computers (they can be on or off simultaneously). However, I've for different reasons been looking for a different solution. Here comes git annex.
+
+I started out with one computer, generated a new repo locally and at the server (a synology nas) with the assistant. Then I put all my files into the local directory and the files uploaded to the server. All good. I put the local computer in client mode and the server in backup mode. Then I configured a second computer with an empty repo folder, was asked to join repos, did that. Also put this computer in client mode and the server in backup. After a while all the files were located in the second computer's repo directory. Yes, I was confident this was the way to go.
+
+However, doing the same with my laptop was not successful at all. After a similar setup as above it only downloaded about 5% of the data. Bummer. While I tried to study the logs I thought I should check the second computer's repo. That was modified and a lot of files were gone. Wow... of course I have backup, but I was hoping for a proper and consistent operation.
+
+So what causes this? Should I not put the server in backup mode? Anyone have an idea?
+
+Other thoughts?
diff --git a/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_1_a0551431a57ccab2463f2a6d43553337._comment b/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_1_a0551431a57ccab2463f2a6d43553337._comment
new file mode 100644
index 000000000..99fa06512
--- /dev/null
+++ b/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_1_a0551431a57ccab2463f2a6d43553337._comment
@@ -0,0 +1,15 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawncVeolylM8VoRbWhIYDlfGhIP69-aNXm4"
+ nickname="Espen"
+ subject="comment 1"
+ date="2014-10-10T07:14:36Z"
+ content="""
+Okay, came into the office today to check the first computer I set up. It lacks all the files except the ones located on the third computer. So all sites are synced (only a lot of files are dropped). Seems something happened while the third computer was set up that caused just a few files to sync from the backup server, and then git somehow thought that was it and then synced computer one, two and the server according to the files present on computer three (obviously way too few files). This is slightly worrisome and I wonder what caused this. I'll inspect the log files for the transfer on computer three when I get home.
+
+Can anyone of you confirm that having this setup should not cause problems:
+
+computer 1 - client mode
+computer 2 - client mode
+computer 3 - client mode
+backup unit/nas - backup mode
+"""]]
diff --git a/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_2_e96e8cf6e08e3a21bfcefbc202e78fe2._comment b/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_2_e96e8cf6e08e3a21bfcefbc202e78fe2._comment
new file mode 100644
index 000000000..fa0f49120
--- /dev/null
+++ b/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_2_e96e8cf6e08e3a21bfcefbc202e78fe2._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlM_DRhi_5pJrTA0HbApHR25iAgy-NBXTY"
+ nickname="Tor Arne"
+ subject="comment 2"
+ date="2014-10-14T22:13:44Z"
+ content="""
+Did you figure this one out?
+"""]]
diff --git a/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_3_2ad4c1a4bfe00c22444ab878c84a8830._comment b/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_3_2ad4c1a4bfe00c22444ab878c84a8830._comment
new file mode 100644
index 000000000..8c42c43fd
--- /dev/null
+++ b/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_3_2ad4c1a4bfe00c22444ab878c84a8830._comment
@@ -0,0 +1,47 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawncVeolylM8VoRbWhIYDlfGhIP69-aNXm4"
+ nickname="Espen"
+ subject="comment 3"
+ date="2014-10-15T17:32:10Z"
+ content="""
+Finally had some time today to look at the logs. However, I started from scratch. Created a local repo with the assistant, transferred some real files into the local repo dir (not .git of course). Waited until everything was added etc. Then I created the remote server repo and it immediately started to transfer files from the local repo to the remote. Then I was slightly surprised when I got back home and saw this in the logfile:
+
+----- cut -----
+
+somefile
+
+ 32,768 1% 0.00kB/s 0:00:00 To ssh://someserverrepodir
+ * [new branch] git-annex -> synced/git-annex
+ * [new branch] annex/direct/master -> synced/master
+error: Ref refs/heads/synced/git-annex is at 112bb35566a0ee9434fb74524cdced45792bf8ed but expected 0000000000000000000000000000000000000000
+error: Ref refs/heads/synced/master is at d593f3c3a5090009789154bd60c3390d9a1b90d6 but expected 0000000000000000000000000000000000000000
+remote: error: failed to lock refs/heads/synced/git-annex
+remote: error: failed to lock refs/heads/synced/master
+To ssh://someserverrepodir
+ ! [remote rejected] git-annex -> synced/git-annex (failed to lock)
+ ! [remote rejected] annex/direct/master -> synced/master (failed to lock)
+error: failed to push some refs to 'ssh://someserverrepodir'
+
+ 1,966,080 96% 1.73MB/s 0:00:00
+ 2,034,254 100% 1.79MB/s 0:00:01 (xfr#1, to-chk=0/1)
+fatal: 'someserverrepodescription' does not appear to be a git repository
+fatal: Could not read from remote repository.
+
+Please make sure you have the correct access rights
+and the repository exists.
+fatal: 'someserverrepodescription' does not appear to be a git repository
+fatal: Could not read from remote repository.
+
+Please make sure you have the correct access rights
+and the repository exists.
+fatal: 'someserverrepodescription' does not appear to be a git repository
+fatal: Could not read from remote repository.
+
+Please make sure you have the correct access rights
+and the repository exists.
+[2014-10-13 21:58:05 CEST] Transferrer: Uploaded somefile
+
+----- cut -----
+
+This was a total surprise to me. Whether this was the stuff that made the previous setup fail, I do not know, but I will try to track down this error first. I would be quite surprised if it indeed was a permission issue. Also visible is an upload session for one of the files... that seems to go on fine and is probably not related to the error at all. This was the only error printed once during the entire transfer/sync process. I thought the first thing I would check was the size and number of files in the local and remote directory, but I'm not really sure how to do this with all the git stuff around on the remote. Typically \"git ls-files | wc -l\" or similar gets me going on the local repo. How would I do this on the remote (in backup mode if that counts)?
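+
+(One thing I might try, though I am only guessing at the right paths: ssh in to the repository directory on the server and run something like the following; a bare repo would have annex/objects at the top level instead of .git/annex/objects.)
+
+    git annex info
+    find .git/annex/objects -type f | wc -l
+    du -sh .git/annex/objects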
+"""]]
diff --git a/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_4_44639388349a9ea5eabda9ebf79817b3._comment b/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_4_44639388349a9ea5eabda9ebf79817b3._comment
new file mode 100644
index 000000000..c97438d8a
--- /dev/null
+++ b/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_4_44639388349a9ea5eabda9ebf79817b3._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawncVeolylM8VoRbWhIYDlfGhIP69-aNXm4"
+ nickname="Espen"
+ subject="comment 4"
+ date="2014-10-15T17:34:52Z"
+ content="""
+Sorry, I forgot to format the text, and there's a typo. Can I edit my own comments?
+"""]]
diff --git a/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_5_339123ab87b69b11d6e999ad6eaf6df5._comment b/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_5_339123ab87b69b11d6e999ad6eaf6df5._comment
new file mode 100644
index 000000000..58bed56e5
--- /dev/null
+++ b/doc/forum/how_to_set_up_syncing_for_multiple_computer_and_a_centralized_backup_unit/comment_5_339123ab87b69b11d6e999ad6eaf6df5._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawncVeolylM8VoRbWhIYDlfGhIP69-aNXm4"
+ nickname="Espen"
+ subject="comment 5"
+ date="2014-10-15T17:44:55Z"
+ content="""
+Did a quick \"du -h\" on both directories. Local is 27 GB and remote is 23.7 GB, so some data is obviously missing on the remote. Also, how do I increase either the permitted size of one log file, or the number of log files stored? Seems the logs were rotated over the whole allowed log batch and size before the job was done during the night, so it's difficult to tell if there were more errors.
+"""]]
diff --git a/doc/forum/lsof_resource_use_problems.mdwn b/doc/forum/lsof_resource_use_problems.mdwn
new file mode 100644
index 000000000..f15b81fe9
--- /dev/null
+++ b/doc/forum/lsof_resource_use_problems.mdwn
@@ -0,0 +1,42 @@
+When the assistant runs lsof on my file system, the lsof process consumes a horrendous amount of memory (>11GB). This forces a large amount of swapping, and brings the system to its knees until the process exits. The same thing occurs when I run lsof manually, but this is currently making the assistant unusable for me. Is this normal when running lsof on a large number of files, or is something wrong with my particular setup?
+
+An example of resource usage from top, and some system details:
+
+---
+ PID USERNAME THR PRI NICE SIZE RES STATE C TIME WCPU COMMAND
+ 33735 username 1 23 0 28208M 11507M pfault 0 0:07 58.50% lsof
+---
+ [username@hostname /mnt/media]$ uname -a
+ FreeBSD hostname 9.2-RELEASE-p10 FreeBSD 9.2-RELEASE-p10 #0 r262572+4fb5adc: Wed Aug 6 17:07:16 PDT 2014 root@build3.ixsystems.com:/fusion/jkh/921/freenas/os-base/amd64/fusion/jkh/921/freenas/FreeBSD/src/sys/FREENAS.amd64 amd64
+---
+ [username@hostname /mnt/media]$ lsof -h
+ lsof 4.88
+---
+ [username@hostname /mnt/media]$ git annex info
+ repository mode: direct
+ trusted repositories: 0
+ semitrusted repositories: 1
+ d03b21fc-666d-457d-b953-0ca0ac7393d8 -- [hostname_media_indirect]
+ untrusted repositories: 2
+ 00000000-0000-0000-0000-000000000001 -- web
+ 31497a4d-290e-409a-9fd2-20c7340c245b -- hostname_mnt/media [here]
+ transfers in progress: none
+ available local disk space: 780.1 gigabytes (+10 gigabytes reserved)
+ local annex keys: 41576
+ local annex size: 943.95 gigabytes (+ 49 unknown size)
+ annexed files in working tree: 41887
+ size of annexed files in working tree: 945.14 gigabytes (+ 50 unknown size)
+ bloom filter size: 16 mebibytes (8.3% full)
+ backend usage:
+ SHA512E: 81518
+ WORM: 1846
+ URL: 99
+---
+ [username@hostname /mnt/media]$ git annex version
+ git-annex version: 5.20140817
+ build flags: Assistant Webapp Webapp-secure Pairing S3 WebDAV Kqueue XMPP DNS Feeds Quvi TDFA CryptoHash
+ key/value backends: SHA256E SHA1E SHA512E SHA224E SHA384E SKEIN256E SKEIN512E SHA256 SHA1 SHA512 SHA224 SHA384 SKEIN256 SKEIN512 WORM URL
+ remote types: git gcrypt S3 bup directory rsync web webdav tahoe glacier ddar hook external
+ local repository version: 5
+ supported repository version: 5
+ upgrade supported from repository versions: 0 1 2 4
diff --git a/doc/forum/lsof_resource_use_problems/comment_1_a5e5d410545fa7f93f08936ec6aeee42._comment b/doc/forum/lsof_resource_use_problems/comment_1_a5e5d410545fa7f93f08936ec6aeee42._comment
new file mode 100644
index 000000000..55312113e
--- /dev/null
+++ b/doc/forum/lsof_resource_use_problems/comment_1_a5e5d410545fa7f93f08936ec6aeee42._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.111"
+ subject="comment 1"
+ date="2014-10-13T21:21:18Z"
+ content="""
+That seems very wrong. On my Linux system here, I can `lsof /` and it uses 4 kb total, and runs in 0.09 seconds, to report on 3500 open files.
+
+But I don't know about FreeBSD. lsof may be more expensive there for some reason, like needing to look in kernel memory rather than in /proc perhaps? But expensive to the tune of gigabytes of space used? That must be lsof misbehaving.
+"""]]
diff --git a/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository.mdwn b/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository.mdwn
new file mode 100644
index 000000000..b9500fa10
--- /dev/null
+++ b/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository.mdwn
@@ -0,0 +1,19 @@
+I have a Desktop computer at home and another at work, and work from home every other day. I'd like to use git-annex assistant to keep my git checkouts in sync across both of them. I may be a bit unusual in that I don't always commit before going home.
+
+I also would like to have anything in the git stash synchronized between the two, the .git/config file, etc.
+
+So to be clear, I have a file structure like the following:
+
+ ./annex/.git
+ ./annex/project/README.md
+ ./annex/project/.git
+ ./annex/project2/README.md
+ ./annex/project2/.git
+
+In my testing it seems that the "project/.git" folders aren't synchronized between my two desktops, even though the rest of the files are. A workaround is to rename "project/.git" to something else, like "project/.gitfoo", and then use --git-dir when issuing my git commands.
+
+Is this something that can be worked around? I apologize if this is covered elsewhere, as I wasn't thinking of the right terms to search for.
+
+I've looked through the code and can't find anything obvious. I imagine this is because git hard-codes all ".git" subdirectories as something it should ignore.
+
+Is there a better workaround? I realize this is probably a niche use case.
diff --git a/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_1_419b27cb1c71bce021ef9f2e471aa92e._comment b/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_1_419b27cb1c71bce021ef9f2e471aa92e._comment
new file mode 100644
index 000000000..8d698b1ba
--- /dev/null
+++ b/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_1_419b27cb1c71bce021ef9f2e471aa92e._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="jewel"
+ ip="23.30.55.105"
+ subject="comment 1"
+ date="2014-10-12T05:22:03Z"
+ content="""
+Now I've found the relevant bug: `http://git-annex.branchable.com/bugs/Can__39__t_add_a_git_repo_to_git_annex:___34__Invalid_path_repo__47__.git__47__X__34___for_many_X`.
+
+Note the symlink workaround near the end. A quick test shows that it's working great. I'll try it out for a few months and report back.
+"""]]
diff --git a/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_2_dae4c7a42080dd89150159b2946839b1._comment b/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_2_dae4c7a42080dd89150159b2946839b1._comment
new file mode 100644
index 000000000..2f574c8fe
--- /dev/null
+++ b/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_2_dae4c7a42080dd89150159b2946839b1._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 2"
+ date="2014-10-12T17:30:23Z"
+ content="""
+This is fundamentally not a good idea. All it takes is one conflict inside the .git directories, and your git repositories will be trashed/corrupted. The page you link to has comments explaining why.
+"""]]
diff --git a/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_3_9d9fa65559ba4bb0e4676289b5a65684._comment b/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_3_9d9fa65559ba4bb0e4676289b5a65684._comment
new file mode 100644
index 000000000..483ff007e
--- /dev/null
+++ b/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_3_9d9fa65559ba4bb0e4676289b5a65684._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="jewel"
+ ip="23.30.55.105"
+ subject="comment 3"
+ date="2014-10-12T19:04:00Z"
+ content="""
+Thanks for the warning. I'll keep hourly incremental backups (using obnam) on each computer just in case something terrible happens.
+"""]]
diff --git a/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_4_4e89b3590cc33b2565cd173ef7c85013._comment b/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_4_4e89b3590cc33b2565cd173ef7c85013._comment
new file mode 100644
index 000000000..b32abd3ec
--- /dev/null
+++ b/doc/forum/multiple_git_repositories_inside_git_annex_assistant_repository/comment_4_4e89b3590cc33b2565cd173ef7c85013._comment
@@ -0,0 +1,11 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlM_DRhi_5pJrTA0HbApHR25iAgy-NBXTY"
+ nickname="Tor Arne"
+ subject="comment 4"
+ date="2014-10-14T22:09:37Z"
+ content="""
+If I use git-annex to keep my home and work computers in sync, would a conflict only happen if I somehow manage to make a change in the git repo on one machine that's not propagated to the other straight away (e.g. the network is down), and I then modify the git repo on the other as well?
+
+Could git-annex use a different merge strategy in this case: not try to merge anything under .git, and give a warning instead?
+
+"""]]
diff --git a/doc/forum/unannex_--fast_+_uninit_leaves_files_in_.git__47__annex__47__objects__63__.mdwn b/doc/forum/unannex_--fast_+_uninit_leaves_files_in_.git__47__annex__47__objects__63__.mdwn
new file mode 100644
index 000000000..c746e3d6d
--- /dev/null
+++ b/doc/forum/unannex_--fast_+_uninit_leaves_files_in_.git__47__annex__47__objects__63__.mdwn
@@ -0,0 +1,47 @@
+I have been trying to uninit a very large directory and ran into problems.
+
+`git annex unannex` seems to be completely infeasible because of how long it takes. It also seemed to grow the directory enormously, probably because it copies large files and then waits to delete them?
+
+I tried unannexing with the following commands instead:
+
+    git annex get --from=backup
+    git annex unannex --fast
+    git annex uninit
+
+This does what it is supposed to, it seems; it hard links all copies of files. However, the uninit step gives the following message:
+
+ git-annex: Not fully uninitialized
+ Some annexed data is still left in /science/carlo/GR_Coverage_Manuscript_Revisions_140616/.git/annex/objects/
+ This may include deleted files, or old versions of modified files.
+
+ If you don't care about preserving the data, just delete the
+ directory.
+
+ Or, you can move it to another location, in case it turns out
+ something in there is important.
+
+ Or, you can run `git annex unused` followed by `git annex dropunused`
+ to remove data that is not used by any tag or branch, which might
+ take care of all the data.
+
+ Then run `git annex uninit` again to finish.
+
+However, running `git annex unused` returns nothing.
+
+When I run `du -l -h --max-depth=1` I get the following output:
+
+ 646G ./01-Collate_New_Species_Data
+ 6.3G ./02-Prep_Annotations
+ 1.8T ./03-Map_Riboprofiling_Data
+ 111G ./04-Generate_Preprocessed_Files
+ 94G ./05-Det_Codon_Specfic_Occupancy
+ 3.6T ./.git
+ 6.2T .
+
+The .git/annex/objects directory remains 3.5TB in size, while the root directory, minus the contents of .git, is only 2.7TB.
+
+I want to delete the .git folder to free up space, but I am very nervous now, because it looks like there is extra data in git-annex that isn't present in the main directory. I don't want that to be lost; that would be a complete disaster.
+
+I tried the exact same process on a test directory, and it seemed to work completely, but losing this data would be an absolute disaster, so I don't want to risk it.
+
+Any thoughts?
diff --git a/doc/forum/unannex_--fast_+_uninit_leaves_files_in_.git__47__annex__47__objects__63__/comment_1_4aaf93801119b36a01e452c7bb0fc7e9._comment b/doc/forum/unannex_--fast_+_uninit_leaves_files_in_.git__47__annex__47__objects__63__/comment_1_4aaf93801119b36a01e452c7bb0fc7e9._comment
new file mode 100644
index 000000000..86cd83d3a
--- /dev/null
+++ b/doc/forum/unannex_--fast_+_uninit_leaves_files_in_.git__47__annex__47__objects__63__/comment_1_4aaf93801119b36a01e452c7bb0fc7e9._comment
@@ -0,0 +1,18 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 1"""
+ date="2014-10-20T15:33:02Z"
+ content="""
+Well, are there any files in your repository (outside .git) that
+are still symlinks to content in .git? If not, you know that
+every file in the repository's working tree has been unannexed
+ok.
+
+The remaining files in .git/annex/objects are not unused, so some branch or tag
+must refer to those files.
+
+You might try running `git log --stat -S'KEY'`
+where KEY is the basename of one of the files in .git/annex/objects.
+This will find commits to the repo that refer to that object, so you'll
+know where it was used and what filename corresponded to it.
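+
+For example, a quick sketch (the key shown here is made up; substitute the
+basename of one of your object files):
+
+    git log --stat -S'SHA256E-s12345--0123456789abcdef.pdf'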
+"""]]
diff --git a/doc/forum/using_git-annex_with_lightroom.mdwn b/doc/forum/using_git-annex_with_lightroom.mdwn
new file mode 100644
index 000000000..605f2b903
--- /dev/null
+++ b/doc/forum/using_git-annex_with_lightroom.mdwn
@@ -0,0 +1,6 @@
+I'm using git-annex to sync my photos across multiple computers, and it works beautifully. I would also like to sync Lightroom catalogues. The photo editing program creates a *.lrdata directory where it stores the edits in its own tree format. Merging two such directories obviously creates a mess.
+
+Is there an elegant way to tell git-annex to treat the whole directory as a single file and overwrite the whole directory structure at once? I'm guessing the same problem occurs with mac os packages.
+
+Many thanks!
+Alex
diff --git a/doc/forum/using_git-annex_with_lightroom/comment_1_ec977efd277f0644767a4fc7064e4baf._comment b/doc/forum/using_git-annex_with_lightroom/comment_1_ec977efd277f0644767a4fc7064e4baf._comment
new file mode 100644
index 000000000..e7e1fa9a6
--- /dev/null
+++ b/doc/forum/using_git-annex_with_lightroom/comment_1_ec977efd277f0644767a4fc7064e4baf._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 1"
+ date="2014-10-02T15:23:18Z"
+ content="""
+There's not really a way to handle this sort of thing. I suggest that you put *.lrdata in `.gitignore`
+"""]]
diff --git a/doc/git-annex.mdwn b/doc/git-annex.mdwn
index 71f0c0beb..b22ff3881 100644
--- a/doc/git-annex.mdwn
+++ b/doc/git-annex.mdwn
@@ -148,7 +148,8 @@ subdirectories).
Or specify `--fast` to sync with the remotes with the
lowest annex-cost value.
- The sync process involves first committing all local changes,
+ The sync process involves first committing any local changes to files
+ that have previously been added to the repository,
then fetching and merging the `synced/master` and the `git-annex` branch
from the remote repositories, and finally pushing the changes back to
those branches on the remote repositories. You can use standard git
@@ -675,17 +676,17 @@ subdirectories).
To generate output suitable for the gource visualization program,
specify `--gource`.
-* `info [directory ...]`
+* `info [directory|file|remote ...]`
- Displays some statistics and other information, including how much data
- is in the annex and a list of all known repositories.
+ Displays statistics and other information for the specified item,
+ which can be a directory, or a file, or a remote.
+ When no item is specified, displays statistics and information
+ for the repository as a whole.
- To only show the data that can be gathered quickly, use `--fast`.
+ When a directory is specified, the file matching options can be used
+ to select the files in the directory that are included in the statistics.
- When a directory is specified, shows a differently formatted info
- display for that directory. In this mode, all of the matching
- options can be used to filter the files that will be included in
- the information.
+ To only show the data that can be gathered quickly, use `--fast`.
For example, suppose you want to run "git annex get .", but
would first like to see how much disk space that will use.
diff --git a/doc/install/Docker.mdwn b/doc/install/Docker.mdwn
index f3d7c189e..fd7ff1cd2 100644
--- a/doc/install/Docker.mdwn
+++ b/doc/install/Docker.mdwn
@@ -30,3 +30,10 @@ import qualified Propellor.Property.SiteSpecific.GitAnnexBuilder as GitAnnexBuil
This will autobuild every hour at :15, and the autobuilt image will be
left inside the container in /home/builder/gitbuilder/out/
+
+# container for backport building
+
+For building the Debian stable backport, the container
+`joeyh/git-annex-wheezy-backport` is used. This is nothing special; it
+just has the right versions of build dependencies installed from Debian
+stable and backports.
diff --git a/doc/install/OSX/comment_9_f11f726d1fee3c4c91f3c984e792037d._comment b/doc/install/OSX/comment_9_f11f726d1fee3c4c91f3c984e792037d._comment
new file mode 100644
index 000000000..afc7268e1
--- /dev/null
+++ b/doc/install/OSX/comment_9_f11f726d1fee3c4c91f3c984e792037d._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawn3p4i4lk_zMilvjnJ9sS6g2nerpgz0Fjc"
+ nickname="Matthias"
+ subject="Why different versions?"
+ date="2014-09-23T10:59:32Z"
+ content="""
+Why are there different versions for 10.7, 10.8, 10.9 anyway? Is it not possible to produce an executable compatible with all of them? I mean, it's the same architecture and executable format, is it not? I guess there has to be a reason; explanations are welcome :-)
+"""]]
diff --git a/doc/news/version_5.20140709.mdwn b/doc/news/version_5.20140709.mdwn
deleted file mode 100644
index e7609949f..000000000
--- a/doc/news/version_5.20140709.mdwn
+++ /dev/null
@@ -1,11 +0,0 @@
-git-annex 5.20140709 released with [[!toggle text="these changes"]]
-[[!toggleable text="""
- * Fix race in direct mode merge code that could cause all files in the
- repository to be removed. It should be able to recover repositories
- experiencing this bug without data loss. See:
- http://git-annex.branchable.com/bugs/bad\_merge\_commit\_deleting\_all\_files/
- * Fix git version that supported --no-gpg-sign.
- * Fix bug in automatic merge conflict resolution, when one side is an
- annexed symlink, and the other side is a non-annexed symlink.
- * Really fix bug that caused the assistant to make many unncessary
- empty merge commits."""]]
diff --git a/doc/news/version_5.20140717.mdwn b/doc/news/version_5.20140717.mdwn
deleted file mode 100644
index 9d7b83183..000000000
--- a/doc/news/version_5.20140717.mdwn
+++ /dev/null
@@ -1,28 +0,0 @@
-git-annex 5.20140717 released with [[!toggle text="these changes"]]
-[[!toggleable text="""
- * Fix minor FD leak in journal code. Closes: #[754608](http://bugs.debian.org/754608)
- * direct: Fix handling of case where a work tree subdirectory cannot
- be written to due to permissions.
- * migrate: Avoid re-checksumming when migrating from hashE to hash backend.
- * uninit: Avoid failing final removal in some direct mode repositories
- due to file modes.
- * S3: Deal with AWS ACL configurations that do not allow creating or
- checking the location of a bucket, but only reading and writing content to
- it.
- * resolvemerge: New plumbing command that runs the automatic merge conflict
- resolver.
- * Deal with change in git 2.0 that made indirect mode merge conflict
- resolution leave behind old files.
- * sync: Fix git sync with local git remotes even when they don't have an
- annex.uuid set. (The assistant already did so.)
- * Set gcrypt-publish-participants when setting up a gcrypt repository,
- to avoid unncessary passphrase prompts.
- This is a security/usability tradeoff. To avoid exposing the gpg key
- ids who can decrypt the repository, users can unset
- gcrypt-publish-participants.
- * Install nautilus hooks even when ~/.local/share/nautilus/ does not yet
- exist, since it is not automatically created for Gnome 3 users.
- * Windows: Move .vbs files out of git\bin, to avoid that being in the
- PATH, which caused some weird breakage. (Thanks, divB)
- * Windows: Fix locking issue that prevented the webapp starting
- (since 5.20140707)."""]] \ No newline at end of file
diff --git a/doc/news/version_5.20140817.mdwn b/doc/news/version_5.20140817.mdwn
deleted file mode 100644
index 82e44eb57..000000000
--- a/doc/news/version_5.20140817.mdwn
+++ /dev/null
@@ -1,42 +0,0 @@
-git-annex 5.20140817 released with [[!toggle text="these changes"]]
-[[!toggleable text="""
- * New chunk= option to chunk files stored in special remotes.
- Supported by: directory, S3, webdav, gcrypt, rsync, and all external
- and hook special remotes.
- * Partially transferred files are automatically resumed when using
- chunked remotes!
- * The old chunksize= option is deprecated. Do not use for new remotes.
- * Legacy code for directory remotes using the old chunksize= option
- will keep them working, but more slowly than before.
- * webapp: Automatically install Konqueror integration scripts
- to get and drop files.
- * repair: Removing bad objects could leave fsck finding no more
- unreachable objects, but some branches no longer accessible.
- Fix this, including support for fixing up repositories that
- were incompletely repaired before.
- * Fix cost calculation for non-encrypted remotes.
- * Display exception message when a transfer fails due to an exception.
- * WebDAV: Sped up by avoiding making multiple http connections
- when storing a file.
- * WebDAV: Avoid buffering whole file in memory when uploading and
- downloading.
- * WebDAV: Dropped support for DAV before 1.0.
- * testremote: New command to test uploads/downloads to a remote.
- * Dropping an object from a bup special remote now deletes the git branch
- for the object, although of course the object's content cannot be deleted
- due to the nature of bup.
- * unlock: Better error handling; continue past files that are not available
- or cannot be unlocked due to disk space, and try all specified files.
- * Windows: Now uses actual inode equivilants in new direct mode
- repositories, for safer detection of eg, renaming of files with the same
- size and mtime.
- * direct: Fix ugly warning messages.
- * WORM backend: When adding a file in a subdirectory, avoid including the
- subdirectory in the key name.
- * S3, Glacier, WebDAV: Fix bug that prevented accessing the creds
- when the repository was configured with encryption=shared embedcreds=yes.
- * direct: Avoid leaving file content in misctemp if interrupted.
- * git-annex-shell sendkey: Don't fail if a remote asks for a key to be sent
- that already has a transfer lock file indicating it's being sent to that
- remote. The remote may have moved between networks, or reconnected.
- * Switched from the old haskell HTTP library to http-conduit."""]] \ No newline at end of file
diff --git a/doc/news/version_5.20140831.mdwn b/doc/news/version_5.20140831.mdwn
deleted file mode 100644
index 713adb9fc..000000000
--- a/doc/news/version_5.20140831.mdwn
+++ /dev/null
@@ -1,13 +0,0 @@
-git-annex 5.20140831 released with [[!toggle text="these changes"]]
-[[!toggleable text="""
- * Make --help work when not in a git repository. Closes: #[758592](http://bugs.debian.org/758592)
- * Ensure that all lock fds are close-on-exec, fixing various problems with
- them being inherited by child processes such as git commands.
- * When accessing a local remote, shut down git-cat-file processes
- afterwards, to ensure that remotes on removable media can be unmounted.
- Closes: #[758630](http://bugs.debian.org/758630)
- * Fix handing of autocorrection when running outside a git repository.
- * Fix stub git-annex test support when built without tasty.
- * Do not preserve permissions and acls when copying files from
- one local git repository to another. Timestamps are still preserved
- as long as cp --preserve=timestamps is supported. Closes: #[729757](http://bugs.debian.org/729757)"""]] \ No newline at end of file
diff --git a/doc/news/version_5.20140919.mdwn b/doc/news/version_5.20140919.mdwn
new file mode 100644
index 000000000..7a179c9fe
--- /dev/null
+++ b/doc/news/version_5.20140919.mdwn
@@ -0,0 +1,16 @@
+git-annex 5.20140919 released with [[!toggle text="these changes"]]
+[[!toggleable text="""
+ * Security fix for S3 and glacier when using embedcreds=yes with
+ encryption=pubkey or encryption=hybrid. CVE-2014-6274
+ The creds embedded in the git repo were *not* encrypted.
+ git-annex enableremote will warn when used on a remote that has
+ this problem. For details, see:
+ https://git-annex.branchable.com/upgrades/insecure\_embedded\_creds/
+ * assistant: Detect when repository has been deleted or moved, and
+ automatically shut down the assistant. Closes: #[761261](http://bugs.debian.org/761261)
+ * Windows: Avoid crashing trying to list gpg secret keys, for gcrypt
+ which is not yet supported on Windows.
+ * WebDav: Fix enableremote crash when the remote already exists.
+ (Bug introduced in version 5.20140817.)
+ * add: In direct mode, adding an annex symlink will check it into git,
+ as was already done in indirect mode."""]] \ No newline at end of file
diff --git a/doc/news/version_5.20140926.mdwn b/doc/news/version_5.20140926.mdwn
new file mode 100644
index 000000000..289dd0bbf
--- /dev/null
+++ b/doc/news/version_5.20140926.mdwn
@@ -0,0 +1,5 @@
+git-annex 5.20140926 released with [[!toggle text="these changes"]]
+[[!toggleable text="""
+ * Depend on new enough git for --no-gpg-sign to work. Closes: #[762446](http://bugs.debian.org/762446)
+ * Work around failure to build on mips by using cabal, not Setup,
+ to build in debian/rules."""]] \ No newline at end of file
diff --git a/doc/news/version_5.20140927.mdwn b/doc/news/version_5.20140927.mdwn
new file mode 100644
index 000000000..9aabf1594
--- /dev/null
+++ b/doc/news/version_5.20140927.mdwn
@@ -0,0 +1,6 @@
+git-annex 5.20140927 released with [[!toggle text="these changes"]]
+[[!toggleable text="""
+ * Really depend (not just build-depend) on new enough git for --no-gpg-sign
+ to work. Closes: #[763057](http://bugs.debian.org/763057)
+ * Add temporary workaround for bug #763078 which broke building on armel
+ and armhf."""]] \ No newline at end of file
diff --git a/doc/news/version_5.20141013.mdwn b/doc/news/version_5.20141013.mdwn
new file mode 100644
index 000000000..d4eb62fa0
--- /dev/null
+++ b/doc/news/version_5.20141013.mdwn
@@ -0,0 +1,7 @@
+git-annex 5.20141013 released with [[!toggle text="these changes"]]
+[[!toggleable text="""
+ * Adjust cabal file to support building w/o assistant on the hurd.
+ * Support building with yesod 1.4.
+ * S3: Fix embedcreds=yes handling for the Internet Archive.
+ * map: Handle .git prefixed remote repos. Closes: #[614759](http://bugs.debian.org/614759)
+ * repair: Prevent auto gc from happening when fetching from a remote."""]] \ No newline at end of file
diff --git a/doc/publicrepos.mdwn b/doc/publicrepos.mdwn
index 625522d34..9b35570d9 100644
--- a/doc/publicrepos.mdwn
+++ b/doc/publicrepos.mdwn
@@ -5,15 +5,22 @@ the public repositories that you can clone to try out git-annex.
`git clone https://downloads.kitenet.net/.git/`
Various downloads of things produced by Joey Hess, including git-annex
builds and videos.
+
* debconf-share
`git clone http://annex.debconf.org/debconf-share/.git/`
[DebConf](http://debconf.org/) Media, photos, videos, etc.
+
* [conference-proceedings](https://github.com/RichiH/conference_proceedings)
`git clone https://github.com/RichiH/conference_proceedings.git`
A growing collection of videos of technology conferences.
Submit a pull request to add your own!
+
* [ocharles's papers](https://github.com/ocharles/papers)
Lots of CS papers read by [Oliver](http://ocharles.org.uk/blog/).
+* [MRI brain scan data](http://studyforrest.org/pages/access.html)
+ `git clone http://psydata.ovgu.de/forrest_gump/.git studyforrest`
+  High-resolution, ultra-high-field fMRI dataset on auditory perception.
+
This is a wiki -- add your own public repository to the list!
See [[tips/centralized_git_repository_tutorial]].
diff --git a/doc/related_software.mdwn b/doc/related_software.mdwn
index 43ffe0597..1e5c0f6b0 100644
--- a/doc/related_software.mdwn
+++ b/doc/related_software.mdwn
@@ -13,3 +13,5 @@ designed to interoperate with it.
* [git annex darktable integration](https://github.com/xxv/darktable-git-annex)
* [Magit](http://github.com/magit/magit), an Emacs mode for Git, has
[an extension](https://github.com/magit/magit-annex) for git annex.
+* [DataLad](http://datalad.org/) uses git-annex to provide access to
+ scientific data available from various sources.
diff --git a/doc/special_remotes/rsync.mdwn b/doc/special_remotes/rsync.mdwn
index eb218b181..628d2e8cb 100644
--- a/doc/special_remotes/rsync.mdwn
+++ b/doc/special_remotes/rsync.mdwn
@@ -2,12 +2,12 @@ This special remote type rsyncs file contents to somewhere else.
Setup example:
- # git annex initremote myrsync type=rsync rsyncurl=rsync://rsync.example.com/myrsync keyid=joey@kitenet.net
+ # git annex initremote myrsync type=rsync rsyncurl=rsync://rsync.example.com/myrsync keyid=joey@kitenet.net encryption=shared
# git annex describe myrsync "rsync server"
Or for using rsync over SSH
- # git annex initremote myrsync type=rsync rsyncurl=ssh.example.com:/myrsync keyid=joey@kitenet.net
+ # git annex initremote myrsync type=rsync rsyncurl=ssh.example.com:/myrsync keyid=joey@kitenet.net encryption=shared
# git annex describe myrsync "rsync server"
## configuration
diff --git a/doc/thanks.mdwn b/doc/thanks.mdwn
index 10ab1f199..c4a559b8e 100644
--- a/doc/thanks.mdwn
+++ b/doc/thanks.mdwn
@@ -6,6 +6,14 @@ do. You have my most sincere thanks. --[[Joey]]
(If I got your name wrong, or you don't want it publically posted here,
email <joey@kitenet.net>.)
+## 2014-2015
+
+<img alt="NSF logo" src="https://www.nsf.gov/images/logos/nsf1.gif">
+
+git-annex development is partially supported by the
+[NSF](https://www.nsf.gov/awardsearch/showAward?AWD_ID=1429999) as a part of the
+[DataLad project](http://datalad.org/).
+
## 2013-2014
Continued git-annex development was [crowd funded](https://campaign.joeyh.name/)
diff --git a/doc/tips/Synology_NAS_and_git_annex/comment_2_5e723ccf026fe970ad31207f9f036b69._comment b/doc/tips/Synology_NAS_and_git_annex/comment_2_5e723ccf026fe970ad31207f9f036b69._comment
new file mode 100644
index 000000000..60122bb13
--- /dev/null
+++ b/doc/tips/Synology_NAS_and_git_annex/comment_2_5e723ccf026fe970ad31207f9f036b69._comment
@@ -0,0 +1,30 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawllyDAwjTPuM6G2d3eKE481V0qGXRKtF8s"
+ nickname="Pieter"
+ subject="Syncing metadata only with Synology NAS and git annex"
+ date="2014-10-06T12:03:35Z"
+ content="""
+I am not able to get full syncing working. It says \"syncing enabled (metadata only)\".
+When I click on Actions -> Edit it says:
+
+Just a git repository.
+
+This repository is not currently set up as a git annex; only git metadata is synced with this repository.
+
+If this repository's ssh server has git-annex installed, you can upgrade this repository to a full git annex, which will store the contents of your files, not only their metadata.
+
+When I try to upgrade the repository it does not work. The log says:
+
+sh: git-annex-shell: not found
+
+rsync: connection unexpectedly closed (0 bytes received so far) [sender]
+
+rsync error: remote command not found (code 127) at io.c(226) [sender=3.1.1]
+
+I'm using version 5.20140717 on Ubuntu Linux 14.10.
+
+Ssh'ing from Ubuntu to gituser@synology works fine and shows the git-annex-shell options.
+
+Any ideas?
+
+"""]]
diff --git a/doc/tips/Synology_NAS_and_git_annex/comment_3_8beb2b4b79c7787a92689aaad3bfc452._comment b/doc/tips/Synology_NAS_and_git_annex/comment_3_8beb2b4b79c7787a92689aaad3bfc452._comment
new file mode 100644
index 000000000..681b001d0
--- /dev/null
+++ b/doc/tips/Synology_NAS_and_git_annex/comment_3_8beb2b4b79c7787a92689aaad3bfc452._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 3"
+ date="2014-10-06T15:51:38Z"
+ content="""
+Pieter, I suspect you didn't follow the part of the instructions where it says to run \"./runshell\" on the NAS. If you didn't do that, there will be no ~/.ssh/git-annex-shell script set up.
+
+You have put git-annex-shell in your PATH somehow, but you probably did it by editing a bash dotfile. Those files are used for interactive login shells, but not when git-annex is sshing noninteractively into the NAS to run git-annex-shell.
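+
+A minimal sketch of that step, assuming the standalone tarball was unpacked in the gituser home directory as ~/git-annex.linux:
+
+    ssh gituser@synology
+    cd git-annex.linux
+    ./runshell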
+"""]]
diff --git a/doc/tips/deleting_unwanted_files.mdwn b/doc/tips/deleting_unwanted_files.mdwn
new file mode 100644
index 000000000..8c09b42aa
--- /dev/null
+++ b/doc/tips/deleting_unwanted_files.mdwn
@@ -0,0 +1,40 @@
+It's quite hard to delete a file from a git repository once it's checked in and pushed to origin. This is normally OK, since git repositories contain mostly small files, and it's a good thing, since losing hard work stinks.
+
+With git-annex this changes somewhat: very large files can be managed with git-annex, and it's not uncommon to be done with such a file and want to delete it. So git-annex provides a number of ways to handle this, while still trying to avoid accidental foot-shooting that would lose the last copy of an important file.
+
+## the garbage collecting method
+
+In this method, you just remove annexed files whenever you want, and commit the changes. This is probably the most natural way to go.
+
+In an indirect mode repo, you can do this the same way you would in a regular git repository. For example, `git rm foo; git commit -m "removed foo"`. This leaves the contents of the files still in the annex, not really deleted yet.
+
+If you have a direct mode repo, you can't run `git rm` in it. Instead, you can just delete files using `rm` or your file manager, and then run `git annex sync` to commit the deletion. That will delete the file's content from your disk. Even if it's the only copy of the file!
+
+Either way, deleting files can leave some garbage lying around in either the local repository, or other repositories that contained a copy of the content of the file you deleted. Eventually you'll want to free up some disk space used by one of these repositories, and then it's time to take out the garbage.
+
+To collect the garbage, you can run `git annex unused` inside the repository which you want to slim down. That will list files stored in the annex that are not used by any git branches or tags. Follow up with `git annex dropunused 1-10` to delete a range of the unused files from the annex.
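+
+For example, a typical cleanup session (the numbers passed to `dropunused` come from the numbered list that `unused` prints):
+
+    git annex unused
+    git annex dropunused 1-10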
+
+In recent versions of git-annex, `git annex dropunused` checks that enough other copies of a file's content exist in other repositories before deleting it, so this won't ever delete the last copy of some file. This is a good default, because these unused files are still referred to by some commits in the git history, and you might want to retain the full history of every version of a file.
+
+But let's say you don't care about that; you only want to keep files that are in use by branches and tags. Then you can use `git annex dropunused --force` with a range of files, which will delete them even if it's the last copy.
+
+Finally, sometimes you want to remove unused files from a special remote. To accomplish this, pass `--from remotename` to the unused and dropunused commands, and they will act on
+files stored in that remote, rather than on the local repository.
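+
+For example, a sketch assuming the special remote is named `myremote`:
+
+    git annex unused --from myremote
+    git annex dropunused --from myremote 1-10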
+
+## let the assistant take care of it
+
+If you're using the git-annex assistant, you don't normally need to worry about this. Just delete files however you normally would. The assistant will try to migrate unused file contents away from your local repository and store them in whatever backup repositories you've set up.
+
+## delete all the copies method
+
+You have a file. You want that file to immediately vanish from the face of the earth to the best of your abilities.
+
+Note that, since git-annex deduplicates files by default, any files with
+the same content will be removed by these commands.
+
+1. `git annex drop --force file`
+2. `git annex whereis file`
+3. `git annex drop --force file --from $repo` (repeat for each repository listed by the whereis command)
+4. `rm file; git annex sync`
+
+Of course, if you have offline backup repositories that contain this file, you'll have to bring them online before you can drop it from them, etc.
diff --git a/doc/tips/dumb_metadata_extraction_from_xbmc.mdwn b/doc/tips/dumb_metadata_extraction_from_xbmc.mdwn
index 652c37e5b..e3611ec30 100644
--- a/doc/tips/dumb_metadata_extraction_from_xbmc.mdwn
+++ b/doc/tips/dumb_metadata_extraction_from_xbmc.mdwn
@@ -20,7 +20,7 @@ Or just show the files that haven't been played yet:
git annex view playCount=0
-Use `git checkout master` to reset the view. Note that the above will flatten the tree hierarchy, which you may not way. Try this in that case:
+Use `git checkout master` to reset the view. Note that the above will flatten the tree hierarchy, which you may not want. Try this in that case:
git annex view playCount=0 films/=*
diff --git a/doc/tips/dumb_metadata_extraction_from_xbmc/git-annex-xbmc-playcount.pl b/doc/tips/dumb_metadata_extraction_from_xbmc/git-annex-xbmc-playcount.pl
index 76ad33649..85b97188f 100644
--- a/doc/tips/dumb_metadata_extraction_from_xbmc/git-annex-xbmc-playcount.pl
+++ b/doc/tips/dumb_metadata_extraction_from_xbmc/git-annex-xbmc-playcount.pl
@@ -1,28 +1,227 @@
#! /usr/bin/perl -w
-my $dbpath="/home/video/.xbmc/userdata/Database/MyVideos75.db";
-my $prefix="/home/media/video/";
-
-my @lines = `echo 'SELECT playCount, path.strPath, files.strFileName FROM movie JOIN files ON files.idFile=movie.idFile JOIN path ON path.idPath=files.idPath;' | sqlite3 $dbpath`;
-for (@lines) {
- my ($count, $dir, $file) = split /\|/;
- chomp $file;
- # empty or non-numeric count is zero
- if ($count !~ /[0-9]/) {
- $count = 0;
+use Getopt::Long;
+use Pod::Usage;
+
+my $help = 0;
+my $usage = 0;
+my $dryrun = 0;
+my $verbose = 0;
+my $path = '';
+my $annex = '';
+my $home = $ENV{'HOME'};
+my $dbpath = '';   # path to the newest MyVideos*.db database, set in main()
+
+sub main() {
+ checkargs();
+ if (!$path) {
+ $path = $home . '/.xbmc/userdata/Database';
}
- $dir =~ s/$prefix//;
- if ($file =~ s#stack://##) {
- for (split /,/, $file) {
- s/$prefix//;
- s/^ //;
- s/ $//;
- my @cmd = (qw(git annex metadata --set), "playCount=$count", $_);
- system(@cmd);
- }
+ print("# checking XBMC directory '$path'\n") if ($verbose);
+ $dbpath = finddb($path);
+ if (!$dbpath) {
+ pod2usage("$0: can't find a XBMC database in '$path'.");
+ }
+ print("# using database '$dbpath'\n") if ($verbose);
+ checkdb();
+}
+
+# list videos database, find the latest one
+# modified version of
+# http://stackoverflow.com/questions/4651092/getting-the-list-of-files-sorted-by-modification-date-in-perl
+sub finddb($) {
+ my $path = shift(@_);
+ opendir my($dirh), $path or die "can't opendir $path: $!";
+ my @flist = sort { -M $a <=> -M $b } # Sort by modification time
+ map { "$path/$_" } # We need full paths for sorting
+ grep { /^MyVideos.*\.db$/ }
+ readdir $dirh;
+ closedir $dirh;
+    # return the newest database, if at least one was found
+    if ($#flist >= 0) {
+ return $flist[0];
}
else {
- my @cmd = (qw(git annex metadata --set), "playCount=$count", "$dir$file");
- system(@cmd);
+ return 0;
+ }
+}
+
+sub checkargs() {
+    GetOptions('h|?' => \$help,
+               'help|usage' => \$usage,
+               # we want to operate on relative paths, so set this to
+               # the root of the git annex repo
+               'annex=s' => \$annex,
+               'path=s' => \$path,
+               'home=s' => \$home,
+               'dryrun|n' => \$dryrun,
+               'verbose|v' => \$verbose,
+    )
+        or die("Error parsing commandline\n");
+    # check the help flags only after GetOptions has filled them in
+    pod2usage(1) if $help;
+    pod2usage(-exitval => 0, -verbose => 2) if $usage;
+}
+
+sub checkdb() {
+ my @lines = `echo 'SELECT playCount, path.strPath, files.strFileName FROM movie JOIN files ON files.idFile=movie.idFile JOIN path ON path.idPath=files.idPath;' | sqlite3 $dbpath`;
+ print "# finding files...\n" if $verbose;
+ for (@lines) {
+ my ($count, $dir, $file) = split /\|/;
+ chomp $file;
+ # empty or non-numeric count is zero
+ if ($count !~ /[0-9]/) {
+ $count = 0;
+ }
+ print "# $dir/$file\n" if $verbose;
+ if ($file =~ s#stack://##) {
+ for (split /,/, $file) {
+ s/$annex//;
+ s/^ //;
+ s/ $//;
+ my @cmd = (qw(git annex metadata --set), "playCount=$count", $_);
+ if ($dryrun) {
+ print join(' ', @cmd) . "\n";
+ }
+ else {
+ system(@cmd);
+ }
+ }
+ }
+ else {
+ $dir =~ s/$annex//;
+ my @cmd = (qw(git annex metadata --set), "playCount=$count", "$dir$file");
+ if ($dryrun) {
+ print join(' ', @cmd) . "\n";
+ }
+ else {
+ system(@cmd);
+ }
+ }
}
}
+
+main();
+
+__END__
+=encoding utf8
+
+=head1 NAME
+
+git-annex-xbmc-playcount - register XBMC playcounts as git-annex metadata
+
+=head1 SYNOPSIS
+
+git-annex-xbmc-playcount [--path .xbmc/userdata/Database]
+
+ Options:
+ -h short usage
+ --help complete help
+    --dryrun, -n do nothing and show the commands that would be run
+ --annex path to the git-annex repo
+ --home the home directory where the .xbmc directory is located
+ --path the location of the Database directory of XBMC, overrides --home
+ --verbose show interaction details with the database
+
+=head1 DESCRIPTION
+
+This program will look into the XBMC database for the "playcount"
+field to register that number as metadata in the git-annex repository.
+
+=head1 OPTIONS
+
+=over 8
+
+=item B<--dryrun>
+
+Do nothing but show all the steps that would be run. The output can be
+piped through a POSIX shell after inspection. B<-n> is an alias for
+this option. Example:
+
+ git-annex-xbmc-playcount -n | tee runme
+ # inspect the output
+ sh < runme
+
+=item B<--annex>
+
+This option allows the user to specify the root of the git-annex
+repository, which is then stripped off the paths found in the XBMC
+database.
+
+=item B<--home>
+
+Home of the user running XBMC. If not specified, defaults to the $HOME
+environment variable. The script will look into
+B<$home/.xbmc/userdata/Database> for a file matching
+B<^MyVideos.*\.db$> and will fail if none is found.
+
+=item B<--path>
+
+Manually specify the path to B<.xbmc/userdata/Database>. This
+overrides B<--home>.
+
+Note that this doesn't point directly to the database itself, because
+there are usually many database files and we want to automatically
+find the latest. This may be a stupid limitation.
+
+=item B<--verbose>
+
+Show more information about path discovery. Doesn't obstruct
+B<--dryrun> output because lines are prefixed with C<#>.
+
+=back
+
+=head1 EXAMPLES
+
+You have a git annex in B</srv/video> and XBMC is run as the
+B<video> user and you want to be cautious:
+
+ $ ./git-annex-xbmc-playcount.pl --home /home/video/ -n --annex /srv/video/ | tee set-metadata
+ git annex metadata --set playCount=0 films/Animal.Farm.1954.DVDRip.DivX-MDX.avi
+
+This looks about right, set the metadata:
+
+ $ git annex metadata --set playCount=0 films/Animal.Farm.1954.DVDRip.DivX-MDX.avi
+ metadata films/Animal.Farm.1954.DVDRip.DivX-MDX.avi
+ lastchanged=2014-10-04@22-17-42
+ playCount=0
+ playCount-lastchanged=2014-10-04@22-17-42
+ ok
+ (Recording state in git...)
+
+=head1 ENVIRONMENT
+
+B<$HOME> is looked into to find the B<.xbmc> home directory if none of
+B<--home> or B<--path> is specified.
+
+=head1 FILES
+
+=over 8
+
+=item B<$HOME/.xbmc/userdata/Database/MyVideos.*\.db>
+
+This is where we assume the SQLite database of videos XBMC uses is
+stored.
+
+=back
+
+=head1 BUGS
+
+If there are pipes (C<|>) in filenames, the script may fail to find
+the files properly. We would need to rewrite the database code to use
+B<DBD::SQLite>(3pm) instead of a pipe to B<sqlite3>(1).
+
+=head1 LIMITATIONS
+
+It took longer writing this help than writing the stupid script.
+
+The script will not tag files not yet detected by XBMC.
+
+The script is not incremental, so it will repeatedly add the same
+counts to files it has already found.
+
+=head1 SEE ALSO
+
+B<git-annex>(1), B<xbmc>(1)
+
+=head1 AUTHOR
+
+Written by Antoine Beaupré <anarcat@debian.org>
diff --git a/doc/tips/file_manager_integration.mdwn b/doc/tips/file_manager_integration.mdwn
index b81d75819..8f47a3d68 100644
--- a/doc/tips/file_manager_integration.mdwn
+++ b/doc/tips/file_manager_integration.mdwn
@@ -31,7 +31,7 @@ XFCE uses the Thunar file manager, which can also be easily configured to allow
for drop, and for get:
- git-annex drop --notify-start --notify-finish -- %F
+ git-annex get --notify-start --notify-finish -- %F
This gives me the resulting config on disk, in `.config/Thunar/uca.xml`:
diff --git a/doc/tips/file_manager_integration/comment_3_e7096737268cf66fce2709e9e4937f51._comment b/doc/tips/file_manager_integration/comment_3_e7096737268cf66fce2709e9e4937f51._comment
new file mode 100644
index 000000000..1c3c7eeb5
--- /dev/null
+++ b/doc/tips/file_manager_integration/comment_3_e7096737268cf66fce2709e9e4937f51._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://id.koumbit.net/anarcat"
+ ip="72.0.72.144"
+ subject="comment 3"
+ date="2014-10-01T02:02:39Z"
+ content="""
+for some reason this doesn't work in gnome2. i had to add the shortcuts in /usr/share/nautilus-scripts (iirc). --[[anarcat]]
+"""]]
diff --git a/doc/tips/googledriveannex/comment_5_b547ee81946e14975f082f22ccbea035._comment b/doc/tips/googledriveannex/comment_5_b547ee81946e14975f082f22ccbea035._comment
new file mode 100644
index 000000000..93b7fc508
--- /dev/null
+++ b/doc/tips/googledriveannex/comment_5_b547ee81946e14975f082f22ccbea035._comment
@@ -0,0 +1,29 @@
+[[!comment format=mdwn
+ username="hugo"
+ ip="37.160.8.232"
+ subject="'content-length' error"
+ date="2014-10-05T19:40:23Z"
+ content="""
+I moved a big PDF to Google Drive (with shared encryption).
+
+Now, when I try to get it again:
+
+ get Documents/Guyau - The Non-Religion of the Future, nonreligionoffut00guyarich.pdf (from googledrive...) (gpg)
+ Traceback (most recent call last):
+ File \"/usr/bin/git-annex-remote-googledrive\", line 411, in <module>
+ common.startRemote()
+ File \"/usr/share/googledriveannex-git/lib/CommonFunctions.py\", line 555, in startRemote
+ sys.modules[\"__main__\"].transfer(line)
+ File \"/usr/bin/git-annex-remote-googledrive\", line 372, in transfer
+ if getFile(line[2], \" \".join(line[3:]), folder):
+ File \"/usr/bin/git-annex-remote-googledrive\", line 257, in getFile
+ ret = common.fetchPage({\"link\": download_url, \"headers\": [(\"Authorization\", \"Bearer \" + credentials.access_token)], \"progress\": \"true\"})
+ File \"/usr/share/googledriveannex-git/lib/CommonFunctions.py\", line 207, in fetchPage
+ totalsize = int(con.headers['content-length'])
+ File \"/usr/lib/python2.7/rfc822.py\", line 388, in __getitem__
+ return self.dict[name.lower()]
+ KeyError: 'content-length'
+
+It works for smaller files. Is there a limit on the file size?
+
+"""]]
diff --git a/doc/tips/googledriveannex/comment_6_3a693129a0928b327c7ac4ef45c96acb._comment b/doc/tips/googledriveannex/comment_6_3a693129a0928b327c7ac4ef45c96acb._comment
new file mode 100644
index 000000000..122cdc898
--- /dev/null
+++ b/doc/tips/googledriveannex/comment_6_3a693129a0928b327c7ac4ef45c96acb._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="hugo"
+ ip="37.160.8.232"
+ subject="Re: 'content-length' error "
+ date="2014-10-05T19:43:04Z"
+ content="""
+The Google Drive interface tells me:
+
+Size 29,776,826 bytes
+"""]]
diff --git a/doc/tips/googledriveannex/comment_7_3a645a0cd1e4c939b7a4b8a97a0e9b03._comment b/doc/tips/googledriveannex/comment_7_3a645a0cd1e4c939b7a4b8a97a0e9b03._comment
new file mode 100644
index 000000000..970a5e7b5
--- /dev/null
+++ b/doc/tips/googledriveannex/comment_7_3a645a0cd1e4c939b7a4b8a97a0e9b03._comment
@@ -0,0 +1,26 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 7"
+ date="2014-10-06T15:23:59Z"
+ content="""
+Hugo, I didn't write this code, but it looks to me like you could work around the problem by changing line 207 of lib/CommonFunctions.py:
+
+<pre>
+diff --git a/lib/CommonFunctions.py b/lib/CommonFunctions.py
+index 050b93e..083f5d6 100644
+--- a/lib/CommonFunctions.py
++++ b/lib/CommonFunctions.py
+@@ -204,7 +204,7 @@ def fetchPage(params={}):
+ if get(\"progress\"):
+ data = False
+ tdata = \"\"
+- totalsize = int(con.headers['content-length'])
++ totalsize = 0
+ chunksize = totalsize / 100
+ if chunksize < 4096:
+ chunksize = 4096
+</pre>
+
+Probably the API used to return a content-length header, and no longer does, or doesn't do so reliably. It does not seem to be used for anything too important -- this change will break git-annex's progress display a little bit, perhaps.
+"""]]
diff --git a/doc/tips/googledriveannex/comment_8_7df56c426b27f12dfde09edf345cb76b._comment b/doc/tips/googledriveannex/comment_8_7df56c426b27f12dfde09edf345cb76b._comment
new file mode 100644
index 000000000..d9bcc333c
--- /dev/null
+++ b/doc/tips/googledriveannex/comment_8_7df56c426b27f12dfde09edf345cb76b._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="hugo"
+ ip="79.212.203.201"
+ subject="Re: comment 7 Re: 'content-length' error "
+ date="2014-10-06T16:45:03Z"
+ content="""
+Thanks Joey, I was able to get the file after this modification. I’ll make a pull request on Github.
+"""]]
diff --git a/doc/tips/using_the_web_as_a_special_remote/comment_8_3f32d536f51d5e9908953caf5736b0a0._comment b/doc/tips/using_the_web_as_a_special_remote/comment_8_3f32d536f51d5e9908953caf5736b0a0._comment
new file mode 100644
index 000000000..c3403500a
--- /dev/null
+++ b/doc/tips/using_the_web_as_a_special_remote/comment_8_3f32d536f51d5e9908953caf5736b0a0._comment
@@ -0,0 +1,16 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawnvr2UPmp7ABeH0yI8KGAHCqFhl91Ju4Tc"
+ nickname="Calvin"
+ subject="HTTP Authentication?"
+ date="2014-09-29T21:37:44Z"
+ content="""
+Hi!
+
+I have a somewhat interesting use case. My course notes require HTTP authentication. This is possible with wget, but is there any way to make git annex do it?
+
+[wget authentication stuff!](http://stackoverflow.com/questions/4272770/wget-with-authentication)
+
+It would be nice to have the user and pass encrypted with GPG too. This might be a strange use case, but I can see other people wanting to do something like this in the future.
+
+Thanks!
+"""]]
diff --git a/doc/tips/using_the_web_as_a_special_remote/comment_9_b420b1f320d620a9909cce5086c549bf._comment b/doc/tips/using_the_web_as_a_special_remote/comment_9_b420b1f320d620a9909cce5086c549bf._comment
new file mode 100644
index 000000000..d6b194d69
--- /dev/null
+++ b/doc/tips/using_the_web_as_a_special_remote/comment_9_b420b1f320d620a9909cce5086c549bf._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.55"
+ subject="comment 9"
+ date="2014-09-30T18:09:04Z"
+ content="""
+For urls using http basic auth, you can use the standard url form, http://username:password@example.org/url/ , which should work with `git annex addurl`. The url, including the password, will be stored in the git-annex branch though. If you want to protect the password from being exposed to anyone who gets a clone of the repository, just download manually, and then `git annex add` the file.
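+
+For example, a sketch with a made-up URL and credentials:
+
+    git annex addurl http://myuser:mypassword@example.org/notes/week1.pdf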
+"""]]
diff --git a/doc/todo/does_not_preserve_timestamps.mdwn b/doc/todo/does_not_preserve_timestamps.mdwn
new file mode 100644
index 000000000..0d8f2371d
--- /dev/null
+++ b/doc/todo/does_not_preserve_timestamps.mdwn
@@ -0,0 +1,16 @@
+### Please describe the problem.
+I see that files are synced between my computers with git-annex, but the timestamps do not match. The computer that receives a file always gives it the current time, rather than preserving the original timestamp.
+
+### What steps will reproduce the problem?
+Install git-annex on two computers. Connect them with XMPP. Then add cloud storage with shared encryption for transferring files. Since you also want a backup, choose "full backup" as the type of cloud storage.
+
+
+### What version of git-annex are you using? On what operating system?
+Downloaded binary package dated 13/09/2014, amd64, on Ubuntu 14.04.
+
+
+### Please provide any additional information below.
+
+Files are in sync. For example, I move a file with a timestamp of 01/01/2010 into my synced annex directory. Once the file gets transferred to the remote computer, it gets the current time, for example 20/09/2014, rather than keeping 01/01/2010.
+
+All computers are linux based, ext4 filesystems. File transfers are done through shared encryption rsync remote.
diff --git a/doc/todo/does_not_preserve_timestamps/comment_1_caf5e5cb17f4d05fff8c2fab661cd93f._comment b/doc/todo/does_not_preserve_timestamps/comment_1_caf5e5cb17f4d05fff8c2fab661cd93f._comment
new file mode 100644
index 000000000..48ec44d2b
--- /dev/null
+++ b/doc/todo/does_not_preserve_timestamps/comment_1_caf5e5cb17f4d05fff8c2fab661cd93f._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.144"
+ subject="comment 1"
+ date="2014-09-23T20:27:25Z"
+ content="""
+The closest git comes to storing a timestamp is the date of the last commit of a file for mtime, and first commit for ctime. However, those are pretty expensive to look up for a given file. And git doesn't try to preserve timestamps in checkouts at all, which argues that git-annex, at least at the command line, should not either.
+"""]]
diff --git a/doc/todo/does_not_preserve_timestamps/comment_2_c337fca1474b5b78f61ad6f421138ae4._comment b/doc/todo/does_not_preserve_timestamps/comment_2_c337fca1474b5b78f61ad6f421138ae4._comment
new file mode 100644
index 000000000..4b5a750cf
--- /dev/null
+++ b/doc/todo/does_not_preserve_timestamps/comment_2_c337fca1474b5b78f61ad6f421138ae4._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmK0703vNSIQsP1mGf-4MAPnsBZiSc6yVo"
+ nickname="Emre"
+ subject="comment 2"
+ date="2014-09-23T20:58:10Z"
+ content="""
+Thanks Joey for the comment.
+
+But when syncing two repos, timestamps are critical, at least for my use case. I can't lose this info, even if preserving it is expensive.
+
+I'd appreciate it if you could consider adding this for direct mode repos, i.e. when a file is synced to another repo and created there, it should carry at least the mtime of the file in the source repo. Owncloud sync does it, btsync does it, although I know git-annex is different from those.
+"""]]
diff --git a/doc/todo/does_not_preserve_timestamps/comment_3_9a3eeddc46e5a420575f00cb47caf703._comment b/doc/todo/does_not_preserve_timestamps/comment_3_9a3eeddc46e5a420575f00cb47caf703._comment
new file mode 100644
index 000000000..ba34823d4
--- /dev/null
+++ b/doc/todo/does_not_preserve_timestamps/comment_3_9a3eeddc46e5a420575f00cb47caf703._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmK0703vNSIQsP1mGf-4MAPnsBZiSc6yVo"
+ nickname="Emre"
+ subject="comment 3"
+ date="2014-09-23T21:15:29Z"
+ content="""
+Btw, git storing the last commit time as the mtime is not enough; it should store the original timestamp of the file, not the date of the commit. Hope I could explain, and hope this is something doable.
+"""]]
diff --git a/doc/todo/does_not_preserve_timestamps/comment_4_99b064259fc2e3c6eb83c3da3b2d3bac._comment b/doc/todo/does_not_preserve_timestamps/comment_4_99b064259fc2e3c6eb83c3da3b2d3bac._comment
new file mode 100644
index 000000000..08de75619
--- /dev/null
+++ b/doc/todo/does_not_preserve_timestamps/comment_4_99b064259fc2e3c6eb83c3da3b2d3bac._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://svario.it/gioele"
+ nickname="gioele"
+ subject="comment 4"
+ date="2014-09-24T07:15:09Z"
+ content="""
+You can try to store the timestamps just before commit and restore them on checkout.
+
+Have a look at [metastore](https://github.com/przemoc/metastore): it is a ready-made solution for plain git. Maybe you can adapt it to git-annex.
+"""]]
diff --git a/doc/todo/does_not_preserve_timestamps/comment_5_c95c8b9bd617830604500213c962fc7a._comment b/doc/todo/does_not_preserve_timestamps/comment_5_c95c8b9bd617830604500213c962fc7a._comment
new file mode 100644
index 000000000..7632548cd
--- /dev/null
+++ b/doc/todo/does_not_preserve_timestamps/comment_5_c95c8b9bd617830604500213c962fc7a._comment
@@ -0,0 +1,9 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlM_DRhi_5pJrTA0HbApHR25iAgy-NBXTY"
+ nickname="Tor Arne"
+ subject="comment 5"
+ date="2014-10-01T22:02:43Z"
+ content="""
+I was just about to start using git-annex for my data when I saw this bug report, which unfortunately is a blocker. Persisting basic metadata of files that are synced seems like a core feature of a file sync/transfer tool, so I'm really hoping this can be solved somehow :/
+
+"""]]
diff --git a/doc/todo/does_not_preserve_timestamps/comment_6_b99e00d0bc4258c4cb28b544b19ea3b8._comment b/doc/todo/does_not_preserve_timestamps/comment_6_b99e00d0bc4258c4cb28b544b19ea3b8._comment
new file mode 100644
index 000000000..67fe25595
--- /dev/null
+++ b/doc/todo/does_not_preserve_timestamps/comment_6_b99e00d0bc4258c4cb28b544b19ea3b8._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlM_DRhi_5pJrTA0HbApHR25iAgy-NBXTY"
+ nickname="Tor Arne"
+ subject="comment 6"
+ date="2014-10-01T22:39:01Z"
+ content="""
+Isn't this what the metadata feature does though? http://git-annex.branchable.com/design/metadata/
+
+With annex.genmetadata set to true, it should store year and month (but not day/time? If so, why not?)
+
+Is the missing piece of the puzzle to apply the metadata again on checkout?
+"""]]
diff --git a/doc/todo/read-only_removable_drives.mdwn b/doc/todo/read-only_removable_drives.mdwn
index 379074897..9dfc3569c 100644
--- a/doc/todo/read-only_removable_drives.mdwn
+++ b/doc/todo/read-only_removable_drives.mdwn
@@ -5,3 +5,13 @@ I would expect it to at least be able to leech the files off of it.
Otherwise, I would welcome advice on how to fix this problem without doing a `sudo chown -R` every time i plug this drive somewhere ... --[[anarcat]]
> Workaround: `sudo setfacl -R -m u:anarcat:rwx /media/foo/annex`
+
+Note: this seems like there was at least one dupe opened about this in [[bugs/annex_get_fails_from_read-only_filesystem]].
+
+I concede that this may refer to many different issues, so here's a short inventory of issues with readonly repositories:
+
+* trying to add an external readonly drive through the webapp: not detected: see [[todo/show_readonly_removable_drives_in_the_webapp]]
+* trying to add an external readonly drive through the commandline: fails to sync? - couldn't reproduce locally, i will need to go back to that machine for more tests :(
+* trying to add a ssh readonly remote through the webapp: fails to sync and considers the remote "git-only" (which also fails) - couldn't reproduce locally either - maybe this is related to the upgrade option in [[bugs/annex_get_fails_from_read-only_filesystem/]]
+* trying to add a local readonly remote through the webapp: fails to add, see [[bugs/cannot_add_local_readonly_repo_through_the_webapp]]
+* failing to sync with a readonly remote of a different version: still an issue, see [[bugs/annex_get_fails_from_read-only_filesystem/]] - at least content should be syncable even if the upgrade fails (think of failure conditions such as broken hard drives that are put in readonly mode or ddrescue'd disk images)
diff --git a/doc/todo/read-only_removable_drives/comment_2_08fced29b86b21f63bb0868747227e08._comment b/doc/todo/read-only_removable_drives/comment_2_08fced29b86b21f63bb0868747227e08._comment
new file mode 100644
index 000000000..e89e4a546
--- /dev/null
+++ b/doc/todo/read-only_removable_drives/comment_2_08fced29b86b21f63bb0868747227e08._comment
@@ -0,0 +1,12 @@
+[[!comment format=mdwn
+ username="https://id.koumbit.net/anarcat"
+ ip="72.0.72.144"
+ subject="this also affects ssh remotes"
+ date="2014-10-05T15:26:52Z"
+ content="""
+so i tried another experiment today: i tried to allow access to a remote user to my /srv/foo annex. the annex is writable by me, but not by the user, yet i was expecting the user to be able to sync with it. not push, mind you, but at least pull: in a \"git-only\" scenario, that would be perfectly possible. yet the assistant freaks out because it can't run git-annex on the repo because of a write failure on some pack files, and downgrades the repository to a \"git-only\" repo, which is also inaccurate: it can't sync the metadata either...
+
+i would have expected this repository to be marked as \"readonly\" and the user be capable of fetching new changes automatically from the \"central repo\".
+
+maybe that's the essence of the todo here... --[[anarcat]]
+"""]]
diff --git a/doc/todo/read-only_removable_drives/comment_3_2675e211c7bd248b7f7c1bbc6fd46679._comment b/doc/todo/read-only_removable_drives/comment_3_2675e211c7bd248b7f7c1bbc6fd46679._comment
new file mode 100644
index 000000000..3ce119e67
--- /dev/null
+++ b/doc/todo/read-only_removable_drives/comment_3_2675e211c7bd248b7f7c1bbc6fd46679._comment
@@ -0,0 +1,10 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 3"
+ date="2014-10-06T16:03:04Z"
+ content="""
+Note that I still don't have a test case that reproduces this. I have tried and failed a second time to reproduce the original reported problem.
+
+Also, one issue per bug report tends to result in happier developers.
+"""]]
diff --git a/doc/todo/read-only_removable_drives/comment_4_9e9bc6dd5fa8c4cf7f2511b771bd1bc7._comment b/doc/todo/read-only_removable_drives/comment_4_9e9bc6dd5fa8c4cf7f2511b771bd1bc7._comment
new file mode 100644
index 000000000..2f79d2f4a
--- /dev/null
+++ b/doc/todo/read-only_removable_drives/comment_4_9e9bc6dd5fa8c4cf7f2511b771bd1bc7._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 4"
+ date="2014-10-06T16:06:52Z"
+ content="""
+Of course, git-annex sync fails when it tries to git push to the removable repo. I don't see that causing any problems in practice.
+"""]]
diff --git a/doc/todo/read-only_removable_drives/comment_5_a693c5744bfc6c33f5605aa9d9c0bfe0._comment b/doc/todo/read-only_removable_drives/comment_5_a693c5744bfc6c33f5605aa9d9c0bfe0._comment
new file mode 100644
index 000000000..ed24a1054
--- /dev/null
+++ b/doc/todo/read-only_removable_drives/comment_5_a693c5744bfc6c33f5605aa9d9c0bfe0._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="http://joeyh.name/"
+ ip="209.250.56.54"
+ subject="comment 5"
+ date="2014-10-06T16:10:02Z"
+ content="""
+I guess that the actual problem encountered is entirely limited to the webapp, where if the user chooses \"Add another repository\", then enters the path to an existing, read-only repository, and clicks Make Repository, the webapp replies \"Cannot write a repository there.\"
+"""]]
diff --git a/doc/todo/read-only_removable_drives/comment_6_737e3d315f29a4fc61597ce4f9ec6206._comment b/doc/todo/read-only_removable_drives/comment_6_737e3d315f29a4fc61597ce4f9ec6206._comment
new file mode 100644
index 000000000..de662d9a5
--- /dev/null
+++ b/doc/todo/read-only_removable_drives/comment_6_737e3d315f29a4fc61597ce4f9ec6206._comment
@@ -0,0 +1,20 @@
+[[!comment format=mdwn
+ username="anarcat"
+ ip="70.83.139.100"
+ subject="maybe this should be split up, here are the issues i know about"
+ date="2014-10-06T16:14:36Z"
+ content="""
+well this specific issue is more about how git-annex cannot get files off a readonly medium, whether it is the webapp or not.
+
+maybe it would be wiser to split this up in separate bug reports, because I had at least three different situations:
+
+* trying to add an external readonly drive through the webapp: not detected
+* trying to add an external readonly drive through the commandline: fails to sync? - to be confirmed
+* trying to add a ssh readonly remote through the webapp: fails to sync and considers the remote \"git-only\" (which also fails)
+
+I would need to try the third case through the commandline.
+
+And then there's the upgrade scenario in [[bugs/annex_get_fails_from_read-only_filesystem/]].
+
+Did i miss anything?
+"""]]
diff --git a/doc/todo/read-only_removable_drives/comment_7_16c8652d38ae57db4ed1860a4733a18b._comment b/doc/todo/read-only_removable_drives/comment_7_16c8652d38ae57db4ed1860a4733a18b._comment
new file mode 100644
index 000000000..121d6768d
--- /dev/null
+++ b/doc/todo/read-only_removable_drives/comment_7_16c8652d38ae57db4ed1860a4733a18b._comment
@@ -0,0 +1,8 @@
+[[!comment format=mdwn
+ username="https://id.koumbit.net/anarcat"
+ ip="72.0.72.144"
+ subject="comment 7"
+ date="2014-10-20T20:04:09Z"
+ content="""
+alright, i documented those issues more distinctly in the summary. hopefully that will clear things up a little. i still need to do some work to reproduce two of those issues; maybe a lot of this is related to the upgrade problem mentioned in [[bugs/annex_get_fails_from_read-only_filesystem/]].
+"""]]
diff --git a/doc/todo/show_readonly_removable_drives_in_the_webapp.mdwn b/doc/todo/show_readonly_removable_drives_in_the_webapp.mdwn
new file mode 100644
index 000000000..e0e570fe0
--- /dev/null
+++ b/doc/todo/show_readonly_removable_drives_in_the_webapp.mdwn
@@ -0,0 +1,15 @@
+Coming from [[todo/read-only_removable_drives/]], this is use case 1: the user inserts an `ext`-formatted filesystem that he built at home (so files are owned by uid `1000`) into the office computer (where he is uid `1001`). Now, this is a limitation of UNIX-style removable drives, admittedly, but I would expect to be able to sync "down" from the drive to copy the contents locally.
+
+So in short, expected behavior:
+
+1. insert the drive
+2. drive is shown in the webapp menu
+3. add the drive as a remote for the local repo
+4. sync the content from the drive to the local repo
+
+Actual behavior:
+
+1. insert the drive
+2. drive is not shown in the webapp menu
+
+--[[anarcat]]
diff --git a/doc/todo/show_readonly_removable_drives_in_the_webapp/comment_1_c41140289f9b062e96cfd5d9d5382155._comment b/doc/todo/show_readonly_removable_drives_in_the_webapp/comment_1_c41140289f9b062e96cfd5d9d5382155._comment
new file mode 100644
index 000000000..faa49bf14
--- /dev/null
+++ b/doc/todo/show_readonly_removable_drives_in_the_webapp/comment_1_c41140289f9b062e96cfd5d9d5382155._comment
@@ -0,0 +1,15 @@
+[[!comment format=mdwn
+ username="joey"
+ subject="""comment 1"""
+ date="2014-10-21T16:33:25Z"
+ content="""
+The webapp only shows drives the user can write to, because
+in general, there are a vast number of mounted things in modern
+OSes that the user would be very puzzled to see listed as removable
+drives. Such as tmpfs and cgroup mount points, and efi boot partitions.
+Complicating the webapp with knowledge to filter such things out would be a
+constantly losing battle.
+
+I guess it would be ok to add a "not listed here?" link in the webapp
+that allowed choosing from a full list or entering the path by hand.
+"""]]
diff --git a/doc/todo/vicfg_comment_gotcha.mdwn b/doc/todo/vicfg_comment_gotcha.mdwn
new file mode 100644
index 000000000..910af01a4
--- /dev/null
+++ b/doc/todo/vicfg_comment_gotcha.mdwn
@@ -0,0 +1,20 @@
+A user might run vicfg and want to reset a line back to the default value
+from the value they have periously set. A natural way to do that is to
+comment out the line (or delete it). However, what that actually does is
+vicfg parses the result and skips over that setting, since it's not
+present, and so no change is saved.
+
+It could try to parse commented out lines and detect deleted lines,
+but that way lies madness. Also, it's not at all clear what the "default"
+should be in response to such an action. The default varies per type of
+configuration, and vicfg doesn't know about defaults.
+
+> [[fixed|done]]; this was a job for Data.Default! --[[Joey]]
+
+Instead, I think it should detect when a setting provided in the input
+version of the file is not present in the output version, and plop the user
+back into the editor with an error, telling them that cannot be handled,
+and suggesting they instead change the value to the value they now want it
+to have.
+
+> Nah, too complicated.
diff --git a/doc/todo/webapp_nudge_when_less_than_numcopies_clones.mdwn b/doc/todo/webapp_nudge_when_less_than_numcopies_clones.mdwn
new file mode 100644
index 000000000..ee38d0fff
--- /dev/null
+++ b/doc/todo/webapp_nudge_when_less_than_numcopies_clones.mdwn
@@ -0,0 +1,7 @@
+Currently, nothing stops a user from setting up ~/annex, adding some special remote, and never once ending up with a clone of their repository, so there is really no backup of the repository as a whole, despite the special remotes.
+
+Potentially adding to the confusion, they might have remotes in repository groups "full backup" or "backup", and so think everything is backed up.
+
+Webapp could count the number of known remote uuids that are not special remotes, and require there to be at least numcopies of them (excluding the current repo I suppose), and pop up a nudge with a button that presents the various available ways to make a non-special remote.
+
+Working out if a remote uuid is a special remote is probably the hard bit. A special remote will be listed in uuid.log, with a type other than gcrypt or git. Any other uuid that is not dead can count as 1 clone. This does not handle git remotes that are not using git-annex (e.g. github), so it could also look through the git remote list and count any that don't have an annex-uuid.
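+
+For illustration, a rough command-line approximation of that last check
+(it only counts git remotes that advertise an annex-uuid, so it misses
+clones known solely from uuid.log; the numbers are placeholders):
+
+    wanted=2   # whatever numcopies is configured to
+    clones=$(git config --get-regexp '^remote\..*\.annex-uuid' | wc -l)
+    if [ "$clones" -lt "$wanted" ]; then
+        echo "only $clones non-special-remote clones known; want $wanted"
+    fi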
diff --git a/doc/todo/whishlist:_temporary_relinking_to_remotes.mdwn b/doc/todo/whishlist:_temporary_relinking_to_remotes.mdwn
new file mode 100644
index 000000000..3b1ca70eb
--- /dev/null
+++ b/doc/todo/whishlist:_temporary_relinking_to_remotes.mdwn
@@ -0,0 +1,30 @@
+Imagine the following situation:
+You have a directory structure like this:
+
+`./`
+`+--dir1`
+`|+--file1 (local)`
+`|+--file2 (remote1)`
+`|+--file3 (remote2)`
+
+Now when these files are quite big and you need them in one directory temporarily you would need to use `git annex get dir1` to copy them all over to local. This can take some time.
+
+I wish we had a command like this:
+`git annex getlinks dir1`
+where git annex would try to not link to the missing local objects but to the remote ones. So there is no need to copy the data around just to use it for a short time. After you are done you could use `git annex resetlinks dir1` to reset the links to the local objects.
+
+I know that many special remotes will not support this without much hassle, but it would be cool to be able to get at least the links from external drives and maybe ssh remotes via sshfs.
+To keep the data consistent, there could be a constraint that every action (add, sync, commit or others) first issues a `resetlinks`.
+
+What do you think of that?
+
+> Already implemented via the `annex.hardlink` configuration.
+>
+> I don't think that separate commands/options to control whether or not
+> to hard link makes sense, because a repository containing hardlinks
+> needs to be set as untrusted to avoid breaking numcopies counting.
+> Which is done automatically by git-annex when it detects the repository
+> was cloned with `git clone --shared`.
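+>
+> A minimal sketch of that (assuming the drive's repository is mounted at
+> /media/drive/annex):
+>
+>     git clone --shared /media/drive/annex ~/annex
+>     cd ~/annex
+>     git annex get dir1    # objects get hardlinked rather than copied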
+>
+> [[done]]
+> --[[Joey]]
diff --git a/doc/todo/wishlist:_git_annex_diff/comment_2_2e8324f47b66dce385263e258e94da16._comment b/doc/todo/wishlist:_git_annex_diff/comment_2_2e8324f47b66dce385263e258e94da16._comment
new file mode 100644
index 000000000..83501b791
--- /dev/null
+++ b/doc/todo/wishlist:_git_annex_diff/comment_2_2e8324f47b66dce385263e258e94da16._comment
@@ -0,0 +1,32 @@
+[[!comment format=mdwn
+ username="Bram"
+ ip="81.20.68.186"
+ subject="Diff of unlocked file"
+ date="2014-10-14T10:11:04Z"
+ content="""
+I wrote a little shell script that implements part of this request. It shows the diff between an unlocked file and its locked version (i.e. the current edits that have not yet been annexed).
+This only works in non-direct mode, and obviously with 'diffable' content only.
+
+Usage is simple, the only parameter it requires is the unlocked filename.
+
+ #!/bin/bash
+
+ DIFF=\"diff\"
+ FILE=\"$1\"
+ KEY=$(git annex lookupkey \"$FILE\")
+
+ GITPATH=\"$(git rev-parse --show-toplevel)/.git\"
+ ANNEXPATH=$GITPATH/annex/objects/$(git annex examinekey --format='${hashdirmixed}${key}/${key}' \"$KEY\")
+
+    if [ -L \"$FILE\" ]; then
+        echo \"$FILE is not unlocked.\" > /dev/stderr
+        exit 1
+    else
+        if [ -r \"$ANNEXPATH\" ]; then
+            $DIFF \"$ANNEXPATH\" \"$FILE\"
+        else
+            echo \"Cannot find $ANNEXPATH\" > /dev/stderr
+            exit 1
+        fi
+    fi
+"""]]
diff --git a/doc/upgrades/insecure_embedded_creds.mdwn b/doc/upgrades/insecure_embedded_creds.mdwn
new file mode 100644
index 000000000..a221fb51f
--- /dev/null
+++ b/doc/upgrades/insecure_embedded_creds.mdwn
@@ -0,0 +1,42 @@
+git-annex had a bug in the S3 and Glacier remotes where if embedcreds=yes
+was set, and the remote used encryption=pubkey or encryption=hybrid,
+the embedded AWS credentials were stored in the git repository
+in (effectively) plaintext, not encrypted as they were supposed to be.
+
+That means that anyone who gets a copy of the git repository can extract the
+AWS credentials from it, which would be bad.
+
+A remote with this problem cannot be enabled using `git annex
+enableremote`. Old versions of git-annex will fail with a gpg error;
+the current version will fail with a pointer to this web page.
+
+If your repository has this problem, choose one of these approaches
+to deal with it:
+
+1. Change your AWS credentials, so the ones stored in the clear in git
+ won't be used.
+
+ After changing the credentials, make sure you have a
+ fixed version of git-annex, and you can then re-embed the new creds
+ into the repository, encrypted this time, by setting the
+ `AWS_SECRET_ACCESS_KEY` and `AWS_ACCESS_KEY_ID` environment variables,
+ and running `git annex enableremote $remotename embedcreds=yes`
+
+2. Fix the problem and then remove the history of the git-annex branch
+ of the repository.
+
+ Make sure you have a fixed version of git-annex, and force git-annex
+  to rewrite the embedded creds, with encryption this time, by setting the
+  `AWS_SECRET_ACCESS_KEY` and `AWS_ACCESS_KEY_ID` environment variables and
+  running `git annex enableremote $remotename embedcreds=yes` (example below).
+
+ Then, to get rid of old versions of the git-annex branch that still
+ contain the creds in cleartext, you can use `git annex forget`;
+ note that it will remove other historical data too.
+
+ Keep in mind that this will not necessarily delete data from clones
+ you do not control.
+
+3. If you're sure that you're the only one who has access to the repository,
+ you could decide to leave it as-is. It's no more insecure than if you
+ had used encryption=shared in the first place when setting it up.
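+
+For approaches 1 and 2, the re-embedding step looks roughly like this
+(placeholder values; substitute your own remote name and new keys):
+
+    export AWS_ACCESS_KEY_ID=AKIA...        # the *new* key id
+    export AWS_SECRET_ACCESS_KEY=...        # the *new* secret key
+    git annex enableremote myremote embedcreds=yes
+    git annex forget    # approach 2 only; see the caveats above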
diff --git a/git-annex.cabal b/git-annex.cabal
index 425bc3f90..94b1ed3be 100644
--- a/git-annex.cabal
+++ b/git-annex.cabal
@@ -1,5 +1,5 @@
Name: git-annex
-Version: 5.20140916
+Version: 5.20141013
Cabal-Version: >= 1.8
License: GPL-3
Maintainer: Joey Hess <joey@kitenet.net>
@@ -18,13 +18,18 @@ Description:
dealing with files larger than git can currently easily handle, whether due
to limitations in memory, time, or disk space.
.
- Even without file content tracking, being able to manage files with git,
- move files around and delete files with versioned directory trees, and use
- branches and distributed clones, are all very handy reasons to use git. And
- annexed files can co-exist in the same git repository with regularly
- versioned files, which is convenient for maintaining documents, Makefiles,
- etc that are associated with annexed files but that benefit from full
- revision control.
+  It can store large files in many places, from local hard drives to a
+ large number of cloud storage services, including S3, WebDAV,
+ and rsync, with a dozen cloud storage providers usable via plugins.
+ Files can be stored encrypted with gpg, so that the cloud storage
+ provider cannot see your data. git-annex keeps track of where each file
+ is stored, so it knows how many copies are available, and has many
+ facilities to ensure your data is preserved.
+ .
+ git-annex can also be used to keep a folder in sync between computers,
+ noticing when files are changed, and automatically committing them
+ to git and transferring them to other computers. The git-annex webapp
+ makes it easy to set up and use git-annex this way.
Flag S3
Description: Enable S3 support
@@ -120,7 +125,7 @@ Executable git-annex
GHC-Options: -O2
if (os(windows))
- Build-Depends: Win32, Win32-extras, unix-compat (>= 0.4.1.3)
+ Build-Depends: Win32, Win32-extras, unix-compat (>= 0.4.1.3), setenv
C-Sources: Utility/winprocess.c
else
Build-Depends: unix
diff --git a/git-annex.hs b/git-annex.hs
index f1af0eea5..f2005e13e 100644
--- a/git-annex.hs
+++ b/git-annex.hs
@@ -19,9 +19,6 @@ import qualified Test
#ifdef mingw32_HOST_OS
import Utility.UserInfo
import Utility.Env
-import Config.Files
-import System.Process
-import System.Exit
#endif
main :: IO ()
@@ -33,7 +30,9 @@ main = do
| isshell n = CmdLine.GitAnnexShell.run ps
| otherwise =
#ifdef mingw32_HOST_OS
- winEnv gitannex ps
+ do
+ winEnv
+ gitannex ps
#else
gitannex ps
#endif
@@ -49,37 +48,17 @@ main = do
#ifdef mingw32_HOST_OS
{- On Windows, if HOME is not set, probe it and set it.
- - This is a workaround for some Cygwin commands needing HOME to be set,
- - and for there being no known way to set environment variables on
- - Windows, except by passing an environment in each call to a program.
- - While ugly, this workaround is easier than trying to ensure HOME is set
- - in all calls to the affected programs.
+ - This is a workaround for some Cygwin commands needing HOME to be set.
-
- If TZ is set, unset it.
- TZ being set can interfere with workarounds for Windows timezone
- horribleness, and prevents getCurrentTimeZone from seeing the system
- time zone.
- -
- - Due to Windows limitations, have to re-exec git-annex with the new
- - environment.
-}
-winEnv :: ([String] -> IO ()) -> [String] -> IO ()
-winEnv a ps = do
- e <- getEnvironment
+winEnv :: IO ()
+winEnv = do
home <- myHomeDir
- let e' = wantedenv e home
- if (e' /= e)
- then do
- cmd <- readProgramFile
- (_, _, _, pid) <- createProcess (proc cmd ps)
- { env = Just e' }
- exitWith =<< waitForProcess pid
- else a ps
- where
- wantedenv e home = delEntry "TZ" $ case lookup "HOME" e of
- Nothing -> e
- Just _ -> addEntries
- [ ("HOME", home)
- , ("CYGWIN", "nodosfilewarning")
- ] e
+ setEnv "HOME" home False
+ setEnv "CYGWIN" "nodosfilewarning" True
+ unsetEnv "TZ"
#endif
diff --git a/standalone/android/Makefile b/standalone/android/Makefile
index 82df0db7b..ea3d1e1cc 100644
--- a/standalone/android/Makefile
+++ b/standalone/android/Makefile
@@ -115,14 +115,15 @@ $(GIT_ANNEX_ANDROID_SOURCETREE)/busybox/build-stamp: busybox_config
touch $@
$(GIT_ANNEX_ANDROID_SOURCETREE)/git/build-stamp: git.patch
- cat git.patch | (cd $(GIT_ANNEX_ANDROID_SOURCETREE)/git && git am)
+ # This is a known-good version that the patch works with.
+ cat git.patch | (cd $(GIT_ANNEX_ANDROID_SOURCETREE)/git && git reset --hard f9dc5d65ca31cb79893e1296efe37727bf58f3f3 && git am)
cd $(GIT_ANNEX_ANDROID_SOURCETREE)/git && $(MAKE) install NO_OPENSSL=1 NO_GETTEXT=1 NO_GECOS_IN_PWENT=1 NO_GETPASS=1 NO_NSEC=1 NO_MKDTEMP=1 NO_PTHREADS=1 NO_PERL=1 NO_CURL=1 NO_EXPAT=1 NO_TCLTK=1 NO_ICONV=1 HAVE_CLOCK_GETTIME= prefix= DESTDIR=installed-tree
touch $@
$(GIT_ANNEX_ANDROID_SOURCETREE)/rsync/build-stamp: rsync.patch
# This is a known-good version that the patch works with.
cat rsync.patch | (cd $(GIT_ANNEX_ANDROID_SOURCETREE)/rsync && git reset --hard eec26089b1c7bdbb260674480ffe6ece257bca63 && git am)
- cp $(GIT_ANNEX_ANDROID_SOURCETREE)/automake/lib/config.sub $(GIT_ANNEX_ANDROID_SOURCETREE)/automake/lib/config.guess $(GIT_ANNEX_ANDROID_SOURCETREE)/rsync/
+ cp /usr/share/misc/config.sub /usr/share/misc/config.guess $(GIT_ANNEX_ANDROID_SOURCETREE)/rsync/
cd $(GIT_ANNEX_ANDROID_SOURCETREE)/rsync && ./configure --host=arm-linux-androideabi --disable-locale --disable-iconv-open --disable-iconv --disable-acl-support --disable-xattr-support
cd $(GIT_ANNEX_ANDROID_SOURCETREE)/rsync && $(MAKE)
touch $@
@@ -153,7 +154,6 @@ source: $(GIT_ANNEX_ANDROID_SOURCETREE)
$(GIT_ANNEX_ANDROID_SOURCETREE):
mkdir -p $(GIT_ANNEX_ANDROID_SOURCETREE)
- git clone git://git.savannah.gnu.org/automake.git $(GIT_ANNEX_ANDROID_SOURCETREE)/automake
git clone git://git.debian.org/git/d-i/busybox $(GIT_ANNEX_ANDROID_SOURCETREE)/busybox
git clone git://git.kernel.org/pub/scm/git/git.git $(GIT_ANNEX_ANDROID_SOURCETREE)/git
git clone git://git.samba.org/rsync.git $(GIT_ANNEX_ANDROID_SOURCETREE)/rsync
diff --git a/standalone/android/buildchroot b/standalone/android/buildchroot
index 396beab78..e64bdd03d 100755
--- a/standalone/android/buildchroot
+++ b/standalone/android/buildchroot
@@ -5,7 +5,7 @@ if [ "$(whoami)" != root ]; then
exit 1
fi
-debootstrap --arch=i386 stable debian-stable-android
+debootstrap --arch=i386 jessie debian-stable-android
cp $0-inchroot debian-stable-android/tmp
cp $0-inchroot-asuser debian-stable-android/tmp
cp $(dirname $0)/abiversion debian-stable-android/tmp
diff --git a/standalone/android/buildchroot-inchroot b/standalone/android/buildchroot-inchroot
index 5c462d8cc..8300999b5 100755
--- a/standalone/android/buildchroot-inchroot
+++ b/standalone/android/buildchroot-inchroot
@@ -10,23 +10,18 @@ fi
# java needs this mounted to work
mount -t proc proc /proc || true
-echo "deb-src http://ftp.us.debian.org/debian stable main" >> /etc/apt/sources.list
+echo "deb-src http://ftp.us.debian.org/debian jessie main" >> /etc/apt/sources.list
apt-get update
apt-get -y install build-essential ghc git libncurses5-dev cabal-install
apt-get -y install happy alex
-apt-get -y install llvm-3.0 # not 3.1; buggy on arm. 3.2 is ok too
+apt-get -y install llvm-3.4
apt-get -y install ca-certificates curl file m4 autoconf zlib1g-dev
apt-get -y install libgnutls-dev libxml2-dev libgsasl7-dev pkg-config c2hs
apt-get -y install ant default-jdk rsync wget gnupg lsof
apt-get -y install gettext unzip python
-apt-get -y install locales
-# works around a dependncy issue with the current hjsmin
-apt-get -y install libghc-hjsmin-dev
+apt-get -y install locales automake
echo en_US.UTF-8 UTF-8 >> /etc/locale.gen
locale-gen
apt-get clean
-wget http://snapshot.debian.org/archive/debian/20130903T155330Z/pool/main/a/automake-1.14/automake_1.14-1_all.deb
-dpkg -i automake*.deb
-rm *.deb
useradd builder --create-home || true
su builder -c $0-asuser
diff --git a/standalone/android/buildchroot-inchroot-asuser b/standalone/android/buildchroot-inchroot-asuser
index fd27f3fc5..a7bea231a 100755
--- a/standalone/android/buildchroot-inchroot-asuser
+++ b/standalone/android/buildchroot-inchroot-asuser
@@ -13,15 +13,11 @@ fi
cd
rm -rf .ghc .cabal .android
-cabal update
-cabal install happy alex --bindir=$HOME/bin
-PATH=$HOME/bin:$PATH
-export PATH
mkdir -p .android
cd .android
git clone https://github.com/joeyh/ghc-android
cd ghc-android
-git checkout stable-ghc-snapshot
+git checkout jessie-ghc-snapshot
./build
# This saves 2 gb, and the same sources are in build-*/ghc
diff --git a/standalone/android/cabal.config b/standalone/android/cabal.config
new file mode 100644
index 000000000..4eecbfeaf
--- /dev/null
+++ b/standalone/android/cabal.config
@@ -0,0 +1,208 @@
+constraints: Crypto ==4.2.5.1,
+ DAV ==1.0.3,
+ HTTP ==4000.2.17,
+ HUnit ==1.2.5.2,
+ IfElse ==0.85,
+ MissingH ==1.2.1.0,
+ MonadRandom ==0.1.13,
+ QuickCheck ==2.7.6,
+ SHA ==1.6.1,
+ SafeSemaphore ==0.10.1,
+ aeson ==0.7.0.6,
+ ansi-terminal ==0.6.1.1,
+ ansi-wl-pprint ==0.6.7.1,
+ appar ==0.1.4,
+ asn1-encoding ==0.8.1.3,
+ asn1-parse ==0.8.1,
+ asn1-types ==0.2.3,
+ async ==2.0.1.5,
+ attoparsec ==0.11.3.4,
+ attoparsec-conduit ==1.1.0,
+ authenticate ==1.3.2.10,
+ base-unicode-symbols ==0.2.2.4,
+ base16-bytestring ==0.1.1.6,
+ base64-bytestring ==1.0.0.1,
+ bifunctors ==4.1.1.1,
+ bloomfilter ==2.0.0.0,
+ byteable ==0.1.1,
+ byteorder ==1.0.4,
+ case-insensitive ==1.2.0.1,
+ cereal ==0.4.0.1,
+ cipher-aes ==0.2.8,
+ cipher-des ==0.0.6,
+ cipher-rc4 ==0.1.4,
+ clientsession ==0.9.0.3,
+ comonad ==4.2,
+ conduit ==1.1.6,
+ conduit-extra ==1.1.3,
+ connection ==0.2.3,
+ contravariant ==0.6.1.1,
+ cookie ==0.4.1.2,
+ cprng-aes ==0.5.2,
+ crypto-api ==0.13.2,
+ crypto-cipher-types ==0.0.9,
+ crypto-numbers ==0.2.3,
+ crypto-pubkey ==0.2.4,
+ crypto-pubkey-types ==0.4.2.2,
+ crypto-random ==0.0.7,
+ cryptohash ==0.11.6,
+ cryptohash-conduit ==0.1.1,
+ css-text ==0.1.2.1,
+ shakespeare-text ==1.0.2,
+ data-default ==0.5.3,
+ data-default-class ==0.0.1,
+ data-default-instances-base ==0.0.1,
+ data-default-instances-containers ==0.0.1,
+ data-default-instances-dlist ==0.0.1,
+ data-default-instances-old-locale ==0.0.1,
+ dataenc ==0.14.0.7,
+ dbus ==0.10.8,
+ distributive ==0.4.4,
+ dlist ==0.7.0.1,
+ dns ==1.3.0,
+ edit-distance ==0.2.1.2,
+ either ==4.3,
+ email-validate ==1.0.0,
+ entropy ==0.2.1,
+ errors ==1.4.7,
+ exceptions ==0.6.1,
+ failure ==0.2.0.3,
+ fast-logger ==2.1.5,
+ fdo-notify ==0.3.1,
+ feed ==0.3.9.2,
+ file-embed ==0.0.6,
+ fingertree ==0.1.0.0,
+ free ==4.9,
+ gnuidn ==0.2,
+ gnutls ==0.1.4,
+ gsasl ==0.3.5,
+ hS3 ==0.5.7,
+ hamlet ==1.1.9.2,
+ hashable ==1.2.1.0,
+ hinotify ==0.3.5,
+ hjsmin ==0.1.4.7,
+ hslogger ==1.2.1,
+ http-client ==0.3.8.2,
+ http-client-tls ==0.2.2,
+ http-conduit ==2.1.2.3,
+ http-date ==0.0.2,
+ http-types ==0.8.5,
+ hxt ==9.3.1.4,
+ hxt-charproperties ==9.1.1.1,
+ hxt-regex-xmlschema ==9.0.4,
+ hxt-unicode ==9.0.2.2,
+ idna ==0.2,
+ iproute ==1.2.11,
+ json ==0.5,
+ keys ==3.10.1,
+ language-javascript ==0.5.13,
+ lens ==4.4.0.2,
+ libxml-sax ==0.7.5,
+ mime-mail ==0.4.1.2,
+ mime-types ==0.1.0.4,
+ mmorph ==1.0.3,
+ monad-control ==0.3.2.2,
+ monad-logger ==0.3.6.1,
+ monad-loops ==0.4.2.1,
+ monads-tf ==0.1.0.2,
+ mtl ==2.1.2,
+ nats ==0.1.2,
+ network ==2.4.1.2,
+ network-conduit ==1.1.0,
+ network-info ==0.2.0.5,
+ network-multicast ==0.0.10,
+ network-protocol-xmpp ==0.4.6,
+ network-uri ==2.6.0.1,
+ optparse-applicative ==0.10.0,
+ parallel ==3.2.0.4,
+ path-pieces ==0.1.4,
+ pem ==0.2.2,
+ persistent ==1.3.3,
+ persistent-template ==1.3.2.2,
+ pointed ==4.0,
+ prelude-extras ==0.4,
+ profunctors ==4.0.4,
+ publicsuffixlist ==0.1,
+ punycode ==2.0,
+ random ==1.0.1.1,
+ ranges ==0.2.4,
+ reducers ==3.10.2.1,
+ reflection ==1.2.0.1,
+ regex-base ==0.93.2,
+ regex-compat ==0.95.1,
+ regex-posix ==0.95.2,
+ regex-tdfa ==1.2.0,
+ resource-pool ==0.2.1.1,
+ resourcet ==1.1.2.3,
+ safe ==0.3.8,
+ securemem ==0.1.3,
+ semigroupoids ==4.2,
+ semigroups ==0.15.3,
+ shakespeare ==1.2.1.1,
+ shakespeare-css ==1.0.7.4,
+ shakespeare-i18n ==1.0.0.5,
+ shakespeare-js ==1.2.0.4,
+ silently ==1.2.4.1,
+ simple-sendfile ==0.2.14,
+ skein ==1.0.9,
+ socks ==0.5.4,
+ split ==0.2.2,
+ stm ==2.4.2,
+ stm-chans ==3.0.0.2,
+ streaming-commons ==0.1.4.1,
+ stringprep ==0.1.5,
+ stringsearch ==0.3.6.5,
+ syb ==0.4.0,
+ system-fileio ==0.3.14,
+ system-filepath ==0.4.12,
+ tagged ==0.7.2,
+ tagsoup ==0.13.1,
+ tagstream-conduit ==0.5.5.1,
+ tasty ==0.10,
+ tasty-hunit ==0.9,
+ tasty-quickcheck ==0.8.1,
+ tasty-rerun ==1.1.3,
+ text ==1.1.1.0,
+ text-icu ==0.6.3.7,
+ tf-random ==0.5,
+ tls ==1.2.9,
+ transformers ==0.3.0.0,
+ transformers-base ==0.4.1,
+ transformers-compat ==0.3.3.3,
+ unbounded-delays ==0.1.0.8,
+ unix-compat ==0.4.1.3,
+ unix-time ==0.2.2,
+ unordered-containers ==0.2.5.0,
+ utf8-string ==0.3.7,
+ uuid ==1.3.3,
+ vault ==0.3.0.3,
+ vector ==0.10.0.1,
+ void ==0.6.1,
+ wai ==3.0.1.1,
+ wai-app-static ==3.0.0.1,
+ wai-extra ==3.0.1.2,
+ wai-logger ==2.1.1,
+ warp ==3.0.0.5,
+ warp-tls ==3.0.0,
+ word8 ==0.1.1,
+ x509 ==1.4.11,
+ x509-store ==1.4.4,
+ x509-system ==1.4.5,
+ x509-validation ==1.5.0,
+ xml ==1.3.13,
+ xml-conduit ==1.2.1,
+ xml-hamlet ==0.4.0.9,
+ xml-types ==0.3.4,
+ xss-sanitize ==0.3.5.2,
+ yaml ==0.8.9.3,
+ yesod ==1.2.6.1,
+ yesod-auth ==1.3.4.6,
+ yesod-core ==1.2.20.1,
+ yesod-default ==1.2.0,
+ yesod-form ==1.3.16,
+ yesod-persistent ==1.2.3.1,
+ yesod-routes ==1.2.0.7,
+ yesod-static ==1.2.4,
+ zlib ==0.5.4.1,
+ bytestring ==0.10.4.0,
+ scientific ==0.3.3.1
diff --git a/standalone/android/haskell-patches/dns_use-android-net.dns1-command-instead-of-resolv.conf.patch b/standalone/android/haskell-patches/dns_use-android-net.dns1-command-instead-of-resolv.conf.patch
index a00338fab..962a64207 100644
--- a/standalone/android/haskell-patches/dns_use-android-net.dns1-command-instead-of-resolv.conf.patch
+++ b/standalone/android/haskell-patches/dns_use-android-net.dns1-command-instead-of-resolv.conf.patch
@@ -1,20 +1,15 @@
-From aaef1aadb21a198475a656132ef4488b85b8fd1b Mon Sep 17 00:00:00 2001
+From 087f1ae5e17f0e6d7c9f6b4092a5bb5bb6f5bf60 Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Thu, 3 Jul 2014 23:22:47 +0000
-Subject: [PATCH] use android net.dns1 command instead of resolv.conf file
+Date: Thu, 16 Oct 2014 02:59:11 +0000
+Subject: [PATCH] port
-Android has no /etc/resolv.conf. Some might have /system/etc/resolv.conf,
-but even that does not seem likely.
-
-This is likely a little slow, but is at least fine for git-annex's uses,
-since it only uses this library for occasional SRV lookups.
---
- Network/DNS/Resolver.hs | 11 +++++++++--
- dns.cabal | 1 +
- 2 files changed, 10 insertions(+), 2 deletions(-)
+ Network/DNS/Resolver.hs | 13 ++++++++-----
+ dns.cabal | 1 +
+ 2 files changed, 9 insertions(+), 5 deletions(-)
diff --git a/Network/DNS/Resolver.hs b/Network/DNS/Resolver.hs
-index e4124b8..7aca431 100644
+index 5721e03..c4400d1 100644
--- a/Network/DNS/Resolver.hs
+++ b/Network/DNS/Resolver.hs
@@ -19,7 +19,7 @@ module Network.DNS.Resolver (
@@ -26,7 +21,7 @@ index e4124b8..7aca431 100644
import qualified Data.ByteString.Char8 as BS
import Data.Char (isSpace)
import Data.List (isPrefixOf)
-@@ -32,6 +32,7 @@ import Network.Socket (AddrInfoFlag(..), AddrInfo(..), SockAddr(..), PortNumber(
+@@ -32,6 +32,7 @@ import Network.Socket (AddrInfoFlag(..), AddrInfo(..), defaultHints, getAddrInfo
import Prelude hiding (lookup)
import System.Random (getStdRandom, randomR)
import System.Timeout (timeout)
@@ -34,26 +29,28 @@ index e4124b8..7aca431 100644
#if mingw32_HOST_OS == 1
import Network.Socket (send)
-@@ -132,7 +133,13 @@ makeResolvSeed conf = ResolvSeed <$> addr
+@@ -130,10 +131,12 @@ makeResolvSeed conf = ResolvSeed <$> addr
+ where
addr = case resolvInfo conf of
- RCHostName numhost -> makeAddrInfo numhost Nothing
- RCHostPort numhost mport -> makeAddrInfo numhost $ Just mport
-- RCFilePath file -> toAddr <$> readFile file >>= \i -> makeAddrInfo i Nothing
+ RCHostName numhost -> makeAddrInfo numhost
+- RCFilePath file -> toAddr <$> readFile file >>= makeAddrInfo
+- toAddr cs = let l:_ = filter ("nameserver" `isPrefixOf`) $ lines cs
+- in extract l
+- extract = reverse . dropWhile isSpace . reverse . dropWhile isSpace . drop 11
+ RCFilePath file -> do
+ -- Android has no /etc/resolv.conf; use getprop command.
+ ls <- catch (lines <$> readProcess "getprop" ["net.dns1"] []) (const (return []) :: IOException -> IO [String])
-+ let addr = case ls of
++ makeAddrInfo $ case ls of
+ [] -> "8.8.8.8" -- google public dns as a fallback only
+ (l:_) -> l
-+ makeAddrInfo addr Nothing
- toAddr cs = let l:_ = filter ("nameserver" `isPrefixOf`) $ lines cs
- in extract l
- extract = reverse . dropWhile isSpace . reverse . dropWhile isSpace . drop 11
+
+ makeAddrInfo :: HostName -> IO AddrInfo
+ makeAddrInfo addr = do
diff --git a/dns.cabal b/dns.cabal
-index 0a08a9e..724a3e0 100644
+index ceaf5f4..cd15e61 100644
--- a/dns.cabal
+++ b/dns.cabal
-@@ -38,6 +38,7 @@ Library
+@@ -37,6 +37,7 @@ Library
, network >= 2.3
, random
, resourcet
@@ -62,5 +59,5 @@ index 0a08a9e..724a3e0 100644
Build-Depends: base >= 4 && < 5
, attoparsec
--
-1.7.10.4
+2.1.1
diff --git a/standalone/android/haskell-patches/entropy_cross-build.patch b/standalone/android/haskell-patches/entropy_cross-build.patch
deleted file mode 100644
index 37e85ed13..000000000
--- a/standalone/android/haskell-patches/entropy_cross-build.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-From a3cc880bd06a8d7efda79339afa81e02decbd04b Mon Sep 17 00:00:00 2001
-From: dummy <dummy@example.com>
-Date: Mon, 14 Jul 2014 21:01:25 +0000
-Subject: [PATCH] fix cross build
-
----
- entropy.cabal | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/entropy.cabal b/entropy.cabal
-index 914d33a..9ab80f7 100644
---- a/entropy.cabal
-+++ b/entropy.cabal
-@@ -16,7 +16,7 @@ bug-reports: https://github.com/TomMD/entropy/issues
- stability: stable
- -- build-type: Simple
- -- ^^ Used for HaLVM
--build-type: Custom
-+build-type: Simple
- -- ^^ Test for RDRAND support using 'ghc'
- cabal-version: >=1.10
- tested-with: GHC == 7.8.2
---
-1.7.10.4
-
diff --git a/standalone/android/haskell-patches/gnuidn_fix-build-with-new-base.patch b/standalone/android/haskell-patches/gnuidn_fix-build-with-new-base.patch
new file mode 100644
index 000000000..ff9d8f245
--- /dev/null
+++ b/standalone/android/haskell-patches/gnuidn_fix-build-with-new-base.patch
@@ -0,0 +1,50 @@
+From afdec6c9e66211a0ac8419fffe191b059d1fd00c Mon Sep 17 00:00:00 2001
+From: foo <foo@bar>
+Date: Sun, 22 Sep 2013 17:24:33 +0000
+Subject: [PATCH] fix build with new base
+
+---
+ Data/Text/IDN/IDNA.chs | 1 +
+ Data/Text/IDN/Punycode.chs | 1 +
+ Data/Text/IDN/StringPrep.chs | 1 +
+ 3 files changed, 3 insertions(+)
+
+diff --git a/Data/Text/IDN/IDNA.chs b/Data/Text/IDN/IDNA.chs
+index ed29ee4..dbb4ba5 100644
+--- a/Data/Text/IDN/IDNA.chs
++++ b/Data/Text/IDN/IDNA.chs
+@@ -31,6 +31,7 @@ import Foreign
+ import Foreign.C
+
+ import Data.Text.IDN.Internal
++import System.IO.Unsafe
+
+ #include <idna.h>
+ #include <idn-free.h>
+diff --git a/Data/Text/IDN/Punycode.chs b/Data/Text/IDN/Punycode.chs
+index 24b5fa6..4e62555 100644
+--- a/Data/Text/IDN/Punycode.chs
++++ b/Data/Text/IDN/Punycode.chs
+@@ -32,6 +32,7 @@ import Data.List (unfoldr)
+ import qualified Data.ByteString as B
+ import qualified Data.Text as T
+
++import System.IO.Unsafe
+ import Foreign
+ import Foreign.C
+
+diff --git a/Data/Text/IDN/StringPrep.chs b/Data/Text/IDN/StringPrep.chs
+index 752dc9e..5e9fd84 100644
+--- a/Data/Text/IDN/StringPrep.chs
++++ b/Data/Text/IDN/StringPrep.chs
+@@ -39,6 +39,7 @@ import qualified Data.ByteString as B
+ import qualified Data.Text as T
+ import qualified Data.Text.Encoding as TE
+
++import System.IO.Unsafe
+ import Foreign
+ import Foreign.C
+
+--
+1.7.10.4
+
diff --git a/standalone/android/haskell-patches/network_2.4.1.0_0003-configure-misdetects-accept4.patch b/standalone/android/haskell-patches/network_2.4.1.0_0003-configure-misdetects-accept4.patch
index 116fa320e..084d355ba 100644
--- a/standalone/android/haskell-patches/network_2.4.1.0_0003-configure-misdetects-accept4.patch
+++ b/standalone/android/haskell-patches/network_2.4.1.0_0003-configure-misdetects-accept4.patch
@@ -1,26 +1,26 @@
-From 63a7a97511266c1a9d2414d3314ee17fc88bb8f2 Mon Sep 17 00:00:00 2001
+From 478fc7ae42030c1345e75727e54e1f8f895d3e22 Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Fri, 18 Oct 2013 15:58:35 +0000
-Subject: [PATCH] configure misdetects accept4
+Date: Wed, 15 Oct 2014 15:16:21 +0000
+Subject: [PATCH] avoid accept4
---
- Network/Socket.hsc | 4 ++--
+ Network/Socket.hsc | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Network/Socket.hsc b/Network/Socket.hsc
-index 9af31f8..6c21209 100644
+index 2fe62ee..94db7a4 100644
--- a/Network/Socket.hsc
+++ b/Network/Socket.hsc
-@@ -503,7 +503,7 @@ accept sock@(MkSocket s family stype protocol status) = do
- return new_sock
+@@ -511,7 +511,7 @@ accept sock@(MkSocket s family stype protocol status) = do
#else
with (fromIntegral sz) $ \ ptr_len -> do
+ new_sock <-
-# ifdef HAVE_ACCEPT4
+#if 0
- new_sock <- throwSocketErrorIfMinus1RetryMayBlock "accept"
+ throwSocketErrorIfMinus1RetryMayBlock "accept"
(threadWaitRead (fromIntegral s))
(c_accept4 s sockaddr ptr_len (#const SOCK_NONBLOCK))
-@@ -1615,7 +1615,7 @@ foreign import CALLCONV SAFE_ON_WIN "connect"
+@@ -1602,7 +1602,7 @@ foreign import CALLCONV SAFE_ON_WIN "connect"
c_connect :: CInt -> Ptr SockAddr -> CInt{-CSockLen???-} -> IO CInt
foreign import CALLCONV unsafe "accept"
c_accept :: CInt -> Ptr SockAddr -> Ptr CInt{-CSockLen???-} -> IO CInt
@@ -30,5 +30,5 @@ index 9af31f8..6c21209 100644
c_accept4 :: CInt -> Ptr SockAddr -> Ptr CInt{-CSockLen???-} -> CInt -> IO CInt
#endif
--
-1.7.10.4
+2.1.1
diff --git a/standalone/android/haskell-patches/shakespeare-text_remove-TH.patch b/standalone/android/haskell-patches/shakespeare-text_remove-TH.patch
new file mode 100644
index 000000000..ece906f4b
--- /dev/null
+++ b/standalone/android/haskell-patches/shakespeare-text_remove-TH.patch
@@ -0,0 +1,153 @@
+From dca2a30ca06865bf66cd25cc14b06f5d28190231 Mon Sep 17 00:00:00 2001
+From: dummy <dummy@example.com>
+Date: Thu, 16 Oct 2014 02:46:57 +0000
+Subject: [PATCH] remove TH
+
+---
+ Text/Shakespeare/Text.hs | 125 +++++------------------------------------------
+ 1 file changed, 11 insertions(+), 114 deletions(-)
+
+diff --git a/Text/Shakespeare/Text.hs b/Text/Shakespeare/Text.hs
+index 6865a5a..e25a8be 100644
+--- a/Text/Shakespeare/Text.hs
++++ b/Text/Shakespeare/Text.hs
+@@ -7,18 +7,18 @@ module Text.Shakespeare.Text
+ ( TextUrl
+ , ToText (..)
+ , renderTextUrl
+- , stext
+- , text
+- , textFile
+- , textFileDebug
+- , textFileReload
+- , st -- | strict text
+- , lt -- | lazy text, same as stext :)
++ --, stext
++ --, text
++ --, textFile
++ --, textFileDebug
++ --, textFileReload
++ --, st -- | strict text
++ --, lt -- | lazy text, same as stext :)
+ -- * Yesod code generation
+- , codegen
+- , codegenSt
+- , codegenFile
+- , codegenFileReload
++ --, codegen
++ --, codegenSt
++ --, codegenFile
++ --, codegenFileReload
+ ) where
+
+ import Language.Haskell.TH.Quote (QuasiQuoter (..))
+@@ -45,106 +45,3 @@ instance ToText Int32 where toText = toText . show
+ instance ToText Int64 where toText = toText . show
+ instance ToText Int where toText = toText . show
+
+-settings :: Q ShakespeareSettings
+-settings = do
+- toTExp <- [|toText|]
+- wrapExp <- [|id|]
+- unWrapExp <- [|id|]
+- return $ defaultShakespeareSettings { toBuilder = toTExp
+- , wrap = wrapExp
+- , unwrap = unWrapExp
+- }
+-
+-
+-stext, lt, st, text :: QuasiQuoter
+-stext =
+- QuasiQuoter { quoteExp = \s -> do
+- rs <- settings
+- render <- [|toLazyText|]
+- rendered <- shakespeareFromString rs { justVarInterpolation = True } s
+- return (render `AppE` rendered)
+- }
+-lt = stext
+-
+-st =
+- QuasiQuoter { quoteExp = \s -> do
+- rs <- settings
+- render <- [|TL.toStrict . toLazyText|]
+- rendered <- shakespeareFromString rs { justVarInterpolation = True } s
+- return (render `AppE` rendered)
+- }
+-
+-text = QuasiQuoter { quoteExp = \s -> do
+- rs <- settings
+- quoteExp (shakespeare rs) $ filter (/='\r') s
+- }
+-
+-
+-textFile :: FilePath -> Q Exp
+-textFile fp = do
+- rs <- settings
+- shakespeareFile rs fp
+-
+-
+-textFileDebug :: FilePath -> Q Exp
+-textFileDebug = textFileReload
+-{-# DEPRECATED textFileDebug "Please use textFileReload instead" #-}
+-
+-textFileReload :: FilePath -> Q Exp
+-textFileReload fp = do
+- rs <- settings
+- shakespeareFileReload rs fp
+-
+--- | codegen is designed for generating Yesod code, including templates
+--- So it uses different interpolation characters that won't clash with templates.
+-codegenSettings :: Q ShakespeareSettings
+-codegenSettings = do
+- toTExp <- [|toText|]
+- wrapExp <- [|id|]
+- unWrapExp <- [|id|]
+- return $ defaultShakespeareSettings { toBuilder = toTExp
+- , wrap = wrapExp
+- , unwrap = unWrapExp
+- , varChar = '~'
+- , urlChar = '*'
+- , intChar = '&'
+- , justVarInterpolation = True -- always!
+- }
+-
+--- | codegen is designed for generating Yesod code, including templates
+--- So it uses different interpolation characters that won't clash with templates.
+--- You can use the normal text quasiquoters to generate code
+-codegen :: QuasiQuoter
+-codegen =
+- QuasiQuoter { quoteExp = \s -> do
+- rs <- codegenSettings
+- render <- [|toLazyText|]
+- rendered <- shakespeareFromString rs { justVarInterpolation = True } s
+- return (render `AppE` rendered)
+- }
+-
+--- | Generates strict Text
+--- codegen is designed for generating Yesod code, including templates
+--- So it uses different interpolation characters that won't clash with templates.
+-codegenSt :: QuasiQuoter
+-codegenSt =
+- QuasiQuoter { quoteExp = \s -> do
+- rs <- codegenSettings
+- render <- [|TL.toStrict . toLazyText|]
+- rendered <- shakespeareFromString rs { justVarInterpolation = True } s
+- return (render `AppE` rendered)
+- }
+-
+-codegenFileReload :: FilePath -> Q Exp
+-codegenFileReload fp = do
+- rs <- codegenSettings
+- render <- [|TL.toStrict . toLazyText|]
+- rendered <- shakespeareFileReload rs{ justVarInterpolation = True } fp
+- return (render `AppE` rendered)
+-
+-codegenFile :: FilePath -> Q Exp
+-codegenFile fp = do
+- rs <- codegenSettings
+- render <- [|TL.toStrict . toLazyText|]
+- rendered <- shakespeareFile rs{ justVarInterpolation = True } fp
+- return (render `AppE` rendered)
+--
+2.1.1
+
diff --git a/standalone/android/haskell-patches/unix-time_hack-for-Bionic.patch b/standalone/android/haskell-patches/unix-time_hack-for-Bionic.patch
index 4955d45cd..16c4f92a2 100644
--- a/standalone/android/haskell-patches/unix-time_hack-for-Bionic.patch
+++ b/standalone/android/haskell-patches/unix-time_hack-for-Bionic.patch
@@ -1,19 +1,18 @@
-From add5feeb9ee9b4ffa1b43e4ba04b63e5ac2bfaf7 Mon Sep 17 00:00:00 2001
+From db9eb179885874af342bb2c3adef7185496ba1f1 Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Mon, 14 Jul 2014 20:45:24 +0000
+Date: Wed, 15 Oct 2014 16:37:32 +0000
Subject: [PATCH] hack for bionic
---
- Data/UnixTime/Types.hsc | 12 ------------
- cbits/conv.c | 2 +-
- unix-time.cabal | 1 -
- 3 files changed, 1 insertion(+), 14 deletions(-)
+ Data/UnixTime/Types.hsc | 12 ------------
+ cbits/conv.c | 2 +-
+ 2 files changed, 1 insertion(+), 13 deletions(-)
diff --git a/Data/UnixTime/Types.hsc b/Data/UnixTime/Types.hsc
-index 2ad0623..04fd766 100644
+index d30f39b..ec7ca4c 100644
--- a/Data/UnixTime/Types.hsc
+++ b/Data/UnixTime/Types.hsc
-@@ -12,8 +12,6 @@ import Data.Binary
+@@ -9,8 +9,6 @@ import Foreign.Storable
#include <sys/time.h>
@@ -22,7 +21,7 @@ index 2ad0623..04fd766 100644
-- |
-- Data structure for Unix time.
data UnixTime = UnixTime {
-@@ -23,16 +21,6 @@ data UnixTime = UnixTime {
+@@ -20,16 +18,6 @@ data UnixTime = UnixTime {
, utMicroSeconds :: {-# UNPACK #-} !Int32
} deriving (Eq,Ord,Show)
@@ -36,9 +35,9 @@ index 2ad0623..04fd766 100644
- (#poke struct timeval, tv_sec) ptr (utSeconds ut)
- (#poke struct timeval, tv_usec) ptr (utMicroSeconds ut)
-
- #if __GLASGOW_HASKELL__ >= 704
- instance Binary UnixTime where
- put (UnixTime (CTime sec) msec) = do
+ -- |
+ -- Format of the strptime()/strftime() style.
+ type Format = ByteString
diff --git a/cbits/conv.c b/cbits/conv.c
index ec31fef..b7bc0f9 100644
--- a/cbits/conv.c
@@ -52,18 +51,6 @@ index ec31fef..b7bc0f9 100644
}
size_t c_format_unix_time(char *fmt, time_t src, char* dst, int siz) {
-diff --git a/unix-time.cabal b/unix-time.cabal
-index 5de3f7c..7a0c244 100644
---- a/unix-time.cabal
-+++ b/unix-time.cabal
-@@ -15,7 +15,6 @@ Extra-Tmp-Files: config.log config.status autom4te.cache cbits/config.h
- Library
- Default-Language: Haskell2010
- GHC-Options: -Wall
-- CC-Options: -fPIC
- Exposed-Modules: Data.UnixTime
- Other-Modules: Data.UnixTime.Conv
- Data.UnixTime.Diff
--
-1.7.10.4
+2.1.1
diff --git a/standalone/android/haskell-patches/x509-system_support-Android-cert-store.patch b/standalone/android/haskell-patches/x509-system_support-Android-cert-store.patch
index b3aa407df..14ed66089 100644
--- a/standalone/android/haskell-patches/x509-system_support-Android-cert-store.patch
+++ b/standalone/android/haskell-patches/x509-system_support-Android-cert-store.patch
@@ -1,36 +1,27 @@
-From 2c736615e38ee4f582af9d98d7169cf07b84d875 Mon Sep 17 00:00:00 2001
-From: Joey Hess <joey@kitenet.net>
-Date: Mon, 10 Feb 2014 23:27:32 +0000
+From 61d0e47cd038f25157e48385fc080d0d374b214d Mon Sep 17 00:00:00 2001
+From: dummy <dummy@example.com>
+Date: Tue, 14 Oct 2014 02:07:57 +0000
Subject: [PATCH] support Android cert store
-Android puts it in a different place and has only hashed files.
+Android has only hashed cert files.
See https://github.com/vincenthz/hs-certificate/issues/19
---
- System/X509/Unix.hs | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
+ System/X509/Unix.hs | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/System/X509/Unix.hs b/System/X509/Unix.hs
-index cbf9bbe..cab4f4a 100644
+index 9df3331..a30da26 100644
--- a/System/X509/Unix.hs
+++ b/System/X509/Unix.hs
-@@ -34,7 +34,7 @@ import qualified Control.Exception as E
- import Data.Char
-
- defaultSystemPath :: FilePath
--defaultSystemPath = "/etc/ssl/certs/"
-+defaultSystemPath = "/system/etc/security/cacerts/"
-
- envPathOverride :: String
- envPathOverride = "SYSTEM_CERTIFICATE_PATH"
-@@ -46,7 +46,7 @@ listDirectoryCerts path = (map (path </>) . filter isCert <$> getDirectoryConten
+@@ -56,7 +56,7 @@ listDirectoryCerts path = do
&& isDigit (s !! 9)
&& (s !! 8) == '.'
&& all isHexDigit (take 8 s)
- isCert x = (not $ isPrefixOf "." x) && (not $ isHashedFile x)
+ isCert x = (not $ isPrefixOf "." x)
- getSystemCertificateStore :: IO CertificateStore
- getSystemCertificateStore = makeCertificateStore . concat <$> (getSystemPath >>= listDirectoryCerts >>= mapM readCertificates)
+ getDirContents = E.catch (Just <$> getDirectoryContents path) emptyPaths
+ where emptyPaths :: E.IOException -> IO (Maybe [FilePath])
--
1.7.10.4
diff --git a/standalone/android/install-haskell-packages b/standalone/android/install-haskell-packages
index a7ebbc115..3cd6d3e6a 100755
--- a/standalone/android/install-haskell-packages
+++ b/standalone/android/install-haskell-packages
@@ -4,13 +4,10 @@
#
# You should install ghc-android first.
#
-# Note that the newest version of packages are installed.
-# It attempts to reuse patches for older versions, but
-# new versions of packages often break cross-compilation by adding TH,
-# etc
-#
-# Future work: Convert to using the method used here:
-# https://github.com/kaoskorobase/ghc-ios-cabal-scripts/
+# The cabal.config is used to pin the haskell packages to the last
+# versions that have been gotten working. To update, delete the
+# cabal.config, run this script with an empty cabal and fix up the broken
+# patches, and then use cabal freeze to generate a new cabal.config.
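+#
+# Roughly (illustrative only):
+#   rm cabal.config
+#   ./install-haskell-packages   # fix up any patches that fail to apply
+#   cabal freeze                 # then regenerate cabal.config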
set -e
@@ -18,35 +15,19 @@ if [ ! -d haskell-patches ]; then
cd standalone/android
fi
-cabalopts="$@"
-
setupcabal () {
- cabal update
-
- # Workaround for http://www.reddit.com/r/haskell/comments/26045a/if_youre_finding_cabal_cant_build_your_project/
- # should be able to remove this eventually.
- cabal install transformers-compat -fthree
- cabal install mtl-2.1.3.1
-
# Some packages fail to install in a non unicode locale.
LANG=en_US.UTF-8
export LANG
-
- # The android build chroot has recent versions of alex and happy
- # installed here.
- PATH=$HOME/bin:$PATH
- export PATH
-}
-
-cabalinstall () {
- echo cabal install "$@" "$cabalopts"
- eval cabal install "$@" "$cabalopts"
}
patched () {
pkg=$1
ver=$2
if [ -z "$ver" ]; then
+ ver="$(grep " $pkg " ../cabal.config | cut -d= -f 3 | sed 's/,$//')"
+ fi
+ if [ -z "$ver" ]; then
cabal unpack $pkg
else
cabal unpack $pkg-$ver
@@ -57,6 +38,7 @@ patched () {
git config user.email dummy@example.com
git add .
git commit -m "pre-patched state of $pkg"
+ ln -sf ../../cabal.config
for patch in ../../haskell-patches/${pkg}_* ../../../no-th/haskell-patches/${pkg}_*; do
if [ -e "$patch" ]; then
echo trying $patch
@@ -67,15 +49,24 @@ patched () {
fi
fi
done
- cabalinstall
+ if [ -e config.sub ]; then
+ cp /usr/share/misc/config.sub .
+ fi
+ if [ -e config.guess ]; then
+ cp /usr/share/misc/config.guess .
+ fi
+ cabal install # --reinstall --force-reinstalls
+ rm -f cabal.config
+
rm -rf $pkg*
cd ..
}
installgitannexdeps () {
pushd ../..
- echo cabal install --only-dependencies "$@"
+ ln -sf standalone/android/cabal.config
cabal install --only-dependencies "$@"
+ rm -f cabal.config
popd
}
@@ -83,7 +74,8 @@ install_pkgs () {
rm -rf tmp
mkdir tmp
cd tmp
-
+cat <<EOF
+EOF
patched network
patched unix-time
patched lifted-base
@@ -94,7 +86,7 @@ install_pkgs () {
patched iproute
patched primitive
patched socks
- patched entropy
+ # patched entropy # needed for newer version, not current pinned version
patched vector
patched stm-chans
patched persistent
@@ -105,11 +97,13 @@ install_pkgs () {
patched x509-system
patched persistent-template
patched system-filepath
+ patched optparse-applicative
patched wai-app-static
patched shakespeare
patched shakespeare-css
patched shakespeare-js
patched yesod-routes
+ patched hamlet
patched yesod-core
patched yesod-persistent
patched yesod-form
@@ -124,23 +118,19 @@ install_pkgs () {
patched dns
patched gnutls
patched unbounded-delays
+ patched gnuidn
+ patched network-protocol-xmpp
cd ..
installgitannexdeps -fAndroid -f-Pairing
}
-echo
-echo
-echo native build
-echo
-setupcabal
-installgitannexdeps
+# native cabal needs its own update
+cabal update
-echo
-echo
-echo cross build
-echo
PATH=$HOME/.ghc/$(cat abiversion)/bin:$HOME/.ghc/$(cat abiversion)/arm-linux-androideabi/bin:$PATH
setupcabal
+cabal update
+
install_pkgs
diff --git a/standalone/android/term.patch b/standalone/android/term.patch
index efea69564..c0ceefd74 100644
--- a/standalone/android/term.patch
+++ b/standalone/android/term.patch
@@ -585,7 +585,7 @@ index 57219c3..79b45ef 100755
# Make sure target-11 is installed
-$ANDROID update sdk -u -t android-11
-+$ANDROID update sdk -u -t android-18
++$ANDROID update sdk -u -t android-19
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ATE_ROOT="$( cd $DIR/.. && pwd )"
@@ -594,5 +594,5 @@ index 57219c3..79b45ef 100755
PROJECT_DIR="$( dirname "$PROJECT_FILE" )"
echo "Updating $PROJECT_FILE"
- $ANDROID update project -p "$PROJECT_DIR" --target android-11
-+ $ANDROID update project -p "$PROJECT_DIR" --target android-18
++ $ANDROID update project -p "$PROJECT_DIR" --target android-19
done
diff --git a/standalone/no-th/haskell-patches/DAV_build-without-TH.patch b/standalone/no-th/haskell-patches/DAV_build-without-TH.patch
index cc730ebbd..6d17d634e 100644
--- a/standalone/no-th/haskell-patches/DAV_build-without-TH.patch
+++ b/standalone/no-th/haskell-patches/DAV_build-without-TH.patch
@@ -1,19 +1,19 @@
-From 8e115228601a97b19d3f713ccf2d13f58838d927 Mon Sep 17 00:00:00 2001
+From e54cfacbb9fb24f75d3d93cd8ee6da67b161574f Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Mon, 26 May 2014 01:48:22 +0000
-Subject: [PATCH] expand TH
+Date: Thu, 16 Oct 2014 02:51:28 +0000
+Subject: [PATCH] remove TH
---
- DAV.cabal | 24 +---
- Network/Protocol/HTTP/DAV.hs | 96 ++++++++++++----
- Network/Protocol/HTTP/DAV/TH.hs | 232 ++++++++++++++++++++++++++++++++++++++-
- 3 files changed, 307 insertions(+), 45 deletions(-)
+ DAV.cabal | 28 +----
+ Network/Protocol/HTTP/DAV.hs | 92 +++++++++++++---
+ Network/Protocol/HTTP/DAV/TH.hs | 232 +++++++++++++++++++++++++++++++++++++++-
+ 3 files changed, 306 insertions(+), 46 deletions(-)
diff --git a/DAV.cabal b/DAV.cabal
-index 5d50e39..f2abf89 100644
+index 95fffd8..5669c51 100644
--- a/DAV.cabal
+++ b/DAV.cabal
-@@ -43,30 +43,7 @@ library
+@@ -47,33 +47,7 @@ library
, utf8-string
, xml-conduit >= 1.0 && < 1.3
, xml-hamlet >= 0.4 && < 0.5
@@ -26,7 +26,7 @@ index 5d50e39..f2abf89 100644
- , case-insensitive >= 0.4
- , containers
- , data-default
-- , either >= 4.1
+- , either >= 4.3
- , errors
- , exceptions
- , http-client >= 0.2
@@ -34,13 +34,16 @@ index 5d50e39..f2abf89 100644
- , http-types >= 0.7
- , lens >= 3.0
- , mtl >= 2.1
-- , network >= 2.3
-- , optparse-applicative >= 0.5.0
+- , optparse-applicative >= 0.10.0
- , transformers >= 0.3
- , transformers-base
- , utf8-string
- , xml-conduit >= 1.0 && < 1.3
- , xml-hamlet >= 0.4 && < 0.5
+- if flag(network-uri)
+- build-depends: network-uri >= 2.6, network >= 2.6
+- else
+- build-depends: network >= 2.3 && <2.6
+ , text
source-repository head
@@ -412,3 +415,6 @@ index 0ecd476..1653bf6 100644
+ __userAgent_a3kh)
+ Data.Functor.<$> (_f_a3k7 __userAgent'_a3kg))
+{-# INLINE userAgent #-}
+--
+2.1.1
+
diff --git a/standalone/no-th/haskell-patches/hamlet_hack_TH.patch b/standalone/no-th/haskell-patches/hamlet_hack_TH.patch
new file mode 100644
index 000000000..c4e11ca82
--- /dev/null
+++ b/standalone/no-th/haskell-patches/hamlet_hack_TH.patch
@@ -0,0 +1,205 @@
+From 0509d4383c328c20be61cf3e3bbc98a0a1161588 Mon Sep 17 00:00:00 2001
+From: dummy <dummy@example.com>
+Date: Thu, 16 Oct 2014 02:21:17 +0000
+Subject: [PATCH] hack TH
+
+---
+ Text/Hamlet.hs | 86 +++++++++++++++++-----------------------------------
+ Text/Hamlet/Parse.hs | 3 +-
+ 2 files changed, 29 insertions(+), 60 deletions(-)
+
+diff --git a/Text/Hamlet.hs b/Text/Hamlet.hs
+index 9500ecb..ec8471a 100644
+--- a/Text/Hamlet.hs
++++ b/Text/Hamlet.hs
+@@ -11,36 +11,36 @@
+ module Text.Hamlet
+ ( -- * Plain HTML
+ Html
+- , shamlet
+- , shamletFile
+- , xshamlet
+- , xshamletFile
++ --, shamlet
++ --, shamletFile
++ --, xshamlet
++ --, xshamletFile
+ -- * Hamlet
+ , HtmlUrl
+- , hamlet
+- , hamletFile
+- , hamletFileReload
+- , ihamletFileReload
+- , xhamlet
+- , xhamletFile
++ --, hamlet
++ --, hamletFile
++ --, hamletFileReload
++ --, ihamletFileReload
++ --, xhamlet
++ --, xhamletFile
+ -- * I18N Hamlet
+ , HtmlUrlI18n
+- , ihamlet
+- , ihamletFile
++ --, ihamlet
++ --, ihamletFile
+ -- * Type classes
+ , ToAttributes (..)
+ -- * Internal, for making more
+ , HamletSettings (..)
+ , NewlineStyle (..)
+- , hamletWithSettings
+- , hamletFileWithSettings
++ --, hamletWithSettings
++ --, hamletFileWithSettings
+ , defaultHamletSettings
+ , xhtmlHamletSettings
+- , Env (..)
+- , HamletRules (..)
+- , hamletRules
+- , ihamletRules
+- , htmlRules
++ --, Env (..)
++ --, HamletRules (..)
++ --, hamletRules
++ --, ihamletRules
++ --, htmlRules
+ , CloseStyle (..)
+ -- * Used by generated code
+ , condH
+@@ -110,47 +110,9 @@ type HtmlUrl url = Render url -> Html
+ -- | A function generating an 'Html' given a message translator and a URL rendering function.
+ type HtmlUrlI18n msg url = Translate msg -> Render url -> Html
+
+-docsToExp :: Env -> HamletRules -> Scope -> [Doc] -> Q Exp
+-docsToExp env hr scope docs = do
+- exps <- mapM (docToExp env hr scope) docs
+- case exps of
+- [] -> [|return ()|]
+- [x] -> return x
+- _ -> return $ DoE $ map NoBindS exps
+-
+ unIdent :: Ident -> String
+ unIdent (Ident s) = s
+
+-bindingPattern :: Binding -> Q (Pat, [(Ident, Exp)])
+-bindingPattern (BindAs i@(Ident s) b) = do
+- name <- newName s
+- (pattern, scope) <- bindingPattern b
+- return (AsP name pattern, (i, VarE name):scope)
+-bindingPattern (BindVar i@(Ident s))
+- | all isDigit s = do
+- return (LitP $ IntegerL $ read s, [])
+- | otherwise = do
+- name <- newName s
+- return (VarP name, [(i, VarE name)])
+-bindingPattern (BindTuple is) = do
+- (patterns, scopes) <- fmap unzip $ mapM bindingPattern is
+- return (TupP patterns, concat scopes)
+-bindingPattern (BindList is) = do
+- (patterns, scopes) <- fmap unzip $ mapM bindingPattern is
+- return (ListP patterns, concat scopes)
+-bindingPattern (BindConstr con is) = do
+- (patterns, scopes) <- fmap unzip $ mapM bindingPattern is
+- return (ConP (mkConName con) patterns, concat scopes)
+-bindingPattern (BindRecord con fields wild) = do
+- let f (Ident field,b) =
+- do (p,s) <- bindingPattern b
+- return ((mkName field,p),s)
+- (patterns, scopes) <- fmap unzip $ mapM f fields
+- (patterns1, scopes1) <- if wild
+- then bindWildFields con $ map fst fields
+- else return ([],[])
+- return (RecP (mkConName con) (patterns++patterns1), concat scopes ++ scopes1)
+-
+ mkConName :: DataConstr -> Name
+ mkConName = mkName . conToStr
+
+@@ -158,6 +120,7 @@ conToStr :: DataConstr -> String
+ conToStr (DCUnqualified (Ident x)) = x
+ conToStr (DCQualified (Module xs) (Ident x)) = intercalate "." $ xs ++ [x]
+
++{-
+ -- Wildcards bind all of the unbound fields to variables whose name
+ -- matches the field name.
+ --
+@@ -296,10 +259,12 @@ hamlet = hamletWithSettings hamletRules defaultHamletSettings
+
+ xhamlet :: QuasiQuoter
+ xhamlet = hamletWithSettings hamletRules xhtmlHamletSettings
++-}
+
+ asHtmlUrl :: HtmlUrl url -> HtmlUrl url
+ asHtmlUrl = id
+
++{-
+ hamletRules :: Q HamletRules
+ hamletRules = do
+ i <- [|id|]
+@@ -360,6 +325,7 @@ hamletFromString :: Q HamletRules -> HamletSettings -> String -> Q Exp
+ hamletFromString qhr set s = do
+ hr <- qhr
+ hrWithEnv hr $ \env -> docsToExp env hr [] $ docFromString set s
++-}
+
+ docFromString :: HamletSettings -> String -> [Doc]
+ docFromString set s =
+@@ -367,6 +333,7 @@ docFromString set s =
+ Error s' -> error s'
+ Ok (_, d) -> d
+
++{-
+ hamletFileWithSettings :: Q HamletRules -> HamletSettings -> FilePath -> Q Exp
+ hamletFileWithSettings qhr set fp = do
+ #ifdef GHC_7_4
+@@ -408,6 +375,7 @@ strToExp s@(c:_)
+ | isUpper c = ConE $ mkName s
+ | otherwise = VarE $ mkName s
+ strToExp "" = error "strToExp on empty string"
++-}
+
+ -- | Checks for truth in the left value in each pair in the first argument. If
+ -- a true exists, then the corresponding right action is performed. Only the
+@@ -452,7 +420,7 @@ hamletUsedIdentifiers settings =
+ data HamletRuntimeRules = HamletRuntimeRules {
+ hrrI18n :: Bool
+ }
+-
++{-
+ hamletFileReloadWithSettings :: HamletRuntimeRules
+ -> HamletSettings -> FilePath -> Q Exp
+ hamletFileReloadWithSettings hrr settings fp = do
+@@ -479,7 +447,7 @@ hamletFileReloadWithSettings hrr settings fp = do
+ c VTUrlParam = [|EUrlParam|]
+ c VTMixin = [|\r -> EMixin $ \c -> r c|]
+ c VTMsg = [|EMsg|]
+-
++-}
+ -- move to Shakespeare.Base?
+ readFileUtf8 :: FilePath -> IO String
+ readFileUtf8 fp = fmap TL.unpack $ readUtf8File fp
+diff --git a/Text/Hamlet/Parse.hs b/Text/Hamlet/Parse.hs
+index b7e2954..1f14946 100644
+--- a/Text/Hamlet/Parse.hs
++++ b/Text/Hamlet/Parse.hs
+@@ -616,6 +616,7 @@ data NewlineStyle = NoNewlines -- ^ never add newlines
+ | DefaultNewlineStyle
+ deriving Show
+
++{-
+ instance Lift NewlineStyle where
+ lift NoNewlines = [|NoNewlines|]
+ lift NewlinesText = [|NewlinesText|]
+@@ -627,7 +628,7 @@ instance Lift (String -> CloseStyle) where
+
+ instance Lift HamletSettings where
+ lift (HamletSettings a b c d) = [|HamletSettings $(lift a) $(lift b) $(lift c) $(lift d)|]
+-
++-}
+
+ htmlEmptyTags :: Set String
+ htmlEmptyTags = Set.fromAscList
+--
+2.1.1
+
diff --git a/standalone/no-th/haskell-patches/lens_no-TH.patch b/standalone/no-th/haskell-patches/lens_no-TH.patch
index 7fdd70639..bc453bfa1 100644
--- a/standalone/no-th/haskell-patches/lens_no-TH.patch
+++ b/standalone/no-th/haskell-patches/lens_no-TH.patch
@@ -1,20 +1,20 @@
-From bc312c7431877b3b788de5e7ce5ee743be73c0ba Mon Sep 17 00:00:00 2001
+From 10c9ade98b3ac2054947f411d77db2eb28896b9f Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Tue, 10 Jun 2014 22:13:58 +0000
-Subject: [PATCH] remove TH
+Date: Thu, 16 Oct 2014 01:43:10 +0000
+Subject: [PATCH] avoid TH
---
- lens.cabal | 19 +------------------
+ lens.cabal | 17 +----------------
src/Control/Lens.hs | 8 ++------
src/Control/Lens/Cons.hs | 2 --
src/Control/Lens/Internal/Fold.hs | 2 --
src/Control/Lens/Operators.hs | 2 +-
src/Control/Lens/Prism.hs | 2 --
src/Control/Monad/Primitive/Lens.hs | 1 -
- 7 files changed, 4 insertions(+), 32 deletions(-)
+ 7 files changed, 4 insertions(+), 30 deletions(-)
diff --git a/lens.cabal b/lens.cabal
-index d70c2f4..28af768 100644
+index 5388301..d7b02b9 100644
--- a/lens.cabal
+++ b/lens.cabal
@@ -10,7 +10,7 @@ stability: provisional
@@ -26,7 +26,7 @@ index d70c2f4..28af768 100644
-- build-tools: cpphs
tested-with: GHC == 7.4.1, GHC == 7.4.2, GHC == 7.6.3, GHC == 7.8.1, GHC == 7.8.2
synopsis: Lenses, Folds and Traversals
-@@ -220,7 +220,6 @@ library
+@@ -217,7 +217,6 @@ library
Control.Exception.Lens
Control.Lens
Control.Lens.Action
@@ -34,7 +34,16 @@ index d70c2f4..28af768 100644
Control.Lens.Combinators
Control.Lens.Cons
Control.Lens.Each
-@@ -248,29 +247,24 @@ library
+@@ -234,8 +233,6 @@ library
+ Control.Lens.Internal.Context
+ Control.Lens.Internal.Deque
+ Control.Lens.Internal.Exception
+- Control.Lens.Internal.FieldTH
+- Control.Lens.Internal.PrismTH
+ Control.Lens.Internal.Fold
+ Control.Lens.Internal.Getter
+ Control.Lens.Internal.Indexed
+@@ -247,25 +244,21 @@ library
Control.Lens.Internal.Reflection
Control.Lens.Internal.Review
Control.Lens.Internal.Setter
@@ -60,11 +69,7 @@ index d70c2f4..28af768 100644
Control.Monad.Primitive.Lens
Control.Parallel.Strategies.Lens
Control.Seq.Lens
-- Data.Aeson.Lens
- Data.Array.Lens
- Data.Bits.Lens
- Data.ByteString.Lens
-@@ -293,17 +287,10 @@ library
+@@ -291,12 +284,8 @@ library
Data.Typeable.Lens
Data.Vector.Lens
Data.Vector.Generic.Lens
@@ -76,13 +81,8 @@ index d70c2f4..28af768 100644
- Language.Haskell.TH.Lens
Numeric.Lens
-- other-modules:
-- Control.Lens.Internal.TupleIxedTH
--
- cpp-options: -traditional
-
- if flag(safe)
-@@ -405,7 +392,6 @@ test-suite doctests
+ other-modules:
+@@ -403,7 +392,6 @@ test-suite doctests
deepseq,
doctest >= 0.9.1,
filepath,
@@ -90,7 +90,7 @@ index d70c2f4..28af768 100644
mtl,
nats,
parallel,
-@@ -443,7 +429,6 @@ benchmark plated
+@@ -441,7 +429,6 @@ benchmark plated
comonad,
criterion,
deepseq,
@@ -98,7 +98,7 @@ index d70c2f4..28af768 100644
lens,
transformers
-@@ -478,7 +463,6 @@ benchmark unsafe
+@@ -476,7 +463,6 @@ benchmark unsafe
comonads-fd,
criterion,
deepseq,
@@ -106,7 +106,7 @@ index d70c2f4..28af768 100644
lens,
transformers
-@@ -495,6 +479,5 @@ benchmark zipper
+@@ -493,6 +479,5 @@ benchmark zipper
comonads-fd,
criterion,
deepseq,
@@ -201,10 +201,10 @@ index 9992e63..631e8e6 100644
, ( # )
-- * "Control.Lens.Setter"
diff --git a/src/Control/Lens/Prism.hs b/src/Control/Lens/Prism.hs
-index 9e0bec7..0cf6737 100644
+index b75c870..c6c6596 100644
--- a/src/Control/Lens/Prism.hs
+++ b/src/Control/Lens/Prism.hs
-@@ -59,8 +59,6 @@ import Unsafe.Coerce
+@@ -61,8 +61,6 @@ import Unsafe.Coerce
import Data.Profunctor.Unsafe
#endif
@@ -226,5 +226,5 @@ index ee942c6..2f37134 100644
prim :: (PrimMonad m) => Iso' (m a) (State# (PrimState m) -> (# State# (PrimState m), a #))
prim = iso internal primitive
--
-2.0.0
+2.1.1
diff --git a/standalone/no-th/haskell-patches/optparse-applicative_remove-ANN.patch b/standalone/no-th/haskell-patches/optparse-applicative_remove-ANN.patch
new file mode 100644
index 000000000..1bb843524
--- /dev/null
+++ b/standalone/no-th/haskell-patches/optparse-applicative_remove-ANN.patch
@@ -0,0 +1,33 @@
+From b128590966d4946219e45e2efd88acf7a354abc2 Mon Sep 17 00:00:00 2001
+From: androidbuilder <androidbuilder@example.com>
+Date: Tue, 14 Oct 2014 02:28:02 +0000
+Subject: [PATCH] remove ANN
+
+---
+ Options/Applicative.hs | 2 --
+ Options/Applicative/Help/Core.hs | 2 --
+ 2 files changed, 4 deletions(-)
+
+diff --git a/Options/Applicative.hs b/Options/Applicative.hs
+index bd4129d..f412062 100644
+--- a/Options/Applicative.hs
++++ b/Options/Applicative.hs
+@@ -34,5 +34,3 @@ import Options.Applicative.Common
+ import Options.Applicative.Builder
+ import Options.Applicative.Builder.Completer
+ import Options.Applicative.Extra
+-
+-{-# ANN module "HLint: ignore Use import/export shortcut" #-}
+diff --git a/Options/Applicative/Help/Core.hs b/Options/Applicative/Help/Core.hs
+index 0a79169..3f1ce3f 100644
+--- a/Options/Applicative/Help/Core.hs
++++ b/Options/Applicative/Help/Core.hs
+@@ -139,5 +139,3 @@ parserUsage pprefs p progn = hsep
+ [ string "Usage:"
+ , string progn
+ , align (extractChunk (briefDesc pprefs p)) ]
+-
+-{-# ANN footerHelp "HLint: ignore Eta reduce" #-}
+--
+1.7.10.4
+
diff --git a/standalone/no-th/haskell-patches/persistent-template_stub-out.patch b/standalone/no-th/haskell-patches/persistent-template_stub-out.patch
index 29002eb32..f3ee63e06 100644
--- a/standalone/no-th/haskell-patches/persistent-template_stub-out.patch
+++ b/standalone/no-th/haskell-patches/persistent-template_stub-out.patch
@@ -1,25 +1,25 @@
-From 4b958f97bffdeedc0c946d5fdc9749d2cc566fcc Mon Sep 17 00:00:00 2001
+From e6542197f1da6984bb6cd3310dba77363dfab2d9 Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Thu, 26 Dec 2013 15:54:37 -0400
+Date: Thu, 16 Oct 2014 01:51:02 +0000
Subject: [PATCH] stub out
---
- persistent-template.cabal | 2 +-
+ persistent-template.cabal | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/persistent-template.cabal b/persistent-template.cabal
-index c4aee68..7905278 100644
+index 59b4149..e11b418 100644
--- a/persistent-template.cabal
+++ b/persistent-template.cabal
-@@ -24,7 +24,7 @@ library
+@@ -26,7 +26,7 @@ library
, aeson
, monad-logger
, unordered-containers
- exposed-modules: Database.Persist.TH
-+ exposed-modules:
++ exposed-modules:
ghc-options: -Wall
if impl(ghc >= 7.4)
cpp-options: -DGHC_7_4
--
-1.7.10.4
+2.1.1
diff --git a/standalone/no-th/haskell-patches/persistent_1.1.5.1_0001-disable-TH.patch b/standalone/no-th/haskell-patches/persistent_1.1.5.1_0001-disable-TH.patch
index 7a66e1fd1..cd86ccd2d 100644
--- a/standalone/no-th/haskell-patches/persistent_1.1.5.1_0001-disable-TH.patch
+++ b/standalone/no-th/haskell-patches/persistent_1.1.5.1_0001-disable-TH.patch
@@ -1,14 +1,14 @@
-From efd18199fa245e51e6137036062ded8b0b26f78c Mon Sep 17 00:00:00 2001
+From aae3ace106cf26c931cc94c96fb6fbfe83f950f2 Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Tue, 17 Dec 2013 18:08:22 +0000
-Subject: [PATCH] disable TH
+Date: Wed, 15 Oct 2014 17:05:37 +0000
+Subject: [PATCH] avoid TH
---
Database/Persist/Sql/Raw.hs | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/Database/Persist/Sql/Raw.hs b/Database/Persist/Sql/Raw.hs
-index 73189dd..d432790 100644
+index 3ac2ca9..bcc2011 100644
--- a/Database/Persist/Sql/Raw.hs
+++ b/Database/Persist/Sql/Raw.hs
@@ -11,7 +11,7 @@ import Data.IORef (writeIORef, readIORef, newIORef)
@@ -20,7 +20,7 @@ index 73189dd..d432790 100644
import Data.Int (Int64)
import Control.Monad.Trans.Class (lift)
import qualified Data.Text as T
-@@ -22,7 +22,6 @@ rawQuery :: (MonadSqlPersist m, MonadResource m)
+@@ -23,7 +23,6 @@ rawQuery :: (MonadSqlPersist m, MonadResource m)
-> [PersistValue]
-> Source m [PersistValue]
rawQuery sql vals = do
@@ -28,7 +28,7 @@ index 73189dd..d432790 100644
conn <- lift askSqlConn
bracketP
(getStmtConn conn sql)
-@@ -34,7 +33,6 @@ rawExecute x y = liftM (const ()) $ rawExecuteCount x y
+@@ -35,7 +34,6 @@ rawExecute x y = liftM (const ()) $ rawExecuteCount x y
rawExecuteCount :: MonadSqlPersist m => Text -> [PersistValue] -> m Int64
rawExecuteCount sql vals = do
@@ -37,5 +37,5 @@ index 73189dd..d432790 100644
res <- liftIO $ stmtExecute stmt vals
liftIO $ stmtReset stmt
--
-1.8.5.1
+2.1.1
diff --git a/standalone/no-th/haskell-patches/process-conduit_avoid-TH.patch b/standalone/no-th/haskell-patches/process-conduit_avoid-TH.patch
index 8fa07e85a..875119afd 100644
--- a/standalone/no-th/haskell-patches/process-conduit_avoid-TH.patch
+++ b/standalone/no-th/haskell-patches/process-conduit_avoid-TH.patch
@@ -1,24 +1,24 @@
-From 7e85a025349877565a70c375ef55508f215eaaf8 Mon Sep 17 00:00:00 2001
+From ed77588c57704030a9d412dd49f11c172c6268ab Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Wed, 21 May 2014 04:23:49 +0000
-Subject: [PATCH] remove TH
+Date: Tue, 14 Oct 2014 03:46:03 +0000
+Subject: [PATCH] unused
---
- process-conduit.cabal | 1 -
+ process-conduit.cabal | 1 -
1 file changed, 1 deletion(-)
diff --git a/process-conduit.cabal b/process-conduit.cabal
-index e6988e0..a2e03e0 100644
+index 34bb168..2f137a8 100644
--- a/process-conduit.cabal
+++ b/process-conduit.cabal
-@@ -24,7 +24,6 @@ source-repository head
+@@ -22,7 +22,6 @@ source-repository head
library
- exposed-modules: Data.Conduit.Process
+ exposed-modules: Data.Conduit.ProcessOld
- System.Process.QQ
build-depends: base == 4.*
, template-haskell >= 2.4
--
-2.0.0.rc2
+1.7.10.4
diff --git a/standalone/no-th/haskell-patches/shakespeare-css_remove_TH.patch b/standalone/no-th/haskell-patches/shakespeare-css_remove_TH.patch
new file mode 100644
index 000000000..82e2c6420
--- /dev/null
+++ b/standalone/no-th/haskell-patches/shakespeare-css_remove_TH.patch
@@ -0,0 +1,366 @@
+From 657fa7135bbcf3d5adb3cc0032e09887dd80a2a7 Mon Sep 17 00:00:00 2001
+From: dummy <dummy@example.com>
+Date: Thu, 16 Oct 2014 02:05:14 +0000
+Subject: [PATCH] hack TH
+
+---
+ Text/Cassius.hs | 23 --------
+ Text/Css.hs | 151 --------------------------------------------------
+ Text/CssCommon.hs | 4 --
+ Text/Lucius.hs | 46 +--------------
+ shakespeare-css.cabal | 2 +-
+ 5 files changed, 3 insertions(+), 223 deletions(-)
+
+diff --git a/Text/Cassius.hs b/Text/Cassius.hs
+index 91fc90f..c515807 100644
+--- a/Text/Cassius.hs
++++ b/Text/Cassius.hs
+@@ -13,10 +13,6 @@ module Text.Cassius
+ , renderCss
+ , renderCssUrl
+ -- * Parsing
+- , cassius
+- , cassiusFile
+- , cassiusFileDebug
+- , cassiusFileReload
+ -- * ToCss instances
+ -- ** Color
+ , Color (..)
+@@ -27,11 +23,8 @@ module Text.Cassius
+ , AbsoluteUnit (..)
+ , AbsoluteSize (..)
+ , absoluteSize
+- , EmSize (..)
+- , ExSize (..)
+ , PercentageSize (..)
+ , percentageSize
+- , PixelSize (..)
+ -- * Internal
+ , cassiusUsedIdentifiers
+ ) where
+@@ -43,25 +36,9 @@ import Language.Haskell.TH.Quote (QuasiQuoter (..))
+ import Language.Haskell.TH.Syntax
+ import qualified Data.Text.Lazy as TL
+ import Text.CssCommon
+-import Text.Lucius (lucius)
+ import qualified Text.Lucius
+ import Text.IndentToBrace (i2b)
+
+-cassius :: QuasiQuoter
+-cassius = QuasiQuoter { quoteExp = quoteExp lucius . i2b }
+-
+-cassiusFile :: FilePath -> Q Exp
+-cassiusFile fp = do
+-#ifdef GHC_7_4
+- qAddDependentFile fp
+-#endif
+- contents <- fmap TL.unpack $ qRunIO $ readUtf8File fp
+- quoteExp cassius contents
+-
+-cassiusFileDebug, cassiusFileReload :: FilePath -> Q Exp
+-cassiusFileDebug = cssFileDebug True [|Text.Lucius.parseTopLevels|] Text.Lucius.parseTopLevels
+-cassiusFileReload = cassiusFileDebug
+-
+ -- | Determine which identifiers are used by the given template, useful for
+ -- creating systems like yesod devel.
+ cassiusUsedIdentifiers :: String -> [(Deref, VarType)]
+diff --git a/Text/Css.hs b/Text/Css.hs
+index 75dc549..20c206c 100644
+--- a/Text/Css.hs
++++ b/Text/Css.hs
+@@ -166,22 +166,6 @@ cssUsedIdentifiers toi2b parseBlocks s' =
+ (scope, rest') = go rest
+ go' (Attr k v) = k ++ v
+
+-cssFileDebug :: Bool -- ^ perform the indent-to-brace conversion
+- -> Q Exp
+- -> Parser [TopLevel Unresolved]
+- -> FilePath
+- -> Q Exp
+-cssFileDebug toi2b parseBlocks' parseBlocks fp = do
+- s <- fmap TL.unpack $ qRunIO $ readUtf8File fp
+-#ifdef GHC_7_4
+- qAddDependentFile fp
+-#endif
+- let vs = cssUsedIdentifiers toi2b parseBlocks s
+- c <- mapM vtToExp vs
+- cr <- [|cssRuntime toi2b|]
+- parseBlocks'' <- parseBlocks'
+- return $ cr `AppE` parseBlocks'' `AppE` (LitE $ StringL fp) `AppE` ListE c
+-
+ combineSelectors :: HasLeadingSpace
+ -> [Contents]
+ -> [Contents]
+@@ -287,18 +271,6 @@ cssRuntime toi2b parseBlocks fp cd render' = unsafePerformIO $ do
+
+ addScope scope = map (DerefIdent . Ident *** CDPlain . fromString) scope ++ cd
+
+-vtToExp :: (Deref, VarType) -> Q Exp
+-vtToExp (d, vt) = do
+- d' <- lift d
+- c' <- c vt
+- return $ TupE [d', c' `AppE` derefToExp [] d]
+- where
+- c :: VarType -> Q Exp
+- c VTPlain = [|CDPlain . toCss|]
+- c VTUrl = [|CDUrl|]
+- c VTUrlParam = [|CDUrlParam|]
+- c VTMixin = [|CDMixin|]
+-
+ getVars :: Monad m => [(String, String)] -> Content -> m [(Deref, VarType)]
+ getVars _ ContentRaw{} = return []
+ getVars scope (ContentVar d) =
+@@ -342,111 +314,8 @@ compressBlock (Block x y blocks mixins) =
+ cc (ContentRaw a:ContentRaw b:c) = cc $ ContentRaw (a ++ b) : c
+ cc (a:b) = a : cc b
+
+-blockToMixin :: Name
+- -> Scope
+- -> Block Unresolved
+- -> Q Exp
+-blockToMixin r scope (Block _sel props subblocks mixins) =
+- [|Mixin
+- { mixinAttrs = concat
+- $ $(listE $ map go props)
+- : map mixinAttrs $mixinsE
+- -- FIXME too many complications to implement sublocks for now...
+- , mixinBlocks = [] -- foldr (.) id $(listE $ map subGo subblocks) []
+- }|]
+- {-
+- . foldr (.) id $(listE $ map subGo subblocks)
+- . (concatMap mixinBlocks $mixinsE ++)
+- |]
+- -}
+- where
+- mixinsE = return $ ListE $ map (derefToExp []) mixins
+- go (Attr x y) = conE 'Attr
+- `appE` (contentsToBuilder r scope x)
+- `appE` (contentsToBuilder r scope y)
+- subGo (Block sel' b c d) = blockToCss r scope $ Block sel' b c d
+-
+-blockToCss :: Name
+- -> Scope
+- -> Block Unresolved
+- -> Q Exp
+-blockToCss r scope (Block sel props subblocks mixins) =
+- [|((Block
+- { blockSelector = $(selectorToBuilder r scope sel)
+- , blockAttrs = concat
+- $ $(listE $ map go props)
+- : map mixinAttrs $mixinsE
+- , blockBlocks = ()
+- , blockMixins = ()
+- } :: Block Resolved):)
+- . foldr (.) id $(listE $ map subGo subblocks)
+- . (concatMap mixinBlocks $mixinsE ++)
+- |]
+- where
+- mixinsE = return $ ListE $ map (derefToExp []) mixins
+- go (Attr x y) = conE 'Attr
+- `appE` (contentsToBuilder r scope x)
+- `appE` (contentsToBuilder r scope y)
+- subGo (hls, Block sel' b c d) =
+- blockToCss r scope $ Block sel'' b c d
+- where
+- sel'' = combineSelectors hls sel sel'
+-
+-selectorToBuilder :: Name -> Scope -> [Contents] -> Q Exp
+-selectorToBuilder r scope sels =
+- contentsToBuilder r scope $ intercalate [ContentRaw ","] sels
+-
+-contentsToBuilder :: Name -> Scope -> [Content] -> Q Exp
+-contentsToBuilder r scope contents =
+- appE [|mconcat|] $ listE $ map (contentToBuilder r scope) contents
+-
+-contentToBuilder :: Name -> Scope -> Content -> Q Exp
+-contentToBuilder _ _ (ContentRaw x) =
+- [|fromText . pack|] `appE` litE (StringL x)
+-contentToBuilder _ scope (ContentVar d) =
+- case d of
+- DerefIdent (Ident s)
+- | Just val <- lookup s scope -> [|fromText . pack|] `appE` litE (StringL val)
+- _ -> [|toCss|] `appE` return (derefToExp [] d)
+-contentToBuilder r _ (ContentUrl u) =
+- [|fromText|] `appE`
+- (varE r `appE` return (derefToExp [] u) `appE` listE [])
+-contentToBuilder r _ (ContentUrlParam u) =
+- [|fromText|] `appE`
+- ([|uncurry|] `appE` varE r `appE` return (derefToExp [] u))
+-contentToBuilder _ _ ContentMixin{} = error "contentToBuilder on ContentMixin"
+-
+ type Scope = [(String, String)]
+
+-topLevelsToCassius :: [TopLevel Unresolved]
+- -> Q Exp
+-topLevelsToCassius a = do
+- r <- newName "_render"
+- lamE [varP r] $ appE [|CssNoWhitespace . foldr ($) []|] $ fmap ListE $ go r [] a
+- where
+- go _ _ [] = return []
+- go r scope (TopBlock b:rest) = do
+- e <- [|(++) $ map TopBlock ($(blockToCss r scope b) [])|]
+- es <- go r scope rest
+- return $ e : es
+- go r scope (TopAtBlock name s b:rest) = do
+- let s' = contentsToBuilder r scope s
+- e <- [|(:) $ TopAtBlock $(lift name) $(s') $(blocksToCassius r scope b)|]
+- es <- go r scope rest
+- return $ e : es
+- go r scope (TopAtDecl dec cs:rest) = do
+- e <- [|(:) $ TopAtDecl $(lift dec) $(contentsToBuilder r scope cs)|]
+- es <- go r scope rest
+- return $ e : es
+- go r scope (TopVar k v:rest) = go r ((k, v) : scope) rest
+-
+-blocksToCassius :: Name
+- -> Scope
+- -> [Block Unresolved]
+- -> Q Exp
+-blocksToCassius r scope a = do
+- appE [|foldr ($) []|] $ listE $ map (blockToCss r scope) a
+-
+ renderCss :: Css -> TL.Text
+ renderCss css =
+ toLazyText $ mconcat $ map go tops
+@@ -515,23 +384,3 @@ renderBlock haveWhiteSpace indent (Block sel attrs () ())
+ | haveWhiteSpace = fromString ";\n"
+ | otherwise = singleton ';'
+
+-instance Lift Mixin where
+- lift (Mixin a b) = [|Mixin a b|]
+-instance Lift (Attr Unresolved) where
+- lift (Attr k v) = [|Attr k v :: Attr Unresolved |]
+-instance Lift (Attr Resolved) where
+- lift (Attr k v) = [|Attr $(liftBuilder k) $(liftBuilder v) :: Attr Resolved |]
+-
+-liftBuilder :: Builder -> Q Exp
+-liftBuilder b = [|fromText $ pack $(lift $ TL.unpack $ toLazyText b)|]
+-
+-instance Lift Content where
+- lift (ContentRaw s) = [|ContentRaw s|]
+- lift (ContentVar d) = [|ContentVar d|]
+- lift (ContentUrl d) = [|ContentUrl d|]
+- lift (ContentUrlParam d) = [|ContentUrlParam d|]
+- lift (ContentMixin m) = [|ContentMixin m|]
+-instance Lift (Block Unresolved) where
+- lift (Block a b c d) = [|Block a b c d|]
+-instance Lift (Block Resolved) where
+- lift (Block a b () ()) = [|Block $(liftBuilder a) b () ()|]
+diff --git a/Text/CssCommon.hs b/Text/CssCommon.hs
+index 719e0a8..8c40e8c 100644
+--- a/Text/CssCommon.hs
++++ b/Text/CssCommon.hs
+@@ -1,4 +1,3 @@
+-{-# LANGUAGE TemplateHaskell #-}
+ {-# LANGUAGE GeneralizedNewtypeDeriving #-}
+ {-# LANGUAGE FlexibleInstances #-}
+ {-# LANGUAGE CPP #-}
+@@ -156,6 +155,3 @@ showSize :: Rational -> String -> String
+ showSize value' unit = printf "%f" value ++ unit
+ where value = fromRational value' :: Double
+
+-mkSizeType "EmSize" "em"
+-mkSizeType "ExSize" "ex"
+-mkSizeType "PixelSize" "px"
+diff --git a/Text/Lucius.hs b/Text/Lucius.hs
+index 346883d..f38492b 100644
+--- a/Text/Lucius.hs
++++ b/Text/Lucius.hs
+@@ -8,13 +8,9 @@
+ {-# OPTIONS_GHC -fno-warn-missing-fields #-}
+ module Text.Lucius
+ ( -- * Parsing
+- lucius
+- , luciusFile
+- , luciusFileDebug
+- , luciusFileReload
+ -- ** Mixins
+- , luciusMixin
+- , Mixin
++ -- luciusMixin
++ Mixin
+ -- ** Runtime
+ , luciusRT
+ , luciusRT'
+@@ -40,11 +36,8 @@ module Text.Lucius
+ , AbsoluteUnit (..)
+ , AbsoluteSize (..)
+ , absoluteSize
+- , EmSize (..)
+- , ExSize (..)
+ , PercentageSize (..)
+ , percentageSize
+- , PixelSize (..)
+ -- * Internal
+ , parseTopLevels
+ , luciusUsedIdentifiers
+@@ -67,18 +60,6 @@ import Data.List (isSuffixOf)
+ import Control.Arrow (second)
+ import Text.Shakespeare (VarType)
+
+--- |
+---
+--- >>> renderCss ([lucius|foo{bar:baz}|] undefined)
+--- "foo{bar:baz}"
+-lucius :: QuasiQuoter
+-lucius = QuasiQuoter { quoteExp = luciusFromString }
+-
+-luciusFromString :: String -> Q Exp
+-luciusFromString s =
+- topLevelsToCassius
+- $ either (error . show) id $ parse parseTopLevels s s
+-
+ whiteSpace :: Parser ()
+ whiteSpace = many whiteSpace1 >> return ()
+
+@@ -218,17 +199,6 @@ parseComment = do
+ _ <- manyTill anyChar $ try $ string "*/"
+ return $ ContentRaw ""
+
+-luciusFile :: FilePath -> Q Exp
+-luciusFile fp = do
+-#ifdef GHC_7_4
+- qAddDependentFile fp
+-#endif
+- contents <- fmap TL.unpack $ qRunIO $ readUtf8File fp
+- luciusFromString contents
+-
+-luciusFileDebug, luciusFileReload :: FilePath -> Q Exp
+-luciusFileDebug = cssFileDebug False [|parseTopLevels|] parseTopLevels
+-luciusFileReload = luciusFileDebug
+
+ parseTopLevels :: Parser [TopLevel Unresolved]
+ parseTopLevels =
+@@ -377,15 +347,3 @@ luciusRTMinified tl scope = either Left (Right . renderCss . CssNoWhitespace) $
+ -- creating systems like yesod devel.
+ luciusUsedIdentifiers :: String -> [(Deref, VarType)]
+ luciusUsedIdentifiers = cssUsedIdentifiers False parseTopLevels
+-
+-luciusMixin :: QuasiQuoter
+-luciusMixin = QuasiQuoter { quoteExp = luciusMixinFromString }
+-
+-luciusMixinFromString :: String -> Q Exp
+-luciusMixinFromString s' = do
+- r <- newName "_render"
+- case fmap compressBlock $ parse parseBlock s s of
+- Left e -> error $ show e
+- Right block -> blockToMixin r [] block
+- where
+- s = concat ["mixin{", s', "}"]
+diff --git a/shakespeare-css.cabal b/shakespeare-css.cabal
+index 2d3b25a..cc0553c 100644
+--- a/shakespeare-css.cabal
++++ b/shakespeare-css.cabal
+@@ -35,8 +35,8 @@ library
+
+ exposed-modules: Text.Cassius
+ Text.Lucius
+- other-modules: Text.MkSizeType
+ Text.Css
++ other-modules: Text.MkSizeType
+ Text.IndentToBrace
+ Text.CssCommon
+ ghc-options: -Wall
+--
+2.1.1
+
diff --git a/standalone/no-th/haskell-patches/shakespeare-js_hack_TH.patch b/standalone/no-th/haskell-patches/shakespeare-js_hack_TH.patch
new file mode 100644
index 000000000..905467130
--- /dev/null
+++ b/standalone/no-th/haskell-patches/shakespeare-js_hack_TH.patch
@@ -0,0 +1,316 @@
+From 26f7328b0123d3ffa66873b91189ba3bdae3356c Mon Sep 17 00:00:00 2001
+From: dummy <dummy@example.com>
+Date: Thu, 16 Oct 2014 02:07:32 +0000
+Subject: [PATCH] hack TH
+
+---
+ Text/Coffee.hs | 56 ++++-----------------------------------------
+ Text/Julius.hs | 67 +++++++++---------------------------------------------
+ Text/Roy.hs | 51 ++++-------------------------------------
+ Text/TypeScript.hs | 51 ++++-------------------------------------
+ 4 files changed, 24 insertions(+), 201 deletions(-)
+
+diff --git a/Text/Coffee.hs b/Text/Coffee.hs
+index 488c81b..61db85b 100644
+--- a/Text/Coffee.hs
++++ b/Text/Coffee.hs
+@@ -51,13 +51,13 @@ module Text.Coffee
+ -- ** Template-Reading Functions
+ -- | These QuasiQuoter and Template Haskell methods return values of
+ -- type @'JavascriptUrl' url@. See the Yesod book for details.
+- coffee
+- , coffeeFile
+- , coffeeFileReload
+- , coffeeFileDebug
++ -- coffee
++ --, coffeeFile
++ --, coffeeFileReload
++ --, coffeeFileDebug
+
+ #ifdef TEST_EXPORT
+- , coffeeSettings
++ --, coffeeSettings
+ #endif
+ ) where
+
+@@ -65,49 +65,3 @@ import Language.Haskell.TH.Quote (QuasiQuoter (..))
+ import Language.Haskell.TH.Syntax
+ import Text.Shakespeare
+ import Text.Julius
+-
+-coffeeSettings :: Q ShakespeareSettings
+-coffeeSettings = do
+- jsettings <- javascriptSettings
+- return $ jsettings { varChar = '%'
+- , preConversion = Just PreConvert {
+- preConvert = ReadProcess "coffee" ["-spb"]
+- , preEscapeIgnoreBalanced = "'\"`" -- don't insert backtacks for variable already inside strings or backticks.
+- , preEscapeIgnoreLine = "#" -- ignore commented lines
+- , wrapInsertion = Just WrapInsertion {
+- wrapInsertionIndent = Just " "
+- , wrapInsertionStartBegin = "("
+- , wrapInsertionSeparator = ", "
+- , wrapInsertionStartClose = ") =>"
+- , wrapInsertionEnd = ""
+- , wrapInsertionAddParens = False
+- }
+- }
+- }
+-
+--- | Read inline, quasiquoted CoffeeScript.
+-coffee :: QuasiQuoter
+-coffee = QuasiQuoter { quoteExp = \s -> do
+- rs <- coffeeSettings
+- quoteExp (shakespeare rs) s
+- }
+-
+--- | Read in a CoffeeScript template file. This function reads the file once, at
+--- compile time.
+-coffeeFile :: FilePath -> Q Exp
+-coffeeFile fp = do
+- rs <- coffeeSettings
+- shakespeareFile rs fp
+-
+--- | Read in a CoffeeScript template file. This impure function uses
+--- unsafePerformIO to re-read the file on every call, allowing for rapid
+--- iteration.
+-coffeeFileReload :: FilePath -> Q Exp
+-coffeeFileReload fp = do
+- rs <- coffeeSettings
+- shakespeareFileReload rs fp
+-
+--- | Deprecated synonym for 'coffeeFileReload'
+-coffeeFileDebug :: FilePath -> Q Exp
+-coffeeFileDebug = coffeeFileReload
+-{-# DEPRECATED coffeeFileDebug "Please use coffeeFileReload instead." #-}
+diff --git a/Text/Julius.hs b/Text/Julius.hs
+index ec30690..5b5a075 100644
+--- a/Text/Julius.hs
++++ b/Text/Julius.hs
+@@ -14,17 +14,17 @@ module Text.Julius
+ -- ** Template-Reading Functions
+ -- | These QuasiQuoter and Template Haskell methods return values of
+ -- type @'JavascriptUrl' url@. See the Yesod book for details.
+- js
+- , julius
+- , juliusFile
+- , jsFile
+- , juliusFileDebug
+- , jsFileDebug
+- , juliusFileReload
+- , jsFileReload
++ -- js
++ -- julius
++ -- juliusFile
++ -- jsFile
++ --, juliusFileDebug
++ --, jsFileDebug
++ --, juliusFileReload
++ --, jsFileReload
+
+ -- * Datatypes
+- , JavascriptUrl
++ JavascriptUrl
+ , Javascript (..)
+ , RawJavascript (..)
+
+@@ -37,9 +37,9 @@ module Text.Julius
+ , renderJavascriptUrl
+
+ -- ** internal, used by 'Text.Coffee'
+- , javascriptSettings
++ --, javascriptSettings
+ -- ** internal
+- , juliusUsedIdentifiers
++ --, juliusUsedIdentifiers
+ , asJavascriptUrl
+ ) where
+
+@@ -102,48 +102,3 @@ instance RawJS TL.Text where rawJS = RawJavascript . fromLazyText
+ instance RawJS Builder where rawJS = RawJavascript
+ instance RawJS Bool where rawJS = RawJavascript . unJavascript . toJavascript
+
+-javascriptSettings :: Q ShakespeareSettings
+-javascriptSettings = do
+- toJExp <- [|toJavascript|]
+- wrapExp <- [|Javascript|]
+- unWrapExp <- [|unJavascript|]
+- asJavascriptUrl' <- [|asJavascriptUrl|]
+- return $ defaultShakespeareSettings { toBuilder = toJExp
+- , wrap = wrapExp
+- , unwrap = unWrapExp
+- , modifyFinalValue = Just asJavascriptUrl'
+- }
+-
+-js, julius :: QuasiQuoter
+-js = QuasiQuoter { quoteExp = \s -> do
+- rs <- javascriptSettings
+- quoteExp (shakespeare rs) s
+- }
+-
+-julius = js
+-
+-jsFile, juliusFile :: FilePath -> Q Exp
+-jsFile fp = do
+- rs <- javascriptSettings
+- shakespeareFile rs fp
+-
+-juliusFile = jsFile
+-
+-
+-jsFileReload, juliusFileReload :: FilePath -> Q Exp
+-jsFileReload fp = do
+- rs <- javascriptSettings
+- shakespeareFileReload rs fp
+-
+-juliusFileReload = jsFileReload
+-
+-jsFileDebug, juliusFileDebug :: FilePath -> Q Exp
+-juliusFileDebug = jsFileReload
+-{-# DEPRECATED juliusFileDebug "Please use juliusFileReload instead." #-}
+-jsFileDebug = jsFileReload
+-{-# DEPRECATED jsFileDebug "Please use jsFileReload instead." #-}
+-
+--- | Determine which identifiers are used by the given template, useful for
+--- creating systems like yesod devel.
+-juliusUsedIdentifiers :: String -> [(Deref, VarType)]
+-juliusUsedIdentifiers = shakespeareUsedIdentifiers defaultShakespeareSettings
+diff --git a/Text/Roy.hs b/Text/Roy.hs
+index 6e5e246..9ab0dbc 100644
+--- a/Text/Roy.hs
++++ b/Text/Roy.hs
+@@ -39,12 +39,12 @@ module Text.Roy
+ -- ** Template-Reading Functions
+ -- | These QuasiQuoter and Template Haskell methods return values of
+ -- type @'JavascriptUrl' url@. See the Yesod book for details.
+- roy
+- , royFile
+- , royFileReload
++ -- roy
++ --, royFile
++ --, royFileReload
+
+ #ifdef TEST_EXPORT
+- , roySettings
++ --, roySettings
+ #endif
+ ) where
+
+@@ -53,46 +53,3 @@ import Language.Haskell.TH.Syntax
+ import Text.Shakespeare
+ import Text.Julius
+
+--- | The Roy language compiles down to Javascript.
+--- We do this compilation once at compile time to avoid needing to do it during the request.
+--- We call this a preConversion because other shakespeare modules like Lucius use Haskell to compile during the request instead rather than a system call.
+-roySettings :: Q ShakespeareSettings
+-roySettings = do
+- jsettings <- javascriptSettings
+- return $ jsettings { varChar = '#'
+- , preConversion = Just PreConvert {
+- preConvert = ReadProcess "roy" ["--stdio", "--browser"]
+- , preEscapeIgnoreBalanced = "'\""
+- , preEscapeIgnoreLine = "//"
+- , wrapInsertion = Just WrapInsertion {
+- wrapInsertionIndent = Just " "
+- , wrapInsertionStartBegin = "(\\"
+- , wrapInsertionSeparator = " "
+- , wrapInsertionStartClose = " ->\n"
+- , wrapInsertionEnd = ")"
+- , wrapInsertionAddParens = True
+- }
+- }
+- }
+-
+--- | Read inline, quasiquoted Roy.
+-roy :: QuasiQuoter
+-roy = QuasiQuoter { quoteExp = \s -> do
+- rs <- roySettings
+- quoteExp (shakespeare rs) s
+- }
+-
+--- | Read in a Roy template file. This function reads the file once, at
+--- compile time.
+-royFile :: FilePath -> Q Exp
+-royFile fp = do
+- rs <- roySettings
+- shakespeareFile rs fp
+-
+--- | Read in a Roy template file. This impure function uses
+--- unsafePerformIO to re-read the file on every call, allowing for rapid
+--- iteration.
+-royFileReload :: FilePath -> Q Exp
+-royFileReload fp = do
+- rs <- roySettings
+- shakespeareFileReload rs fp
+diff --git a/Text/TypeScript.hs b/Text/TypeScript.hs
+index 70c8820..5be994a 100644
+--- a/Text/TypeScript.hs
++++ b/Text/TypeScript.hs
+@@ -57,12 +57,12 @@ module Text.TypeScript
+ -- ** Template-Reading Functions
+ -- | These QuasiQuoter and Template Haskell methods return values of
+ -- type @'JavascriptUrl' url@. See the Yesod book for details.
+- tsc
+- , typeScriptFile
+- , typeScriptFileReload
++ -- tsc
++ --, typeScriptFile
++ --, typeScriptFileReload
+
+ #ifdef TEST_EXPORT
+- , typeScriptSettings
++ --, typeScriptSettings
+ #endif
+ ) where
+
+@@ -71,46 +71,3 @@ import Language.Haskell.TH.Syntax
+ import Text.Shakespeare
+ import Text.Julius
+
+--- | The TypeScript language compiles down to Javascript.
+--- We do this compilation once at compile time to avoid needing to do it during the request.
+--- We call this a preConversion because other shakespeare modules like Lucius use Haskell to compile during the request instead rather than a system call.
+-typeScriptSettings :: Q ShakespeareSettings
+-typeScriptSettings = do
+- jsettings <- javascriptSettings
+- return $ jsettings { varChar = '#'
+- , preConversion = Just PreConvert {
+- preConvert = ReadProcess "sh" ["-c", "TMP_IN=$(mktemp XXXXXXXXXX.ts); TMP_OUT=$(mktemp XXXXXXXXXX.js); cat /dev/stdin > ${TMP_IN} && tsc --out ${TMP_OUT} ${TMP_IN} && cat ${TMP_OUT}; rm ${TMP_IN} && rm ${TMP_OUT}"]
+- , preEscapeIgnoreBalanced = "'\""
+- , preEscapeIgnoreLine = "//"
+- , wrapInsertion = Just WrapInsertion {
+- wrapInsertionIndent = Nothing
+- , wrapInsertionStartBegin = ";(function("
+- , wrapInsertionSeparator = ", "
+- , wrapInsertionStartClose = "){"
+- , wrapInsertionEnd = "})"
+- , wrapInsertionAddParens = False
+- }
+- }
+- }
+-
+--- | Read inline, quasiquoted TypeScript
+-tsc :: QuasiQuoter
+-tsc = QuasiQuoter { quoteExp = \s -> do
+- rs <- typeScriptSettings
+- quoteExp (shakespeare rs) s
+- }
+-
+--- | Read in a TypeScript template file. This function reads the file once, at
+--- compile time.
+-typeScriptFile :: FilePath -> Q Exp
+-typeScriptFile fp = do
+- rs <- typeScriptSettings
+- shakespeareFile rs fp
+-
+--- | Read in a Roy template file. This impure function uses
+--- unsafePerformIO to re-read the file on every call, allowing for rapid
+--- iteration.
+-typeScriptFileReload :: FilePath -> Q Exp
+-typeScriptFileReload fp = do
+- rs <- typeScriptSettings
+- shakespeareFileReload rs fp
+--
+2.1.1
+
diff --git a/standalone/no-th/haskell-patches/shakespeare_remove-TH.patch b/standalone/no-th/haskell-patches/shakespeare_remove-TH.patch
index 86022ec3d..940514756 100644
--- a/standalone/no-th/haskell-patches/shakespeare_remove-TH.patch
+++ b/standalone/no-th/haskell-patches/shakespeare_remove-TH.patch
@@ -1,791 +1,18 @@
-From 6de4e75bfbfccb8aedcbf3ee75e5d544f1eeeca5 Mon Sep 17 00:00:00 2001
+From 38a22dae4f7f9726379fdaa3f85d78d75eee9d8e Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Thu, 3 Jul 2014 21:48:14 +0000
-Subject: [PATCH] remove TH
+Date: Thu, 16 Oct 2014 02:01:22 +0000
+Subject: [PATCH] hack TH
---
- Text/Cassius.hs | 23 ------
- Text/Coffee.hs | 56 ++-------------
- Text/Css.hs | 151 ---------------------------------------
- Text/CssCommon.hs | 4 --
- Text/Hamlet.hs | 86 +++++++---------------
- Text/Hamlet/Parse.hs | 3 +-
- Text/Julius.hs | 67 +++--------------
- Text/Lucius.hs | 46 +-----------
- Text/Roy.hs | 51 ++-----------
- Text/Shakespeare.hs | 70 +++---------------
- Text/Shakespeare/Base.hs | 28 --------
- Text/Shakespeare/I18N.hs | 178 ++--------------------------------------------
- Text/Shakespeare/Text.hs | 125 +++-----------------------------
- shakespeare.cabal | 3 +-
- 14 files changed, 78 insertions(+), 813 deletions(-)
+ Text/Shakespeare.hs | 70 ++++++++----------------------------------------
+ Text/Shakespeare/Base.hs | 28 -------------------
+ 2 files changed, 11 insertions(+), 87 deletions(-)
-diff --git a/Text/Cassius.hs b/Text/Cassius.hs
-index 91fc90f..c515807 100644
---- a/Text/Cassius.hs
-+++ b/Text/Cassius.hs
-@@ -13,10 +13,6 @@ module Text.Cassius
- , renderCss
- , renderCssUrl
- -- * Parsing
-- , cassius
-- , cassiusFile
-- , cassiusFileDebug
-- , cassiusFileReload
- -- * ToCss instances
- -- ** Color
- , Color (..)
-@@ -27,11 +23,8 @@ module Text.Cassius
- , AbsoluteUnit (..)
- , AbsoluteSize (..)
- , absoluteSize
-- , EmSize (..)
-- , ExSize (..)
- , PercentageSize (..)
- , percentageSize
-- , PixelSize (..)
- -- * Internal
- , cassiusUsedIdentifiers
- ) where
-@@ -43,25 +36,9 @@ import Language.Haskell.TH.Quote (QuasiQuoter (..))
- import Language.Haskell.TH.Syntax
- import qualified Data.Text.Lazy as TL
- import Text.CssCommon
--import Text.Lucius (lucius)
- import qualified Text.Lucius
- import Text.IndentToBrace (i2b)
-
--cassius :: QuasiQuoter
--cassius = QuasiQuoter { quoteExp = quoteExp lucius . i2b }
--
--cassiusFile :: FilePath -> Q Exp
--cassiusFile fp = do
--#ifdef GHC_7_4
-- qAddDependentFile fp
--#endif
-- contents <- fmap TL.unpack $ qRunIO $ readUtf8File fp
-- quoteExp cassius contents
--
--cassiusFileDebug, cassiusFileReload :: FilePath -> Q Exp
--cassiusFileDebug = cssFileDebug True [|Text.Lucius.parseTopLevels|] Text.Lucius.parseTopLevels
--cassiusFileReload = cassiusFileDebug
--
- -- | Determine which identifiers are used by the given template, useful for
- -- creating systems like yesod devel.
- cassiusUsedIdentifiers :: String -> [(Deref, VarType)]
-diff --git a/Text/Coffee.hs b/Text/Coffee.hs
-index 488c81b..61db85b 100644
---- a/Text/Coffee.hs
-+++ b/Text/Coffee.hs
-@@ -51,13 +51,13 @@ module Text.Coffee
- -- ** Template-Reading Functions
- -- | These QuasiQuoter and Template Haskell methods return values of
- -- type @'JavascriptUrl' url@. See the Yesod book for details.
-- coffee
-- , coffeeFile
-- , coffeeFileReload
-- , coffeeFileDebug
-+ -- coffee
-+ --, coffeeFile
-+ --, coffeeFileReload
-+ --, coffeeFileDebug
-
- #ifdef TEST_EXPORT
-- , coffeeSettings
-+ --, coffeeSettings
- #endif
- ) where
-
-@@ -65,49 +65,3 @@ import Language.Haskell.TH.Quote (QuasiQuoter (..))
- import Language.Haskell.TH.Syntax
- import Text.Shakespeare
- import Text.Julius
--
--coffeeSettings :: Q ShakespeareSettings
--coffeeSettings = do
-- jsettings <- javascriptSettings
-- return $ jsettings { varChar = '%'
-- , preConversion = Just PreConvert {
-- preConvert = ReadProcess "coffee" ["-spb"]
-- , preEscapeIgnoreBalanced = "'\"`" -- don't insert backtacks for variable already inside strings or backticks.
-- , preEscapeIgnoreLine = "#" -- ignore commented lines
-- , wrapInsertion = Just WrapInsertion {
-- wrapInsertionIndent = Just " "
-- , wrapInsertionStartBegin = "("
-- , wrapInsertionSeparator = ", "
-- , wrapInsertionStartClose = ") =>"
-- , wrapInsertionEnd = ""
-- , wrapInsertionAddParens = False
-- }
-- }
-- }
--
---- | Read inline, quasiquoted CoffeeScript.
--coffee :: QuasiQuoter
--coffee = QuasiQuoter { quoteExp = \s -> do
-- rs <- coffeeSettings
-- quoteExp (shakespeare rs) s
-- }
--
---- | Read in a CoffeeScript template file. This function reads the file once, at
---- compile time.
--coffeeFile :: FilePath -> Q Exp
--coffeeFile fp = do
-- rs <- coffeeSettings
-- shakespeareFile rs fp
--
---- | Read in a CoffeeScript template file. This impure function uses
---- unsafePerformIO to re-read the file on every call, allowing for rapid
---- iteration.
--coffeeFileReload :: FilePath -> Q Exp
--coffeeFileReload fp = do
-- rs <- coffeeSettings
-- shakespeareFileReload rs fp
--
---- | Deprecated synonym for 'coffeeFileReload'
--coffeeFileDebug :: FilePath -> Q Exp
--coffeeFileDebug = coffeeFileReload
--{-# DEPRECATED coffeeFileDebug "Please use coffeeFileReload instead." #-}
-diff --git a/Text/Css.hs b/Text/Css.hs
-index 75dc549..20c206c 100644
---- a/Text/Css.hs
-+++ b/Text/Css.hs
-@@ -166,22 +166,6 @@ cssUsedIdentifiers toi2b parseBlocks s' =
- (scope, rest') = go rest
- go' (Attr k v) = k ++ v
-
--cssFileDebug :: Bool -- ^ perform the indent-to-brace conversion
-- -> Q Exp
-- -> Parser [TopLevel Unresolved]
-- -> FilePath
-- -> Q Exp
--cssFileDebug toi2b parseBlocks' parseBlocks fp = do
-- s <- fmap TL.unpack $ qRunIO $ readUtf8File fp
--#ifdef GHC_7_4
-- qAddDependentFile fp
--#endif
-- let vs = cssUsedIdentifiers toi2b parseBlocks s
-- c <- mapM vtToExp vs
-- cr <- [|cssRuntime toi2b|]
-- parseBlocks'' <- parseBlocks'
-- return $ cr `AppE` parseBlocks'' `AppE` (LitE $ StringL fp) `AppE` ListE c
--
- combineSelectors :: HasLeadingSpace
- -> [Contents]
- -> [Contents]
-@@ -287,18 +271,6 @@ cssRuntime toi2b parseBlocks fp cd render' = unsafePerformIO $ do
-
- addScope scope = map (DerefIdent . Ident *** CDPlain . fromString) scope ++ cd
-
--vtToExp :: (Deref, VarType) -> Q Exp
--vtToExp (d, vt) = do
-- d' <- lift d
-- c' <- c vt
-- return $ TupE [d', c' `AppE` derefToExp [] d]
-- where
-- c :: VarType -> Q Exp
-- c VTPlain = [|CDPlain . toCss|]
-- c VTUrl = [|CDUrl|]
-- c VTUrlParam = [|CDUrlParam|]
-- c VTMixin = [|CDMixin|]
--
- getVars :: Monad m => [(String, String)] -> Content -> m [(Deref, VarType)]
- getVars _ ContentRaw{} = return []
- getVars scope (ContentVar d) =
-@@ -342,111 +314,8 @@ compressBlock (Block x y blocks mixins) =
- cc (ContentRaw a:ContentRaw b:c) = cc $ ContentRaw (a ++ b) : c
- cc (a:b) = a : cc b
-
--blockToMixin :: Name
-- -> Scope
-- -> Block Unresolved
-- -> Q Exp
--blockToMixin r scope (Block _sel props subblocks mixins) =
-- [|Mixin
-- { mixinAttrs = concat
-- $ $(listE $ map go props)
-- : map mixinAttrs $mixinsE
-- -- FIXME too many complications to implement sublocks for now...
-- , mixinBlocks = [] -- foldr (.) id $(listE $ map subGo subblocks) []
-- }|]
-- {-
-- . foldr (.) id $(listE $ map subGo subblocks)
-- . (concatMap mixinBlocks $mixinsE ++)
-- |]
-- -}
-- where
-- mixinsE = return $ ListE $ map (derefToExp []) mixins
-- go (Attr x y) = conE 'Attr
-- `appE` (contentsToBuilder r scope x)
-- `appE` (contentsToBuilder r scope y)
-- subGo (Block sel' b c d) = blockToCss r scope $ Block sel' b c d
--
--blockToCss :: Name
-- -> Scope
-- -> Block Unresolved
-- -> Q Exp
--blockToCss r scope (Block sel props subblocks mixins) =
-- [|((Block
-- { blockSelector = $(selectorToBuilder r scope sel)
-- , blockAttrs = concat
-- $ $(listE $ map go props)
-- : map mixinAttrs $mixinsE
-- , blockBlocks = ()
-- , blockMixins = ()
-- } :: Block Resolved):)
-- . foldr (.) id $(listE $ map subGo subblocks)
-- . (concatMap mixinBlocks $mixinsE ++)
-- |]
-- where
-- mixinsE = return $ ListE $ map (derefToExp []) mixins
-- go (Attr x y) = conE 'Attr
-- `appE` (contentsToBuilder r scope x)
-- `appE` (contentsToBuilder r scope y)
-- subGo (hls, Block sel' b c d) =
-- blockToCss r scope $ Block sel'' b c d
-- where
-- sel'' = combineSelectors hls sel sel'
--
--selectorToBuilder :: Name -> Scope -> [Contents] -> Q Exp
--selectorToBuilder r scope sels =
-- contentsToBuilder r scope $ intercalate [ContentRaw ","] sels
--
--contentsToBuilder :: Name -> Scope -> [Content] -> Q Exp
--contentsToBuilder r scope contents =
-- appE [|mconcat|] $ listE $ map (contentToBuilder r scope) contents
--
--contentToBuilder :: Name -> Scope -> Content -> Q Exp
--contentToBuilder _ _ (ContentRaw x) =
-- [|fromText . pack|] `appE` litE (StringL x)
--contentToBuilder _ scope (ContentVar d) =
-- case d of
-- DerefIdent (Ident s)
-- | Just val <- lookup s scope -> [|fromText . pack|] `appE` litE (StringL val)
-- _ -> [|toCss|] `appE` return (derefToExp [] d)
--contentToBuilder r _ (ContentUrl u) =
-- [|fromText|] `appE`
-- (varE r `appE` return (derefToExp [] u) `appE` listE [])
--contentToBuilder r _ (ContentUrlParam u) =
-- [|fromText|] `appE`
-- ([|uncurry|] `appE` varE r `appE` return (derefToExp [] u))
--contentToBuilder _ _ ContentMixin{} = error "contentToBuilder on ContentMixin"
--
- type Scope = [(String, String)]
-
--topLevelsToCassius :: [TopLevel Unresolved]
-- -> Q Exp
--topLevelsToCassius a = do
-- r <- newName "_render"
-- lamE [varP r] $ appE [|CssNoWhitespace . foldr ($) []|] $ fmap ListE $ go r [] a
-- where
-- go _ _ [] = return []
-- go r scope (TopBlock b:rest) = do
-- e <- [|(++) $ map TopBlock ($(blockToCss r scope b) [])|]
-- es <- go r scope rest
-- return $ e : es
-- go r scope (TopAtBlock name s b:rest) = do
-- let s' = contentsToBuilder r scope s
-- e <- [|(:) $ TopAtBlock $(lift name) $(s') $(blocksToCassius r scope b)|]
-- es <- go r scope rest
-- return $ e : es
-- go r scope (TopAtDecl dec cs:rest) = do
-- e <- [|(:) $ TopAtDecl $(lift dec) $(contentsToBuilder r scope cs)|]
-- es <- go r scope rest
-- return $ e : es
-- go r scope (TopVar k v:rest) = go r ((k, v) : scope) rest
--
--blocksToCassius :: Name
-- -> Scope
-- -> [Block Unresolved]
-- -> Q Exp
--blocksToCassius r scope a = do
-- appE [|foldr ($) []|] $ listE $ map (blockToCss r scope) a
--
- renderCss :: Css -> TL.Text
- renderCss css =
- toLazyText $ mconcat $ map go tops
-@@ -515,23 +384,3 @@ renderBlock haveWhiteSpace indent (Block sel attrs () ())
- | haveWhiteSpace = fromString ";\n"
- | otherwise = singleton ';'
-
--instance Lift Mixin where
-- lift (Mixin a b) = [|Mixin a b|]
--instance Lift (Attr Unresolved) where
-- lift (Attr k v) = [|Attr k v :: Attr Unresolved |]
--instance Lift (Attr Resolved) where
-- lift (Attr k v) = [|Attr $(liftBuilder k) $(liftBuilder v) :: Attr Resolved |]
--
--liftBuilder :: Builder -> Q Exp
--liftBuilder b = [|fromText $ pack $(lift $ TL.unpack $ toLazyText b)|]
--
--instance Lift Content where
-- lift (ContentRaw s) = [|ContentRaw s|]
-- lift (ContentVar d) = [|ContentVar d|]
-- lift (ContentUrl d) = [|ContentUrl d|]
-- lift (ContentUrlParam d) = [|ContentUrlParam d|]
-- lift (ContentMixin m) = [|ContentMixin m|]
--instance Lift (Block Unresolved) where
-- lift (Block a b c d) = [|Block a b c d|]
--instance Lift (Block Resolved) where
-- lift (Block a b () ()) = [|Block $(liftBuilder a) b () ()|]
-diff --git a/Text/CssCommon.hs b/Text/CssCommon.hs
-index 719e0a8..8c40e8c 100644
---- a/Text/CssCommon.hs
-+++ b/Text/CssCommon.hs
-@@ -1,4 +1,3 @@
--{-# LANGUAGE TemplateHaskell #-}
- {-# LANGUAGE GeneralizedNewtypeDeriving #-}
- {-# LANGUAGE FlexibleInstances #-}
- {-# LANGUAGE CPP #-}
-@@ -156,6 +155,3 @@ showSize :: Rational -> String -> String
- showSize value' unit = printf "%f" value ++ unit
- where value = fromRational value' :: Double
-
--mkSizeType "EmSize" "em"
--mkSizeType "ExSize" "ex"
--mkSizeType "PixelSize" "px"
-diff --git a/Text/Hamlet.hs b/Text/Hamlet.hs
-index 39c1528..6321cd3 100644
---- a/Text/Hamlet.hs
-+++ b/Text/Hamlet.hs
-@@ -11,36 +11,36 @@
- module Text.Hamlet
- ( -- * Plain HTML
- Html
-- , shamlet
-- , shamletFile
-- , xshamlet
-- , xshamletFile
-+ --, shamlet
-+ --, shamletFile
-+ --, xshamlet
-+ --, xshamletFile
- -- * Hamlet
- , HtmlUrl
-- , hamlet
-- , hamletFile
-- , hamletFileReload
-- , ihamletFileReload
-- , xhamlet
-- , xhamletFile
-+ --, hamlet
-+ --, hamletFile
-+ --, hamletFileReload
-+ --, ihamletFileReload
-+ --, xhamlet
-+ --, xhamletFile
- -- * I18N Hamlet
- , HtmlUrlI18n
-- , ihamlet
-- , ihamletFile
-+ --, ihamlet
-+ --, ihamletFile
- -- * Type classes
- , ToAttributes (..)
- -- * Internal, for making more
- , HamletSettings (..)
- , NewlineStyle (..)
-- , hamletWithSettings
-- , hamletFileWithSettings
-+ --, hamletWithSettings
-+ --, hamletFileWithSettings
- , defaultHamletSettings
- , xhtmlHamletSettings
-- , Env (..)
-- , HamletRules (..)
-- , hamletRules
-- , ihamletRules
-- , htmlRules
-+ --, Env (..)
-+ --, HamletRules (..)
-+ --, hamletRules
-+ --, ihamletRules
-+ --, htmlRules
- , CloseStyle (..)
- -- * Used by generated code
- , condH
-@@ -110,47 +110,9 @@ type HtmlUrl url = Render url -> Html
- -- | A function generating an 'Html' given a message translator and a URL rendering function.
- type HtmlUrlI18n msg url = Translate msg -> Render url -> Html
-
--docsToExp :: Env -> HamletRules -> Scope -> [Doc] -> Q Exp
--docsToExp env hr scope docs = do
-- exps <- mapM (docToExp env hr scope) docs
-- case exps of
-- [] -> [|return ()|]
-- [x] -> return x
-- _ -> return $ DoE $ map NoBindS exps
--
- unIdent :: Ident -> String
- unIdent (Ident s) = s
-
--bindingPattern :: Binding -> Q (Pat, [(Ident, Exp)])
--bindingPattern (BindAs i@(Ident s) b) = do
-- name <- newName s
-- (pattern, scope) <- bindingPattern b
-- return (AsP name pattern, (i, VarE name):scope)
--bindingPattern (BindVar i@(Ident s))
-- | all isDigit s = do
-- return (LitP $ IntegerL $ read s, [])
-- | otherwise = do
-- name <- newName s
-- return (VarP name, [(i, VarE name)])
--bindingPattern (BindTuple is) = do
-- (patterns, scopes) <- fmap unzip $ mapM bindingPattern is
-- return (TupP patterns, concat scopes)
--bindingPattern (BindList is) = do
-- (patterns, scopes) <- fmap unzip $ mapM bindingPattern is
-- return (ListP patterns, concat scopes)
--bindingPattern (BindConstr con is) = do
-- (patterns, scopes) <- fmap unzip $ mapM bindingPattern is
-- return (ConP (mkConName con) patterns, concat scopes)
--bindingPattern (BindRecord con fields wild) = do
-- let f (Ident field,b) =
-- do (p,s) <- bindingPattern b
-- return ((mkName field,p),s)
-- (patterns, scopes) <- fmap unzip $ mapM f fields
-- (patterns1, scopes1) <- if wild
-- then bindWildFields con $ map fst fields
-- else return ([],[])
-- return (RecP (mkConName con) (patterns++patterns1), concat scopes ++ scopes1)
--
- mkConName :: DataConstr -> Name
- mkConName = mkName . conToStr
-
-@@ -158,6 +120,7 @@ conToStr :: DataConstr -> String
- conToStr (DCUnqualified (Ident x)) = x
- conToStr (DCQualified (Module xs) (Ident x)) = intercalate "." $ xs ++ [x]
-
-+{-
- -- Wildcards bind all of the unbound fields to variables whose name
- -- matches the field name.
- --
-@@ -296,10 +259,12 @@ hamlet = hamletWithSettings hamletRules defaultHamletSettings
-
- xhamlet :: QuasiQuoter
- xhamlet = hamletWithSettings hamletRules xhtmlHamletSettings
-+-}
-
- asHtmlUrl :: HtmlUrl url -> HtmlUrl url
- asHtmlUrl = id
-
-+{-
- hamletRules :: Q HamletRules
- hamletRules = do
- i <- [|id|]
-@@ -360,6 +325,7 @@ hamletFromString :: Q HamletRules -> HamletSettings -> String -> Q Exp
- hamletFromString qhr set s = do
- hr <- qhr
- hrWithEnv hr $ \env -> docsToExp env hr [] $ docFromString set s
-+-}
-
- docFromString :: HamletSettings -> String -> [Doc]
- docFromString set s =
-@@ -367,6 +333,7 @@ docFromString set s =
- Error s' -> error s'
- Ok (_, d) -> d
-
-+{-
- hamletFileWithSettings :: Q HamletRules -> HamletSettings -> FilePath -> Q Exp
- hamletFileWithSettings qhr set fp = do
- #ifdef GHC_7_4
-@@ -408,6 +375,7 @@ strToExp s@(c:_)
- | isUpper c = ConE $ mkName s
- | otherwise = VarE $ mkName s
- strToExp "" = error "strToExp on empty string"
-+-}
-
- -- | Checks for truth in the left value in each pair in the first argument. If
- -- a true exists, then the corresponding right action is performed. Only the
-@@ -460,7 +428,7 @@ hamletUsedIdentifiers settings =
- data HamletRuntimeRules = HamletRuntimeRules {
- hrrI18n :: Bool
- }
--
-+{-
- hamletFileReloadWithSettings :: HamletRuntimeRules
- -> HamletSettings -> FilePath -> Q Exp
- hamletFileReloadWithSettings hrr settings fp = do
-@@ -487,7 +455,7 @@ hamletFileReloadWithSettings hrr settings fp = do
- c VTUrlParam = [|EUrlParam|]
- c VTMixin = [|\r -> EMixin $ \c -> r c|]
- c VTMsg = [|EMsg|]
--
-+-}
- -- move to Shakespeare.Base?
- readFileUtf8 :: FilePath -> IO String
- readFileUtf8 fp = fmap TL.unpack $ readUtf8File fp
-diff --git a/Text/Hamlet/Parse.hs b/Text/Hamlet/Parse.hs
-index b7e2954..1f14946 100644
---- a/Text/Hamlet/Parse.hs
-+++ b/Text/Hamlet/Parse.hs
-@@ -616,6 +616,7 @@ data NewlineStyle = NoNewlines -- ^ never add newlines
- | DefaultNewlineStyle
- deriving Show
-
-+{-
- instance Lift NewlineStyle where
- lift NoNewlines = [|NoNewlines|]
- lift NewlinesText = [|NewlinesText|]
-@@ -627,7 +628,7 @@ instance Lift (String -> CloseStyle) where
-
- instance Lift HamletSettings where
- lift (HamletSettings a b c d) = [|HamletSettings $(lift a) $(lift b) $(lift c) $(lift d)|]
--
-+-}
-
- htmlEmptyTags :: Set String
- htmlEmptyTags = Set.fromAscList
-diff --git a/Text/Julius.hs b/Text/Julius.hs
-index ec30690..5b5a075 100644
---- a/Text/Julius.hs
-+++ b/Text/Julius.hs
-@@ -14,17 +14,17 @@ module Text.Julius
- -- ** Template-Reading Functions
- -- | These QuasiQuoter and Template Haskell methods return values of
- -- type @'JavascriptUrl' url@. See the Yesod book for details.
-- js
-- , julius
-- , juliusFile
-- , jsFile
-- , juliusFileDebug
-- , jsFileDebug
-- , juliusFileReload
-- , jsFileReload
-+ -- js
-+ -- julius
-+ -- juliusFile
-+ -- jsFile
-+ --, juliusFileDebug
-+ --, jsFileDebug
-+ --, juliusFileReload
-+ --, jsFileReload
-
- -- * Datatypes
-- , JavascriptUrl
-+ JavascriptUrl
- , Javascript (..)
- , RawJavascript (..)
-
-@@ -37,9 +37,9 @@ module Text.Julius
- , renderJavascriptUrl
-
- -- ** internal, used by 'Text.Coffee'
-- , javascriptSettings
-+ --, javascriptSettings
- -- ** internal
-- , juliusUsedIdentifiers
-+ --, juliusUsedIdentifiers
- , asJavascriptUrl
- ) where
-
-@@ -102,48 +102,3 @@ instance RawJS TL.Text where rawJS = RawJavascript . fromLazyText
- instance RawJS Builder where rawJS = RawJavascript
- instance RawJS Bool where rawJS = RawJavascript . unJavascript . toJavascript
-
--javascriptSettings :: Q ShakespeareSettings
--javascriptSettings = do
-- toJExp <- [|toJavascript|]
-- wrapExp <- [|Javascript|]
-- unWrapExp <- [|unJavascript|]
-- asJavascriptUrl' <- [|asJavascriptUrl|]
-- return $ defaultShakespeareSettings { toBuilder = toJExp
-- , wrap = wrapExp
-- , unwrap = unWrapExp
-- , modifyFinalValue = Just asJavascriptUrl'
-- }
--
--js, julius :: QuasiQuoter
--js = QuasiQuoter { quoteExp = \s -> do
-- rs <- javascriptSettings
-- quoteExp (shakespeare rs) s
-- }
--
--julius = js
--
--jsFile, juliusFile :: FilePath -> Q Exp
--jsFile fp = do
-- rs <- javascriptSettings
-- shakespeareFile rs fp
--
--juliusFile = jsFile
--
--
--jsFileReload, juliusFileReload :: FilePath -> Q Exp
--jsFileReload fp = do
-- rs <- javascriptSettings
-- shakespeareFileReload rs fp
--
--juliusFileReload = jsFileReload
--
--jsFileDebug, juliusFileDebug :: FilePath -> Q Exp
--juliusFileDebug = jsFileReload
--{-# DEPRECATED juliusFileDebug "Please use juliusFileReload instead." #-}
--jsFileDebug = jsFileReload
--{-# DEPRECATED jsFileDebug "Please use jsFileReload instead." #-}
--
---- | Determine which identifiers are used by the given template, useful for
---- creating systems like yesod devel.
--juliusUsedIdentifiers :: String -> [(Deref, VarType)]
--juliusUsedIdentifiers = shakespeareUsedIdentifiers defaultShakespeareSettings
-diff --git a/Text/Lucius.hs b/Text/Lucius.hs
-index 346883d..f38492b 100644
---- a/Text/Lucius.hs
-+++ b/Text/Lucius.hs
-@@ -8,13 +8,9 @@
- {-# OPTIONS_GHC -fno-warn-missing-fields #-}
- module Text.Lucius
- ( -- * Parsing
-- lucius
-- , luciusFile
-- , luciusFileDebug
-- , luciusFileReload
- -- ** Mixins
-- , luciusMixin
-- , Mixin
-+ -- luciusMixin
-+ Mixin
- -- ** Runtime
- , luciusRT
- , luciusRT'
-@@ -40,11 +36,8 @@ module Text.Lucius
- , AbsoluteUnit (..)
- , AbsoluteSize (..)
- , absoluteSize
-- , EmSize (..)
-- , ExSize (..)
- , PercentageSize (..)
- , percentageSize
-- , PixelSize (..)
- -- * Internal
- , parseTopLevels
- , luciusUsedIdentifiers
-@@ -67,18 +60,6 @@ import Data.List (isSuffixOf)
- import Control.Arrow (second)
- import Text.Shakespeare (VarType)
-
---- |
----
---- >>> renderCss ([lucius|foo{bar:baz}|] undefined)
---- "foo{bar:baz}"
--lucius :: QuasiQuoter
--lucius = QuasiQuoter { quoteExp = luciusFromString }
--
--luciusFromString :: String -> Q Exp
--luciusFromString s =
-- topLevelsToCassius
-- $ either (error . show) id $ parse parseTopLevels s s
--
- whiteSpace :: Parser ()
- whiteSpace = many whiteSpace1 >> return ()
-
-@@ -218,17 +199,6 @@ parseComment = do
- _ <- manyTill anyChar $ try $ string "*/"
- return $ ContentRaw ""
-
--luciusFile :: FilePath -> Q Exp
--luciusFile fp = do
--#ifdef GHC_7_4
-- qAddDependentFile fp
--#endif
-- contents <- fmap TL.unpack $ qRunIO $ readUtf8File fp
-- luciusFromString contents
--
--luciusFileDebug, luciusFileReload :: FilePath -> Q Exp
--luciusFileDebug = cssFileDebug False [|parseTopLevels|] parseTopLevels
--luciusFileReload = luciusFileDebug
-
- parseTopLevels :: Parser [TopLevel Unresolved]
- parseTopLevels =
-@@ -377,15 +347,3 @@ luciusRTMinified tl scope = either Left (Right . renderCss . CssNoWhitespace) $
- -- creating systems like yesod devel.
- luciusUsedIdentifiers :: String -> [(Deref, VarType)]
- luciusUsedIdentifiers = cssUsedIdentifiers False parseTopLevels
--
--luciusMixin :: QuasiQuoter
--luciusMixin = QuasiQuoter { quoteExp = luciusMixinFromString }
--
--luciusMixinFromString :: String -> Q Exp
--luciusMixinFromString s' = do
-- r <- newName "_render"
-- case fmap compressBlock $ parse parseBlock s s of
-- Left e -> error $ show e
-- Right block -> blockToMixin r [] block
-- where
-- s = concat ["mixin{", s', "}"]
-diff --git a/Text/Roy.hs b/Text/Roy.hs
-index 6e5e246..9ab0dbc 100644
---- a/Text/Roy.hs
-+++ b/Text/Roy.hs
-@@ -39,12 +39,12 @@ module Text.Roy
- -- ** Template-Reading Functions
- -- | These QuasiQuoter and Template Haskell methods return values of
- -- type @'JavascriptUrl' url@. See the Yesod book for details.
-- roy
-- , royFile
-- , royFileReload
-+ -- roy
-+ --, royFile
-+ --, royFileReload
-
- #ifdef TEST_EXPORT
-- , roySettings
-+ --, roySettings
- #endif
- ) where
-
-@@ -53,46 +53,3 @@ import Language.Haskell.TH.Syntax
- import Text.Shakespeare
- import Text.Julius
-
---- | The Roy language compiles down to Javascript.
---- We do this compilation once at compile time to avoid needing to do it during the request.
---- We call this a preConversion because other shakespeare modules like Lucius use Haskell to compile during the request instead rather than a system call.
--roySettings :: Q ShakespeareSettings
--roySettings = do
-- jsettings <- javascriptSettings
-- return $ jsettings { varChar = '#'
-- , preConversion = Just PreConvert {
-- preConvert = ReadProcess "roy" ["--stdio", "--browser"]
-- , preEscapeIgnoreBalanced = "'\""
-- , preEscapeIgnoreLine = "//"
-- , wrapInsertion = Just WrapInsertion {
-- wrapInsertionIndent = Just " "
-- , wrapInsertionStartBegin = "(\\"
-- , wrapInsertionSeparator = " "
-- , wrapInsertionStartClose = " ->\n"
-- , wrapInsertionEnd = ")"
-- , wrapInsertionAddParens = True
-- }
-- }
-- }
--
---- | Read inline, quasiquoted Roy.
--roy :: QuasiQuoter
--roy = QuasiQuoter { quoteExp = \s -> do
-- rs <- roySettings
-- quoteExp (shakespeare rs) s
-- }
--
---- | Read in a Roy template file. This function reads the file once, at
---- compile time.
--royFile :: FilePath -> Q Exp
--royFile fp = do
-- rs <- roySettings
-- shakespeareFile rs fp
--
---- | Read in a Roy template file. This impure function uses
---- unsafePerformIO to re-read the file on every call, allowing for rapid
---- iteration.
--royFileReload :: FilePath -> Q Exp
--royFileReload fp = do
-- rs <- roySettings
-- shakespeareFileReload rs fp
diff --git a/Text/Shakespeare.hs b/Text/Shakespeare.hs
-index 67d7dde..a510215 100644
+index 68e344f..97361a2 100644
--- a/Text/Shakespeare.hs
+++ b/Text/Shakespeare.hs
-@@ -15,12 +15,12 @@ module Text.Shakespeare
+@@ -14,12 +14,12 @@ module Text.Shakespeare
, WrapInsertion (..)
, PreConversion (..)
, defaultShakespeareSettings
@@ -803,7 +30,7 @@ index 67d7dde..a510215 100644
, RenderUrl
, VarType (..)
, Deref
-@@ -153,38 +153,6 @@ defaultShakespeareSettings = ShakespeareSettings {
+@@ -154,38 +154,6 @@ defaultShakespeareSettings = ShakespeareSettings {
, modifyFinalValue = Nothing
}
@@ -842,7 +69,7 @@ index 67d7dde..a510215 100644
type QueryParameters = [(TS.Text, TS.Text)]
type RenderUrl url = (url -> QueryParameters -> TS.Text)
-@@ -348,6 +316,7 @@ pack' = TS.pack
+@@ -349,6 +317,7 @@ pack' = TS.pack
{-# NOINLINE pack' #-}
#endif
@@ -850,7 +77,7 @@ index 67d7dde..a510215 100644
contentsToShakespeare :: ShakespeareSettings -> [Content] -> Q Exp
contentsToShakespeare rs a = do
r <- newName "_render"
-@@ -399,16 +368,19 @@ shakespeareFile r fp =
+@@ -400,16 +369,19 @@ shakespeareFile r fp =
qAddDependentFile fp >>
#endif
readFileQ fp >>= shakespeareFromString r
@@ -870,7 +97,7 @@ index 67d7dde..a510215 100644
data VarExp url = EPlain Builder
| EUrl url
-@@ -417,8 +389,10 @@ data VarExp url = EPlain Builder
+@@ -418,8 +390,10 @@ data VarExp url = EPlain Builder
-- | Determine which identifiers are used by the given template, useful for
-- creating systems like yesod devel.
@@ -881,7 +108,7 @@ index 67d7dde..a510215 100644
type MTime = UTCTime
-@@ -435,28 +409,6 @@ insertReloadMap :: FilePath -> (MTime, [Content]) -> IO [Content]
+@@ -436,28 +410,6 @@ insertReloadMap :: FilePath -> (MTime, [Content]) -> IO [Content]
insertReloadMap fp (mt, content) = atomicModifyIORef reloadMapRef
(\reloadMap -> (M.insert fp (mt, content) reloadMap, content))
@@ -949,366 +176,6 @@ index a0e983c..23b4692 100644
derefParens, derefCurlyBrackets :: UserParser a Deref
derefParens = between (char '(') (char ')') parseDeref
derefCurlyBrackets = between (char '{') (char '}') parseDeref
-diff --git a/Text/Shakespeare/I18N.hs b/Text/Shakespeare/I18N.hs
-index a39a614..753cba7 100644
---- a/Text/Shakespeare/I18N.hs
-+++ b/Text/Shakespeare/I18N.hs
-@@ -52,10 +52,10 @@
- --
- -- You can also adapt those instructions for use with other systems.
- module Text.Shakespeare.I18N
-- ( mkMessage
-- , mkMessageFor
-- , mkMessageVariant
-- , RenderMessage (..)
-+ --( mkMessage
-+ --, mkMessageFor
-+ ---, mkMessageVariant
-+ ( RenderMessage (..)
- , ToMessage (..)
- , SomeMessage (..)
- , Lang
-@@ -106,143 +106,6 @@ instance RenderMessage master Text where
- -- | an RFC1766 / ISO 639-1 language code (eg, @fr@, @en-GB@, etc).
- type Lang = Text
-
---- |generate translations from translation files
----
---- This function will:
----
---- 1. look in the supplied subdirectory for files ending in @.msg@
----
---- 2. generate a type based on the constructors found
----
---- 3. create a 'RenderMessage' instance
----
--mkMessage :: String -- ^ base name to use for translation type
-- -> FilePath -- ^ subdirectory which contains the translation files
-- -> Lang -- ^ default translation language
-- -> Q [Dec]
--mkMessage dt folder lang =
-- mkMessageCommon True "Msg" "Message" dt dt folder lang
--
--
---- | create 'RenderMessage' instance for an existing data-type
--mkMessageFor :: String -- ^ master translation data type
-- -> String -- ^ existing type to add translations for
-- -> FilePath -- ^ path to translation folder
-- -> Lang -- ^ default language
-- -> Q [Dec]
--mkMessageFor master dt folder lang = mkMessageCommon False "" "" master dt folder lang
--
---- | create an additional set of translations for a type created by `mkMessage`
--mkMessageVariant :: String -- ^ master translation data type
-- -> String -- ^ existing type to add translations for
-- -> FilePath -- ^ path to translation folder
-- -> Lang -- ^ default language
-- -> Q [Dec]
--mkMessageVariant master dt folder lang = mkMessageCommon False "Msg" "Message" master dt folder lang
--
---- |used by 'mkMessage' and 'mkMessageFor' to generate a 'RenderMessage' and possibly a message data type
--mkMessageCommon :: Bool -- ^ generate a new datatype from the constructors found in the .msg files
-- -> String -- ^ string to append to constructor names
-- -> String -- ^ string to append to datatype name
-- -> String -- ^ base name of master datatype
-- -> String -- ^ base name of translation datatype
-- -> FilePath -- ^ path to translation folder
-- -> Lang -- ^ default lang
-- -> Q [Dec]
--mkMessageCommon genType prefix postfix master dt folder lang = do
-- files <- qRunIO $ getDirectoryContents folder
-- (_files', contents) <- qRunIO $ fmap (unzip . catMaybes) $ mapM (loadLang folder) files
--#ifdef GHC_7_4
-- mapM_ qAddDependentFile _files'
--#endif
-- sdef <-
-- case lookup lang contents of
-- Nothing -> error $ "Did not find main language file: " ++ unpack lang
-- Just def -> toSDefs def
-- mapM_ (checkDef sdef) $ map snd contents
-- let mname = mkName $ dt ++ postfix
-- c1 <- fmap concat $ mapM (toClauses prefix dt) contents
-- c2 <- mapM (sToClause prefix dt) sdef
-- c3 <- defClause
-- return $
-- ( if genType
-- then ((DataD [] mname [] (map (toCon dt) sdef) []) :)
-- else id)
-- [ InstanceD
-- []
-- (ConT ''RenderMessage `AppT` (ConT $ mkName master) `AppT` ConT mname)
-- [ FunD (mkName "renderMessage") $ c1 ++ c2 ++ [c3]
-- ]
-- ]
--
--toClauses :: String -> String -> (Lang, [Def]) -> Q [Clause]
--toClauses prefix dt (lang, defs) =
-- mapM go defs
-- where
-- go def = do
-- a <- newName "lang"
-- (pat, bod) <- mkBody dt (prefix ++ constr def) (map fst $ vars def) (content def)
-- guard <- fmap NormalG [|$(return $ VarE a) == pack $(lift $ unpack lang)|]
-- return $ Clause
-- [WildP, ConP (mkName ":") [VarP a, WildP], pat]
-- (GuardedB [(guard, bod)])
-- []
--
--mkBody :: String -- ^ datatype
-- -> String -- ^ constructor
-- -> [String] -- ^ variable names
-- -> [Content]
-- -> Q (Pat, Exp)
--mkBody dt cs vs ct = do
-- vp <- mapM go vs
-- let pat = RecP (mkName cs) (map (varName dt *** VarP) vp)
-- let ct' = map (fixVars vp) ct
-- pack' <- [|Data.Text.pack|]
-- tomsg <- [|toMessage|]
-- let ct'' = map (toH pack' tomsg) ct'
-- mapp <- [|mappend|]
-- let app a b = InfixE (Just a) mapp (Just b)
-- e <-
-- case ct'' of
-- [] -> [|mempty|]
-- [x] -> return x
-- (x:xs) -> return $ foldl' app x xs
-- return (pat, e)
-- where
-- toH pack' _ (Raw s) = pack' `AppE` SigE (LitE (StringL s)) (ConT ''String)
-- toH _ tomsg (Var d) = tomsg `AppE` derefToExp [] d
-- go x = do
-- let y = mkName $ '_' : x
-- return (x, y)
-- fixVars vp (Var d) = Var $ fixDeref vp d
-- fixVars _ (Raw s) = Raw s
-- fixDeref vp (DerefIdent (Ident i)) = DerefIdent $ Ident $ fixIdent vp i
-- fixDeref vp (DerefBranch a b) = DerefBranch (fixDeref vp a) (fixDeref vp b)
-- fixDeref _ d = d
-- fixIdent vp i =
-- case lookup i vp of
-- Nothing -> i
-- Just y -> nameBase y
--
--sToClause :: String -> String -> SDef -> Q Clause
--sToClause prefix dt sdef = do
-- (pat, bod) <- mkBody dt (prefix ++ sconstr sdef) (map fst $ svars sdef) (scontent sdef)
-- return $ Clause
-- [WildP, ConP (mkName "[]") [], pat]
-- (NormalB bod)
-- []
--
--defClause :: Q Clause
--defClause = do
-- a <- newName "sub"
-- c <- newName "langs"
-- d <- newName "msg"
-- rm <- [|renderMessage|]
-- return $ Clause
-- [VarP a, ConP (mkName ":") [WildP, VarP c], VarP d]
-- (NormalB $ rm `AppE` VarE a `AppE` VarE c `AppE` VarE d)
-- []
--
- toCon :: String -> SDef -> Con
- toCon dt (SDef c vs _) =
- RecC (mkName $ "Msg" ++ c) $ map go vs
-@@ -258,39 +121,6 @@ varName a y =
- upper (x:xs) = toUpper x : xs
- upper [] = []
-
--checkDef :: [SDef] -> [Def] -> Q ()
--checkDef x y =
-- go (sortBy (comparing sconstr) x) (sortBy (comparing constr) y)
-- where
-- go _ [] = return ()
-- go [] (b:_) = error $ "Extra message constructor: " ++ constr b
-- go (a:as) (b:bs)
-- | sconstr a < constr b = go as (b:bs)
-- | sconstr a > constr b = error $ "Extra message constructor: " ++ constr b
-- | otherwise = do
-- go' (svars a) (vars b)
-- go as bs
-- go' ((an, at):as) ((bn, mbt):bs)
-- | an /= bn = error "Mismatched variable names"
-- | otherwise =
-- case mbt of
-- Nothing -> go' as bs
-- Just bt
-- | at == bt -> go' as bs
-- | otherwise -> error "Mismatched variable types"
-- go' [] [] = return ()
-- go' _ _ = error "Mistmached variable count"
--
--toSDefs :: [Def] -> Q [SDef]
--toSDefs = mapM toSDef
--
--toSDef :: Def -> Q SDef
--toSDef d = do
-- vars' <- mapM go $ vars d
-- return $ SDef (constr d) vars' (content d)
-- where
-- go (a, Just b) = return (a, b)
-- go (a, Nothing) = error $ "Main language missing type for " ++ show (constr d, a)
-
- data SDef = SDef
- { sconstr :: String
-diff --git a/Text/Shakespeare/Text.hs b/Text/Shakespeare/Text.hs
-index 6865a5a..e25a8be 100644
---- a/Text/Shakespeare/Text.hs
-+++ b/Text/Shakespeare/Text.hs
-@@ -7,18 +7,18 @@ module Text.Shakespeare.Text
- ( TextUrl
- , ToText (..)
- , renderTextUrl
-- , stext
-- , text
-- , textFile
-- , textFileDebug
-- , textFileReload
-- , st -- | strict text
-- , lt -- | lazy text, same as stext :)
-+ --, stext
-+ --, text
-+ --, textFile
-+ --, textFileDebug
-+ --, textFileReload
-+ --, st -- | strict text
-+ --, lt -- | lazy text, same as stext :)
- -- * Yesod code generation
-- , codegen
-- , codegenSt
-- , codegenFile
-- , codegenFileReload
-+ --, codegen
-+ --, codegenSt
-+ --, codegenFile
-+ --, codegenFileReload
- ) where
-
- import Language.Haskell.TH.Quote (QuasiQuoter (..))
-@@ -45,106 +45,3 @@ instance ToText Int32 where toText = toText . show
- instance ToText Int64 where toText = toText . show
- instance ToText Int where toText = toText . show
-
--settings :: Q ShakespeareSettings
--settings = do
-- toTExp <- [|toText|]
-- wrapExp <- [|id|]
-- unWrapExp <- [|id|]
-- return $ defaultShakespeareSettings { toBuilder = toTExp
-- , wrap = wrapExp
-- , unwrap = unWrapExp
-- }
--
--
--stext, lt, st, text :: QuasiQuoter
--stext =
-- QuasiQuoter { quoteExp = \s -> do
-- rs <- settings
-- render <- [|toLazyText|]
-- rendered <- shakespeareFromString rs { justVarInterpolation = True } s
-- return (render `AppE` rendered)
-- }
--lt = stext
--
--st =
-- QuasiQuoter { quoteExp = \s -> do
-- rs <- settings
-- render <- [|TL.toStrict . toLazyText|]
-- rendered <- shakespeareFromString rs { justVarInterpolation = True } s
-- return (render `AppE` rendered)
-- }
--
--text = QuasiQuoter { quoteExp = \s -> do
-- rs <- settings
-- quoteExp (shakespeare rs) $ filter (/='\r') s
-- }
--
--
--textFile :: FilePath -> Q Exp
--textFile fp = do
-- rs <- settings
-- shakespeareFile rs fp
--
--
--textFileDebug :: FilePath -> Q Exp
--textFileDebug = textFileReload
--{-# DEPRECATED textFileDebug "Please use textFileReload instead" #-}
--
--textFileReload :: FilePath -> Q Exp
--textFileReload fp = do
-- rs <- settings
-- shakespeareFileReload rs fp
--
---- | codegen is designed for generating Yesod code, including templates
---- So it uses different interpolation characters that won't clash with templates.
--codegenSettings :: Q ShakespeareSettings
--codegenSettings = do
-- toTExp <- [|toText|]
-- wrapExp <- [|id|]
-- unWrapExp <- [|id|]
-- return $ defaultShakespeareSettings { toBuilder = toTExp
-- , wrap = wrapExp
-- , unwrap = unWrapExp
-- , varChar = '~'
-- , urlChar = '*'
-- , intChar = '&'
-- , justVarInterpolation = True -- always!
-- }
--
---- | codegen is designed for generating Yesod code, including templates
---- So it uses different interpolation characters that won't clash with templates.
---- You can use the normal text quasiquoters to generate code
--codegen :: QuasiQuoter
--codegen =
-- QuasiQuoter { quoteExp = \s -> do
-- rs <- codegenSettings
-- render <- [|toLazyText|]
-- rendered <- shakespeareFromString rs { justVarInterpolation = True } s
-- return (render `AppE` rendered)
-- }
--
---- | Generates strict Text
---- codegen is designed for generating Yesod code, including templates
---- So it uses different interpolation characters that won't clash with templates.
--codegenSt :: QuasiQuoter
--codegenSt =
-- QuasiQuoter { quoteExp = \s -> do
-- rs <- codegenSettings
-- render <- [|TL.toStrict . toLazyText|]
-- rendered <- shakespeareFromString rs { justVarInterpolation = True } s
-- return (render `AppE` rendered)
-- }
--
--codegenFileReload :: FilePath -> Q Exp
--codegenFileReload fp = do
-- rs <- codegenSettings
-- render <- [|TL.toStrict . toLazyText|]
-- rendered <- shakespeareFileReload rs{ justVarInterpolation = True } fp
-- return (render `AppE` rendered)
--
--codegenFile :: FilePath -> Q Exp
--codegenFile fp = do
-- rs <- codegenSettings
-- render <- [|TL.toStrict . toLazyText|]
-- rendered <- shakespeareFile rs{ justVarInterpolation = True } fp
-- return (render `AppE` rendered)
-diff --git a/shakespeare.cabal b/shakespeare.cabal
-index 05b985e..dd8762a 100644
---- a/shakespeare.cabal
-+++ b/shakespeare.cabal
-@@ -61,10 +61,9 @@ library
- Text.Lucius
- Text.Cassius
- Text.Shakespeare.Base
-+ Text.Css
- Text.Shakespeare
-- Text.TypeScript
- other-modules: Text.Hamlet.Parse
-- Text.Css
- Text.MkSizeType
- Text.IndentToBrace
- Text.CssCommon
--
-1.7.10.4
+2.1.1
diff --git a/standalone/no-th/haskell-patches/vector_hack-to-build-with-new-ghc.patch b/standalone/no-th/haskell-patches/vector_hack-to-build-with-new-ghc.patch
index 4d39e91cf..f89f0d60b 100644
--- a/standalone/no-th/haskell-patches/vector_hack-to-build-with-new-ghc.patch
+++ b/standalone/no-th/haskell-patches/vector_hack-to-build-with-new-ghc.patch
@@ -1,11 +1,12 @@
-From b0a79f4f98188ba5d43b7e3912b36d34d099ab65 Mon Sep 17 00:00:00 2001
+From 6ffd4fcb7d27ec6df709d80a40a262406446a259 Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Fri, 18 Oct 2013 23:20:35 +0000
+Date: Wed, 15 Oct 2014 17:00:56 +0000
Subject: [PATCH] cross build
---
- Data/Vector/Fusion/Stream/Monadic.hs | 1 -
- 1 file changed, 1 deletion(-)
+ Data/Vector/Fusion/Stream/Monadic.hs | 1 -
+ Data/Vector/Unboxed/Base.hs | 13 -------------
+ 2 files changed, 14 deletions(-)
diff --git a/Data/Vector/Fusion/Stream/Monadic.hs b/Data/Vector/Fusion/Stream/Monadic.hs
index 51fec75..b089b3d 100644
@@ -19,6 +20,30 @@ index 51fec75..b089b3d 100644
#endif
emptyStream :: String
+diff --git a/Data/Vector/Unboxed/Base.hs b/Data/Vector/Unboxed/Base.hs
+index 00350cb..34bfc4a 100644
+--- a/Data/Vector/Unboxed/Base.hs
++++ b/Data/Vector/Unboxed/Base.hs
+@@ -65,19 +65,6 @@ vectorTyCon = mkTyCon3 "vector"
+ vectorTyCon m s = mkTyCon $ m ++ "." ++ s
+ #endif
+
+-instance Typeable1 Vector where
+- typeOf1 _ = mkTyConApp (vectorTyCon "Data.Vector.Unboxed" "Vector") []
+-
+-instance Typeable2 MVector where
+- typeOf2 _ = mkTyConApp (vectorTyCon "Data.Vector.Unboxed.Mutable" "MVector") []
+-
+-instance (Data a, Unbox a) => Data (Vector a) where
+- gfoldl = G.gfoldl
+- toConstr _ = error "toConstr"
+- gunfold _ _ = error "gunfold"
+- dataTypeOf _ = G.mkType "Data.Vector.Unboxed.Vector"
+- dataCast1 = G.dataCast
+-
+ -- ----
+ -- Unit
+ -- ----
--
-1.7.10.4
+2.1.1
diff --git a/standalone/no-th/haskell-patches/yesod-core_expand_TH.patch b/standalone/no-th/haskell-patches/yesod-core_expand_TH.patch
index 07663ac80..f58fcb353 100644
--- a/standalone/no-th/haskell-patches/yesod-core_expand_TH.patch
+++ b/standalone/no-th/haskell-patches/yesod-core_expand_TH.patch
@@ -1,18 +1,18 @@
-From 9feb37d13dc8449dc4445db83485780caee4b7ff Mon Sep 17 00:00:00 2001
+From f1feea61dcba0b16afed5ce8dd5d2433fe505461 Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Tue, 10 Jun 2014 17:44:52 +0000
-Subject: [PATCH] expand and remove TH
+Date: Thu, 16 Oct 2014 02:15:23 +0000
+Subject: [PATCH] hack TH
---
Yesod/Core.hs | 30 +++---
- Yesod/Core/Class/Yesod.hs | 257 ++++++++++++++++++++++++++++++---------------
+ Yesod/Core/Class/Yesod.hs | 256 ++++++++++++++++++++++++++++++---------------
Yesod/Core/Dispatch.hs | 38 ++-----
Yesod/Core/Handler.hs | 25 ++---
- Yesod/Core/Internal/Run.hs | 8 +-
+ Yesod/Core/Internal/Run.hs | 6 +-
Yesod/Core/Internal/TH.hs | 111 --------------------
Yesod/Core/Types.hs | 3 +-
Yesod/Core/Widget.hs | 32 +-----
- 8 files changed, 215 insertions(+), 289 deletions(-)
+ 8 files changed, 213 insertions(+), 288 deletions(-)
diff --git a/Yesod/Core.hs b/Yesod/Core.hs
index 9b29317..7c0792d 100644
@@ -68,7 +68,7 @@ index 9b29317..7c0792d 100644
, renderCssUrl
) where
diff --git a/Yesod/Core/Class/Yesod.hs b/Yesod/Core/Class/Yesod.hs
-index 140600b..75daabc 100644
+index 8631d27..c40eb10 100644
--- a/Yesod/Core/Class/Yesod.hs
+++ b/Yesod/Core/Class/Yesod.hs
@@ -5,18 +5,22 @@
@@ -104,11 +104,11 @@ index 140600b..75daabc 100644
import Network.HTTP.Types (encodePath)
import qualified Network.Wai as W
import Data.Default (def)
-@@ -94,18 +97,27 @@ class RenderRoute site => Yesod site where
+@@ -94,18 +97,26 @@ class RenderRoute site => Yesod site where
defaultLayout w = do
p <- widgetToPageContent w
mmsg <- getMessage
-- giveUrlRenderer [hamlet|
+- withUrlRenderer [hamlet|
- $newline never
- $doctype 5
- <html>
@@ -120,7 +120,7 @@ index 140600b..75daabc 100644
- <p .message>#{msg}
- ^{pageBody p}
- |]
-+ giveUrlRenderer $ \ _render_aHra
++ withUrlRenderer $ \ _render_aHra
+ -> do { id
+ ((Text.Blaze.Internal.preEscapedText . T.pack)
+ "<!DOCTYPE html>\n<html><head><title>");
@@ -140,11 +140,10 @@ index 140600b..75daabc 100644
+ Text.Hamlet.asHtmlUrl (pageBody p) _render_aHra;
+ id
+ ((Text.Blaze.Internal.preEscapedText . T.pack) "</body></html>") }
-+
-- | Override the rendering function for a particular URL. One use case for
-- this is to offload static hosting to a different domain name to avoid
-@@ -374,45 +386,103 @@ widgetToPageContent w = do
+@@ -374,45 +385,103 @@ widgetToPageContent w = do
-- modernizr should be at the end of the <head> http://www.modernizr.com/docs/#installing
-- the asynchronous loader means your page doesn't have to wait for all the js to load
let (mcomplete, asyncScripts) = asyncHelper render scripts jscript jsLoc
@@ -287,7 +286,7 @@ index 140600b..75daabc 100644
return $ PageContent title headAll $
case jsLoader master of
-@@ -442,10 +512,13 @@ defaultErrorHandler NotFound = selectRep $ do
+@@ -442,10 +511,13 @@ defaultErrorHandler NotFound = selectRep $ do
r <- waiRequest
let path' = TE.decodeUtf8With TEE.lenientDecode $ W.rawPathInfo r
setTitle "Not Found"
@@ -305,7 +304,7 @@ index 140600b..75daabc 100644
provideRep $ return $ object ["message" .= ("Not Found" :: Text)]
-- For API requests.
-@@ -455,10 +528,11 @@ defaultErrorHandler NotFound = selectRep $ do
+@@ -455,10 +527,11 @@ defaultErrorHandler NotFound = selectRep $ do
defaultErrorHandler NotAuthenticated = selectRep $ do
provideRep $ defaultLayout $ do
setTitle "Not logged in"
@@ -321,7 +320,7 @@ index 140600b..75daabc 100644
provideRep $ do
-- 401 *MUST* include a WWW-Authenticate header
-@@ -480,10 +554,13 @@ defaultErrorHandler NotAuthenticated = selectRep $ do
+@@ -480,10 +553,13 @@ defaultErrorHandler NotAuthenticated = selectRep $ do
defaultErrorHandler (PermissionDenied msg) = selectRep $ do
provideRep $ defaultLayout $ do
setTitle "Permission Denied"
@@ -339,7 +338,7 @@ index 140600b..75daabc 100644
provideRep $
return $ object $ [
"message" .= ("Permission Denied. " <> msg)
-@@ -492,30 +569,42 @@ defaultErrorHandler (PermissionDenied msg) = selectRep $ do
+@@ -492,30 +568,42 @@ defaultErrorHandler (PermissionDenied msg) = selectRep $ do
defaultErrorHandler (InvalidArgs ia) = selectRep $ do
provideRep $ defaultLayout $ do
setTitle "Invalid Arguments"
@@ -397,7 +396,7 @@ index 140600b..75daabc 100644
provideRep $ return $ object ["message" .= ("Bad method" :: Text), "method" .= TE.decodeUtf8With TEE.lenientDecode m]
asyncHelper :: (url -> [x] -> Text)
-@@ -682,8 +771,4 @@ loadClientSession key getCachedDate sessionName req = load
+@@ -682,8 +770,4 @@ loadClientSession key getCachedDate sessionName req = load
-- turn the TH Loc loaction information into a human readable string
-- leaving out the loc_end parameter
fileLocationToString :: Loc -> String
@@ -484,10 +483,10 @@ index e0d1f0e..cc23fdd 100644
-- | Runs your application using default middlewares (i.e., via 'toWaiApp'). It
-- reads port information from the PORT environment variable, as used by tools
diff --git a/Yesod/Core/Handler.hs b/Yesod/Core/Handler.hs
-index 2e5d7cb..83f93bf 100644
+index d2b196b..13cac17 100644
--- a/Yesod/Core/Handler.hs
+++ b/Yesod/Core/Handler.hs
-@@ -172,7 +172,7 @@ import Data.Text.Encoding (decodeUtf8With, encodeUtf8)
+@@ -174,7 +174,7 @@ import Data.Text.Encoding (decodeUtf8With, encodeUtf8)
import Data.Text.Encoding.Error (lenientDecode)
import qualified Data.Text.Lazy as TL
import qualified Text.Blaze.Html.Renderer.Text as RenderText
@@ -496,7 +495,7 @@ index 2e5d7cb..83f93bf 100644
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
-@@ -201,6 +201,7 @@ import Control.Exception (throwIO)
+@@ -203,6 +203,7 @@ import Control.Exception (throwIO)
import Blaze.ByteString.Builder (Builder)
import Safe (headMay)
import Data.CaseInsensitive (CI)
@@ -504,11 +503,11 @@ index 2e5d7cb..83f93bf 100644
import qualified Data.Conduit.List as CL
import Control.Monad (unless)
import Control.Monad.Trans.Resource (MonadResource, InternalState, runResourceT, withInternalState, getInternalState, liftResourceT, resourceForkIO
-@@ -847,19 +848,15 @@ redirectToPost :: (MonadHandler m, RedirectUrl (HandlerSite m) url)
+@@ -855,19 +856,15 @@ redirectToPost :: (MonadHandler m, RedirectUrl (HandlerSite m) url)
-> m a
redirectToPost url = do
urlText <- toTextUrl url
-- giveUrlRenderer [hamlet|
+- withUrlRenderer [hamlet|
-$newline never
-$doctype 5
-
@@ -521,7 +520,7 @@ index 2e5d7cb..83f93bf 100644
- <p>Javascript has been disabled; please click on the button below to be redirected.
- <input type="submit" value="Continue">
-|] >>= sendResponse
-+ giveUrlRenderer $ \ _render_awps
++ withUrlRenderer $ \ _render_awps
+ -> do { id
+ ((Text.Blaze.Internal.preEscapedText . T.pack)
+ "<!DOCTYPE html>\n<html><head><title>Redirecting...</title></head><body onload=\"document.getElementById('form').submit()\"><form id=\"form\" method=\"post\" action=\"");
@@ -534,20 +533,18 @@ index 2e5d7cb..83f93bf 100644
-- | Wraps the 'Content' generated by 'hamletToContent' in a 'RepHtml'.
hamletToRepHtml :: MonadHandler m => HtmlUrl (Route (HandlerSite m)) -> m Html
diff --git a/Yesod/Core/Internal/Run.hs b/Yesod/Core/Internal/Run.hs
-index 09b4609..e1ef568 100644
+index 311f208..63f666f 100644
--- a/Yesod/Core/Internal/Run.hs
+++ b/Yesod/Core/Internal/Run.hs
-@@ -16,8 +16,8 @@ import Control.Exception.Lifted (catch)
+@@ -16,7 +16,7 @@ import Control.Exception.Lifted (catch)
import Control.Monad (mplus)
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.IO.Class (liftIO)
-import Control.Monad.Logger (LogLevel (LevelError), LogSource,
-- liftLoc)
+import Control.Monad.Logger (Loc, LogLevel (LevelError), LogSource,
-+ )
+ liftLoc)
import Control.Monad.Trans.Resource (runResourceT, withInternalState, runInternalState, createInternalState, closeInternalState)
import qualified Data.ByteString as S
- import qualified Data.ByteString.Char8 as S8
@@ -31,7 +31,7 @@ import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Data.Text.Encoding (decodeUtf8With)
@@ -557,7 +554,7 @@ index 09b4609..e1ef568 100644
import qualified Network.HTTP.Types as H
import Network.Wai
#if MIN_VERSION_wai(2, 0, 0)
-@@ -157,8 +157,6 @@ safeEh :: (Loc -> LogSource -> LogLevel -> LogStr -> IO ())
+@@ -158,8 +158,6 @@ safeEh :: (Loc -> LogSource -> LogLevel -> LogStr -> IO ())
-> ErrorResponse
-> YesodApp
safeEh log' er req = do
@@ -686,7 +683,7 @@ index 7e84c1c..a273c29 100644
- ]
- return $ LetE [fun] (VarE helper)
diff --git a/Yesod/Core/Types.hs b/Yesod/Core/Types.hs
-index 7e3fd0d..994d322 100644
+index 388dfe3..b3fce0f 100644
--- a/Yesod/Core/Types.hs
+++ b/Yesod/Core/Types.hs
@@ -21,6 +21,7 @@ import Control.Monad.Catch (MonadCatch (..))
@@ -697,7 +694,7 @@ index 7e3fd0d..994d322 100644
import Control.Monad.Logger (LogLevel, LogSource,
MonadLogger (..))
import Control.Monad.Trans.Control (MonadBaseControl (..))
-@@ -187,7 +188,7 @@ data RunHandlerEnv site = RunHandlerEnv
+@@ -191,7 +192,7 @@ data RunHandlerEnv site = RunHandlerEnv
, rheRoute :: !(Maybe (Route site))
, rheSite :: !site
, rheUpload :: !(RequestBodyLength -> FileUpload)
@@ -767,5 +764,5 @@ index 481199e..8489fbe 100644
ihamletToRepHtml :: (MonadHandler m, RenderMessage (HandlerSite m) message)
=> HtmlUrlI18n message (Route (HandlerSite m))
--
-2.0.0
+2.1.1
diff --git a/standalone/no-th/haskell-patches/yesod-form_spliced-TH.patch b/standalone/no-th/haskell-patches/yesod-form_spliced-TH.patch
index 4f8edee8f..84314a8d9 100644
--- a/standalone/no-th/haskell-patches/yesod-form_spliced-TH.patch
+++ b/standalone/no-th/haskell-patches/yesod-form_spliced-TH.patch
@@ -1,23 +1,32 @@
-From 6aabd510081681f81f4259190be32fbb2819b46c Mon Sep 17 00:00:00 2001
-From: Joey Hess <joey@kitenet.net>
-Date: Fri, 12 Sep 2014 21:30:27 -0400
-Subject: [PATCH] splice TH
+From 1b24ece1a40c9365f719472ca6e342c8c4065c25 Mon Sep 17 00:00:00 2001
+From: dummy <dummy@example.com>
+Date: Thu, 16 Oct 2014 02:31:20 +0000
+Subject: [PATCH] hack TH
---
- Yesod/Form/Bootstrap3.hs | 183 +++++++++---
- Yesod/Form/Fields.hs | 753 ++++++++++++++++++++++++++++++++++++++---------
- Yesod/Form/Functions.hs | 257 +++++++++++++---
- Yesod/Form/Jquery.hs | 134 +++++++--
- Yesod/Form/MassInput.hs | 226 +++++++++++---
- Yesod/Form/Nic.hs | 67 ++++-
- yesod-form.cabal | 1 -
- 7 files changed, 1319 insertions(+), 302 deletions(-)
+ Yesod/Form/Bootstrap3.hs | 186 +++++++++--
+ Yesod/Form/Fields.hs | 816 +++++++++++++++++++++++++++++++++++------------
+ Yesod/Form/Functions.hs | 257 ++++++++++++---
+ Yesod/Form/Jquery.hs | 134 ++++++--
+ Yesod/Form/MassInput.hs | 226 ++++++++++---
+ Yesod/Form/Nic.hs | 67 +++-
+ 6 files changed, 1322 insertions(+), 364 deletions(-)
diff --git a/Yesod/Form/Bootstrap3.hs b/Yesod/Form/Bootstrap3.hs
-index 84e85fc..943c416 100644
+index 84e85fc..1954fb4 100644
--- a/Yesod/Form/Bootstrap3.hs
+++ b/Yesod/Form/Bootstrap3.hs
-@@ -152,44 +152,144 @@ renderBootstrap3 formLayout aform fragment = do
+@@ -26,6 +26,9 @@ import Data.String (IsString(..))
+ import Yesod.Core
+
+ import qualified Data.Text as T
++import qualified Text.Hamlet
++import qualified Text.Blaze.Internal
++import qualified Data.Foldable
+
+ import Yesod.Form.Types
+ import Yesod.Form.Functions
+@@ -152,44 +155,144 @@ renderBootstrap3 formLayout aform fragment = do
let views = views' []
has (Just _) = True
has Nothing = False
@@ -104,7 +113,7 @@ index 84e85fc..943c416 100644
+ Nothing;
+ (asWidgetT . toWidget) (fvInput view_as0a);
+ (asWidgetT . toWidget) (helpWidget view_as0a) }
-+ BootstrapHorizontalForm labelOffset_as0b
++ ; BootstrapHorizontalForm labelOffset_as0b
+ labelSize_as0c
+ inputOffset_as0d
+ inputSize_as0e
@@ -195,7 +204,7 @@ index 84e85fc..943c416 100644
-- | How the 'bootstrapSubmit' button should be rendered.
-@@ -244,7 +344,22 @@ mbootstrapSubmit
+@@ -244,7 +347,22 @@ mbootstrapSubmit
=> BootstrapSubmit msg -> MForm m (FormResult (), FieldView site)
mbootstrapSubmit (BootstrapSubmit msg classes attrs) =
let res = FormSuccess ()
@@ -220,7 +229,7 @@ index 84e85fc..943c416 100644
, fvTooltip = Nothing
, fvId = bootstrapSubmitId
diff --git a/Yesod/Form/Fields.hs b/Yesod/Form/Fields.hs
-index c6091a9..3d7b267 100644
+index c6091a9..9e6bd4e 100644
--- a/Yesod/Form/Fields.hs
+++ b/Yesod/Form/Fields.hs
@@ -1,4 +1,3 @@
@@ -908,7 +917,7 @@ index c6091a9..3d7b267 100644
+ ((Text.Blaze.Internal.preEscapedText . pack) "\">");
+ (asWidgetT . toWidget) inside;
+ (asWidgetT . toWidget)
-+ ((Text.Blaze.Internal.preEscapedText . pack) "</div>") }
++ ((Text.Blaze.Internal.preEscapedText . pack) "</div>") })
+
+ (\theId name isSel -> do { (asWidgetT . toWidget)
+ ((Text.Blaze.Internal.preEscapedText . pack)
@@ -1098,7 +1107,77 @@ index c6091a9..3d7b267 100644
, fieldEnctype = UrlEncoded
}
-@@ -665,9 +1114,21 @@ fileField = Field
+@@ -559,69 +1008,6 @@ optionsPairs opts = do
+ optionsEnum :: (MonadHandler m, Show a, Enum a, Bounded a) => m (OptionList a)
+ optionsEnum = optionsPairs $ map (\x -> (pack $ show x, x)) [minBound..maxBound]
+
+-#if MIN_VERSION_persistent(2, 0, 0)
+-optionsPersist :: ( YesodPersist site, PersistEntity a
+- , PersistQuery (PersistEntityBackend a)
+- , PathPiece (Key a)
+- , RenderMessage site msg
+- , YesodPersistBackend site ~ PersistEntityBackend a
+- )
+-#else
+-optionsPersist :: ( YesodPersist site, PersistEntity a
+- , PersistQuery (YesodPersistBackend site (HandlerT site IO))
+- , PathPiece (Key a)
+- , PersistEntityBackend a ~ PersistMonadBackend (YesodPersistBackend site (HandlerT site IO))
+- , RenderMessage site msg
+- )
+-#endif
+- => [Filter a]
+- -> [SelectOpt a]
+- -> (a -> msg)
+- -> HandlerT site IO (OptionList (Entity a))
+-optionsPersist filts ords toDisplay = fmap mkOptionList $ do
+- mr <- getMessageRender
+- pairs <- runDB $ selectList filts ords
+- return $ map (\(Entity key value) -> Option
+- { optionDisplay = mr (toDisplay value)
+- , optionInternalValue = Entity key value
+- , optionExternalValue = toPathPiece key
+- }) pairs
+-
+--- | An alternative to 'optionsPersist' which returns just the @Key@ instead of
+--- the entire @Entity@.
+---
+--- Since 1.3.2
+-#if MIN_VERSION_persistent(2, 0, 0)
+-optionsPersistKey
+- :: (YesodPersist site
+- , PersistEntity a
+- , PersistQuery (PersistEntityBackend a)
+- , PathPiece (Key a)
+- , RenderMessage site msg
+- , YesodPersistBackend site ~ PersistEntityBackend a
+- )
+-#else
+-optionsPersistKey
+- :: (YesodPersist site
+- , PersistEntity a
+- , PersistQuery (YesodPersistBackend site (HandlerT site IO))
+- , PathPiece (Key a)
+- , RenderMessage site msg
+- , PersistEntityBackend a ~ PersistMonadBackend (YesodDB site))
+-#endif
+- => [Filter a]
+- -> [SelectOpt a]
+- -> (a -> msg)
+- -> HandlerT site IO (OptionList (Key a))
+-
+-optionsPersistKey filts ords toDisplay = fmap mkOptionList $ do
+- mr <- getMessageRender
+- pairs <- runDB $ selectList filts ords
+- return $ map (\(Entity key value) -> Option
+- { optionDisplay = mr (toDisplay value)
+- , optionInternalValue = key
+- , optionExternalValue = toPathPiece key
+- }) pairs
+
+ selectFieldHelper
+ :: (Eq a, RenderMessage site FormMessage)
+@@ -665,9 +1051,21 @@ fileField = Field
case files of
[] -> Right Nothing
file:_ -> Right $ Just file
@@ -1123,7 +1202,7 @@ index c6091a9..3d7b267 100644
, fieldEnctype = Multipart
}
-@@ -694,10 +1155,19 @@ fileAFormReq fs = AForm $ \(site, langs) menvs ints -> do
+@@ -694,10 +1092,19 @@ fileAFormReq fs = AForm $ \(site, langs) menvs ints -> do
{ fvLabel = toHtml $ renderMessage site langs $ fsLabel fs
, fvTooltip = fmap (toHtml . renderMessage site langs) $ fsTooltip fs
, fvId = id'
@@ -1147,7 +1226,7 @@ index c6091a9..3d7b267 100644
, fvErrors = errs
, fvRequired = True
}
-@@ -726,10 +1196,19 @@ fileAFormOpt fs = AForm $ \(master, langs) menvs ints -> do
+@@ -726,10 +1133,19 @@ fileAFormOpt fs = AForm $ \(master, langs) menvs ints -> do
{ fvLabel = toHtml $ renderMessage master langs $ fsLabel fs
, fvTooltip = fmap (toHtml . renderMessage master langs) $ fsTooltip fs
, fvId = id'
@@ -1172,10 +1251,10 @@ index c6091a9..3d7b267 100644
, fvRequired = False
}
diff --git a/Yesod/Form/Functions.hs b/Yesod/Form/Functions.hs
-index 5fd03e6..b14d900 100644
+index 9e6abaf..0c2a0ce 100644
--- a/Yesod/Form/Functions.hs
+++ b/Yesod/Form/Functions.hs
-@@ -59,12 +59,16 @@ import Text.Blaze (Markup, toMarkup)
+@@ -60,12 +60,16 @@ import Text.Blaze (Markup, toMarkup)
#define toHtml toMarkup
import Yesod.Core
import Network.Wai (requestMethod)
@@ -1193,7 +1272,7 @@ index 5fd03e6..b14d900 100644
-- | Get a unique identifier.
newFormIdent :: Monad m => MForm m Text
-@@ -216,7 +220,14 @@ postHelper form env = do
+@@ -217,7 +221,14 @@ postHelper form env = do
let token =
case reqToken req of
Nothing -> mempty
@@ -1209,7 +1288,7 @@ index 5fd03e6..b14d900 100644
m <- getYesod
langs <- languages
((res, xml), enctype) <- runFormGeneric (form token) m langs env
-@@ -296,7 +307,12 @@ getHelper :: MonadHandler m
+@@ -297,7 +308,12 @@ getHelper :: MonadHandler m
-> Maybe (Env, FileEnv)
-> m (a, Enctype)
getHelper form env = do
@@ -1223,7 +1302,7 @@ index 5fd03e6..b14d900 100644
langs <- languages
m <- getYesod
runFormGeneric (form fragment) m langs env
-@@ -331,10 +347,15 @@ identifyForm
+@@ -332,10 +348,15 @@ identifyForm
identifyForm identVal form = \fragment -> do
-- Create hidden <input>.
let fragment' =
@@ -1243,7 +1322,7 @@ index 5fd03e6..b14d900 100644
-- Check if we got its value back.
mp <- askParams
-@@ -364,22 +385,70 @@ renderTable, renderDivs, renderDivsNoLabels :: Monad m => FormRender m a
+@@ -365,22 +386,70 @@ renderTable, renderDivs, renderDivsNoLabels :: Monad m => FormRender m a
renderTable aform fragment = do
(res, views') <- aFormToForm aform
let views = views' []
@@ -1330,7 +1409,7 @@ index 5fd03e6..b14d900 100644
return (res, widget)
where
addIsFirst [] = []
-@@ -395,19 +464,66 @@ renderDivsMaybeLabels :: Monad m => Bool -> FormRender m a
+@@ -396,19 +465,66 @@ renderDivsMaybeLabels :: Monad m => Bool -> FormRender m a
renderDivsMaybeLabels withLabels aform fragment = do
(res, views') <- aFormToForm aform
let views = views' []
@@ -1410,7 +1489,7 @@ index 5fd03e6..b14d900 100644
return (res, widget)
-- | Render a form using Bootstrap v2-friendly shamlet syntax.
-@@ -435,19 +551,62 @@ renderBootstrap2 aform fragment = do
+@@ -436,19 +552,62 @@ renderBootstrap2 aform fragment = do
let views = views' []
has (Just _) = True
has Nothing = False
@@ -2002,18 +2081,6 @@ index 7e4af07..b59745a 100644
, fieldEnctype = UrlEncoded
}
where
-diff --git a/yesod-form.cabal b/yesod-form.cabal
-index bfe94df..1f5aef5 100644
---- a/yesod-form.cabal
-+++ b/yesod-form.cabal
-@@ -51,7 +51,6 @@ library
- exposed-modules: Yesod.Form
- Yesod.Form.Types
- Yesod.Form.Functions
-- Yesod.Form.Bootstrap3
- Yesod.Form.Input
- Yesod.Form.Fields
- Yesod.Form.Jquery
--
-2.1.0
+2.1.1
diff --git a/standalone/no-th/haskell-patches/yesod-persistent_do-not-really-build.patch b/standalone/no-th/haskell-patches/yesod-persistent_do-not-really-build.patch
index a2210d4c4..76aad4e34 100644
--- a/standalone/no-th/haskell-patches/yesod-persistent_do-not-really-build.patch
+++ b/standalone/no-th/haskell-patches/yesod-persistent_do-not-really-build.patch
@@ -1,14 +1,14 @@
-From 92a34bc2b09572a58a4e696e0d8a0a61475535f7 Mon Sep 17 00:00:00 2001
+From e82ed4e6fd7b5ea6dbe474b5de2755ec5794161c Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Tue, 10 Jun 2014 19:09:56 +0000
-Subject: [PATCH] do not really build
+Date: Thu, 16 Oct 2014 02:23:50 +0000
+Subject: [PATCH] stub out
---
yesod-persistent.cabal | 10 ----------
1 file changed, 10 deletions(-)
diff --git a/yesod-persistent.cabal b/yesod-persistent.cabal
-index b44499b..ef33863 100644
+index b116f3a..017b184 100644
--- a/yesod-persistent.cabal
+++ b/yesod-persistent.cabal
@@ -14,16 +14,6 @@ description: Some helpers for using Persistent from Yesod.
@@ -16,8 +16,8 @@ index b44499b..ef33863 100644
library
build-depends: base >= 4 && < 5
- , yesod-core >= 1.2.2 && < 1.3
-- , persistent >= 1.2 && < 1.4
-- , persistent-template >= 1.2 && < 1.4
+- , persistent >= 1.2 && < 2.1
+- , persistent-template >= 1.2 && < 2.1
- , transformers >= 0.2.2
- , blaze-builder
- , conduit
@@ -29,5 +29,5 @@ index b44499b..ef33863 100644
test-suite test
--
-2.0.0
+2.1.1
diff --git a/standalone/no-th/haskell-patches/yesod_hack-TH.patch b/standalone/no-th/haskell-patches/yesod_hack-TH.patch
index 001d4a471..ebf8a786b 100644
--- a/standalone/no-th/haskell-patches/yesod_hack-TH.patch
+++ b/standalone/no-th/haskell-patches/yesod_hack-TH.patch
@@ -1,13 +1,13 @@
-From da032b804c0a35c2831664e28c9211f4fe712593 Mon Sep 17 00:00:00 2001
+From 59091cd37958fee79b9e346fe3118d5ed7d0104b Mon Sep 17 00:00:00 2001
From: dummy <dummy@example.com>
-Date: Tue, 10 Jun 2014 20:39:42 +0000
-Subject: [PATCH] avoid TH
+Date: Thu, 16 Oct 2014 02:36:37 +0000
+Subject: [PATCH] hack TH
---
Yesod.hs | 19 ++++++++++++--
- Yesod/Default/Main.hs | 32 +-----------------------
+ Yesod/Default/Main.hs | 31 +----------------------
Yesod/Default/Util.hs | 69 ++-------------------------------------------------
- 3 files changed, 20 insertions(+), 100 deletions(-)
+ 3 files changed, 20 insertions(+), 99 deletions(-)
diff --git a/Yesod.hs b/Yesod.hs
index b367144..fbe309c 100644
@@ -41,7 +41,7 @@ index b367144..fbe309c 100644
+insert = undefined
+
diff --git a/Yesod/Default/Main.hs b/Yesod/Default/Main.hs
-index 565ed35..41c2df0 100644
+index 565ed35..bf46642 100644
--- a/Yesod/Default/Main.hs
+++ b/Yesod/Default/Main.hs
@@ -1,10 +1,8 @@
@@ -64,7 +64,7 @@ index 565ed35..41c2df0 100644
import System.Log.FastLogger (LogStr, toLogStr)
import Language.Haskell.TH.Syntax (qLocation)
-@@ -55,34 +53,6 @@ defaultMain load getApp = do
+@@ -55,33 +53,6 @@ defaultMain load getApp = do
type LogFunc = Loc -> LogSource -> LogLevel -> LogStr -> IO ()
@@ -95,10 +95,9 @@ index 565ed35..41c2df0 100644
-#else
- const True
-#endif
--
+
-- | Run your application continously, listening for SIGINT and exiting
-- when received
- --
diff --git a/Yesod/Default/Util.hs b/Yesod/Default/Util.hs
index a10358e..0547424 100644
--- a/Yesod/Default/Util.hs
@@ -196,5 +195,5 @@ index a10358e..0547424 100644
- else return $ Just ex
- else return Nothing
--
-2.0.0
+2.1.1