Commit a07ef252 authored by Oleg Grenrus

Implement file+noindex:///local/repositories

Resolve #6359

`preferred-versions` support is left out for now.
It shouldn't be difficult to add, but it needs work nevertheless.

We also allow relative paths, which mostly work if you are careful.

In addition:
- change the index cache to use `Distribution.Utils.Structured`,
  making the Binary instances generically derived.
- separate Distribution.Client.HashValue into its own module.
  This allows HashValue to be used for hashing localRepoPath
  (breaks a module dependency cycle); see the sketch below.

Almost as a feature, the generated 01-index.cache is never updated.
If you change the contents of the directory, you have to purge the
01-index.cache file yourself.
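For illustration only, here is a rough sketch (not the exact code added by
this commit, and the helper name is made up) of how a shared-cache key can
be derived from the repository path using the split-out HashValue module:

    import Distribution.Client.HashValue (hashValue, showHashValue, truncateHash)
    import qualified Data.ByteString.Lazy.Char8 as LBS

    -- Hypothetical helper: combine the repository name with a short hash of
    -- its path so that different paths get different cache directories.
    localRepoCacheKeySketch :: String -> FilePath -> String
    localRepoCacheKeySketch name path =
        name ++ "-" ++ showHashValue (truncateHash 8 (hashValue (LBS.pack path)))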
parent 787b1f22
......@@ -49,8 +49,10 @@ module Distribution.Utils.Structured (
-- | These functions operate like @binary@'s counterparts,
-- but the serialised version has a structure hash in front.
structuredEncode,
structuredEncodeFile,
structuredDecode,
structuredDecodeOrFailIO,
structuredDecodeFileOrFail,
-- * Structured class
Structured (structure),
MD5,
......@@ -262,6 +264,10 @@ structuredEncode
=> a -> LBS.ByteString
structuredEncode x = Binary.encode (Tag :: Tag a, x)
-- | Lazily serialise a value to a file
structuredEncodeFile :: (Binary.Binary a, Structured a) => FilePath -> a -> IO ()
structuredEncodeFile f = LBS.writeFile f . structuredEncode
-- | Structured 'Binary.decode'.
-- Decode a value from a lazy 'LBS.ByteString', reconstructing the original structure.
-- Throws a pure exception on invalid inputs.
......@@ -280,6 +286,10 @@ structuredDecodeOrFailIO bs =
handler (ErrorCall str) = return $ Left str
#endif
-- | Lazily reconstruct a value previously written to a file.
structuredDecodeFileOrFail :: (Binary.Binary a, Structured a) => FilePath -> IO (Either String a)
structuredDecodeFileOrFail f = structuredDecodeOrFailIO =<< LBS.readFile f
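-- A hedged usage sketch, not part of this commit: round-tripping a value
-- through the two new file helpers.  'Example' is a hypothetical type; the
-- sketch assumes 'Generic' (from GHC.Generics) and the qualified 'Binary'
-- import used elsewhere in this module are in scope.
data Example = Example { exampleInts :: [Int] }
  deriving (Eq, Show, Generic)

instance Binary.Binary Example   -- generically derived
instance Structured Example      -- generically derived

roundTripExample :: FilePath -> Example -> IO (Either String Example)
roundTripExample fp x = do
    structuredEncodeFile fp x       -- writes the structure hash, then the payload
    structuredDecodeFileOrFail fp   -- Left err if the stored structure hash differs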
-------------------------------------------------------------------------------
-- Helper data
-------------------------------------------------------------------------------
......
......@@ -57,7 +57,7 @@ The name of the repository is given on the first line, and can be
anything; packages downloaded from this repository will be cached under
``~/.cabal/packages/hackage.haskell.org`` (or whatever name you specify;
you can change the prefix by changing the value of
``remote-repo-cache``). If you want, you can configure multiple
:cfg-field:`remote-repo-cache`). If you want, you can configure multiple
repositories, and ``cabal`` will combine them and be able to download
packages from any of them.
......@@ -97,7 +97,32 @@ received were the right ones. How that is done is however outside the
scope of ``cabal`` proper.
More information about the security infrastructure can be found at
https://github.com/well-typed/hackage-security.
https://github.com/haskell/hackage-security.
Local no-index repositories
^^^^^^^^^^^^^^^^^^^^^^^^^^^
It's possible to use a directory of ``.tar.gz`` package files as a local package
repository.
::
repository my-local-repository
url: file+noindex:///absolute/path/to/directory
``cabal`` will construct the index automatically from the
``package-name-version.tar.gz`` files in the directory, and will use the
corresponding ``package-name-version.cabal`` files, if they exist, as new revisions.
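For illustration, such a directory might look like this (the package names
are made up); the ``foo-1.0.cabal`` file would be picked up as a revised
package description for ``foo-1.0.tar.gz``:

::

    /absolute/path/to/directory/
        foo-1.0.tar.gz
        foo-1.0.cabal
        bar-2.1.tar.gz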
The index is cached inside the given directory. If the directory is not
writable, you can append the ``#shared-cache`` fragment to the URI; the cache
will then be stored inside the :cfg-field:`remote-repo-cache` directory,
with a cache key derived from the repository path.
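For example, a repository that lives on a read-only file system can use the
shared cache (the repository name and path below are illustrative):

::

    repository my-read-only-repository
      url: file+noindex:///mnt/read-only-packages#shared-cache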
.. note::
    The URI scheme ``file:`` is interpreted as a remote repository,
    as described in the previous sections, and thus requires manual
    construction of the ``01-index.tar`` file.
Legacy repositories
^^^^^^^^^^^^^^^^^^^
......@@ -120,7 +145,7 @@ although, in (and only in) the specific case of Hackage, the URL
``http://hackage.haskell.org/packages/archive`` will be silently
translated to ``http://hackage.haskell.org/``.
The second kind of legacy repositories are so-called local
The second kind of legacy repositories are so-called “(legacy) local”
repositories:
::
......
......@@ -186,7 +186,8 @@ updateRepo :: Verbosity -> UpdateFlags -> RepoContext -> (Repo, IndexState)
updateRepo verbosity _updateFlags repoCtxt (repo, indexState) = do
transport <- repoContextGetTransport repoCtxt
case repo of
RepoLocal{..} -> return ()
RepoLocal{} -> return ()
RepoLocalNoIndex{} -> return ()
RepoRemote{..} -> do
downloadResult <- downloadIndex transport verbosity
repoRemote repoLocalDir
......
......@@ -41,7 +41,8 @@ module Distribution.Client.Config (
userConfigUpdate,
createDefaultConfigFile,
remoteRepoFields
remoteRepoFields,
postProcessRepo,
) where
import Language.Haskell.Extension ( Language(Haskell2010) )
......@@ -50,7 +51,7 @@ import Distribution.Deprecated.ViewAsFieldDescr
( viewAsFieldDescr )
import Distribution.Client.Types
( RemoteRepo(..), Username(..), Password(..), emptyRemoteRepo
( RemoteRepo(..), LocalRepo (..), Username(..), Password(..), emptyRemoteRepo
, AllowOlder(..), AllowNewer(..), RelaxDeps(..), isRelaxDeps
)
import Distribution.Client.BuildReports.Types
......@@ -64,7 +65,7 @@ import Distribution.Client.Setup
, InstallFlags(..), installOptions, defaultInstallFlags
, UploadFlags(..), uploadCommand
, ReportFlags(..), reportCommand
, showRepo, parseRepo, readRepo )
, showRemoteRepo, parseRemoteRepo, readRemoteRepo )
import Distribution.Client.CmdInstall.ClientInstallFlags
( ClientInstallFlags(..), defaultClientInstallFlags
, clientInstallOptions )
......@@ -92,7 +93,7 @@ import Distribution.Deprecated.ParseUtils
, locatedErrorMsg, showPWarning
, readFields, warning, lineNo
, simpleField, listField, spaceListField
, parseFilePathQ, parseOptCommaList, parseTokenQ )
, parseFilePathQ, parseOptCommaList, parseTokenQ, syntaxError)
import Distribution.Client.ParseUtils
( parseFields, ppFields, ppSection )
import Distribution.Client.HttpUtils
......@@ -252,6 +253,7 @@ instance Semigroup SavedConfig where
globalRemoteRepos = lastNonEmptyNL globalRemoteRepos,
globalCacheDir = combine globalCacheDir,
globalLocalRepos = lastNonEmptyNL globalLocalRepos,
globalLocalNoIndexRepos = lastNonEmptyNL globalLocalNoIndexRepos,
globalLogsDir = combine globalLogsDir,
globalWorldFile = combine globalWorldFile,
globalRequireSandbox = combine globalRequireSandbox,
......@@ -1034,7 +1036,7 @@ deprecatedFieldDescriptions :: [FieldDescr SavedConfig]
deprecatedFieldDescriptions =
[ liftGlobalFlag $
listField "repos"
(Disp.text . showRepo) parseRepo
(Disp.text . showRemoteRepo) parseRemoteRepo
(fromNubList . globalRemoteRepos)
(\rs cfg -> cfg { globalRemoteRepos = toNubList rs })
, liftGlobalFlag $
......@@ -1117,9 +1119,9 @@ parseConfig src initial = \str -> do
let init0 = savedInitFlags config
user0 = savedUserInstallDirs config
global0 = savedGlobalInstallDirs config
(remoteRepoSections0, haddockFlags, initFlags, user, global, paths, args) <-
(remoteRepoSections0, localRepoSections0, haddockFlags, initFlags, user, global, paths, args) <-
foldM parseSections
([], savedHaddockFlags config, init0, user0, global0, [], [])
([], [], savedHaddockFlags config, init0, user0, global0, [], [])
knownSections
let remoteRepoSections =
......@@ -1127,9 +1129,15 @@ parseConfig src initial = \str -> do
. nubBy ((==) `on` remoteRepoName)
$ remoteRepoSections0
let localRepoSections =
reverse
. nubBy ((==) `on` localRepoName)
$ localRepoSections0
return . fixConfigMultilines $ config {
savedGlobalFlags = (savedGlobalFlags config) {
globalRemoteRepos = toNubList remoteRepoSections,
globalLocalNoIndexRepos = toNubList localRepoSections,
-- the global extra prog path comes from the configure flag prog path
globalProgPathExtra = configProgramPathExtra (savedConfigureFlags config)
},
......@@ -1185,61 +1193,57 @@ parseConfig src initial = \str -> do
parse = parseFields (configFieldDescriptions src
++ deprecatedFieldDescriptions) initial
parseSections (rs, h, i, u, g, p, a)
(ParseUtils.Section _ "repository" name fs) = do
parseSections (rs, ls, h, i, u, g, p, a)
(ParseUtils.Section lineno "repository" name fs) = do
r' <- parseFields remoteRepoFields (emptyRemoteRepo name) fs
when (remoteRepoKeyThreshold r' > length (remoteRepoRootKeys r')) $
warning $ "'key-threshold' for repository " ++ show (remoteRepoName r')
++ " higher than number of keys"
when (not (null (remoteRepoRootKeys r'))
&& remoteRepoSecure r' /= Just True) $
warning $ "'root-keys' for repository " ++ show (remoteRepoName r')
++ " non-empty, but 'secure' not set to True."
return (r':rs, h, i, u, g, p, a)
parseSections (rs, h, i, u, g, p, a)
r'' <- postProcessRepo lineno name r'
case r'' of
Left local -> return (rs, local:ls, h, i, u, g, p, a)
Right remote -> return (remote:rs, ls, h, i, u, g, p, a)
parseSections (rs, ls, h, i, u, g, p, a)
(ParseUtils.F lno "remote-repo" raw) = do
let mr' = readRepo raw
let mr' = readRemoteRepo raw
r' <- maybe (ParseFailed $ NoParse "remote-repo" lno) return mr'
return (r':rs, h, i, u, g, p, a)
return (r':rs, ls, h, i, u, g, p, a)
parseSections accum@(rs, h, i, u, g, p, a)
parseSections accum@(rs, ls, h, i, u, g, p, a)
(ParseUtils.Section _ "haddock" name fs)
| name == "" = do h' <- parseFields haddockFlagsFields h fs
return (rs, h', i, u, g, p, a)
return (rs, ls, h', i, u, g, p, a)
| otherwise = do
warning "The 'haddock' section should be unnamed"
return accum
parseSections accum@(rs, h, i, u, g, p, a)
parseSections accum@(rs, ls, h, i, u, g, p, a)
(ParseUtils.Section _ "init" name fs)
| name == "" = do i' <- parseFields initFlagsFields i fs
return (rs, h, i', u, g, p, a)
return (rs, ls, h, i', u, g, p, a)
| otherwise = do
warning "The 'init' section should be unnamed"
return accum
parseSections accum@(rs, h, i, u, g, p, a)
parseSections accum@(rs, ls, h, i, u, g, p, a)
(ParseUtils.Section _ "install-dirs" name fs)
| name' == "user" = do u' <- parseFields installDirsFields u fs
return (rs, h, i, u', g, p, a)
return (rs, ls, h, i, u', g, p, a)
| name' == "global" = do g' <- parseFields installDirsFields g fs
return (rs, h, i, u, g', p, a)
return (rs, ls, h, i, u, g', p, a)
| otherwise = do
warning "The 'install-paths' section should be for 'user' or 'global'"
return accum
where name' = lowercase name
parseSections accum@(rs, h, i, u, g, p, a)
parseSections accum@(rs, ls, h, i, u, g, p, a)
(ParseUtils.Section _ "program-locations" name fs)
| name == "" = do p' <- parseFields withProgramsFields p fs
return (rs, h, i, u, g, p', a)
return (rs, ls, h, i, u, g, p', a)
| otherwise = do
warning "The 'program-locations' section should be unnamed"
return accum
parseSections accum@(rs, h, i, u, g, p, a)
parseSections accum@(rs, ls, h, i, u, g, p, a)
(ParseUtils.Section _ "program-default-options" name fs)
| name == "" = do a' <- parseFields withProgramOptionsFields a fs
return (rs, h, i, u, g, p, a')
return (rs, ls, h, i, u, g, p, a')
| otherwise = do
warning "The 'program-default-options' section should be unnamed"
return accum
......@@ -1247,6 +1251,34 @@ parseConfig src initial = \str -> do
warning $ "Unrecognized stanza on line " ++ show (lineNo f)
return accum
postProcessRepo :: Int -> String -> RemoteRepo -> ParseResult (Either LocalRepo RemoteRepo)
postProcessRepo lineno reponame repo0 = do
when (null reponame) $
syntaxError lineno $ "a 'repository' section requires the "
++ "repository name as an argument"
case uriScheme (remoteRepoURI repo0) of
-- TODO: check that there are no authority, query or fragment
-- Note: the trailing colon is important
"file+noindex:" -> do
let uri = remoteRepoURI repo0
return $ Left $ LocalRepo reponame (uriPath uri) (uriFragment uri == "#shared-cache")
_ -> do
let repo = repo0 { remoteRepoName = reponame }
when (remoteRepoKeyThreshold repo > length (remoteRepoRootKeys repo)) $
warning $ "'key-threshold' for repository "
++ show (remoteRepoName repo)
++ " higher than number of keys"
when (not (null (remoteRepoRootKeys repo)) && remoteRepoSecure repo /= Just True) $
warning $ "'root-keys' for repository "
++ show (remoteRepoName repo)
++ " non-empty, but 'secure' not set to True."
return $ Right repo
showConfig :: SavedConfig -> String
showConfig = showConfigWithComments mempty
......@@ -1297,7 +1329,7 @@ installDirsFields = map viewAsFieldDescr installDirsOptions
ppRemoteRepoSection :: RemoteRepo -> RemoteRepo -> Doc
ppRemoteRepoSection def vals = ppSection "repository" (remoteRepoName vals)
remoteRepoFields (Just def) vals
remoteRepoFields (Just def) vals
remoteRepoFields :: [FieldDescr RemoteRepo]
remoteRepoFields =
......
......@@ -177,6 +177,7 @@ fetchRepoTarball verbosity' repoCtxt repo pkgid = do
downloadRepoPackage = case repo of
RepoLocal{..} -> return (packageFile repo pkgid)
RepoLocalNoIndex{..} -> return (packageFile repo pkgid)
RepoRemote{..} -> do
transport <- repoContextGetTransport repoCtxt
......@@ -292,6 +293,7 @@ packageFile repo pkgid = packageDir repo pkgid
-- the tarball for a given @PackageIdentifier@ is stored.
--
packageDir :: Repo -> PackageId -> FilePath
packageDir (RepoLocalNoIndex (LocalRepo _ dir _) _) _pkgid = dir
packageDir repo pkgid = repoLocalDir repo
</> display (packageName pkgid)
</> display (packageVersion pkgid)
......
......@@ -17,7 +17,7 @@ import Prelude ()
import Distribution.Client.Compat.Prelude
import Distribution.Client.Types
( Repo(..), RemoteRepo(..) )
( Repo(..), RemoteRepo(..), LocalRepo (..), localRepoCacheKey )
import Distribution.Simple.Setup
( Flag(..), fromFlag, flagToMaybe )
import Distribution.Utils.NubList
......@@ -27,7 +27,7 @@ import Distribution.Client.HttpUtils
import Distribution.Verbosity
( Verbosity )
import Distribution.Simple.Utils
( info )
( info, warn )
import Control.Concurrent
( MVar, newMVar, modifyMVar )
......@@ -48,6 +48,8 @@ import qualified Hackage.Security.Client.Repository.Remote as Sec.Remote
import qualified Distribution.Client.Security.HTTP as Sec.HTTP
import qualified Distribution.Client.Security.DNS as Sec.DNS
import qualified System.FilePath.Posix as FilePath.Posix
-- ------------------------------------------------------------
-- * Global flags
-- ------------------------------------------------------------
......@@ -62,6 +64,7 @@ data GlobalFlags = GlobalFlags {
globalRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers.
globalCacheDir :: Flag FilePath,
globalLocalRepos :: NubList FilePath,
globalLocalNoIndexRepos :: NubList LocalRepo,
globalLogsDir :: Flag FilePath,
globalWorldFile :: Flag FilePath,
globalRequireSandbox :: Flag Bool,
......@@ -83,6 +86,7 @@ defaultGlobalFlags = GlobalFlags {
globalRemoteRepos = mempty,
globalCacheDir = mempty,
globalLocalRepos = mempty,
globalLocalNoIndexRepos = mempty,
globalLogsDir = mempty,
globalWorldFile = mempty,
globalRequireSandbox = Flag False,
......@@ -141,20 +145,25 @@ withRepoContext :: Verbosity -> GlobalFlags -> (RepoContext -> IO a) -> IO a
withRepoContext verbosity globalFlags =
withRepoContext'
verbosity
(fromNubList (globalRemoteRepos globalFlags))
(fromNubList (globalLocalRepos globalFlags))
(fromFlag (globalCacheDir globalFlags))
(flagToMaybe (globalHttpTransport globalFlags))
(flagToMaybe (globalIgnoreExpiry globalFlags))
(fromNubList (globalProgPathExtra globalFlags))
withRepoContext' :: Verbosity -> [RemoteRepo] -> [FilePath]
(fromNubList (globalRemoteRepos globalFlags))
(fromNubList (globalLocalRepos globalFlags))
(fromNubList (globalLocalNoIndexRepos globalFlags))
(fromFlag (globalCacheDir globalFlags))
(flagToMaybe (globalHttpTransport globalFlags))
(flagToMaybe (globalIgnoreExpiry globalFlags))
(fromNubList (globalProgPathExtra globalFlags))
withRepoContext' :: Verbosity -> [RemoteRepo] -> [FilePath] -> [LocalRepo]
-> FilePath -> Maybe String -> Maybe Bool
-> [FilePath]
-> (RepoContext -> IO a)
-> IO a
withRepoContext' verbosity remoteRepos localRepos
withRepoContext' verbosity remoteRepos localRepos localNoIndexRepos
sharedCacheDir httpTransport ignoreExpiry extraPaths = \callback -> do
for_ localNoIndexRepos $ \local ->
unless (FilePath.Posix.isAbsolute (localRepoPath local)) $
warn verbosity $ "file+noindex " ++ localRepoName local ++ " repository path is not absolute; this is fragile, and not recommended"
transportRef <- newMVar Nothing
let httpLib = Sec.HTTP.transportAdapter
verbosity
......@@ -162,6 +171,7 @@ withRepoContext' verbosity remoteRepos localRepos
initSecureRepos verbosity httpLib secureRemoteRepos $ \secureRepos' ->
callback RepoContext {
repoContextRepos = allRemoteRepos
++ allLocalNoIndexRepos
++ map RepoLocal localRepos
, repoContextGetTransport = getTransport transportRef
, repoContextWithSecureRepo = withSecureRepo secureRepos'
......@@ -170,6 +180,8 @@ withRepoContext' verbosity remoteRepos localRepos
where
secureRemoteRepos =
[ (remote, cacheDir) | RepoSecure remote cacheDir <- allRemoteRepos ]
allRemoteRepos :: [Repo]
allRemoteRepos =
[ (if isSecure then RepoSecure else RepoRemote) remote cacheDir
| remote <- remoteRepos
......@@ -177,6 +189,14 @@ withRepoContext' verbosity remoteRepos localRepos
isSecure = remoteRepoSecure remote == Just True
]
allLocalNoIndexRepos :: [Repo]
allLocalNoIndexRepos =
[ RepoLocalNoIndex local cacheDir
| local <- localNoIndexRepos
, let cacheDir | localRepoSharedCache local = sharedCacheDir </> localRepoCacheKey local
| otherwise = localRepoPath local
]
getTransport :: MVar (Maybe HttpTransport) -> IO HttpTransport
getTransport transportRef =
modifyMVar transportRef $ \mTransport -> do
......
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Distribution.Client.HashValue (
HashValue,
hashValue,
truncateHash,
showHashValue,
readFileHashValue,
hashFromTUF,
) where
import Distribution.Client.Compat.Prelude
import Prelude ()
import qualified Hackage.Security.Client as Sec
import qualified Crypto.Hash.SHA256 as SHA256
import qualified Data.ByteString.Base16 as Base16
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import Control.Exception (evaluate)
import System.IO (IOMode (..), withBinaryFile)
-----------------------------------------------
-- The specific choice of hash implementation
--
-- Is a crypto hash necessary here? One thing to consider is who controls the
-- inputs and what's the result of a hash collision. Obviously we should not
-- install packages we don't trust because they can run all sorts of code, but
-- if I've checked there's no TH, no custom Setup etc, is there still a
-- problem? If someone provided us a tarball that hashed to the same value as
-- some other package and we installed it, we could end up re-using that
-- installed package in place of another one we wanted. So yes, in general
-- there is some value in preventing intentional hash collisions in installed
-- package ids.
newtype HashValue = HashValue BS.ByteString
deriving (Eq, Generic, Show, Typeable)
-- Cannot do any sensible validation here. Although we use SHA256
-- for stuff we hash ourselves, we can also get hashes from TUF
-- and that can in principle use different hash functions in future.
--
-- Therefore, we simply derive this structurally.
instance Binary HashValue
instance Structured HashValue
-- | Hash some data. Currently uses SHA256.
--
hashValue :: LBS.ByteString -> HashValue
hashValue = HashValue . SHA256.hashlazy
showHashValue :: HashValue -> String
showHashValue (HashValue digest) = BS.unpack (Base16.encode digest)
-- | Hash the content of a file. Uses SHA256.
--
readFileHashValue :: FilePath -> IO HashValue
readFileHashValue tarball =
withBinaryFile tarball ReadMode $ \hnd ->
evaluate . hashValue =<< LBS.hGetContents hnd
-- | Convert a hash from TUF metadata into a 'PackageSourceHash'.
--
-- Note that TUF hashes don't necessarily have to be SHA256, since it can
-- support new algorithms in the future.
--
hashFromTUF :: Sec.Hash -> HashValue
hashFromTUF (Sec.Hash hashstr) =
--TODO: [code cleanup] either we should get TUF to use raw bytestrings or
-- perhaps we should also just use a base16 string as the internal rep.
case Base16.decode (BS.pack hashstr) of
(hash, trailing) | not (BS.null hash) && BS.null trailing
-> HashValue hash
_ -> error "hashFromTUF: cannot decode base16 hash"
-- | Truncate a 32 byte SHA256 hash to a given number of bytes.
--
-- For example, 20 bytes render as 40 hex chars, which we use for unit-ids.
-- Or even 4 bytes for 'hashedInstalledPackageIdShort'.
--
truncateHash :: Int -> HashValue -> HashValue
truncateHash n (HashValue h) = HashValue (BS.take n h)
......@@ -20,13 +20,6 @@ module Distribution.Client.PackageHash (
-- ** Platform-specific variations
hashedInstalledPackageIdLong,
hashedInstalledPackageIdShort,
-- * Low level hash choice
HashValue,
hashValue,
showHashValue,
readFileHashValue,
hashFromTUF,
) where
import Prelude ()
......@@ -48,23 +41,16 @@ import Distribution.Pretty (prettyShow)
import Distribution.Deprecated.Text
( display )
import Distribution.Types.PkgconfigVersion (PkgconfigVersion)
import Distribution.Client.HashValue
import Distribution.Client.Types
( InstalledPackageId )
import qualified Distribution.Solver.Types.ComponentDeps as CD
import qualified Hackage.Security.Client as Sec
import qualified Crypto.Hash.SHA256 as SHA256
import qualified Data.ByteString.Base16 as Base16
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Function (on)
import Control.Exception (evaluate)
import System.IO (withBinaryFile, IOMode(..))
-------------------------------
-- Calculating package hashes
......@@ -121,15 +107,11 @@ hashedInstalledPackageIdShort pkghashinputs@PackageHashInputs{pkgHashPkgId} =
-- max length now 64
[ truncateStr 14 (display name)
, truncateStr 8 (display version)
, showHashValue (truncateHash (hashPackageHashInputs pkghashinputs))
, showHashValue (truncateHash 20 (hashPackageHashInputs pkghashinputs))
]
where
PackageIdentifier name version = pkgHashPkgId
-- Truncate a 32 byte SHA256 hash to 160bits, 20 bytes :-(
-- It'll render as 40 hex chars.
truncateHash (HashValue h) = HashValue (BS.take 20 h)
-- Truncate a string, with a visual indication that it is truncated.
truncateStr n s | length s <= n = s
| otherwise = take (n-1) s ++ "_"
......@@ -163,11 +145,10 @@ hashedInstalledPackageIdVeryShort pkghashinputs@PackageHashInputs{pkgHashPkgId}
intercalate "-"
[ filter (not . flip elem "aeiou") (display name)
, display version
, showHashValue (truncateHash (hashPackageHashInputs pkghashinputs))
, showHashValue (truncateHash 4 (hashPackageHashInputs pkghashinputs))
]
where
PackageIdentifier name version = pkgHashPkgId
truncateHash (HashValue h) = HashValue (BS.take 4 h)
-- | All the information that contributes to a package's hash, and thus its
-- 'InstalledPackageId'.
......@@ -330,57 +311,3 @@ renderPackageHashInputs PackageHashInputs{
| otherwise = entry key format value
showFlagAssignment = unwords . map showFlagValue . sortBy (compare `on` fst) . unFlagAssignment
-----------------------------------------------
-- The specific choice of hash implementation
--
-- Is a crypto hash necessary here? One thing to consider is who controls the
-- inputs and what's the result of a hash collision. Obviously we should not
-- install packages we don't trust because they can run all sorts of code, but
-- if I've checked there's no TH, no custom Setup etc, is there still a
-- problem? If someone provided us a tarball that hashed to the same value as
-- some other package and we installed it, we could end up re-using that
-- installed package in place of another one we wanted. So yes, in general
-- there is some value in preventing intentional hash collisions in installed
-- package ids.
newtype HashValue = HashValue BS.ByteString
deriving (Eq, Generic, Show, Typeable)
-- Cannot do any sensible validation here. Although we use SHA256
-- for stuff we hash ourselves, we can also get hashes from TUF
-- and that can in principle use different hash functions in future.
--
-- Therefore, we simply derive this structurally.
instance Binary HashValue
instance Structured HashValue
-- | Hash some data. Currently uses SHA256.
--
hashValue :: LBS.ByteString -> HashValue
hashValue = HashValue . SHA256.hashlazy
showHashValue :: HashValue -> String
showHashValue (HashValue digest) = BS.unpack (Base16.encode digest)