mirror of https://github.com/commercialhaskell/stackage-server.git
Store fallback cabal files into pantry, and a few follow-up improvements:
* Fix atomic durable writing, since the issue in RIO was fixed
* Log information about falling back onto the core-cabal-files repo
* Convert the conduit pipe to Maybe fishes (Kleisli composition with >=>)
* Make sure module names, package name and version are added for fallback cabal files
This commit is contained in:
parent 8e247dde03
commit bdcdd1887a
@@ -73,9 +73,7 @@ checkSpam pname inner = do
$(widgetFile "spam-package")
else inner

packagePage :: Maybe SnapshotPackageInfo
-> PackageNameP
-> Handler Html
packagePage :: Maybe SnapshotPackageInfo -> PackageNameP -> Handler Html
packagePage mspi pname =
track "Handler.Package.packagePage" $
checkSpam pname $
@@ -86,8 +84,6 @@ packagePage mspi pname =
Just spi -> handlePackage $ Right spi

handlePackage :: Either HackageCabalInfo SnapshotPackageInfo -> Handler Html
handlePackage epi = do
(isDeprecated, inFavourOf) <- getDeprecated pname
@@ -46,6 +46,7 @@ import Pantry.Internal.Stackage (HackageTarballResult(..),
import Path (parseAbsDir, toFilePath)
import RIO
import RIO.Directory
import RIO.File
import RIO.FilePath
import RIO.List as L
import qualified RIO.Map as Map
@@ -115,7 +116,6 @@ newHoogleLocker env man = mkSingleRun hoogleLocker
hoogleLocker name =
runRIO env $ do
let fp = T.unpack $ hoogleKey name
fptmp = fp <.> "tmp"
exists <- doesFileExist fp
if exists
then return $ Just fp
@@ -126,24 +126,17 @@ newHoogleLocker env man = mkSingleRun hoogleLocker
case responseStatus res of
status
| status == status200 -> do
createDirectoryIfMissing True $ takeDirectory fptmp
-- TODO: https://github.com/commercialhaskell/rio/issues/160
-- withBinaryFileDurableAtomic fp WriteMode $ \h ->
-- runConduitRes $
-- bodyReaderSource (responseBody res) .| ungzip .|
-- sinkHandle h
runConduitRes $
createDirectoryIfMissing True $ takeDirectory fp
withBinaryFileDurableAtomic fp WriteMode $ \h ->
runConduitRes $
bodyReaderSource (responseBody res) .| ungzip .|
sinkFile fptmp
renamePath fptmp fp
sinkHandle h
return $ Just fp
| status == status404 -> do
logDebug $ "NotFound: " <> display (hoogleUrl name)
return Nothing
| otherwise -> do
body <- liftIO $ brConsume $ responseBody res
-- TODO: ideally only consume the body when log level set to
-- LevelDebug, will require a way to get LogLevel from LogFunc
mapM_ (logDebug . displayBytesUtf8) body
return Nothing
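The hunk above (and the createHoogleDB hunk further down) drops the download-to-".tmp"-then-rename workaround in favour of RIO's withBinaryFileDurableAtomic, now that the rio issue referenced in the removed comment is fixed. Below is a minimal standalone sketch of the adopted pattern; downloadTo and the plain-IO setting are illustrative, not code from this repo.

{-# LANGUAGE NoImplicitPrelude #-}
-- Sketch: stream an HTTP response body through a handle opened with
-- withBinaryFileDurableAtomic, which only makes the target file visible
-- once the write has completed and been synced, so no explicit
-- "<fp>.tmp" plus rename step is needed.
module DownloadSketch (downloadTo) where

import Conduit (runConduitRes, sinkHandle, (.|))
import Network.HTTP.Client (Manager, Request, responseBody, withResponse)
import Network.HTTP.Client.Conduit (bodyReaderSource)
import RIO
import RIO.Directory (createDirectoryIfMissing)
import RIO.File (withBinaryFileDurableAtomic)
import RIO.FilePath (takeDirectory)

-- | Illustrative helper (not from this repo): download a request body to a file.
downloadTo :: Manager -> Request -> FilePath -> IO ()
downloadTo man req fp = do
  createDirectoryIfMissing True (takeDirectory fp)
  withResponse req man $ \res ->
    -- The handle is backed by a temporary file that is atomically moved
    -- into place (and durably synced) when the callback finishes.
    withBinaryFileDurableAtomic fp WriteMode $ \h ->
      runConduitRes $ bodyReaderSource (responseBody res) .| sinkHandle h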
@@ -251,7 +244,9 @@ makeCorePackageGetters = do
"Error parsing 'global-hints.yaml' file: " <> fromString (displayException exc)
return mempty

getCoreCabalFiles :: FilePath -> RIO StackageCron (Map PackageIdentifierP GenericPackageDescription)
getCoreCabalFiles ::
FilePath
-> RIO StackageCron (Map PackageIdentifierP (GenericPackageDescription, CabalFileIds))
getCoreCabalFiles rootDir = do
coreCabalFilesDir <- getCoreCabalFilesDir rootDir
cabalFileNames <- getDirectoryContents coreCabalFilesDir
@@ -262,11 +257,10 @@ getCoreCabalFiles rootDir = do
Nothing -> do
logError $ "Invalid package identifier: " <> fromString cabalFileName
pure Nothing
Just pid@(PackageIdentifierP pname _) -> do
mgpd <-
readFileBinary (coreCabalFilesDir </> cabalFileName) >>=
parseCabalBlobMaybe pname
pure ((,) pid <$> mgpd)
Just pid -> do
cabalBlob <- readFileBinary (coreCabalFilesDir </> cabalFileName)
mCabalInfo <- run $ addCabalFile pid cabalBlob
pure ((,) pid <$> mCabalInfo)
pure $ Map.fromList $ catMaybes cabalFiles

-- | Core package info rarely changes between the snapshots, therefore it would be wasteful to
@@ -275,7 +269,7 @@ getCoreCabalFiles rootDir = do
-- package on subsequent invocations.
makeCorePackageGetter ::
CompilerP
-> Map PackageIdentifierP GenericPackageDescription
-> Map PackageIdentifierP (GenericPackageDescription, CabalFileIds)
-> PackageNameP
-> VersionP
-> RIO StackageCron (Maybe CorePackageGetter)
@@ -284,9 +278,17 @@ makeCorePackageGetter _compiler fallbackCabalFileMap pname ver =
Nothing -> do
logWarn $
"Core package from global-hints: '" <> display pid <> "' was not found in pantry."
pure (pure . (,,,) Nothing Nothing pid <$> Map.lookup pid fallbackCabalFileMap)
forM (Map.lookup pid fallbackCabalFileMap) $ \(gpd, cabalFileIds) -> do
logInfo $
"Falling back on '" <> display pid <>
".cabal' file from the commercialhaskell/core-cabal-files repo"
pure $ pure (Left cabalFileIds, Nothing, pid, gpd)
Just (hackageCabalId, blobId, _) -> do
pkgInfoRef <- newIORef Nothing -- use for caching of pkgInfo
let getCabalFileIdsTree gpd =
\case
Just tree -> pure $ Right tree
Nothing -> Left <$> getCabalFileIds blobId gpd
let getMemoPackageInfo =
readIORef pkgInfoRef >>= \case
Just pkgInfo -> return pkgInfo
@@ -296,17 +298,21 @@ makeCorePackageGetter _compiler fallbackCabalFileMap pname ver =
htr <- getHackageTarball pir Nothing
case htrFreshPackageInfo htr of
Just (gpd, treeId) -> do
mTree <- run $ getEntity treeId
let pkgInfo = (mTree, Just hackageCabalId, pid, gpd)
eTree <-
run $ do
mTree <- getEntity treeId
getCabalFileIdsTree gpd mTree
let pkgInfo = (eTree, Just hackageCabalId, pid, gpd)
gpd `deepseq` writeIORef pkgInfoRef $ Just pkgInfo
pure pkgInfo
Nothing -> do
(cabalBlob, mTree) <-
run
((,) <$> loadBlobById blobId <*>
getTreeForKey (packageTreeKey (htrPackage htr)))
let gpd = parseCabalBlob cabalBlob
pkgInfo = (mTree, Just hackageCabalId, pid, gpd)
(gpd, eCabalTree) <-
run $ do
cabalBlob <- loadBlobById blobId
let gpd = parseCabalBlob cabalBlob
mTree <- getTreeForKey (packageTreeKey (htrPackage htr))
(,) gpd <$> getCabalFileIdsTree gpd mTree
let pkgInfo = (eCabalTree, Just hackageCabalId, pid, gpd)
gpd `deepseq` writeIORef pkgInfoRef $ Just pkgInfo
pure pkgInfo
pure $ Just getMemoPackageInfo
@@ -348,11 +354,12 @@ addPantryPackage sid compiler isHidden flags (PantryPackage pc treeKey) = do
, tid /= treeId -> do
lift $ logError $ "Pantry Tree Key mismatch for: " <> display pc
pure False
mTree@(Just (Entity _ Tree {treeCabal}))
Just tree@(Entity _ Tree {treeCabal})
| Just treeCabal' <- treeCabal -> do
gpd <- getCachedGPD treeCabal' mgpd
let mhcid = Just hcid
addSnapshotPackage sid compiler Hackage mTree mhcid isHidden flags pid gpd
eTree = Right tree
addSnapshotPackage sid compiler Hackage eTree mhcid isHidden flags pid gpd
pure True
_ -> do
lift $ logError $ "Pantry is missing the source tree for " <> display pc
@@ -386,9 +393,7 @@ checkForDocs snapshotId snapName = do
runConduit $
AWS.paginate (req bucketName) .| concatMapC (^. lovrsContents) .|
mapC (\obj -> toText (obj ^. oKey)) .|
concatMapC (T.stripSuffix ".html") .|
concatMapC (T.stripPrefix prefix) .|
concatMapC pathToPackageModule .|
concatMapC (T.stripSuffix ".html" >=> T.stripPrefix prefix >=> pathToPackageModule) .|
sinkList
-- it is faster to download all modules in this snapshot, than process them with a conduit all
-- the way to the database.
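The "Maybe fishes" bullet from the commit message refers to the hunk above: three separate concatMapC stages, each dropping non-matching elements, collapse into a single stage whose transformation is the Kleisli ("fish") composition (>=>) of the three Maybe-returning functions. A small self-contained sketch of the same idea; parseModulePath is an illustrative stand-in for pathToPackageModule, not the real function.

{-# LANGUAGE OverloadedStrings #-}
-- Sketch: concatMapC over a Maybe-returning function both maps and filters,
-- so chaining the functions with (>=>) replaces chaining conduit stages.
module FishSketch (main) where

import Conduit (concatMapC, runConduit, sinkList, yieldMany, (.|))
import Control.Monad ((>=>))
import Data.Text (Text)
import qualified Data.Text as T

-- Illustrative stand-in for pathToPackageModule.
parseModulePath :: Text -> Maybe (Text, Text)
parseModulePath t =
  case T.splitOn "/" t of
    [pkg, modName] -> Just (pkg, modName)
    _              -> Nothing

main :: IO ()
main = do
  let prefix = "lts/"
      keys = ["lts/aeson-1.4.6.0/Data.Aeson.html", "lts/style.css"] :: [Text]
  -- Before: .| concatMapC (T.stripSuffix ".html")
  --         .| concatMapC (T.stripPrefix prefix)
  --         .| concatMapC parseModulePath
  -- After: one stage, composed with (>=>) over Maybe.
  mods <-
    runConduit $
      yieldMany keys .|
      concatMapC (T.stripSuffix ".html" >=> T.stripPrefix prefix >=> parseModulePath) .|
      sinkList
  print mods  -- [("aeson-1.4.6.0","Data.Aeson")]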
@@ -515,7 +520,7 @@ decideOnSnapshotUpdate SnapshotFileInfo {sfiSnapName, sfiUpdatedOn, sfiSnapshotF
_ -> return Nothing

type CorePackageGetter
= RIO StackageCron ( Maybe (Entity Tree)
= RIO StackageCron ( Either CabalFileIds (Entity Tree)
, Maybe HackageCabalId
, PackageIdentifierP
, GenericPackageDescription)
@@ -617,8 +622,8 @@ updateSnapshot corePackageGetters snapshotId snapName updatedOn SnapshotFile {..
]
Just compilerCorePackages ->
forM_ compilerCorePackages $ \getCorePackageInfo -> do
(mTree, mhcid, pid, gpd) <- getCorePackageInfo
run $ addSnapshotPackage snapshotId sfCompiler Core mTree mhcid False mempty pid gpd
(eTree, mhcid, pid, gpd) <- getCorePackageInfo
run $ addSnapshotPackage snapshotId sfCompiler Core eTree mhcid False mempty pid gpd
return $ do
checkForDocsSucceeded <-
tryAny (checkForDocs snapshotId snapName) >>= \case
@@ -726,12 +731,8 @@ createHoogleDB snapshotId snapName =
withResponseUnliftIO req {decompress = const True} man $ \res -> do
throwErrorStatusCodes req res
createDirectoryIfMissing True $ takeDirectory tarFP
--withBinaryFileDurableAtomic tarFP WriteMode $ \tarHandle ->
--FIXME: https://github.com/commercialhaskell/rio/issues/160
let tmpTarFP = tarFP <.> "tmp"
withBinaryFile tmpTarFP WriteMode $ \tarHandle ->
withBinaryFileDurableAtomic tarFP WriteMode $ \tarHandle ->
runConduitRes $ bodyReaderSource (responseBody res) .| sinkHandle tarHandle
renameFile tmpTarFP tarFP
void $ tryIO $ removeDirectoryRecursive bindir
void $ tryIO $ removeFile outname
createDirectoryIfMissing True bindir
@@ -1,5 +1,5 @@
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ViewPatterns #-}
module Stackage.Database.PackageInfo
( PackageInfo(..)
, Identifier(..)
@@ -14,10 +14,11 @@ module Stackage.Database.PackageInfo
) where

import CMarkGFM
import Data.Coerce
import Data.Char (isSpace)
import Data.Coerce
import Data.Map.Merge.Strict as Map
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Distribution.Compiler (CompilerFlavor(GHC))
import Distribution.Package (Dependency(..))
import Distribution.PackageDescription (CondTree(..), Condition(..),
@@ -26,28 +27,29 @@ import Distribution.PackageDescription (CondTree(..), Condition(..),
GenericPackageDescription, author,
condExecutables, condLibrary,
description, genPackageFlags, homepage,
license, maintainer,
packageDescription, synopsis)
license, maintainer, packageDescription,
synopsis)
import Distribution.PackageDescription.Parsec (parseGenericPackageDescription,
runParseResult)
import Distribution.Pretty (prettyShow)
import Distribution.System (Arch(X86_64), OS(Linux))
import Distribution.Types.CondTree (CondBranch(..))
import Distribution.Types.Library (exposedModules)
import Distribution.Types.PackageDescription (PackageDescription(package))
import Distribution.Types.VersionRange (VersionRange, intersectVersionRanges,
normaliseVersionRange, withinRange)
import Distribution.Version (simplifyVersionRange)
import qualified Data.Text.Encoding as T
import RIO
import qualified RIO.Map as Map
import qualified RIO.Map.Unchecked as Map (mapKeysMonotonic)
import Stackage.Database.Haddock (renderHaddock)
import Stackage.Database.Types (Changelog(..), Readme(..))
import Text.Blaze.Html (Html, preEscapedToHtml, toHtml)
import Types (CompilerP(..), FlagNameP(..), ModuleNameP(..), PackageNameP(..),
SafeFilePath, VersionP(..), VersionRangeP(..), unSafeFilePath)
import Yesod.Form.Fields (Textarea(..))
import Text.Email.Validate
import Types (CompilerP(..), FlagNameP(..), ModuleNameP(..), PackageIdentifierP,
PackageNameP(..), SafeFilePath, VersionP(..), VersionRangeP(..),
unSafeFilePath, dtDisplay)
import Yesod.Form.Fields (Textarea(..))


data PackageInfo = PackageInfo
@@ -79,7 +81,7 @@ toPackageInfo gpd mreadme mchangelog =
, piHomepage =
case T.strip $ T.pack $ homepage pd of
"" -> Nothing
x -> Just x
x -> Just x
, piLicenseName = T.pack $ prettyShow $ license pd
}
where
@@ -125,17 +127,23 @@ parseCabalBlob cabalBlob =

parseCabalBlobMaybe ::
(MonadIO m, MonadReader env m, HasLogFunc env)
=> PackageNameP
=> PackageIdentifierP
-> ByteString
-> m (Maybe GenericPackageDescription)
parseCabalBlobMaybe packageName cabalBlob =
parseCabalBlobMaybe pidp cabalBlob =
case snd $ runParseResult $ parseGenericPackageDescription cabalBlob of
Left err ->
Nothing <$
logError
("Problem parsing cabal blob for '" <> display packageName <> "': " <>
displayShow err)
Right pgd -> pure $ Just pgd
("Problem parsing cabal blob for '" <> display pidp <> "': " <> displayShow err)
Right gpd -> do
let pid = package (packageDescription gpd)
unless (textDisplay (dtDisplay pid :: Utf8Builder) == textDisplay pidp) $
logError $
"Supplied package identifier: '" <> display pidp <>
"' does not match the one in cabal file: '" <>
dtDisplay pid
pure $ Just gpd

getCheckCond ::
CompilerP -> Map FlagName Bool -> GenericPackageDescription -> Condition ConfVar -> Bool
@@ -1,6 +1,7 @@
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Stackage.Database.Query
(
@@ -52,13 +53,15 @@ module Stackage.Database.Query
, getTreeForKey
, treeCabal
-- ** Stackage server
, CabalFileIds
, addCabalFile
, getCabalFileIds
, addSnapshotPackage
, getHackageCabalByRev0
, getHackageCabalByKey
, snapshotMarkUpdated
, insertSnapshotName
, markModuleHasDocs
, insertSnapshotPackageModules
, insertDeps
-- ** For Hoogle db creation
, lastLtsNightly
@@ -71,11 +74,14 @@ import qualified Data.List as L
import Database.Esqueleto
import Database.Esqueleto.Internal.Language (FromPreprocess)
import Database.Esqueleto.Internal.Sql
import Distribution.Types.PackageId (PackageIdentifier(PackageIdentifier))
import Distribution.PackageDescription (packageDescription)
import Distribution.Types.PackageDescription (PackageDescription(package))
import qualified Database.Persist as P
import Pantry.Internal.Stackage (EntityField(..), PackageName,
Version, getBlobKey, getPackageNameById,
getPackageNameId, getTreeForKey, getVersionId,
loadBlobById, mkSafeFilePath)
loadBlobById, storeBlob, mkSafeFilePath)
import RIO hiding (on, (^.))
import qualified RIO.Map as Map
import qualified RIO.Set as Set
@@ -776,6 +782,75 @@ insertDeps pid snapshotPackageId dependencies =
display dep
return $ Just dep

data CabalFileIds = CabalFileIds
{ cfiPackageNameId :: !PackageNameId
, cfiVersionId :: !VersionId
, cfiCabalBlobId :: !(Maybe BlobId)
, cfiModuleNameIds :: ![ModuleNameId]
}

getCabalFileIds ::
HasLogFunc env
=> BlobId
-> GenericPackageDescription
-> ReaderT SqlBackend (RIO env) CabalFileIds
getCabalFileIds cabalBlobId gpd = do
let PackageIdentifier name ver = package (packageDescription gpd)
packageNameId <- getPackageNameId name
versionId <- getVersionId ver
moduleNameIds <- mapM insertModuleSafe (extractModuleNames gpd)
pure
CabalFileIds
{ cfiPackageNameId = packageNameId
, cfiVersionId = versionId
, cfiCabalBlobId = Just cabalBlobId
, cfiModuleNameIds = moduleNameIds
}

addCabalFile ::
HasLogFunc env
=> PackageIdentifierP
-> ByteString
-> ReaderT SqlBackend (RIO env) (Maybe (GenericPackageDescription, CabalFileIds))
addCabalFile pid cabalBlob = do
mgpd <- lift $ parseCabalBlobMaybe pid cabalBlob
forM mgpd $ \gpd -> do
(cabalBlobId, _) <- storeBlob cabalBlob
cabalIds <- getCabalFileIds cabalBlobId gpd
pure (gpd, cabalIds)

getPackageIds ::
GenericPackageDescription
-> Either CabalFileIds (Entity Tree)
-> ReaderT SqlBackend (RIO env) (CabalFileIds, Maybe (TreeId, BlobId))
getPackageIds gpd =
\case
Left cabalFileIds -> pure (cabalFileIds, Nothing)
Right (Entity treeId tree)
-- -- TODO: Remove Maybe from cfiCabalBlobId and
-- -- Generate cabal file from package.yaml:
-- case treeCabal tree of
-- Just cabalBlobId -> pure cabalBlobId
-- Nothing -> do
-- let rawMetaData = RawPackageMetadata {
-- rpmName = Just pname
-- , rpmVersion = Just pver
-- , rpmTreeKey = treeKey tree
-- }
-- rpli = ... get
-- generateHPack (RPLIArchive / RPLIRepo ..) treeId treeVersion tree
-- ...
-> do
moduleNameIds <- mapM insertModuleSafe (extractModuleNames gpd)
let cabalFileIds =
CabalFileIds
{ cfiPackageNameId = treeName tree
, cfiVersionId = treeVersion tree
, cfiCabalBlobId = treeCabal tree
, cfiModuleNameIds = moduleNameIds
}
pure (cabalFileIds, Just (treeId, treeKey tree))

-- TODO: Optimize, whenever package is already in one snapshot only create the modules and new
-- SnapshotPackage
addSnapshotPackage ::
@@ -783,30 +858,27 @@ addSnapshotPackage ::
=> SnapshotId
-> CompilerP
-> Origin
-> Maybe (Entity Tree)
-> Either CabalFileIds (Entity Tree)
-> Maybe HackageCabalId
-> Bool
-> Map FlagNameP Bool
-> PackageIdentifierP
-> GenericPackageDescription
-> ReaderT SqlBackend (RIO env) ()
addSnapshotPackage snapshotId compiler origin mTree mHackageCabalId isHidden flags pid gpd = do
let PackageIdentifierP pname pver = pid
mTreeId = entityKey <$> mTree
packageNameId <-
maybe (getPackageNameId (unPackageNameP pname)) (pure . treeName . entityVal) mTree
versionId <- maybe (getVersionId (unVersionP pver)) (pure . treeVersion . entityVal) mTree
addSnapshotPackage snapshotId compiler origin eCabalTree mHackageCabalId isHidden flags pid gpd = do
(CabalFileIds{..}, mTree) <- getPackageIds gpd eCabalTree
let mTreeId = fst <$> mTree
mrevision <- maybe (pure Nothing) getHackageRevision mHackageCabalId
mreadme <- fromMaybe (pure Nothing) $ getContentTreeEntryId <$> mTreeId <*> mreadmeQuery
mchangelog <- fromMaybe (pure Nothing) $ getContentTreeEntryId <$> mTreeId <*> mchangelogQuery
let snapshotPackage =
SnapshotPackage
{ snapshotPackageSnapshot = snapshotId
, snapshotPackagePackageName = packageNameId
, snapshotPackageVersion = versionId
, snapshotPackagePackageName = cfiPackageNameId
, snapshotPackageVersion = cfiVersionId
, snapshotPackageRevision = mrevision
, snapshotPackageCabal = treeCabal =<< entityVal <$> mTree
, snapshotPackageTreeBlob = treeKey . entityVal <$> mTree
, snapshotPackageCabal = cfiCabalBlobId
, snapshotPackageTreeBlob = snd <$> mTree
, snapshotPackageOrigin = origin
, snapshotPackageOriginUrl = "" -- TODO: add
, snapshotPackageSynopsis = getSynopsis gpd
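With this change a core package's cabal information can come from two places: a real pantry Tree (packages already stored in pantry) or, for the fallback cabal files from commercialhaskell/core-cabal-files, a bare CabalFileIds record with no tree. addSnapshotPackage therefore takes Either CabalFileIds (Entity Tree), and getPackageIds (shown in the previous hunk) normalizes both sides. A rough standalone sketch of that shape, using simplified stand-in types; the real ids and Tree come from pantry/persistent, and only the Either-based flow mirrors the diff.

-- Sketch of the Either CabalFileIds (Entity Tree) flow, with stand-in types.
module EitherTreeSketch where

-- Stand-ins for the database ids used in the real code.
newtype PackageNameId = PackageNameId Int deriving Show
newtype VersionId     = VersionId Int     deriving Show
newtype BlobId        = BlobId Int        deriving Show
newtype TreeId        = TreeId Int        deriving Show

-- Mirrors the CabalFileIds record added in this commit.
data CabalFileIds = CabalFileIds
  { cfiPackageNameId :: PackageNameId
  , cfiVersionId     :: VersionId
  , cfiCabalBlobId   :: Maybe BlobId
  } deriving Show

-- Simplified stand-in for pantry's Tree entity.
data Tree = Tree
  { treeId      :: TreeId
  , treeName    :: PackageNameId
  , treeVersion :: VersionId
  , treeCabal   :: Maybe BlobId
  } deriving Show

-- Analogue of getPackageIds: both sources normalize to CabalFileIds, and
-- only a real pantry tree yields a tree handle for readme/changelog lookups.
getPackageIds :: Either CabalFileIds Tree -> (CabalFileIds, Maybe TreeId)
getPackageIds (Left cabalFileIds) = (cabalFileIds, Nothing)
getPackageIds (Right tree) =
  ( CabalFileIds
      { cfiPackageNameId = treeName tree
      , cfiVersionId     = treeVersion tree
      , cfiCabalBlobId   = treeCabal tree
      }
  , Just (treeId tree)
  )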
@@ -831,7 +903,8 @@ addSnapshotPackage snapshotId compiler origin mTree mHackageCabalId isHidden fla
forM_ msnapshotPackageId $ \snapshotPackageId -> do
_ <- insertDeps pid snapshotPackageId (extractDependencies compiler flags gpd)
-- TODO: collect all missing dependencies and make a report
insertSnapshotPackageModules snapshotPackageId (extractModuleNames gpd)
forM_ cfiModuleNameIds $ \modNameId -> do
void $ P.insertBy (SnapshotPackageModule snapshotPackageId modNameId False)

getContentTreeEntryId ::
TreeId
@@ -978,16 +1051,6 @@ getSnapshotPackageCabalBlob snapshotId pname =
(pn ^. PackageNameName ==. val pname))
return (blob ^. BlobContents)


-- | Add all modules available for the package in a particular snapshot. Initially they are marked
-- as without available documentation.
insertSnapshotPackageModules ::
SnapshotPackageId -> [ModuleNameP] -> ReaderT SqlBackend (RIO env) ()
insertSnapshotPackageModules snapshotPackageId =
mapM_ $ \modName -> do
moduleId <- insertModuleSafe modName
void $ P.insertBy (SnapshotPackageModule snapshotPackageId moduleId False)

-- | Idempotent and thread safe way of adding a new module.
insertModuleSafe :: ModuleNameP -> ReaderT SqlBackend (RIO env) ModuleNameId
insertModuleSafe modName = do
@@ -50,7 +50,7 @@ import Database.Persist.Postgresql
import Database.Persist.TH
import Pantry (HasPantryConfig(..), Revision)
import Pantry.Internal.Stackage as PS (BlobId, HackageCabalId, ModuleNameId,
PackageNameId, Tree(..), TreeEntry(..),
PackageNameId, Tree(..),
TreeEntryId, TreeId, Unique(..),
VersionId, unBlobKey)
import Pantry.Internal.Stackage (PantryConfig(..), Storage(..))
@@ -51,8 +51,7 @@ import qualified Data.Text as T
import Data.Text.Read (decimal)
import Network.AWS (Env, HasEnv(..))
import Pantry (BlobKey(..), CabalFileInfo(..), FileSize(..),
HasPantryConfig(..), PackageIdentifierRevision(..), TreeKey(..))
import Pantry.Internal.Stackage as Pantry (PantryConfig)
HasPantryConfig(..), PantryConfig, PackageIdentifierRevision(..), TreeKey(..))
import Pantry.SHA256 (fromHexText)
import RIO
import RIO.Process (HasProcessContext(..), ProcessContext)
@@ -22,7 +22,7 @@ import Data.These
import RIO
import Stackage.Database (GetStackageDatabase, SnapshotId,
getPackagesForSnapshot)
import Stackage.Database.Types (PackageListingInfo(..), SnapName)
import Stackage.Database.Types (PackageListingInfo(..))
import Types
import Web.PathPieces
src/Types.hs
@@ -61,18 +61,16 @@ import Database.Persist
import Database.Persist.Sql (PersistFieldSql(sqlType))
import qualified Distribution.ModuleName as DT (components, fromComponents,
validModuleComponent)
import Distribution.PackageDescription (FlagName, GenericPackageDescription)
import Distribution.PackageDescription (GenericPackageDescription)
import Distribution.Parsec as DT (Parsec)
import Distribution.Pretty as DT (Pretty)
import qualified Distribution.Text as DT (display, simpleParse)
import Distribution.Types.VersionRange (VersionRange)
import Distribution.Version (mkVersion, versionNumbers)
import Pantry (Revision(..))
import Pantry (FlagName, Revision(..), packageNameString, parsePackageName,
parseVersionThrowing, parseVersion, versionString)
import Pantry.Internal.Stackage (ModuleNameP(..), PackageNameP(..),
SafeFilePath, VersionP(..), packageNameString,
parsePackageName, parseVersion,
parseVersionThrowing, unSafeFilePath,
versionString)
SafeFilePath, VersionP(..), unSafeFilePath)
import RIO
import qualified RIO.Map as Map
import RIO.Time (Day)
@@ -9,7 +9,7 @@ extra-deps:
- yesod-gitrepo-0.3.0@sha256:7aad996935065726ce615c395d735cc01dcef3993b1788f670f6bfc866085e02,1191
- lukko-0.1.1.1@sha256:5c674bdd8a06b926ba55d872abe254155ed49a58df202b4d842b643e5ed6bcc9,4289
- github: commercialhaskell/pantry
commit: 86462a97c4d8091993cc6e246fd0f2ae5aa608f0
commit: ed48bebc30e539280ad7e13680480be2b87b97ea
- github: fpco/casa
commit: fc0ed26858bfc4f2966ed2dfb2871bae9266dda6
subdirs: