Upgrade to lts-8.12

Michael Snoyman 2017-06-20 14:55:47 +03:00
parent 48e944ab81
commit 13663c2ce9
12 changed files with 31 additions and 38 deletions

View File

@@ -34,7 +34,7 @@ loadWebsiteContent dir = do
     readMarkdown fp = fmap (markdown def
         { msXssProtect = False
         , msAddHeadingId = True
-        })
+        } . fromStrict . decodeUtf8)
         $ readFile $ dir </> fp

 data StackRelease = StackRelease
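
The `readFile` here (classy-prelude's) yields a strict ByteString in this snapshot, while `markdown` consumes lazy Text, which is what the added `fromStrict . decodeUtf8` bridges. A minimal, self-contained sketch of that pipeline (assuming markdown's `markdown :: MarkdownSettings -> TL.Text -> Html`; the helper name is illustrative):

    -- Sketch only: decode a strict ByteString, widen it to lazy Text, render it.
    import qualified Data.ByteString as BS
    import Data.Default (def)
    import Data.Text.Encoding (decodeUtf8)
    import qualified Data.Text.Lazy as TL
    import Text.Blaze.Html (Html)
    import Text.Markdown (markdown, msAddHeadingId, msXssProtect)

    renderMarkdownFile :: FilePath -> IO Html
    renderMarkdownFile fp = do
        bs <- BS.readFile fp  -- strict ByteString, like classy-prelude's readFile
        let settings = def { msXssProtect = False, msAddHeadingId = True }
        return $ markdown settings (TL.fromStrict (decodeUtf8 bs))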

View File

@@ -3,7 +3,7 @@ module Handler.BuildPlan where

 import Import hiding (get, PackageName (..), Version (..), DList)
 import Stackage.Types
-import Stackage.BuildPlan
+import Stackage.ShowBuildPlan
 import Stackage.Database

 getBuildPlanR :: SnapName -> Handler TypedContent

View File

@@ -9,7 +9,6 @@ import Yesod.GitRepo
 import Data.WebsiteContent
 import Data.Aeson.Parser (json)
 import Data.Conduit.Attoparsec (sinkParser)
-import Data.Monoid (First (..))

 getDownloadStackListR :: Handler Html
 getDownloadStackListR = track "Handler.DownloadStack.getDownloadStackListR" $ do

View File

@@ -48,7 +48,7 @@ getHaddockR slug rest
                 ]
             addExtra t@(EventBeginElement "body" _) = [t]
             addExtra t = [t]
-    req <- parseUrl $ unpack $ makeURL slug rest
+    req <- parseRequest $ unpack $ makeURL slug rest
     (_, res) <- acquireResponse req >>= allocateAcquire
     doc <- responseBody res
         $$ eventConduit
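
lts-8.12 carries http-client 0.5, where `parseUrl` is deprecated in favour of `parseRequest`, and requests built this way no longer throw on non-2xx responses by default. A hedged sketch of that flow (function name illustrative; only http-client/http-client-tls APIs used):

    -- Sketch: build a request with parseRequest and inspect the status explicitly.
    import Network.HTTP.Client (httpLbs, newManager, parseRequest, responseStatus)
    import Network.HTTP.Client.TLS (tlsManagerSettings)
    import Network.HTTP.Types.Status (statusCode)

    fetchStatus :: String -> IO Int
    fetchStatus url = do
        man <- newManager tlsManagerSettings
        req <- parseRequest url            -- fails only on a malformed URL
        res <- httpLbs req man
        return $ statusCode (responseStatus res)  -- non-2xx does not throw here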

View File

@@ -6,7 +6,6 @@
 module Settings where

 import ClassyPrelude.Yesod
-import Control.Exception (throw)
 import Data.Aeson (Result (..), fromJSON, withObject, (.!=),
                    (.:?))
 import Data.FileEmbed (embedFile)
@@ -108,7 +107,7 @@ configSettingsYmlBS = $(embedFile configSettingsYml)

 -- | @config/settings.yml@, parsed to a @Value@.
 configSettingsYmlValue :: Value
-configSettingsYmlValue = either throw id $ decodeEither' configSettingsYmlBS
+configSettingsYmlValue = either impureThrow id $ decodeEither' configSettingsYmlBS

 -- | A version of @AppSettings@ parsed at compile time from @config/settings.yml@.
 compileTimeAppSettings :: AppSettings
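
`impureThrow` comes from safe-exceptions, which classy-prelude re-exports in this snapshot, so the explicit `Control.Exception (throw)` import can go. A small standalone sketch of the same pattern (assuming only the safe-exceptions and yaml packages):

    -- Sketch: embed a YAML parse failure as an exception inside a pure Value;
    -- it is thrown only when the Value is forced.
    import Control.Exception.Safe (impureThrow)
    import Data.Aeson (Value)
    import Data.ByteString (ByteString)
    import Data.Yaml (decodeEither')

    parseConfig :: ByteString -> Value
    parseConfig bs = either impureThrow id (decodeEither' bs)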

View File

@@ -17,8 +17,7 @@ import Web.PathPieces (toPathPiece)
 import Filesystem.Path.CurrentOS (parent, fromText, encodeString)
 import Network.HTTP.Types (status200)
 import Data.Streaming.Network (bindPortTCP)
-import Network.AWS (Credentials (Discover),
-                    Region (NorthVirginia), newEnv,
+import Network.AWS (Credentials (Discover), newEnv,
                     send, chunkedFile, defaultChunkSize,
                     envManager, runAWS)
 import Control.Monad.Trans.AWS (trying, _Error)
@@ -66,7 +65,7 @@ loadFromS3 develMode man = do
     unless develMode $ handleIO print $ removeTree root
     createTree root

-    req <- parseUrl $ unpack url
+    req <- parseRequest $ unpack url
     let download = do
             suffix <- atomically $ do
                 x <- readTVar currSuffixVar
@@ -139,11 +138,8 @@ newHoogleLocker toPrint man = mkSingleRun $ \name -> do
     if exists
         then return $ Just (encodeString fp)
         else do
-            req' <- parseUrl $ unpack $ hoogleUrl name
-            let req = req'
-                    { checkStatus = \_ _ _ -> Nothing
-                    , decompress = const False
-                    }
+            req' <- parseRequest $ unpack $ hoogleUrl name
+            let req = req' { decompress = const False }
             withResponse req man $ \res -> if responseStatus res == status200
                 then do
                     createTree $ parent (fromString fptmp)
@@ -162,7 +158,7 @@ stackageServerCron = do
     void $ catchIO (bindPortTCP 17834 "127.0.0.1") $ \_ ->
         error $ "cabal loader process already running, exiting"

-    env <- newEnv NorthVirginia Discover
+    env <- newEnv Discover
     let upload :: FilePath -> ObjectKey -> IO ()
         upload fp key = do
             let fpgz = fp <.> "gz"
@@ -219,7 +215,7 @@ stackageServerCron = do
 createHoogleDB :: StackageDatabase -> Manager -> SnapName -> IO (Maybe FilePath)
 createHoogleDB db man name = handleAny (\e -> print e $> Nothing) $ do
     putStrLn $ "Creating Hoogle DB for " ++ toPathPiece name
-    req' <- parseUrl $ unpack tarUrl
+    req' <- parseRequest $ unpack tarUrl
     let req = req' { decompress = const True }

     unlessM (isFile (fromString tarFP)) $ withResponse req man $ \res -> do
@@ -253,7 +249,9 @@ createHoogleDB db man name = handleAny (\e -> print e $> Nothing) $ do
                   , Just pkg2 <- stripSuffix ".cabal" (pack pkgcabal')
                   , pkg == pkg2
                   , lookup pkg allPackagePairs == Just ver ->
-                        writeFile (tmpdir </> unpack pkg <.> "cabal") cabalLBS
+                        runConduitRes
+                            $ sourceLazy cabalLBS
+                           .| sinkFile (tmpdir </> unpack pkg <.> "cabal")
                 _ -> return ()

             L.hGetContents h >>= loop . Tar.read
@@ -300,7 +298,7 @@ singleDB db sname tmpdir e@(Tar.entryContent -> Tar.NormalFile lbs _) = do
         Just (Entity _ sp) -> do
             let out = tmpdir </> unpack pkg <.> "txt"
             -- FIXME add @url directive
-            writeFile out lbs
+            runConduitRes $ sourceLazy lbs .| sinkFile out
             return $ singletonMap pkg (snapshotPackageVersion sp)

 {-
     docsUrl = concat
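
Three lts-8.12 API shifts are visible in this file: amazonka's `newEnv` no longer takes a `Region` argument, http-client 0.5 drops the per-request `checkStatus` override (non-2xx responses no longer throw by default, so the old `\_ _ _ -> Nothing` escape hatch is unnecessary), and `writeFile` here can no longer be fed a lazy ByteString, so the bytes are streamed to disk with conduit instead. A hedged sketch of that last replacement, using conduit-extra's `Data.Conduit.Binary` equivalents of the `sourceLazy`/`sinkFile` above (helper name illustrative):

    -- Sketch: stream a lazy ByteString to a file under ResourceT.
    import Data.Conduit (runConduitRes, (.|))
    import Data.Conduit.Binary (sinkFile, sourceLbs)
    import qualified Data.ByteString.Lazy as L

    writeLazyFile :: FilePath -> L.ByteString -> IO ()
    writeLazyFile fp lbs = runConduitRes $ sourceLbs lbs .| sinkFile fp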

View File

@@ -56,3 +56,5 @@ hToHtml =
     wrapper 4 = H.h4
     wrapper 5 = H.h5
     wrapper _ = H.h6
+    go (DocMathInline x) = H.pre $ H.code $ toHtml x
+    go (DocMathDisplay x) = H.pre $ H.code $ toHtml x
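
The two new clauses handle the `DocMathInline`/`DocMathDisplay` constructors that haddock-library's `DocH` type carries in this snapshot, rendering both as plain code. A standalone blaze-html sketch of that rendering (the function name is illustrative):

    -- Sketch: math content is shown verbatim inside <pre><code>.
    import Text.Blaze.Html (Html, toHtml)
    import qualified Text.Blaze.Html5 as H

    renderMath :: String -> Html
    renderMath x = H.pre $ H.code $ toHtml x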

View File

@@ -6,7 +6,7 @@ module Stackage.Database.Types

 import ClassyPrelude.Conduit
 import Web.PathPieces
-import Data.Aeson.Extra
+import Data.Aeson
 import Data.Text.Read (decimal)
 import Database.Persist
 import Database.Persist.Sql
@@ -23,8 +23,7 @@ isNightly :: SnapName -> Bool
 isNightly SNLts{} = False
 isNightly SNNightly{} = True

-instance ToJSONKey SnapName where
-    toJSONKey = toPathPiece
+instance ToJSONKey SnapName

 instance ToJSON SnapName where
     toJSON = String . toPathPiece
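
aeson 1.x (as shipped in lts-8.12) defines `ToJSONKey` itself, which is why `Data.Aeson.Extra` drops out and the instance body disappears: the bare instance presumably relies on the class default, which keys via the type's `ToJSON` instance. For a text-keyed type the instance can also be written explicitly; a hedged sketch with an illustrative stand-in type:

    -- Sketch of an explicit aeson-1.x key instance; Snap stands in for SnapName.
    import Data.Aeson (ToJSON (..), Value (..))
    import Data.Aeson.Types (ToJSONKey (..), toJSONKeyText)
    import Data.Text (Text)

    newtype Snap = Snap Text

    instance ToJSON Snap where
        toJSON (Snap t) = String t

    instance ToJSONKey Snap where
        toJSONKey = toJSONKeyText (\(Snap t) -> t)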

View File

@@ -9,7 +9,7 @@ module Stackage.Snapshot.Diff
     ) where

 import Data.Align
-import Data.Aeson.Extra
+import Data.Aeson
 import qualified Data.HashMap.Strict as HashMap
 import Control.Arrow
 import ClassyPrelude
@@ -30,7 +30,7 @@ newtype SnapshotDiff
 instance ToJSON (WithSnapshotNames SnapshotDiff) where
     toJSON (WithSnapshotNames nameA nameB (SnapshotDiff diff)) =
         object [ "comparing" .= [toPathPiece nameA, toPathPiece nameB]
-               , "diff" .= Object (toJSONMap (WithSnapshotNames nameA nameB <$> diff))
+               , "diff" .= toJSON (WithSnapshotNames nameA nameB <$> diff)
               ]

 toDiffList :: SnapshotDiff -> [(PackageName, VersionChange)]
@@ -45,7 +45,7 @@ newtype VersionChange = VersionChange { unVersionChange :: These Version Version }
     deriving (Show, Eq, Generic, Typeable)

 instance ToJSON (WithSnapshotNames VersionChange) where
-    toJSON (WithSnapshotNames (toJSONKey -> aKey) (toJSONKey -> bKey) change) =
+    toJSON (WithSnapshotNames (toPathPiece -> aKey) (toPathPiece -> bKey) change) =
         case change of
             VersionChange (This a) -> object [ aKey .= a ]
             VersionChange (That b) -> object [ bKey .= b ]
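
The `toJSONMap` helper from aeson-extra is no longer needed: aeson 1.x provides `ToJSON` instances for maps whose key type has `ToJSONKey`, so plain `toJSON` already yields the keyed object. A minimal sketch (types illustrative):

    -- Sketch: a HashMap with ToJSONKey keys serializes directly to a JSON object.
    import Data.Aeson (Value, toJSON)
    import Data.HashMap.Strict (HashMap)
    import Data.Text (Text)

    diffValue :: HashMap Text Int -> Value
    diffValue = toJSON  -- one object field per map entry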

View File

@@ -1,7 +1,7 @@
 module Types where

 import ClassyPrelude.Yesod
-import Data.Aeson.Extra
+import Data.Aeson
 import Data.Hashable (hashUsing)
 import Text.Blaze (ToMarkup)
 import Database.Persist.Sql (PersistFieldSql (sqlType))
@@ -31,8 +31,7 @@ newtype PackageName = PackageName { unPackageName :: Text }
     deriving (Show, Read, Typeable, Eq, Ord, Hashable, PathPiece, ToMarkup, PersistField, IsString)

 instance ToJSON PackageName where
     toJSON = toJSON . unPackageName
-instance ToJSONKey PackageName where
-    toJSONKey = unPackageName
+instance ToJSONKey PackageName

 instance PersistFieldSql PackageName where
     sqlType = sqlType . liftM unPackageName

 newtype Version = Version { unVersion :: Text }

View File

@@ -1,15 +1,14 @@
-resolver: lts-6.17
+resolver: lts-8.12
 packages:
 - .
 - location:
     git: https://github.com/chrisdone/tagstream-conduit.git
     commit: bacd7444596b2391b0ac302ad649b994b258d271
   extra-dep: true
-- location:
-    git: https://github.com/commercialhaskell/all-cabal-metadata-tool
-    commit: 1a4d8cff4e796ea0049537a38e38ec0a739caf64
-  extra-dep: true
 extra-deps:
-- prometheus-client-0.1.0.1
-- prometheus-metrics-ghc-0.1.0.1
-- wai-middleware-prometheus-0.1.0.1
-- hoogle-5.0.6
-- haskell-src-exts-1.19.0
-- persistent-sqlite-2.2.1.1
-- yesod-bin-1.5.2.2
+- aws-0.16
+- barrier-0.1.1

View File

@@ -89,7 +89,6 @@ library
     build-depends:
         base
       , aeson
-      , aeson-extra
       , aws
       , barrier
      , base16-bytestring
@@ -168,8 +167,7 @@ library
      , deepseq
      , deepseq-generics
      , auto-update
-     , stackage-types
-     , stackage-build-plan
+     , stackage-curator
      , yesod-sitemap
      , streaming-commons
      , classy-prelude-conduit