From 5e6987b1d8a03e7bfaf618c255fa249562973520 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Mon, 13 Jun 2022 13:11:16 +0200 Subject: [PATCH 01/15] src: consistently import HashMap as HM, Map as M With both HashMap and Map imported as M in different modules, linter rules prevented ever importing both modules in one place. --- src/PostgREST/App.hs | 12 ++++----- src/PostgREST/Auth.hs | 10 +++---- src/PostgREST/DbStructure.hs | 14 +++++----- src/PostgREST/DbStructure/Proc.hs | 4 +-- src/PostgREST/DbStructure/Relationship.hs | 4 +-- src/PostgREST/DbStructure/Table.hs | 4 +-- src/PostgREST/GucHeader.hs | 4 +-- src/PostgREST/Middleware.hs | 8 +++--- src/PostgREST/OpenAPI.hs | 10 +++---- src/PostgREST/Request/ApiRequest.hs | 32 +++++++++++------------ src/PostgREST/Request/DbRequestBuilder.hs | 6 ++--- src/PostgREST/Request/QueryParams.hs | 12 ++++----- 12 files changed, 60 insertions(+), 60 deletions(-) diff --git a/src/PostgREST/App.hs b/src/PostgREST/App.hs index b5d9f59bd4..b4555f5b0c 100644 --- a/src/PostgREST/App.hs +++ b/src/PostgREST/App.hs @@ -28,7 +28,7 @@ import System.Posix.Types (FileMode) import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy as LBS -import qualified Data.HashMap.Strict as M +import qualified Data.HashMap.Strict as HM import qualified Data.Set as S import qualified Hasql.DynamicStatements.Snippet as SQL (Snippet) import qualified Hasql.Pool as SQL @@ -314,7 +314,7 @@ handleCreate identifier@QualifiedIdentifier{..} context@RequestContext{..} = do let ApiRequest{..} = ctxApiRequest pkCols = if iPreferRepresentation /= None || isJust iPreferResolution - then maybe mempty tablePKCols $ M.lookup identifier $ dbTables ctxDbStructure + then maybe mempty tablePKCols $ HM.lookup identifier $ dbTables ctxDbStructure else mempty WriteQueryResult{..} <- writeQuery MutationCreate identifier True pkCols context @@ -371,7 +371,7 @@ handleUpdate identifier context@(RequestContext _ _ ApiRequest{..} _) = do handleSingleUpsert :: QualifiedIdentifier -> RequestContext-> DbHandler Wai.Response handleSingleUpsert identifier context@(RequestContext _ ctxDbStructure ApiRequest{..} _) = do - let pkCols = maybe mempty tablePKCols $ M.lookup identifier $ dbTables ctxDbStructure + let pkCols = maybe mempty tablePKCols $ HM.lookup identifier $ dbTables ctxDbStructure WriteQueryResult{..} <- writeQuery MutationSingleUpsert identifier False pkCols context @@ -419,7 +419,7 @@ handleInfo identifier RequestContext{..} = Nothing -> throwError Error.NotFound where - tbl = M.lookup identifier (dbTables ctxDbStructure) + tbl = HM.lookup identifier (dbTables ctxDbStructure) allOrigins = ("Access-Control-Allow-Origin", "*") allowH table = ( HTTP.hAllow @@ -489,8 +489,8 @@ handleOpenApi headersOnly tSchema (RequestContext conf@AppConfig{..} dbStructure <*> SQL.statement tSchema (DbStructure.schemaDescription configDbPreparedStatements) OAIgnorePriv -> OpenAPI.encode conf dbStructure - (M.filterWithKey (\(QualifiedIdentifier sch _) _ -> sch == tSchema) $ DbStructure.dbTables dbStructure) - (M.filterWithKey (\(QualifiedIdentifier sch _) _ -> sch == tSchema) $ DbStructure.dbProcs dbStructure) + (HM.filterWithKey (\(QualifiedIdentifier sch _) _ -> sch == tSchema) $ DbStructure.dbTables dbStructure) + (HM.filterWithKey (\(QualifiedIdentifier sch _) _ -> sch == tSchema) $ DbStructure.dbProcs dbStructure) <$> SQL.statement tSchema (DbStructure.schemaDescription configDbPreparedStatements) OADisabled -> pure mempty diff --git a/src/PostgREST/Auth.hs b/src/PostgREST/Auth.hs index 
093300d018..5c1f029daa 100644 --- a/src/PostgREST/Auth.hs +++ b/src/PostgREST/Auth.hs @@ -22,7 +22,7 @@ import qualified Crypto.JWT as JWT import qualified Data.Aeson as JSON import qualified Data.Aeson.Types as JSON import qualified Data.ByteString.Lazy.Char8 as LBS -import qualified Data.HashMap.Strict as M +import qualified Data.HashMap.Strict as HM import qualified Data.Text.Encoding as T import qualified Data.Vault.Lazy as Vault import qualified Data.Vector as V @@ -45,7 +45,7 @@ import Protolude data AuthResult = AuthResult - { authClaims :: M.HashMap Text JSON.Value + { authClaims :: HM.HashMap Text JSON.Value , authRole :: Text } @@ -78,13 +78,13 @@ parseClaims AppConfig{..} jclaims@(JSON.Object mclaims) = do role <- liftEither . maybeToRight JwtTokenRequired $ unquoted <$> walkJSPath (Just jclaims) configJwtRoleClaimKey <|> configDbAnonRole return AuthResult - { authClaims = mclaims & M.insert "role" (JSON.toJSON role) + { authClaims = mclaims & HM.insert "role" (JSON.toJSON role) , authRole = role } where walkJSPath :: Maybe JSON.Value -> JSPath -> Maybe JSON.Value walkJSPath x [] = x - walkJSPath (Just (JSON.Object o)) (JSPKey key:rest) = walkJSPath (M.lookup key o) rest + walkJSPath (Just (JSON.Object o)) (JSPKey key:rest) = walkJSPath (HM.lookup key o) rest walkJSPath (Just (JSON.Array ar)) (JSPIdx idx:rest) = walkJSPath (ar V.!? idx) rest walkJSPath _ _ = Nothing @@ -92,7 +92,7 @@ parseClaims AppConfig{..} jclaims@(JSON.Object mclaims) = do unquoted (JSON.String t) = t unquoted v = T.decodeUtf8 . LBS.toStrict $ JSON.encode v -- impossible case - just added to please -Wincomplete-patterns -parseClaims _ _ = return AuthResult { authClaims = M.empty, authRole = mempty } +parseClaims _ _ = return AuthResult { authClaims = HM.empty, authRole = mempty } -- | Validate authorization header. -- Parse and store JWT claims for future use in the request. diff --git a/src/PostgREST/DbStructure.hs b/src/PostgREST/DbStructure.hs index 33e3b20982..6a2a5b583a 100644 --- a/src/PostgREST/DbStructure.hs +++ b/src/PostgREST/DbStructure.hs @@ -27,7 +27,7 @@ module PostgREST.DbStructure ) where import qualified Data.Aeson as JSON -import qualified Data.HashMap.Strict as M +import qualified Data.HashMap.Strict as HM import qualified Data.Set as S import qualified Hasql.Decoders as HD import qualified Hasql.Encoders as HE @@ -100,16 +100,16 @@ queryDbStructure schemas extraSearchPath prepared = do , dbProcs = procs } where - relsToMap = map sort . M.fromListWith (++) . map ((\(x, fSch, y) -> ((x, fSch), [y])) . addKey) + relsToMap = map sort . HM.fromListWith (++) . map ((\(x, fSch, y) -> ((x, fSch), [y])) . addKey) addKey rel = (relTable rel, qiSchema $ relForeignTable rel, rel) -- | Remove db objects that belong to an internal schema(not exposed through the API) from the DbStructure. 
removeInternal :: [Schema] -> DbStructure -> DbStructure removeInternal schemas dbStruct = DbStructure { - dbTables = M.filterWithKey (\(QualifiedIdentifier sch _) _ -> sch `elem` schemas) $ dbTables dbStruct + dbTables = HM.filterWithKey (\(QualifiedIdentifier sch _) _ -> sch `elem` schemas) $ dbTables dbStruct , dbRelationships = filter (\r -> qiSchema (relForeignTable r) `elem` schemas && not (hasInternalJunction r)) <$> - M.filterWithKey (\(QualifiedIdentifier sch _, _) _ -> sch `elem` schemas ) (dbRelationships dbStruct) + HM.filterWithKey (\(QualifiedIdentifier sch _, _) _ -> sch `elem` schemas ) (dbRelationships dbStruct) , dbProcs = dbProcs dbStruct -- procs are only obtained from the exposed schemas, no need to filter them. } where @@ -119,7 +119,7 @@ removeInternal schemas dbStruct = decodeTables :: HD.Result TablesMap decodeTables = - M.fromList . map (\tbl@Table{tableSchema, tableName} -> (QualifiedIdentifier tableSchema tableName, tbl)) <$> HD.rowList tblRow + HM.fromList . map (\tbl@Table{tableSchema, tableName} -> (QualifiedIdentifier tableSchema tableName, tbl)) <$> HD.rowList tblRow where tblRow = Table <$> column HD.text @@ -176,7 +176,7 @@ viewKeyDepFromRow (s1,t1,s2,v2,cons,consType,sCols) = ViewKeyDependency (Qualifi decodeProcs :: HD.Result ProcsMap decodeProcs = -- Duplicate rows for a function means they're overloaded, order these by least args according to ProcDescription Ord instance - map sort . M.fromListWith (++) . map ((\(x,y) -> (x, [y])) . addKey) <$> HD.rowList procRow + map sort . HM.fromListWith (++) . map ((\(x,y) -> (x, [y])) . addKey) <$> HD.rowList procRow where procRow = ProcDescription <$> column HD.text @@ -385,7 +385,7 @@ addM2MRels :: TablesMap -> [Relationship] -> [Relationship] addM2MRels tbls rels = rels ++ catMaybes [ let jtCols = S.fromList $ (fst <$> cols) ++ (fst <$> fcols) - pkCols = S.fromList $ maybe mempty tablePKCols $ M.lookup jt1 tbls + pkCols = S.fromList $ maybe mempty tablePKCols $ HM.lookup jt1 tbls in if S.isSubsetOf jtCols pkCols then Just $ Relationship t ft (t == ft) (M2M $ Junction jt1 cons1 cons2 (swap <$> cols) (swap <$> fcols)) tblIsView fTblisView else Nothing diff --git a/src/PostgREST/DbStructure/Proc.hs b/src/PostgREST/DbStructure/Proc.hs index 3ec5817a9d..a62f436afb 100644 --- a/src/PostgREST/DbStructure/Proc.hs +++ b/src/PostgREST/DbStructure/Proc.hs @@ -15,7 +15,7 @@ module PostgREST.DbStructure.Proc ) where import qualified Data.Aeson as JSON -import qualified Data.HashMap.Strict as M +import qualified Data.HashMap.Strict as HM import PostgREST.DbStructure.Identifiers (QualifiedIdentifier (..), Schema, TableName) @@ -66,7 +66,7 @@ instance Ord ProcDescription where -- | A map of all procs, all of which can be overloaded(one entry will have more than one ProcDescription). -- | It uses a HashMap for a faster lookup. 
-type ProcsMap = M.HashMap QualifiedIdentifier [ProcDescription] +type ProcsMap = HM.HashMap QualifiedIdentifier [ProcDescription] procReturnsScalar :: ProcDescription -> Bool procReturnsScalar proc = case proc of diff --git a/src/PostgREST/DbStructure/Relationship.hs b/src/PostgREST/DbStructure/Relationship.hs index 0a50c9741e..37f839dbcb 100644 --- a/src/PostgREST/DbStructure/Relationship.hs +++ b/src/PostgREST/DbStructure/Relationship.hs @@ -9,7 +9,7 @@ module PostgREST.DbStructure.Relationship ) where import qualified Data.Aeson as JSON -import qualified Data.HashMap.Strict as M +import qualified Data.HashMap.Strict as HM import PostgREST.DbStructure.Identifiers (FieldName, QualifiedIdentifier, Schema) @@ -53,4 +53,4 @@ data Junction = Junction deriving (Eq, Ord, Generic, JSON.ToJSON) -- | Key based on the source table and the foreign table schema -type RelationshipsMap = M.HashMap (QualifiedIdentifier, Schema) [Relationship] +type RelationshipsMap = HM.HashMap (QualifiedIdentifier, Schema) [Relationship] diff --git a/src/PostgREST/DbStructure/Table.hs b/src/PostgREST/DbStructure/Table.hs index 3c6ab5a201..31a0770dd3 100644 --- a/src/PostgREST/DbStructure/Table.hs +++ b/src/PostgREST/DbStructure/Table.hs @@ -8,7 +8,7 @@ module PostgREST.DbStructure.Table ) where import qualified Data.Aeson as JSON -import qualified Data.HashMap.Strict as M +import qualified Data.HashMap.Strict as HM import PostgREST.DbStructure.Identifiers (FieldName, QualifiedIdentifier (..), @@ -46,4 +46,4 @@ data Column = Column } deriving (Eq, Show, Ord, Generic, JSON.ToJSON) -type TablesMap = M.HashMap QualifiedIdentifier Table +type TablesMap = HM.HashMap QualifiedIdentifier Table diff --git a/src/PostgREST/GucHeader.hs b/src/PostgREST/GucHeader.hs index 23f9db3904..5b8b5ebd53 100644 --- a/src/PostgREST/GucHeader.hs +++ b/src/PostgREST/GucHeader.hs @@ -6,7 +6,7 @@ module PostgREST.GucHeader import qualified Data.Aeson as JSON import qualified Data.CaseInsensitive as CI -import qualified Data.HashMap.Strict as M +import qualified Data.HashMap.Strict as HM import Network.HTTP.Types.Header (Header) @@ -21,7 +21,7 @@ newtype GucHeader = GucHeader (CI.CI ByteString, ByteString) instance JSON.FromJSON GucHeader where parseJSON (JSON.Object o) = - case M.toList o of + case HM.toList o of [(k, JSON.String s)] -> pure $ GucHeader (CI.mk $ toUtf8 k, toUtf8 s) _ -> mzero parseJSON _ = mzero diff --git a/src/PostgREST/Middleware.hs b/src/PostgREST/Middleware.hs index a32ee0808f..359bd1180f 100644 --- a/src/PostgREST/Middleware.hs +++ b/src/PostgREST/Middleware.hs @@ -11,7 +11,7 @@ module PostgREST.Middleware import qualified Data.Aeson as JSON import qualified Data.ByteString.Lazy.Char8 as LBS -import qualified Data.HashMap.Strict as M +import qualified Data.HashMap.Strict as HM import qualified Data.Text as T import qualified Data.Text.Encoding as T import qualified Hasql.Decoders as HD @@ -38,7 +38,7 @@ import PostgREST.Request.Preferences import Protolude -- | Runs local(transaction scoped) GUCs for every request, plus the pre-request function -runPgLocals :: AppConfig -> M.HashMap Text JSON.Value -> Text -> +runPgLocals :: AppConfig -> HM.HashMap Text JSON.Value -> Text -> (ApiRequest -> ExceptT Error SQL.Transaction Wai.Response) -> ApiRequest -> ByteString -> PgVersion -> ExceptT Error SQL.Transaction Wai.Response runPgLocals conf claims role app req jsonDbS actualPgVersion = do @@ -57,7 +57,7 @@ runPgLocals conf claims role app req jsonDbS actualPgVersion = do then setConfigLocal "request.cookie." 
<$> iCookies req else setConfigLocalJson "request.cookies" (iCookies req) claimsSql = if usesLegacyGucs - then setConfigLocal "request.jwt.claim." <$> [(toUtf8 c, toUtf8 $ unquoted v) | (c,v) <- M.toList claims] + then setConfigLocal "request.jwt.claim." <$> [(toUtf8 c, toUtf8 $ unquoted v) | (c,v) <- HM.toList claims] else [setConfigLocal mempty ("request.jwt.claims", LBS.toStrict $ JSON.encode claims)] roleSql = [setConfigLocal mempty ("role", toUtf8 role)] appSettingsSql = setConfigLocal mempty <$> (join bimap toUtf8 <$> configAppSettings conf) @@ -116,6 +116,6 @@ setConfigLocalJson :: ByteString -> [(ByteString, ByteString)] -> [SQL.Snippet] setConfigLocalJson prefix keyVals = [setConfigLocal mempty (prefix, gucJsonVal keyVals)] where gucJsonVal :: [(ByteString, ByteString)] -> ByteString - gucJsonVal = LBS.toStrict . JSON.encode . M.fromList . arrayByteStringToText + gucJsonVal = LBS.toStrict . JSON.encode . HM.fromList . arrayByteStringToText arrayByteStringToText :: [(ByteString, ByteString)] -> [(Text,Text)] arrayByteStringToText keyVal = (T.decodeUtf8 *** T.decodeUtf8) <$> keyVal diff --git a/src/PostgREST/OpenAPI.hs b/src/PostgREST/OpenAPI.hs index 0a446adff7..71363046f7 100644 --- a/src/PostgREST/OpenAPI.hs +++ b/src/PostgREST/OpenAPI.hs @@ -9,7 +9,7 @@ module PostgREST.OpenAPI (encode) where import qualified Data.Aeson as JSON import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy as LBS -import qualified Data.HashMap.Strict as M +import qualified Data.HashMap.Strict as HM import qualified Data.HashSet.InsOrd as Set import qualified Data.Text as T import qualified Data.Text.Encoding as T @@ -41,13 +41,13 @@ import PostgREST.ContentType import Protolude hiding (Proxy, get) -encode :: AppConfig -> DbStructure -> TablesMap -> M.HashMap k [ProcDescription] -> Maybe Text -> LBS.ByteString +encode :: AppConfig -> DbStructure -> TablesMap -> HM.HashMap k [ProcDescription] -> Maybe Text -> LBS.ByteString encode conf dbStructure tables procs schemaDescription = JSON.encode $ postgrestSpec (dbRelationships dbStructure) - (concat $ M.elems procs) - (snd <$> M.toList tables) + (concat $ HM.elems procs) + (snd <$> HM.toList tables) (proxyUri conf) schemaDescription @@ -100,7 +100,7 @@ makeProperty tbl rels col = (colName col, Inline s) rel = find (\case Relationship{relCardinality=(M2O _ relColumns)} -> [colName col] == (fst <$> relColumns) _ -> False - ) $ fromMaybe mempty $ M.lookup (QualifiedIdentifier (tableSchema tbl) (tableName tbl), tableSchema tbl) rels + ) $ fromMaybe mempty $ HM.lookup (QualifiedIdentifier (tableSchema tbl) (tableName tbl), tableSchema tbl) rels fCol = (headMay . (\r -> snd <$> relColumns (relCardinality r)) =<< rel) fTbl = qiName . 
relForeignTable <$> rel fTblCol = (,) <$> fTbl <*> fCol diff --git a/src/PostgREST/Request/ApiRequest.hs b/src/PostgREST/Request/ApiRequest.hs index ef82ba25e4..2cd2f309a8 100644 --- a/src/PostgREST/Request/ApiRequest.hs +++ b/src/PostgREST/Request/ApiRequest.hs @@ -22,7 +22,7 @@ import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy as LBS import qualified Data.CaseInsensitive as CI import qualified Data.Csv as CSV -import qualified Data.HashMap.Strict as M +import qualified Data.HashMap.Strict as HM import qualified Data.List as L import qualified Data.List.NonEmpty as NonEmptyList import qualified Data.Set as S @@ -129,10 +129,10 @@ toRpcParamValue proc (k, v) | prmIsVariadic k = (k, Variadic [v]) jsonRpcParams :: ProcDescription -> [(Text, Text)] -> Payload jsonRpcParams proc prms = if not $ pdHasVariadic proc then -- if proc has no variadic param, save steps and directly convert to json - ProcessedJSON (JSON.encode $ M.fromList $ second JSON.toJSON <$> prms) (S.fromList $ fst <$> prms) + ProcessedJSON (JSON.encode $ HM.fromList $ second JSON.toJSON <$> prms) (S.fromList $ fst <$> prms) else - let paramsMap = M.fromListWith mergeParams $ toRpcParamValue proc <$> prms in - ProcessedJSON (JSON.encode paramsMap) (S.fromList $ M.keys paramsMap) + let paramsMap = HM.fromListWith mergeParams $ toRpcParamValue proc <$> prms in + ProcessedJSON (JSON.encode paramsMap) (S.fromList $ HM.keys paramsMap) where mergeParams :: RpcParamValue -> RpcParamValue -> RpcParamValue mergeParams (Variadic a) (Variadic b) = Variadic $ b ++ a @@ -153,7 +153,7 @@ targetToJsonRpcParams target params = -} data ApiRequest = ApiRequest { iAction :: Action -- ^ Similar but not identical to HTTP verb, e.g. Create/Invoke both POST - , iRange :: M.HashMap Text NonnegRange -- ^ Requested range of rows within response + , iRange :: HM.HashMap Text NonnegRange -- ^ Requested range of rows within response , iTopLevelRange :: NonnegRange -- ^ Requested range of rows from the top level , iTarget :: Target -- ^ The target, be it calling a proc or accessing a table , iPayload :: Maybe Payload -- ^ Data sent by client and used for mutation actions @@ -252,13 +252,13 @@ apiRequest conf@AppConfig{..} dbStructure req reqBody queryparams@QueryParams{.. json <- csvToJson <$> first BS.pack (CSV.decodeByName reqBody) note "All lines must have same number of fields" $ payloadAttributes (JSON.encode json) json (CTUrlEncoded, _) -> - let paramsMap = M.fromList $ (T.decodeUtf8 *** JSON.String . T.decodeUtf8) <$> parseSimpleQuery (LBS.toStrict reqBody) in - Right $ ProcessedJSON (JSON.encode paramsMap) $ S.fromList (M.keys paramsMap) + let paramsMap = HM.fromList $ (T.decodeUtf8 *** JSON.String . T.decodeUtf8) <$> parseSimpleQuery (LBS.toStrict reqBody) in + Right $ ProcessedJSON (JSON.encode paramsMap) $ S.fromList (HM.keys paramsMap) (CTTextPlain, True) -> Right $ RawPay reqBody (CTTextXML, True) -> Right $ RawPay reqBody (CTOctetStream, True) -> Right $ RawPay reqBody (ct, _) -> Left $ "Content-Type not acceptable: " <> ContentType.toMime ct - topLevelRange = fromMaybe allRange $ M.lookup "limit" ranges -- if no limit is specified, get all the request rows + topLevelRange = fromMaybe allRange $ HM.lookup "limit" ranges -- if no limit is specified, get all the request rows action = case method of -- The HEAD method is identical to GET except that the server MUST NOT return a message-body in the response @@ -335,12 +335,12 @@ apiRequest conf@AppConfig{..} dbStructure req reqBody queryparams@QueryParams{.. 
lookupHeader = flip lookup hdrs Preferences.Preferences{..} = Preferences.fromHeaders hdrs headerRange = rangeRequested hdrs - limitRange = fromMaybe allRange (M.lookup "limit" qsRanges) + limitRange = fromMaybe allRange (HM.lookup "limit" qsRanges) headerAndLimitRange = rangeIntersection headerRange limitRange -- Bypass all the ranges and send only the limit zero range (0 <= x <= -1) if -- limit=0 is present in the query params (not allowed for the Range header) - ranges = M.insert "limit" (if hasLimitZero limitRange then limitZeroRange else headerAndLimitRange) qsRanges + ranges = HM.insert "limit" (if hasLimitZero limitRange then limitZeroRange else headerAndLimitRange) qsRanges -- The only emptyRange allowed is the limit zero range isInvalidRange = topLevelRange == emptyRange && not (hasLimitZero limitRange) @@ -357,7 +357,7 @@ mutuallyAgreeable sProduces cAccepts = then listToMaybe sProduces else exact -type CsvData = V.Vector (M.HashMap Text LBS.ByteString) +type CsvData = V.Vector (HM.HashMap Text LBS.ByteString) {-| Converts CSV like @@ -376,7 +376,7 @@ csvToJson (_, vals) = JSON.Array $ V.map rowToJsonObj vals where rowToJsonObj = JSON.Object . - M.map (\str -> + HM.map (\str -> if str == "NULL" then JSON.Null else JSON.String . T.decodeUtf8 $ LBS.toStrict str @@ -389,9 +389,9 @@ payloadAttributes raw json = JSON.Array arr -> case arr V.!? 0 of Just (JSON.Object o) -> - let canonicalKeys = S.fromList $ M.keys o + let canonicalKeys = S.fromList $ HM.keys o areKeysUniform = all (\case - JSON.Object x -> S.fromList (M.keys x) == canonicalKeys + JSON.Object x -> S.fromList (HM.keys x) == canonicalKeys _ -> False) arr in if areKeysUniform then Just $ ProcessedJSON raw canonicalKeys @@ -399,7 +399,7 @@ payloadAttributes raw json = Just _ -> Nothing Nothing -> Just emptyPJArray - JSON.Object o -> Just $ ProcessedJSON raw (S.fromList $ M.keys o) + JSON.Object o -> Just $ ProcessedJSON raw (S.fromList $ HM.keys o) -- truncate everything else to an empty array. 
_ -> Just emptyPJArray @@ -449,7 +449,7 @@ findProc qi argumentsKeys paramsAsSingleObject allProcs contentType isInvPost = ([proc], _) -> Right proc (procs, _) -> Left $ AmbiguousRpc (toList procs) where - matchProc = overloadedProcPartition $ M.lookupDefault mempty qi allProcs -- first find the proc by name + matchProc = overloadedProcPartition $ HM.lookupDefault mempty qi allProcs -- first find the proc by name -- The partition obtained has the form (overloadedProcs,fallbackProcs) -- where fallbackProcs are functions with a single unnamed parameter overloadedProcPartition procs = foldr select ([],[]) procs diff --git a/src/PostgREST/Request/DbRequestBuilder.hs b/src/PostgREST/Request/DbRequestBuilder.hs index ab7fe5d285..8d9fe2e3c5 100644 --- a/src/PostgREST/Request/DbRequestBuilder.hs +++ b/src/PostgREST/Request/DbRequestBuilder.hs @@ -21,7 +21,7 @@ module PostgREST.Request.DbRequestBuilder , callRequest ) where -import qualified Data.HashMap.Strict as M +import qualified Data.HashMap.Strict as HM import qualified Data.Set as S import Data.Either.Combinators (mapLeft) @@ -206,7 +206,7 @@ findRel schema allRels origin target hint = -- /users?select=tasks!users_tasks(*) many-to-many between users and tasks matchJunction hnt relCardinality -- users_tasks ) - ) $ fromMaybe mempty $ M.lookup (QualifiedIdentifier schema origin, schema) allRels + ) $ fromMaybe mempty $ HM.lookup (QualifiedIdentifier schema origin, schema) allRels -- previousAlias is only used for the case of self joins addJoinConditions :: Maybe Alias -> ReadRequest -> Either ApiRequestError ReadRequest @@ -283,7 +283,7 @@ addRanges ApiRequest{..} rReq = _ -> foldr addRangeToNode (Right rReq) =<< ranges where ranges :: Either ApiRequestError [(EmbedPath, NonnegRange)] - ranges = first QueryParamError $ QueryParams.pRequestRange `traverse` M.toList iRange + ranges = first QueryParamError $ QueryParams.pRequestRange `traverse` HM.toList iRange addRangeToNode :: (EmbedPath, NonnegRange) -> Either ApiRequestError ReadRequest -> Either ApiRequestError ReadRequest addRangeToNode = updateNode (\r (Node (q,i) f) -> Node (q{range_=r}, i) f) diff --git a/src/PostgREST/Request/QueryParams.hs b/src/PostgREST/Request/QueryParams.hs index d03d72a286..4ebb5d9a58 100644 --- a/src/PostgREST/Request/QueryParams.hs +++ b/src/PostgREST/Request/QueryParams.hs @@ -13,7 +13,7 @@ module PostgREST.Request.QueryParams ) where import qualified Data.ByteString.Char8 as BS -import qualified Data.HashMap.Strict as M +import qualified Data.HashMap.Strict as HM import qualified Data.List as L import qualified Data.Set as S import qualified Data.Text as T @@ -78,7 +78,7 @@ data QueryParams = -- ^ Canonical representation of the query params, sorted alphabetically , qsParams :: [(Text, Text)] -- ^ Parameters for RPC calls - , qsRanges :: M.HashMap Text (Range Integer) + , qsRanges :: HM.HashMap Text (Range Integer) -- ^ Ranges derived from &limit and &offset params , qsOrder :: [(EmbedPath, [OrderTerm])] -- ^ &order parameters for each level @@ -192,8 +192,8 @@ parse qs = isEmbedPath = T.isInfixOf "." replaceLast x s = T.intercalate "." 
$ L.init (T.split (=='.') s) <> [x] - ranges :: M.HashMap Text (Range Integer) - ranges = M.unionWith f limitParams offsetParams + ranges :: HM.HashMap Text (Range Integer) + ranges = HM.unionWith f limitParams offsetParams where f rl ro = Range (BoundaryBelow o) (BoundaryAbove $ o + l - 1) where @@ -201,10 +201,10 @@ parse qs = o = rangeOffset ro limitParams = - M.fromList [(k, restrictRange (readMaybe v) allRange) | (k,v) <- limits] + HM.fromList [(k, restrictRange (readMaybe v) allRange) | (k,v) <- limits] offsetParams = - M.fromList [(k, maybe allRange rangeGeq (readMaybe v)) | (k,v) <- offsets] + HM.fromList [(k, maybe allRange rangeGeq (readMaybe v)) | (k,v) <- offsets] operator :: Text -> Maybe SimpleOperator operator = \case From 48a3d1d5163e9795cfa09250e007e127b6c496cb Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Mon, 13 Jun 2022 10:30:20 +0200 Subject: [PATCH 02/15] cabal: update aeson to 2.0.3 --- postgrest.cabal | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/postgrest.cabal b/postgrest.cabal index 543be2dc91..75b1b1352d 100644 --- a/postgrest.cabal +++ b/postgrest.cabal @@ -71,7 +71,7 @@ library build-depends: base >= 4.9 && < 4.16 , HTTP >= 4000.3.7 && < 4000.4 , Ranged-sets >= 0.3 && < 0.5 - , aeson >= 1.4.7 && < 1.6 + , aeson >= 2.0.3 && < 2.1 , auto-update >= 0.1.4 && < 0.2 , base64-bytestring >= 1 && < 1.3 , bytestring >= 0.10.8 && < 0.11 @@ -206,7 +206,7 @@ test-suite spec SpecHelper TestTypes build-depends: base >= 4.9 && < 4.16 - , aeson >= 1.4.7 && < 1.6 + , aeson >= 2.0.3 && < 2.1 , aeson-qq >= 0.8.1 && < 0.9 , async >= 2.1.1 && < 2.3 , auto-update >= 0.1.4 && < 0.2 @@ -249,7 +249,7 @@ test-suite querycost main-is: QueryCost.hs other-modules: SpecHelper build-depends: base >= 4.9 && < 4.16 - , aeson >= 1.4.7 && < 1.6 + , aeson >= 2.0.3 && < 2.1 , base64-bytestring >= 1 && < 1.3 , bytestring >= 0.10.8 && < 0.11 , case-insensitive >= 1.2 && < 1.3 From 2b8ffc8e612f4f6a362a121910fdcd16939695a7 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Mon, 13 Jun 2022 10:31:13 +0200 Subject: [PATCH 03/15] src: update for changed map type in aeson-2 This means that we're now using Data.Map.Strict instead of Data.HashMap.Strict for JSON objects in general, and specifically for claims maps and CSV rows. This addresses certain hash flooding vulnerabilities, but may have performance downsides. Compare e.g. 
https://frasertweedale.github.io/blog-fp/posts/2021-10-12-aeson-hash-flooding-protection.html --- src/PostgREST/Auth.hs | 11 ++++++----- src/PostgREST/GucHeader.hs | 7 ++++--- src/PostgREST/Middleware.hs | 6 ++++-- src/PostgREST/Request/ApiRequest.hs | 15 +++++++++------ 4 files changed, 23 insertions(+), 16 deletions(-) diff --git a/src/PostgREST/Auth.hs b/src/PostgREST/Auth.hs index 5c1f029daa..d14c50ff47 100644 --- a/src/PostgREST/Auth.hs +++ b/src/PostgREST/Auth.hs @@ -20,9 +20,10 @@ module PostgREST.Auth import qualified Crypto.JWT as JWT import qualified Data.Aeson as JSON +import qualified Data.Aeson.Key as K +import qualified Data.Aeson.KeyMap as KM import qualified Data.Aeson.Types as JSON import qualified Data.ByteString.Lazy.Char8 as LBS -import qualified Data.HashMap.Strict as HM import qualified Data.Text.Encoding as T import qualified Data.Vault.Lazy as Vault import qualified Data.Vector as V @@ -45,7 +46,7 @@ import Protolude data AuthResult = AuthResult - { authClaims :: HM.HashMap Text JSON.Value + { authClaims :: KM.KeyMap JSON.Value , authRole :: Text } @@ -78,13 +79,13 @@ parseClaims AppConfig{..} jclaims@(JSON.Object mclaims) = do role <- liftEither . maybeToRight JwtTokenRequired $ unquoted <$> walkJSPath (Just jclaims) configJwtRoleClaimKey <|> configDbAnonRole return AuthResult - { authClaims = mclaims & HM.insert "role" (JSON.toJSON role) + { authClaims = mclaims & KM.insert "role" (JSON.toJSON role) , authRole = role } where walkJSPath :: Maybe JSON.Value -> JSPath -> Maybe JSON.Value walkJSPath x [] = x - walkJSPath (Just (JSON.Object o)) (JSPKey key:rest) = walkJSPath (HM.lookup key o) rest + walkJSPath (Just (JSON.Object o)) (JSPKey key:rest) = walkJSPath (KM.lookup (K.fromText key) o) rest walkJSPath (Just (JSON.Array ar)) (JSPIdx idx:rest) = walkJSPath (ar V.!? idx) rest walkJSPath _ _ = Nothing @@ -92,7 +93,7 @@ parseClaims AppConfig{..} jclaims@(JSON.Object mclaims) = do unquoted (JSON.String t) = t unquoted v = T.decodeUtf8 . LBS.toStrict $ JSON.encode v -- impossible case - just added to please -Wincomplete-patterns -parseClaims _ _ = return AuthResult { authClaims = HM.empty, authRole = mempty } +parseClaims _ _ = return AuthResult { authClaims = KM.empty, authRole = mempty } -- | Validate authorization header. -- Parse and store JWT claims for future use in the request. 
diff --git a/src/PostgREST/GucHeader.hs b/src/PostgREST/GucHeader.hs index 5b8b5ebd53..2deba6999e 100644 --- a/src/PostgREST/GucHeader.hs +++ b/src/PostgREST/GucHeader.hs @@ -5,8 +5,9 @@ module PostgREST.GucHeader ) where import qualified Data.Aeson as JSON +import qualified Data.Aeson.Key as K +import qualified Data.Aeson.KeyMap as KM import qualified Data.CaseInsensitive as CI -import qualified Data.HashMap.Strict as HM import Network.HTTP.Types.Header (Header) @@ -21,8 +22,8 @@ newtype GucHeader = GucHeader (CI.CI ByteString, ByteString) instance JSON.FromJSON GucHeader where parseJSON (JSON.Object o) = - case HM.toList o of - [(k, JSON.String s)] -> pure $ GucHeader (CI.mk $ toUtf8 k, toUtf8 s) + case KM.toList o of + [(k, JSON.String s)] -> pure $ GucHeader (CI.mk $ toUtf8 $ K.toText k, toUtf8 s) _ -> mzero parseJSON _ = mzero diff --git a/src/PostgREST/Middleware.hs b/src/PostgREST/Middleware.hs index 359bd1180f..507202d225 100644 --- a/src/PostgREST/Middleware.hs +++ b/src/PostgREST/Middleware.hs @@ -10,6 +10,8 @@ module PostgREST.Middleware ) where import qualified Data.Aeson as JSON +import qualified Data.Aeson.Key as K +import qualified Data.Aeson.KeyMap as KM import qualified Data.ByteString.Lazy.Char8 as LBS import qualified Data.HashMap.Strict as HM import qualified Data.Text as T @@ -38,7 +40,7 @@ import PostgREST.Request.Preferences import Protolude -- | Runs local(transaction scoped) GUCs for every request, plus the pre-request function -runPgLocals :: AppConfig -> HM.HashMap Text JSON.Value -> Text -> +runPgLocals :: AppConfig -> KM.KeyMap JSON.Value -> Text -> (ApiRequest -> ExceptT Error SQL.Transaction Wai.Response) -> ApiRequest -> ByteString -> PgVersion -> ExceptT Error SQL.Transaction Wai.Response runPgLocals conf claims role app req jsonDbS actualPgVersion = do @@ -57,7 +59,7 @@ runPgLocals conf claims role app req jsonDbS actualPgVersion = do then setConfigLocal "request.cookie." <$> iCookies req else setConfigLocalJson "request.cookies" (iCookies req) claimsSql = if usesLegacyGucs - then setConfigLocal "request.jwt.claim." <$> [(toUtf8 c, toUtf8 $ unquoted v) | (c,v) <- HM.toList claims] + then setConfigLocal "request.jwt.claim." 
<$> [(toUtf8 $ K.toText c, toUtf8 $ unquoted v) | (c,v) <- KM.toList claims] else [setConfigLocal mempty ("request.jwt.claims", LBS.toStrict $ JSON.encode claims)] roleSql = [setConfigLocal mempty ("role", toUtf8 role)] appSettingsSql = setConfigLocal mempty <$> (join bimap toUtf8 <$> configAppSettings conf) diff --git a/src/PostgREST/Request/ApiRequest.hs b/src/PostgREST/Request/ApiRequest.hs index 2cd2f309a8..8dc4c5d899 100644 --- a/src/PostgREST/Request/ApiRequest.hs +++ b/src/PostgREST/Request/ApiRequest.hs @@ -18,6 +18,8 @@ module PostgREST.Request.ApiRequest ) where import qualified Data.Aeson as JSON +import qualified Data.Aeson.Key as K +import qualified Data.Aeson.KeyMap as KM import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy as LBS import qualified Data.CaseInsensitive as CI @@ -25,6 +27,7 @@ import qualified Data.Csv as CSV import qualified Data.HashMap.Strict as HM import qualified Data.List as L import qualified Data.List.NonEmpty as NonEmptyList +import qualified Data.Map.Strict as M import qualified Data.Set as S import qualified Data.Text.Encoding as T import qualified Data.Vector as V @@ -357,7 +360,7 @@ mutuallyAgreeable sProduces cAccepts = then listToMaybe sProduces else exact -type CsvData = V.Vector (HM.HashMap Text LBS.ByteString) +type CsvData = V.Vector (M.Map Text LBS.ByteString) {-| Converts CSV like @@ -375,8 +378,8 @@ csvToJson :: (CSV.Header, CsvData) -> JSON.Value csvToJson (_, vals) = JSON.Array $ V.map rowToJsonObj vals where - rowToJsonObj = JSON.Object . - HM.map (\str -> + rowToJsonObj = JSON.Object . KM.fromMapText . + M.map (\str -> if str == "NULL" then JSON.Null else JSON.String . T.decodeUtf8 $ LBS.toStrict str @@ -389,9 +392,9 @@ payloadAttributes raw json = JSON.Array arr -> case arr V.!? 0 of Just (JSON.Object o) -> - let canonicalKeys = S.fromList $ HM.keys o + let canonicalKeys = S.fromList $ K.toText <$> KM.keys o areKeysUniform = all (\case - JSON.Object x -> S.fromList (HM.keys x) == canonicalKeys + JSON.Object x -> S.fromList (K.toText <$> KM.keys x) == canonicalKeys _ -> False) arr in if areKeysUniform then Just $ ProcessedJSON raw canonicalKeys @@ -399,7 +402,7 @@ payloadAttributes raw json = Just _ -> Nothing Nothing -> Just emptyPJArray - JSON.Object o -> Just $ ProcessedJSON raw (S.fromList $ HM.keys o) + JSON.Object o -> Just $ ProcessedJSON raw (S.fromList $ K.toText <$> KM.keys o) -- truncate everything else to an empty array. _ -> Just emptyPJArray From e42bf30aeb5ad862860529ba9b37580eb311eda8 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Wed, 25 May 2022 21:20:51 +0200 Subject: [PATCH 04/15] cabal: allow swagger 2.8 --- postgrest.cabal | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/postgrest.cabal b/postgrest.cabal index 75b1b1352d..d0fe2e5f92 100644 --- a/postgrest.cabal +++ b/postgrest.cabal @@ -104,7 +104,7 @@ library , regex-tdfa >= 1.2.2 && < 1.4 , retry >= 0.7.4 && < 0.10 , scientific >= 0.3.4 && < 0.4 - , swagger2 >= 2.4 && < 2.7 + , swagger2 >= 2.4 && < 2.9 , text >= 1.2.2 && < 1.3 , time >= 1.6 && < 1.11 , unordered-containers >= 0.2.8 && < 0.3 From bb62e559d61699bb980ae861fd56b25f63868250 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Thu, 2 Jun 2022 23:23:46 +0200 Subject: [PATCH 05/15] cabal: require jose >= 0.8.5.1 (aeson-2 compat) jose version before 0.8.5.1 lacked an upper bound on aeson-2, causing build failures with aeson-2 present. 
--- postgrest.cabal | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/postgrest.cabal b/postgrest.cabal index d0fe2e5f92..89004d6627 100644 --- a/postgrest.cabal +++ b/postgrest.cabal @@ -92,7 +92,7 @@ library , http-types >= 0.12.2 && < 0.13 , insert-ordered-containers >= 0.2.2 && < 0.3 , interpolatedstring-perl6 >= 1 && < 1.1 - , jose >= 0.8.1 && < 0.9 + , jose >= 0.8.5.1 && < 0.10 , lens >= 4.14 && < 5.1 , lens-aeson >= 1.0.1 && < 1.2 , mtl >= 2.2.2 && < 2.3 From 7531c5490d05c3dbea5df7b44db12e4910e16a43 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 3 Jun 2022 12:59:28 +0200 Subject: [PATCH 06/15] cabal: allow hasql-1.5, hasql-dynamic-statements-0.3.* --- postgrest.cabal | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/postgrest.cabal b/postgrest.cabal index 89004d6627..0a9ed91526 100644 --- a/postgrest.cabal +++ b/postgrest.cabal @@ -83,8 +83,8 @@ library , cookie >= 0.4.2 && < 0.5 , either >= 4.4.1 && < 5.1 , gitrev >= 1.2 && < 1.4 - , hasql >= 1.4 && < 1.5 - , hasql-dynamic-statements == 0.3.1 + , hasql >= 1.4 && < 1.6 + , hasql-dynamic-statements >= 0.3.1 && < 0.4 , hasql-notifications >= 0.1 && < 0.3 , hasql-pool >= 0.5 && < 0.6 , hasql-transaction >= 1.0.1 && < 1.1 @@ -255,8 +255,8 @@ test-suite querycost , case-insensitive >= 1.2 && < 1.3 , containers >= 0.5.7 && < 0.7 , contravariant >= 1.4 && < 1.6 - , hasql >= 1.4 && < 1.5 - , hasql-dynamic-statements == 0.3.1 + , hasql >= 1.4 && < 1.6 + , hasql-dynamic-statements >= 0.3.1 && < 0.4 , hasql-pool >= 0.5 && < 0.6 , hasql-transaction >= 1.0.1 && < 1.1 , heredoc >= 0.2 && < 0.3 From f5afa419f14ec92d5788493eb51e87b0b0542dc9 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 3 Jun 2022 16:51:28 +0200 Subject: [PATCH 07/15] cabal, tests: require wai-logger 2.4.0, adapt tests wai-logger version 2.4.0 fixes log output to not say 'unknownSocket' for unix sockets. --- postgrest.cabal | 5 +++++ test/io/test_io.py | 6 +++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/postgrest.cabal b/postgrest.cabal index 0a9ed91526..2c6fb8eaa2 100644 --- a/postgrest.cabal +++ b/postgrest.cabal @@ -113,6 +113,11 @@ library , wai >= 3.2.1 && < 3.3 , wai-cors >= 0.2.5 && < 0.3 , wai-extra >= 3.1.8 && < 3.2 + -- We already depend on wai-logger >= 2.3.7 indirectly via wai-extra, + -- but we want to depend on 2.4.0 which fixes 'unknownSocket' log output + -- for unix sockets; this is tested in test/io/test_io.py. 
See + -- https://github.com/kazu-yamamoto/logger/commit/3a71ca70afdbb93d4ecf0083eeba1fbbbcab3fc3 + , wai-logger >= 2.4.0 , warp >= 3.3.19 && < 3.4 -- -fno-spec-constr may help keep compile time memory use in check, -- see https://gitlab.haskell.org/ghc/ghc/issues/16017#note_219304 diff --git a/test/io/test_io.py b/test/io/test_io.py index ef31c036f4..bf4f4cb915 100644 --- a/test/io/test_io.py +++ b/test/io/test_io.py @@ -922,7 +922,7 @@ def test_log_level(level, has_output, defaultenv): assert response.status_code == 200 if has_output[0]: assert re.match( - r'unknownSocket - postgrest_test_anonymous \[.+\] "GET / HTTP/1.1" 200 - "" "python-requests/.+"', + r'- - postgrest_test_anonymous \[.+\] "GET / HTTP/1.1" 200 - "" "python-requests/.+"', postgrest.process.stdout.readline().decode(), ) @@ -930,7 +930,7 @@ def test_log_level(level, has_output, defaultenv): assert response.status_code == 404 if has_output[1]: assert re.match( - r'unknownSocket - postgrest_test_anonymous \[.+\] "GET /unknown HTTP/1.1" 404 - "" "python-requests/.+"', + r'- - postgrest_test_anonymous \[.+\] "GET /unknown HTTP/1.1" 404 - "" "python-requests/.+"', postgrest.process.stdout.readline().decode(), ) @@ -938,7 +938,7 @@ def test_log_level(level, has_output, defaultenv): assert response.status_code == 500 if has_output[2]: assert re.match( - r'unknownSocket - - \[.+\] "GET / HTTP/1.1" 500 - "" "python-requests/.+"', + r'- - - \[.+\] "GET / HTTP/1.1" 500 - "" "python-requests/.+"', postgrest.process.stdout.readline().decode(), ) From b056735535d7655fa24382e980544f1430c5dc79 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Thu, 2 Jun 2022 22:54:51 +0200 Subject: [PATCH 08/15] stack: update stack.yaml --- stack.yaml | 18 ++++++- stack.yaml.lock | 134 +++++++++++++++++++++++++++++++++++++++++------- 2 files changed, 132 insertions(+), 20 deletions(-) diff --git a/stack.yaml b/stack.yaml index 4cf15d5b75..6f27a3c917 100644 --- a/stack.yaml +++ b/stack.yaml @@ -11,9 +11,23 @@ nix: extra-deps: - hasql-dynamic-statements-0.3.1@sha256:c3a2c89c4a8b3711368dbd33f0ccfe46a493faa7efc2c85d3e354c56a01dfc48,2673 - - hasql-implicits-0.1.0.2@sha256:5d54e09cb779a209681b139fb3cc726bae75134557932156340cc0a56dd834a8,1361 + - hasql-implicits-0.1.0.3@sha256:0015b563ca0e34242f34c5ab518648a02f01b577a469d2d24caced301e7aa33f,1361 - protolude-0.3.1@sha256:1cc9e5a5c26c33a43c52b554443dd9779fef13974eaa0beec7ca6d2551b400da,2647 - ptr-0.16.8.1@sha256:525219ec5f5da5c699725f7efcef91b00a7d44120fc019878b85c09440bf51d6,2686 - wai-extra-3.1.8@sha256:bf3dbe8f4c707b502b2a88262ed71c807220651597b76b56983f864af6197890,7280 - - wai-logger-2.3.7@sha256:19a0dc5122e22d274776d80786fb9501956f5e75b8f82464bbdad5604d154d82,1671 + - wai-logger-2.4.0@sha256:630b3d573dce5c6a84dfb372d69f8d15f9dbb52d6a04df43e57a44f0ca0eab0a,1671 - warp-3.3.19@sha256:c6a47029537d42844386170d732cdfe6d85b2f4279bbaefdd9b50caff6faeebb,10910 + - aeson-2.0.3.0@sha256:130bda8e10dc6dd159b79b306abb10025d7f8b5d9cbc2f7d6d7e6768a0272058,5845 + - OneTuple-0.3.1@sha256:a848c096c9d29e82ffdd30a9998aa2931cbccb3a1bc137539d80f6174d31603e,2262 + - attoparsec-0.14.4@sha256:79584bdada8b730cb5138fca8c35c76fbef75fc1d1e01e6b1d815a5ee9843191,5810 + - indexed-traversable-0.1.2@sha256:d66228887242f93ccb4fc7101a1e25a6560c8e4708f6e9ee1d3dd21901756c65,2519 + - primitive-0.7.4.0@sha256:89b88a3e08493b7727fa4089b0692bfbdf7e1e666ef54635f458644eb8358764,2857 + - semialign-1.2.0.1@sha256:0e179b4d3a8eff79001d374d6c91917c6221696b9620f0a4d86852fc6a9b9501,2836 + - 
text-short-0.1.5@sha256:962c6228555debdc46f758d0317dea16e5240d01419b42966674b08a5c3d8fa6,3498 + - time-compat-1.9.6.1@sha256:42d8f2e08e965e1718917d54ad69e1d06bd4b87d66c41dc7410f59313dba4ed1,5033 + - base-orphans-0.8.6@sha256:eb6758d0160d607e0c45dbd6b196f515b9a589fd4f6d2f926929dd5d56282d37,3175 + - hashable-1.3.5.0@sha256:3a2beeafb220f9de706568a7e4a5b3c762cc4c9f25c94d7ef795b8c2d6a691d7,4240 + - swagger2-2.8@sha256:8166e453462ac6a903e761f27fa25f34074aa20f936bf311e3205e441f3f6b2d,4446 + - postgresql-binary-0.12.4.2@sha256:c0435803744923d022b01a0a518a680cc2bbb0272893adf6a86b84f0a0ef6455,5690 + - jose-0.9@sha256:0a312116d10cbddc915b77dbf82958307702e8716f3366bf7d166776498857e2,3251 + - base64-bytestring-1.2.1.0@sha256:50ec0e229255d4c45cbdd568da011311b8887f304b931564886016f4984334d8,2396 diff --git a/stack.yaml.lock b/stack.yaml.lock index 540508c290..2fd7a60392 100644 --- a/stack.yaml.lock +++ b/stack.yaml.lock @@ -5,57 +5,155 @@ packages: - completed: + hackage: hasql-dynamic-statements-0.3.1@sha256:c3a2c89c4a8b3711368dbd33f0ccfe46a493faa7efc2c85d3e354c56a01dfc48,2673 pantry-tree: - sha256: b1b9a6a26ec765e5fe29f9a670a5c9ec7067ea00dee8491f0819284ff0201b6f size: 641 - hackage: hasql-dynamic-statements-0.3.1@sha256:c3a2c89c4a8b3711368dbd33f0ccfe46a493faa7efc2c85d3e354c56a01dfc48,2673 + sha256: b1b9a6a26ec765e5fe29f9a670a5c9ec7067ea00dee8491f0819284ff0201b6f original: hackage: hasql-dynamic-statements-0.3.1@sha256:c3a2c89c4a8b3711368dbd33f0ccfe46a493faa7efc2c85d3e354c56a01dfc48,2673 - completed: + hackage: hasql-implicits-0.1.0.3@sha256:0015b563ca0e34242f34c5ab518648a02f01b577a469d2d24caced301e7aa33f,1361 pantry-tree: - sha256: 2f00d1467d0e226b966c2cd7bac433c8948e2f7bbdf8a44936029f66fc20b5f3 - size: 310 - hackage: hasql-implicits-0.1.0.2@sha256:5d54e09cb779a209681b139fb3cc726bae75134557932156340cc0a56dd834a8,1361 + size: 264 + sha256: da00659415431d41abb76cd7dbba2c283922432f67f25c9d7ebf21d72402c22f original: - hackage: hasql-implicits-0.1.0.2@sha256:5d54e09cb779a209681b139fb3cc726bae75134557932156340cc0a56dd834a8,1361 + hackage: hasql-implicits-0.1.0.3@sha256:0015b563ca0e34242f34c5ab518648a02f01b577a469d2d24caced301e7aa33f,1361 - completed: + hackage: protolude-0.3.1@sha256:1cc9e5a5c26c33a43c52b554443dd9779fef13974eaa0beec7ca6d2551b400da,2647 pantry-tree: - sha256: 6452a6ca8d395f7d810139779bb0fd16fc1dbb00f1862630bc08ef5a100430f9 size: 1645 - hackage: protolude-0.3.1@sha256:1cc9e5a5c26c33a43c52b554443dd9779fef13974eaa0beec7ca6d2551b400da,2647 + sha256: 6452a6ca8d395f7d810139779bb0fd16fc1dbb00f1862630bc08ef5a100430f9 original: hackage: protolude-0.3.1@sha256:1cc9e5a5c26c33a43c52b554443dd9779fef13974eaa0beec7ca6d2551b400da,2647 - completed: + hackage: ptr-0.16.8.1@sha256:525219ec5f5da5c699725f7efcef91b00a7d44120fc019878b85c09440bf51d6,2686 pantry-tree: - sha256: d2b8440a738719ef8430ec38fe33b129e3940e4ccf2c016a727a1110a43656bb size: 1089 - hackage: ptr-0.16.8.1@sha256:525219ec5f5da5c699725f7efcef91b00a7d44120fc019878b85c09440bf51d6,2686 + sha256: d2b8440a738719ef8430ec38fe33b129e3940e4ccf2c016a727a1110a43656bb original: hackage: ptr-0.16.8.1@sha256:525219ec5f5da5c699725f7efcef91b00a7d44120fc019878b85c09440bf51d6,2686 - completed: + hackage: wai-extra-3.1.8@sha256:bf3dbe8f4c707b502b2a88262ed71c807220651597b76b56983f864af6197890,7280 pantry-tree: - sha256: a544ea95288d188e893322a8e6d68f2b1f844f772dbea1f26e5c0c1a74694f56 size: 4053 - hackage: wai-extra-3.1.8@sha256:bf3dbe8f4c707b502b2a88262ed71c807220651597b76b56983f864af6197890,7280 + sha256: 
a544ea95288d188e893322a8e6d68f2b1f844f772dbea1f26e5c0c1a74694f56 original: hackage: wai-extra-3.1.8@sha256:bf3dbe8f4c707b502b2a88262ed71c807220651597b76b56983f864af6197890,7280 - completed: + hackage: wai-logger-2.4.0@sha256:630b3d573dce5c6a84dfb372d69f8d15f9dbb52d6a04df43e57a44f0ca0eab0a,1671 pantry-tree: - sha256: 52b5abf5c4c09bcfbc06e01f761a75c32cbd3e6ba23c8843981933fcc31ed53c size: 474 - hackage: wai-logger-2.3.7@sha256:19a0dc5122e22d274776d80786fb9501956f5e75b8f82464bbdad5604d154d82,1671 + sha256: e65ce23ad056ec07ff620d52b9c36c73a68d422dfca28e12e4c0d234f143389c original: - hackage: wai-logger-2.3.7@sha256:19a0dc5122e22d274776d80786fb9501956f5e75b8f82464bbdad5604d154d82,1671 + hackage: wai-logger-2.4.0@sha256:630b3d573dce5c6a84dfb372d69f8d15f9dbb52d6a04df43e57a44f0ca0eab0a,1671 - completed: + hackage: warp-3.3.19@sha256:c6a47029537d42844386170d732cdfe6d85b2f4279bbaefdd9b50caff6faeebb,10910 pantry-tree: - sha256: 99ff839445ba2c9e29a294b45904e3f4575336c7d2b4504ce310d611661c761d size: 3973 - hackage: warp-3.3.19@sha256:c6a47029537d42844386170d732cdfe6d85b2f4279bbaefdd9b50caff6faeebb,10910 + sha256: 99ff839445ba2c9e29a294b45904e3f4575336c7d2b4504ce310d611661c761d original: hackage: warp-3.3.19@sha256:c6a47029537d42844386170d732cdfe6d85b2f4279bbaefdd9b50caff6faeebb,10910 +- completed: + hackage: aeson-2.0.3.0@sha256:130bda8e10dc6dd159b79b306abb10025d7f8b5d9cbc2f7d6d7e6768a0272058,5845 + pantry-tree: + size: 38191 + sha256: bccfc5a7259a27aad7e4193f367c3d7d67799f42632fdcb90241c1e9fbb0cf40 + original: + hackage: aeson-2.0.3.0@sha256:130bda8e10dc6dd159b79b306abb10025d7f8b5d9cbc2f7d6d7e6768a0272058,5845 +- completed: + hackage: OneTuple-0.3.1@sha256:a848c096c9d29e82ffdd30a9998aa2931cbccb3a1bc137539d80f6174d31603e,2262 + pantry-tree: + size: 506 + sha256: a685b08622f1fe0641e4f228a290878a1db4f7ef3eb63d00b10e8632097d1e6f + original: + hackage: OneTuple-0.3.1@sha256:a848c096c9d29e82ffdd30a9998aa2931cbccb3a1bc137539d80f6174d31603e,2262 +- completed: + hackage: attoparsec-0.14.4@sha256:79584bdada8b730cb5138fca8c35c76fbef75fc1d1e01e6b1d815a5ee9843191,5810 + pantry-tree: + size: 5039 + sha256: 5b8e087f73d334019252606b1f64b986cef61946a085ccb77c6bc4d038691c48 + original: + hackage: attoparsec-0.14.4@sha256:79584bdada8b730cb5138fca8c35c76fbef75fc1d1e01e6b1d815a5ee9843191,5810 +- completed: + hackage: indexed-traversable-0.1.2@sha256:d66228887242f93ccb4fc7101a1e25a6560c8e4708f6e9ee1d3dd21901756c65,2519 + pantry-tree: + size: 495 + sha256: 7edfd8672d45016089df7227d9f6452117632a7236d4e71b24a2a1041cab4701 + original: + hackage: indexed-traversable-0.1.2@sha256:d66228887242f93ccb4fc7101a1e25a6560c8e4708f6e9ee1d3dd21901756c65,2519 +- completed: + hackage: primitive-0.7.4.0@sha256:89b88a3e08493b7727fa4089b0692bfbdf7e1e666ef54635f458644eb8358764,2857 + pantry-tree: + size: 1655 + sha256: 71a850c658b70e869da19f61615d87d9d6ecec597f0e3d4b498da56559114829 + original: + hackage: primitive-0.7.4.0@sha256:89b88a3e08493b7727fa4089b0692bfbdf7e1e666ef54635f458644eb8358764,2857 +- completed: + hackage: semialign-1.2.0.1@sha256:0e179b4d3a8eff79001d374d6c91917c6221696b9620f0a4d86852fc6a9b9501,2836 + pantry-tree: + size: 537 + sha256: 635bbbb517f0c063a4bc2e9e6efdb0e598b9d9fd467f52df81ab3a4af1fd923b + original: + hackage: semialign-1.2.0.1@sha256:0e179b4d3a8eff79001d374d6c91917c6221696b9620f0a4d86852fc6a9b9501,2836 +- completed: + hackage: text-short-0.1.5@sha256:962c6228555debdc46f758d0317dea16e5240d01419b42966674b08a5c3d8fa6,3498 + pantry-tree: + size: 727 + sha256: 
1fc6561c4acb94a41e58cd8f8c7cce161c18c5629dac63a54a9c62f9c778c52b + original: + hackage: text-short-0.1.5@sha256:962c6228555debdc46f758d0317dea16e5240d01419b42966674b08a5c3d8fa6,3498 +- completed: + hackage: time-compat-1.9.6.1@sha256:42d8f2e08e965e1718917d54ad69e1d06bd4b87d66c41dc7410f59313dba4ed1,5033 + pantry-tree: + size: 4113 + sha256: b262c5ae8d72d2073d12e1de1863abd234fad8c138df28f32bb51bc232ced608 + original: + hackage: time-compat-1.9.6.1@sha256:42d8f2e08e965e1718917d54ad69e1d06bd4b87d66c41dc7410f59313dba4ed1,5033 +- completed: + hackage: base-orphans-0.8.6@sha256:eb6758d0160d607e0c45dbd6b196f515b9a589fd4f6d2f926929dd5d56282d37,3175 + pantry-tree: + size: 1272 + sha256: f889d6903383305688e6e703982bfdc985649b7d4538429e44f98f99d8f73c07 + original: + hackage: base-orphans-0.8.6@sha256:eb6758d0160d607e0c45dbd6b196f515b9a589fd4f6d2f926929dd5d56282d37,3175 +- completed: + hackage: hashable-1.3.5.0@sha256:3a2beeafb220f9de706568a7e4a5b3c762cc4c9f25c94d7ef795b8c2d6a691d7,4240 + pantry-tree: + size: 1248 + sha256: 4df2f6b536a0fcc5f7d562cb29e373f27dc4a2747452ac5cc74c1599cab22fc5 + original: + hackage: hashable-1.3.5.0@sha256:3a2beeafb220f9de706568a7e4a5b3c762cc4c9f25c94d7ef795b8c2d6a691d7,4240 +- completed: + hackage: swagger2-2.8@sha256:8166e453462ac6a903e761f27fa25f34074aa20f936bf311e3205e441f3f6b2d,4446 + pantry-tree: + size: 2192 + sha256: 1c3f5503292e0323a2fd925250ac534e0d8d32c1c3fcae3e0c61c35e12a78e83 + original: + hackage: swagger2-2.8@sha256:8166e453462ac6a903e761f27fa25f34074aa20f936bf311e3205e441f3f6b2d,4446 +- completed: + hackage: postgresql-binary-0.12.4.2@sha256:c0435803744923d022b01a0a518a680cc2bbb0272893adf6a86b84f0a0ef6455,5690 + pantry-tree: + size: 1665 + sha256: db2520640f426cdec35a1d4f5072cf6cf50a971cc2c5c5972104605f4307b831 + original: + hackage: postgresql-binary-0.12.4.2@sha256:c0435803744923d022b01a0a518a680cc2bbb0272893adf6a86b84f0a0ef6455,5690 +- completed: + hackage: jose-0.9@sha256:0a312116d10cbddc915b77dbf82958307702e8716f3366bf7d166776498857e2,3251 + pantry-tree: + size: 2105 + sha256: b787b8fa8a0ffe381a55a8b66f088d80ed7be0bc5acaf2c54cee78c4ab5fee1b + original: + hackage: jose-0.9@sha256:0a312116d10cbddc915b77dbf82958307702e8716f3366bf7d166776498857e2,3251 +- completed: + hackage: base64-bytestring-1.2.1.0@sha256:50ec0e229255d4c45cbdd568da011311b8887f304b931564886016f4984334d8,2396 + pantry-tree: + size: 850 + sha256: 1154783dd163be45502456716ca44c4244d0aa6e7cf1aa4e7763efb3485a509c + original: + hackage: base64-bytestring-1.2.1.0@sha256:50ec0e229255d4c45cbdd568da011311b8887f304b931564886016f4984334d8,2396 snapshots: - completed: - sha256: 87842ecbaa8ca9cee59a7e6be52369dbed82ed075cb4e0d152614a627e8fd488 size: 586069 url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/18/14.yaml + sha256: 87842ecbaa8ca9cee59a7e6be52369dbed82ed075cb4e0d152614a627e8fd488 original: lts-18.14 From 12967f86439f70f21fa5de7971f9e14f19769946 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Tue, 7 Jun 2022 10:53:54 +0200 Subject: [PATCH 09/15] nix: nix version bump --- nix/nixpkgs-version.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nix/nixpkgs-version.nix b/nix/nixpkgs-version.nix index 805169bebc..f5f42f5f4a 100644 --- a/nix/nixpkgs-version.nix +++ b/nix/nixpkgs-version.nix @@ -1,6 +1,6 @@ # Pinned version of Nixpkgs, generated with postgrest-nixpkgs-upgrade. 
{ - date = "2021-11-02"; - rev = "7053541084bf5ce2921ef307e5585d39d7ba8b3f"; - tarballHash = "1flhh5d4zy43x6060hvzjb5hi5cmc51ivc0nwmija9n8d35kcc4x"; + date = "2022-03-30"; + rev = "9a5aa75d56ad4163521f1692469e6dc54b90068c"; + tarballHash = "1f3wyldcx1zpyk2q6122mkg16chf9j7swwx1v6f1dg126xz1238f"; } From ff56c3adf4dc2e02ab77d979522e11772f419908 Mon Sep 17 00:00:00 2001 From: Wolfgang Walther Date: Mon, 31 Jan 2022 08:19:49 +0100 Subject: [PATCH 10/15] nix: port openssl patch to updated nixpkgs --- ...t-runtime-dependencies-of-static-builds.patch | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/nix/patches/nixpkgs-openssl-split-runtime-dependencies-of-static-builds.patch b/nix/patches/nixpkgs-openssl-split-runtime-dependencies-of-static-builds.patch index 43b58f8d61..e92f656385 100644 --- a/nix/patches/nixpkgs-openssl-split-runtime-dependencies-of-static-builds.patch +++ b/nix/patches/nixpkgs-openssl-split-runtime-dependencies-of-static-builds.patch @@ -2,11 +2,11 @@ diff --git a/pkgs/development/libraries/openssl/default.nix b/pkgs/development/l index d4be8cc2428..3979698711f 100644 --- a/pkgs/development/libraries/openssl/default.nix +++ b/pkgs/development/libraries/openssl/default.nix -@@ -50,9 +50,21 @@ let +@@ -43,9 +43,21 @@ let substituteInPlace crypto/async/arch/async_posix.h \ --replace '!defined(__ANDROID__) && !defined(__OpenBSD__)' \ '!defined(__ANDROID__) && !defined(__OpenBSD__) && 0' -+ '' + optionalString static ++ '' + lib.optionalString static + # On static builds, the ENGINESDIR will be empty, but its path will be + # compiled into the library. In order to minimize the runtime dependencies + # of packages that statically link openssl, we move it into the OPENSSLDIR, @@ -17,15 +17,15 @@ index d4be8cc2428..3979698711f 100644 + 'ENGINESDIR=$(OPENSSLDIR)/engines-{- $sover_dirname -}' ''; -- outputs = [ "bin" "dev" "out" "man" ] ++ optional withDocs "doc"; +- outputs = [ "bin" "dev" "out" "man" ] ++ lib.optional withDocs "doc"; + outputs = [ "bin" "dev" "out" "man" ] -+ ++ optional withDocs "doc" ++ ++ lib.optional withDocs "doc" + # Separate output for the runtime dependencies of the static build. 
-+ ++ optional static "etc"; ++ ++ lib.optional static "etc"; setOutputFlags = false; separateDebugInfo = !stdenv.hostPlatform.isDarwin && -@@ -101,7 +113,17 @@ let +@@ -95,7 +107,17 @@ let configureFlags = [ "shared" # "shared" builds both shared and static libraries "--libdir=lib" @@ -44,7 +44,7 @@ index d4be8cc2428..3979698711f 100644 ] ++ lib.optionals withCryptodev [ "-DHAVE_CRYPTODEV" "-DUSE_CRYPTODEV_DIGESTS" -@@ -131,6 +153,9 @@ let +@@ -126,6 +148,9 @@ let if [ -n "$(echo $out/lib/*.so $out/lib/*.dylib $out/lib/*.dll)" ]; then rm "$out/lib/"*.a fi @@ -54,7 +54,7 @@ index d4be8cc2428..3979698711f 100644 '' + lib.optionalString (!stdenv.hostPlatform.isWindows) # Fix bin/c_rehash's perl interpreter line # -@@ -152,14 +177,15 @@ let +@@ -147,14 +172,15 @@ let mv $out/include $dev/ # remove dependency on Perl at runtime From 14c882a56672f24832a6de559e2d16f3d2a41d1a Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Tue, 7 Jun 2022 10:53:59 +0200 Subject: [PATCH 11/15] nix: fetch PostgreSQL 9.6 via postgresql legacy overlay --- nix/overlays/postgresql-legacy.nix | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/nix/overlays/postgresql-legacy.nix b/nix/overlays/postgresql-legacy.nix index 6e3afbdd61..248f9f4248 100644 --- a/nix/overlays/postgresql-legacy.nix +++ b/nix/overlays/postgresql-legacy.nix @@ -2,19 +2,18 @@ self: super: # Overlay that adds legacy versions of PostgreSQL that are supported by # PostgREST. { - # PostgreSQL 9.5 was removed from Nixpkgs with - # https://github.com/NixOS/nixpkgs/commit/72ab382fb6b729b0d654f2c03f5eb25b39f11fbb + # PostgreSQL 9.6 was removed from Nixpkgs with + # https://github.com/NixOS/nixpkgs/commit/757dd008b2f2926fc0f7688fa8189f930ea47521 # We pin its parent commit to get the last version that was available. 
- # postgresql_9_5 = - # let - # rev = "55ac7d4580c9ab67848c98cb9519317a1cc399c8"; - # tarballHash = "02ffj9f8s1hwhmxj85nx04sv64qb6jm7w0122a1dz9n32fymgklj"; - # - # pinnedPkgs = - # builtins.fetchTarball { - # url = "https://github.com/nixos/nixpkgs/archive/${rev}.tar.gz"; - # sha256 = tarballHash; - # }; - # in - # (import pinnedPkgs { }).pkgs.postgresql_9_5; + postgresql_9_6 = + let + rev = "571cbf3d1db477058303cef8754fb85a14e90eb7"; + tarballHash = "0q74wn418i1bn5sssacmw8ykpmqvzr0s93sj6pbs3rf6bf134fkz"; + pinnedPkgs = + builtins.fetchTarball { + url = "https://github.com/nixos/nixpkgs/archive/${rev}.tar.gz"; + sha256 = tarballHash; + }; + in + (import pinnedPkgs { }).pkgs.postgresql_9_6; } From 34d0f34620fa0681635fbe3bfeb8a99a9cecc99d Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 3 Jun 2022 15:35:57 +0200 Subject: [PATCH 12/15] nix: bashCompletion -> bash-completion --- nix/hsie/default.nix | 4 ++-- nix/overlays/build-toolbox/build-toolbox.nix | 6 +++--- .../checked-shell-script/checked-shell-script.nix | 4 ++-- nix/tools/withTools.nix | 2 +- shell.nix | 8 ++++---- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/nix/hsie/default.nix b/nix/hsie/default.nix index f0809c99ae..1f6c2297d7 100644 --- a/nix/hsie/default.nix +++ b/nix/hsie/default.nix @@ -28,8 +28,8 @@ let mkdir -p $out/bin ln -s $hsie $out/bin/$name ''; - bashCompletion = + bash-completion = runCommand "${name}-bash-completion" { inherit bin name; } "$bin/bin/$name --bash-completion-script $bin/bin/$name > $out"; in -hsie // { inherit bashCompletion bin; } +hsie // { inherit bash-completion bin; } diff --git a/nix/overlays/build-toolbox/build-toolbox.nix b/nix/overlays/build-toolbox/build-toolbox.nix index a170f9a24e..e6b33e12fe 100644 --- a/nix/overlays/build-toolbox/build-toolbox.nix +++ b/nix/overlays/build-toolbox/build-toolbox.nix @@ -1,11 +1,11 @@ -# Creates an environment that exposes bashCompletion arguments from all checkedShellScripts +# Creates an environment that exposes bash-completion arguments from all checkedShellScripts { buildEnv }: { name , tools , extra ? 

From d7f54e76f40d92ab6fd3453cc348519a796c9dfd Mon Sep 17 00:00:00 2001
From: Robert Vollmert
Date: Tue, 24 May 2022 19:44:50 +0200
Subject: [PATCH 13/15] nix: haskell overlay update

---
 nix/overlays/haskell-packages.nix | 38 +++++++++++++------------------
 1 file changed, 16 insertions(+), 22 deletions(-)

diff --git a/nix/overlays/haskell-packages.nix b/nix/overlays/haskell-packages.nix
index aed8262478..33a5a3cacb 100644
--- a/nix/overlays/haskell-packages.nix
+++ b/nix/overlays/haskell-packages.nix
@@ -41,41 +41,35 @@ let
         }
         { };

-      wai-extra =
+      configurator-pg =
         prev.callHackageDirect
           {
-            pkg = "wai-extra";
-            ver = "3.1.8";
-            sha256 = "1ha8sxc2ii7k7xs5nm06wfwqmf4f1p2acp4ya0jnx6yn6551qps4";
+            pkg = "configurator-pg";
+            ver = "0.2.6";
+            sha256 = "sha256-nkamTOpP/w0vQfOXsoQKEstW3n9qyRsv0TocrEerKlU=";
           }
           { };

-      wai-logger =
-        prev.callHackageDirect
+      hasql-dynamic-statements =
+        lib.dontCheck (prev.callHackageDirect
           {
-            pkg = "wai-logger";
-            ver = "2.3.7";
-            sha256 = "1d23fdbwbahr3y1vdyn57m1qhljy22pm5cpgb20dy6mlxzdb30xd";
+            pkg = "hasql-dynamic-statements";
+            ver = "0.3.1.1";
+            sha256 = "sha256-jF50GcCtEUV3TN1UsD4LaSBH6arcqfKhxOk+b+c8Bl8=";
           }
-          { };
+          { });

-      warp =
+      hasql-implicits =
         lib.dontCheck (prev.callHackageDirect
           {
-            pkg = "warp";
-            ver = "3.3.19";
-            sha256 = "0y3jj4bhviss6ff9lwxki0zbdcl1rb398bk4s80zvfpnpy7p94cx";
+            pkg = "hasql-implicits";
+            ver = "0.1.0.3";
+            sha256 = "sha256-IpAOVHNdXJ53B/fmo+DeNUKiBSS6Bo7Uha/krpMt64g=";
           }
           { });

-      hasql-dynamic-statements =
-        lib.dontCheck (lib.unmarkBroken prev.hasql-dynamic-statements);
-
-      hasql-implicits =
-        lib.dontCheck (lib.unmarkBroken prev.hasql-implicits);
-
-      ptr =
-        lib.dontCheck (lib.unmarkBroken prev.ptr);
+      hspec-wai-json =
+        lib.dontCheck (lib.unmarkBroken prev.hspec-wai-json);
     } // extraOverrides final prev;
 in
 {
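
For reference, the three override shapes used in the updated overlay, shown in isolation. The package names and the hash are placeholders, and 'lib' stands for nixpkgs' haskell.lib as in the overlay above; this is an illustration, not an excerpt from the repository:

    final: prev: {
      # Pin an exact Hackage release by hash; the trailing { } passes no
      # extra arguments to the generated derivation.
      example-pinned = prev.callHackageDirect
        {
          pkg = "example-pinned";
          ver = "1.2.3";
          sha256 = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; # placeholder
        }
        { };

      # Keep the version that nixpkgs packages but skip its test suite.
      example-no-tests = lib.dontCheck prev.example-no-tests;

      # Use a package that nixpkgs marks as broken, also skipping tests.
      example-unbroken = lib.dontCheck (lib.unmarkBroken prev.example-unbroken);
    }

Pinning via callHackageDirect trades convenience for reproducibility: the build no longer follows whatever version the nixpkgs snapshot happens to ship.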
"sha256-IpAOVHNdXJ53B/fmo+DeNUKiBSS6Bo7Uha/krpMt64g="; } { }); - hasql-dynamic-statements = - lib.dontCheck (lib.unmarkBroken prev.hasql-dynamic-statements); - - hasql-implicits = - lib.dontCheck (lib.unmarkBroken prev.hasql-implicits); - - ptr = - lib.dontCheck (lib.unmarkBroken prev.ptr); + hspec-wai-json = + lib.dontCheck (lib.unmarkBroken prev.hspec-wai-json); } // extraOverrides final prev; in { From 63ebf6e92bb64b4c3b9b1d8de62fedf08e2485e9 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 10 Jun 2022 11:14:03 +0000 Subject: [PATCH 14/15] nix: hack static-haskell-nix for updated nixpkgs This patches static-haskell-nix to work for building postgrest with updated nixpkgs (from ~202203): - The ncurses 'enableStatic' argument doesn't exist anymore. We use the vanilla package instead, which seems to work fine. - The 'isExecutable' check fails with a strange error related to trying to override 'mkDerivation'. We patch 'isExecutable' to check explicitly whether we're building postgrest. ('isExecutable' is used to determine whether to build a package statically.) --- nix/patches/default.nix | 5 +++++ .../static-haskell-nix-isexecutable.patch | 18 ++++++++++++++++++ nix/patches/static-haskell-nix-ncurses.patch | 13 +++++++++++++ nix/static-haskell-package.nix | 3 ++- 4 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 nix/patches/static-haskell-nix-isexecutable.patch create mode 100644 nix/patches/static-haskell-nix-ncurses.patch diff --git a/nix/patches/default.nix b/nix/patches/default.nix index 9c9109cd8a..880a8cece9 100644 --- a/nix/patches/default.nix +++ b/nix/patches/default.nix @@ -21,4 +21,9 @@ # See: https://github.com/NixOS/nixpkgs/pull/87879 nixpkgs-openssl-split-runtime-dependencies-of-static-builds = ./nixpkgs-openssl-split-runtime-dependencies-of-static-builds.patch; + + static-haskell-nix-ncurses = + ./static-haskell-nix-ncurses.patch; + static-haskell-nix-isexecutable = + ./static-haskell-nix-isexecutable.patch; } diff --git a/nix/patches/static-haskell-nix-isexecutable.patch b/nix/patches/static-haskell-nix-isexecutable.patch new file mode 100644 index 0000000000..a35adc8853 --- /dev/null +++ b/nix/patches/static-haskell-nix-isexecutable.patch @@ -0,0 +1,18 @@ +diff --git a/survey/default.nix b/survey/default.nix +index 46d8066..5f7950b 100644 +--- a/survey/default.nix ++++ b/survey/default.nix +@@ -85,10 +85,9 @@ let + # Function that tells us if a given Haskell package has an executable. + # Pass only Haskell packages to this! + # Filter away other stuff with `isProperHaskellPackage` first. +- isExecutable = pkg: +- (pkgs.haskell.lib.overrideCabal pkg (drv: { +- passthru.isExecutable = drv.isExecutable or false; +- })).isExecutable; ++ # FIXME postgrest: original fails with our nixpkgs version, we ++ # just hardcode the check to work for a static postgrest build. ++ isExecutable = pkg: pkg.name == "postgrest"; + + # Turn e.g. `Cabal_1_2_3_4` into `1.2.3.4`. + cabalDottedVersion = diff --git a/nix/patches/static-haskell-nix-ncurses.patch b/nix/patches/static-haskell-nix-ncurses.patch new file mode 100644 index 0000000000..39c1327412 --- /dev/null +++ b/nix/patches/static-haskell-nix-ncurses.patch @@ -0,0 +1,13 @@ +diff --git a/survey/default.nix b/survey/default.nix +index 46d8066..a47f214 100644 +--- a/survey/default.nix ++++ b/survey/default.nix +@@ -1519,7 +1519,7 @@ let + [ + "--enable-executable-static" # requires `useFixedCabal` + # `enableShared` seems to be required to avoid `recompile with -fPIC` errors on some packages. 

From 8d8074d500ebb27a8e74f63086e579f9c039b5a0 Mon Sep 17 00:00:00 2001
From: Robert Vollmert
Date: Fri, 3 Jun 2022 15:49:49 +0200
Subject: [PATCH 15/15] nix: keep old hlint

This keeps the pre-nixpkgs-update hlint version, so we can defer
addressing the warnings to a follow-up PR.
---
 default.nix | 18 +++++++++++++++++-
 1 file changed, 17 insertions(+), 1 deletion(-)

diff --git a/default.nix b/default.nix
index 52c9ea7055..e086158618 100644
--- a/default.nix
+++ b/default.nix
@@ -141,7 +141,23 @@ rec {

   # Linting and styling tools.
   style =
-    pkgs.callPackage nix/tools/style.nix { inherit hsie; };
+    let
+      # keep old version of hlint to not deal with linter changes for now
+      nixpkgsVersionPrev = {
+        date = "2021-11-02";
+        rev = "7053541084bf5ce2921ef307e5585d39d7ba8b3f";
+        tarballHash = "1flhh5d4zy43x6060hvzjb5hi5cmc51ivc0nwmija9n8d35kcc4x";
+      };
+
+      nixpkgsPrev =
+        builtins.fetchTarball {
+          url = "https://github.com/nixos/nixpkgs/archive/${nixpkgsVersionPrev.rev}.tar.gz";
+          sha256 = nixpkgsVersionPrev.tarballHash;
+        };
+      pkgsPrev = import nixpkgsPrev { };
+      inherit (pkgsPrev) hlint;
+    in
+    pkgs.callPackage nix/tools/style.nix { inherit hlint hsie; };

   # Scripts for running tests.
   tests =