[inferno-ml] Change parameter input/output representation #147

Merged · 7 commits · Nov 1, 2024
3 changes: 3 additions & 0 deletions inferno-ml-server-types/CHANGELOG.md
@@ -1,6 +1,9 @@
# Revision History for inferno-ml-server-types
*Note*: we use https://pvp.haskell.org/ (MAJOR.MAJOR.MINOR.PATCH)

## 0.11.0
* Split parameter inputs and outputs

## 0.10.0
* Change `Id` to `UUID`
* Add new testing endpoint to override models, script, etc...
2 changes: 1 addition & 1 deletion inferno-ml-server-types/inferno-ml-server-types.cabal
@@ -1,6 +1,6 @@
cabal-version: 2.4
name: inferno-ml-server-types
version: 0.10.0
version: 0.11.0
synopsis: Types for Inferno ML server
description: Types for Inferno ML server
homepage: https://github.com/plow-technologies/inferno.git#readme
12 changes: 6 additions & 6 deletions inferno-ml-server-types/src/Inferno/ML/Server/Client.hs
@@ -27,9 +27,9 @@ cancelC = client $ Proxy @CancelAPI

-- | Run an inference parameter
inferenceC ::
forall gid p s.
forall gid p.
-- | SQL identifier of the inference parameter to be run
Id (InferenceParam gid p s) ->
Id (InferenceParam gid p) ->
-- | Optional resolution for scripts that use e.g. @valueAt@; defaults to
-- the param\'s stored resolution if not provided. This lets users override
-- the resolution on an ad-hoc basis without needing to alter the stored
@@ -44,16 +44,16 @@ inferenceC ::
-- (not defined in this repository) to verify this before directing
-- the writes to their final destination
ClientM (WriteStream IO)
inferenceC = client $ Proxy @(InferenceAPI gid p s)
inferenceC = client $ Proxy @(InferenceAPI gid p)

-- | Run an inference parameter
inferenceTestC ::
forall gid p s.
forall gid p.
ToJSON p =>
-- | SQL identifier of the inference parameter to be run
Id (InferenceParam gid p s) ->
Id (InferenceParam gid p) ->
Maybe Int64 ->
UUID ->
EvaluationEnv gid p ->
ClientM (WriteStream IO)
inferenceTestC = client $ Proxy @(InferenceTestAPI gid p s)
inferenceTestC = client $ Proxy @(InferenceTestAPI gid p)
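
For illustration only (not part of this diff): with the script type parameter `s` removed, client calls now mention only `gid` and `p`. A minimal sketch of invoking the updated `inferenceC`, assuming `gid ~ Int64`, `p ~ Int`, a parameter id obtained elsewhere, and a server on localhost:

import Data.Int (Int64)
import Data.UUID.V4 (nextRandom)
import Inferno.ML.Server.Client (inferenceC)
import Inferno.ML.Server.Types (Id, InferenceParam)
import Network.HTTP.Client (defaultManagerSettings, newManager)
import Servant.Client (BaseUrl (..), Scheme (Http), mkClientEnv, runClientM)

-- Run a stored parameter; `Nothing` keeps the parameter's stored resolution
runStoredParam :: Id (InferenceParam Int64 Int) -> IO ()
runStoredParam paramId = do
  manager <- newManager defaultManagerSettings
  let env = mkClientEnv manager $ BaseUrl Http "localhost" 8080 ""
  -- Fresh job identifier, required by the route
  jobId <- nextRandom
  result <- runClientM (inferenceC paramId Nothing jobId) env
  -- The returned write stream would normally be consumed; just report errors
  either print (const $ pure ()) result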
123 changes: 40 additions & 83 deletions inferno-ml-server-types/src/Inferno/ML/Server/Types.hs
@@ -70,9 +70,7 @@ import GHC.Generics (Generic)
import Inferno.Instances.Arbitrary ()
import Inferno.Types.Syntax (Ident)
import Inferno.Types.VersionControl
( VCHashUpdate,
VCHashUpdateViaShow (VCHashUpdateViaShow),
VCObjectHash,
( VCObjectHash,
byteStringToVCObjectHash,
vcObjectHashToByteString,
)
@@ -127,11 +125,11 @@ import Web.HttpApiData
)

-- API type for `inferno-ml-server`
type InfernoMlServerAPI gid p s =
type InfernoMlServerAPI gid p =
StatusAPI
-- Evaluate an inference script
:<|> InferenceAPI gid p s
:<|> InferenceTestAPI gid p s
:<|> InferenceAPI gid p
:<|> InferenceTestAPI gid p
:<|> CancelAPI

type StatusAPI =
@@ -140,19 +138,19 @@ type StatusAPI =

type CancelAPI = "inference" :> "cancel" :> Put '[JSON] ()

type InferenceAPI gid p s =
type InferenceAPI gid p =
"inference"
:> "run"
:> Capture "id" (Id (InferenceParam gid p s))
:> Capture "id" (Id (InferenceParam gid p))
:> QueryParam "res" Int64
:> QueryParam' '[Required] "uuid" UUID
:> StreamPost NewlineFraming JSON (WriteStream IO)

type InferenceTestAPI gid p s =
type InferenceTestAPI gid p =
-- Evaluate an inference script
"inference"
:> "test"
:> Capture "id" (Id (InferenceParam gid p s))
:> Capture "id" (Id (InferenceParam gid p))
:> QueryParam "res" Int64
:> QueryParam' '[Required] "uuid" UUID
:> ReqBody '[JSON] (EvaluationEnv gid p)
@@ -192,25 +190,28 @@ data ServerStatus
= Idle
| EvaluatingScript
deriving stock (Show, Eq, Generic)
deriving anyclass (FromJSON, ToJSON)
deriving anyclass (FromJSON, ToJSON, ToADTArbitrary, NFData)

instance Arbitrary ServerStatus where
arbitrary = genericArbitrary

-- | Information for contacting a bridge server that implements the 'BridgeAPI'
data BridgeInfo gid p s = BridgeInfo
{ id :: Id (InferenceParam gid p s),
data BridgeInfo gid p = BridgeInfo
{ id :: Id (InferenceParam gid p),
host :: IPv4,
port :: Word64
}
deriving stock (Show, Eq, Generic)
deriving anyclass (FromJSON, ToJSON, NFData)

instance FromRow (BridgeInfo gid p s) where
instance FromRow (BridgeInfo gid p) where
fromRow =
BridgeInfo
<$> field
<*> field
<*> fmap (fromIntegral @Int64) field

instance ToRow (BridgeInfo gid p s) where
instance ToRow (BridgeInfo gid p) where
toRow bi =
[ bi.id & toField,
bi.host & toField,
@@ -677,27 +678,21 @@ showVersion (Version ns ts) =

-- | Row of the inference parameter table, parameterized by the user, group, and
-- script type
data InferenceParam gid p s = InferenceParam
{ id :: Maybe (Id (InferenceParam gid p s)),
data InferenceParam gid p = InferenceParam
{ id :: Maybe (Id (InferenceParam gid p)),
-- | The script of the parameter
--
-- For new parameters, this will be textual or some other identifier
-- (e.g. a UUID for use with @inferno-lsp@)
--
-- For existing inference params, this is the foreign key for the specific
-- script in the 'InferenceScript' table (i.e. a @VCObjectHash@)
script :: s,
-- | This is called @inputs@ but is also used for script outputs as
-- well. The access (input or output) is controlled by the 'ScriptInputType'.
-- For example, if this field is set to @[("input0", Single (p, Readable))]@,
-- the script will only have a single read-only input and will not be able to
-- write anywhere (note that we should disallow this scenario, as script
-- evaluation would not work properly)
--
-- Mapping the input\/output to the Inferno identifier helps ensure that
script :: VCObjectHash,
-- | Mapping the input\/output to the Inferno identifier helps ensure that
-- Inferno identifiers are always pointing to the correct input\/output;
-- otherwise we would need to rely on the order of the original identifiers
inputs :: Map Ident (SingleOrMany p, ScriptInputType),
inputs :: Map Ident (SingleOrMany p),
outputs :: Map Ident (SingleOrMany p),
-- | Resolution, passed to bridge routes
resolution :: Word64,
-- | The time that this parameter was \"deleted\", if any. For active
@@ -709,14 +704,15 @@ data InferenceParam gid p s = InferenceParam
deriving anyclass (NFData, ToJSON)

{- ORMOLU_DISABLE -}
instance (FromJSON s, FromJSON p, FromJSON gid) => FromJSON (InferenceParam gid p s)
instance (FromJSON p, FromJSON gid) => FromJSON (InferenceParam gid p)
where
parseJSON = withObject "InferenceParam" $ \o ->
InferenceParam
-- The ID needs to be included when deserializing
<$> o .: "id"
<*> o .: "script"
<*> o .: "inputs"
<*> o .: "outputs"
<*> o .:? "resolution" .!= 128
-- We shouldn't require this field
<*> o .:? "terminated"
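
A small sketch of the serialization change (assuming `gid ~ Int64` and `p ~ Int`, with a fully populated value obtained elsewhere): the split representation round-trips through separate "inputs" and "outputs" keys, with "resolution" defaulting to 128 on the way back in if omitted.

import Data.Aeson (decode, encode)
import Data.Int (Int64)
import Inferno.ML.Server.Types (InferenceParam)

-- Encode then decode a parameter; both maps travel as their own JSON keys
roundTrip :: InferenceParam Int64 Int -> Maybe (InferenceParam Int64 Int)
roundTrip = decode . encode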
@@ -731,53 +727,44 @@ instance
Typeable gid,
Typeable p
) =>
FromRow (InferenceParam gid p VCObjectHash)
FromRow (InferenceParam gid p)
where
fromRow =
InferenceParam
<$> field
<*> fmap wrappedTo (field @VCObjectHashRow)
<*> fmap getAeson field
<*> fmap getAeson field
<*> fmap fromIntegral (field @Int64)
<*> field
<*> field

instance (ToJSON p, ToField gid) => ToRow (InferenceParam gid p VCObjectHash) where
instance (ToJSON p, ToField gid) => ToRow (InferenceParam gid p) where
-- NOTE: Do not change the order of the field actions
toRow ip =
[ ip.id & maybe (toField Default) toField,
ip.script & VCObjectHashRow & toField,
ip.inputs & Aeson & toField,
ip.outputs & Aeson & toField,
ip.resolution & Aeson & toField,
toField Default,
ip.gid & toField
]

-- Not derived generically in order to use special `Gen UTCTime`
instance
( Arbitrary gid,
Arbitrary p,
Arbitrary s
) =>
Arbitrary (InferenceParam gid p s)
where
instance (Arbitrary gid, Arbitrary p) => Arbitrary (InferenceParam gid p) where
arbitrary =
InferenceParam
<$> arbitrary
<*> arbitrary
<*> arbitrary
<*> arbitrary
<*> arbitrary
<*> genMUtc
<*> arbitrary

-- Can't be derived because there is (intentionally) no `Arbitrary UTCTime` in scope
instance
( Arbitrary gid,
Arbitrary p,
Arbitrary s
) =>
ToADTArbitrary (InferenceParam gid p s)
where
instance (Arbitrary gid, Arbitrary p) => ToADTArbitrary (InferenceParam gid p) where
toADTArbitrarySingleton _ =
ADTArbitrarySingleton "Inferno.ML.Server.Types" "InferenceParam"
. ConstructorArbitraryPair "InferenceParam"
@@ -789,46 +776,12 @@ instance

-- | An 'InferenceParam' together with all of the model versions that are
-- linked to it indirectly via its script. This is provided for convenience
data InferenceParamWithModels gid p s = InferenceParamWithModels
{ param :: InferenceParam gid p s,
data InferenceParamWithModels gid p = InferenceParamWithModels
{ param :: InferenceParam gid p,
models :: Map Ident (Id (ModelVersion gid Oid))
}
deriving stock (Show, Eq, Generic)

-- | Controls input interaction within a script, i.e. ability to read from
-- and\/or write to this input. Although the term \"input\" is used, those with
-- writes enabled can also be described as \"outputs\"
data ScriptInputType
= -- | Script input can be read, but not written
Readable
| -- | Script input can be written, i.e. can be used in array of
-- write objects returned from script evaluation
Writable
| -- | Script input can be both read from and written to; this allows
-- the same script identifier to point to the same PID with both
-- types of access enabled
ReadableWritable
deriving stock (Show, Eq, Ord, Generic)
deriving anyclass (NFData, ToADTArbitrary)
deriving (VCHashUpdate) via (VCHashUpdateViaShow ScriptInputType)

instance FromJSON ScriptInputType where
parseJSON = withText "ScriptInputType" $ \case
"r" -> pure Readable
"w" -> pure Writable
"rw" -> pure ReadableWritable
s -> fail $ "Invalid script input type: " <> Text.unpack s

instance ToJSON ScriptInputType where
toJSON =
String . \case
Readable -> "r"
Writable -> "w"
ReadableWritable -> "rw"

instance Arbitrary ScriptInputType where
arbitrary = genericArbitrary
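
For context (not code from this PR): in the old representation the access mode sat next to each entry, while the new records carry two maps. One plausible way to project the old combined map onto the new shape, with `Readable` entries becoming inputs, `Writable` entries becoming outputs, and `ReadableWritable` landing in both; `SingleOrMany` and the removed `ScriptInputType` are the types shown above.

import Data.Map (Map)
import qualified Data.Map as Map

-- Hypothetical helper: split the pre-0.11.0 combined map into the separate
-- inputs and outputs maps used by the new InferenceParam/EvaluationEnv
splitByAccess ::
  Map ident (SingleOrMany p, ScriptInputType) ->
  (Map ident (SingleOrMany p), Map ident (SingleOrMany p))
splitByAccess old =
  ( fst <$> Map.filter (readable . snd) old,
    fst <$> Map.filter (writable . snd) old
  )
  where
    readable t = t == Readable || t == ReadableWritable
    writable t = t == Writable || t == ReadableWritable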

-- | Information about execution time and resource usage. This is saved by
-- @inferno-ml-server@ after script evaluation completes and can be queried
-- later by using the same job identifier that was provided to the @/inference@
@@ -837,7 +790,7 @@ data EvaluationInfo gid p = EvaluationInfo
{ -- | Note that this is the job identifier provided to the inference
-- evaluation route, and is also the primary key of the database table
id :: UUID,
param :: Id (InferenceParam gid p VCObjectHash),
param :: Id (InferenceParam gid p),
-- | When inference evaluation started
start :: UTCTime,
-- | When inference evaluation ended
@@ -1053,11 +1006,15 @@ instance Ord a => Ord (SingleOrMany a) where
-- evaluator. This allows for more interactive testing
data EvaluationEnv gid p = EvaluationEnv
{ script :: VCObjectHash,
inputs :: Map Ident (SingleOrMany p, ScriptInputType),
inputs :: Map Ident (SingleOrMany p),
outputs :: Map Ident (SingleOrMany p),
models :: Map Ident (Id (ModelVersion gid Oid))
}
deriving stock (Show, Eq, Generic)
deriving anyclass (FromJSON, ToJSON)
deriving anyclass (FromJSON, ToJSON, ToADTArbitrary)

instance Arbitrary p => Arbitrary (EvaluationEnv gid p) where
arbitrary = genericArbitrary
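
A sketch of exercising the test route with the new two-map environment, assuming `gid ~ Int64`, `p ~ Int`, and that the script hash, input/output maps, model-version ids, and job id have been obtained elsewhere; the import location of `Oid` is an assumption.

import Data.Int (Int64)
import Data.Map (Map)
import Data.UUID (UUID)
import Database.PostgreSQL.LibPQ (Oid) -- assumed source of the Oid used here
import Inferno.ML.Server.Client (inferenceTestC)
import Inferno.ML.Server.Types
import Inferno.Types.Syntax (Ident)
import Inferno.Types.VersionControl (VCObjectHash)
import Servant.Client (ClientM)

-- Evaluate a parameter's script against an ad-hoc environment; inputs and
-- outputs are supplied as two separate maps
testWithEnv ::
  Id (InferenceParam Int64 Int) ->
  UUID ->
  VCObjectHash ->
  Map Ident (SingleOrMany Int) ->
  Map Ident (SingleOrMany Int) ->
  Map Ident (Id (ModelVersion Int64 Oid)) ->
  ClientM (WriteStream IO)
testWithEnv paramId jobId hash ins outs mvs =
  inferenceTestC paramId Nothing jobId $ EvaluationEnv hash ins outs mvs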

tshow :: Show a => a -> Text
tshow = Text.pack . show