diff --git a/Project.toml b/Project.toml
index 0a82267..966117e 100644
--- a/Project.toml
+++ b/Project.toml
@@ -6,6 +6,13 @@ version = "0.1.0"
 [deps]
 JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
 
+[weakdeps]
+GLM = "38e38edf-8417-5370-95a0-9cbb8c7f171a"
+
+[extensions]
+OmeletteGLMExt = "GLM"
+
 [compat]
+GLM = "1.9"
 JuMP = "1"
-julia = "1.6"
+julia = "1.9"
diff --git a/README.md b/README.md
index a315974..2ba382c 100644
--- a/README.md
+++ b/README.md
@@ -21,3 +21,28 @@ This project is inspired by two existing projects:
 
  * [OMLT](https://github.com/cog-imperial/OMLT)
  * [gurobi-machinelearning](https://github.com/Gurobi/gurobi-machinelearning)
+
+## Supported models
+
+Use `add_model` to embed a predictor `model_ml` into a JuMP model `model`. Pass the output variables `y` yourself, or omit them and `add_model` will create and return a new vector of output variables:
+```julia
+Omelette.add_model(model, model_ml, x, y)
+y = Omelette.add_model(model, model_ml, x)
+```
+
+### LinearRegression
+
+```julia
+num_features, num_observations = 2, 10
+X = rand(num_observations, num_features)
+θ = rand(num_features)
+Y = X * θ + randn(num_observations)
+model_glm = GLM.lm(X, Y)
+model_ml = Omelette.LinearRegression(model_glm)
+model = Model(HiGHS.Optimizer)
+set_silent(model)
+@variable(model, 0 <= x[1:num_features] <= 1)
+@constraint(model, sum(x) == 1.5)
+y = Omelette.add_model(model, model_ml, x)
+@objective(model, Max, y[1])
+```
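
For completeness, here is the README example as a self-contained script carried through to a solve; the imports and everything after `@objective` are assumed typical JuMP usage rather than part of the README snippet itself:

```julia
using JuMP
import GLM, HiGHS, Omelette

num_features, num_observations = 2, 10
X = rand(num_observations, num_features)
θ = rand(num_features)
Y = X * θ + randn(num_observations)
model_glm = GLM.lm(X, Y)                    # no-intercept least-squares fit
model_ml = Omelette.LinearRegression(model_glm)

model = Model(HiGHS.Optimizer)
set_silent(model)
@variable(model, 0 <= x[1:num_features] <= 1)
@constraint(model, sum(x) == 1.5)
y = Omelette.add_model(model, model_ml, x)  # y is a new 1-element vector of variables
@objective(model, Max, y[1])

optimize!(model)
value.(x), objective_value(model)           # optimal inputs and the predicted output
```
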
diff --git a/ext/OmeletteGLMExt.jl b/ext/OmeletteGLMExt.jl
new file mode 100644
index 0000000..313b8b2
--- /dev/null
+++ b/ext/OmeletteGLMExt.jl
@@ -0,0 +1,15 @@
+# Copyright (c) 2024: Oscar Dowson and contributors
+#
+# Use of this source code is governed by an MIT-style license that can be found
+# in the LICENSE.md file or at https://opensource.org/licenses/MIT.
+
+module OmeletteGLMExt
+
+import Omelette
+import GLM
+
+function Omelette.LinearRegression(model::GLM.LinearModel)
+    return Omelette.LinearRegression(GLM.coef(model))
+end
+
+end #module
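
The extension is intentionally thin: it extracts the fitted coefficient vector with `GLM.coef` and hands it to the existing `Omelette.LinearRegression(::Vector)` constructor. A sketch of what the conversion boils down to:

```julia
import GLM, Omelette

X, Y = rand(10, 2), rand(10)
model_glm = GLM.lm(X, Y)                  # a GLM.LinearModel (matrix input, no intercept)

β = GLM.coef(model_glm)                   # Vector of fitted coefficients, one per column of X
model_ml = Omelette.LinearRegression(β)   # same object the extension constructs
```

Note that the method is restricted to `GLM.LinearModel`, the type returned by `GLM.lm` with a matrix; a formula-plus-table fit returns a `TableRegressionModel` wrapper and would not hit this method.
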
diff --git a/src/Omelette.jl b/src/Omelette.jl
index b705a5e..5d30697 100644
--- a/src/Omelette.jl
+++ b/src/Omelette.jl
@@ -54,7 +54,27 @@ function add_model(
         throw(DimensionMismatch(msg))
     end
     _add_model_inner(opt_model, ml_model, x, y)
-    return
+    return y
+end
+
+Base.size(x::AbstractModel, i::Int) = size(x)[i]
+
+function add_model(
+    opt_model::JuMP.Model,
+    ml_model::AbstractModel,
+    x::Vector{JuMP.VariableRef},
+    y::JuMP.VariableRef,
+)
+    return add_model(opt_model, ml_model, x, [y])
+end
+
+function add_model(
+    opt_model::JuMP.Model,
+    ml_model::AbstractModel,
+    x::Vector{JuMP.VariableRef},
+)
+    y = JuMP.@variable(opt_model, [1:size(ml_model, 1)])
+    return add_model(opt_model, ml_model, x, y)
 end
 
 for file in readdir(joinpath(@__DIR__, "models"); join = true)
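
Together with the `Base.size` helper, the two new `add_model` methods give three equivalent calling patterns: pass a vector of output variables, pass a single `VariableRef` (wrapped as a one-element vector), or pass nothing and let `add_model` create outputs sized by `size(ml_model, 1)`. A sketch, assuming a single-output, two-feature `LinearRegression` built from a coefficient vector as the GLM extension does:

```julia
using JuMP
import HiGHS, Omelette

model_ml = Omelette.LinearRegression([2.0, 3.0])   # one output, two features (assumed constructor)
model = Model(HiGHS.Optimizer)
@variable(model, 0 <= x[1:2] <= 1)

# 1. Supply the output vector yourself:
@variable(model, y1[1:1])
Omelette.add_model(model, model_ml, x, y1)

# 2. Supply a scalar output; the new method wraps it as `[y2]`:
@variable(model, y2)
Omelette.add_model(model, model_ml, x, y2)

# 3. Let `add_model` create and return anonymous output variables:
y3 = Omelette.add_model(model, model_ml, x)
```

Each call adds its own set of linking constraints, so in practice you would pick one of the three forms.
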
diff --git a/test/Project.toml b/test/Project.toml
index 4050cb8..29e7c47 100644
--- a/test/Project.toml
+++ b/test/Project.toml
@@ -1,7 +1,13 @@
 [deps]
+GLM = "38e38edf-8417-5370-95a0-9cbb8c7f171a"
+HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"
 JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
+Omelette = "e52c2cb8-508e-4e12-9dd2-9c4755b60e73"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
 [compat]
+GLM = "1"
+HiGHS = "1"
+JuMP = "1"
 Test = "<0.0.1, 1.6"
-julia = "1.6"
+julia = "1.9"
diff --git a/test/models/test_LinearRegression.jl b/test/models/test_LinearRegression.jl
index 38d8cd0..5638ae2 100644
--- a/test/models/test_LinearRegression.jl
+++ b/test/models/test_LinearRegression.jl
@@ -5,17 +5,18 @@
 
 module LinearRegressionTests
 
-using Test
 using JuMP
+using Test
+
+import GLM
+import HiGHS
 import Omelette
 
+is_test(x) = startswith(string(x), "test_")
+
 function runtests()
-    for name in names(@__MODULE__; all = true)
-        if startswith("$name", "test_")
-            @testset "$name" begin
-                getfield(@__MODULE__, name)()
-            end
-        end
+    @testset "$name" for name in filter(is_test, names(@__MODULE__; all = true))
+        getfield(@__MODULE__, name)()
     end
     return
 end
@@ -47,6 +48,27 @@ function test_LinearRegression_dimension_mismatch()
     return
 end
 
+function test_LinearRegression_GLM()
+    num_features = 2
+    num_observations = 10
+    X = rand(num_observations, num_features)
+    θ = rand(num_features)
+    Y = X * θ + randn(num_observations)
+    model_glm = GLM.lm(X, Y)
+    model = Model(HiGHS.Optimizer)
+    set_silent(model)
+    model_ml = Omelette.LinearRegression(model_glm)
+    @variable(model, 0 <= x[1:num_features] <= 1)
+    @constraint(model, sum(x) == 1.5)
+    y = Omelette.add_model(model, model_ml, x)
+    @objective(model, Max, y[1])
+    optimize!(model)
+    @assert is_solved_and_feasible(model)
+    y_star_glm = GLM.predict(model_glm, value.(x)')
+    @test isapprox(objective_value(model), only(y_star_glm); atol = 1e-6)
+    return
+end
+
 end
 
 LinearRegressionTests.runtests()
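
The new `test_LinearRegression_GLM` case maximizes the embedded prediction and then compares the optimal objective against `GLM.predict` evaluated at the optimal `x` (`only` unwraps the 1-element vector that `predict` returns for a single row). Because `GLM.lm(X, Y)` with a plain matrix fits no intercept, both sides reduce to the same dot product; a hand check of the assertion might look like this, with `x_star` standing in for `value.(x)`:

```julia
import GLM

X, Y = rand(10, 2), rand(10)
model_glm = GLM.lm(X, Y)
x_star = [1.0, 0.5]                    # stand-in for the optimal value.(x)

lhs = GLM.predict(model_glm, x_star')  # what the test queries from GLM (1-element vector)
rhs = GLM.coef(model_glm)' * x_star    # the affine relation the test expects Omelette to embed
only(lhs) ≈ rhs                        # true
```
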