Squashed commit of the following:
commit 65e9976
Author: William Moses <[email protected]>
Date:   Sat Dec 14 14:05:03 2024 -0600

    Interp2 (EnzymeAD#365)

    * WIP: kernels

    * more files

    * fix

    * wip

    * wqtmp

    * wip

    * inc

    * continuing

    * wip

    * more work

    * inf rec

    * fix

    * overload working

    * continuing

    * continuing

    * push

    * fix `call_with_reactant_generator` for Julia 1.11 (EnzymeAD#359)

    * conversion

    * continuing

    * Cleanup

    * Apply suggestions from code review

    Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>

    * Delete test/cuda.jl

    * fixup

    * Apply suggestions from code review

    Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>

    * fix apply

    * indep of change

    * minor fix in name

    * Update utils.jl

    * Interp take 2

    * continuing adventures

    * delcode

    * fix

    * tmp

    * make

    * fix

    * cleanup

    * continuing

    * more working

    * further simplify

    * fx

    * more improvements

    * minus show

    * less prints

    * even fewer

    * confusion

    * tmp

    * force clean

    * force oc

    * clean

    * Rewrite

    * fixup

    * fix

    * fix

    * fix

    * fixup

    * fix

    * wip

    * safe prints

    * fix

    * fix

    * stackoverflow

    * cleanup

    * dyindex

    * rt

    * continue

    * clean

    * fix

    * fix

    * fix

    * fix

    * fixup

    * fix

    * fix

    * capture oc

    * compile perf

    * v1.11 fix

    * other way 'round

    * formatting

    ---------

    Co-authored-by: William Moses <[email protected]>
    Co-authored-by: jumerckx <[email protected]>
    Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
    Co-authored-by: jumerckx <[email protected]>

commit 73899f5
Author: Avik Pal <[email protected]>
Date:   Sat Dec 14 14:58:47 2024 +0530

    fix: include files if they end with .jl (EnzymeAD#377)

commit 9f96c09
Author: Sergio Sánchez Ramírez <[email protected]>
Date:   Fri Dec 13 23:12:43 2024 +0100

    Run CI on aarch64 (EnzymeAD#350)

    * Run CI on aarch64

    * use julia pipeline for aarch64-linux

    * fix var

    * exclude aarch64-linux jobs from github ci

commit b56e661
Author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Date:   Fri Dec 13 08:58:51 2024 +0530

    chore: format code (EnzymeAD#371)

    Co-authored-by: mofeing <[email protected]>

commit 311498b
Author: Anton Oresten <[email protected]>
Date:   Thu Dec 12 05:41:39 2024 +0100

    feat: define outer `repeat` method for `TracedRArray` (EnzymeAD#361)

    * Add repeat method

    * Add repeat tests

    * Update test/basic.jl

    * Update src/TracedRArray.jl
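
    A rough usage sketch of the new method (illustrative only; `@compile` and `ConcreteRArray` are Reactant's standard entry points, but this exact snippet is not from the PR):

        using Reactant

        x = Reactant.ConcreteRArray(ones(2, 3))
        g(a) = repeat(a, 2, 2)      # hits the new outer `repeat` for TracedRArray under tracing
        f = Reactant.@compile g(x)
        f(x)                        # 4x6 result, matching Base.repeat semantics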

commit 8b90501
Author: Avik Pal <[email protected]>
Date:   Thu Dec 12 10:11:00 2024 +0530

    fix: ensure printing of wrapped ConcreteRArrays goes through our show (EnzymeAD#367)

    * fix: ensure printing of wrapped ConcreteRArrays goes through our show

    * fix: allow wrapped arrays in mapreduce
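
    A sketch of what the mapreduce relaxation enables (hypothetical example, not a test from the PR):

        using Reactant, LinearAlgebra

        x = Reactant.ConcreteRArray(rand(3, 3))
        g(a) = sum(transpose(a))    # a wrapped traced array now flows through mapreduce
        f = Reactant.@compile g(x)
        f(x)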

commit ea97be3
Author: Sergio Sánchez Ramírez <[email protected]>
Date:   Wed Dec 11 22:02:14 2024 +0100

    Ignore versioned manifests
jumerckx committed Dec 16, 2024
1 parent 38b1797 commit e3a201f
Showing 26 changed files with 1,705 additions and 999 deletions.
103 changes: 71 additions & 32 deletions .buildkite/pipeline.yml
@@ -1,37 +1,76 @@
 steps:
-  - label: "CUDA Julia v{{matrix.version}} -- {{matrix.group}}"
-    matrix:
-      setup:
-        version:
-          - "1.10"
-        group:
-          - core
-          - neural_networks
-          - integration
-    plugins:
-      - JuliaCI/julia#v1:
-          version: "{{matrix.version}}"
-      - JuliaCI/julia-coverage#v1:
-          codecov: true
-          dirs:
-            - src
-            - ext
-            - lib/ReactantCore/src
-    commands: |
-      julia --project=. -e 'println("--- :julia: Instantiating project")
-      using Pkg
-      Pkg.develop([PackageSpec(path="lib/ReactantCore")])'
-
-      julia --project=. -e 'println("--- :julia: Run Tests")
-      using Pkg
-      Pkg.test(; coverage="user")'
-    agents:
-      queue: "juliagpu"
-      cuda: "*"
-    env:
-      REACTANT_TEST_GROUP: "{{matrix.group}}"
-    if: build.message !~ /\[skip tests\]/
-    timeout_in_minutes: 60
+  - group: ":test_tube: Tests"
+    steps:
+      - label: "CUDA Julia v{{matrix.version}} -- {{matrix.group}}"
+        matrix:
+          setup:
+            version:
+              - "1.10"
+            group:
+              - core
+              - neural_networks
+              - integration
+        plugins:
+          - JuliaCI/julia#v1:
+              version: "{{matrix.version}}"
+          - JuliaCI/julia-coverage#v1:
+              codecov: true
+              dirs:
+                - src
+                - ext
+                - lib/ReactantCore/src
+        commands: |
+          julia --project=. -e 'println("--- :julia: Instantiating project")
+          using Pkg
+          Pkg.develop([PackageSpec(path="lib/ReactantCore")])'
+
+          julia --project=. -e 'println("--- :julia: Run Tests")
+          using Pkg
+          Pkg.test(; coverage="user")'
+        agents:
+          queue: "juliagpu"
+          cuda: "*"
+        env:
+          REACTANT_TEST_GROUP: "{{matrix.group}}"
+        if: build.message !~ /\[skip tests\]/
+        timeout_in_minutes: 60
+
+      - label: ":julia: :linux: aarch64 - Julia v{{matrix.version}} -- {{matrix.group}}"
+        matrix:
+          setup:
+            version:
+              - "1.10"
+              - "1.11"
+            group:
+              - core
+              - neural_networks
+              - integration
+        plugins:
+          - JuliaCI/julia#v1:
+              version: "{{matrix.version}}"
+          - JuliaCI/julia-coverage#v1:
+              codecov: true
+              dirs:
+                - src
+                - ext
+                - lib/ReactantCore/src
+        commands: |
+          julia --project=. -e 'println("--- :julia: Instantiating project")
+          using Pkg
+          Pkg.develop([PackageSpec(path="lib/ReactantCore")])'
+
+          julia --project=. -e 'println("--- :julia: Run Tests")
+          using Pkg
+          Pkg.test(; coverage="user")'
+        agents:
+          queue: "juliaecosystem"
+          os: "linux"
+          sandbox_capable: "true"
+          arch: "aarch64"
+        env:
+          REACTANT_TEST_GROUP: "{{matrix.group}}"
+        if: build.message !~ /\[skip tests\]/
+        timeout_in_minutes: 60
 
   - group: ":racehorse: Benchmarks"
     steps:
5 changes: 5 additions & 0 deletions .github/workflows/CI.yml
@@ -33,6 +33,7 @@ jobs:
           - integration
         arch:
           - x64
+          - aarch64
         assertions:
           - false
         libReactant: [packaged]
@@ -64,6 +65,10 @@ jobs:
           libReactant: packaged
           version: '1.10'
           test_group: integration
+      exclude:
+        # these are run on Buildkite
+        - os: ubuntu-20.04
+          arch: aarch64
     steps:
       - uses: actions/checkout@v4
       - uses: julia-actions/setup-julia@v2
1 change: 1 addition & 0 deletions .gitignore
@@ -251,6 +251,7 @@ docs/site/
 # committed for packages, but should be committed for applications that require a static
 # environment.
 Manifest.toml
+Manifest-v*.toml
 
 .vscode/*
 .vscode/settings.json
4 changes: 2 additions & 2 deletions Project.toml
@@ -42,14 +42,14 @@ Adapt = "4"
 ArrayInterface = "7.10"
 CEnum = "0.4, 0.5"
 Downloads = "1.6"
-Enzyme = "0.13.21"
+Enzyme = "0.13.22"
 EnzymeCore = "0.8.8"
 GPUArraysCore = "0.1.6, 0.2"
 LinearAlgebra = "1.10"
 NNlib = "0.9.26"
 OrderedCollections = "1"
 Preferences = "1.4"
-ReactantCore = "0.1.2"
+ReactantCore = "0.1.3"
 Reactant_jll = "0.0.26"
 ScopedValues = "1.2.1"
 Scratch = "1.2"
10 changes: 10 additions & 0 deletions deps/ReactantExtra/API.cpp
@@ -376,6 +376,16 @@ extern "C" MlirModule ConvertLLVMToMLIR(LLVMModuleRef lmod, MlirContext cctx) {
   return wrap(res);
 }
 
+#include "llvm/IRReader/IRReader.h"
+extern "C" MlirModule ConvertLLVMStrToMLIR(const char* lmod, MlirContext cctx) {
+    LLVMContext Context;
+    SMDiagnostic Err;
+    auto llvmModule = llvm::parseIR(llvm::MemoryBufferRef(lmod, "conversion"), Err, Context);
+    mlir::MLIRContext &context = *unwrap(cctx);
+    auto res = mlir::translateLLVMIRToModule(std::move(llvmModule), &context, /*emitExpensiveWarnings*/false, /*dropDICompositeElements*/false).release();
+    return wrap(res);
+}
+
 
 /* Note that this */
 extern "C" xla::PjRtLoadedExecutable* ClientCompile(PjRtClient * client, MlirModule cmod) {
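
For orientation, the new C entry point could be reached from Julia roughly like this (a sketch only: the library name, context plumbing, and wrapper name are assumptions, not Reactant's actual bindings):

    using Reactant: MLIR

    # Hypothetical wrapper -- illustrative, not the binding Reactant ships.
    function convert_llvm_str_to_mlir(ir::String, ctx::MLIR.API.MlirContext)
        return ccall(
            (:ConvertLLVMStrToMLIR, "libReactantExtra"),  # assumed library name
            MLIR.API.MlirModule,
            (Cstring, MLIR.API.MlirContext),
            ir, ctx,
        )
    end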
2 changes: 2 additions & 0 deletions deps/ReactantExtra/BUILD
@@ -450,6 +450,8 @@ cc_library(
         "@llvm-project//mlir:SCFDialect",
         "@llvm-project//mlir:TransformDialect",
         "@llvm-project//mlir:Transforms",
+
+        "@llvm-project//llvm:IRReader",
         "@llvm-project//llvm:Support",
         "@llvm-project//llvm:AArch64AsmParser",
         "@llvm-project//llvm:AArch64CodeGen",
27 changes: 11 additions & 16 deletions ext/ReactantNNlibExt.jl
@@ -2,16 +2,10 @@ module ReactantNNlibExt
 
 using NNlib
 using GPUArraysCore: @allowscalar
-using Reactant:
-    Reactant,
-    Ops,
-    TracedRArray,
-    AnyTracedRArray,
-    materialize_traced_array,
-    MLIR,
-    TracedRNumber,
-    get_mlir_data,
-    set_mlir_data!
+using Reactant: Reactant, Ops, TracedRArray, AnyTracedRArray, MLIR, TracedRNumber
+
+using Reactant.TracedUtils: materialize_traced_array, get_mlir_data, set_mlir_data!
+
 using ReactantCore: @trace
 using LinearAlgebra: LinearAlgebra, triu
@@ -238,9 +232,9 @@ function NNlib.batched_mul!(
     if size(x, 3) != size(y, 3)
         B = max(size(x, 3), size(y, 3))
         if size(x, 3) == 1
-            x = Reactant.broadcast_to_size(x, (size(x, 1), size(x, 2), B))
+            x = Reactant.TracedUtils.broadcast_to_size(x, (size(x, 1), size(x, 2), B))
         elseif size(y, 3) == 1
-            y = Reactant.broadcast_to_size(y, (size(y, 1), size(y, 2), B))
+            y = Reactant.TracedUtils.broadcast_to_size(y, (size(y, 1), size(y, 2), B))
         end
     end
@@ -250,9 +244,9 @@
     if size(x, 1) != size(y, 1)
         B = max(size(x, 1), size(y, 1))
         if size(x, 1) == 1
-            x = Reactant.broadcast_to_size(x, (B, size(x, 2), size(x, 3)))
+            x = Reactant.TracedUtils.broadcast_to_size(x, (B, size(x, 2), size(x, 3)))
         elseif size(y, 1) == 1
-            y = Reactant.broadcast_to_size(y, (B, size(y, 2), size(y, 3)))
+            y = Reactant.TracedUtils.broadcast_to_size(y, (B, size(y, 2), size(y, 3)))
         end
     end
@@ -270,7 +264,7 @@ end
 function NNlib.pad_constant(
     x::AnyTracedRArray{T,N}, pad::NTuple{N,Tuple{Int,Int}}, value
 ) where {T,N}
-    value = Reactant.promote_to(TracedRNumber{T}, value)
+    value = Reactant.TracedUtils.promote_to(TracedRNumber{T}, value)
     low = [i[1] for i in pad]
     high = [i[2] for i in pad]
     interior = [0 for i in pad]
@@ -329,7 +323,8 @@ function NNlib.gather!(dst::TracedRArray, src::AnyTracedRArray, idxs::AbstractArray)
     start_sizes = ntuple(i -> size(src, i), dims)
     results = map(CartesianIndices(idxs)) do k
         res = @allowscalar src[colons..., Tuple(idxs[k])...]
-        res isa TracedRNumber && (res = Reactant.broadcast_to_size(res, (1,)))
+        res isa TracedRNumber &&
+            (res = Reactant.TracedUtils.broadcast_to_size(res, (1,)))
         return reshape(res, start_sizes..., :)
     end
     res = reshape(cat(results...; dims=(dims + 1)), size(dst))
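
The `batched_mul!` hunks keep the existing batch-broadcasting behavior and only reroute it through `TracedUtils.broadcast_to_size`. Roughly (illustrative shapes; not a test from the PR):

    using NNlib, Reactant

    x = Reactant.ConcreteRArray(rand(Float32, 3, 3, 1))   # batch of 1
    y = Reactant.ConcreteRArray(rand(Float32, 3, 3, 4))   # batch of 4
    g(a, b) = NNlib.batched_mul(a, b)
    f = Reactant.@compile g(x, y)
    f(x, y)   # x is broadcast along the batch dimension to 4 before the matmul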
3 changes: 2 additions & 1 deletion ext/ReactantStatisticsExt.jl
@@ -1,6 +1,7 @@
 module ReactantStatisticsExt
 
-using Reactant: AnyTracedRArray, materialize_traced_array
+using Reactant: AnyTracedRArray
+using Reactant.TracedUtils: materialize_traced_array
 using Statistics: Statistics
 
 function Statistics.mean(A::AnyTracedRArray{T,N}; dims=:) where {T,N}
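
Usage is unchanged by this import move; for reference, a minimal sketch of what the extension handles:

    using Reactant, Statistics

    x = Reactant.ConcreteRArray(rand(4, 4))
    g(a) = mean(a; dims=1)      # dispatches to the extension's method for AnyTracedRArray
    f = Reactant.@compile g(x)
    f(x)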
7 changes: 4 additions & 3 deletions ext/ReactantYaoBlocksExt.jl
@@ -1,12 +1,13 @@
 module ReactantYaoBlocksExt
 
 using Reactant
+using Reactant.TracedUtils: broadcast_to_size
 using YaoBlocks
 
 function YaoBlocks.mat(
     ::Type{T}, R::RotationGate{D,Reactant.TracedRNumber{S},<:XGate}
 ) where {D,T,S}
-    M = Reactant.broadcast_to_size(zero(T), (2, 2))
+    M = broadcast_to_size(zero(T), (2, 2))
     c = cos(R.theta / 2)
     s = -im * sin(R.theta / 2)
     M[1, 1] = c
@@ -19,7 +20,7 @@ end
 function YaoBlocks.mat(
     ::Type{T}, R::RotationGate{D,Reactant.TracedRNumber{S},<:YGate}
 ) where {D,T,S}
-    M = Reactant.broadcast_to_size(zero(T), (2, 2))
+    M = broadcast_to_size(zero(T), (2, 2))
     c = cos(R.theta / 2)
     s = sin(R.theta / 2)
     M[1, 1] = c
@@ -32,7 +33,7 @@ end
 function YaoBlocks.mat(
     ::Type{T}, R::RotationGate{D,Reactant.TracedRNumber{S},<:ZGate}
 ) where {D,T,S}
-    M = Reactant.broadcast_to_size(zero(T), (2, 2))
+    M = broadcast_to_size(zero(T), (2, 2))
     x = exp(im * R.theta / 2)
     M[1, 1] = conj(x)
     M[2, 2] = x
2 changes: 1 addition & 1 deletion lib/ReactantCore/Project.toml
@@ -1,7 +1,7 @@
 name = "ReactantCore"
 uuid = "a3311ec8-5e00-46d5-b541-4f83e724a433"
 authors = ["William Moses <[email protected]>", "Valentin Churavy <[email protected]>", "Sergio Sánchez Ramírez <[email protected]>", "Paul Berg <[email protected]>", "Avik Pal <[email protected]>"]
-version = "0.1.2"
+version = "0.1.3"
 
 [deps]
 ExpressionExplorer = "21656369-7473-754a-2065-74616d696c43"
6 changes: 4 additions & 2 deletions lib/ReactantCore/src/ReactantCore.jl
@@ -157,15 +157,17 @@ function trace_for(mod, expr)
 
     all_syms = Expr(:tuple, counter, external_syms...)
     args_init = Expr(
-        :tuple, :(Reactant.promote_to(Reactant.TracedRNumber{Int}, 0)), external_syms...
+        :tuple,
+        :(Reactant.TracedUtils.promote_to(Reactant.TracedRNumber{Int}, 0)),
+        external_syms...,
     )
 
     reactant_code_block = quote
         let args = $(args_init)
             cond_fn =
                 $(all_syms) -> begin
                     local num_iters = div($limit - $start, $step, RoundDown)
-                    local num_iters = Reactant.promote_to(
+                    local num_iters = Reactant.TracedUtils.promote_to(
                         Reactant.TracedRNumber{Int64}, num_iters
                     )
                     $counter < num_iters + 1
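
The `trace_for` change only moves `promote_to` into `TracedUtils`; the macro's behavior is unchanged. A minimal sketch of the construct it lowers (assuming the usual `@trace` for-loop support):

    using Reactant, ReactantCore

    function add10(x)
        @trace for i in 1:10    # trace_for lowers this to a traced while loop whose
            x = x .+ 1          # counter starts at promote_to(TracedRNumber{Int}, 0)
        end
        return x
    end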
9 changes: 8 additions & 1 deletion src/Compiler.jl
@@ -297,7 +297,7 @@ function compile_mlir!(mod, f, args; optimize::Union{Bool,Symbol}=true)
     linear_results = MLIR.IR.mmodule!(mod) do
         MLIR.IR.block!(MLIR.IR.body(mod)) do
             with(enable_tracing=>true, callcache=>Dict()) do
-                return Reactant.make_mlir_fn(f, args, (), "main", true)
+                return Reactant.TracedUtils.make_mlir_fn(f, args, (), "main", true)
             end
         end
     end
@@ -785,6 +785,13 @@ function compile(f, args; client=nothing, optimize=true, sync=false)
     return register_thunk(fname, body)
 end
 
+# Compiling within a compile should return simply the original function
+Reactant.@reactant_override function Reactant.Compiler.compile(
+    f, args; client=nothing, optimize=true, sync=false
+)
+    return f
+end
+
 # inspired by RuntimeGeneratedFunction.jl
 const __thunk_body_cache = Dict{Symbol,Expr}()
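
The new override short-circuits nested compilation: when a function being traced itself calls `compile`, the inner call now just returns the original function so the outer trace can proceed. A sketch of the situation it handles (hypothetical example, not from the PR):

    using Reactant

    inner(x) = sum(x)

    function outer(x)
        # Under tracing, this inner compile is intercepted by the
        # @reactant_override above and simply returns `inner`:
        g = Reactant.Compiler.compile(inner, (x,))
        return g(x)
    end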
20 changes: 15 additions & 5 deletions src/ConcreteRArray.jl
@@ -99,7 +99,7 @@ end
 function Base.convert(
     ::Type{T}, X::WrappedConcreteRArray{ElType,N}
 ) where {T<:Array,ElType,N}
-    fn = compile(materialize_traced_array, (X,))
+    fn = compile(TracedUtils.materialize_traced_array, (X,))
     return convert(Array, fn(X))
 end
 Base.Array(x::AnyConcreteRArray) = convert(Array, x)
@@ -195,16 +195,18 @@ function Base.show(io::IO, X::ConcreteRScalar{T}) where {T}
     return nothing
 end
 
-function Base.print_array(io::IO, X::ConcreteRArray)
-    if X.data == XLA.AsyncEmptyBuffer
+function Base.print_array(io::IO, X::AnyConcreteRArray)
+    data = ancestor(X).data
+    if data == XLA.AsyncEmptyBuffer
         println(io, "<Empty buffer>")
         return nothing
     end
     return Base.print_array(io, convert(Array, X))
 end
 
-function Base.show(io::IO, X::ConcreteRArray)
-    if X.data == XLA.AsyncEmptyBuffer
+function Base.show(io::IO, X::AnyConcreteRArray)
+    data = ancestor(X).data
+    if data == XLA.AsyncEmptyBuffer
         println(io, "<Empty buffer>")
         return nothing
     end
@@ -343,3 +345,11 @@ end
 
 buffer_on_cpu(::Any) = true
 buffer_on_cpu(x::ConcreteRArray) = XLA.BufferOnCPU(x.data.buffer)
+
+function Ops.constant(x::ConcreteRArray; kwargs...)
+    return Ops.constant(Base.convert(Array, x); kwargs...)
+end
+
+function Ops.constant(x::ConcreteRNumber{T}; kwargs...) where {T}
+    return Ops.constant(Base.convert(T, x); kwargs...)
+end
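
Together, the `AnyConcreteRArray` methods and the `Ops.constant` overloads mean wrapped concrete arrays print through Reactant's own `show`, and concrete values can be embedded as constants while tracing. Roughly (illustrative):

    using Reactant, LinearAlgebra

    x = Reactant.ConcreteRArray(rand(2, 2))
    show(stdout, transpose(x))   # dispatches on AnyConcreteRArray and checks
                                 # ancestor(X).data instead of hitting Base's
                                 # generic show for wrapped arrays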