Revise examples (#672)
Keep examples up-to-date with the current version of Owl/OCaml.
jzstark authored Jul 3, 2024
1 parent 600c21a commit 455bde1
Showing 39 changed files with 11,144 additions and 145 deletions.
2 changes: 1 addition & 1 deletion examples/README.md
@@ -1 +1 @@
-The examples in this folder are made based on the OCaml's toplevel and Owl's Zoo system. Please refer to [Tutorial 9: Scripting and Zoo System](https://github.com/ryanrhymes/owl/wiki/Tutorial:-Scripting-and-Zoo-System).
+The examples can be compiled using `dune build`, and then the executables can be found in `owl/_build/default/examples`.
6 changes: 4 additions & 2 deletions examples/backprop.ml
@@ -1,9 +1,11 @@
#!/usr/bin/env owl
(* This example demonstrates how to write the backpropagation algorithm from
scratch using the Algodiff module. With the backprop algorithm, we then build
a naive neural network, without using Owl's DNN module, to train on the MNIST dataset.
Execute 'Dataset.download_all ()' to acquire all the necessary datasets before running this example.
*)


open Owl

open Algodiff.S
@@ -49,7 +51,7 @@ let backprop nn eta x y =
loss |> unpack_flt

let test nn x y =
-Dense.Matrix.S.iter2_rows (fun u v ->
+Dense.Matrix.S.iter2_rows (fun u _ ->
Dataset.print_mnist_image u;
let p = run_network (Arr u) nn |> unpack_arr in
Dense.Matrix.Generic.print p;
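As background for this example (not part of the commit): a minimal sketch of taking an Algodiff gradient of a toy vector-to-scalar loss, which is the operation the hand-written backprop loop builds on. The loss function and shapes below are illustrative only; the calls are the standard `Algodiff.S` API.

(* Minimal sketch: reverse-mode gradient of a toy quadratic loss. *)
open Owl
open Algodiff.S

let loss w = Maths.(l2norm_sqr' (w - pack_flt 1.))

let () =
  let w = Arr (Dense.Ndarray.S.uniform [| 3 |]) in
  let g = grad loss w in                    (* dloss/dw, same shape as w *)
  Dense.Ndarray.S.print (unpack_arr g)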
1 change: 0 additions & 1 deletion examples/checkpoint.ml
@@ -1,4 +1,3 @@
#!/usr/bin/env owl
(* This example shows how to use checkpoint in a stateful optimisation. *)

open Owl
1 change: 0 additions & 1 deletion examples/cifar10_vgg.ml
@@ -1,4 +1,3 @@
#!/usr/bin/env owl
(* This example demonstrates how to build a VGG-like convolutional neural
 * network for the CIFAR10 dataset.
*)
19 changes: 11 additions & 8 deletions examples/computation_graph_01.ml
@@ -1,6 +1,9 @@
#!/usr/bin/env owl
(*
 * Please install the graphviz tool before executing this example,
 * e.g. on an Ubuntu system: `sudo apt install graphviz`.
*)


open Owl
module G = Owl_computation_cpu_engine.Make (Owl_algodiff_primal_ops.D)
include Owl_algodiff_generic.Make (G)

@@ -14,7 +17,7 @@ let visualise_01 () =
let z = f x y in
let inputs = [| unpack_elt x |> G.elt_to_node; unpack_elt y |> G.elt_to_node |] in
let outputs = [| unpack_elt z |> G.elt_to_node |] in
-let graph = G.make_graph inputs outputs "graph" in
+let graph = G.make_graph ~input:inputs ~output:outputs "graph" in
let s = G.graph_to_dot graph in
Owl_io.write_file "cgraph_01.dot" s;
Sys.command "dot -Tpdf cgraph_01.dot -o cgraph_01.pdf" |> ignore
@@ -26,7 +29,7 @@ let visualise_02 () =
let z = (grad (f x)) y in
let inputs = [| unpack_elt x |> G.elt_to_node; unpack_elt y |> G.elt_to_node |] in
let outputs = [| unpack_elt z |> G.elt_to_node |] in
-let s = G.make_graph inputs outputs "graph" |> G.graph_to_dot in
+let s = G.make_graph ~input:inputs ~output:outputs "graph" |> G.graph_to_dot in
Owl_io.write_file "cgraph_02.dot" s;
Sys.command "dot -Tpdf cgraph_02.dot -o cgraph_02.pdf" |> ignore

@@ -38,18 +41,18 @@ let visualise_03 () =
let z = f x y in
let i0 = [| unpack_arr x |> G.arr_to_node; unpack_arr y |> G.arr_to_node |] in
let o0 = [| primal z |> unpack_elt |> G.elt_to_node |] in
-let s0 = G.make_graph i0 o0 "graph" |> G.graph_to_dot in
+let s0 = G.make_graph ~input:i0 ~output:o0 "graph" |> G.graph_to_dot in
Owl_io.write_file "cgraph_03_forward.dot" s0;
Sys.command "dot -Tpdf cgraph_03_forward.dot -o cgraph_03_forward.pdf" |> ignore;

reverse_prop (pack_flt 1.) z;
let x' = adjval x |> unpack_arr |> G.arr_to_node in
let y' = adjval y |> unpack_arr |> G.arr_to_node in
let i1 = [| unpack_arr x |> G.arr_to_node |] in
-let s1 = G.make_graph i1 [| x' |] "graph" |> G.graph_to_dot in
+let s1 = G.make_graph ~input:i1 ~output:[| x' |] "graph" |> G.graph_to_dot in
let i2 = [| unpack_arr y |> G.arr_to_node |] in
-let s2 = G.make_graph i2 [| y' |] "graph" |> G.graph_to_dot in
-let s3 = G.make_graph i0 [| x'; y' |] "graph" |> G.graph_to_dot in
+let s2 = G.make_graph ~input:i2 ~output:[| y' |] "graph" |> G.graph_to_dot in
+let s3 = G.make_graph ~input:i0 ~output:[| x'; y' |] "graph" |> G.graph_to_dot in
Owl_io.write_file "cgraph_03_backward_x.dot" s1;
Sys.command "dot -Tpdf cgraph_03_backward_x.dot -o cgraph_03_backward_x.pdf" |> ignore;
Owl_io.write_file "cgraph_03_backward_y.dot" s2;
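Every call site in this file changes the same way; if one wanted to avoid the repetition, the pattern could be factored into a helper. A hedged sketch (the helper name is ours), assuming the labelled-argument `G.make_graph` used above and the `G` engine module defined at the top of the file:

(* Sketch: render a computation graph to PDF via graphviz. *)
let dot_to_pdf ~input ~output name =
  let s = G.make_graph ~input ~output name |> G.graph_to_dot in
  Owl_io.write_file (name ^ ".dot") s;
  Sys.command (Printf.sprintf "dot -Tpdf %s.dot -o %s.pdf" name name) |> ignore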
14 changes: 9 additions & 5 deletions examples/computation_graph_02.ml
@@ -1,4 +1,8 @@
#!/usr/bin/env owl
(*
 * Please install the graphviz tool before executing this example,
 * e.g. on an Ubuntu system: `sudo apt install graphviz`.
*)


open Owl
module G = Owl_computation_cpu_engine.Make (Owl_algodiff_primal_ops.D)
@@ -32,14 +36,14 @@ let visualise_mnist () =
let _, adj0 = Graph.(backward network loss) in
let inputs = [| xt |> A.unpack_arr |> G.arr_to_node |] in
let s0_outputs = [| loss |> A.unpack_elt |> G.elt_to_node |] in
-let s0 = G.make_graph inputs s0_outputs "mnist_loss" |> G.graph_to_dot in
+let s0 = G.make_graph ~input:inputs ~output:s0_outputs "mnist_loss" |> G.graph_to_dot in
Owl_io.write_file "cgraph_04_mnist_loss.dot" s0;
Sys.command "dot -Tpdf cgraph_04_mnist_loss.dot -o cgraph_04_mnist_loss.pdf" |> ignore;
let s1_outputs = adj0
|> Utils.Array.flatten
|> Array.map (fun a -> A.unpack_arr a |> G.arr_to_node)
in
-let s1 = G.make_graph inputs s1_outputs "mnist_loss" |> G.graph_to_dot in
+let s1 = G.make_graph ~input:inputs ~output:s1_outputs "mnist_loss" |> G.graph_to_dot in
Owl_io.write_file "cgraph_04_mnist_grad.dot" s1;
Sys.command "dot -Tpdf cgraph_04_mnist_grad.dot -o cgraph_04_mnist_grad.pdf" |> ignore

@@ -63,14 +67,14 @@ let visualise_lstm () =
let _, adj0 = Graph.(backward network loss) in
let inputs = [| xt |> A.unpack_arr |> G.arr_to_node |] in
let s0_outputs = [| loss |> A.unpack_elt |> G.elt_to_node |] in
-let s0 = G.make_graph inputs s0_outputs "mnist_loss" |> G.graph_to_dot in
+let s0 = G.make_graph ~input:inputs ~output:s0_outputs "mnist_loss" |> G.graph_to_dot in
Owl_io.write_file "cgraph_04_lstm_loss.dot" s0;
(* Sys.command "dot -Tpdf -Gnslimit=1 cgraph_04_lstm_loss.dot -o cgraph_04_lstm_loss.pdf" |> ignore; *)
let s1_outputs = adj0
|> Utils.Array.flatten
|> Array.map (fun a -> A.unpack_arr a |> G.arr_to_node)
in
-let s1 = G.make_graph inputs s1_outputs "mnist_loss" |> G.graph_to_dot in
+let s1 = G.make_graph ~input:inputs ~output:s1_outputs "mnist_loss" |> G.graph_to_dot in
Owl_io.write_file "cgraph_04_lstm_grad.dot" s1
(* Sys.command "dot -Tpdf -Gnslimit=1 cgraph_04_lstm_grad.dot -o cgraph_04_lstm_grad.pdf" |> ignore *)

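The adjoint-to-node conversion appears twice in this file as well; a small sketch of factoring it out (the helper name is ours), assuming the `A`, `G` and `Utils` modules already in scope in this example:

(* Sketch: turn the adjoints returned by Graph.backward into graph output nodes. *)
let grad_output_nodes adj =
  adj
  |> Utils.Array.flatten
  |> Array.map (fun a -> A.unpack_arr a |> G.arr_to_node)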
1 change: 0 additions & 1 deletion examples/countmin_distributed.ml
@@ -1,4 +1,3 @@
#!/usr/bin/env owl
(* This example demonstrates the use of distributed count-min sketches. It
* fills a single count-min sketch using the news.txt corpus at
* https://github.com/ryanrhymes/owl_dataset, then initializes two new empty
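For readers new to the data structure, here is a toy count-min sketch written from scratch; it only illustrates the idea behind these two examples and deliberately does not use Owl_base's actual API:

(* Toy count-min sketch: [depth] hash tables of [width] counters.
   An item's estimated count is the minimum of its counters, which
   can only over-estimate the true frequency. *)
let width = 1024
let depth = 4
let tables = Array.init depth (fun _ -> Array.make width 0)

(* Illustrative seeded hash; a real sketch would use pairwise-independent hashes. *)
let bucket i s = Hashtbl.hash (i, s) mod width

let add s =
  Array.iteri (fun i t -> t.(bucket i s) <- t.(bucket i s) + 1) tables

let count s =
  Array.fold_left min max_int
    (Array.mapi (fun i t -> t.(bucket i s)) tables)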
1 change: 0 additions & 1 deletion examples/countmin_texts.ml
@@ -1,4 +1,3 @@
#!/usr/bin/env owl
(* This example demonstrates the use of the HeavyHitters sketch,
 * which is based on the Count-Min sketch in the Owl_base library.
* This example finds the words which appear with relative frequency
20 changes: 9 additions & 11 deletions examples/custom_algodiff_op.ml
@@ -1,5 +1,3 @@
#!/usr/bin/env owl

(* This example demonstrates how to build a custom operation in Algodiff. *)

open Owl
@@ -33,13 +31,13 @@ let custom_cos =
: Siso)

let _ =
-let input = Mat.uniform 1 2 in
+let input = _f 1. in (* Mat.uniform 1 2 in *)
(* [f] must be [f : vector -> scalar]. *)
-let g' = grad custom_cos in
-let h' = grad g' in
-let g = grad Maths.cos in
-let h = grad g in
-Mat.print (g' input);
-Mat.print (g input);
-Mat.print (h' input);
-Mat.print (h input);
+let g' = diff custom_cos in
+let h' = diff g' in
+let g = diff Maths.cos in
+let h = diff g in
+print_float (g' input |> unpack_flt); print_endline "\n";
+print_float (g input |> unpack_flt); print_endline "\n";
+print_float (h' input |> unpack_flt); print_endline "\n";
+print_float (h input |> unpack_flt); print_endline "\n"
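The switch from `grad` to `diff` here is deliberate: `diff` differentiates scalar-to-scalar functions, while `grad` expects a vector-to-scalar function, and the new input `_f 1.` is a scalar. A minimal sketch of the scalar case, using the `_f`, `diff` and `unpack_flt` calls that already appear in this diff:

(* Sketch: first and second derivatives of cos at 1.0 via repeated [diff]. *)
open Owl
open Algodiff.S

let () =
  let x = _f 1. in
  let dcos = diff Maths.cos in
  let d2cos = diff dcos in
  Printf.printf "cos'(1.) = %g   cos''(1.) = %g\n"
    (dcos x |> unpack_flt) (d2cos x |> unpack_flt)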