Well done sending a network over MPI — pat pat.

Now do a lot of nets in parallel, and then we'll talk.
This commit is contained in:
2019-11-25 20:23:33 -08:00
parent a80a3de4fa
commit 04e0b9829c
2 changed files with 144 additions and 26 deletions

View File

@@ -2,25 +2,38 @@ cimport numpy as np
import numpy as np
import mynet as mn
from libc.stdlib cimport malloc
ctr = []
X_train, y_train, X_test, y_test = mn.load_mnist()
cdef extern from "numpy/arrayobject.h":
object PyArray_SimpleNewFromData(
int nd, long* dims, int typenum, void* data
)
void *PyArray_DATA(np.ndarray arr)
ctypedef public struct Dense:
long[2] shape
int ownmem
float* W
float* b
ctypedef public struct Network:
Py_ssize_t n_layers;
Dense* layers;
cdef public char * greeting():
return f'The value is {3**3**3}'.encode('utf-8')
cdef public void debug_print(object o):
print(o.flags)
# print(o)
cdef public np.ndarray[np.float32_t, ndim=2, mode='c'] dot(
np.ndarray[np.float32_t, ndim=2, mode='c'] x,
np.ndarray[np.float32_t, ndim=2, mode='c'] y
):
return x @ y
cdef public np.ndarray[np.float32_t, ndim=2, mode='c'] predict(
@@ -66,7 +79,41 @@ cdef public np.ndarray[np.float32_t, ndim=2, mode='c'] mnist_batch(
arr = np.concatenate([X_train[idx], y_train[idx]], axis=1)
return arr
cdef public float arrsum(
cdef public void inspect_array(
np.ndarray[np.float32_t, ndim=2, mode='c'] a
):
return np.sum(a)
print(a.flags)
print(a.dtype)
print(a.sum())
cdef public void be_like_cified(
object net,
Network* c_net
):
"""WARNING this function makes an assumption that `net` and `c_net`
have the same shape and hopefully is going to crash horribly otherwise."""
for i, l in enumerate(net.layers):
w1, w2 = l.W.shape
l.W[:] = <float[:w1,:w2]>c_net.layers[i].W
l.b[:] = <float[:w2]>c_net.layers[i].b
cdef public void cify_network(
object net, Network* c_net
):
"""WARNING `c_net` is valid as long as `net` is
Whoever has `c_net` is responsible for freeing c_net.layers list
Layers themselves don't need any de-init.
"""
c_net.n_layers = len(net.layers)
c_net.layers = <Dense*>malloc(len(net.layers) * sizeof(Dense))
for i, l in enumerate(net.layers):
w1, w2 = l.W.shape
c_net.layers[i].shape[0] = w1
c_net.layers[i].shape[1] = w2
c_net.layers[i].W = <float*>PyArray_DATA(l.W)
c_net.layers[i].b = <float*>PyArray_DATA(l.b)
c_net.layers[i].ownmem = 0