//------------------------------------------------------------------------------
// LAGraph/experimental/test/test_dnn: test a small sparse deep neural network
//------------------------------------------------------------------------------
// LAGraph, (c) 2019-2022 by The LAGraph Contributors, All Rights Reserved.
// SPDX-License-Identifier: BSD-2-Clause
//
// For additional details (including references to third party source code and
// other files) see the LICENSE file or contact permission@sei.cmu.edu. See
// Contributors.txt for a full list of contributors. Created, in part, with
// funding and support from the U.S. Government (see Acknowledgments.txt file).
// DM22-0790
// Contributed by Timothy A. Davis, Texas A&M University
//------------------------------------------------------------------------------
#include <stdio.h>
#include <acutest.h>
#include "LAGraphX.h"
#include "LAGraph_test.h"
#include "LG_Xtest.h"
#include "LG_internal.h"
char msg [LAGRAPH_MSG_LEN] ;
//------------------------------------------------------------------------------
// setup: start a test
//------------------------------------------------------------------------------
void setup (void)
{
OK (LAGraph_Init (msg)) ;
OK (LAGraph_Random_Init (msg)) ;
}
//------------------------------------------------------------------------------
// teardown: finalize a test
//------------------------------------------------------------------------------
void teardown (void)
{
OK (LAGraph_Random_Finalize (msg)) ;
OK (LAGraph_Finalize (msg)) ;
}
//------------------------------------------------------------------------------
// test_dnn: test a small DNN from https://graphchallenge.mit.edu/data-sets
//------------------------------------------------------------------------------
// This test uses the smallest sparse deep neural network at
// https://graphchallenge.mit.edu/data-sets .  The original problem has 120
// layers, but the categories converge to the correct result in the first 27
// layers, so only the first 30 layers are included in this test.
// The original problem also has 60,000 features (images), but in this
// truncated problem, only the first 1200 features are used.
void test_dnn (void)
{
GrB_Info info ;
setup ( ) ;
#define NLAYERS 30
#define NLAYERS_ORIG 120
int nlayers = NLAYERS ;
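// The Graph Challenge specifies a bias of -0.3 for the 1024-neuron network;
// the same bias value is applied to every neuron in every layer.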
float bias = -0.3 ;
int nneurons = 1024 ;
int nfeatures = 60000 ;
int nfeatures_subset = 1200 ;
printf ("\nSparse deep neural network from"
" https://graphchallenge.mit.edu/data-sets\n"
"# neurons: %d, bias: %g\n"
"original # of layers: %d, layers used here: %d\n"
"original # of features: %d, features used here: %d\n",
nneurons, bias, NLAYERS_ORIG, nlayers, nfeatures, nfeatures_subset) ;
GrB_Matrix Y0 = NULL, Y = NULL, W [NLAYERS], Bias [NLAYERS], T = NULL ;
GrB_Vector TrueCategories = NULL, Categories = NULL, C = NULL ;
for (int layer = 0 ; layer < nlayers ; layer++)
{
W [layer] = NULL ;
Bias [layer] = NULL ;
}
#define LEN 512
char filename [LEN] ;
//--------------------------------------------------------------------------
// read in the problem
//--------------------------------------------------------------------------
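// Y0 holds the input images: one row per feature (image) and one column per
// neuron, so it is nfeatures_subset-by-nneurons in this truncated problem.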
snprintf (filename, LEN, LG_DATA_DIR
"/dnn_data/sparse-images-%d_subset.mtx", nneurons) ;
FILE *f = fopen (filename, "r") ;
TEST_CHECK (f != NULL) ;
OK (LAGraph_MMRead (&Y0, f, msg)) ;
fclose (f) ;
char type_name [LAGRAPH_MAX_NAME_LEN] ;
OK (LAGraph_Matrix_TypeName (type_name, Y0, msg)) ;
TEST_CHECK (MATCHNAME (type_name, "float")) ;
OK (GrB_wait (Y0, GrB_MATERIALIZE)) ;
for (int layer = 0 ; layer < nlayers ; layer++)
{
// read the neuron layer: W [layer]
snprintf (filename, LEN, LG_DATA_DIR "/dnn_data/n%d-l%d.mtx",
nneurons, layer+1) ;
f = fopen (filename, "r") ;
TEST_CHECK (f != NULL) ;
OK (LAGraph_MMRead (&(W [layer]), f, msg)) ;
fclose (f) ;
OK (LAGraph_Matrix_TypeName (type_name, W [layer], msg)) ;
TEST_CHECK (MATCHNAME (type_name, "float")) ;
// construct the bias matrix: Bias [layer].  The Bias matrix is identical
// for every layer, and all of its diagonal entries have the same value
// (bias).
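// Storing the bias on the diagonal of an nneurons-by-nneurons matrix lets
// the bias be applied to the entries of Y*W with a single matrix multiply
// inside LAGraph_dnn (see the sketch before the LAGraph_dnn call below).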
OK (GrB_Matrix_new (&(Bias [layer]), GrB_FP32, nneurons, nneurons)) ;
for (int i = 0 ; i < nneurons ; i++)
{
OK (GrB_Matrix_setElement (Bias [layer], bias, i, i)) ;
}
OK (GrB_wait (Bias [layer], GrB_MATERIALIZE)) ;
}
// read T as a boolean nfeatures_subset-by-1 matrix
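// The file is named for layer 120, the final layer of the original network;
// as noted above, the truncated 30-layer problem reaches the same categories.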
snprintf (filename, LEN, LG_DATA_DIR
"/dnn_data/neuron%d-l%d-categories_subset.mtx",
nneurons, NLAYERS_ORIG) ;
f = fopen (filename, "r") ;
TEST_CHECK (f != NULL) ;
OK (LAGraph_MMRead (&T, f, msg)) ;
fclose (f) ;
OK (LAGraph_Matrix_TypeName (type_name, T, msg)) ;
TEST_CHECK (MATCHNAME (type_name, "bool")) ;
// TrueCategories = T, as a boolean nfeatures-by-1 vector
printf ("\nTrue categories:\n") ;
OK (GrB_Vector_new (&TrueCategories, GrB_BOOL, nfeatures_subset)) ;
OK (GrB_Col_extract (TrueCategories, NULL, NULL, T, GrB_ALL,
nfeatures_subset, 0, NULL)) ;
OK (LAGraph_Vector_Print (TrueCategories, LAGraph_COMPLETE, stdout, msg)) ;
GrB_free (&T) ;
//--------------------------------------------------------------------------
// solve the problem
//--------------------------------------------------------------------------
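// LAGraph_dnn applies all nlayers layers to Y0.  In outline (the exact
// semirings and select operations are internal to LAGraph_dnn, so this is
// only a sketch), each layer computes:
//
//      Y = Y * W [layer]       conventional plus-times matrix multiply
//      Y = Y * Bias [layer]    plus-plus semiring: add the bias to each entry
//      drop entries of Y that are <= 0 (ReLU)
//      clamp entries of Y that exceed 32 to 32 (the Graph Challenge threshold)
//
// The result Y has the same dimensions as Y0.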
OK (LAGraph_dnn (&Y, W, Bias, nlayers, Y0)) ;
//--------------------------------------------------------------------------
// check the result
//--------------------------------------------------------------------------
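// An image belongs to a category if its row of the final Y contains any
// entries, so the categories are obtained by summing each row of Y into C
// and then taking the pattern of C.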
// C = sum (Y)
OK (GrB_Vector_new (&C, GrB_FP32, nfeatures_subset)) ;
OK (GrB_reduce (C, NULL, NULL, GrB_PLUS_FP32, Y, NULL));
// Categories = pattern of C
OK (GrB_Vector_new (&Categories, GrB_BOOL, nfeatures_subset)) ;
OK (GrB_apply (Categories, NULL, NULL, GrB_ONEB_BOOL, C, (bool) true,
NULL)) ;
// check if Categories and TrueCategories are the same
bool isequal ;
printf ("\nComputed categories:\n") ;
OK (LAGraph_Vector_Print (Categories, LAGraph_COMPLETE, stdout, msg)) ;
OK (LAGraph_Vector_IsEqual (&isequal, TrueCategories, Categories, NULL)) ;
TEST_CHECK (isequal) ;
//--------------------------------------------------------------------------
// free everything and finish the test
//--------------------------------------------------------------------------
GrB_free (&TrueCategories) ;
GrB_free (&Categories) ;
GrB_free (&C) ;
GrB_free (&Y) ;
GrB_free (&Y0) ;
for (int layer = 0 ; layer < nlayers ; layer++)
{
GrB_free (& (W [layer])) ;
GrB_free (& (Bias [layer])) ;
}
//--------------------------------------------------------------------------
// error tests
//--------------------------------------------------------------------------
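// LAGraph_dnn must detect NULL input and output arguments and return
// GrB_NULL_POINTER without attempting the computation.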
int result = LAGraph_dnn (NULL, NULL, NULL, nlayers, NULL) ;
TEST_CHECK (result == GrB_NULL_POINTER) ;
teardown ( ) ;
}
//------------------------------------------------------------------------------
// TEST_LIST: all tests to run
//------------------------------------------------------------------------------
TEST_LIST = {
{"DNN", test_dnn},
{NULL, NULL}
} ;