Merge pull request #655 from bo3z/sofstign-stream-opt
Quartus Streaming Softsign (PR #585 contd.)
vloncar committed Sep 20, 2022
2 parents 40ae7f9 + 9271648 commit 9ca7af2
Showing 2 changed files with 27 additions and 36 deletions.
@@ -223,6 +223,8 @@ template<class data_T, class res_T, typename CONFIG_T>
 void softsign(stream<data_T> &data, stream<res_T> &res) {
     #include "activation_tables/softsign_table.tb"
 
+    static const int MAX_VALUE = 8;
+
     SoftsignActLoop:
     #pragma ii 1
     for (int i = 0; i < CONFIG_T::n_in / res_T::size; i++) {
@@ -232,11 +234,21 @@ void softsign(stream<data_T> &data, stream<res_T> &res) {
         SoftsignPackLoop:
         #pragma unroll
         for (int j = 0; j < res_T::size; j++) {
-            hls_register int data_round = (in_data[j]*CONFIG_T::table_size/16).to_int();
-            hls_register int index = data_round + 8*CONFIG_T::table_size/16;
-            if (index < 0) index = 0;
-            else if (index > CONFIG_T::table_size-1) index = CONFIG_T::table_size-1;
-            out_data[j] = softsign_table[index];
+            hls_register typename data_T::value_type absValue;
+            if (in_data[j] < 0) {
+                absValue = -in_data[j];
+            }
+            else {
+                absValue = in_data[j];
+            }
+            ac_int<16> index = (absValue * CONFIG_T::table_size / MAX_VALUE).to_int();
+            if (absValue > MAX_VALUE) index = CONFIG_T::table_size - 1;
+            if (in_data[j] < 0) {
+                out_data[j] = -(typename res_T::value_type) softsign_table[index];
+            }
+            else {
+                out_data[j] = (typename res_T::value_type) softsign_table[index];
+            }
         }
 
         res.write(out_data);
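For context, the new stream kernel exploits the odd symmetry of softsign, softsign(-x) = -softsign(x) with softsign(x) = x / (1 + |x|): it looks up |x| in a table indexed over [0, MAX_VALUE) and negates the result for negative inputs, instead of offsetting a signed index onto a [-8, 8) range as the removed code did. Below is a minimal NumPy sketch of that lookup; it is not taken from the hls4ml sources, and the table size and table contents are assumptions implied by the indexing.

import numpy as np

TABLE_SIZE = 1024   # stands in for CONFIG_T::table_size (assumed value)
MAX_VALUE = 8       # mirrors the constant added in this diff

# Assumed table contents: softsign(x) = x / (1 + x) sampled on [0, MAX_VALUE)
xs = np.arange(TABLE_SIZE) * MAX_VALUE / TABLE_SIZE
softsign_table = xs / (1.0 + xs)

def softsign_lookup(x):
    abs_value = abs(x)
    # Clamp out-of-range inputs to the last table entry, as the HLS code does for |x| > MAX_VALUE
    index = min(int(abs_value * TABLE_SIZE / MAX_VALUE), TABLE_SIZE - 1)
    value = softsign_table[index]
    # Softsign is odd, so negative inputs reuse the positive half of the table
    return -value if x < 0 else value

print(softsign_lookup(-2.0))   # approximately -0.667 = softsign(-2)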
41 changes: 10 additions & 31 deletions test/pytest/test_softsign.py
@@ -7,48 +7,27 @@
 
 test_root_path = Path(__file__).parent
 
-def flat_distribution(shape):
-    return np.random.rand(*shape)
-
-
-@pytest.fixture()
-def generate_data(function, input_shape):
-    return function((1000, *input_shape))
-
-
-# TODO: include latency strategy with flat_distribution when it can be made to pass
-@pytest.mark.parametrize('backend,strategy,function,input_shape,io_type', [
-    ('Vivado', 'stable', flat_distribution, (4,), 'io_parallel'),
-    ('Quartus', 'stable', flat_distribution, (4,), 'io_parallel'),
-    # IO_stram avaliable just for VIVADO
-    ('Vivado', 'stable', flat_distribution, (4,), 'io_stream'),
-    ('Vivado', 'stable', flat_distribution, (4, 4, 3), 'io_stream')
+@pytest.mark.parametrize('backend', ['Vivado', 'Quartus'])
+@pytest.mark.parametrize('input_shape, io_type', [
+    ((8, ), 'io_parallel'),
+    ((8, ), 'io_stream'),
+    ((8, 8, 3), 'io_stream')
 ])
-def test_softsign(backend, strategy, generate_data, input_shape, io_type):
-    X = generate_data
+def test_softsign(backend, input_shape, io_type):
+    X = np.random.rand(1000, *input_shape)
     model = tf.keras.models.Sequential()
     model.add(tf.keras.layers.Activation(input_shape=input_shape, activation='softsign', name='softsign'))
     model.compile()
 
-    f_type = 'ac_fixed<18,8,true,AC_RND,AC_SAT>' if backend == 'Quartus' else 'ap_fixed<18,8,AP_RND,AP_SAT>'
-    cfg = hls4ml.utils.config_from_keras_model(model, granularity='name')
-    cfg['LayerName']['softsign']['Strategy'] = strategy
-    cfg['LayerName']['softsign']['inv_table_t'] = f_type
-    cfg['LayerName']['softsign']['exp_table_t'] = f_type
-
-    odir = str(test_root_path / 'hls4mlprj_softsign_{}'.format(strategy))
+    cfg = hls4ml.utils.config_from_keras_model(model, granularity='name')
+    odir = str(test_root_path / 'hls4mlprj_softsign_{}_{}_{}'.format(backend, io_type, str(input_shape)))
     hls_model = hls4ml.converters.convert_from_keras_model(model, hls_config=cfg, io_type=io_type,
                                                            output_dir=odir, backend=backend)
     hls_model.compile()
 
     y_keras = model.predict(X)
     y_hls4ml = hls_model.predict(X).reshape(y_keras.shape)
 
     acc_hls4ml = accuracy_score(np.argmax(y_keras, axis=-1).ravel(), np.argmax(y_hls4ml, axis=-1).ravel())
 
     print('Accuracy hls4ml relative to keras: {}'.format(acc_hls4ml))
 
-    assert acc_hls4ml >= 0.98
-
-
+    assert acc_hls4ml >= 0.97
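For reference, the conversion flow exercised by this test can also be run standalone. The sketch below covers one of the new parametrizations (Quartus backend, io_stream, input shape (8,)) using only API calls that already appear in the test; the output directory name and sample count are illustrative, and hls_model.compile() builds the C emulation library used by predict rather than running HLS synthesis.

import numpy as np
import tensorflow as tf
import hls4ml

# Single-layer softsign model, as in the test
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Activation(input_shape=(8,), activation='softsign', name='softsign'))

cfg = hls4ml.utils.config_from_keras_model(model, granularity='name')
hls_model = hls4ml.converters.convert_from_keras_model(model, hls_config=cfg, io_type='io_stream',
                                                       output_dir='hls4mlprj_softsign_demo',  # illustrative path
                                                       backend='Quartus')
hls_model.compile()

# Compare the fixed-point table lookup against the Keras reference x / (1 + |x|)
X = np.random.rand(100, 8)
y_keras = model.predict(X)
y_hls4ml = hls_model.predict(X).reshape(y_keras.shape)
print('max |keras - hls4ml|: {}'.format(np.abs(y_keras - y_hls4ml).max()))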