Skip to content

Commit

Permalink
Quartus Extensions
Browse files Browse the repository at this point in the history
  • Loading branch information
bo3z authored and vloncar committed Aug 12, 2022
1 parent 5376181 commit 9f71389
Show file tree
Hide file tree
Showing 2 changed files with 49 additions and 49 deletions.
13 changes: 8 additions & 5 deletions hls4ml/backends/quartus/quartus_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,29 +43,32 @@ def _register_flows(self):
]
quantization_flow = register_flow('quantization', quantization_passes, requires=[init_flow], backend=self.name)

optimization_passes = []
optimization_flow = register_flow('optimize', optimization_passes, requires=[init_flow], backend=self.name)

templates = self._get_layer_templates()
template_flow = register_flow('apply_templates', templates, requires=[init_flow], backend=self.name)
template_flow = register_flow('apply_templates', self._get_layer_templates, requires=[init_flow], backend=self.name)

writer_passes = [
'make_stamp',
'quartus:write_hls'
]
writer_flow_requirements = ['optimize', quartus_types_flow, template_flow]
self._writer_flow = register_flow('write', writer_passes, requires=writer_flow_requirements, backend=self.name)

self._writer_flow = register_flow('write', writer_passes, requires=['quartus:ip'], backend=self.name)

all_passes = get_backend_passes(self.name)

extras = [
# Ideally this should be empty
opt_pass for opt_pass in all_passes if opt_pass not in initializers + quartus_types + templates + writer_passes
opt_pass for opt_pass in all_passes if opt_pass not in initializers + streaming_passes + quartus_types + quantization_passes + templates + optimization_passes + writer_passes
]

if len(extras) > 0:
extras_flow = register_flow('extras', extras, requires=[init_flow], backend=self.name)
else:
extras_flow = None

ip_flow_requirements = ['optimize', init_flow, streaming_flow, quantization_flow, quartus_types_flow, extras_flow, template_flow]
ip_flow_requirements = ['optimize', init_flow, streaming_flow, quantization_flow, optimization_flow, quartus_types_flow, extras_flow, template_flow]
ip_flow_requirements = list(filter(None, ip_flow_requirements))

self._default_flow = register_flow('ip', None, requires=ip_flow_requirements, backend=self.name)
Expand Down
85 changes: 41 additions & 44 deletions test/pytest/test_extensions.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
import pytest
import hls4ml
import tensorflow as tf
import numpy as np
import pytest
import tensorflow as tf
from pathlib import Path

test_root_path = Path(__file__).parent

# Keras implementation of a custom layer

class KReverse(tf.keras.layers.Layer):
''' Keras implementation of a hypothetical custom layer '''
def __init__(self):
Expand All @@ -16,8 +15,7 @@ def __init__(self):
def call(self, inputs):
return tf.reverse(inputs, axis=[-1])

# hls4ml implementations

# hls4ml layer implementation
class HReverse(hls4ml.model.layers.Layer):
''' hls4ml implementation of a hypothetical custom layer '''

Expand All @@ -27,8 +25,35 @@ def initialize(self):
dims = inp.dim_names
self.add_output_variable(shape, dims)

# hls4ml optimizer to remove duplicate optimizer
class RemoveDuplicateReverse(hls4ml.model.optimizer.OptimizerPass):
    '''Optimizer pass that collapses two back-to-back HReverse layers.

    Reversing a tensor twice along the same axis is the identity, so both
    nodes can be dropped from the model graph.
    '''

    def match(self, node):
        # Fire only when this node and its direct producer are both HReverse.
        if not isinstance(node, HReverse):
            return False
        return isinstance(node.get_input_node(), HReverse)

    def transform(self, model, node):
        # Remove the producer first, then this node; rewire=True keeps the
        # graph connected across the removed pair.
        predecessor = node.get_input_node()
        model.remove_node(predecessor, rewire=True)
        model.remove_node(node, rewire=True)
        return True

# Parser for converter
def parse_reverse_layer(keras_layer, input_names, input_shapes, data_reader, config):
    '''Translate a KReverse Keras layer config into an hls4ml layer dict.

    Args:
        keras_layer: parsed Keras layer config; only ['config']['name'] is read.
        input_names: names of the layer's inputs, or None.
        input_shapes: list of input shapes; input_shapes[0] is (batch, n_in).
        data_reader: unused, kept to satisfy the handler signature.
        config: unused, kept to satisfy the handler signature.

    Returns:
        (layer_dict, output_shape): the hls4ml layer attributes and the output
        shape, which equals the single input shape since reversing does not
        change dimensions.
    '''
    layer = {}
    layer['class_name'] = 'HReverse'
    layer['name'] = keras_layer['config']['name']
    layer['n_in'] = input_shapes[0][1]

    if input_names is not None:
        layer['inputs'] = input_names

    # list() replaces the redundant element-by-element copy comprehension.
    return layer, list(input_shapes[0])

# HLS Templates - No specific pragmas used; generic enough for both Intel and Vivado

rev_config_template = """struct config{index} : nnet::reverse_config {{
static const unsigned n_in = {n_in};
Expand All @@ -55,8 +80,6 @@ def format(self, node):
params = self._default_function_params(node)
return self.template.format(**params)


# HLS implementation
rev_hls = \
"""#ifndef NNET_REVERSE_H_
#define NNET_REVERSE_H_
Expand All @@ -74,8 +97,6 @@ def format(self, node):
data_T input[CONFIG_T::n_in],
data_T reversed[CONFIG_T::n_in]
) {
#pragma HLS PIPELINE
for (int i = 0; i < CONFIG_T::n_in; i++) {
reversed[CONFIG_T::n_in - 1 - i] = input[i];
}
Expand All @@ -86,43 +107,19 @@ def format(self, node):
#endif
"""

class RemoveDuplicateReverse(hls4ml.model.optimizer.OptimizerPass):
    '''OptimizerPass removing a pair of consecutive HReverse layers (a no-op).'''

    def match(self, node):
        return isinstance(node, HReverse) and isinstance(node.get_input_node(), HReverse)

    def transform(self, model, node):
        # Two reversals cancel out: delete both nodes (producer first) and
        # let rewire=True reconnect their neighbours.
        for victim in (node.get_input_node(), node):
            model.remove_node(victim, rewire=True)
        return True

# Parser for converter
def parse_reverse_layer(keras_layer, input_names, input_shapes, data_reader, config):
    '''Build the hls4ml layer attributes for a KReverse Keras layer.

    Returns the layer dict and the output shape, which is a copy of the
    single input shape (reversal preserves dimensions).
    '''
    layer = {
        'class_name': 'HReverse',
        'name': keras_layer['config']['name'],
        'n_in': input_shapes[0][1],
    }

    if input_names is not None:
        layer['inputs'] = input_names

    output_shape = [shape for shape in input_shapes[0]]
    return layer, output_shape

def test_extensions(tmp_path):
@pytest.fixture(scope='session', autouse=True)
def regsister_custom_layer():
    # Session-wide, autouse fixture: registers the custom layer once for all
    # tests in this module, so no test needs to request it by name.
    # NOTE(review): fixture name has a typo ("regsister"); since it is
    # autouse, nothing references it by name and renaming would be safe.

    # Register the converter for custom Keras layer
    hls4ml.converters.register_keras_layer_handler('KReverse', parse_reverse_layer)

    # Register the hls4ml's IR layer
    hls4ml.model.layers.register_layer('HReverse', HReverse)

@pytest.mark.parametrize('backend_id', ['Vivado', 'Quartus'])
def test_extensions(tmp_path, backend_id):
# Register the optimization passes (if any)
backend = hls4ml.backends.get_backend('Vivado')
backend.register_pass('remove_duplicate_reverse', RemoveDuplicateReverse, flow='vivado:optimize')
backend = hls4ml.backends.get_backend(backend_id)
backend.register_pass('remove_duplicate_reverse', RemoveDuplicateReverse, flow=f'{backend_id.lower()}:optimize')

# Register template passes for the given backend
backend.register_template(HReverseConfigTemplate)
Expand All @@ -148,15 +145,15 @@ def test_extensions(tmp_path):

hmodel = hls4ml.converters.convert_from_keras_model(
kmodel,
output_dir=str(test_root_path / 'hls4mlprj_extensions'),
backend='Vivado',
output_dir=str(test_root_path / f'hls4mlprj_extensions_{backend_id}'),
backend=backend_id,
io_type='io_parallel',
hls_config={ 'Model': { 'Precision': 'ap_int<4>', 'ReuseFactor': 1} })
hls_config={ 'Model': { 'Precision': 'ap_int<6>', 'ReuseFactor': 1} })

hmodel.compile()
hres = hmodel.predict(x.astype('float32'))

# Check if the optimizer pass was applied
assert 'vivado:remove_duplicate_reverse' in hmodel._applied_flows[0]['vivado:optimize']
assert f'{backend_id.lower()}:remove_duplicate_reverse' in hmodel._applied_flows[0][f'{backend_id.lower()}:optimize']

np.testing.assert_array_equal(kres, hres)

0 comments on commit 9f71389

Please sign in to comment.