diff --git a/SAI/sai_api_gen.py b/SAI/sai_api_gen.py deleted file mode 100755 index eaf8638e8..000000000 --- a/SAI/sai_api_gen.py +++ /dev/null @@ -1,294 +0,0 @@ -#!/usr/bin/env python3 - -try: - import os - import json - import argparse - from git import Repo - from jinja2 import Template, Environment, FileSystemLoader -except ImportError as ie: - print("Import failed for " + ie.name) - exit(1) - -PIPELINES_TAG = 'pipelines' -NAME_TAG = 'name' -INGRESS_PIPE = 'ingress' -TABLES_TAG = 'tables' -KEY_TAG = 'key' - -def get_ingress_pipeline(json_program): - for pipe in json_program[PIPELINES_TAG]: - if pipe[NAME_TAG] == INGRESS_PIPE: - return pipe - - return None - - -def get_tables(ingress_pipeline): - tables = [] - for table in ingress_pipeline[TABLES_TAG]: - if len(table[KEY_TAG]) > 0: - tables.append(table) - - return tables - - -def get_sai_key_type(key_size, key_header, key_field): - if key_size == 1: - return 'bool' - elif key_size <= 8: - return 'sai_uint8_t' - elif key_size <= 16: - return 'sai_uint16_t' - elif key_size == 32 and ('addr' in key_field or 'ip' in key_header): - return 'sai_ip_address_t' - elif key_size <= 32: - return 'sai_uint32_t' - elif key_size == 48 and ('addr' in key_field or 'mac' in key_header): - return 'sai_mac_t' - elif key_size <= 64: - return 'sai_uint64_t' - - -def get_sai_lpm_type(key_size, key_header, key_field): - if key_size == 32 and ('addr' in key_field or 'ip' in key_header): - return 'sai_ip_prefix_t' - - -def get_sai_list_type(key_size, key_header, key_field): - if key_size <= 8: - return 'sai_u8_list_t' - elif key_size <= 16: - return 'sai_u16_list_t' - elif key_size == 32 and ('addr' in key_field or 'ip' in key_header): - return 'sai_ip_address_list_t' - elif key_size <= 32: - return 'sai_u32_list_t' - elif key_size <= 64: - return 'sai_u64_list_t' - - -def get_sai_range_list_type(key_size, key_header, key_field): - if key_size <= 8: - return 'sai_u8_range_list_t' - elif key_size <= 16: - return 'sai_u16_range_list_t' - elif key_size == 32 and ('addr' in key_field or 'ip' in key_header): - return 'sai_ipaddr_range_list_t' - elif key_size <= 32: - return 'sai_u32_range_list_t' - elif key_size <= 64: - return 'sai_u64_range_list_t' - - -def get_sai_key_data(program, key): - sai_key_data = dict() - full_key_name, sai_key_name = key[NAME_TAG].split(':') - key_tuple = full_key_name.split('.') - if len(key_tuple) == 3: - key_struct, key_header, key_field = key_tuple - else: - key_header, key_field = key_tuple - sai_key_data['sai_key_name'] = sai_key_name - - key_header_type = None - for header in program['headers']: - if header['name'] == key_header: - key_header_type = header['header_type'] - - key_size = 0 - for header_type in program['header_types']: - if header_type['name'] == key_header_type: - for field in header_type['fields']: - if field[0] == key_field: - key_size = int(field[1]) - - sai_key_data['match_type'] = key['match_type'] - if sai_key_data['match_type'] == 'exact': - sai_key_data['sai_key_type'] = get_sai_key_type(key_size, key_header, key_field) - elif sai_key_data['match_type'] == 'lpm': - sai_key_data['sai_lpm_type'] = get_sai_lpm_type(key_size, key_header, key_field) - elif sai_key_data['match_type'] == 'list': - sai_key_data['sai_list_type'] = get_sai_list_type(key_size, key_header, key_field) - elif sai_key_data['match_type'] == 'range_list': - sai_key_data['sai_range_list_type'] = get_sai_range_list_type(key_size, key_header, key_field) - - return sai_key_data - - -def get_sai_action_data(program, action_name): - 
sai_action_data = dict() - sai_action_data['name'] = action_name.split('.')[-1] - params = [] - - for action in program['actions']: - if action['name'] == action_name: - for rdata in action['runtime_data']: - param = dict() - param['name'] = rdata['name'] - param['type'] = get_sai_key_type(int(rdata['bitwidth']), param['name'], param['name']) - params.append(param) - - sai_action_data['params'] = params - return sai_action_data - - -def generate_sai_api(program, ignore_tables): - sai_api = dict() - tables = get_tables(get_ingress_pipeline(program)) - sai_tables = [] - for table in tables: - sai_table_data = dict() - sai_table_data['keys'] = [] - sai_table_data['actions'] = [] - sai_table_data['stages'] = [] - table_control, table_name = table[NAME_TAG].split('.', 1) - - # chechk if table belongs to a group - is_new_group = True - if ':' in table_name: - stage, group_name = table_name.split(':') - table_name = group_name - stage = stage.replace('.' , '_') - for sai_table in sai_tables: - if sai_table['name'] == table_name: - sai_table['stages'].append(stage) - is_new_group = False - break - if is_new_group: - sai_table_data['stages'].append(stage) - else: - continue - - sai_table_data['name'] = table_name.replace('.' , '_') - - if sai_table_data['name'] in ignore_tables: - continue - - for key in table[KEY_TAG]: - sai_table_data['keys'].append(get_sai_key_data(program, key)) - - for action in table['actions']: - if action != 'NoAction': - sai_table_data['actions'].append(get_sai_action_data(program, action)) - - if len(sai_table_data['keys']) == 1 and sai_table_data['keys'][0]['sai_key_name'] == (table_name.split('.')[-1] + '_id'): - sai_table_data['is_object'] = 'true' - # Object ID itself is a key - sai_table_data['keys'] = [] - elif len(sai_table_data['keys']) > 5: - sai_table_data['is_object'] = 'true' - else: - sai_table_data['is_object'] = 'false' - sai_table_data['name'] = sai_table_data['name'] + '_entry' - - sai_tables.append(sai_table_data) - - sai_api['tables'] = sai_tables - return sai_api - - -def write_sai_files(sai_api): - # The main file - with open('saiapi.h.j2', 'r') as sai_header_tm_file: - sai_header_tm_str = sai_header_tm_file.read() - - env = Environment(loader=FileSystemLoader('.'), trim_blocks=True, lstrip_blocks=True) - sai_header_tm = env.get_template('saiapi.h.j2') - sai_header_str = sai_header_tm.render(sai_api = sai_api) - - with open('./SAI/experimental/saiexperimental' + sai_api['app_name'] + '.h', 'w') as o: - o.write(sai_header_str) - - # The SAI Extensions - with open('./SAI/experimental/saiextensions.h', 'r') as f: - lines = f.readlines() - - new_lines = [] - for line in lines: - if 'Add new experimental APIs above this line' in line: - new_lines.append(' SAI_API_' + sai_api['app_name'].upper() + ',\n\n') - if 'new experimental object type includes' in line: - new_lines.append(line) - new_lines.append('#include "saiexperimental' + sai_api['app_name'] + '.h"\n') - continue - - new_lines.append(line) - - with open('./SAI/experimental/saiextensions.h', 'w') as f: - f.write(''.join(new_lines)) - - # The SAI Type Extensions - with open('./SAI/experimental/saitypesextensions.h', 'r') as f: - lines = f.readlines() - - new_lines = [] - for line in lines: - if 'Add new experimental object types above this line' in line: - for table in sai_api['tables']: - new_lines.append(' SAI_OBJECT_TYPE_' + table['name'].upper() + ',\n\n') - - new_lines.append(line) - - with open('./SAI/experimental/saitypesextensions.h', 'w') as f: - f.write(''.join(new_lines)) - - # The SAI 
object struct for entries - with open('./SAI/inc/saiobject.h', 'r') as f: - lines = f.readlines() - - new_lines = [] - for line in lines: - if 'Add new experimental entries above this line' in line: - for table in sai_api['tables']: - if table['is_object'] == 'false': - new_lines.append(' /** @validonly object_type == SAI_OBJECT_TYPE_' + table['name'].upper() + ' */\n') - new_lines.append(' sai_' + table['name'] + '_t ' + table['name'] + ';\n\n') - if 'new experimental object type includes' in line: - new_lines.append(line) - new_lines.append('#include "../experimental/saiexperimental' + sai_api['app_name'] + '.h"\n') - continue - - new_lines.append(line) - - with open('./SAI/inc/saiobject.h', 'w') as f: - f.write(''.join(new_lines)) - - - -# CLI -parser = argparse.ArgumentParser(description='P4 SAI API generator') -parser.add_argument('filepath', type=str, help='Path to P4 program BMV2 JSON file') -parser.add_argument('apiname', type=str, help='Name of the new SAI API') -parser.add_argument('--print-sai-lib', type=bool) -parser.add_argument('--sai-git-url', type=str, default='https://github.com/Opencomputeproject/SAI') -parser.add_argument('--ignore-tables', type=str, default='', help='Comma separated list of tables to ignore') -parser.add_argument('--sai-git-branch', type=str, default='master') -args = parser.parse_args() - -if not os.path.isfile(args.filepath): - print('File ' + args.filepath + ' does not exist') - exit(1) - - -if os.path.exists('./SAI'): - print('Directory ./SAI already exists. Please remove in order to proceed') - exit(1) - -# Get SAI dictionary from P4 dictionary -print("Generating SAI API...") -with open(args.filepath) as json_program_file: - json_program = json.load(json_program_file) - -sai_api = generate_sai_api(json_program, args.ignore_tables.split(',')) -sai_api['app_name'] = args.apiname - -# Clone a clean SAI repo -print("Cloning SAI repository...") -Repo.clone_from(args.sai_git_url, './SAI', branch=args.sai_git_branch) - -# Write SAI dictionary into SAI API headers -write_sai_files(sai_api) - -if args.print_sai_lib: - print(json.dumps(sai_api, indent=2)) diff --git a/sirius-pipeline/Dockerfile b/sirius-pipeline/Dockerfile index 7a99a6fc1..43f852dae 100644 --- a/sirius-pipeline/Dockerfile +++ b/sirius-pipeline/Dockerfile @@ -1,4 +1,92 @@ -FROM ubuntu:16.04 +FROM p4lang/behavioral-model:no-pi +LABEL maintainer="P4 Developers " +LABEL description="This Docker image includes all of the PI artifacts, \ +including the Thrift-based PI implementation for the bmv2 backend. It is \ +currently used to run CI tests." + +# Default to using 2 make jobs, which is a good default for CI. If you're +# building locally or you know there are more cores available, you may want to +# override this. +ARG MAKEFLAGS=-j + +# Select the type of image we're building. Use `build` for a normal build, which +# is optimized for image size. Use `test` if this image will be used for +# testing; in this case, the source code and build-only dependencies will not be +# removed from the image. +ARG IMAGE_TYPE=test + +# Select the compiler to use. +# We install the default version of GCC (GCC 9), as well as clang 8 and clang 10. 
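# A minimal sketch of how the build arguments above can be overridden when
# building this image locally (the values and the image tag are illustrative
# examples, not project defaults; the Makefile's `docker` target passes its
# own --build-arg values for user/uid):
#
#   docker build --build-arg MAKEFLAGS=-j4 --build-arg IMAGE_TYPE=build -t bmv2-$USER .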
+ARG sswitch_grpc=yes +ARG CC=gcc +ARG CXX=g++ +ENV TZ=America/Los_Angeles +RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone + +ENV GIT_SSL_NO_VERIFY=true + +ENV PI_DEPS automake \ + build-essential \ + clang-8 \ + clang-10 \ + clang-format-8 \ + g++ \ + libboost-dev \ + libboost-system-dev \ + libboost-thread-dev \ + libtool \ + libtool-bin \ + pkg-config \ + libreadline-dev \ + libpcap-dev \ + libmicrohttpd-dev \ + doxygen \ + valgrind \ + vim \ + git-core \ + python3 \ + cmake \ + python3-pip +ENV PI_RUNTIME_DEPS libboost-system1.71.0 \ + libboost-thread1.71.0 \ + libpcap0.8 \ + python3 \ + python-is-python3 + +RUN apt-get update && apt-get install -y --no-install-recommends $PI_DEPS $PI_RUNTIME_DEPS + +RUN cd / && git clone --depth=1 -b v1.43.2 https://github.com/google/grpc.git && \ + cd grpc/ && \ + git submodule update --init --recursive && \ + mkdir -p cmake/build && \ + cd cmake/build && \ + cmake -DBUILD_SHARED_LIBS=ON -DgRPC_INSTALL=ON --parallel 1 ../.. && \ + make && \ + make install + +ENV LD_LIBRARY_PATH=/usr/local/lib + +WORKDIR / +# COPY proto/sysrepo/docker_entry_point.sh /docker_entry_point.sh +# COPY . /PI/ +RUN git clone https://github.com/p4lang/PI +WORKDIR /PI/ +RUN git submodule update --init --recursive +RUN apt-get update && \ + ./autogen.sh && \ + ./configure --enable-Werror --with-bmv2 --with-proto --with-cli --with-internal-rpc --with-sysrepo && \ + ./proto/sysrepo/install_yangs.sh && \ + make && \ + make install + + +# RUN git clone https://github.com/marian-pritsak/DASH-1.git && \ +RUN pip install gitpython +RUN pip install jinja2 +# ./sai_api_gen.py rt_sirius.json --ignore-tables=appliance,eni_meter,slb_decap --sai-git-url=https://github.com/marian-pritsak/SAI.git --sai-git-branch=base --impl_filepath=./impl.cpp dash +#g++ -I./SAI/inc/ -I./SAI/experimental/ impl.cpp -L/usr/local/lib/ -lpiprotogrpc -lpiprotobuf -lprotobuf -lgrpc++ -lpiall -lpi_dummy + +ENV DEBIAN_FRONTEND noninteractive RUN apt update && \ apt install -y g++ gcc git make automake sudo && \ @@ -7,49 +95,46 @@ RUN apt update && \ libboost-filesystem-dev libboost-thread-dev libevent-dev libtool flex bison pkg-config g++ libssl-dev && \ apt-get install -y cmake g++ git automake libtool libgc-dev bison flex \ libfl-dev libgmp-dev libboost-dev libboost-iostreams-dev \ - libboost-graph-dev llvm pkg-config python python-scapy python-ipaddr python-ply python3-pip \ + libboost-graph-dev llvm pkg-config python python-ply python3-pip \ tcpdump && \ - pip3 install scapy ply && \ + pip3 install scapy ply ipaddr && \ apt-get install -y autoconf automake libtool curl make g++ unzip -RUN git clone https://github.com/protocolbuffers/protobuf.git && \ - cd protobuf && \ - git checkout v3.6.1 && \ - git submodule update --init --recursive && \ - ./autogen.sh && \ - ./configure && \ - make && \ - make install && ldconfig && \ - cd .. && rm -rf protobuf - -RUN git clone --recursive https://github.com/p4lang/p4c.git && \ - cd p4c && \ - git submodule update --init --recursive && \ - mkdir build && \ - cd build && \ - cmake .. && \ - make && \ - make install && \ - cd / && \ - rm -rf p4lang - -RUN pip3 install six - -RUN apt-get update && apt-get install -y lsb-release - -RUN pip3 install ipaddr - -RUN git clone https://github.com/marian-pritsak/behavioral-model && \ - cd behavioral-model && \ - bash install_deps.sh && \ - ./autogen.sh && \ - ./configure && \ - make && \ - make install && \ - cd .. 
&& \ - rm -rf behavioral-model - -ENV LD_LIBRARY_PATH /usr/local/lib/ + + RUN git clone --recursive https://github.com/p4lang/p4c.git && \ + cd p4c && \ + git submodule update --init --recursive && \ + mkdir build && \ + cd build && \ + cmake --parallel 1 .. && \ + make && \ + make install + +ENV BM_RUNTIME_DEPS libboost-program-options1.71.0 \ + libboost-system1.71.0 \ + libboost-filesystem1.71.0 \ + libboost-thread1.71.0 \ + libgmp10 \ + libpcap0.8 \ + python3 \ + python-is-python3 + +RUN apt-get update && apt-get install -y git + +WORKDIR / + +RUN git clone https://github.com/p4lang/behavioral-model.git + +WORKDIR /behavioral-model/ + +RUN apt-get update -qq && \ + apt-get install -qq --no-install-recommends $BM_DEPS $BM_RUNTIME_DEPS && \ + ./autogen.sh && \ + ./configure --with-pdfixed --with-pi --with-stress-tests --enable-debugger --enable-coverage --enable-Werror && \ + make && \ + make install + +WORKDIR / ARG user ARG uid diff --git a/sirius-pipeline/Makefile b/sirius-pipeline/Makefile index e2c78c2d3..8494c41c5 100644 --- a/sirius-pipeline/Makefile +++ b/sirius-pipeline/Makefile @@ -6,23 +6,54 @@ DOCKER_RUN := docker run \ -it \ --name simple_switch \ -v $(PWD)/bmv2:/bmv2 \ - -v $(PWD)/behavioral-model:/bm-src \ + -v $(PWD)/SAI:/SAI \ + -v $(PWD)/tests:/tests \ --network=host \ - --rm \ - bmv2-$(USER) + --rm bmv2/sirius_pipeline.bmv2/sirius_pipeline.json: - $(DOCKER_RUN) p4c -b bmv2 bmv2/sirius_pipeline.p4 -o bmv2/sirius_pipeline.bmv2 + $(DOCKER_RUN) bmv2-$(USER) p4c \ + -b \ + bmv2 \ + bmv2/sirius_pipeline.p4 \ + -o bmv2/sirius_pipeline.bmv2 \ + --p4runtime-files bmv2/sirius_pipeline.bmv2/sirius_pipeline_p4rt.json,bmv2/sirius_pipeline.bmv2/sirius_pipeline_p4rt.txt clean: rm -rf bmv2/sirius_pipeline.bmv2 + rm -rf SAI/SAI + rm -rf SAI/lib + make -C tests/vnet_out clean run-switch: - $(DOCKER_RUN) simple_switch --log-console --interface 0@veth0 --interface 1@veth2 /bmv2/sirius_pipeline.bmv2/sirius_pipeline.json + $(DOCKER_RUN) \ + -v $(PWD)/bmv2/sirius_pipeline.bmv2/sirius_pipeline.json:/etc/dash/sirius_pipeline.json \ + -v $(PWD)/bmv2/sirius_pipeline.bmv2/sirius_pipeline_p4rt.txt:/etc/dash/sirius_pipeline_p4rt.txt \ + --name simple_switch-$(USER) \ + bmv2-$(USER) \ + sudo \ + env LD_LIBRARY_PATH=/usr/local/lib \ + simple_switch_grpc \ + --interface 0@veth0 \ + --interface 1@veth2 \ + --log-console \ + --no-p4 + +sai: + $(DOCKER_RUN) -w /SAI bmv2-$(USER) \ + ./generate_dash_api.sh + $(DOCKER_RUN) -w /SAI/lib bmv2-$(USER) \ + make + +test: + $(DOCKER_RUN) -w /tests/vnet_out bmv2-$(USER) \ + make + +run-test: + docker exec -it -w /tests/vnet_out simple_switch-$(USER) ./vnet_out docker: docker build \ - --no-cache \ -t bmv2-$(USER) \ --build-arg user=$(USER) \ --build-arg uid=$(shell id -u) \ diff --git a/sirius-pipeline/README.md b/sirius-pipeline/README.md index 2c19dfc03..72a7b9db5 100644 --- a/sirius-pipeline/README.md +++ b/sirius-pipeline/README.md @@ -9,6 +9,8 @@ make docker ``` make clean make bmv2/sirius_pipeline.bmv2/sirius_pipeline.json +make sai +make test ``` ## Run software switch @@ -16,51 +18,7 @@ make bmv2/sirius_pipeline.bmv2/sirius_pipeline.json make run-switch ``` -## Control plane +## from a different terminal, run tests (run-switch will run interactive docker view in foreground) ``` -docker exec -it simple_switch_CLI +make run-test ``` - -## Configuration example -``` -table_add direction_lookup set_direction 60 => 1 -table_add direction_lookup set_direction 70 => 2 -table_add eni_lookup_from_vm outbound.set_eni cc:cc:cc:cc:cc:cc => 7 -table_add eni_lookup_to_vm 
inbound.set_eni c:cc:cc:cc:cc:cc => 7 -table_add eni_to_vni set_vni 7 => 9 -table_add routing route_vnet 7 0 0x01010100/24 => 14 -table_add ca_to_pa set_tunnel_mapping 14 0 0x01010102 => 0x02020202 88:88:88:88:88:88 1 -table_add ca_to_pa set_tunnel_mapping 14 0 0x01010103 => 0x02020202 88:88:88:88:88:88 0 -table_add appliance set_appliance 0&&&0 => 77:77:77:77:77:77 66:66:66:66:66:66 0x02020201 0 -table_add ca_to_pa set_tunnel_mapping 14 1 0x01010104 => 0x02020202 88:88:88:88:88:88 1 -table_add ca_to_pa set_tunnel_mapping 14 1 0x01010105 => 0x02020202 88:88:88:88:88:88 0 -``` - -# Sirius Pipeline P4 Behavior Models -**TODO** - -TCP AND UDP - -Fragmentation - -NACK write-up in documentation - -Sequence # tracking for FIN and final ACK (already started) FIN/ACK ACK - Reshma/Anjali to write into PR - -Tracking the ACKs, close down xns more quickly? - -Do we need to track the sequence # to ensure it is tracking for the FIN? - add to doc - -Are we garbage-collecting re: how long to wait? (look for stale or temporal xns) - add to doc - -Absolute timer? For most cases we will get the close. Timers are expensive; expands flow table, esp at high xn rates? - -If flow cache is behaving correctly (aging out, etc...) s/not have active xns. - -3 variables: rate, working set of flows, backup. - -Background tasks removing temporal flows? Advantage here vs. sequence # tracking? - -Expense of timer vs. sequence #. Timer = less expensive. - -Enforce xn rate limit? diff --git a/sirius-pipeline/SAI/Makefile b/sirius-pipeline/SAI/Makefile new file mode 100644 index 000000000..db818adb1 --- /dev/null +++ b/sirius-pipeline/SAI/Makefile @@ -0,0 +1,19 @@ +dashone: test.cpp + g++ \ + -I SAI/inc \ + -I SAI/experimental/ \ + -o test test.cpp saidash.cpp \ + -L/usr/local/lib/ \ + -lpthread \ + -lpiprotogrpc \ + -lpiprotobuf \ + -lprotobuf \ + -lgrpc++ \ + -lgrpc \ + -lpiall \ + -lpi_dummy \ + -lpthread \ + -labsl_synchronization \ + -g + + diff --git a/SAI/README.md b/sirius-pipeline/SAI/README.md similarity index 100% rename from SAI/README.md rename to sirius-pipeline/SAI/README.md diff --git a/sirius-pipeline/SAI/generate_dash_api.sh b/sirius-pipeline/SAI/generate_dash_api.sh new file mode 100755 index 000000000..24f5a992a --- /dev/null +++ b/sirius-pipeline/SAI/generate_dash_api.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +./sai_api_gen.py \ + /bmv2/sirius_pipeline.bmv2/sirius_pipeline_p4rt.json \ + --ignore-tables=appliance,eni_meter,slb_decap \ + --overwrite=true \ + dash diff --git a/sirius-pipeline/SAI/sai_api_gen.py b/sirius-pipeline/SAI/sai_api_gen.py new file mode 100755 index 000000000..eb823a3ab --- /dev/null +++ b/sirius-pipeline/SAI/sai_api_gen.py @@ -0,0 +1,372 @@ +#!/usr/bin/env python3 + +try: + import os + import json + import argparse + import shutil + from git import Repo + from jinja2 import Template, Environment, FileSystemLoader +except ImportError as ie: + print("Import failed for " + ie.name) + exit(1) + +NAME_TAG = 'name' +TABLES_TAG = 'tables' +BITWIDTH_TAG = 'bitwidth' +ACTIONS_TAG = 'actions' +PREAMBLE_TAG = 'preamble' +OTHER_MATCH_TYPE_TAG = 'otherMatchType' +MATCH_TYPE_TAG = 'matchType' +PARAMS_TAG = 'params' +ACTION_REFS_TAG = 'actionRefs' +MATCH_FIELDS_TAG = 'matchFields' +NOACTION = 'NoAction' +STAGES_TAG = 'stages' + +def get_sai_key_type(key_size, key_header, key_field): + if key_size == 1: + return 'bool', "booldata" + elif key_size <= 8: + return 'sai_uint8_t', "u8" + elif key_size == 16 and ('_id' in key_field): + return 'sai_object_id_t', "u16" + elif key_size <= 16: + return 
'sai_uint16_t', "u16" + elif key_size == 32 and ('addr' in key_field or 'ip' in key_header): + return 'sai_ip_address_t', "ipaddr" + elif key_size == 32 and ('_id' in key_field): + return 'sai_object_id_t', "u32" + elif key_size <= 32: + return 'sai_uint32_t', "u32" + elif key_size == 48 and ('addr' in key_field or 'mac' in key_header): + return 'sai_mac_t', "mac" + elif key_size <= 64: + return 'sai_uint64_t', "u64" + elif key_size == 128: + return 'sai_ip_address_t', "ipaddr" + else: + raise ValueError(f'key_size={key_size} is not supported') + + +def get_sai_lpm_type(key_size, key_header, key_field): + if key_size == 32 and ('addr' in key_field or 'ip' in key_header): + return 'sai_ip_prefix_t', 'ipPrefix' + elif key_size == 128 and ('addr' in key_field or 'ip' in key_header): + return 'sai_ip_prefix_t', 'ipPrefix' + raise ValueError(f'key_size={key_size}, key_header={key_header}, and key_field={key_field} is not supported') + + +def get_sai_list_type(key_size, key_header, key_field): + if key_size <= 8: + return 'sai_u8_list_t', "u8list" + elif key_size <= 16: + return 'sai_u16_list_t', "u16list" + elif key_size == 32 and ('addr' in key_field or 'ip' in key_header): + return 'sai_ip_address_list_t', "ipaddrlist" + elif key_size <= 32: + return 'sai_u32_list_t', "u32list" + elif key_size <= 64: + ValueError(f'sai_u64_list_t is not supported') + return 'sai_u64_list_t', "no mapping" + raise ValueError(f'key_size={key_size} is not supported') + + +def get_sai_range_list_type(key_size, key_header, key_field): + if key_size <= 8: + return 'sai_u8_range_list_t', 'u8rangelist' + elif key_size <= 16: + return 'sai_u16_range_list_t', 'u16rangelist' + elif key_size == 32 and ('addr' in key_field or 'ip' in key_header): + return 'sai_ipaddr_range_list_t', 'ipaddrrangelist' + elif key_size <= 32: + return 'sai_u32_range_list_t', 'u32rangelist' + elif key_size <= 64: + return 'sai_u64_range_list_t', 'u64rangelist' + raise ValueError(f'key_size={key_size} is not supported') + + +def get_sai_key_data(key): + sai_key_data = dict() + sai_key_data['id'] = key['id'] + full_key_name, sai_key_name = key[NAME_TAG].split(':') + key_tuple = full_key_name.split('.') + if len(key_tuple) == 3: + key_struct, key_header, key_field = key_tuple + else: + key_header, key_field = key_tuple + sai_key_data['sai_key_name'] = sai_key_name + + key_size = key[BITWIDTH_TAG] + + if OTHER_MATCH_TYPE_TAG in key: + sai_key_data['match_type'] = key[OTHER_MATCH_TYPE_TAG].lower() + elif MATCH_TYPE_TAG in key: + sai_key_data['match_type'] = key[MATCH_TYPE_TAG].lower() + else: + raise ValueError(f'No valid match tag found') + + if sai_key_data['match_type'] == 'exact': + sai_key_data['sai_key_type'], sai_key_data['sai_key_field'] = get_sai_key_type(key_size, key_header, key_field) + elif sai_key_data['match_type'] == 'lpm': + sai_key_data['sai_lpm_type'], sai_key_data['sai_lpm_field'] = get_sai_lpm_type(key_size, key_header, key_field) + elif sai_key_data['match_type'] == 'list': + sai_key_data['sai_list_type'], sai_key_data['sai_list_field'] = get_sai_list_type(key_size, key_header, key_field) + elif sai_key_data['match_type'] == 'range_list': + sai_key_data['sai_range_list_type'], sai_key_data['sai_range_list_field'] = get_sai_range_list_type(key_size, key_header, key_field) + else: + raise ValueError(f"match_type={sai_key_data['match_type']} is not supported") + + sai_key_data['bitwidth'] = key_size + return sai_key_data + + +def extract_action_data(program): + action_data = {} + for action in program[ACTIONS_TAG]: + preable = 
action[PREAMBLE_TAG] + id = preable['id'] + name = preable[NAME_TAG].split('.')[-1] + params = [] + if PARAMS_TAG in action: + for p in action[PARAMS_TAG]: + param = dict() + param['id'] = p['id'] + param[NAME_TAG] = p[NAME_TAG] + param['type'], param['field'] = get_sai_key_type(int(p[BITWIDTH_TAG]), p[NAME_TAG], p[NAME_TAG]) + param['bitwidth'] = p[BITWIDTH_TAG] + params.append(param) + action_data[id] = {'id': id, NAME_TAG: name, PARAMS_TAG: params} + return action_data + + +def table_with_counters(program, table_id): + for counter in program['directCounters']: + if counter['directTableId'] == table_id: + return 'true' + return 'false' + + +def generate_sai_apis(program, ignore_tables): + sai_apis = [] + all_actions = extract_action_data(program) + tables = sorted(program[TABLES_TAG], key=lambda k: k[PREAMBLE_TAG][NAME_TAG]) + for table in tables: + sai_table_data = dict() + sai_table_data['keys'] = [] + sai_table_data[ACTIONS_TAG] = [] + sai_table_data[STAGES_TAG] = [] + + table_control, table_name = table[PREAMBLE_TAG][NAME_TAG].split('.', 1) + if table_name in ignore_tables: + continue + + table_name, api_name = table_name.split('|') + sai_table_data[NAME_TAG] = table_name.replace('.' , '_') + sai_table_data['id'] = table[PREAMBLE_TAG]['id'] + sai_table_data['with_counters'] = table_with_counters(program, sai_table_data['id']) + + # chechk if table belongs to a group + is_new_group = True + if ':' in table_name: + stage, group_name = table_name.split(':') + table_name = group_name + stage = stage.replace('.' , '_') + for sai_api in sai_apis: + for sai_table in sai_api[TABLES_TAG]: + if sai_table['name'] == table_name: + sai_table[STAGES_TAG].append(stage) + is_new_group = False + break + if is_new_group: + sai_table_data[NAME_TAG] = table_name + sai_table_data[STAGES_TAG].append(stage) + else: + continue + + for key in table[MATCH_FIELDS_TAG]: + # skip v4/v6 selector + if 'v4_or_v6' in key[NAME_TAG]: + continue + sai_table_data['keys'].append(get_sai_key_data(key)) + + for action in table[ACTION_REFS_TAG]: + action_id = action["id"] + if all_actions[action_id][NAME_TAG] != NOACTION: + sai_table_data[ACTIONS_TAG].append(all_actions[action_id]) + + if len(sai_table_data['keys']) == 1 and sai_table_data['keys'][0]['sai_key_name'].endswith(table_name.split('.')[-1] + '_id'): + sai_table_data['is_object'] = 'true' + # Object ID itself is a key + sai_table_data['keys'] = [] + elif len(sai_table_data['keys']) > 5: + sai_table_data['is_object'] = 'true' + else: + sai_table_data['is_object'] = 'false' + sai_table_data['name'] = sai_table_data['name'] + '_entry' + + is_new_api = True + for sai_api in sai_apis: + if sai_api['app_name'] == api_name: + sai_api[TABLES_TAG].append(sai_table_data) + is_new_api = False + break + + if is_new_api: + new_api = dict() + new_api['app_name'] = api_name + new_api[TABLES_TAG] = [sai_table_data] + sai_apis.append(new_api) + + return sai_apis + +def write_sai_impl_files(sai_api): + env = Environment(loader=FileSystemLoader('.'), trim_blocks=True, lstrip_blocks=True) + sai_impl_tm = env.get_template('/templates/saiapi.cpp.j2') + sai_impl_str = sai_impl_tm.render(tables = sai_api[TABLES_TAG], app_name = sai_api['app_name']) + + with open('./lib/sai' + sai_api['app_name'].replace('_', '') + '.cpp', 'w') as o: + o.write(sai_impl_str) + +def write_sai_makefile(sai_api_name_list): + env = Environment(loader=FileSystemLoader('.')) + makefile_tm = env.get_template('/templates/Makefile.j2') + makefile_str = makefile_tm.render(api_names = sai_api_name_list) + + with 
open('./lib/Makefile', 'w') as o: + o.write(makefile_str) + + env = Environment(loader=FileSystemLoader('.'), trim_blocks=True, lstrip_blocks=True) + sai_impl_tm = env.get_template('/templates/utils.cpp.j2') + sai_impl_str = sai_impl_tm.render(tables = sai_api[TABLES_TAG], app_name = sai_api['app_name']) + + with open('./lib/utils.cpp', 'w') as o: + o.write(sai_impl_str) + + env = Environment(loader=FileSystemLoader('.'), trim_blocks=True, lstrip_blocks=True) + sai_impl_tm = env.get_template('/templates/utils.h.j2') + sai_impl_str = sai_impl_tm.render(tables = sai_api[TABLES_TAG], app_name = sai_api['app_name']) + + with open('./lib/utils.h', 'w') as o: + o.write(sai_impl_str) + + +def write_sai_files(sai_api): + # The main file + with open('templates/saiapi.h.j2', 'r') as sai_header_tm_file: + sai_header_tm_str = sai_header_tm_file.read() + + env = Environment(loader=FileSystemLoader('.'), trim_blocks=True, lstrip_blocks=True) + sai_header_tm = env.get_template('templates/saiapi.h.j2') + sai_header_str = sai_header_tm.render(sai_api = sai_api) + + with open('./SAI/experimental/saiexperimental' + sai_api['app_name'].replace('_', '') + '.h', 'w') as o: + o.write(sai_header_str) + + # The SAI Extensions + with open('./SAI/experimental/saiextensions.h', 'r') as f: + lines = f.readlines() + + new_lines = [] + for line in lines: + if 'Add new experimental APIs above this line' in line: + new_lines.append(' SAI_API_' + sai_api['app_name'].upper() + ',\n\n') + if 'new experimental object type includes' in line: + new_lines.append(line) + new_lines.append('#include "saiexperimental' + sai_api['app_name'].replace('_', '') + '.h"\n') + continue + + new_lines.append(line) + + with open('./SAI/experimental/saiextensions.h', 'w') as f: + f.write(''.join(new_lines)) + + # The SAI Type Extensions + with open('./SAI/experimental/saitypesextensions.h', 'r') as f: + lines = f.readlines() + + new_lines = [] + for line in lines: + if 'Add new experimental object types above this line' in line: + for table in sai_api[TABLES_TAG]: + new_lines.append(' SAI_OBJECT_TYPE_' + table[NAME_TAG].upper() + ',\n\n') + + new_lines.append(line) + + with open('./SAI/experimental/saitypesextensions.h', 'w') as f: + f.write(''.join(new_lines)) + + # The SAI object struct for entries + with open('./SAI/inc/saiobject.h', 'r') as f: + lines = f.readlines() + + new_lines = [] + for line in lines: + if 'Add new experimental entries above this line' in line: + for table in sai_api[TABLES_TAG]: + if table['is_object'] == 'false': + new_lines.append(' /** @validonly object_type == SAI_OBJECT_TYPE_' + table[NAME_TAG].upper() + ' */\n') + new_lines.append(' sai_' + table[NAME_TAG] + '_t ' + table[NAME_TAG] + ';\n\n') + if 'new experimental object type includes' in line: + new_lines.append(line) + new_lines.append('#include "../experimental/saiexperimental' + sai_api['app_name'].replace('_', '') + '.h"\n') + continue + + new_lines.append(line) + + with open('./SAI/inc/saiobject.h', 'w') as f: + f.write(''.join(new_lines)) + + + +# CLI +parser = argparse.ArgumentParser(description='P4 SAI API generator') +parser.add_argument('filepath', type=str, help='Path to P4 program RUNTIME JSON file') +parser.add_argument('apiname', type=str, help='Name of the new SAI API') +parser.add_argument('--print-sai-lib', type=bool) +parser.add_argument('--sai-git-url', type=str, default='https://github.com/Opencomputeproject/SAI') +parser.add_argument('--ignore-tables', type=str, default='', help='Comma separated list of tables to ignore') 
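# For reference, a typical invocation of this generator, mirroring
# generate_dash_api.sh earlier in this change (the runtime JSON path and the
# ignored-table list are taken from that script and may differ per setup):
#
#   ./sai_api_gen.py /bmv2/sirius_pipeline.bmv2/sirius_pipeline_p4rt.json \
#       --ignore-tables=appliance,eni_meter,slb_decap --overwrite=true dash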
+parser.add_argument('--sai-git-branch', type=str, default='master') +parser.add_argument('--overwrite', type=bool, default=False, help='Overwrite the existing SAI repo') +args = parser.parse_args() + +if not os.path.isfile(args.filepath): + print('File ' + args.filepath + ' does not exist') + exit(1) + +if os.path.exists('./SAI'): + if args.overwrite == False: + print('Directory ./SAI already exists. Please remove in order to proceed') + exit(1) + else: + shutil.rmtree('./SAI') + +if os.path.exists('./lib'): + if args.overwrite == False: + print('Directory ./lib already exists. Please remove in order to proceed') + exit(1) + else: + shutil.rmtree('./lib') + +# Get SAI dictionary from P4 dictionary +print("Generating SAI API...") +with open(args.filepath) as json_program_file: + json_program = json.load(json_program_file) + +sai_apis = generate_sai_apis(json_program, args.ignore_tables.split(',')) + +# Clone a clean SAI repo +print("Cloning SAI repository...") +Repo.clone_from(args.sai_git_url, './SAI', branch=args.sai_git_branch) +os.mkdir("lib") + +# Write SAI dictionary into SAI API headers +sai_api_name_list = [] +for sai_api in sai_apis: + write_sai_files(sai_api) + write_sai_impl_files(sai_api) + sai_api_name_list.append(sai_api['app_name'].replace('_', '')) +write_sai_makefile(sai_api_name_list) + +if args.print_sai_lib: + print(json.dumps(sai_api, indent=2)) diff --git a/sirius-pipeline/SAI/templates/Makefile.j2 b/sirius-pipeline/SAI/templates/Makefile.j2 new file mode 100644 index 000000000..cc178187c --- /dev/null +++ b/sirius-pipeline/SAI/templates/Makefile.j2 @@ -0,0 +1,15 @@ +libsai.so: {% for api in api_names %}sai{{ api }}.cpp {% endfor %} + g++ \ + -c \ + -I ../SAI/inc/ \ + -I ../SAI/experimental/ \ + -fPIC \ + -g \ + utils.cpp \ + {% for api in api_names %}sai{{ api }}.cpp {% endfor %} + g++ \ + -shared \ + -g \ + -o libsai.so \ + utils.o \ + {% for api in api_names %}sai{{ api }}.o {% endfor %} diff --git a/sirius-pipeline/SAI/templates/saiapi.cpp.j2 b/sirius-pipeline/SAI/templates/saiapi.cpp.j2 new file mode 100644 index 000000000..98ca79195 --- /dev/null +++ b/sirius-pipeline/SAI/templates/saiapi.cpp.j2 @@ -0,0 +1,324 @@ +#include +#include +#include +#include +#include +#include +#include +#include "p4/v1/p4runtime.grpc.pb.h" +#include "utils.h" +#include "saiexperimental{{ app_name | replace('_', '') }}.h" +#include "saitypes.h" +#include +#include +#include + +#if __APPLE__ +#include +#else +#include +#include +#endif + +#define LOG(x) std::cerr<mutable_action(); + auto action = entry->mutable_action(); + auto expectedParams = 0; + auto matchedParams = 0; + + // Search the action + pi_p4_id_t actionId; + + matchActionEntry->set_table_id(tableId); + + for (uint32_t i = 0; i < attr_count; i++) { + switch(attr_list[i].id) { + {% for key in table['keys'] %} + case SAI_{{ table.name | upper }}_ATTR_{{ key.sai_key_name | upper }}: { + auto mf = matchActionEntry->add_match(); + mf->set_field_id({{key.id}}); + {% if key.match_type == 'exact' %} + auto mf_exact = mf->mutable_exact(); + {{key.sai_key_field}}SetVal(attr_list[i].value, mf_exact, {{key.bitwidth}}); + {% elif key.match_type == 'lpm' %} + auto mf_lpm = mf->mutable_lpm(); + {{key.sai_lpm_field}}SetVal(attr_list[i].value, mf_lpm, {{key.bitwidth}}); + {% elif key.match_type == 'list' %} + assert(0 && "mutable_list is not supported"); + goto ErrRet; + // auto mf1_list = mf1->mutable_xxx(); + //{{key.sai_list_field}}SetVal(attr_list[i].value, mf1_list, {{key.bitwidth}}); + {% elif key.match_type == 'range_list' %} + 
goto ErrRet; + assert(0 && "range_list is not supported"); + // auto mf1_list = mf1->mutable_xxx(); + //{{key.sai_range_list_field}}SetVal(attr_list[i].value, mf1_list, {{key.bitwidth}}); + {% endif %} + // TODO: if it is ternary, need to set the mask + break; + } + {% endfor %} + } + } + + + // If there is only one action, simply set it. + // Else, search in the attrs. + {% if table.actions|length == 1 %} + {% for action in table.actions %} + actionId = {{action.id}}; // SAI_{{ table.name | upper }}_ACTION_{{ action.name | upper }} + expectedParams = {{ action.params|length }}; + {% endfor %} + {% else %} + // Search the action + for (uint32_t i = 0; i < attr_count; i++) { + switch(attr_list[i].value.s32) { + {% for action in table.actions %} + case SAI_{{ table.name | upper }}_ACTION_{{ action.name | upper }}: { + actionId = {{action.id}}; + expectedParams = {{ action.params|length }}; + break; + } + {% endfor %} + } + // only one action + break; + } + {% endif %} + action->set_action_id(actionId); + + for (uint32_t i = 0; i < attr_count; i++) { + switch(attr_list[i].id) { + {% for action in table.actions %} + {% for param in action.params %} + case SAI_{{ table.name | upper }}_ATTR_{{ param.name | upper }}: { + auto param = action->add_params(); + param->set_param_id({{param.id}}); + {{param.field}}SetVal(attr_list[i].value, param, {{param.bitwidth}}); + matchedParams++; + break; + } + {% endfor %} + {% endfor %} + } + } + + assert((matchedParams == expectedParams)); + + if (matchedParams != expectedParams) { + goto ErrRet; + } + // TODO: ternaly needs to set priority + uint64_t objId; + if (true == InsertInTable(matchActionEntry, &objId)) { + *{{ table.name }}_id = objId; + return 0; + } +ErrRet: + delete matchActionEntry; + return -1; +} + +sai_status_t sai_remove_{{ table.name }}(_In_ sai_object_id_t {{ table.name }}_id) { + if (RemoveFromTable({{ table.name }}_id)) { + return 0; + } + return -1; +} + +sai_status_t sai_set_{{ table.name }}_attribute ( + _In_ sai_object_id_t {{ table.name }}_id, + _In_ const sai_attribute_t *attr) { + assert(0 && "sai_set_{{ table.name }}_attribute NYI"); + return -1; +} + +sai_status_t sai_get_{{ table.name }}_attribute( + _In_ sai_object_id_t {{ table.name }}_id, + _In_ uint32_t attr_count, + _Inout_ sai_attribute_t *attr_list) { + assert(0 && "sai_get_{{ table.name }}_attribute NYI"); + return -1; +} +{% else %} +sai_status_t sai_create_{{ table.name }}( + _In_ const sai_{{ table.name }}_t *{{ table.name }}, + _In_ uint32_t attr_count, + _In_ const sai_attribute_t *attr_list) { + p4::v1::TableEntry * matchActionEntry = new p4::v1::TableEntry(); + pi_p4_id_t tableId = {{table.id}}; + matchActionEntry->set_table_id(tableId); + auto tableEntry = {{ table.name }}; + // There shall be one and only one action_type + auto entry = matchActionEntry->mutable_action(); + auto action = entry->mutable_action(); + auto expectedParams = 0; + auto matchedParams = 0; + pi_p4_id_t actionId; + grpc::StatusCode retCode; + + {% for key in table['keys'] %} + { + auto mf = matchActionEntry->add_match(); + mf->set_field_id({{key.id}}); + {% if key.match_type == 'exact' %} + auto mf_exact = mf->mutable_exact(); + {{key.sai_key_field}}SetVal(tableEntry->{{ key.sai_key_name | lower }}, mf_exact, {{key.bitwidth}}); + {% elif key.match_type == 'lpm' %} + auto mf_lpm = mf->mutable_lpm(); + {{key.sai_lpm_field}}SetVal(tableEntry->{{ key.sai_key_name | lower }}, mf_lpm, {{key.bitwidth}}); + {% elif key.match_type == 'list' %} + assert(0 && "mutable_list is not supported"); + goto 
ErrRet; + // auto mf1_list = mf1->mutable_xxx(); + //{{key.sai_list_field}}SetVal(attr_list[i].value, mf1_list, {{key.bitwidth}}); + {% elif key.match_type == 'range_list' %} + assert(0 && "range_list is not supported"); + goto ErrRet; + // auto mf1_list = mf1->mutable_xxx(); + //{{key.sai_range_list_field}}SetVal(attr_list[i].value, mf1_list, {{key.bitwidth}}); + {% endif %} + // TODO: if it is ternary, need to set the mask + } + {% endfor %} + + + {% if table.actions|length == 1 %} + {% for action in table.actions %} + actionId = {{action.id}}; // SAI_{{ table.name | upper }}_ACTION_{{ action.name | upper }} + expectedParams = {{ action.params|length }}; + {% endfor %} + {% else %} + // Search the action + for (uint32_t i = 0; i < attr_count; i++) { + switch(attr_list[i].value.s32) { + {% for action in table.actions %} + case SAI_{{ table.name | upper }}_ACTION_{{ action.name | upper }}: { + actionId = {{action.id}}; + expectedParams = {{ action.params|length }}; + break; + } + {% endfor %} + } + // only one action + break; + } + {% endif %} + action->set_action_id(actionId); + + for (uint32_t i = 0; i < attr_count; i++) { + switch(attr_list[i].id) { + {% for action in table.actions %} + {% for param in action.params %} + case SAI_{{ table.name | upper }}_ATTR_{{ param.name | upper }}: { + auto param = action->add_params(); + param->set_param_id({{param.id}}); + {{param.field}}SetVal(attr_list[i].value, param, {{param.bitwidth}}); + matchedParams++; + break; + } + {% endfor %} + {% endfor %} + } + } + + assert((matchedParams == expectedParams)); + + if (matchedParams != expectedParams) { + goto ErrRet; + } + // TODO: ternaly needs to set priority + retCode = MutateTableEntry(matchActionEntry, p4::v1::Update_Type_INSERT); + if (grpc::StatusCode::OK == retCode) { + delete matchActionEntry; + return 0; + } +ErrRet: + delete matchActionEntry; + return -1; +} + +sai_status_t sai_remove_{{ table.name }}( + _In_ const sai_{{ table.name }}_t *{{ table.name }}) { + p4::v1::TableEntry * matchActionEntry = new p4::v1::TableEntry(); + pi_p4_id_t tableId = {{table.id}}; + matchActionEntry->set_table_id(tableId); + auto tableEntry = {{ table.name }}; + grpc::StatusCode retCode; + + {% for key in table['keys'] %} + { + auto mf = matchActionEntry->add_match(); + mf->set_field_id({{key.id}}); + {% if key.match_type == 'exact' %} + auto mf_exact = mf->mutable_exact(); + {{key.sai_key_field}}SetVal(tableEntry->{{ key.sai_key_name | lower }}, mf_exact, {{key.bitwidth}}); + {% elif key.match_type == 'lpm' %} + auto mf_lpm = mf->mutable_lpm(); + {{key.sai_lpm_field}}SetVal(tableEntry->{{ key.sai_key_name | lower }}, mf_lpm, {{key.bitwidth}}); + {% elif key.match_type == 'list' %} + assert(0 && "mutable_list is not supported"); + goto ErrRet; + // auto mf1_list = mf1->mutable_xxx(); + //{{key.sai_list_field}}SetVal(attr_list[i].value, mf1_list, {{key.bitwidth}}); + {% elif key.match_type == 'range_list' %} + assert(0 && "range_list is not supported"); + goto ErrRet; + // auto mf1_list = mf1->mutable_xxx(); + //{{key.sai_range_list_field}}SetVal(attr_list[i].value, mf1_list, {{key.bitwidth}}); + {% endif %} + // TODO: if it is ternary, need to set the mask + } + {% endfor %} + + retCode = MutateTableEntry(matchActionEntry, p4::v1::Update_Type_DELETE); + if (grpc::StatusCode::OK != retCode) { + delete matchActionEntry; + return 0; + } + +ErrRet: + + delete matchActionEntry; + return -1; +} + +sai_status_t sai_set_{{ table.name }}_attribute( + _In_ const sai_{{ table.name }}_t *{{ table.name }}, + _In_ const 
sai_attribute_t *attr) { + assert(0 && "sai_set_{{ table.name }}_attribute NYI"); + return -1; +} + +sai_status_t sai_get_{{ table.name }}_attribute( + _In_ const sai_{{ table.name }}_t *{{ table.name }}, + _In_ uint32_t attr_count, + _Inout_ sai_attribute_t *attr_list) { + assert(0 && "sai_get_{{ table.name }}_attribute NYI"); + return -1; +} +{% endif %} +{% endfor %} + +static sai_{{ app_name }}_api_t sai_{{app_name }}_api_impl = { +{% for table in tables %} + .create_{{ table.name }} = sai_create_{{ table.name }}, + .remove_{{ table.name }} = sai_remove_{{ table.name }}, + .set_{{ table.name }}_attribute = sai_set_{{ table.name }}_attribute, + .get_{{ table.name }}_attribute = sai_get_{{ table.name }}_attribute, +{% endfor %} +}; diff --git a/SAI/saiapi.h.j2 b/sirius-pipeline/SAI/templates/saiapi.h.j2 similarity index 83% rename from SAI/saiapi.h.j2 rename to sirius-pipeline/SAI/templates/saiapi.h.j2 index 70b9f995b..56adc34d2 100644 --- a/SAI/saiapi.h.j2 +++ b/sirius-pipeline/SAI/templates/saiapi.h.j2 @@ -17,13 +17,13 @@ * assistance with these files: Intel Corporation, Mellanox Technologies Ltd, * Dell Products, L.P., Facebook, Inc., Marvell International Ltd. * - * @file saiexperimental{{ sai_api.app_name }}.h + * @file saiexperimental{{ sai_api.app_name | replace('_', '') }}.h * * @brief This module defines SAI P4 extension interface */ -#if !defined (__SAIEXPERIMENTAL{{ sai_api.app_name | upper }}_H_) -#define __SAIEXPERIMENTAL{{ sai_api.app_name | upper}}_H_ +#if !defined (__SAIEXPERIMENTAL{{ sai_api.app_name | replace('_', '') | upper }}_H_) +#define __SAIEXPERIMENTAL{{ sai_api.app_name | replace('_', '') | upper}}_H_ #include @@ -78,15 +78,19 @@ typedef struct _sai_{{ table.name }}_t {% for key in table['keys'] %} /** * @brief {{ key.match_type | capitalize | replace('Lpm', 'LPM') }} matched key {{ key.sai_key_name }} +{% if key.sai_key_type == 'sai_object_id_t' %} + * + * @objects SAI_OBJECT_TYPE_{{ key.sai_key_name | replace('_id', '') | upper }} +{% endif %} */ {% if key.match_type == 'lpm' %} - {{ key.sai_lpm_type }} {{ key.sai_key_name }}; + {{ key.sai_lpm_type }} {{ key.sai_key_name | lower }}; {% elif key.match_type == 'list' %} - {{ key.sai_list_type }} {{ key.sai_key_name }}; + {{ key.sai_list_type }} {{ key.sai_key_name | lower }}; {% elif key.match_type == 'range_list' %} - {{ key.sai_range_list_type }} {{ key.sai_key_name }}; + {{ key.sai_range_list_type }} {{ key.sai_key_name | lower }}; {% else %} - {{ key.sai_key_type }} {{ key.sai_key_name }}; + {{ key.sai_key_type }} {{ key.sai_key_name | lower }}; {% endif %} {% endfor %} @@ -94,7 +98,7 @@ typedef struct _sai_{{ table.name }}_t {% endif %} /** - * @brief Attribute ID for {{ table.name }} + * @brief Attribute ID for {{ sai_api.app_name }}_{{ table.name }} */ typedef enum _sai_{{ table.name }}_attr_t { @@ -133,6 +137,9 @@ typedef enum _sai_{{ table.name }}_attr_t * @flags MANDATORY_ON_CREATE | CREATE_ONLY {% if key.sai_key_type == 'sai_uint16_t' %} * @isvlan false +{% endif %} +{% if key.sai_key_type == 'sai_object_id_t' %} + * @objects SAI_OBJECT_TYPE_{{ key.sai_key_name | replace('_id', '') | upper }} {% endif %} */ {% if not ns.firstattr %} @@ -163,6 +170,10 @@ typedef enum _sai_{{ table.name }}_attr_t * @default 0:0:0:0:0:0 {% elif param.type == 'bool' %} * @default false +{% elif param.type == 'sai_object_id_t' %} + * @objects SAI_OBJECT_TYPE_{{ param.name | replace('_id', '') | upper }} + * @allownull true + * @default SAI_NULL_OBJECT_ID {% else %} * @default 0 {% endif %} @@ -186,6 +197,33 @@ typedef enum 
_sai_{{ table.name }}_attr_t */ SAI_{{ table.name | upper }}_ATTR_STAGE, +{% endif %} +{% if table.with_counters == 'true' %} + /** + * @brief Attach a counter + * + * When it is empty, then packet hits won't be counted + * + * @type sai_object_id_t + * @flags CREATE_AND_SET + * @objects SAI_OBJECT_TYPE_COUNTER + * @allownull true + * @default SAI_NULL_OBJECT_ID + */ + SAI_{{ table.name | upper }}_ATTR_COUNTER_ID, + +{% endif %} +{% if table['keys'] | selectattr('match_type', 'ne', 'exact') | list | length > 0 %} +{% if table['keys'] | selectattr('match_type', 'eq', 'lpm') | list | length == 0 %} + /** + * @brief Rule priority in table + * + * @type sai_uint32_t + * @flags MANDATORY_ON_CREATE | CREATE_ONLY + */ + SAI_{{ table.name | upper }}_ATTR_PRIORITY, + +{% endif %} {% endif %} /** * @brief End of attributes @@ -203,7 +241,7 @@ typedef enum _sai_{{ table.name }}_attr_t {% endfor %} {% for table in sai_api.tables %} /** - * @brief Create {{ table.name }} + * @brief Create {{ sai_api.app_name }}_{{ table.name }} * {% if table.is_object == 'true' %} * @param[out] {{ table.name }}_id Entry id @@ -227,7 +265,7 @@ typedef sai_status_t (*sai_create_{{ table.name }}_fn)( _In_ const sai_attribute_t *attr_list); /** - * @brief Remove {{ table.name }} + * @brief Remove {{ sai_api.app_name }}_{{ table.name }} * {% if table.is_object == 'true' %} * @param[in] {{ table.name }}_id Entry id @@ -245,7 +283,7 @@ typedef sai_status_t (*sai_remove_{{ table.name }}_fn)( {% endif %} /** - * @brief Set attribute for {{ table.name }} + * @brief Set attribute for {{ sai_api.app_name }}_{{ table.name }} * {% if table.is_object == 'true' %} * @param[in] {{ table.name }}_id Entry id @@ -265,7 +303,7 @@ typedef sai_status_t (*sai_set_{{ table.name }}_attribute_fn)( _In_ const sai_attribute_t *attr); /** - * @brief Get attribute for {{ table.name }} + * @brief Get attribute for {{ sai_api.app_name }}_{{ table.name }} * {% if table.is_object == 'true' %} * @param[in] {{ table.name }}_id Entry id @@ -288,7 +326,7 @@ typedef sai_status_t (*sai_get_{{ table.name }}_attribute_fn)( {% if table.is_object == 'false' %} /** - * @brief Bulk create {{ table.name }} + * @brief Bulk create {{ sai_api.app_name }}_{{ table.name }} * * @param[in] object_count Number of objects to create * @param[in] {{ table.name }} List of object to create @@ -313,7 +351,7 @@ typedef sai_status_t (*sai_bulk_create_{{ table.name }}_fn)( _Out_ sai_status_t *object_statuses); /** - * @brief Bulk remove {{ table.name }} + * @brief Bulk remove {{ sai_api.app_name }}_{{ table.name }} * * @param[in] object_count Number of objects to remove * @param[in] {{ table.name }} List of objects to remove @@ -350,11 +388,12 @@ typedef struct _sai_{{ sai_api.app_name }}_api_t sai_bulk_create_{{ table.name }}_fn {{ " " * space_offset }}create_{{ table.name | replace("entry", "entries") }}; sai_bulk_remove_{{ table.name }}_fn {{ " " * space_offset }}remove_{{ table.name | replace("entry", "entries") }}; {% endif %} + {% endfor %} } sai_{{ sai_api.app_name }}_api_t; /** * @} */ -#endif /** __SAIEXPERIMENTAL{{ sai_api.app_name | upper}}_H_ */ +#endif /** __SAIEXPERIMENTAL{{ sai_api.app_name | replace('_', '') | upper}}_H_ */ diff --git a/sirius-pipeline/SAI/templates/utils.cpp.j2 b/sirius-pipeline/SAI/templates/utils.cpp.j2 new file mode 100644 index 000000000..cc492cebf --- /dev/null +++ b/sirius-pipeline/SAI/templates/utils.cpp.j2 @@ -0,0 +1,181 @@ +#include +#include +#include +#include +#include +#include +#include +#include "p4/v1/p4runtime.grpc.pb.h" +#include 
"utils.h" +#include "saitypes.h" +#include +#include +#include + +#if __APPLE__ +#include +#else +#include +#include +#endif + +#define LOG(x) std::cerr< _grpcChannel; +static const grpc::string _grpcTarget = "localhost:9876"; +static int deviceId; +static unordered_map tableEntryMap; +static std::mutex tableLock; +static atomic nextId; +static std::unique_ptr stub; + +int leadingNonZeroBits(const uint32_t ipv4) { + auto firstSetBit = __builtin_ffs(ipv4); + if (0==firstSetBit) { + return 0; + } + return 33 - firstSetBit; +} + +int leadingNonZeroBits(const sai_ip6_t ipv6) { + int trailingZeros = 0; + for (int i = 0; i < 16; i+=4) { + auto num = static_cast(ipv6[i]) + + (static_cast(ipv6[i+1]) << 8) + + (static_cast(ipv6[i+2]) << 16) + + (static_cast(ipv6[i+3]) << 24); + auto firstSetBit = leadingNonZeroBits(num); + if (firstSetBit > 0) { + return 129-trailingZeros-firstSetBit; + } + trailingZeros += 32; + } + return 0; +} + +p4::config::v1::P4Info parse_p4info(const char *path) { + p4::config::v1::P4Info p4info; + std::ifstream istream(path); + assert(istream.good()); + google::protobuf::io::IstreamInputStream istream_(&istream); + google::protobuf::TextFormat::Parse(&istream_, &p4info); + return p4info; +} + +__attribute__((constructor)) +static void Init() { + nextId = 0; + const grpc::string _grpcTarget = "0.0.0.0:9559"; + char test_json[] = "/etc/dash/sirius_pipeline.json"; + char test_proto_json[] = "/etc/dash/sirius_pipeline_p4rt.txt"; + int dev_id = 0; + + auto p4info = parse_p4info(test_proto_json); + + auto set_election_id = [](p4::v1::Uint128 *election_id) { + election_id->set_high(0); + election_id->set_low(1); + }; + + grpc::ClientContext stream_context; + _grpcChannel = grpc::CreateChannel(_grpcTarget, grpc::InsecureChannelCredentials()); + stub = p4::v1::P4Runtime::NewStub(_grpcChannel); + auto stream = stub->StreamChannel(&stream_context); + { + p4::v1::StreamMessageRequest request; + auto arbitration = request.mutable_arbitration(); + arbitration->set_device_id(dev_id); + set_election_id(arbitration->mutable_election_id()); + stream->Write(request); + p4::v1::StreamMessageResponse response; + stream->Read(&response); + assert(response.update_case() == p4::v1::StreamMessageResponse::kArbitration); + assert(response.arbitration().status().code() == ::google::rpc::Code::OK); + } + { + p4::v1::SetForwardingPipelineConfigRequest request; + request.set_device_id(dev_id); + request.set_action( + p4::v1::SetForwardingPipelineConfigRequest_Action_VERIFY_AND_COMMIT); + set_election_id(request.mutable_election_id()); + auto config = request.mutable_config(); + config->set_allocated_p4info(&p4info); + std::ifstream istream(test_json); + config->mutable_p4_device_config()->assign( + (std::istreambuf_iterator(istream)), + std::istreambuf_iterator()); + + p4::v1::SetForwardingPipelineConfigResponse rep; + grpc::ClientContext context; + auto status = stub->SetForwardingPipelineConfig( + &context, request, &rep); + assert(status.ok()); + config->release_p4info(); + } +} + +int GetDeviceId() { + return deviceId; +} + +grpc::StatusCode MutateTableEntry(p4::v1::TableEntry *entry, p4::v1::Update_Type updateType) { + p4::v1::WriteRequest request; + request.set_device_id(GetDeviceId()); + auto update = request.add_updates(); + update->set_type(updateType); + auto entity = update->mutable_entity(); + entity->set_allocated_table_entry(entry); + + p4::v1::WriteResponse rep; + grpc::ClientContext context; + grpc::Status status = stub->Write(&context, request, &rep); + if (status.ok()) { + LOG("GRPC 
call Write::add_one_entry OK: "); + } + else { + LOG("GRPC ERROR["<< status.error_code() <<"]: " << status.error_message() << ", " << status.error_details()); + LOG("GRPC call Write::add_one_entry ERROR: " << std::endl << entry->ShortDebugString()); + } + //MILIND?? What is this? reference release? memory release? + entity->release_table_entry(); + return status.error_code(); +} + +bool InsertInTable(p4::v1::TableEntry *entry, sai_object_id_t *objId) { + auto retCode = MutateTableEntry(entry, p4::v1::Update_Type_INSERT); + if (grpc::StatusCode::OK != retCode) { + delete entry; + return false; + } + + tableLock.lock(); + *objId = nextId++; + tableEntryMap[*objId] = entry; + tableLock.unlock(); + return true; +} + +bool RemoveFromTable(sai_object_id_t id) { + tableLock.lock(); + auto itr = tableEntryMap.find(id); + if (itr == tableEntryMap.end()) { + tableLock.unlock(); + LOG("id: " << id << " not present in the table for deletion!" <second; + auto retCode = MutateTableEntry(entry, p4::v1::Update_Type_DELETE); + if (grpc::StatusCode::OK != retCode) { + tableLock.unlock(); + return false; + } + + tableEntryMap.erase(itr); + tableLock.unlock(); + delete entry; + return true; +} + diff --git a/sirius-pipeline/SAI/templates/utils.h.j2 b/sirius-pipeline/SAI/templates/utils.h.j2 new file mode 100644 index 000000000..a13ba8c56 --- /dev/null +++ b/sirius-pipeline/SAI/templates/utils.h.j2 @@ -0,0 +1,227 @@ +#ifndef __UTILS_H__ +#define __UTILS_H__ + +#include +#include +#include +#include +#include +#include +#include +#include "p4/v1/p4runtime.grpc.pb.h" +#include "saitypes.h" +#include +#include +#include + +#if __APPLE__ +#include +#else +#include +#include +#endif + + +template +void booldataSetVal(const sai_attribute_value_t &value, T &t, int bits = 8){ + assert(bits <= 8); + t->set_value(const_cast(&value.booldata), 1); +} + +template +void booldataSetVal(const bool &value, T &t, int bits = 8){ + assert(bits <= 8); + t->set_value(const_cast(&value), 1); +} + +template +void u8SetVal(const sai_attribute_value_t &value, T &t, int bits = 8){ + assert(bits <= 8); + t->set_value(const_cast(&value.u8), 1); +} + +template +void u8SetVal(const sai_uint8_t &value, T &t, int bits = 8){ + assert(bits <= 8); + t->set_value(const_cast(&value), 1); +} + +template +void u16SetVal(const sai_attribute_value_t &value, T &t, int bits = 16){ + assert(bits <= 16); + uint16_t val = value.u16; + val = htons(val); + t->set_value(&val, 2);} + +template +void u16SetVal(const sai_uint16_t &value, T &t, int bits = 16){ + assert(bits <= 16); + uint16_t val = value; + val = htons(val); + t->set_value(&val, 2); +} + +template +void u32SetVal(const sai_attribute_value_t &value, T &t, int bits = 32){ + assert(bits <= 32); + uint32_t val = value.u32; + val = htons(val); + val = val >> (32 - bits); + int bytes = (bits + 7) / 8; + t->set_value(&val, bytes); +} + +template +void u32SetVal(const sai_uint32_t &value, T &t, int bits = 32){ + assert(bits <= 32); + uint32_t val = value; + val = htons(val); + val = val >> (32 - bits); + int bytes = (bits + 7) / 8; + t->set_value(&val, bytes); +} + +template +void u64SetVal(const sai_attribute_value_t &value, T &t, int bits = 64){ + assert(bits <= 64); + uint64_t val = value.u64; + if (*reinterpret_cast("\0\x01") == 0) { // Little Endian + const uint32_t high_part = htonl(static_cast(val >> 32)); + const uint32_t low_part = htonl(static_cast(val & 0xFFFFFFFFLL)); + val = (static_cast(low_part) << 32) | high_part; + val = val >> (64-bits); + } + int bytes = (bits + 7) / 8; + 
t->set_value(&val, bytes);} + +template +void u64SetVal(const sai_uint64_t &value, T &t, int bits = 64) { + assert(bits <= 64); + uint64_t val = value; + if (*reinterpret_cast("\0\x01") == 0) { // Little Endian + const uint32_t high_part = htonl(static_cast(val >> 32)); + const uint32_t low_part = htonl(static_cast(val & 0xFFFFFFFFLL)); + val = (static_cast(low_part) << 32) | high_part; + val = val >> (64-bits); + } + int bytes = (bits + 7) / 8; + t->set_value(&val, bytes); +} + +template +void ipaddrSetVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + ipaddrSetVal(value.ipaddr, t); +} + +template +void ipaddrSetVal(const sai_ip_address_t &value, T &t, int bits = -1){ + switch(value.addr_family) { + case SAI_IP_ADDR_FAMILY_IPV4: { + uint32_t val = value.addr.ip4; + val = htonl(val); + t->set_value(&val, 4); + } + break; + case SAI_IP_ADDR_FAMILY_IPV6: { + t->set_value(const_cast(&value.addr.ip6[0]), 16); + } + break; + default: assert(0 && "unrecognzed value.ipaddr.addr_family"); + } +} + +template +void macSetVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + t->set_value(const_cast(&value.mac[0]), 6); +} + +template +void macSetVal(const sai_mac_t &value, T &t, int bits = -1){ + t->set_value(const_cast(&value[0]), 6); +} + +int leadingNonZeroBits(const uint32_t ipv4); + +int leadingNonZeroBits(const sai_ip6_t ipv6); + +template +void ipPrefixSetVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + ipPrefixSetVal(value.ipprefix, t); +} + +template +void ipPrefixSetVal(const sai_ip_prefix_t &value, T &t, int bits = -1){ + switch(value.addr_family) { + case SAI_IP_ADDR_FAMILY_IPV4: { + uint32_t val = value.addr.ip4; + val = htonl(val); + t->set_value(&val, 4); + t->set_prefix_len(leadingNonZeroBits(value.addr.ip4)); + } + break; + case SAI_IP_ADDR_FAMILY_IPV6: { + t->set_value(const_cast(&value.addr.ip6[0]), 16); + t->set_prefix_len(leadingNonZeroBits(value.addr.ip6)); + } + break; + default: assert(0 && "unrecognzed value.ipaddr.addr_family"); + } +} + +template +void u8listSetVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + assert (0 && "NYI"); +} + +template +void u16listVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + assert (0 && "NYI"); +} + +template +void u32listSetVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + assert (0 && "NYI"); +} + +template +void u64listSetVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + assert (0 && "NYI"); +} + +template +void ipaddrlistSetVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + assert (0 && "NYI"); +} + +template +void u8rangelistSetVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + assert (0 && "NYI"); +} + +template +void u16rangelistVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + assert (0 && "NYI"); +} + +template +void u32rangelistSetVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + assert (0 && "NYI"); +} + +template +void u64rangelistSetVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + assert (0 && "NYI"); +} + +template +void ipaddrrangelistSetVal(const sai_attribute_value_t &value, T &t, int bits = -1){ + assert (0 && "NYI"); +} + +grpc::StatusCode MutateTableEntry(p4::v1::TableEntry *entry, p4::v1::Update_Type updateType); + +bool InsertInTable(p4::v1::TableEntry *entry, sai_object_id_t *objId); + +bool RemoveFromTable(sai_object_id_t id); + +#endif diff --git a/sirius-pipeline/bmv2/sirius_acl.p4 b/sirius-pipeline/bmv2/sirius_acl.p4 index 67c5210e8..ffec8ce5d 100644 --- 
+++ b/sirius-pipeline/bmv2/sirius_acl.p4
@@ -13,16 +13,27 @@ match_kind {
     range_list
 }
 
+#ifdef DASH_MATCH
+#define LIST_MATCH list
+#define RANGE_LIST_MATCH range_list
+#else
+#define LIST_MATCH exact
+#define RANGE_LIST_MATCH exact
+#endif
+
+#define str(name) #name
+
 #define ACL_STAGE(table_name) \
     direct_counter(CounterType.packets_and_bytes) ## table_name ##_counter; \
+    @name(str(table_name##:dash_acl|dash_acl)) \
     table table_name { \
         key = { \
-            meta.eni : exact @name("meta.eni:eni"); \
-            hdr.ipv4.dst_addr : list @name("hdr.ipv4.dst_addr:dip"); \
-            hdr.ipv4.src_addr : list @name("hdr.ipv4.src_addr:sip"); \
-            hdr.ipv4.protocol : list @name("hdr.ipv4.src_addr:protocol"); \
-            hdr.tcp.src_port : range_list @name("hdr.tcp.src_port:sport"); \
-            hdr.tcp.dst_port : range_list @name("hdr.tcp.dst_port:dport"); \
+            meta.eni_id : exact @name("meta.eni_id:eni_id"); \
+            hdr.ipv4.dst_addr : LIST_MATCH @name("hdr.ipv4.dst_addr:dip"); \
+            hdr.ipv4.src_addr : LIST_MATCH @name("hdr.ipv4.src_addr:sip"); \
+            hdr.ipv4.protocol : LIST_MATCH @name("hdr.ipv4.protocol:protocol"); \
+            hdr.tcp.src_port : RANGE_LIST_MATCH @name("hdr.tcp.src_port:src_port"); \
+            hdr.tcp.dst_port : RANGE_LIST_MATCH @name("hdr.tcp.dst_port:dst_port"); \
         } \
         actions = { \
             permit; \
diff --git a/sirius-pipeline/bmv2/sirius_inbound.p4 b/sirius-pipeline/bmv2/sirius_inbound.p4
index 7d0677112..aed77ad95 100644
--- a/sirius-pipeline/bmv2/sirius_inbound.p4
+++ b/sirius-pipeline/bmv2/sirius_inbound.p4
@@ -19,13 +19,14 @@ control inbound(inout headers_t hdr,
         meta.encap_data.vni = vni;
     }
 
-    action set_vm_id(bit<16> vm_id) {
-        meta.vm_id = vm_id;
+    action set_vm_id(bit<16> inbound_vm_id) {
+        meta.inbound_vm_id = inbound_vm_id;
     }
 
+    @name("eni_to_vm|dash_vnet")
     table eni_to_vm {
         key = {
-            meta.eni: exact @name("meta.eni:eni");
+            meta.eni_id: exact @name("meta.eni_id:eni_id");
         }
 
         actions = {
@@ -33,9 +34,10 @@ control inbound(inout headers_t hdr,
         }
     }
 
+    @name("vm|dash_vnet")
     table vm {
         key = {
-            meta.vm_id: exact @name("meta.vm_id:vm_id");
+            meta.inbound_vm_id: exact @name("meta.inbound_vm_id:inbound_vm_id");
         }
 
         actions = {
diff --git a/sirius-pipeline/bmv2/sirius_metadata.p4 b/sirius-pipeline/bmv2/sirius_metadata.p4
index a5773d8ec..d81249b92 100644
--- a/sirius-pipeline/bmv2/sirius_metadata.p4
+++ b/sirius-pipeline/bmv2/sirius_metadata.p4
@@ -13,10 +13,10 @@ struct encap_data_t {
     EthernetAddress overlay_dmac;
 }
 
-enum direction_t {
-    INVALID,
-    OUTBOUND,
-    INBOUND
+enum bit<16> direction_t {
+    INVALID = 0,
+    OUTBOUND = 1,
+    INBOUND = 2
 }
 
 struct conntrack_data_t {
@@ -24,13 +24,20 @@
     bool allow_out;
 }
 
+struct eni_data_t {
+    bit<32> cps;
+    bit<32> pps;
+    bit<32> flows;
+}
+
 struct metadata_t {
     bool dropped;
     direction_t direction;
     encap_data_t encap_data;
     EthernetAddress eni_addr;
-    bit<16> eni;
-    bit<16> vm_id;
+    bit<16> eni_id;
+    eni_data_t eni_data;
+    bit<16> inbound_vm_id;
     bit<8> appliance_id;
     bit<1> is_dst_ip_v6;
     IPv4ORv6Address dst_ip_addr;
diff --git a/sirius-pipeline/bmv2/sirius_outbound.p4 b/sirius-pipeline/bmv2/sirius_outbound.p4
index 70d300a57..4e6563941 100644
--- a/sirius-pipeline/bmv2/sirius_outbound.p4
+++ b/sirius-pipeline/bmv2/sirius_outbound.p4
@@ -13,9 +13,10 @@ control outbound(inout headers_t hdr,
         meta.encap_data.vni = vni;
     }
 
+    @name("eni_to_vni|dash_vnet")
     table eni_to_vni {
         key = {
-            meta.eni : exact @name("meta.eni:eni");
+            meta.eni_id : exact @name("meta.eni_id:eni_id");
         }
 
         actions = {
@@ -29,9 +30,10 @@ control outbound(inout headers_t hdr,
     direct_counter(CounterType.packets_and_bytes) routing_counter;
 
+    @name("routing|dash_vnet")
     table routing {
         key = {
-            meta.eni : exact @name("meta.eni:eni");
+            meta.eni_id : exact @name("meta.eni_id:eni_id");
             meta.is_dst_ip_v6 : exact @name("meta.is_dst_ip_v6:v4_or_v6");
             meta.dst_ip_addr : lpm @name("meta.dst_ip_addr:destination");
         }
@@ -59,6 +61,7 @@ control outbound(inout headers_t hdr,
 
     direct_counter(CounterType.packets_and_bytes) ca_to_pa_counter;
 
+    @name("ca_to_pa|dash_vnet")
     table ca_to_pa {
         key = {
             /* Flow for express route */
diff --git a/sirius-pipeline/bmv2/sirius_pipeline.p4 b/sirius-pipeline/bmv2/sirius_pipeline.p4
index 19bfcd93f..aab760d2f 100644
--- a/sirius-pipeline/bmv2/sirius_pipeline.p4
+++ b/sirius-pipeline/bmv2/sirius_pipeline.p4
@@ -28,17 +28,40 @@ control sirius_ingress(inout headers_t hdr,
         mark_to_drop(standard_metadata);
     }
 
-    action set_direction(direction_t direction) {
-        meta.direction = direction;
+    action deny() {
+        meta.dropped = true;
     }
 
+    action accept() {
+    }
+
+    @name("vip|dash")
+    table vip {
+        key = {
+            hdr.ipv4.dst_addr : exact @name("hdr.ipv4.dst_addr:VIP");
+        }
+
+        actions = {
+            accept;
+            deny;
+        }
+
+        const default_action = deny;
+    }
+
+    action set_outbound_direction() {
+        meta.direction = direction_t.OUTBOUND;
+    }
+
+    @name("direction_lookup|dash")
     table direction_lookup {
         key = {
-            hdr.vxlan.vni : exact @name("hdr.vxlan.vni:vni");
+            hdr.vxlan.vni : exact @name("hdr.vxlan.vni:VNI");
        }
 
        actions = {
-            set_direction;
+            set_outbound_direction;
+            deny;
        }
     }
 
@@ -60,11 +83,30 @@ control sirius_ingress(inout headers_t hdr,
         }
     }
 
+    action set_eni_attrs(bit<32> cps,
+                         bit<32> pps,
+                         bit<32> flows) {
+        meta.eni_data.cps = cps;
+        meta.eni_data.pps = pps;
+        meta.eni_data.flows = flows;
+    }
+
+    @name("eni|dash")
+    table eni {
+        key = {
+            meta.eni_id : exact @name("meta.eni_id:eni_id");
+        }
+
+        actions = {
+            set_eni_attrs;
+        }
+    }
+
     direct_counter(CounterType.packets_and_bytes) eni_counter;
 
     table eni_meter {
         key = {
-            meta.eni : exact @name("meta.eni:eni");
+            meta.eni_id : exact @name("meta.eni_id:eni_id");
             meta.direction : exact @name("meta.direction:direction");
             meta.dropped : exact @name("meta.dropped:dropped");
         }
@@ -78,26 +120,43 @@ control sirius_ingress(inout headers_t hdr,
         meta.dropped = false;
     }
 
-    action deny() {
-        meta.dropped = true;
+    action vxlan_decap_pa_validate() {}
+
+    @name("pa_validation|dash_vnet")
+    table pa_validation {
+        key = {
+            meta.eni_id: exact @name("meta.eni_id:eni_id");
+            hdr.ipv4.src_addr : exact @name("hdr.ipv4.src_addr:sip");
+            hdr.vxlan.vni : exact @name("hdr.vxlan.vni:VNI");
+        }
+
+        actions = {
+            permit;
+            @defaultonly deny;
+        }
+
+        const default_action = deny;
     }
 
+    @name("inbound_routing|dash_vnet")
     table inbound_routing {
         key = {
-            hdr.vxlan.vni : exact @name("hdr.vxlan.vni:vni");
+            hdr.vxlan.vni : exact @name("hdr.vxlan.vni:VNI");
         }
         actions = {
             vxlan_decap(hdr);
+            vxlan_decap_pa_validate;
             @defaultonly deny;
         }
 
         const default_action = deny;
     }
 
-    action set_eni(bit<16> eni) {
-        meta.eni = eni;
+    action set_eni(bit<16> eni_id) {
+        meta.eni_id = eni_id;
     }
 
+    @name("eni_ether_address_map|dash")
     table eni_ether_address_map {
         key = {
             meta.eni_addr : exact @name("meta.eni_addr:address");
@@ -109,6 +168,12 @@ control sirius_ingress(inout headers_t hdr,
     }
 
     apply {
+        vip.apply();
+        if (meta.dropped) {
+            return;
+        }
+
+        meta.direction = direction_t.INBOUND;
         direction_lookup.apply();
 
         appliance.apply();
@@ -118,7 +183,12 @@ control sirius_ingress(inout headers_t hdr,
         if (meta.direction == direction_t.OUTBOUND) {
             vxlan_decap(hdr);
         } else if (meta.direction == direction_t.INBOUND) {
-            inbound_routing.apply();
+            switch (inbound_routing.apply().action_run) {
+                vxlan_decap_pa_validate: {
+                    pa_validation.apply();
+                    vxlan_decap(hdr);
+                }
+            }
         }
 
         meta.dst_ip_addr = 0;
@@ -137,6 +207,7 @@ control sirius_ingress(inout headers_t hdr,
             hdr.ethernet.src_addr : hdr.ethernet.dst_addr;
 
         eni_ether_address_map.apply();
+        eni.apply();
 
         if (meta.direction == direction_t.OUTBOUND) {
             outbound.apply(hdr, meta, standard_metadata);
diff --git a/sirius-pipeline/tests/vnet_out/Makefile b/sirius-pipeline/tests/vnet_out/Makefile
new file mode 100644
index 000000000..3091c3f01
--- /dev/null
+++ b/sirius-pipeline/tests/vnet_out/Makefile
@@ -0,0 +1,24 @@
+vnet_out: vnet_out.cpp
+	g++ \
+	-I /SAI/SAI/inc \
+	-I /SAI/SAI/experimental/ \
+	-o vnet_out \
+	vnet_out.cpp \
+	-Wl,-rpath,/SAI/lib \
+	-L/SAI/lib/ \
+	-lsai \
+	-L/usr/local/lib/ \
+	-lpthread \
+	-lpiprotogrpc \
+	-lpiprotobuf \
+	-lprotobuf \
+	-lgrpc++ \
+	-lgrpc \
+	-lpiall \
+	-lpi_dummy \
+	-lpthread \
+	-labsl_synchronization \
+	-g
+
+clean:
+	rm -rf vnet_out
diff --git a/sirius-pipeline/tests/vnet_out/vnet_out.cpp b/sirius-pipeline/tests/vnet_out/vnet_out.cpp
new file mode 100644
index 000000000..9f80ec668
--- /dev/null
+++ b/sirius-pipeline/tests/vnet_out/vnet_out.cpp
@@ -0,0 +1,93 @@
+#include
+#include
+#include
+
+#include
+
+
+extern sai_status_t sai_create_direction_lookup_entry(
+    _In_ const sai_direction_lookup_entry_t *direction_lookup_entry,
+    _In_ uint32_t attr_count,
+    _In_ const sai_attribute_t *attr_list);
+
+extern sai_status_t sai_create_eni_ether_address_map_entry(
+    _In_ const sai_eni_ether_address_map_entry_t *outbound_eni_lookup_from_vm_entry,
+    _In_ uint32_t attr_count,
+    _In_ const sai_attribute_t *attr_list);
+
+extern sai_status_t sai_create_outbound_eni_to_vni_entry(
+    _In_ const sai_outbound_eni_to_vni_entry_t *outbound_eni_to_vni_entry,
+    _In_ uint32_t attr_count,
+    _In_ const sai_attribute_t *attr_list);
+
+extern sai_dash_api_t sai_dash_api_impl;
+
+int main(int argc, char **argv)
+{
+    sai_object_id_t switch_id = SAI_NULL_OBJECT_ID;
+    sai_attribute_t attr;
+    std::vector<sai_attribute_t> attrs;
+
+    sai_direction_lookup_entry_t dle = {};
+    dle.switch_id = switch_id;
+    dle.vni = 60;
+
+    attr.id = SAI_DIRECTION_LOOKUP_ENTRY_ATTR_ACTION;
+    attr.value.u32 = SAI_DIRECTION_LOOKUP_ENTRY_ACTION_SET_OUTBOUND_DIRECTION;
+    attrs.push_back(attr);
+
+    /* sai_status_t status = sai_dash_api_impl.create_direction_lookup_entry(&dle, attrs.size(), attrs.data()); */
+    sai_status_t status = sai_create_direction_lookup_entry(&dle, attrs.size(), attrs.data());
+    if (status != SAI_STATUS_SUCCESS)
+    {
+        std::cout << "Failed to create Direction Lookup Entry" << std::endl;
+        return 1;
+    }
+
+    attrs.clear();
+
+    sai_eni_ether_address_map_entry_t eam;
+    eam.switch_id = switch_id;
+    eam.address[0] = 0xaa;
+    eam.address[1] = 0xcc;
+    eam.address[2] = 0xcc;
+    eam.address[3] = 0xcc;
+    eam.address[4] = 0xcc;
+    eam.address[5] = 0xcc;
+
+    attr.id = SAI_ENI_ETHER_ADDRESS_MAP_ENTRY_ATTR_ENI_ID;
+    attr.value.u16 = 7;
+    attrs.push_back(attr);
+
+    status = sai_create_eni_ether_address_map_entry(&eam, attrs.size(), attrs.data());
+    if (status != SAI_STATUS_SUCCESS)
+    {
+        std::cout << "Failed to create ENI Lookup From VM" << std::endl;
+        return 1;
+    }
+
+    attrs.clear();
+
+    sai_outbound_eni_to_vni_entry_t e2v = {};
+    e2v.switch_id = switch_id;
+    e2v.eni_id = 7;
+
+    attr.id = SAI_OUTBOUND_ENI_TO_VNI_ENTRY_ATTR_VNI;
+    attr.value.u32 = 9;
+    attrs.push_back(attr);
+
+    /* status = sai_dash_api_impl.create_outbound_eni_to_vni_entry(&e2v, attrs.size(), attrs.data()); */
+    status = sai_create_outbound_eni_to_vni_entry(&e2v, attrs.size(), attrs.data());
+    if (status != SAI_STATUS_SUCCESS)
+    {
+        std::cout << "Failed to create ENI To VNI" << std::endl;
+        return 1;
+    }
+
+    attrs.clear();
+
+
+    std::cout << "Done." << std::endl;
+
+    return 0;
+}
diff --git a/slides/DASH_P4_CI_Testing_2022-06-08.pdf b/slides/DASH_P4_CI_Testing_2022-06-08.pdf
new file mode 100644
index 000000000..56e73366a
Binary files /dev/null and b/slides/DASH_P4_CI_Testing_2022-06-08.pdf differ
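
For reference, the headers the vnet_out.cpp test needs in order to compile are roughly the following. The exact names, in particular the generated experimental DASH header, are assumptions inferred from the types the test uses (std::cout, std::vector, the core SAI types, and sai_dash_api_t); they are not something this patch pins down.

    // Presumed include set for a test like vnet_out.cpp (assumption, inferred from usage).
    #include <iostream>   // std::cout
    #include <vector>     // std::vector<sai_attribute_t>

    #include <sai.h>      // sai_attribute_t, sai_status_t, SAI_NULL_OBJECT_ID, ...
    // Generated experimental header declaring sai_direction_lookup_entry_t,
    // sai_dash_api_t, etc.; the file name below is an assumption:
    // #include <saiexperimentaldash.h>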
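To make the flow from the SAI surface down to bmv2 concrete: the sai_create_* calls exercised by vnet_out.cpp are expected to build a p4::v1::TableEntry, using setter helpers like the ones declared in the utils header above, and hand it to InsertInTable()/MutateTableEntry() for a P4Runtime write. The sketch below illustrates that translation for the direction_lookup entry only; the numeric table, field, and action IDs and the byte packing are placeholders for illustration, not values from this patch. The real IDs come from the P4Info that p4c emits for sirius_pipeline.p4.

    // Illustrative sketch only: the *_ID constants are made-up placeholders.
    #include <arpa/inet.h>               // htonl
    #include <cstdint>
    #include <p4/v1/p4runtime.pb.h>      // p4::v1::TableEntry

    constexpr uint32_t DIRECTION_LOOKUP_TABLE_ID        = 0x1000001; // "direction_lookup|dash" (assumed)
    constexpr uint32_t VNI_MATCH_FIELD_ID               = 1;         // "hdr.vxlan.vni:VNI" (assumed)
    constexpr uint32_t SET_OUTBOUND_DIRECTION_ACTION_ID = 0x2000001; // (assumed)

    // Roughly what a generated create_direction_lookup_entry() would do before
    // handing the entry to InsertInTable()/MutateTableEntry().
    p4::v1::TableEntry BuildDirectionLookupEntry(uint32_t vni)
    {
        p4::v1::TableEntry entry;
        entry.set_table_id(DIRECTION_LOOKUP_TABLE_ID);

        // Exact match on the 24-bit VXLAN VNI key, encoded big-endian.
        auto *match = entry.add_match();
        match->set_field_id(VNI_MATCH_FIELD_ID);
        uint32_t val = htonl(vni << 8);              // left-align 24 bits, network order
        match->mutable_exact()->set_value(&val, 3);  // first three bytes carry the VNI

        // Parameterless action, mirroring SAI_DIRECTION_LOOKUP_ENTRY_ACTION_SET_OUTBOUND_DIRECTION.
        entry.mutable_action()->mutable_action()->set_action_id(SET_OUTBOUND_DIRECTION_ACTION_ID);
        return entry;
    }

On the P4 side this corresponds to the hdr.vxlan.vni : exact @name("hdr.vxlan.vni:VNI") key and the set_outbound_direction action added to the direction_lookup table in sirius_pipeline.p4 above.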