diff --git a/README.md b/README.md
index d226d34..99c518c 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,8 @@ data from IBM Cloudant and write the data elsewhere.
These examples use Db2 Warehouse on Cloud as a destination.
**N.B. None of these examples are a direct replacement for the deprecated
-Cloudant warehouse integration.**
+Cloudant warehouse integration.
+Db2 Warehouse on Cloud requires a paid subscription.**
The examples can be used as a starting point to develop application specific
data flows, but will need to be modified to meet your application requirements.
@@ -21,8 +22,7 @@ For example care should be taken around handling:
1. [IBM Cloud Functions](./cloud-functions/README.md)
1. [Apache Spark on Watson Studio](./spark-on-watson-studio/README.md)
1. [Node-RED on IBM Cloud](./node-red/README.md)
-1. [IBM Message Hub](ibm-event-streams/README.md)
-1. [IBM Streams Designer (Beta) on Watson Studio](./streams-designer/README.md)
+1. [IBM Event Streams](./ibm-event-streams/README.md)
[Learn more](./warehousing-alternatives-compare-table.md) about the differences between these technologies for extracting Cloudant documents
and writing the data to a Db2 Warehouse on Cloud table.
diff --git a/streaming-analytics/create-streaming-analytics.png b/streaming-analytics/create-streaming-analytics.png
deleted file mode 100644
index 606119b..0000000
Binary files a/streaming-analytics/create-streaming-analytics.png and /dev/null differ
diff --git a/streaming-analytics/create.md b/streaming-analytics/create.md
deleted file mode 100644
index 1990641..0000000
--- a/streaming-analytics/create.md
+++ /dev/null
@@ -1,4 +0,0 @@
-# Creating an IBM Streaming Analytics instance
-1. Create an [IBM Streaming Analytics](https://console.bluemix.net/catalog/services/streaming-analytics) instance.
-
-
diff --git a/streaming-analytics/streaming-analytics-catalog.png b/streaming-analytics/streaming-analytics-catalog.png
deleted file mode 100644
index 47cfdec..0000000
Binary files a/streaming-analytics/streaming-analytics-catalog.png and /dev/null differ
diff --git a/streams-designer/Cloudant_to_Db2_streams_flow.stp b/streams-designer/Cloudant_to_Db2_streams_flow.stp
deleted file mode 100644
index 8d274b8..0000000
--- a/streams-designer/Cloudant_to_Db2_streams_flow.stp
+++ /dev/null
@@ -1 +0,0 @@
-{"metadata":{"guid":"264f8509-4ed6-41dc-9a31-70adbeadbdff","url":"/v2/streaming_pipelines/264f8509-4ed6-41dc-9a31-70adbeadbdff","created_at":"2018-03-23T20:02:17Z","updated_at":"2018-04-19T19:02:02Z","revision":1524164522903},"entity":{"name":"Cloudant to Db2 streams flow","description":"","project_guid":"9e53bd07-ee6d-4337-ad30-81191353bad7","graph":{"doc_type":"pipeline","version":"1.0","json_schema":"http://www.ibm.com/ibm/wdp/flow-v1.0/pipeline-flow-v1-schema.json","id":"","app_data":{"ui_data":{"name":"Cloudant to Db2 streams flow"}},"primary_pipeline":"primary-pipeline","pipelines":[{"id":"primary-pipeline","runtime":"streams","nodes":[{"id":"code_etw4bub2vtf","type":"binding","op":"ibm.streams.sources.code","outputs":[{"id":"target","schema_ref":"schema0","links":[{"node_id_ref":"dashdb_6srey7s6m8d","port_id_ref":"source"}]}],"parameters":{"code":"from cloudant import Cloudant\n\n\n# init() function will be called once on pipeline initialization\n# @state a Python dictionary object for keeping state. The state object is passed to the produce function\n\n\ndef init(state):\n # do something once on pipeline initialization and save in the state object\n pass\n\n\n# produce() function will be called when the job starts to run.\n# It is called on a background thread, and it will typically invoke the 'submit()' callback\n# whenever a tuple of data is ready to be emitted from this operator.\n# This allows for using asynchronous data services as well as synchronous data generation or retrieval.\n# @submit a Python callback function that takes one argument: a dictionary representing a single tuple.\n# @state a Python dictionary object for keeping state\n# You must declare all output attributes in the Edit Schema window.\ndef produce(submit, state):\n # Replace with your Cloudant credentials\n # Note: In general, 'account' is the username.\n client = Cloudant(\n 'username',\n 'password',\n account='account',\n connect=True\n )\n db = client['animaldb']\n feed = db.changes(feed='continuous', include_docs=True)\n for change in feed:\n doc = change['doc']\n # Submit the doc in each iteration:\n 
submit(doc)","schema_mapping":[{"name":"diet","type":"string","length":0,"source_elem_name":"","target_elem_name":""},{"name":"class","type":"string","length":0,"source_elem_name":"","target_elem_name":""},{"name":"wiki_page","type":"string","length":0,"source_elem_name":"","target_elem_name":""},{"name":"latin_name","type":"string","length":0,"source_elem_name":"","target_elem_name":""},{"name":"max_length","type":"double","length":0,"source_elem_name":"","target_elem_name":""},{"name":"min_length","type":"double","length":0,"source_elem_name":"","target_elem_name":""},{"name":"max_weight","type":"double","length":0,"source_elem_name":"","target_elem_name":""},{"name":"min_weight","type":"double","length":0,"source_elem_name":"","target_elem_name":""},{"name":"_deleted","type":"boolean","length":0,"source_elem_name":"","target_elem_name":""},{"name":"_rev","type":"string","length":0,"source_elem_name":"","target_elem_name":""},{"name":"_id","type":"string","length":0,"source_elem_name":""}]},"app_data":{"ui_data":{"label":"Code","x_pos":-570,"y_pos":160}}},{"id":"dashdb_6srey7s6m8d","type":"binding","op":"ibm.streams.targets.dashdb","parameters":{"schema_mapping":[{"name":"max_length","type":"double","length":0,"source_elem_name":"","target_elem_name":"max_length"},{"name":"min_length","type":"double","length":0,"source_elem_name":"","target_elem_name":"min_length"},{"name":"latin_name","type":"string","length":0,"source_elem_name":"","target_elem_name":"latin_name"},{"name":"_deleted","type":"boolean","length":0,"source_elem_name":"","target_elem_name":"_deleted"},{"name":"diet","type":"string","length":0,"target_elem_name":"diet"},{"name":"class","type":"string","length":0,"target_elem_name":"class"},{"name":"max_weight","type":"double","length":0,"target_elem_name":"max_weight"},{"name":"min_weight","type":"double","length":0,"target_elem_name":"min_weight"},{"name":"wiki_page","type":"string","length":0,"target_elem_name":"wiki_page"},{"name":"_rev","type":"string","length":0,"target_elem_name":"_rev"},{"name":"_id","type":"string","length":0,"target_elem_name":"_id"}]},"connection":{"ref":"91b3ebcd-6356-4285-90b0-23e302b5a193","project_ref":"9e53bd07-ee6d-4337-ad30-81191353bad7","properties":{"asset":{"path":"/DASH7751/ANIMALS","asset_types":[],"assets":[],"fields":[{"name":"_id","type":{"type":"varchar","length":1024,"scale":0,"nullable":true,"signed":false}},{"name":"_rev","type":{"type":"varchar","length":1024,"scale":0,"nullable":true,"signed":false}},{"name":"_deleted","type":{"type":"boolean","length":1,"scale":0,"nullable":true,"signed":false}},{"name":"min_weight","type":{"type":"double","length":53,"scale":0,"nullable":true,"signed":true}},{"name":"max_weight","type":{"type":"double","length":53,"scale":0,"nullable":true,"signed":true}},{"name":"min_length","type":{"type":"double","length":53,"scale":0,"nullable":true,"signed":true}},{"name":"max_length","type":{"type":"double","length":53,"scale":0,"nullable":true,"signed":true}},{"name":"latin_name","type":{"type":"varchar","length":1024,"scale":0,"nullable":true,"signed":false}},{"name":"wiki_page","type":{"type":"varchar","length":1024,"scale":0,"nullable":true,"signed":false}},{"name":"class","type":{"type":"varchar","length":1024,"scale":0,"nullable":true,"signed":false}},{"name":"diet","type":{"type":"varchar","length":1024,"scale":0,"nullable":true,"signed":false}}],"extended_metadata":[],"first":{"href":"https://api.dataplatform.ibm.com/v2/connections/91b3ebcd-6356-4285-90b0-23e302b5a193/assets?project_id=9e53bd07-ee
6d-4337-ad30-81191353bad7&offset=0&limit=100&path=%2FDASH7751%2FANIMALS"},"total_count":1,"logs":[]}}},"app_data":{"ui_data":{"label":"Db2 Warehouse on Cloud","x_pos":-280,"y_pos":160}}}]}],"schemas":[{"id":"schema0","fields":[{"name":"diet","type":"string"},{"name":"class","type":"string"},{"name":"wiki_page","type":"string"},{"name":"latin_name","type":"string"},{"name":"max_length","type":"double"},{"name":"min_length","type":"double"},{"name":"max_weight","type":"double"},{"name":"min_weight","type":"double"},{"name":"_deleted","type":"boolean"},{"name":"_rev","type":"string"},{"name":"_id","type":"string"}]}]},"engines":{"streams":{"instance_id":"f23160b7-6412-43c2-bdf8-bc598b2af4be"}}}}
\ No newline at end of file
diff --git a/streams-designer/README.md b/streams-designer/README.md
deleted file mode 100644
index a7e6f51..0000000
--- a/streams-designer/README.md
+++ /dev/null
@@ -1,160 +0,0 @@
-# Simple load from Cloudant to Db2 in `Streams Designer`
-This tutorial is an introduction to loading Cloudant documents and saving that data to Db2 using Streams Designer.
-In this tutorial you will:
-
-1. Set up IBM services: Cloudant, Streaming Analytics, Watson Studio, and Db2 Warehouse on Cloud.
-1. Create a Python `Code` operator that will load Cloudant documents from the `_changes` endpoint.
-1. Edit the output schema for the Python `Code` operator.
-1. Create a Db2 operator and add the connection details.
-1. Map the `Code` operator schema with the target columns in the Db2 table.
-1. Save and run the streams flow.
-
-**N.B. This is an example only intended as a _starting point_ for using IBM Streams Designer to extract data from Cloudant and insert into Db2 Warehouse on Cloud. It is _not_ intended as a drop-in replacement for the deprecated Cloudant warehouse integration. In particular this example does not create or alter Db2 tables or handle document deletion.**
-
-## Before you begin
-
-These are the IBM Cloud services required to run this tutorial:
-1. [Cloudant](https://console.bluemix.net/catalog/services/cloudant-nosql-db)
-1. [Db2 Warehouse on Cloud](https://console.bluemix.net/catalog/services/dashdb)
-1. [IBM Watson Studio](https://dataplatform.cloud.ibm.com)
-
-# Setting up IBM Cloud services
-
-## Cloudant
-1. [Create an IBM Cloudant](../cloudant/create.md) instance.
-1. [Replicate the tutorial data](../cloudant/replicate.md).
-
-## Streaming Analytics
-1. [Create an IBM Streaming Analytics](../streaming-analytics/create.md) instance.
-
-## Watson Studio
-1. [Create an IBM Watson Studio](../watson-studio/create.md) instance.
-
-## Db2 Warehouse on Cloud
-1. [Create an IBM Db2 Warehouse on Cloud](../db2/create.md) instance.
-1. [Create a Db2 table to house the data](../db2/animaldb_table.md).
-
-## Create a Watson Studio project
-1. In the Watson Studio instance, open the Watson Studio app by pressing `Get Started`.
-
-1. Create a new project by selecting `New Project`.
-
-1. Select the `Streams Designer` tile and press OK.
-1. On the **New project** screen, add a name and optional description for the project, then press **Create**.
-**Note**: The previously created Streaming Analytics service should be pre-selected as seen in the screenshot. If the
-Cloud Object Storage instance was not created beforehand, you will have the option of adding one.
-
-
-## Option 1: Import the Streams flow file
-1. Create a new Streams flow under the `Add to project` drop-down list.
-
-1. On the **New Streams Flow** screen, select **From file**. Drag and drop [Cloudant_to_Db2_streams_flow.stp](Cloudant_to_Db2_streams_flow.stp)
-under the **File** section.
-The Streaming Analytics service drop-down box should already be filled in. Press **Create**.
-
-1. Edit the streams flow by clicking the pencil icon.
-
-1. Select the Code operator to open the code editor in the right pane. Replace the `username`, `password`, and
-`account` values with your Cloudant credentials.
-
-1. [Set up the Db2 operator.](#set-up-the-db2-operator)
-
-## Option 2: Create a new Streams flow
-1. Create a new Streams flow under the `Add to project` drop-down list.
-
-1. On the **New Streams Flow** screen, add a name and optional description for the streams flow.
-The Streaming Analytics service drop-down box should already be filled in.
-Select the **Manually** box, then press **Create**.
-
-
-The first operator we'll create is a Python `Code` operator for loading documents from Cloudant.
-1. Drag and drop the `Code` operator from **Sources** onto the canvas.
-1. Select the `Code` operator and a right pane will open for editing the code.
-
-1. Select `Python 3.5` as the coding language and replace the existing code with the code below.
- **Note:** Add your Cloudant credentials as parameters in the Cloudant initializer `client = Cloudant(...)`.
- ```python
- from cloudant import Cloudant
-
- # init() function will be called once on pipeline initialization
- # @state a Python dictionary object for keeping state. The state object is passed to the produce function
-
-
- def init(state):
- # do something once on pipeline initialization and save in the state object
- pass
-
-
- # produce() function will be called when the job starts to run.
- # It is called on a background thread, and it will typically invoke the 'submit()' callback
- # whenever a tuple of data is ready to be emitted from this operator.
- # This allows for using asynchronous data services as well as synchronous data generation or retrieval.
- # @submit a Python callback function that takes one argument: a dictionary representing a single tuple.
- # @state a Python dictionary object for keeping state
- # You must declare all output attributes in the Edit Schema window.
- def produce(submit, state):
- # Replace with your Cloudant credentials
- # Note: In general, 'account' is the username.
- client = Cloudant(
- 'username',
- 'password',
- account='account',
- connect=True
- )
- db = client['animaldb']
- feed = db.changes(feed='continuous', include_docs=True)
- for change in feed:
- doc = change['doc']
- # Submit the Cloudant doc in each iteration:
- submit(doc)
- ```
-1. Select `Edit Output Schema` below the code editor.
-1. Add the following attribute names and their types using the **Add Attribute** button.
- Press **Save** then **Close** when finished.
-
- Name | Type
- --- | ---
- \_id | Text
- \_rev | Text
- \_deleted | Boolean
- wiki_page | Text
- min_weight | Number
- max_weight | Number
- min_length | Number
- max_length | Number
- latin_name | Text
- class | Text
- diet | Text
-
- 
-
-1. Select the floppy disk icon in the Streams Designer toolbar to save the flow.
-1. Drag and drop the Db2 Warehouse on Cloud operator from **Targets** onto the canvas.
-
-## Set up the Db2 operator
-1. Select the Db2 Warehouse on Cloud operator and a right pane will open for adding connection details.
-
-1. Click `Add Connection` and select the Db2 Warehouse on Cloud instance that was previously created.
-Select **Create** to finish.
-**Note:** If `Your service instances in IBM Cloud` is empty, you can [grab the JDBC connection string](../db2/connection_details.md)
-and create a connection manually.
-
-1. Click on the icon under **Schema/Table** to select the Db2 table.
-1. Select your schema and `ANIMALS` table, then press **Select**.
-
-1. Link the two operators together by dragging your mouse cursor from the output port of the `Code` operator to the input
-port of the Db2 operator.
-
-1. Select **Map Schema** in the Db2 operator's right pane.
-
-1. Assign each attribute previously created in the `Code` operator to its equivalent Db2 target column, then press **Save**.
-
-
-## Run the Streams flow
-1. Press the play icon in the Streams Designer toolbar to save and run the streams flow.
-
-1. The Status indicator on the Metrics page will change from _stopped_ to _starting_, and then to _running_.
-
-1. [View the extracted data imported into the Db2 table](../db2/view_data.md).
-
-
diff --git a/streams-designer/assign-attributes-db2-operator.png b/streams-designer/assign-attributes-db2-operator.png
deleted file mode 100644
index 5d8c2dc..0000000
Binary files a/streams-designer/assign-attributes-db2-operator.png and /dev/null differ
diff --git a/streams-designer/attributes-code-streams-designer.png b/streams-designer/attributes-code-streams-designer.png
deleted file mode 100644
index 4698208..0000000
Binary files a/streams-designer/attributes-code-streams-designer.png and /dev/null differ
diff --git a/streams-designer/code-operator-streams-designer.png b/streams-designer/code-operator-streams-designer.png
deleted file mode 100644
index ce9afab..0000000
Binary files a/streams-designer/code-operator-streams-designer.png and /dev/null differ
diff --git a/streams-designer/create-new-project-watson-studio.png b/streams-designer/create-new-project-watson-studio.png
deleted file mode 100644
index 8dbcb05..0000000
Binary files a/streams-designer/create-new-project-watson-studio.png and /dev/null differ
diff --git a/streams-designer/create-streams-flow-watson-studio.png b/streams-designer/create-streams-flow-watson-studio.png
deleted file mode 100644
index 9950302..0000000
Binary files a/streams-designer/create-streams-flow-watson-studio.png and /dev/null differ
diff --git a/streams-designer/db2-operator-add-connection.png b/streams-designer/db2-operator-add-connection.png
deleted file mode 100644
index 793e05f..0000000
Binary files a/streams-designer/db2-operator-add-connection.png and /dev/null differ
diff --git a/streams-designer/db2-operator-streams-designer.png b/streams-designer/db2-operator-streams-designer.png
deleted file mode 100644
index c3621d5..0000000
Binary files a/streams-designer/db2-operator-streams-designer.png and /dev/null differ
diff --git a/streams-designer/get-started-watson-studio.png b/streams-designer/get-started-watson-studio.png
deleted file mode 100644
index 78163ca..0000000
Binary files a/streams-designer/get-started-watson-studio.png and /dev/null differ
diff --git a/streams-designer/import-streams-flow-watson-studio.png b/streams-designer/import-streams-flow-watson-studio.png
deleted file mode 100644
index 9611dc4..0000000
Binary files a/streams-designer/import-streams-flow-watson-studio.png and /dev/null differ
diff --git a/streams-designer/link-operators-streams-designer.png b/streams-designer/link-operators-streams-designer.png
deleted file mode 100644
index 06714dc..0000000
Binary files a/streams-designer/link-operators-streams-designer.png and /dev/null differ
diff --git a/streams-designer/map-schema-db2-operator.png b/streams-designer/map-schema-db2-operator.png
deleted file mode 100644
index 7ac29c2..0000000
Binary files a/streams-designer/map-schema-db2-operator.png and /dev/null differ
diff --git a/streams-designer/new-project-watson-studio.png b/streams-designer/new-project-watson-studio.png
deleted file mode 100644
index ed22383..0000000
Binary files a/streams-designer/new-project-watson-studio.png and /dev/null differ
diff --git a/streams-designer/open-streams-designer-editor.png b/streams-designer/open-streams-designer-editor.png
deleted file mode 100644
index 73b851f..0000000
Binary files a/streams-designer/open-streams-designer-editor.png and /dev/null differ
diff --git a/streams-designer/play-icon-stream-designer.png b/streams-designer/play-icon-stream-designer.png
deleted file mode 100644
index c8d0542..0000000
Binary files a/streams-designer/play-icon-stream-designer.png and /dev/null differ
diff --git a/streams-designer/running-status-streams-flow.png b/streams-designer/running-status-streams-flow.png
deleted file mode 100644
index 17ada9d..0000000
Binary files a/streams-designer/running-status-streams-flow.png and /dev/null differ
diff --git a/streams-designer/select-schema-table-db2-operator.png b/streams-designer/select-schema-table-db2-operator.png
deleted file mode 100644
index e84424f..0000000
Binary files a/streams-designer/select-schema-table-db2-operator.png and /dev/null differ
diff --git a/streams-designer/streams-designer-tile-watson-studio.png b/streams-designer/streams-designer-tile-watson-studio.png
deleted file mode 100644
index 9c24ac9..0000000
Binary files a/streams-designer/streams-designer-tile-watson-studio.png and /dev/null differ
diff --git a/streams-designer/streams-flow-watson-studio.png b/streams-designer/streams-flow-watson-studio.png
deleted file mode 100644
index 7991917..0000000
Binary files a/streams-designer/streams-flow-watson-studio.png and /dev/null differ