AT&T 2.0.19 Code drop, stage 3

Issue-ID: AAF-197
Change-Id: I8b02cb073ccba318ccaf6ea0276446bdce88fb82
Signed-off-by: Instrumental <jcgmisc@stl.gathman.org>
diff --git a/auth/auth-cass/.gitignore b/auth/auth-cass/.gitignore
new file mode 100644
index 0000000..d7c9d7e
--- /dev/null
+++ b/auth/auth-cass/.gitignore
@@ -0,0 +1,9 @@
+/.settings
+/bin
+/Old
+/target
+/.classpath
+/.project
+/AAF_2.0.8_alter.zip
+/logs/
+/target/
diff --git a/auth/auth-cass/docker/dinstall b/auth/auth-cass/docker/dinstall
new file mode 100644
index 0000000..17d3e07
--- /dev/null
+++ b/auth/auth-cass/docker/dinstall
@@ -0,0 +1,29 @@
+
+if [ "`docker ps -a | grep aaf_cass`" == "" ]; then
+  docker run --name aaf_cass  -d cassandra:3.11
+else 
+  docker exec aaf_cass mkdir -p /opt/app/cass_init
+  docker cp "../src/main/cql/." aaf_cass:/opt/app/cass_init
+fi
+
+echo "Docker Installed Basic Cassandra on aaf_cass.  Executing the following "
+echo "NOTE: This creator provided is only a Single Instance. For more complex Cassandra, create independently"
+echo ""
+echo " cd /opt/app/cass_init"  
+echo " cqlsh -u root -p root -f keyspace.cql"
+echo " cqlsh -u root -p root -f init.cql"
+echo " cqlsh -u root -p root -f osaaf.cql"
+echo ""
+echo "The following will give you a temporary identity with which to start working, or emergency"
+echo " cqlsh -u root -p root -f temp_identity.cql"
+echo "Sleeping 10 seconds to allow Cassandra to start"
+sleep 10
+docker exec -it aaf_cass bash -c '\
+cd /opt/app/cass_init; \
+echo "Creating Keyspace";cqlsh -u root -p root -f keyspace.cql;\
+echo "Creating init";cqlsh -u root -p root -f init.cql;\
+echo "Creating osaaf";cqlsh -u root -p root -f osaaf.cql;\
+echo "Creating temp Identity";cqlsh -u root -p root -f temp_identity.cql'
+
+echo "Inspecting aafcassadra.  Use to get the IP address to update org.osaaf.cassandra.props"
+docker inspect aaf_cass | grep '"IPAddress' | head -1
diff --git a/auth/auth-cass/pom.xml b/auth/auth-cass/pom.xml
new file mode 100644
index 0000000..56f367d
--- /dev/null
+++ b/auth/auth-cass/pom.xml
@@ -0,0 +1,110 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>org.onap.aaf.auth</groupId>
+		<artifactId>parent</artifactId>
+		<version>2.1.0-SNAPSHOT</version>
+		<relativePath>../pom.xml</relativePath>
+	</parent>
+
+	<artifactId>aaf-auth-cass</artifactId>
+	<name>AAF Auth Cass</name>
+	<description>Cassandra Data Libraries for AAF Auth</description>
+	<packaging>jar</packaging>
+	
+	<developers>
+		<developer>
+			<name>Jonathan Gathman</name>
+			<email>jonathan.gathman@att.com</email>
+			<organization>ATT</organization>
+			<roles>
+				<role>Architect</role>
+				<role>Lead Developer</role>
+			</roles>
+		</developer>
+		<developer>
+			<name>Gabe Maurer</name>
+			<email>gabe.maurer@att.com</email>
+			<organization>ATT</organization>
+			<roles>
+				<role>Developer</role>
+			</roles>
+		</developer>
+		<developer>
+			<name>Ian Howell</name>
+			<email>ian.howell@att.com</email>
+			<organization>ATT</organization>
+			<roles>
+				<role>Developer</role>
+			</roles>
+		</developer>
+	</developers>
+
+	
+	<dependencies>
+		<dependency>
+			<groupId>org.onap.aaf.auth</groupId>
+			<artifactId>aaf-auth-core</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>org.onap.aaf.cadi</groupId>
+			<artifactId>aaf-cadi-aaf</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>com.datastax.cassandra</groupId>
+			<artifactId>cassandra-driver-core</artifactId>
+		</dependency>
+
+		<!-- Cassandra prefers Snappy and LZ4 libs for performance -->
+		<dependency>
+			<groupId>org.xerial.snappy</groupId>
+			<artifactId>snappy-java</artifactId>
+			<version>1.1.1-M1</version>
+		</dependency>
+
+		<dependency>
+			<groupId>net.jpountz.lz4</groupId>
+			<artifactId>lz4</artifactId>
+			<version>1.2.0</version>
+		</dependency>
+
+		<dependency>
+			<groupId>com.googlecode.jcsv</groupId>
+			<artifactId>jcsv</artifactId>
+			<version>1.4.0</version>
+		</dependency>
+
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-log4j12</artifactId>
+			<scope>test</scope>
+		</dependency>
+
+
+	</dependencies>
+</project>
+
diff --git a/auth/auth-cass/src/main/cql/.gitignore b/auth/auth-cass/src/main/cql/.gitignore
new file mode 100644
index 0000000..f17048e
--- /dev/null
+++ b/auth/auth-cass/src/main/cql/.gitignore
@@ -0,0 +1 @@
+/init.cql
diff --git a/auth/auth-cass/src/main/cql/keyspace.cql b/auth/auth-cass/src/main/cql/keyspace.cql
new file mode 100644
index 0000000..ad58090
--- /dev/null
+++ b/auth/auth-cass/src/main/cql/keyspace.cql
@@ -0,0 +1,9 @@
+// For Developer Machine single instance
+CREATE KEYSPACE authz
+  WITH REPLICATION = {'class' : 'SimpleStrategy','replication_factor':1};
+// 
+//
+ 
+// Example of Network Topology, with Datacenter dc1 & dc2
+// CREATE KEYSPACE authz WITH replication = { 'class': 'NetworkTopologyStrategy', 'dc1': '2', 'dc2': '2' };
+// 
diff --git a/auth/auth-cass/src/main/cql/osaaf.cql b/auth/auth-cass/src/main/cql/osaaf.cql
new file mode 100644
index 0000000..83c7fdf
--- /dev/null
+++ b/auth/auth-cass/src/main/cql/osaaf.cql
@@ -0,0 +1,61 @@
+USE authz;
+
+// Create 'org' root NS
+INSERT INTO ns (name,description,parent,scope,type)
+  VALUES('org','Root Namespace','.',1,1);
+
+INSERT INTO role(ns, name, perms, description)
+  VALUES('org','admin',{'org.access|*|*'},'Org Admins');
+
+INSERT INTO role(ns, name, perms, description)
+  VALUES('org','owner',{'org.access|*|read,approve'},'Org Owners');
+
+INSERT INTO perm(ns, type, instance, action, roles, description) 
+  VALUES ('org','access','*','read,approve',{'org.owner'},'Org Read Access');
+
+INSERT INTO perm(ns, type, instance, action, roles, description) 
+  VALUES ('org','access','*','*',{'org.admin'},'Org Write Access');
+
+// Create Root pass
+INSERT INTO cred (id,ns,type,cred,expires)
+  VALUES ('initial@osaaf.org','org.osaaf',1,0x008c5926ca861023c1d2a36653fd88e2,'2099-12-31') using TTL 14400;
+
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('initial@osaaf.org','org.admin','2099-12-31','org','admin') using TTL 14400;
+
+
+// Create org.osaaf
+INSERT INTO ns (name,description,parent,scope,type)
+  VALUES('org.osaaf','OSAAF Namespace','org',2,2);
+
+INSERT INTO role(ns, name, perms,description)
+  VALUES('org.osaaf','admin',{'org.osaaf.access|*|*'},'OSAAF Admins');
+
+INSERT INTO perm(ns, type, instance, action, roles,description) 
+  VALUES ('org.osaaf','access','*','*',{'org.osaaf.admin'},'OSAAF Write Access');
+
+INSERT INTO role(ns, name, perms,description)
+  VALUES('org.osaaf','owner',{'org.osaaf.access|*|read,approve'},'OSAAF Owners');
+
+INSERT INTO perm(ns, type, instance, action, roles,description) 
+  VALUES ('org.osaaf','access','*','read,appove',{'org.osaaf.owner'},'OSAAF Read Access');
+
+// Create org.osaaf.aaf
+INSERT INTO ns (name,description,parent,scope,type)
+  VALUES('org.osaaf.aaf','Application Authorization Framework','org.osaaf',3,3);
+
+INSERT INTO role(ns, name, perms, description)
+  VALUES('org.osaaf.aaf','admin',{'org.osaaf.aaf.access|*|*'},'AAF Admins');
+
+INSERT INTO perm(ns, type, instance, action, roles, description) 
+  VALUES ('org.osaaf.aaf','access','*','*',{'org.osaaf.aaf.admin'},'AAF Write Access');
+
+INSERT INTO perm(ns, type, instance, action, roles, description) 
+  VALUES ('org.osaaf.aaf','access','*','read,approve',{'org.osaaf.aaf.owner'},'AAF Read Access');
+
+INSERT INTO role(ns, name, perms, description)
+  VALUES('org.osaaf.aaf','owner',{'org.osaaf.aaf.access|*|read,approve'},'AAF Owners');
+
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('initial@osaaf.org','org.osaaf.aaf.admin','2099-12-31','org.osaaf.aaf','admin') using TTL 14400;
+
diff --git a/auth/auth-cass/src/main/cql/temp_identity.cql b/auth/auth-cass/src/main/cql/temp_identity.cql
new file mode 100644
index 0000000..ba6e782
--- /dev/null
+++ b/auth/auth-cass/src/main/cql/temp_identity.cql
@@ -0,0 +1,8 @@
+USE authz;
+
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('jonathan@people.osaaf.org','org.admin','2099-12-31','org','admin') ;
+
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('jonathan@people.osaaf.org','org.osaaf.aaf.admin','2099-12-31','org.osaaf.aaf','admin') ;
+
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/AbsCassDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/AbsCassDAO.java
new file mode 100644
index 0000000..9794b2e
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/AbsCassDAO.java
@@ -0,0 +1,504 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Deque;
+import java.util.List;
+import java.util.concurrent.ConcurrentLinkedDeque;
+
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.Env;
+import org.onap.aaf.misc.env.TimeTaken;
+import org.onap.aaf.misc.env.TransStore;
+
+import com.datastax.driver.core.BoundStatement;
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.ConsistencyLevel;
+import com.datastax.driver.core.PreparedStatement;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.ResultSetFuture;
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.Session;
+import com.datastax.driver.core.exceptions.DriverException;
+
+public abstract class AbsCassDAO<TRANS extends TransStore,DATA> {
+	protected static final char DOT = '.';
+	protected static final char DOT_PLUS_ONE = '.'+1;
+	protected static final String FIRST_CHAR = Character.toString((char)0);
+	protected static final String LAST_CHAR = Character.toString((char)Character.MAX_VALUE);
+	protected static final int FIELD_COMMAS = 0;
+	protected static final int QUESTION_COMMAS = 1;
+	protected static final int ASSIGNMENT_COMMAS = 2;
+	protected static final int WHERE_ANDS = 3;
+
+	private Cluster cluster; 
+	/*
+	 * From DataStax
+	 * com.datastax.driver.core.Session
+		A session holds connections to a Cassandra cluster, allowing it to be queried. Each session maintains multiple connections to the cluster nodes, 
+		provides policies to choose which node to use for each query (round-robin on all nodes of the cluster by default), and handles retries for 
+		failed query (when it makes sense), etc...
+		Session instances are thread-safe and usually a single instance is enough per application. However, a given session can only be set to one 
+		keyspace at a time, so one instance per keyspace is necessary.
+	 */
+	private Session session;
+	private final String keyspace;
+	// If this is null, then we own session
+	private final AbsCassDAO<TRANS,?> owningDAO;
+	protected Class<DATA> dataClass;
+	private final String name;
+//	private static Slot sessionSlot; // not used since 2015
+	private static final ArrayList<AbsCassDAO<? extends TransStore,?>.PSInfo> psinfos = new ArrayList<AbsCassDAO<? extends TransStore,?>.PSInfo>();
+	private static final List<Object> EMPTY = new ArrayList<Object>(0);
+	private static final Deque<ResetRequest> resetDeque = new ConcurrentLinkedDeque<ResetRequest>();
+	private static boolean resetTrigger = false;
+	private static long nextAvailableReset = 0;
+	
+	public AbsCassDAO(TRANS trans, String name, Cluster cluster, String keyspace, Class<DATA> dataClass) {
+		this.name = name;
+		this.cluster = cluster;
+		this.keyspace = keyspace;
+		owningDAO = null;  // we own session
+		session = null;
+		this.dataClass = dataClass;
+	}
+
+	public AbsCassDAO(TRANS trans, String name, AbsCassDAO<TRANS,?> aDao, Class<DATA> dataClass) {
+		this.name = name;
+		cluster = aDao.cluster;
+		keyspace = aDao.keyspace;
+		session = null;
+		// We do not own session
+		owningDAO = aDao;
+		this.dataClass = dataClass;
+	}
+	
+// Not used since 2015
+//	public static void setSessionSlot(Slot slot) {
+//		sessionSlot = slot;
+//	}
+
+	//Note: Lower case ON PURPOSE. These names used to create History Messages
+	public enum CRUD {
+		create,read,update,delete;
+	}
+
+	public class PSInfo {
+		private PreparedStatement ps;
+		private final int size;
+		private final Loader<DATA> loader;
+		private final CRUD crud; // Store CRUD, because it makes a difference in Object Order, see Loader
+		private final String cql;
+		private final ConsistencyLevel consistency;
+
+
+		/**
+		 * Create a PSInfo and create Prepared Statement
+		 * 
+		 * @param trans
+		 * @param theCQL
+		 * @param loader
+		 */
+		public PSInfo(TRANS trans, String theCQL, Loader<DATA> loader, ConsistencyLevel consistency) {
+			this.loader = loader;
+			this.consistency=consistency;
+			psinfos.add(this);
+
+			cql = theCQL.trim().toUpperCase();
+			if(cql.startsWith("INSERT")) {
+				crud = CRUD.create;
+			} else if(cql.startsWith("UPDATE")) {
+				crud = CRUD.update;
+			} else if(cql.startsWith("DELETE")) {
+				crud = CRUD.delete;
+			} else {
+				crud = CRUD.read;
+			}
+			
+			int idx = 0, count=0;
+			while((idx=cql.indexOf('?',idx))>=0) {
+				++idx;
+				++count;
+			}
+			size=count;
+		}
+		
+		public synchronized void reset() {
+			ps = null;
+		}
+		
+		private synchronized BoundStatement ps(TransStore trans) throws APIException, IOException {
+			/* From Datastax
+				You should prepare only once, and cache the PreparedStatement in your application (it is thread-safe). 
+				If you call prepare multiple times with the same query string, the driver will log a warning.
+			*/
+			if(ps==null) {
+				TimeTaken tt = trans.start("Preparing PSInfo " + crud.toString().toUpperCase() + " on " + name,Env.SUB);
+				try {
+					ps = getSession(trans).prepare(cql);
+					ps.setConsistencyLevel(consistency);
+				} catch (DriverException e) {
+					reportPerhapsReset(trans,e);
+					throw e;
+				} finally {
+					tt.done();
+				}
+			}
+			// BoundStatements are NOT threadsafe... need a new one each time.
+			return new BoundStatement(ps);
+		}
+
+		/**
+		 * Execute a Prepared Statement by extracting from DATA object
+		 * 
+		 * @param trans
+		 * @param text
+		 * @param data
+		 * @return
+		 */
+		public Result<ResultSetFuture> execAsync(TRANS trans, String text, DATA data) {
+			TimeTaken tt = trans.start(text, Env.REMOTE);
+			try {
+				return Result.ok(getSession(trans).executeAsync(
+						ps(trans).bind(loader.extract(data, size, crud))));
+			} catch (DriverException | APIException | IOException e) {
+				AbsCassDAO.this.reportPerhapsReset(trans,e);
+				return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
+			} finally {
+				tt.done();
+			}
+		}
+
+		/**
+		 * Execute a Prepared Statement on Object[] key
+		 * 
+		 * @param trans
+		 * @param text
+		 * @param objs
+		 * @return
+		 */
+		public Result<ResultSetFuture> execAsync(TRANS trans, String text, Object ... objs) {
+			TimeTaken tt = trans.start(text, Env.REMOTE);
+			try {
+				return Result.ok(getSession(trans).executeAsync(ps(trans).bind(objs)));
+			} catch (DriverException | APIException | IOException e) {
+				AbsCassDAO.this.reportPerhapsReset(trans,e);
+				return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
+			} finally {
+				tt.done();
+			}
+		}
+		
+		/* 
+		 * Note:
+		 * 
+		 */
+
+		/**
+		 * Execute a Prepared Statement by extracting from DATA object
+		 * 
+		 * @param trans
+		 * @param text
+		 * @param data
+		 * @return
+		 */
+		public Result<ResultSet> exec(TRANS trans, String text, DATA data) {
+			TimeTaken tt = trans.start(text, Env.REMOTE);
+			try {
+				/*
+				 * "execute" (and executeAsync)
+				 * Executes the provided query.
+					This method blocks until at least some result has been received from the database. However, 
+					for SELECT queries, it does not guarantee that the result has been received in full. But it 
+					does guarantee that some response has been received from the database, and in particular 
+					guarantee that if the request is invalid, an exception will be thrown by this method.
+
+					Parameters:
+					statement - the CQL query to execute (that can be any Statement).
+					Returns:
+						the result of the query. That result will never be null but can be empty (and will 
+						be for any non SELECT query).
+				 */
+				return Result.ok(getSession(trans).execute(
+						ps(trans).bind(loader.extract(data, size, crud))));
+			} catch (DriverException | APIException | IOException e) {
+				AbsCassDAO.this.reportPerhapsReset(trans,e);
+				return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
+			} finally {
+				tt.done();
+			}
+		}
+
+		/**
+		 * Execute a Prepared Statement on Object[] key
+		 * 
+		 * @param trans
+		 * @param text
+		 * @param objs
+		 * @return
+		 */
+		public Result<ResultSet> exec(TRANS trans, String text, Object ... objs) {
+			TimeTaken tt = trans.start(text, Env.REMOTE);
+			try {
+				return Result.ok(getSession(trans).execute(ps(trans).bind(objs)));
+			} catch (DriverException | APIException | IOException e) {
+				AbsCassDAO.this.reportPerhapsReset(trans,e);
+				return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
+			} finally {
+				tt.done();
+			}
+		}
+
+		/**
+		 * Read the Data from Cassandra given a Prepared Statement (defined by the
+		 * DAO Instance)
+		 *
+		 * This is common behavior among all DAOs.
+		 * @throws DAOException
+		 */
+		public Result<List<DATA>> read(TRANS trans, String text, Object[] key) {
+			TimeTaken tt = trans.start(text,Env.REMOTE);
+			
+			ResultSet rs;
+			try {
+				rs = getSession(trans).execute(key==null?ps(trans):ps(trans).bind(key));
+/// TEST CODE for Exception				
+//				boolean force = true; 
+//				if(force) {
+//					Map<InetSocketAddress, Throwable> misa = new HashMap<InetSocketAddress,Throwable>();
+//					//misa.put(new InetSocketAddress(444),new Exception("no host was tried"));
+//					misa.put(new InetSocketAddress(444),new Exception("Connection has been closed"));
+//					throw new com.datastax.driver.core.exceptions.NoHostAvailableException(misa);
+////					throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"no host was tried");
+//				}
+//// END TEST CODE
+			} catch (DriverException | APIException | IOException e) {
+				AbsCassDAO.this.reportPerhapsReset(trans,e);
+				return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
+			} finally {
+				tt.done();
+			}
+			
+			return extract(loader,rs,null /*let Array be created if necessary*/,dflt);
+		}
+		
+		public Result<List<DATA>> read(TRANS trans, String text, DATA data) {
+			return read(trans,text, loader.extract(data, size, crud));
+		}
+		
+		public Object[] keyFrom(DATA data) {
+			return loader.extract(data, size, CRUD.delete); // Delete is key only
+		}
+
+		/*
+		 * Note: in case PSInfos are deleted, we want to remove them from list.  This is not expected, 
+		 * but we don't want a data leak if it does.  Finalize doesn't have to happen quickly
+		 */
+		@Override
+		protected void finalize() throws Throwable {
+			psinfos.remove(this);
+		}
+	}
+
+	protected final Accept<DATA> dflt = new Accept<DATA>() {
+		@Override
+		public boolean ok(DATA data) {
+			return true;
+		}
+	};
+
+
+	@SuppressWarnings("unchecked")
+    protected final Result<List<DATA>> extract(Loader<DATA> loader, ResultSet rs, List<DATA> indata, Accept<DATA> accept) {
+		List<Row> rows = rs.all();
+		if(rows.isEmpty()) {
+			return Result.ok((List<DATA>)EMPTY); // Result sets now .emptyList(true);
+		} else {
+			DATA d;
+			List<DATA> data = indata==null?new ArrayList<DATA>(rows.size()):indata;
+			
+			for(Row row : rows) {
+				try {
+					d = loader.load(dataClass.newInstance(),row);
+					if(accept.ok(d)) {
+						data.add(d);
+					}
+				} catch(Exception e) {
+					return Result.err(e);
+				}
+			}
+			return Result.ok(data);
+		}
+    }
+    
+	private static final String NEW_CASSANDRA_SESSION_CREATED = "New Cassandra Session Created";
+	private static final String NEW_CASSANDRA_CLUSTER_OBJECT_CREATED = "New Cassandra Cluster Object Created";
+	private static final String NEW_CASSANDRA_SESSION = "New Cassandra Session";
+
+	private static class ResetRequest {
+		//package on purpose
+		Session session;
+		long timestamp;
+		
+		public ResetRequest(Session session) {
+			this.session = session;
+			timestamp = System.currentTimeMillis();
+		}
+	}
+
+	
+	public static final void primePSIs(TransStore trans) throws APIException, IOException {
+		for(AbsCassDAO<? extends TransStore, ?>.PSInfo psi : psinfos) {
+			if(psi.ps==null) {
+				psi.ps(trans);
+			}
+		}
+	}
+	
+	public final Session getSession(TransStore trans) throws APIException, IOException {
+		// SessionFilter unused since 2015
+		// Try to use Trans' session, if exists
+//		if(sessionSlot!=null) { // try to get from Trans
+//			Session sess = trans.get(sessionSlot, null);
+//			if(sess!=null) {
+//				return sess;
+//			}
+//		}
+		
+		// If there's an owning DAO, use it's session
+		if(owningDAO!=null) { 
+			return owningDAO.getSession(trans);
+		}
+		
+		// OK, nothing else works... get our own.
+		if(session==null || resetTrigger) {
+			Cluster tempCluster = null;
+			Session tempSession = null;
+			try {
+				synchronized(NEW_CASSANDRA_SESSION_CREATED) {
+					boolean reset = false;
+					for(ResetRequest r : resetDeque) {
+						if(r.session == session) {
+							if(r.timestamp>nextAvailableReset) {
+								reset=true;
+								nextAvailableReset = System.currentTimeMillis() + 60000;
+								tempCluster = cluster;
+								tempSession = session;
+								break;
+							} else {
+								trans.warn().log("Cassandra Connection Reset Ignored: Recent Reset");
+							}
+						}
+					}
+	
+					if(reset || session == null) {
+						TimeTaken tt = trans.start(NEW_CASSANDRA_SESSION, Env.SUB);
+						try {
+							// Note: Maitrayee recommended not closing the cluster, just
+							// overwrite it. Jonathan 9/30/2016 assuming same for Session
+							// This was a bad idea.  Ran out of File Handles as I suspected, Jonathan
+							if(reset) {
+								for(AbsCassDAO<? extends TransStore, ?>.PSInfo psi : psinfos) {
+									psi.reset();
+								}
+							}
+							if(reset || cluster==null) {
+								cluster = CassAccess.cluster(trans, keyspace);
+								trans.warn().log(NEW_CASSANDRA_CLUSTER_OBJECT_CREATED);
+							}
+							if(reset || session==null) {
+								session = cluster.connect(keyspace);
+								trans.warn().log(NEW_CASSANDRA_SESSION_CREATED);
+							}
+						} finally {
+							resetTrigger=false;
+							tt.done();
+						}
+					}
+				}
+			} finally {
+				TimeTaken tt = trans.start("Clear Reset Deque", Env.SUB);
+				try {
+					resetDeque.clear();
+					// Not clearing Session/Cluster appears to kill off FileHandles
+					if(tempSession!=null && !tempSession.isClosed()) {
+						tempSession.close();
+					}
+					if(tempCluster!=null && !tempCluster.isClosed()) {
+						tempCluster.close();
+					}
+				} finally {
+					tt.done();
+				}
+			}
+		}
+		return session;
+	}
+	
+	public final boolean reportPerhapsReset(TransStore trans, Exception e) {
+		if(owningDAO!=null) {
+			return owningDAO.reportPerhapsReset(trans, e);
+		} else {
+			boolean rv = false;
+			if(CassAccess.isResetException(e)) {
+				trans.warn().printf("Session Reset called for %s by %s ",session==null?"":session,e==null?"Mgmt Command":e.getClass().getName());
+				resetDeque.addFirst(new ResetRequest(session));
+				rv = resetTrigger = true;
+			} 
+			trans.error().log(e);
+			return rv;
+		}
+	}
+
+	public void close(TransStore trans) {
+		if(owningDAO==null) {
+			if(session!=null) {
+				TimeTaken tt = trans.start("Cassandra Session Close", Env.SUB);
+				try {
+					session.close();
+				} finally {
+					tt.done();
+				}
+				session = null;
+			} else {
+				trans.debug().log("close called(), Session already closed");
+			}
+		} else {
+			owningDAO.close(trans);
+		}
+	}
+
+	protected void wasModified(TRANS trans, CRUD modified, DATA data, String ... override) {
+	}
+	
+	protected interface Accept<DATA> {
+		public boolean ok(DATA data);
+	}
+
+}
+
+
+
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Bytification.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Bytification.java
new file mode 100644
index 0000000..279f399
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Bytification.java
@@ -0,0 +1,30 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+public interface Bytification {
+	public ByteBuffer bytify() throws IOException;
+	public void reconstitute(ByteBuffer bb) throws IOException;
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CIDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CIDAO.java
new file mode 100644
index 0000000..83b13c3
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CIDAO.java
@@ -0,0 +1,50 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+import java.util.Date;
+
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.Trans;
+
+public interface CIDAO<TRANS extends Trans> {
+
+	/**
+	 * Touch the date field for given Table
+	 *  
+	 * @param trans
+	 * @param name
+	 * @return
+	 */
+	public abstract Result<Void> touch(TRANS trans, String name, int ... seg);
+
+	/**
+	 * Read all Info entries, and set local Date objects
+	 * 
+	 * This is to support regular data checks on the Database to speed up Caching behavior
+	 * 
+	 */
+	public abstract Result<Void> check(TRANS trans);
+
+	public abstract Date get(TRANS trans, String table, int seg);
+
+}
\ No newline at end of file
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cacheable.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cacheable.java
new file mode 100644
index 0000000..d697b90
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cacheable.java
@@ -0,0 +1,34 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+/**
+ * Interface to obtain Segment Integer from DAO Data
+ * for use in Caching mechanism
+ * 
+ * This should typically be obtained by getting the Hash of the key, then using modulus on the size of segment.
+ * 
+ * @author Jonathan
+ *
+ */
+public interface Cacheable {
+	public int[] invalidate(Cached<?,?> cache);
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cached.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cached.java
new file mode 100644
index 0000000..0797b04
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cached.java
@@ -0,0 +1,199 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.Timer;
+import java.util.TimerTask;
+
+import org.onap.aaf.auth.cache.Cache;
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.Env;
+import org.onap.aaf.misc.env.Trans;
+
+public class Cached<TRANS extends Trans, DATA extends Cacheable> extends Cache<TRANS,DATA> {
+	// Java does not allow creation of Arrays with Generics in them...
+	// private Map<String,Dated> cache[];
+	protected final CIDAO<TRANS> info;
+	
+	private static Timer infoTimer;
+	private Object cache[];
+	public final int segSize;
+
+	protected final String name;
+
+	private final long expireIn;
+	
+
+
+	// Taken from String Hash, but coded, to ensure consistent across Java versions.  Also covers negative case;
+	public int cacheIdx(String key) {
+		int h = 0;
+		for (int i = 0; i < key.length(); i++) {
+		    h = 31*h + key.charAt(i);
+		}
+		if(h<0)h*=-1;
+		return h%segSize;
+	}
+	
+	public Cached(CIDAO<TRANS> info, String name, int segSize, long expireIn) {
+		this.name =name;
+		this.segSize = segSize;
+		this.info = info;
+		this.expireIn = expireIn;
+		cache = new Object[segSize];
+		// Create a new Map for each Segment, and store locally
+		for(int i=0;i<segSize;++i) {
+			cache[i]=obtain(name+i);
+		}
+	}
+	
+	public void add(String key, List<DATA> data) {
+		@SuppressWarnings("unchecked")
+		Map<String,Dated> map = ((Map<String,Dated>)cache[cacheIdx(key)]);
+		map.put(key, new Dated(data, expireIn));
+	}
+
+
+	public int invalidate(String key)  {
+		int cacheIdx = cacheIdx(key);
+		@SuppressWarnings("unchecked")
+		Map<String,Dated> map = ((Map<String,Dated>)cache[cacheIdx]);
+//		if(map.remove(key)!=null) // Not seeming to remove all the time
+		if(map!=null)map.clear();
+//			System.err.println("Remove " + name + " " + key);
+		return cacheIdx;
+	}
+
+	public Result<Void> invalidate(int segment)  {
+		if(segment<0 || segment>=cache.length) return Result.err(Status.ERR_BadData,"Cache Segment %s is out of range",Integer.toString(segment));
+		@SuppressWarnings("unchecked")
+		Map<String,Dated> map = ((Map<String,Dated>)cache[segment]);
+		if(map!=null) {
+			map.clear();
+		}
+		return Result.ok();
+	}
+
+	protected interface Getter<D> {
+		public abstract Result<List<D>> get();
+	};
+	
+	// TODO utilize Segmented Caches, and fold "get" into "reads"
+	@SuppressWarnings("unchecked")
+	public Result<List<DATA>> get(TRANS trans, String key, Getter<DATA> getter) {
+		List<DATA> ld = null;
+		Result<List<DATA>> rld = null;
+		
+		int cacheIdx = cacheIdx(key);
+		Map<String, Dated> map = ((Map<String,Dated>)cache[cacheIdx]);
+		
+		// Check for saved element in cache
+		Dated cached = map.get(key);
+		// Note: These Segment Timestamps are kept up to date with DB
+		Date dbStamp = info.get(trans, name,cacheIdx);
+		
+		// Check for cache Entry and whether it is still good (a good Cache Entry is same or after DBEntry, so we use "before" syntax)
+		if(cached!=null && dbStamp.before(cached.timestamp)) {
+			ld = (List<DATA>)cached.data;
+			rld = Result.ok(ld);
+		} else {
+			rld = getter.get();
+			if(rld.isOK()) { // only store valid lists
+				map.put(key, new Dated(rld.value,expireIn));  // successful item found gets put in cache
+//			} else if(rld.status == Result.ERR_Backend){
+//				map.remove(key);
+			}
+		}
+		return rld;
+	}
+
+	/**
+	 * Each Cached object has multiple Segments that need cleaning.  Derive each, and add to Cleansing Thread
+	 * @param env
+	 * @param dao
+	 */
+	public static void startCleansing(AuthzEnv env, CachedDAO<?,?,?> ... dao) {
+		for(CachedDAO<?,?,?> d : dao) {  
+			for(int i=0;i<d.segSize;++i) {
+				startCleansing(env, d.table()+i);
+			}
+		}
+	}
+
+
+	public static<T extends Trans> void startRefresh(AuthzEnv env, CIDAO<AuthzTrans> cidao) {
+		if(infoTimer==null) {
+			infoTimer = new Timer("CachedDAO Info Refresh Timer");
+			int minRefresh = 10*1000*60; // 10 mins Integer.parseInt(env.getProperty(CACHE_MIN_REFRESH_INTERVAL,"2000")); // 2 second minimum refresh 
+			infoTimer.schedule(new Refresh(env,cidao, minRefresh), 1000, minRefresh); // note: Refresh from DB immediately
+		}
+	}
+	
+	public static void stopTimer() {
+		Cache.stopTimer();
+		if(infoTimer!=null) {
+			infoTimer.cancel();
+			infoTimer = null;
+		}
+	}
+	
+	private final static class Refresh extends TimerTask {
+		private static final int maxRefresh = 2*60*10000; // 20 mins
+		private AuthzEnv env;
+		private CIDAO<AuthzTrans> cidao;
+		private int minRefresh;
+		private long lastRun;
+		
+		public Refresh(AuthzEnv env, CIDAO<AuthzTrans> cidao, int minRefresh) {
+			this.env = env;
+			this.cidao = cidao;
+			this.minRefresh = minRefresh;
+			lastRun = System.currentTimeMillis()-maxRefresh-1000;
+		}
+		
+		@Override
+		public void run() {
+			// Evaluate whether to refresh based on transaction rate
+			long now = System.currentTimeMillis();
+			long interval = now-lastRun;
+
+			if(interval < minRefresh || interval < Math.min(env.transRate(),maxRefresh)) return;
+			lastRun = now;
+			AuthzTrans trans = env.newTransNoAvg();
+			Result<Void> rv = cidao.check(trans);
+			if(rv.status!=Result.OK) {
+				env.error().log("Error in CacheInfo Refresh",rv.details);
+			}
+			if(env.debug().isLoggable()) {
+				StringBuilder sb = new StringBuilder("Cache Info Refresh: ");
+				trans.auditTrail(0, sb, Env.REMOTE);
+				env.debug().log(sb);
+			}
+		}
+	}
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CachedDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CachedDAO.java
new file mode 100644
index 0000000..017f878
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CachedDAO.java
@@ -0,0 +1,228 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.Trans;
+
+/**
+ * CachedDAO
+ * 
+ * Cache the response of "get" of any DAO.  
+ * 
+ * For simplicity's sake, at this time, we only do this for single Object keys  
+ * 
+ * @author Jonathan
+ *
+ * @param <DATA>
+ */
+public class CachedDAO<TRANS extends Trans,D extends DAO<TRANS,DATA>,DATA extends Cacheable> 
+		extends Cached<TRANS,DATA> implements DAO_RO<TRANS,DATA>{
+//	private final String dirty_str; 
+	
+	private final D dao;
+
+	public CachedDAO(D dao, CIDAO<TRANS> info, int segsize, long expireIn) {
+		super(info, dao.table(), segsize, expireIn);
+		
+		// Instantiate a new Cache per DAO name (so separate instances use the same cache) 
+		this.dao = dao;
+		//read_str = "Cached READ for " + dao.table();
+//		dirty_str = "Cache DIRTY on " + dao.table();
+		if(dao instanceof CassDAOImpl) {
+			((CassDAOImpl<?,?>)dao).cache = this;
+		}
+	}
+	
+	public static<T extends Trans, DA extends DAO<T,DT>, DT extends Cacheable> 
+			CachedDAO<T,DA,DT> create(DA dao, CIDAO<T> info, int segsize, long expireIn) {
+		return new CachedDAO<T,DA,DT>(dao,info, segsize, expireIn);
+	}
+
+	public void add(DATA data)  {
+		String key = keyFromObjs(dao.keyFrom(data));
+		List<DATA> list = new ArrayList<DATA>();
+		list.add(data);
+		super.add(key,list);
+	}
+	
+//	public void invalidate(TRANS trans, Object ... objs)  {
+//		TimeTaken tt = trans.start(dirty_str, Env.SUB);
+//		try {
+//			super.invalidate(keyFromObjs(objs));
+//		} finally {
+//			tt.done();
+//		}
+//	}
+
+	public static String keyFromObjs(Object ... objs) {
+		String key;
+		if(objs.length==1 && objs[0] instanceof String) {
+			key = (String)objs[0];
+		} else {
+			StringBuilder sb = new StringBuilder();
+			boolean first = true;
+			for(Object o : objs) {
+				if(o!=null) {
+					if(first) {
+					    first =false;
+					} else {
+					    sb.append('|');
+					}
+					sb.append(o.toString());
+				}
+			}
+			key = sb.toString();
+		}
+		return key;
+	}
+
+	public Result<DATA> create(TRANS trans, DATA data) {
+		Result<DATA> d = dao.create(trans,data);
+		if(d.status==Status.OK) {
+		    add(d.value);
+		} else {
+			trans.error().log(d.errorString());
+		}
+		// dao.create already modifies cache. Do not invalidate again. invalidate(trans,data);
+		return d;
+	}
+
+	protected class DAOGetter implements Getter<DATA> {
+		protected TRANS trans;
+		protected Object objs[];
+		protected D dao;
+		public Result<List<DATA>> result;
+
+		public DAOGetter(TRANS trans, D dao, Object ... objs) {
+			this.trans = trans;
+			this.dao = dao;
+			this.objs = objs;
+		}
+		
+		/**
+		 * Separated into single call for easy overloading
+		 * @return
+		 */
+		public Result<List<DATA>> call() {
+			return dao.read(trans, objs);
+		}
+		
+		@Override
+		public final Result<List<DATA>> get() {
+			return call();
+//			if(result.isOKhasData()) { // Note, given above logic, could exist, but stale
+//				return result.value;
+//			} else {
+//				return null;
+//			}
+		}
+	}
+
+	@Override
+	public Result<List<DATA>> read(final TRANS trans, final Object ... objs) {
+		DAOGetter getter = new DAOGetter(trans,dao,objs); 
+		return get(trans, keyFromObjs(objs),getter);
+//		if(ld!=null) {
+//			return Result.ok(ld);//.emptyList(ld.isEmpty());
+//		}
+//		// Result Result if exists
+//		if(getter.result==null) {
+//			return Result.err(Status.ERR_NotFound, "No Cache or Lookup found on [%s]",dao.table());
+//		}
+//		return getter.result;
+	}
+
+	// Slight Improved performance available when String and Obj versions are known. 
+	public Result<List<DATA>> read(final String key, final TRANS trans, final Object[] objs) {
+		DAOGetter getter = new DAOGetter(trans,dao,objs); 
+		return get(trans, key, getter);
+//		if(ld!=null) {
+//			return Result.ok(ld);//.emptyList(ld.isEmpty());
+//		}
+//		// Result Result if exists
+//		if(getter.result==null) {
+//			return Result.err(Status.ERR_NotFound, "No Cache or Lookup found on [%s]",dao.table());
+//		}
+//		return getter.result;
+	}
+	
+	@Override
+	public Result<List<DATA>> read(TRANS trans, DATA data) {
+		return read(trans,dao.keyFrom(data));
+	}
+	public Result<Void> update(TRANS trans, DATA data) {
+		Result<Void> d = dao.update(trans, data);
+		if(d.status==Status.OK) {
+		    add(data);
+		} else {
+			trans.error().log(d.errorString());
+		}
+		return d;
+	}
+
+	public Result<Void> delete(TRANS trans, DATA data, boolean reread) {
+		if(reread) { // If reread, get from Cache, if possible, not DB exclusively
+			Result<List<DATA>> rd = read(trans,data);
+			if(rd.notOK()) {
+			    return Result.err(rd);
+//			} else {
+//				trans.error().log(rd.errorString());
+			}
+			if(rd.isEmpty()) {
+				data.invalidate(this);
+				return Result.err(Status.ERR_NotFound,"Not Found");
+			}
+			data = rd.value.get(0);
+		}
+		Result<Void> rv=dao.delete(trans, data, false);
+		data.invalidate(this);
+		return rv;
+	}
+	
+	@Override
+	public void close(TRANS trans) {
+		if(dao!=null) {
+		    dao.close(trans);
+		}
+	}
+	
+
+	@Override
+	public String table() {
+		return dao.table();
+	}
+	
+	public D dao() {
+		return dao;
+	}
+	
+	public void invalidate(TRANS trans, DATA data) {
+        if(info.touch(trans, dao.table(),data.invalidate(this)).notOK()) {
+	    trans.error().log("Cannot touch CacheInfo for Role");
+	}
+	}
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassAccess.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassAccess.java
new file mode 100644
index 0000000..e70bffb
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassAccess.java
@@ -0,0 +1,223 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.cadi.config.Config;
+import org.onap.aaf.cadi.routing.GreatCircle;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.Env;
+import org.onap.aaf.misc.env.util.Split;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Cluster.Builder;
+import com.datastax.driver.core.policies.DCAwareRoundRobinPolicy;
+import com.datastax.driver.core.policies.TokenAwarePolicy;
+
+public class CassAccess {
+	public static final String KEYSPACE = "authz";
+	public static final String CASSANDRA_CLUSTERS = "cassandra.clusters";
+	public static final String CASSANDRA_CLUSTERS_PORT = "cassandra.clusters.port";
+	public static final String CASSANDRA_CLUSTERS_USER_NAME = "cassandra.clusters.user";
+	public static final String CASSANDRA_CLUSTERS_PASSWORD = "cassandra.clusters.password";
+	public static final String CASSANDRA_RESET_EXCEPTIONS = "cassandra.reset.exceptions";
+	private static final List<Resettable> resetExceptions = new ArrayList<Resettable>();
+	public static final String ERR_ACCESS_MSG = "Accessing Backend";
+	private static Builder cb = null;
+
+	/**
+	 * To create DCAwareRoundRobing Policy:
+	 * 	 Need Properties
+	 * 		LATITUDE (or AFT_LATITUDE)
+	 * 		LONGITUDE (or AFT_LONGITUDE)
+	 * 		CASSANDRA CLUSTERS with additional information:
+	 * 			machine:DC:lat:long,machine:DC:lat:long
+	 * @param env
+	 * @param prefix
+	 * @return
+	 * @throws APIException
+	 * @throws IOException
+	 */
+
+//	@SuppressWarnings("deprecation")
+	public static synchronized Cluster cluster(Env env, String prefix) throws APIException, IOException {
+		if(cb == null) {
+			String pre;
+			if(prefix==null) {
+				pre="";
+			} else {
+				env.info().log("Cassandra Connection for ",prefix);
+				pre = prefix+'.';
+			}
+			cb = Cluster.builder();
+			String str = env.getProperty(pre+CASSANDRA_CLUSTERS_PORT,env.getProperty(CASSANDRA_CLUSTERS_PORT,"9042"));
+			if(str!=null) {
+				env.init().log("Cass Port = ",str );
+				cb.withPort(Integer.parseInt(str));
+			}
+			str = env.getProperty(pre+CASSANDRA_CLUSTERS_USER_NAME,env.getProperty(CASSANDRA_CLUSTERS_USER_NAME,null));
+			if(str!=null) {
+				env.init().log("Cass User = ",str );
+				String epass = env.getProperty(pre + CASSANDRA_CLUSTERS_PASSWORD,env.getProperty(CASSANDRA_CLUSTERS_PASSWORD,null));
+				if(epass==null) {
+					throw new APIException("No Password configured for " + str);
+				}
+				//TODO Figure out way to ensure Decryptor setting in AuthzEnv
+				if(env instanceof AuthzEnv) {
+					cb.withCredentials(str,((AuthzEnv)env).decrypt(epass,true));
+				} else {
+					cb.withCredentials(str, env.decryptor().decrypt(epass));
+				}
+			}
+	
+			str = env.getProperty(pre+CASSANDRA_RESET_EXCEPTIONS,env.getProperty(CASSANDRA_RESET_EXCEPTIONS,null));
+			if(str!=null) {
+				env.init().log("Cass ResetExceptions = ",str );
+				for(String ex : Split.split(',', str)) {
+					resetExceptions.add(new Resettable(env,ex));
+				}
+			}
+	
+			str = env.getProperty(Config.CADI_LATITUDE);
+			Double lat = str!=null?Double.parseDouble(str):null;
+			str = env.getProperty(Config.CADI_LONGITUDE);
+			Double lon = str!=null?Double.parseDouble(str):null;
+			if(lat == null || lon == null) {
+				throw new APIException(Config.CADI_LATITUDE + " and/or " + Config.CADI_LONGITUDE + " are not set");
+			}
+			
+			env.init().printf("Service Latitude,Longitude = %f,%f",lat,lon);
+			
+			str = env.getProperty(pre+CASSANDRA_CLUSTERS,env.getProperty(CASSANDRA_CLUSTERS,"localhost"));
+			env.init().log("Cass Clusters = ",str );
+			String[] machs = Split.split(',', str);
+			String[] cpoints = new String[machs.length];
+			String bestDC = null;
+			int numInBestDC = 1;
+			double mlat, mlon,temp,distance = Double.MAX_VALUE;
+			for(int i=0;i<machs.length;++i) {
+				String[] minfo = Split.split(':',machs[i]);
+				if(minfo.length>0) {
+					cpoints[i]=minfo[0];
+				}
+				
+				if(minfo.length>3) {
+					if(minfo[1].equals(bestDC)) {
+						++numInBestDC;
+					} else {
+						// Calc closest DC with Great Circle
+						mlat = Double.parseDouble(minfo[2]);
+						mlon = Double.parseDouble(minfo[3]);
+						// Note: GreatCircle Distance is always >= 0.0 (not negative)
+						if((temp=GreatCircle.calc(lat, lon, mlat, mlon)) < distance) {
+							distance = temp;
+							if(bestDC==null || !bestDC.equals(minfo[1])) {
+								bestDC = minfo[1];
+								numInBestDC = 1;
+							}
+						}
+					}
+				}
+			}
+			
+			cb.addContactPoints(cpoints);
+			
+			if(bestDC!=null) {
+				// 8/26/2016 Management has determined that Accuracy is preferred over speed in bad situations
+				// Local DC Aware Load Balancing appears to have the highest normal performance, with the best
+				// Degraded Accuracy
+				DCAwareRoundRobinPolicy dcrrPolicy = DCAwareRoundRobinPolicy.builder()
+					.withLocalDc(bestDC)
+					.withUsedHostsPerRemoteDc(numInBestDC)
+					.build();
+//				cb.withLoadBalancingPolicy(new DCAwareRoundRobinPolicy(
+//						bestDC, numInBestDC, true /*allow LocalDC to look at other DCs for LOCAL_QUORUM */));
+				cb.withLoadBalancingPolicy(new TokenAwarePolicy(dcrrPolicy));
+				env.init().printf("Cassandra configured for DCAwareRoundRobinPolicy with best DC at %s with emergency remote of up to %d node(s)"
+					,bestDC, numInBestDC);
+			} else {
+				env.init().printf("Cassandra is using Default Policy, which is not DC aware");
+			}
+		}
+		return cb.build();
+	}
+	
+	private static class Resettable {
+		private Class<? extends Exception> cls;
+		private List<String> messages;
+		
+		@SuppressWarnings("unchecked")
+		public Resettable(Env env, String propData) throws APIException {
+			if(propData!=null && propData.length()>1) {
+				String[] split = Split.split(':', propData);
+				if(split.length>0) {
+					try {
+						cls = (Class<? extends Exception>)Class.forName(split[0]);
+					} catch (ClassNotFoundException e) {
+						throw new APIException("Declared Cassandra Reset Exception, " + propData + ", cannot be ClassLoaded");
+					}
+				}
+				if(split.length>1) {
+					messages=new ArrayList<String>();
+					for(int i=1;i<split.length;++i) {
+						String str = split[i];
+						int start = str.startsWith("\"")?1:0;
+						int end = str.length()-(str.endsWith("\"")?1:0);
+						messages.add(split[i].substring(start, end));
+					}
+				} else {
+					messages = null;
+				}
+			}
+		}
+		
+		public boolean matches(Exception ex) {
+			if(ex.getClass().equals(cls)) {
+				if(messages!=null) {
+					String msg = ex.getMessage();
+					for(String m : messages) {
+						if(msg.contains(m)) {
+							return true;
+						}
+					}
+				}
+			}
+			return false;
+		}
+	}
+	
+	public static final boolean isResetException(Exception e) {
+		if(e==null) {
+			return true;
+		}
+		for(Resettable re : resetExceptions) {
+			if(re.matches(e)) {
+				return true;
+			}
+		}
+		return false;
+	}
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassDAOImpl.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassDAOImpl.java
new file mode 100644
index 0000000..bd6d086
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassDAOImpl.java
@@ -0,0 +1,348 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.lang.reflect.Field;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.TransStore;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.ConsistencyLevel;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.ResultSetFuture;
+
+/**
+ * AbsCassDAO
+ *
+ * Deal with the essentials of Interaction with Cassandra DataStore for all Cassandra DAOs
+ *
+ * @author Jonathan
+ *
+ * @param <DATA>
+ */
+public class CassDAOImpl<TRANS extends TransStore,DATA> extends AbsCassDAO<TRANS, DATA> implements DAO<TRANS,DATA> {
+	public static final String USER_NAME = "__USER_NAME__";
+	protected static final String CREATE_SP = "CREATE ";
+	protected static final String UPDATE_SP = "UPDATE ";
+	protected static final String DELETE_SP = "DELETE ";
+	protected static final String SELECT_SP = "SELECT ";
+
+	protected final String C_TEXT = getClass().getSimpleName() + " CREATE";
+	protected final String R_TEXT = getClass().getSimpleName() + " READ";
+	protected final String U_TEXT = getClass().getSimpleName() + " UPDATE";
+	protected final String D_TEXT = getClass().getSimpleName() + " DELETE";
+	private String table;
+	
+	protected final ConsistencyLevel readConsistency,writeConsistency;
+	
+	// Setteable only by CachedDAO
+	protected Cached<?, ?> cache;
+
+	/**
+	 * A Constructor from the originating Cluster.  This DAO will open the Session at need,
+	 * and shutdown the session when "close()" is called.
+	 *
+	 * @param cluster
+	 * @param keyspace
+	 * @param dataClass
+	 */
+	public CassDAOImpl(TRANS trans, String name, Cluster cluster, String keyspace, Class<DATA> dataClass, String table, ConsistencyLevel read, ConsistencyLevel write) {
+		super(trans, name, cluster,keyspace,dataClass);
+		this.table = table;
+		readConsistency = read;
+		writeConsistency = write;
+	}
+	
+	/**
+	 * A Constructor to share Session with other DAOs.
+	 *
+	 * This method get the Session and Cluster information from the calling DAO, and won't
+	 * touch the Session on closure.
+	 *
+	 * @param aDao
+	 * @param dataClass
+	 */
+	public CassDAOImpl(TRANS trans, String name, AbsCassDAO<TRANS,?> aDao, Class<DATA> dataClass, String table, ConsistencyLevel read, ConsistencyLevel write) {
+		super(trans, name, aDao,dataClass);
+		this.table = table;
+		readConsistency = read;
+		writeConsistency = write;
+	}
+
+	protected PSInfo createPS;
+	protected PSInfo readPS;
+	protected PSInfo updatePS;
+	protected PSInfo deletePS;
+	protected boolean async=false;
+
+	public void async(boolean bool) {
+		async = bool;
+	}
+
+	public final String[] setCRUD(TRANS trans, String table, Class<?> dc,Loader<DATA> loader) {
+		return setCRUD(trans, table, dc, loader, -1);
+	}
+	
+	public final String[] setCRUD(TRANS trans, String table, Class<?> dc,Loader<DATA> loader, int max) {
+				Field[] fields = dc.getDeclaredFields();
+				int end = max>=0 & max<fields.length?max:fields.length;
+				// get keylimit from a non-null Loader
+				int keylimit = loader.keylimit();
+			
+				StringBuilder sbfc = new StringBuilder();
+				StringBuilder sbq = new StringBuilder();
+				StringBuilder sbwc = new StringBuilder();
+				StringBuilder sbup = new StringBuilder();
+			
+				if(keylimit>0) {
+					for(int i=0;i<end;++i) {
+						if(i>0) {
+							sbfc.append(',');
+							sbq.append(',');
+							if(i<keylimit) {
+								sbwc.append(" AND ");
+							}
+						}
+						sbfc.append(fields[i].getName());
+						sbq.append('?');
+						if(i>=keylimit) {
+							if(i>keylimit) {
+								sbup.append(',');
+							}
+							sbup.append(fields[i].getName());
+							sbup.append("=?");
+						}
+						if(i<keylimit) {
+							sbwc.append(fields[i].getName());
+							sbwc.append("=?");
+						}
+					}
+			
+					createPS = new PSInfo(trans, "INSERT INTO " + table + " ("+ sbfc +") VALUES ("+ sbq +");",loader,writeConsistency);
+			
+					readPS = new PSInfo(trans, "SELECT " + sbfc + " FROM " + table + " WHERE " + sbwc + ';',loader,readConsistency);
+			
+					// Note: UPDATES can't compile if there are no fields besides keys... Use "Insert"
+					if(sbup.length()==0) {
+						updatePS = createPS; // the same as an insert
+					} else {
+						updatePS = new PSInfo(trans, "UPDATE " + table + " SET " + sbup + " WHERE " + sbwc + ';',loader,writeConsistency);
+					}
+			
+					deletePS = new PSInfo(trans, "DELETE FROM " + table + " WHERE " + sbwc + ';',loader,writeConsistency);
+				}
+				return new String[] {sbfc.toString(), sbq.toString(), sbup.toString(), sbwc.toString()};
+			}
+
+	public void replace(CRUD crud, PSInfo psInfo) {
+		switch(crud) {
+			case create: createPS = psInfo; break;
+			case read:   readPS = psInfo; break;
+			case update: updatePS = psInfo; break;
+			case delete: deletePS = psInfo; break;
+		}
+	}
+
+	public void disable(CRUD crud) {
+		switch(crud) {
+			case create: createPS = null; break;
+			case read:   readPS = null; break;
+			case update: updatePS = null; break;
+			case delete: deletePS = null; break;
+		}
+	}
+
+	
+	/**
+	 * Given a DATA object, extract the individual elements from the Data into an Object Array for the
+	 * execute element.
+	 */
+	public Result<DATA> create(TRANS trans, DATA data)  {
+		if(createPS==null) {
+			Result.err(Result.ERR_NotImplemented,"Create is disabled for %s",getClass().getSimpleName());
+		}
+		if(async) /*ResultSetFuture */ {
+			Result<ResultSetFuture> rs = createPS.execAsync(trans, C_TEXT, data);
+			if(rs.notOK()) {
+				return Result.err(rs);
+			}
+		} else {
+			Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);
+			if(rs.notOK()) {
+				return Result.err(rs);
+			}
+		}
+		wasModified(trans, CRUD.create, data);
+		return Result.ok(data);
+	}
+
+	/**
+	 * Read the Unique Row associated with Full Keys
+	 */
+	public Result<List<DATA>> read(TRANS trans, DATA data) {
+		if(readPS==null) {
+			Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());
+		}
+		return readPS.read(trans, R_TEXT, data);
+	}
+
+	public Result<List<DATA>> read(TRANS trans, Object ... key) {
+		if(readPS==null) {
+			Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());
+		}
+		return readPS.read(trans, R_TEXT, key);
+	}
+	
+	public Result<DATA> readPrimKey(TRANS trans, Object ... key) {
+		if(readPS==null) {
+			Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());
+		}
+		Result<List<DATA>> rld = readPS.read(trans, R_TEXT, key);
+		if(rld.isOK()) {
+			if(rld.isEmpty()) {
+				return Result.err(Result.ERR_NotFound,rld.details);
+			} else {
+				return Result.ok(rld.value.get(0));
+			}
+		} else {
+			return Result.err(rld);
+		}
+	}
+
+	public Result<Void> update(TRANS trans, DATA data) {
+		return update(trans, data, async);
+	}
+
+	public Result<Void> update(TRANS trans, DATA data, boolean async) {
+		if(updatePS==null) {
+			Result.err(Result.ERR_NotImplemented,"Update is disabled for %s",getClass().getSimpleName());
+		}
+		if(async)/* ResultSet rs =*/ {
+			Result<ResultSetFuture> rs = updatePS.execAsync(trans, U_TEXT, data);
+			if(rs.notOK()) {
+				return Result.err(rs);
+			}
+		} else {
+			Result<ResultSet> rs = updatePS.exec(trans, U_TEXT, data);
+			if(rs.notOK()) {
+				return Result.err(rs);
+			}
+		}
+		
+		wasModified(trans, CRUD.update, data);
+		return Result.ok();
+	}
+
+	// This method Sig for Cached...
+	public Result<Void> delete(TRANS trans, DATA data, boolean reread) {
+		if(deletePS==null) {
+			Result.err(Result.ERR_NotImplemented,"Delete is disabled for %s",getClass().getSimpleName());
+		}
+		// Since Deleting will be stored off, for possible re-constitution, need the whole thing
+		if(reread) {
+			Result<List<DATA>> rd = read(trans,data);
+			if(rd.notOK()) {
+				return Result.err(rd);
+			}
+			if(rd.isEmpty()) {
+				return Result.err(Status.ERR_NotFound,"Not Found");
+			}
+			for(DATA d : rd.value) { 
+				if(async) {
+					Result<ResultSetFuture> rs = deletePS.execAsync(trans, D_TEXT, d);
+					if(rs.notOK()) {
+						return Result.err(rs);
+					}
+				} else {
+					Result<ResultSet> rs = deletePS.exec(trans, D_TEXT, d);
+					if(rs.notOK()) {
+						return Result.err(rs);
+					}
+				}
+				wasModified(trans, CRUD.delete, d);
+			}
+		} else {
+			if(async)/* ResultSet rs =*/ {
+				Result<ResultSetFuture> rs = deletePS.execAsync(trans, D_TEXT, data);
+				if(rs.notOK()) {
+					return Result.err(rs);
+				}
+			} else {
+				Result<ResultSet> rs = deletePS.exec(trans, D_TEXT, data);
+				if(rs.notOK()) {
+					return Result.err(rs);
+				}
+			}
+			wasModified(trans, CRUD.delete, data);
+		}
+		return Result.ok();
+	}
+	
+	public final Object[] keyFrom(DATA data) {
+		return createPS.keyFrom(data);
+	}
+
+	@Override
+	public String table() {
+		return table;
+	}
+	
+	public static final String CASS_READ_CONSISTENCY="cassandra.readConsistency";
+	public static final String CASS_WRITE_CONSISTENCY="cassandra.writeConsistency";
+	protected static ConsistencyLevel readConsistency(AuthzTrans trans, String table) {
+		String prop = trans.getProperty(CASS_READ_CONSISTENCY+'.'+table);
+		if(prop==null) {
+			prop = trans.getProperty(CASS_READ_CONSISTENCY);
+			if(prop==null) {
+				return ConsistencyLevel.ONE; // this is Cassandra Default
+			}
+		}
+		return ConsistencyLevel.valueOf(prop);
+	}
+
+	protected static ConsistencyLevel writeConsistency(AuthzTrans trans, String table) {
+		String prop = trans.getProperty(CASS_WRITE_CONSISTENCY+'.'+table);
+		if(prop==null) {
+			prop = trans.getProperty(CASS_WRITE_CONSISTENCY);
+			if(prop==null) {
+				return ConsistencyLevel.ONE; // this is Cassandra Default\
+			}
+		}
+		return ConsistencyLevel.valueOf(prop);
+	}
+
+	public static DataInputStream toDIS(ByteBuffer bb) {
+		byte[] b = bb.array();
+		return new DataInputStream(
+			new ByteArrayInputStream(b,bb.position(),bb.limit())
+		);
+	}
+
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO.java
new file mode 100644
index 0000000..70db430
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO.java
@@ -0,0 +1,44 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.Trans;
+
+
+/**
+ * DataAccessObject Interface
+ *
+ * Extend the ReadOnly form (for Get), and add manipulation methods
+ *
+ * @author Jonathan
+ *
+ * @param <DATA>
+ */
+public interface DAO<TRANS extends Trans,DATA> extends DAO_RO<TRANS,DATA> {
+	public Result<DATA> create(TRANS trans, DATA data);
+	public Result<Void> update(TRANS trans, DATA data);
+	// In many cases, the data has been correctly read first, so we shouldn't read again
+	// Use reread=true if you are using DATA with only a Key
+	public Result<Void> delete(TRANS trans, DATA data, boolean reread);
+	public Object[] keyFrom(DATA data);
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAOException.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAOException.java
new file mode 100644
index 0000000..207576e
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAOException.java
@@ -0,0 +1,51 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+public class DAOException extends Exception {
+
+	/**
+	 * 
+	 */
+	private static final long serialVersionUID = 1527904125585539823L;
+
+//    // TODO -   enum in result class == is our intended design, currently the DAO layer does not use Result<RV> so we still use these for now
+//    public final static DAOException RoleNotFoundDAOException = new DAOException("RoleNotFound");
+//    public final static DAOException PermissionNotFoundDAOException = new DAOException("PermissionNotFound");
+//    public final static DAOException UserNotFoundDAOException = new DAOException("UserNotFound");
+
+    public DAOException() {
+	}
+
+	public DAOException(String message) {
+		super(message);
+	}
+
+	public DAOException(Throwable cause) {
+		super(cause);
+	}
+
+	public DAOException(String message, Throwable cause) {
+		super(message, cause);
+	}
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO_RO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO_RO.java
new file mode 100644
index 0000000..4bffb5f
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO_RO.java
@@ -0,0 +1,70 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+import java.util.List;
+
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.Trans;
+
+/**
+ * DataAccessObject - ReadOnly
+ * 
+ * It is useful to have a ReadOnly part of the interface for CachedDAO
+ * 
+ * Normal DAOs will implement full DAO
+ * 
+ * @author Jonathan
+ *
+ * @param <DATA>
+ */
+public interface DAO_RO<TRANS extends Trans,DATA> {
+	/**
+	 * Get a List of Data given Key of Object Array
+	 * @param objs
+	 * @return
+	 * @throws DAOException
+	 */
+	public Result<List<DATA>> read(TRANS trans, Object ... key);
+
+	/**
+	 * Get a List of Data given Key of DATA Object
+	 * @param trans
+	 * @param key
+	 * @return
+	 * @throws DAOException
+	 */
+	public Result<List<DATA>> read(TRANS trans, DATA key);
+
+	/**
+	 * close DAO
+	 */
+	public void close(TRANS trans);
+
+	/**
+	 * Return name of referenced Data
+	 * @return
+	 */
+	public String table();
+
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Loader.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Loader.java
new file mode 100644
index 0000000..485eabc
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Loader.java
@@ -0,0 +1,214 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import com.datastax.driver.core.Row;
+
+public abstract class Loader<DATA> {
+	private int keylimit;
+	public Loader(int keylimit) {
+		this.keylimit = keylimit;
+	}
+	
+	public int keylimit() {
+		return keylimit;
+	}
+	
+	protected abstract DATA load(DATA data, Row row);
+	protected abstract void key(DATA data, int idx, Object[] obj);
+	protected abstract void body(DATA data, int idx, Object[] obj);
+
+	public final Object[] extract(DATA data, int size, CassDAOImpl.CRUD type) {
+		Object[] rv=null;
+		switch(type) {
+			case delete:
+				rv = new Object[keylimit()];
+				key(data,0,rv);
+				break;
+			case update:
+				rv = new Object[size];
+				body(data,0,rv);
+				int body = size-keylimit();
+				if(body>0) {
+				    key(data,body,rv);
+				}
+				break;
+			default:
+				rv = new Object[size];
+				key(data,0,rv);
+				if(size>keylimit()) {
+				    body(data,keylimit(),rv);
+				}
+				break;
+		}
+		return rv;
+	}
+	
+	public static void writeString(DataOutputStream os, String s) throws IOException {
+		if(s==null) {
+			os.writeInt(-1);
+		} else {
+			switch(s.length()) {
+				case 0:
+					os.writeInt(0);
+					break;
+				default:
+					byte[] bytes = s.getBytes();
+					os.writeInt(bytes.length);
+					os.write(bytes);
+			}
+		}
+	}
+	
+	
+	/**
+	 * We use bytes here to set a Maximum
+	 * 
+	 * @param is
+	 * @param MAX
+	 * @return
+	 * @throws IOException
+	 */
+	public static String readString(DataInputStream is, byte[] _buff) throws IOException {
+		int l = is.readInt();
+		byte[] buff = _buff;
+		switch(l) {
+			case -1: return null;
+			case  0: return "";
+			default:
+				// Cover case where there is a large string, without always allocating a large buffer.
+				if(l>buff.length) {
+				    buff = new byte[l];
+				}
+				is.read(buff,0,l);
+				return new String(buff,0,l);
+		}
+	}
+
+	/**
+	 * Write a set with proper sizing
+	 * 
+	 * Note: at the moment, this is just String.  Probably can develop system where types
+	 * are supported too... but not now.
+	 * 
+	 * @param os
+	 * @param set
+	 * @throws IOException
+	 */
+	public static void writeStringSet(DataOutputStream os, Collection<String> set) throws IOException {
+		if(set==null) {
+			os.writeInt(-1);
+		} else {
+			os.writeInt(set.size());
+			for(String s : set) {
+				writeString(os, s);
+			}
+		}
+
+	}
+	
+	public static Set<String> readStringSet(DataInputStream is, byte[] buff) throws IOException {
+		int l = is.readInt();
+		if(l<0) {
+		    return null;
+		}
+		Set<String> set = new HashSet<String>(l);
+		for(int i=0;i<l;++i) {
+			set.add(readString(is,buff));
+		}
+		return set;
+	}
+	
+	public static List<String> readStringList(DataInputStream is, byte[] buff) throws IOException {
+		int l = is.readInt();
+		if(l<0) {
+		    return null;
+		}
+		List<String> list = new ArrayList<String>(l);
+		for(int i=0;i<l;++i) {
+			list.add(Loader.readString(is,buff));
+		}
+		return list;
+	}
+
+	/** 
+	 * Write a map
+	 * @param os
+	 * @param map
+	 * @throws IOException
+	 */
+	public static void writeStringMap(DataOutputStream os, Map<String,String> map) throws IOException {
+		if(map==null) {
+			os.writeInt(-1);
+		} else {
+			Set<Entry<String, String>> es = map.entrySet();
+			os.writeInt(es.size());
+			for(Entry<String,String> e : es) {
+				writeString(os, e.getKey());
+				writeString(os, e.getValue());
+			}
+		}
+
+	}
+
+	public static Map<String,String> readStringMap(DataInputStream is, byte[] buff) throws IOException {
+		int l = is.readInt();
+		if(l<0) {
+		    return null;
+		}
+		Map<String,String> map = new HashMap<String,String>(l);
+		for(int i=0;i<l;++i) {
+			String key = readString(is,buff);
+			map.put(key,readString(is,buff));
+		}
+		return map;
+	}
+	public static void writeHeader(DataOutputStream os, int magic, int version) throws IOException {
+		os.writeInt(magic);
+		os.writeInt(version);
+	}
+	
+	public static int readHeader(DataInputStream is, final int magic, final int version) throws IOException {
+		if(is.readInt()!=magic) {
+		    throw new IOException("Corrupted Data Stream");
+		}
+		int v = is.readInt();
+		if(version<0 || v>version) {
+		    throw new IOException("Unsupported Data Version: " + v);
+		}
+		return v;
+	}
+
+}
+
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Streamer.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Streamer.java
new file mode 100644
index 0000000..c40d74f
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Streamer.java
@@ -0,0 +1,31 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+
+public interface Streamer<DATA> {
+	public abstract void marshal(DATA data, DataOutputStream os) throws IOException;
+	public abstract void unmarshal(DATA data, DataInputStream is) throws IOException;
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Touchable.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Touchable.java
new file mode 100644
index 0000000..c00c104
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Touchable.java
@@ -0,0 +1,26 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao;
+
+public interface Touchable {
+	 // Or make all DAOs accept list of CIDAOs...
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCertDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCertDAO.java
new file mode 100644
index 0000000..9526bf2
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCertDAO.java
@@ -0,0 +1,54 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cached;
+
+import java.util.List;
+
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.CachedDAO;
+import org.onap.aaf.auth.dao.cass.CertDAO;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+
+public class CachedCertDAO extends CachedDAO<AuthzTrans, CertDAO, CertDAO.Data> {
+	public CachedCertDAO(CertDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
+		super(dao, info, CertDAO.CACHE_SEG, expiresIn);
+	}
+	
+	/**
+	 * Pass through Cert ID Lookup
+	 * 
+	 * @param trans
+	 * @param ns
+	 * @return
+	 */
+	
+	public Result<List<CertDAO.Data>> readID(AuthzTrans trans, final String id) {
+		return dao().readID(trans, id);
+	}
+	
+	public Result<List<CertDAO.Data>> readX500(AuthzTrans trans, final String x500) {
+		return dao().readX500(trans, x500);
+	}
+
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCredDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCredDAO.java
new file mode 100644
index 0000000..76fd553
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCredDAO.java
@@ -0,0 +1,66 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cached;
+
+import java.util.List;
+
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.CachedDAO;
+import org.onap.aaf.auth.dao.cass.CredDAO;
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+
+public class CachedCredDAO extends CachedDAO<AuthzTrans, CredDAO, CredDAO.Data> {
+	public CachedCredDAO(CredDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
+		super(dao, info, CredDAO.CACHE_SEG, expiresIn);
+	}
+	
+	/**
+	 * Pass through Cred Lookup
+	 * 
+	 * Unlike Role and Perm, we don't need or want to cache these elements... Only used for NS Delete.
+	 * 
+	 * @param trans
+	 * @param ns
+	 * @return
+	 */
+	public Result<List<CredDAO.Data>> readNS(AuthzTrans trans, final String ns) {
+		
+		return dao().readNS(trans, ns);
+	}
+	
+	public Result<List<CredDAO.Data>> readID(AuthzTrans trans, final String id) {
+		DAOGetter getter = new DAOGetter(trans,dao()) {
+			public Result<List<CredDAO.Data>> call() {
+				return dao().readID(trans, id);
+			}
+		};
+		
+		Result<List<CredDAO.Data>> lurd = get(trans, id, getter);
+		if(lurd.isOK() && lurd.isEmpty()) {
+			return Result.err(Status.ERR_UserNotFound,"No User Cred found");
+		}
+		return lurd;
+	}
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedNSDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedNSDAO.java
new file mode 100644
index 0000000..be86048
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedNSDAO.java
@@ -0,0 +1,33 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cached;
+
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.CachedDAO;
+import org.onap.aaf.auth.dao.cass.NsDAO;
+import org.onap.aaf.auth.env.AuthzTrans;
+
+public class CachedNSDAO extends CachedDAO<AuthzTrans, NsDAO, NsDAO.Data> {
+	public CachedNSDAO(NsDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
+		super(dao, info, NsDAO.CACHE_SEG, expiresIn);
+	}
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedPermDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedPermDAO.java
new file mode 100644
index 0000000..4cb7cf2
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedPermDAO.java
@@ -0,0 +1,124 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cached;
+
+import java.util.List;
+
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.CachedDAO;
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.dao.cass.PermDAO.Data;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+
+public class CachedPermDAO extends CachedDAO<AuthzTrans,PermDAO, PermDAO.Data> {
+
+	public CachedPermDAO(PermDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
+		super(dao, info, PermDAO.CACHE_SEG, expiresIn);
+	}
+
+	public Result<List<Data>> readNS(AuthzTrans trans, final String ns) {
+		DAOGetter getter = new DAOGetter(trans,dao()) {
+			public Result<List<Data>> call() {
+				return dao.readNS(trans, ns);
+			}
+		};
+		
+		Result<List<Data>> lurd = get(trans, ns, getter);
+		if(lurd.isOKhasData()) {
+			return lurd;
+		} else {
+			
+		}
+//		if(getter.result==null) {
+//			if(lurd==null) {
+				return Result.err(Status.ERR_PermissionNotFound,"No Permission found - " + lurd.details);
+//			} else {
+//				return Result.ok(lurd);
+//			}
+//		}
+//		return getter.result;
+	}
+
+	public Result<List<Data>> readChildren(AuthzTrans trans, final String ns, final String type) {
+		return dao().readChildren(trans,ns,type);
+	}
+
+	/**
+	 * 
+	 * @param trans
+	 * @param ns
+	 * @param type
+	 * @return
+	 */
+	public Result<List<Data>> readByType(AuthzTrans trans, final String ns, final String type) {
+		DAOGetter getter = new DAOGetter(trans,dao()) {
+			public Result<List<Data>> call() {
+				return dao.readByType(trans, ns, type);
+			}
+		};
+		
+		// Note: Can reuse index1 here, because there is no name collision versus response
+		Result<List<Data>> lurd = get(trans, ns+'|'+type, getter);
+		if(lurd.isOK() && lurd.isEmpty()) {
+			return Result.err(Status.ERR_PermissionNotFound,"No Permission found");
+		}
+		return lurd;
+	}
+	
+	/**
+	 * Add desciption to this permission
+	 * 
+	 * @param trans
+	 * @param ns
+	 * @param type
+	 * @param instance
+	 * @param action
+	 * @param description
+	 * @return
+	 */
+	public Result<Void> addDescription(AuthzTrans trans, String ns, String type, 
+			String instance, String action, String description) {
+		//TODO Invalidate?
+		return dao().addDescription(trans, ns, type, instance, action, description);
+	}
+	
+	public Result<Void> addRole(AuthzTrans trans, PermDAO.Data perm, RoleDAO.Data role) {
+		Result<Void> rv = dao().addRole(trans,perm,role.encode());
+		if(trans.debug().isLoggable())
+			trans.debug().log("Adding",role.encode(),"to", perm, "with CachedPermDAO.addRole");
+		invalidate(trans,perm);
+		return rv;
+	}
+
+	public Result<Void> delRole(AuthzTrans trans, Data perm, RoleDAO.Data role) {
+		Result<Void> rv = dao().delRole(trans,perm,role.encode());
+		if(trans.debug().isLoggable())
+			trans.debug().log("Removing",role.encode(),"from", perm, "with CachedPermDAO.delRole");
+		invalidate(trans,perm);
+		return rv;
+	}
+
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedRoleDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedRoleDAO.java
new file mode 100644
index 0000000..5fac680
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedRoleDAO.java
@@ -0,0 +1,106 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cached;
+
+import java.util.List;
+
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.CachedDAO;
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.dao.cass.RoleDAO.Data;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+
+public class CachedRoleDAO extends CachedDAO<AuthzTrans,RoleDAO, RoleDAO.Data> {
+	public CachedRoleDAO(RoleDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
+		super(dao, info, RoleDAO.CACHE_SEG, expiresIn);
+	}
+
+	public Result<List<Data>> readNS(AuthzTrans trans, final String ns) {
+		DAOGetter getter = new DAOGetter(trans,dao()) {
+			public Result<List<Data>> call() {
+				return dao.readNS(trans, ns);
+			}
+		};
+		
+		Result<List<Data>> lurd = get(trans, ns, getter);
+		if(lurd.isOK() && lurd.isEmpty()) {
+			return Result.err(Status.ERR_RoleNotFound,"No Role found");
+		}
+		return lurd;
+	}
+
+	public Result<List<Data>> readName(AuthzTrans trans, final String name) {
+		DAOGetter getter = new DAOGetter(trans,dao()) {
+			public Result<List<Data>> call() {
+				return dao().readName(trans, name);
+			}
+		};
+		
+		Result<List<Data>> lurd = get(trans, name, getter);
+		if(lurd.isOK() && lurd.isEmpty()) {
+			return Result.err(Status.ERR_RoleNotFound,"No Role found");
+		}
+		return lurd;
+	}
+
+	public Result<List<Data>> readChildren(AuthzTrans trans, final String ns, final String name) {
+		// At this point, I'm thinking it's better not to try to cache "*" results
+		// Data probably won't be accurate, and adding it makes every update invalidate most of the cache
+		// Jonathan 2/4/2014
+		return dao().readChildren(trans,ns,name);
+	}
+
+	public Result<Void> addPerm(AuthzTrans trans, RoleDAO.Data rd, PermDAO.Data perm) {
+		Result<Void> rv = dao().addPerm(trans,rd,perm);
+		if(trans.debug().isLoggable())
+			trans.debug().log("Adding",perm,"to", rd, "with CachedRoleDAO.addPerm");
+		invalidate(trans, rd);
+		return rv;
+	}
+
+	public Result<Void> delPerm(AuthzTrans trans, RoleDAO.Data rd, PermDAO.Data perm) {
+		Result<Void> rv = dao().delPerm(trans,rd,perm);
+		if(trans.debug().isLoggable())
+			trans.debug().log("Removing",perm,"from", rd, "with CachedRoleDAO.addPerm");
+		invalidate(trans, rd);
+		return rv;
+	}
+	
+	/**
+	 * Add description to this role
+	 * 
+	 * @param trans
+	 * @param ns
+	 * @param name
+	 * @param description
+	 * @return
+	 */
+	public Result<Void> addDescription(AuthzTrans trans, String ns, String name, String description) {
+		//TODO Invalidate?
+		return dao().addDescription(trans, ns, name, description);
+
+	}
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedUserRoleDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedUserRoleDAO.java
new file mode 100644
index 0000000..dce2bea
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedUserRoleDAO.java
@@ -0,0 +1,115 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cached;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.CachedDAO;
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.dao.cass.UserRoleDAO;
+import org.onap.aaf.auth.dao.cass.UserRoleDAO.Data;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.Slot;
+
+public class CachedUserRoleDAO extends CachedDAO<AuthzTrans,UserRoleDAO, UserRoleDAO.Data> {
+	private Slot transURSlot;
+
+	public CachedUserRoleDAO(UserRoleDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
+		super(dao, info, UserRoleDAO.CACHE_SEG, expiresIn);
+		transURSlot = dao.transURSlot;
+	}
+
+	/**
+	 * Special Case.  
+	 * User Roles by User are very likely to be called many times in a Transaction, to validate "May User do..."
+	 * Pull result, and make accessible by the Trans, which is always keyed by User.
+	 * @param trans
+	 * @param user
+	 * @return
+	 */
+	public Result<List<Data>> readByUser(AuthzTrans trans, final String user) {
+		DAOGetter getter = new DAOGetter(trans,dao()) {
+			public Result<List<Data>> call() {
+				// If the call is for THIS user, and it exists, get from TRANS, add to TRANS if not.
+				if(user!=null && user.equals(trans.user())) {
+					Result<List<Data>> transLD = trans.get(transURSlot,null);
+					if(transLD==null ) {
+						transLD = dao.readByUser(trans, user);
+					}
+					return transLD;
+				} else {
+					return dao.readByUser(trans, user);
+				}
+			}
+		};
+		Result<List<Data>> lurd = get(trans, user, getter);
+		if(lurd.isOK() && lurd.isEmpty()) {
+			return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for [%s]",user);
+		}
+		return lurd;
+	}
+
+	
+	public Result<List<Data>> readByRole(AuthzTrans trans, final String role) {
+		DAOGetter getter = new DAOGetter(trans,dao()) {
+			public Result<List<Data>> call() {
+				return dao.readByRole(trans, role);
+			}
+		};
+		Result<List<Data>> lurd = get(trans, role, getter);
+		if(lurd.isOK() && lurd.isEmpty()) {
+			return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for [%s]",role);
+		}
+		return lurd;
+	}
+
+	public Result<List<UserRoleDAO.Data>> readUserInRole(final AuthzTrans trans, final String user, final String role) {
+		DAOGetter getter = new DAOGetter(trans,dao()) {
+			public Result<List<Data>> call() {
+				if(user.equals(trans.user())) {
+					Result<List<Data>> rrbu = readByUser(trans, user);
+					if(rrbu.isOK()) {
+						List<Data> ld = new ArrayList<Data>(1);
+						for(Data d : rrbu.value) {
+							if(d.role.equals(role)) {
+								ld.add(d);
+								break;
+							}
+						}
+						return Result.ok(ld).emptyList(ld.isEmpty());
+					} else {
+						return rrbu;
+					}
+				}
+				return dao.readByUserRole(trans, user, role);
+			}
+		};
+		Result<List<Data>> lurd = get(trans, keyFromObjs(user,role), getter);
+		if(lurd.isOK() && lurd.isEmpty()) {
+			return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for role [%s] and user [%s]",role,user);
+		}
+		return lurd;
+	}
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/.gitignore b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/.gitignore
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/.gitignore
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ApprovalDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ApprovalDAO.java
new file mode 100644
index 0000000..284d0a8
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ApprovalDAO.java
@@ -0,0 +1,277 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.List;
+import java.util.UUID;
+
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.Env;
+import org.onap.aaf.misc.env.TimeTaken;
+import org.onap.aaf.misc.env.util.Chrono;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.exceptions.DriverException;
+
+
+public class ApprovalDAO extends CassDAOImpl<AuthzTrans,ApprovalDAO.Data> {
+	public static final String PENDING = "pending";
+	public static final String DENIED = "denied";
+	public static final String APPROVED = "approved";
+	
+	private static final String TABLE = "approval";
+	private static final String TABLELOG = "approved";
+	private HistoryDAO historyDAO;
+	private PSInfo psByUser, psByApprover, psByTicket, psByStatus;
+
+	
+	public ApprovalDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
+		super(trans, ApprovalDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        historyDAO = new HistoryDAO(trans, this);
+		init(trans);
+	}
+
+
+	public ApprovalDAO(AuthzTrans trans, HistoryDAO hDAO) {
+		super(trans, ApprovalDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		historyDAO=hDAO;
+		init(trans);
+	}
+
+	private static final int KEYLIMIT = 1;
+	public static class Data {
+		public UUID   id;
+        public UUID   ticket;
+		public String user;
+		public String approver;
+		public String type;
+		public String status;
+		public String memo;
+		public String operation;
+		public Date last_notified;
+		public Date updated;
+	}
+	
+	private static class ApprovalLoader extends Loader<Data> {
+		public static final ApprovalLoader deflt = new ApprovalLoader(KEYLIMIT);
+		
+		public ApprovalLoader(int keylimit) {
+			super(keylimit);
+		}
+		
+		@Override
+		public Data load(Data data, Row row) {
+			data.id = row.getUUID(0);
+			data.ticket = row.getUUID(1);
+			data.user = row.getString(2);
+			data.approver = row.getString(3);
+			data.type = row.getString(4);
+			data.status = row.getString(5);
+			data.memo = row.getString(6);
+			data.operation = row.getString(7);
+			data.last_notified = row.getTimestamp(8);
+			// This is used to get "WRITETIME(STATUS)" from Approval, which gives us an "updated" 
+			if(row.getColumnDefinitions().size()>9) {
+				// Rows reported in MicroSeconds
+				data.updated = new Date(row.getLong(9)/1000);
+			}
+			return data;
+		}
+
+		@Override
+		protected void key(Data data, int idx, Object[] obj) {
+			obj[idx]=data.id;
+		}
+
+		@Override
+		protected void body(Data data, int _idx, Object[] obj) {
+		    	int idx = _idx;
+			obj[idx]=data.ticket;
+			obj[++idx]=data.user;
+			obj[++idx]=data.approver;
+			obj[++idx]=data.type;
+			obj[++idx]=data.status;
+			obj[++idx]=data.memo;
+			obj[++idx]=data.operation;
+			obj[++idx]=data.last_notified;
+		}
+	}	
+	
+	private void init(AuthzTrans trans) {
+		String[] helpers = setCRUD(trans, TABLE, Data.class, ApprovalLoader.deflt,9);
+		psByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + 
+				" WHERE user = ?", new ApprovalLoader(1) {
+			@Override
+			protected void key(Data data, int idx, Object[] obj) {
+				obj[idx]=data.user;
+			}
+		}, readConsistency);
+		
+		psByApprover = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + 
+				" WHERE approver = ?", new ApprovalLoader(1) {
+			@Override
+			protected void key(Data data, int idx, Object[] obj) {
+				obj[idx]=data.approver;
+			}
+		}, readConsistency);
+
+		psByTicket = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + 
+				" WHERE ticket = ?", new ApprovalLoader(1) {
+			@Override
+			protected void key(Data data, int idx, Object[] obj) {
+				obj[idx]=data.ticket;
+			}
+		}, readConsistency);
+
+		psByStatus = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + 
+				" WHERE status = ?", new ApprovalLoader(1) {
+			@Override
+			protected void key(Data data, int idx, Object[] obj) {
+				obj[idx]=data.status;
+			}
+		}, readConsistency);
+
+
+	}
+
+	/* (non-Javadoc)
+	 * @see org.onap.aaf.auth.dao.CassDAOImpl#create(com.att.inno.env.TransStore, java.lang.Object)
+	 */
+	@Override
+	public Result<Data> create(AuthzTrans trans, Data data) {
+		// If ID is not set (typical), create one.
+		if(data.id==null) {
+			data.id = Chrono.dateToUUID(System.currentTimeMillis());
+		}
+		Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);
+		if(rs.notOK()) {
+			return Result.err(rs);
+		}
+		return Result.ok(data);	
+	}
+
+
+	public Result<List<ApprovalDAO.Data>> readByUser(AuthzTrans trans, String user) {
+		return psByUser.read(trans, R_TEXT, new Object[]{user});
+	}
+
+	public Result<List<ApprovalDAO.Data>> readByApprover(AuthzTrans trans, String approver) {
+		return psByApprover.read(trans, R_TEXT, new Object[]{approver});
+	}
+
+	public Result<List<ApprovalDAO.Data>> readByTicket(AuthzTrans trans, UUID ticket) {
+		return psByTicket.read(trans, R_TEXT, new Object[]{ticket});
+	}
+
+	public Result<List<ApprovalDAO.Data>> readByStatus(AuthzTrans trans, String status) {
+		return psByStatus.read(trans, R_TEXT, new Object[]{status});
+	}	
+
+	/* (non-Javadoc)
+	 * @see org.onap.aaf.auth.dao.CassDAOImpl#delete(com.att.inno.env.TransStore, java.lang.Object, boolean)
+	 */
+	@Override
+	public Result<Void> delete(AuthzTrans trans, Data data, boolean reread) {
+		if(reread || data.status == null) { // if Memo is empty, likely not full record
+			Result<ResultSet> rd = readPS.exec(trans, R_TEXT, data);
+			if(rd.notOK()) {
+				return Result.err(rd);
+			}
+			ApprovalLoader.deflt.load(data, rd.value.one());
+		}
+		if("approved".equals(data.status) || "denied".equals(data.status)) { 
+			StringBuilder sb = new StringBuilder("BEGIN BATCH\n");
+			sb.append("INSERT INTO ");
+			sb.append(TABLELOG);
+			sb.append(" (id,user,approver,type,status,memo,operation) VALUES (");
+			sb.append(data.id);
+			sb.append(",'"); sb.append(data.user);
+			sb.append("','"); sb.append(data.approver);
+			sb.append("','"); sb.append(data.type);
+			sb.append("','"); sb.append(data.status);
+			sb.append("','"); sb.append(data.memo.replace("'", "''"));
+			sb.append("','"); sb.append(data.operation);
+			sb.append("');\n");
+			sb.append("DELETE FROM ");
+			sb.append(TABLE);
+			sb.append(" WHERE id=");
+			sb.append(data.id);
+			sb.append(";\n");
+			sb.append("APPLY BATCH;\n");
+			TimeTaken tt = trans.start("DELETE APPROVAL",Env.REMOTE);
+			try {
+				if(async) {
+					getSession(trans).executeAsync(sb.toString());
+					return Result.ok();
+				} else {
+					getSession(trans).execute(sb.toString());
+					return Result.ok();
+				}
+			} catch (DriverException | APIException | IOException e) {
+				reportPerhapsReset(trans,e);
+				return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+			} finally {
+				tt.done();
+			}
+		} else {
+			return super.delete(trans, data, false);
+		}
+
+	}
+
+
+	/**
+     * Log Modification statements to History
+     *
+     * @param modified        which CRUD action was done
+     * @param data            entity data that needs a log entry
+     * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
+     */
+    @Override
+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+    	boolean memo = override.length>0 && override[0]!=null;
+    	boolean subject = override.length>1 && override[1]!=null;
+
+        HistoryDAO.Data hd = HistoryDAO.newInitedData();
+        hd.user = trans.user();
+        hd.action = modified.name();
+        hd.target = TABLE;
+        hd.subject = subject?override[1]:data.user + "|" + data.approver;
+        hd.memo = memo
+                ? String.format("%s by %s", override[0], hd.user)
+                : (modified.name() + "d approval for " + data.user);
+        // Detail?
+        // Reconstruct?
+        if(historyDAO.create(trans, hd).status!=Status.OK) {
+        	trans.error().log("Cannot log to History");
+        }
+    }
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ArtiDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ArtiDAO.java
new file mode 100644
index 0000000..391b55b
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ArtiDAO.java
@@ -0,0 +1,303 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.onap.aaf.auth.dao.Bytification;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.dao.Streamer;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.util.Chrono;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Row;
+
+/**
+ * CredDAO manages credentials. 
+ * @author Jonathan
+ * Date: 7/19/13
+ */
+public class ArtiDAO extends CassDAOImpl<AuthzTrans,ArtiDAO.Data> {
+    public static final String TABLE = "artifact";
+    
+    private HistoryDAO historyDAO;
+    private PSInfo psByMechID,psByMachine, psByNs;
+	
+    public ArtiDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
+        super(trans, ArtiDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        init(trans);
+    }
+
+    public ArtiDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) {
+        super(trans, ArtiDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        historyDAO = hDao;
+        init(trans);
+    }
+
+    public static final int KEYLIMIT = 2;
+	public static class Data implements Bytification {
+		public String       			mechid;
+		public String       			machine;
+        private Set<String>      		type;
+        public String					sponsor;
+        public String					ca;
+        public String					dir;
+        public String					ns;
+        public String					os_user;
+        public String					notify;
+        public Date      				expires;
+        public int						renewDays;
+        public Set<String>				sans;
+        
+//      // Getters
+		public Set<String> type(boolean mutable) {
+			if (type == null) {
+				type = new HashSet<String>();
+			} else if (mutable && !(type instanceof HashSet)) {
+				type = new HashSet<String>(type);
+			}
+			return type;
+		}
+
+		public Set<String> sans(boolean mutable) {
+			if (sans == null) {
+				sans = new HashSet<String>();
+			} else if (mutable && !(sans instanceof HashSet)) {
+				sans = new HashSet<String>(sans);
+			}
+			return sans;
+		}
+
+		@Override
+		public ByteBuffer bytify() throws IOException {
+			ByteArrayOutputStream baos = new ByteArrayOutputStream();
+			ArtifactLoader.deflt.marshal(this,new DataOutputStream(baos));
+			return ByteBuffer.wrap(baos.toByteArray());
+		}
+		
+		@Override
+		public void reconstitute(ByteBuffer bb) throws IOException {
+			ArtifactLoader.deflt.unmarshal(this, toDIS(bb));
+		}
+
+		public String toString() {
+			return mechid + ' ' + machine + ' ' + Chrono.dateTime(expires);
+		}
+    }
+
+    private static class ArtifactLoader extends Loader<Data> implements Streamer<Data>{
+		public static final int MAGIC=95829343;
+    	public static final int VERSION=1;
+    	public static final int BUFF_SIZE=48; // Note: 
+
+    	public static final ArtifactLoader deflt = new ArtifactLoader(KEYLIMIT);
+    	public ArtifactLoader(int keylimit) {
+            super(keylimit);
+        }
+
+    	@Override
+        public Data load(Data data, Row row) {
+            data.mechid = row.getString(0);
+            data.machine = row.getString(1);
+            data.type = row.getSet(2, String.class);
+            data.sponsor = row.getString(3);
+            data.ca = row.getString(4);
+            data.dir = row.getString(5);
+            data.ns = row.getString(6);
+            data.os_user = row.getString(7);
+            data.notify = row.getString(8);
+            data.expires = row.getTimestamp(9);
+            data.renewDays = row.getInt(10);
+            data.sans = row.getSet(11, String.class);
+            return data;
+        }
+
+        @Override
+        protected void key(final Data data, final int idx, Object[] obj) {
+        	int i;
+            obj[i=idx] = data.mechid;
+            obj[++i] = data.machine;
+        }
+
+        @Override
+        protected void body(final Data data, final int idx, Object[] obj) {
+            int i;
+            obj[i=idx] = data.type;
+            obj[++i] = data.sponsor;
+            obj[++i] = data.ca;
+            obj[++i] = data.dir;
+            obj[++i] = data.ns;
+            obj[++i] = data.os_user;
+            obj[++i] = data.notify;
+            obj[++i] = data.expires;
+            obj[++i] = data.renewDays;
+            obj[++i] = data.sans;
+        }
+
+		@Override
+		public void marshal(Data data, DataOutputStream os) throws IOException {
+			writeHeader(os,MAGIC,VERSION);
+			writeString(os, data.mechid);
+			writeString(os, data.machine);
+			os.writeInt(data.type.size());
+			for(String s : data.type) {
+				writeString(os, s);
+			}
+			writeString(os, data.sponsor);
+			writeString(os, data.ca);
+			writeString(os, data.dir);
+			writeString(os, data.ns);
+			writeString(os, data.os_user);
+			writeString(os, data.notify);
+			os.writeLong(data.expires==null?-1:data.expires.getTime());
+			os.writeInt(data.renewDays);
+			if(data.sans!=null) {
+				os.writeInt(data.sans.size());
+				for(String s : data.sans) {
+					writeString(os, s);
+				}
+			} else {
+				os.writeInt(0);
+			}
+		}
+
+		@Override
+		public void unmarshal(Data data, DataInputStream is) throws IOException {
+			/*int version = */readHeader(is,MAGIC,VERSION);
+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+			byte[] buff = new byte[BUFF_SIZE];
+			data.mechid = readString(is,buff);
+			data.machine = readString(is,buff);
+			int size = is.readInt();
+			data.type = new HashSet<String>(size);
+			for(int i=0;i<size;++i) {
+				data.type.add(readString(is,buff));
+			}
+			data.sponsor = readString(is,buff);
+			data.ca = readString(is,buff);
+			data.dir = readString(is,buff);
+			data.ns = readString(is,buff);
+			data.os_user = readString(is,buff);
+			data.notify = readString(is,buff);
+			long l = is.readLong();
+			data.expires = l<0?null:new Date(l);
+			data.renewDays = is.readInt();
+			size = is.readInt();
+			data.sans = new HashSet<String>(size);
+			for(int i=0;i<size;++i) {
+				data.sans.add(readString(is,buff));
+			}
+		}
+    }
+
+    private void init(AuthzTrans trans) {
+        // Set up sub-DAOs
+        if(historyDAO==null) {
+        	historyDAO = new HistoryDAO(trans,this);
+        }
+        
+        String[] helpers = setCRUD(trans, TABLE, Data.class, ArtifactLoader.deflt);
+
+		psByMechID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + 
+				" WHERE mechid = ?", new ArtifactLoader(1) {
+			@Override
+			protected void key(Data data, int idx, Object[] obj) {
+				obj[idx]=data.type;
+			}
+		},readConsistency);
+
+		psByMachine = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + 
+				" WHERE machine = ?", new ArtifactLoader(1) {
+			@Override
+			protected void key(Data data, int idx, Object[] obj) {
+				obj[idx]=data.type;
+			}
+		},readConsistency);
+
+		psByNs = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + 
+				" WHERE ns = ?", new ArtifactLoader(1) {
+			@Override
+			protected void key(Data data, int idx, Object[] obj) {
+				obj[idx]=data.type;
+			}
+		},readConsistency);
+
+}
+    
+	
+    public Result<List<Data>> readByMechID(AuthzTrans trans, String mechid) {
+		return psByMechID.read(trans, R_TEXT, new Object[]{mechid});
+	}
+
+	public Result<List<ArtiDAO.Data>> readByMachine(AuthzTrans trans, String machine) {
+		return psByMachine.read(trans, R_TEXT, new Object[]{machine});
+	}
+
+	public Result<List<org.onap.aaf.auth.dao.cass.ArtiDAO.Data>> readByNs(AuthzTrans trans, String ns) {
+		return psByNs.read(trans, R_TEXT, new Object[]{ns});
+	}
+
+	/**
+     * Log Modification statements to History
+     *
+     * @param modified        which CRUD action was done
+     * @param data            entity data that needs a log entry
+     * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
+     */
+    @Override
+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+    	boolean memo = override.length>0 && override[0]!=null;
+    	boolean subject = override.length>1 && override[1]!=null;
+
+        HistoryDAO.Data hd = HistoryDAO.newInitedData();
+        hd.user = trans.user();
+        hd.action = modified.name();
+        hd.target = TABLE;
+        hd.subject = subject?override[1]: data.mechid;
+        hd.memo = memo
+                ? String.format("%s by %s", override[0], hd.user)
+                : String.format("%sd %s for %s",modified.name(),data.mechid,data.machine);
+        // Detail?
+   		if(modified==CRUD.delete) {
+        			try {
+        				hd.reconstruct = data.bytify();
+        			} catch (IOException e) {
+        				trans.error().log(e,"Could not serialize CredDAO.Data");
+        			}
+        		}
+
+        if(historyDAO.create(trans, hd).status!=Status.OK) {
+        	trans.error().log("Cannot log to History");
+        }
+    }
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheInfoDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheInfoDAO.java
new file mode 100644
index 0000000..e47e935
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheInfoDAO.java
@@ -0,0 +1,464 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+
+import org.onap.aaf.auth.dao.AbsCassDAO;
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.cadi.CadiException;
+import org.onap.aaf.cadi.SecuritySetter;
+import org.onap.aaf.cadi.client.Future;
+import org.onap.aaf.cadi.client.Rcli;
+import org.onap.aaf.cadi.client.Retryable;
+import org.onap.aaf.cadi.http.HMangr;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.Env;
+import org.onap.aaf.misc.env.TimeTaken;
+import org.onap.aaf.misc.env.Trans;
+
+import com.datastax.driver.core.BoundStatement;
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.PreparedStatement;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.exceptions.DriverException;
+
+public class CacheInfoDAO extends CassDAOImpl<AuthzTrans,CacheInfoDAO.Data> implements CIDAO<AuthzTrans> {
+
+	private static final String TABLE = "cache";
+	public static final Map<String,Date[]> info = new ConcurrentHashMap<String,Date[]>();
+
+	private static CacheUpdate cacheUpdate;
+	
+	// Hold current time stamps from Tables
+	private final Date startTime;
+	private PreparedStatement psCheck;
+	
+	public CacheInfoDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+		super(trans, CacheInfoDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		startTime = new Date();
+		init(trans);
+	}
+
+	public CacheInfoDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) throws APIException, IOException {
+		super(trans, CacheInfoDAO.class.getSimpleName(),aDao,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		startTime = new Date();
+		init(trans);
+	}
+
+
+    //////////////////////////////////////////
+    // Data Definition, matches Cassandra DM
+    //////////////////////////////////////////
+    private static final int KEYLIMIT = 2;
+	/**
+     * @author Jonathan
+     */
+	public static class Data {
+		public Data() {
+			name = null;
+			touched = null;
+		}
+		public Data(String name, int seg) {
+			this.name = name;
+			this.seg = seg;
+			touched = null;
+		}
+		
+		public String		name;
+		public int			seg;
+		public Date			touched;
+    }
+
+    private static class InfoLoader extends Loader<Data> {
+    	public static final InfoLoader dflt = new InfoLoader(KEYLIMIT);
+    	
+		public InfoLoader(int keylimit) {
+			super(keylimit);
+		}
+		
+		@Override
+		public Data load(Data data, Row row) {
+			// Int more efficient
+			data.name = row.getString(0);
+			data.seg = row.getInt(1);
+			data.touched = row.getTimestamp(2);
+			return data;
+		}
+
+		@Override
+		protected void key(Data data, int _idx, Object[] obj) {
+		    	int idx = _idx;
+
+			obj[idx]=data.name;
+			obj[++idx]=data.seg;
+		}
+
+		@Override
+		protected void body(Data data, int idx, Object[] obj) {
+			obj[idx]=data.touched;
+		}
+    }
+    
+	public static<T extends Trans> void startUpdate(AuthzEnv env, HMangr hman, SecuritySetter<HttpURLConnection> ss, String ip, int port) {
+		if(cacheUpdate==null) {
+			Thread t= new Thread(cacheUpdate = new CacheUpdate(env,hman,ss, ip,port),"CacheInfo Update Thread");
+			t.setDaemon(true);
+			t.start();
+		}
+	}
+
+	public static<T extends Trans> void stopUpdate() {
+		if(cacheUpdate!=null) {
+			cacheUpdate.go=false;
+		}
+	}
+
+	private final static class CacheUpdate extends Thread {
+		public static BlockingQueue<Transfer> notifyDQ = new LinkedBlockingQueue<Transfer>(2000);
+
+		private static final String VOID_CT="application/Void+json;q=1.0;charset=utf-8;version=2.0,application/json;q=1.0;version=2.0,*/*;q=1.0";
+		private AuthzEnv env;
+		private HMangr hman;
+		private SecuritySetter<HttpURLConnection> ss;
+		private final String authority;
+		public boolean go = true;
+		
+		public CacheUpdate(AuthzEnv env, HMangr hman, SecuritySetter<HttpURLConnection> ss, String ip, int port) {
+			this.env = env;
+			this.hman = hman;
+			this.ss = ss;
+			
+			this.authority = ip+':'+port;
+		}
+		
+		private static class Transfer {
+			public String table;
+			public int segs[];
+			public Transfer(String table, int[] segs)  {
+				this.table = table;
+				this.segs = segs;
+			}
+		}
+		private class CacheClear extends Retryable<Integer> {
+			public int total=0;
+			private AuthzTrans trans;
+			private String type;
+			private String segs;
+			
+			public CacheClear(AuthzTrans trans) {
+				this.trans = trans;
+			}
+
+			public void set(Entry<String, IntHolder> es) {
+				type = es.getKey();
+				segs = es.getValue().toString();
+			}
+			
+		@Override
+			public Integer code(Rcli<?> client) throws APIException, CadiException {
+				URI to = client.getURI();
+				if(!to.getAuthority().equals(authority)) {
+					Future<Void> f = client.delete("/mgmt/cache/"+type+'/'+segs,VOID_CT);
+					if(f.get(hman.readTimeout())) {
+					    ++total;
+					} else {
+					    trans.error().log("Error During AAF Peer Notify",f.code(),f.body());
+					}
+				}
+				return total;
+			}
+		}
+		
+		private class IntHolder {
+			private int[] raw;
+			HashSet<Integer> set;
+			
+			public IntHolder(int ints[]) {
+				raw = ints;
+				set = null;
+			}
+			public void add(int[] ints) {
+				if(set==null) {
+					set = new HashSet<Integer>();
+					
+					for(int i=0;i<raw.length;++i) {
+						set.add(raw[i]);
+					}
+				}
+				for(int i=0;i<ints.length;++i) {
+					set.add(ints[i]);
+				}
+			}
+
+			@Override
+			public String toString() {
+				StringBuilder sb = new StringBuilder();
+				boolean first = true;
+				if(set==null) {
+					for(int i : raw) {
+						if(first) {
+							first=false;
+						} else {
+							sb.append(',');
+						}
+						sb.append(i);
+					}
+				} else {
+					for(Integer i : set) {
+						if(first) {
+							first=false;
+						} else {
+							sb.append(',');
+						}
+						sb.append(i);
+					}
+				}
+				return sb.toString();
+			}
+		}
+		
+		@Override
+		public void run() {
+			do {
+				try {
+					Transfer data = notifyDQ.poll(4,TimeUnit.SECONDS);
+					if(data==null) {
+						continue;
+					}
+					
+					int count = 0;
+					CacheClear cc = null;
+					Map<String,IntHolder> gather = null;
+					AuthzTrans trans = null;
+					long start=0;
+					// Do a block poll first
+					do {
+						if(gather==null) {
+							start = System.nanoTime();
+							trans = env.newTransNoAvg();
+							cc = new CacheClear(trans);
+							gather = new HashMap<String,IntHolder>();
+						}
+						IntHolder prev = gather.get(data.table);
+						if(prev==null) {
+							gather.put(data.table,new IntHolder(data.segs));
+						} else {
+							prev.add(data.segs);
+						}
+						// continue while there is data
+					} while((data = notifyDQ.poll())!=null);
+					if(gather!=null) {
+						for(Entry<String, IntHolder> es : gather.entrySet()) {
+							cc.set(es);
+							try {
+								if(hman.all(ss, cc, false)!=null) {
+									++count;
+								}
+							} catch (Exception e) {
+								trans.error().log(e, "Error on Cache Update");
+							}
+						}
+						if(env.debug().isLoggable()) {
+							float millis = (System.nanoTime()-start)/1000000f;
+							StringBuilder sb = new StringBuilder("Direct Cache Refresh: ");
+							sb.append("Updated ");
+							sb.append(count);
+							if(count==1) {
+								sb.append(" entry for ");
+							} else { 
+								sb.append(" entries for ");
+							}
+							int peers = count<=0?0:cc.total/count;
+							sb.append(peers);
+							sb.append(" client");
+							if(peers!=1) {
+								sb.append('s');
+							}
+							sb.append(" in ");
+							sb.append(millis);
+							sb.append("ms");
+							trans.auditTrail(0, sb, Env.REMOTE);
+							env.debug().log(sb);
+						}
+					}
+				} catch (InterruptedException e1) {
+					go = false;
+				}
+			} while(go);
+		}
+	}
+
+	private void init(AuthzTrans trans) throws APIException, IOException {
+		
+		String[] helpers = setCRUD(trans, TABLE, Data.class, InfoLoader.dflt);
+		psCheck = getSession(trans).prepare(SELECT_SP +  helpers[FIELD_COMMAS] + " FROM " + TABLE);
+
+		disable(CRUD.create);
+		disable(CRUD.delete);
+	}
+
+	/* (non-Javadoc)
+	 * @see org.onap.aaf.auth.dao.cass.CIDAO#touch(org.onap.aaf.auth.env.test.AuthzTrans, java.lang.String, int)
+	 */
+	
+	@Override
+	public Result<Void> touch(AuthzTrans trans, String name, int ... seg) {
+		/////////////
+		// Direct Service Cache Invalidation
+		/////////////
+		// ConcurrentQueues are open-ended.  We don't want any Memory leaks 
+		// Note: we keep a separate counter, because "size()" on a Linked Queue is expensive
+		if(cacheUpdate!=null) {
+			try {
+				if(!CacheUpdate.notifyDQ.offer(new CacheUpdate.Transfer(name, seg),2,TimeUnit.SECONDS)) {
+					trans.error().log("Cache Notify Queue is not accepting messages, bouncing may be appropriate" );
+				}
+			} catch (InterruptedException e) {
+				trans.error().log("Cache Notify Queue posting was interrupted" );
+			}
+		}
+
+		/////////////
+		// Table Based Cache Invalidation (original)
+		/////////////
+		// Note: Save time with multiple Sequence Touches, but PreparedStmt doesn't support IN
+		StringBuilder start = new StringBuilder("CacheInfoDAO Touch segments ");
+		start.append(name);
+		start.append(": ");
+		StringBuilder sb = new StringBuilder("BEGIN BATCH\n");
+		boolean first = true;
+		for(int s : seg) {
+			sb.append(UPDATE_SP);
+			sb.append(TABLE);
+			sb.append(" SET touched=dateof(now()) WHERE name = '");
+			sb.append(name);
+			sb.append("' AND seg = ");
+			sb.append(s);
+			sb.append(";\n");	
+			if(first) {
+				first =false;
+			} else {
+				start.append(',');
+			}
+			start.append(s);
+		}
+		sb.append("APPLY BATCH;");
+		TimeTaken tt = trans.start(start.toString(),Env.REMOTE);
+		try {
+			getSession(trans).executeAsync(sb.toString());
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		} finally {
+			tt.done();
+		}
+		return Result.ok();
+	}
+
+	/* (non-Javadoc)
+	 * @see org.onap.aaf.auth.dao.cass.CIDAO#check(org.onap.aaf.auth.env.test.AuthzTrans)
+	 */
+	@Override
+	public Result<Void> check(AuthzTrans trans) {
+		ResultSet rs;
+		TimeTaken tt = trans.start("Check Table Timestamps",Env.REMOTE);
+		try {
+			rs = getSession(trans).execute(new BoundStatement(psCheck));
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		} finally {
+			tt.done();
+		}
+		
+		String lastName = null;
+		Date[] dates = null;
+		for(Row row : rs.all()) {
+			String name = row.getString(0);
+			int seg = row.getInt(1);
+			if(!name.equals(lastName)) {
+				dates = info.get(name);
+				lastName=name;
+			}
+			if(dates==null) {
+				dates=new Date[seg+1];
+				info.put(name,dates);
+			} else if(dates.length<=seg) {
+				Date[] temp = new Date[seg+1];
+				System.arraycopy(dates, 0, temp, 0, dates.length);
+				dates = temp;
+				info.put(name, dates);
+			}
+			Date temp = row.getTimestamp(2);
+			if(dates[seg]==null || dates[seg].before(temp)) {
+				dates[seg]=temp;
+			}
+		}
+		return Result.ok();
+	}
+	
+    /* (non-Javadoc)
+	 * @see org.onap.aaf.auth.dao.cass.CIDAO#get(java.lang.String, int)
+	 */
+    @Override
+	public Date get(AuthzTrans trans, String table, int seg) {
+		Date[] dates = info.get(table);
+		if(dates==null) {
+			dates = new Date[seg+1];
+			touch(trans,table, seg);
+		} else if(dates.length<=seg) {
+			Date[] temp = new Date[seg+1];
+			System.arraycopy(dates, 0, temp, 0, dates.length);
+			dates = temp;
+		}
+		Date rv = dates[seg];
+		if(rv==null) {
+			rv=dates[seg]=startTime;
+		}
+		return rv;
+	}
+
+	@Override
+	protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+		// Do nothing
+	}
+
+}
\ No newline at end of file
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheableData.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheableData.java
new file mode 100644
index 0000000..af4b230
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheableData.java
@@ -0,0 +1,35 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import org.onap.aaf.auth.dao.Cacheable;
+import org.onap.aaf.auth.dao.Cached;
+import org.onap.aaf.auth.dao.CachedDAO;
+
+public abstract class CacheableData implements Cacheable {
+	// WARNING:  DON'T attempt to add any members here, as it will 
+	// be treated by system as fields expected in Tables
+	protected int seg(Cached<?,?> cache, Object ... fields) {
+		return cache==null?0:cache.invalidate(CachedDAO.keyFromObjs(fields));
+	}
+	
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CertDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CertDAO.java
new file mode 100644
index 0000000..a47b8c9
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CertDAO.java
@@ -0,0 +1,244 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import org.onap.aaf.auth.dao.Bytification;
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.Cached;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.dao.Streamer;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Row;
+
+/**
+ * CredDAO manages credentials. 
+ * @author Jonathan
+ * Date: 7/19/13
+ */
+public class CertDAO extends CassDAOImpl<AuthzTrans,CertDAO.Data> {
+    public static final String TABLE = "x509";
+    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
+    
+    private HistoryDAO historyDAO;
+	private CIDAO<AuthzTrans> infoDAO;
+	private PSInfo psX500,psID;
+	
+    public CertDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+        super(trans, CertDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        init(trans);
+    }
+
+    public CertDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) throws APIException, IOException {
+        super(trans, CertDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        historyDAO = hDao;
+        infoDAO = ciDao;
+        init(trans);
+    }
+    
+    public static final int KEYLIMIT = 2;
+	public static class Data extends CacheableData implements Bytification {
+    	
+        public String					ca;
+		public BigInteger 				serial;
+        public String	      			id;
+        public String					x500;
+        public String					x509;
+
+        @Override
+		public int[] invalidate(Cached<?,?> cache) {
+        	return new int[] {
+        		seg(cache,ca,serial)
+        	};
+		}
+        
+		@Override
+		public ByteBuffer bytify() throws IOException {
+			ByteArrayOutputStream baos = new ByteArrayOutputStream();
+			CertLoader.deflt.marshal(this,new DataOutputStream(baos));
+			return ByteBuffer.wrap(baos.toByteArray());
+		}
+		
+		@Override
+		public void reconstitute(ByteBuffer bb) throws IOException {
+			CertLoader.deflt.unmarshal(this, toDIS(bb));
+		}
+    }
+
+    private static class CertLoader extends Loader<Data> implements Streamer<Data>{
+		public static final int MAGIC=85102934;
+    	public static final int VERSION=1;
+    	public static final int BUFF_SIZE=48; // Note: 
+
+    	public static final CertLoader deflt = new CertLoader(KEYLIMIT);
+    	public CertLoader(int keylimit) {
+            super(keylimit);
+        }
+
+    	@Override
+        public Data load(Data data, Row row) {
+        	data.ca = row.getString(0);
+            ByteBuffer bb = row.getBytesUnsafe(1);
+            byte[] bytes = new byte[bb.remaining()];
+            bb.get(bytes);
+            data.serial = new BigInteger(bytes);
+            data.id = row.getString(2);
+            data.x500 = row.getString(3);
+            data.x509 = row.getString(4);
+            return data;
+        }
+
+        @Override
+        protected void key(Data data, int idx, Object[] obj) {
+            obj[idx] = data.ca;
+            obj[++idx] = ByteBuffer.wrap(data.serial.toByteArray());
+        }
+
+        @Override
+        protected void body(Data data, int _idx, Object[] obj) {
+        	int idx = _idx;
+
+            obj[idx] = data.id;
+            obj[++idx] = data.x500;
+            obj[++idx] = data.x509;
+
+            
+        }
+
+		@Override
+		public void marshal(Data data, DataOutputStream os) throws IOException {
+			writeHeader(os,MAGIC,VERSION);
+			writeString(os, data.id);
+			writeString(os, data.x500);
+			writeString(os, data.x509);
+			writeString(os, data.ca);
+			if(data.serial==null) {
+				os.writeInt(-1);
+			} else {
+				byte[] dsba = data.serial.toByteArray();
+				int l = dsba.length;
+				os.writeInt(l);
+				os.write(dsba,0,l);
+			}
+		}
+
+		@Override
+		public void unmarshal(Data data, DataInputStream is) throws IOException {
+			/*int version = */readHeader(is,MAGIC,VERSION);
+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+			byte[] buff = new byte[BUFF_SIZE];
+			data.id = readString(is,buff);
+			data.x500 = readString(is,buff);
+			data.x509 = readString(is,buff);
+			data.ca = readString(is,buff);
+			int i = is.readInt();
+			if(i<0) {
+				data.serial=null;
+			} else {
+				byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads
+				is.read(bytes);
+				data.serial = new BigInteger(bytes);
+			}
+		}
+    }
+    
+    public Result<List<CertDAO.Data>> read(AuthzTrans trans, Object ... key) {
+    	// Translate BigInteger to Byte array for lookup
+    	return super.read(trans, key[0],ByteBuffer.wrap(((BigInteger)key[1]).toByteArray()));
+    }
+
+    private void init(AuthzTrans trans) throws APIException, IOException {
+        // Set up sub-DAOs
+        if(historyDAO==null) {
+        	historyDAO = new HistoryDAO(trans,this);
+        }
+		if(infoDAO==null) {
+			infoDAO = new CacheInfoDAO(trans,this);
+		}
+
+		String[] helpers = setCRUD(trans, TABLE, Data.class, CertLoader.deflt);
+
+		psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+				" WHERE id = ?", CertLoader.deflt,readConsistency);
+
+		psX500 = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+				" WHERE x500 = ?", CertLoader.deflt,readConsistency);
+		
+    }
+    
+	public Result<List<Data>> readX500(AuthzTrans trans, String x500) {
+		return psX500.read(trans, R_TEXT, new Object[]{x500});
+	}
+
+	public Result<List<Data>> readID(AuthzTrans trans, String id) {
+		return psID.read(trans, R_TEXT, new Object[]{id});
+	}
+
+    /**
+     * Log Modification statements to History
+     *
+     * @param modified        which CRUD action was done
+     * @param data            entity data that needs a log entry
+     * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
+     */
+    @Override
+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+    	boolean memo = override.length>0 && override[0]!=null;
+    	boolean subject = override.length>1 && override[1]!=null;
+
+        HistoryDAO.Data hd = HistoryDAO.newInitedData();
+        hd.user = trans.user();
+        hd.action = modified.name();
+        hd.target = TABLE;
+        hd.subject = subject?override[1]: data.id;
+        hd.memo = memo
+                ? String.format("%s by %s", override[0], hd.user)
+                : (modified.name() + "d certificate info for " + data.id);
+        // Detail?
+   		if(modified==CRUD.delete) {
+        			try {
+        				hd.reconstruct = data.bytify();
+        			} catch (IOException e) {
+        				trans.error().log(e,"Could not serialize CertDAO.Data");
+        			}
+        		}
+
+        if(historyDAO.create(trans, hd).status!=Status.OK) {
+        	trans.error().log("Cannot log to History");
+        }
+        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).status!=Status.OK) {
+        	trans.error().log("Cannot touch Cert");
+        }
+    }
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CredDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CredDAO.java
new file mode 100644
index 0000000..46dc12b
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CredDAO.java
@@ -0,0 +1,258 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Date;
+import java.util.List;
+
+import org.onap.aaf.auth.dao.Bytification;
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.Cached;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.dao.Streamer;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.util.Chrono;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Row;
+
+/**
+ * CredDAO manages credentials. 
+ * @author Jonathan
+ * Date: 7/19/13
+ */
+public class CredDAO extends CassDAOImpl<AuthzTrans,CredDAO.Data> {
+    public static final String TABLE = "cred";
+    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
+	public static final int RAW = -1;
+    public static final int BASIC_AUTH = 1;
+    public static final int BASIC_AUTH_SHA256 = 2;
+    public static final int CERT_SHA256_RSA =200;
+    
+    private HistoryDAO historyDAO;
+	private CIDAO<AuthzTrans> infoDAO;
+	private PSInfo psNS;
+	private PSInfo psID;
+	
+    public CredDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+        super(trans, CredDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        init(trans);
+    }
+
+    public CredDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) throws APIException, IOException {
+        super(trans, CredDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        historyDAO = hDao;
+        infoDAO = ciDao;
+        init(trans);
+    }
+
+    public static final int KEYLIMIT = 3;
+	public static class Data extends CacheableData implements Bytification {
+    	
+		public String       			id;
+        public Integer      			type;
+        public Date      				expires;
+        public Integer					other;
+		public String					ns;
+		public String					notes;
+        public ByteBuffer				cred;  //   this is a blob in cassandra
+
+
+        @Override
+		public int[] invalidate(Cached<?,?> cache) {
+        	return new int[] {
+        		seg(cache,id) // cache is for all entities
+        	};
+		}
+        
+		@Override
+		public ByteBuffer bytify() throws IOException {
+			ByteArrayOutputStream baos = new ByteArrayOutputStream();
+			CredLoader.deflt.marshal(this,new DataOutputStream(baos));
+			return ByteBuffer.wrap(baos.toByteArray());
+		}
+		
+		@Override
+		public void reconstitute(ByteBuffer bb) throws IOException {
+			CredLoader.deflt.unmarshal(this, toDIS(bb));
+		}
+
+		public String toString() {
+			return id + ' ' + type + ' ' + Chrono.dateTime(expires);
+		}
+    }
+
+    private static class CredLoader extends Loader<Data> implements Streamer<Data>{
+		public static final int MAGIC=153323443;
+    	public static final int VERSION=1;
+    	public static final int BUFF_SIZE=48; // Note: 
+
+    	public static final CredLoader deflt = new CredLoader(KEYLIMIT);
+    	public CredLoader(int keylimit) {
+            super(keylimit);
+        }
+
+    	@Override
+        public Data load(Data data, Row row) {
+            data.id = row.getString(0);
+            data.type = row.getInt(1);    // NOTE: in datastax driver,  If the int value is NULL, 0 is returned!
+            data.expires = row.getTimestamp(2);
+            data.other = row.getInt(3);
+            data.ns = row.getString(4);     
+            data.notes = row.getString(5);
+            data.cred = row.getBytesUnsafe(6);            
+            return data;
+        }
+
+        @Override
+        protected void key(Data data, int _idx, Object[] obj) {
+	    int idx = _idx;
+
+            obj[idx] = data.id;
+            obj[++idx] = data.type;
+            obj[++idx] = data.expires;
+        }
+
+        @Override
+        protected void body(Data data, int idx, Object[] obj) {
+            int i;
+            obj[i=idx] = data.other;
+            obj[++i] = data.ns;
+            obj[++i] = data.notes;
+            obj[++i] = data.cred;
+        }
+
+		@Override
+		public void marshal(Data data, DataOutputStream os) throws IOException {
+			writeHeader(os,MAGIC,VERSION);
+			writeString(os, data.id);
+			os.writeInt(data.type);	
+			os.writeLong(data.expires==null?-1:data.expires.getTime());
+			os.writeInt(data.other==null?0:data.other);
+			writeString(os, data.ns);
+			writeString(os, data.notes);
+			if(data.cred==null) {
+				os.writeInt(-1);
+			} else {
+				int l = data.cred.limit()-data.cred.position();
+				os.writeInt(l);
+				os.write(data.cred.array(),data.cred.position(),l);
+			}
+		}
+
+		@Override
+		public void unmarshal(Data data, DataInputStream is) throws IOException {
+			/*int version = */readHeader(is,MAGIC,VERSION);
+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+			byte[] buff = new byte[BUFF_SIZE];
+			data.id = readString(is,buff);
+			data.type = is.readInt();
+			
+			long l = is.readLong();
+			data.expires = l<0?null:new Date(l);
+			data.other = is.readInt();
+			data.ns = readString(is,buff);
+			data.notes = readString(is,buff);
+			
+			int i = is.readInt();
+			if(i<0) {
+				data.cred=null;
+			} else {
+				byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads
+				is.read(bytes);
+				data.cred = ByteBuffer.wrap(bytes);
+			}
+		}
+    }
+
+    private void init(AuthzTrans trans) throws APIException, IOException {
+        // Set up sub-DAOs
+        if(historyDAO==null) {
+        	historyDAO = new HistoryDAO(trans,this);
+        }
+		if(infoDAO==null) {
+			infoDAO = new CacheInfoDAO(trans,this);
+		}
+		
+
+		String[] helpers = setCRUD(trans, TABLE, Data.class, CredLoader.deflt);
+		
+		psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+				" WHERE ns = ?", CredLoader.deflt,readConsistency);
+		
+		psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+				" WHERE id = ?", CredLoader.deflt,readConsistency);
+    }
+    
+	public Result<List<Data>> readNS(AuthzTrans trans, String ns) {
+		return psNS.read(trans, R_TEXT, new Object[]{ns});
+	}
+	
+	public Result<List<Data>> readID(AuthzTrans trans, String id) {
+		return psID.read(trans, R_TEXT, new Object[]{id});
+	}
+	
+    /**
+     * Log Modification statements to History
+     *
+     * @param modified        which CRUD action was done
+     * @param data            entity data that needs a log entry
+     * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
+     */
+    @Override
+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+    	boolean memo = override.length>0 && override[0]!=null;
+    	boolean subject = override.length>1 && override[1]!=null;
+
+        HistoryDAO.Data hd = HistoryDAO.newInitedData();
+        hd.user = trans.user();
+        hd.action = modified.name();
+        hd.target = TABLE;
+        hd.subject = subject?override[1]: data.id;
+        hd.memo = memo
+                ? String.format("%s by %s", override[0], hd.user)
+                : (modified.name() + "d credential for " + data.id);
+        // Detail?
+   		if(modified==CRUD.delete) {
+        			try {
+        				hd.reconstruct = data.bytify();
+        			} catch (IOException e) {
+        				trans.error().log(e,"Could not serialize CredDAO.Data");
+        			}
+        		}
+
+        if(historyDAO.create(trans, hd).status!=Status.OK) {
+        	trans.error().log("Cannot log to History");
+        }
+        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).status!=Status.OK) {
+        	trans.error().log("Cannot touch Cred");
+        }
+    }
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/DelegateDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/DelegateDAO.java
new file mode 100644
index 0000000..78a98e1
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/DelegateDAO.java
@@ -0,0 +1,138 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Date;
+import java.util.List;
+
+import org.onap.aaf.auth.dao.AbsCassDAO;
+import org.onap.aaf.auth.dao.Bytification;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.dao.Streamer;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Row;
+
+public class DelegateDAO extends CassDAOImpl<AuthzTrans, DelegateDAO.Data> {
+
+	public static final String TABLE = "delegate";
+	private PSInfo psByDelegate;
+	
+	public DelegateDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
+		super(trans, DelegateDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		init(trans);
+	}
+
+	public DelegateDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {
+		super(trans, DelegateDAO.class.getSimpleName(),aDao,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		init(trans);
+	}
+	
+	private static final int KEYLIMIT = 1;
+	public static class Data implements Bytification {
+		public String user;
+		public String delegate;
+		public Date expires;
+
+		@Override
+		public ByteBuffer bytify() throws IOException {
+			ByteArrayOutputStream baos = new ByteArrayOutputStream();
+			DelegateLoader.dflt.marshal(this,new DataOutputStream(baos));
+			return ByteBuffer.wrap(baos.toByteArray());
+		}
+		
+		@Override
+		public void reconstitute(ByteBuffer bb) throws IOException {
+			DelegateLoader.dflt.unmarshal(this, toDIS(bb));
+		}
+	}
+	
+	private static class DelegateLoader extends Loader<Data> implements Streamer<Data>{
+		public static final int MAGIC=0xD823ACF2;
+    	public static final int VERSION=1;
+    	public static final int BUFF_SIZE=48;
+
+		public static final DelegateLoader dflt = new DelegateLoader(KEYLIMIT);
+
+		public DelegateLoader(int keylimit) {
+			super(keylimit);
+		}
+		
+		@Override
+		public Data load(Data data, Row row) {
+			data.user = row.getString(0);
+			data.delegate = row.getString(1);
+			data.expires = row.getTimestamp(2);
+			return data;
+		}
+
+		@Override
+		protected void key(Data data, int idx, Object[] obj) {
+			obj[idx]=data.user;
+		}
+
+		@Override
+		protected void body(Data data, int _idx, Object[] obj) {
+		    	int idx = _idx;
+
+			obj[idx]=data.delegate;
+			obj[++idx]=data.expires;
+		}
+
+		@Override
+		public void marshal(Data data, DataOutputStream os) throws IOException {
+			writeHeader(os,MAGIC,VERSION);
+			writeString(os, data.user);
+			writeString(os, data.delegate);
+			os.writeLong(data.expires.getTime());
+		}
+
+		@Override
+		public void unmarshal(Data data, DataInputStream is) throws IOException {
+			/*int version = */readHeader(is,MAGIC,VERSION);
+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+			byte[] buff = new byte[BUFF_SIZE];
+			data.user = readString(is, buff);
+			data.delegate = readString(is,buff);
+			data.expires = new Date(is.readLong());
+		}
+	}	
+	
+	private void init(AuthzTrans trans) {
+		String[] helpers = setCRUD(trans, TABLE, Data.class, DelegateLoader.dflt);
+		psByDelegate = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+				" WHERE delegate = ?", new DelegateLoader(1),readConsistency);
+
+	}
+
+	public Result<List<DelegateDAO.Data>> readByDelegate(AuthzTrans trans, String delegate) {
+		return psByDelegate.read(trans, R_TEXT, new Object[]{delegate});
+	}
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/FutureDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/FutureDAO.java
new file mode 100644
index 0000000..0263e00
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/FutureDAO.java
@@ -0,0 +1,183 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.nio.ByteBuffer;
+import java.util.Date;
+import java.util.List;
+import java.util.UUID;
+
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.DAOException;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.Row;
+
+/**
+ * FutureDAO stores Construction information to create 
+ * elements at another time.
+ * 
+ * @author Jonathan
+ * 8/20/2013
+ */
+public class FutureDAO extends CassDAOImpl<AuthzTrans,FutureDAO.Data> {
+    private static final String TABLE = "future";
+	private final HistoryDAO historyDAO;
+//	private static String createString;
+	private PSInfo psByStartAndTarget;
+	
+    public FutureDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
+        super(trans, FutureDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		historyDAO = new HistoryDAO(trans, this);
+        init(trans);
+    }
+
+    public FutureDAO(AuthzTrans trans, HistoryDAO hDAO) {
+        super(trans, FutureDAO.class.getSimpleName(),hDAO, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        historyDAO=hDAO;
+        init(trans);
+    }
+
+    public static final int KEYLIMIT = 1;
+    public static class Data {
+        public UUID         id;
+        public String		target;
+        public String		memo;
+        public Date       	start;
+        public Date       	expires;
+        public ByteBuffer 	construct;  //   this is a blob in cassandra
+    }
+
+    private static class FLoader extends Loader<Data> {
+        public FLoader() {
+            super(KEYLIMIT);
+        }
+
+        public FLoader(int keylimit) {
+            super(keylimit);
+        }
+
+        @Override
+	public Data load(Data data, Row row) {
+            data.id 		= row.getUUID(0);
+            data.target		= row.getString(1);
+            data.memo       = row.getString(2);
+            data.start 		= row.getTimestamp(3);
+            data.expires 	= row.getTimestamp(4);
+            data.construct 	= row.getBytes(5);
+            return data;
+        }
+
+        @Override
+        protected void key(Data data, int idx, Object[] obj) {
+            obj[idx] = data.id;
+        }
+
+        @Override
+        protected void body(Data data, int _idx, Object[] obj) {
+	    int idx = _idx;
+
+            obj[idx] = data.target;
+            obj[++idx] = data.memo;
+            obj[++idx] = data.start;
+            obj[++idx] = data.expires;
+            obj[++idx] = data.construct;
+        }
+    }
+
+    private void init(AuthzTrans trans) {
+        // Set up sub-DAOs
+        String[] helpers = setCRUD(trans, TABLE, Data.class, new FLoader(KEYLIMIT));
+
+        // Uh, oh.  Can't use "now()" in Prepared Statements (at least at this level)
+//		createString = "INSERT INTO " + TABLE + " ("+helpers[FIELD_COMMAS] +") VALUES (now(),";
+//
+//		// Need a specialty Creator to handle the "now()"
+//		replace(CRUD.Create, new PSInfo(trans, "INSERT INTO future (" +  helpers[FIELD_COMMAS] +
+//					") VALUES(now(),?,?,?,?,?)",new FLoader(0)));
+		
+		// Other SELECT style statements... match with a local Method
+		psByStartAndTarget = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
+				" FROM future WHERE start <= ? and target = ? ALLOW FILTERING", new FLoader(2) {
+			@Override
+			protected void key(Data data, int _idx, Object[] obj) {
+			    	int idx = _idx;
+
+				obj[idx]=data.start;
+				obj[++idx]=data.target;
+			}
+		},readConsistency);
+		
+
+    }
+
+    public Result<List<Data>> readByStartAndTarget(AuthzTrans trans, Date start, String target) throws DAOException {
+		return psByStartAndTarget.read(trans, R_TEXT, new Object[]{start, target});
+	}
+
+    /**
+	 * Override create to add secondary ID to Subject in History, and create Data.ID, if it is null
+     */
+	public Result<FutureDAO.Data> create(AuthzTrans trans,	FutureDAO.Data data, String id) {
+		// If ID is not set (typical), create one.
+		if(data.id==null) {
+			StringBuilder sb = new StringBuilder(trans.user());
+			sb.append(data.target);
+			sb.append(System.currentTimeMillis());
+			data.id = UUID.nameUUIDFromBytes(sb.toString().getBytes());
+		}
+		Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);
+		if(rs.notOK()) {
+			return Result.err(rs);
+		}
+		wasModified(trans, CRUD.create, data, null, id);
+		return Result.ok(data);	
+	}
+
+	/**
+	 * Log Modification statements to History
+	 *
+	 * @param modified        which CRUD action was done
+	 * @param data            entity data that needs a log entry
+	 * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
+	 */
+	@Override
+	protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+		boolean memo = override.length>0 && override[0]!=null;
+		boolean subject = override.length>1 && override[1]!=null;
+		HistoryDAO.Data hd = HistoryDAO.newInitedData();
+	    hd.user = trans.user();
+		hd.action = modified.name();
+		hd.target = TABLE;
+		hd.subject = subject?override[1]:"";
+	    hd.memo = memo?String.format("%s by %s", override[0], hd.user):data.memo;
+	
+		if(historyDAO.create(trans, hd).status!=Status.OK) {
+	    	trans.error().log("Cannot log to History");
+		}
+	}
+    
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/HistoryDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/HistoryDAO.java
new file mode 100644
index 0000000..8e4ada1
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/HistoryDAO.java
@@ -0,0 +1,238 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.nio.ByteBuffer;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+import java.util.UUID;
+
+import org.onap.aaf.auth.dao.AbsCassDAO;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.ConsistencyLevel;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.Row;
+
+/**
+ * History
+ * 
+ * Originally written PE3617
+ * @author Jonathan
+ * 
+ * History is a special case, because we don't want Updates or Deletes...  Too likely to mess up history.
+ * 
+ * Jonathan 9-9-2013 - Found a problem with using "Prepare".  You cannot prepare anything with a "now()" in it, as
+ * it is evaluated once during the prepare, and kept.  That renders any use of "now()" pointless.  Therefore
+ * the Create function needs to be run fresh everytime.
+ * 
+ * Fixed in Cassandra 1.2.6 https://issues.apache.org/jira/browse/CASSANDRA-5616
+ *
+ */
+public class HistoryDAO extends CassDAOImpl<AuthzTrans, HistoryDAO.Data> {
+	private static final String TABLE = "history";
+
+	public static final SimpleDateFormat monthFormat = new SimpleDateFormat("yyyyMM");
+//	private static final SimpleDateFormat dayTimeFormat = new SimpleDateFormat("ddHHmmss");
+
+	private String[] helpers;
+
+	private HistLoader defLoader;
+
+	private AbsCassDAO<AuthzTrans, Data>.PSInfo readByUser, readBySubject, readByYRMN;
+
+	public HistoryDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
+		super(trans, HistoryDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);
+		init(trans);
+	}
+
+	public HistoryDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {
+		super(trans, HistoryDAO.class.getSimpleName(),aDao,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);
+		init(trans);
+	}
+
+
+	private static final int KEYLIMIT = 1;
+	public static class Data {
+		public UUID id;
+		public int	yr_mon;
+		public String user;
+		public String action;
+		public String target;
+		public String subject;
+		public String  memo;
+//		Map<String, String>  detail = null;
+//		public Map<String, String>  detail() {
+//			if(detail == null) {
+//				detail = new HashMap<String, String>();
+//			}
+//			return detail;
+//		}
+		public ByteBuffer reconstruct;
+	}
+	
+	private static class HistLoader extends Loader<Data> {
+		public HistLoader(int keylimit) {
+			super(keylimit);
+		}
+
+		@Override
+		public Data load(Data data, Row row) {
+			data.id = row.getUUID(0);
+			data.yr_mon = row.getInt(1);
+			data.user = row.getString(2);
+			data.action = row.getString(3);
+			data.target = row.getString(4);
+			data.subject = row.getString(5);
+			data.memo = row.getString(6);
+//			data.detail = row.getMap(6, String.class, String.class);
+			data.reconstruct = row.getBytes(7);
+			return data;
+		}
+
+		@Override
+		protected void key(Data data, int idx, Object[] obj) {
+			obj[idx]=data.id;
+		}
+
+		@Override
+		protected void body(Data data, int _idx, Object[] obj) {
+		    	int idx = _idx;
+			obj[idx]=data.yr_mon;
+			obj[++idx]=data.user;
+			obj[++idx]=data.action;
+			obj[++idx]=data.target;
+			obj[++idx]=data.subject;
+			obj[++idx]=data.memo;
+//			obj[++idx]=data.detail;
+			obj[++idx]=data.reconstruct;		
+		}
+	};
+	
+	private void init(AuthzTrans trans) {
+		// Loader must match fields order
+		defLoader = new HistLoader(KEYLIMIT);
+		helpers = setCRUD(trans, TABLE, Data.class, defLoader);
+
+		// Need a specialty Creator to handle the "now()"
+		// 9/9/2013 - Jonathan - Just great... now() is evaluated once on Client side, invalidating usage (what point is a now() from a long time in the past?
+		// Unless this is fixed, we're putting in non-prepared statement
+		// Solved in Cassandra.  Make sure you are running 1.2.6 Cassandra or later. https://issues.apache.org/jira/browse/CASSANDRA-5616	
+		replace(CRUD.create, new PSInfo(trans, "INSERT INTO history (" +  helpers[FIELD_COMMAS] +
+					") VALUES(now(),?,?,?,?,?,?,?)", 
+					new HistLoader(0) {
+						@Override
+						protected void key(Data data, int idx, Object[] obj) {
+						}
+					},writeConsistency)
+				);
+//		disable(CRUD.Create);
+		
+		replace(CRUD.read, new PSInfo(trans, SELECT_SP +  helpers[FIELD_COMMAS] +
+				" FROM history WHERE id = ?", defLoader,readConsistency) 
+//				new HistLoader(2) {
+//					@Override
+//					protected void key(Data data, int idx, Object[] obj) {
+//						obj[idx]=data.yr_mon;
+//						obj[++idx]=data.id;
+//					}
+//				})
+			);
+		disable(CRUD.update);
+		disable(CRUD.delete);
+		
+		readByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + 
+				" FROM history WHERE user = ?", defLoader,readConsistency);
+		readBySubject = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + 
+				" FROM history WHERE subject = ? and target = ? ALLOW FILTERING", defLoader,readConsistency);
+		readByYRMN = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + 
+				" FROM history WHERE yr_mon = ?", defLoader,readConsistency);
+		async(true); //TODO dropping messages with Async
+	}
+
+	public static Data newInitedData() {
+		Data data = new Data();
+		Date now = new Date();
+		data.yr_mon = Integer.parseInt(monthFormat.format(now));
+		// data.day_time = Integer.parseInt(dayTimeFormat.format(now));
+		return data;		
+	}
+
+	public Result<List<Data>> readByYYYYMM(AuthzTrans trans, int yyyymm) {
+		Result<ResultSet> rs = readByYRMN.exec(trans, "yr_mon", yyyymm);
+		if(rs.notOK()) {
+			return Result.err(rs);
+		}
+		return extract(defLoader,rs.value,null,dflt);
+	}
+
+	/**
+	 * Gets the history for a user in the specified year and month
+	 * year - the year in yyyy format
+	 * month -  the month in a year ...values 1 - 12
+	 **/
+	public Result<List<Data>> readByUser(AuthzTrans trans, String user, int ... yyyymm) {
+		if(yyyymm.length==0) {
+			return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");
+		}
+		Result<ResultSet> rs = readByUser.exec(trans, "user", user);
+		if(rs.notOK()) {
+			return Result.err(rs);
+		}
+		return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt);
+	}
+	
+	public Result<List<Data>> readBySubject(AuthzTrans trans, String subject, String target, int ... yyyymm) {
+		if(yyyymm.length==0) {
+			return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");
+		}
+		Result<ResultSet> rs = readBySubject.exec(trans, "subject", subject, target);
+		if(rs.notOK()) {
+			return Result.err(rs);
+		}
+		return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt);
+	}
+	
+	private class YYYYMM implements Accept<Data> {
+		private int[] yyyymm;
+		public YYYYMM(int yyyymm[]) {
+			this.yyyymm = yyyymm;
+		}
+		@Override
+		public boolean ok(Data data) {
+			int dym = data.yr_mon;
+			for(int ym:yyyymm) {
+				if(dym==ym) {
+					return true;
+				}
+			}
+			return false;
+		}
+		
+	};
+	
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/LocateDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/LocateDAO.java
new file mode 100644
index 0000000..bdf2748
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/LocateDAO.java
@@ -0,0 +1,231 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+
+import org.onap.aaf.auth.dao.AbsCassDAO;
+import org.onap.aaf.auth.dao.Bytification;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.dao.Streamer;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Row;
+
+/**
+ * LocateDAO manages credentials. 
+ * @author Jonathan
+ * Date: 10/11/17
+ */
+public class LocateDAO extends CassDAOImpl<AuthzTrans,LocateDAO.Data> {
+    public static final String TABLE = "locate";
+	private AbsCassDAO<AuthzTrans, Data>.PSInfo psName;
+    
+    public LocateDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+        super(trans, LocateDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        init(trans);
+    }
+
+    public LocateDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> adao) throws APIException, IOException {
+        super(trans, LocateDAO.class.getSimpleName(), adao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        init(trans);
+    }
+    
+    public static final int KEYLIMIT = 3;
+	public static class Data implements Bytification {
+    	
+        public String					name;
+		public String					hostname;
+		public int						port;
+		public int						major;
+		public int						minor;
+		public int						patch;
+		public int						pkg;
+		public float						latitude;
+		public float						longitude;
+		public String					protocol;
+		private Set<String>				subprotocol;
+		public UUID						port_key; // Note: Keep Port_key LAST at all times, because we shorten the UPDATE to leave Port_key Alone during reregistration.
+
+	  // Getters
+		public Set<String> subprotocol(boolean mutable) {
+			if (subprotocol == null) {
+				subprotocol = new HashSet<String>();
+			} else if (mutable && !(subprotocol instanceof HashSet)) {
+				subprotocol = new HashSet<String>(subprotocol);
+			}
+			return subprotocol;
+		}
+		
+        @Override
+		public ByteBuffer bytify() throws IOException {
+			ByteArrayOutputStream baos = new ByteArrayOutputStream();
+			LocateLoader.deflt.marshal(this,new DataOutputStream(baos));
+			return ByteBuffer.wrap(baos.toByteArray());
+		}
+		
+		@Override
+		public void reconstitute(ByteBuffer bb) throws IOException {
+			LocateLoader.deflt.unmarshal(this, toDIS(bb));
+		}
+    }
+
+    private static class LocateLoader extends Loader<Data> implements Streamer<Data>{
+		public static final int MAGIC=85102934;
+	    	public static final int VERSION=1;
+	    	public static final int BUFF_SIZE=48; // Note: 
+	
+	    	public static final LocateLoader deflt = new LocateLoader(KEYLIMIT);
+	    	public LocateLoader(int keylimit) {
+	        super(keylimit);
+        }
+
+    	@Override
+        public Data load(Data data, Row row) {
+    			data.name = row.getString(0);
+    			data.hostname = row.getString(1);
+    			data.port = row.getInt(2);
+    			data.major = row.getInt(3);
+    			data.minor = row.getInt(4);
+    			data.patch = row.getInt(5);
+    			data.pkg = row.getInt(6);
+    			data.latitude = row.getFloat(7);
+    			data.longitude = row.getFloat(8);
+    			data.protocol = row.getString(9);
+    			data.subprotocol = row.getSet(10,String.class);
+    			data.port_key = row.getUUID(11);
+            return data;
+        }
+
+        @Override
+        protected void key(Data data, int idx, Object[] obj) {
+            obj[idx] = data.name;
+            obj[++idx] = data.hostname;
+            obj[++idx] = data.port;
+        }
+
+        @Override
+        protected void body(final Data data, final int _idx, final Object[] obj) {
+        		int idx = _idx;
+            obj[idx] = data.major;
+            obj[++idx] = data.minor;
+            obj[++idx] = data.patch;
+            obj[++idx] = data.pkg;
+            obj[++idx] = data.latitude;
+            obj[++idx] = data.longitude;
+            obj[++idx] = data.protocol;
+            obj[++idx] = data.subprotocol;
+            obj[++idx] = data.port_key;
+        }
+
+		@Override
+		public void marshal(Data data, DataOutputStream os) throws IOException {
+			writeHeader(os,MAGIC,VERSION);
+			writeString(os, data.name);
+			writeString(os, data.hostname);
+			os.writeInt(data.port);
+			os.writeInt(data.major);
+			os.writeInt(data.minor);
+			os.writeInt(data.patch);
+			os.writeInt(data.pkg);
+			os.writeFloat(data.latitude);
+			os.writeFloat(data.longitude);
+			writeString(os, data.protocol);
+			if(data.subprotocol==null) {
+				os.writeInt(0);
+			} else {
+				os.writeInt(data.subprotocol.size());
+				for(String s: data.subprotocol) {
+					writeString(os,s);
+				}
+			}
+			
+			writeString(os,data.port_key==null?"":data.port_key.toString());
+		}
+
+		@Override
+		public void unmarshal(Data data, DataInputStream is) throws IOException {
+			/*int version = */readHeader(is,MAGIC,VERSION);
+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+			byte[] buff = new byte[BUFF_SIZE];
+			data.name = readString(is,buff);
+			data.hostname = readString(is,buff);
+			data.port = is.readInt();
+			data.major = is.readInt();
+			data.minor = is.readInt();
+			data.patch = is.readInt();
+			data.pkg = is.readInt();
+			data.latitude = is.readFloat();
+			data.longitude = is.readFloat();
+			data.protocol = readString(is,buff);
+			
+			int size = is.readInt();
+			data.subprotocol = new HashSet<String>(size);
+			for(int i=0;i<size;++i) {
+				data.subprotocol.add(readString(is,buff));
+			}
+			String port_key = readString(is,buff);
+			if(port_key.length()>0) {
+				data.port_key=UUID.fromString(port_key);
+			} else {
+				data.port_key = null;
+			}
+		}
+    }
+    
+    public Result<List<LocateDAO.Data>> readByName(AuthzTrans trans, String service) {
+    		return psName.read(trans, "Read By Name", new Object[] {service});
+    }
+
+    private void init(AuthzTrans trans) throws APIException, IOException {
+        // Set up sub-DAOs
+		String[] helpers = setCRUD(trans, TABLE, Data.class, LocateLoader.deflt);
+//		int lastComma = helpers[ASSIGNMENT_COMMAS].lastIndexOf(',');
+//		replace(CRUD.update,new PSInfo(trans,"UPDATE LOCATE SET " + helpers[ASSIGNMENT_COMMAS].substring(0, lastComma) +
+//				" WHERE name=? AND hostname=? AND port=?;", new LocateLoader(3),writeConsistency));
+		psName = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+				" WHERE name = ?", new LocateLoader(1),readConsistency);
+    }
+    
+    /**
+     * Log Modification statements to History
+     *
+     * @param modified        which CRUD action was done
+     * @param data            entity data that needs a log entry
+     * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
+     */
+    @Override
+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+    }
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Namespace.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Namespace.java
new file mode 100644
index 0000000..4b1ff14
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Namespace.java
@@ -0,0 +1,150 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map.Entry;
+
+import org.onap.aaf.auth.dao.Bytification;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.rserv.Pair;
+
+
+public class Namespace implements Bytification {
+	public static final int MAGIC=250935515;
+	public static final int VERSION=1;
+	public static final int BUFF_SIZE=48;
+
+	public String name;
+	public List<String> owner;
+	public List<String> admin;
+	public List<Pair<String,String>> attrib;
+	public String description;
+	public Integer type;
+	public String parent;
+	public Namespace() {}
+	
+	public Namespace(NsDAO.Data ndd) {
+		name = ndd.name;
+		description = ndd.description;
+		type = ndd.type;
+		parent = ndd.parent;
+		if(ndd.attrib!=null && !ndd.attrib.isEmpty()) {
+			attrib = new ArrayList<Pair<String,String>>();
+			for( Entry<String, String> entry : ndd.attrib.entrySet()) {
+				attrib.add(new Pair<String,String>(entry.getKey(),entry.getValue()));
+			}
+		}
+	}
+	
+	public Namespace(NsDAO.Data ndd,List<String> owner, List<String> admin) {
+		name = ndd.name;
+		this.owner = owner;
+		this.admin = admin;
+		description = ndd.description;
+		type = ndd.type;
+		parent = ndd.parent;
+		if(ndd.attrib!=null && !ndd.attrib.isEmpty()) {
+			attrib = new ArrayList<Pair<String,String>>();
+			for( Entry<String, String> entry : ndd.attrib.entrySet()) {
+				attrib.add(new Pair<String,String>(entry.getKey(),entry.getValue()));
+			}
+		}
+	}
+
+	public NsDAO.Data data() {
+		NsDAO.Data ndd = new NsDAO.Data();
+		ndd.name = name;
+		ndd.description = description;
+		ndd.parent = parent;
+		ndd.type = type;
+		return ndd;
+	}
+
+	@Override
+	public ByteBuffer bytify() throws IOException {
+		ByteArrayOutputStream baos = new ByteArrayOutputStream();
+		DataOutputStream os = new DataOutputStream(baos);
+
+		Loader.writeHeader(os,MAGIC,VERSION);
+		Loader.writeString(os, name);
+		os.writeInt(type);
+		Loader.writeStringSet(os,admin);
+		Loader.writeStringSet(os,owner);
+		Loader.writeString(os,description);
+		Loader.writeString(os,parent);
+
+		return ByteBuffer.wrap(baos.toByteArray());
+	}
+
+	@Override
+	public void reconstitute(ByteBuffer bb) throws IOException {
+		DataInputStream is = CassDAOImpl.toDIS(bb);
+		/*int version = */Loader.readHeader(is,MAGIC,VERSION);
+		// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+		
+		byte[] buff = new byte[BUFF_SIZE];
+		name = Loader.readString(is, buff);
+		type = is.readInt();
+		admin = Loader.readStringList(is,buff);
+		owner = Loader.readStringList(is,buff);
+		description = Loader.readString(is,buff);
+		parent = Loader.readString(is,buff);
+		
+	}
+
+	/* (non-Javadoc)
+	 * @see java.lang.Object#hashCode()
+	 */
+	@Override
+	public int hashCode() {
+		return name.hashCode();
+	}
+	
+
+	/* (non-Javadoc)
+	 * @see java.lang.Object#toString()
+	 */
+	@Override
+	public String toString() {
+		return name.toString();
+	}
+
+	/* (non-Javadoc)
+	 * @see java.lang.Object#equals(java.lang.Object)
+	 */
+	@Override
+	public boolean equals(Object arg0) {
+		if(arg0==null || !(arg0 instanceof Namespace)) {
+			return false;
+		}
+		return name.equals(((Namespace)arg0).name);
+	}
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsDAO.java
new file mode 100644
index 0000000..567246d
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsDAO.java
@@ -0,0 +1,560 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.onap.aaf.auth.dao.Bytification;
+import org.onap.aaf.auth.dao.Cached;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.dao.Streamer;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.Env;
+import org.onap.aaf.misc.env.TimeTaken;
+
+import java.util.Set;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.exceptions.DriverException;
+
+/**
+ * NsDAO
+ * 
+ * Data Access Object for Namespace Data
+ * 
+ * @author Jonathan
+ *
+ */
+public class NsDAO extends CassDAOImpl<AuthzTrans,NsDAO.Data> {
+	public static final String TABLE = "ns";
+	public static final String TABLE_ATTRIB = "ns_attrib";
+    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
+    public static final int ROOT = 1;
+    public static final int COMPANY=2;
+    public static final int APP = 3;
+
+	private static final String BEGIN_BATCH = "BEGIN BATCH\n";
+	private static final String APPLY_BATCH = "\nAPPLY BATCH;\n";
+	private static final String SQSCCR = "';\n";
+	private static final String SQCSQ = "','";
+    
+	private HistoryDAO historyDAO;
+	private CacheInfoDAO infoDAO;
+	private PSInfo psNS;
+
+	public NsDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+		super(trans, NsDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		init(trans);
+	}
+
+	public NsDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO iDAO) throws APIException, IOException {
+		super(trans, NsDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		historyDAO=hDAO;
+		infoDAO = iDAO;
+		init(trans);
+	}
+
+
+    //////////////////////////////////////////
+    // Data Definition, matches Cassandra DM
+    //////////////////////////////////////////
+    private static final int KEYLIMIT = 1;
+    /**
+     * Data class that matches the Cassandra Table "role"
+     * 
+     * @author Jonathan
+     */
+	public static class Data extends CacheableData implements Bytification {
+		public String		      name;
+		public int			      type;
+		public String			  description;
+		public String			  parent;
+		public Map<String,String> attrib;
+
+//		////////////////////////////////////////
+//        // Getters
+		public Map<String,String> attrib(boolean mutable) {
+			if (attrib == null) {
+				attrib = new HashMap<String,String>();
+			} else if (mutable && !(attrib instanceof HashMap)) {
+				attrib = new HashMap<String,String>(attrib);
+			}
+			return attrib;
+		}
+
+		@Override
+		public int[] invalidate(Cached<?,?> cache) {
+			return new int[] {
+				seg(cache,name)
+			};
+		}
+
+		public NsSplit split(String name) {
+			return new NsSplit(this,name);
+		}
+
+		@Override
+		public ByteBuffer bytify() throws IOException {
+			ByteArrayOutputStream baos = new ByteArrayOutputStream();
+			NSLoader.deflt.marshal(this,new DataOutputStream(baos));
+			return ByteBuffer.wrap(baos.toByteArray());
+		}
+		
+		@Override
+		public void reconstitute(ByteBuffer bb) throws IOException {
+			NSLoader.deflt.unmarshal(this,toDIS(bb));
+		}
+		
+		@Override
+		public String toString() {
+			return name;
+		}
+		
+    }
+    
+    private void init(AuthzTrans trans) throws APIException, IOException {
+        // Set up sub-DAOs
+        if(historyDAO==null) {
+	    historyDAO = new HistoryDAO(trans, this);
+	}
+        if(infoDAO==null) {
+	    infoDAO = new CacheInfoDAO(trans,this);
+	}
+
+		String[] helpers = setCRUD(trans, TABLE, Data.class, NSLoader.deflt,4/*need to skip attrib */);
+		
+		psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+				" WHERE parent = ?", new NSLoader(1),readConsistency);
+
+	}
+	
+    private static final class NSLoader extends Loader<Data> implements Streamer<Data> {
+		public static final int MAGIC=250935515;
+    	public static final int VERSION=1;
+    	public static final int BUFF_SIZE=48;
+
+    	public static final NSLoader deflt = new NSLoader(KEYLIMIT);
+    	
+		public NSLoader(int keylimit) {
+			super(keylimit);
+		}
+
+		@Override
+		public Data load(Data data, Row row) {
+			// Int more efficient
+			data.name = row.getString(0);
+			data.type = row.getInt(1);
+			data.description = row.getString(2);
+			data.parent = row.getString(3);
+			return data;
+		}
+
+		@Override
+		protected void key(Data data, int idx, Object[] obj) {
+			obj[idx]=data.name;
+		}
+
+		@Override
+		protected void body(Data data, int _idx, Object[] obj) {
+		    	int idx = _idx;
+
+			obj[idx]=data.type;
+			obj[++idx]=data.description;
+			obj[++idx]=data.parent;
+		}
+		
+		@Override
+		public void marshal(Data data, DataOutputStream os) throws IOException {
+			writeHeader(os,MAGIC,VERSION);
+			writeString(os, data.name);
+			os.writeInt(data.type);
+			writeString(os,data.description);
+			writeString(os,data.parent);
+			if(data.attrib==null) {
+				os.writeInt(-1);
+			} else {
+				os.writeInt(data.attrib.size());
+				for(Entry<String, String> es : data.attrib(false).entrySet()) {
+					writeString(os,es.getKey());
+					writeString(os,es.getValue());
+				}
+			}
+		}
+
+		@Override
+		public void unmarshal(Data data, DataInputStream is) throws IOException {
+			/*int version = */readHeader(is,MAGIC,VERSION);
+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+			
+			byte[] buff = new byte[BUFF_SIZE];
+			data.name = readString(is, buff);
+			data.type = is.readInt();
+			data.description = readString(is,buff);
+			data.parent = readString(is,buff);
+			int count = is.readInt();
+			if(count>0) {
+				Map<String, String> da = data.attrib(true);
+				for(int i=0;i<count;++i) {
+					da.put(readString(is,buff), readString(is,buff));
+				}
+			}
+		}
+
+    }
+    
+	@Override
+	public Result<Data> create(AuthzTrans trans, Data data) {
+		String ns = data.name;
+		// Ensure Parent is set
+		if(data.parent==null) {
+			return Result.err(Result.ERR_BadData, "Need parent for %s", ns);
+		}
+
+		// insert Attributes
+		StringBuilder stmt = new StringBuilder();
+		stmt.append(BEGIN_BATCH);
+		attribInsertStmts(stmt, data);
+		stmt.append(APPLY_BATCH);
+		try {
+			getSession(trans).execute(stmt.toString());
+//// TEST CODE for Exception				
+//			boolean force = true; 
+//			if(force) {
+//				throw new com.datastax.driver.core.exceptions.NoHostAvailableException(new HashMap<InetSocketAddress,Throwable>());
+////				throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"Sample Message");
+//			}
+////END TEST CODE
+
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			trans.info().log(stmt);
+			return Result.err(Result.ERR_Backend, "Backend Access");
+		}
+		return super.create(trans, data);
+	}
+
+	@Override
+	public Result<Void> update(AuthzTrans trans, Data data) {
+		String ns = data.name;
+		// Ensure Parent is set
+		if(data.parent==null) {
+			return Result.err(Result.ERR_BadData, "Need parent for %s", ns);
+		}
+
+		StringBuilder stmt = new StringBuilder();
+		stmt.append(BEGIN_BATCH);
+		try {
+			Map<String, String> localAttr = data.attrib;
+			Result<Map<String, String>> rremoteAttr = readAttribByNS(trans,ns);
+			if(rremoteAttr.notOK()) {
+				return Result.err(rremoteAttr);
+			}
+			// update Attributes
+			String str;
+			for(Entry<String, String> es : localAttr.entrySet()) {
+				str = rremoteAttr.value.get(es.getKey());
+				if(str==null || !str.equals(es.getValue())) {
+					attribUpdateStmt(stmt, ns, es.getKey(),es.getValue());
+				}
+			}
+			
+			// No point in deleting... insert overwrites...
+//			for(Entry<String, String> es : remoteAttr.entrySet()) {
+//				str = localAttr.get(es.getKey());
+//				if(str==null || !str.equals(es.getValue())) {
+//					attribDeleteStmt(stmt, ns, es.getKey());
+//				}
+//			}
+			if(stmt.length()>BEGIN_BATCH.length()) {
+				stmt.append(APPLY_BATCH);
+				getSession(trans).execute(stmt.toString());
+			}
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			trans.info().log(stmt);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		}
+
+		return super.update(trans,data);
+	}
+
+	/* (non-Javadoc)
+	 * @see org.onap.aaf.auth.dao.CassDAOImpl#read(com.att.inno.env.TransStore, java.lang.Object)
+	 */
+	@Override
+	public Result<List<Data>> read(AuthzTrans trans, Data data) {
+		Result<List<Data>> rld = super.read(trans, data);
+		
+		if(rld.isOKhasData()) {
+			for(Data d : rld.value) {
+				// Note: Map is null at this point, save time/mem by assignment
+				Result<Map<String, String>> rabn = readAttribByNS(trans,d.name);
+				if(rabn.isOK()) {
+					d.attrib = rabn.value;
+				} else {
+					return Result.err(rabn);
+				}
+			}
+		}
+		return rld;
+	}
+
+	/* (non-Javadoc)
+	 * @see org.onap.aaf.auth.dao.CassDAOImpl#read(com.att.inno.env.TransStore, java.lang.Object[])
+	 */
+	@Override
+	public Result<List<Data>> read(AuthzTrans trans, Object... key) {
+		Result<List<Data>> rld = super.read(trans, key);
+
+		if(rld.isOKhasData()) {
+			for(Data d : rld.value) {
+				// Note: Map is null at this point, save time/mem by assignment
+				Result<Map<String, String>> rabn = readAttribByNS(trans,d.name);
+				if(rabn.isOK()) {
+					d.attrib = rabn.value;
+				} else {
+					return Result.err(rabn);
+				}
+			}
+		}
+		return rld;
+	}
+
+	@Override
+	public Result<Void> delete(AuthzTrans trans, Data data, boolean reread) {
+		TimeTaken tt = trans.start("Delete NS Attributes " + data.name, Env.REMOTE);
+		try {
+			StringBuilder stmt = new StringBuilder();
+			attribDeleteAllStmt(stmt, data);
+			try {
+				getSession(trans).execute(stmt.toString());
+			} catch (DriverException | APIException | IOException e) {
+				reportPerhapsReset(trans,e);
+				trans.info().log(stmt);
+				return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+			}
+		} finally {
+			tt.done();
+		}
+		return super.delete(trans, data, reread);
+
+	}
+    
+	public Result<Map<String,String>> readAttribByNS(AuthzTrans trans, String ns) {
+		Map<String,String> map = new HashMap<String,String>();
+		TimeTaken tt = trans.start("readAttribByNS " + ns, Env.REMOTE);
+		try {
+			ResultSet rs = getSession(trans).execute("SELECT key,value FROM " 
+					+ TABLE_ATTRIB 
+					+ " WHERE ns='"
+					+ ns
+					+ "';");
+			
+			for(Iterator<Row> iter = rs.iterator();iter.hasNext(); ) {
+				Row r = iter.next();
+				map.put(r.getString(0), r.getString(1));
+			}
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		} finally {
+			tt.done();
+		}
+		return Result.ok(map);
+	}
+
+	public Result<Set<String>> readNsByAttrib(AuthzTrans trans, String key) {
+		Set<String> set = new HashSet<String>();
+		TimeTaken tt = trans.start("readNsBykey " + key, Env.REMOTE);
+		try {
+			ResultSet rs = getSession(trans).execute("SELECT ns FROM " 
+				+ TABLE_ATTRIB 
+				+ " WHERE key='"
+				+ key
+				+ "';");
+		
+			for(Iterator<Row> iter = rs.iterator();iter.hasNext(); ) {
+				Row r = iter.next();
+				set.add(r.getString(0));
+			}
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		} finally {
+			tt.done();
+		}
+		return Result.ok(set);
+	}
+
+	public Result<Void> attribAdd(AuthzTrans trans, String ns, String key, String value) {
+		try {
+			getSession(trans).execute(attribInsertStmt(new StringBuilder(),ns,key,value).toString());
+			return Result.ok();
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		}
+	}
+	
+	private StringBuilder attribInsertStmt(StringBuilder sb, String ns, String key, String value) {
+		sb.append("INSERT INTO ");
+		sb.append(TABLE_ATTRIB);
+		sb.append(" (ns,key,value) VALUES ('");
+		sb.append(ns);
+		sb.append(SQCSQ);
+		sb.append(key);
+		sb.append(SQCSQ);
+		sb.append(value);
+		sb.append("');");
+		return sb;
+	}
+
+	private StringBuilder attribUpdateStmt(StringBuilder sb, String ns, String key, String value) {
+		sb.append("UPDATE ");
+		sb.append(TABLE_ATTRIB);
+		sb.append(" set value='");
+		sb.append(value);
+		sb.append("' where ns='");
+		sb.append(ns);
+		sb.append("' AND key='");
+		sb.append(key);
+		sb.append("';");
+		return sb;
+	}
+	
+
+	public Result<Void> attribRemove(AuthzTrans trans, String ns, String key) {
+		try {
+			getSession(trans).execute(attribDeleteStmt(new StringBuilder(),ns,key).toString());
+			return Result.ok();
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		}
+	}
+	
+	private StringBuilder attribDeleteStmt(StringBuilder stmt, String ns, String key) {
+		stmt.append("DELETE FROM ");
+		stmt.append(TABLE_ATTRIB);
+		stmt.append(" WHERE ns='");
+		stmt.append(ns);
+		stmt.append("' AND key='");
+		stmt.append(key);
+		stmt.append("';");
+		return stmt;
+	}
+	
+	private void attribDeleteAllStmt(StringBuilder stmt, Data data) {
+		stmt.append("  DELETE FROM ");
+		stmt.append(TABLE_ATTRIB);
+		stmt.append(" WHERE ns='");
+		stmt.append(data.name);
+		stmt.append(SQSCCR);
+	}
+
+	private void attribInsertStmts(StringBuilder stmt, Data data) {
+		// INSERT new Attrib
+		for(Entry<String,String> es : data.attrib(false).entrySet() ) {
+			stmt.append("  ");
+			attribInsertStmt(stmt,data.name,es.getKey(),es.getValue());
+		}
+	}
+
+	/**
+	 * Add description to Namespace
+	 * @param trans
+	 * @param ns
+	 * @param description
+	 * @return
+	 */
+	public Result<Void> addDescription(AuthzTrans trans, String ns, String description) {
+		try {
+			getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '" 
+				+ description.replace("'", "''") + "' WHERE name = '" + ns + "';");
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		}
+
+		Data data = new Data();
+		data.name=ns;
+		wasModified(trans, CRUD.update, data, "Added description " + description + " to namespace " + ns, null );
+		return Result.ok();
+	}
+
+	public Result<List<Data>> getChildren(AuthzTrans trans, String parent) {
+		return psNS.read(trans, R_TEXT, new Object[]{parent});
+	}
+		
+
+    /**
+     * Log Modification statements to History
+     * 
+     * @param modified           which CRUD action was done
+     * @param data               entity data that needs a log entry
+     * @param overrideMessage    if this is specified, we use it rather than crafting a history message based on data
+     */
+    @Override
+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+    	boolean memo = override.length>0 && override[0]!=null;
+    	boolean subject = override.length>1 && override[1]!=null;
+
+        //TODO Must log history
+        HistoryDAO.Data hd = HistoryDAO.newInitedData();
+        hd.user = trans.user();
+        hd.action = modified.name();
+        hd.target = TABLE;
+        hd.subject = subject ? override[1] : data.name;
+        hd.memo = memo ? override[0] : (data.name + " was "  + modified.name() + 'd' );
+		if(modified==CRUD.delete) {
+			try {
+				hd.reconstruct = data.bytify();
+			} catch (IOException e) {
+				trans.error().log(e,"Could not serialize NsDAO.Data");
+			}
+		}
+
+        if(historyDAO.create(trans, hd).status!=Status.OK) {
+	    trans.error().log("Cannot log to History");
+	}
+        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
+	    trans.error().log("Cannot touch CacheInfo");
+	}
+    }
+
+}
\ No newline at end of file
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsSplit.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsSplit.java
new file mode 100644
index 0000000..2694c6c
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsSplit.java
@@ -0,0 +1,61 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+public class NsSplit {
+	public final String ns;
+	public final String name;
+	public final NsDAO.Data nsd;
+	
+	public NsSplit(NsDAO.Data nsd, String child) {
+		this.nsd = nsd;
+		if(child.startsWith(nsd.name)) {
+			ns = nsd.name;
+			int dot = ns.length();
+			if(dot<child.length() && child.charAt(dot)=='.') {
+    			name = child.substring(dot+1);
+			} else {
+				name="";
+			}
+		} else {
+			name=null;
+			ns = null;
+		}
+	}
+	
+	public NsSplit(String ns, String name) {
+		this.ns = ns;
+		this.name = name;
+		this.nsd = new NsDAO.Data();
+		nsd.name = ns;
+		int dot = ns.lastIndexOf('.');
+		if(dot>=0) {
+			nsd.parent = ns.substring(0, dot);
+		} else {
+			nsd.parent = ".";
+		}
+	}
+
+	public boolean isOK() {
+		return ns!=null && name !=null;
+	}
+}
\ No newline at end of file
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsType.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsType.java
new file mode 100644
index 0000000..18d5eee
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsType.java
@@ -0,0 +1,74 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+/**
+ * Defines the Type Codes in the NS Table.
+ * @author Jonathan
+ *
+ */
+public enum NsType {
+		UNKNOWN (-1),
+		DOT (0),
+		ROOT (1), 
+		COMPANY (2), 
+		APP (3), 
+		STACKED_APP (10), 
+		STACK (11);
+		
+		public final int type;
+		private NsType(int t) {
+			type = t;
+		}
+		/**
+		 * This is not the Ordinal, but the Type that is stored in NS Tables
+		 * 
+		 * @param t
+		 * @return
+		 */
+		public static NsType fromType(int t) {
+			for(NsType nst : values()) {
+				if(t==nst.type) {
+					return nst;
+				}
+			}
+			return UNKNOWN;
+		}
+		
+		/**
+		 * Use this one rather than "valueOf" to avoid Exception
+		 * @param s
+		 * @return
+		 */
+		public static NsType fromString(String s) {
+			if(s!=null) {
+				for(NsType nst : values()) {
+					if(nst.name().equals(s)) {
+						return nst;
+					}
+				}
+			}
+			return UNKNOWN;
+		}
+
+		
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/OAuthTokenDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/OAuthTokenDAO.java
new file mode 100644
index 0000000..e1375b8
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/OAuthTokenDAO.java
@@ -0,0 +1,213 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.onap.aaf.auth.dao.AbsCassDAO;
+import org.onap.aaf.auth.dao.Bytification;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.dao.Streamer;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.util.Chrono;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Row;
+
+/**
+ * CredDAO manages credentials. 
+ * @author Jonathan
+ * Date: 7/19/13
+ */
+public class OAuthTokenDAO extends CassDAOImpl<AuthzTrans,OAuthTokenDAO.Data> {
+    public static final String TABLE = "oauth_token";
+	private AbsCassDAO<AuthzTrans, Data>.PSInfo psByUser;
+    
+    public OAuthTokenDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
+        super(trans, OAuthTokenDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        init(trans);
+    }
+    
+    public OAuthTokenDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {
+    		super(trans, OAuthTokenDAO.class.getSimpleName(),aDao, Data.class, TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+    		init(trans);
+    }
+
+
+    public static final int KEYLIMIT = 1;
+	public static class Data implements Bytification {
+		public String	       			id;
+		public String					client_id;
+		public String					user;
+		public boolean					active;
+        public int						type;
+		public String					refresh;
+        public Date      				expires;
+        public long						exp_sec;
+        public String	 				content;  
+        public Set<String>	      		scopes;
+        public String					state;
+        public String					req_ip; // requesting
+
+		public Set<String> scopes(boolean mutable) {
+			if (scopes == null) {
+				scopes = new HashSet<String>();
+			} else if (mutable && !(scopes instanceof HashSet)) {
+				scopes = new HashSet<String>(scopes);
+			}
+			return scopes;
+		}
+
+		@Override
+		public ByteBuffer bytify() throws IOException {
+			ByteArrayOutputStream baos = new ByteArrayOutputStream();
+			OAuthLoader.deflt.marshal(this,new DataOutputStream(baos));
+			return ByteBuffer.wrap(baos.toByteArray());
+		}
+		
+		@Override
+		public void reconstitute(ByteBuffer bb) throws IOException {
+			OAuthLoader.deflt.unmarshal(this, toDIS(bb));
+		}
+
+		public String toString() {
+			return user.toString() + ' ' + id.toString() + ' ' + Chrono.dateTime(expires) + (active?"":"in") + "active";
+		}
+    }
+
+    private static class OAuthLoader extends Loader<Data> implements Streamer<Data>{
+		public static final int MAGIC=235677843;
+    		public static final int VERSION=1;
+	    	public static final int BUFF_SIZE=96; // Note: only used when  
+	
+	    	public static final OAuthLoader deflt = new OAuthLoader(KEYLIMIT);
+	    	public OAuthLoader(int keylimit) {
+	            super(keylimit);
+	        }
+	
+	    	@Override
+        public Data load(Data data, Row row) {
+            data.id = row.getString(0);
+            data.client_id = row.getString(1);
+            data.user = row.getString(2);
+            data.active = row.getBool(3);
+            data.type = row.getInt(4);
+            data.refresh = row.getString(5);
+            data.expires = row.getTimestamp(6);
+            data.exp_sec = row.getLong(7);
+            data.content = row.getString(8);
+            data.scopes = row.getSet(9,String.class);
+            data.state = row.getString(10);
+            data.req_ip = row.getString(11);
+            return data;
+        }
+
+        @Override
+        protected void key(final Data data, final int idx, Object[] obj) {
+            obj[idx] = data.id;
+        }
+
+        @Override
+        protected void body(final Data data, final int idx, Object[] obj) {
+            int i;
+            obj[i=idx] = data.client_id;
+            obj[++i] = data.user;
+            obj[++i] = data.active;
+            obj[++i] = data.type;
+            obj[++i] = data.refresh;
+            obj[++i] = data.expires;
+            obj[++i] = data.exp_sec;
+            obj[++i] = data.content;
+            obj[++i] = data.scopes;
+            obj[++i] = data.state;
+            obj[++i] = data.req_ip;
+        }
+
+		@Override
+		public void marshal(Data data, DataOutputStream os) throws IOException {
+			writeHeader(os,MAGIC,VERSION);
+			writeString(os, data.id);
+			writeString(os, data.client_id);
+			writeString(os, data.user);
+			os.writeBoolean(data.active);
+			os.writeInt(data.type);
+			writeString(os, data.refresh);
+			os.writeLong(data.expires==null?-1:data.expires.getTime());
+			os.writeLong(data.exp_sec);
+			writeString(os, data.content);
+			writeStringSet(os,data.scopes);
+			writeString(os, data.state);
+			writeString(os, data.req_ip);
+		}
+
+
+		@Override
+		public void unmarshal(Data data, DataInputStream is) throws IOException {
+			/*int version = */readHeader(is,MAGIC,VERSION);
+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+			byte[] buff = new byte[BUFF_SIZE]; // used only if fits
+			data.id = readString(is,buff);
+			data.client_id = readString(is,buff);
+			data.user = readString(is,buff);
+			data.active = is.readBoolean();
+			data.type = is.readInt();
+			data.refresh = readString(is,buff);
+			long l = is.readLong();
+			data.expires = l<0?null:new Date(l);
+			data.exp_sec = is.readLong();
+			data.content = readString(is,buff); // note, large strings still ok with small buffer
+			data.scopes = readStringSet(is,buff);
+			data.state = readString(is,buff);
+			data.req_ip = readString(is,buff);
+		}
+    }
+
+    private void init(AuthzTrans trans) {
+        String[] helpers = setCRUD(trans, TABLE, Data.class, OAuthLoader.deflt);
+        psByUser = new PSInfo(trans, "SELECT " + helpers[0] + " from " + TABLE + " WHERE user=?",OAuthLoader.deflt,readConsistency);
+    }
+
+	/**
+     * Log Modification statements to History
+     *
+     * @param modified        which CRUD action was done
+     * @param data            entity data that needs a log entry
+     * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
+     */
+    @Override
+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+    }
+
+	public Result<List<Data>> readByUser(AuthzTrans trans, String user) {
+		return psByUser.read(trans, "Read By User", new Object[]{user});
+	}
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/PermDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/PermDAO.java
new file mode 100644
index 0000000..860b7ea
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/PermDAO.java
@@ -0,0 +1,501 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.onap.aaf.auth.dao.Bytification;
+import org.onap.aaf.auth.dao.Cached;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.DAOException;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.dao.Streamer;
+import org.onap.aaf.auth.dao.hl.Question;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.util.Split;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.exceptions.DriverException;
+
+public class PermDAO extends CassDAOImpl<AuthzTrans,PermDAO.Data> {
+
+	public static final String TABLE = "perm";
+
+    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
+	private static final String STAR = "*";
+	
+	private final HistoryDAO historyDAO;
+	private final CacheInfoDAO infoDAO;
+	
+	private PSInfo psNS, psChildren, psByType;
+
+	public PermDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+		super(trans, PermDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		init(trans);
+		historyDAO = new HistoryDAO(trans, this);
+		infoDAO = new CacheInfoDAO(trans,this);
+	}
+
+	public PermDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {
+		super(trans, PermDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		historyDAO = hDAO;
+		infoDAO=ciDAO;
+		init(trans);
+	}
+
+
+	private static final int KEYLIMIT = 4;
+	public static class Data extends CacheableData implements Bytification {
+		public String		ns;
+		public String		type;
+		public String		instance;
+		public String		action;
+		public Set<String>  roles; 
+		public String		description;
+
+		public Data() {}
+		
+		public Data(NsSplit nss, String instance, String action) {
+			ns = nss.ns;
+			type = nss.name;
+			this.instance = instance;
+			this.action = action;
+		}
+
+		public String fullType() {
+			return ns + '.' + type;
+		}
+		
+		public String fullPerm() {
+			return ns + '.' + type + '|' + instance + '|' + action;
+		}
+
+		public String encode() {
+			return ns + '|' + type + '|' + instance + '|' + action;
+		}
+		
+		/**
+		 * Decode Perm String, including breaking into appropriate Namespace
+		 * 
+		 * @param trans
+		 * @param q
+		 * @param p
+		 * @return
+		 */
+		public static Result<Data> decode(AuthzTrans trans, Question q, String p) {
+			String[] ss = Split.splitTrim('|', p,4);
+			if(ss[2]==null) {
+				return Result.err(Status.ERR_BadData,"Perm Encodings must be separated by '|'");
+			}
+			Data data = new Data();
+			if(ss[3]==null) { // older 3 part encoding must be evaluated for NS
+				Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
+				if(nss.notOK()) {
+					return Result.err(nss);
+				}
+				data.ns=nss.value.ns;
+				data.type=nss.value.name;
+				data.instance=ss[1];
+				data.action=ss[2];
+			} else { // new 4 part encoding
+				data.ns=ss[0];
+				data.type=ss[1];
+				data.instance=ss[2];
+				data.action=ss[3];
+			}
+			return Result.ok(data);
+		}
+
+		/**
+		 * Decode Perm String, including breaking into appropriate Namespace
+		 * 
+		 * @param trans
+		 * @param q
+		 * @param p
+		 * @return
+		 */
+		public static Result<String[]> decodeToArray(AuthzTrans trans, Question q, String p) {
+			String[] ss = Split.splitTrim('|', p,4);
+			if(ss[2]==null) {
+				return Result.err(Status.ERR_BadData,"Perm Encodings must be separated by '|'");
+			}
+			
+			if(ss[3]==null) { // older 3 part encoding must be evaluated for NS
+				ss[3] = ss[2];
+				ss[2] = ss[1];
+				Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
+				if(nss.notOK()) {
+					return Result.err(nss);
+				}
+				ss[1] = nss.value.name;
+				ss[0] = nss.value.ns;
+			}
+			return Result.ok(ss);
+		}
+
+		public static Data create(NsDAO.Data ns, String name) {
+			NsSplit nss = new NsSplit(ns,name);
+			Data rv = new Data();
+			rv.ns = nss.ns;
+			String[] s = nss.name.split("\\|");
+			switch(s.length) {
+				case 3:
+					rv.type=s[0];
+					rv.instance=s[1];
+					rv.action=s[2];
+					break;
+				case 2:
+					rv.type=s[0];
+					rv.instance=s[1];
+					rv.action=STAR;
+					break;
+				default:
+					rv.type=s[0];
+					rv.instance = STAR;
+					rv.action = STAR;
+			}
+			return rv;
+		}
+		
+		public static Data create(AuthzTrans trans, Question q, String name) {
+			String[] s = name.split("\\|");
+			Result<NsSplit> rdns = q.deriveNsSplit(trans, s[0]);
+			Data rv = new PermDAO.Data();
+			if(rdns.isOKhasData()) {
+				switch(s.length) {
+					case 3:
+						rv.type=s[1];
+						rv.instance=s[2];
+						rv.action=s[3];
+						break;
+					case 2:
+						rv.type=s[1];
+						rv.instance=s[2];
+						rv.action=STAR;
+						break;
+					default:
+						rv.type=s[1];
+						rv.instance = STAR;
+						rv.action = STAR;
+				}
+			}
+			return rv;
+		}
+		
+        ////////////////////////////////////////
+        // Getters
+        public Set<String> roles(boolean mutable) {
+            if (roles == null) {
+                roles = new HashSet<String>();
+            } else if (mutable && !(roles instanceof HashSet)) {
+                roles = new HashSet<String>(roles);
+            }
+            return roles;
+        }
+
+		@Override
+		public int[] invalidate(Cached<?,?> cache) {
+			return new int[] {
+				seg(cache,ns),
+				seg(cache,ns,type),
+				seg(cache,ns,type,STAR),
+				seg(cache,ns,type,instance,action)
+			};
+		}
+
+		@Override
+		public ByteBuffer bytify() throws IOException {
+			ByteArrayOutputStream baos = new ByteArrayOutputStream();
+			PermLoader.deflt.marshal(this, new DataOutputStream(baos));
+			return ByteBuffer.wrap(baos.toByteArray());
+		}
+		
+		@Override
+		public void reconstitute(ByteBuffer bb) throws IOException {
+			PermLoader.deflt.unmarshal(this, toDIS(bb));
+		}
+
+		@Override
+		public String toString() {
+			return encode();
+		}
+	}
+	
+	private static class PermLoader extends Loader<Data> implements Streamer<Data> {
+		public static final int MAGIC=283939453;
+    	public static final int VERSION=1;
+    	public static final int BUFF_SIZE=96;
+
+    	public static final PermLoader deflt = new PermLoader(KEYLIMIT);
+    	
+		public PermLoader(int keylimit) {
+			super(keylimit);
+		}
+		
+		@Override
+		public Data load(Data data, Row row) {
+			// Int more efficient Match "fields" string
+			data.ns = row.getString(0);
+			data.type = row.getString(1);
+			data.instance = row.getString(2);
+			data.action = row.getString(3);
+			data.roles = row.getSet(4,String.class);
+			data.description = row.getString(5);
+			return data;
+		}
+
+		@Override
+		protected void key(Data data, int _idx, Object[] obj) {
+		    	int idx = _idx;
+			obj[idx]=data.ns;
+			obj[++idx]=data.type;
+			obj[++idx]=data.instance;
+			obj[++idx]=data.action;
+		}
+
+		@Override
+		protected void body(Data data, int _idx, Object[] obj) {
+		    	int idx = _idx;
+			obj[idx]=data.roles;
+			obj[++idx]=data.description;
+		}
+
+		@Override
+		public void marshal(Data data, DataOutputStream os) throws IOException {
+			writeHeader(os,MAGIC,VERSION);
+			writeString(os, data.ns);
+			writeString(os, data.type);
+			writeString(os, data.instance);
+			writeString(os, data.action);
+			writeStringSet(os, data.roles);
+			writeString(os, data.description);
+		}
+
+		@Override
+		public void unmarshal(Data data, DataInputStream is) throws IOException {
+			/*int version = */readHeader(is,MAGIC,VERSION);
+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+			byte[] buff = new byte[BUFF_SIZE];
+			data.ns = readString(is, buff);
+			data.type = readString(is,buff);
+			data.instance = readString(is,buff);
+			data.action = readString(is,buff);
+			data.roles = readStringSet(is,buff);
+			data.description = readString(is,buff);
+		}
+	}
+	
+	private void init(AuthzTrans trans) {
+		// the 3 is the number of key fields
+		String[] helpers = setCRUD(trans, TABLE, Data.class, PermLoader.deflt);
+		
+		// Other SELECT style statements... match with a local Method
+		psByType = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + 
+				" WHERE ns = ? AND type = ?", new PermLoader(2) {
+			@Override
+			protected void key(Data data, int idx, Object[] obj) {
+				obj[idx]=data.type;
+			}
+		},readConsistency);
+		
+		psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+				" WHERE ns = ?", new PermLoader(1),readConsistency);
+				
+		psChildren = new PSInfo(trans, SELECT_SP +  helpers[FIELD_COMMAS] +  " FROM " + TABLE + 
+				" WHERE ns=? AND type > ? AND type < ?", 
+				new PermLoader(3) {
+			@Override
+			protected void key(Data data, int _idx, Object[] obj) {
+			    	int idx = _idx;
+				obj[idx] = data.ns;
+				obj[++idx]=data.type + DOT;
+				obj[++idx]=data.type + DOT_PLUS_ONE;
+			}
+		},readConsistency);
+
+	}
+
+
+	/**
+	 * Add a single Permission to the Role's Permission Collection
+	 * 
+	 * @param trans
+	 * @param roleFullName
+	 * @param perm
+	 * @param type
+	 * @param action
+	 * @return
+	 */
+	public Result<Void> addRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) {
+		// Note: Prepared Statements for Collection updates aren't supported
+		//ResultSet rv =
+		try {
+			getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles + {'"	+ roleFullName + "'} " +
+				"WHERE " +
+					"ns = '" + perm.ns + "' AND " +
+					"type = '" + perm.type + "' AND " +
+					"instance = '" + perm.instance + "' AND " +
+					"action = '" + perm.action + "';"
+					);
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		}
+
+		wasModified(trans, CRUD.update, perm, "Added role " + roleFullName + " to perm " +
+				perm.ns + '.' + perm.type + '|' + perm.instance + '|' + perm.action);
+		return Result.ok();
+	}
+
+	/**
+	 * Remove a single Permission from the Role's Permission Collection
+	 * @param trans
+	 * @param roleFullName
+	 * @param perm
+	 * @param type
+	 * @param action
+	 * @return
+	 */
+	public Result<Void> delRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) {
+		// Note: Prepared Statements for Collection updates aren't supported
+		//ResultSet rv =
+		try {
+			getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles - {'" + roleFullName + "'} " +
+				"WHERE " +
+					"ns = '" + perm.ns + "' AND " +
+					"type = '" + perm.type + "' AND " +
+					"instance = '" + perm.instance + "' AND " +
+					"action = '" + perm.action + "';"
+					);
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		}
+
+		//TODO how can we tell when it doesn't?
+		wasModified(trans, CRUD.update, perm, "Removed role " + roleFullName + " from perm " +
+				perm.ns + '.' + perm.type + '|' + perm.instance + '|' + perm.action);
+		return Result.ok();
+	}
+
+
+	
+	/**
+	 * Additional method: 
+	 * 		Select all Permissions by Name
+	 * 
+	 * @param name
+	 * @return
+	 * @throws DAOException
+	 */
+	public Result<List<Data>> readByType(AuthzTrans trans, String ns, String type) {
+		return psByType.read(trans, R_TEXT, new Object[]{ns, type});
+	}
+	
+	public Result<List<Data>> readChildren(AuthzTrans trans, String ns, String type) {
+		return psChildren.read(trans, R_TEXT, new Object[]{ns, type+DOT, type + DOT_PLUS_ONE});
+	}
+
+	public Result<List<Data>> readNS(AuthzTrans trans, String ns) {
+		return psNS.read(trans, R_TEXT, new Object[]{ns});
+	}
+
+	/**
+	 * Add description to this permission
+	 * 
+	 * @param trans
+	 * @param ns
+	 * @param type
+	 * @param instance
+	 * @param action
+	 * @param description
+	 * @return
+	 */
+	public Result<Void> addDescription(AuthzTrans trans, String ns, String type,
+			String instance, String action, String description) {
+		try {
+			getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '" 
+				+ description + "' WHERE ns = '" + ns + "' AND type = '" + type + "'"
+				+ "AND instance = '" + instance + "' AND action = '" + action + "';");
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		}
+
+		Data data = new Data();
+		data.ns=ns;
+		data.type=type;
+		data.instance=instance;
+		data.action=action;
+		wasModified(trans, CRUD.update, data, "Added description " + description + " to permission " 
+				+ data.encode(), null );
+		return Result.ok();
+	}
+	
+	/**
+	 * Log Modification statements to History
+	 */
+	@Override
+	protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+    	boolean memo = override.length>0 && override[0]!=null;
+    	boolean subject = override.length>1 && override[1]!=null;
+
+		// Need to update history
+		HistoryDAO.Data hd = HistoryDAO.newInitedData();
+		hd.user = trans.user();
+		hd.action = modified.name();
+		hd.target = TABLE;
+		hd.subject = subject ? override[1] : data.fullType();
+		if (memo) {
+            hd.memo = String.format("%s", override[0]);
+        } else {
+            hd.memo = String.format("%sd %s|%s|%s", modified.name(),data.fullType(),data.instance,data.action);
+        }
+		
+		if(modified==CRUD.delete) {
+			try {
+				hd.reconstruct = data.bytify();
+			} catch (IOException e) {
+				trans.error().log(e,"Could not serialize PermDAO.Data");
+			}
+		}
+		
+        if(historyDAO.create(trans, hd).status!=Status.OK) {
+        	trans.error().log("Cannot log to History");
+        }
+        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
+        	trans.error().log("Cannot touch CacheInfo");
+        }
+	}
+}
+
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/RoleDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/RoleDAO.java
new file mode 100644
index 0000000..da7d7a2
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/RoleDAO.java
@@ -0,0 +1,412 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.onap.aaf.auth.dao.Bytification;
+import org.onap.aaf.auth.dao.Cached;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.dao.Streamer;
+import org.onap.aaf.auth.dao.hl.Question;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.util.Split;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.exceptions.DriverException;
+
+public class RoleDAO extends CassDAOImpl<AuthzTrans,RoleDAO.Data> {
+
+	public static final String TABLE = "role";
+    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
+    
+	private final HistoryDAO historyDAO;
+	private final CacheInfoDAO infoDAO;
+
+	private PSInfo psChildren, psNS, psName;
+
+	public RoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+		super(trans, RoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+        // Set up sub-DAOs
+        historyDAO = new HistoryDAO(trans, this);
+		infoDAO = new CacheInfoDAO(trans,this);
+		init(trans);
+	}
+
+	public RoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {
+		super(trans, RoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		historyDAO = hDAO;
+		infoDAO = ciDAO;
+		init(trans);
+	}
+
+
+    //////////////////////////////////////////
+    // Data Definition, matches Cassandra DM
+    //////////////////////////////////////////
+    private static final int KEYLIMIT = 2;
+    /**
+     * Data class that matches the Cassandra Table "role"
+     * @author Jonathan
+     */
+	public static class Data extends CacheableData implements Bytification {
+    	public String		ns;
+		public String		name;
+		public Set<String>  perms;
+		public String		description;
+
+        ////////////////////////////////////////
+        // Getters
+		public Set<String> perms(boolean mutable) {
+			if (perms == null) {
+				perms = new HashSet<String>();
+			} else if (mutable && !(perms instanceof HashSet)) {
+				perms = new HashSet<String>(perms);
+			}
+			return perms;
+		}
+		
+		public static Data create(NsDAO.Data ns, String name) {
+			NsSplit nss = new NsSplit(ns,name);		
+			RoleDAO.Data rv = new Data();
+			rv.ns = nss.ns;
+			rv.name=nss.name;
+			return rv;
+		}
+		
+		public String fullName() {
+			return ns + '.' + name;
+		}
+		
+		public String encode() {
+			return ns + '|' + name;
+		}
+		
+		/**
+		 * Decode Perm String, including breaking into appropriate Namespace
+		 * 
+		 * @param trans
+		 * @param q
+		 * @param r
+		 * @return
+		 */
+		public static Result<Data> decode(AuthzTrans trans, Question q, String r) {
+			String[] ss = Split.splitTrim('|', r,2);
+			Data data = new Data();
+			if(ss[1]==null) { // older 1 part encoding must be evaluated for NS
+				Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
+				if(nss.notOK()) {
+					return Result.err(nss);
+				}
+				data.ns=nss.value.ns;
+				data.name=nss.value.name;
+			} else { // new 4 part encoding
+				data.ns=ss[0];
+				data.name=ss[1];
+			}
+			return Result.ok(data);
+		}
+
+		/**
+		 * Decode from UserRole Data
+		 * @param urdd
+		 * @return
+		 */
+		public static RoleDAO.Data decode(UserRoleDAO.Data urdd) {
+			RoleDAO.Data rd = new RoleDAO.Data();
+			rd.ns = urdd.ns;
+			rd.name = urdd.rname;
+			return rd;
+		}
+
+
+		/**
+		 * Decode Perm String, including breaking into appropriate Namespace
+		 * 
+		 * @param trans
+		 * @param q
+		 * @param p
+		 * @return
+		 */
+		public static Result<String[]> decodeToArray(AuthzTrans trans, Question q, String p) {
+			String[] ss = Split.splitTrim('|', p,2);
+			if(ss[1]==null) { // older 1 part encoding must be evaluated for NS
+				Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
+				if(nss.notOK()) {
+					return Result.err(nss);
+				}
+				ss[0] = nss.value.ns;
+				ss[1] = nss.value.name;
+			}
+			return Result.ok(ss);
+		}
+		
+		@Override
+		public int[] invalidate(Cached<?,?> cache) {
+			return new int[] {
+				seg(cache,ns,name),
+				seg(cache,ns),
+				seg(cache,name),
+			};
+		}
+
+		@Override
+		public ByteBuffer bytify() throws IOException {
+			ByteArrayOutputStream baos = new ByteArrayOutputStream();
+			RoleLoader.deflt.marshal(this,new DataOutputStream(baos));
+			return ByteBuffer.wrap(baos.toByteArray());
+		}
+		
+		@Override
+		public void reconstitute(ByteBuffer bb) throws IOException {
+			RoleLoader.deflt.unmarshal(this, toDIS(bb));
+		}
+
+		@Override
+		public String toString() {
+			return ns + '.' + name;
+		}
+    }
+
+    private static class RoleLoader extends Loader<Data> implements Streamer<Data> {
+		public static final int MAGIC=923577343;
+    	public static final int VERSION=1;
+    	public static final int BUFF_SIZE=96;
+
+    	public static final RoleLoader deflt = new RoleLoader(KEYLIMIT);
+    	
+		public RoleLoader(int keylimit) {
+			super(keylimit);
+		}
+		
+		@Override
+		public Data load(Data data, Row row) {
+			// Int more efficient
+			data.ns = row.getString(0);
+			data.name = row.getString(1);
+			data.perms = row.getSet(2,String.class);
+			data.description = row.getString(3);
+			return data;
+		}
+
+		@Override
+		protected void key(Data data, int _idx, Object[] obj) {
+		    	int idx = _idx;
+			obj[idx]=data.ns;
+			obj[++idx]=data.name;
+		}
+
+		@Override
+		protected void body(Data data, int _idx, Object[] obj) {
+		    	int idx = _idx;
+			obj[idx]=data.perms;
+			obj[++idx]=data.description;
+		}
+
+		@Override
+		public void marshal(Data data, DataOutputStream os) throws IOException {
+			writeHeader(os,MAGIC,VERSION);
+			writeString(os, data.ns);
+			writeString(os, data.name);
+			writeStringSet(os,data.perms);
+			writeString(os, data.description);
+		}
+
+		@Override
+		public void unmarshal(Data data, DataInputStream is) throws IOException {
+			/*int version = */readHeader(is,MAGIC,VERSION);
+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+			byte[] buff = new byte[BUFF_SIZE];
+			data.ns = readString(is, buff);
+			data.name = readString(is,buff);
+			data.perms = readStringSet(is,buff);
+			data.description = readString(is,buff);
+		}
+    };
+
+	private void init(AuthzTrans trans) {
+		String[] helpers = setCRUD(trans, TABLE, Data.class, RoleLoader.deflt);
+		
+		psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+				" WHERE ns = ?", new RoleLoader(1),readConsistency);
+
+		psName = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+				" WHERE name = ?", new RoleLoader(1),readConsistency);
+
+		psChildren = new PSInfo(trans, SELECT_SP +  helpers[FIELD_COMMAS] +  " FROM " + TABLE + 
+				" WHERE ns=? AND name > ? AND name < ?", 
+				new RoleLoader(3) {
+			@Override
+			protected void key(Data data, int _idx, Object[] obj) {
+			    	int idx = _idx;
+				obj[idx] = data.ns;
+				obj[++idx]=data.name + DOT;
+				obj[++idx]=data.name + DOT_PLUS_ONE;
+			}
+		},readConsistency);
+		
+	}
+
+	public Result<List<Data>> readNS(AuthzTrans trans, String ns) {
+		return psNS.read(trans, R_TEXT + " NS " + ns, new Object[]{ns});
+	}
+
+	public Result<List<Data>> readName(AuthzTrans trans, String name) {
+		return psName.read(trans, R_TEXT + name, new Object[]{name});
+	}
+
+	public Result<List<Data>> readChildren(AuthzTrans trans, String ns, String role) {
+		if(role.length()==0 || "*".equals(role)) {
+			return psChildren.read(trans, R_TEXT, new Object[]{ns, FIRST_CHAR, LAST_CHAR}); 
+		} else {
+			return psChildren.read(trans, R_TEXT, new Object[]{ns, role+DOT, role+DOT_PLUS_ONE});
+		}
+	}
+
+	/**
+	 * Add a single Permission to the Role's Permission Collection
+	 * 
+	 * @param trans
+	 * @param role
+	 * @param perm
+	 * @param type
+	 * @param action
+	 * @return
+	 */
+	public Result<Void> addPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {
+		// Note: Prepared Statements for Collection updates aren't supported
+		String pencode = perm.encode();
+		try {
+			getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms + {'" + 
+				pencode + "'} WHERE " +
+				"ns = '" + role.ns + "' AND name = '" + role.name + "';");
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		}
+
+		wasModified(trans, CRUD.update, role, "Added permission " + pencode + " to role " + role.fullName());
+		return Result.ok();
+	}
+
+	/**
+	 * Remove a single Permission from the Role's Permission Collection
+	 * @param trans
+	 * @param role
+	 * @param perm
+	 * @param type
+	 * @param action
+	 * @return
+	 */
+	public Result<Void> delPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {
+		// Note: Prepared Statements for Collection updates aren't supported
+
+		String pencode = perm.encode();
+		
+		//ResultSet rv =
+		try {
+			getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms - {'" + 
+				pencode	+ "'} WHERE " +
+				"ns = '" + role.ns + "' AND name = '" + role.name + "';");
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		}
+
+		//TODO how can we tell when it doesn't?
+		wasModified(trans, CRUD.update, role, "Removed permission " + pencode + " from role " + role.fullName() );
+		return Result.ok();
+	}
+	
+	/**
+	 * Add description to role
+	 * 
+	 * @param trans
+	 * @param ns
+	 * @param name
+	 * @param description
+	 * @return
+	 */
+	public Result<Void> addDescription(AuthzTrans trans, String ns, String name, String description) {
+		try {
+			getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '" 
+				+ description + "' WHERE ns = '" + ns + "' AND name = '" + name + "';");
+		} catch (DriverException | APIException | IOException e) {
+			reportPerhapsReset(trans,e);
+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+		}
+
+		Data data = new Data();
+		data.ns=ns;
+		data.name=name;
+		wasModified(trans, CRUD.update, data, "Added description " + description + " to role " + data.fullName(), null );
+		return Result.ok();
+	}
+	
+	
+    /**
+     * Log Modification statements to History
+     * @param modified           which CRUD action was done
+     * @param data               entity data that needs a log entry
+     * @param overrideMessage    if this is specified, we use it rather than crafting a history message based on data
+     */
+    @Override
+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+    	boolean memo = override.length>0 && override[0]!=null;
+    	boolean subject = override.length>1 && override[1]!=null;
+
+    	HistoryDAO.Data hd = HistoryDAO.newInitedData();
+        hd.user = trans.user();
+        hd.action = modified.name();
+        hd.target = TABLE;
+        hd.subject = subject ? override[1] : data.fullName();
+        hd.memo = memo ? override[0] : (data.fullName() + " was "  + modified.name() + 'd' );
+		if(modified==CRUD.delete) {
+			try {
+				hd.reconstruct = data.bytify();
+			} catch (IOException e) {
+				trans.error().log(e,"Could not serialize RoleDAO.Data");
+			}
+		}
+
+        if(historyDAO.create(trans, hd).status!=Status.OK) {
+        	trans.error().log("Cannot log to History");
+        }
+        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
+        	trans.error().log("Cannot touch CacheInfo for Role");
+        }
+    }
+
+    
+}
\ No newline at end of file
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Status.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Status.java
new file mode 100644
index 0000000..be52c40
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Status.java
@@ -0,0 +1,88 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import org.onap.aaf.auth.layer.Result;
+
+
+
+
+/**
+ * Add additional Behavior for Specific Applications for Results
+ * 
+ * In this case, we add additional BitField information accessible by
+ * method (
+ * @author Jonathan
+ *
+ * @param <RV>
+ */
+public class Status<RV> extends Result<RV> {
+	
+	// Jonathan 10/1/2013:  Initially, I used enum, but it's not extensible.
+    public final static int ERR_NsNotFound = Result.ERR_General+1,
+    						ERR_RoleNotFound = Result.ERR_General+2,
+    						ERR_PermissionNotFound = Result.ERR_General+3, 
+    						ERR_UserNotFound = Result.ERR_General+4,
+    						ERR_UserRoleNotFound = Result.ERR_General+5,
+    						ERR_DelegateNotFound = Result.ERR_General+6,
+    						ERR_InvalidDelegate = Result.ERR_General+7,
+    						ERR_DependencyExists = Result.ERR_General+8,
+    						ERR_NoApprovals = Result.ERR_General+9,
+    						ACC_Now = Result.ERR_General+10,
+    						ACC_Future = Result.ERR_General+11,
+    						ERR_ChoiceNeeded = Result.ERR_General+12,
+    						ERR_FutureNotRequested = Result.ERR_General+13;
+  
+	/**
+     * Constructor for Result set. 
+     * @param data
+     * @param status
+     */
+    private Status(RV value, int status, String details, String[] variables ) {
+    	super(value,status,details,variables);
+    }
+
+	public static String name(int status) {
+		switch(status) {
+			case OK: return "OK";
+			case ERR_NsNotFound: return "ERR_NsNotFound";
+			case ERR_RoleNotFound: return "ERR_RoleNotFound";
+			case ERR_PermissionNotFound: return "ERR_PermissionNotFound"; 
+			case ERR_UserNotFound: return "ERR_UserNotFound";
+			case ERR_UserRoleNotFound: return "ERR_UserRoleNotFound";
+			case ERR_DelegateNotFound: return "ERR_DelegateNotFound";
+			case ERR_InvalidDelegate: return "ERR_InvalidDelegate";
+			case ERR_ConflictAlreadyExists: return "ERR_ConflictAlreadyExists";
+			case ERR_DependencyExists: return "ERR_DependencyExists";
+			case ERR_ActionNotCompleted: return "ERR_ActionNotCompleted";
+			case ERR_Denied: return "ERR_Denied";
+			case ERR_Policy: return "ERR_Policy";
+			case ERR_BadData: return "ERR_BadData";
+			case ERR_NotImplemented: return "ERR_NotImplemented";
+			case ERR_NotFound: return "ERR_NotFound";
+			case ERR_ChoiceNeeded: return "ERR_ChoiceNeeded";
+		}
+		//case ERR_General:   or unknown... 
+		return "ERR_General";
+	}
+    
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/UserRoleDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/UserRoleDAO.java
new file mode 100644
index 0000000..301e47f
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/UserRoleDAO.java
@@ -0,0 +1,319 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.cass;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Date;
+import java.util.List;
+
+import org.onap.aaf.auth.dao.Bytification;
+import org.onap.aaf.auth.dao.Cached;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.DAOException;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.dao.Streamer;
+import org.onap.aaf.auth.dao.hl.Question;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.Slot;
+import org.onap.aaf.misc.env.util.Chrono;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Row;
+
+public class UserRoleDAO extends CassDAOImpl<AuthzTrans,UserRoleDAO.Data> {
+	public static final String TABLE = "user_role";
+	
+    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
+
+	private static final String TRANS_UR_SLOT = "_TRANS_UR_SLOT_";
+	public Slot transURSlot;
+	
+	private final HistoryDAO historyDAO;
+	private final CacheInfoDAO infoDAO;
+	
+	private PSInfo psByUser, psByRole, psUserInRole;
+
+
+
+	public UserRoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+		super(trans, UserRoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		transURSlot = trans.slot(TRANS_UR_SLOT);
+		init(trans);
+
+		// Set up sub-DAOs
+		historyDAO = new HistoryDAO(trans, this);
+		infoDAO = new CacheInfoDAO(trans,this);
+	}
+
+	public UserRoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {
+		super(trans, UserRoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+		transURSlot = trans.slot(TRANS_UR_SLOT);
+		historyDAO = hDAO;
+		infoDAO = ciDAO;
+		init(trans);
+	}
+
+	private static final int KEYLIMIT = 2;
+	public static class Data extends CacheableData implements Bytification {
+		public String  user;
+		public String  role;
+		public String  ns; 
+		public String  rname; 
+		public Date   expires;
+		
+		@Override
+		public int[] invalidate(Cached<?,?> cache) {
+			// Note: I'm not worried about Name collisions, because the formats are different:
+			// Jonathan... etc versus
+			// com. ...
+			// The "dot" makes the difference.
+			return new int[] {
+				seg(cache,user,role),
+				seg(cache,user),
+				seg(cache,role)
+			};
+		}
+
+		@Override
+		public ByteBuffer bytify() throws IOException {
+			ByteArrayOutputStream baos = new ByteArrayOutputStream();
+			URLoader.deflt.marshal(this,new DataOutputStream(baos));
+			return ByteBuffer.wrap(baos.toByteArray());
+		}
+		
+		@Override
+		public void reconstitute(ByteBuffer bb) throws IOException {
+			URLoader.deflt.unmarshal(this, toDIS(bb));
+		}
+
+		public void role(String ns, String rname) {
+			this.ns = ns;
+			this.rname = rname;
+			this.role = ns + '.' + rname;
+		}
+		
+		public void role(RoleDAO.Data rdd) {
+			ns = rdd.ns;
+			rname = rdd.name;
+			role = rdd.fullName();
+		}
+
+		
+		public boolean role(AuthzTrans trans, Question ques, String role) {
+			this.role = role;
+			Result<NsSplit> rnss = ques.deriveNsSplit(trans, role);
+			if(rnss.isOKhasData()) {
+				ns = rnss.value.ns;
+				rname = rnss.value.name;
+				return true;
+			} else {
+				return false;
+			}
+		}
+
+		@Override
+		public String toString() {
+			return user + '|' + ns + '|' +  rname + '|' + Chrono.dateStamp(expires);
+		}
+
+
+	}
+	
+	private static class URLoader extends Loader<Data> implements Streamer<Data> {
+		public static final int MAGIC=738469903;
+    	public static final int VERSION=1;
+    	public static final int BUFF_SIZE=48;
+    	
+    	public static final URLoader deflt = new URLoader(KEYLIMIT);
+
+		public URLoader(int keylimit) {
+			super(keylimit);
+		}
+
+		@Override
+		public Data load(Data data, Row row) {
+			data.user = row.getString(0);
+			data.role = row.getString(1);
+			data.ns = row.getString(2);
+			data.rname = row.getString(3);
+			data.expires = row.getTimestamp(4);
+			return data;
+		}
+
+		@Override
+		protected void key(Data data, int _idx, Object[] obj) {
+		    	int idx = _idx;
+			obj[idx]=data.user;
+			obj[++idx]=data.role;
+		}
+
+		@Override
+		protected void body(Data data, int _idx, Object[] obj) {
+		    	int idx = _idx;
+			obj[idx]=data.ns;
+			obj[++idx]=data.rname;
+			obj[++idx]=data.expires;
+		}
+		
+		@Override
+		public void marshal(Data data, DataOutputStream os) throws IOException {
+			writeHeader(os,MAGIC,VERSION);
+
+			writeString(os, data.user);
+			writeString(os, data.role);
+			writeString(os, data.ns);
+			writeString(os, data.rname);
+			os.writeLong(data.expires==null?-1:data.expires.getTime());
+		}
+
+		@Override
+		public void unmarshal(Data data, DataInputStream is) throws IOException {
+			/*int version = */readHeader(is,MAGIC,VERSION);
+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+			
+			byte[] buff = new byte[BUFF_SIZE];
+			data.user = readString(is,buff);
+			data.role = readString(is,buff);
+			data.ns = readString(is,buff);
+			data.rname = readString(is,buff);
+			long l = is.readLong();
+			data.expires = l<0?null:new Date(l);
+		}
+
+	};
+	
+	private void init(AuthzTrans trans) {
+		String[] helper = setCRUD(trans, TABLE, Data.class, URLoader.deflt);
+		
+		psByUser = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ?", 
+			new URLoader(1) {
+				@Override
+				protected void key(Data data, int idx, Object[] obj) {
+					obj[idx]=data.user;
+				}
+			},readConsistency);
+		
+		// Note: We understand this call may have poor performance, so only should be used in Management (Delete) func
+		psByRole = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE role = ? ALLOW FILTERING", 
+				new URLoader(1) {
+					@Override
+					protected void key(Data data, int idx, Object[] obj) {
+						obj[idx]=data.role;
+					}
+				},readConsistency);
+		
+		psUserInRole = new PSInfo(trans,SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ? AND role = ?",
+				URLoader.deflt,readConsistency);
+	}
+
+	public Result<List<Data>> readByUser(AuthzTrans trans, String user) {
+		return psByUser.read(trans, R_TEXT + " by User " + user, new Object[]{user});
+	}
+
+	/**
+	 * Note: Use Sparingly. Cassandra's forced key structure means this will perform fairly poorly
+	 * @param trans
+	 * @param role
+	 * @return
+	 * @throws DAOException
+	 */
+	public Result<List<Data>> readByRole(AuthzTrans trans, String role) {
+		return psByRole.read(trans, R_TEXT + " by Role " + role, new Object[]{role});
+	}
+	
+	/**
+	 * Direct Lookup of User Role
+	 * Don't forget to check for Expiration
+	 */
+	public Result<List<Data>> readByUserRole(AuthzTrans trans, String user, String role) {
+		return psUserInRole.read(trans, R_TEXT + " by User " + user + " and Role " + role, new Object[]{user,role});
+	}
+
+
+	/**
+     * Log Modification statements to History
+     * @param modified           which CRUD action was done
+     * @param data               entity data that needs a log entry
+     * @param overrideMessage    if this is specified, we use it rather than crafting a history message based on data
+     */
+	@Override
+	protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+    	boolean memo = override.length>0 && override[0]!=null;
+    	boolean subject = override.length>1 && override[1]!=null;
+
+		HistoryDAO.Data hd = HistoryDAO.newInitedData();
+		HistoryDAO.Data hdRole = HistoryDAO.newInitedData();
+		
+        hd.user = hdRole.user = trans.user();
+		hd.action = modified.name();
+		// Modifying User/Role is an Update to Role, not a Create.  Jonathan, 07-14-2015
+		hdRole.action = CRUD.update.name();
+		hd.target = TABLE;
+		hdRole.target = RoleDAO.TABLE;
+		hd.subject = subject?override[1] : (data.user + '|'+data.role);
+		hdRole.subject = data.role;
+		switch(modified) {
+			case create: 
+				hd.memo = hdRole.memo = memo
+					? String.format("%s by %s", override[0], hd.user)
+					: String.format("%s added to %s",data.user,data.role);	
+				break;
+			case update: 
+				hd.memo = hdRole.memo = memo
+					? String.format("%s by %s", override[0], hd.user)
+					: String.format("%s - %s was updated",data.user,data.role);
+				break;
+			case delete: 
+				hd.memo = hdRole.memo = memo
+					? String.format("%s by %s", override[0], hd.user)
+					: String.format("%s removed from %s",data.user,data.role);
+				try {
+					hd.reconstruct = hdRole.reconstruct = data.bytify();
+				} catch (IOException e) {
+					trans.warn().log(e,"Deleted UserRole could not be serialized");
+				}
+				break;
+			default:
+				hd.memo = hdRole.memo = memo
+				? String.format("%s by %s", override[0], hd.user)
+				: "n/a";
+		}
+
+		if(historyDAO.create(trans, hd).status!=Status.OK) {
+        	trans.error().log("Cannot log to History");
+		}
+		
+		if(historyDAO.create(trans, hdRole).status!=Status.OK) {
+        	trans.error().log("Cannot log to History");
+		}
+		// uses User as Segment
+        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
+        	trans.error().log("Cannot touch CacheInfo");
+        }
+	}
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/CassExecutor.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/CassExecutor.java
new file mode 100644
index 0000000..1979db2
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/CassExecutor.java
@@ -0,0 +1,73 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.hl;
+
+import org.onap.aaf.auth.dao.cass.NsSplit;
+import org.onap.aaf.auth.dao.cass.NsDAO.Data;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.auth.org.Executor;
+
+public class CassExecutor implements Executor {
+
+	private Question q;
+	private Function f;
+	private AuthzTrans trans;
+
+	public CassExecutor(AuthzTrans trans, Function f) {
+		this.trans = trans;
+		this.f = f;
+		this.q = this.f.q;
+	}
+
+	@Override
+	public boolean hasPermission(String user, String ns, String type, String instance, String action) {
+		return isGranted(user, ns, type, instance, action);
+	}
+
+	@Override
+	public boolean inRole(String name) {
+		Result<NsSplit> nss = q.deriveNsSplit(trans, name);
+		if(nss.notOK())return false;
+		return q.roleDAO.read(trans, nss.value.ns,nss.value.name).isOKhasData();
+	}
+
+	public boolean isGranted(String user, String ns, String type, String instance, String action) {
+		return q.isGranted(trans, user, ns, type, instance,action);
+	}
+
+	@Override
+	public String namespace() throws Exception {
+		Result<Data> res = q.validNSOfDomain(trans,trans.user());
+		if(res.isOK()) {
+			String user[] = trans.user().split("\\.");
+			return user[user.length-1] + '.' + user[user.length-2];
+		}
+		throw new Exception(res.status + ' ' + res.details);
+	}
+
+	@Override
+	public String id() {
+		return trans.user();
+	}
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Function.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Function.java
new file mode 100644
index 0000000..1f67907
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Function.java
@@ -0,0 +1,1792 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.hl;
+
+import static org.onap.aaf.auth.layer.Result.OK;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+
+import org.onap.aaf.auth.common.Define;
+import org.onap.aaf.auth.dao.DAOException;
+import org.onap.aaf.auth.dao.cass.ApprovalDAO;
+import org.onap.aaf.auth.dao.cass.CredDAO;
+import org.onap.aaf.auth.dao.cass.DelegateDAO;
+import org.onap.aaf.auth.dao.cass.FutureDAO;
+import org.onap.aaf.auth.dao.cass.Namespace;
+import org.onap.aaf.auth.dao.cass.NsDAO;
+import org.onap.aaf.auth.dao.cass.NsSplit;
+import org.onap.aaf.auth.dao.cass.NsType;
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.dao.cass.UserRoleDAO;
+import org.onap.aaf.auth.dao.cass.NsDAO.Data;
+import org.onap.aaf.auth.dao.hl.Question.Access;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.env.AuthzTrans.REQD_TYPE;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.auth.org.Executor;
+import org.onap.aaf.auth.org.Organization;
+import org.onap.aaf.auth.org.OrganizationException;
+import org.onap.aaf.auth.org.Organization.Expiration;
+import org.onap.aaf.auth.org.Organization.Identity;
+import org.onap.aaf.auth.org.Organization.Policy;
+
+public class Function {
+
+	private static final String CANNOT_BE_THE_OWNER_OF_A_NAMESPACE = "%s(%s) cannot be the owner of the namespace '%s'. Owners %s.";
+
+	public enum FUTURE_OP {
+		C("Create"),U("Update"),D("Delete"),G("Grant"),UG("UnGrant"),A("Approval");
+		
+		private String desc;
+	
+		private FUTURE_OP(String desc) {
+			this.desc = desc;
+		}
+		
+		public String desc() {
+			return desc;
+		}
+		
+		/**
+		 *  Same as valueOf(), but passes back null instead of throwing Exception
+		 * @param value
+		 * @return
+		 */
+		public static FUTURE_OP toFO(String value) {
+			if(value!=null) {
+				for(FUTURE_OP fo : values()) {
+					if(fo.name().equals(value)){
+						return fo;
+					}
+				}
+			}
+			return null;
+		}
+	}
+
+	public enum OP_STATUS {
+		E("Executed"),D("Denied"),P("Pending"),L("Lapsed");
+		
+		private String desc;
+		public final static Result<OP_STATUS> RE = Result.ok(OP_STATUS.E);
+		public final static Result<OP_STATUS> RD = Result.ok(OP_STATUS.D);
+		public final static Result<OP_STATUS> RP = Result.ok(OP_STATUS.P);
+		public final static Result<OP_STATUS> RL = Result.ok(OP_STATUS.L);
+
+		private OP_STATUS(String desc) {
+			this.desc = desc;
+		}
+		
+		public String desc() {
+			return desc;
+		}
+		
+	}
+
+	public static final String FOP_CRED = "cred";
+	public static final String FOP_DELEGATE = "delegate";
+	public static final String FOP_NS = "ns";
+	public static final String FOP_PERM = "perm";
+	public static final String FOP_ROLE = "role";
+	public static final String FOP_USER_ROLE = "user_role";
+	private static final List<Identity> NO_ADDL_APPROVE = new ArrayList<Identity>();
+	private static final String ROOT_NS = Define.ROOT_NS();
+	// First Action should ALWAYS be "write", see "CreateRole"
+	public final Question q;
+
+	public Function(AuthzTrans trans, Question question) {
+		q = question;
+	}
+
+	private class ErrBuilder {
+		private StringBuilder sb;
+		private List<String> ao;
+
+		public void log(Result<?> result) {
+			if (result.notOK()) {
+				if (sb == null) {
+					sb = new StringBuilder();
+					ao = new ArrayList<String>();
+				}
+				sb.append(result.details);
+				sb.append('\n');
+				for (String s : result.variables) {
+					ao.add(s);
+				}
+			}
+		}
+
+		public String[] vars() {
+			String[] rv = new String[ao.size()];
+			ao.toArray(rv);
+			return rv;
+		}
+
+		public boolean hasErr() {
+			return sb != null;
+		}
+
+		@Override
+		public String toString() {
+			return sb == null ? "" : String.format(sb.toString(), ao);
+		}
+	}
+
+	/**
+	 * createNS
+	 * 
+	 * Create Namespace
+	 * 
+	 * @param trans
+	 * @param org
+	 * @param ns
+	 * @param user
+	 * @return
+	 * @throws DAOException
+	 * 
+	 *             To create an NS, you need to: 1) validate permission to
+	 *             modify parent NS 2) Does NS exist already? 3) Create NS with
+	 *             a) "user" as owner. NOTE: Per 10-15 request for AAF 1.0 4)
+	 *             Loop through Roles with Parent NS, and map any that start
+	 *             with this NS into this one 5) Loop through Perms with Parent
+	 *             NS, and map any that start with this NS into this one
+	 */
+	public Result<Void> createNS(AuthzTrans trans, Namespace namespace, boolean fromApproval) {
+		Result<?> rq;
+//		if (namespace.name.endsWith(Question.DOT_ADMIN)
+//				|| namespace.name.endsWith(Question.DOT_OWNER)) {
+//			return Result.err(Status.ERR_BadData,
+//					"'admin' and 'owner' are reserved names in AAF");
+//		}
+
+		try {
+			for (String u : namespace.owner) {
+				Organization org = trans.org();
+				Identity orgUser = org.getIdentity(trans, u);
+				String reason;
+				if (orgUser == null) {
+					return Result.err(Status.ERR_Policy,"%s is not a valid user at %s",u,org.getName());	
+				} else if((reason=orgUser.mayOwn())!=null) {
+					if (org.isTestEnv()) {
+						String reason2;
+						if((reason2=org.validate(trans, Policy.AS_RESPONSIBLE,new CassExecutor(trans, this), u))!=null) { // can masquerade as responsible
+							trans.debug().log(reason2);
+							return Result.err(Status.ERR_Policy,CANNOT_BE_THE_OWNER_OF_A_NAMESPACE,orgUser.fullName(),orgUser.id(),namespace.name,reason);
+						}
+						// a null means ok
+					} else {
+						if(orgUser.isFound()) {
+							return Result.err(Status.ERR_Policy,CANNOT_BE_THE_OWNER_OF_A_NAMESPACE,orgUser.fullName(),orgUser.id(),namespace.name, reason);
+						} else {
+							return Result.err(Status.ERR_Policy,u + " is an invalid Identity");
+						}
+					}
+				}
+			}
+		} catch (Exception e) {
+			trans.error().log(e,
+					"Could not contact Organization for User Validation");
+		}
+
+		String user = trans.user();
+		// 1) May Change Parent?
+		int idx = namespace.name.lastIndexOf('.');
+		String parent;
+		if (idx < 0) {
+			if (!q.isGranted(trans, user, ROOT_NS,Question.NS, ".", "create")) {
+				return Result.err(Result.ERR_Security,
+						"%s may not create Root Namespaces", user);
+			}
+			parent = null;
+			fromApproval = true;
+		} else {
+			parent = namespace.name.substring(0, idx); // get Parent String
+		}
+
+		Result<NsDAO.Data> rparent = q.deriveNs(trans, parent);
+		if (rparent.notOK()) {
+			return Result.err(rparent);
+		}
+		if (!fromApproval) {
+			rparent = q.mayUser(trans, user, rparent.value, Access.write);
+			if (rparent.notOK()) {
+				return Result.err(rparent);
+			}
+		}
+		parent = namespace.parent = rparent.value.name; // Correct Namespace from real data
+
+		// 2) Does requested NS exist
+		if (q.nsDAO.read(trans, namespace.name).isOKhasData()) {
+			return Result.err(Status.ERR_ConflictAlreadyExists,
+					"Target Namespace already exists");
+		}
+
+		// Someone must be responsible.
+		if (namespace.owner == null || namespace.owner.isEmpty()) {
+			return Result
+					.err(Status.ERR_Policy,
+							"Namespaces must be assigned at least one responsible party");
+		}
+
+		// 3) Create NS
+		Date now = new Date();
+
+		Result<Void> r;
+		// 3a) Admin
+
+		try {
+			// Originally, added the enterer as Admin, but that's not necessary,
+			// or helpful for Operations folks..
+			// Admins can be empty, because they can be changed by lower level
+			// NSs
+			// if(ns.admin(false).isEmpty()) {
+			// ns.admin(true).add(user);
+			// }
+			if (namespace.admin != null) {
+				for (String u : namespace.admin) {
+					if ((r = checkValidID(trans, now, u)).notOK()) {
+						return r;
+					}
+				}
+			}
+
+			// 3b) Responsible
+			Organization org = trans.org();
+			for (String u : namespace.owner) {
+				Identity orgUser = org.getIdentity(trans, u);
+				if (orgUser == null) {
+					return Result
+							.err(Status.ERR_BadData,
+									"NS must be created with an %s approved Responsible Party",
+									org.getName());
+				}
+			}
+		} catch (Exception e) {
+			return Result.err(Status.ERR_UserNotFound, e.getMessage());
+		}
+
+		// VALIDATIONS done... Add NS
+		if ((rq = q.nsDAO.create(trans, namespace.data())).notOK()) {
+		    return Result.err(rq);
+		}
+
+		// Since Namespace is now created, we need to grab all subsequent errors
+		ErrBuilder eb = new ErrBuilder();
+
+		// Add UserRole(s)
+		UserRoleDAO.Data urdd = new UserRoleDAO.Data();
+		urdd.expires = trans.org().expiration(null, Expiration.UserInRole).getTime();
+		urdd.role(namespace.name, Question.ADMIN);
+		for (String admin : namespace.admin) {
+			urdd.user = admin;
+			eb.log(q.userRoleDAO.create(trans, urdd));
+		}
+		urdd.role(namespace.name,Question.OWNER);
+		for (String owner : namespace.owner) {
+			urdd.user = owner;
+			eb.log(q.userRoleDAO.create(trans, urdd));
+		}
+
+		addNSAdminRolesPerms(trans, eb, namespace.name);
+
+		addNSOwnerRolesPerms(trans, eb, namespace.name);
+
+		if (parent != null) {
+			// Build up with any errors
+
+			String targetNs = rparent.value.name; // Get the Parent Namespace,
+													// not target
+			String targetName = namespace.name.substring(targetNs.length() + 1); // Remove the Parent Namespace from the
+									// Target + a dot, and you'll get the name
+			int targetNameDot = targetName.length() + 1;
+
+			// 4) Change any roles with children matching this NS, and
+			Result<List<RoleDAO.Data>> rrdc = q.roleDAO.readChildren(trans,	targetNs, targetName);
+			if (rrdc.isOKhasData()) {
+				for (RoleDAO.Data rdd : rrdc.value) {
+					// Remove old Role from Perms, save them off
+					List<PermDAO.Data> lpdd = new ArrayList<PermDAO.Data>();
+					for(String p : rdd.perms(false)) {
+						Result<PermDAO.Data> rpdd = PermDAO.Data.decode(trans,q,p);
+						if(rpdd.isOKhasData()) {
+							PermDAO.Data pdd = rpdd.value;
+							lpdd.add(pdd);
+							q.permDAO.delRole(trans, pdd, rdd);
+						} else{
+							trans.error().log(rpdd.errorString());
+						}
+					}
+					
+					// Save off Old keys
+					String delP1 = rdd.ns;
+					String delP2 = rdd.name;
+
+					// Write in new key
+					rdd.ns = namespace.name;
+					rdd.name = (delP2.length() > targetNameDot) ? delP2
+							.substring(targetNameDot) : "";
+							
+					// Need to use non-cached, because switching namespaces, not
+					// "create" per se
+					if ((rq = q.roleDAO.create(trans, rdd)).isOK()) {
+						// Put Role back into Perm, with correct info
+						for(PermDAO.Data pdd : lpdd) {
+							q.permDAO.addRole(trans, pdd, rdd);
+						}
+						// Change data for User Roles 
+						Result<List<UserRoleDAO.Data>> rurd = q.userRoleDAO.readByRole(trans, rdd.fullName());
+						if(rurd.isOKhasData()) {
+							for(UserRoleDAO.Data urd : rurd.value) {
+								urd.ns = rdd.ns;
+								urd.rname = rdd.name;
+								q.userRoleDAO.update(trans, urd);
+							}
+						}
+						// Now delete old one
+						rdd.ns = delP1;
+						rdd.name = delP2;
+						if ((rq = q.roleDAO.delete(trans, rdd, false)).notOK()) {
+							eb.log(rq);
+						}
+					} else {
+						eb.log(rq);
+					}
+				}
+			}
+
+			// 4) Change any Permissions with children matching this NS, and
+			Result<List<PermDAO.Data>> rpdc = q.permDAO.readChildren(trans,targetNs, targetName);
+			if (rpdc.isOKhasData()) {
+				for (PermDAO.Data pdd : rpdc.value) {
+					// Remove old Perm from Roles, save them off
+					List<RoleDAO.Data> lrdd = new ArrayList<RoleDAO.Data>();
+					
+					for(String rl : pdd.roles(false)) {
+						Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,q,rl);
+						if(rrdd.isOKhasData()) {
+							RoleDAO.Data rdd = rrdd.value;
+							lrdd.add(rdd);
+							q.roleDAO.delPerm(trans, rdd, pdd);
+						} else{
+							trans.error().log(rrdd.errorString());
+						}
+					}
+					
+					// Save off Old keys
+					String delP1 = pdd.ns;
+					String delP2 = pdd.type;
+					pdd.ns = namespace.name;
+					pdd.type = (delP2.length() > targetNameDot) ? delP2
+							.substring(targetNameDot) : "";
+					if ((rq = q.permDAO.create(trans, pdd)).isOK()) {
+						// Put Role back into Perm, with correct info
+						for(RoleDAO.Data rdd : lrdd) {
+							q.roleDAO.addPerm(trans, rdd, pdd);
+						}
+
+						pdd.ns = delP1;
+						pdd.type = delP2;
+						if ((rq = q.permDAO.delete(trans, pdd, false)).notOK()) {
+							eb.log(rq);
+							// } else {
+							// Need to invalidate directly, because we're
+							// switching places in NS, not normal cache behavior
+							// q.permDAO.invalidate(trans,pdd);
+						}
+					} else {
+						eb.log(rq);
+					}
+				}
+			}
+			if (eb.hasErr()) {
+				return Result.err(Status.ERR_ActionNotCompleted,eb.sb.toString(), eb.vars());
+			}
+		}
+		return Result.ok();
+	}
+
+	private void addNSAdminRolesPerms(AuthzTrans trans, ErrBuilder eb, String ns) {
+		// Admin Role/Perm
+		RoleDAO.Data rd = new RoleDAO.Data();
+		rd.ns = ns;
+		rd.name = "admin";
+		rd.description = "AAF Namespace Administrators";
+
+		PermDAO.Data pd = new PermDAO.Data();
+		pd.ns = ns;
+		pd.type = "access";
+		pd.instance = Question.ASTERIX;
+		pd.action = Question.ASTERIX;
+		pd.description = "AAF Namespace Write Access";
+
+		rd.perms = new HashSet<String>();
+		rd.perms.add(pd.encode());
+		eb.log(q.roleDAO.create(trans, rd));
+
+		pd.roles = new HashSet<String>();
+		pd.roles.add(rd.encode());
+		eb.log(q.permDAO.create(trans, pd));
+	}
+
+	private void addNSOwnerRolesPerms(AuthzTrans trans, ErrBuilder eb, String ns) {
+		RoleDAO.Data rd = new RoleDAO.Data();
+		rd.ns = ns;
+		rd.name = "owner";
+		rd.description = "AAF Namespace Owners";
+
+		PermDAO.Data pd = new PermDAO.Data();
+		pd.ns = ns;
+		pd.type = "access";
+		pd.instance = Question.ASTERIX;
+		pd.action = Question.READ;
+		pd.description = "AAF Namespace Read Access";
+
+		rd.perms = new HashSet<String>();
+		rd.perms.add(pd.encode());
+		eb.log(q.roleDAO.create(trans, rd));
+
+		pd.roles = new HashSet<String>();
+		pd.roles.add(rd.encode());
+		eb.log(q.permDAO.create(trans, pd));
+	}
+
+	/**
+	 * deleteNS
+	 * 
+	 * Delete Namespace
+	 * 
+	 * @param trans
+	 * @param org
+	 * @param ns
+	 * @param force
+	 * @param user
+	 * @return
+	 * @throws DAOException
+	 * 
+	 * 
+	 *             To delete an NS, you need to: 1) validate permission to
+	 *             modify this NS 2) Find all Roles with this NS, and 2a) if
+	 *             Force, delete them, else modify to Parent NS 3) Find all
+	 *             Perms with this NS, and modify to Parent NS 3a) if Force,
+	 *             delete them, else modify to Parent NS 4) Find all IDs
+	 *             associated to this NS, and deny if exists. 5) Remove NS
+	 */
+	public Result<Void> deleteNS(AuthzTrans trans, String ns) {
+		boolean force = trans.requested(REQD_TYPE.force);
+		boolean move = trans.requested(REQD_TYPE.move);
+		// 1) Validate
+		Result<List<NsDAO.Data>> nsl;
+		if ((nsl = q.nsDAO.read(trans, ns)).notOKorIsEmpty()) {
+			return Result.err(Status.ERR_NsNotFound, "%s does not exist", ns);
+		}
+		NsDAO.Data nsd = nsl.value.get(0);
+		NsType nt;
+		if (move && !q.canMove(nt = NsType.fromType(nsd.type))) {
+			return Result.err(Status.ERR_Denied, "Namespace Force=move not permitted for Type %s",nt.name());
+		}
+
+		Result<NsDAO.Data> dnr = q.mayUser(trans, trans.user(), nsd, Access.write);
+		if (dnr.status != Status.OK) {
+			return Result.err(dnr);
+		}
+
+		// 2) Find Parent
+		String user = trans.user();
+		int idx = ns.lastIndexOf('.');
+		NsDAO.Data parent;
+		if (idx < 0) {
+			if (!q.isGranted(trans, user, ROOT_NS,Question.NS, ".", "delete")) {
+				return Result.err(Result.ERR_Security,
+						"%s may not delete Root Namespaces", user);
+			}
+			parent = null;
+		} else {
+			Result<NsDAO.Data> rlparent = q.deriveNs(trans,	ns.substring(0, idx));
+			if (rlparent.notOKorIsEmpty()) {
+				return Result.err(rlparent);
+			}
+			parent = rlparent.value;
+		}
+
+		// Build up with any errors
+		// If sb != null below is an indication of error
+		StringBuilder sb = null;
+		ErrBuilder er = new ErrBuilder();
+
+		// 2a) Deny if any IDs on Namespace
+		Result<List<CredDAO.Data>> creds = q.credDAO.readNS(trans, ns);
+		if (creds.isOKhasData()) {
+			if (force || move) {
+				for (CredDAO.Data cd : creds.value) {
+					er.log(q.credDAO.delete(trans, cd, false));
+					// Since we're deleting all the creds, we should delete all
+					// the user Roles for that Cred
+					Result<List<UserRoleDAO.Data>> rlurd = q.userRoleDAO
+							.readByUser(trans, cd.id);
+					if (rlurd.isOK()) {
+						for (UserRoleDAO.Data data : rlurd.value) {
+						    q.userRoleDAO.delete(trans, data, false);
+						}
+					}
+
+				}
+			} else {
+				// first possible StringBuilder Create.
+				sb = new StringBuilder();
+				sb.append('[');
+				sb.append(ns);
+				sb.append("] contains users");
+			}
+		}
+
+		// 2b) Find (or delete if forced flag is set) dependencies
+		// First, find if NS Perms are the only ones
+		Result<List<PermDAO.Data>> rpdc = q.permDAO.readNS(trans, ns);
+		if (rpdc.isOKhasData()) {
+			// Since there are now NS perms, we have to count NON-NS perms.
+			// FYI, if we delete them now, and the NS is not deleted, it is in
+			// an inconsistent state.
+			boolean nonaccess = false;
+			for (PermDAO.Data pdd : rpdc.value) {
+				if (!"access".equals(pdd.type)) {
+					nonaccess = true;
+					break;
+				}
+			}
+			if (nonaccess && !force && !move) {
+				if (sb == null) {
+					sb = new StringBuilder();
+					sb.append('[');
+					sb.append(ns);
+					sb.append("] contains ");
+				} else {
+					sb.append(", ");
+				}
+				sb.append("permissions");
+			}
+		}
+
+		Result<List<RoleDAO.Data>> rrdc = q.roleDAO.readNS(trans, ns);
+		if (rrdc.isOKhasData()) {
+			// Since there are now NS roles, we have to count NON-NS roles.
+			// FYI, if we delete th)em now, and the NS is not deleted, it is in
+			// an inconsistent state.
+			int count = rrdc.value.size();
+			for (RoleDAO.Data rdd : rrdc.value) {
+				if ("admin".equals(rdd.name) || "owner".equals(rdd.name)) {
+					--count;
+				}
+			}
+			if (count > 0 && !force && !move) {
+				if (sb == null) {
+					sb = new StringBuilder();
+					sb.append('[');
+					sb.append(ns);
+					sb.append("] contains ");
+				} else {
+					sb.append(", ");
+				}
+				sb.append("roles");
+			}
+		}
+
+		// 2c) Deny if dependencies exist that would be moved to root level
+		// parent is root level parent here. Need to find closest parent ns that
+		// exists
+		if (sb != null) {
+			if (!force && !move) {
+				sb.append(".\n  Delete dependencies and try again.  Note: using \"force=true\" will delete all. \"force=move\" will delete Creds, but move Roles and Perms to parent.");
+				return Result.err(Status.ERR_DependencyExists, sb.toString());
+			}
+
+			if (move && (parent == null || parent.type == NsType.COMPANY.type)) {
+				return Result
+						.err(Status.ERR_DependencyExists,
+								"Cannot move users, roles or permissions to [%s].\nDelete dependencies and try again",
+								parent.name);
+			}
+		} else if (move && parent != null) {
+			sb = new StringBuilder();
+			// 3) Change any roles with children matching this NS, and
+			moveRoles(trans, parent, sb, rrdc);
+			// 4) Change any Perms with children matching this NS, and
+			movePerms(trans, parent, sb, rpdc);
+		}
+
+		if (sb != null && sb.length() > 0) {
+			return Result.err(Status.ERR_DependencyExists, sb.toString());
+		}
+
+		if (er.hasErr()) {
+			if (trans.debug().isLoggable()) {
+				trans.debug().log(er.toString());
+			}
+			return Result.err(Status.ERR_DependencyExists,
+					"Namespace members cannot be deleted for %s", ns);
+		}
+
+		// 5) OK... good to go for NS Deletion...
+		if (!rpdc.isEmpty()) {
+			for (PermDAO.Data perm : rpdc.value) {
+				deletePerm(trans, perm, true, true);
+			}
+		}
+		if (!rrdc.isEmpty()) {
+			for (RoleDAO.Data role : rrdc.value) {
+				deleteRole(trans, role, true, true);
+			}
+		}
+
+		return q.nsDAO.delete(trans, nsd, false);
+	}
+
+	public Result<List<String>> getOwners(AuthzTrans trans, String ns,
+			boolean includeExpired) {
+		return getUsersByRole(trans, ns + Question.DOT_OWNER, includeExpired);
+	}
+
+	private Result<Void> mayAddOwner(AuthzTrans trans, String ns, String id) {
+		Result<NsDAO.Data> rq = q.deriveNs(trans, ns);
+		if (rq.notOK()) {
+			return Result.err(rq);
+		}
+
+		rq = q.mayUser(trans, trans.user(), rq.value, Access.write);
+		if (rq.notOK()) {
+			return Result.err(rq);
+		}
+
+		Identity user;
+		Organization org = trans.org();
+		try {
+			if ((user = org.getIdentity(trans, id)) == null) {
+				return Result.err(Status.ERR_Policy,
+						"%s reports that this is not a valid credential",
+						org.getName());
+			}
+			String reason;
+			if ((reason=user.mayOwn())==null) {
+				return Result.ok();
+			} else {
+				if (org.isTestEnv()) {
+					String reason2;
+					if((reason2 = org.validate(trans, Policy.AS_RESPONSIBLE, new CassExecutor(trans, this), id))==null) {
+						return Result.ok();
+					} else {
+						trans.debug().log(reason2);
+					}
+				}
+				return Result.err(Status.ERR_Policy,CANNOT_BE_THE_OWNER_OF_A_NAMESPACE,user.fullName(),user.id(),ns, reason);
+			}
+		} catch (Exception e) {
+			return Result.err(e);
+		}
+	}
+
+	private Result<Void> mayAddAdmin(AuthzTrans trans, String ns,	String id) {
+		// Does NS Exist?
+		Result<Void> r = checkValidID(trans, new Date(), id);
+		if (r.notOK()) {
+			return r;
+		}
+		// Is id able to be an Admin
+		Result<NsDAO.Data> rq = q.deriveNs(trans, ns);
+		if (rq.notOK()) {
+			return Result.err(rq);
+		}
+	
+		rq = q.mayUser(trans, trans.user(), rq.value, Access.write);
+		if (rq.notOK()) {
+			Result<List<UserRoleDAO.Data>> ruinr = q.userRoleDAO.readUserInRole(trans, trans.user(),ns+".owner");
+			if(!(ruinr.isOKhasData() && ruinr.value.get(0).expires.after(new Date()))) {
+				return Result.err(rq);
+			}
+		}
+		return r;
+	}
+
+	private Result<Void> checkValidID(AuthzTrans trans, Date now, String user) {
+		Organization org = trans.org();
+		if (user.endsWith(org.getRealm())) {
+			try {
+				if (org.getIdentity(trans, user) == null) {
+					return Result.err(Status.ERR_Denied,
+							"%s reports that %s is a faulty ID", org.getName(),
+							user);
+				}
+				return Result.ok();
+			} catch (Exception e) {
+				return Result.err(Result.ERR_Security,
+						"%s is not a valid %s Credential", user, org.getName());
+			}
+		//TODO find out how to make sure good ALTERNATE OAUTH DOMAIN USER
+//		} else if(user.endsWith(ALTERNATE OAUTH DOMAIN)) {
+//			return Result.ok();
+		} else {
+			Result<List<CredDAO.Data>> cdr = q.credDAO.readID(trans, user);
+			if (cdr.notOKorIsEmpty()) {
+				return Result.err(Status.ERR_Security,
+						"%s is not a valid AAF Credential", user);
+			}
+	
+			for (CredDAO.Data cd : cdr.value) {
+				if (cd.expires.after(now)) {
+					return Result.ok();
+				}
+			}
+		}
+		return Result.err(Result.ERR_Security, "%s has expired", user);
+	}
+
+	public Result<Void> delOwner(AuthzTrans trans, String ns, String id) {
+		Result<NsDAO.Data> rq = q.deriveNs(trans, ns);
+		if (rq.notOK()) {
+			return Result.err(rq);
+		}
+
+		rq = q.mayUser(trans, trans.user(), rq.value, Access.write);
+		if (rq.notOK()) {
+			return Result.err(rq);
+		}
+
+		return delUserRole(trans, id, ns,Question.OWNER);
+	}
+
+	public Result<List<String>> getAdmins(AuthzTrans trans, String ns, boolean includeExpired) {
+		return getUsersByRole(trans, ns + Question.DOT_ADMIN, includeExpired);
+	}
+
+	public Result<Void> delAdmin(AuthzTrans trans, String ns, String id) {
+		Result<NsDAO.Data> rq = q.deriveNs(trans, ns);
+		if (rq.notOK()) {
+			return Result.err(rq);
+		}
+
+		rq = q.mayUser(trans, trans.user(), rq.value, Access.write);
+		if (rq.notOK()) { 
+			// Even though not a "writer", Owners still determine who gets to be an Admin
+			Result<List<UserRoleDAO.Data>> ruinr = q.userRoleDAO.readUserInRole(trans, trans.user(),ns+".owner");
+			if(!(ruinr.isOKhasData() && ruinr.value.get(0).expires.after(new Date()))) {
+				return Result.err(rq);
+			}
+		}
+
+		return delUserRole(trans, id, ns, Question.ADMIN);
+	}
+
+	/**
+	 * Helper function that moves permissions from a namespace being deleted to
+	 * its parent namespace
+	 * 
+	 * @param trans
+	 * @param parent
+	 * @param sb
+	 * @param rpdc
+	 *            - list of permissions in namespace being deleted
+	 */
+	private void movePerms(AuthzTrans trans, NsDAO.Data parent,
+			StringBuilder sb, Result<List<PermDAO.Data>> rpdc) {
+
+		Result<Void> rv;
+		Result<PermDAO.Data> pd;
+
+		if (rpdc.isOKhasData()) {
+			for (PermDAO.Data pdd : rpdc.value) {
+				String delP2 = pdd.type;
+				if ("access".equals(delP2)) {
+				    continue;
+				}
+				// Remove old Perm from Roles, save them off
+				List<RoleDAO.Data> lrdd = new ArrayList<RoleDAO.Data>();
+				
+				for(String rl : pdd.roles(false)) {
+					Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,q,rl);
+					if(rrdd.isOKhasData()) {
+						RoleDAO.Data rdd = rrdd.value;
+						lrdd.add(rdd);
+						q.roleDAO.delPerm(trans, rdd, pdd);
+					} else{
+						trans.error().log(rrdd.errorString());
+					}
+				}
+				
+				// Save off Old keys
+				String delP1 = pdd.ns;
+				NsSplit nss = new NsSplit(parent, pdd.fullType());
+				pdd.ns = nss.ns;
+				pdd.type = nss.name;
+				// Use direct Create/Delete, because switching namespaces
+				if ((pd = q.permDAO.create(trans, pdd)).isOK()) {
+					// Put Role back into Perm, with correct info
+					for(RoleDAO.Data rdd : lrdd) {
+						q.roleDAO.addPerm(trans, rdd, pdd);
+					}
+
+					pdd.ns = delP1;
+					pdd.type = delP2;
+					if ((rv = q.permDAO.delete(trans, pdd, false)).notOK()) {
+						sb.append(rv.details);
+						sb.append('\n');
+						// } else {
+						// Need to invalidate directly, because we're switching
+						// places in NS, not normal cache behavior
+						// q.permDAO.invalidate(trans,pdd);
+					}
+				} else {
+					sb.append(pd.details);
+					sb.append('\n');
+				}
+			}
+		}
+	}
+
+	/**
+	 * Helper function that moves roles from a namespace being deleted to its
+	 * parent namespace
+	 * 
+	 * @param trans
+	 * @param parent
+	 * @param sb
+	 * @param rrdc
+	 *            - list of roles in namespace being deleted
+	 */
+	private void moveRoles(AuthzTrans trans, NsDAO.Data parent,
+			StringBuilder sb, Result<List<RoleDAO.Data>> rrdc) {
+
+		Result<Void> rv;
+		Result<RoleDAO.Data> rd;
+
+		if (rrdc.isOKhasData()) {
+			for (RoleDAO.Data rdd : rrdc.value) {
+				String delP2 = rdd.name;
+				if ("admin".equals(delP2) || "owner".equals(delP2)) {
+				    continue;
+				}
+				// Remove old Role from Perms, save them off
+				List<PermDAO.Data> lpdd = new ArrayList<PermDAO.Data>();
+				for(String p : rdd.perms(false)) {
+					Result<PermDAO.Data> rpdd = PermDAO.Data.decode(trans,q,p);
+					if(rpdd.isOKhasData()) {
+						PermDAO.Data pdd = rpdd.value;
+						lpdd.add(pdd);
+						q.permDAO.delRole(trans, pdd, rdd);
+					} else{
+						trans.error().log(rpdd.errorString());
+					}
+				}
+				
+				// Save off Old keys
+				String delP1 = rdd.ns;
+
+				NsSplit nss = new NsSplit(parent, rdd.fullName());
+				rdd.ns = nss.ns;
+				rdd.name = nss.name;
+				// Use direct Create/Delete, because switching namespaces
+				if ((rd = q.roleDAO.create(trans, rdd)).isOK()) {
+					// Put Role back into Perm, with correct info
+					for(PermDAO.Data pdd : lpdd) {
+						q.permDAO.addRole(trans, pdd, rdd);
+					}
+
+					rdd.ns = delP1;
+					rdd.name = delP2;
+					if ((rv = q.roleDAO.delete(trans, rdd, true)).notOK()) {
+						sb.append(rv.details);
+						sb.append('\n');
+						// } else {
+						// Need to invalidate directly, because we're switching
+						// places in NS, not normal cache behavior
+						// q.roleDAO.invalidate(trans,rdd);
+					}
+				} else {
+					sb.append(rd.details);
+					sb.append('\n');
+				}
+			}
+		}
+	}
+
+	/**
+	 * Create Permission (and any missing Permission between this and Parent) if
+	 * we have permission
+	 * 
+	 * Pass in the desired Management Permission for this Permission
+	 * 
+	 * If Force is set, then Roles listed will be created, if allowed,
+	 * pre-granted.
+	 */
+	public Result<Void> createPerm(AuthzTrans trans, PermDAO.Data perm, boolean fromApproval) {
+		String user = trans.user();
+		// Next, see if User is allowed to Manage Parent Permission
+
+		Result<NsDAO.Data> rnsd;
+		if (!fromApproval) {
+			rnsd = q.mayUser(trans, user, perm, Access.write);
+			if (rnsd.notOK()) {
+				return Result.err(rnsd);
+			}
+		} else {
+			rnsd = q.deriveNs(trans, perm.ns);
+		}
+
+		// Does Child exist?
+		if (!trans.requested(REQD_TYPE.force)) {
+			if (q.permDAO.read(trans, perm).isOKhasData()) {
+				return Result.err(Status.ERR_ConflictAlreadyExists,
+						"Permission [%s.%s|%s|%s] already exists.", perm.ns,
+						perm.type, perm.instance, perm.action);
+			}
+		}
+
+		// Attempt to add perms to roles, creating as possible
+		Set<String> roles;
+		String pstring = perm.encode();
+
+		// For each Role
+		for (String role : roles = perm.roles(true)) {
+			Result<RoleDAO.Data> rdd = RoleDAO.Data.decode(trans,q,role);
+			if(rdd.isOKhasData()) {
+				RoleDAO.Data rd = rdd.value;
+				if (!fromApproval) {
+					// May User write to the Role in question.
+					Result<NsDAO.Data> rns = q.mayUser(trans, user, rd,
+							Access.write);
+					if (rns.notOK()) {
+						// Remove the role from Add, because
+						roles.remove(role); // Don't allow adding
+						trans.warn()
+								.log("User [%s] does not have permission to relate Permissions to Role [%s]",
+										user, role);
+					}
+				}
+
+				Result<List<RoleDAO.Data>> rlrd;
+				if ((rlrd = q.roleDAO.read(trans, rd)).notOKorIsEmpty()) {
+					rd.perms(true).add(pstring);
+					if (q.roleDAO.create(trans, rd).notOK()) {
+						roles.remove(role); // Role doesn't exist, and can't be
+											// created
+					}
+				} else {
+					rd = rlrd.value.get(0);
+					if (!rd.perms.contains(pstring)) {
+						q.roleDAO.addPerm(trans, rd, perm);
+					}
+				}
+			}
+		}
+
+		Result<PermDAO.Data> pdr = q.permDAO.create(trans, perm);
+		if (pdr.isOK()) {
+			return Result.ok();
+		} else { 
+			return Result.err(pdr);
+		}
+	}
+
+	public Result<Void> deletePerm(final AuthzTrans trans, final PermDAO.Data perm, boolean force, boolean fromApproval) {
+		String user = trans.user();
+
+		// Next, see if User is allowed to Manage Permission
+		Result<NsDAO.Data> rnsd;
+		if (!fromApproval) {
+			rnsd = q.mayUser(trans, user, perm, Access.write);
+			if (rnsd.notOK()) {
+				return Result.err(rnsd);
+			}
+		}
+		// Does Perm exist?
+		Result<List<PermDAO.Data>> pdr = q.permDAO.read(trans, perm);
+		if (pdr.notOKorIsEmpty()) {
+			return Result.err(Status.ERR_PermissionNotFound,"Permission [%s.%s|%s|%s] does not exist.",
+					perm.ns,perm.type, perm.instance, perm.action);
+		}
+		// Get perm, but with rest of data.
+		PermDAO.Data fullperm = pdr.value.get(0);
+
+		// Attached to any Roles?
+		if (fullperm.roles != null) {
+			if (force) {
+				for (String role : fullperm.roles) {
+					Result<Void> rv = null;
+					Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans, q, role);
+					if(rrdd.isOKhasData()) {
+						trans.debug().log("Removing", role, "from", fullperm, "on Perm Delete");
+						if ((rv = q.roleDAO.delPerm(trans, rrdd.value, fullperm)).notOK()) {
+							if (rv.notOK()) {
+								trans.error().log("Error removing Role during delFromPermRole: ",
+												trans.getUserPrincipal(),
+												rv.errorString());
+							}
+						}
+					} else {
+						return Result.err(rrdd);
+					}
+				}
+			} else if (!fullperm.roles.isEmpty()) {
+				return Result
+						.err(Status.ERR_DependencyExists,
+								"Permission [%s.%s|%s|%s] cannot be deleted as it is attached to 1 or more roles.",
+								fullperm.ns, fullperm.type, fullperm.instance, fullperm.action);
+			}
+		}
+
+		return q.permDAO.delete(trans, fullperm, false);
+	}
+
+	public Result<Void> deleteRole(final AuthzTrans trans, final RoleDAO.Data role, boolean force, boolean fromApproval) {
+		String user = trans.user();
+
+		// Next, see if User is allowed to Manage Role
+		Result<NsDAO.Data> rnsd;
+		if (!fromApproval) {
+			rnsd = q.mayUser(trans, user, role, Access.write);
+			if (rnsd.notOK()) {
+				return Result.err(rnsd);
+			}
+		}
+
+		// Are there any Users Attached to Role?
+		Result<List<UserRoleDAO.Data>> urdr = q.userRoleDAO.readByRole(trans,role.fullName());
+		if (force) {
+			if (urdr.isOKhasData()) {
+				for (UserRoleDAO.Data urd : urdr.value) {
+					q.userRoleDAO.delete(trans, urd, false);
+				}
+			}
+		} else if (urdr.isOKhasData()) {
+			return Result.err(Status.ERR_DependencyExists,
+							"Role [%s.%s] cannot be deleted as it is used by 1 or more Users.",
+							role.ns, role.name);
+		}
+
+		// Does Role exist?
+		Result<List<RoleDAO.Data>> rdr = q.roleDAO.read(trans, role);
+		if (rdr.notOKorIsEmpty()) {
+			return Result.err(Status.ERR_RoleNotFound,
+					"Role [%s.%s] does not exist", role.ns, role.name);
+		}
+		RoleDAO.Data fullrole = rdr.value.get(0); // full key search
+
+		// Remove Self from Permissions... always, force or not.  Force only applies to Dependencies (Users)
+		if (fullrole.perms != null) {
+			for (String perm : fullrole.perms(false)) {
+				Result<PermDAO.Data> rpd = PermDAO.Data.decode(trans,q,perm);
+				if (rpd.isOK()) {
+					trans.debug().log("Removing", perm, "from", fullrole,"on Role Delete");
+
+					Result<?> r = q.permDAO.delRole(trans, rpd.value, fullrole);
+					if (r.notOK()) {
+						trans.error().log("ERR_FDR1 unable to remove",fullrole,"from",perm,':',r.status,'-',r.details);
+					}
+				} else {
+					trans.error().log("ERR_FDR2 Could not remove",perm,"from",fullrole);
+				}
+			}
+		}
+		return q.roleDAO.delete(trans, fullrole, false);
+	}
+
+	/**
+	 * Only owner of Permission may add to Role
+	 * 
+	 * If force set, however, Role will be created before Grant, if User is
+	 * allowed to create.
+	 * 
+	 * @param trans
+	 * @param role
+	 * @param pd
+	 * @return
+	 */
+	public Result<Void> addPermToRole(AuthzTrans trans, RoleDAO.Data role,PermDAO.Data pd, boolean fromApproval) {
+		String user = trans.user();
+		
+		if (!fromApproval) {
+			Result<NsDAO.Data> rRoleCo = q.deriveFirstNsForType(trans, role.ns, NsType.COMPANY);
+			if(rRoleCo.notOK()) {
+				return Result.err(rRoleCo);
+			}
+			Result<NsDAO.Data> rPermCo = q.deriveFirstNsForType(trans, pd.ns, NsType.COMPANY);
+			if(rPermCo.notOK()) {
+				return Result.err(rPermCo);
+			}
+
+			// Not from same company
+			if(!rRoleCo.value.name.equals(rPermCo.value.name)) {
+				Result<Data> r;
+				// Only grant if User ALSO has Write ability in Other Company
+				if((r = q.mayUser(trans, user, role, Access.write)).notOK()) {
+					return Result.err(r);
+				}
+			}
+			
+
+			// Must be Perm Admin, or Granted Special Permission
+			Result<NsDAO.Data> ucp = q.mayUser(trans, user, pd, Access.write);
+			if (ucp.notOK()) {
+				// Don't allow CLI potential Grantees to change their own AAF
+				// Perms,
+				if ((ROOT_NS.equals(pd.ns) && Question.NS.equals(pd.type)) 
+						|| !q.isGranted(trans, trans.user(),ROOT_NS,Question.PERM, rPermCo.value.name, "grant")) {
+				// Not otherwise granted
+				// TODO Needed?
+					return Result.err(ucp);
+				}
+				// Final Check... Don't allow Grantees to add to Roles they are
+				// part of
+				Result<List<UserRoleDAO.Data>> rlurd = q.userRoleDAO
+						.readByUser(trans, trans.user());
+				if (rlurd.isOK()) {
+					for (UserRoleDAO.Data ur : rlurd.value) {
+						if (role.ns.equals(ur.ns) && role.name.equals(ur.rname)) {
+							return Result.err(ucp);
+						}
+					}
+				}
+			}
+		}
+
+		Result<List<PermDAO.Data>> rlpd = q.permDAO.read(trans, pd);
+		if (rlpd.notOKorIsEmpty()) {
+			return Result.err(Status.ERR_PermissionNotFound,
+					"Permission must exist to add to Role");
+		}
+
+		Result<List<RoleDAO.Data>> rlrd = q.roleDAO.read(trans, role); // Already
+																		// Checked
+																		// for
+																		// can
+																		// change
+																		// Role
+		Result<Void> rv;
+
+		if (rlrd.notOKorIsEmpty()) {
+			if (trans.requested(REQD_TYPE.force)) {
+				Result<NsDAO.Data> ucr = q.mayUser(trans, user, role,
+						Access.write);
+				if (ucr.notOK()) {
+				    return Result
+				    		.err(Status.ERR_Denied,
+				    				"Role [%s.%s] does not exist. User [%s] cannot create.",
+				    				role.ns, role.name, user);
+				}
+
+				role.perms(true).add(pd.encode());
+				Result<RoleDAO.Data> rdd = q.roleDAO.create(trans, role);
+				if (rdd.isOK()) {
+					rv = Result.ok();
+				} else {
+					rv = Result.err(rdd);
+				}
+			} else {
+			    return Result.err(Status.ERR_RoleNotFound,
+			    		"Role [%s.%s] does not exist.", role.ns, role.name);
+			}
+		} else {
+			role = rlrd.value.get(0);
+			if (role.perms(false).contains(pd.encode())) {
+				return Result.err(Status.ERR_ConflictAlreadyExists,
+								"Permission [%s.%s] is already a member of role [%s,%s]",
+								pd.ns, pd.type, role.ns, role.name);
+			}
+			role.perms(true).add(pd.encode()); // this is added for Caching
+												// access purposes... doesn't
+												// affect addPerm
+			rv = q.roleDAO.addPerm(trans, role, pd);
+		}
+		if (rv.status == Status.OK) {
+			return q.permDAO.addRole(trans, pd, role);
+			// exploring how to add information message to successful http
+			// request
+		}
+		return rv;
+	}
+
+	/**
+	 * Either Owner of Role or Permission may delete from Role
+	 * 
+	 * @param trans
+	 * @param role
+	 * @param pd
+	 * @return
+	 */
+	public Result<Void> delPermFromRole(AuthzTrans trans, RoleDAO.Data role,PermDAO.Data pd, boolean fromApproval) {
+		String user = trans.user();
+		if (!fromApproval) {
+			Result<NsDAO.Data> ucr = q.mayUser(trans, user, role, Access.write);
+			Result<NsDAO.Data> ucp = q.mayUser(trans, user, pd, Access.write);
+
+			// If Can't change either Role or Perm, then deny
+			if (ucr.notOK() && ucp.notOK()) {
+				return Result.err(Status.ERR_Denied,
+						"User [" + trans.user()
+								+ "] does not have permission to delete ["
+								+ pd.encode() + "] from Role ["
+								+ role.fullName() + ']');
+			}
+		}
+
+		Result<List<RoleDAO.Data>> rlr = q.roleDAO.read(trans, role);
+		if (rlr.notOKorIsEmpty()) {
+			// If Bad Data, clean out
+			Result<List<PermDAO.Data>> rlp = q.permDAO.read(trans, pd);
+			if (rlp.isOKhasData()) {
+				for (PermDAO.Data pv : rlp.value) {
+					q.permDAO.delRole(trans, pv, role);
+				}
+			}
+			return Result.err(rlr);
+		}
+		String perm1 = pd.encode();
+		boolean notFound;
+		if (trans.requested(REQD_TYPE.force)) {
+			notFound = false;
+		} else { // only check if force not set.
+			notFound = true;
+			for (RoleDAO.Data r : rlr.value) {
+				if (r.perms != null) {
+					for (String perm : r.perms) {
+						if (perm1.equals(perm)) {
+							notFound = false;
+							break;
+						}
+					}
+					if(!notFound) {
+						break;
+					}
+				}
+			}
+		}
+		if (notFound) { // Need to check both, in case of corruption
+			return Result.err(Status.ERR_PermissionNotFound,
+					"Permission [%s.%s|%s|%s] not associated with any Role",
+					pd.ns,pd.type,pd.instance,pd.action);
+		}
+
+		// Read Perm for full data
+		Result<List<PermDAO.Data>> rlp = q.permDAO.read(trans, pd);
+		Result<Void> rv = null;
+		if (rlp.isOKhasData()) {
+			for (PermDAO.Data pv : rlp.value) {
+				if ((rv = q.permDAO.delRole(trans, pv, role)).isOK()) {
+					if ((rv = q.roleDAO.delPerm(trans, role, pv)).notOK()) {
+						trans.error().log(
+								"Error removing Perm during delFromPermRole:",
+								trans.getUserPrincipal(), rv.errorString());
+					}
+				} else {
+					trans.error().log(
+							"Error removing Role during delFromPermRole:",
+							trans.getUserPrincipal(), rv.errorString());
+				}
+			}
+		} else {
+			rv = q.roleDAO.delPerm(trans, role, pd);
+			if (rv.notOK()) {
+				trans.error().log("Error removing Role during delFromPermRole",
+						rv.errorString());
+			}
+		}
+		return rv == null ? Result.ok() : rv;
+	}
+
+	public Result<Void> delPermFromRole(AuthzTrans trans, String role,PermDAO.Data pd) {
+		Result<NsSplit> nss = q.deriveNsSplit(trans, role);
+		if (nss.notOK()) {
+			return Result.err(nss);
+		}
+		RoleDAO.Data rd = new RoleDAO.Data();
+		rd.ns = nss.value.ns;
+		rd.name = nss.value.name;
+		return delPermFromRole(trans, rd, pd, false);
+	}
+
+	/**
+	 * Add a User to Role
+	 * 
+	 * 1) Role must exist 2) User must be a known Credential (i.e. mechID ok if
+	 * Credential) or known Organizational User
+	 * 
+	 * @param trans
+	 * @param org
+	 * @param urData
+	 * @return
+	 * @throws DAOException
+	 */
+	public Result<Void> addUserRole(AuthzTrans trans,UserRoleDAO.Data urData) {
+		Result<Void> rv;
+		if(Question.ADMIN.equals(urData.rname)) {
+			rv = mayAddAdmin(trans, urData.ns, urData.user);
+		} else if(Question.OWNER.equals(urData.rname)) {
+			rv = mayAddOwner(trans, urData.ns, urData.user);
+		} else {
+			rv = checkValidID(trans, new Date(), urData.user);
+		}
+		if(rv.notOK()) {
+			return rv; 
+		}
+		
+		// Check if record exists
+		if (q.userRoleDAO.read(trans, urData).isOKhasData()) {
+			return Result.err(Status.ERR_ConflictAlreadyExists,
+					"User Role exists");
+		}
+		if (q.roleDAO.read(trans, urData.ns, urData.rname).notOKorIsEmpty()) {
+			return Result.err(Status.ERR_RoleNotFound,
+					"Role [%s.%s] does not exist", urData.ns, urData.rname);
+		}
+
+		urData.expires = trans.org().expiration(null, Expiration.UserInRole, urData.user).getTime();
+		
+		
+		Result<UserRoleDAO.Data> udr = q.userRoleDAO.create(trans, urData);
+		switch (udr.status) {
+		case OK:
+			return Result.ok();
+		default:
+			return Result.err(udr);
+		}
+	}
+
+	public Result<Void> addUserRole(AuthzTrans trans, String user, String ns, String rname) {
+		try {
+			if(trans.org().getIdentity(trans, user)==null) {
+				return Result.err(Result.ERR_BadData,user+" is an Invalid Identity for " + trans.org().getName());
+			}
+		} catch (OrganizationException e) {
+			return Result.err(e);
+		}
+		UserRoleDAO.Data urdd = new UserRoleDAO.Data();
+		urdd.ns = ns;
+		urdd.role(ns, rname);
+		urdd.user = user;
+		return addUserRole(trans,urdd);
+	}
+
+	/**
+	 * Extend User Role.
+	 * 
+	 * extend the Expiration data, according to Organization rules.
+	 * 
+	 * @param trans
+	 * @param org
+	 * @param urData
+	 * @return
+	 */
+	public Result<Void> extendUserRole(AuthzTrans trans, UserRoleDAO.Data urData, boolean checkForExist) {
+		// Check if record still exists
+		if (checkForExist && q.userRoleDAO.read(trans, urData).notOKorIsEmpty()) {
+			return Result.err(Status.ERR_UserRoleNotFound,
+					"User Role does not exist");
+		}
+		
+		if (q.roleDAO.read(trans, urData.ns, urData.rname).notOKorIsEmpty()) {
+			return Result.err(Status.ERR_RoleNotFound,
+					"Role [%s.%s] does not exist", urData.ns,urData.rname);
+		}
+		// Special case for "Admin" roles. Issue brought forward with Prod
+		// problem 9/26
+		Date now = new Date();
+		GregorianCalendar gc = new GregorianCalendar();
+		gc.setTime(now.after(urData.expires)?now:urData.expires);
+		urData.expires = trans.org().expiration(gc, Expiration.UserInRole).getTime(); // get
+																				// Full
+																				// time
+																				// starting
+																				// today
+		return q.userRoleDAO.update(trans, urData);
+	}
+
+	// ////////////////////////////////////////////////////
+	// Special User Role Functions
+	// These exist, because User Roles have Expiration dates, which must be
+	// accounted for
+	// Also, as of July, 2015, Namespace Owners and Admins are now regular User
+	// Roles
+	// ////////////////////////////////////////////////////
+	public Result<List<String>> getUsersByRole(AuthzTrans trans, String role, boolean includeExpired) {
+		Result<List<UserRoleDAO.Data>> rurdd = q.userRoleDAO.readByRole(trans,role);
+		if (rurdd.notOK()) {
+			return Result.err(rurdd);
+		}
+		Date now = new Date();
+		List<UserRoleDAO.Data> list = rurdd.value;
+		List<String> rv = new ArrayList<String>(list.size()); // presize
+		for (UserRoleDAO.Data urdd : rurdd.value) {
+			if (includeExpired || urdd.expires.after(now)) {
+				rv.add(urdd.user);
+			}
+		}
+		return Result.ok(rv);
+	}
+
+	public Result<Void> delUserRole(AuthzTrans trans, String user, String ns, String rname) {
+		UserRoleDAO.Data urdd = new UserRoleDAO.Data();
+		urdd.user = user;
+		urdd.role(ns,rname);
+		Result<List<UserRoleDAO.Data>> r = q.userRoleDAO.read(trans, urdd);
+		if (r.status == 404 || r.isEmpty()) {
+			return Result.err(Status.ERR_UserRoleNotFound,
+					"UserRole [%s] [%s.%s]", user, ns, rname);
+		}
+		if (r.notOK()) {
+			return Result.err(r);
+		}
+
+		return q.userRoleDAO.delete(trans, urdd, false);
+	}
+
+	public Result<String> createFuture(AuthzTrans trans, FutureDAO.Data data, String id, String user,
+			NsDAO.Data nsd, FUTURE_OP op) {
+		StringBuilder sb = new StringBuilder();
+		try {
+			Organization org = trans.org();
+			// For Reapproval, only check Owners.. Do Supervisors, etc, separately
+			List<Identity> approvers = op.equals(FUTURE_OP.A)?NO_ADDL_APPROVE:org.getApprovers(trans, user);
+			List<Identity> owners = new ArrayList<Identity>();
+			if (nsd != null) {
+				Result<List<UserRoleDAO.Data>> rrbr = q.userRoleDAO
+						.readByRole(trans, nsd.name + Question.DOT_OWNER);
+				if (rrbr.isOKhasData()) {
+					for(UserRoleDAO.Data urd : rrbr.value) {
+						Identity owner = org.getIdentity(trans, urd.user);
+						if(owner==null) {
+							return Result.err(Result.ERR_NotFound,urd.user + " is not a Valid Owner of " + nsd.name);
+						} else {
+							owners.add(owner);
+						}
+					}
+				}
+			}
+			
+			if(owners.isEmpty()) {
+				return Result.err(Result.ERR_NotFound,"No Owners found for " + nsd.name);
+			}
+			
+			// Create Future Object
+			
+			Result<FutureDAO.Data> fr = q.futureDAO.create(trans, data, id);
+			if (fr.isOK()) {
+				sb.append("Created Future: ");
+				sb.append(data.id);
+				// User Future ID as ticket for Approvals
+				final UUID ticket = fr.value.id;
+				sb.append(", Approvals: ");
+				Boolean first[] = new Boolean[]{true};
+				if(op!=FUTURE_OP.A) {
+					for (Identity u : approvers) {
+						Result<ApprovalDAO.Data> r = addIdentity(trans,sb,first,user,data.memo,op,u,ticket,org.getApproverType());
+						if(r.notOK()) {
+							return Result.err(r);
+						}
+					}
+				}
+				for (Identity u : owners) {
+					Result<ApprovalDAO.Data> r = addIdentity(trans,sb,first,user,data.memo,op,u,ticket,"owner");
+					if(r.notOK()) {
+						return Result.err(r);
+					}
+				}
+			}
+		} catch (Exception e) {
+			return Result.err(e);
+		}
+		
+		return Result.ok(sb.toString());
+	}
+
+	/*
+	 * This interface is to allow performFutureOps with either Realtime Data, or Batched lookups (See Expiring)
+	 */
+	public interface Lookup<T> {
+		T get(AuthzTrans trans, Object ... keys);
+	}
+	
+	public Lookup<UserRoleDAO.Data> urDBLookup = new Lookup<UserRoleDAO.Data>() {
+		@Override
+		public UserRoleDAO.Data get(AuthzTrans trans, Object ... keys) {
+			Result<List<UserRoleDAO.Data>> r = q.userRoleDAO.read(trans, keys);
+			if(r.isOKhasData()) {
+				return r.value.get(0);
+			} else {
+				return null;
+			}
+		}
+	};
+
+	/**
+	 * Note: if "allApprovals for Ticket is null, it will be looked up.  
+	 *       if "fdd" is null, it will be looked up, but
+	 *       
+	 * They can be passed for performance reasons.
+	 * 
+	 * @param trans
+	 * @param cd
+	 * @param allApprovalsForTicket
+	 * @return
+	 */
+	public Result<OP_STATUS> performFutureOp(final AuthzTrans trans, FUTURE_OP fop, FutureDAO.Data curr, Lookup<List<ApprovalDAO.Data>> la, Lookup<UserRoleDAO.Data> lur) {
+		// Pre-Evaluate if ReApproval is already done.
+		UserRoleDAO.Data urdd = null;
+		if(fop.equals(FUTURE_OP.A) && curr.target.equals(FOP_USER_ROLE) && curr.construct!=null) {
+			try {
+				// Get Expected UserRole from Future
+				urdd = new UserRoleDAO.Data();
+				urdd.reconstitute(curr.construct);
+				// Get Current UserRole from lookup
+				UserRoleDAO.Data lurdd = lur.get(trans, urdd.user,urdd.role);
+				if(lurdd==null) {
+					q.futureDAO.delete(trans, curr, false);
+					return OP_STATUS.RL;
+				} else {
+					if(curr.expires.compareTo(lurdd.expires)<0) {
+						q.futureDAO.delete(trans, curr, false);
+						return OP_STATUS.RL;
+					}
+				}
+			} catch (IOException e) {
+				return Result.err(Result.ERR_BadData,"Cannot reconstitute %1",curr.memo);
+			}
+		}
+		
+		boolean aDenial = false;
+		int cntSuper=0, appSuper=0,cntOwner=0, appOwner=0;
+		for(ApprovalDAO.Data add : la.get(trans)) {
+			switch(add.status) {
+				case "approved":
+					if("owner".equals(add.type)) {
+						++cntOwner;
+						++appOwner;
+					} else if("supervisor".equals(add.type)) {
+						++cntSuper;
+						++appSuper;
+					}
+					break;
+				case "pending":
+					if("owner".equals(add.type)) {
+						++cntOwner;
+					} else if("supervisor".equals(add.type)) {
+						++cntSuper;
+					}
+					break;
+				case "denied":
+					aDenial=true;
+					break;
+			}
+		}
+		
+		Result<OP_STATUS> ros=null;
+		if(aDenial) {
+			// Note: Denial will be Audit-logged.
+//			for (ApprovalDAO.Data ad : allApprovalsForTicket.value) {
+//			    q.approvalDAO.delete(trans, ad, false);
+//			}
+			ros = OP_STATUS.RD;
+			if(q.futureDAO.delete(trans, curr, false).notOK()) {
+				trans.info().printf("Future %s could not be deleted", curr.id.toString());
+			}  else {
+				if (FOP_USER_ROLE.equalsIgnoreCase(curr.target)) {
+					// A Denial means we must remove UserRole
+					if(fop.equals(FUTURE_OP.U) || fop.equals(FUTURE_OP.A)) {
+						UserRoleDAO.Data data = new UserRoleDAO.Data();
+						try {
+							data.reconstitute(curr.construct);
+						} catch (IOException e) {
+							trans.error().log("Cannot reconstitue",curr.memo);
+						}
+						ros = set(OP_STATUS.RD,delUserRole(trans, data.user, data.ns, data.rname));
+					}
+				}
+			}
+		}
+		
+		// Decision: If not Denied, and at least owner, if exists, and at least one Super, if exists
+		boolean goDecision = (cntOwner>0?appOwner>0:true) && (cntSuper>0?appSuper>0:true);
+
+		if(goDecision) {
+			// should check if any other pendings before performing
+			// actions
+			try {
+				if (FOP_ROLE.equalsIgnoreCase(curr.target)) {
+					RoleDAO.Data data = new RoleDAO.Data();
+					data.reconstitute(curr.construct);
+					switch(fop) {
+						case C:
+							ros = set(OP_STATUS.RE,q.roleDAO.dao().create(trans, data));
+							break;
+						case D:
+							ros = set(OP_STATUS.RE,deleteRole(trans, data, true, true));
+							break;
+						default:
+					}
+				} else if (FOP_PERM.equalsIgnoreCase(curr.target)) {
+					PermDAO.Data pdd = new PermDAO.Data();
+					pdd.reconstitute(curr.construct);
+					Set<String> roles;
+					Result<RoleDAO.Data> rrdd;
+					switch(fop) {
+						case C:
+							ros = set(OP_STATUS.RE,createPerm(trans, pdd, true));
+							break;
+						case D:
+							ros = set(OP_STATUS.RE,deletePerm(trans, pdd, true, true));
+							break;
+						case G:
+							roles = pdd.roles(true);
+							for (String roleStr : roles) {
+								rrdd = RoleDAO.Data.decode(trans, q, roleStr);
+								if (rrdd.isOKhasData()) {
+									ros = set(OP_STATUS.RE,addPermToRole(trans, rrdd.value, pdd, true));
+								} else {
+									trans.error().log(rrdd.errorString());
+								}
+							}
+							break;
+						case UG:
+							roles = pdd.roles(true);
+							for (String roleStr : roles) {
+								rrdd = RoleDAO.Data.decode(trans, q, roleStr);
+								if (rrdd.isOKhasData()) {
+									ros = set(OP_STATUS.RE,delPermFromRole(trans, rrdd.value, pdd,	true));
+								} else {
+									trans.error().log(rrdd.errorString());
+								}
+							}
+							break;
+						default:
+					}
+				} else if (FOP_USER_ROLE.equalsIgnoreCase(curr.target)) {
+					if(urdd==null) {
+						urdd = new UserRoleDAO.Data();
+						urdd.reconstitute(curr.construct);
+					}
+					// if I am the last to approve, create user role
+					switch(fop) {
+						case C:
+							ros = set(OP_STATUS.RE,addUserRole(trans, urdd));
+							break;
+						case U:
+						case A:
+							ros = set(OP_STATUS.RE,extendUserRole(trans,urdd,true));
+							break;
+						default:
+					}
+				} else if (FOP_NS.equalsIgnoreCase(curr.target)) {
+					Namespace namespace = new Namespace();
+					namespace.reconstitute(curr.construct);
+					switch(fop) {
+						case C:
+							ros = set(OP_STATUS.RE,createNS(trans, namespace, true));
+							break;
+						default:
+					}
+				} else if (FOP_DELEGATE.equalsIgnoreCase(curr.target)) {
+					DelegateDAO.Data data = new DelegateDAO.Data();
+					data.reconstitute(curr.construct);
+					switch(fop) {
+						case C:
+							ros = set(OP_STATUS.RE,q.delegateDAO.create(trans, data));
+							break;
+						case U:
+							ros = set(OP_STATUS.RE,q.delegateDAO.update(trans, data));
+							break;
+						default:
+					}
+				} else if (FOP_CRED.equalsIgnoreCase(curr.target)) {
+					CredDAO.Data data = new CredDAO.Data();
+					data.reconstitute(curr.construct);
+					switch(fop) {
+						case C:
+							ros = set(OP_STATUS.RE,q.credDAO.dao().create(trans, data));
+							break;
+						default:
+					}
+				}				
+			} catch (Throwable e) {
+				trans.error().log("Exception: ", e.getMessage(),
+					" \n occurred while performing", curr.memo,
+					" from Ticket ", curr.id.toString());
+			}
+			q.futureDAO.delete(trans, curr, false);
+		} // end for goDecision
+		if(ros==null) {
+			//return Result.err(Status.ACC_Future, "Full Approvals not obtained: No action taken");
+			ros = OP_STATUS.RP;
+		}
+			
+		return ros;
+	}
+
+	// Convenience method for setting OPSTatus Results
+	private Result<OP_STATUS> set(Result<OP_STATUS> rs, Result<?> orig) {
+		if(orig.isOK()) {
+			return rs;
+		} else {
+			return Result.err(orig);
+		}
+	}
+
+	private Result<ApprovalDAO.Data>  addIdentity(AuthzTrans trans, StringBuilder sb, 
+						Boolean[] first, String user, String memo, FUTURE_OP op, Identity u, UUID ticket, String type) throws OrganizationException {
+		ApprovalDAO.Data ad = new ApprovalDAO.Data();
+		// Note ad.id is set by ApprovalDAO Create
+		ad.ticket = ticket;
+		ad.user = user;
+		ad.approver = u.fullID();
+		ad.status = ApprovalDAO.PENDING;
+		ad.memo = memo;
+		ad.type = type;
+		ad.operation = op.name();
+		// Note ad.updated is created in System
+	    Result<ApprovalDAO.Data> r = q.approvalDAO.create(trans,ad);
+	    if(r.isOK()) {
+			if(first[0]) {
+				first[0] = false;
+			} else {
+				sb.append(", ");
+			}
+			sb.append(r.value.user);
+			sb.append(':');
+			sb.append(r.value.ticket);
+			return r;
+	    } else {
+	    	return Result.err(Status.ERR_ActionNotCompleted,
+					"Approval for %s, %s could not be created: %s",
+					ad.user, ad.approver,
+					r.details, sb.toString());
+	    }
+	}
+
+	public Executor newExecutor(AuthzTrans trans) {
+		return new CassExecutor(trans, this);
+	}
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/PermLookup.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/PermLookup.java
new file mode 100644
index 0000000..615d6b3
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/PermLookup.java
@@ -0,0 +1,185 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.hl;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.dao.cass.UserRoleDAO;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+
+/**
+ * PermLookup is a Storage class for the various pieces of looking up Permission 
+ * during Transactions to avoid duplicate processing
+ * 
+ * @author Jonathan
+ *
+ */
+// Package on purpose
+class PermLookup {
+	private AuthzTrans trans;
+	private String user;
+	private Question q;
+	private Result<List<UserRoleDAO.Data>> userRoles = null;
+	private Result<List<RoleDAO.Data>> roles = null;
+	private Result<Set<String>> permNames = null;
+	private Result<List<PermDAO.Data>> perms = null;
+	
+	private PermLookup() {}
+	
+	static PermLookup get(AuthzTrans trans, Question q, String user) {
+		PermLookup lp=null;
+		Map<String, PermLookup> permMap = trans.get(Question.PERMS, null);
+		if (permMap == null) {
+			trans.put(Question.PERMS, permMap = new HashMap<String, PermLookup>());
+		} else {
+			lp = permMap.get(user);
+		}
+
+		if (lp == null) {
+			lp = new PermLookup();
+			lp.trans = trans;
+			lp.user = user;
+			lp.q = q;
+			permMap.put(user, lp);
+		}
+		return lp;
+	}
+	
+	public Result<List<UserRoleDAO.Data>> getUserRoles() {
+		if(userRoles==null) {
+			userRoles = q.userRoleDAO.readByUser(trans,user);
+			if(userRoles.isOKhasData()) {
+				List<UserRoleDAO.Data> lurdd = new ArrayList<UserRoleDAO.Data>();
+				Date now = new Date();
+				for(UserRoleDAO.Data urdd : userRoles.value) {
+					if(urdd.expires.after(now)) { // Remove Expired
+						lurdd.add(urdd);
+					}
+				}
+				if(lurdd.size()==0) {
+					return userRoles = Result.err(Status.ERR_UserNotFound,
+								"%s not found or not associated with any Roles: ",
+								user);
+				} else {
+					return userRoles = Result.ok(lurdd);
+				}
+			} else {
+				return userRoles;
+			}
+		} else {
+			return userRoles;
+		}
+	}
+
+	public Result<List<RoleDAO.Data>> getRoles() {
+		if(roles==null) {
+			Result<List<UserRoleDAO.Data>> rur = getUserRoles();
+			if(rur.isOK()) {
+				List<RoleDAO.Data> lrdd = new ArrayList<RoleDAO.Data>();
+				for (UserRoleDAO.Data urdata : rur.value) {
+					// Gather all permissions from all Roles
+					    if(urdata.ns==null || urdata.rname==null) {
+					    	return Result.err(Status.ERR_BadData,"DB Content Error: nulls in User Role %s %s", urdata.user,urdata.role);
+					    } else {
+							Result<List<RoleDAO.Data>> rlrd = q.roleDAO.read(
+									trans, urdata.ns, urdata.rname);
+							if(rlrd.isOK()) {
+								lrdd.addAll(rlrd.value);
+							}
+					    }
+					}
+				return roles = Result.ok(lrdd);
+			} else {
+				return roles = Result.err(rur);
+			}
+		} else {
+			return roles;
+		}
+	}
+
+	public Result<Set<String>> getPermNames() {
+		if(permNames==null) {
+			Result<List<RoleDAO.Data>> rlrd = getRoles();
+			if (rlrd.isOK()) {
+				Set<String> pns = new TreeSet<String>();
+				for (RoleDAO.Data rdata : rlrd.value) {
+					pns.addAll(rdata.perms(false));
+				}
+				return permNames = Result.ok(pns);
+			} else {
+				return permNames = Result.err(rlrd);
+			}
+		} else {
+			return permNames;
+		}
+	}
+	
+	public Result<List<PermDAO.Data>> getPerms(boolean lookup) {
+		if(perms==null) {
+			// Note: It should be ok for a Valid user to have no permissions -
+			// Jonathan 8/12/2013
+			Result<Set<String>> rss = getPermNames();
+			if(rss.isOK()) {
+				List<PermDAO.Data> lpdd = new ArrayList<PermDAO.Data>();
+				for (String perm : rss.value) {
+					if(lookup) {
+						Result<String[]> ap = PermDAO.Data.decodeToArray(trans, q, perm);
+						if(ap.isOK()) {
+							 
+							Result<List<PermDAO.Data>> rlpd = q.permDAO.read(perm,trans,ap.value);
+							if (rlpd.isOKhasData()) {
+								for (PermDAO.Data pData : rlpd.value) {
+									lpdd.add(pData);
+								}
+							}
+						} else {
+							trans.error().log("In getPermsByUser, for", user, perm);
+						}
+					} else {
+						Result<PermDAO.Data> pr = PermDAO.Data.decode(trans, q, perm);
+						if (pr.notOK()) {
+							trans.error().log("In getPermsByUser, for", user, pr.errorString());
+						} else {
+							lpdd.add(pr.value);
+						}
+					}
+
+				}
+				return perms = Result.ok(lpdd);
+			} else {
+				return perms = Result.err(rss);
+			}
+		} else {
+			return perms;
+		}
+	}
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Question.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Question.java
new file mode 100644
index 0000000..6b0bb17
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Question.java
@@ -0,0 +1,1154 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.dao.hl;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.security.NoSuchAlgorithmException;
+import java.security.SecureRandom;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.onap.aaf.auth.common.Define;
+import org.onap.aaf.auth.dao.AbsCassDAO;
+import org.onap.aaf.auth.dao.CachedDAO;
+import org.onap.aaf.auth.dao.DAOException;
+import org.onap.aaf.auth.dao.cached.CachedCertDAO;
+import org.onap.aaf.auth.dao.cached.CachedCredDAO;
+import org.onap.aaf.auth.dao.cached.CachedNSDAO;
+import org.onap.aaf.auth.dao.cached.CachedPermDAO;
+import org.onap.aaf.auth.dao.cached.CachedRoleDAO;
+import org.onap.aaf.auth.dao.cached.CachedUserRoleDAO;
+import org.onap.aaf.auth.dao.cass.ApprovalDAO;
+import org.onap.aaf.auth.dao.cass.CacheInfoDAO;
+import org.onap.aaf.auth.dao.cass.CertDAO;
+import org.onap.aaf.auth.dao.cass.CredDAO;
+import org.onap.aaf.auth.dao.cass.DelegateDAO;
+import org.onap.aaf.auth.dao.cass.FutureDAO;
+import org.onap.aaf.auth.dao.cass.HistoryDAO;
+import org.onap.aaf.auth.dao.cass.LocateDAO;
+import org.onap.aaf.auth.dao.cass.NsDAO;
+import org.onap.aaf.auth.dao.cass.NsSplit;
+import org.onap.aaf.auth.dao.cass.NsType;
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.dao.cass.UserRoleDAO;
+import org.onap.aaf.auth.dao.cass.CredDAO.Data;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.env.AuthzTransFilter;
+import org.onap.aaf.auth.env.AuthzTrans.REQD_TYPE;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.auth.org.Organization;
+import org.onap.aaf.cadi.Hash;
+import org.onap.aaf.cadi.aaf.PermEval;
+import org.onap.aaf.cadi.config.Config;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.Env;
+import org.onap.aaf.misc.env.Slot;
+import org.onap.aaf.misc.env.TimeTaken;
+import org.onap.aaf.misc.env.util.Chrono;
+
+import com.datastax.driver.core.Cluster;
+
+/**
+ * Question HL DAO
+ * 
+ * A Data Access Combination Object which asks Security and other Questions
+ * 
+ * @author Jonathan
+ *
+ */
+public class Question {
+
+	// DON'T CHANGE FROM lower Case!!!
+	public static enum Type {
+		ns, role, perm, cred
+	};
+
+	public static final String OWNER="owner";
+	public static final String ADMIN="admin";
+	public static final String DOT_OWNER=".owner";
+	public static final String DOT_ADMIN=".admin";
+	public static final String ACCESS = "access";
+
+	static final String ASTERIX = "*";
+
+	public static enum Access {
+		read, write, create
+	};
+
+	public static final String READ = Access.read.name();
+	public static final String WRITE = Access.write.name();
+	public static final String CREATE = Access.create.name();
+
+	public static final String ROLE = Type.role.name();
+	public static final String PERM = Type.perm.name();
+	public static final String NS = Type.ns.name();
+	public static final String CRED = Type.cred.name();
+	private static final String DELG = "delg";
+	public static final String ROOT_NS = Define.ROOT_NS();
+	public static final String ATTRIB = "attrib";
+
+
+	public static final int MAX_SCOPE = 10;
+	public static final int APP_SCOPE = 3;
+	public static final int COMPANY_SCOPE = 2;
+	static Slot PERMS;
+
+	private static Set<String> specialLog = null;
+	public static final Random random = new SecureRandom();
+	private static long traceID = random.nextLong();
+	private static Slot specialLogSlot = null;
+	private static Slot transIDSlot = null;
+
+
+	public final HistoryDAO historyDAO;
+	public final CachedNSDAO nsDAO;
+	public final CachedRoleDAO roleDAO;
+	public final CachedPermDAO permDAO;
+	public final CachedUserRoleDAO userRoleDAO;
+	public final CachedCredDAO credDAO;
+	public final CachedCertDAO certDAO;
+	public final DelegateDAO delegateDAO;
+	public final FutureDAO futureDAO;
+	public final ApprovalDAO approvalDAO;
+	private final CacheInfoDAO cacheInfoDAO;
+	public final LocateDAO locateDAO;
+
+	public Question(AuthzTrans trans, Cluster cluster, String keyspace, boolean startClean) throws APIException, IOException {
+		PERMS = trans.slot("USER_PERMS");
+		trans.init().log("Instantiating DAOs");
+		long expiresIn = Long.parseLong(trans.getProperty(Config.AAF_USER_EXPIRES, Config.AAF_USER_EXPIRES_DEF));
+		historyDAO = new HistoryDAO(trans, cluster, keyspace);
+
+		// Deal with Cached Entries
+		cacheInfoDAO = new CacheInfoDAO(trans, historyDAO);
+
+		nsDAO = new CachedNSDAO(new NsDAO(trans, historyDAO, cacheInfoDAO),cacheInfoDAO, expiresIn);
+		permDAO = new CachedPermDAO(new PermDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO, expiresIn);
+		roleDAO = new CachedRoleDAO(new RoleDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO, expiresIn);
+		userRoleDAO = new CachedUserRoleDAO(new UserRoleDAO(trans, historyDAO,cacheInfoDAO), cacheInfoDAO, expiresIn);
+		credDAO = new CachedCredDAO(new CredDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO, expiresIn);
+		certDAO = new CachedCertDAO(new CertDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO, expiresIn);
+
+		locateDAO = new LocateDAO(trans,historyDAO);
+		futureDAO = new FutureDAO(trans, historyDAO);
+		delegateDAO = new DelegateDAO(trans, historyDAO);
+		approvalDAO = new ApprovalDAO(trans, historyDAO);
+
+		// Only want to aggressively cleanse User related Caches... The others,
+		// just normal refresh
+		if(startClean) {
+			CachedDAO.startCleansing(trans.env(), credDAO, userRoleDAO);
+			CachedDAO.startRefresh(trans.env(), cacheInfoDAO);
+		}
+		// Set a Timer to Check Caches to send messages for Caching changes
+		
+		if(specialLogSlot==null) {
+			specialLogSlot = trans.slot(AuthzTransFilter.SPECIAL_LOG_SLOT);
+		}
+		
+		if(transIDSlot==null) {
+			transIDSlot = trans.slot(AuthzTransFilter.TRANS_ID_SLOT);
+		}
+		
+		AbsCassDAO.primePSIs(trans);
+	}
+
+
+	public void close(AuthzTrans trans) {
+		historyDAO.close(trans);
+		cacheInfoDAO.close(trans);
+		nsDAO.close(trans);
+		permDAO.close(trans);
+		roleDAO.close(trans);
+		userRoleDAO.close(trans);
+		credDAO.close(trans);
+		certDAO.close(trans);
+		delegateDAO.close(trans);
+		futureDAO.close(trans);
+		approvalDAO.close(trans);
+	}
+
+	public Result<PermDAO.Data> permFrom(AuthzTrans trans, String type,
+			String instance, String action) {
+		Result<NsDAO.Data> rnd = deriveNs(trans, type);
+		if (rnd.isOK()) {
+			return Result.ok(new PermDAO.Data(new NsSplit(rnd.value, type),
+					instance, action));
+		} else {
+			return Result.err(rnd);
+		}
+	}
+
+	/**
+	 * getPermsByUser
+	 * 
+	 * Because this call is frequently called internally, AND because we already
+	 * look for it in the initial Call, we cache within the Transaction
+	 * 
+	 * @param trans
+	 * @param user
+	 * @return
+	 */
+	public Result<List<PermDAO.Data>> getPermsByUser(AuthzTrans trans, String user, boolean lookup) {
+		return PermLookup.get(trans, this, user).getPerms(lookup);
+	}
+	
+	public Result<List<PermDAO.Data>> getPermsByUserFromRolesFilter(AuthzTrans trans, String user, String forUser) {
+		PermLookup plUser = PermLookup.get(trans, this, user);
+		Result<Set<String>> plPermNames = plUser.getPermNames();
+		if(plPermNames.notOK()) {
+			return Result.err(plPermNames);
+		}
+		
+		Set<String> nss;
+		if(forUser.equals(user)) {
+			nss = null;
+		} else {
+			// Setup a TreeSet to check on Namespaces to 
+			nss = new TreeSet<String>();
+			PermLookup fUser = PermLookup.get(trans, this, forUser);
+			Result<Set<String>> forUpn = fUser.getPermNames();
+			if(forUpn.notOK()) {
+				return Result.err(forUpn);
+			}
+			
+			for(String pn : forUpn.value) {
+				Result<String[]> decoded = PermDAO.Data.decodeToArray(trans, this, pn);
+				if(decoded.isOKhasData()) {
+					nss.add(decoded.value[0]);
+				} else {
+					trans.error().log(pn,", derived from a Role, is invalid:",decoded.errorString());
+				}
+			}
+		}
+
+		List<PermDAO.Data> rlpUser = new ArrayList<PermDAO.Data>();
+		Result<PermDAO.Data> rpdd;
+		PermDAO.Data pdd;
+		for(String pn : plPermNames.value) {
+			rpdd = PermDAO.Data.decode(trans, this, pn);
+			if(rpdd.isOKhasData()) {
+				pdd=rpdd.value;
+				if(nss==null || nss.contains(pdd.ns)) {
+					rlpUser.add(pdd);
+				}
+			} else {
+				trans.error().log(pn,", derived from a Role, is invalid.  Run Data Cleanup:",rpdd.errorString());
+			}
+		}
+		return Result.ok(rlpUser); 
+	}
+
+	public Result<List<PermDAO.Data>> getPermsByType(AuthzTrans trans, String perm) {
+		Result<NsSplit> nss = deriveNsSplit(trans, perm);
+		if (nss.notOK()) {
+			return Result.err(nss);
+		}
+		return permDAO.readByType(trans, nss.value.ns, nss.value.name);
+	}
+
+	public Result<List<PermDAO.Data>> getPermsByName(AuthzTrans trans,
+			String type, String instance, String action) {
+		Result<NsSplit> nss = deriveNsSplit(trans, type);
+		if (nss.notOK()) {
+			return Result.err(nss);
+		}
+		return permDAO.read(trans, nss.value.ns, nss.value.name, instance,action);
+	}
+
+	public Result<List<PermDAO.Data>> getPermsByRole(AuthzTrans trans, String role, boolean lookup) {
+		Result<NsSplit> nss = deriveNsSplit(trans, role);
+		if (nss.notOK()) {
+			return Result.err(nss);
+		}
+
+		Result<List<RoleDAO.Data>> rlrd = roleDAO.read(trans, nss.value.ns,
+				nss.value.name);
+		if (rlrd.notOKorIsEmpty()) {
+			return Result.err(rlrd);
+		}
+		// Using Set to avoid duplicates
+		Set<String> permNames = new HashSet<String>();
+		if (rlrd.isOKhasData()) {
+			for (RoleDAO.Data drr : rlrd.value) {
+				permNames.addAll(drr.perms(false));
+			}
+		}
+
+		// Note: It should be ok for a Valid user to have no permissions -
+		// Jonathan 8/12/2013
+		List<PermDAO.Data> perms = new ArrayList<PermDAO.Data>();
+		for (String perm : permNames) {
+			Result<PermDAO.Data> pr = PermDAO.Data.decode(trans, this, perm);
+			if (pr.notOK()) {
+				return Result.err(pr);
+			}
+
+			if(lookup) {
+				Result<List<PermDAO.Data>> rlpd = permDAO.read(trans, pr.value);
+				if (rlpd.isOKhasData()) {
+					for (PermDAO.Data pData : rlpd.value) {
+						perms.add(pData);
+					}
+				}
+			} else {
+				perms.add(pr.value);
+			}
+		}
+
+		return Result.ok(perms);
+	}
+
+	public Result<List<RoleDAO.Data>> getRolesByName(AuthzTrans trans,
+			String role) {
+		Result<NsSplit> nss = deriveNsSplit(trans, role);
+		if (nss.notOK()) {
+			return Result.err(nss);
+		}
+		String r = nss.value.name;
+		if (r.endsWith(".*")) { // do children Search
+			return roleDAO.readChildren(trans, nss.value.ns,
+					r.substring(0, r.length() - 2));
+		} else if (ASTERIX.equals(r)) {
+			return roleDAO.readChildren(trans, nss.value.ns, ASTERIX);
+		} else {
+			return roleDAO.read(trans, nss.value.ns, r);
+		}
+	}
+
+	/**
+	 * Derive NS
+	 * 
+	 * Given a Child Namespace, figure out what the best Namespace parent is.
+	 * 
+	 * For instance, if in the NS table, the parent "com.att" exists, but not
+	 * "org.osaaf.child" or "org.osaaf.a.b.c", then passing in either
+	 * "org.osaaf.child" or "org.osaaf.a.b.c" will return "com.att"
+	 * 
+	 * Uses recursive search on Cached DAO data
+	 * 
+	 * @param trans
+	 * @param child
+	 * @return
+	 */
+	public Result<NsDAO.Data> deriveNs(AuthzTrans trans, String child) {
+		Result<List<NsDAO.Data>> r = nsDAO.read(trans, child);
+		
+		if (r.isOKhasData()) {
+			return Result.ok(r.value.get(0));
+		} else {
+			int dot = child == null ? -1 : child.lastIndexOf('.');
+			if (dot < 0) {
+				return Result.err(Status.ERR_NsNotFound,
+						"No Namespace for [%s]", child);
+			} else {
+				return deriveNs(trans, child.substring(0, dot));
+			}
+		}
+	}
+
+	public Result<NsDAO.Data> deriveFirstNsForType(AuthzTrans trans, String str, NsType type) {
+		NsDAO.Data nsd;
+
+		for(String lookup = str;!".".equals(lookup) && lookup!=null;) {
+			Result<List<NsDAO.Data>> rld = nsDAO.read(trans, lookup);
+			if(rld.isOKhasData()) {
+				nsd=rld.value.get(0);
+				lookup = nsd.parent;
+				if(type.type == nsd.type) {
+					return Result.ok(nsd);
+				}
+			} else {
+				return Result.err(Status.ERR_NsNotFound,"There is no valid Company Namespace for %s",str);
+			}
+		}
+		return Result.err(Status.ERR_NotFound, str + " does not contain type " + type.name());
+	}
+
+	public Result<NsSplit> deriveNsSplit(AuthzTrans trans, String child) {
+		Result<NsDAO.Data> ndd = deriveNs(trans, child);
+		if (ndd.isOK()) {
+			NsSplit nss = new NsSplit(ndd.value, child);
+			if (nss.isOK()) {
+				return Result.ok(nss);
+			} else {
+				return Result.err(Status.ERR_NsNotFound,
+						"Cannot split [%s] into valid namespace elements",
+						child);
+			}
+		}
+		return Result.err(ndd);
+	}
+
+	/**
+	 * Translate an ID into it's domain
+	 * 
+	 * i.e. myid1234@aaf.att.com results in domain of com.att.aaf
+	 * 
+	 * @param id
+	 * @return
+	 */
+	public static String domain2ns(String id) {
+		int at = id.indexOf('@');
+		if (at >= 0) {
+			String[] domain = id.substring(at + 1).split("\\.");
+			StringBuilder ns = new StringBuilder(id.length());
+			boolean first = true;
+			for (int i = domain.length - 1; i >= 0; --i) {
+				if (first) {
+					first = false;
+				} else {
+					ns.append('.');
+				}
+				ns.append(domain[i]);
+			}
+			return ns.toString();
+		} else {
+			return "";
+		}
+
+	}
+
+	/**
+	 * Validate Namespace of ID@Domain
+	 * 
+	 * Namespace is reverse order of Domain.
+	 * 
+	 * @param trans
+	 * @param id
+	 * @return
+	 */
+	public Result<NsDAO.Data> validNSOfDomain(AuthzTrans trans, String id) {
+		// Take domain, reverse order, and check on NS
+		String ns;
+		if(id.indexOf('@')<0) { // it's already an ns, not an ID
+			ns = id;
+		} else {
+			ns = domain2ns(id);
+		}
+		if (ns.length() > 0) {
+			if(!trans.org().getDomain().equals(ns)) { 
+				Result<List<NsDAO.Data>> rlnsd = nsDAO.read(trans, ns);
+				if (rlnsd.isOKhasData()) {
+					return Result.ok(rlnsd.value.get(0));
+				}
+			}
+		}
+		return Result.err(Status.ERR_NsNotFound,
+				"A Namespace is not available for %s", id);
+	}
+
+	public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,NsDAO.Data ndd, Access access) {
+		// <ns>.access|:role:<role name>|<read|write>
+		String ns = ndd.name;
+		int last;
+		do {
+			if (isGranted(trans, user, ns, ACCESS, ":ns", access.name())) {
+				return Result.ok(ndd);
+			}
+			if ((last = ns.lastIndexOf('.')) >= 0) {
+				ns = ns.substring(0, last);
+			}
+		} while (last >= 0);
+		// com.att.aaf.ns|:<client ns>:ns|<access>
+		// AAF-724 - Make consistent response for May User", and not take the
+		// last check... too confusing.
+		Result<NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, ":"	+ ndd.name + ":ns", access.name());
+		if (rv.isOK()) {
+			return rv;
+		} else if(rv.status==Result.ERR_Backend) {
+			return Result.err(rv);
+		} else {
+			return Result.err(Status.ERR_Denied, "[%s] may not %s in NS [%s]",
+					user, access.name(), ndd.name);
+		}
+	}
+
+	public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user, RoleDAO.Data rdd, Access access) {
+		Result<NsDAO.Data> rnsd = deriveNs(trans, rdd.ns);
+		if (rnsd.isOK()) {
+			return mayUser(trans, user, rnsd.value, rdd, access);
+		}
+		return rnsd;
+	}
+
+	public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user, NsDAO.Data ndd, RoleDAO.Data rdd, Access access) {
+		// 1) Is User in the Role?
+		Result<List<UserRoleDAO.Data>> rurd = userRoleDAO.readUserInRole(trans, user, rdd.fullName());
+		if (rurd.isOKhasData()) {
+			return Result.ok(ndd);
+		}
+
+		String roleInst = ":role:" + rdd.name;
+		// <ns>.access|:role:<role name>|<read|write>
+		String ns = rdd.ns;
+		int last;
+		do {
+			if (isGranted(trans, user, ns,ACCESS, roleInst, access.name())) {
+				return Result.ok(ndd);
+			}
+			if ((last = ns.lastIndexOf('.')) >= 0) {
+				ns = ns.substring(0, last);
+			}
+		} while (last >= 0);
+
+		// Check if Access by Global Role perm
+		// com.att.aaf.ns|:<client ns>:role:name|<access>
+		Result<NsDAO.Data> rnsd = mayUserVirtueOfNS(trans, user, ndd, ":"
+				+ rdd.ns + roleInst, access.name());
+		if (rnsd.isOK()) {
+			return rnsd;
+		} else if(rnsd.status==Result.ERR_Backend) {
+			return Result.err(rnsd);
+		}
+
+		// Check if Access to Whole NS
+		// AAF-724 - Make consistent response for May User", and not take the
+		// last check... too confusing.
+		Result<org.onap.aaf.auth.dao.cass.NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, 
+				":" + rdd.ns + ":ns", access.name());
+		if (rv.isOK()) {
+			return rv;
+		} else if(rnsd.status==Result.ERR_Backend) {
+			return Result.err(rnsd);
+		} else {
+			return Result.err(Status.ERR_Denied, "[%s] may not %s Role [%s]",
+					user, access.name(), rdd.fullName());
+		}
+
+	}
+
+	public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,PermDAO.Data pdd, Access access) {
+		Result<NsDAO.Data> rnsd = deriveNs(trans, pdd.ns);
+		if (rnsd.isOK()) {
+			return mayUser(trans, user, rnsd.value, pdd, access);
+		}
+		return rnsd;
+	}
+
+	public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,NsDAO.Data ndd, PermDAO.Data pdd, Access access) {
+		if (isGranted(trans, user, pdd.ns, pdd.type, pdd.instance, pdd.action)) {
+			return Result.ok(ndd);
+		}
+		String permInst = ":perm:" + pdd.type + ':' + pdd.instance + ':' + pdd.action;
+		// <ns>.access|:role:<role name>|<read|write>
+		String ns = ndd.name;
+		int last;
+		do {
+			if (isGranted(trans, user, ns, ACCESS, permInst, access.name())) {
+				return Result.ok(ndd);
+			}
+			if ((last = ns.lastIndexOf('.')) >= 0) {
+				ns = ns.substring(0, last);
+			}
+		} while (last >= 0);
+
+		// Check if Access by NS perm
+		// com.att.aaf.ns|:<client ns>:role:name|<access>
+		Result<NsDAO.Data> rnsd = mayUserVirtueOfNS(trans, user, ndd, ":" + pdd.ns + permInst, access.name());
+		if (rnsd.isOK()) {
+			return rnsd;
+		} else if(rnsd.status==Result.ERR_Backend) {
+			return Result.err(rnsd);
+		}
+
+		// Check if Access to Whole NS
+		// AAF-724 - Make consistent response for May User", and not take the
+		// last check... too confusing.
+		Result<NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, ":"	+ pdd.ns + ":ns", access.name());
+		if (rv.isOK()) {
+			return rv;
+		} else {
+			return Result.err(Status.ERR_Denied,
+					"[%s] may not %s Perm [%s|%s|%s]", user, access.name(),
+					pdd.fullType(), pdd.instance, pdd.action);
+		}
+
+	}
+
+	public Result<Void> mayUser(AuthzTrans trans, DelegateDAO.Data dd, Access access) {
+		try {
+			Result<NsDAO.Data> rnsd = deriveNs(trans, domain2ns(trans.user()));
+			if(rnsd.isOKhasData() && mayUserVirtueOfNS(trans,trans.user(),rnsd.value, ":"	+ rnsd.value.name + ":ns", access.name()).isOK()) {
+				return Result.ok();
+			}
+			boolean isUser = trans.user().equals(dd.user);
+			boolean isDelegate = dd.delegate != null
+					&& (dd.user.equals(dd.delegate) || trans.user().equals(
+							dd.delegate));
+			Organization org = trans.org();
+			switch (access) {
+			case create:
+				if (org.getIdentity(trans, dd.user) == null) {
+					return Result.err(Status.ERR_UserNotFound,
+							"[%s] is not a user in the company database.",
+							dd.user);
+				}
+				if (!dd.user.equals(dd.delegate) && org.getIdentity(trans, dd.delegate) == null) {
+					return Result.err(Status.ERR_UserNotFound,
+							"[%s] is not a user in the company database.",
+							dd.delegate);
+				}
+				if (!trans.requested(REQD_TYPE.force) && dd.user != null && dd.user.equals(dd.delegate)) {
+					return Result.err(Status.ERR_BadData,
+							"[%s] cannot be a delegate for self", dd.user);
+				}
+				if (!isUser	&& !isGranted(trans, trans.user(), ROOT_NS,DELG,
+								org.getDomain(), Question.CREATE)) {
+					return Result.err(Status.ERR_Denied,
+							"[%s] may not create a delegate for [%s]",
+							trans.user(), dd.user);
+				}
+				break;
+			case read:
+			case write:
+				if (!isUser	&& !isDelegate && 
+						!isGranted(trans, trans.user(), ROOT_NS,DELG,org.getDomain(), access.name())) {
+					return Result.err(Status.ERR_Denied,
+							"[%s] may not %s delegates for [%s]", trans.user(),
+							access.name(), dd.user);
+				}
+				break;
+			default:
+				return Result.err(Status.ERR_BadData,"Unknown Access type [%s]", access.name());
+			}
+		} catch (Exception e) {
+			return Result.err(e);
+		}
+		return Result.ok();
+	}
+
+	/*
+	 * Check (recursively, if necessary), if able to do something based on NS
+	 */
+	private Result<NsDAO.Data> mayUserVirtueOfNS(AuthzTrans trans, String user,	NsDAO.Data nsd, String ns_and_type, String access) {
+		String ns = nsd.name;
+
+		// If an ADMIN of the Namespace, then allow
+		
+		Result<List<UserRoleDAO.Data>> rurd;
+		if ((rurd = userRoleDAO.readUserInRole(trans, user, ns+DOT_ADMIN)).isOKhasData()) {
+			return Result.ok(nsd);
+		} else if(rurd.status==Result.ERR_Backend) {
+			return Result.err(rurd);
+		}
+		
+		// If Specially granted Global Permission
+		if (isGranted(trans, user, ROOT_NS,NS, ns_and_type, access)) {
+			return Result.ok(nsd);
+		}
+
+		// Check recur
+
+		int dot = ns.length();
+		if ((dot = ns.lastIndexOf('.', dot - 1)) >= 0) {
+			Result<NsDAO.Data> rnsd = deriveNs(trans, ns.substring(0, dot));
+			if (rnsd.isOK()) {
+				rnsd = mayUserVirtueOfNS(trans, user, rnsd.value, ns_and_type,access);
+			} else if(rnsd.status==Result.ERR_Backend) {
+				return Result.err(rnsd);
+			}
+			if (rnsd.isOK()) {
+				return Result.ok(nsd);
+			} else if(rnsd.status==Result.ERR_Backend) {
+				return Result.err(rnsd);
+			}
+		}
+		return Result.err(Status.ERR_Denied, "%s may not %s %s", user, access,
+				ns_and_type);
+	}
+
+	
+	/**
+	 * isGranted
+	 * 
+	 * Important function - Check internal Permission Schemes for Permission to
+	 * do things
+	 * 
+	 * @param trans
+	 * @param type
+	 * @param instance
+	 * @param action
+	 * @return
+	 */
+	public boolean isGranted(AuthzTrans trans, String user, String ns, String type,String instance, String action) {
+		Result<List<PermDAO.Data>> perms = getPermsByUser(trans, user, false);
+		if (perms.isOK()) {
+			for (PermDAO.Data pd : perms.value) {
+				if (ns.equals(pd.ns)) {
+					if (type.equals(pd.type)) {
+						if (PermEval.evalInstance(pd.instance, instance)) {
+							if(PermEval.evalAction(pd.action, action)) { // don't return action here, might miss other action 
+								return true;
+							}
+						}
+					}
+				}
+			}
+		}
+		return false;
+	}
+
+	public Result<Date> doesUserCredMatch(AuthzTrans trans, String user, byte[] cred) throws DAOException {
+		Result<List<CredDAO.Data>> result;
+		TimeTaken tt = trans.start("Read DB Cred", Env.REMOTE);
+		try {
+			result = credDAO.readID(trans, user);
+		} finally {
+			tt.done();
+		}
+
+		Result<Date> rv = null;
+		if(result.isOK()) {
+			if (result.isEmpty()) {
+				rv = Result.err(Status.ERR_UserNotFound, user);
+				if (willSpecialLog(trans,user)) {
+					trans.audit().log("Special DEBUG:", user, " does not exist in DB");
+				}
+			} else {
+				Date now = new Date();//long now = System.currentTimeMillis();
+				// Bug noticed 6/22. Sorting on the result can cause Concurrency Issues.	 
+				List<CredDAO.Data> cddl;
+				if(result.value.size() > 1) {
+					cddl = new ArrayList<CredDAO.Data>(result.value.size());
+					for(CredDAO.Data old : result.value) {
+						if(old.type==CredDAO.BASIC_AUTH || old.type==CredDAO.BASIC_AUTH_SHA256) {
+							cddl.add(old);
+						}
+					}
+					if(cddl.size()>1) {
+						Collections.sort(cddl,new Comparator<CredDAO.Data>() {
+							@Override
+							public int compare(org.onap.aaf.auth.dao.cass.CredDAO.Data a,
+											   org.onap.aaf.auth.dao.cass.CredDAO.Data b) {
+								return b.expires.compareTo(a.expires);
+							}
+						});
+					}
+				} else {
+					cddl = result.value;
+				}
+	
+				Date expired = null;
+				StringBuilder debug = willSpecialLog(trans,user)?new StringBuilder():null;
+				for (CredDAO.Data cdd : cddl) {
+					if(!cdd.id.equals(user)) {
+						trans.error().log("doesUserCredMatch DB call does not match for user: " + user);
+					}
+					if (cdd.expires.after(now)) {
+						byte[] dbcred = cdd.cred.array();
+						
+						try {
+							switch(cdd.type) {
+								case CredDAO.BASIC_AUTH:
+									byte[] md5=Hash.hashMD5(cred);
+									if(Hash.compareTo(md5,dbcred)==0) {
+										checkLessThanDays(trans,7,now,cdd);
+										return Result.ok(cdd.expires);
+									} else if (debug!=null) {
+										load(debug, cdd,dbcred);
+									}
+									break;
+								case CredDAO.BASIC_AUTH_SHA256:
+									ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + cred.length);
+									bb.putInt(cdd.other);
+									bb.put(cred);
+									byte[] hash = Hash.hashSHA256(bb.array());
+	
+									if(Hash.compareTo(hash,dbcred)==0) {
+										checkLessThanDays(trans,7,now,cdd);
+										return Result.ok(cdd.expires);
+									} else if (debug!=null) {
+										load(debug, cdd, dbcred);
+									}
+									break;
+								default:
+									trans.error().log("Unknown Credential Type %s for %s, %s",Integer.toString(cdd.type),cdd.id, Chrono.dateTime(cdd.expires));
+							}
+						} catch (NoSuchAlgorithmException e) {
+							trans.error().log(e);
+						}
+					} else {
+						if(expired==null || expired.before(cdd.expires)) {
+							expired = cdd.expires;
+						}
+					}
+				} // end for each
+				if(debug==null) {
+					debug=new StringBuilder();
+				} else {
+					debug.append(", ");
+				}
+				
+				debug.append("cred=");
+				debug.append(new String(cred));
+				trans.audit().printf("No cred matches ip=%s, user=%s, %s\n",trans.ip(),user,trans.encryptor().encrypt(debug.toString()));
+				if(expired!=null) {
+					// Note: this is only returned if there are no good Credentials
+					rv = Result.err(Status.ERR_Security,
+							"Credentials %s from %s expired %s",trans.user(), trans.ip(), Chrono.dateTime(expired));
+				}
+			}
+		} else {
+			return Result.err(result);
+		}
+		return rv == null ? Result.create((Date) null, Status.ERR_Security, "Wrong credential") : rv;
+	}
+
+
+	private void load(StringBuilder debug, Data cdd, byte[] dbcred) {
+		debug.append("DB Entry: user=");
+		debug.append(cdd.id);
+		debug.append(",type=");
+		debug.append(cdd.type);
+		debug.append(",cred=");
+		debug.append(Hash.toHex(dbcred));
+		debug.append(",expires=");
+		debug.append(Chrono.dateTime(cdd.expires));
+		debug.append('\n');
+	}
+
+
+	private void checkLessThanDays(AuthzTrans trans, int days, Date now, Data cdd) {
+		long close = now.getTime() + (days * 86400000);
+		long cexp=cdd.expires.getTime();
+		if(cexp<close) {
+			int daysLeft = days-(int)((close-cexp)/86400000);
+			trans.audit().printf("user=%s,ip=%s,expires=%s,days=%d,msg=\"Password expires in less than %d day%s\"",
+				cdd.id,trans.ip(),Chrono.dateOnlyStamp(cdd.expires),daysLeft, daysLeft,daysLeft==1?"":"s");
+		}
+	}
+
+
+	public Result<CredDAO.Data> userCredSetup(AuthzTrans trans, CredDAO.Data cred) {
+		if(cred.type==CredDAO.RAW) {
+			TimeTaken tt = trans.start("Hash Cred", Env.SUB);
+			try {
+				cred.type = CredDAO.BASIC_AUTH_SHA256;
+				cred.other = random.nextInt();
+				ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + cred.cred.capacity());
+				bb.putInt(cred.other);
+				bb.put(cred.cred);
+				byte[] hash = Hash.hashSHA256(bb.array());
+				cred.cred = ByteBuffer.wrap(hash);
+				return Result.ok(cred);
+			} catch (NoSuchAlgorithmException e) {
+				return Result.err(Status.ERR_General,e.getLocalizedMessage());
+			} finally {
+				tt.done();
+			}
+			
+		}
+		return Result.err(Status.ERR_Security,"invalid/unreadable credential");
+	}
+	
+	public Result<Boolean> userCredCheck(AuthzTrans trans, CredDAO.Data orig, final byte[] raw) {
+			TimeTaken tt = trans.start("CheckCred Cred", Env.SUB);
+			try {
+				switch(orig.type) {
+					case CredDAO.BASIC_AUTH_SHA256:
+						ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + raw.length);
+						bb.putInt(orig.other);
+						bb.put(raw);
+						return Result.ok(Hash.compareTo(orig.cred.array(),Hash.hashSHA256(bb.array()))==0);
+					case CredDAO.BASIC_AUTH:
+						return Result.ok( Hash.compareTo(orig.cred.array(), Hash.hashMD5(raw))==0);
+					default:
+						return Result.ok(false);
+				}
+			} catch (NoSuchAlgorithmException e) {
+				return Result.err(Status.ERR_General,e.getLocalizedMessage());
+			} finally {
+				tt.done();
+			}
+	}
+
+	public static final String APPROVED = "APPROVE";
+	public static final String REJECT = "REJECT";
+	public static final String PENDING = "PENDING";
+
+	public Result<Void> canAddUser(AuthzTrans trans, UserRoleDAO.Data data,
+			List<ApprovalDAO.Data> approvals) {
+		// get the approval policy for the organization
+
+		// get the list of approvals with an accept status
+
+		// validate the approvals against the policy
+
+		// for now check if all approvals are received and return
+		// SUCCESS/FAILURE/SKIP
+		boolean bReject = false;
+		boolean bPending = false;
+
+		for (ApprovalDAO.Data approval : approvals) {
+			if (approval.status.equals(REJECT)) {
+				bReject = true;
+			} else if (approval.status.equals(PENDING)) {
+				bPending = true;
+			}
+		}
+		if (bReject) {
+			return Result.err(Status.ERR_Policy,
+					"Approval Polocy not conformed");
+		}
+		if (bPending) {
+			return Result.err(Status.ERR_ActionNotCompleted,
+					"Required Approvals not received");
+		}
+
+		return Result.ok();
+	}
+
+	private static final String NO_CACHE_NAME = "No Cache Data named %s";
+
+	public Result<Void> clearCache(AuthzTrans trans, String cname) {
+		boolean all = "all".equals(cname);
+		Result<Void> rv = null;
+
+		if (all || NsDAO.TABLE.equals(cname)) {
+			int seg[] = series(NsDAO.CACHE_SEG);
+			for(int i: seg) {cacheClear(trans, NsDAO.TABLE,i);}
+			rv = cacheInfoDAO.touch(trans, NsDAO.TABLE, seg);
+		}
+		if (all || PermDAO.TABLE.equals(cname)) {
+			int seg[] = series(NsDAO.CACHE_SEG);
+			for(int i: seg) {cacheClear(trans, PermDAO.TABLE,i);}
+			rv = cacheInfoDAO.touch(trans, PermDAO.TABLE,seg);
+		}
+		if (all || RoleDAO.TABLE.equals(cname)) {
+			int seg[] = series(NsDAO.CACHE_SEG);
+			for(int i: seg) {cacheClear(trans, RoleDAO.TABLE,i);}
+			rv = cacheInfoDAO.touch(trans, RoleDAO.TABLE,seg);
+		}
+		if (all || UserRoleDAO.TABLE.equals(cname)) {
+			int seg[] = series(NsDAO.CACHE_SEG);
+			for(int i: seg) {cacheClear(trans, UserRoleDAO.TABLE,i);}
+			rv = cacheInfoDAO.touch(trans, UserRoleDAO.TABLE,seg);
+		}
+		if (all || CredDAO.TABLE.equals(cname)) {
+			int seg[] = series(NsDAO.CACHE_SEG);
+			for(int i: seg) {cacheClear(trans, CredDAO.TABLE,i);}
+			rv = cacheInfoDAO.touch(trans, CredDAO.TABLE,seg);
+		}
+		if (all || CertDAO.TABLE.equals(cname)) {
+			int seg[] = series(NsDAO.CACHE_SEG);
+			for(int i: seg) {cacheClear(trans, CertDAO.TABLE,i);}
+			rv = cacheInfoDAO.touch(trans, CertDAO.TABLE,seg);
+		}
+
+		if (rv == null) {
+			rv = Result.err(Status.ERR_BadData, NO_CACHE_NAME, cname);
+		}
+		return rv;
+	}
+
+	public Result<Void> cacheClear(AuthzTrans trans, String cname,Integer segment) {
+		Result<Void> rv;
+		if (NsDAO.TABLE.equals(cname)) {
+			rv = nsDAO.invalidate(segment);
+		} else if (PermDAO.TABLE.equals(cname)) {
+			rv = permDAO.invalidate(segment);
+		} else if (RoleDAO.TABLE.equals(cname)) {
+			rv = roleDAO.invalidate(segment);
+		} else if (UserRoleDAO.TABLE.equals(cname)) {
+			rv = userRoleDAO.invalidate(segment);
+		} else if (CredDAO.TABLE.equals(cname)) {
+			rv = credDAO.invalidate(segment);
+		} else if (CertDAO.TABLE.equals(cname)) {
+			rv = certDAO.invalidate(segment);
+		} else {
+			rv = Result.err(Status.ERR_BadData, NO_CACHE_NAME, cname);
+		}
+		return rv;
+	}
+
+	private int[] series(int max) {
+		int[] series = new int[max];
+		for (int i = 0; i < max; ++i)
+			series[i] = i;
+		return series;
+	}
+
+	public boolean isDelegated(AuthzTrans trans, String user, String approver, Map<String,Result<List<DelegateDAO.Data>>> rldd ) {
+		Result<List<DelegateDAO.Data>> userDelegatedFor = rldd.get(user);
+		if(userDelegatedFor==null) {
+			userDelegatedFor=delegateDAO.readByDelegate(trans, user);
+			rldd.put(user, userDelegatedFor);
+		}
+		if(userDelegatedFor.isOKhasData()) {
+			for (DelegateDAO.Data curr : userDelegatedFor.value) {
+				if (curr.user.equals(approver) && curr.delegate.equals(user)
+						&& curr.expires.after(new Date())) {
+					return true;
+				}
+			}
+		}
+		return false;
+	}
+
+	public static boolean willSpecialLog(AuthzTrans trans, String user) {
+		Boolean b = trans.get(specialLogSlot, null);
+		if(b==null) { // we haven't evaluated in this trans for Special Log yet
+			if(specialLog==null) {
+				return false;
+			} else {
+				b = specialLog.contains(user);
+				trans.put(specialLogSlot, b);
+			}
+		}
+		return b;
+	}
+	
+	public static void logEncryptTrace(AuthzTrans trans, String data) {
+		long ti;
+		trans.put(transIDSlot, ti=nextTraceID());
+		trans.trace().log("id="+Long.toHexString(ti)+",data=\""+trans.env().encryptor().encrypt(data)+'"');
+	}
+
+	private synchronized static long nextTraceID() {
+		return ++traceID;
+	}
+
+	public static synchronized boolean specialLogOn(AuthzTrans trans, String id) {
+		if (specialLog == null) {
+			specialLog = new HashSet<String>();
+		}
+		boolean rc = specialLog.add(id);
+		if(rc) {
+			trans.trace().printf("Trace on for %s requested by %s",id,trans.user());			
+		}
+		return rc;
+	}
+
+	public static synchronized boolean specialLogOff(AuthzTrans trans, String id) {
+		if(specialLog==null) {
+			return false;
+		}
+		boolean rv = specialLog.remove(id);
+		if (specialLog.isEmpty()) {
+			specialLog = null;
+		}
+		if(rv) {
+			trans.trace().printf("Trace off for %s requested by %s",id,trans.user());			
+		}
+		return rv;
+	}
+
+	/** 
+	 * canMove
+	 * Which Types can be moved
+	 * @param nsType
+	 * @return
+	 */
+	public boolean canMove(NsType nsType) {
+		boolean rv;
+		switch(nsType) {
+			case DOT:
+			case ROOT:
+			case COMPANY:
+			case UNKNOWN:
+				rv = false;
+				break;
+			default:
+				rv = true;
+		}
+		return rv;
+	}
+
+	public boolean isAdmin(AuthzTrans trans, String user, String ns) {
+		Date now = new Date();
+		Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+DOT_ADMIN);
+		if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){
+			if(urdd.expires.after(now)) {
+				return true;
+			}
+		}};
+		return false;
+	}
+	
+	public boolean isOwner(AuthzTrans trans, String user, String ns) {
+		Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+DOT_OWNER);
+		Date now = new Date();
+		if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){
+			if(urdd.expires.after(now)) {
+				return true;
+			}
+		}};
+		return false;
+	}
+
+	public int countOwner(AuthzTrans trans, String ns) {
+		Result<List<UserRoleDAO.Data>> rur = userRoleDAO.readByRole(trans,ns+DOT_OWNER);
+		Date now = new Date();
+		int count = 0;
+		if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){
+			if(urdd.expires.after(now)) {
+				++count;
+			}
+		}};
+		return count;
+	}
+	
+	/**
+	 * Return a Unique String, (same string, if it is already unique), with only
+	 * lowercase letters, digits and the '.' character.
+	 * 
+	 * @param name
+	 * @return
+	 * @throws IOException 
+	 */
+	public static String toUnique(String name) throws IOException {
+		byte[] from = name.getBytes();
+		StringBuilder sb = new StringBuilder();
+		byte f;
+		for(int i=0;i<from.length;++i) {
+			f=(byte)(from[i]); // printables;
+			sb.append((char)((f>>4)+0x61));
+			sb.append((char)((f&0x0F)+0x61));
+		}
+		return sb.toString();
+	}
+	
+	public static String fromUnique(String name) throws IOException {
+		byte[] from = name.getBytes();
+		StringBuilder sb = new StringBuilder();
+		char c;
+		for(int i=0;i<from.length;++i) {
+			c = (char)((from[i]-0x61)<<4);
+			c |= (from[++i]-0x61);
+			sb.append(c);
+		}
+		return sb.toString();
+	}
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLocator.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLocator.java
new file mode 100644
index 0000000..b854def
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLocator.java
@@ -0,0 +1,132 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.direct;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.onap.aaf.auth.dao.cass.LocateDAO;
+import org.onap.aaf.auth.dao.cass.LocateDAO.Data;
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.cadi.LocatorException;
+import org.onap.aaf.cadi.Access.Level;
+import org.onap.aaf.cadi.aaf.v2_0.AbsAAFLocator;
+import org.onap.aaf.cadi.config.Config;
+import org.onap.aaf.misc.env.util.Split;
+
+import locate.v1_0.Endpoint;
+
+public class DirectAAFLocator extends AbsAAFLocator<AuthzTrans> {
+	private LocateDAO ldao;
+	private int major=-1, minor=-1, patch=-1, pkg=-1;
+	private AuthzEnv env;
+	private final URI uri;
+
+	/**
+	 * 
+	 * @param env
+	 * @param ldao
+	 * @param key  must be one or more of service, version, other in that order
+	 * @throws LocatorException 
+	 */
+	public DirectAAFLocator(AuthzEnv env, LocateDAO ldao, String name, String version) throws LocatorException {
+		super(env.access(), name, 1000L /* Don't hit DB more than once a second */); 
+		this.env = env;
+		this.ldao = ldao;
+		if(version!=null) {
+			try { 
+				String[] v = Split.split('.',version);
+				if(v.length>0) {major = Integer.parseInt(v[0]);}
+				if(v.length>1) {minor = Integer.parseInt(v[1]);}
+				if(v.length>2) {patch = Integer.parseInt(v[2]);}
+				if(v.length>3) {pkg   = Integer.parseInt(v[3]);}
+			} catch (NumberFormatException e) {
+				throw new LocatorException("Invalid Version String: " + version);
+			}
+		}
+		
+		try {
+			uri = new URI(access.getProperty(Config.AAF_LOCATE_URL, "localhost")+"/locate/"+name+':'+version);
+		} catch (URISyntaxException e) {
+			throw new LocatorException(e);
+		}
+		myhostname=null;
+		myport = 0; 
+	}
+	
+	
+	@Override
+	public boolean refresh() {
+		AuthzTrans trans = env.newTransNoAvg();
+		Result<List<Data>> rl = ldao.readByName(trans, name);
+		if(rl.isOK()) {
+			LinkedList<EP> epl = new LinkedList<EP>();
+			for(Data d : rl.value) {
+//				if(myhostname!=null && d.port==myport && d.hostname.equals(myhostname)) {
+//					continue;
+//				}
+				if((major<0 || major==d.major) &&
+				   (minor<0 || minor<=d.minor) &&
+				   (patch<0 || patch==d.patch) &&
+				   (pkg<0   || pkg  ==d.pkg)) {
+					Endpoint endpoint = new Endpoint();
+					endpoint.setName(d.name);
+					endpoint.setHostname(d.hostname);
+					endpoint.setPort(d.port);
+					endpoint.setMajor(d.major);
+					endpoint.setMinor(d.minor);
+					endpoint.setPatch(d.patch);
+					endpoint.setPkg(d.pkg);
+					endpoint.setLatitude(d.latitude);
+					endpoint.setLongitude(d.longitude);
+					endpoint.setProtocol(d.protocol);
+					for(String s : d.subprotocol(false)) {
+						endpoint.getSubprotocol().add(s);
+					}
+					
+					try {
+						epl.add(new EP(endpoint,latitude,longitude));
+					} catch (URISyntaxException e) {
+						e.printStackTrace();
+					}
+				}
+			}
+			Collections.sort(epl);
+			replace(epl);
+			return true;
+		} else {
+			access.log(Level.ERROR, rl.errorString());
+		}
+		return false;
+	}
+
+	@Override
+	protected URI getURI() {
+		return uri;
+	}
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLur.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLur.java
new file mode 100644
index 0000000..5bdb215
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLur.java
@@ -0,0 +1,193 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.direct;
+
+import static org.onap.aaf.auth.layer.Result.OK;
+
+import java.security.Principal;
+import java.util.List;
+
+import org.onap.aaf.auth.dao.cass.NsSplit;
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.dao.cass.PermDAO.Data;
+import org.onap.aaf.auth.dao.hl.Question;
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.env.NullTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.cadi.Lur;
+import org.onap.aaf.cadi.Permission;
+import org.onap.aaf.cadi.Access.Level;
+import org.onap.aaf.cadi.lur.LocalPermission;
+import org.onap.aaf.misc.env.util.Split;
+
+public class DirectAAFLur implements Lur {
+	private final AuthzEnv env;
+	private final Question question;
+	
+	public DirectAAFLur(AuthzEnv env, Question question/*, TokenMgr tm*/) {
+		this.env = env;
+		this.question = question;
+//		oauth = new OAuth2Lur(null);
+	}
+
+	@Override
+	public boolean fish(Principal bait, Permission pond) {
+		return fish(env.newTransNoAvg(),bait,pond);
+	}
+	
+	public boolean fish(AuthzTrans trans, Principal bait, Permission pond) {
+		Result<List<Data>> pdr = question.getPermsByUser(trans, bait.getName(),false);
+		switch(pdr.status) {
+			case OK:
+				for(PermDAO.Data d : pdr.value) {
+					if(new PermPermission(d).match(pond)) {
+						return true;
+					}
+				}
+				break;
+			case Status.ERR_UserRoleNotFound:
+			case Status.ERR_BadData:
+				return false;
+			default:
+				trans.error().log("Can't access Cassandra to fulfill Permission Query: ",pdr.status,"-",pdr.details);
+		}
+		return false;
+	}
+
+	@Override
+	public void fishAll(Principal bait, List<Permission> permissions) {
+		Result<List<Data>> pdr = question.getPermsByUser(env.newTrans(), bait.getName(),false);
+		switch(pdr.status) {
+			case OK:
+				for(PermDAO.Data d : pdr.value) {
+					permissions.add(new PermPermission(d));
+				}
+				break;
+			default:
+				env.error().log("Can't access Cassandra to fulfill Permission Query: ",pdr.status,"-", pdr.details);
+		}
+	}
+	
+	@Override
+	public void destroy() {
+	}
+
+	@Override
+	public boolean handlesExclusively(Permission pond) {
+		return false;
+	}
+	
+	/**
+	 * Small Class implementing CADI's Permission with Cassandra Data
+	 * @author Jonathan
+	 *
+	 */
+	public static class PermPermission implements Permission {
+		private PermDAO.Data data;
+		
+		public PermPermission(PermDAO.Data d) {
+			data = d;
+		}
+		
+		public PermPermission(AuthzTrans trans, Question q, String p) {
+			data = PermDAO.Data.create(trans, q, p);
+		}
+		
+		public PermPermission(String ns, String type, String instance, String action) {
+			data = new PermDAO.Data();
+			data.ns = ns;
+			data.type = type;
+			data.instance = instance;
+			data.action = action;
+		}
+
+		@Override
+		public String getKey() {
+			return data.type;
+		}
+
+		@Override
+		public boolean match(Permission p) {
+			if(p==null) {
+				return false;
+			}
+			PermDAO.Data pd;
+			if(p instanceof DirectAAFLur.PermPermission) {
+				pd = ((DirectAAFLur.PermPermission)p).data;
+				if(data.ns.equals(pd.ns))
+					if(data.type.equals(pd.type))
+						if(data.instance!=null && (data.instance.equals(pd.instance) || "*".equals(data.instance)))
+							if(data.action!=null && (data.action.equals(pd.action) || "*".equals(data.action)))
+								return true;
+			} else{
+				String[] lp = p.getKey().split("\\|");
+				if(lp.length<3)return false;
+				if(data.fullType().equals(lp[0]))
+					if(data.instance!=null && (data.instance.equals(lp[1]) || "*".equals(data.instance)))
+						if(data.action!=null && (data.action.equals(lp[2]) || "*".equals(data.action)))
+							return true;
+			}
+			return false;
+		}
+
+		@Override
+		public String permType() {
+			return "AAFLUR";
+		}
+		
+	}
+	
+	public String toString() {
+		return "DirectAAFLur is enabled";
+		
+	}
+
+	/* (non-Javadoc)
+	 * @see org.onap.aaf.cadi.Lur#handles(java.security.Principal)
+	 */
+	@Override
+	public boolean handles(Principal principal) {
+		return true;
+	}
+
+	@Override
+	public Permission createPerm(String p) {
+		String[] params = Split.split('|', p);
+		if(params.length==3) {
+			Result<NsSplit> nss = question.deriveNsSplit(NullTrans.singleton(), params[0]);
+			if(nss.isOK()) {
+				return new PermPermission(nss.value.ns,nss.value.name,params[1],params[2]);
+			}
+		}
+		return new LocalPermission(p);
+	}
+
+	@Override
+	public void clear(Principal p, StringBuilder sb) {
+		AuthzTrans trans = env.newTrans();
+		question.clearCache(trans,"all");
+		env.log(Level.AUDIT, p.getName(), "has cleared Cache for",getClass().getSimpleName());
+		trans.auditTrail(0, sb);
+	}
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFUserPass.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFUserPass.java
new file mode 100644
index 0000000..f241cdf
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFUserPass.java
@@ -0,0 +1,83 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.direct;
+
+import static org.onap.aaf.auth.layer.Result.OK;
+
+import java.util.Date;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.onap.aaf.auth.dao.DAOException;
+import org.onap.aaf.auth.dao.hl.Question;
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.cadi.CredVal;
+
+/**
+ * DirectAAFUserPass is intended to provide password Validation directly from Cassandra Database, and is only
+ * intended for use in AAF itself.  The normal "AAF Taf" objects are, of course, clients.
+ * 
+ * @author Jonathan
+ *
+ */
+public class DirectAAFUserPass implements CredVal {
+	private final AuthzEnv env;
+	private final Question question;
+	
+	public DirectAAFUserPass(AuthzEnv env, Question question) {
+		this.env = env;
+		this.question = question;
+	}
+
+	@Override
+	public boolean validate(String user, Type type, byte[] pass, Object state) {
+			try {
+				AuthzTrans trans;
+				if(state !=null) {
+					if(state instanceof AuthzTrans) {
+						trans = (AuthzTrans)state;
+					} else {
+						trans = env.newTransNoAvg();
+						if(state instanceof HttpServletRequest) {
+							trans.set((HttpServletRequest)state);
+						}
+					}
+				} else {
+					trans = env.newTransNoAvg();
+				}
+				Result<Date> result = question.doesUserCredMatch(trans, user, pass);
+				trans.logAuditTrail(env.info());
+				switch(result.status) {
+					case OK:
+						return true;
+					default:
+						String ip = trans.ip()==null?"":(", ip="+trans.ip());
+						env.warn().log(user, "failed password validation" + ip + ':',result.errorString());
+				}
+			} catch (DAOException e) {
+				env.error().log(e,"Cannot validate user/pass from cassandra");
+			}
+		return false;
+	}
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectCertIdentity.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectCertIdentity.java
new file mode 100644
index 0000000..b5fcd69
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectCertIdentity.java
@@ -0,0 +1,78 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.direct;
+
+import java.nio.ByteBuffer;
+import java.security.cert.CertificateException;
+import java.security.cert.X509Certificate;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.onap.aaf.auth.dao.cached.CachedCertDAO;
+import org.onap.aaf.auth.dao.cass.CertDAO.Data;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.auth.rserv.TransFilter;
+import org.onap.aaf.cadi.principal.TaggedPrincipal;
+import org.onap.aaf.cadi.principal.X509Principal;
+import org.onap.aaf.cadi.taf.cert.CertIdentity;
+import org.onap.aaf.cadi.taf.cert.X509Taf;
+
+/**
+ * Direct view of CertIdentities
+ * 
+ * Warning:  this class is difficult to instantiate.  The only service that can use it is AAF itself, and is thus 
+ * entered in the "init" after the CachedCertDAO is created.
+ * 
+ * @author Jonathan
+ *
+ */
+public class DirectCertIdentity implements CertIdentity {
+	private static CachedCertDAO certDAO;
+
+	@Override
+	public TaggedPrincipal identity(HttpServletRequest req, X509Certificate cert,	byte[] _certBytes) throws CertificateException {
+	    	byte[] certBytes = _certBytes;
+		if(cert==null && certBytes==null) {
+		    return null;
+		}
+		if(certBytes==null) {
+		    certBytes = cert.getEncoded();
+		}
+		byte[] fingerprint = X509Taf.getFingerPrint(certBytes);
+
+		AuthzTrans trans = (AuthzTrans) req.getAttribute(TransFilter.TRANS_TAG);
+		
+		Result<List<Data>> cresp = certDAO.read(trans, ByteBuffer.wrap(fingerprint));
+		if(cresp.isOKhasData()) {
+			Data cdata = cresp.value.get(0);
+			return new X509Principal(cdata.id,cert,certBytes);
+		}
+		return null;
+	}
+
+	public static void set(CachedCertDAO ccd) {
+		certDAO = ccd;
+	}
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectLocatorCreator.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectLocatorCreator.java
new file mode 100644
index 0000000..3dceb3b
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectLocatorCreator.java
@@ -0,0 +1,59 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.direct;
+
+import org.onap.aaf.auth.dao.cass.LocateDAO;
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.cadi.LocatorException;
+import org.onap.aaf.cadi.aaf.v2_0.AbsAAFLocator;
+
+public class DirectLocatorCreator implements AbsAAFLocator.LocatorCreator {
+	private final AuthzEnv env;
+	private final LocateDAO locateDAO;
+	private String myhostname;
+	private int myport;
+	
+	public DirectLocatorCreator(AuthzEnv env, LocateDAO locateDAO) {
+		this.env = env;
+		this.locateDAO = locateDAO;
+	}
+	
+	@Override
+	public AbsAAFLocator<?> create(String key, String version) throws LocatorException {
+		DirectAAFLocator dal = new DirectAAFLocator(env,locateDAO,key,version);
+		if(myhostname!=null) {
+			dal.setSelf(myhostname, myport);
+		}
+		return dal;
+	}
+	
+	/**
+	 * Make sure DirectAAFLocator created does not include self.
+	 * @param hostname
+	 * @param port
+	 */
+	public void setSelf(String hostname, int port) {
+		myhostname = hostname;
+		myport = port;
+	}
+
+}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectRegistrar.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectRegistrar.java
new file mode 100644
index 0000000..1e8faa2
--- /dev/null
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectRegistrar.java
@@ -0,0 +1,106 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.direct;
+
+import java.net.Inet4Address;
+import java.net.UnknownHostException;
+
+import org.onap.aaf.auth.dao.cass.LocateDAO;
+import org.onap.aaf.auth.dao.cass.LocateDAO.Data;
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.cadi.Access;
+import org.onap.aaf.cadi.CadiException;
+import org.onap.aaf.cadi.client.Result;
+import org.onap.aaf.cadi.config.Config;
+import org.onap.aaf.cadi.register.Registrant;
+import org.onap.aaf.cadi.util.Split;
+
+public class DirectRegistrar implements Registrant<AuthzEnv> {
+	private Data locate;
+	private LocateDAO ldao;
+	public DirectRegistrar(Access access, LocateDAO ldao, String name, String version, int port) throws CadiException {
+		this.ldao = ldao;
+		locate = new LocateDAO.Data();
+		locate.name = name;
+		locate.port = port;
+		
+		try {
+			String latitude = access.getProperty(Config.CADI_LATITUDE, null);
+			if(latitude==null) {
+				latitude = access.getProperty("AFT_LATITUDE", null);
+			}
+			String longitude = access.getProperty(Config.CADI_LONGITUDE, null);
+			if(longitude==null) {
+				longitude = access.getProperty("AFT_LONGITUDE", null);
+			}
+			if(latitude==null || longitude==null) {
+				throw new CadiException(Config.CADI_LATITUDE + " and " + Config.CADI_LONGITUDE + " is required");
+			} else {
+				locate.latitude = Float.parseFloat(latitude);
+				locate.longitude = Float.parseFloat(longitude);
+			}
+			String split[] = Split.splitTrim('.', version);
+			locate.pkg = split.length>3?Integer.parseInt(split[3]):0;
+			locate.patch = split.length>2?Integer.parseInt(split[2]):0;
+			locate.minor = split.length>1?Integer.parseInt(split[1]):0;
+			locate.major = split.length>0?Integer.parseInt(split[0]):0;
+			
+			locate.hostname = access.getProperty(Config.HOSTNAME, Inet4Address.getLocalHost().getHostName());
+			String subprotocols = access.getProperty(Config.CADI_PROTOCOLS, null);
+			if(subprotocols==null) {
+				locate.protocol="http";
+			} else {
+				locate.protocol="https";
+				for(String s : Split.split(',', subprotocols)) {
+					locate.subprotocol(true).add(s);
+				}
+			}
+		} catch (NumberFormatException | UnknownHostException e) {
+			throw new CadiException("Error extracting Data from Properties for Registrar",e);
+		}
+	}
+	
+	@Override
+	public Result<Void> update(AuthzEnv env) {
+		org.onap.aaf.auth.layer.Result<Void> dr = ldao.update(env.newTransNoAvg(), locate);
+		if(dr.isOK()) {
+			return Result.ok(200, null);
+		} else {
+			return Result.err(503, dr.errorString());
+		}
+	}
+
+	/* (non-Javadoc)
+	 * @see org.onap.aaf.auth.server.Registrant#cancel(org.onap.aaf.auth.env.test.AuthzEnv)
+	 */
+	@Override
+	public Result<Void> cancel(AuthzEnv env) {
+		org.onap.aaf.auth.layer.Result<Void> dr = ldao.delete(env.newTransNoAvg(), locate, false);
+		if(dr.isOK()) {
+			return Result.ok(200, null);
+		} else {
+			return Result.err(503, dr.errorString());
+		}
+
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/.gitignore b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/.gitignore
new file mode 100644
index 0000000..488b914
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/.gitignore
@@ -0,0 +1 @@
+/JU_OAuthAppDAO.java
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/AbsJUCass.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/AbsJUCass.java
new file mode 100644
index 0000000..5b6a08c
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/AbsJUCass.java
@@ -0,0 +1,200 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.security.NoSuchAlgorithmException;
+import java.util.Properties;
+
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.cadi.Hash;
+import org.onap.aaf.cadi.Symm;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.Env;
+import org.onap.aaf.misc.env.Trans.Metric;
+
+import com.datastax.driver.core.Cluster;
+
+import junit.framework.Assert;
+
+/**
+ * Do Setup of Cassandra for Cassandra JUnit Testing
+ * 
+ * @author Jonathan
+ *
+ */
+public class AbsJUCass {
+	protected static final String AUTHZ = "authz";
+	protected static Cluster cluster;
+	protected static AuthzEnv env;
+	protected static int iterations = 0;
+	protected static float totals=0.0f;
+	protected static float remote = 0.0f;
+	protected static float json = 0.0f;
+	protected static AuthzTrans trans;
+	protected static boolean details = true;
+	
+	@BeforeClass 
+	public static void startup() throws APIException, IOException {
+		try {
+			synchronized(AUTHZ) {
+				if(env==null) {
+					final String resource = "cadi.properties";
+		            File f = new File("etc" + resource);
+		            InputStream is=null;
+		            Properties props = new Properties();
+		            try {
+		                if(f.exists()) {
+		                    is = new FileInputStream(f);
+		                } else {
+		                    URL rsrc = ClassLoader.getSystemResource(resource);
+		                    is = rsrc.openStream();
+		                }
+		                props.load(is);
+		            } finally {
+		                if(is==null) {
+		                	env= new AuthzEnv();
+		                    Assert.fail(resource + " must exist in etc dir, or in Classpath");
+		                }
+		                is.close();
+		            }
+					env = new AuthzEnv(props);
+				}
+			}
+			cluster = CassAccess.cluster(env,"LOCAL");
+	
+			env.info().log("Connecting to Cluster");
+			try {
+				cluster.connect(AUTHZ);
+			} catch(Exception e) {
+				cluster=null;
+				env.error().log(e);
+				Assert.fail("Not able to connect to DB: " + e.getLocalizedMessage());
+			}
+			env.info().log("Connected");
+			
+			// Load special data here
+			
+			iterations = 0;
+		} catch (Throwable t) {
+			t.printStackTrace();
+			throw t;
+		}
+	}
+	
+	@AfterClass
+	public static void shutdown() {
+		if(cluster!=null) {
+			cluster.close();
+			cluster = null;
+		}
+	}
+
+	@Before
+	public void newTrans() {
+		trans = env.newTrans();
+		
+		trans.setProperty(CassDAOImpl.USER_NAME, System.getProperty("user.name"));
+	}
+	
+	@After
+	public void auditTrail() {
+		if(totals==0) { // "updateTotals()" was not called... just do one Trans
+			StringBuilder sb = new StringBuilder();
+			Metric metric = trans.auditTrail(4, sb, Env.JSON, Env.REMOTE);
+			if(details) {
+				env.info().log(
+				sb,
+				"Total time:",
+				totals += metric.total,
+				"JSON time: ",
+				metric.buckets[0],
+				"REMOTE time: ",
+				metric.buckets[1]
+				);
+			} else {
+				totals += metric.total;
+			}
+		}
+	}
+	
+	protected void updateTotals() {
+		Metric metric = trans.auditTrail(0, null, Env.JSON, Env.REMOTE);
+		totals+=metric.total;
+		json  +=metric.buckets[0];
+		remote+=metric.buckets[1];
+	}
+
+
+	@AfterClass
+	public static void print() {
+		float transTime;
+		if(iterations==0) {
+			transTime=totals;
+		} else {
+			transTime=totals/iterations;
+		}
+		env.info().log(
+		"Total time:",
+		totals,   
+		"JSON time:",
+		json,
+		"REMOTE time:",
+		remote,
+		"Iterations:",
+		iterations,
+		"Transaction time:",
+		transTime
+		);
+	}
+	
+	/**
+	 * Take a User/Pass and turn into an MD5 Hashed BasicAuth
+	 * 
+	 * @param user
+	 * @param pass
+	 * @return
+	 * @throws IOException
+	 * @throws NoSuchAlgorithmException
+	 */
+	public static byte[] userPassToBytes(String user, String pass)
+			throws IOException, NoSuchAlgorithmException {
+		// Take the form of BasicAuth, so as to allow any character in Password
+		// (this is an issue in 1.0)
+		// Also, it makes it quicker to evaluate Basic Auth direct questions
+		String ba = Symm.base64url.encode(user + ':' + pass);
+		// Take MD5 Hash, so that data in DB can't be reversed out.
+		return Hash.hashMD5(ba.getBytes());
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_ApprovalDAO.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_ApprovalDAO.java
new file mode 100644
index 0000000..bc860d0
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_ApprovalDAO.java
@@ -0,0 +1,153 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.util.Date;
+import java.util.List;
+import java.util.UUID;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.ApprovalDAO;
+import org.onap.aaf.auth.dao.cass.ApprovalDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+
+public class JU_ApprovalDAO  extends AbsJUCass {
+	@Test
+	public void testCRUD() throws Exception {
+		ApprovalDAO rrDAO = new ApprovalDAO(trans, cluster, AUTHZ);
+		ApprovalDAO.Data data = new ApprovalDAO.Data();
+		
+		// Note, Create creates the time id
+		data.ticket = UUID.randomUUID(); // normally, read from Future object
+		data.user = "testid@test.com";
+		data.approver = "mySuper@att.com";
+		data.type = "supervisor";
+		data.status = "pending";
+		data.operation = "C";
+		data.updated = new Date();
+		data.memo = "Sing Hey for the break of day";
+		data.last_notified = null;
+		
+		try {
+			// Test create
+			Result<Data> rc = rrDAO.create(trans, data);
+			if(rc.isOKhasData()) { // Create creates the TIMEID.
+				data = rc.value;
+			}
+			
+			// Test Read by Ticket
+			Result<List<ApprovalDAO.Data>> rlad;
+			rlad = rrDAO.readByTicket(trans, data.ticket);
+			assertTrue(rlad.isOK());
+			assertEquals(1,rlad.value.size());
+			compare(data,rlad.value.get(0));
+			
+			// Hold onto original ID for deletion, and read tests
+			UUID id = rlad.value.get(0).id;
+			
+			try {
+				// Test Read by User
+				rlad = rrDAO.readByUser(trans, data.user);
+				assertTrue(rlad.isOKhasData());
+				boolean ok = false;
+				for(ApprovalDAO.Data a : rlad.value) {
+					if(a.id.equals(id)) {
+						ok = true;
+						compare(data,a);
+					}
+				}
+				assertTrue(ok);
+	
+				// Test Read by Approver
+				rlad = rrDAO.readByApprover(trans, data.approver);
+				assertTrue(rlad.isOKhasData());
+				ok = false;
+				for(ApprovalDAO.Data a : rlad.value) {
+					if(a.id.equals(id)) {
+						ok = true;
+						compare(data,a);
+					}
+				}
+				assertTrue(ok);
+	
+				// Test Read by ID
+				rlad = rrDAO.read(trans, id);
+				assertTrue(rlad.isOKhasData());
+				ok = false;
+				for(ApprovalDAO.Data a : rlad.value) {
+					if(a.id.equals(id)) {
+						ok = true;
+						compare(data,a);
+					}
+				}
+				assertTrue(ok);
+	
+				// Test Update
+				data.status = "approved";
+				data.id = id;
+				assertTrue(rrDAO.update(trans, data).isOK());
+				
+				rlad = rrDAO.read(trans, id);
+				assertTrue(rlad.isOKhasData());
+				ok = false;
+				for(ApprovalDAO.Data a : rlad.value) {
+					if(a.id.equals(id)) {
+						ok = true;
+						compare(data,a);
+					}
+				}
+				assertTrue(ok);
+
+			} finally {
+				// Delete
+				data.id = id;
+				rrDAO.delete(trans, data, true);
+				rlad = rrDAO.read(trans, id);
+				assertTrue(rlad.isOK());
+				assertTrue(rlad.isEmpty());
+			}
+			
+		} finally {
+			rrDAO.close(trans);
+		}
+	}
+
+	private void compare(Data d1, Data d2) {
+		assertEquals(d1.id.toString(),d2.id.toString());
+		assertEquals(d1.ticket.toString(),d2.ticket.toString());
+		assertEquals(d1.user,d2.user);
+		assertEquals(d1.approver,d2.approver);
+		assertEquals(d1.type,d2.type);
+		assertEquals(d1.status,d2.status);
+		assertEquals(d1.operation,d2.operation);
+		//assertEquals(d1.updated,d2.updated);
+		assertEquals(d1.memo,d2.memo);
+		assertEquals(d1.last_notified,d2.last_notified);
+	}
+
+	
+	
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_ArtiDAO.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_ArtiDAO.java
new file mode 100644
index 0000000..e104cbe
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_ArtiDAO.java
@@ -0,0 +1,137 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.security.NoSuchAlgorithmException;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.cass.ArtiDAO;
+import org.onap.aaf.auth.dao.cass.ArtiDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+
+/**
+ * UserDAO unit test.
+ * Date: 7/19/13
+ */
+public class JU_ArtiDAO  extends AbsJUCass {
+	@Test
+	public void test() throws IOException, NoSuchAlgorithmException {
+		ArtiDAO adao = new ArtiDAO(trans,cluster,CassAccess.KEYSPACE);
+		try {
+			// TODO: Clean out AT&T specific data
+			// Create
+	        ArtiDAO.Data data = new ArtiDAO.Data();
+	        data.mechid="m553j5@perturbed.onap.org";
+	        data.machine="perturbed1232.onap.org";
+	        data.type(false).add("file");
+	        data.type(false).add("jks");
+	        data.sponsor="Fred Flintstone";
+	        data.ca="devl";
+	        data.dir="/opt/app/aft/keys";
+	        data.ns="kumquat";
+	        data.os_user="aft";
+	        data.notify="email:fred@bogus.com";
+	        data.expires=new Date();
+	        
+//	        Bytification
+	        ByteBuffer bb = data.bytify();
+	        Data bdata = new ArtiDAO.Data();
+	        bdata.reconstitute(bb);
+	        checkData1(data, bdata);
+	        
+	        
+//	        DB work
+			adao.create(trans,data);
+			try {
+				// Validate Read with key fields in Data
+				Result<List<ArtiDAO.Data>> rlcd = adao.read(trans,data);
+				assertTrue(rlcd.isOKhasData());
+				for(ArtiDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+	
+				// Validate Read with key fields in Data
+				rlcd = adao.read(trans,data.mechid, data.machine);
+				assertTrue(rlcd.isOKhasData());
+				for(ArtiDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+	
+				// By Machine
+				rlcd = adao.readByMachine(trans,data.machine);
+				assertTrue(rlcd.isOKhasData());
+				for(ArtiDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+				
+				// By MechID
+				rlcd = adao.readByMechID(trans,data.mechid);
+				assertTrue(rlcd.isOKhasData());
+				for(ArtiDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+	
+				// Update
+				data.sponsor = "Wilma Flintstone";
+				adao.update(trans,data);
+				rlcd = adao.read(trans,data);
+				assertTrue(rlcd.isOKhasData());
+				for(ArtiDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}			
+
+			} finally {
+				// Always delete data, even if failure.
+				adao.delete(trans,data, true);
+			}
+		} finally {
+			adao.close(trans);
+		}
+
+		
+	}
+
+	private void checkData1(Data data, Data d) {
+		assertEquals(data.mechid,d.mechid);
+		assertEquals(data.machine,d.machine);
+		assertEquals(data.type(false).size(),d.type(false).size());
+		for(String s: data.type(false)) {
+			assertTrue(d.type(false).contains(s));
+		}
+		assertEquals(data.sponsor,d.sponsor);
+		assertEquals(data.ca,d.ca);
+		assertEquals(data.dir,d.dir);
+		assertEquals(data.ns,d.ns);
+		assertEquals(data.os_user,d.os_user);
+		assertEquals(data.notify,d.notify);
+		assertEquals(data.expires,d.expires);
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_Bytification.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_Bytification.java
new file mode 100644
index 0000000..206c52a
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_Bytification.java
@@ -0,0 +1,266 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Date;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.CredDAO;
+import org.onap.aaf.auth.dao.cass.NsDAO;
+import org.onap.aaf.auth.dao.cass.NsType;
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.UserRoleDAO;
+
+public class JU_Bytification {
+
+	@Test
+	public void testNS() throws IOException {
+		
+		// Normal
+		NsDAO.Data ns = new NsDAO.Data();
+		ns.name = "org.osaaf.whatever";
+		ns.type = NsType.APP.type;
+
+		ByteBuffer bb = ns.bytify();
+		
+		NsDAO.Data nsr = new NsDAO.Data();
+		nsr.reconstitute(bb);
+		check(ns,nsr);
+		
+		// Empty admin
+//		ns.admin(true).clear();
+		bb = ns.bytify();
+		nsr = new NsDAO.Data();
+		nsr.reconstitute(bb);
+		check(ns,nsr);
+		
+		// Empty responsible
+//		ns.responsible(true).clear();
+		bb = ns.bytify();
+		nsr = new NsDAO.Data();
+		nsr.reconstitute(bb);
+		check(ns,nsr);
+
+		bb = ns.bytify();
+		nsr = new NsDAO.Data();
+		nsr.reconstitute(bb);
+		check(ns,nsr);
+	}
+	
+	private void check(NsDAO.Data a, NsDAO.Data b) {
+		assertEquals(a.name,b.name);
+		assertEquals(a.type,b.type);
+//		assertEquals(a.admin.size(),b.admin.size());
+		
+//		for(String s: a.admin) {
+//			assertTrue(b.admin.contains(s));
+//		}
+//		
+//		assertEquals(a.responsible.size(),b.responsible.size());
+//		for(String s: a.responsible) {
+//			assertTrue(b.responsible.contains(s));
+//		}
+	}
+
+	@Test
+	public void testRole() throws IOException {
+		RoleDAO.Data rd1 = new RoleDAO.Data();
+		rd1.ns = "org.osaaf.whatever";
+		rd1.name = "my.role";
+		rd1.perms(true).add("org.osaaf.whatever.my.Perm|myInstance|myAction");
+		rd1.perms(true).add("org.osaaf.whatever.my.Perm|myInstance|myAction2");
+
+		// Normal
+		ByteBuffer bb = rd1.bytify();
+		RoleDAO.Data rd2 = new RoleDAO.Data();
+		rd2.reconstitute(bb);
+		check(rd1,rd2);
+		
+		// Overshoot Buffer
+		StringBuilder sb = new StringBuilder(300);
+		sb.append("role|instance|veryLongAction...");
+		for(int i=0;i<280;++i) {
+			sb.append('a');
+		}
+		rd1.perms(true).add(sb.toString());
+		bb = rd1.bytify();
+		rd2 = new RoleDAO.Data();
+		rd2.reconstitute(bb);
+		check(rd1,rd2);
+		
+		// No Perms
+		rd1.perms.clear();
+		
+		bb = rd1.bytify();
+		rd2 = new RoleDAO.Data();
+		rd2.reconstitute(bb);
+		check(rd1,rd2);
+		
+		// 1000 Perms
+		for(int i=0;i<1000;++i) {
+			rd1.perms(true).add("com|inst|action"+ i);
+		}
+
+		bb = rd1.bytify();
+		rd2 = new RoleDAO.Data();
+		rd2.reconstitute(bb);
+		check(rd1,rd2);
+
+	}
+	
+	private void check(RoleDAO.Data a, RoleDAO.Data b) {
+		assertEquals(a.ns,b.ns);
+		assertEquals(a.name,b.name);
+		
+		assertEquals(a.perms.size(),b.perms.size());
+		for(String s: a.perms) {
+			assertTrue(b.perms.contains(s));
+		}
+	}
+
+	@Test
+	public void testPerm() throws IOException {
+		PermDAO.Data pd1 = new PermDAO.Data();
+		pd1.ns = "org.osaaf.whatever";
+		pd1.type = "my.perm";
+		pd1.instance = "instance";
+		pd1.action = "read";
+		pd1.roles(true).add("org.osaaf.whatever.my.Role");
+		pd1.roles(true).add("org.osaaf.whatever.my.Role2");
+
+		// Normal
+		ByteBuffer bb = pd1.bytify();
+		PermDAO.Data rd2 = new PermDAO.Data();
+		rd2.reconstitute(bb);
+		check(pd1,rd2);
+		
+		// No Perms
+		pd1.roles.clear();
+		
+		bb = pd1.bytify();
+		rd2 = new PermDAO.Data();
+		rd2.reconstitute(bb);
+		check(pd1,rd2);
+		
+		// 1000 Perms
+		for(int i=0;i<1000;++i) {
+			pd1.roles(true).add("org.osaaf.whatever.my.Role"+ i);
+		}
+
+		bb = pd1.bytify();
+		rd2 = new PermDAO.Data();
+		rd2.reconstitute(bb);
+		check(pd1,rd2);
+
+	}
+	
+	private void check(PermDAO.Data a, PermDAO.Data b) {
+		assertEquals(a.ns,b.ns);
+		assertEquals(a.type,b.type);
+		assertEquals(a.instance,b.instance);
+		assertEquals(a.action,b.action);
+		
+		assertEquals(a.roles.size(),b.roles.size());
+		for(String s: a.roles) {
+			assertTrue(b.roles.contains(s));
+		}
+	}
+
+	@Test
+	public void testUserRole() throws IOException {
+		UserRoleDAO.Data urd1 = new UserRoleDAO.Data();
+//		TODO: Clean out AT&T specific data
+		urd1.user = "jg1555@abc.att.com";
+		urd1.role("org.osaaf.whatever","my.role");
+		urd1.expires = new Date();
+
+		// Normal
+		ByteBuffer bb = urd1.bytify();
+		UserRoleDAO.Data urd2 = new UserRoleDAO.Data();
+		urd2.reconstitute(bb);
+		check(urd1,urd2);
+		
+		// A null
+		urd1.expires = null; 
+		urd1.role = null;
+		
+		bb = urd1.bytify();
+		urd2 = new UserRoleDAO.Data();
+		urd2.reconstitute(bb);
+		check(urd1,urd2);
+	}
+
+	private void check(UserRoleDAO.Data a, UserRoleDAO.Data b) {
+		assertEquals(a.user,b.user);
+		assertEquals(a.role,b.role);
+		assertEquals(a.expires,b.expires);
+	}
+
+	
+	@Test
+	public void testCred() throws IOException {
+		CredDAO.Data cd = new CredDAO.Data();
+		cd.id = "m55555@abc.att.com";
+		cd.ns = "org.osaaf.abc";
+		cd.type = 2;
+		cd.cred = ByteBuffer.wrap(new byte[]{1,34,5,3,25,0,2,5,3,4});
+		cd.expires = new Date();
+
+		// Normal
+		ByteBuffer bb = cd.bytify();
+		CredDAO.Data cd2 = new CredDAO.Data();
+		cd2.reconstitute(bb);
+		check(cd,cd2);
+		
+		// nulls
+		cd.expires = null;
+		cd.cred = null;
+		
+		bb = cd.bytify();
+		cd2 = new CredDAO.Data();
+		cd2.reconstitute(bb);
+		check(cd,cd2);
+
+	}
+
+	private void check(CredDAO.Data a, CredDAO.Data b) {
+		assertEquals(a.id,b.id);
+		assertEquals(a.ns,b.ns);
+		assertEquals(a.type,b.type);
+		if(a.cred==null) {
+			assertEquals(a.cred,b.cred); 
+		} else {
+			int l = a.cred.limit();
+			assertEquals(l,b.cred.limit());
+			for (int i=0;i<l;++i) {
+				assertEquals(a.cred.get(),b.cred.get());
+			}
+		}
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_CacheInfoDAO.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_CacheInfoDAO.java
new file mode 100644
index 0000000..87b8848
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_CacheInfoDAO.java
@@ -0,0 +1,64 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+
+package com.att.dao.aaf.test;
+
+import java.io.IOException;
+import java.util.Date;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.DAOException;
+import org.onap.aaf.auth.dao.cass.CacheInfoDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.util.Chrono;
+
+import junit.framework.Assert;
+
+
+public class JU_CacheInfoDAO extends AbsJUCass {
+
+	@Test
+	public void test() throws DAOException, APIException, IOException {
+		CIDAO<AuthzTrans> id = new CacheInfoDAO(trans, cluster, AUTHZ);
+		Date date  = new Date();
+		
+		id.touch(trans, RoleDAO.TABLE,1);
+		try {
+			Thread.sleep(3000);
+		} catch (InterruptedException e) {
+		}
+		Result<Void> rid = id.check(trans);
+		Assert.assertEquals(rid.status,Status.OK);
+		Date[] dates = CacheInfoDAO.info.get(RoleDAO.TABLE);
+		if(dates.length>0 && dates[1]!=null) {
+			System.out.println(Chrono.dateStamp(dates[1]));
+			System.out.println(Chrono.dateStamp(date));
+			Assert.assertTrue(Math.abs(dates[1].getTime() - date.getTime())<20000); // allow for 4 seconds, given Remote DB
+		}
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_CertDAO.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_CertDAO.java
new file mode 100644
index 0000000..cd3fb8d
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_CertDAO.java
@@ -0,0 +1,103 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.security.NoSuchAlgorithmException;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.cass.CertDAO;
+import org.onap.aaf.auth.dao.cass.CertDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+/**
+ * UserDAO unit test.
+ * Date: 7/19/13
+ */
+public class JU_CertDAO  extends AbsJUCass {
+	@Test
+	public void test() throws IOException, NoSuchAlgorithmException, APIException {
+		CertDAO cdao = new CertDAO(trans,cluster,CassAccess.KEYSPACE);
+		try {
+			// Create
+	        CertDAO.Data data = new CertDAO.Data();
+	        data.serial=new BigInteger("11839383");
+	        data.id = "m55555@tguard.att.com";
+	        data.x500="CN=ju_cert.dao.att.com, OU=AAF, O=\"ATT Services, Inc.\", L=Southfield, ST=Michigan, C=US";
+	        data.x509="I'm a cert";
+	        data.ca = "aaf";
+			cdao.create(trans,data);
+
+//	        Bytification
+	        ByteBuffer bb = data.bytify();
+	        Data bdata = new CertDAO.Data();
+	        bdata.reconstitute(bb);
+	        checkData1(data, bdata);
+
+			// Validate Read with key fields in Data
+			Result<List<CertDAO.Data>> rlcd = cdao.read(trans,data);
+			assertTrue(rlcd.isOKhasData());
+			for(CertDAO.Data d : rlcd.value) {
+				checkData1(data,d);
+			}
+
+			// Validate Read with key fields in Data
+			rlcd = cdao.read(trans,data.ca,data.serial);
+			assertTrue(rlcd.isOKhasData());
+			for(CertDAO.Data d : rlcd.value) {
+				checkData1(data,d);
+			}
+
+			// Update
+			data.id = "m66666.tguard.att.com";
+			cdao.update(trans,data);
+			rlcd = cdao.read(trans,data);
+			assertTrue(rlcd.isOKhasData());
+			for(CertDAO.Data d : rlcd.value) {
+				checkData1(data,d);
+			}			
+			
+			cdao.delete(trans,data, true);
+		} finally {
+			cdao.close(trans);
+		}
+
+		
+	}
+
+	private void checkData1(Data data, Data d) {
+		assertEquals(data.ca,d.ca);
+		assertEquals(data.serial,d.serial);
+		assertEquals(data.id,d.id);
+		assertEquals(data.x500,d.x500);
+		assertEquals(data.x509,d.x509);
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_CredDAO.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_CredDAO.java
new file mode 100644
index 0000000..3ccc432
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_CredDAO.java
@@ -0,0 +1,250 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.security.NoSuchAlgorithmException;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.cass.CredDAO;
+import org.onap.aaf.auth.dao.cass.CredDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+/**
+ * UserDAO unit test.
+ * Date: 7/19/13
+ */
+public class JU_CredDAO  extends AbsJUCass {
+	@Test
+	public void test() throws IOException, NoSuchAlgorithmException, APIException {
+		CredDAO udao = new CredDAO(trans,cluster,CassAccess.KEYSPACE);
+		try {
+			// Create
+	        CredDAO.Data data = new CredDAO.Data();
+	        data.id = "m55555@aaf.att.com";
+	        data.type = CredDAO.BASIC_AUTH;
+	        data.notes = "temp pass";
+	        data.cred      = ByteBuffer.wrap(userPassToBytes("m55555","mypass"));
+	        data.other = 12;
+	        data.expires = new Date(System.currentTimeMillis() + 60000*60*24*90);
+			udao.create(trans,data);
+			
+//	        Bytification
+	        ByteBuffer bb = data.bytify();
+	        Data bdata = new CredDAO.Data();
+	        bdata.reconstitute(bb);
+	        checkData1(data, bdata);
+
+			// Validate Read with key fields in Data
+			Result<List<CredDAO.Data>> rlcd = udao.read(trans,data);
+			assertTrue(rlcd.isOKhasData());
+			for(CredDAO.Data d : rlcd.value) {
+				checkData1(data,d);
+			}
+			
+			// Update
+			data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mynewpass"));
+			udao.update(trans,data);
+			rlcd = udao.read(trans,data);
+			assertTrue(rlcd.isOKhasData());
+			for(CredDAO.Data d : rlcd.value) {
+				checkData1(data,d);
+			}			
+			
+			udao.delete(trans,data, true);
+		} finally {
+			udao.close(trans);
+		}
+
+		
+	}
+
+	private void checkData1(Data data, Data d) {
+		assertEquals(data.id,d.id);
+		assertEquals(data.type,d.type);
+		assertEquals(data.ns,d.ns);
+		assertEquals(data.notes,d.notes);
+		assertEquals(data.cred,d.cred);
+		assertEquals(data.other,d.other);
+		assertEquals(data.expires,d.expires);
+	}
+
+//    private String                          CONST_myName = "MyName";
+//    public static final java.nio.ByteBuffer CONST_MY_CRED = get_CONST_MY_CRED();
+//    public static final int                 CONST_CRED_TYPE = 11;
+//
+//    public static final Date                CONST_UPDATE_DATE = new Date(System.currentTimeMillis()+60000*24);
+//    @Test
+//    public void test() {
+//        UserDAO ud = new UserDAO(trans, cluster,CassAccess.KEYSPACE);
+//        try {
+//            UserDAO.Data data = createPrototypeUserData();
+//            ud.create(trans, data);
+//
+//            // Validate Read with key fields in Data
+//            for(UserDAO.Data d : ud.read(trans, data)) {
+//                checkData1(data,d);
+//            }
+//
+//            // Validate readByName
+//            for(UserDAO.Data d : ud.read(trans, CONST_myName)) {
+//                checkData1(data,d);
+//            }
+//
+//            ud.delete(trans, data);
+//            List<UserDAO.Data> d_2 = ud.read(trans, CONST_myName);
+//
+//            // Validate that data was deleted
+//            assertEquals("User should not be found after deleted", 0, d_2.size() );
+//
+//            data = new UserDAO.Data();
+//            data.name = CONST_myName;
+//            data.cred = CONST_MY_CRED;
+//            data.cred_type= CONST_CRED_TYPE;
+//            data.expires = new Date(System.currentTimeMillis()+60000*24);
+//            final Result<UserDAO.Data> user = ud.r_create(trans, data);
+//            assertEquals("ud.createUser should work", Result.Status.OK, user.status);
+//
+//            checkDataIgnoreDateDiff(data, user.value);
+//
+//            // finally leave system in consistent state by deleting user again
+//            ud.delete(trans,data);
+//
+//        } catch (DAOException e) {
+//            e.printStackTrace();
+//            fail("Fail due to Exception");
+//        } finally {
+//            ud.close(trans);
+//        }
+//    }
+//
+//    private UserDAO.Data createPrototypeUserData() {
+//        UserDAO.Data data = new UserDAO.Data();
+//        data.name = CONST_myName;
+//
+//        data.cred_type = CONST_CRED_TYPE;
+//        data.cred      = CONST_MY_CRED;
+//        data.expires = CONST_UPDATE_DATE;
+//        return data;
+//    }
+//
+//    //    @Test
+//    //    public void testReadByUser() throws Exception {
+//    //           // this test was done above in our super test, since it uses the same setup
+//    //    }
+//
+//    @Test
+//    public void testFunctionCreateUser() throws Exception {
+//        String name = "roger_rabbit";
+//        Integer credType = CONST_CRED_TYPE;
+//        java.nio.ByteBuffer cred = CONST_MY_CRED;
+//        final UserDAO ud = new UserDAO(trans, cluster,CassAccess.KEYSPACE);
+//        final UserDAO.Data data = createPrototypeUserData();
+//        Result<UserDAO.Data> ret = ud.r_create(trans, data);
+//        Result<List<Data>> byUserNameLookup = ud.r_read(trans, name);
+//        
+//        assertEquals("sanity test w/ different username (different than other test cases) failed", name, byUserNameLookup.value.get(0).name);
+//        assertEquals("delete roger_rabbit failed", true, ud.delete(trans, byUserNameLookup.value.get(0)));
+//    }
+//
+//    @Test
+//    public void testLowLevelCassandraCreateData_Given_UserAlreadyPresent_ShouldPass() throws Exception {
+//        UserDAO ud = new UserDAO(trans, cluster,CassAccess.KEYSPACE);
+//
+//        final UserDAO.Data data = createPrototypeUserData();
+//        final UserDAO.Data data1 = ud.create(trans, data);
+//        final UserDAO.Data data2 = ud.create(trans, data);
+//
+//        assertNotNull(data1);
+//        assertNotNull(data2);
+//
+//        assertEquals(CONST_myName, data1.name);
+//        assertEquals(CONST_myName, data2.name);
+//    }
+//
+//    @Test
+//    public void testCreateUser_Given_UserAlreadyPresent_ShouldFail() throws Exception {
+//        UserDAO ud = new UserDAO(trans, cluster,CassAccess.KEYSPACE);
+//
+//        final UserDAO.Data data = createPrototypeUserData();
+//
+//        // make sure that some prev test did not leave the user in the DB
+//        ud.delete(trans, data);
+//
+//        // attempt to create same user twice !!!
+//        
+//        final Result<UserDAO.Data> data1 = ud.r_create(trans, data);
+//        final Result<UserDAO.Data> data2 = ud.r_create(trans, data);
+//
+//        assertNotNull(data1);
+//        assertNotNull(data2);
+//
+//        assertEquals(true,   Result.Status.OK == data1.status);
+//        assertEquals(false,  Result.Status.OK == data2.status);
+//    }
+//
+//    private void checkData1(UserDAO.Data data, UserDAO.Data d) {
+//        data.name = CONST_myName;
+//
+//        data.cred_type = CONST_CRED_TYPE;
+//        data.cred      = CONST_MY_CRED;
+//        data.expires   = CONST_UPDATE_DATE;
+//
+//        assertEquals(data.name, d.name);
+//        assertEquals(data.cred_type, d.cred_type);
+//        assertEquals(data.cred, d.cred);
+//        assertEquals(data.expires, d.expires);
+//
+//    }
+//
+//    private void checkDataIgnoreDateDiff(UserDAO.Data data, UserDAO.Data d) {
+//        data.name = CONST_myName;
+//
+//        data.cred_type = CONST_CRED_TYPE;
+//        data.cred      = CONST_MY_CRED;
+//        data.expires   = CONST_UPDATE_DATE;
+//
+//        assertEquals(data.name, d.name);
+//        assertEquals(data.cred_type, d.cred_type);
+//        assertEquals(data.cred, d.cred);
+//         // we allow dates to be different, e.g. high level calls e.g. createUser sets the date itself.
+//        //assertEquals(data.updated, d.updated);
+//
+//    }
+//
+//    /**
+//     * Get a CONST_MY_CRED ByteBuffer, which is the java type for a cass blob.
+//     * @return
+//     */
+//    private static java.nio.ByteBuffer get_CONST_MY_CRED() {
+//     return ByteBuffer.wrap("Hello".getBytes());
+//    }
+//
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_DelegateDAO.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_DelegateDAO.java
new file mode 100644
index 0000000..1a4d21c
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_DelegateDAO.java
@@ -0,0 +1,108 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.nio.ByteBuffer;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.DelegateDAO;
+import org.onap.aaf.auth.dao.cass.DelegateDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+
+
+public class JU_DelegateDAO  extends AbsJUCass {
+	@Test
+	public void testCRUD() throws Exception {
+		DelegateDAO dao = new DelegateDAO(trans, cluster, AUTHZ);
+		DelegateDAO.Data data = new DelegateDAO.Data();
+//		TODO: Clean out AT&T specific data
+		data.user = "jg1555";
+		data.delegate = "rd8227";
+		data.expires = new Date();
+		
+//        Bytification
+        ByteBuffer bb = data.bytify();
+        Data bdata = new DelegateDAO.Data();
+        bdata.reconstitute(bb);
+        compare(data, bdata);
+
+		try {
+			// Test create
+			Result<Data> ddcr = dao.create(trans,data);
+			assertTrue(ddcr.isOK());
+			
+			
+			// Read by User
+			Result<List<DelegateDAO.Data>> records = dao.read(trans,data.user);
+			assertTrue(records.isOKhasData());
+			for(DelegateDAO.Data rdata : records.value) 
+				compare(data,rdata);
+
+			// Read by Delegate
+			records = dao.readByDelegate(trans,data.delegate);
+			assertTrue(records.isOKhasData());
+			for(DelegateDAO.Data rdata : records.value) 
+				compare(data,rdata);
+			
+			// Update
+			// TODO: Clean out AT&T specific data
+			data.delegate = "pf2819";
+			data.expires = new Date();
+			assertTrue(dao.update(trans, data).isOK());
+
+			// Read by User
+			records = dao.read(trans,data.user);
+			assertTrue(records.isOKhasData());
+			for(DelegateDAO.Data rdata : records.value) 
+				compare(data,rdata);
+
+			// Read by Delegate
+			records = dao.readByDelegate(trans,data.delegate);
+			assertTrue(records.isOKhasData());
+			for(DelegateDAO.Data rdata : records.value) 
+				compare(data,rdata);
+
+			// Test delete
+			dao.delete(trans,data, true);
+			records = dao.read(trans,data.user);
+			assertTrue(records.isEmpty());
+			
+			
+		} finally {
+			dao.close(trans);
+		}
+	}
+	
+	private void compare(Data d1, Data d2) {
+		assertEquals(d1.user, d2.user);
+		assertEquals(d1.delegate, d2.delegate);
+		assertEquals(d1.expires,d2.expires);
+	}
+
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_FastCalling.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_FastCalling.java
new file mode 100644
index 0000000..9d3ff5d
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_FastCalling.java
@@ -0,0 +1,90 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.security.NoSuchAlgorithmException;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.cass.CredDAO;
+import org.onap.aaf.auth.dao.cass.CredDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+public class JU_FastCalling extends AbsJUCass {
+
+	@Test
+	public void test() throws IOException, NoSuchAlgorithmException, APIException {
+		trans.setProperty("cassandra.writeConsistency.cred","ONE");
+		
+		CredDAO udao = new CredDAO(env.newTransNoAvg(),cluster,CassAccess.KEYSPACE);
+		System.out.println("Starting calls");
+		for(iterations=0;iterations<8;++iterations) {
+			try {
+				// Create
+		        CredDAO.Data data = new CredDAO.Data();
+		        data.id = "m55555@aaf.att.com";
+		        data.type = CredDAO.BASIC_AUTH;
+		        data.cred      = ByteBuffer.wrap(userPassToBytes("m55555","mypass"));
+		        data.expires = new Date(System.currentTimeMillis() + 60000*60*24*90);
+				udao.create(trans,data);
+				
+				// Validate Read with key fields in Data
+				Result<List<CredDAO.Data>> rlcd = udao.read(trans,data);
+				assertTrue(rlcd.isOKhasData());
+				for(CredDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+				
+				// Update
+				data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mynewpass"));
+				udao.update(trans,data);
+				rlcd = udao.read(trans,data);
+				assertTrue(rlcd.isOKhasData());
+				for(CredDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}			
+				
+				udao.delete(trans,data, true);
+			} finally {
+				updateTotals();
+				newTrans();
+			}
+		}
+
+	}
+
+	private void checkData1(Data data, Data d) {
+		assertEquals(data.id,d.id);
+		assertEquals(data.type,d.type);
+		assertEquals(data.cred,d.cred);
+		assertEquals(data.expires,d.expires);
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_HistoryDAO.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_HistoryDAO.java
new file mode 100644
index 0000000..11d8835
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_HistoryDAO.java
@@ -0,0 +1,153 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.nio.ByteBuffer;
+import java.security.SecureRandom;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.HistoryDAO;
+import org.onap.aaf.auth.layer.Result;
+
+public class JU_HistoryDAO  extends AbsJUCass {
+	
+	@Test
+	public void testCreate() throws Exception {
+		HistoryDAO historyDAO = new HistoryDAO(trans, cluster, AUTHZ);
+		HistoryDAO.Data data = createHistoryData();
+		
+		try {
+			historyDAO.create(trans,data);			
+			Thread.sleep(200);// History Create is Async
+			Result<List<HistoryDAO.Data>> records = historyDAO.readByUser(trans,data.user,data.yr_mon);
+			assertTrue(records.isOKhasData());
+			for(HistoryDAO.Data d : records.value) {
+				assertHistory(data, d);
+			}
+		} finally {
+			historyDAO.close(trans);
+		}
+	}
+	
+	@Test
+	public void tesReadByUser() throws Exception {
+		HistoryDAO historyDAO = new HistoryDAO(trans,cluster, AUTHZ);
+		HistoryDAO.Data data = createHistoryData();
+		
+		try {
+			historyDAO.create(trans,data);
+			Thread.sleep(200);// History Create is Async
+			Result<List<HistoryDAO.Data>> records = historyDAO.readByUser(trans, data.user,data.yr_mon);
+			assertTrue(records.isOKhasData());
+			for(HistoryDAO.Data d : records.value) {
+				assertHistory(data, d);
+			}
+		} finally {
+			historyDAO.close(trans);
+		}
+	}
+	
+/*
+	@Test
+	public void readByUserAndMonth() throws Exception {
+		HistoryDAO historyDAO = new HistoryDAO(trans,cluster, AUTHZ);
+		HistoryDAO.Data data = createHistoryData();
+		
+		try {
+			historyDAO.create(trans,data);			
+			Thread.sleep(200);// History Create is Async
+			Result<List<HistoryDAO.Data>> records = historyDAO.readByUserAndMonth(trans,
+					data.user, Integer.valueOf(String.valueOf(data.yr_mon).substring(0, 4)),
+					Integer.valueOf(String.valueOf(data.yr_mon).substring(4, 6)));
+			assertTrue(records.isOKhasData());
+			for(HistoryDAO.Data d : records.value) {
+				assertHistory(data, d);
+			}
+		} finally {
+			historyDAO.close(trans);
+		}
+	}
+*/	
+	//TODO readadd this
+//	@Test
+//	public void readByUserAndDay() throws Exception {
+//		HistoryDAO historyDAO = new HistoryDAO(trans, cluster, AUTHZ);
+//		HistoryDAO.Data data = createHistoryData();
+//		
+//		try {
+//			historyDAO.create(trans, data);		
+//			Thread.sleep(200);// History Create is Async
+//			
+//			String dayTime = String.valueOf(data.day_time);
+//			String day = null;
+//			if (dayTime.length() < 8)
+//				day = dayTime.substring(0, 1);
+//			else 
+//				day = dayTime.substring(0, 2);
+//			
+//			List<HistoryDAO.Data> records = historyDAO.readByUserBetweenDates(trans,
+//							data.user, Integer.valueOf(String.valueOf(data.yr_mon).substring(0, 4)),
+//							Integer.valueOf(String.valueOf(data.yr_mon).substring(4, 6)),
+//							Integer.valueOf(day), 0);
+//			assertEquals(1,records.size());
+//			for(HistoryDAO.Data d : records) {
+//				assertHistory(data, d);
+//			}
+//		} finally {
+//			historyDAO.close(trans);
+//		}
+//	}
+	private HistoryDAO.Data createHistoryData() {
+		HistoryDAO.Data data = HistoryDAO.newInitedData();
+		SecureRandom random = new SecureRandom();
+		data.user = "test" + random.nextInt();
+		data.action = "add";
+		data.target = "history";
+		data.memo = "adding a row into history table";
+//		data.detail().put("id", "test");
+//		data.detail().put("name", "test");
+		//String temp = "Test Blob Message";
+		data.reconstruct = ByteBuffer.wrap("Temp Blob Message".getBytes());		
+		return data;
+	}
+	
+	private void assertHistory(HistoryDAO.Data ip, HistoryDAO.Data op) {
+		assertEquals(ip.yr_mon, op.yr_mon);		
+//		assertEquals(ip.day_time, op.day_time);		
+		assertEquals(ip.user, op.user);		
+		assertEquals(ip.action, op.action);
+		assertEquals(ip.target, op.target);
+		assertEquals(ip.memo, op.memo);
+		//TODO : have to see if third party assert utility can be used
+//		assertTrue(CollectionUtils.isEqualCollection(ip.detail, op.detail));
+//		for (String key : ip.detail().keySet()) {
+//			assertNotNull(op.detail().get(key));
+//		}
+		assertNotNull(op.reconstruct);
+	}
+	
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_LocateDAO.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_LocateDAO.java
new file mode 100644
index 0000000..283a356
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_LocateDAO.java
@@ -0,0 +1,146 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Set;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.cass.LocateDAO;
+import org.onap.aaf.auth.dao.cass.LocateDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+/**
+ * Test the LocateDAO
+ * 
+ * Utilize AbsJUCass to initialize and pre-load Cass
+ * 
+ * @author Jonathan
+ *
+ */
+public class JU_LocateDAO extends AbsJUCass{
+
+	@Test
+	public void test() throws APIException, IOException {
+		LocateDAO pd = new LocateDAO(trans,cluster,CassAccess.KEYSPACE);
+		try {
+			LocateDAO.Data data = new LocateDAO.Data();
+			data.name="org.osaaf.aaf.locateTester";
+			data.hostname="mithrilcsp.sbc.com";
+			data.port=19999;
+			data.latitude=32.780140f;
+			data.longitude=-96.800451f;
+			data.major=2;
+			data.minor=0;
+			data.patch=19;
+			data.pkg=23;
+			data.protocol="https";
+			Set<String> sp = data.subprotocol(true);
+			sp.add("TLS1.1");
+			sp.add("TLS1.2");
+			
+
+
+			// CREATE
+			Result<Data> rpdc = pd.create(trans,data);
+			assertTrue(rpdc.isOK());
+
+			Result<List<LocateDAO.Data>> rlpd;
+			try {
+//		        Bytification
+		        ByteBuffer bb = data.bytify();
+		        Data bdata = new LocateDAO.Data();
+		        bdata.reconstitute(bb);
+		        compare(data, bdata);
+
+				// Validate Read with key fields in Data
+		        rlpd = pd.read(trans,data);
+		        assertTrue(rlpd.isOK());
+		        if(rlpd.isOK()) {
+					for(LocateDAO.Data d : rlpd.value) {
+						compare(data,d);
+					}
+		        }
+
+		        // Validate Read by Name
+		        rlpd = pd.readByName(trans,data.name);
+		        assertTrue(rlpd.isOK());
+		        if(rlpd.isOK()) {
+					for(LocateDAO.Data d : rlpd.value) {
+						compare(data,d);
+					}
+		        }
+
+				// Modify
+				data.latitude = -31.0000f;
+				
+				Result<Void> rupd = pd.update(trans, data);
+				assertTrue(rupd.isOK());
+		        rlpd = pd.read(trans,data);
+		        assertTrue(rlpd.isOK());
+		        if(rlpd.isOK()) {
+					for(LocateDAO.Data d : rlpd.value) {
+						compare(data,d);
+					}
+		        }
+
+			} catch (IOException e) {
+				e.printStackTrace();
+			} finally {
+				// DELETE
+				Result<Void> rpdd = pd.delete(trans,data,true);
+				assertTrue(rpdd.isOK());
+				rlpd = pd.read(trans, data);
+				assertTrue(rlpd.isOK() && rlpd.isEmpty());
+				assertEquals(rlpd.value.size(),0);
+			}
+		} finally {
+			pd.close(trans);
+		}
+	}
+
+	private void compare(Data a, Data b) {
+		assertEquals(a.name,b.name);
+		assertEquals(a.hostname,b.hostname);
+		assertEquals(a.port,b.port);
+		assertEquals(a.major,b.major);
+		assertEquals(a.minor,b.minor);
+		assertEquals(a.patch,b.patch);
+		assertEquals(a.pkg,b.pkg);
+		assertEquals(a.latitude,b.latitude);
+		assertEquals(a.longitude,b.longitude);
+		assertEquals(a.protocol,b.protocol);
+		Set<String> spa = a.subprotocol(false);
+		Set<String> spb = b.subprotocol(false);
+		assertEquals(spa.size(),spb.size());
+		for(String s : spa) {
+			assertTrue(spb.contains(s));
+		}
+	}
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_LocationContent.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_LocationContent.java
new file mode 100644
index 0000000..39f096c
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_LocationContent.java
@@ -0,0 +1,93 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.onap.aaf.cadi.PropAccess;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.Data;
+import org.onap.aaf.misc.env.Data.TYPE;
+import org.onap.aaf.misc.rosetta.env.RosettaDF;
+import org.onap.aaf.misc.rosetta.env.RosettaEnv;
+
+import locate.v1_0.MgmtEndpoint;
+import locate.v1_0.MgmtEndpoint.SpecialPorts;
+import locate.v1_0.MgmtEndpoints;
+
+public class JU_LocationContent {
+
+	@BeforeClass
+	public static void setUpBeforeClass() throws Exception {
+	}
+
+	@AfterClass
+	public static void tearDownAfterClass() throws Exception {
+	}
+
+	@Before
+	public void setUp() throws Exception {
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+	@Test
+	public void test() {
+		PropAccess access = new PropAccess();
+		RosettaEnv env = new RosettaEnv(access.getProperties());
+		try {
+			RosettaDF<MgmtEndpoints> medf = env.newDataFactory(MgmtEndpoints.class);
+			medf.out(TYPE.JSON);
+			medf.option(Data.PRETTY);
+			MgmtEndpoint me = new MgmtEndpoint();
+			me.setHostname("mithrilcsp.sbc.com");
+			me.setLatitude(32);
+			me.setLongitude(-90);
+			me.setMajor(2);
+			me.setMinor(0);
+			me.setPatch(19);
+			me.setPort(3312);
+			me.setProtocol("http");
+			me.getSubprotocol().add("TLS1.1");
+
+			SpecialPorts sp = new SpecialPorts();
+			sp.setName("debug");
+			sp.setPort(9000);
+			sp.setProtocol("java");
+			me.getSpecialPorts().add(sp);
+			
+			MgmtEndpoints mes = new MgmtEndpoints();
+			mes.getMgmtEndpoint().add(me);
+			System.out.println(medf.newData().load(mes).asString());
+			
+		} catch (APIException e) {
+			e.printStackTrace();
+		}
+		
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_NsDAO.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_NsDAO.java
new file mode 100644
index 0000000..5b31341
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_NsDAO.java
@@ -0,0 +1,187 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.NsDAO;
+import org.onap.aaf.auth.dao.cass.NsType;
+import org.onap.aaf.auth.dao.cass.NsDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+
+public class JU_NsDAO extends AbsJUCass {
+	private static final String CRM = "ju_crm";
+	private static final String SWM = "ju_swm";
+
+	@Test
+	public void test() throws APIException, IOException  {
+		NsDAO nsd = new NsDAO(trans, cluster, AUTHZ);
+		try {
+			final String nsparent = "com.test";
+			final String ns1 = nsparent +".ju_ns";
+			final String ns2 = nsparent + ".ju_ns2";
+			
+			Map<String,String> oAttribs = new HashMap<String,String>();
+			oAttribs.put(SWM, "swm_data");
+			oAttribs.put(CRM, "crm_data");
+			Data data = new NsDAO.Data();
+			data.name = ns1;
+			data.type = NsType.APP.type;
+			data.parent = nsparent;
+			data.attrib(true).putAll(oAttribs);
+			
+
+			Result<List<Data>> rdrr;
+
+			// CREATE
+			Result<Data> rdc = nsd.create(trans, data);
+			assertTrue(rdc.isOK());
+			
+			try {
+//		        Bytification
+		        ByteBuffer bb = data.bytify();
+		        Data bdata = new NsDAO.Data();
+		        bdata.reconstitute(bb);
+		        compare(data, bdata);
+
+				// Test READ by Object
+				rdrr = nsd.read(trans, data);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				Data d = rdrr.value.get(0);
+				assertEquals(d.name,data.name);
+				assertEquals(d.type,data.type);
+				attribsEqual(d.attrib(false),data.attrib(false));
+				attribsEqual(oAttribs,data.attrib(false));
+				
+				// Test Read by Key
+				rdrr = nsd.read(trans, data.name);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				d = rdrr.value.get(0);
+				assertEquals(d.name,data.name);
+				assertEquals(d.type,data.type);
+				attribsEqual(d.attrib(false),data.attrib(false));
+				attribsEqual(oAttribs,data.attrib(false));
+				
+				// Read NS by Type
+				Result<Set<String>> rtypes = nsd.readNsByAttrib(trans, SWM);
+				Set<String> types;
+				if(rtypes.notOK()) {
+					throw new IOException(rtypes.errorString());
+				} else {
+					types = rtypes.value;
+				}
+				assertEquals(1,types.size());
+				assertEquals(true,types.contains(ns1));
+				
+				// Add second NS to test list of data returned
+				Data data2 = new NsDAO.Data();
+				data2.name = ns2;
+				data2.type = 3; // app
+				data2.parent = nsparent;
+				Result<Data> rdc2 = nsd.create(trans, data2);
+				assertTrue(rdc2.isOK());
+				
+					// Interrupt - test PARENT
+					Result<List<Data>> rdchildren = nsd.getChildren(trans, "com.test");
+					assertTrue(rdchildren.isOKhasData());
+					boolean child1 = false;
+					boolean child2 = false;
+					for(Data dchild : rdchildren.value) {
+						if(ns1.equals(dchild.name))child1=true;
+						if(ns2.equals(dchild.name))child2=true;
+					}
+					assertTrue(child1);
+					assertTrue(child2);
+
+				// FINISH DATA 2 by deleting
+				Result<Void> rddr = nsd.delete(trans, data2, true);
+				assertTrue(rddr.isOK());
+
+				// ADD DESCRIPTION
+				String description = "This is my test Namespace";
+				assertFalse(description.equalsIgnoreCase(data.description));
+				
+				Result<Void> addDesc = nsd.addDescription(trans, data.name, description);
+				assertTrue(addDesc.isOK());
+				rdrr = nsd.read(trans, data);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				assertEquals(rdrr.value.get(0).description,description);
+				
+				// UPDATE
+				String newDescription = "zz1234 Owns This Namespace Now";
+				oAttribs.put("mso", "mso_data");
+				data.attrib(true).put("mso", "mso_data");
+				data.description = newDescription;
+				Result<Void> update = nsd.update(trans, data);
+				assertTrue(update.isOK());
+				rdrr = nsd.read(trans, data);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				assertEquals(rdrr.value.get(0).description,newDescription);
+				attribsEqual(oAttribs, rdrr.value.get(0).attrib);
+				
+				
+			} catch (IOException e) {
+				e.printStackTrace();
+			} finally {
+				// DELETE
+				Result<Void> rddr = nsd.delete(trans, data, true);
+				assertTrue(rddr.isOK());
+				rdrr = nsd.read(trans, data);
+				assertTrue(rdrr.isOK() && rdrr.isEmpty());
+				assertEquals(rdrr.value.size(),0);
+			}
+		} finally {
+			nsd.close(trans);
+		}
+	}
+
+	private void compare(NsDAO.Data d, NsDAO.Data data) {
+		assertEquals(d.name,data.name);
+		assertEquals(d.type,data.type);
+		attribsEqual(d.attrib(false),data.attrib(false));
+		attribsEqual(d.attrib(false),data.attrib(false));
+	}
+	
+	private void attribsEqual(Map<String,String> aa, Map<String,String> ba) {
+		assertEquals(aa.size(),ba.size());
+		for(Entry<String, String> es : aa.entrySet()) {
+			assertEquals(es.getValue(),ba.get(es.getKey()));
+		}
+	}
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_NsType.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_NsType.java
new file mode 100644
index 0000000..2644fed
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_NsType.java
@@ -0,0 +1,58 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.AfterClass;
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.NsType;
+
+public class JU_NsType {
+
+	@AfterClass
+	public static void tearDownAfterClass() throws Exception {
+	}
+
+	@Test
+	public void test() {
+		NsType nt,nt2;
+		String[] tests = new String[] {"DOT","ROOT","COMPANY","APP","STACKED_APP","STACK"};
+		for(String s : tests) {
+			nt = NsType.valueOf(s);
+			assertEquals(s,nt.name());
+			
+			nt2 = NsType.fromString(s);
+			assertEquals(nt,nt2);
+			
+			int t = nt.type;
+			nt2 = NsType.fromType(t);
+			assertEquals(nt,nt2);
+		}
+		
+		nt  = NsType.fromType(Integer.MIN_VALUE);
+		assertEquals(nt,NsType.UNKNOWN);
+		nt = NsType.fromString("Garbage");
+		assertEquals(nt,NsType.UNKNOWN);
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_OAuthTokenDAO.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_OAuthTokenDAO.java
new file mode 100644
index 0000000..f3f91d0
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_OAuthTokenDAO.java
@@ -0,0 +1,134 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.security.NoSuchAlgorithmException;
+import java.util.Date;
+import java.util.List;
+import java.util.UUID;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.cass.OAuthTokenDAO;
+import org.onap.aaf.auth.dao.cass.OAuthTokenDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.cadi.oauth.AAFToken;
+
+/**
+ * UserDAO unit test.
+ * Date: 7/19/13
+ */
+public class JU_OAuthTokenDAO  extends AbsJUCass {
+	@Test
+	public void test() throws IOException, NoSuchAlgorithmException {
+		OAuthTokenDAO adao = new OAuthTokenDAO(trans,cluster,CassAccess.KEYSPACE);
+		UUID uuid = UUID.randomUUID();
+		try {
+			// Create
+	        Data data = new OAuthTokenDAO.Data();
+	        data.id=AAFToken.toToken(uuid);
+	        data.client_id="zClient";
+	        data.user = "xy1255@csp.att.com";
+	        data.active = true;
+	        data.type=1;
+	        data.refresh = AAFToken.toToken(UUID.randomUUID());
+	        data.expires=new Date();
+	        data.scopes(false).add("org.osaaf.aaf");
+	        data.scopes(false).add("org.osaaf.grid");
+	        data.content="{darth:\"I am your content\"}";
+	        data.req_ip="::1";
+	        
+//	        Bytification
+	        ByteBuffer bb = data.bytify();
+	        Data bdata = new OAuthTokenDAO.Data();
+//	        System.out.println(new String(Symm.base64noSplit.encode(bb.array())));
+	        bdata.reconstitute(bb);
+	        checkData1(data, bdata);
+	        
+//	        DB work
+			adao.create(trans,data);
+			try {
+				// Validate Read with Data Object
+				Result<List<OAuthTokenDAO.Data>> rlcd = adao.read(trans,data);
+				assertTrue(rlcd.isOKhasData());
+				for(OAuthTokenDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+				// Validate Read with key fields in Data
+				rlcd = adao.read(trans,data.id);
+				assertTrue(rlcd.isOKhasData());
+				for(OAuthTokenDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+				
+				// Validate Read by User
+				rlcd = adao.readByUser(trans,data.user);
+				assertTrue(rlcd.isOKhasData());
+				for(OAuthTokenDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+
+				// Update
+				data.content = "{darth:\"I am your content\", luke:\"Noooooooo!\"}";
+				data.active = false;
+				adao.update(trans,data);
+				rlcd = adao.read(trans,data);
+				assertTrue(rlcd.isOKhasData());
+				for(OAuthTokenDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}			
+
+			} finally {
+				// Always delete data, even if failure.
+				adao.delete(trans,data, true);
+			}
+		} finally {
+			adao.close(trans);
+		}
+
+	}
+
+	private void checkData1(Data data, Data d) {
+		assertEquals(data.id,d.id);
+		assertEquals(data.client_id,d.client_id);
+		assertEquals(data.user,d.user);
+		assertEquals(data.active,d.active);
+		assertEquals(data.type,d.type);
+		assertEquals(data.refresh,d.refresh);
+		assertEquals(data.expires,d.expires);
+		for(String s: data.scopes(false)) {
+			assertTrue(d.scopes(false).contains(s));
+		}
+		for(String s: d.scopes(false)) {
+			assertTrue(data.scopes(false).contains(s));
+		}
+		assertEquals(data.content,d.content);
+		assertEquals(data.state,d.state);
+		assertEquals(data.req_ip,d.req_ip);
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_PermDAO.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_PermDAO.java
new file mode 100644
index 0000000..0a506db
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_PermDAO.java
@@ -0,0 +1,176 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Set;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.PermDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+/**
+ * Test the PermissionDAO
+ * 
+ * Utilize AbsJUCass to initialize and pre-load Cass
+ * 
+ * @author Jonathan
+ *
+ */
+public class JU_PermDAO extends AbsJUCass{
+
+	@Test
+	public void test() throws APIException, IOException {
+		PermDAO pd = new PermDAO(trans,cluster,CassAccess.KEYSPACE);
+		try {
+			PermDAO.Data data = new PermDAO.Data();
+			data.ns = "com.test.ju_perm";
+			data.type = "MyType";
+			data.instance = "MyInstance";
+			data.action = "MyAction";
+			data.roles(true).add(data.ns + ".dev");
+			
+
+
+			// CREATE
+			Result<Data> rpdc = pd.create(trans,data);
+			assertTrue(rpdc.isOK());
+
+			Result<List<PermDAO.Data>> rlpd;
+			try {
+//		        Bytification
+		        ByteBuffer bb = data.bytify();
+		        Data bdata = new PermDAO.Data();
+		        bdata.reconstitute(bb);
+		        compare(data, bdata);
+
+				// Validate Read with key fields in Data
+				if((rlpd = pd.read(trans,data)).isOK())
+				  for(PermDAO.Data d : rlpd.value) {
+					checkData1(data,d);
+				}
+				
+				// Validate readByName
+				if((rlpd = pd.readByType(trans,data.ns, data.type)).isOK())
+				  for(PermDAO.Data d : rlpd.value) {
+					checkData1(data,d);
+				}
+				
+				// Add Role
+				RoleDAO.Data role = new RoleDAO.Data();
+				role.ns = data.ns;
+				role.name = "test";
+				
+				Result<Void> rvpd = pd.addRole(trans, data, role.fullName());
+				assertTrue(rvpd.isOK());
+				// Validate Read with key fields in Data
+				if((rlpd = pd.read(trans,data)).isOK())
+				  for(PermDAO.Data d : rlpd.value) {
+					checkData2(data,d);
+				  }
+				
+				// Remove Role
+				rvpd = pd.delRole(trans, data, role.fullName());
+				assertTrue(rvpd.isOK());
+				if((rlpd = pd.read(trans,data)).isOK())
+					for(PermDAO.Data d : rlpd.value) {
+						checkData1(data,d);
+					}
+				
+				// Add Child
+				Data data2 = new Data();
+				data2.ns = data.ns;
+				data2.type = data.type + ".2";
+				data2.instance = data.instance;
+				data2.action = data.action;
+				
+				rpdc = pd.create(trans, data2);
+				assertTrue(rpdc.isOK());
+				try {
+					rlpd = pd.readChildren(trans, data.ns,data.type);
+					assertTrue(rlpd.isOKhasData());
+					assertEquals(rlpd.value.size(),1);
+					assertEquals(rlpd.value.get(0).fullType(),data2.fullType());
+				} finally {
+					// Delete Child
+					pd.delete(trans, data2,true);
+
+				}
+			} catch (IOException e) {
+				e.printStackTrace();
+			} finally {
+				// DELETE
+				Result<Void> rpdd = pd.delete(trans,data,true);
+				assertTrue(rpdd.isOK());
+				rlpd = pd.read(trans, data);
+				assertTrue(rlpd.isOK() && rlpd.isEmpty());
+				assertEquals(rlpd.value.size(),0);
+			}
+		} finally {
+			pd.close(trans);
+		}
+	}
+
+	private void compare(Data a, Data b) {
+		assertEquals(a.ns,b.ns);
+		assertEquals(a.type,b.type);
+		assertEquals(a.instance,b.instance);
+		assertEquals(a.action,b.action);
+		assertEquals(a.roles(false).size(),b.roles(false).size());
+		for(String s: a.roles(false)) {
+			assertTrue(b.roles(false).contains(s));
+		}
+	}
+	private void checkData1(Data data, Data d) {
+		assertEquals(data.ns,d.ns);
+		assertEquals(data.type,d.type);
+		assertEquals(data.instance,d.instance);
+		assertEquals(data.action,d.action);
+		
+		Set<String> ss = d.roles(true);
+		assertEquals(1,ss.size());
+		assertTrue(ss.contains(data.ns+".dev"));
+	}
+	
+	private void checkData2(Data data, Data d) {
+		assertEquals(data.ns,d.ns);
+		assertEquals(data.type,d.type);
+		assertEquals(data.instance,d.instance);
+		assertEquals(data.action,d.action);
+		
+		Set<String> ss = d.roles(true);
+		assertEquals(2,ss.size());
+		assertTrue(ss.contains(data.ns+".dev"));
+		assertTrue(ss.contains(data.ns+".test"));
+	}
+
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_RoleDAO.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_RoleDAO.java
new file mode 100644
index 0000000..56875bd
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/JU_RoleDAO.java
@@ -0,0 +1,138 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+
+package com.att.dao.aaf.test;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+
+public class JU_RoleDAO extends AbsJUCass {
+
+	@Test
+	public void test()  throws IOException, APIException {
+		RoleDAO rd = new RoleDAO(trans, cluster, AUTHZ);
+		try {
+			Data data = new RoleDAO.Data();
+			data.ns = "com.test.ju_role";
+			data.name = "role1";
+
+//	        Bytification
+	        ByteBuffer bb = data.bytify();
+	        Data bdata = new RoleDAO.Data();
+	        bdata.reconstitute(bb);
+	        compare(data, bdata);
+
+			// CREATE
+			Result<Data> rdc = rd.create(trans, data);
+			assertTrue(rdc.isOK());
+			Result<List<Data>> rdrr;
+			try {
+				// READ
+				rdrr = rd.read(trans, data);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				Data d = rdrr.value.get(0);
+				assertEquals(d.perms.size(),0);
+				assertEquals(d.name,data.name);
+				assertEquals(d.ns,data.ns);
+
+				PermDAO.Data perm = new PermDAO.Data();
+				perm.ns = data.ns;
+				perm.type = "Perm";
+				perm.instance = "perm1";
+				perm.action = "write";
+				
+				// ADD Perm
+				Result<Void> rdar = rd.addPerm(trans, data, perm);
+				assertTrue(rdar.isOK());
+				rdrr = rd.read(trans, data);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				assertEquals(rdrr.value.get(0).perms.size(),1);
+				assertTrue(rdrr.value.get(0).perms.contains(perm.encode()));
+				
+				// DEL Perm
+				rdar = rd.delPerm(trans, data,perm);
+				assertTrue(rdar.isOK());
+				rdrr = rd.read(trans, data);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				assertEquals(rdrr.value.get(0).perms.size(),0);
+
+				// Add Child
+				Data data2 = new Data();
+				data2.ns = data.ns;
+				data2.name = data.name + ".2";
+				
+				rdc = rd.create(trans, data2);
+				assertTrue(rdc.isOK());
+				try {
+					rdrr = rd.readChildren(trans, data.ns,data.name);
+					assertTrue(rdrr.isOKhasData());
+					assertEquals(rdrr.value.size(),1);
+					assertEquals(rdrr.value.get(0).name,data.name + ".2");
+					
+					rdrr = rd.readChildren(trans, data.ns,"*");
+					assertTrue(rdrr.isOKhasData());
+					assertEquals(rdrr.value.size(),2);
+
+				} finally {
+					// Delete Child
+					rd.delete(trans, data2, true);
+				}
+	
+			} finally {
+				// DELETE
+				Result<Void> rddr = rd.delete(trans, data, true);
+				assertTrue(rddr.isOK());
+				rdrr = rd.read(trans, data);
+				assertTrue(rdrr.isOK() && rdrr.isEmpty());
+				assertEquals(rdrr.value.size(),0);
+			}
+		} finally {
+			rd.close(trans);
+		}
+	}
+
+	private void compare(Data a, Data b) {
+		assertEquals(a.name,b.name);
+		assertEquals(a.description, b.description);
+		assertEquals(a.ns,b.ns);
+		assertEquals(a.perms(false).size(),b.perms(false).size());
+		for(String p : a.perms(false)) {
+			assertTrue(b.perms(false).contains(p));
+		}
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/com/att/dao/aaf/test/NS_ChildUpdate.java b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/NS_ChildUpdate.java
new file mode 100644
index 0000000..8e2f78b
--- /dev/null
+++ b/auth/auth-cass/src/test/java/com/att/dao/aaf/test/NS_ChildUpdate.java
@@ -0,0 +1,74 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package com.att.dao.aaf.test;
+
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.env.AuthzEnv;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.Session;
+
+public class NS_ChildUpdate {
+
+	public static void main(String[] args) {
+		if(args.length < 3 ) {
+			System.out.println("usage: NS_ChildUpdate machine mechid (encrypted)passwd");
+		} else {
+			try {
+				AuthzEnv env = new AuthzEnv();
+				env.setLog4JNames("log.properties","authz","authz","audit","init","trace");
+				
+				Cluster cluster = Cluster.builder()
+						.addContactPoint(args[0])
+						.withCredentials(args[1],env.decrypt(args[2], false))
+						.build();
+	
+				Session session = cluster.connect(CassAccess.KEYSPACE);
+				try {
+					ResultSet result = session.execute("SELECT name,parent FROM ns");
+					int count = 0;
+					for(Row r : result.all()) {
+						++count;
+						String name = r.getString(0);
+						String parent = r.getString(1);
+						if(parent==null) {
+							int idx = name.lastIndexOf('.');
+							
+							parent = idx>0?name.substring(0, idx):".";
+							System.out.println("UPDATE " + name + " to " + parent);
+							session.execute("UPDATE ns SET parent='" + parent + "' WHERE name='" + name + "';");
+						}
+					}
+					System.out.println("Processed " + count + " records");
+				} finally {
+					session.close();
+					cluster.close();
+				}
+			} catch (Exception e) {
+				e.printStackTrace();
+			}
+		}
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/cass/hl/JU_Question.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/cass/hl/JU_Question.java
new file mode 100644
index 0000000..e06a8c5
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/cass/hl/JU_Question.java
@@ -0,0 +1,509 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.cass.hl;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertFalse;
+import static junit.framework.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.NsDAO;
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.UserRoleDAO;
+import org.onap.aaf.auth.dao.cass.NsDAO.Data;
+import org.onap.aaf.auth.dao.hl.Question;
+import org.onap.aaf.auth.dao.hl.Question.Access;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.cadi.principal.TaggedPrincipal;
+import org.onap.aaf.misc.env.Env;
+import org.onap.aaf.misc.env.TimeTaken;
+
+import com.att.dao.aaf.test.AbsJUCass;
+
+public class JU_Question extends AbsJUCass {
+
+	private static final int EXPIRES_IN = 60000000;
+	private static final String COM_TEST_JU = "com.test.ju_question";
+	private static final String JU9999_JU_TEST_COM = "ju9999@ju.test.com";
+	private static final String JU9998_JU_TEST_COM = "ju9998@ju.test.com";
+	private static final String READ = "read";
+	private static final int NFR_1 = 80;
+	private static final int NFR_2 = 4000;
+	private static final int ROLE_LEVEL1 = 1000;
+	private static final int PERM_LEVEL1 = 1000;
+//	private static final int PERM_LEVEL2 = 20;
+	private static Question q;
+	private static NsDAO.Data ndd;
+
+	@BeforeClass
+	public static void startupBeforeClass() throws Exception {
+		details=false;
+		AuthzTrans trans = env.newTransNoAvg();
+		q = new Question(trans,cluster,AUTHZ, false);
+		ndd = new NsDAO.Data();
+		ndd.name=COM_TEST_JU;
+		ndd.type=3; // app
+		ndd.parent="com.test";
+		ndd.description="Temporary Namespace for JU_Question";
+		q.nsDAO.create(trans, ndd);
+	}
+	
+	@AfterClass
+	public static void endAfterClass() throws Exception {
+		q.nsDAO.delete(trans, ndd,false);
+	}
+//    @Test
+	public void mayUserRead_EmptyPerm() {
+		PermDAO.Data pdd = new PermDAO.Data();
+		Result<NsDAO.Data> result = q.mayUser(trans,JU9999_JU_TEST_COM,pdd,Access.read);
+		assertFalse(result.isOK());
+	}
+
+//    @Test
+	public void mayUserRead_OnePermNotExist() {
+		Result<NsDAO.Data> result = q.mayUser(trans,JU9999_JU_TEST_COM,newPerm(0,0,READ),Access.read);
+		assertFalse(result.isOK());
+		assertEquals("Denied - ["+ JU9999_JU_TEST_COM +"] may not read Perm [" + COM_TEST_JU + ".myPerm0|myInstance0|read]",result.errorString());
+	}
+	
+//    @Test
+	public void mayUserRead_OnePermExistDenied() {
+		PermDAO.Data perm = newPerm(0,0,READ);
+		q.permDAO.create(trans,perm);
+		try {
+			Result<NsDAO.Data> result;
+			TimeTaken tt = trans.start("q.mayUser...", Env.SUB);
+			try {
+				result = q.mayUser(trans,JU9999_JU_TEST_COM,perm,Access.read);
+			} finally {
+				tt.done();
+				assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()<NFR_1);
+			}
+			assertFalse(result.isOK());
+			assertEquals("Denied - ["+ JU9999_JU_TEST_COM +"] may not read Perm ["+COM_TEST_JU + ".myPerm0|myInstance0|read]",result.errorString());
+		} finally {
+			q.permDAO.delete(trans, perm, false);
+		}
+	}
+
+//    @Test
+	public void mayUserRead_OnePermOneRoleExistOK() {
+		PermDAO.Data perm = newPerm(0,0,READ);
+		RoleDAO.Data role = newRole(0,perm);
+		UserRoleDAO.Data ur = newUserRole(role,JU9999_JU_TEST_COM,EXPIRES_IN);
+		try {
+			q.permDAO.create(trans,perm);
+			q.roleDAO.create(trans,role);
+			q.userRoleDAO.create(trans,ur);
+			
+			Result<NsDAO.Data> result;
+			TimeTaken tt = trans.start("q.mayUser...", Env.SUB);
+			try {
+				result = q.mayUser(trans,JU9999_JU_TEST_COM,perm,Access.read);
+			} finally {
+				tt.done();
+				assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()<NFR_1);
+			}
+			assertTrue(result.isOK());
+		} finally {
+			q.permDAO.delete(trans, perm, false);
+			q.roleDAO.delete(trans, role, false);
+			q.userRoleDAO.delete(trans, ur, false);
+		}
+	}
+
+//	@Test
+	public void filter_OnePermOneRoleExistOK() {
+		PermDAO.Data perm = newPerm(0,0,READ);
+		RoleDAO.Data role = newRole(0,perm);
+		UserRoleDAO.Data ur1 = newUserRole(role,JU9998_JU_TEST_COM,EXPIRES_IN);
+		UserRoleDAO.Data ur2 = newUserRole(role,JU9999_JU_TEST_COM,EXPIRES_IN);
+		try {
+			q.permDAO.create(trans,perm);
+			q.roleDAO.create(trans,role);
+			q.userRoleDAO.create(trans,ur1);
+			q.userRoleDAO.create(trans,ur2);
+			
+			Result<List<PermDAO.Data>> pres;
+			TimeTaken tt = trans.start("q.getPerms...", Env.SUB);
+			try {
+				pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9999_JU_TEST_COM);
+			} finally {
+				tt.done();
+				trans.info().log("filter_OnePermOneRleExistOK",tt);
+				assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()<NFR_1);
+			}
+			assertTrue(pres.isOK());
+			
+			try {
+				pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9998_JU_TEST_COM);
+			} finally {
+				tt.done();
+				trans.info().log("filter_OnePermOneRleExistOK No Value",tt);
+				assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()<NFR_1);
+			}
+			assertFalse(pres.isOKhasData());
+
+		} finally {
+			q.permDAO.delete(trans, perm, false);
+			q.roleDAO.delete(trans, role, false);
+			q.userRoleDAO.delete(trans, ur1, false);
+			q.userRoleDAO.delete(trans, ur2, false);
+		}
+	}
+
+//    @Test
+	public void mayUserRead_OnePermMultiRoleExistOK() {
+		PermDAO.Data perm = newPerm(0,0,READ);
+		List<RoleDAO.Data> lrole = new ArrayList<RoleDAO.Data>();
+		List<UserRoleDAO.Data> lur = new ArrayList<UserRoleDAO.Data>();
+		try {
+			q.permDAO.create(trans,perm);
+			for(int i=0;i<ROLE_LEVEL1;++i) {
+				RoleDAO.Data role = newRole(i,perm);
+				lrole.add(role);
+				q.roleDAO.create(trans,role);
+				
+				UserRoleDAO.Data ur = newUserRole(role,JU9999_JU_TEST_COM,60000000);
+				lur.add(ur);
+				q.userRoleDAO.create(trans,ur);
+			}
+			
+			Result<NsDAO.Data> result;
+			TimeTaken tt = trans.start("mayUserRead_OnePermMultiRoleExistOK", Env.SUB);
+			try {
+				result = q.mayUser(trans,JU9999_JU_TEST_COM,perm,Access.read);
+			} finally {
+				tt.done();
+				env.info().log(tt,ROLE_LEVEL1,"iterations");
+				assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);
+			}
+			assertTrue(result.isOK());
+		} finally {
+			q.permDAO.delete(trans, perm, false);
+			for(RoleDAO.Data role : lrole) {
+				q.roleDAO.delete(trans, role, false);
+			}
+			for(UserRoleDAO.Data ur : lur) {
+				q.userRoleDAO.delete(trans, ur, false);
+			}
+		}
+	}
+
+    @Test
+	public void mayUserRead_MultiPermOneRoleExistOK() {
+		RoleDAO.Data role = newRole(0);
+		UserRoleDAO.Data ur = newUserRole(role,JU9999_JU_TEST_COM,EXPIRES_IN);
+		List<PermDAO.Data> lperm = new ArrayList<PermDAO.Data>();
+		try {
+			for(int i=0;i<PERM_LEVEL1;++i) {
+				lperm.add(newPerm(i,i,READ,role));
+			}
+			q.roleDAO.create(trans, role);
+			q.userRoleDAO.create(trans, ur);
+			
+			Result<NsDAO.Data> result;
+			TimeTaken tt = trans.start("mayUserRead_MultiPermOneRoleExistOK", Env.SUB);
+			try {
+				result = q.mayUser(trans,JU9999_JU_TEST_COM,lperm.get(PERM_LEVEL1-1),Access.read);
+			} finally {
+				tt.done();
+				env.info().log(tt,PERM_LEVEL1,"iterations");
+				assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);
+			}
+			assertTrue(result.isOK());
+		} finally {
+			for(PermDAO.Data perm : lperm) {
+				q.permDAO.delete(trans, perm, false);
+			}
+			q.roleDAO.delete(trans, role, false);
+			q.userRoleDAO.delete(trans, ur, false);
+		}
+	}
+
+////	@Test
+//	public void mayUserRead_MultiPermMultiRoleExistOK() {
+//		List<PermDAO.Data> lperm = new ArrayList<PermDAO.Data>();
+//		List<RoleDAO.Data> lrole = new ArrayList<RoleDAO.Data>();
+//		List<UserRoleDAO.Data> lur = new ArrayList<UserRoleDAO.Data>();
+//
+//		try {
+//			RoleDAO.Data role;
+//			UserRoleDAO.Data ur;
+//			for(int i=0;i<ROLE_LEVEL1;++i) {
+//				lrole.add(role=newRole(i));
+//				q.roleDAO.create(trans, role);
+//				lur.add(ur=newUserRole(role, JU9999_JU_TEST_COM, EXPIRES_IN));
+//				q.userRoleDAO.create(trans, ur);
+//				for(int j=0;j<PERM_LEVEL2;++j) {
+//					lperm.add(newPerm(i,j,READ,role));
+//				}
+//			}
+//			
+//			Result<NsDAO.Data> result;
+//			TimeTaken tt = trans.start("mayUserRead_MultiPermMultiRoleExistOK", Env.SUB);
+//			try {
+//				result = q.mayUser(trans,JU9999_JU_TEST_COM,lperm.get(ROLE_LEVEL1*PERM_LEVEL2-1),Access.read);
+//			} finally {
+//				tt.done();
+//				env.info().log(tt,lperm.size(),"perms",", ",lrole.size(),"role");
+//				assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);
+//			}
+//			assertTrue(result.isOK());
+//		} finally {
+//			for(PermDAO.Data perm : lperm) {
+//				q.permDAO.delete(trans, perm, false);
+//			}
+//			for(RoleDAO.Data role : lrole) {
+//				q.roleDAO.delete(trans, role, false);
+//			}
+//			for(UserRoleDAO.Data ur : lur) {
+//				q.userRoleDAO.delete(trans, ur, false);
+//			}
+//		}
+//	}
+
+	@Test
+	public void mayUserRead_MultiPermMultiRoleExist_10x10() {
+		env.info().log("Original Filter Method 10x10");
+		mayUserRead_MultiPermMultiRoleExist(10,10);
+		env.info().log("New Filter Method 10x10");
+		mayUserRead_MultiPermMultiRoleExist_NewOK(10,10);
+	}
+
+//	@Test
+	public void mayUserRead_MultiPermMultiRoleExist_20x10() {
+		env.info().log("mayUserRead_MultiPermMultiRoleExist_20x10");
+		mayUserRead_MultiPermMultiRoleExist_NewOK(20,10);
+	}
+
+//	@Test
+	public void mayUserRead_MultiPermMultiRoleExist_100x10() {
+		env.info().log("mayUserRead_MultiPermMultiRoleExist_100x10");
+		mayUserRead_MultiPermMultiRoleExist_NewOK(100,10);
+	}
+
+//	@Test
+	public void mayUserRead_MultiPermMultiRoleExist_100x20() {
+		env.info().log("mayUserRead_MultiPermMultiRoleExist_100x20");
+		mayUserRead_MultiPermMultiRoleExist_NewOK(100,20);
+	}
+
+//	@Test
+	public void mayUserRead_MultiPermMultiRoleExist_1000x20() {
+		env.info().log("mayUserRead_MultiPermMultiRoleExist_1000x20");
+		mayUserRead_MultiPermMultiRoleExist_NewOK(1000,20);
+	}
+
+	private void mayUserRead_MultiPermMultiRoleExist(int roleLevel, int permLevel) {
+		List<PermDAO.Data> lperm = new ArrayList<PermDAO.Data>();
+		List<RoleDAO.Data> lrole = new ArrayList<RoleDAO.Data>();
+		List<UserRoleDAO.Data> lur = new ArrayList<UserRoleDAO.Data>();
+		load(roleLevel, permLevel, lperm,lrole,lur);
+
+
+		Result<List<PermDAO.Data>> pres;
+		trans.setUser(new TaggedPrincipal() {
+			@Override
+			public String getName() {
+				return JU9999_JU_TEST_COM;
+			}
+
+			@Override
+			public String tag() {
+				return "JUnit";
+			}
+
+			@Override
+			public String personalName() {
+				return JU9998_JU_TEST_COM;
+			}
+		});
+
+		try {
+			TimeTaken group = trans.start("  Original Security Method (1st time)", Env.SUB);
+			try {
+				TimeTaken tt = trans.start("    Get User Perms for "+JU9998_JU_TEST_COM, Env.SUB);
+				try {
+					pres = q.getPermsByUser(trans,JU9998_JU_TEST_COM,true);
+				} finally {
+					tt.done();
+					env.info().log(tt,"  Looked up (full) getPermsByUser for",JU9998_JU_TEST_COM);
+				}
+				assertTrue(pres.isOK());
+				tt = trans.start("    q.mayUser", Env.SUB);
+				List<PermDAO.Data> reduced = new ArrayList<PermDAO.Data>();
+				
+				try {
+					for(PermDAO.Data p : pres.value) {
+						Result<Data> r = q.mayUser(trans,JU9999_JU_TEST_COM,p,Access.read);
+						if(r.isOK()) {
+							reduced.add(p);
+						}
+					}
+				} finally {
+					tt.done();
+					env.info().log(tt," reduced" + pres.value.size(),"perms","to",reduced.size());
+	//				assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);
+				}
+	//			assertFalse(result.isOK());
+			} finally {
+				group.done();
+				env.info().log(group,"  Original Validation Method (1st pass)");
+			}
+			
+
+		} finally {
+			unload(lperm, lrole, lur);
+		}
+	}
+
+	private void mayUserRead_MultiPermMultiRoleExist_NewOK(int roleLevel, int permLevel) {
+		List<PermDAO.Data> lperm = new ArrayList<PermDAO.Data>();
+		List<RoleDAO.Data> lrole = new ArrayList<RoleDAO.Data>();
+		List<UserRoleDAO.Data> lur = new ArrayList<UserRoleDAO.Data>();
+		load(roleLevel, permLevel, lperm,lrole,lur);
+
+		try {
+
+			Result<List<PermDAO.Data>> pres;
+			TimeTaken tt = trans.start("  mayUserRead_MultiPermMultiRoleExist_New New Filter", Env.SUB);
+			try {
+				pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9998_JU_TEST_COM);
+			} finally {
+				tt.done();
+				env.info().log(tt,lperm.size(),"perms",", ",lrole.size(),"role", lur.size(), "UserRoles");
+//				assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);
+			}
+//			assertTrue(pres.isOKhasData());
+
+			tt = trans.start("  mayUserRead_MultiPermMultiRoleExist_New New Filter (2nd time)", Env.SUB);
+			try {
+				pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9998_JU_TEST_COM);
+			} finally {
+				tt.done();
+				env.info().log(tt,lperm.size(),"perms",", ",lrole.size(),"role", lur.size(), "UserRoles");
+				assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);
+			}
+//			assertTrue(pres.isOKhasData());
+
+		} finally {
+			unload(lperm, lrole, lur);
+		}
+	}
+
+
+	private void load(int roleLevel, int permLevel,	List<PermDAO.Data> lperm , List<RoleDAO.Data> lrole, List<UserRoleDAO.Data> lur) {
+		RoleDAO.Data role;
+		UserRoleDAO.Data ur;
+		PermDAO.Data perm;
+		
+		int onethirdR=roleLevel/3;
+		int twothirdR=onethirdR*2;
+		int onethirdP=permLevel/3;
+		int twothirdP=onethirdP*2;
+
+		for(int i=0;i<roleLevel;++i) {
+			lrole.add(role=newRole(i));
+			if(i<onethirdR) { // one has
+				lur.add(ur=newUserRole(role, JU9998_JU_TEST_COM, EXPIRES_IN));
+				q.userRoleDAO.create(trans, ur);
+				for(int j=0;j<onethirdP;++j) {
+					lperm.add(perm=newPerm(i,j,READ,role));
+					q.permDAO.create(trans, perm);
+				}
+			} else if(i<twothirdR) { // both have
+				lur.add(ur=newUserRole(role, JU9998_JU_TEST_COM, EXPIRES_IN));
+				q.userRoleDAO.create(trans, ur);
+				lur.add(ur=newUserRole(role, JU9999_JU_TEST_COM, EXPIRES_IN));
+				q.userRoleDAO.create(trans, ur);
+				for(int j=onethirdP;j<twothirdP;++j) {
+					lperm.add(perm=newPerm(i,j,READ,role));
+					q.permDAO.create(trans, perm);
+				}
+			} else { // other has
+				lur.add(ur=newUserRole(role, JU9999_JU_TEST_COM, EXPIRES_IN));
+				q.userRoleDAO.create(trans, ur);
+				for(int j=twothirdP;j<permLevel;++j) {
+					lperm.add(perm=newPerm(i,j,READ,role));
+					q.permDAO.create(trans, perm);
+				}
+			}
+			q.roleDAO.create(trans, role);
+		}
+
+	}
+	
+	private void unload(List<PermDAO.Data> lperm , List<RoleDAO.Data> lrole, List<UserRoleDAO.Data> lur) {
+		for(PermDAO.Data perm : lperm) {
+			q.permDAO.delete(trans, perm, false);
+		}
+		for(RoleDAO.Data role : lrole) {
+			q.roleDAO.delete(trans, role, false);
+		}
+		for(UserRoleDAO.Data ur : lur) {
+			q.userRoleDAO.delete(trans, ur, false);
+		}
+
+	}
+	private PermDAO.Data newPerm(int permNum, int instNum, String action, RoleDAO.Data ... grant) {
+		PermDAO.Data pdd = new PermDAO.Data();
+		pdd.ns=COM_TEST_JU;
+		pdd.type="myPerm"+permNum;
+		pdd.instance="myInstance"+instNum;
+		pdd.action=action;
+		for(RoleDAO.Data r : grant) {
+			pdd.roles(true).add(r.fullName());
+			r.perms(true).add(pdd.encode());
+		}
+		return pdd;
+	}
+
+	private RoleDAO.Data newRole(int roleNum, PermDAO.Data ... grant) {
+		RoleDAO.Data rdd = new RoleDAO.Data();
+		rdd.ns = COM_TEST_JU+roleNum;
+		rdd.name = "myRole"+roleNum;
+		for(PermDAO.Data p : grant) {
+			rdd.perms(true).add(p.encode());
+			p.roles(true).add(rdd.fullName());
+		}
+		return rdd;
+	}
+
+	private UserRoleDAO.Data newUserRole(RoleDAO.Data role,String user, long offset) {
+		UserRoleDAO.Data urd = new UserRoleDAO.Data();
+		urd.user=user;
+		urd.role(role);
+		urd.expires=new Date(System.currentTimeMillis()+offset);
+		return urd;
+	}
+
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/cass/hl/JU_Question2.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/cass/hl/JU_Question2.java
new file mode 100644
index 0000000..bfb6fd4
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/cass/hl/JU_Question2.java
@@ -0,0 +1,73 @@
+/**
+ * ============LICENSE_START====================================================
+ * org.onap.aaf
+ * ===========================================================================
+ * Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+ * ===========================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END====================================================
+ *
+ */
+
+package org.onap.aaf.auth.cass.hl;
+
+import java.io.IOException;
+
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.onap.aaf.auth.dao.hl.Question;
+
+public class JU_Question2 {
+
+	@BeforeClass
+	public static void setUpBeforeClass() throws Exception {
+	}
+
+	@AfterClass
+	public static void tearDownAfterClass() throws Exception {
+	}
+
+	@Before
+	public void setUp() throws Exception {
+	}
+
+	@After
+	public void tearDown() throws Exception {
+	}
+
+	@Test
+	public void test() throws IOException {
+		String s,u;
+		System.out.println((s="com") + '=' + (u=Question.toUnique(s)));
+		System.out.println(u+'='+(Question.fromUnique(u)));
+		System.out.println((s="org.osaaf.cdp.Tenant32_what-a-joy") + '=' + (u=Question.toUnique(s)));
+		System.out.println(u+'='+(Question.fromUnique(u)));
+		System.out.println((s="org.osaaf.cdp") + '=' + (u=Question.toUnique(s)));
+		System.out.println(u+'='+(Question.fromUnique(u)));
+
+//		Assert.assertSame(s="com", Question.toUnique(s));
+//		Assert.assertSame(s="", Question.toUnique(s));
+//		Assert.assertSame(s="com.aa", Question.toUnique(s));
+//		Assert.assertNotSame(s="com.Aa", Question.toUnique(s));
+//		Assert.assertEquals("com.aa", Question.toUnique(s));
+//		Assert.assertNotSame(s="com.Aa.1", Question.toUnique(s));
+//		Assert.assertEquals("com.aa.1", Question.toUnique(s));
+//		Assert.assertNotSame(s="com.Aa.1-3", Question.toUnique(s));
+//		Assert.assertEquals("com.aa.13", Question.toUnique(s));
+//		Assert.assertEquals("com.aa.13", Question.toUnique("com.aA.1_3"));
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_Cached.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_Cached.java
new file mode 100644
index 0000000..22f9a6f
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_Cached.java
@@ -0,0 +1,124 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao;
+
+import static org.junit.Assert.*;
+
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.Timer;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.onap.aaf.auth.cache.Cache;
+import org.onap.aaf.auth.cache.Cache.Dated;
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.Cached;
+import org.onap.aaf.auth.dao.Cached.Getter;
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.Trans;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@RunWith(PowerMockRunner.class)
+public class JU_Cached {
+	Cached cached;
+	@Mock
+	CIDAO<Trans> ciDaoMock;
+	@Mock
+	AuthzEnv authzEnvMock;
+	@Mock
+	CIDAO<AuthzTrans> cidaoATMock;
+	
+	String name = "nameString";
+	
+	@Before
+	public void setUp(){
+		cached = new Cached(ciDaoMock, name, (int)0, 30000L);
+	}
+	
+	@Test(expected=ArithmeticException.class)
+	public void testCachedIdx(){
+		int Result = cached.cacheIdx("1234567890");		
+	}
+	
+	@Test(expected=ArithmeticException.class)
+	public void testInvalidate(){
+		int Res = cached.invalidate(name);
+	}
+	
+	@SuppressWarnings("static-access")
+	@Test
+	public void testStopTimer(){
+		cached.stopTimer();
+		assertTrue(true);
+	}
+
+	@SuppressWarnings("static-access")
+	@Test
+	public void testStartRefresh(){
+		cached.startRefresh(authzEnvMock, cidaoATMock);
+		assertTrue(true);
+	}
+//	@Mock
+//	Trans transMock;
+//	@Mock
+//	Getter<DAO> getterMock;
+//	
+//	@Test
+//	public void testGet(){
+//		cached.get(transMock, name, getterMock);
+//		fail("not implemented");
+//	}
+//	
+//	@SuppressWarnings("unchecked")
+//	public Result<List<DATA>> get(TRANS trans, String key, Getter<DATA> getter) {
+//		List<DATA> ld = null;
+//		Result<List<DATA>> rld = null;
+//		
+//		int cacheIdx = cacheIdx(key);
+//		Map<String, Dated> map = ((Map<String,Dated>)cache[cacheIdx]);
+//		
+//		// Check for saved element in cache
+//		Dated cached = map.get(key);
+//		// Note: These Segment Timestamps are kept up to date with DB
+//		Date dbStamp = info.get(trans, name,cacheIdx);
+//		
+//		// Check for cache Entry and whether it is still good (a good Cache Entry is same or after DBEntry, so we use "before" syntax)
+//		if(cached!=null && dbStamp.before(cached.timestamp)) {
+//			ld = (List<DATA>)cached.data;
+//			rld = Result.ok(ld);
+//		} else {
+//			rld = getter.get();
+//			if(rld.isOK()) { // only store valid lists
+//				map.put(key, new Dated(rld.value));  // successful item found gets put in cache
+////			} else if(rld.status == Result.ERR_Backend){
+////				map.remove(key);
+//			}
+//		}
+//		return rld;
+//	}
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CachedDAO.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CachedDAO.java
new file mode 100644
index 0000000..14612a1
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CachedDAO.java
@@ -0,0 +1,64 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao;
+
+import static org.junit.Assert.*;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.CachedDAO;
+import org.onap.aaf.auth.dao.DAO;
+import org.onap.aaf.misc.env.Trans;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@RunWith(PowerMockRunner.class)
+public class JU_CachedDAO {
+	CachedDAO cachedDAO;
+	@Mock
+	DAO daoMock;
+	@Mock
+	CIDAO<Trans> ciDAOMock; 
+	int segsize=1;
+	Object[ ] objs = new Object[2];
+	
+	@Before
+	public void setUp(){
+		objs[0] = "helo";
+		objs[1] = "polo";
+		cachedDAO = new CachedDAO(daoMock, ciDAOMock, segsize, segsize);
+	}
+		
+	@Test
+	public void testKeyFromObjs(){
+		String result = cachedDAO.keyFromObjs(objs);
+		System.out.println("value of resut " +result);
+		assertTrue(true);
+	}
+	
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassAccess.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassAccess.java
new file mode 100644
index 0000000..c73371e
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassAccess.java
@@ -0,0 +1,73 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.Env;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+//import org.onap.aaf.auth.dao.CassAccess.Resettable;
+import com.datastax.driver.core.Cluster.Builder;
+
+@RunWith(PowerMockRunner.class)
+public class JU_CassAccess {
+	CassAccess cassAccess;
+	
+	public static final String KEYSPACE = "authz";
+	public static final String CASSANDRA_CLUSTERS = "cassandra.clusters";
+	public static final String CASSANDRA_CLUSTERS_PORT = "cassandra.clusters.port";
+	public static final String CASSANDRA_CLUSTERS_USER_NAME = "cassandra.clusters.user";
+	public static final String CASSANDRA_CLUSTERS_PASSWORD = "cassandra.clusters.password";
+	public static final String CASSANDRA_RESET_EXCEPTIONS = "cassandra.reset.exceptions";
+	public static final String LATITUDE = "LATITUDE";
+	public static final String LONGITUDE = "LONGITUDE";
+	//private static final List<Resettable> resetExceptions = new ArrayList<Resettable>();
+	public static final String ERR_ACCESS_MSG = "Accessing Backend";
+	private static Builder cb = null;
+	@Mock
+	Env envMock;
+	String prefix=null;
+	
+	@Before
+	public void setUp(){
+		cassAccess = new CassAccess();
+	}
+
+
+	@Test(expected=APIException.class)
+	public void testCluster() throws APIException, IOException {
+		cassAccess.cluster(envMock, prefix);
+		
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassDAOImpl.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassDAOImpl.java
new file mode 100644
index 0000000..d06e38f
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassDAOImpl.java
@@ -0,0 +1,96 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao;
+
+import static org.junit.Assert.*;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.dao.Loader;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.misc.env.Data;
+import org.onap.aaf.misc.env.Trans;
+import org.onap.aaf.misc.env.TransStore;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.ConsistencyLevel;
+
+@RunWith(PowerMockRunner.class)
+public class JU_CassDAOImpl {
+
+public static final String CASS_READ_CONSISTENCY="cassandra.readConsistency";
+public static final String CASS_WRITE_CONSISTENCY="cassandra.writeConsistency";
+
+CassDAOImpl cassDAOImpl;
+
+
+@Mock
+TransStore transStoreMock;
+@SuppressWarnings("rawtypes")
+Class dcMock;
+@SuppressWarnings("rawtypes")
+Loader loaderMock;
+Cluster clusterMock;
+Class<Data> classDataMock;
+ConsistencyLevel consistencyLevelMock;
+Trans transMock;
+
+@Mock
+AuthzTrans authzTransMock;
+
+
+
+	@SuppressWarnings({ "rawtypes", "unchecked" })
+	@Before
+	public void setUp()
+	{
+		String name = "name";
+		String keySpace = "keySpace";
+		String table = "table";
+		cassDAOImpl = new CassDAOImpl(transStoreMock, name, clusterMock, keySpace, classDataMock, table, consistencyLevelMock, consistencyLevelMock);
+	}
+
+	//TODO: Gabe [JUnit] Visibility issue
+	@Test 
+	public void testReadConsistency() {
+		String table = "users";
+		PowerMockito.when(authzTransMock.getProperty(CASS_READ_CONSISTENCY+'.'+table)).thenReturn("TWO");
+		ConsistencyLevel consistencyLevel = cassDAOImpl.readConsistency(authzTransMock, table);
+		System.out.println("Consistency level" + consistencyLevel.name());
+		assertEquals("TWO", consistencyLevel.name());
+	}
+	
+	@Test 
+	public void testWriteConsistency() {
+		String table = "users";
+		PowerMockito.when(authzTransMock.getProperty(CASS_WRITE_CONSISTENCY+'.'+table)).thenReturn(null);
+		ConsistencyLevel consistencyLevel = cassDAOImpl.writeConsistency(authzTransMock, table);
+		System.out.println("Consistency level" + consistencyLevel.name());
+		assertEquals("ONE", consistencyLevel.name());
+	}
+	
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_DAOException.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_DAOException.java
new file mode 100644
index 0000000..8cfb852
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_DAOException.java
@@ -0,0 +1,49 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao;
+
+import static org.junit.Assert.*;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.onap.aaf.auth.dao.DAOException;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@RunWith(PowerMockRunner.class)
+public class JU_DAOException {
+DAOException daoException;
+
+	//DAOException daoException = new DAOException();
+	String message = "message";
+	Throwable cause;	
+	@Before
+	public void setUp(){
+	daoException = new DAOException();	
+	}
+
+	@Test
+	public void test(){
+		assertTrue(true);
+	}
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/AbsJUCass.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/AbsJUCass.java
new file mode 100644
index 0000000..3064de5
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/AbsJUCass.java
@@ -0,0 +1,200 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.security.NoSuchAlgorithmException;
+import java.util.Properties;
+
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.onap.aaf.auth.dao.CassAccess;
+import org.onap.aaf.auth.dao.CassDAOImpl;
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.cadi.Hash;
+import org.onap.aaf.cadi.Symm;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.Env;
+import org.onap.aaf.misc.env.Trans.Metric;
+
+import com.datastax.driver.core.Cluster;
+
+import junit.framework.Assert;
+
+/**
+ * Do Setup of Cassandra for Cassandra JUnit Testing
+ * 
+ *
+ */
+public class AbsJUCass {
+	protected static final String AUTHZ = "authz";
+	protected static Cluster cluster;
+	protected static AuthzEnv env;
+	protected static int iterations = 0;
+	protected static float totals=0.0f;
+	protected static float remote = 0.0f;
+	protected static float json = 0.0f;
+	protected static AuthzTrans trans;
+	protected static boolean details = true;
+	
+	@BeforeClass 
+	public static void startup() throws APIException, IOException {
+		synchronized(AUTHZ) {
+			if(env==null) {
+				final String resource = "cadi.properties";
+	            File f = new File("etc" + resource);
+	            InputStream is=null;
+	            Properties props = new Properties();
+	            try {
+	                if(f.exists()) {
+	                    is = new FileInputStream(f);
+	                } else {
+	                    URL rsrc = ClassLoader.getSystemResource(resource);
+	                    is = rsrc.openStream();
+	                }
+	                props.load(is);
+	            } finally {
+	                if(is==null) {
+	                	env= new AuthzEnv();
+	                    Assert.fail(resource + " must exist in etc dir, or in Classpath");
+	                }
+	                is.close();
+	            }
+				env = new AuthzEnv(props);
+			}
+		}
+		cluster = CassAccess.cluster(env,"LOCAL");
+
+		env.info().log("Connecting to Cluster");
+		try {
+			cluster.connect(AUTHZ);
+		} catch(Exception e) {
+			cluster=null;
+			env.error().log(e);
+			Assert.fail("Not able to connect to DB: " + e.getLocalizedMessage());
+		}
+		env.info().log("Connected");
+		
+		// Load special data here
+		
+		// WebPhone
+		env.setProperty("java.naming.provider.url","ldap://ldap.webphone.att.com:389");
+		env.setProperty("com.sun.jndi.ldap.connect.pool","true");
+		
+		iterations = 0;
+		
+	}
+	
+	@AfterClass
+	public static void shutdown() {
+		if(cluster!=null) {
+			cluster.close();
+			cluster = null;
+		}
+	}
+
+	@Before
+	public void newTrans() {
+		trans = env.newTrans();
+		
+		trans.setProperty(CassDAOImpl.USER_NAME, System.getProperty("user.name"));
+	}
+	
+	@After
+	public void auditTrail() {
+		if(totals==0) { // "updateTotals()" was not called... just do one Trans
+			StringBuilder sb = new StringBuilder();
+			Metric metric = trans.auditTrail(4, sb, Env.JSON, Env.REMOTE);
+			if(details) {
+				env.info().log(
+				sb,
+				"Total time:",
+				totals += metric.total,
+				"JSON time: ",
+				metric.buckets[0],
+				"REMOTE time: ",
+				metric.buckets[1]
+				);
+			} else {
+				totals += metric.total;
+			}
+		}
+	}
+	
+	protected void updateTotals() {
+		Metric metric = trans.auditTrail(0, null, Env.JSON, Env.REMOTE);
+		totals+=metric.total;
+		json  +=metric.buckets[0];
+		remote+=metric.buckets[1];
+	}
+
+
+	@AfterClass
+	public static void print() {
+		float transTime;
+		if(iterations==0) {
+			transTime=totals;
+		} else {
+			transTime=totals/iterations;
+		}
+		env.info().log(
+		"Total time:",
+		totals,   
+		"JSON time:",
+		json,
+		"REMOTE time:",
+		remote,
+		"Iterations:",
+		iterations,
+		"Transaction time:",
+		transTime
+		);
+	}
+	
+	/**
+	 * Take a User/Pass and turn into an MD5 Hashed BasicAuth
+	 * 
+	 * @param user
+	 * @param pass
+	 * @return
+	 * @throws IOException
+	 * @throws NoSuchAlgorithmException
+	 */
+	//TODO: Gabe [JUnit] Issue
+	public static byte[] userPassToBytes(String user, String pass)
+			throws IOException, NoSuchAlgorithmException {
+		// Take the form of BasicAuth, so as to allow any character in Password
+		// (this is an issue in 1.0)
+		// Also, it makes it quicker to evaluate Basic Auth direct questions
+		String ba = Symm.base64url.encode(user + ':' + pass);
+		// Take MD5 Hash, so that data in DB can't be reversed out.
+		return Hash.hashMD5(ba.getBytes());
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_ApprovalDAO.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_ApprovalDAO.java
new file mode 100644
index 0000000..13a13ed
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_ApprovalDAO.java
@@ -0,0 +1,146 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertTrue;
+
+import java.util.Date;
+import java.util.List;
+import java.util.UUID;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.ApprovalDAO;
+import org.onap.aaf.auth.dao.cass.ApprovalDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+
+public class JU_ApprovalDAO  extends AbsJUCass {
+	@Test
+	public void testCRUD() throws Exception {
+		ApprovalDAO rrDAO = new ApprovalDAO(trans, cluster, AUTHZ);
+		ApprovalDAO.Data data = new ApprovalDAO.Data();
+		
+		data.ticket = UUID.randomUUID(); // normally, read from Future object
+		data.user = "testid@test.com";
+		data.approver = "mySuper@att.com";
+		data.type = "supervisor";
+		data.status = "pending";
+		data.operation = "C";
+		data.updated = new Date();
+		
+		try {
+			// Test create
+			rrDAO.create(trans, data);
+			
+			// Test Read by Ticket
+			Result<List<ApprovalDAO.Data>> rlad;
+			rlad = rrDAO.readByTicket(trans, data.ticket);
+			assertTrue(rlad.isOK());
+			assertEquals(1,rlad.value.size());
+			compare(data,rlad.value.get(0));
+			
+			// Hold onto original ID for deletion, and read tests
+			UUID id = rlad.value.get(0).id;
+			
+			try {
+				// Test Read by User
+				rlad = rrDAO.readByUser(trans, data.user);
+				assertTrue(rlad.isOKhasData());
+				boolean ok = false;
+				for(ApprovalDAO.Data a : rlad.value) {
+					if(a.id.equals(id)) {
+						ok = true;
+						compare(data,a);
+					}
+				}
+				assertTrue(ok);
+	
+				// Test Read by Approver
+				rlad = rrDAO.readByApprover(trans, data.approver);
+				assertTrue(rlad.isOKhasData());
+				ok = false;
+				for(ApprovalDAO.Data a : rlad.value) {
+					if(a.id.equals(id)) {
+						ok = true;
+						compare(data,a);
+					}
+				}
+				assertTrue(ok);
+	
+				// Test Read by ID
+				rlad = rrDAO.read(trans, id);
+				assertTrue(rlad.isOKhasData());
+				ok = false;
+				for(ApprovalDAO.Data a : rlad.value) {
+					if(a.id.equals(id)) {
+						ok = true;
+						compare(data,a);
+					}
+				}
+				assertTrue(ok);
+	
+				// Test Update
+				data.status = "approved";
+				data.id = id;
+				assertTrue(rrDAO.update(trans, data).isOK());
+				
+				rlad = rrDAO.read(trans, id);
+				assertTrue(rlad.isOKhasData());
+				ok = false;
+				for(ApprovalDAO.Data a : rlad.value) {
+					if(a.id.equals(id)) {
+						ok = true;
+						compare(data,a);
+					}
+				}
+				assertTrue(ok);
+
+			} finally {
+				// Delete
+				data.id = id;
+				rrDAO.delete(trans, data, true);
+				rlad = rrDAO.read(trans, id);
+				assertTrue(rlad.isOK());
+				assertTrue(rlad.isEmpty());
+			}
+			
+		} finally {
+			rrDAO.close(trans);
+		}
+	}
+
+	private void compare(Data d1, Data d2) {
+		assertNotSame(d1.id,d2.id);
+		assertEquals(d1.ticket,d2.ticket);
+		assertEquals(d1.user,d2.user);
+		assertEquals(d1.approver,d2.approver);
+		assertEquals(d1.type,d2.type);
+		assertEquals(d1.status,d2.status);
+		assertEquals(d1.operation,d2.operation);
+		assertNotSame(d1.updated,d2.updated);
+	}
+
+	
+	
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_ArtiDAO.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_ArtiDAO.java
new file mode 100644
index 0000000..f095e32
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_ArtiDAO.java
@@ -0,0 +1,136 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.security.NoSuchAlgorithmException;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.ArtiDAO;
+import org.onap.aaf.auth.dao.cass.ArtiDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+
+/**
+ * UserDAO unit test.
+ * User: tp007s
+ * Date: 7/19/13
+ */
+public class JU_ArtiDAO  extends AbsJUCass {
+	@Test
+	public void test() throws IOException, NoSuchAlgorithmException {
+		ArtiDAO adao = new ArtiDAO(trans,cluster,"authz");
+		try {
+			// Create
+	        ArtiDAO.Data data = new ArtiDAO.Data();
+	        data.mechid="m55555@perturbed.att.com";
+	        data.machine="perturbed1232.att.com";
+	        data.type(false).add("file");
+	        data.type(false).add("jks");
+	        data.sponsor="Fred Flintstone";
+	        data.ca="devl";
+	        data.dir="/opt/app/aft/keys";
+	        data.ns="kumquat";
+	        data.os_user="aft";
+	        data.notify="email:myname@bogus.email.com";
+	        data.expires=new Date();
+	        
+//	        Bytification
+	        ByteBuffer bb = data.bytify();
+	        Data bdata = new ArtiDAO.Data();
+	        bdata.reconstitute(bb);
+	        checkData1(data, bdata);
+	        
+	        
+//	        DB work
+			adao.create(trans,data);
+			try {
+				// Validate Read with key fields in Data
+				Result<List<ArtiDAO.Data>> rlcd = adao.read(trans,data);
+				assertTrue(rlcd.isOKhasData());
+				for(ArtiDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+	
+				// Validate Read with key fields in Data
+				rlcd = adao.read(trans,data.mechid, data.machine);
+				assertTrue(rlcd.isOKhasData());
+				for(ArtiDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+	
+				// By Machine
+				rlcd = adao.readByMachine(trans,data.machine);
+				assertTrue(rlcd.isOKhasData());
+				for(ArtiDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+				
+				// By MechID
+				rlcd = adao.readByMechID(trans,data.mechid);
+				assertTrue(rlcd.isOKhasData());
+				for(ArtiDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+	
+				// Update
+				data.sponsor = "Wilma Flintstone";
+				adao.update(trans,data);
+				rlcd = adao.read(trans,data);
+				assertTrue(rlcd.isOKhasData());
+				for(ArtiDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}			
+
+			} finally {
+				// Always delete data, even if failure.
+				adao.delete(trans,data, true);
+			}
+		} finally {
+			adao.close(trans);
+		}
+
+		
+	}
+
+	private void checkData1(Data data, Data d) {
+		assertEquals(data.mechid,d.mechid);
+		assertEquals(data.machine,d.machine);
+		assertEquals(data.type(false).size(),d.type(false).size());
+		for(String s: data.type(false)) {
+			assertTrue(d.type(false).contains(s));
+		}
+		assertEquals(data.sponsor,d.sponsor);
+		assertEquals(data.ca,d.ca);
+		assertEquals(data.dir,d.dir);
+		assertEquals(data.ns,d.ns);
+		assertEquals(data.os_user,d.os_user);
+		assertEquals(data.notify,d.notify);
+		assertEquals(data.expires,d.expires);
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_Bytification.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_Bytification.java
new file mode 100644
index 0000000..e316ac7
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_Bytification.java
@@ -0,0 +1,265 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Date;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.CredDAO;
+import org.onap.aaf.auth.dao.cass.NsDAO;
+import org.onap.aaf.auth.dao.cass.NsType;
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.UserRoleDAO;
+
+public class JU_Bytification {
+
+	@Test
+	public void testNS() throws IOException {
+		
+		// Normal
+		NsDAO.Data ns = new NsDAO.Data();
+		ns.name = "org.osaaf.<pass>";
+		ns.type = NsType.APP.type;
+
+		ByteBuffer bb = ns.bytify();
+		
+		NsDAO.Data nsr = new NsDAO.Data();
+		nsr.reconstitute(bb);
+		check(ns,nsr);
+		
+		// Empty admin
+//		ns.admin(true).clear();
+		bb = ns.bytify();
+		nsr = new NsDAO.Data();
+		nsr.reconstitute(bb);
+		check(ns,nsr);
+		
+		// Empty responsible
+//		ns.responsible(true).clear();
+		bb = ns.bytify();
+		nsr = new NsDAO.Data();
+		nsr.reconstitute(bb);
+		check(ns,nsr);
+
+		bb = ns.bytify();
+		nsr = new NsDAO.Data();
+		nsr.reconstitute(bb);
+		check(ns,nsr);
+	}
+	
+	private void check(NsDAO.Data a, NsDAO.Data b) {
+		assertEquals(a.name,b.name);
+		assertEquals(a.type,b.type);
+//		assertEquals(a.admin.size(),b.admin.size());
+		
+//		for(String s: a.admin) {
+//			assertTrue(b.admin.contains(s));
+//		}
+//		
+//		assertEquals(a.responsible.size(),b.responsible.size());
+//		for(String s: a.responsible) {
+//			assertTrue(b.responsible.contains(s));
+//		}
+	}
+
+	@Test
+	public void testRole() throws IOException {
+		RoleDAO.Data rd1 = new RoleDAO.Data();
+		rd1.ns = "org.osaaf.<pass>";
+		rd1.name = "my.role";
+		rd1.perms(true).add("org.osaaf.<pass>.my.Perm|myInstance|myAction");
+		rd1.perms(true).add("org.osaaf.<pass>.my.Perm|myInstance|myAction2");
+
+		// Normal
+		ByteBuffer bb = rd1.bytify();
+		RoleDAO.Data rd2 = new RoleDAO.Data();
+		rd2.reconstitute(bb);
+		check(rd1,rd2);
+		
+		// Overshoot Buffer
+		StringBuilder sb = new StringBuilder(300);
+		sb.append("role|instance|veryLongAction...");
+		for(int i=0;i<280;++i) {
+			sb.append('a');
+		}
+		rd1.perms(true).add(sb.toString());
+		bb = rd1.bytify();
+		rd2 = new RoleDAO.Data();
+		rd2.reconstitute(bb);
+		check(rd1,rd2);
+		
+		// No Perms
+		rd1.perms.clear();
+		
+		bb = rd1.bytify();
+		rd2 = new RoleDAO.Data();
+		rd2.reconstitute(bb);
+		check(rd1,rd2);
+		
+		// 1000 Perms
+		for(int i=0;i<1000;++i) {
+			rd1.perms(true).add("com|inst|action"+ i);
+		}
+
+		bb = rd1.bytify();
+		rd2 = new RoleDAO.Data();
+		rd2.reconstitute(bb);
+		check(rd1,rd2);
+
+	}
+	
+	private void check(RoleDAO.Data a, RoleDAO.Data b) {
+		assertEquals(a.ns,b.ns);
+		assertEquals(a.name,b.name);
+		
+		assertEquals(a.perms.size(),b.perms.size());
+		for(String s: a.perms) {
+			assertTrue(b.perms.contains(s));
+		}
+	}
+
+	@Test
+	public void testPerm() throws IOException {
+		PermDAO.Data pd1 = new PermDAO.Data();
+		pd1.ns = "org.osaaf.<pass>";
+		pd1.type = "my.perm";
+		pd1.instance = "instance";
+		pd1.action = "read";
+		pd1.roles(true).add("org.osaaf.<pass>.my.Role");
+		pd1.roles(true).add("org.osaaf.<pass>.my.Role2");
+
+		// Normal
+		ByteBuffer bb = pd1.bytify();
+		PermDAO.Data rd2 = new PermDAO.Data();
+		rd2.reconstitute(bb);
+		check(pd1,rd2);
+		
+		// No Perms
+		pd1.roles.clear();
+		
+		bb = pd1.bytify();
+		rd2 = new PermDAO.Data();
+		rd2.reconstitute(bb);
+		check(pd1,rd2);
+		
+		// 1000 Perms
+		for(int i=0;i<1000;++i) {
+			pd1.roles(true).add("org.osaaf.<pass>.my.Role"+ i);
+		}
+
+		bb = pd1.bytify();
+		rd2 = new PermDAO.Data();
+		rd2.reconstitute(bb);
+		check(pd1,rd2);
+
+	}
+	
+	private void check(PermDAO.Data a, PermDAO.Data b) {
+		assertEquals(a.ns,b.ns);
+		assertEquals(a.type,b.type);
+		assertEquals(a.instance,b.instance);
+		assertEquals(a.action,b.action);
+		
+		assertEquals(a.roles.size(),b.roles.size());
+		for(String s: a.roles) {
+			assertTrue(b.roles.contains(s));
+		}
+	}
+
+	@Test
+	public void testUserRole() throws IOException {
+		UserRoleDAO.Data urd1 = new UserRoleDAO.Data();
+		urd1.user = "myname@abc.att.com";
+		urd1.role("org.osaaf.<pass>","my.role");
+		urd1.expires = new Date();
+
+		// Normal
+		ByteBuffer bb = urd1.bytify();
+		UserRoleDAO.Data urd2 = new UserRoleDAO.Data();
+		urd2.reconstitute(bb);
+		check(urd1,urd2);
+		
+		// A null
+		urd1.expires = null; 
+		urd1.role = null;
+		
+		bb = urd1.bytify();
+		urd2 = new UserRoleDAO.Data();
+		urd2.reconstitute(bb);
+		check(urd1,urd2);
+	}
+
+	private void check(UserRoleDAO.Data a, UserRoleDAO.Data b) {
+		assertEquals(a.user,b.user);
+		assertEquals(a.role,b.role);
+		assertEquals(a.expires,b.expires);
+	}
+
+	
+	@Test
+	public void testCred() throws IOException {
+		CredDAO.Data cd = new CredDAO.Data();
+		cd.id = "m55555@abc.att.com";
+		cd.ns = "org.osaaf.abc";
+		cd.type = 2;
+		cd.cred = ByteBuffer.wrap(new byte[]{1,34,5,3,25,0,2,5,3,4});
+		cd.expires = new Date();
+
+		// Normal
+		ByteBuffer bb = cd.bytify();
+		CredDAO.Data cd2 = new CredDAO.Data();
+		cd2.reconstitute(bb);
+		check(cd,cd2);
+		
+		// nulls
+		cd.expires = null;
+		cd.cred = null;
+		
+		bb = cd.bytify();
+		cd2 = new CredDAO.Data();
+		cd2.reconstitute(bb);
+		check(cd,cd2);
+
+	}
+
+	private void check(CredDAO.Data a, CredDAO.Data b) {
+		assertEquals(a.id,b.id);
+		assertEquals(a.ns,b.ns);
+		assertEquals(a.type,b.type);
+		if(a.cred==null) {
+			assertEquals(a.cred,b.cred); 
+		} else {
+			int l = a.cred.limit();
+			assertEquals(l,b.cred.limit());
+			for (int i=0;i<l;++i) {
+				assertEquals(a.cred.get(),b.cred.get());
+			}
+		}
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_CacheInfoDAO.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_CacheInfoDAO.java
new file mode 100644
index 0000000..7a1bd58
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_CacheInfoDAO.java
@@ -0,0 +1,63 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+import java.io.IOException;
+import java.util.Date;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.CIDAO;
+import org.onap.aaf.auth.dao.DAOException;
+import org.onap.aaf.auth.dao.cass.CacheInfoDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.Status;
+import org.onap.aaf.auth.env.AuthzTrans;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+import org.onap.aaf.misc.env.util.Chrono;
+
+import junit.framework.Assert;
+
+
+public class JU_CacheInfoDAO extends AbsJUCass {
+
+	@Test
+	public void test() throws DAOException, APIException, IOException {
+		CIDAO<AuthzTrans> id = new CacheInfoDAO(trans, cluster, AUTHZ);
+		Date date  = new Date();
+		
+		id.touch(trans, RoleDAO.TABLE,1);
+		try {
+			Thread.sleep(3000);
+		} catch (InterruptedException e) {
+		}
+		Result<Void> rid = id.check(trans);
+		Assert.assertEquals(rid.status,Status.OK);
+		Date[] dates = CacheInfoDAO.info.get(RoleDAO.TABLE);
+		if(dates.length>0 && dates[1]!=null) {
+			System.out.println(Chrono.dateStamp(dates[1]));
+			System.out.println(Chrono.dateStamp(date));
+			Assert.assertTrue(Math.abs(dates[1].getTime() - date.getTime())<20000); // allow for 4 seconds, given Remote DB
+		}
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_CertDAO.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_CertDAO.java
new file mode 100644
index 0000000..8e8ed6e
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_CertDAO.java
@@ -0,0 +1,103 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.security.NoSuchAlgorithmException;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.CertDAO;
+import org.onap.aaf.auth.dao.cass.CertDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+/**
+ * UserDAO unit test.
+ * User: tp007s
+ * Date: 7/19/13
+ */
+public class JU_CertDAO  extends AbsJUCass {
+	@Test
+	public void test() throws IOException, NoSuchAlgorithmException, APIException {
+		CertDAO cdao = new CertDAO(trans,cluster,"authz");
+		try {
+			// Create
+	        CertDAO.Data data = new CertDAO.Data();
+	        data.serial=new BigInteger("11839383");
+	        data.id = "m55555@tguard.att.com";
+	        data.x500="CN=ju_cert.dao.att.com, OU=AAF, O=\"ATT Services, Inc.\", L=Southfield, ST=Michigan, C=US";
+	        data.x509="I'm a cert";
+	        data.ca = "aaf";
+			cdao.create(trans,data);
+
+//	        Bytification
+	        ByteBuffer bb = data.bytify();
+	        Data bdata = new CertDAO.Data();
+	        bdata.reconstitute(bb);
+	        checkData1(data, bdata);
+
+			// Validate Read with key fields in Data
+			Result<List<CertDAO.Data>> rlcd = cdao.read(trans,data);
+			assertTrue(rlcd.isOKhasData());
+			for(CertDAO.Data d : rlcd.value) {
+				checkData1(data,d);
+			}
+
+			// Validate Read with key fields in Data
+			rlcd = cdao.read(trans,data.ca,data.serial);
+			assertTrue(rlcd.isOKhasData());
+			for(CertDAO.Data d : rlcd.value) {
+				checkData1(data,d);
+			}
+
+			// Update
+			data.id = "m66666.tguard.att.com";
+			cdao.update(trans,data);
+			rlcd = cdao.read(trans,data);
+			assertTrue(rlcd.isOKhasData());
+			for(CertDAO.Data d : rlcd.value) {
+				checkData1(data,d);
+			}			
+			
+			cdao.delete(trans,data, true);
+		} finally {
+			cdao.close(trans);
+		}
+
+		
+	}
+
+	private void checkData1(Data data, Data d) {
+		assertEquals(data.ca,d.ca);
+		assertEquals(data.serial,d.serial);
+		assertEquals(data.id,d.id);
+		assertEquals(data.x500,d.x500);
+		assertEquals(data.x509,d.x509);
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_CredDAO.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_CredDAO.java
new file mode 100644
index 0000000..bb88a2a
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_CredDAO.java
@@ -0,0 +1,250 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.security.NoSuchAlgorithmException;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.CredDAO;
+import org.onap.aaf.auth.dao.cass.CredDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+/**
+ * UserDAO unit test.
+ * User: tp007s
+ * Date: 7/19/13
+ */
+public class JU_CredDAO  extends AbsJUCass {
+	@Test
+	public void test() throws IOException, NoSuchAlgorithmException, APIException {
+		CredDAO udao = new CredDAO(trans,cluster,"authz");
+		try {
+			// Create
+	        CredDAO.Data data = new CredDAO.Data();
+	        data.id = "m55555@aaf.att.com";
+	        data.type = CredDAO.BASIC_AUTH;
+	        data.notes = "temp pass";
+	        data.cred      = ByteBuffer.wrap(userPassToBytes("m55555","mypass"));
+	        data.other = 12;
+	        data.expires = new Date(System.currentTimeMillis() + 60000*60*24*90);
+			udao.create(trans,data);
+			
+//	        Bytification
+	        ByteBuffer bb = data.bytify();
+	        Data bdata = new CredDAO.Data();
+	        bdata.reconstitute(bb);
+	        checkData1(data, bdata);
+
+			// Validate Read with key fields in Data
+			Result<List<CredDAO.Data>> rlcd = udao.read(trans,data);
+			assertTrue(rlcd.isOKhasData());
+			for(CredDAO.Data d : rlcd.value) {
+				checkData1(data,d);
+			}
+			
+			// Update
+			data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mynewpass"));
+			udao.update(trans,data);
+			rlcd = udao.read(trans,data);
+			assertTrue(rlcd.isOKhasData());
+			for(CredDAO.Data d : rlcd.value) {
+				checkData1(data,d);
+			}			
+			
+			udao.delete(trans,data, true);
+		} finally {
+			udao.close(trans);
+		}
+
+		
+	}
+
+	private void checkData1(Data data, Data d) {
+		assertEquals(data.id,d.id);
+		assertEquals(data.type,d.type);
+		assertEquals(data.ns,d.ns);
+		assertEquals(data.notes,d.notes);
+		assertEquals(data.cred,d.cred);
+		assertEquals(data.other,d.other);
+		assertEquals(data.expires,d.expires);
+	}
+
+//    private String                          CONST_myName = "MyName";
+//    public static final java.nio.ByteBuffer CONST_MY_CRED = get_CONST_MY_CRED();
+//    public static final int                 CONST_CRED_TYPE = 11;
+//
+//    public static final Date                CONST_UPDATE_DATE = new Date(System.currentTimeMillis()+60000*24);
+//    @Test
+//    public void test() {
+//        UserDAO ud = new UserDAO(trans, cluster,"authz");
+//        try {
+//            UserDAO.Data data = createPrototypeUserData();
+//            ud.create(trans, data);
+//
+//            // Validate Read with key fields in Data
+//            for(UserDAO.Data d : ud.read(trans, data)) {
+//                checkData1(data,d);
+//            }
+//
+//            // Validate readByName
+//            for(UserDAO.Data d : ud.read(trans, CONST_myName)) {
+//                checkData1(data,d);
+//            }
+//
+//            ud.delete(trans, data);
+//            List<UserDAO.Data> d_2 = ud.read(trans, CONST_myName);
+//
+//            // Validate that data was deleted
+//            assertEquals("User should not be found after deleted", 0, d_2.size() );
+//
+//            data = new UserDAO.Data();
+//            data.name = CONST_myName;
+//            data.cred = CONST_MY_CRED;
+//            data.cred_type= CONST_CRED_TYPE;
+//            data.expires = new Date(System.currentTimeMillis()+60000*24);
+//            final Result<UserDAO.Data> user = ud.r_create(trans, data);
+//            assertEquals("ud.createUser should work", Result.Status.OK, user.status);
+//
+//            checkDataIgnoreDateDiff(data, user.value);
+//
+//            // finally leave system in consistent state by deleting user again
+//            ud.delete(trans,data);
+//
+//        } catch (DAOException e) {
+//            e.printStackTrace();
+//            fail("Fail due to Exception");
+//        } finally {
+//            ud.close(trans);
+//        }
+//    }
+//
+//    private UserDAO.Data createPrototypeUserData() {
+//        UserDAO.Data data = new UserDAO.Data();
+//        data.name = CONST_myName;
+//
+//        data.cred_type = CONST_CRED_TYPE;
+//        data.cred      = CONST_MY_CRED;
+//        data.expires = CONST_UPDATE_DATE;
+//        return data;
+//    }
+//
+//    //    @Test
+//    //    public void testReadByUser() throws Exception {
+//    //           // this test was done above in our super test, since it uses the same setup
+//    //    }
+//
+//    @Test
+//    public void testFunctionCreateUser() throws Exception {
+//        String name = "roger_rabbit";
+//        Integer credType = CONST_CRED_TYPE;
+//        java.nio.ByteBuffer cred = CONST_MY_CRED;
+//        final UserDAO ud = new UserDAO(trans, cluster,"authz");
+//        final UserDAO.Data data = createPrototypeUserData();
+//        Result<UserDAO.Data> ret = ud.r_create(trans, data);
+//        Result<List<Data>> byUserNameLookup = ud.r_read(trans, name);
+//        
+//        assertEquals("sanity test w/ different username (different than other test cases) failed", name, byUserNameLookup.value.get(0).name);
+//        assertEquals("delete roger_rabbit failed", true, ud.delete(trans, byUserNameLookup.value.get(0)));
+//    }
+//
+//    @Test
+//    public void testLowLevelCassandraCreateData_Given_UserAlreadyPresent_ShouldPass() throws Exception {
+//        UserDAO ud = new UserDAO(trans, cluster,"authz");
+//
+//        final UserDAO.Data data = createPrototypeUserData();
+//        final UserDAO.Data data1 = ud.create(trans, data);
+//        final UserDAO.Data data2 = ud.create(trans, data);
+//
+//        assertNotNull(data1);
+//        assertNotNull(data2);
+//
+//        assertEquals(CONST_myName, data1.name);
+//        assertEquals(CONST_myName, data2.name);
+//    }
+//
+//    @Test
+//    public void testCreateUser_Given_UserAlreadyPresent_ShouldFail() throws Exception {
+//        UserDAO ud = new UserDAO(trans, cluster,"authz");
+//
+//        final UserDAO.Data data = createPrototypeUserData();
+//
+//        // make sure that some prev test did not leave the user in the DB
+//        ud.delete(trans, data);
+//
+//        // attempt to create same user twice !!!
+//        
+//        final Result<UserDAO.Data> data1 = ud.r_create(trans, data);
+//        final Result<UserDAO.Data> data2 = ud.r_create(trans, data);
+//
+//        assertNotNull(data1);
+//        assertNotNull(data2);
+//
+//        assertEquals(true,   Result.Status.OK == data1.status);
+//        assertEquals(false,  Result.Status.OK == data2.status);
+//    }
+//
+//    private void checkData1(UserDAO.Data data, UserDAO.Data d) {
+//        data.name = CONST_myName;
+//
+//        data.cred_type = CONST_CRED_TYPE;
+//        data.cred      = CONST_MY_CRED;
+//        data.expires   = CONST_UPDATE_DATE;
+//
+//        assertEquals(data.name, d.name);
+//        assertEquals(data.cred_type, d.cred_type);
+//        assertEquals(data.cred, d.cred);
+//        assertEquals(data.expires, d.expires);
+//
+//    }
+//
+//    private void checkDataIgnoreDateDiff(UserDAO.Data data, UserDAO.Data d) {
+//        data.name = CONST_myName;
+//
+//        data.cred_type = CONST_CRED_TYPE;
+//        data.cred      = CONST_MY_CRED;
+//        data.expires   = CONST_UPDATE_DATE;
+//
+//        assertEquals(data.name, d.name);
+//        assertEquals(data.cred_type, d.cred_type);
+//        assertEquals(data.cred, d.cred);
+//         // we allow dates to be different, e.g. high level calls e.g. createUser sets the date itself.
+//        //assertEquals(data.updated, d.updated);
+//
+//    }
+//
+//    /**
+//     * Get a CONST_MY_CRED ByteBuffer, which is the java type for a cass blob.
+//     * @return
+//     */
+//    private static java.nio.ByteBuffer get_CONST_MY_CRED() {
+//     return ByteBuffer.wrap("Hello".getBytes());
+//    }
+//
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_DelegateDAO.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_DelegateDAO.java
new file mode 100644
index 0000000..a518e50
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_DelegateDAO.java
@@ -0,0 +1,106 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.nio.ByteBuffer;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.DelegateDAO;
+import org.onap.aaf.auth.dao.cass.DelegateDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+
+
+public class JU_DelegateDAO  extends AbsJUCass {
+	@Test
+	public void testCRUD() throws Exception {
+		DelegateDAO dao = new DelegateDAO(trans, cluster, AUTHZ);
+		DelegateDAO.Data data = new DelegateDAO.Data();
+		data.user = "myname";
+		data.delegate = "yourname";
+		data.expires = new Date();
+		
+//        Bytification
+        ByteBuffer bb = data.bytify();
+        Data bdata = new DelegateDAO.Data();
+        bdata.reconstitute(bb);
+        compare(data, bdata);
+
+		try {
+			// Test create
+			Result<Data> ddcr = dao.create(trans,data);
+			assertTrue(ddcr.isOK());
+			
+			
+			// Read by User
+			Result<List<DelegateDAO.Data>> records = dao.read(trans,data.user);
+			assertTrue(records.isOKhasData());
+			for(DelegateDAO.Data rdata : records.value) 
+				compare(data,rdata);
+
+			// Read by Delegate
+			records = dao.readByDelegate(trans,data.delegate);
+			assertTrue(records.isOKhasData());
+			for(DelegateDAO.Data rdata : records.value) 
+				compare(data,rdata);
+			
+			// Update
+			data.delegate = "hisname";
+			data.expires = new Date();
+			assertTrue(dao.update(trans, data).isOK());
+
+			// Read by User
+			records = dao.read(trans,data.user);
+			assertTrue(records.isOKhasData());
+			for(DelegateDAO.Data rdata : records.value) 
+				compare(data,rdata);
+
+			// Read by Delegate
+			records = dao.readByDelegate(trans,data.delegate);
+			assertTrue(records.isOKhasData());
+			for(DelegateDAO.Data rdata : records.value) 
+				compare(data,rdata);
+
+			// Test delete
+			dao.delete(trans,data, true);
+			records = dao.read(trans,data.user);
+			assertTrue(records.isEmpty());
+			
+			
+		} finally {
+			dao.close(trans);
+		}
+	}
+	
+	private void compare(Data d1, Data d2) {
+		assertEquals(d1.user, d2.user);
+		assertEquals(d1.delegate, d2.delegate);
+		assertEquals(d1.expires,d2.expires);
+	}
+
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_FastCalling.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_FastCalling.java
new file mode 100644
index 0000000..d7886d3
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_FastCalling.java
@@ -0,0 +1,89 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.security.NoSuchAlgorithmException;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.CredDAO;
+import org.onap.aaf.auth.dao.cass.CredDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+public class JU_FastCalling extends AbsJUCass {
+
+	@Test
+	public void test() throws IOException, NoSuchAlgorithmException, APIException {
+		trans.setProperty("cassandra.writeConsistency.cred","ONE");
+		
+		CredDAO udao = new CredDAO(env.newTransNoAvg(),cluster,"authz");
+		System.out.println("Starting calls");
+		for(iterations=0;iterations<8;++iterations) {
+			try {
+				// Create
+		        CredDAO.Data data = new CredDAO.Data();
+		        data.id = "m55555@aaf.att.com";
+		        data.type = CredDAO.BASIC_AUTH;
+		        data.cred      = ByteBuffer.wrap(userPassToBytes("m55555","mypass"));
+		        data.expires = new Date(System.currentTimeMillis() + 60000*60*24*90);
+				udao.create(trans,data);
+				
+				// Validate Read with key fields in Data
+				Result<List<CredDAO.Data>> rlcd = udao.read(trans,data);
+				assertTrue(rlcd.isOKhasData());
+				for(CredDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}
+				
+				// Update
+				data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mynewpass"));
+				udao.update(trans,data);
+				rlcd = udao.read(trans,data);
+				assertTrue(rlcd.isOKhasData());
+				for(CredDAO.Data d : rlcd.value) {
+					checkData1(data,d);
+				}			
+				
+				udao.delete(trans,data, true);
+			} finally {
+				updateTotals();
+				newTrans();
+			}
+		}
+
+	}
+
+	private void checkData1(Data data, Data d) {
+		assertEquals(data.id,d.id);
+		assertEquals(data.type,d.type);
+		assertEquals(data.cred,d.cred);
+		assertEquals(data.expires,d.expires);
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_HistoryDAO.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_HistoryDAO.java
new file mode 100644
index 0000000..0b552a4
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_HistoryDAO.java
@@ -0,0 +1,153 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Random;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.HistoryDAO;
+import org.onap.aaf.auth.layer.Result;
+
+public class JU_HistoryDAO  extends AbsJUCass {
+	
+	@Test
+	public void testCreate() throws Exception {
+		HistoryDAO historyDAO = new HistoryDAO(trans, cluster, AUTHZ);
+		HistoryDAO.Data data = createHistoryData();
+		
+		try {
+			historyDAO.create(trans,data);			
+			Thread.sleep(200);// History Create is Async
+			Result<List<HistoryDAO.Data>> records = historyDAO.readByUser(trans,data.user,data.yr_mon);
+			assertTrue(records.isOKhasData());
+			for(HistoryDAO.Data d : records.value) {
+				assertHistory(data, d);
+			}
+		} finally {
+			historyDAO.close(trans);
+		}
+	}
+	
+	@Test
+	public void tesReadByUser() throws Exception {
+		HistoryDAO historyDAO = new HistoryDAO(trans,cluster, AUTHZ);
+		HistoryDAO.Data data = createHistoryData();
+		
+		try {
+			historyDAO.create(trans,data);
+			Thread.sleep(200);// History Create is Async
+			Result<List<HistoryDAO.Data>> records = historyDAO.readByUser(trans, data.user,data.yr_mon);
+			assertTrue(records.isOKhasData());
+			for(HistoryDAO.Data d : records.value) {
+				assertHistory(data, d);
+			}
+		} finally {
+			historyDAO.close(trans);
+		}
+	}
+	
+/*
+	@Test
+	public void readByUserAndMonth() throws Exception {
+		HistoryDAO historyDAO = new HistoryDAO(trans,cluster, AUTHZ);
+		HistoryDAO.Data data = createHistoryData();
+		
+		try {
+			historyDAO.create(trans,data);			
+			Thread.sleep(200);// History Create is Async
+			Result<List<HistoryDAO.Data>> records = historyDAO.readByUserAndMonth(trans,
+					data.user, Integer.valueOf(String.valueOf(data.yr_mon).substring(0, 4)),
+					Integer.valueOf(String.valueOf(data.yr_mon).substring(4, 6)));
+			assertTrue(records.isOKhasData());
+			for(HistoryDAO.Data d : records.value) {
+				assertHistory(data, d);
+			}
+		} finally {
+			historyDAO.close(trans);
+		}
+	}
+*/	
+	//TODO readadd this
+//	@Test
+//	public void readByUserAndDay() throws Exception {
+//		HistoryDAO historyDAO = new HistoryDAO(trans, cluster, AUTHZ);
+//		HistoryDAO.Data data = createHistoryData();
+//		
+//		try {
+//			historyDAO.create(trans, data);		
+//			Thread.sleep(200);// History Create is Async
+//			
+//			String dayTime = String.valueOf(data.day_time);
+//			String day = null;
+//			if (dayTime.length() < 8)
+//				day = dayTime.substring(0, 1);
+//			else 
+//				day = dayTime.substring(0, 2);
+//			
+//			List<HistoryDAO.Data> records = historyDAO.readByUserBetweenDates(trans,
+//							data.user, Integer.valueOf(String.valueOf(data.yr_mon).substring(0, 4)),
+//							Integer.valueOf(String.valueOf(data.yr_mon).substring(4, 6)),
+//							Integer.valueOf(day), 0);
+//			assertEquals(1,records.size());
+//			for(HistoryDAO.Data d : records) {
+//				assertHistory(data, d);
+//			}
+//		} finally {
+//			historyDAO.close(trans);
+//		}
+//	}
+	private HistoryDAO.Data createHistoryData() {
+		HistoryDAO.Data data = HistoryDAO.newInitedData();
+		Random random = new Random();
+		data.user = "test" + random.nextInt();
+		data.action = "add";
+		data.target = "history";
+		data.memo = "adding a row into history table";
+//		data.detail().put("id", "test");
+//		data.detail().put("name", "test");
+		//String temp = "Test Blob Message";
+		data.reconstruct = ByteBuffer.wrap("Temp Blob Message".getBytes());		
+		return data;
+	}
+	
+	private void assertHistory(HistoryDAO.Data ip, HistoryDAO.Data op) {
+		assertEquals(ip.yr_mon, op.yr_mon);		
+//		assertEquals(ip.day_time, op.day_time);		
+		assertEquals(ip.user, op.user);		
+		assertEquals(ip.action, op.action);
+		assertEquals(ip.target, op.target);
+		assertEquals(ip.memo, op.memo);
+		//TODO : have to see if third party assert utility can be used
+//		assertTrue(CollectionUtils.isEqualCollection(ip.detail, op.detail));
+//		for (String key : ip.detail().keySet()) {
+//			assertNotNull(op.detail().get(key));
+//		}
+		assertNotNull(op.reconstruct);
+	}
+	
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_NsDAO.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_NsDAO.java
new file mode 100644
index 0000000..eb06495
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_NsDAO.java
@@ -0,0 +1,185 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.NsDAO;
+import org.onap.aaf.auth.dao.cass.NsType;
+import org.onap.aaf.auth.dao.cass.NsDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+
+public class JU_NsDAO extends AbsJUCass {
+	private static final String CRM = "ju_crm";
+	private static final String SWM = "ju_swm";
+
+	@Test
+	public void test() throws APIException, IOException  {
+		NsDAO nsd = new NsDAO(trans, cluster, AUTHZ);
+		try {
+			final String nsparent = "com.test";
+			final String ns1 = nsparent +".ju_ns";
+			final String ns2 = nsparent + ".ju_ns2";
+			
+			Map<String,String> oAttribs = new HashMap<String,String>();
+			oAttribs.put(SWM, "swm_data");
+			oAttribs.put(CRM, "crm_data");
+			Data data = new NsDAO.Data();
+			data.name = ns1;
+			data.type = NsType.APP.type;
+			data.attrib(true).putAll(oAttribs);
+			
+
+			Result<List<Data>> rdrr;
+
+			// CREATE
+			Result<Data> rdc = nsd.create(trans, data);
+			assertTrue(rdc.isOK());
+			
+			try {
+//		        Bytification
+		        ByteBuffer bb = data.bytify();
+		        Data bdata = new NsDAO.Data();
+		        bdata.reconstitute(bb);
+		        compare(data, bdata);
+
+				// Test READ by Object
+				rdrr = nsd.read(trans, data);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				Data d = rdrr.value.get(0);
+				assertEquals(d.name,data.name);
+				assertEquals(d.type,data.type);
+				attribsEqual(d.attrib(false),data.attrib(false));
+				attribsEqual(oAttribs,data.attrib(false));
+				
+				// Test Read by Key
+				rdrr = nsd.read(trans, data.name);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				d = rdrr.value.get(0);
+				assertEquals(d.name,data.name);
+				assertEquals(d.type,data.type);
+				attribsEqual(d.attrib(false),data.attrib(false));
+				attribsEqual(oAttribs,data.attrib(false));
+				
+				// Read NS by Type
+				Result<Set<String>> rtypes = nsd.readNsByAttrib(trans, SWM);
+				Set<String> types;
+				if(rtypes.notOK()) {
+					throw new IOException(rtypes.errorString());
+				} else {
+					types = rtypes.value;
+				}
+				assertEquals(1,types.size());
+				assertEquals(true,types.contains(ns1));
+				
+				// Add second NS to test list of data returned
+				Data data2 = new NsDAO.Data();
+				data2.name = ns2;
+				data2.type = 3; // app
+				Result<Data> rdc2 = nsd.create(trans, data2);
+				assertTrue(rdc2.isOK());
+				
+					// Interrupt - test PARENT
+					Result<List<Data>> rdchildren = nsd.getChildren(trans, "com.test");
+					assertTrue(rdchildren.isOKhasData());
+					boolean child1 = false;
+					boolean child2 = false;
+					for(Data dchild : rdchildren.value) {
+						if(ns1.equals(dchild.name))child1=true;
+						if(ns2.equals(dchild.name))child2=true;
+					}
+					assertTrue(child1);
+					assertTrue(child2);
+
+				// FINISH DATA 2 by deleting
+				Result<Void> rddr = nsd.delete(trans, data2, true);
+				assertTrue(rddr.isOK());
+
+				// ADD DESCRIPTION
+				String description = "This is my test Namespace";
+				assertFalse(description.equalsIgnoreCase(data.description));
+				
+				Result<Void> addDesc = nsd.addDescription(trans, data.name, description);
+				assertTrue(addDesc.isOK());
+				rdrr = nsd.read(trans, data);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				assertEquals(rdrr.value.get(0).description,description);
+				
+				// UPDATE
+				String newDescription = "zz1234 Owns This Namespace Now";
+				oAttribs.put("mso", "mso_data");
+				data.attrib(true).put("mso", "mso_data");
+				data.description = newDescription;
+				Result<Void> update = nsd.update(trans, data);
+				assertTrue(update.isOK());
+				rdrr = nsd.read(trans, data);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				assertEquals(rdrr.value.get(0).description,newDescription);
+				attribsEqual(oAttribs, rdrr.value.get(0).attrib);
+				
+				
+			} catch (IOException e) {
+				e.printStackTrace();
+			} finally {
+				// DELETE
+				Result<Void> rddr = nsd.delete(trans, data, true);
+				assertTrue(rddr.isOK());
+				rdrr = nsd.read(trans, data);
+				assertTrue(rdrr.isOK() && rdrr.isEmpty());
+				assertEquals(rdrr.value.size(),0);
+			}
+		} finally {
+			nsd.close(trans);
+		}
+	}
+
+	private void compare(NsDAO.Data d, NsDAO.Data data) {
+		assertEquals(d.name,data.name);
+		assertEquals(d.type,data.type);
+		attribsEqual(d.attrib(false),data.attrib(false));
+		attribsEqual(d.attrib(false),data.attrib(false));
+	}
+	
+	private void attribsEqual(Map<String,String> aa, Map<String,String> ba) {
+		assertEquals(aa.size(),ba.size());
+		for(Entry<String, String> es : aa.entrySet()) {
+			assertEquals(es.getValue(),ba.get(es.getKey()));
+		}
+	}
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_NsType.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_NsType.java
new file mode 100644
index 0000000..06e5f0e
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_NsType.java
@@ -0,0 +1,58 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.AfterClass;
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.NsType;
+
+public class JU_NsType {
+
+	@AfterClass
+	public static void tearDownAfterClass() throws Exception {
+	}
+
+	@Test
+	public void test() {
+		NsType nt,nt2;
+		String[] tests = new String[] {"DOT","ROOT","COMPANY","APP","STACKED_APP","STACK"};
+		for(String s : tests) {
+			nt = NsType.valueOf(s);
+			assertEquals(s,nt.name());
+			
+			nt2 = NsType.fromString(s);
+			assertEquals(nt,nt2);
+			
+			int t = nt.type;
+			nt2 = NsType.fromType(t);
+			assertEquals(nt,nt2);
+		}
+		
+		nt  = NsType.fromType(Integer.MIN_VALUE);
+		assertEquals(nt,NsType.UNKNOWN);
+		nt = NsType.fromString("Garbage");
+		assertEquals(nt,NsType.UNKNOWN);
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_PermDAO.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_PermDAO.java
new file mode 100644
index 0000000..1a407af
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_PermDAO.java
@@ -0,0 +1,174 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Set;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.PermDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+/**
+ * Test the PermissionDAO
+ * 
+ * Utilize AbsJUCass to initialize and pre-load Cass
+ * 
+ *
+ */
+public class JU_PermDAO extends AbsJUCass{
+
+	@Test
+	public void test() throws APIException, IOException {
+		PermDAO pd = new PermDAO(trans,cluster,"authz");
+		try {
+			PermDAO.Data data = new PermDAO.Data();
+			data.ns = "com.test.ju_perm";
+			data.type = "MyType";
+			data.instance = "MyInstance";
+			data.action = "MyAction";
+			data.roles(true).add(data.ns + ".dev");
+			
+
+
+			// CREATE
+			Result<Data> rpdc = pd.create(trans,data);
+			assertTrue(rpdc.isOK());
+
+			Result<List<PermDAO.Data>> rlpd;
+			try {
+//		        Bytification
+		        ByteBuffer bb = data.bytify();
+		        Data bdata = new PermDAO.Data();
+		        bdata.reconstitute(bb);
+		        compare(data, bdata);
+
+				// Validate Read with key fields in Data
+				if((rlpd = pd.read(trans,data)).isOK())
+				  for(PermDAO.Data d : rlpd.value) {
+					checkData1(data,d);
+				}
+				
+				// Validate readByName
+				if((rlpd = pd.readByType(trans,data.ns, data.type)).isOK())
+				  for(PermDAO.Data d : rlpd.value) {
+					checkData1(data,d);
+				}
+				
+				// Add Role
+				RoleDAO.Data role = new RoleDAO.Data();
+				role.ns = data.ns;
+				role.name = "test";
+				
+				Result<Void> rvpd = pd.addRole(trans, data, role.fullName());
+				assertTrue(rvpd.isOK());
+				// Validate Read with key fields in Data
+				if((rlpd = pd.read(trans,data)).isOK())
+				  for(PermDAO.Data d : rlpd.value) {
+					checkData2(data,d);
+				  }
+				
+				// Remove Role
+				rvpd = pd.delRole(trans, data, role.fullName());
+				assertTrue(rvpd.isOK());
+				if((rlpd = pd.read(trans,data)).isOK())
+					for(PermDAO.Data d : rlpd.value) {
+						checkData1(data,d);
+					}
+				
+				// Add Child
+				Data data2 = new Data();
+				data2.ns = data.ns;
+				data2.type = data.type + ".2";
+				data2.instance = data.instance;
+				data2.action = data.action;
+				
+				rpdc = pd.create(trans, data2);
+				assertTrue(rpdc.isOK());
+				try {
+					rlpd = pd.readChildren(trans, data.ns,data.type);
+					assertTrue(rlpd.isOKhasData());
+					assertEquals(rlpd.value.size(),1);
+					assertEquals(rlpd.value.get(0).fullType(),data2.fullType());
+				} finally {
+					// Delete Child
+					pd.delete(trans, data2,true);
+
+				}
+			} catch (IOException e) {
+				e.printStackTrace();
+			} finally {
+				// DELETE
+				Result<Void> rpdd = pd.delete(trans,data,true);
+				assertTrue(rpdd.isOK());
+				rlpd = pd.read(trans, data);
+				assertTrue(rlpd.isOK() && rlpd.isEmpty());
+				assertEquals(rlpd.value.size(),0);
+			}
+		} finally {
+			pd.close(trans);
+		}
+	}
+
+	private void compare(Data a, Data b) {
+		assertEquals(a.ns,b.ns);
+		assertEquals(a.type,b.type);
+		assertEquals(a.instance,b.instance);
+		assertEquals(a.action,b.action);
+		assertEquals(a.roles(false).size(),b.roles(false).size());
+		for(String s: a.roles(false)) {
+			assertTrue(b.roles(false).contains(s));
+		}
+	}
+	private void checkData1(Data data, Data d) {
+		assertEquals(data.ns,d.ns);
+		assertEquals(data.type,d.type);
+		assertEquals(data.instance,d.instance);
+		assertEquals(data.action,d.action);
+		
+		Set<String> ss = d.roles(true);
+		assertEquals(1,ss.size());
+		assertTrue(ss.contains(data.ns+".dev"));
+	}
+	
+	private void checkData2(Data data, Data d) {
+		assertEquals(data.ns,d.ns);
+		assertEquals(data.type,d.type);
+		assertEquals(data.instance,d.instance);
+		assertEquals(data.action,d.action);
+		
+		Set<String> ss = d.roles(true);
+		assertEquals(2,ss.size());
+		assertTrue(ss.contains(data.ns+".dev"));
+		assertTrue(ss.contains(data.ns+".test"));
+	}
+
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_RoleDAO.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_RoleDAO.java
new file mode 100644
index 0000000..fda818f
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_RoleDAO.java
@@ -0,0 +1,137 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.dao.aaf.test;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import org.junit.Test;
+import org.onap.aaf.auth.dao.cass.PermDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO;
+import org.onap.aaf.auth.dao.cass.RoleDAO.Data;
+import org.onap.aaf.auth.layer.Result;
+import org.onap.aaf.misc.env.APIException;
+
+
+public class JU_RoleDAO extends AbsJUCass {
+
+	@Test
+	public void test()  throws IOException, APIException {
+		RoleDAO rd = new RoleDAO(trans, cluster, AUTHZ);
+		try {
+			Data data = new RoleDAO.Data();
+			data.ns = "com.test.ju_role";
+			data.name = "role1";
+
+//	        Bytification
+	        ByteBuffer bb = data.bytify();
+	        Data bdata = new RoleDAO.Data();
+	        bdata.reconstitute(bb);
+	        compare(data, bdata);
+
+			// CREATE
+			Result<Data> rdc = rd.create(trans, data);
+			assertTrue(rdc.isOK());
+			Result<List<Data>> rdrr;
+			try {
+				// READ
+				rdrr = rd.read(trans, data);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				Data d = rdrr.value.get(0);
+				assertEquals(d.perms.size(),0);
+				assertEquals(d.name,data.name);
+				assertEquals(d.ns,data.ns);
+
+				PermDAO.Data perm = new PermDAO.Data();
+				perm.ns = data.ns;
+				perm.type = "Perm";
+				perm.instance = "perm1";
+				perm.action = "write";
+				
+				// ADD Perm
+				Result<Void> rdar = rd.addPerm(trans, data, perm);
+				assertTrue(rdar.isOK());
+				rdrr = rd.read(trans, data);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				assertEquals(rdrr.value.get(0).perms.size(),1);
+				assertTrue(rdrr.value.get(0).perms.contains(perm.encode()));
+				
+				// DEL Perm
+				rdar = rd.delPerm(trans, data,perm);
+				assertTrue(rdar.isOK());
+				rdrr = rd.read(trans, data);
+				assertTrue(rdrr.isOKhasData());
+				assertEquals(rdrr.value.size(),1);
+				assertEquals(rdrr.value.get(0).perms.size(),0);
+
+				// Add Child
+				Data data2 = new Data();
+				data2.ns = data.ns;
+				data2.name = data.name + ".2";
+				
+				rdc = rd.create(trans, data2);
+				assertTrue(rdc.isOK());
+				try {
+					rdrr = rd.readChildren(trans, data.ns,data.name);
+					assertTrue(rdrr.isOKhasData());
+					assertEquals(rdrr.value.size(),1);
+					assertEquals(rdrr.value.get(0).name,data.name + ".2");
+					
+					rdrr = rd.readChildren(trans, data.ns,"*");
+					assertTrue(rdrr.isOKhasData());
+					assertEquals(rdrr.value.size(),2);
+
+				} finally {
+					// Delete Child
+					rd.delete(trans, data2, true);
+				}
+	
+			} finally {
+				// DELETE
+				Result<Void> rddr = rd.delete(trans, data, true);
+				assertTrue(rddr.isOK());
+				rdrr = rd.read(trans, data);
+				assertTrue(rdrr.isOK() && rdrr.isEmpty());
+				assertEquals(rdrr.value.size(),0);
+			}
+		} finally {
+			rd.close(trans);
+		}
+	}
+
+	private void compare(Data a, Data b) {
+		assertEquals(a.name,b.name);
+		assertEquals(a.description, b.description);
+		assertEquals(a.ns,b.ns);
+		assertEquals(a.perms(false).size(),b.perms(false).size());
+		for(String p : a.perms(false)) {
+			assertTrue(b.perms(false).contains(p));
+		}
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectAAFLur.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectAAFLur.java
new file mode 100644
index 0000000..79d30c4
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectAAFLur.java
@@ -0,0 +1,63 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.direct.test;
+
+import static org.junit.Assert.assertTrue;
+
+import java.security.Principal;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.onap.aaf.auth.dao.hl.Question;
+import org.onap.aaf.auth.direct.DirectAAFLur;
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.cadi.Permission;
+import org.powermock.modules.junit4.PowerMockRunner;
+@RunWith(PowerMockRunner.class)
+public class JU_DirectAAFLur {
+	
+public static AuthzEnv env;
+public static Question question;
+public DirectAAFLur directAAFLur;
+
+
+
+	@Before
+	public void setUp()
+	{
+	directAAFLur = new DirectAAFLur(env, question);	
+	}
+	
+	@Test
+	public void testFish()
+	{
+		
+	Principal bait = null;
+	Permission pond=null;
+	directAAFLur.fish(bait, pond);	
+	
+	assertTrue(true);
+		
+	}
+	
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectAAFUserPass.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectAAFUserPass.java
new file mode 100644
index 0000000..6a25d99
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectAAFUserPass.java
@@ -0,0 +1,84 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.direct.test;
+
+import static org.junit.Assert.*;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.mockito.Mockito.*;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+import org.onap.aaf.auth.dao.hl.Question;
+import org.onap.aaf.auth.direct.DirectAAFUserPass;
+import org.onap.aaf.auth.env.AuthzEnv;
+import org.onap.aaf.cadi.CredVal.Type;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@RunWith(PowerMockRunner.class)
+public class JU_DirectAAFUserPass {
+
+	// TODO: Ian - This test is in shambles. fix it
+
+	//public static AuthzEnv env;
+	//public static Question question;
+	public DirectAAFUserPass directAAFUserPass;
+
+	@Mock
+	AuthzEnv env;
+
+	@Mock
+	Question question;
+
+	String user;
+
+	Type type; 
+
+	byte[] pass;
+
+	@Before
+	public void setUp() {
+		directAAFUserPass = new DirectAAFUserPass(env, question);
+	}
+
+	@Test
+	public void testvalidate(){
+
+		//	Boolean bolVal =  directAAFUserPass.validate(user, type, pass);
+		//	assertEquals((bolVal==null),true);
+
+		assertTrue(true);
+
+	}
+
+	@Test
+	public void notYetTested() {
+		fail("Tests not yet implemented");
+	}
+
+}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectCertIdentity.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectCertIdentity.java
new file mode 100644
index 0000000..07cd7ae
--- /dev/null
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectCertIdentity.java
@@ -0,0 +1,71 @@
+/*******************************************************************************
+ * ============LICENSE_START====================================================
+ * * org.onap.aaf
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * * 
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * * 
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * *
+ ******************************************************************************/
+package org.onap.aaf.auth.direct.test;
+
+import static org.junit.Assert.*;
+
+import java.security.Principal;
+import java.security.cert.CertificateException;
+import java.security.cert.X509Certificate;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.onap.aaf.auth.dao.cached.CachedCertDAO;
+import org.onap.aaf.auth.direct.DirectCertIdentity;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+@RunWith(PowerMockRunner.class)
+public class JU_DirectCertIdentity {
+	
+	public DirectCertIdentity directCertIdentity;
+	
+	@Before
+	public void setUp(){
+		directCertIdentity = new DirectCertIdentity();
+	}
+
+
+	@Mock
+	HttpServletRequest req;
+	X509Certificate cert;
+	byte[] _certBytes;
+	
+	@Test
+	public void testidentity(){
+		
+		try {
+		Principal p = directCertIdentity.identity(req, cert, _certBytes);
+		assertEquals(( (p) == null),true);
+			
+		} catch (CertificateException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+		//assertTrue(true);
+		
+	}
+
+}