Update project structure to org.onap.aaf

Update the project structure of the authz module in AAF from
com.att to org.onap.aaf, and add distribution management
and repository configuration.

Issue-ID: AAF-21
Change-Id: Ia2486954e99f2bd60f18122ed60d32d5590781e9
Signed-off-by: sg481n <sg481n@att.com>
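
In practice the relocation is a package and import rename; a minimal
sketch (the file and old package shown are illustrative only):

    // before: package com.att.dao;
    // after:
    package org.onap.aaf.dao;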
diff --git a/authz-cass/pom.xml b/authz-cass/pom.xml
new file mode 100644
index 0000000..f1e34a7
--- /dev/null
+++ b/authz-cass/pom.xml
@@ -0,0 +1,204 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<!--

+  ============LICENSE_START====================================================

+  * org.onap.aaf

+  * ===========================================================================

+  * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+  * ===========================================================================

+  * Licensed under the Apache License, Version 2.0 (the "License");

+  * you may not use this file except in compliance with the License.

+  * You may obtain a copy of the License at

+  * 

+  *      http://www.apache.org/licenses/LICENSE-2.0

+  * 

+  * Unless required by applicable law or agreed to in writing, software

+  * distributed under the License is distributed on an "AS IS" BASIS,

+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+  * See the License for the specific language governing permissions and

+  * limitations under the License.

+  * ============LICENSE_END====================================================

+  *

+  * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+  *

+-->

+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"

+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

+	<modelVersion>4.0.0</modelVersion>

+	<parent>

+		<groupId>org.onap.aaf.authz</groupId>

+		<artifactId>parent</artifactId>

+		<version>1.0.0-SNAPSHOT</version>

+		<relativePath>../pom.xml</relativePath>

+	</parent>

+		

+	<artifactId>authz-cass</artifactId>

+	<name>Authz Cass</name>

+	<description>Cassandra DAOs for Authz</description>

+	<packaging>jar</packaging>

+	<url>https://github.com/att/AAF</url>

+	<licenses>

+		<license>

+		<name>Apache License, Version 2.0</name>

+		<url>http://www.apache.org/licenses/LICENSE-2.0</url>

+		</license>

+	</licenses>

+	<developers>

+		<developer>

+		<name>Jonathan Gathman</name>

+		<email></email>

+	<organization>ATT</organization>

+	<organizationUrl></organizationUrl>

+		</developer>

+	</developers>

+	<properties>

+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

+		<project.cadiVersion>1.0.0-SNAPSHOT</project.cadiVersion>

+        <nexusproxy>https://nexus.onap.org</nexusproxy>

+		<snapshotNexusPath>/content/repositories/snapshots/</snapshotNexusPath>

+		<releaseNexusPath>/content/repositories/releases/</releaseNexusPath>

+		<stagingNexusPath>/content/repositories/staging/</stagingNexusPath>

+		<sitePath>/content/sites/site/${project.groupId}/${project.artifactId}/${project.version}</sitePath>

+	</properties>

+	<dependencies>

+		<dependency>

+			<groupId>org.onap.aaf.authz</groupId>

+			<artifactId>authz-core</artifactId>

+		</dependency>

+

+		<dependency>

+			<groupId>org.onap.aaf.cadi</groupId>

+			<artifactId>cadi-aaf</artifactId>

+		</dependency>

+

+		<dependency>

+			<groupId>com.datastax.cassandra</groupId>

+			<artifactId>cassandra-driver-core</artifactId>

+		</dependency>	

+		

+		<!-- Cassandra prefers Snappy and LZ4 libs for performance -->

+		<dependency>

+		  <groupId>org.xerial.snappy</groupId>

+		  <artifactId>snappy-java</artifactId>

+		  <version>1.1.1-M1</version>

+		</dependency>

+		

+		<dependency>

+		  <groupId>net.jpountz.lz4</groupId>

+		  <artifactId>lz4</artifactId>

+		  <version>1.2.0</version>

+		</dependency>

+		

+		<dependency>

+          <groupId>com.googlecode.jcsv</groupId>

+          <artifactId>jcsv</artifactId>

+          <version>1.4.0</version>

+		</dependency>

+		

+		<dependency>

+			<groupId>org.slf4j</groupId>

+			<artifactId>slf4j-log4j12</artifactId>

+			<scope>test</scope>

+		</dependency>

+		

+	

+	</dependencies>

+	<build>

+		<plugins>

+			<plugin>

+				<groupId>org.apache.maven.plugins</groupId>

+				<artifactId>maven-jarsigner-plugin</artifactId>

+			</plugin>

+			<plugin>

+				<groupId>org.apache.maven.plugins</groupId>

+				<artifactId>maven-deploy-plugin</artifactId>

+		    </plugin>

+		

+			<plugin>

+				<groupId>org.apache.maven.plugins</groupId>

+				<artifactId>maven-javadoc-plugin</artifactId>

+				<configuration>

+					<failOnError>false</failOnError>

+				</configuration>

+				<executions>

+					<execution>

+						<id>attach-javadocs</id>

+						<goals>

+							<goal>jar</goal>

+						</goals>

+					</execution>

+				</executions>

+			</plugin>


+			<plugin>

+				<groupId>org.apache.maven.plugins</groupId>

+				<artifactId>maven-source-plugin</artifactId>

+				<version>2.2.1</version>

+				<executions>

+					<execution>

+						<id>attach-sources</id>

+						<goals>

+							<goal>jar-no-fork</goal>

+						</goals>

+					</execution>

+				</executions>

+			</plugin>

+			<plugin>

+				<groupId>org.sonatype.plugins</groupId>

+				<artifactId>nexus-staging-maven-plugin</artifactId>

+				<version>1.6.7</version>

+				<extensions>true</extensions>

+				<configuration>

+					<nexusUrl>${nexusproxy}</nexusUrl>

+					<stagingProfileId>176c31dfe190a</stagingProfileId>

+					<serverId>ecomp-staging</serverId>

+				</configuration>

+			</plugin>

+		</plugins>

+	</build>

+	<distributionManagement>

+		<repository>

+			<id>ecomp-releases</id>

+			<name>AAF Release Repository</name>

+			<url>${nexusproxy}${releaseNexusPath}</url>

+		</repository>

+		<snapshotRepository>

+			<id>ecomp-snapshots</id>

+			<name>AAF Snapshot Repository</name>

+			<url>${nexusproxy}${snapshotNexusPath}</url>

+		</snapshotRepository>

+		<site>

+			<id>ecomp-site</id>

+			<url>dav:${nexusproxy}${sitePath}</url>

+		</site>

+	</distributionManagement>

+<pluginRepositories>

+        <pluginRepository>

+            <id>onap-plugin-snapshots</id>

+            <url>https://nexus.onap.org/content/repositories/snapshots/</url>

+        </pluginRepository>

+    </pluginRepositories>

+	

+	<repositories>

+		<repository>

+			<id>central</id>

+			<name>Maven 2 repository 2</name>

+			<url>http://repo2.maven.org/maven2/</url>

+		</repository>

+		<repository>

+            <id>onap-jar-snapshots</id>

+            <url>https://nexus.onap.org/content/repositories/snapshots</url>

+        </repository>

+		<repository>

+			<id>spring-repo</id>

+			<name>Spring repo</name>

+			<url>https://artifacts.alfresco.com/nexus/content/repositories/public/</url>

+		</repository>

+		<repository>

+			<id>repository.jboss.org-public</id>

+			<name>JBoss.org Maven repository</name>

+			<url>https://repository.jboss.org/nexus/content/groups/public</url>

+		</repository>

+	</repositories>	

+</project>

+
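
With the distributionManagement section and the nexus-staging plugin
above, publishing follows the standard Maven flow; a hedged sketch of
the invocation (credentials for the ecomp-* server ids are assumed to
live in ~/.m2/settings.xml):

    # assumes <server> entries for ecomp-releases/ecomp-snapshots/ecomp-staging
    mvn clean deploy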

diff --git a/authz-cass/src/main/cql/ecomp.cql b/authz-cass/src/main/cql/ecomp.cql
new file mode 100644
index 0000000..967d6da
--- /dev/null
+++ b/authz-cass/src/main/cql/ecomp.cql
@@ -0,0 +1,118 @@
+//
+//  Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+// 
+USE authz;
+
+// Create root password credential
+INSERT INTO cred (id,ns,type,cred,expires)
+  VALUES ('dgl@openecomp.org','org.openecomp',1,0xab3831f27b39d7a039f9a92aa2bbfe51,'2020-12-31');
+
+// Create 'com' root NS
+INSERT INTO ns (name,scope,description,parent,type)
+  VALUES('com',1,'Root Namespace',null,1);
+
+INSERT INTO role(ns, name, perms, description)
+  VALUES('com','admin',{'com.access|*|*'},'Com Admins');
+
+INSERT INTO role(ns, name, perms, description)
+  VALUES('com','owner',{'com.access|*|read'},'Com Owners');
+
+INSERT INTO perm(ns, type, instance, action, roles, description) 
+  VALUES ('com','access','*','read',{'com.owner'},'Com Read Access');
+
+INSERT INTO perm(ns, type, instance, action, roles, description) 
+  VALUES ('com','access','*','*',{'com.admin'},'Com Write Access');
+
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('dgl@openecomp.org','com.owner','2020-12-31','com','owner');
+
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('dgl@openecomp.org','com.admin','2020-12-31','com','admin');
+
+// Create org root NS
+INSERT INTO ns (name,scope,description,parent,type)
+  VALUES('org',1,'Root Namespace Org',null,1);
+
+INSERT INTO role(ns, name, perms, description)
+  VALUES('org','admin',{'org.access|*|*'},'Org Admins');
+
+INSERT INTO role(ns, name, perms, description)
+  VALUES('org','owner',{'org.access|*|read'},'Org Owners');
+
+INSERT INTO perm(ns, type, instance, action, roles, description)
+  VALUES ('org','access','*','read',{'org.owner'},'Org Read Access');
+
+INSERT INTO perm(ns, type, instance, action, roles, description)
+  VALUES ('org','access','*','*',{'org.admin'},'Org Write Access');
+
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('dgl@openecomp.org','org.owner','2020-12-31','org','owner');
+
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('dgl@openecomp.org','org.admin','2020-12-31','org','admin');
+
+
+// Create com.att
+
+INSERT INTO ns (name,scope,description,parent,type)
+  VALUES('com.att',2,'AT&T Namespace','com',2);
+
+INSERT INTO role(ns, name, perms,description)
+  VALUES('com.att','admin',{'com.att.access|*|*'},'AT&T Admins');
+
+INSERT INTO role(ns, name, perms,description)
+  VALUES('com.att','owner',{'com.att.access|*|read'},'AT&T Owners');
+
+INSERT INTO perm(ns, type, instance, action, roles,description) 
+  VALUES ('com.att','access','*','read',{'com.att.owner'},'AT&T Read Access');
+
+INSERT INTO perm(ns, type, instance, action, roles,description) 
+  VALUES ('com.att','access','*','*',{'com.att.admin'},'AT&T Write Access');
+
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('dgl@openecomp.org','com.att.owner','2020-12-31','com.att','owner');
+
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('dgl@openecomp.org','com.att.admin','2020-12-31','com.att','admin');
+
+// Create com.att.aaf
+
+INSERT INTO ns (name,scope,description,parent,type)
+  VALUES('com.att.aaf',3,'Application Authorization Framework','com.att',3);
+
+INSERT INTO role(ns, name, perms, description)
+  VALUES('com.att.aaf','admin',{'com.att.aaf.access|*|*'},'AAF Admins');
+
+INSERT INTO role(ns, name, perms, description)
+  VALUES('com.att.aaf','owner',{'com.att.aaf.access|*|read'},'AAF Owners');
+
+INSERT INTO perm(ns, type, instance, action, roles, description) 
+  VALUES ('com.att.aaf','access','*','read',{'com.att.aaf.owner'},'AAF Read Access');
+
+INSERT INTO perm(ns, type, instance, action, roles, description) 
+  VALUES ('com.att.aaf','access','*','*',{'com.att.aaf.admin'},'AAF Write Access');
+
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('dgl@openecomp.org','com.att.aaf.admin','2020-12-31','com.att.aaf','admin');
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('dgl@openecomp.org','com.att.aaf.owner','2020-12-31','com.att.aaf','owner');
+  
+
+// Create org.openecomp
+INSERT INTO ns (name,scope,description,parent,type)
+  VALUES('org.openecomp',2,'Open EComp NS','com.att',2);
+
+INSERT INTO role(ns, name, perms, description)
+  VALUES('org.openecomp','admin',{'org.openecomp.access|*|*'},'OpenEcomp Admins');
+
+INSERT INTO role(ns, name, perms, description)
+  VALUES('org.openecomp','owner',{'org.openecomp.access|*|read'},'OpenEcomp Owners');
+
+INSERT INTO perm(ns, type, instance, action, roles, description) 
+  VALUES ('org.openecomp','access','*','read',{'org.openecomp.owner'},'OpenEcomp Read Access');
+
+INSERT INTO perm(ns, type, instance, action, roles, description) 
+  VALUES ('org.openecomp','access','*','*',{'org.openecomp.admin'},'OpenEcomp Write Access');
+
+INSERT INTO user_role(user,role,expires,ns,rname)
+  VALUES ('dgl@openecomp.org','org.openecomp.admin','2020-12-31','org.openecomp','admin');
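
A hedged sanity check of the bootstrap data above; these queries are
illustrative only and not part of this file:

    // List roles granted to the bootstrap user, then the perms on one role.
    SELECT role FROM authz.user_role WHERE user = 'dgl@openecomp.org';
    SELECT perms FROM authz.role WHERE ns = 'org.openecomp' AND name = 'admin';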
diff --git a/authz-cass/src/main/cql/init.cql b/authz-cass/src/main/cql/init.cql
new file mode 100644
index 0000000..3b2688a
--- /dev/null
+++ b/authz-cass/src/main/cql/init.cql
@@ -0,0 +1,212 @@
+//
+//  Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+//
+// For Developer Machine single instance
+//
+ CREATE KEYSPACE authz
+ WITH REPLICATION = {'class' : 'SimpleStrategy','replication_factor':1};
+ 
+USE authz;
+
+//
+// CORE Table function
+//
+
+// Namespace - establish hierarchical authority to modify
+// Permissions and Roles
+// "scope" is a flag used to determine Policy.  The typical important
+// scope is "company" (1)
+CREATE TABLE ns (
+  name			varchar,
+  scope			int,  // deprecated 2.0.11
+  description   	varchar,
+  parent 		varchar,
+  type			int,
+  PRIMARY KEY (name)  
+);
+CREATE INDEX ns_parent on ns(parent);
+  
+
+CREATE TABLE ns_attrib (
+  ns            varchar,
+  key           varchar,
+  value         varchar,
+  PRIMARY KEY (ns,key)
+);
+create index ns_attrib_key on ns_attrib(key);
+
+// Will be cached
+CREATE TABLE role (
+  ns	    varchar,
+  name		varchar,
+  perms		set<varchar>, // Use "Key" of "name|type|action"
+  description varchar,
+  PRIMARY KEY (ns,name)
+);
+CREATE INDEX role_name  ON role(name);
+ 
+// Will be cached
+CREATE TABLE perm (
+  ns	    varchar,
+  type 		varchar,
+  instance	varchar,
+  action	varchar,
+  roles		set<varchar>, // Need to find Roles given Permissions
+  description varchar,
+  PRIMARY KEY (ns,type,instance,action)
+);
+
+// This table is used for Authorization
+CREATE TABLE user_role (
+    user		varchar,
+    role		varchar, // deprecated: change to ns/rname after 2.0.11
+    ns			varchar,
+    rname		varchar,
+    expires		timestamp,
+    PRIMARY KEY(user,role)
+  );
+CREATE INDEX user_role_ns ON user_role(ns);
+CREATE INDEX user_role_role ON user_role(role);
+
+// This table is only for the case where we return User Credential (MechID) Authentication
+CREATE TABLE cred (
+    id    varchar,
+    type  int,
+    expires timestamp,  
+    ns    varchar,
+    other int,
+    notes varchar,
+    cred  blob,
+    prev  blob,
+    PRIMARY KEY (id,type,expires)
+  );
+CREATE INDEX cred_ns ON cred(ns);
+
+// Certificate Cross Table
+//   coordinated with CRED type 2
+CREATE TABLE cert (
+    fingerprint blob,
+    id    	varchar,
+    x500	varchar,
+    expires 	timestamp,  
+    PRIMARY KEY (fingerprint)
+  );
+CREATE INDEX cert_id ON cert(id);
+CREATE INDEX cert_x500 ON cert(x500);
+
+CREATE TABLE notify (
+  user text,
+  type int,
+  last timestamp,
+  checksum int,
+  PRIMARY KEY (user,type)
+);
+
+CREATE TABLE x509 (
+  ca     text,
+  serial blob,
+  id     text,
+  x500   text,
+  x509   text,
+  PRIMARY KEY (ca,serial)
+);
+
+
+CREATE INDEX x509_id   ON x509 (id);
+CREATE INDEX x509_x500 ON x509 (x500);
+
+// 
+// Deployment Artifact (for Certman)
+//
+CREATE TABLE artifact (
+  mechid        text,
+  machine       text,
+  type          Set<text>,
+  sponsor       text,
+  ca            text,
+  dir           text,
+  appName       text,
+  os_user       text,
+  notify        text,
+  expires	timestamp,
+  renewDays   int,
+  PRIMARY KEY (mechid,machine)
+);
+CREATE INDEX artifact_machine ON artifact(machine); 
+
+//
+// Non-Critical Table functions
+//
+// Table Info - for Caching
+CREATE TABLE cache (
+   name		varchar,
+   seg		int, 		// cache Segment
+   touched	timestamp,
+   PRIMARY KEY(name,seg)
+);
+
+CREATE TABLE history (
+  id			timeuuid,
+  yr_mon		int,
+  user			varchar,
+  action 		varchar,
+  target		varchar,   // user, user_role, 
+  subject		varchar,   // field for searching main portion of target key
+  memo			varchar,   // description of the action
+  reconstruct 	blob,      // serialized form of the target
+  // detail 	Map<varchar, varchar>,  // additional information
+  PRIMARY KEY (id)
+);
+CREATE INDEX history_yr_mon ON history(yr_mon);
+CREATE INDEX history_user ON history(user); 
+CREATE INDEX history_subject ON history(subject); 
+
+// 
+// A place to hold objects to be created at a future time.
+//
+CREATE TABLE future (
+  id        uuid,  		// uniquify
+  target    varchar,   		// Target Table
+  memo	    varchar,    	// Description
+  start     timestamp, 		// When it should take effect
+  expires   timestamp, 		// When no longer valid
+  construct blob, 		// How to construct this object (like History)
+  PRIMARY KEY(id)
+);
+CREATE INDEX future_idx ON future(target);
+CREATE INDEX future_start_idx ON future(start);
+
+
+CREATE TABLE approval (
+  id	    timeuuid,	      // unique Key
+  ticket    uuid,	      // Link to Future Record
+  user 	    varchar,          // the user who needs to be approved
+  approver  varchar, 	      // user approving
+  type      varchar,          // approver types i.e. Supervisor, Owner
+  status    varchar,          // approval status. pending, approved, denied
+  memo      varchar,          // Text for Approval to know what's going on
+  operation varchar,	      // List operation to perform
+  PRIMARY KEY(id)
+ );
+CREATE INDEX appr_approver_idx ON approval(approver);
+CREATE INDEX appr_user_idx ON approval(user);
+CREATE INDEX appr_ticket_idx ON approval(ticket);
+CREATE INDEX appr_status_idx ON approval(status);
+
+CREATE TABLE delegate (
+  user      varchar,
+  delegate  varchar,
+  expires   timestamp,
+  PRIMARY KEY (user)  
+);
+CREATE INDEX delg_delg_idx ON delegate(delegate);
+
+//
+// Used by authz-batch processes to ensure only 1 runs at a time
+//
+CREATE TABLE run_lock (
+  class text,
+  host text,
+  start timestamp,
+  PRIMARY KEY ((class))
+);
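
The run_lock comment above describes single-runner coordination; one
plausible (hypothetical) acquisition uses a lightweight transaction so
that only one host wins, with 'JobRunner' and 'host-1' as example values:

    INSERT INTO run_lock (class, host, start)
      VALUES ('JobRunner', 'host-1', dateof(now()))
      IF NOT EXISTS;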
diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/AbsCassDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/AbsCassDAO.java
new file mode 100644
index 0000000..c76a88f
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/AbsCassDAO.java
@@ -0,0 +1,497 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import java.io.IOException;

+import java.util.ArrayList;

+import java.util.Deque;

+import java.util.List;

+import java.util.concurrent.ConcurrentLinkedDeque;

+

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.Status;

+

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.Env;

+import org.onap.aaf.inno.env.Slot;

+import org.onap.aaf.inno.env.TimeTaken;

+import org.onap.aaf.inno.env.TransStore;

+import com.datastax.driver.core.BoundStatement;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.ConsistencyLevel;

+import com.datastax.driver.core.ResultSet;

+import com.datastax.driver.core.ResultSetFuture;

+import com.datastax.driver.core.Row;

+import com.datastax.driver.core.Session;

+import com.datastax.driver.core.exceptions.DriverException;

+

+public abstract class AbsCassDAO<TRANS extends TransStore,DATA> {

+	protected static final char DOT = '.';

+	protected static final char DOT_PLUS_ONE = '.'+1;

+	protected static final String FIRST_CHAR = Character.toString((char)0);

+	protected static final String LAST_CHAR = Character.toString((char)Character.MAX_VALUE);

+	protected static final int FIELD_COMMAS = 0;

+	protected static final int QUESTION_COMMAS = 1;

+	protected static final int ASSIGNMENT_COMMAS = 2;

+	protected static final int WHERE_ANDS = 3;

+	

+	private Cluster cluster; 

+	private Session session;

+	private final String keyspace;

+	// If this is null, then we own session

+	private final AbsCassDAO<TRANS,?> owningDAO;

+	protected Class<DATA> dataClass;

+	private final String name;

+	private static Slot sessionSlot;

+	//private static final ArrayList<AbsCassDAO<? extends TransStore,?>.PSInfo> psinfos = new ArrayList<AbsCassDAO<TransStore,?>.PSInfo>();

+	private static final ArrayList<AbsCassDAO<? extends TransStore,?>.PSInfo> psinfos = new ArrayList<AbsCassDAO<? extends TransStore,?>.PSInfo>();

+	private static final List<Object> EMPTY = new ArrayList<Object>(0);

+	private static final Deque<ResetRequest> resetDeque = new ConcurrentLinkedDeque<ResetRequest>();

+	private static boolean resetTrigger = false;

+	private static long nextAvailableReset = 0;

+	

+

+	public AbsCassDAO(TRANS trans, String name, Cluster cluster, String keyspace, Class<DATA> dataClass) {

+		this.name = name;

+		this.cluster = cluster;

+		this.keyspace = keyspace;

+		owningDAO = null;  // we own session

+		session = null;

+		this.dataClass = dataClass;

+		

+	}

+

+	public AbsCassDAO(TRANS trans, String name, AbsCassDAO<TRANS,?> aDao, Class<DATA> dataClass) {      

+		this.name = name;

+		cluster = aDao.cluster;

+		keyspace = aDao.keyspace;

+		session = null;

+		owningDAO = aDao; // We do not own session

+		this.dataClass = dataClass;

+	}

+	

+	public static void setSessionSlot(Slot slot) {

+		sessionSlot = slot;

+	}

+

+	// Note: Lower case ON PURPOSE. These names are used to create History Messages.

+	public enum CRUD {

+		create, read, update, delete

+	}

+

+	public class PSInfo {

+		private BoundStatement ps;

+		private final int size;

+		private final Loader<DATA> loader;

+		private final CRUD crud; // Store CRUD, because it makes a difference in Object Order, see Loader

+		private final String cql;

+		private final ConsistencyLevel consistency;

+

+

+		/**

+		 * Create a PSInfo and create Prepared Statement

+		 * 

+		 * @param trans

+		 * @param theCQL

+		 * @param loader

+		 */

+		public PSInfo(TRANS trans, String theCQL, Loader<DATA> loader, ConsistencyLevel consistency) {

+			this.loader = loader;

+			this.consistency=consistency;

+			psinfos.add(this);

+

+			cql = theCQL.trim().toUpperCase();

+			if(cql.startsWith("INSERT")) {

+				crud = CRUD.create;

+			} else if(cql.startsWith("UPDATE")) {

+				crud = CRUD.update;

+			} else if(cql.startsWith("DELETE")) {

+				crud = CRUD.delete;

+			} else {

+				crud = CRUD.read;

+			}

+			

+			int idx = 0, count=0;

+			while((idx=cql.indexOf('?',idx))>=0) {

+				++idx;

+				++count;

+			}

+			size=count;

+		}

+		

+		public synchronized void reset() {

+			ps = null;

+		}

+		

+		private BoundStatement ps(TransStore trans) throws APIException, IOException {

+			if(ps==null) {

+				synchronized(this) {

+					if(ps==null) {

+						TimeTaken tt = trans.start("Preparing PSInfo " + crud.toString().toUpperCase() + " on " + name,Env.SUB);

+						try {

+							ps = new BoundStatement(getSession(trans).prepare(cql));

+							ps.setConsistencyLevel(consistency);

+						} catch (DriverException e) {

+							reportPerhapsReset(trans,e);

+							throw e;

+						} finally {

+							tt.done();

+						}

+					}

+				}

+			}

+			return ps;

+		}

+

+		/**

+		 * Execute a Prepared Statement by extracting from DATA object

+		 * 

+		 * @param trans

+		 * @param text

+		 * @param data

+		 * @return

+		 */

+		public Result<ResultSetFuture> execAsync(TRANS trans, String text, DATA data) {

+			TimeTaken tt = trans.start(text, Env.REMOTE);

+			try {

+				return Result.ok(getSession(trans).executeAsync(

+						ps(trans).bind(loader.extract(data, size, crud))));

+			} catch (DriverException | APIException | IOException e) {

+				AbsCassDAO.this.reportPerhapsReset(trans,e);

+				return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);

+			} finally {

+				tt.done();

+			}

+		}

+

+		/**

+		 * Execute a Prepared Statement on Object[] key

+		 * 

+		 * @param trans

+		 * @param text

+		 * @param objs

+		 * @return

+		 */

+		public Result<ResultSetFuture> execAsync(TRANS trans, String text, Object ... objs) {

+			TimeTaken tt = trans.start(text, Env.REMOTE);

+			try {

+				return Result.ok(getSession(trans).executeAsync(ps(trans).bind(objs)));

+			} catch (DriverException | APIException | IOException e) {

+				AbsCassDAO.this.reportPerhapsReset(trans,e);

+				return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);

+			} finally {

+				tt.done();

+			}

+		}

+		

+

+		/**

+		 * Execute a Prepared Statement by extracting from DATA object

+		 * 

+		 * @param trans

+		 * @param text

+		 * @param data

+		 * @return

+		 */

+		public Result<ResultSet> exec(TRANS trans, String text, DATA data) {

+			TimeTaken tt = trans.start(text, Env.REMOTE);

+			try {

+				/*

+				 * "execute" (and executeAsync)

+				 * Executes the provided query.

+					This method blocks until at least some result has been received from the database. However, 

+					for SELECT queries, it does not guarantee that the result has been received in full. But it 

+					does guarantee that some response has been received from the database, and in particular 

+					guarantee that if the request is invalid, an exception will be thrown by this method.

+

+					Parameters:

+					statement - the CQL query to execute (that can be any Statement).

+					Returns:

+						the result of the query. That result will never be null but can be empty (and will 

+						be for any non SELECT query).

+				 */

+				return Result.ok(getSession(trans).execute(

+						ps(trans).bind(loader.extract(data, size, crud))));

+			} catch (DriverException | APIException | IOException e) {

+				AbsCassDAO.this.reportPerhapsReset(trans,e);

+				return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);

+			} finally {

+				tt.done();

+			}

+		}

+

+		/**

+		 * Execute a Prepared Statement on Object[] key

+		 * 

+		 * @param trans

+		 * @param text

+		 * @param objs

+		 * @return

+		 */

+		public Result<ResultSet> exec(TRANS trans, String text, Object ... objs) {

+			TimeTaken tt = trans.start(text, Env.REMOTE);

+			try {

+				return Result.ok(getSession(trans).execute(ps(trans).bind(objs)));

+			} catch (DriverException | APIException | IOException e) {

+				AbsCassDAO.this.reportPerhapsReset(trans,e);

+				return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);

+			} finally {

+				tt.done();

+			}

+		}

+

+		/**

+		 * Read the Data from Cassandra given a Prepared Statement (defined by the

+		 * DAO Instance)

+		 *

+		 * This is common behavior among all DAOs.

+		 * @throws DAOException

+		 */

+		public Result<List<DATA>> read(TRANS trans, String text, Object[] key) {

+			TimeTaken tt = trans.start(text,Env.REMOTE);

+			

+			ResultSet rs;

+			try {

+				rs = getSession(trans).execute(key==null?ps(trans):ps(trans).bind(key));

+/// TEST CODE for Exception				

+//				boolean force = true; 

+//				if(force) {

+//					Map<InetSocketAddress, Throwable> misa = new HashMap<InetSocketAddress,Throwable>();

+//					//misa.put(new InetSocketAddress(444),new Exception("no host was tried"));

+//					misa.put(new InetSocketAddress(444),new Exception("Connection has been closed"));

+//					throw new com.datastax.driver.core.exceptions.NoHostAvailableException(misa);

+////					throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"no host was tried");

+//				}

+//// END TEST CODE

+			} catch (DriverException | APIException | IOException e) {

+				AbsCassDAO.this.reportPerhapsReset(trans,e);

+				return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);

+			} finally {

+				tt.done();

+			}

+			

+			return extract(loader,rs,null /*let Array be created if necessary*/,dflt);

+		}

+		

+		public Result<List<DATA>> read(TRANS trans, String text, DATA data) {

+			return read(trans,text, loader.extract(data, size, crud));

+		}

+		

+		public Object[] keyFrom(DATA data) {

+			return loader.extract(data, size, CRUD.delete); // Delete is key only

+		}

+

+		/*

+		 * Note: in case PSInfos are deleted, we want to remove them from list.  This is not expected, 

+		 * but we don't want a data leak if it does.  Finalize doesn't have to happen quickly

+		 */

+		@Override

+		protected void finalize() throws Throwable {

+			psinfos.remove(this);

+		}

+	}

+

+	protected final Accept<DATA> dflt = new Accept<DATA>() {

+		@Override

+		public boolean ok(DATA data) {

+			return true;

+		}

+	};

+

+

+	@SuppressWarnings("unchecked")

+    protected final Result<List<DATA>> extract(Loader<DATA> loader, ResultSet rs, List<DATA> indata, Accept<DATA> accept) {

+		List<Row> rows = rs.all();

+		if(rows.isEmpty()) {

+			return Result.ok((List<DATA>)EMPTY); // Result sets now .emptyList(true);

+		} else {

+			DATA d;

+			List<DATA> data = indata==null?new ArrayList<DATA>(rows.size()):indata;

+			

+			for(Row row : rows) {

+				try {

+					d = loader.load(dataClass.newInstance(),row);

+					if(accept.ok(d)) {

+						data.add(d);

+					}

+				} catch(Exception e) {

+					return Result.err(e);

+				}

+			}

+			return Result.ok(data);

+		}

+    }

+    

+	private static final String NEW_CASSANDRA_SESSION_CREATED = "New Cassandra Session Created";

+	private static final String NEW_CASSANDRA_CLUSTER_OBJECT_CREATED = "New Cassandra Cluster Object Created";

+	private static final String NEW_CASSANDRA_SESSION = "New Cassandra Session";

+

+	private static class ResetRequest {

+		//package on purpose

+		Session session;

+		long timestamp;

+		

+		public ResetRequest(Session session) {

+			this.session = session;

+			timestamp = System.currentTimeMillis();

+		}

+	}

+

+	

+	public static final void primePSIs(TransStore trans) throws APIException, IOException {

+		for(AbsCassDAO<? extends TransStore, ?>.PSInfo psi : psinfos) {

+			if(psi.ps==null) {

+				psi.ps(trans);

+			}

+		}

+	}

+	

+	public final Session getSession(TransStore trans) throws APIException, IOException {

+		// Try to use Trans' session, if exists

+		if(sessionSlot!=null) { // try to get from Trans

+			Session sess = trans.get(sessionSlot, null);

+			if(sess!=null) {

+				return sess;

+			}

+		}

+		

+		// If there's an owning DAO, use its session

+		if(owningDAO!=null) {

+			return owningDAO.getSession(trans);

+		}

+		

+		// OK, nothing else works... get our own.

+		if(session==null || resetTrigger) {

+			Cluster tempCluster = null;

+			Session tempSession = null;

+			try {

+				synchronized(NEW_CASSANDRA_SESSION_CREATED) {

+					boolean reset = false;

+					for(ResetRequest r : resetDeque) {

+						if(r.session == session) {

+							if(r.timestamp>nextAvailableReset) {

+								reset=true;

+								nextAvailableReset = System.currentTimeMillis() + 60000;

+								tempCluster = cluster;

+								tempSession = session;

+								break;

+							} else {

+								trans.warn().log("Cassandra Connection Reset Ignored: Recent Reset");

+							}

+						}

+					}

+	

+					if(reset || session == null) {

+						TimeTaken tt = trans.start(NEW_CASSANDRA_SESSION, Env.SUB);

+						try {

+							// Note: Maitrayee recommended not closing the cluster, just

+							// overwrite it. 9/30/2016 assuming same for Session

+							// This was a bad idea.  Ran out of File Handles as I suspected..

+							if(reset) {

+								for(AbsCassDAO<? extends TransStore, ?>.PSInfo psi : psinfos) {

+									psi.reset();

+								}

+							}

+							if(reset || cluster==null) {

+								cluster = CassAccess.cluster(trans, keyspace);

+								trans.warn().log(NEW_CASSANDRA_CLUSTER_OBJECT_CREATED);

+							}

+							if(reset || session==null) {

+								session = cluster.connect(keyspace);

+								trans.warn().log(NEW_CASSANDRA_SESSION_CREATED);

+							}

+						} finally {

+							resetTrigger=false;

+							tt.done();

+						}

+					}

+				}

+			} finally {

+				TimeTaken tt = trans.start("Clear Reset Deque", Env.SUB);

+				try {

+					resetDeque.clear();

+					// Not clearing Session/Cluster appears to kill off FileHandles

+					if(tempSession!=null && !tempSession.isClosed()) {

+						tempSession.close();

+					}

+					if(tempCluster!=null && !tempCluster.isClosed()) {

+						tempCluster.close();

+					}

+				} finally {

+					tt.done();

+				}

+			}

+		}

+		return session;

+	}

+	

+	public final boolean reportPerhapsReset(TransStore trans, Exception e) {

+		if(owningDAO!=null) {

+			return owningDAO.reportPerhapsReset(trans, e);

+		} else {

+			boolean rv = false;

+			if(CassAccess.isResetException(e)) {

+				trans.warn().printf("Session Reset called for %s by %s ",session==null?"":session,e==null?"Mgmt Command":e.getClass().getName());

+				resetDeque.addFirst(new ResetRequest(session));

+				rv = resetTrigger = true;

+			} 

+			trans.error().log(e);

+			return rv;

+		}

+	}

+

+	public void close(TransStore trans) {

+		if(owningDAO==null) {

+			if(session!=null) {

+				TimeTaken tt = trans.start("Cassandra Session Close", Env.SUB);

+				try {

+					session.close();

+				} finally {

+					tt.done();

+				}

+				session = null;

+			} else {

+				trans.debug().log("close called(), Session already closed");

+			}

+		} else {

+			owningDAO.close(trans);

+		}

+	}

+

+	protected void wasModified(TRANS trans, CRUD modified, DATA data, String ... override) {

+	}

+	

+	protected interface Accept<DATA> {

+		public boolean ok(DATA data);

+	}

+

+}

+

+

+
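
A hedged sketch of how a concrete DAO is expected to use PSInfo; the
table, CQL, and exampleLoader below are hypothetical, and the real
Loader contract is defined in authz-core:

    // Inside an AbsCassDAO subclass; the '?' count in the CQL becomes PSInfo.size.
    PSInfo psByName = new PSInfo(trans,
            "SELECT name, value FROM example WHERE name = ?",
            exampleLoader,            // a Loader<Data> built per its actual API
            ConsistencyLevel.ONE);
    Result<List<Data>> rows = psByName.read(trans, "read example by name",
            new Object[]{"some-key"});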

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/Bytification.java b/authz-cass/src/main/java/org/onap/aaf/dao/Bytification.java
new file mode 100644
index 0000000..901339e
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/Bytification.java
@@ -0,0 +1,31 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import java.io.IOException;

+import java.nio.ByteBuffer;

+

+public interface Bytification {

+	public ByteBuffer bytify() throws IOException;

+	public void reconstitute(ByteBuffer bb) throws IOException;

+}
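
A hedged sketch of an implementer; NameData is a hypothetical class
that round-trips a single String field through a ByteBuffer:

    public class NameData implements Bytification {
        public String name;

        @Override
        public ByteBuffer bytify() throws IOException {
            return ByteBuffer.wrap(name.getBytes("UTF-8"));
        }

        @Override
        public void reconstitute(ByteBuffer bb) throws IOException {
            byte[] b = new byte[bb.remaining()];
            bb.get(b);
            name = new String(b, "UTF-8");
        }
    }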

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/CIDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/CIDAO.java
new file mode 100644
index 0000000..05bb86d
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/CIDAO.java
@@ -0,0 +1,52 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import java.util.Date;

+

+import org.onap.aaf.authz.layer.Result;

+

+import org.onap.aaf.inno.env.Trans;

+

+public interface CIDAO<TRANS extends Trans> {

+

+	/**

+	 * Touch the date field for given Table

+	 *  

+	 * @param trans

+	 * @param name

+	 * @return

+	 */

+	public abstract Result<Void> touch(TRANS trans, String name, int ... seg);

+

+	/**

+	 * Read all Info entries, and set local Date objects

+	 * 

+	 * This is to support regular data checks on the Database to speed up Caching behavior

+	 * 

+	 */

+	public abstract Result<Void> check(TRANS trans);

+

+	public abstract Date get(TRANS trans, String table, int seg);

+

+}
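
A hedged illustration of the intended flow; "role" and the variables
shown are examples only:

    info.touch(trans, "role", seg);              // writer marks the segment dirty
    Date dbStamp = info.get(trans, "role", seg); // reader fetches the DB timestamp
    boolean fresh = dbStamp.before(cachedAt);    // cached entry is valid if newer than the DB stamp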

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/Cacheable.java b/authz-cass/src/main/java/org/onap/aaf/dao/Cacheable.java
new file mode 100644
index 0000000..0848292
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/Cacheable.java
@@ -0,0 +1,34 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+/**

+ * Interface to obtain Segment Integer from DAO Data

+ * for use in Caching mechanism

+ * 

+ * This should typically be obtained by getting the Hash of the key, then using modulus on the size of segment.

+ * 

+ *

+ */

+public interface Cacheable {

+	public int[] invalidate(Cached<?,?> cache);

+}
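
A hedged sketch of a typical implementer; the key format (ns + '.' + name)
is illustrative only:

    @Override
    public int[] invalidate(Cached<?,?> cache) {
        // Clear this object's cache entry and report the affected segment.
        return new int[]{ cache.invalidate(ns + '.' + name) };
    }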

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/Cached.java b/authz-cass/src/main/java/org/onap/aaf/dao/Cached.java
new file mode 100644
index 0000000..5e5323c
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/Cached.java
@@ -0,0 +1,198 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import java.util.Date;

+import java.util.List;

+import java.util.Map;

+import java.util.Timer;

+import java.util.TimerTask;

+

+import org.onap.aaf.authz.env.AuthzEnv;

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.cache.Cache;

+import org.onap.aaf.dao.aaf.cass.Status;

+

+import org.onap.aaf.inno.env.Env;

+import org.onap.aaf.inno.env.Trans;

+

+public class Cached<TRANS extends Trans, DATA extends Cacheable> extends Cache<TRANS,DATA> {

+	// Java does not allow creation of Arrays with Generics in them...

+	// private Map<String,Dated> cache[];

+	protected final CIDAO<TRANS> info;

+	

+	private static Timer infoTimer;

+	private Object cache[];

+	public final int segSize;

+

+	protected final String name;

+	

+

+

+	// Taken from String hash, but coded here to ensure consistency across Java versions.  Also covers the negative case.

+	public int cacheIdx(String key) {

+		int h = 0;

+		for (int i = 0; i < key.length(); i++) {

+			h = 31*h + key.charAt(i);

+		}

+		if(h<0) {

+			h = (h==Integer.MIN_VALUE)?0:-h; // negating MIN_VALUE overflows, so pin that key to segment 0

+		}

+		return h%segSize;

+	}

+	

+	public Cached(CIDAO<TRANS> info, String name, int segSize) {

+		this.name =name;

+		this.segSize = segSize;

+		this.info = info;

+		cache = new Object[segSize];

+		// Create a new Map for each Segment, and store locally

+		for(int i=0;i<segSize;++i) {

+			cache[i]=obtain(name+i);

+		}

+	}

+	

+	public void add(String key, List<DATA> data) {

+		@SuppressWarnings("unchecked")

+		Map<String,Dated> map = ((Map<String,Dated>)cache[cacheIdx(key)]);

+		map.put(key, new Dated(data));

+	}

+

+

+	public int invalidate(String key)  {

+		int cacheIdx = cacheIdx(key);

+		@SuppressWarnings("unchecked")

+		Map<String,Dated> map = ((Map<String,Dated>)cache[cacheIdx]);

+//		if(map.remove(key)!=null) // Not seeming to remove all the time

+		if(map!=null)map.clear();

+//			System.err.println("Remove " + name + " " + key);

+		return cacheIdx;

+	}

+

+	public Result<Void> invalidate(int segment)  {

+		if(segment<0 || segment>=cache.length) return Result.err(Status.ERR_BadData,"Cache Segment %s is out of range",Integer.toString(segment));

+		@SuppressWarnings("unchecked")

+		Map<String,Dated> map = ((Map<String,Dated>)cache[segment]);

+		if(map!=null) {

+			map.clear();

+		}

+		return Result.ok();

+	}

+

+	protected interface Getter<D> {

+		public abstract Result<List<D>> get();

+	};

+	

+	// TODO utilize Segmented Caches, and fold "get" into "reads"

+	@SuppressWarnings("unchecked")

+	public Result<List<DATA>> get(TRANS trans, String key, Getter<DATA> getter) {

+		List<DATA> ld = null;

+		Result<List<DATA>> rld = null;

+		

+		int cacheIdx = cacheIdx(key);

+		Map<String, Dated> map = ((Map<String,Dated>)cache[cacheIdx]);

+		

+		// Check for saved element in cache

+		Dated cached = map.get(key);

+		// Note: These Segment Timestamps are kept up to date with DB

+		Date dbStamp = info.get(trans, name,cacheIdx);

+		

+		// Check for cache Entry and whether it is still good (a good Cache Entry is same or after DBEntry, so we use "before" syntax)

+		if(cached!=null && dbStamp.before(cached.timestamp)) {

+			ld = (List<DATA>)cached.data;

+			rld = Result.ok(ld);

+		} else {

+			rld = getter.get();

+			if(rld.isOK()) { // only store valid lists

+				map.put(key, new Dated(rld.value));  // successful item found gets put in cache

+//			} else if(rld.status == Result.ERR_Backend){

+//				map.remove(key);

+			}

+		}

+		return rld;

+	}

+

+	/**

+	 * Each Cached object has multiple Segments that need cleaning.  Derive each, and add to Cleansing Thread

+	 * @param env

+	 * @param dao

+	 */

+	public static void startCleansing(AuthzEnv env, CachedDAO<?,?,?> ... dao) {

+		for(CachedDAO<?,?,?> d : dao) {  

+			for(int i=0;i<d.segSize;++i) {

+				startCleansing(env, d.table()+i);

+			}

+		}

+	}

+

+

+	public static<T extends Trans> void startRefresh(AuthzEnv env, CIDAO<AuthzTrans> cidao) {

+		if(infoTimer==null) {

+			infoTimer = new Timer("CachedDAO Info Refresh Timer");

+			int minRefresh = 10*1000*60; // 10 mins (formerly Integer.parseInt(env.getProperty(CACHE_MIN_REFRESH_INTERVAL,"2000")), a 2 second minimum refresh)

+			infoTimer.schedule(new Refresh(env,cidao, minRefresh), 1000, minRefresh); // note: Refresh from DB immediately

+		}

+	}

+	

+	public static void stopTimer() {

+		Cache.stopTimer();

+		if(infoTimer!=null) {

+			infoTimer.cancel();

+			infoTimer = null;

+		}

+	}

+	

+	private final static class Refresh extends TimerTask {

+		private static final int maxRefresh = 2*60*10000; // 20 mins

+		private AuthzEnv env;

+		private CIDAO<AuthzTrans> cidao;

+		private int minRefresh;

+		private long lastRun;

+		

+		public Refresh(AuthzEnv env, CIDAO<AuthzTrans> cidao, int minRefresh) {

+			this.env = env;

+			this.cidao = cidao;

+			this.minRefresh = minRefresh;

+			lastRun = System.currentTimeMillis()-maxRefresh-1000;

+		}

+		

+		@Override

+		public void run() {

+			// Evaluate whether to refresh based on transaction rate

+			long now = System.currentTimeMillis();

+			long interval = now-lastRun;

+

+			if(interval < minRefresh || interval < Math.min(env.transRate(),maxRefresh)) return;

+			lastRun = now;

+			AuthzTrans trans = env.newTransNoAvg();

+			Result<Void> rv = cidao.check(trans);

+			if(rv.status!=Result.OK) {

+				env.error().log("Error in CacheInfo Refresh",rv.details);

+			}

+			if(env.debug().isLoggable()) {

+				StringBuilder sb = new StringBuilder("Cache Info Refresh: ");

+				trans.auditTrail(0, sb, Env.REMOTE);

+				env.debug().log(sb);

+			}

+		}

+	}

+}
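
A hedged usage sketch from within a subclass (Getter is protected);
roleDAO and the key are hypothetical:

    Result<List<Data>> r = get(trans, "org.onap.aaf.admin", new Getter<Data>() {
        @Override
        public Result<List<Data>> get() {
            // Hit the DB only when the cached entry is missing or stale.
            return roleDAO.read(trans, "org.onap.aaf", "admin");
        }
    });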

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/CachedDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/CachedDAO.java
new file mode 100644
index 0000000..4237b91
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/CachedDAO.java
@@ -0,0 +1,229 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.Status;

+

+import org.onap.aaf.inno.env.Trans;

+

+/**

+ * CachedDAO

+ * 

+ * Cache the response of "get" of any DAO.  

+ * 

+ * For simplicity's sake, at this time, we only do this for single Object keys  

+ * 

+ *

+ * @param <DATA>

+ */

+public class CachedDAO<TRANS extends Trans,D extends DAO<TRANS,DATA>,DATA extends Cacheable> 

+		extends Cached<TRANS,DATA> implements DAO_RO<TRANS,DATA>{

+//	private final String dirty_str; 

+	

+	private final D dao;

+

+	public CachedDAO(D dao, CIDAO<TRANS> info, int segsize) {

+		super(info, dao.table(), segsize);

+		

+		// Instantiate a new Cache per DAO name (separate DAO instances with the same name share one cache)

+		this.dao = dao;

+		//read_str = "Cached READ for " + dao.table();

+//		dirty_str = "Cache DIRTY on " + dao.table();

+		if(dao instanceof CassDAOImpl) {

+			((CassDAOImpl<?,?>)dao).cache = this;

+		}

+	}

+	

+	public static<T extends Trans, DA extends DAO<T,DT>, DT extends Cacheable> 

+			CachedDAO<T,DA,DT> create(DA dao, CIDAO<T> info, int segsize) {

+		return new CachedDAO<T,DA,DT>(dao,info, segsize);

+	}

+

+	public void add(DATA data)  {

+		String key = keyFromObjs(dao.keyFrom(data));

+		List<DATA> list = new ArrayList<DATA>();

+		list.add(data);

+		super.add(key,list);

+	}

+	

+//	public void invalidate(TRANS trans, Object ... objs)  {

+//		TimeTaken tt = trans.start(dirty_str, Env.SUB);

+//		try {

+//			super.invalidate(keyFromObjs(objs));

+//		} finally {

+//			tt.done();

+//		}

+//	}

+

+	public static String keyFromObjs(Object ... objs) {

+		String key;

+		if(objs.length==1 && objs[0] instanceof String) {

+			key = (String)objs[0];

+		} else {

+			StringBuilder sb = new StringBuilder();

+			boolean first = true;

+			for(Object o : objs) {

+				if(o!=null) {

+					if(first) {

+					    first =false;

+					} else {

+					    sb.append('|');

+					}

+					sb.append(o.toString());

+				}

+			}

+			key = sb.toString();

+		}

+		return key;

+	}

+

+	public Result<DATA> create(TRANS trans, DATA data) {

+		Result<DATA> d = dao.create(trans,data);

+		if(d.status==Status.OK) {

+		    add(d.value);

+		} else {

+			trans.error().log(d.errorString());

+		}

+		invalidate(trans,data);

+		return d;

+	}

+

+	protected class DAOGetter implements Getter<DATA> {

+		protected TRANS trans;

+		protected Object objs[];

+		protected D dao;

+		public Result<List<DATA>> result;

+

+		public DAOGetter(TRANS trans, D dao, Object ... objs) {

+			this.trans = trans;

+			this.dao = dao;

+			this.objs = objs;

+		}

+		

+		/**

+		 * Separated into single call for easy overloading

+		 * @return

+		 */

+		public Result<List<DATA>> call() {

+			return dao.read(trans, objs);

+		}

+		

+		@Override

+		public final Result<List<DATA>> get() {

+			return call();

+//			if(result.isOKhasData()) { // Note, given above logic, could exist, but stale

+//				return result.value;

+//			} else {

+//				return null;

+//			}

+		}

+	}

+

+	@Override

+	public Result<List<DATA>> read(final TRANS trans, final Object ... objs) {

+		DAOGetter getter = new DAOGetter(trans,dao,objs); 

+		return get(trans, keyFromObjs(objs),getter);

+//		if(ld!=null) {

+//			return Result.ok(ld);//.emptyList(ld.isEmpty());

+//		}

+//		// Result Result if exists

+//		if(getter.result==null) {

+//			return Result.err(Status.ERR_NotFound, "No Cache or Lookup found on [%s]",dao.table());

+//		}

+//		return getter.result;

+	}

+

+	// Slight Improved performance available when String and Obj versions are known. 

+	public Result<List<DATA>> read(final String key, final TRANS trans, final Object ... objs) {

+		DAOGetter getter = new DAOGetter(trans,dao,objs); 

+		return get(trans, key, getter);

+//		if(ld!=null) {

+//			return Result.ok(ld);//.emptyList(ld.isEmpty());

+//		}

+//		// Result Result if exists

+//		if(getter.result==null) {

+//			return Result.err(Status.ERR_NotFound, "No Cache or Lookup found on [%s]",dao.table());

+//		}

+//		return getter.result;

+	}

+	

+	@Override

+	public Result<List<DATA>> read(TRANS trans, DATA data) {

+		return read(trans,dao.keyFrom(data));

+	}

+	public Result<Void> update(TRANS trans, DATA data) {

+		Result<Void> d = dao.update(trans, data);

+		if(d.status==Status.OK) {

+		    add(data);

+		} else {

+			trans.error().log(d.errorString());

+		}

+		return d;

+	}

+

+	public Result<Void> delete(TRANS trans, DATA data, boolean reread) {

+		if(reread) { // If reread, get from Cache, if possible, not DB exclusively

+			Result<List<DATA>> rd = read(trans,data);

+			if(rd.notOK()) {

+				trans.error().log(rd.errorString());

+				return Result.err(rd);

+			}

+			if(rd.isEmpty()) {

+				data.invalidate(this);

+				return Result.err(Status.ERR_NotFound,"Not Found");

+			}

+			data = rd.value.get(0);

+		}

+		Result<Void> rv=dao.delete(trans, data, false);

+		data.invalidate(this);

+		return rv;

+	}

+	

+	@Override

+	public void close(TRANS trans) {

+		if(dao!=null) {

+		    dao.close(trans);

+		}

+	}

+	

+

+	@Override

+	public String table() {

+		return dao.table();

+	}

+	

+	public D dao() {

+		return dao;

+	}

+	

+	public void invalidate(TRANS trans, DATA data) {

+		if(info.touch(trans, dao.table(), data.invalidate(this)).notOK()) {

+			trans.error().log("Cannot touch CacheInfo for",dao.table());

+		}

+	}

+}
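
A hedged wiring sketch; RoleDAO and cacheInfoDAO are hypothetical:

    CachedDAO<AuthzTrans,RoleDAO,RoleDAO.Data> roles =
            CachedDAO.create(roleDAO, cacheInfoDAO, 32 /* segments */);
    Result<List<RoleDAO.Data>> r = roles.read(trans, "org.onap.aaf", "admin");
    if (r.isOK() && !r.isEmpty()) {
        // served from cache, or freshly loaded and now cached
    }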

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/CassAccess.java b/authz-cass/src/main/java/org/onap/aaf/dao/CassAccess.java
new file mode 100644
index 0000000..79bd6e0
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/CassAccess.java
@@ -0,0 +1,220 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import java.io.IOException;

+import java.util.ArrayList;

+import java.util.List;

+

+import org.onap.aaf.authz.env.AuthzEnv;

+

+import org.onap.aaf.cadi.routing.GreatCircle;

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.Env;

+import org.onap.aaf.inno.env.util.Split;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.Cluster.Builder;

+import com.datastax.driver.core.policies.DCAwareRoundRobinPolicy;

+

+public class CassAccess {

+	public static final String KEYSPACE = "authz";

+	public static final String CASSANDRA_CLUSTERS = "cassandra.clusters";

+	public static final String CASSANDRA_CLUSTERS_PORT = "cassandra.clusters.port";

+	public static final String CASSANDRA_CLUSTERS_USER_NAME = "cassandra.clusters.user";

+	public static final String CASSANDRA_CLUSTERS_PASSWORD = "cassandra.clusters.password";

+	public static final String CASSANDRA_RESET_EXCEPTIONS = "cassandra.reset.exceptions";

+	public static final String LATITUDE = "LATITUDE";

+	public static final String LONGITUDE = "LONGITUDE";

+	private static final List<Resettable> resetExceptions = new ArrayList<Resettable>();

+	public static final String ERR_ACCESS_MSG = "Accessing Backend";

+	private static Builder cb = null;

+

+	/**

+	 * To create DCAwareRoundRobin Policy:

+	 * 	 Need Properties

+	 * 		LATITUDE (or AFT_LATITUDE)

+	 * 		LONGITUDE (or AFT_LONGITUDE)

+	 * 		CASSANDRA CLUSTERS with additional information:

+	 * 			machine:DC:lat:long,machine:DC:lat:long

+	 * @param env

+	 * @param prefix

+	 * @return

+	 * @throws APIException

+	 * @throws IOException

+	 */

+

+	@SuppressWarnings("deprecation")

+	public static synchronized Cluster cluster(Env env, String prefix) throws APIException, IOException {

+		if(cb == null) {

+			String pre;

+			if(prefix==null) {

+				pre="";

+			} else {

+				env.info().log("Cassandra Connection for ",prefix);

+				pre = prefix+'.';

+			}

+			cb = Cluster.builder();

+			String str = env.getProperty(pre+CASSANDRA_CLUSTERS_PORT,"9042");

+			if(str!=null) {

+				env.init().log("Cass Port = ",str );

+				cb.withPort(Integer.parseInt(str));

+			}

+			str = env.getProperty(pre+CASSANDRA_CLUSTERS_USER_NAME,null);

+			if(str!=null) {

+				env.init().log("Cass User = ",str );

+				String epass = env.getProperty(pre + CASSANDRA_CLUSTERS_PASSWORD,null);

+				if(epass==null) {

+					throw new APIException("No Password configured for " + str);

+				}

+				//TODO Figure out way to ensure Decryptor setting in AuthzEnv

+				if(env instanceof AuthzEnv) {

+					cb.withCredentials(str,((AuthzEnv)env).decrypt(epass,true));

+				} else {

+					cb.withCredentials(str, env.decryptor().decrypt(epass));

+				}

+			}

+	

+			str = env.getProperty(pre+CASSANDRA_RESET_EXCEPTIONS,null);

+			if(str!=null) {

+				env.init().log("Cass ResetExceptions = ",str );

+				for(String ex : Split.split(',', str)) {

+					resetExceptions.add(new Resettable(env,ex));

+				}

+			}

+	

+			str = env.getProperty(LATITUDE,env.getProperty("AFT_LATITUDE",null));

+			Double lat = str!=null?Double.parseDouble(str):null;

+			str = env.getProperty(LONGITUDE,env.getProperty("AFT_LONGITUDE",null));

+			Double lon = str!=null?Double.parseDouble(str):null;

+			if(lat == null || lon == null) {

+				throw new APIException("LATITUDE(or AFT_LATITUDE) and/or LONGITUDE(or AFT_LATITUDE) are not set");

+			}

+			

+			env.init().printf("Service Latitude,Longitude = %f,%f",lat,lon);

+			

+			str = env.getProperty(pre+CASSANDRA_CLUSTERS,"localhost");

+			env.init().log("Cass Clusters = ",str );

+			String[] machs = Split.split(',', str);

+			String[] cpoints = new String[machs.length];

+			String bestDC = null;

+			int numInBestDC = 1;

+			double mlat, mlon,temp,distance = -1.0;

+			for(int i=0;i<machs.length;++i) {

+				String[] minfo = Split.split(':',machs[i]);

+				if(minfo.length>0) {

+					cpoints[i]=minfo[0];

+				}

+			

+				// Calc closest DC with Great Circle

+				if(minfo.length>3) {

+					mlat = Double.parseDouble(minfo[2]);

+					mlon = Double.parseDouble(minfo[3]);

+					if((temp=GreatCircle.calc(lat, lon, mlat, mlon)) > distance) {

+						distance = temp;

+						if(bestDC!=null && bestDC.equals(minfo[1])) {

+							++numInBestDC;

+						} else {

+							bestDC = minfo[1];

+							numInBestDC = 1;

+						}

+					} else {

+						if(bestDC!=null && bestDC.equals(minfo[1])) {

+							++numInBestDC;

+						}

+					}

+				}

+			}

+			

+			cb.addContactPoints(cpoints);

+			

+			if(bestDC!=null) {

+				// 8/26/2016 Management has determined that Accuracy is preferred over speed in bad situations

+				// Local DC Aware Load Balancing appears to have the highest normal performance, with the best

+				// Degraded Accuracy

+				cb.withLoadBalancingPolicy(new DCAwareRoundRobinPolicy(

+						bestDC, numInBestDC, true /*allow LocalDC to look at other DCs for LOCAL_QUORUM */));

+				env.init().printf("Cassandra configured for DCAwareRoundRobinPolicy at %s with emergency remote of up to %d node(s)"

+					,bestDC, numInBestDC);

+			} else {

+				env.init().printf("Cassandra is using Default Policy, which is not DC aware");

+			}

+		}

+		return cb.build();

+	}

+	

+	private static class Resettable {

+		private Class<? extends Exception> cls;

+		private List<String> messages;

+		

+		@SuppressWarnings("unchecked")

+		public Resettable(Env env, String propData) throws APIException {

+			if(propData!=null && propData.length()>1) {

+				String[] split = Split.split(':', propData);

+				if(split.length>0) {

+					try {

+						cls = (Class<? extends Exception>)Class.forName(split[0]);

+					} catch (ClassNotFoundException e) {

+						throw new APIException("Declared Cassandra Reset Exception, " + propData + ", cannot be ClassLoaded");

+					}

+				}

+				if(split.length>1) {

+					messages=new ArrayList<String>();

+					for(int i=1;i<split.length;++i) {

+						String str = split[i];

+						int start = str.startsWith("\"")?1:0;

+						int end = str.length()-(str.endsWith("\"")?1:0);

+						messages.add(split[i].substring(start, end));

+					}

+				} else {

+					messages = null;

+				}

+			}

+		}

+		

+		public boolean matches(Exception ex) {

+			if(ex.getClass().equals(cls)) {

+				if(messages!=null) {

+					String msg = ex.getMessage();

+					for(String m : messages) {

+						if(msg.contains(m)) {

+							return true;

+						}

+					}

+				}

+			}

+			return false;

+		}

+	}

+	

+	public static final boolean isResetException(Exception e) {

+		if(e==null) {

+			return true;

+		}

+		for(Resettable re : resetExceptions) {

+			if(re.matches(e)) {

+				return true;

+			}

+		}

+		return false;

+	}

+}
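
For the DC-aware policy above to engage, each contact point listed in cassandra.clusters must carry its data center name and coordinates in machine:DC:lat:long form; LATITUDE/LONGITUDE locate the service itself. A sketch of the relevant properties, with hypothetical hostnames and coordinates:

    cassandra.clusters=cass1.example.org:DC1:38.62:-90.19,cass2.example.org:DC2:40.71:-74.00
    cassandra.clusters.port=9042
    LATITUDE=38.62
    LONGITUDE=-90.19

Entries in cassandra.reset.exceptions follow the colon-separated shape the Resettable parser expects, e.g. com.example.SomeDriverException:"connection reset":"timed out" (class and messages hypothetical).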

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/CassDAOImpl.java b/authz-cass/src/main/java/org/onap/aaf/dao/CassDAOImpl.java
new file mode 100644
index 0000000..61db914
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/CassDAOImpl.java
@@ -0,0 +1,328 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import java.io.ByteArrayInputStream;

+import java.io.DataInputStream;

+import java.lang.reflect.Field;

+import java.nio.ByteBuffer;

+import java.util.List;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.Status;

+

+import org.onap.aaf.inno.env.TransStore;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.ConsistencyLevel;

+import com.datastax.driver.core.ResultSet;

+import com.datastax.driver.core.ResultSetFuture;

+

+/**

+ * CassDAOImpl

+ *

+ * Deal with the essentials of Interaction with Cassandra DataStore for all Cassandra DAOs

+ *

+ *

+ * @param <DATA>

+ */

+public class CassDAOImpl<TRANS extends TransStore,DATA> extends AbsCassDAO<TRANS, DATA> implements DAO<TRANS,DATA> {

+	public static final String USER_NAME = "__USER_NAME__";

+	protected static final String CREATE_SP = "CREATE ";

+	protected static final String UPDATE_SP = "UPDATE ";

+	protected static final String DELETE_SP = "DELETE ";

+	protected static final String SELECT_SP = "SELECT ";

+

+	protected final String C_TEXT = getClass().getSimpleName() + " CREATE";

+	protected final String R_TEXT = getClass().getSimpleName() + " READ";

+	protected final String U_TEXT = getClass().getSimpleName() + " UPDATE";

+	protected final String D_TEXT = getClass().getSimpleName() + " DELETE";

+	private String table;

+	

+	protected final ConsistencyLevel readConsistency,writeConsistency;

+	

+	// Settable only by CachedDAO

+	protected Cached<?, ?> cache;

+

+	/**

+	 * A Constructor from the originating Cluster.  This DAO will open the Session at need,

+	 * and shutdown the session when "close()" is called.

+	 *

+	 * @param cluster

+	 * @param keyspace

+	 * @param dataClass

+	 */

+	public CassDAOImpl(TRANS trans, String name, Cluster cluster, String keyspace, Class<DATA> dataClass, String table, ConsistencyLevel read, ConsistencyLevel write) {

+		super(trans, name, cluster,keyspace,dataClass);

+		this.table = table;

+		readConsistency = read;

+		writeConsistency = write;

+	}

+	

+	/**

+	 * A Constructor to share Session with other DAOs.

+	 *

+ * This constructor gets the Session and Cluster information from the calling DAO, and won't

+	 * touch the Session on closure.

+	 *

+	 * @param aDao

+	 * @param dataClass

+	 */

+	public CassDAOImpl(TRANS trans, String name, AbsCassDAO<TRANS,?> aDao, Class<DATA> dataClass, String table, ConsistencyLevel read, ConsistencyLevel write) {

+		super(trans, name, aDao,dataClass);

+		this.table = table;

+		readConsistency = read;

+		writeConsistency = write;

+	}

+

+	protected PSInfo createPS;

+	protected PSInfo readPS;

+	protected PSInfo updatePS;

+	protected PSInfo deletePS;

+	private boolean async=false;

+

+	public void async(boolean bool) {

+		async = bool;

+	}

+

+	public final String[] setCRUD(TRANS trans, String table, Class<?> dc,Loader<DATA> loader) {

+		return setCRUD(trans, table, dc, loader, -1);

+	}

+	

+	public final String[] setCRUD(TRANS trans, String table, Class<?> dc,Loader<DATA> loader, int max) {

+		Field[] fields = dc.getDeclaredFields();

+		int end = max>=0 && max<fields.length?max:fields.length;

+		// get keylimit from a non-null Loader

+		int keylimit = loader.keylimit();

+

+		StringBuilder sbfc = new StringBuilder();

+		StringBuilder sbq = new StringBuilder();

+		StringBuilder sbwc = new StringBuilder();

+		StringBuilder sbup = new StringBuilder();

+

+		if(keylimit>0) {

+			for(int i=0;i<end;++i) {

+				if(i>0) {

+					sbfc.append(',');

+					sbq.append(',');

+					if(i<keylimit) {

+						sbwc.append(" AND ");

+					}

+				}

+				sbfc.append(fields[i].getName());

+				sbq.append('?');

+				if(i>=keylimit) {

+					if(i>keylimit) {

+						sbup.append(',');

+					}

+					sbup.append(fields[i].getName());

+					sbup.append("=?");

+				}

+				if(i<keylimit) {

+					sbwc.append(fields[i].getName());

+					sbwc.append("=?");

+				}

+			}

+

+			createPS = new PSInfo(trans, "INSERT INTO " + table + " ("+ sbfc +") VALUES ("+ sbq +");",loader,writeConsistency);

+

+			readPS = new PSInfo(trans, "SELECT " + sbfc + " FROM " + table + " WHERE " + sbwc + ';',loader,readConsistency);

+

+			// Note: UPDATES can't compile if there are no fields besides keys... Use "Insert"

+			if(sbup.length()==0) {

+				updatePS = createPS; // the same as an insert

+			} else {

+				updatePS = new PSInfo(trans, "UPDATE " + table + " SET " + sbup + " WHERE " + sbwc + ';',loader,writeConsistency);

+			}

+

+			deletePS = new PSInfo(trans, "DELETE FROM " + table + " WHERE " + sbwc + ';',loader,writeConsistency);

+		}

+		return new String[] {sbfc.toString(), sbq.toString(), sbup.toString(), sbwc.toString()};

+	}

+

+	public void replace(CRUD crud, PSInfo psInfo) {

+		switch(crud) {

+			case create: createPS = psInfo; break;

+			case read:   readPS = psInfo; break;

+			case update: updatePS = psInfo; break;

+			case delete: deletePS = psInfo; break;

+		}

+	}

+

+	public void disable(CRUD crud) {

+		switch(crud) {

+			case create: createPS = null; break;

+			case read:   readPS = null; break;

+			case update: updatePS = null; break;

+			case delete: deletePS = null; break;

+		}

+	}

+

+	

+	/**

+	 * Given a DATA object, extract the individual elements from the Data into an Object Array for the

+	 * execute element.

+	 */

+	public Result<DATA> create(TRANS trans, DATA data)  {

+		if(createPS==null) {

+			Result.err(Result.ERR_NotImplemented,"Create is disabled for %s",getClass().getSimpleName());

+		}

+		if(async) {

+			Result<ResultSetFuture> rs = createPS.execAsync(trans, C_TEXT, data);

+			if(rs.notOK()) {

+				return Result.err(rs);

+			}

+		} else {

+			Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);

+			if(rs.notOK()) {

+				return Result.err(rs);

+			}

+		}

+		wasModified(trans, CRUD.create, data);

+		return Result.ok(data);

+	}

+

+	/**

+	 * Read the Unique Row associated with Full Keys

+	 */

+	public Result<List<DATA>> read(TRANS trans, DATA data) {

+		if(readPS==null) {

+			Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());

+		}

+		return readPS.read(trans, R_TEXT, data);

+	}

+

+	public Result<List<DATA>> read(TRANS trans, Object ... key) {

+		if(readPS==null) {

+			Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());

+		}

+		return readPS.read(trans, R_TEXT, key);

+	}

+

+	public Result<Void> update(TRANS trans, DATA data) {

+		if(updatePS==null) {

+			Result.err(Result.ERR_NotImplemented,"Update is disabled for %s",getClass().getSimpleName());

+		}

+		if(async) {

+			Result<ResultSetFuture> rs = updatePS.execAsync(trans, U_TEXT, data);

+			if(rs.notOK()) {

+				return Result.err(rs);

+			}

+		} else {

+			Result<ResultSet> rs = updatePS.exec(trans, U_TEXT, data);

+			if(rs.notOK()) {

+				return Result.err(rs);

+			}

+		}

+		

+		wasModified(trans, CRUD.update, data);

+		return Result.ok();

+	}

+

+	// This method Sig for Cached...

+	public Result<Void> delete(TRANS trans, DATA data, boolean reread) {

+		if(deletePS==null) {

+			Result.err(Result.ERR_NotImplemented,"Delete is disabled for %s",getClass().getSimpleName());

+		}

+		// Since Deleting will be stored off, for possible re-constitution, need the whole thing

+		if(reread) {

+			Result<List<DATA>> rd = read(trans,data);

+			if(rd.notOK()) {

+				return Result.err(rd);

+			}

+			if(rd.isEmpty()) {

+				return Result.err(Status.ERR_NotFound,"Not Found");

+			}

+			for(DATA d : rd.value) { 

+				if(async) {

+					Result<ResultSetFuture> rs = deletePS.execAsync(trans, D_TEXT, d);

+					if(rs.notOK()) {

+						return Result.err(rs);

+					}

+				} else {

+					Result<ResultSet> rs = deletePS.exec(trans, D_TEXT, d);

+					if(rs.notOK()) {

+						return Result.err(rs);

+					}

+				}

+				wasModified(trans, CRUD.delete, d);

+			}

+		} else {

+			if(async) {

+				Result<ResultSetFuture> rs = deletePS.execAsync(trans, D_TEXT, data);

+				if(rs.notOK()) {

+					return Result.err(rs);

+				}

+			} else {

+				Result<ResultSet> rs = deletePS.exec(trans, D_TEXT, data);

+				if(rs.notOK()) {

+					return Result.err(rs);

+				}

+			}

+			wasModified(trans, CRUD.delete, data);

+		}

+		return Result.ok();

+	}

+	

+	public final Object[] keyFrom(DATA data) {

+		return createPS.keyFrom(data);

+	}

+

+	@Override

+	public String table() {

+		return table;

+	}

+	

+	public static final String CASS_READ_CONSISTENCY="cassandra.readConsistency";

+	public static final String CASS_WRITE_CONSISTENCY="cassandra.writeConsistency";

+	protected static ConsistencyLevel readConsistency(AuthzTrans trans, String table) {

+		String prop = trans.getProperty(CASS_READ_CONSISTENCY+'.'+table);

+		if(prop==null) {

+			prop = trans.getProperty(CASS_READ_CONSISTENCY);

+			if(prop==null) {

+				return ConsistencyLevel.ONE; // this is Cassandra Default

+			}

+		}

+		return ConsistencyLevel.valueOf(prop);

+	}

+

+	protected static ConsistencyLevel writeConsistency(AuthzTrans trans, String table) {

+		String prop = trans.getProperty(CASS_WRITE_CONSISTENCY+'.'+table);

+		if(prop==null) {

+			prop = trans.getProperty(CASS_WRITE_CONSISTENCY);

+			if(prop==null) {

+				return ConsistencyLevel.ONE; // this is Cassandra Default

+			}

+		}

+		return ConsistencyLevel.valueOf(prop);

+	}

+

+	public static DataInputStream toDIS(ByteBuffer bb) {

+		byte[] b = bb.array();

+		return new DataInputStream(

+			new ByteArrayInputStream(b,bb.position(),bb.limit())

+		);

+	}

+

+

+}
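
setCRUD derives all four prepared statements from the declared field order of the data class, with the first keylimit fields forming the WHERE clause. For a hypothetical Data class with fields (ns, name, description), keylimit 2 and table "sample", the generated CQL would be:

    INSERT INTO sample (ns,name,description) VALUES (?,?,?);
    SELECT ns,name,description FROM sample WHERE ns=? AND name=?;
    UPDATE sample SET description=? WHERE ns=? AND name=?;
    DELETE FROM sample WHERE ns=? AND name=?;

Consistency is tunable per table: a property such as cassandra.readConsistency.sample=LOCAL_QUORUM overrides the global cassandra.readConsistency, with ConsistencyLevel.ONE as the final default.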

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/DAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/DAO.java
new file mode 100644
index 0000000..acdb36d
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/DAO.java
@@ -0,0 +1,44 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import org.onap.aaf.authz.layer.Result;

+

+import org.onap.aaf.inno.env.Trans;

+

+

+/**

+ * DataAccessObject Interface

+ *

+ * Extend the ReadOnly form (for Get), and add manipulation methods

+ *

+ * @param <DATA>

+ */

+public interface DAO<TRANS extends Trans,DATA> extends DAO_RO<TRANS,DATA> {

+	public Result<DATA> create(TRANS trans, DATA data);

+	public Result<Void> update(TRANS trans, DATA data);

+	// In many cases, the data has been correctly read first, so we shouldn't read again

+	// Use reread=true if you are using DATA with only a Key

+	public Result<Void> delete(TRANS trans, DATA data, boolean reread);

+	public Object[] keyFrom(DATA data);

+}
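
A caller-side sketch of the reread flag on delete, with a hypothetical MyDAO whose Data is populated with key fields only:

    MyDAO.Data key = new MyDAO.Data();
    key.id = someId;                       // only the key fields are set
    // reread=true: the DAO re-reads the full row first, so the complete
    // record is available (e.g. for history) before it is deleted.
    Result<Void> rv = myDAO.delete(trans, key, true);
    // reread=false would delete blindly by key, skipping the extra read.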

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/DAOException.java b/authz-cass/src/main/java/org/onap/aaf/dao/DAOException.java
new file mode 100644
index 0000000..85b8c84
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/DAOException.java
@@ -0,0 +1,52 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+public class DAOException extends Exception {

+

+	/**

+	 * 

+	 */

+	private static final long serialVersionUID = 1527904125585539823L;

+

+//    // TODO -   enum in result class == is our intended design, currently the DAO layer does not use Result<RV> so we still use these for now

+//    public final static DAOException RoleNotFoundDAOException = new DAOException("RoleNotFound");

+//    public final static DAOException PermissionNotFoundDAOException = new DAOException("PermissionNotFound");

+//    public final static DAOException UserNotFoundDAOException = new DAOException("UserNotFound");

+

+	public DAOException() {

+	}

+

+	public DAOException(String message) {

+		super(message);

+	}

+

+	public DAOException(Throwable cause) {

+		super(cause);

+	}

+

+	public DAOException(String message, Throwable cause) {

+		super(message, cause);

+	}

+

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/DAO_RO.java b/authz-cass/src/main/java/org/onap/aaf/dao/DAO_RO.java
new file mode 100644
index 0000000..a853675
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/DAO_RO.java
@@ -0,0 +1,71 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import java.util.List;

+

+import org.onap.aaf.authz.layer.Result;

+

+import org.onap.aaf.inno.env.Trans;

+

+/**

+ * DataAccessObject - ReadOnly

+ * 

+ * It is useful to have a ReadOnly part of the interface for CachedDAO

+ * 

+ * Normal DAOs will implement full DAO

+ * 

+ *

+ * @param <DATA>

+ */

+public interface DAO_RO<TRANS extends Trans,DATA> {

+	/**

+	 * Get a List of Data given Key of Object Array

+	 * @param objs

+	 * @return

+	 * @throws DAOException

+	 */

+	public Result<List<DATA>> read(TRANS trans, Object ... key);

+

+	/**

+	 * Get a List of Data given Key of DATA Object

+	 * @param trans

+	 * @param key

+	 * @return

+	 * @throws DAOException

+	 */

+	public Result<List<DATA>> read(TRANS trans, DATA key);

+

+	/**

+	 * close DAO

+	 */

+	public void close(TRANS trans);

+

+	/**

+	 * Return name of referenced Data

+	 * @return

+	 */

+	public String table();

+

+

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/Loader.java b/authz-cass/src/main/java/org/onap/aaf/dao/Loader.java
new file mode 100644
index 0000000..42a73f4
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/Loader.java
@@ -0,0 +1,214 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.IOException;

+import java.util.ArrayList;

+import java.util.Collection;

+import java.util.HashMap;

+import java.util.HashSet;

+import java.util.List;

+import java.util.Map;

+import java.util.Map.Entry;

+import java.util.Set;

+

+import com.datastax.driver.core.Row;

+

+public abstract class Loader<DATA> {

+	private int keylimit;

+	public Loader(int keylimit) {

+		this.keylimit = keylimit;

+	}

+	

+	public int keylimit() {

+		return keylimit;

+	}

+	

+	protected abstract DATA load(DATA data, Row row);

+	protected abstract void key(DATA data, int idx, Object[] obj);

+	protected abstract void body(DATA data, int idx, Object[] obj);

+

+	public final Object[] extract(DATA data, int size, CassDAOImpl.CRUD type) {

+		Object[] rv=null;

+		switch(type) {

+			case delete:

+				rv = new Object[keylimit()];

+				key(data,0,rv);

+				break;

+			case update:

+				rv = new Object[size];

+				body(data,0,rv);

+				int body = size-keylimit();

+				if(body>0) {

+				    key(data,body,rv);

+				}

+				break;

+			default:

+				rv = new Object[size];

+				key(data,0,rv);

+				if(size>keylimit()) {

+				    body(data,keylimit(),rv);

+				}

+				break;

+		}

+		return rv;

+	}

+	

+	public static void writeString(DataOutputStream os, String s) throws IOException {

+		if(s==null) {

+			os.writeInt(-1);

+		} else {

+			switch(s.length()) {

+				case 0:

+					os.writeInt(0);

+					break;

+				default:

+					byte[] bytes = s.getBytes();

+					os.writeInt(bytes.length);

+					os.write(bytes);

+			}

+		}

+	}

+	

+	/**

+	 * Read a length-prefixed String, reusing the passed buffer when it is large enough.

+	 * 

+	 * @param is

+	 * @param _buff reusable buffer; a larger one is allocated only for oversized strings

+	 * @return

+	 * @throws IOException

+	 */

+	public static String readString(DataInputStream is, byte[] _buff) throws IOException {

+		int l = is.readInt();

+		byte[] buff = _buff;

+		switch(l) {

+			case -1: return null;

+			case  0: return "";

+			default:

+				// Cover case where there is a large string, without always allocating a large buffer.

+				if(l>buff.length) {

+				    buff = new byte[l];

+				}

+				is.readFully(buff,0,l); // read() may return fewer bytes; readFully guarantees l

+				return new String(buff,0,l);

+		}

+	}

+

+	/**

+	 * Write a set with proper sizing

+	 * 

+	 * Note: at the moment, this is just String.  Probably can develop system where types

+	 * are supported too... but not now.

+	 * 

+	 * @param os

+	 * @param set

+	 * @throws IOException

+	 */

+	public static void writeStringSet(DataOutputStream os, Collection<String> set) throws IOException {

+		if(set==null) {

+			os.writeInt(-1);

+		} else {

+			os.writeInt(set.size());

+			for(String s : set) {

+				writeString(os, s);

+			}

+		}

+

+	}

+	

+	public static Set<String> readStringSet(DataInputStream is, byte[] buff) throws IOException {

+		int l = is.readInt();

+		if(l<0) {

+		    return null;

+		}

+		Set<String> set = new HashSet<String>(l);

+		for(int i=0;i<l;++i) {

+			set.add(readString(is,buff));

+		}

+		return set;

+	}

+	

+	public static List<String> readStringList(DataInputStream is, byte[] buff) throws IOException {

+		int l = is.readInt();

+		if(l<0) {

+		    return null;

+		}

+		List<String> list = new ArrayList<String>(l);

+		for(int i=0;i<l;++i) {

+			list.add(Loader.readString(is,buff));

+		}

+		return list;

+	}

+

+	/** 

+	 * Write a map

+	 * @param os

+	 * @param map

+	 * @throws IOException

+	 */

+	public static void writeStringMap(DataOutputStream os, Map<String,String> map) throws IOException {

+		if(map==null) {

+			os.writeInt(-1);

+		} else {

+			Set<Entry<String, String>> es = map.entrySet();

+			os.writeInt(es.size());

+			for(Entry<String,String> e : es) {

+				writeString(os, e.getKey());

+				writeString(os, e.getValue());

+			}

+		}

+

+	}

+

+	public static Map<String,String> readStringMap(DataInputStream is, byte[] buff) throws IOException {

+		int l = is.readInt();

+		if(l<0) {

+		    return null;

+		}

+		Map<String,String> map = new HashMap<String,String>(l);

+		for(int i=0;i<l;++i) {

+			String key = readString(is,buff);

+			map.put(key,readString(is,buff));

+		}

+		return map;

+	}

+	public static void writeHeader(DataOutputStream os, int magic, int version) throws IOException {

+		os.writeInt(magic);

+		os.writeInt(version);

+	}

+	

+	public static int readHeader(DataInputStream is, final int magic, final int version) throws IOException {

+		if(is.readInt()!=magic) {

+		    throw new IOException("Corrupted Data Stream");

+		}

+		int v = is.readInt();

+		if(version<0 || v>version) {

+		    throw new IOException("Unsupported Data Version: " + v);

+		}

+		return v;

+	}

+

+}

+
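
A minimal round-trip through the Loader stream helpers above; the magic number, buffer size and values are illustrative only:

    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream os = new DataOutputStream(bos);
    Loader.writeHeader(os, 0xCAFE, 1);        // magic, version
    Loader.writeString(os, "authz");          // int length 5, then the bytes
    Loader.writeString(os, null);             // int -1 marks null

    DataInputStream is = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
    byte[] buff = new byte[256];              // reused to avoid per-string allocation
    int v = Loader.readHeader(is, 0xCAFE, 1); // throws IOException on bad magic/version
    String s1 = Loader.readString(is, buff);  // "authz"
    String s2 = Loader.readString(is, buff);  // null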

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/Streamer.java b/authz-cass/src/main/java/org/onap/aaf/dao/Streamer.java
new file mode 100644
index 0000000..f645dd6
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/Streamer.java
@@ -0,0 +1,32 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.IOException;

+

+public interface Streamer<DATA> {

+	public abstract void marshal(DATA data, DataOutputStream os) throws IOException;

+	public abstract void unmarshal(DATA data, DataInputStream is) throws IOException;

+}
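
A sketch of a Streamer implementation built on the Loader helpers; SampleData and its single field are hypothetical, and the shared buffer makes an instance single-threaded:

    public class SampleStreamer implements Streamer<SampleData> {
        private static final int MAGIC = 0xBEEF, VERSION = 1;
        private final byte[] buff = new byte[256]; // not thread-safe; one per thread

        @Override
        public void marshal(SampleData data, DataOutputStream os) throws IOException {
            Loader.writeHeader(os, MAGIC, VERSION);
            Loader.writeString(os, data.name);
        }

        @Override
        public void unmarshal(SampleData data, DataInputStream is) throws IOException {
            Loader.readHeader(is, MAGIC, VERSION);
            data.name = Loader.readString(is, buff);
        }
    }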

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/Touchable.java b/authz-cass/src/main/java/org/onap/aaf/dao/Touchable.java
new file mode 100644
index 0000000..dc3ab05
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/Touchable.java
@@ -0,0 +1,27 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+public interface Touchable {

+	 // Or make all DAOs accept list of CIDAOs...

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCertDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCertDAO.java
new file mode 100644
index 0000000..567bd06
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCertDAO.java
@@ -0,0 +1,55 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cached;

+

+import java.util.List;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.CIDAO;

+import org.onap.aaf.dao.CachedDAO;

+import org.onap.aaf.dao.aaf.cass.CertDAO;

+

+public class CachedCertDAO extends CachedDAO<AuthzTrans, CertDAO, CertDAO.Data> {

+	public CachedCertDAO(CertDAO dao, CIDAO<AuthzTrans> info) {

+		super(dao, info, CertDAO.CACHE_SEG);

+	}

+	

+	/**

+	 * Pass through Cert ID Lookup

+	 * 

+	 * @param trans

+	 * @param ns

+	 * @return

+	 */

+	

+	public Result<List<CertDAO.Data>> readID(AuthzTrans trans, final String id) {

+		return dao().readID(trans, id);

+	}

+	

+	public Result<List<CertDAO.Data>> readX500(AuthzTrans trans, final String x500) {

+		return dao().readX500(trans, x500);

+	}

+

+

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCredDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCredDAO.java
new file mode 100644
index 0000000..1467503
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCredDAO.java
@@ -0,0 +1,67 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cached;

+

+import java.util.List;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.CIDAO;

+import org.onap.aaf.dao.CachedDAO;

+import org.onap.aaf.dao.aaf.cass.CredDAO;

+import org.onap.aaf.dao.aaf.cass.Status;

+

+public class CachedCredDAO extends CachedDAO<AuthzTrans, CredDAO, CredDAO.Data> {

+	public CachedCredDAO(CredDAO dao, CIDAO<AuthzTrans> info) {

+		super(dao, info, CredDAO.CACHE_SEG);

+	}

+	

+	/**

+	 * Pass through Cred Lookup

+	 * 

+	 * Unlike Role and Perm, we don't need or want to cache these elements... Only used for NS Delete.

+	 * 

+	 * @param trans

+	 * @param ns

+	 * @return

+	 */

+	public Result<List<CredDAO.Data>> readNS(AuthzTrans trans, final String ns) {

+		

+		return dao().readNS(trans, ns);

+	}

+	

+	public Result<List<CredDAO.Data>> readID(AuthzTrans trans, final String id) {

+		DAOGetter getter = new DAOGetter(trans,dao()) {

+			public Result<List<CredDAO.Data>> call() {

+				return dao().readID(trans, id);

+			}

+		};

+		

+		Result<List<CredDAO.Data>> lurd = get(trans, id, getter);

+		if(lurd.isOK() && lurd.isEmpty()) {

+			return Result.err(Status.ERR_UserNotFound,"No User Cred found");

+		}

+		return lurd;

+	}

+

+}
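
readID above is the recurring cache pattern in these DAOs: the anonymous DAOGetter wraps the real Cassandra read, and get() invokes it only on a cache miss for that key. A hypothetical caller:

    Result<List<CredDAO.Data>> creds = cachedCredDAO.readID(trans, "bob@people.example.com");
    if (creds.isOK()) {
        // served from cache when warm; an empty hit is mapped to ERR_UserNotFound
    }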

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedNSDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedNSDAO.java
new file mode 100644
index 0000000..aae74e2
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedNSDAO.java
@@ -0,0 +1,34 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cached;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.dao.CIDAO;

+import org.onap.aaf.dao.CachedDAO;

+import org.onap.aaf.dao.aaf.cass.NsDAO;

+

+public class CachedNSDAO extends CachedDAO<AuthzTrans, NsDAO, NsDAO.Data> {

+	public CachedNSDAO(NsDAO dao, CIDAO<AuthzTrans> info) {

+		super(dao, info, NsDAO.CACHE_SEG);

+	}

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedPermDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedPermDAO.java
new file mode 100644
index 0000000..7d4c7fe
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedPermDAO.java
@@ -0,0 +1,125 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cached;

+

+import java.util.List;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.CIDAO;

+import org.onap.aaf.dao.CachedDAO;

+import org.onap.aaf.dao.aaf.cass.PermDAO;

+import org.onap.aaf.dao.aaf.cass.RoleDAO;

+import org.onap.aaf.dao.aaf.cass.Status;

+import org.onap.aaf.dao.aaf.cass.PermDAO.Data;

+

+public class CachedPermDAO extends CachedDAO<AuthzTrans,PermDAO, PermDAO.Data> {

+

+	public CachedPermDAO(PermDAO dao, CIDAO<AuthzTrans> info) {

+		super(dao, info, PermDAO.CACHE_SEG);

+	}

+

+	public Result<List<Data>> readNS(AuthzTrans trans, final String ns) {

+		DAOGetter getter = new DAOGetter(trans,dao()) {

+			public Result<List<Data>> call() {

+				return dao.readNS(trans, ns);

+			}

+		};

+		

+		Result<List<Data>> lurd = get(trans, ns, getter);

+		if(lurd.isOKhasData()) {

+			return lurd;

+		} else {

+			

+		}

+//		if(getter.result==null) {

+//			if(lurd==null) {

+				return Result.err(Status.ERR_PermissionNotFound,"No Permission found - " + lurd.details);

+//			} else {

+//				return Result.ok(lurd);

+//			}

+//		}

+//		return getter.result;

+	}

+

+	public Result<List<Data>> readChildren(AuthzTrans trans, final String ns, final String type) {

+		return dao().readChildren(trans,ns,type);

+	}

+

+	/**

+	 * 

+	 * @param trans

+	 * @param ns

+	 * @param type

+	 * @return

+	 */

+	public Result<List<Data>> readByType(AuthzTrans trans, final String ns, final String type) {

+		DAOGetter getter = new DAOGetter(trans,dao()) {

+			public Result<List<Data>> call() {

+				return dao.readByType(trans, ns, type);

+			}

+		};

+		

+		// Note: Can reuse index1 here, because there is no name collision versus response

+		Result<List<Data>> lurd = get(trans, ns+'|'+type, getter);

+		if(lurd.isOK() && lurd.isEmpty()) {

+			return Result.err(Status.ERR_PermissionNotFound,"No Permission found");

+		}

+		return lurd;

+	}

+	

+	/**

+	 * Add description to this permission

+	 * 

+	 * @param trans

+	 * @param ns

+	 * @param type

+	 * @param instance

+	 * @param action

+	 * @param description

+	 * @return

+	 */

+	public Result<Void> addDescription(AuthzTrans trans, String ns, String type, 

+			String instance, String action, String description) {

+		//TODO Invalidate?

+		return dao().addDescription(trans, ns, type, instance, action, description);

+	}

+	

+	public Result<Void> addRole(AuthzTrans trans, PermDAO.Data perm, RoleDAO.Data role) {

+		Result<Void> rv = dao().addRole(trans,perm,role.encode());

+		if(trans.debug().isLoggable())

+			trans.debug().log("Adding",role.encode(),"to", perm, "with CachedPermDAO.addRole");

+		invalidate(trans,perm);

+		return rv;

+	}

+

+	public Result<Void> delRole(AuthzTrans trans, Data perm, RoleDAO.Data role) {

+		Result<Void> rv = dao().delRole(trans,perm,role.encode());

+		if(trans.debug().isLoggable())

+			trans.debug().log("Removing",role.encode(),"from", perm, "with CachedPermDAO.delRole");

+		invalidate(trans,perm);

+		return rv;

+	}

+

+

+}
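
readByType composes its cache key as ns + '|' + type, so the entry cannot collide with the plain ns keys that readNS caches. A hypothetical lookup:

    // cached under the key "org.onap.sample|access"
    Result<List<PermDAO.Data>> perms =
        cachedPermDAO.readByType(trans, "org.onap.sample", "access");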

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedRoleDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedRoleDAO.java
new file mode 100644
index 0000000..788efbe
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedRoleDAO.java
@@ -0,0 +1,107 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cached;

+

+import java.util.List;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.CIDAO;

+import org.onap.aaf.dao.CachedDAO;

+import org.onap.aaf.dao.aaf.cass.PermDAO;

+import org.onap.aaf.dao.aaf.cass.RoleDAO;

+import org.onap.aaf.dao.aaf.cass.Status;

+import org.onap.aaf.dao.aaf.cass.RoleDAO.Data;

+

+public class CachedRoleDAO extends CachedDAO<AuthzTrans,RoleDAO, RoleDAO.Data> {

+	public CachedRoleDAO(RoleDAO dao, CIDAO<AuthzTrans> info) {

+		super(dao, info, RoleDAO.CACHE_SEG);

+	}

+

+	public Result<List<Data>> readNS(AuthzTrans trans, final String ns) {

+		DAOGetter getter = new DAOGetter(trans,dao()) {

+			public Result<List<Data>> call() {

+				return dao.readNS(trans, ns);

+			}

+		};

+		

+		Result<List<Data>> lurd = get(trans, ns, getter);

+		if(lurd.isOK() && lurd.isEmpty()) {

+			return Result.err(Status.ERR_RoleNotFound,"No Role found");

+		}

+		return lurd;

+	}

+

+	public Result<List<Data>> readName(AuthzTrans trans, final String name) {

+		DAOGetter getter = new DAOGetter(trans,dao()) {

+			public Result<List<Data>> call() {

+				return dao().readName(trans, name);

+			}

+		};

+		

+		Result<List<Data>> lurd = get(trans, name, getter);

+		if(lurd.isOK() && lurd.isEmpty()) {

+			return Result.err(Status.ERR_RoleNotFound,"No Role found");

+		}

+		return lurd;

+	}

+

+	public Result<List<Data>> readChildren(AuthzTrans trans, final String ns, final String name) {

+		// At this point, I'm thinking it's better not to try to cache "*" results

+		// Data probably won't be accurate, and adding it makes every update invalidate most of the cache

+		// 2/4/2014

+		return dao().readChildren(trans,ns,name);

+	}

+

+	public Result<Void> addPerm(AuthzTrans trans, RoleDAO.Data rd, PermDAO.Data perm) {

+		Result<Void> rv = dao().addPerm(trans,rd,perm);

+		if(trans.debug().isLoggable())

+			trans.debug().log("Adding",perm,"to", rd, "with CachedRoleDAO.addPerm");

+		invalidate(trans, rd);

+		return rv;

+	}

+

+	public Result<Void> delPerm(AuthzTrans trans, RoleDAO.Data rd, PermDAO.Data perm) {

+		Result<Void> rv = dao().delPerm(trans,rd,perm);

+		if(trans.debug().isLoggable())

+			trans.debug().log("Removing",perm,"from", rd, "with CachedRoleDAO.addPerm");

+		invalidate(trans, rd);

+		return rv;

+	}

+	

+	/**

+	 * Add description to this role

+	 * 

+	 * @param trans

+	 * @param ns

+	 * @param name

+	 * @param description

+	 * @return

+	 */

+	public Result<Void> addDescription(AuthzTrans trans, String ns, String name, String description) {

+		//TODO Invalidate?

+		return dao().addDescription(trans, ns, name, description);

+

+	}

+

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedUserRoleDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedUserRoleDAO.java
new file mode 100644
index 0000000..68231ea
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedUserRoleDAO.java
@@ -0,0 +1,117 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cached;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.CIDAO;

+import org.onap.aaf.dao.CachedDAO;

+import org.onap.aaf.dao.aaf.cass.Status;

+import org.onap.aaf.dao.aaf.cass.UserRoleDAO;

+import org.onap.aaf.dao.aaf.cass.UserRoleDAO.Data;

+

+import org.onap.aaf.inno.env.Slot;

+

+public class CachedUserRoleDAO extends CachedDAO<AuthzTrans,UserRoleDAO, UserRoleDAO.Data> {

+	private Slot transURSlot;

+

+	public CachedUserRoleDAO(UserRoleDAO dao, CIDAO<AuthzTrans> info) {

+		super(dao, info, UserRoleDAO.CACHE_SEG);

+		transURSlot = dao.transURSlot;

+	}

+

+	/**

+	 * Special Case.  

+	 * User Roles by User are very likely to be called many times in a Transaction, to validate "May User do..."

+	 * Pull result, and make accessible by the Trans, which is always keyed by User.

+	 * @param trans

+	 * @param user

+	 * @return

+	 */

+	public Result<List<Data>> readByUser(AuthzTrans trans, final String user) {

+		DAOGetter getter = new DAOGetter(trans,dao()) {

+			public Result<List<Data>> call() {

+				// If the call is for THIS user, and it exists, get from TRANS, add to TRANS if not.

+				if(user!=null && user.equals(trans.user())) {

+					Result<List<Data>> transLD = trans.get(transURSlot,null);

+					if(transLD==null ) {

+						transLD = dao.readByUser(trans, user);

+					}

+					return transLD;

+				} else {

+					return dao.readByUser(trans, user);

+				}

+			}

+		};

+		Result<List<Data>> lurd = get(trans, user, getter);

+		if(lurd.isOK() && lurd.isEmpty()) {

+			return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for [%s]",user);

+		}

+		return lurd;

+	}

+

+	

+	public Result<List<Data>> readByRole(AuthzTrans trans, final String role) {

+		DAOGetter getter = new DAOGetter(trans,dao()) {

+			public Result<List<Data>> call() {

+				return dao.readByRole(trans, role);

+			}

+		};

+		Result<List<Data>> lurd = get(trans, role, getter);

+		if(lurd.isOK() && lurd.isEmpty()) {

+			return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for [%s]",role);

+		}

+		return lurd;

+	}

+

+	public Result<List<UserRoleDAO.Data>> readUserInRole(final AuthzTrans trans, final String user, final String role) {

+		DAOGetter getter = new DAOGetter(trans,dao()) {

+			public Result<List<Data>> call() {

+				if(user.equals(trans.user())) {

+					Result<List<Data>> rrbu = readByUser(trans, user);

+					if(rrbu.isOK()) {

+						List<Data> ld = new ArrayList<Data>(1);

+						for(Data d : rrbu.value) {

+							if(d.role.equals(role)) {

+								ld.add(d);

+								break;

+							}

+						}

+						return Result.ok(ld).emptyList(ld.isEmpty());

+					} else {

+						return rrbu;

+					}

+				}

+				return dao.readByUserRole(trans, user, role);

+			}

+		};

+		Result<List<Data>> lurd = get(trans, keyFromObjs(user,role), getter);

+		if(lurd.isOK() && lurd.isEmpty()) {

+			return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for role [%s] and user [%s]",role,user);

+		}

+		return lurd;

+	}

+}
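
Because readByUser consults the transaction-scoped slot for the current user first, repeated "may user do X" checks inside one transaction cost at most a single Cassandra read. A hypothetical check:

    Result<List<UserRoleDAO.Data>> inRole =
        cachedUserRoleDAO.readUserInRole(trans, trans.user(), "org.onap.sample.admin");
    if (inRole.isOK()) {
        // user holds the role; an empty result maps to ERR_UserRoleNotFound
    }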

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ApprovalDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ApprovalDAO.java
new file mode 100644
index 0000000..dec1c9a
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ApprovalDAO.java
@@ -0,0 +1,206 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.util.Date;

+import java.util.List;

+import java.util.UUID;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.Loader;

+

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.Row;

+

+

+public class ApprovalDAO extends CassDAOImpl<AuthzTrans,ApprovalDAO.Data> {

+	public static final String PENDING = "pending";

+	public static final String DENIED = "denied";

+	public static final String APPROVED = "approved";

+	

+	private static final String TABLE = "approval";

+	private HistoryDAO historyDAO;

+	private PSInfo psByUser, psByApprover, psByTicket, psByStatus;

+

+	

+	public ApprovalDAO(AuthzTrans trans, Cluster cluster, String keyspace) {

+		super(trans, ApprovalDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+        historyDAO = new HistoryDAO(trans, this);

+		init(trans);

+	}

+

+

+	public ApprovalDAO(AuthzTrans trans, HistoryDAO hDAO) {

+		super(trans, ApprovalDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		historyDAO=hDAO;

+		init(trans);

+	}

+

+	private static final int KEYLIMIT = 1;

+	public static class Data {

+		public UUID   id;

+        public UUID   ticket;

+		public String user;

+		public String approver;

+		public String type;

+		public String status;

+		public String memo;

+		public String operation;

+		public Date updated;

+	}

+	

+	private static class ApprovalLoader extends Loader<Data> {

+		public static final ApprovalLoader deflt = new ApprovalLoader(KEYLIMIT);

+		

+		public ApprovalLoader(int keylimit) {

+			super(keylimit);

+		}

+		

+		@Override

+		public Data load(Data data, Row row) {

+			data.id = row.getUUID(0);

+			data.ticket = row.getUUID(1);

+			data.user = row.getString(2);

+			data.approver = row.getString(3);

+			data.type = row.getString(4);

+			data.status = row.getString(5);

+			data.memo = row.getString(6);

+			data.operation = row.getString(7);

+			if(row.getColumnDefinitions().size()>8) {

+				// WRITETIME is reported in microseconds; convert to milliseconds for Date

+				data.updated = new Date(row.getLong(8)/1000);

+			}

+			return data;

+		}

+

+		@Override

+		protected void key(Data data, int idx, Object[] obj) {

+			obj[idx]=data.id;

+		}

+

+		@Override

+		protected void body(Data data, int _idx, Object[] obj) {

+		    	int idx = _idx;

+			obj[idx]=data.ticket;

+			obj[++idx]=data.user;

+			obj[++idx]=data.approver;

+			obj[++idx]=data.type;

+			obj[++idx]=data.status;

+			obj[++idx]=data.memo;

+			obj[++idx]=data.operation;

+		}

+	}	

+	

+	private void init(AuthzTrans trans) {

+		String[] helpers = setCRUD(trans, TABLE, Data.class, ApprovalLoader.deflt,8);

+		// Need a specialty Creator to handle the "now()"

+		replace(CRUD.create, new PSInfo(trans, "INSERT INTO " + TABLE + " (" +  helpers[FIELD_COMMAS] +

+					") VALUES(now(),?,?,?,?,?,?,?)",new ApprovalLoader(0) {

+						@Override

+						protected void key(Data data, int idx, Object[] obj) {

+							// Overridden because key is the "now()"

+						}

+					},writeConsistency)

+				);

+

+		psByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + 

+				" WHERE user = ?", new ApprovalLoader(1) {

+			@Override

+			protected void key(Data data, int idx, Object[] obj) {

+				obj[idx]=data.user;

+			}

+		}, readConsistency);

+		

+		psByApprover = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + 

+				" WHERE approver = ?", new ApprovalLoader(1) {

+			@Override

+			protected void key(Data data, int idx, Object[] obj) {

+				obj[idx]=data.approver;

+			}

+		}, readConsistency);

+

+		psByTicket = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + 

+				" WHERE ticket = ?", new ApprovalLoader(1) {

+			@Override

+			protected void key(Data data, int idx, Object[] obj) {

+				obj[idx]=data.ticket;

+			}

+		}, readConsistency);

+

+		psByStatus = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + 

+				" WHERE status = ?", new ApprovalLoader(1) {

+			@Override

+			protected void key(Data data, int idx, Object[] obj) {

+				obj[idx]=data.status;

+			}

+		}, readConsistency);

+

+

+	}

+	

+	public Result<List<ApprovalDAO.Data>> readByUser(AuthzTrans trans, String user) {

+		return psByUser.read(trans, R_TEXT, new Object[]{user});

+	}

+

+	public Result<List<ApprovalDAO.Data>> readByApprover(AuthzTrans trans, String approver) {

+		return psByApprover.read(trans, R_TEXT, new Object[]{approver});

+	}

+

+	public Result<List<ApprovalDAO.Data>> readByTicket(AuthzTrans trans, UUID ticket) {

+		return psByTicket.read(trans, R_TEXT, new Object[]{ticket});

+	}

+

+	public Result<List<ApprovalDAO.Data>> readByStatus(AuthzTrans trans, String status) {

+		return psByStatus.read(trans, R_TEXT, new Object[]{status});

+	}	

+

+	/**

+     * Log Modification statements to History

+     *

+     * @param modified        which CRUD action was done

+     * @param data            entity data that needs a log entry

+     * @param override        if specified, we use it rather than crafting a history message based on data

+     */

+    @Override

+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {

+    	boolean memo = override.length>0 && override[0]!=null;

+    	boolean subject = override.length>1 && override[1]!=null;

+

+        HistoryDAO.Data hd = HistoryDAO.newInitedData();

+        hd.user = trans.user();

+        hd.action = modified.name();

+        hd.target = TABLE;

+        hd.subject = subject?override[1]:data.user + "|" + data.approver;

+        hd.memo = memo

+                ? String.format("%s by %s", override[0], hd.user)

+                : (modified.name() + "d approval for " + data.user);

+        // Detail?

+        // Reconstruct?

+        if(historyDAO.create(trans, hd).status!=Status.OK) {

+        	trans.error().log("Cannot log to History");

+        }

+    }

+

+}
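
Usage sketch (illustrative, not part of the patch) of the prepared lookups above; trans, cluster, and keyspace are assumed to be configured elsewhere, and the approver id is hypothetical.

    ApprovalDAO approvals = new ApprovalDAO(trans, cluster, keyspace);
    Result<List<ApprovalDAO.Data>> byApprover = approvals.readByApprover(trans, "approver@example.com");
    if (byApprover.isOK()) {
        for (ApprovalDAO.Data d : byApprover.value) {
            if (ApprovalDAO.PENDING.equals(d.status)) {
                // act on d.ticket, d.memo, d.operation
            }
        }
    }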

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ArtiDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ArtiDAO.java
new file mode 100644
index 0000000..bc5532e
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ArtiDAO.java
@@ -0,0 +1,267 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.io.ByteArrayOutputStream;

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.Date;

+import java.util.HashSet;

+import java.util.List;

+import java.util.Set;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.Bytification;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.Loader;

+import org.onap.aaf.dao.Streamer;

+

+import org.onap.aaf.inno.env.util.Chrono;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.Row;

+

+/**

+ * ArtiDAO manages Certificate Artifact entries. 

+ * Date: 7/19/13

+ */

+public class ArtiDAO extends CassDAOImpl<AuthzTrans,ArtiDAO.Data> {

+    public static final String TABLE = "artifact";

+    

+    private HistoryDAO historyDAO;

+    private PSInfo psByMechID,psByMachine;

+	

+    public ArtiDAO(AuthzTrans trans, Cluster cluster, String keyspace) {

+        super(trans, ArtiDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+        init(trans);

+    }

+

+    public ArtiDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) {

+        super(trans, ArtiDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+        historyDAO = hDao;

+        init(trans);

+    }

+

+    public static final int KEYLIMIT = 2;

+	public static class Data implements Bytification {

+		public String       			mechid;

+		public String       			machine;

+        private Set<String>      		type;

+        public String					sponsor;

+        public String					ca;

+        public String					dir;

+        public String					appName;

+        public String					os_user;

+        public String					notify;

+        public Date      				expires;

+        public int						renewDays;

+        

+        // Getters

+		public Set<String> type(boolean mutable) {

+			if (type == null) {

+				type = new HashSet<String>();

+			} else if (mutable && !(type instanceof HashSet)) {

+				type = new HashSet<String>(type);

+			}

+			return type;

+		}

+

+

+		@Override

+		public ByteBuffer bytify() throws IOException {

+			ByteArrayOutputStream baos = new ByteArrayOutputStream();

+			ArtifactLoader.deflt.marshal(this,new DataOutputStream(baos));

+			return ByteBuffer.wrap(baos.toByteArray());

+		}

+		

+		@Override

+		public void reconstitute(ByteBuffer bb) throws IOException {

+			ArtifactLoader.deflt.unmarshal(this, toDIS(bb));

+		}

+

+		public String toString() {

+			return mechid + ' ' + machine + ' ' + Chrono.dateTime(expires);

+		}

+    }

+

+    private static class ArtifactLoader extends Loader<Data> implements Streamer<Data>{

+		public static final int MAGIC=95829343;

+    	public static final int VERSION=1;

+    	public static final int BUFF_SIZE=48; // scratch buffer size for readString

+

+    	public static final ArtifactLoader deflt = new ArtifactLoader(KEYLIMIT);

+    	public ArtifactLoader(int keylimit) {

+            super(keylimit);

+        }

+

+    	@Override

+        public Data load(Data data, Row row) {

+            data.mechid = row.getString(0);

+            data.machine = row.getString(1);

+            data.type = row.getSet(2, String.class);

+            data.sponsor = row.getString(3);

+            data.ca = row.getString(4);

+            data.dir = row.getString(5);

+            data.appName = row.getString(6);

+            data.os_user = row.getString(7);

+            data.notify = row.getString(8);

+            data.expires = row.getDate(9);

+            data.renewDays = row.getInt(10);

+            return data;

+        }

+

+        @Override

+        protected void key(final Data data, final int idx, Object[] obj) {

+        	int i;

+            obj[i=idx] = data.mechid;

+            obj[++i] = data.machine;

+        }

+

+        @Override

+        protected void body(final Data data, final int idx, Object[] obj) {

+            int i;

+            obj[i=idx] = data.type;

+            obj[++i] = data.sponsor;

+            obj[++i] = data.ca;

+            obj[++i] = data.dir;

+            obj[++i] = data.appName;

+            obj[++i] = data.os_user;

+            obj[++i] = data.notify;

+            obj[++i] = data.expires;

+            obj[++i] = data.renewDays;

+        }

+

+		@Override

+		public void marshal(Data data, DataOutputStream os) throws IOException {

+			writeHeader(os,MAGIC,VERSION);

+			writeString(os, data.mechid);

+			writeString(os, data.machine);

+			os.writeInt(data.type.size());

+			for(String s : data.type) {

+				writeString(os, s);

+			}

+			writeString(os, data.sponsor);

+			writeString(os, data.ca);

+			writeString(os, data.dir);

+			writeString(os, data.appName);

+			writeString(os, data.os_user);

+			writeString(os, data.notify);

+			os.writeLong(data.expires==null?-1:data.expires.getTime());

+			os.writeInt(data.renewDays);

+		}

+

+		@Override

+		public void unmarshal(Data data, DataInputStream is) throws IOException {

+			/*int version = */readHeader(is,MAGIC,VERSION);

+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields

+			byte[] buff = new byte[BUFF_SIZE];

+			data.mechid = readString(is,buff);

+			data.machine = readString(is,buff);

+			int size = is.readInt();

+			data.type = new HashSet<String>(size);

+			for(int i=0;i<size;++i) {

+				data.type.add(readString(is,buff));

+			}

+			data.sponsor = readString(is,buff);

+			data.ca = readString(is,buff);

+			data.dir = readString(is,buff);

+			data.appName = readString(is,buff);

+			data.os_user = readString(is,buff);

+			data.notify = readString(is,buff);

+			long l = is.readLong();

+			data.expires = l<0?null:new Date(l);

+			data.renewDays = is.readInt();

+		}

+    }

+

+    private void init(AuthzTrans trans) {

+        // Set up sub-DAOs

+        if(historyDAO==null) {

+        	historyDAO = new HistoryDAO(trans,this);

+        }

+        

+        String[] helpers = setCRUD(trans, TABLE, Data.class, ArtifactLoader.deflt);

+

+		psByMechID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + 

+				" WHERE mechid = ?", new ArtifactLoader(1) {

+			@Override

+			protected void key(Data data, int idx, Object[] obj) {

+				obj[idx]=data.mechid;

+			}

+		},readConsistency);

+

+		psByMachine = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + 

+				" WHERE machine = ?", new ArtifactLoader(1) {

+			@Override

+			protected void key(Data data, int idx, Object[] obj) {

+				obj[idx]=data.machine;

+			}

+		},readConsistency);

+

+    }

+    

+	

+    public Result<List<Data>> readByMechID(AuthzTrans trans, String mechid) {

+		return psByMechID.read(trans, R_TEXT, new Object[]{mechid});

+	}

+

+	public Result<List<ArtiDAO.Data>> readByMachine(AuthzTrans trans, String machine) {

+		return psByMachine.read(trans, R_TEXT, new Object[]{machine});

+	}

+

+	/**

+     * Log Modification statements to History

+     *

+     * @param modified        which CRUD action was done

+     * @param data            entity data that needs a log entry

+     * @param override        if specified, we use it rather than crafting a history message based on data

+     */

+    @Override

+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {

+    	boolean memo = override.length>0 && override[0]!=null;

+    	boolean subject = override.length>1 && override[1]!=null;

+

+        HistoryDAO.Data hd = HistoryDAO.newInitedData();

+        hd.user = trans.user();

+        hd.action = modified.name();

+        hd.target = TABLE;

+        hd.subject = subject?override[1]: data.mechid;

+        hd.memo = memo

+                ? String.format("%s by %s", override[0], hd.user)

+                : String.format("%sd %s for %s",modified.name(),data.mechid,data.machine);

+        // Detail?

+        if(modified==CRUD.delete) {

+            try {

+                hd.reconstruct = data.bytify();

+            } catch (IOException e) {

+                trans.error().log(e,"Could not serialize ArtiDAO.Data");

+            }

+        }

+

+        if(historyDAO.create(trans, hd).status!=Status.OK) {

+        	trans.error().log("Cannot log to History");

+        }

+    }

+}
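
Usage sketch (illustrative, not part of the patch) of the Bytification round trip that wasModified uses for the History reconstruct blob. Field values are hypothetical; fields left unset are assumed to be handled by the Loader's writeString/readString helpers, and IOException handling is elided.

    ArtiDAO.Data art = new ArtiDAO.Data();
    art.mechid = "m12345@example.com";
    art.machine = "host.example.com";
    art.type(true).add("file");             // type(true) returns a mutable HashSet
    ByteBuffer bb = art.bytify();           // marshal via ArtifactLoader
    ArtiDAO.Data copy = new ArtiDAO.Data();
    copy.reconstitute(bb);                  // unmarshal into a fresh instance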

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheInfoDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheInfoDAO.java
new file mode 100644
index 0000000..e7cab3e
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheInfoDAO.java
@@ -0,0 +1,464 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.io.IOException;

+import java.net.HttpURLConnection;

+import java.net.URI;

+import java.util.Date;

+import java.util.HashMap;

+import java.util.HashSet;

+import java.util.Map;

+import java.util.Map.Entry;

+import java.util.concurrent.BlockingQueue;

+import java.util.concurrent.ConcurrentHashMap;

+import java.util.concurrent.LinkedBlockingQueue;

+import java.util.concurrent.TimeUnit;

+

+import org.onap.aaf.authz.env.AuthzEnv;

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.AbsCassDAO;

+import org.onap.aaf.dao.CIDAO;

+import org.onap.aaf.dao.CassAccess;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.Loader;

+

+import org.onap.aaf.cadi.CadiException;

+import org.onap.aaf.cadi.SecuritySetter;

+import org.onap.aaf.cadi.client.Future;

+import org.onap.aaf.cadi.client.Rcli;

+import org.onap.aaf.cadi.client.Retryable;

+import org.onap.aaf.cadi.http.HMangr;

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.Env;

+import org.onap.aaf.inno.env.TimeTaken;

+import org.onap.aaf.inno.env.Trans;

+import com.datastax.driver.core.BoundStatement;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.ResultSet;

+import com.datastax.driver.core.Row;

+import com.datastax.driver.core.exceptions.DriverException;

+

+public class CacheInfoDAO extends CassDAOImpl<AuthzTrans,CacheInfoDAO.Data> implements CIDAO<AuthzTrans> {

+

+	private static final String TABLE = "cache";

+	public static final Map<String,Date[]> info = new ConcurrentHashMap<String,Date[]>();

+

+	private static CacheUpdate cacheUpdate;

+	

+	

+	private BoundStatement check;

+	// Hold current time stamps from Tables

+	private final Date startTime;

+	

+	public CacheInfoDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {

+		super(trans, CacheInfoDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		startTime = new Date();

+		init(trans);

+	}

+

+	public CacheInfoDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) throws APIException, IOException {

+		super(trans, CacheInfoDAO.class.getSimpleName(),aDao,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		startTime = new Date();

+		init(trans);

+	}

+

+

+    //////////////////////////////////////////

+    // Data Definition, matches Cassandra DM

+    //////////////////////////////////////////

+    private static final int KEYLIMIT = 2;

+	/**

+	 * Data for the cache table: segment name, segment number, and last-touched timestamp.

+	 */

+	public static class Data {

+		public Data() {

+			name = null;

+			touched = null;

+		}

+		public Data(String name, int seg) {

+			this.name = name;

+			this.seg = seg;

+			touched = null;

+		}

+		

+		public String		name;

+		public int			seg;

+		public Date			touched;

+    }

+

+    private static class InfoLoader extends Loader<Data> {

+    	public static final InfoLoader dflt = new InfoLoader(KEYLIMIT);

+    	

+		public InfoLoader(int keylimit) {

+			super(keylimit);

+		}

+		

+		@Override

+		public Data load(Data data, Row row) {

+			// Int more efficient

+			data.name = row.getString(0);

+			data.seg = row.getInt(1);

+			data.touched = row.getDate(2);

+			return data;

+		}

+

+		@Override

+		protected void key(Data data, int _idx, Object[] obj) {

+		    	int idx = _idx;

+

+			obj[idx]=data.name;

+			obj[++idx]=data.seg;

+		}

+

+		@Override

+		protected void body(Data data, int idx, Object[] obj) {

+			obj[idx]=data.touched;

+		}

+    }

+    

+	public static<T extends Trans> void startUpdate(AuthzEnv env, HMangr hman, SecuritySetter<HttpURLConnection> ss, String ip, int port) {

+		if(cacheUpdate==null) {

+			Thread t= new Thread(cacheUpdate = new CacheUpdate(env,hman,ss, ip,port),"CacheInfo Update Thread");

+			t.setDaemon(true);

+			t.start();

+		}

+	}

+

+	public static<T extends Trans> void stopUpdate() {

+		if(cacheUpdate!=null) {

+			cacheUpdate.go=false;

+		}

+	}

+

+	private final static class CacheUpdate extends Thread {

+		public static BlockingQueue<Transfer> notifyDQ = new LinkedBlockingQueue<Transfer>(2000);

+

+		private static final String VOID_CT="application/Void+json;q=1.0;charset=utf-8;version=2.0,application/json;q=1.0;version=2.0,*/*;q=1.0";

+		private AuthzEnv env;

+		private HMangr hman;

+		private SecuritySetter<HttpURLConnection> ss;

+		private final String authority;

+		public boolean go = true;

+		

+		public CacheUpdate(AuthzEnv env, HMangr hman, SecuritySetter<HttpURLConnection> ss, String ip, int port) {

+			this.env = env;

+			this.hman = hman;

+			this.ss = ss;

+			

+			this.authority = ip+':'+port;

+		}

+		

+		private static class Transfer {

+			public String table;

+			public int segs[];

+			public Transfer(String table, int[] segs)  {

+				this.table = table;

+				this.segs = segs;

+			}

+		}

+		private class CacheClear extends Retryable<Integer> {

+			public int total=0;

+			private AuthzTrans trans;

+			private String type;

+			private String segs;

+			

+			public CacheClear(AuthzTrans trans) {

+				this.trans = trans;

+			}

+

+			public void set(Entry<String, IntHolder> es) {

+				type = es.getKey();

+				segs = es.getValue().toString();

+			}

+			

+			@Override

+			public Integer code(Rcli<?> client) throws APIException, CadiException {

+				URI to = client.getURI();

+				if(!to.getAuthority().equals(authority)) {

+					Future<Void> f = client.delete("/mgmt/cache/"+type+'/'+segs,VOID_CT);

+					if(f.get(hman.readTimeout())) {

+					    ++total;

+					} else {

+					    trans.error().log("Error During AAF Peer Notify",f.code(),f.body());

+					}

+				}

+				return total;

+			}

+		}

+		

+		private class IntHolder {

+			private int[] raw;

+			HashSet<Integer> set;

+			

+			public IntHolder(int ints[]) {

+				raw = ints;

+				set = null;

+			}

+			public void add(int[] ints) {

+				if(set==null) {

+					set = new HashSet<Integer>();

+					

+					for(int i=0;i<raw.length;++i) {

+						set.add(raw[i]);

+					}

+				}

+				for(int i=0;i<ints.length;++i) {

+					set.add(ints[i]);

+				}

+			}

+

+			@Override

+			public String toString() {

+				StringBuilder sb = new StringBuilder();

+				boolean first = true;

+				if(set==null) {

+					for(int i : raw) {

+						if(first) {

+							first=false;

+						} else {

+							sb.append(',');

+						}

+						sb.append(i);

+					}

+				} else {

+					for(Integer i : set) {

+						if(first) {

+							first=false;

+						} else {

+							sb.append(',');

+						}

+						sb.append(i);

+					}

+				}

+				return sb.toString();

+			}

+		}

+		

+		@Override

+		public void run() {

+			do {

+				try {

+					Transfer data = notifyDQ.poll(4,TimeUnit.SECONDS);

+					if(data==null) {

+						continue;

+					}

+					

+					int count = 0;

+					CacheClear cc = null;

+					Map<String,IntHolder> gather = null;

+					AuthzTrans trans = null;

+					long start=0;

+					// Do a block poll first

+					do {

+						if(gather==null) {

+							start = System.nanoTime();

+							trans = env.newTransNoAvg();

+							cc = new CacheClear(trans);

+							gather = new HashMap<String,IntHolder>();

+						}

+						IntHolder prev = gather.get(data.table);

+						if(prev==null) {

+							gather.put(data.table,new IntHolder(data.segs));

+						} else {

+							prev.add(data.segs);

+						}

+						// continue while there is data

+					} while((data = notifyDQ.poll())!=null);

+					if(gather!=null) {

+						for(Entry<String, IntHolder> es : gather.entrySet()) {

+							cc.set(es);

+							try {

+								if(hman.all(ss, cc, false)!=null) {

+									++count;

+								}

+							} catch (Exception e) {

+								trans.error().log(e, "Error on Cache Update");

+							}

+						}

+						if(env.debug().isLoggable()) {

+							float millis = (System.nanoTime()-start)/1000000f;

+							StringBuilder sb = new StringBuilder("Direct Cache Refresh: ");

+							sb.append("Updated ");

+							sb.append(count);

+							if(count==1) {

+								sb.append(" entry for ");

+							} else { 

+								sb.append(" entries for ");

+							}

+							int peers = count<=0?0:cc.total/count;

+							sb.append(peers);

+							sb.append(" client");

+							if(peers!=1) {

+								sb.append('s');

+							}

+							sb.append(" in ");

+							sb.append(millis);

+							sb.append("ms");

+							trans.auditTrail(0, sb, Env.REMOTE);

+							env.debug().log(sb);

+						}

+					}

+				} catch (InterruptedException e1) {

+					go = false;

+				}

+			} while(go);

+		}

+	}

+

+	private void init(AuthzTrans trans) throws APIException, IOException {

+		

+		String[] helpers = setCRUD(trans, TABLE, Data.class, InfoLoader.dflt);

+		check = getSession(trans).prepare(SELECT_SP +  helpers[FIELD_COMMAS] + " FROM " + TABLE).bind();

+

+		disable(CRUD.create);

+		disable(CRUD.delete);

+	}

+

+	/* (non-Javadoc)

+	 * @see org.onap.aaf.dao.aaf.cass.CIDAO#touch(org.onap.aaf.authz.env.AuthzTrans, java.lang.String, int)

+	 */

+	

+	@Override

+	public Result<Void> touch(AuthzTrans trans, String name, int ... seg) {

+		/////////////

+		// Direct Service Cache Invalidation

+		/////////////

+		// The notify queue is bounded (2000 entries), so a stalled consumer cannot cause unbounded memory growth.

+		// "offer" with a timeout is used so producers back off with an error instead of blocking indefinitely.

+		if(cacheUpdate!=null) {

+			try {

+				if(!CacheUpdate.notifyDQ.offer(new CacheUpdate.Transfer(name, seg),2,TimeUnit.SECONDS)) {

+					trans.error().log("Cache Notify Queue is not accepting messages, bouncing may be appropriate" );

+				}

+			} catch (InterruptedException e) {

+				trans.error().log("Cache Notify Queue posting was interrupted" );

+			}

+		}

+

+		/////////////

+		// Table Based Cache Invalidation (original)

+		/////////////

+		// Note: Save time with multiple Sequence Touches, but PreparedStmt doesn't support IN

+		StringBuilder start = new StringBuilder("CacheInfoDAO Touch segments ");

+		start.append(name);

+		start.append(": ");

+		StringBuilder sb = new StringBuilder("BEGIN BATCH\n");

+		boolean first = true;

+		for(int s : seg) {

+			sb.append(UPDATE_SP);

+			sb.append(TABLE);

+			sb.append(" SET touched=dateof(now()) WHERE name = '");

+			sb.append(name);

+			sb.append("' AND seg = ");

+			sb.append(s);

+			sb.append(";\n");	

+			if(first) {

+				first =false;

+			} else {

+				start.append(',');

+			}

+			start.append(s);

+		}

+		sb.append("APPLY BATCH;");

+		TimeTaken tt = trans.start(start.toString(),Env.REMOTE);

+		try {

+			getSession(trans).executeAsync(sb.toString());

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		} finally {

+			tt.done();

+		}

+		return Result.ok();

+	}

+

+	/* (non-Javadoc)

+	 * @see org.onap.aaf.dao.aaf.cass.CIDAO#check(org.onap.aaf.authz.env.AuthzTrans)

+	 */

+	@Override

+	public Result<Void> check(AuthzTrans trans) {

+		ResultSet rs;

+		TimeTaken tt = trans.start("Check Table Timestamps",Env.REMOTE);

+		try {

+			rs = getSession(trans).execute(check);

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		} finally {

+			tt.done();

+		}

+		

+		String lastName = null;

+		Date[] dates = null;

+		for(Row row : rs.all()) {

+			String name = row.getString(0);

+			int seg = row.getInt(1);

+			if(!name.equals(lastName)) {

+				dates = info.get(name);

+				lastName=name;

+			}

+			if(dates==null) {

+				dates=new Date[seg+1];

+				info.put(name,dates);

+			} else if(dates.length<=seg) {

+				Date[] temp = new Date[seg+1];

+				System.arraycopy(dates, 0, temp, 0, dates.length);

+				dates = temp;

+				info.put(name, dates);

+			}

+			Date temp = row.getDate(2);

+			if(dates[seg]==null || dates[seg].before(temp)) {

+				dates[seg]=temp;

+			}

+		}

+		return Result.ok();

+	}

+	

+    /* (non-Javadoc)

+	 * @see org.onap.aaf.dao.aaf.cass.CIDAO#get(java.lang.String, int)

+	 */

+    @Override

+	public Date get(AuthzTrans trans, String table, int seg) {

+		Date[] dates = info.get(table);

+		if(dates==null) {

+			dates = new Date[seg+1];

+			touch(trans,table, seg);

+		} else if(dates.length<=seg) {

+			Date[] temp = new Date[seg+1];

+			System.arraycopy(dates, 0, temp, 0, dates.length);

+			dates = temp;

+		}

+		Date rv = dates[seg];

+		if(rv==null) {

+			rv=dates[seg]=startTime;

+		}

+		return rv;

+	}

+

+	@Override

+	protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {

+		// Do nothing

+	}

+

+}
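
Usage sketch (illustrative, not part of the patch): marking table segments stale and reading the last-touched time back. The table name and segment numbers are hypothetical; constructor exceptions are elided.

    CacheInfoDAO ci = new CacheInfoDAO(trans, cluster, keyspace);
    ci.touch(trans, "user_role", 3, 17);         // async batch UPDATE of cache(name,seg) rows
    Date when = ci.get(trans, "user_role", 3);   // falls back to the DAO start time for unseen segments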

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheableData.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheableData.java
new file mode 100644
index 0000000..7564813
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheableData.java
@@ -0,0 +1,36 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import org.onap.aaf.dao.Cacheable;

+import org.onap.aaf.dao.Cached;

+import org.onap.aaf.dao.CachedDAO;

+

+public abstract class CacheableData implements Cacheable {

+	// WARNING:  DON'T attempt to add any members here, as it will 

+	// be treated by system as fields expected in Tables

+	protected int seg(Cached<?,?> cache, Object ... fields) {

+		return cache==null?0:cache.invalidate(CachedDAO.keyFromObjs(fields));

+	}

+	

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CertDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CertDAO.java
new file mode 100644
index 0000000..4ed6a3e
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CertDAO.java
@@ -0,0 +1,244 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.io.ByteArrayOutputStream;

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.IOException;

+import java.math.BigInteger;

+import java.nio.ByteBuffer;

+import java.util.List;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.Bytification;

+import org.onap.aaf.dao.CIDAO;

+import org.onap.aaf.dao.Cached;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.Loader;

+import org.onap.aaf.dao.Streamer;

+

+import org.onap.aaf.inno.env.APIException;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.Row;

+

+/**

+ * CertDAO manages X.509 certificate entries. 

+ * Date: 7/19/13

+ */

+public class CertDAO extends CassDAOImpl<AuthzTrans,CertDAO.Data> {

+    public static final String TABLE = "x509";

+    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F

+    

+    private HistoryDAO historyDAO;

+	private CIDAO<AuthzTrans> infoDAO;

+	private PSInfo psX500,psID;

+	

+    public CertDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {

+        super(trans, CertDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+        init(trans);

+    }

+

+    public CertDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) throws APIException, IOException {

+        super(trans, CertDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+        historyDAO = hDao;

+        infoDAO = ciDao;

+        init(trans);

+    }

+    

+    public static final int KEYLIMIT = 2;

+	public static class Data extends CacheableData implements Bytification {

+    	

+        public String					ca;

+		public BigInteger 				serial;

+        public String	      			id;

+        public String					x500;

+        public String					x509;

+

+        @Override

+		public int[] invalidate(Cached<?,?> cache) {

+        	return new int[] {

+        		seg(cache,ca,serial)

+        	};

+		}

+        

+		@Override

+		public ByteBuffer bytify() throws IOException {

+			ByteArrayOutputStream baos = new ByteArrayOutputStream();

+			CertLoader.deflt.marshal(this,new DataOutputStream(baos));

+			return ByteBuffer.wrap(baos.toByteArray());

+		}

+		

+		@Override

+		public void reconstitute(ByteBuffer bb) throws IOException {

+			CertLoader.deflt.unmarshal(this, toDIS(bb));

+		}

+    }

+

+    private static class CertLoader extends Loader<Data> implements Streamer<Data>{

+		public static final int MAGIC=85102934;

+    	public static final int VERSION=1;

+    	public static final int BUFF_SIZE=48; // scratch buffer size for readString

+

+    	public static final CertLoader deflt = new CertLoader(KEYLIMIT);

+    	public CertLoader(int keylimit) {

+            super(keylimit);

+        }

+

+    	@Override

+        public Data load(Data data, Row row) {

+        	data.ca = row.getString(0);

+            ByteBuffer bb = row.getBytesUnsafe(1);

+            byte[] bytes = new byte[bb.remaining()];

+            bb.get(bytes);

+            data.serial = new BigInteger(bytes);

+            data.id = row.getString(2);

+            data.x500 = row.getString(3);

+            data.x509 = row.getString(4);

+            return data;

+        }

+

+        @Override

+        protected void key(Data data, int idx, Object[] obj) {

+            obj[idx] = data.ca;

+            obj[++idx] = ByteBuffer.wrap(data.serial.toByteArray());

+        }

+

+        @Override

+        protected void body(Data data, int _idx, Object[] obj) {

+        	int idx = _idx;

+

+            obj[idx] = data.id;

+            obj[++idx] = data.x500;

+            obj[++idx] = data.x509;

+

+            

+        }

+

+		@Override

+		public void marshal(Data data, DataOutputStream os) throws IOException {

+			writeHeader(os,MAGIC,VERSION);

+			writeString(os, data.id);

+			writeString(os, data.x500);

+			writeString(os, data.x509);

+			writeString(os, data.ca);

+			if(data.serial==null) {

+				os.writeInt(-1);

+			} else {

+				byte[] dsba = data.serial.toByteArray();

+				int l = dsba.length;

+				os.writeInt(l);

+				os.write(dsba,0,l);

+			}

+		}

+

+		@Override

+		public void unmarshal(Data data, DataInputStream is) throws IOException {

+			/*int version = */readHeader(is,MAGIC,VERSION);

+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields

+			byte[] buff = new byte[BUFF_SIZE];

+			data.id = readString(is,buff);

+			data.x500 = readString(is,buff);

+			data.x509 = readString(is,buff);

+			data.ca = readString(is,buff);

+			int i = is.readInt();

+			if(i<0) {

+				data.serial=null;

+			} else {

+				byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads

+				is.readFully(bytes); // read() may return short; readFully fills the array

+				data.serial = new BigInteger(bytes);

+			}

+		}

+    }

+    

+    public Result<List<CertDAO.Data>> read(AuthzTrans trans, Object ... key) {

+    	// Translate BigInteger to Byte array for lookup

+    	return super.read(trans, key[0],ByteBuffer.wrap(((BigInteger)key[1]).toByteArray()));

+    }

+

+    private void init(AuthzTrans trans) throws APIException, IOException {

+        // Set up sub-DAOs

+        if(historyDAO==null) {

+        	historyDAO = new HistoryDAO(trans,this);

+        }

+		if(infoDAO==null) {

+			infoDAO = new CacheInfoDAO(trans,this);

+		}

+

+		String[] helpers = setCRUD(trans, TABLE, Data.class, CertLoader.deflt);

+

+		psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +

+				" WHERE id = ?", CertLoader.deflt,readConsistency);

+

+		psX500 = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +

+				" WHERE x500 = ?", CertLoader.deflt,readConsistency);

+		

+    }

+    

+	public Result<List<Data>> readX500(AuthzTrans trans, String x500) {

+		return psX500.read(trans, R_TEXT, new Object[]{x500});

+	}

+

+	public Result<List<Data>> readID(AuthzTrans trans, String id) {

+		return psID.read(trans, R_TEXT, new Object[]{id});

+	}

+

+    /**

+     * Log Modification statements to History

+     *

+     * @param modified        which CRUD action was done

+     * @param data            entity data that needs a log entry

+     * @param override        if specified, we use it rather than crafting a history message based on data

+     */

+    @Override

+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {

+    	boolean memo = override.length>0 && override[0]!=null;

+    	boolean subject = override.length>1 && override[1]!=null;

+

+        HistoryDAO.Data hd = HistoryDAO.newInitedData();

+        hd.user = trans.user();

+        hd.action = modified.name();

+        hd.target = TABLE;

+        hd.subject = subject?override[1]: data.id;

+        hd.memo = memo

+                ? String.format("%s by %s", override[0], hd.user)

+                : (modified.name() + "d certificate info for " + data.id);

+        // Detail?

+        if(modified==CRUD.delete) {

+            try {

+                hd.reconstruct = data.bytify();

+            } catch (IOException e) {

+                trans.error().log(e,"Could not serialize CertDAO.Data");

+            }

+        }

+

+        if(historyDAO.create(trans, hd).status!=Status.OK) {

+        	trans.error().log("Cannot log to History");

+        }

+        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).status!=Status.OK) {

+        	trans.error().log("Cannot touch Cert");

+        }

+    }

+}
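
Usage sketch (illustrative, not part of the patch) of the (ca, serial) key path: the overridden read() above wraps the BigInteger serial into a ByteBuffer for the Cassandra blob column. Values are hypothetical; constructor exceptions are elided.

    CertDAO certs = new CertDAO(trans, cluster, keyspace);
    Result<List<CertDAO.Data>> rc = certs.read(trans, "exampleCA", new BigInteger("2384762"));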

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CredDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CredDAO.java
new file mode 100644
index 0000000..dad5fdb
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CredDAO.java
@@ -0,0 +1,258 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.io.ByteArrayOutputStream;

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.Date;

+import java.util.List;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.Bytification;

+import org.onap.aaf.dao.CIDAO;

+import org.onap.aaf.dao.Cached;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.Loader;

+import org.onap.aaf.dao.Streamer;

+

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.util.Chrono;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.Row;

+

+/**

+ * CredDAO manages credentials. 

+ * Date: 7/19/13

+ */

+public class CredDAO extends CassDAOImpl<AuthzTrans,CredDAO.Data> {

+    public static final String TABLE = "cred";

+    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F

+	public static final int RAW = -1;

+    public static final int BASIC_AUTH = 1;

+    public static final int BASIC_AUTH_SHA256 = 2;

+    public static final int CERT_SHA256_RSA =200;

+    

+    private HistoryDAO historyDAO;

+	private CIDAO<AuthzTrans> infoDAO;

+	private PSInfo psNS;

+	private PSInfo psID;

+	

+    public CredDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {

+        super(trans, CredDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+        init(trans);

+    }

+

+    public CredDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) throws APIException, IOException {

+        super(trans, CredDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+        historyDAO = hDao;

+        infoDAO = ciDao;

+        init(trans);

+    }

+

+    public static final int KEYLIMIT = 3;

+	public static class Data extends CacheableData implements Bytification {

+    	

+		public String       			id;

+        public Integer      			type;

+        public Date      				expires;

+        public Integer					other;

+		public String					ns;

+		public String					notes;

+        public ByteBuffer				cred;  //   this is a blob in cassandra

+

+

+        @Override

+		public int[] invalidate(Cached<?,?> cache) {

+        	return new int[] {

+        		seg(cache,id) // cache is for all entities

+        	};

+		}

+        

+		@Override

+		public ByteBuffer bytify() throws IOException {

+			ByteArrayOutputStream baos = new ByteArrayOutputStream();

+			CredLoader.deflt.marshal(this,new DataOutputStream(baos));

+			return ByteBuffer.wrap(baos.toByteArray());

+		}

+		

+		@Override

+		public void reconstitute(ByteBuffer bb) throws IOException {

+			CredLoader.deflt.unmarshal(this, toDIS(bb));

+		}

+

+		public String toString() {

+			return id + ' ' + type + ' ' + Chrono.dateTime(expires);

+		}

+    }

+

+    private static class CredLoader extends Loader<Data> implements Streamer<Data>{

+		public static final int MAGIC=153323443;

+    	public static final int VERSION=1;

+    	public static final int BUFF_SIZE=48; // scratch buffer size for readString

+

+    	public static final CredLoader deflt = new CredLoader(KEYLIMIT);

+    	public CredLoader(int keylimit) {

+            super(keylimit);

+        }

+

+    	@Override

+        public Data load(Data data, Row row) {

+            data.id = row.getString(0);

+            data.type = row.getInt(1);    // NOTE: in datastax driver,  If the int value is NULL, 0 is returned!

+            data.expires = row.getDate(2);

+            data.other = row.getInt(3);

+            data.ns = row.getString(4);     

+            data.notes = row.getString(5);

+            data.cred = row.getBytesUnsafe(6);            

+            return data;

+        }

+

+        @Override

+        protected void key(Data data, int _idx, Object[] obj) {

+	    int idx = _idx;

+

+            obj[idx] = data.id;

+            obj[++idx] = data.type;

+            obj[++idx] = data.expires;

+        }

+

+        @Override

+        protected void body(Data data, int idx, Object[] obj) {

+            int i;

+            obj[i=idx] = data.other;

+            obj[++i] = data.ns;

+            obj[++i] = data.notes;

+            obj[++i] = data.cred;

+        }

+

+		@Override

+		public void marshal(Data data, DataOutputStream os) throws IOException {

+			writeHeader(os,MAGIC,VERSION);

+			writeString(os, data.id);

+			os.writeInt(data.type);	

+			os.writeLong(data.expires==null?-1:data.expires.getTime());

+			os.writeInt(data.other==null?0:data.other);

+			writeString(os, data.ns);

+			writeString(os, data.notes);

+			if(data.cred==null) {

+				os.writeInt(-1);

+			} else {

+				int l = data.cred.limit()-data.cred.position();

+				os.writeInt(l);

+				os.write(data.cred.array(),data.cred.position(),l);

+			}

+		}

+

+		@Override

+		public void unmarshal(Data data, DataInputStream is) throws IOException {

+			/*int version = */readHeader(is,MAGIC,VERSION);

+			// If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields

+			byte[] buff = new byte[BUFF_SIZE];

+			data.id = readString(is,buff);

+			data.type = is.readInt();

+			

+			long l = is.readLong();

+			data.expires = l<0?null:new Date(l);

+			data.other = is.readInt();

+			data.ns = readString(is,buff);

+			data.notes = readString(is,buff);

+			

+			int i = is.readInt();

+			if(i<0) {

+				data.cred=null;

+			} else {

+				byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads

+				is.readFully(bytes); // read() may return short; readFully fills the array

+				data.cred = ByteBuffer.wrap(bytes);

+			}

+		}

+    }

+

+    private void init(AuthzTrans trans) throws APIException, IOException {

+        // Set up sub-DAOs

+        if(historyDAO==null) {

+        	historyDAO = new HistoryDAO(trans,this);

+        }

+		if(infoDAO==null) {

+			infoDAO = new CacheInfoDAO(trans,this);

+		}

+		

+

+		String[] helpers = setCRUD(trans, TABLE, Data.class, CredLoader.deflt);

+		

+		psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +

+				" WHERE ns = ?", CredLoader.deflt,readConsistency);

+		

+		psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +

+				" WHERE id = ?", CredLoader.deflt,readConsistency);

+    }

+    

+	public Result<List<Data>> readNS(AuthzTrans trans, String ns) {

+		return psNS.read(trans, R_TEXT, new Object[]{ns});

+	}

+	

+	public Result<List<Data>> readID(AuthzTrans trans, String id) {

+		return psID.read(trans, R_TEXT, new Object[]{id});

+	}

+	

+    /**

+     * Log Modification statements to History

+     *

+     * @param modified        which CRUD action was done

+     * @param data            entity data that needs a log entry

+     * @param override        if specified, we use it rather than crafting a history message based on data

+     */

+    @Override

+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {

+    	boolean memo = override.length>0 && override[0]!=null;

+    	boolean subject = override.length>1 && override[1]!=null;

+

+        HistoryDAO.Data hd = HistoryDAO.newInitedData();

+        hd.user = trans.user();

+        hd.action = modified.name();

+        hd.target = TABLE;

+        hd.subject = subject?override[1]: data.id;

+        hd.memo = memo

+                ? String.format("%s by %s", override[0], hd.user)

+                : (modified.name() + "d credential for " + data.id);

+        // Detail?

+        if(modified==CRUD.delete) {

+            try {

+                hd.reconstruct = data.bytify();

+            } catch (IOException e) {

+                trans.error().log(e,"Could not serialize CredDAO.Data");

+            }

+        }

+

+        if(historyDAO.create(trans, hd).status!=Status.OK) {

+        	trans.error().log("Cannot log to History");

+        }

+        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).status!=Status.OK) {

+        	trans.error().log("Cannot touch Cred");

+        }

+    }

+}
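
Usage sketch (illustrative, not part of the patch): fetching all credentials for an id and filtering on the mechanism constants defined above; the id is hypothetical and constructor exceptions are elided.

    CredDAO creds = new CredDAO(trans, cluster, keyspace);
    Result<List<CredDAO.Data>> rid = creds.readID(trans, "m12345@example.com");
    if (rid.isOK()) {
        for (CredDAO.Data d : rid.value) {
            if (d.type != null && d.type == CredDAO.BASIC_AUTH_SHA256) {
                // d.cred holds the digest blob; d.expires bounds its validity
            }
        }
    }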

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/DelegateDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/DelegateDAO.java
new file mode 100644
index 0000000..6ff7120
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/DelegateDAO.java
@@ -0,0 +1,139 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.io.ByteArrayOutputStream;

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.Date;

+import java.util.List;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.AbsCassDAO;

+import org.onap.aaf.dao.Bytification;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.Loader;

+import org.onap.aaf.dao.Streamer;

+

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.Row;

+

+public class DelegateDAO extends CassDAOImpl<AuthzTrans, DelegateDAO.Data> {

+

+	public static final String TABLE = "delegate";

+	private PSInfo psByDelegate;

+	

+	public DelegateDAO(AuthzTrans trans, Cluster cluster, String keyspace) {

+		super(trans, DelegateDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		init(trans);

+	}

+

+	public DelegateDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {

+		super(trans, DelegateDAO.class.getSimpleName(),aDao,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		init(trans);

+	}

+	

+	private static final int KEYLIMIT = 1;

+	public static class Data implements Bytification {

+		public String user;

+		public String delegate;

+		public Date expires;

+

+		@Override

+		public ByteBuffer bytify() throws IOException {

+			ByteArrayOutputStream baos = new ByteArrayOutputStream();

+			DelegateLoader.dflt.marshal(this,new DataOutputStream(baos));

+			return ByteBuffer.wrap(baos.toByteArray());

+		}

+		

+		@Override

+		public void reconstitute(ByteBuffer bb) throws IOException {

+			DelegateLoader.dflt.unmarshal(this, toDIS(bb));

+		}

+	}

+	

+	private static class DelegateLoader extends Loader<Data> implements Streamer<Data>{

+		public static final int MAGIC=0xD823ACF2;

+    	public static final int VERSION=1;

+    	public static final int BUFF_SIZE=48;

+

+		public static final DelegateLoader dflt = new DelegateLoader(KEYLIMIT);

+

+		public DelegateLoader(int keylimit) {

+			super(keylimit);

+		}

+		

+		@Override

+		public Data load(Data data, Row row) {

+			data.user = row.getString(0);

+			data.delegate = row.getString(1);

+			data.expires = row.getDate(2);

+			return data;

+		}

+

+		@Override

+		protected void key(Data data, int idx, Object[] obj) {

+			obj[idx]=data.user;

+		}

+

+		@Override

+		protected void body(Data data, int _idx, Object[] obj) {

+			int idx = _idx;

+

+			obj[idx]=data.delegate;

+			obj[++idx]=data.expires;

+		}

+

+		@Override

+		public void marshal(Data data, DataOutputStream os) throws IOException {

+			writeHeader(os,MAGIC,VERSION);

+			writeString(os, data.user);

+			writeString(os, data.delegate);

+			os.writeLong(data.expires.getTime());

+		}

+

+		@Override

+		public void unmarshal(Data data, DataInputStream is) throws IOException {

+			/*int version = */readHeader(is,MAGIC,VERSION);

+			// If the version changes between production runs, you'll need a switch statement here to read each version's fields appropriately

+			byte[] buff = new byte[BUFF_SIZE];

+			data.user = readString(is, buff);

+			data.delegate = readString(is,buff);

+			data.expires = new Date(is.readLong());

+		}

+	}	

+	

+	private void init(AuthzTrans trans) {

+		String[] helpers = setCRUD(trans, TABLE, Data.class, DelegateLoader.dflt);

+		psByDelegate = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +

+				" WHERE delegate = ?", new DelegateLoader(1),readConsistency);

+

+	}

+

+	public Result<List<DelegateDAO.Data>> readByDelegate(AuthzTrans trans, String delegate) {

+		return psByDelegate.read(trans, R_TEXT, new Object[]{delegate});

+	}

+}
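
DelegateDAO.Data implements Bytification, so a delegate row can be flattened to bytes for caching and restored later. A minimal round-trip sketch, assuming the classes in this patch are on the classpath (the ids are hypothetical):

    import java.nio.ByteBuffer;
    import java.util.Date;
    import org.onap.aaf.dao.aaf.cass.DelegateDAO;

    public class DelegateRoundTrip {
        public static void main(String[] args) throws Exception {
            DelegateDAO.Data in = new DelegateDAO.Data();
            in.user = "xy1234";        // hypothetical user id
            in.delegate = "ab5678";    // hypothetical delegate id
            in.expires = new Date();

            ByteBuffer bb = in.bytify();          // MAGIC/VERSION header + fields

            DelegateDAO.Data out = new DelegateDAO.Data();
            out.reconstitute(bb);                 // restores user, delegate, expires
            System.out.println(out.user + " -> " + out.delegate);
        }
    }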

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/FutureDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/FutureDAO.java
new file mode 100644
index 0000000..4fda97a
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/FutureDAO.java
@@ -0,0 +1,183 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.nio.ByteBuffer;

+import java.util.Date;

+import java.util.List;

+import java.util.UUID;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.DAOException;

+import org.onap.aaf.dao.Loader;

+

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.ResultSet;

+import com.datastax.driver.core.Row;

+

+/**

+ * FutureDAO stores construction information so that

+ * elements can be created at a later time.

+ * 

+ * 8/20/2013

+ */

+public class FutureDAO extends CassDAOImpl<AuthzTrans,FutureDAO.Data> {

+    private static final String TABLE = "future";

+	private final HistoryDAO historyDAO;

+//	private static String createString;

+	private PSInfo psByStartAndTarget;

+	

+    public FutureDAO(AuthzTrans trans, Cluster cluster, String keyspace) {

+        super(trans, FutureDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		historyDAO = new HistoryDAO(trans, this);

+        init(trans);

+    }

+

+    public FutureDAO(AuthzTrans trans, HistoryDAO hDAO) {

+        super(trans, FutureDAO.class.getSimpleName(),hDAO, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+        historyDAO=hDAO;

+        init(trans);

+    }

+

+    public static final int KEYLIMIT = 1;

+    public static class Data {

+        public UUID         id;

+        public String		target;

+        public String		memo;

+        public Date       	start;

+        public Date       	expires;

+        public ByteBuffer 	construct;  //   this is a blob in cassandra

+    }

+

+    private static class FLoader extends Loader<Data> {

+        public FLoader() {

+            super(KEYLIMIT);

+        }

+

+        public FLoader(int keylimit) {

+            super(keylimit);

+        }

+

+        @Override

+        public Data load(Data data, Row row) {

+            data.id 		= row.getUUID(0);

+            data.target		= row.getString(1);

+            data.memo       = row.getString(2);

+            data.start 		= row.getDate(3);

+            data.expires 	= row.getDate(4);

+            data.construct 	= row.getBytes(5);

+            return data;

+        }

+

+        @Override

+        protected void key(Data data, int idx, Object[] obj) {

+            obj[idx] = data.id;

+        }

+

+        @Override

+        protected void body(Data data, int _idx, Object[] obj) {

+            int idx = _idx;

+

+            obj[idx] = data.target;

+            obj[++idx] = data.memo;

+            obj[++idx] = data.start;

+            obj[++idx] = data.expires;

+            obj[++idx] = data.construct;

+        }

+    }

+

+    private void init(AuthzTrans trans) {

+        // Set up sub-DAOs

+        String[] helpers = setCRUD(trans, TABLE, Data.class, new FLoader(KEYLIMIT));

+

+        // Uh, oh.  Can't use "now()" in Prepared Statements (at least at this level)

+//		createString = "INSERT INTO " + TABLE + " ("+helpers[FIELD_COMMAS] +") VALUES (now(),";

+//

+//		// Need a specialty Creator to handle the "now()"

+//		replace(CRUD.Create, new PSInfo(trans, "INSERT INTO future (" +  helpers[FIELD_COMMAS] +

+//					") VALUES(now(),?,?,?,?,?)",new FLoader(0)));

+		

+		// Other SELECT style statements... match with a local Method

+		psByStartAndTarget = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +

+				" FROM future WHERE start <= ? and target = ? ALLOW FILTERING", new FLoader(2) {

+			@Override

+			protected void key(Data data, int _idx, Object[] obj) {

+			    	int idx = _idx;

+

+				obj[idx]=data.start;

+				obj[++idx]=data.target;

+			}

+		},readConsistency);

+		

+

+    }

+

+    public Result<List<Data>> readByStartAndTarget(AuthzTrans trans, Date start, String target) throws DAOException {

+		return psByStartAndTarget.read(trans, R_TEXT, new Object[]{start, target});

+	}

+

+    /**

+	 * Override create to add a secondary ID to the History Subject, and to create Data.id if it is null

+     */

+	public Result<FutureDAO.Data> create(AuthzTrans trans, FutureDAO.Data data, String id) {

+		// If ID is not set (typical), create one.

+		if(data.id==null) {

+			StringBuilder sb = new StringBuilder(trans.user());

+			sb.append(data.target);

+			sb.append(System.currentTimeMillis());

+			data.id = UUID.nameUUIDFromBytes(sb.toString().getBytes());

+		}

+		Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);

+		if(rs.notOK()) {

+			return Result.err(rs);

+		}

+		wasModified(trans, CRUD.create, data, null, id);

+		return Result.ok(data);	

+	}

+

+	/**

+	 * Log Modification statements to History

+	 *

+	 * @param modified        which CRUD action was done

+	 * @param data            entity data that needs a log entry

+	 * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data

+	 */

+	@Override

+	protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {

+		boolean memo = override.length>0 && override[0]!=null;

+		boolean subject = override.length>1 && override[1]!=null;

+		HistoryDAO.Data hd = HistoryDAO.newInitedData();

+		hd.user = trans.user();

+		hd.action = modified.name();

+		hd.target = TABLE;

+		hd.subject = subject?override[1]:"";

+		hd.memo = memo?String.format("%s by %s", override[0], hd.user):data.memo;

+	

+		if(historyDAO.create(trans, hd).status!=Status.OK) {

+			trans.error().log("Cannot log to History");

+		}

+	}

+    

+}
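
When no id is supplied, create() above derives a name-based (Type 3) UUID from the caller, the target, and the current time. The derivation in isolation (values hypothetical):

    import java.util.UUID;

    public class FutureIdSketch {
        public static void main(String[] args) {
            String user = "xy1234";        // would come from trans.user()
            String target = "user_role";   // hypothetical target table
            StringBuilder sb = new StringBuilder(user);
            sb.append(target);
            sb.append(System.currentTimeMillis());
            // Same call FutureDAO.create uses when data.id is null
            System.out.println(UUID.nameUUIDFromBytes(sb.toString().getBytes()));
        }
    }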

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/HistoryDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/HistoryDAO.java
new file mode 100644
index 0000000..e72c774
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/HistoryDAO.java
@@ -0,0 +1,237 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.nio.ByteBuffer;

+import java.text.SimpleDateFormat;

+import java.util.Date;

+import java.util.List;

+import java.util.UUID;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.AbsCassDAO;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.Loader;

+

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.ConsistencyLevel;

+import com.datastax.driver.core.ResultSet;

+import com.datastax.driver.core.Row;

+

+/**

+ * History

+ * 

+ * 

+ * History is a special case, because we don't want Updates or Deletes...  Too likely to mess up history.

+ * 

+ * 9-9-2013 - Found a problem with using "Prepare".  You cannot prepare anything with a "now()" in it, as

+ * it is evaluated once during the prepare, and kept.  That renders any use of "now()" pointless.  Therefore

+ * the Create function needs to be run fresh every time.

+ * 

+ * Fixed in Cassandra 1.2.6 https://issues.apache.org/jira/browse/CASSANDRA-5616

+ *

+ */

+public class HistoryDAO extends CassDAOImpl<AuthzTrans, HistoryDAO.Data> {

+	private static final String TABLE = "history";

+

+	public static final SimpleDateFormat monthFormat = new SimpleDateFormat("yyyyMM");

+//	private static final SimpleDateFormat dayTimeFormat = new SimpleDateFormat("ddHHmmss");

+

+	private String[] helpers;

+

+	private HistLoader defLoader;

+

+	private AbsCassDAO<AuthzTrans, Data>.PSInfo readByUser, readBySubject, readByYRMN;

+

+	public HistoryDAO(AuthzTrans trans, Cluster cluster, String keyspace) {

+		super(trans, HistoryDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);

+		init(trans);

+	}

+

+	public HistoryDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {

+		super(trans, HistoryDAO.class.getSimpleName(),aDao,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);

+		init(trans);

+	}

+

+

+	private static final int KEYLIMIT = 1;

+	public static class Data {

+		public UUID id;

+		public int	yr_mon;

+		public String user;

+		public String action;

+		public String target;

+		public String subject;

+		public String  memo;

+//		Map<String, String>  detail = null;

+//		public Map<String, String>  detail() {

+//			if(detail == null) {

+//				detail = new HashMap<String, String>();

+//			}

+//			return detail;

+//		}

+		public ByteBuffer reconstruct;

+	}

+	

+	private static class HistLoader extends Loader<Data> {

+		public HistLoader(int keylimit) {

+			super(keylimit);

+		}

+

+		@Override

+		public Data load(Data data, Row row) {

+			data.id = row.getUUID(0);

+			data.yr_mon = row.getInt(1);

+			data.user = row.getString(2);

+			data.action = row.getString(3);

+			data.target = row.getString(4);

+			data.subject = row.getString(5);

+			data.memo = row.getString(6);

+//			data.detail = row.getMap(6, String.class, String.class);

+			data.reconstruct = row.getBytes(7);

+			return data;

+		}

+

+		@Override

+		protected void key(Data data, int idx, Object[] obj) {

+			obj[idx]=data.id;

+		}

+

+		@Override

+		protected void body(Data data, int _idx, Object[] obj) {

+			int idx = _idx;

+			obj[idx]=data.yr_mon;

+			obj[++idx]=data.user;

+			obj[++idx]=data.action;

+			obj[++idx]=data.target;

+			obj[++idx]=data.subject;

+			obj[++idx]=data.memo;

+//			obj[++idx]=data.detail;

+			obj[++idx]=data.reconstruct;		

+		}

+	};

+	

+	private void init(AuthzTrans trans) {

+		// Loader must match fields order

+		defLoader = new HistLoader(KEYLIMIT);

+		helpers = setCRUD(trans, TABLE, Data.class, defLoader);

+

+		// Need a specialty Creator to handle the "now()"

+		// 9/9/2013 - jg - Just great... now() is evaluated once on the Client side, invalidating usage (what's the point of a now() from a long time in the past?)

+		// Unless this is fixed, we're putting in a non-prepared statement

+		// Solved in Cassandra.  Make sure you are running 1.2.6 Cassandra or later. https://issues.apache.org/jira/browse/CASSANDRA-5616	

+		replace(CRUD.create, new PSInfo(trans, "INSERT INTO history (" +  helpers[FIELD_COMMAS] +

+					") VALUES(now(),?,?,?,?,?,?,?)", 

+					new HistLoader(0) {

+						@Override

+						protected void key(Data data, int idx, Object[] obj) {

+						}

+					},writeConsistency)

+				);

+//		disable(CRUD.Create);

+		

+		replace(CRUD.read, new PSInfo(trans, SELECT_SP +  helpers[FIELD_COMMAS] +

+				" FROM history WHERE id = ?", defLoader,readConsistency) 

+//				new HistLoader(2) {

+//					@Override

+//					protected void key(Data data, int idx, Object[] obj) {

+//						obj[idx]=data.yr_mon;

+//						obj[++idx]=data.id;

+//					}

+//				})

+			);

+		disable(CRUD.update);

+		disable(CRUD.delete);

+		

+		readByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + 

+				" FROM history WHERE user = ?", defLoader,readConsistency);

+		readBySubject = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + 

+				" FROM history WHERE subject = ? and target = ? ALLOW FILTERING", defLoader,readConsistency);

+		readByYRMN = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + 

+				" FROM history WHERE yr_mon = ?", defLoader,readConsistency);

+		async(true); //TODO dropping messages with Async

+	}

+

+	public static Data newInitedData() {

+		Data data = new Data();

+		Date now = new Date();

+		data.yr_mon = Integer.parseInt(monthFormat.format(now));

+		// data.day_time = Integer.parseInt(dayTimeFormat.format(now));

+		return data;		

+	}

+

+	public Result<List<Data>> readByYYYYMM(AuthzTrans trans, int yyyymm) {

+		Result<ResultSet> rs = readByYRMN.exec(trans, "yr_mon", yyyymm);

+		if(rs.notOK()) {

+			return Result.err(rs);

+		}

+		return extract(defLoader,rs.value,null,dflt);

+	}

+

+	/**

+	 * Gets the history for a user in the specified months.

+	 *

+	 * yyyymm - one or more year-month buckets in yyyymm format (e.g. 201707)

+	 **/

+	public Result<List<Data>> readByUser(AuthzTrans trans, String user, int ... yyyymm) {

+		if(yyyymm.length==0) {

+			return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");

+		}

+		Result<ResultSet> rs = readByUser.exec(trans, "user", user);

+		if(rs.notOK()) {

+			return Result.err(rs);

+		}

+		return extract(defLoader,rs.value,null,new YYYYMM(yyyymm)); // yyyymm.length==0 was rejected above

+	}

+	

+	public Result<List<Data>> readBySubject(AuthzTrans trans, String subject, String target, int ... yyyymm) {

+		if(yyyymm.length==0) {

+			return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");

+		}

+		Result<ResultSet> rs = readBySubject.exec(trans, "subject", subject, target);

+		if(rs.notOK()) {

+			return Result.err(rs);

+		}

+		return extract(defLoader,rs.value,null,new YYYYMM(yyyymm)); // yyyymm.length==0 was rejected above

+	}

+	

+	private class YYYYMM implements Accept<Data> {

+		private int[] yyyymm;

+		public YYYYMM(int yyyymm[]) {

+			this.yyyymm = yyyymm;

+		}

+		@Override

+		public boolean ok(Data data) {

+			int dym = data.yr_mon;

+			for(int ym:yyyymm) {

+				if(dym==ym) {

+					return true;

+				}

+			}

+			return false;

+		}

+		

+	};

+	

+}
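
Each history record is bucketed by calendar month via newInitedData(), which is what lets readByYYYYMM() select a single yr_mon value and lets readByUser()/readBySubject() post-filter with the YYYYMM Accept. The bucketing alone (note that SimpleDateFormat is not thread-safe, so sharing the static monthFormat assumes single-threaded use):

    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class YrMonSketch {
        public static void main(String[] args) {
            SimpleDateFormat monthFormat = new SimpleDateFormat("yyyyMM");
            // e.g. 201707 for July 2017; matches HistoryDAO.Data.yr_mon
            int yrMon = Integer.parseInt(monthFormat.format(new Date()));
            System.out.println(yrMon);
        }
    }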

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Namespace.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Namespace.java
new file mode 100644
index 0000000..98c4616
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Namespace.java
@@ -0,0 +1,151 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.io.ByteArrayOutputStream;

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.ArrayList;

+import java.util.List;

+import java.util.Map.Entry;

+

+import org.onap.aaf.cssa.rserv.Pair;

+import org.onap.aaf.dao.Bytification;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.Loader;

+

+

+public class Namespace implements Bytification {

+	public static final int MAGIC=250935515;

+	public static final int VERSION=1;

+	public static final int BUFF_SIZE=48;

+

+	public String name;

+	public List<String> owner;

+	public List<String> admin;

+	public List<Pair<String,String>> attrib;

+	public String description;

+	public Integer type;

+	public String parent;

+	public Namespace() {}

+	

+	public Namespace(NsDAO.Data ndd) {

+		name = ndd.name;

+		description = ndd.description;

+		type = ndd.type;

+		parent = ndd.parent;

+		if(ndd.attrib!=null && !ndd.attrib.isEmpty()) {

+			attrib = new ArrayList<Pair<String,String>>();

+			for( Entry<String, String> entry : ndd.attrib.entrySet()) {

+				attrib.add(new Pair<String,String>(entry.getKey(),entry.getValue()));

+			}

+		}

+	}

+	

+	public Namespace(NsDAO.Data ndd,List<String> owner, List<String> admin) {

+		name = ndd.name;

+		this.owner = owner;

+		this.admin = admin;

+		description = ndd.description;

+		type = ndd.type;

+		parent = ndd.parent;

+		if(ndd.attrib!=null && !ndd.attrib.isEmpty()) {

+			attrib = new ArrayList<Pair<String,String>>();

+			for( Entry<String, String> entry : ndd.attrib.entrySet()) {

+				attrib.add(new Pair<String,String>(entry.getKey(),entry.getValue()));

+			}

+		}

+	}

+

+	public NsDAO.Data data() {

+		NsDAO.Data ndd = new NsDAO.Data();

+		ndd.name = name;

+		ndd.description = description;

+		ndd.parent = parent;

+		ndd.type = type;

+		return ndd;

+	}

+

+	@Override

+	public ByteBuffer bytify() throws IOException {

+		ByteArrayOutputStream baos = new ByteArrayOutputStream();

+		DataOutputStream os = new DataOutputStream(baos);

+

+		Loader.writeHeader(os,MAGIC,VERSION);

+		Loader.writeString(os, name);

+		os.writeInt(type);

+		Loader.writeStringSet(os,admin);

+		Loader.writeStringSet(os,owner);

+		Loader.writeString(os,description);

+		Loader.writeString(os,parent);

+

+		return ByteBuffer.wrap(baos.toByteArray());

+	}

+

+	@Override

+	public void reconstitute(ByteBuffer bb) throws IOException {

+		DataInputStream is = CassDAOImpl.toDIS(bb);

+		/*int version = */Loader.readHeader(is,MAGIC,VERSION);

+		// If the version changes between production runs, you'll need a switch statement here to read each version's fields appropriately

+		

+		byte[] buff = new byte[BUFF_SIZE];

+		name = Loader.readString(is, buff);

+		type = is.readInt();

+		admin = Loader.readStringList(is,buff);

+		owner = Loader.readStringList(is,buff);

+		description = Loader.readString(is,buff);

+		parent = Loader.readString(is,buff);

+		

+	}

+

+	/* (non-Javadoc)

+	 * @see java.lang.Object#hashCode()

+	 */

+	@Override

+	public int hashCode() {

+		return name.hashCode();

+	}

+	

+

+	/* (non-Javadoc)

+	 * @see java.lang.Object#toString()

+	 */

+	@Override

+	public String toString() {

+		return name;

+	}

+

+	/* (non-Javadoc)

+	 * @see java.lang.Object#equals(java.lang.Object)

+	 */

+	@Override

+	public boolean equals(Object arg0) {

+		if(arg0==null || !(arg0 instanceof Namespace)) {

+			return false;

+		}

+		return name.equals(((Namespace)arg0).name);

+	}

+

+}
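
Namespace mainly adapts NsDAO.Data for transport; note that data() carries only name, description, type, and parent back, not owner/admin/attrib. A sketch (field values hypothetical):

    import org.onap.aaf.dao.aaf.cass.Namespace;
    import org.onap.aaf.dao.aaf.cass.NsDAO;

    public class NamespaceSketch {
        public static void main(String[] args) {
            NsDAO.Data ndd = new NsDAO.Data();
            ndd.name = "org.onap.sample";   // hypothetical namespace
            ndd.description = "sample ns";
            ndd.type = 3;                   // APP, per the NsDAO constants
            ndd.parent = "org.onap";

            Namespace ns = new Namespace(ndd);
            NsDAO.Data back = ns.data();    // owner/admin/attrib are not round-tripped
            System.out.println(back.name + " under " + back.parent);
        }
    }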

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsDAO.java
new file mode 100644
index 0000000..9e18195
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsDAO.java
@@ -0,0 +1,542 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.io.ByteArrayOutputStream;

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.HashMap;

+import java.util.HashSet;

+import java.util.Iterator;

+import java.util.List;

+import java.util.Map;

+import java.util.Map.Entry;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.Bytification;

+import org.onap.aaf.dao.Cached;

+import org.onap.aaf.dao.CassAccess;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.Loader;

+import org.onap.aaf.dao.Streamer;

+

+import java.util.Set;

+				int idx = _idx;

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.Env;

+import org.onap.aaf.inno.env.TimeTaken;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.ResultSet;

+import com.datastax.driver.core.Row;

+import com.datastax.driver.core.exceptions.DriverException;

+

+/**

+ * NsDAO

+ * 

+ * Data Access Object for Namespace Data

+ *

+ */

+public class NsDAO extends CassDAOImpl<AuthzTrans,NsDAO.Data> {

+	public static final String TABLE = "ns";

+	public static final String TABLE_ATTRIB = "ns_attrib";

+    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F

+    public static final int ROOT = 1;

+    public static final int COMPANY=2;

+    public static final int APP = 3;

+

+	private static final String BEGIN_BATCH = "BEGIN BATCH\n";

+	private static final String APPLY_BATCH = "APPLY BATCH;\n";

+	private static final String SQSCCR = "';\n";

+	private static final String SQCSQ = "','";

+    

+	private HistoryDAO historyDAO;

+	private CacheInfoDAO infoDAO;

+	private PSInfo psNS;

+

+	public NsDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {

+		super(trans, NsDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		init(trans);

+	}

+

+	public NsDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO iDAO) throws APIException, IOException {

+		super(trans, NsDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		historyDAO=hDAO;

+		infoDAO = iDAO;

+		init(trans);

+	}

+

+

+    //////////////////////////////////////////

+    // Data Definition, matches Cassandra DM

+    //////////////////////////////////////////

+    private static final int KEYLIMIT = 1;

+    /**

+     * Data class that matches the Cassandra Table "role"

+     * 

+     */

+	public static class Data extends CacheableData implements Bytification {

+		public String		      name;

+		public int			      type;

+		public String			  description;

+		public String			  parent;

+		public Map<String,String> attrib;

+

+//		////////////////////////////////////////

+//        // Getters

+		public Map<String,String> attrib(boolean mutable) {

+			if (attrib == null) {

+				attrib = new HashMap<String,String>();

+			} else if (mutable && !(attrib instanceof HashMap)) {

+				attrib = new HashMap<String,String>(attrib);

+			}

+			return attrib;

+		}

+

+		@Override

+		public int[] invalidate(Cached<?,?> cache) {

+			return new int[] {

+				seg(cache,name)

+			};

+		}

+

+		public NsSplit split(String name) {

+			return new NsSplit(this,name);

+		}

+

+		@Override

+		public ByteBuffer bytify() throws IOException {

+			ByteArrayOutputStream baos = new ByteArrayOutputStream();

+			NSLoader.deflt.marshal(this,new DataOutputStream(baos));

+			return ByteBuffer.wrap(baos.toByteArray());

+		}

+		

+		@Override

+		public void reconstitute(ByteBuffer bb) throws IOException {

+			NSLoader.deflt.unmarshal(this,toDIS(bb));

+		}

+		

+		@Override

+		public String toString() {

+			return name;

+		}

+		

+    }

+    

+    private void init(AuthzTrans trans) throws APIException, IOException {

+		// Set up sub-DAOs

+		if(historyDAO==null) {

+			historyDAO = new HistoryDAO(trans, this);

+		}

+		if(infoDAO==null) {

+			infoDAO = new CacheInfoDAO(trans,this);

+		}

+

+		String[] helpers = setCRUD(trans, TABLE, Data.class, NSLoader.deflt,4/*need to skip attrib */);

+		

+		psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +

+				" WHERE parent = ?", new NSLoader(1),readConsistency);

+

+	}

+	

+    private static final class NSLoader extends Loader<Data> implements Streamer<Data> {

+		public static final int MAGIC=250935515;

+    	public static final int VERSION=1;

+    	public static final int BUFF_SIZE=48;

+

+    	public static final NSLoader deflt = new NSLoader(KEYLIMIT);

+    	

+		public NSLoader(int keylimit) {

+			super(keylimit);

+		}

+

+		@Override

+		public Data load(Data data, Row row) {

+			// Reading columns by int index is more efficient than by name

+			data.name = row.getString(0);

+			data.type = row.getInt(1);

+			data.description = row.getString(2);

+			data.parent = row.getString(3);

+			return data;

+		}

+

+		@Override

+		protected void key(Data data, int idx, Object[] obj) {

+			obj[idx]=data.name;

+		}

+

+		@Override

+		protected void body(Data data, int _idx, Object[] obj) {

+			int idx = _idx;

+

+			obj[idx]=data.type;

+			obj[++idx]=data.description;

+			obj[++idx]=data.parent;

+		}

+		

+		@Override

+		public void marshal(Data data, DataOutputStream os) throws IOException {

+			writeHeader(os,MAGIC,VERSION);

+			writeString(os, data.name);

+			os.writeInt(data.type);

+			writeString(os,data.description);

+			writeString(os,data.parent);

+			if(data.attrib==null) {

+				os.writeInt(-1);

+			} else {

+				os.writeInt(data.attrib.size());

+				for(Entry<String, String> es : data.attrib(false).entrySet()) {

+					writeString(os,es.getKey());

+					writeString(os,es.getValue());

+				}

+			}

+		}

+

+		@Override

+		public void unmarshal(Data data, DataInputStream is) throws IOException {

+			/*int version = */readHeader(is,MAGIC,VERSION);

+			// If the version changes between production runs, you'll need a switch statement here to read each version's fields appropriately

+			

+			byte[] buff = new byte[BUFF_SIZE];

+			data.name = readString(is, buff);

+			data.type = is.readInt();

+			data.description = readString(is,buff);

+			data.parent = readString(is,buff);

+			int count = is.readInt();

+			if(count>0) {

+				Map<String, String> da = data.attrib(true);

+				for(int i=0;i<count;++i) {

+					da.put(readString(is,buff), readString(is,buff));

+				}

+			}

+		}

+

+    }

+    

+	@Override

+	public Result<Data> create(AuthzTrans trans, Data data) {

+		String ns = data.name;

+		// Ensure Parent is set

+		int ldot = ns.lastIndexOf('.');

+		data.parent=ldot<0?".":ns.substring(0,ldot);

+

+		// insert Attributes

+		StringBuilder stmt = new StringBuilder();

+		stmt.append(BEGIN_BATCH);

+		attribInsertStmts(stmt, data);

+		stmt.append(APPLY_BATCH);

+		try {

+			getSession(trans).execute(stmt.toString());

+//// TEST CODE for Exception				

+//			boolean force = true; 

+//			if(force) {

+//				throw new com.datastax.driver.core.exceptions.NoHostAvailableException(new HashMap<InetSocketAddress,Throwable>());

+////				throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"Sample Message");

+//			}

+////END TEST CODE

+

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			trans.info().log(stmt);

+			return Result.err(Result.ERR_Backend, "Backend Access");

+		}

+		return super.create(trans, data);

+	}

+

+	@Override

+	public Result<Void> update(AuthzTrans trans, Data data) {

+		String ns = data.name;

+		// Ensure Parent is set

+		int ldot = ns.lastIndexOf('.');

+		data.parent=ldot<0?".":ns.substring(0,ldot);

+

+		StringBuilder stmt = new StringBuilder();

+		stmt.append(BEGIN_BATCH);

+		try {

+			Map<String, String> localAttr = data.attrib;

+			Result<Map<String, String>> rremoteAttr = readAttribByNS(trans,ns);

+			if(rremoteAttr.notOK()) {

+				return Result.err(rremoteAttr);

+			}

+			// update Attributes

+			String str;

+			for(Entry<String, String> es : localAttr.entrySet()) {

+				str = rremoteAttr.value.get(es.getKey());

+				if(str==null || !str.equals(es.getValue())) {

+					attribInsertStmt(stmt, ns, es.getKey(),es.getValue());

+				}

+			}

+			

+			// No point in deleting... insert overwrites...

+//			for(Entry<String, String> es : remoteAttr.entrySet()) {

+//				str = localAttr.get(es.getKey());

+//				if(str==null || !str.equals(es.getValue())) {

+//					attribDeleteStmt(stmt, ns, es.getKey());

+//				}

+//			}

+			if(stmt.length()>BEGIN_BATCH.length()) {

+				stmt.append(APPLY_BATCH);

+				getSession(trans).execute(stmt.toString());

+			}

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			trans.info().log(stmt);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		}

+

+		return super.update(trans,data);

+	}

+

+	/* (non-Javadoc)

+	 * @see org.onap.aaf.dao.CassDAOImpl#read(org.onap.aaf.inno.env.TransStore, java.lang.Object)

+	 */

+	@Override

+	public Result<List<Data>> read(AuthzTrans trans, Data data) {

+		Result<List<Data>> rld = super.read(trans, data);

+		

+		if(rld.isOKhasData()) {

+			for(Data d : rld.value) {

+				// Note: attrib Map is null at this point; assigning directly saves time/memory

+				Result<Map<String, String>> rabn = readAttribByNS(trans,d.name);

+				if(rabn.isOK()) {

+					d.attrib = rabn.value;

+				} else {

+					return Result.err(rabn);

+				}

+			}

+		}

+		return rld;

+	}

+

+	/* (non-Javadoc)

+	 * @see org.onap.aaf.dao.CassDAOImpl#read(org.onap.aaf.inno.env.TransStore, java.lang.Object[])

+	 */

+	@Override

+	public Result<List<Data>> read(AuthzTrans trans, Object... key) {

+		Result<List<Data>> rld = super.read(trans, key);

+

+		if(rld.isOKhasData()) {

+			for(Data d : rld.value) {

+				// Note: attrib Map is null at this point; assigning directly saves time/memory

+				Result<Map<String, String>> rabn = readAttribByNS(trans,d.name);

+				if(rabn.isOK()) {

+					d.attrib = rabn.value;

+				} else {

+					return Result.err(rabn);

+				}

+			}

+		}

+		return rld;

+	}

+

+	@Override

+	public Result<Void> delete(AuthzTrans trans, Data data, boolean reread) {

+		TimeTaken tt = trans.start("Delete NS Attributes " + data.name, Env.REMOTE);

+		try {

+			StringBuilder stmt = new StringBuilder();

+			attribDeleteAllStmt(stmt, data);

+			try {

+				getSession(trans).execute(stmt.toString());

+			} catch (DriverException | APIException | IOException e) {

+				reportPerhapsReset(trans,e);

+				trans.info().log(stmt);

+				return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+			}

+		} finally {

+			tt.done();

+		}

+		return super.delete(trans, data, reread);

+

+	}

+    

+	public Result<Map<String,String>> readAttribByNS(AuthzTrans trans, String ns) {

+		Map<String,String> map = new HashMap<String,String>();

+		TimeTaken tt = trans.start("readAttribByNS " + ns, Env.REMOTE);

+		try {

+			ResultSet rs = getSession(trans).execute("SELECT key,value FROM " 

+					+ TABLE_ATTRIB 

+					+ " WHERE ns='"

+					+ ns

+					+ "';");

+			

+			for(Iterator<Row> iter = rs.iterator();iter.hasNext(); ) {

+				Row r = iter.next();

+				map.put(r.getString(0), r.getString(1));

+			}

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		} finally {

+			tt.done();

+		}

+		return Result.ok(map);

+	}

+

+	public Result<Set<String>> readNsByAttrib(AuthzTrans trans, String key) {

+		Set<String> set = new HashSet<String>();

+		TimeTaken tt = trans.start("readNsBykey " + key, Env.REMOTE);

+		try {

+			ResultSet rs = getSession(trans).execute("SELECT ns FROM " 

+				+ TABLE_ATTRIB 

+				+ " WHERE key='"

+				+ key

+				+ "';");

+		

+			for(Iterator<Row> iter = rs.iterator();iter.hasNext(); ) {

+				Row r = iter.next();

+				set.add(r.getString(0));

+			}

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		} finally {

+			tt.done();

+		}

+		return Result.ok(set);

+	}

+

+	public Result<Void> attribAdd(AuthzTrans trans, String ns, String key, String value) {

+		try {

+			getSession(trans).execute(attribInsertStmt(new StringBuilder(),ns,key,value).toString());

+			return Result.ok();

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		}

+	}

+	

+	private StringBuilder attribInsertStmt(StringBuilder sb, String ns, String key, String value) {

+		sb.append("INSERT INTO ");

+		sb.append(TABLE_ATTRIB);

+		sb.append(" (ns,key,value) VALUES ('");

+		sb.append(ns);

+		sb.append(SQCSQ);

+		sb.append(key);

+		sb.append(SQCSQ);

+		sb.append(value);

+		sb.append("');");

+		return sb;

+	}

+	

+	public Result<Void> attribRemove(AuthzTrans trans, String ns, String key) {

+		try {

+			getSession(trans).execute(attribDeleteStmt(new StringBuilder(),ns,key).toString());

+			return Result.ok();

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		}

+	}

+	

+	private StringBuilder attribDeleteStmt(StringBuilder stmt, String ns, String key) {

+		stmt.append("DELETE FROM ");

+		stmt.append(TABLE_ATTRIB);

+		stmt.append(" WHERE ns='");

+		stmt.append(ns);

+		stmt.append("' AND key='");

+		stmt.append(key);

+		stmt.append("';");

+		return stmt;

+	}

+	

+	private void attribDeleteAllStmt(StringBuilder stmt, Data data) {

+		stmt.append("  DELETE FROM ");

+		stmt.append(TABLE_ATTRIB);

+		stmt.append(" WHERE ns='");

+		stmt.append(data.name);

+		stmt.append(SQSCCR);

+	}

+

+	private void attribInsertStmts(StringBuilder stmt, Data data) {

+		// INSERT new Attrib

+		for(Entry<String,String> es : data.attrib(false).entrySet() ) {

+			stmt.append("  ");

+			attribInsertStmt(stmt,data.name,es.getKey(),es.getValue());

+		}

+	}

+

+	/**

+	 * Add description to Namespace

+	 * @param trans

+	 * @param ns

+	 * @param description

+	 * @return

+	 */

+	public Result<Void> addDescription(AuthzTrans trans, String ns, String description) {

+		try {

+			getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '" 

+				+ description + "' WHERE name = '" + ns + "';");

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		}

+

+		Data data = new Data();

+		data.name=ns;

+		wasModified(trans, CRUD.update, data, "Added description " + description + " to namespace " + ns, null );

+		return Result.ok();

+	}

+

+	public Result<List<Data>> getChildren(AuthzTrans trans, String parent) {

+		return psNS.read(trans, R_TEXT, new Object[]{parent});

+	}

+		

+

+    /**

+     * Log Modification statements to History

+     * 

+     * @param modified           which CRUD action was done

+     * @param data               entity data that needs a log entry

+     * @param overrideMessage    if this is specified, we use it rather than crafting a history message based on data

+     */

+    @Override

+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {

+    	boolean memo = override.length>0 && override[0]!=null;

+    	boolean subject = override.length>1 && override[1]!=null;

+

+        //TODO Must log history

+        HistoryDAO.Data hd = HistoryDAO.newInitedData();

+        hd.user = trans.user();

+        hd.action = modified.name();

+        hd.target = TABLE;

+        hd.subject = subject ? override[1] : data.name;

+        hd.memo = memo ? override[0] : (data.name + " was "  + modified.name() + 'd' );

+		if(modified==CRUD.delete) {

+			try {

+				hd.reconstruct = data.bytify();

+			} catch (IOException e) {

+				trans.error().log(e,"Could not serialize NsDAO.Data");

+			}

+		}

+

+        if(historyDAO.create(trans, hd).status!=Status.OK) {

+            trans.error().log("Cannot log to History");

+        }

+        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {

+            trans.error().log("Cannot touch CacheInfo");

+        }

+    }

+

+}
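
Both create() and update() above overwrite data.parent from the name itself, so callers never have to supply it; a top-level name gets "." as its parent. The rule in isolation:

    public class ParentSketch {
        static String parentOf(String ns) {
            // same derivation create()/update() apply before writing
            int ldot = ns.lastIndexOf('.');
            return ldot < 0 ? "." : ns.substring(0, ldot);
        }
        public static void main(String[] args) {
            System.out.println(parentOf("org.onap.sample")); // org.onap
            System.out.println(parentOf("org"));             // .
        }
    }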

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsSplit.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsSplit.java
new file mode 100644
index 0000000..21e5728
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsSplit.java
@@ -0,0 +1,62 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+public class NsSplit {

+	public final String ns;

+	public final String name;

+	public final NsDAO.Data nsd;

+	

+	public NsSplit(NsDAO.Data nsd, String child) {

+		this.nsd = nsd;

+		if(child.startsWith(nsd.name)) {

+			ns = nsd.name;

+			int dot = ns.length();

+			if(dot<child.length() && child.charAt(dot)=='.') {

+				name = child.substring(dot+1);

+			} else {

+				name="";

+			}

+		} else {

+			name=null;

+			ns = null;

+		}

+	}

+	

+	public NsSplit(String ns, String name) {

+		this.ns = ns;

+		this.name = name;

+		this.nsd = new NsDAO.Data();

+		nsd.name = ns;

+		int dot = ns.lastIndexOf('.');

+		if(dot>=0) {

+			nsd.parent = ns.substring(0, dot);

+		} else {

+			nsd.parent = ".";

+		}

+	}

+

+	public boolean isOK() {

+		return ns!=null && name !=null;

+	}

+}
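
The two NsSplit constructors work in opposite directions: one carves a child name out of a known namespace, the other rebuilds the namespace record from the pieces. For example (values hypothetical):

    import org.onap.aaf.dao.aaf.cass.NsSplit;

    public class NsSplitSketch {
        public static void main(String[] args) {
            NsSplit s = new NsSplit("org.onap.sample", "myPerm");
            // parent is derived from the last '.' in the ns
            System.out.println(s.ns + " / " + s.name + ", parent=" + s.nsd.parent);
            System.out.println(s.isOK()); // true: both ns and name are non-null
        }
    }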

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsType.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsType.java
new file mode 100644
index 0000000..c098acb
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsType.java
@@ -0,0 +1,74 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+/**

+ * Defines the Type Codes in the NS Table.

+ *

+ */

+public enum NsType {

+		UNKNOWN (-1),

+		DOT (0),

+		ROOT (1), 

+		COMPANY (2), 

+		APP (3), 

+		STACKED_APP (10), 

+		STACK (11);

+		

+		public final int type;

+		private NsType(int t) {

+			type = t;

+		}

+		/**

+		 * This is not the Ordinal, but the Type that is stored in NS Tables

+		 * 

+		 * @param t

+		 * @return

+		 */

+		public static NsType fromType(int t) {

+			for(NsType nst : values()) {

+				if(t==nst.type) {

+					return nst;

+				}

+			}

+			return UNKNOWN;

+		}

+		

+		/**

+		 * Use this one rather than "valueOf" to avoid Exception

+		 * @param s

+		 * @return

+		 */

+		public static NsType fromString(String s) {

+			if(s!=null) {

+				for(NsType nst : values()) {

+					if(nst.name().equals(s)) {

+						return nst;

+					}

+				}

+			}

+			return UNKNOWN;

+		}

+

+		

+}
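
fromType() maps the stored integer back to the enum, and fromString() is a null-safe alternative to valueOf(); both fall back to UNKNOWN instead of throwing:

    import org.onap.aaf.dao.aaf.cass.NsType;

    public class NsTypeSketch {
        public static void main(String[] args) {
            System.out.println(NsType.fromType(2));        // COMPANY
            System.out.println(NsType.fromType(99));       // UNKNOWN, no exception
            System.out.println(NsType.fromString("APP"));  // APP
            System.out.println(NsType.fromString(null));   // UNKNOWN, no NPE
        }
    }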

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/PermDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/PermDAO.java
new file mode 100644
index 0000000..e0b368f
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/PermDAO.java
@@ -0,0 +1,502 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.io.ByteArrayOutputStream;

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.HashSet;

+import java.util.List;

+import java.util.Set;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.Bytification;

+import org.onap.aaf.dao.Cached;

+import org.onap.aaf.dao.CassAccess;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.DAOException;

+import org.onap.aaf.dao.Loader;

+import org.onap.aaf.dao.Streamer;

+import org.onap.aaf.dao.aaf.hl.Question;

+

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.util.Split;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.Row;

+import com.datastax.driver.core.exceptions.DriverException;

+

+public class PermDAO extends CassDAOImpl<AuthzTrans,PermDAO.Data> {

+

+	public static final String TABLE = "perm";

+

+    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F

+	private static final String STAR = "*";

+	

+	private final HistoryDAO historyDAO;

+	private final CacheInfoDAO infoDAO;

+	

+	private PSInfo psNS, psChildren, psByType;

+

+	public PermDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {

+		super(trans, PermDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		init(trans);

+		historyDAO = new HistoryDAO(trans, this);

+		infoDAO = new CacheInfoDAO(trans,this);

+	}

+

+	public PermDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {

+		super(trans, PermDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		historyDAO = hDAO;

+		infoDAO=ciDAO;

+		init(trans);

+	}

+

+

+	private static final int KEYLIMIT = 4;

+	public static class Data extends CacheableData implements Bytification {

+		public String		ns;

+		public String		type;

+		public String		instance;

+		public String		action;

+		public Set<String>  roles; 

+		public String		description;

+

+		public Data() {}

+		

+		public Data(NsSplit nss, String instance, String action) {

+			ns = nss.ns;

+			type = nss.name;

+			this.instance = instance;

+			this.action = action;

+		}

+

+		public String fullType() {

+			return ns + '.' + type;

+		}

+		

+		public String fullPerm() {

+			return ns + '.' + type + '|' + instance + '|' + action;

+		}

+

+		public String encode() {

+			return ns + '|' + type + '|' + instance + '|' + action;

+		}

+		

+		/**

+		 * Decode Perm String, including breaking into appropriate Namespace

+		 * 

+		 * @param trans

+		 * @param q

+		 * @param p

+		 * @return

+		 */

+		public static Result<Data> decode(AuthzTrans trans, Question q, String p) {

+			String[] ss = Split.splitTrim('|', p,4);

+			if(ss[2]==null) {

+				return Result.err(Status.ERR_BadData,"Perm Encodings must be separated by '|'");

+			}

+			Data data = new Data();

+			if(ss[3]==null) { // older 3 part encoding must be evaluated for NS

+				Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);

+				if(nss.notOK()) {

+					return Result.err(nss);

+				}

+				data.ns=nss.value.ns;

+				data.type=nss.value.name;

+				data.instance=ss[1];

+				data.action=ss[2];

+			} else { // new 4 part encoding

+				data.ns=ss[0];

+				data.type=ss[1];

+				data.instance=ss[2];

+				data.action=ss[3];

+			}

+			return Result.ok(data);

+		}

+

+		/**

+		 * Decode Perm String, including breaking into appropriate Namespace

+		 * 

+		 * @param trans

+		 * @param q

+		 * @param p

+		 * @return

+		 */

+		public static Result<String[]> decodeToArray(AuthzTrans trans, Question q, String p) {

+			String[] ss = Split.splitTrim('|', p,4);

+			if(ss[2]==null) {

+				return Result.err(Status.ERR_BadData,"Perm Encodings must be separated by '|'");

+			}

+			

+			if(ss[3]==null) { // older 3 part encoding must be evaluated for NS

+				ss[3] = ss[2];

+				ss[2] = ss[1];

+				Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);

+				if(nss.notOK()) {

+					return Result.err(nss);

+				}

+				ss[1] = nss.value.name;

+				ss[0] = nss.value.ns;

+			}

+			return Result.ok(ss);

+		}

+

+		public static Data create(NsDAO.Data ns, String name) {

+			NsSplit nss = new NsSplit(ns,name);

+			Data rv = new Data();

+			rv.ns = nss.ns;

+			String[] s = nss.name.split("\\|");

+			switch(s.length) {

+				case 3:

+					rv.type=s[0];

+					rv.instance=s[1];

+					rv.action=s[2];

+					break;

+				case 2:

+					rv.type=s[0];

+					rv.instance=s[1];

+					rv.action=STAR;

+					break;

+				default:

+					rv.type=s[0];

+					rv.instance = STAR;

+					rv.action = STAR;

+			}

+			return rv;

+		}

+		

+		public static Data create(AuthzTrans trans, Question q, String name) {

+			String[] s = name.split("\\|");

+			Result<NsSplit> rdns = q.deriveNsSplit(trans, s[0]);

+			Data rv = new PermDAO.Data();

+			if(rdns.isOKhasData()) {

+				rv.ns = rdns.value.ns;

+				switch(s.length) {

+					case 3:

+						rv.type=rdns.value.name;

+						rv.instance=s[1];

+						rv.action=s[2];

+						break;

+					case 2:

+						rv.type=rdns.value.name;

+						rv.instance=s[1];

+						rv.action=STAR;

+						break;

+					default:

+						rv.type=rdns.value.name;

+						rv.instance = STAR;

+						rv.action = STAR;

+				}

+			}

+			return rv;

+		}

+		

+        ////////////////////////////////////////

+        // Getters

+        public Set<String> roles(boolean mutable) {

+            if (roles == null) {

+                roles = new HashSet<String>();

+            } else if (mutable && !(roles instanceof HashSet)) {

+                roles = new HashSet<String>(roles);

+            }

+            return roles;

+        }

+

+		@Override

+		public int[] invalidate(Cached<?,?> cache) {

+			return new int[] {

+				seg(cache,ns),

+				seg(cache,ns,type),

+				seg(cache,ns,type,STAR),

+				seg(cache,ns,type,instance,action)

+			};

+		}

+

+		@Override

+		public ByteBuffer bytify() throws IOException {

+			ByteArrayOutputStream baos = new ByteArrayOutputStream();

+			PermLoader.deflt.marshal(this, new DataOutputStream(baos));

+			return ByteBuffer.wrap(baos.toByteArray());

+		}

+		

+		@Override

+		public void reconstitute(ByteBuffer bb) throws IOException {

+			PermLoader.deflt.unmarshal(this, toDIS(bb));

+		}

+

+		@Override

+		public String toString() {

+			return encode();

+		}

+	}
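
The class distinguishes a 4-part encode() (ns kept separate) from the dotted 3-part fullPerm() that decode() also accepts. A sketch of both shapes (namespace values hypothetical):

    import org.onap.aaf.dao.aaf.cass.PermDAO;

    public class PermEncodeSketch {
        public static void main(String[] args) {
            PermDAO.Data d = new PermDAO.Data();
            d.ns = "org.onap.sample";   // hypothetical
            d.type = "access";
            d.instance = "*";
            d.action = "read";
            System.out.println(d.encode());   // org.onap.sample|access|*|read
            System.out.println(d.fullPerm()); // org.onap.sample.access|*|read
        }
    }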

+	

+	private static class PermLoader extends Loader<Data> implements Streamer<Data> {

+		public static final int MAGIC=283939453;

+    	public static final int VERSION=1;

+    	public static final int BUFF_SIZE=96;

+

+    	public static final PermLoader deflt = new PermLoader(KEYLIMIT);

+    	

+		public PermLoader(int keylimit) {

+			super(keylimit);

+		}

+		

+		@Override

+		public Data load(Data data, Row row) {

+			// Reading columns by int index is more efficient; the order must match the "fields" string

+			data.ns = row.getString(0);

+			data.type = row.getString(1);

+			data.instance = row.getString(2);

+			data.action = row.getString(3);

+			data.roles = row.getSet(4,String.class);

+			data.description = row.getString(5);

+			return data;

+		}

+

+		@Override

+		protected void key(Data data, int _idx, Object[] obj) {

+			int idx = _idx;

+			obj[idx]=data.ns;

+			obj[++idx]=data.type;

+			obj[++idx]=data.instance;

+			obj[++idx]=data.action;

+		}

+

+		@Override

+		protected void body(Data data, int _idx, Object[] obj) {

+			int idx = _idx;

+			obj[idx]=data.roles;

+			obj[++idx]=data.description;

+		}

+

+		@Override

+		public void marshal(Data data, DataOutputStream os) throws IOException {

+			writeHeader(os,MAGIC,VERSION);

+			writeString(os, data.ns);

+			writeString(os, data.type);

+			writeString(os, data.instance);

+			writeString(os, data.action);

+			writeStringSet(os, data.roles);

+			writeString(os, data.description);

+		}

+

+		@Override

+		public void unmarshal(Data data, DataInputStream is) throws IOException {

+			/*int version = */readHeader(is,MAGIC,VERSION);

+			// If the version changes between production runs, you'll need a switch statement here to read each version's fields appropriately

+			byte[] buff = new byte[BUFF_SIZE];

+			data.ns = readString(is, buff);

+			data.type = readString(is,buff);

+			data.instance = readString(is,buff);

+			data.action = readString(is,buff);

+			data.roles = readStringSet(is,buff);

+			data.description = readString(is,buff);

+		}

+	}

+	

+	private void init(AuthzTrans trans) {

+		// KEYLIMIT (4) is the number of key fields

+		String[] helpers = setCRUD(trans, TABLE, Data.class, PermLoader.deflt);

+		

+		// Other SELECT style statements... match with a local Method

+		psByType = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + 

+				" WHERE ns = ? AND type = ?", new PermLoader(2) {

+			@Override

+			protected void key(Data data, int idx, Object[] obj) {

+				obj[idx]=data.type;

+			}

+		},readConsistency);

+		

+		psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +

+				" WHERE ns = ?", new PermLoader(1),readConsistency);

+				

+		psChildren = new PSInfo(trans, SELECT_SP +  helpers[FIELD_COMMAS] +  " FROM " + TABLE + 

+				" WHERE ns=? AND type > ? AND type < ?", 

+				new PermLoader(3) {

+			@Override

+			protected void key(Data data, int _idx, Object[] obj) {

+				int idx = _idx;

+				obj[idx] = data.ns;

+				obj[++idx]=data.type + DOT;

+				obj[++idx]=data.type + DOT_PLUS_ONE;

+			}

+		},readConsistency);

+

+	}
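
+	// Editorial note: psChildren brackets descendants lexically. Assuming, as the

+	// names suggest, DOT is "." and DOT_PLUS_ONE is the next character "/", a call

+	// such as readChildren(trans, "org.osaaf", "access") (values hypothetical) runs

+	//   ... WHERE ns='org.osaaf' AND type > 'access.' AND type < 'access/'

+	// so 'access.file' matches while 'accessible' does not.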

+

+

+	/**

+	 * Add a single Role to the Permission's Role Collection
 
+	 * 
 
+	 * @param trans
 
+	 * @param perm
 
+	 * @param roleFullName

+	 * @return

+	 */

+	public Result<Void> addRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) {

+		// Note: Prepared Statements for Collection updates aren't supported

+		//ResultSet rv =

+		try {

+			getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles + {'"	+ roleFullName + "'} " +

+				"WHERE " +

+					"ns = '" + perm.ns + "' AND " +

+					"type = '" + perm.type + "' AND " +

+					"instance = '" + perm.instance + "' AND " +

+					"action = '" + perm.action + "';"

+					);

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		}

+

+		wasModified(trans, CRUD.update, perm, "Added role " + roleFullName + " to perm " +

+				perm.ns + '.' + perm.type + '|' + perm.instance + '|' + perm.action);

+		return Result.ok();

+	}
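
+	// A sketch of the CQL the concatenation above produces, assuming UPDATE_SP is

+	// the literal "UPDATE " (all values hypothetical):

+	//   UPDATE perm SET roles = roles + {'org.osaaf.admin'}

+	//     WHERE ns = 'org.osaaf' AND type = 'access' AND instance = '*' AND action = '*';

+	// Because values are concatenated rather than bound, inputs are assumed to be

+	// pre-validated identifiers.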

+

+	/**

+	 * Remove a single Role from the Permission's Role Collection
 
+	 * @param trans
 
+	 * @param perm
 
+	 * @param roleFullName

+	 * @return

+	 */

+	public Result<Void> delRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) {

+		// Note: Prepared Statements for Collection updates aren't supported

+		//ResultSet rv =

+		try {

+			getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles - {'" + roleFullName + "'} " +

+				"WHERE " +

+					"ns = '" + perm.ns + "' AND " +

+					"type = '" + perm.type + "' AND " +

+					"instance = '" + perm.instance + "' AND " +

+					"action = '" + perm.action + "';"

+					);

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		}

+

+		//TODO how can we tell when it doesn't?

+		wasModified(trans, CRUD.update, perm, "Removed role " + roleFullName + " from perm " +

+				perm.ns + '.' + perm.type + '|' + perm.instance + '|' + perm.action);

+		return Result.ok();

+	}

+

+

+	

+	/**

+	 * Additional method: 

+	 * 		Select all Permissions by Type
 
+	 * 
 
+	 * @param trans
 
+	 * @param ns
 
+	 * @param type
 
+	 * @return

+	 */

+	public Result<List<Data>> readByType(AuthzTrans trans, String ns, String type) {

+		return psByType.read(trans, R_TEXT, new Object[]{ns, type});

+	}

+	

+	public Result<List<Data>> readChildren(AuthzTrans trans, String ns, String type) {

+		return psChildren.read(trans, R_TEXT, new Object[]{ns, type+DOT, type + DOT_PLUS_ONE});

+	}

+

+	public Result<List<Data>> readNS(AuthzTrans trans, String ns) {

+		return psNS.read(trans, R_TEXT, new Object[]{ns});

+	}

+

+	/**

+	 * Add description to this permission

+	 * 

+	 * @param trans

+	 * @param ns

+	 * @param type

+	 * @param instance

+	 * @param action

+	 * @param description

+	 * @return

+	 */

+	public Result<Void> addDescription(AuthzTrans trans, String ns, String type,

+			String instance, String action, String description) {

+		try {

+			getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '" 

+				+ description + "' WHERE ns = '" + ns + "' AND type = '" + type + "'"

+				+ "AND instance = '" + instance + "' AND action = '" + action + "';");

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		}

+

+		Data data = new Data();

+		data.ns=ns;

+		data.type=type;

+		data.instance=instance;

+		data.action=action;

+		wasModified(trans, CRUD.update, data, "Added description " + description + " to permission " 

+				+ data.encode(), null );

+		return Result.ok();

+	}

+	

+	/**

+	 * Log Modification statements to History

+	 */

+	@Override

+	protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {

+    	boolean memo = override.length>0 && override[0]!=null;

+    	boolean subject = override.length>1 && override[1]!=null;

+

+		// Need to update history

+		HistoryDAO.Data hd = HistoryDAO.newInitedData();

+		hd.user = trans.user();

+		hd.action = modified.name();

+		hd.target = TABLE;

+		hd.subject = subject ? override[1] : data.fullType();

+		if (memo) {

+            hd.memo = String.format("%s", override[0]);

+        } else {

+            hd.memo = String.format("%sd %s|%s|%s", modified.name(),data.fullType(),data.instance,data.action);

+        }

+		

+		if(modified==CRUD.delete) {

+			try {

+				hd.reconstruct = data.bytify();

+			} catch (IOException e) {

+				trans.error().log(e,"Could not serialize PermDAO.Data");

+			}

+		}

+		

+        if(historyDAO.create(trans, hd).status!=Status.OK) {

+        	trans.error().log("Cannot log to History");

+        }

+        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {

+        	trans.error().log("Cannot touch CacheInfo");

+        }

+	}
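
+	// Reading of the pattern above: every mutation leaves two trails, a History row

+	// (with a bytify() snapshot on delete so the record can be reconstituted) and a

+	// CacheInfo touch of the segments from invalidate(), which appears to signal

+	// other instances to drop their cached copies.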

+}

+

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/RoleDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/RoleDAO.java
new file mode 100644
index 0000000..5b0190e
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/RoleDAO.java
@@ -0,0 +1,412 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.io.ByteArrayOutputStream;

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.HashSet;

+import java.util.List;

+import java.util.Set;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.Bytification;

+import org.onap.aaf.dao.Cached;

+import org.onap.aaf.dao.CassAccess;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.Loader;

+import org.onap.aaf.dao.Streamer;

+import org.onap.aaf.dao.aaf.hl.Question;

+

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.util.Split;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.Row;

+import com.datastax.driver.core.exceptions.DriverException;

+

+public class RoleDAO extends CassDAOImpl<AuthzTrans,RoleDAO.Data> {

+

+	public static final String TABLE = "role";

+    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F

+    

+	private final HistoryDAO historyDAO;

+	private final CacheInfoDAO infoDAO;

+

+	private PSInfo psChildren, psNS, psName;

+

+	public RoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {

+		super(trans, RoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+        // Set up sub-DAOs

+        historyDAO = new HistoryDAO(trans, this);

+		infoDAO = new CacheInfoDAO(trans,this);

+		init(trans);

+	}

+

+	public RoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {

+		super(trans, RoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		historyDAO = hDAO;

+		infoDAO = ciDAO;

+		init(trans);

+	}

+

+

+    //////////////////////////////////////////

+    // Data Definition, matches Cassandra DM

+    //////////////////////////////////////////

+    private static final int KEYLIMIT = 2;

+    /**

+     * Data class that matches the Cassandra Table "role"

+     */

+	public static class Data extends CacheableData implements Bytification {

+    	public String		ns;

+		public String		name;

+		public Set<String>  perms;

+		public String		description;

+

+        ////////////////////////////////////////

+        // Getters

+		public Set<String> perms(boolean mutable) {

+			if (perms == null) {

+				perms = new HashSet<String>();

+			} else if (mutable && !(perms instanceof HashSet)) {

+				perms = new HashSet<String>(perms);

+			}

+			return perms;

+		}

+		

+		public static Data create(NsDAO.Data ns, String name) {

+			NsSplit nss = new NsSplit(ns,name);		

+			RoleDAO.Data rv = new Data();

+			rv.ns = nss.ns;

+			rv.name=nss.name;

+			return rv;

+		}

+		

+		public String fullName() {

+			return ns + '.' + name;

+		}

+		

+		public String encode() {

+			return ns + '|' + name;

+		}

+		

+		/**

+		 * Decode Role String, including breaking into appropriate Namespace

+		 * 

+		 * @param trans

+		 * @param q

+		 * @param r

+		 * @return

+		 */

+		public static Result<Data> decode(AuthzTrans trans, Question q, String r) {

+			String[] ss = Split.splitTrim('|', r,2);

+			Data data = new Data();

+			if(ss[1]==null) { // older 1 part encoding must be evaluated for NS

+				Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);

+				if(nss.notOK()) {

+					return Result.err(nss);

+				}

+				data.ns=nss.value.ns;

+				data.name=nss.value.name;

+			} else { // new 2 part encoding

+				data.ns=ss[0];

+				data.name=ss[1];

+			}

+			return Result.ok(data);

+		}
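
+		// Example (hypothetical values): "org.osaaf.admin" contains no '|', so ss[1]

+		// is null and deriveNsSplit resolves ns "org.osaaf" + name "admin"; the

+		// 2-part form "org.osaaf|admin" decodes directly without a Question lookup.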

+

+		/**

+		 * Decode from UserRole Data

+		 * @param urdd

+		 * @return

+		 */

+		public static RoleDAO.Data decode(UserRoleDAO.Data urdd) {

+			RoleDAO.Data rd = new RoleDAO.Data();

+			rd.ns = urdd.ns;

+			rd.name = urdd.rname;

+			return rd;

+		}

+

+

+		/**

+		 * Decode Role String, including breaking into appropriate Namespace

+		 * 

+		 * @param trans

+		 * @param q

+		 * @param p

+		 * @return

+		 */

+		public static Result<String[]> decodeToArray(AuthzTrans trans, Question q, String p) {

+			String[] ss = Split.splitTrim('|', p,2);

+			if(ss[1]==null) { // older 1 part encoding must be evaluated for NS

+				Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);

+				if(nss.notOK()) {

+					return Result.err(nss);

+				}

+				ss[0] = nss.value.ns;

+				ss[1] = nss.value.name;

+			}

+			return Result.ok(ss);

+		}

+		

+		@Override

+		public int[] invalidate(Cached<?,?> cache) {

+			return new int[] {

+				seg(cache,ns,name),

+				seg(cache,ns),

+				seg(cache,name),

+			};

+		}

+

+		@Override

+		public ByteBuffer bytify() throws IOException {

+			ByteArrayOutputStream baos = new ByteArrayOutputStream();

+			RoleLoader.deflt.marshal(this,new DataOutputStream(baos));

+			return ByteBuffer.wrap(baos.toByteArray());

+		}

+		

+		@Override

+		public void reconstitute(ByteBuffer bb) throws IOException {

+			RoleLoader.deflt.unmarshal(this, toDIS(bb));

+		}

+

+		@Override

+		public String toString() {

+			return ns + '.' + name;

+		}

+    }

+

+    private static class RoleLoader extends Loader<Data> implements Streamer<Data> {

+		public static final int MAGIC=923577343;

+    	public static final int VERSION=1;

+    	public static final int BUFF_SIZE=96;

+

+    	public static final RoleLoader deflt = new RoleLoader(KEYLIMIT);

+    	

+		public RoleLoader(int keylimit) {

+			super(keylimit);

+		}

+		

+		@Override

+		public Data load(Data data, Row row) {

+			// Reading columns by int index is more efficient than matching field names

+			data.ns = row.getString(0);

+			data.name = row.getString(1);

+			data.perms = row.getSet(2,String.class);

+			data.description = row.getString(3);

+			return data;

+		}

+

+		@Override

+		protected void key(Data data, int _idx, Object[] obj) {

+			int idx = _idx;

+			obj[idx]=data.ns;

+			obj[++idx]=data.name;

+		}

+

+		@Override

+		protected void body(Data data, int _idx, Object[] obj) {

+			int idx = _idx;

+			obj[idx]=data.perms;

+			obj[++idx]=data.description;

+		}

+

+		@Override

+		public void marshal(Data data, DataOutputStream os) throws IOException {

+			writeHeader(os,MAGIC,VERSION);

+			writeString(os, data.ns);

+			writeString(os, data.name);

+			writeStringSet(os,data.perms);

+			writeString(os, data.description);

+		}

+

+		@Override

+		public void unmarshal(Data data, DataInputStream is) throws IOException {

+			/*int version = */readHeader(is,MAGIC,VERSION);

+			// If VERSION changes between production runs, you'll need a switch statement here to read each version's fields correctly

+			byte[] buff = new byte[BUFF_SIZE];

+			data.ns = readString(is, buff);

+			data.name = readString(is,buff);

+			data.perms = readStringSet(is,buff);

+			data.description = readString(is,buff);

+		}

+    };

+

+	private void init(AuthzTrans trans) {

+		String[] helpers = setCRUD(trans, TABLE, Data.class, RoleLoader.deflt);

+		

+		psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +

+				" WHERE ns = ?", new RoleLoader(1),readConsistency);

+

+		psName = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +

+				" WHERE name = ?", new RoleLoader(1),readConsistency);

+

+		psChildren = new PSInfo(trans, SELECT_SP +  helpers[FIELD_COMMAS] +  " FROM " + TABLE + 

+				" WHERE ns=? AND name > ? AND name < ?", 

+				new RoleLoader(3) {

+			@Override

+			protected void key(Data data, int _idx, Object[] obj) {

+				int idx = _idx;

+				obj[idx] = data.ns;

+				obj[++idx]=data.name + DOT;

+				obj[++idx]=data.name + DOT_PLUS_ONE;

+			}

+		},readConsistency);

+		

+	}

+

+	public Result<List<Data>> readNS(AuthzTrans trans, String ns) {

+		return psNS.read(trans, R_TEXT + " NS " + ns, new Object[]{ns});

+	}

+

+	public Result<List<Data>> readName(AuthzTrans trans, String name) {

+		return psName.read(trans, R_TEXT + name, new Object[]{name});

+	}

+

+	public Result<List<Data>> readChildren(AuthzTrans trans, String ns, String role) {

+		if(role.length()==0 || "*".equals(role)) {

+			return psChildren.read(trans, R_TEXT, new Object[]{ns, FIRST_CHAR, LAST_CHAR}); 

+		} else {

+			return psChildren.read(trans, R_TEXT, new Object[]{ns, role+DOT, role+DOT_PLUS_ONE});

+		}

+	}
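
+	// Reading of the branch above: an empty or "*" role widens the scan to the whole

+	// namespace via the FIRST_CHAR/LAST_CHAR sentinels; otherwise the same DOT /

+	// DOT_PLUS_ONE range trick used for perms brackets just the sub-roles.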

+

+	/**

+	 * Add a single Permission to the Role's Permission Collection

+	 * 

+	 * @param trans

+	 * @param role

+	 * @param perm

+	 * @return

+	 */

+	public Result<Void> addPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {

+		// Note: Prepared Statements for Collection updates aren't supported

+		String pencode = perm.encode();

+		try {

+			getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms + {'" + 

+				pencode + "'} WHERE " +

+				"ns = '" + role.ns + "' AND name = '" + role.name + "';");

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		}

+

+		wasModified(trans, CRUD.update, role, "Added permission " + pencode + " to role " + role.fullName());

+		return Result.ok();

+	}

+

+	/**

+	 * Remove a single Permission from the Role's Permission Collection

+	 * @param trans

+	 * @param role

+	 * @param perm

+	 * @return

+	 */

+	public Result<Void> delPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {

+		// Note: Prepared Statements for Collection updates aren't supported

+

+		String pencode = perm.encode();

+		

+		//ResultSet rv =

+		try {

+			getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms - {'" + 

+				pencode	+ "'} WHERE " +

+				"ns = '" + role.ns + "' AND name = '" + role.name + "';");

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		}

+

+		//TODO how can we tell when it doesn't?

+		wasModified(trans, CRUD.update, role, "Removed permission " + pencode + " from role " + role.fullName() );

+		return Result.ok();

+	}

+	

+	/**

+	 * Add description to role

+	 * 

+	 * @param trans

+	 * @param ns

+	 * @param name

+	 * @param description

+	 * @return

+	 */

+	public Result<Void> addDescription(AuthzTrans trans, String ns, String name, String description) {

+		try {

+			getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '" 

+				+ description + "' WHERE ns = '" + ns + "' AND name = '" + name + "';");

+		} catch (DriverException | APIException | IOException e) {

+			reportPerhapsReset(trans,e);

+			return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);

+		}

+

+		Data data = new Data();

+		data.ns=ns;

+		data.name=name;

+		wasModified(trans, CRUD.update, data, "Added description " + description + " to role " + data.fullName(), null );

+		return Result.ok();

+	}

+	

+	

+    /**

+     * Log Modification statements to History

+     * @param modified           which CRUD action was done

+     * @param data               entity data that needs a log entry

+     * @param overrideMessage    if this is specified, we use it rather than crafting a history message based on data

+     */

+    @Override

+    protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {

+    	boolean memo = override.length>0 && override[0]!=null;

+    	boolean subject = override.length>1 && override[1]!=null;

+

+    	HistoryDAO.Data hd = HistoryDAO.newInitedData();

+        hd.user = trans.user();

+        hd.action = modified.name();

+        hd.target = TABLE;

+        hd.subject = subject ? override[1] : data.fullName();

+        hd.memo = memo ? override[0] : (data.fullName() + " was "  + modified.name() + 'd' );

+		if(modified==CRUD.delete) {

+			try {

+				hd.reconstruct = data.bytify();

+			} catch (IOException e) {

+				trans.error().log(e,"Could not serialize RoleDAO.Data");

+			}

+		}

+

+        if(historyDAO.create(trans, hd).status!=Status.OK) {

+        	trans.error().log("Cannot log to History");

+        }

+        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {

+        	trans.error().log("Cannot touch CacheInfo for Role");

+        }

+    }

+

+    

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Status.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Status.java
new file mode 100644
index 0000000..246df6a
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Status.java
@@ -0,0 +1,88 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import org.onap.aaf.authz.layer.Result;

+

+

+

+

+/**

+ * Add additional Behavior for Specific Applications for Results

+ * 

+ * In this case, we add additional AAF-specific status codes, plus a
 
+ * method to map each code back to a readable name.

+ *

+ * @param <RV>

+ */

+public class Status<RV> extends Result<RV> {

+	

+	// 10/1/2013:  Initially, I used enum, but it's not extensible.

+    public final static int ERR_NsNotFound = Result.ERR_General+1,

+    						ERR_RoleNotFound = Result.ERR_General+2,

+    						ERR_PermissionNotFound = Result.ERR_General+3, 

+    						ERR_UserNotFound = Result.ERR_General+4,

+    						ERR_UserRoleNotFound = Result.ERR_General+5,

+    						ERR_DelegateNotFound = Result.ERR_General+6,

+    						ERR_InvalidDelegate = Result.ERR_General+7,

+    						ERR_DependencyExists = Result.ERR_General+8,

+    						ERR_NoApprovals = Result.ERR_General+9,

+    						ACC_Now = Result.ERR_General+10,

+    						ACC_Future = Result.ERR_General+11,

+    						ERR_ChoiceNeeded = Result.ERR_General+12,

+    						ERR_FutureNotRequested = Result.ERR_General+13;
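
+    // Editorial note: plain int constants (rather than an enum) keep the code space

+    // extensible; each layer can add codes on top of Result.ERR_General without

+    // touching the base class, and name() below maps them back to readable strings.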

+  

+	/**

+     * Constructor for a Status Result.
 
+     * @param value

+     * @param status

+     */

+    private Status(RV value, int status, String details, String[] variables ) {

+    	super(value,status,details,variables);

+    }

+

+	public static String name(int status) {

+		switch(status) {

+			case OK: return "OK";

+			case ERR_NsNotFound: return "ERR_NsNotFound";

+			case ERR_RoleNotFound: return "ERR_RoleNotFound";

+			case ERR_PermissionNotFound: return "ERR_PermissionNotFound"; 

+			case ERR_UserNotFound: return "ERR_UserNotFound";

+			case ERR_UserRoleNotFound: return "ERR_UserRoleNotFound";

+			case ERR_DelegateNotFound: return "ERR_DelegateNotFound";

+			case ERR_InvalidDelegate: return "ERR_InvalidDelegate";

+			case ERR_ConflictAlreadyExists: return "ERR_ConflictAlreadyExists";

+			case ERR_DependencyExists: return "ERR_DependencyExists";

+			case ERR_ActionNotCompleted: return "ERR_ActionNotCompleted";

+			case ERR_Denied: return "ERR_Denied";

+			case ERR_Policy: return "ERR_Policy";

+			case ERR_BadData: return "ERR_BadData";

+			case ERR_NotImplemented: return "ERR_NotImplemented";

+			case ERR_NotFound: return "ERR_NotFound";

+			case ERR_ChoiceNeeded: return "ERR_ChoiceNeeded";

+		}

+		//case ERR_General:   or unknown... 

+		return "ERR_General";

+	}

+    

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/UserRoleDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/UserRoleDAO.java
new file mode 100644
index 0000000..2968160
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/UserRoleDAO.java
@@ -0,0 +1,320 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.cass;

+

+import java.io.ByteArrayOutputStream;

+import java.io.DataInputStream;

+import java.io.DataOutputStream;

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.Date;

+import java.util.List;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.Bytification;

+import org.onap.aaf.dao.Cached;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.DAOException;

+import org.onap.aaf.dao.Loader;

+import org.onap.aaf.dao.Streamer;

+import org.onap.aaf.dao.aaf.hl.Question;

+

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.Slot;

+import org.onap.aaf.inno.env.util.Chrono;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.Row;

+

+public class UserRoleDAO extends CassDAOImpl<AuthzTrans,UserRoleDAO.Data> {

+	public static final String TABLE = "user_role";

+	

+    public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F

+

+	private static final String TRANS_UR_SLOT = "_TRANS_UR_SLOT_";

+	public Slot transURSlot;

+	

+	private final HistoryDAO historyDAO;

+	private final CacheInfoDAO infoDAO;

+	

+	private PSInfo psByUser, psByRole, psUserInRole;

+

+

+

+	public UserRoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {

+		super(trans, UserRoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		transURSlot = trans.slot(TRANS_UR_SLOT);

+		init(trans);

+

+		// Set up sub-DAOs

+		historyDAO = new HistoryDAO(trans, this);

+		infoDAO = new CacheInfoDAO(trans,this);

+	}

+

+	public UserRoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {

+		super(trans, UserRoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));

+		transURSlot = trans.slot(TRANS_UR_SLOT);

+		historyDAO = hDAO;

+		infoDAO = ciDAO;

+		init(trans);

+	}

+

+	private static final int KEYLIMIT = 2;

+	public static class Data extends CacheableData implements Bytification {

+		public String  user;

+		public String  role;

+		public String  ns; 

+		public String  rname; 

+		public Date   expires;

+		

+		@Override

+		public int[] invalidate(Cached<?,?> cache) {

+			// Note: I'm not worried about Name collisions, because the formats are different:

+			// myName ... etc versus

+			// com. ...

+			// The "dot" makes the difference.

+			return new int[] {

+				seg(cache,user,role),

+				seg(cache,user),

+				seg(cache,role)

+			};

+		}

+

+		@Override

+		public ByteBuffer bytify() throws IOException {

+			ByteArrayOutputStream baos = new ByteArrayOutputStream();

+			URLoader.deflt.marshal(this,new DataOutputStream(baos));

+			return ByteBuffer.wrap(baos.toByteArray());

+		}

+		

+		@Override

+		public void reconstitute(ByteBuffer bb) throws IOException {

+			URLoader.deflt.unmarshal(this, toDIS(bb));

+		}

+

+		public void role(String ns, String rname) {

+			this.ns = ns;

+			this.rname = rname;

+			this.role = ns + '.' + rname;

+		}

+		

+		public void role(RoleDAO.Data rdd) {

+			ns = rdd.ns;

+			rname = rdd.name;

+			role = rdd.fullName();

+		}

+

+		

+		public boolean role(AuthzTrans trans, Question ques, String role) {

+			this.role = role;

+			Result<NsSplit> rnss = ques.deriveNsSplit(trans, role);

+			if(rnss.isOKhasData()) {

+				ns = rnss.value.ns;

+				rname = rnss.value.name;

+				return true;

+			} else {

+				return false;

+			}

+		}

+

+		@Override

+		public String toString() {

+			return user + '|' + ns + '|' +  rname + '|' + Chrono.dateStamp(expires);

+		}

+

+

+	}

+	

+	private static class URLoader extends Loader<Data> implements Streamer<Data> {

+		public static final int MAGIC=738469903;

+    	public static final int VERSION=1;

+    	public static final int BUFF_SIZE=48;

+    	

+    	public static final URLoader deflt = new URLoader(KEYLIMIT);

+

+		public URLoader(int keylimit) {

+			super(keylimit);

+		}

+

+		@Override

+		public Data load(Data data, Row row) {

+			data.user = row.getString(0);

+			data.role = row.getString(1);

+			data.ns = row.getString(2);

+			data.rname = row.getString(3);

+			data.expires = row.getDate(4);

+			return data;

+		}

+

+		@Override

+		protected void key(Data data, int _idx, Object[] obj) {

+			int idx = _idx;

+			obj[idx]=data.user;

+			obj[++idx]=data.role;

+		}

+

+		@Override

+		protected void body(Data data, int _idx, Object[] obj) {

+			int idx = _idx;

+			obj[idx]=data.ns;

+			obj[++idx]=data.rname;

+			obj[++idx]=data.expires;

+		}

+		

+		@Override

+		public void marshal(Data data, DataOutputStream os) throws IOException {

+			writeHeader(os,MAGIC,VERSION);

+

+			writeString(os, data.user);

+			writeString(os, data.role);

+			writeString(os, data.ns);

+			writeString(os, data.rname);

+			os.writeLong(data.expires==null?-1:data.expires.getTime());

+		}

+

+		@Override

+		public void unmarshal(Data data, DataInputStream is) throws IOException {

+			/*int version = */readHeader(is,MAGIC,VERSION);

+			// If VERSION changes between production runs, you'll need a switch statement here to read each version's fields correctly

+			

+			byte[] buff = new byte[BUFF_SIZE];

+			data.user = readString(is,buff);

+			data.role = readString(is,buff);

+			data.ns = readString(is,buff);

+			data.rname = readString(is,buff);

+			long l = is.readLong();

+			data.expires = l<0?null:new Date(l);

+		}
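
+		// A null expiration is wire-encoded as the sentinel -1 (see marshal above),

+		// so the round-trip preserves "no expiration" without a separate null flag.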

+

+	};

+	

+	private void init(AuthzTrans trans) {

+		String[] helper = setCRUD(trans, TABLE, Data.class, URLoader.deflt);

+		

+		psByUser = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ?", 

+			new URLoader(1) {

+				@Override

+				protected void key(Data data, int idx, Object[] obj) {

+					obj[idx]=data.user;

+				}

+			},readConsistency);

+		

+		// Note: we understand this call may perform poorly, so it should only be used in Management (Delete) functions

+		psByRole = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE role = ? ALLOW FILTERING", 

+				new URLoader(1) {

+					@Override

+					protected void key(Data data, int idx, Object[] obj) {

+						obj[idx]=data.role;

+					}

+				},readConsistency);

+		

+		psUserInRole = new PSInfo(trans,SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ? AND role = ?",

+				URLoader.deflt,readConsistency);

+	}
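
+	// Note on psByRole: ALLOW FILTERING is needed because the query omits what

+	// key() above suggests is the partition key (user); Cassandra must scan, hence

+	// the management-only caveat on readByRole below.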

+

+	public Result<List<Data>> readByUser(AuthzTrans trans, String user) {

+		return psByUser.read(trans, R_TEXT + " by User " + user, new Object[]{user});

+	}

+

+	/**

+	 * Note: Use Sparingly. Cassandra's forced key structure means this will perform fairly poorly

+	 * @param trans

+	 * @param role

+	 * @return

+	 * @throws DAOException

+	 */

+	public Result<List<Data>> readByRole(AuthzTrans trans, String role) {

+		return psByRole.read(trans, R_TEXT + " by Role " + role, new Object[]{role});

+	}

+	

+	/**

+	 * Direct Lookup of User Role

+	 * Don't forget to check for Expiration

+	 */

+	public Result<List<Data>> readByUserRole(AuthzTrans trans, String user, String role) {

+		return psUserInRole.read(trans, R_TEXT + " by User " + user + " and Role " + role, new Object[]{user,role});

+	}

+

+

+	/**

+     * Log Modification statements to History

+     * @param modified           which CRUD action was done

+     * @param data               entity data that needs a log entry

+     * @param overrideMessage    if this is specified, we use it rather than crafting a history message based on data

+     */

+	@Override

+	protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {

+    	boolean memo = override.length>0 && override[0]!=null;

+    	boolean subject = override.length>1 && override[1]!=null;

+

+		HistoryDAO.Data hd = HistoryDAO.newInitedData();

+		HistoryDAO.Data hdRole = HistoryDAO.newInitedData();

+		

+        hd.user = hdRole.user = trans.user();

+		hd.action = modified.name();

+		// Modifying User/Role is an Update to Role, not a Create.  JG, 07-14-2015

+		hdRole.action = CRUD.update.name();

+		hd.target = TABLE;

+		hdRole.target = RoleDAO.TABLE;

+		hd.subject = subject?override[1] : (data.user + '|'+data.role);

+		hdRole.subject = data.role;

+		switch(modified) {

+			case create: 

+				hd.memo = hdRole.memo = memo

+					? String.format("%s by %s", override[0], hd.user)

+					: String.format("%s added to %s",data.user,data.role);	

+				break;

+			case update: 

+				hd.memo = hdRole.memo = memo

+					? String.format("%s by %s", override[0], hd.user)

+					: String.format("%s - %s was updated",data.user,data.role);

+				break;

+			case delete: 

+				hd.memo = hdRole.memo = memo

+					? String.format("%s by %s", override[0], hd.user)

+					: String.format("%s removed from %s",data.user,data.role);

+				try {

+					hd.reconstruct = hdRole.reconstruct = data.bytify();

+				} catch (IOException e) {

+					trans.warn().log(e,"Deleted UserRole could not be serialized");

+				}

+				break;

+			default:

+				hd.memo = hdRole.memo = memo

+				? String.format("%s by %s", override[0], hd.user)

+				: "n/a";

+		}

+

+		if(historyDAO.create(trans, hd).status!=Status.OK) {

+        	trans.error().log("Cannot log to History");

+		}

+		

+		if(historyDAO.create(trans, hdRole).status!=Status.OK) {

+        	trans.error().log("Cannot log to History");

+		}

+		// uses User as Segment

+        if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {

+        	trans.error().log("Cannot touch CacheInfo");

+        }

+	}

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/CassExecutor.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/CassExecutor.java
new file mode 100644
index 0000000..f05a917
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/CassExecutor.java
@@ -0,0 +1,74 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.hl;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.authz.org.Executor;

+import org.onap.aaf.dao.aaf.cass.NsSplit;

+import org.onap.aaf.dao.aaf.cass.NsDAO.Data;

+

+public class CassExecutor implements Executor {

+

+	private Question q;

+	private Function f;

+	private AuthzTrans trans;

+

+	public CassExecutor(AuthzTrans trans, Function f) {

+		this.trans = trans;

+		this.f = f;

+		this.q = this.f.q;

+	}

+

+	@Override

+	public boolean hasPermission(String user, String ns, String type, String instance, String action) {

+		return isGranted(user, ns, type, instance, action);

+	}

+

+	@Override

+	public boolean inRole(String name) {

+		Result<NsSplit> nss = q.deriveNsSplit(trans, name);

+		if(nss.notOK())return false;

+		return q.roleDAO.read(trans, nss.value.ns,nss.value.name).isOKhasData();

+	}

+

+	public boolean isGranted(String user, String ns, String type, String instance, String action) {

+		return q.isGranted(trans, user, ns, type, instance,action);

+	}

+

+	@Override

+	public String namespace() throws Exception {

+		Result<Data> res = q.validNSOfDomain(trans,trans.user());

+		if(res.isOK()) {

+			String user[] = trans.user().split("\\.");

+			return user[user.length-1] + '.' + user[user.length-2];

+		}

+		throw new Exception(res.status + ' ' + res.details);

+	}
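
+	// Sketch of namespace(): the last two dot-separated labels of the user id are

+	// reversed; e.g. a (hypothetical) user "jg1234.people.osaaf.org" yields

+	// "org.osaaf". Ids with fewer than two labels are assumed not to reach here.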

+

+	@Override

+	public String id() {

+		return trans.user();

+	}

+

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Function.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Function.java
new file mode 100644
index 0000000..0404fee
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Function.java
@@ -0,0 +1,1574 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.hl;

+

+import static org.onap.aaf.authz.layer.Result.OK;

+

+import java.io.IOException;

+import java.util.ArrayList;

+import java.util.Date;

+import java.util.HashSet;

+import java.util.List;

+import java.util.Set;

+import java.util.UUID;

+

+import org.onap.aaf.authz.common.Define;

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.authz.org.Executor;

+import org.onap.aaf.authz.org.Organization;

+import org.onap.aaf.authz.org.Organization.Expiration;

+import org.onap.aaf.authz.org.Organization.Identity;

+import org.onap.aaf.authz.org.Organization.Policy;

+import org.onap.aaf.dao.DAOException;

+import org.onap.aaf.dao.aaf.cass.ApprovalDAO;

+import org.onap.aaf.dao.aaf.cass.CredDAO;

+import org.onap.aaf.dao.aaf.cass.DelegateDAO;

+import org.onap.aaf.dao.aaf.cass.FutureDAO;

+import org.onap.aaf.dao.aaf.cass.Namespace;

+import org.onap.aaf.dao.aaf.cass.NsDAO;

+import org.onap.aaf.dao.aaf.cass.NsSplit;

+import org.onap.aaf.dao.aaf.cass.NsType;

+import org.onap.aaf.dao.aaf.cass.PermDAO;

+import org.onap.aaf.dao.aaf.cass.RoleDAO;

+import org.onap.aaf.dao.aaf.cass.Status;

+import org.onap.aaf.dao.aaf.cass.UserRoleDAO;

+import org.onap.aaf.dao.aaf.cass.NsDAO.Data;

+import org.onap.aaf.dao.aaf.hl.Question.Access;

+

+public class Function {

+

+	public static final String FOP_CRED = "cred";

+	public static final String FOP_DELEGATE = "delegate";

+	public static final String FOP_NS = "ns";

+	public static final String FOP_PERM = "perm";

+	public static final String FOP_ROLE = "role";

+	public static final String FOP_USER_ROLE = "user_role";

+	// First Action should ALWAYS be "write", see "CreateRole"

+	public final Question q;

+

+	public Function(AuthzTrans trans, Question question) {

+		q = question;

+	}

+

+	private class ErrBuilder {

+		private StringBuilder sb;

+		private List<String> ao;

+

+		public void log(Result<?> result) {

+			if (result.notOK()) {

+				if (sb == null) {

+					sb = new StringBuilder();

+					ao = new ArrayList<String>();

+				}

+				sb.append(result.details);

+				sb.append('\n');

+				for (String s : result.variables) {

+					ao.add(s);

+				}

+			}

+		}

+

+		public String[] vars() {

+			String[] rv = new String[ao.size()];

+			ao.toArray(rv);

+			return rv;

+		}

+

+		public boolean hasErr() {

+			return sb != null;

+		}

+

+		@Override

+		public String toString() {

+			return sb == null ? "" : String.format(sb.toString(), (Object[])vars());

+		}

+	}
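
+	// ErrBuilder accumulates sub-operation failures across the multi-step flows

+	// below (createNS, deleteNS) so one failed step doesn't mask the rest; callers

+	// check hasErr() once at the end and report everything together.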

+

+	/**

+	 * createNS

+	 * 

+	 * Create Namespace

+	 * 

+	 * @param trans

+	 * @param org

+	 * @param ns

+	 * @param user

+	 * @return

+	 * @throws DAOException

+	 * 

+	 *             To create an NS, you need to:
 
+	 *             1) validate permission to modify parent NS
 
+	 *             2) Does NS exist already?
 
+	 *             3) Create NS with a) "user" as owner. NOTE: Per 10-15 request for AAF 1.0
 
+	 *             4) Loop through Roles with Parent NS, and map any that start with this NS into this one
 
+	 *             5) Loop through Perms with Parent NS, and map any that start with this NS into this one

+	 */

+	public Result<Void> createNS(AuthzTrans trans, Namespace namespace, boolean fromApproval) {

+		Result<?> rq;

+

+		if (namespace.name.endsWith(Question.DOT_ADMIN)

+				|| namespace.name.endsWith(Question.DOT_OWNER)) {

+			return Result.err(Status.ERR_BadData,

+					"'admin' and 'owner' are reserved names in AAF");

+		}

+

+		try {

+			for (String u : namespace.owner) {

+				Organization org = trans.org();

+				Identity orgUser = org.getIdentity(trans, u);

+				if (orgUser == null || !orgUser.isResponsible()) {

+					// check if user has explicit permission

+					String reason;

+					if (org.isTestEnv() && (reason=org.validate(trans, Policy.AS_EMPLOYEE,

+							new CassExecutor(trans, this), u))!=null) {

+					    return Result.err(Status.ERR_Policy,reason);

+					}

+				}

+			}

+		} catch (Exception e) {

+			trans.error().log(e,

+					"Could not contact Organization for User Validation");

+		}

+

+		String user = trans.user();

+		// 1) May Change Parent?

+		int idx = namespace.name.lastIndexOf('.');

+		String parent;

+		if (idx < 0) {

+			if (!q.isGranted(trans, user, Define.ROOT_NS,Question.NS, ".", "create")) {

+				return Result.err(Result.ERR_Security,

+						"%s may not create Root Namespaces", user);

+			}

+			parent = null;

+			fromApproval = true;

+		} else {

+			parent = namespace.name.substring(0, idx);

+		}

+

+		if (!fromApproval) {

+			Result<NsDAO.Data> rparent = q.deriveNs(trans, parent);

+			if (rparent.notOK()) {

+				return Result.err(rparent);

+			}

+			rparent = q.mayUser(trans, user, rparent.value, Access.write);

+			if (rparent.notOK()) {

+				return Result.err(rparent);

+			}

+		}

+

+		// 2) Does requested NS exist

+		if (q.nsDAO.read(trans, namespace.name).isOKhasData()) {

+			return Result.err(Status.ERR_ConflictAlreadyExists,

+					"Target Namespace already exists");

+		}

+

+		// Someone must be responsible.

+		if (namespace.owner == null || namespace.owner.isEmpty()) {

+			return Result

+					.err(Status.ERR_Policy,

+							"Namespaces must be assigned at least one responsible party");

+		}

+

+		// 3) Create NS

+		Date now = new Date();

+

+		Result<Void> r;

+		// 3a) Admin

+

+		try {

+			// Originally, added the enterer as Admin, but that's not necessary,

+			// or helpful for Operations folks..

+			// Admins can be empty, because they can be changed by lower level

+			// NSs

+			// if(ns.admin(false).isEmpty()) {

+			// ns.admin(true).add(user);

+			// }

+			if (namespace.admin != null) {

+				for (String u : namespace.admin) {

+					if ((r = checkValidID(trans, now, u)).notOK()) {

+						return r;

+					}

+				}

+			}

+

+			// 3b) Responsible

+			Organization org = trans.org();

+			for (String u : namespace.owner) {

+				Identity orgUser = org.getIdentity(trans, u);

+				if (orgUser == null) {

+					return Result

+							.err(Status.ERR_BadData,

+									"NS must be created with an %s approved Responsible Party",

+									org.getName());

+				}

+			}

+		} catch (Exception e) {

+			return Result.err(Status.ERR_UserNotFound, e.getMessage());

+		}

+

+		// VALIDATIONS done... Add NS

+		if ((rq = q.nsDAO.create(trans, namespace.data())).notOK()) {

+		    return Result.err(rq);

+		}

+

+		// Since Namespace is now created, we need to grab all subsequent errors

+		ErrBuilder eb = new ErrBuilder();

+

+		// Add UserRole(s)

+		UserRoleDAO.Data urdd = new UserRoleDAO.Data();

+		urdd.expires = trans.org().expiration(null, Expiration.UserInRole).getTime();

+		urdd.role(namespace.name, Question.ADMIN);

+		for (String admin : namespace.admin) {

+			urdd.user = admin;

+			eb.log(q.userRoleDAO.create(trans, urdd));

+		}

+		urdd.role(namespace.name,Question.OWNER);

+		for (String owner : namespace.owner) {

+			urdd.user = owner;

+			eb.log(q.userRoleDAO.create(trans, urdd));

+		}

+

+		addNSAdminRolesPerms(trans, eb, namespace.name);

+

+		addNSOwnerRolesPerms(trans, eb, namespace.name);

+

+		if (parent != null) {

+			// Build up with any errors

+

+			Result<NsDAO.Data> parentNS = q.deriveNs(trans, parent);

+			String targetNs = parentNS.value.name; // Get the Parent Namespace,

+													// not target

+			String targetName = namespace.name.substring(parentNS.value.name.length() + 1); // Remove the Parent Namespace from the

+									// Target + a dot, and you'll get the name

+			int targetNameDot = targetName.length() + 1;

+

+			// 4) Change any roles with children matching this NS, and

+			Result<List<RoleDAO.Data>> rrdc = q.roleDAO.readChildren(trans,	targetNs, targetName);

+			if (rrdc.isOKhasData()) {

+				for (RoleDAO.Data rdd : rrdc.value) {

+					// Remove old Role from Perms, save them off

+					List<PermDAO.Data> lpdd = new ArrayList<PermDAO.Data>();

+					for(String p : rdd.perms(false)) {

+						Result<PermDAO.Data> rpdd = PermDAO.Data.decode(trans,q,p);

+						if(rpdd.isOKhasData()) {

+							PermDAO.Data pdd = rpdd.value;

+							lpdd.add(pdd);

+							q.permDAO.delRole(trans, pdd, rdd);

+						} else{

+							trans.error().log(rpdd.errorString());

+						}

+					}

+					

+					// Save off Old keys

+					String delP1 = rdd.ns;

+					String delP2 = rdd.name;

+

+					// Write in new key

+					rdd.ns = namespace.name;

+					rdd.name = (delP2.length() > targetNameDot) ? delP2

+							.substring(targetNameDot) : "";

+							

+					// Need to use non-cached, because switching namespaces, not

+					// "create" per se

+					if ((rq = q.roleDAO.create(trans, rdd)).isOK()) {

+						// Put Role back into Perm, with correct info

+						for(PermDAO.Data pdd : lpdd) {

+							q.permDAO.addRole(trans, pdd, rdd);

+						}

+						// Change data for User Roles 

+						Result<List<UserRoleDAO.Data>> rurd = q.userRoleDAO.readByRole(trans, rdd.fullName());

+						if(rurd.isOKhasData()) {

+							for(UserRoleDAO.Data urd : rurd.value) {

+								urd.ns = rdd.ns;

+								urd.rname = rdd.name;

+								q.userRoleDAO.update(trans, urd);

+							}

+						}

+						// Now delete old one

+						rdd.ns = delP1;

+						rdd.name = delP2;

+						if ((rq = q.roleDAO.delete(trans, rdd, false)).notOK()) {

+							eb.log(rq);

+						}

+					} else {

+						eb.log(rq);

+					}

+				}

+			}

+

+			// 4) Change any Permissions with children matching this NS, and

+			Result<List<PermDAO.Data>> rpdc = q.permDAO.readChildren(trans,targetNs, targetName);

+			if (rpdc.isOKhasData()) {

+				for (PermDAO.Data pdd : rpdc.value) {

+					// Remove old Perm from Roles, save them off

+					List<RoleDAO.Data> lrdd = new ArrayList<RoleDAO.Data>();

+					

+					for(String rl : pdd.roles(false)) {

+						Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,q,rl);

+						if(rrdd.isOKhasData()) {

+							RoleDAO.Data rdd = rrdd.value;

+							lrdd.add(rdd);

+							q.roleDAO.delPerm(trans, rdd, pdd);

+						} else{

+							trans.error().log(rrdd.errorString());

+						}

+					}

+					

+					// Save off Old keys

+					String delP1 = pdd.ns;

+					String delP2 = pdd.type;

+					pdd.ns = namespace.name;

+					pdd.type = (delP2.length() > targetNameDot) ? delP2

+							.substring(targetNameDot) : "";

+					if ((rq = q.permDAO.create(trans, pdd)).isOK()) {

+						// Put Role back into Perm, with correct info

+						for(RoleDAO.Data rdd : lrdd) {

+							q.roleDAO.addPerm(trans, rdd, pdd);

+						}

+

+						pdd.ns = delP1;

+						pdd.type = delP2;

+						if ((rq = q.permDAO.delete(trans, pdd, false)).notOK()) {

+							eb.log(rq);

+							// } else {

+							// Need to invalidate directly, because we're

+							// switching places in NS, not normal cache behavior

+							// q.permDAO.invalidate(trans,pdd);

+						}

+					} else {

+						eb.log(rq);

+					}

+				}

+			}

+			if (eb.hasErr()) {

+				return Result.err(Status.ERR_ActionNotCompleted,eb.sb.toString(), eb.vars());

+			}

+		}

+		return Result.ok();

+	}

+

+	private void addNSAdminRolesPerms(AuthzTrans trans, ErrBuilder eb, String ns) {

+		// Admin Role/Perm

+		RoleDAO.Data rd = new RoleDAO.Data();

+		rd.ns = ns;

+		rd.name = "admin";

+		rd.description = "AAF Namespace Administrators";

+

+		PermDAO.Data pd = new PermDAO.Data();

+		pd.ns = ns;

+		pd.type = "access";

+		pd.instance = Question.ASTERIX;

+		pd.action = Question.ASTERIX;

+		pd.description = "AAF Namespace Write Access";

+

+		rd.perms = new HashSet<String>();

+		rd.perms.add(pd.encode());

+		eb.log(q.roleDAO.create(trans, rd));

+

+		pd.roles = new HashSet<String>();

+		pd.roles.add(rd.encode());

+		eb.log(q.permDAO.create(trans, pd));

+	}
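
+	// Reading of the bootstrap pattern: each new namespace gets an "admin" role

+	// cross-linked with an <ns>|access|*|* permission and, below, an "owner" role

+	// with <ns>|access|*|read, assuming Question.ASTERIX and Question.READ carry

+	// the literals their names suggest; authority over the namespace is thereby

+	// expressed as ordinary AAF perms.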

+

+	private void addNSOwnerRolesPerms(AuthzTrans trans, ErrBuilder eb, String ns) {

+		RoleDAO.Data rd = new RoleDAO.Data();

+		rd.ns = ns;

+		rd.name = "owner";

+		rd.description = "AAF Namespace Owners";

+

+		PermDAO.Data pd = new PermDAO.Data();

+		pd.ns = ns;

+		pd.type = "access";

+		pd.instance = Question.ASTERIX;

+		pd.action = Question.READ;

+		pd.description = "AAF Namespace Read Access";

+

+		rd.perms = new HashSet<String>();

+		rd.perms.add(pd.encode());

+		eb.log(q.roleDAO.create(trans, rd));

+

+		pd.roles = new HashSet<String>();

+		pd.roles.add(rd.encode());

+		eb.log(q.permDAO.create(trans, pd));

+	}

+

+	/**

+	 * deleteNS

+	 * 

+	 * Delete Namespace

+	 * 

+	 * @param trans

+	 * @param org

+	 * @param ns

+	 * @param force

+	 * @param user

+	 * @return

+	 * @throws DAOException

+	 * 

+	 * 

+	 *             To delete an NS, you need to:
 
+	 *             1) validate permission to modify this NS
 
+	 *             2) Find all Roles with this NS; 2a) if Force, delete them, else modify to Parent NS
 
+	 *             3) Find all Perms with this NS; 3a) if Force, delete them, else modify to Parent NS
 
+	 *             4) Find all IDs associated to this NS, and deny if any exist
 
+	 *             5) Remove NS

+	 */

+	public Result<Void> deleteNS(AuthzTrans trans, String ns) {

+		boolean force = trans.forceRequested();

+		boolean move = trans.moveRequested();

+		// 1) Validate

+		Result<List<NsDAO.Data>> nsl;

+		if ((nsl = q.nsDAO.read(trans, ns)).notOKorIsEmpty()) {

+			return Result.err(Status.ERR_NsNotFound, "%s does not exist", ns);

+		}

+		NsDAO.Data nsd = nsl.value.get(0);

+		NsType nt;

+		if (move && !q.canMove(nt = NsType.fromType(nsd.type))) {

+			return Result.err(Status.ERR_Denied, "Namespace Force=move not permitted for Type %s",nt.name());

+		}

+

+		Result<NsDAO.Data> dnr = q.mayUser(trans, trans.user(), nsd, Access.write);

+		if (dnr.status != Status.OK) {

+			return Result.err(dnr);

+		}

+

+		// 2) Find Parent

+		String user = trans.user();

+		int idx = ns.lastIndexOf('.');

+		NsDAO.Data parent;

+		if (idx < 0) {

+			if (!q.isGranted(trans, user, Define.ROOT_NS,Question.NS, ".", "delete")) {

+				return Result.err(Result.ERR_Security,

+						"%s may not delete Root Namespaces", user);

+			}

+			parent = null;

+		} else {

+			Result<NsDAO.Data> rlparent = q.deriveNs(trans,	ns.substring(0, idx));

+			if (rlparent.notOKorIsEmpty()) {

+				return Result.err(rlparent);

+			}

+			parent = rlparent.value;

+		}

+

+		// Build up with any errors

+		// If sb != null below is an indication of error

+		StringBuilder sb = null;

+		ErrBuilder er = new ErrBuilder();

+

+		// 2a) Deny if any IDs on Namespace

+		Result<List<CredDAO.Data>> creds = q.credDAO.readNS(trans, ns);

+		if (creds.isOKhasData()) {

+			if (force || move) {

+				for (CredDAO.Data cd : creds.value) {

+					er.log(q.credDAO.delete(trans, cd, false));

+					// Since we're deleting all the creds, we should delete all

+					// the user Roles for that Cred

+					Result<List<UserRoleDAO.Data>> rlurd = q.userRoleDAO

+							.readByUser(trans, cd.id);

+					if (rlurd.isOK()) {

+						for (UserRoleDAO.Data data : rlurd.value) {

+						    q.userRoleDAO.delete(trans, data, false);

+						}

+					}

+

+				}

+			} else {

+				// first possible StringBuilder Create.

+				sb = new StringBuilder();

+				sb.append('[');

+				sb.append(ns);

+				sb.append("] contains users");

+			}

+		}

+

+		// 2b) Find (or delete if forced flag is set) dependencies

+		// First, find if NS Perms are the only ones

+		Result<List<PermDAO.Data>> rpdc = q.permDAO.readNS(trans, ns);

+		if (rpdc.isOKhasData()) {

+			// Since there are now NS perms, we have to count NON-NS perms.

+			// FYI, if we delete them now, and the NS is not deleted, it is in

+			// an inconsistent state.

+			boolean nonaccess = false;

+			for (PermDAO.Data pdd : rpdc.value) {

+				if (!"access".equals(pdd.type)) {

+					nonaccess = true;

+					break;

+				}

+			}

+			if (nonaccess && !force && !move) {

+				if (sb == null) {

+					sb = new StringBuilder();

+					sb.append('[');

+					sb.append(ns);

+					sb.append("] contains ");

+				} else {

+					sb.append(", ");

+				}

+				sb.append("permissions");

+			}

+		}

+

+		Result<List<RoleDAO.Data>> rrdc = q.roleDAO.readNS(trans, ns);

+		if (rrdc.isOKhasData()) {

+			// Since there are now NS roles, we have to count NON-NS roles.

+			// FYI, if we delete them now, and the NS is not deleted, it is in

+			// an inconsistent state.

+			int count = rrdc.value.size();

+			for (RoleDAO.Data rdd : rrdc.value) {

+				if ("admin".equals(rdd.name) || "owner".equals(rdd.name)) {

+					--count;

+				}

+			}

+			if (count > 0 && !force && !move) {

+				if (sb == null) {

+					sb = new StringBuilder();

+					sb.append('[');

+					sb.append(ns);

+					sb.append("] contains ");

+				} else {

+					sb.append(", ");

+				}

+				sb.append("roles");

+			}

+		}

+

+		// 2c) Deny if dependencies exist that would be moved to root level

+		// parent is root level parent here. Need to find closest parent ns that

+		// exists

+		if (sb != null) {

+			if (!force && !move) {

+				sb.append(".\n  Delete dependencies and try again.  Note: using \"force=true\" will delete all. \"force=move\" will delete Creds, but move Roles and Perms to parent.");

+				return Result.err(Status.ERR_DependencyExists, sb.toString());

+			}

+

+			if (move && (parent == null || parent.type == NsType.COMPANY.type)) {

+				return Result

+						.err(Status.ERR_DependencyExists,

+								"Cannot move users, roles or permissions to [%s].\nDelete dependencies and try again",

+								parent.name);

+			}

+		} else if (move && parent != null) {

+			sb = new StringBuilder();

+			// 3) Change any roles with children matching this NS, and

+			moveRoles(trans, parent, sb, rrdc);

+			// 4) Change any Perms with children matching this NS, and

+			movePerms(trans, parent, sb, rpdc);

+		}

+

+		if (sb != null && sb.length() > 0) {

+			return Result.err(Status.ERR_DependencyExists, sb.toString());

+		}

+

+		if (er.hasErr()) {

+			if (trans.debug().isLoggable()) {

+				trans.debug().log(er.toString());

+			}

+			return Result.err(Status.ERR_DependencyExists,

+					"Namespace members cannot be deleted for %s", ns);

+		}

+

+		// 5) OK... good to go for NS Deletion...

+		if (!rpdc.isEmpty()) {

+			for (PermDAO.Data perm : rpdc.value) {

+				deletePerm(trans, perm, true, true);

+			}

+		}

+		if (!rrdc.isEmpty()) {

+			for (RoleDAO.Data role : rrdc.value) {

+				deleteRole(trans, role, true, true);

+			}

+		}

+

+		return q.nsDAO.delete(trans, nsd, false);

+	}

+

+	public Result<List<String>> getOwners(AuthzTrans trans, String ns,

+			boolean includeExpired) {

+		return getUsersByRole(trans, ns + Question.DOT_OWNER, includeExpired);

+	}

+

+	private Result<Void> mayAddOwner(AuthzTrans trans, String ns, String id) {

+		Result<NsDAO.Data> rq = q.deriveNs(trans, ns);

+		if (rq.notOK()) {

+			return Result.err(rq);

+		}

+

+		rq = q.mayUser(trans, trans.user(), rq.value, Access.write);

+		if (rq.notOK()) {

+			return Result.err(rq);

+		}

+

+		Identity user;

+		Organization org = trans.org();

+		try {

+			if ((user = org.getIdentity(trans, id)) == null) {

+				return Result.err(Status.ERR_Policy,

+						"%s reports that this is not a valid credential",

+						org.getName());

+			}

+			if (user.isResponsible()) {

+				return Result.ok();

+			} else {

+				String reason="This is not a Test Environment";

+				if (org.isTestEnv() && (reason = org.validate(trans, Policy.AS_EMPLOYEE, 

+						new CassExecutor(trans, this), id))==null) {

+					return Result.ok();

+				}

+				return Result.err(Status.ERR_Policy,reason);

+			}

+		} catch (Exception e) {

+			return Result.err(e);

+		}

+	}

+

+	private Result<Void> mayAddAdmin(AuthzTrans trans, String ns,	String id) {

+		// Does NS Exist?

+		Result<Void> r = checkValidID(trans, new Date(), id);

+		if (r.notOK()) {

+			return r;

+		}

+		// Is id able to be an Admin

+		Result<NsDAO.Data> rq = q.deriveNs(trans, ns);

+		if (rq.notOK()) {

+			return Result.err(rq);

+		}

+	

+		rq = q.mayUser(trans, trans.user(), rq.value, Access.write);

+		if (rq.notOK()) {

+			return Result.err(rq);

+		}

+		return r;

+	}

+

+	private Result<Void> checkValidID(AuthzTrans trans, Date now, String user) {

+		Organization org = trans.org();

+		if (user.endsWith(org.getRealm())) {

+			try {

+				if (org.getIdentity(trans, user) == null) {

+					return Result.err(Status.ERR_Denied,

+							"%s reports that %s is a faulty ID", org.getName(),

+							user);

+				}

+				return Result.ok();

+			} catch (Exception e) {

+				return Result.err(Result.ERR_Security,

+						"%s is not a valid %s Credential", user, org.getName());

+			}

+		} else {

+			Result<List<CredDAO.Data>> cdr = q.credDAO.readID(trans, user);

+			if (cdr.notOKorIsEmpty()) {

+				return Result.err(Status.ERR_Security,

+						"%s is not a valid AAF Credential", user);

+			}

+	

+			for (CredDAO.Data cd : cdr.value) {

+				if (cd.expires.after(now)) {

+					return Result.ok();

+				}

+			}

+		}

+		return Result.err(Result.ERR_Security, "%s has expired", user);

+	}

+

+	public Result<Void> delOwner(AuthzTrans trans, String ns, String id) {

+		Result<NsDAO.Data> rq = q.deriveNs(trans, ns);

+		if (rq.notOK()) {

+			return Result.err(rq);

+		}

+

+		rq = q.mayUser(trans, trans.user(), rq.value, Access.write);

+		if (rq.notOK()) {

+			return Result.err(rq);

+		}

+

+		return delUserRole(trans, id, ns,Question.OWNER);

+	}

+

+	public Result<List<String>> getAdmins(AuthzTrans trans, String ns, boolean includeExpired) {

+		return getUsersByRole(trans, ns + Question.DOT_ADMIN, includeExpired);

+	}

+

+	public Result<Void> delAdmin(AuthzTrans trans, String ns, String id) {

+		Result<NsDAO.Data> rq = q.deriveNs(trans, ns);

+		if (rq.notOK()) {

+			return Result.err(rq);

+		}

+

+		rq = q.mayUser(trans, trans.user(), rq.value, Access.write);

+		if (rq.notOK()) {

+			return Result.err(rq);

+		}

+

+		return delUserRole(trans, id, ns, Question.ADMIN);

+	}

+

+	/**

+	 * Helper function that moves permissions from a namespace being deleted to

+	 * its parent namespace

+	 * 

+	 * @param trans

+	 * @param parent

+	 * @param sb

+	 * @param rpdc

+	 *            - list of permissions in namespace being deleted

+	 */

+	private void movePerms(AuthzTrans trans, NsDAO.Data parent,

+			StringBuilder sb, Result<List<PermDAO.Data>> rpdc) {

+

+		Result<Void> rv;

+		Result<PermDAO.Data> pd;

+

+		if (rpdc.isOKhasData()) {

+			for (PermDAO.Data pdd : rpdc.value) {

+				String delP2 = pdd.type;

+				if ("access".equals(delP2)) {

+				    continue;

+				}

+				// Remove old Perm from Roles, save them off

+				List<RoleDAO.Data> lrdd = new ArrayList<RoleDAO.Data>();

+				

+				for(String rl : pdd.roles(false)) {

+					Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,q,rl);

+					if(rrdd.isOKhasData()) {

+						RoleDAO.Data rdd = rrdd.value;

+						lrdd.add(rdd);

+						q.roleDAO.delPerm(trans, rdd, pdd);

+					} else{

+						trans.error().log(rrdd.errorString());

+					}

+				}

+				

+				// Save off Old keys

+				String delP1 = pdd.ns;

+				NsSplit nss = new NsSplit(parent, pdd.fullType());

+				pdd.ns = nss.ns;

+				pdd.type = nss.name;

+				// Use direct Create/Delete, because switching namespaces

+				if ((pd = q.permDAO.create(trans, pdd)).isOK()) {

+					// Put the Perm back into each Role, with correct info

+					for(RoleDAO.Data rdd : lrdd) {

+						q.roleDAO.addPerm(trans, rdd, pdd);

+					}

+

+					pdd.ns = delP1;

+					pdd.type = delP2;

+					if ((rv = q.permDAO.delete(trans, pdd, false)).notOK()) {

+						sb.append(rv.details);

+						sb.append('\n');

+						// } else {

+						// Need to invalidate directly, because we're switching

+						// places in NS, not normal cache behavior

+						// q.permDAO.invalidate(trans,pdd);

+					}

+				} else {

+					sb.append(pd.details);

+					sb.append('\n');

+				}

+			}

+		}

+	}

+
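
The move above re-keys each permission under the parent namespace via NsSplit, creating the new record before deleting the old one so a failure leaves the original intact. A minimal JDK-only sketch of the re-keying step, assuming NsSplit keeps the remainder after the parent prefix as the local name (as its use here suggests; all names below are illustrative):

	import java.util.AbstractMap.SimpleEntry;
	import java.util.Map.Entry;

	public class ReparentSketch {
		// Re-key "com.att.child.myPerm" under parent "com.att":
		// the ns becomes the parent, the local name the remainder.
		static Entry<String,String> reparent(String parentNs, String fullType) {
			String name = fullType.startsWith(parentNs + ".")
					? fullType.substring(parentNs.length() + 1)
					: fullType;
			return new SimpleEntry<String,String>(parentNs, name);
		}

		public static void main(String[] args) {
			Entry<String,String> e = reparent("com.att", "com.att.child.myPerm");
			System.out.println(e.getKey() + " / " + e.getValue()); // com.att / child.myPerm
		}
	}
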

+	/**

+	 * Helper function that moves roles from a namespace being deleted to its

+	 * parent namespace

+	 * 

+	 * @param trans

+	 * @param parent

+	 * @param sb

+	 * @param rrdc

+	 *            - list of roles in namespace being deleted

+	 */

+	private void moveRoles(AuthzTrans trans, NsDAO.Data parent,

+			StringBuilder sb, Result<List<RoleDAO.Data>> rrdc) {

+

+		Result<Void> rv;

+		Result<RoleDAO.Data> rd;

+

+		if (rrdc.isOKhasData()) {

+			for (RoleDAO.Data rdd : rrdc.value) {

+				String delP2 = rdd.name;

+				if ("admin".equals(delP2) || "owner".equals(delP2)) {

+				    continue;

+				}

+				// Remove old Role from Perms, save them off

+				List<PermDAO.Data> lpdd = new ArrayList<PermDAO.Data>();

+				for(String p : rdd.perms(false)) {

+					Result<PermDAO.Data> rpdd = PermDAO.Data.decode(trans,q,p);

+					if(rpdd.isOKhasData()) {

+						PermDAO.Data pdd = rpdd.value;

+						lpdd.add(pdd);

+						q.permDAO.delRole(trans, pdd, rdd);

+					} else{

+						trans.error().log(rpdd.errorString());

+					}

+				}

+				

+				// Save off Old keys

+				String delP1 = rdd.ns;

+

+				NsSplit nss = new NsSplit(parent, rdd.fullName());

+				rdd.ns = nss.ns;

+				rdd.name = nss.name;

+				// Use direct Create/Delete, because switching namespaces

+				if ((rd = q.roleDAO.create(trans, rdd)).isOK()) {

+					// Put Role back into Perm, with correct info

+					for(PermDAO.Data pdd : lpdd) {

+						q.permDAO.addRole(trans, pdd, rdd);

+					}

+

+					rdd.ns = delP1;

+					rdd.name = delP2;

+					if ((rv = q.roleDAO.delete(trans, rdd, true)).notOK()) {

+						sb.append(rv.details);

+						sb.append('\n');

+						// } else {

+						// Need to invalidate directly, because we're switching

+						// places in NS, not normal cache behavior

+						// q.roleDAO.invalidate(trans,rdd);

+					}

+				} else {

+					sb.append(rd.details);

+					sb.append('\n');

+				}

+			}

+		}

+	}

+

+	/**

+	 * Create Permission (and any missing Permission between this and Parent) if

+	 * we have permission

+	 * 

+	 * Pass in the desired Management Permission for this Permission

+	 * 

+	 * If Force is set, then Roles listed will be created, if allowed,

+	 * pre-granted.

+	 */

+	public Result<Void> createPerm(AuthzTrans trans, PermDAO.Data perm, boolean fromApproval) {

+		String user = trans.user();

+		// Next, see if User is allowed to Manage Parent Permission

+

+		Result<NsDAO.Data> rnsd;

+		if (!fromApproval) {

+			rnsd = q.mayUser(trans, user, perm, Access.write);

+			if (rnsd.notOK()) {

+				return Result.err(rnsd);

+			}

+		} else {

+			rnsd = q.deriveNs(trans, perm.ns);

+		}

+

+		// Does Child exist?

+		if (!trans.forceRequested()) {

+			if (q.permDAO.read(trans, perm).isOKhasData()) {

+				return Result.err(Status.ERR_ConflictAlreadyExists,

+						"Permission [%s.%s|%s|%s] already exists.", perm.ns,

+						perm.type, perm.instance, perm.action);

+			}

+		}

+

+		// Attempt to add perms to roles, creating as possible

+		Set<String> roles;

+		String pstring = perm.encode();

+

+		// For each Role: iterate over a copy, so entries can be removed from

+		// the underlying Set without a ConcurrentModificationException

+		for (String role : new ArrayList<String>(roles = perm.roles(true))) {

+			Result<RoleDAO.Data> rdd = RoleDAO.Data.decode(trans,q,role);

+			if(rdd.isOKhasData()) {

+				RoleDAO.Data rd = rdd.value;

+				if (!fromApproval) {

+					// May User write to the Role in question.

+					Result<NsDAO.Data> rns = q.mayUser(trans, user, rd,

+							Access.write);

+					if (rns.notOK()) {

+						// Remove the role from the set to add, since the user may not grant to it

+						roles.remove(role); // Don't allow adding

+						trans.warn()

+								.log("User [%s] does not have permission to relate Permissions to Role [%s]",

+										user, role);

+					}

+				}

+

+				Result<List<RoleDAO.Data>> rlrd;

+				if ((rlrd = q.roleDAO.read(trans, rd)).notOKorIsEmpty()) {

+					rd.perms(true).add(pstring);

+					if (q.roleDAO.create(trans, rd).notOK()) {

+						roles.remove(role); // Role doesn't exist, and can't be created

+					}

+				} else {

+					rd = rlrd.value.get(0);

+					if (!rd.perms.contains(pstring)) {

+						q.roleDAO.addPerm(trans, rd, perm);

+					}

+				}

+			}

+		}

+

+		Result<PermDAO.Data> pdr = q.permDAO.create(trans, perm);

+		if (pdr.isOK()) {

+			return Result.ok();

+		} else { 

+			return Result.err(pdr);

+		}

+	}

+
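
A hypothetical caller sketch for createPerm, assuming an instance fn of the enclosing class (declared in an earlier hunk) and the PermDAO.Data fields used above; the namespace and values are illustrative:

	// Hypothetical usage; 'fn', the trans, and all values are assumptions.
	PermDAO.Data perm = new PermDAO.Data();
	perm.ns = "org.onap.sample";
	perm.type = "myType";
	perm.instance = "myInstance";
	perm.action = "read";
	Result<Void> rv = fn.createPerm(trans, perm, false);
	if (rv.notOK()) {
		trans.error().log(rv.errorString());
	}
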

+	public Result<Void> deletePerm(final AuthzTrans trans, final PermDAO.Data perm, boolean force, boolean fromApproval) {

+		String user = trans.user();

+

+		// Next, see if User is allowed to Manage Permission

+		Result<NsDAO.Data> rnsd;

+		if (!fromApproval) {

+			rnsd = q.mayUser(trans, user, perm, Access.write);

+			if (rnsd.notOK()) {

+				return Result.err(rnsd);

+			}

+		}

+		// Does Perm exist?

+		Result<List<PermDAO.Data>> pdr = q.permDAO.read(trans, perm);

+		if (pdr.notOKorIsEmpty()) {

+			return Result.err(Status.ERR_PermissionNotFound,"Permission [%s.%s|%s|%s] does not exist.",

+					perm.ns,perm.type, perm.instance, perm.action);

+		}

+		// Get perm, but with rest of data.

+		PermDAO.Data fullperm = pdr.value.get(0);

+

+		// Attached to any Roles?

+		if (fullperm.roles != null) {

+			if (force) {

+				for (String role : fullperm.roles) {

+					Result<Void> rv = null;

+					Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans, q, role);

+					if(rrdd.isOKhasData()) {

+						trans.debug().log("Removing", role, "from", fullperm, "on Perm Delete");

+						if ((rv = q.roleDAO.delPerm(trans, rrdd.value, fullperm)).notOK()) {

+							if (rv.notOK()) {

+								trans.error().log("Error removing Role during delFromPermRole: ",

+												trans.getUserPrincipal(),

+												rv.errorString());

+							}

+						}

+					} else {

+						return Result.err(rrdd);

+					}

+				}

+			} else if (!fullperm.roles.isEmpty()) {

+				return Result

+						.err(Status.ERR_DependencyExists,

+								"Permission [%s.%s|%s|%s] cannot be deleted as it is attached to 1 or more roles.",

+								fullperm.ns, fullperm.type, fullperm.instance, fullperm.action);

+			}

+		}

+

+		return q.permDAO.delete(trans, fullperm, false);

+	}

+

+	public Result<Void> deleteRole(final AuthzTrans trans, final RoleDAO.Data role, boolean force, boolean fromApproval) {

+		String user = trans.user();

+

+		// Next, see if User is allowed to Manage Role

+		Result<NsDAO.Data> rnsd;

+		if (!fromApproval) {

+			rnsd = q.mayUser(trans, user, role, Access.write);

+			if (rnsd.notOK()) {

+				return Result.err(rnsd);

+			}

+		}

+

+		// Are there any Users Attached to Role?

+		Result<List<UserRoleDAO.Data>> urdr = q.userRoleDAO.readByRole(trans,role.fullName());

+		if (force) {

+			if (urdr.isOKhasData()) {

+				for (UserRoleDAO.Data urd : urdr.value) {

+					q.userRoleDAO.delete(trans, urd, false);

+				}

+			}

+		} else if (urdr.isOKhasData()) {

+			return Result.err(Status.ERR_DependencyExists,

+							"Role [%s.%s] cannot be deleted as it is used by 1 or more Users.",

+							role.ns, role.name);

+		}

+

+		// Does Role exist?

+		Result<List<RoleDAO.Data>> rdr = q.roleDAO.read(trans, role);

+		if (rdr.notOKorIsEmpty()) {

+			return Result.err(Status.ERR_RoleNotFound,

+					"Role [%s.%s] does not exist", role.ns, role.name);

+		}

+		RoleDAO.Data fullrole = rdr.value.get(0); // full key search

+

+		// Remove Self from Permissions... always, force or not.  Force only applies to Dependencies (Users)

+		if (fullrole.perms != null) {

+			for (String perm : fullrole.perms(false)) {

+				Result<PermDAO.Data> rpd = PermDAO.Data.decode(trans,q,perm);

+				if (rpd.isOK()) {

+					trans.debug().log("Removing", perm, "from", fullrole,"on Role Delete");

+

+					Result<?> r = q.permDAO.delRole(trans, rpd.value, fullrole);

+					if (r.notOK()) {

+						trans.error().log("ERR_FDR1 unable to remove",fullrole,"from",perm,':',r.status,'-',r.details);

+					}

+				} else {

+					trans.error().log("ERR_FDR2 Could not remove",perm,"from",fullrole);

+				}

+			}

+		}

+		return q.roleDAO.delete(trans, fullrole, false);

+	}

+

+	/**

+	 * Only owner of Permission may add to Role

+	 * 

+	 * If force set, however, Role will be created before Grant, if User is

+	 * allowed to create.

+	 * 

+	 * @param trans

+	 * @param role

+	 * @param pd

+	 * @return

+	 */

+	public Result<Void> addPermToRole(AuthzTrans trans, RoleDAO.Data role,PermDAO.Data pd, boolean fromApproval) {

+		String user = trans.user();

+		

+		if (!fromApproval) {

+			Result<NsDAO.Data> rRoleCo = q.deriveFirstNsForType(trans, role.ns, NsType.COMPANY);

+			if(rRoleCo.notOK()) {

+				return Result.err(rRoleCo);

+			}

+			Result<NsDAO.Data> rPermCo = q.deriveFirstNsForType(trans, pd.ns, NsType.COMPANY);

+			if(rPermCo.notOK()) {

+				return Result.err(rPermCo);

+			}

+

+			// Not from same company

+			if(!rRoleCo.value.name.equals(rPermCo.value.name)) {

+				Result<Data> r;

+				// Only grant if User ALSO has Write ability in Other Company

+				if((r = q.mayUser(trans, user, role, Access.write)).notOK()) {

+					return Result.err(r);

+				}

+			}

+			

+

+			// Must be Perm Admin, or Granted Special Permission

+			Result<NsDAO.Data> ucp = q.mayUser(trans, user, pd, Access.write);

+			if (ucp.notOK()) {

+				// Don't allow CLI potential Grantees to change their own AAF

+				// Perms,

+				if ((Define.ROOT_NS.equals(pd.ns) && Question.NS.equals(pd.type)) 

+						|| !q.isGranted(trans, trans.user(),Define.ROOT_NS,Question.PERM, rPermCo.value.name, "grant")) {

+				// Not otherwise granted

+				// TODO Needed?

+					return Result.err(ucp);

+				}

+				// Final Check... Don't allow Grantees to add to Roles they are

+				// part of

+				Result<List<UserRoleDAO.Data>> rlurd = q.userRoleDAO

+						.readByUser(trans, trans.user());

+				if (rlurd.isOK()) {

+					for (UserRoleDAO.Data ur : rlurd.value) {

+						if (role.ns.equals(ur.ns) && role.name.equals(ur.rname)) {

+							return Result.err(ucp);

+						}

+					}

+				}

+			}

+		}

+

+		Result<List<PermDAO.Data>> rlpd = q.permDAO.read(trans, pd);

+		if (rlpd.notOKorIsEmpty()) {

+			return Result.err(Status.ERR_PermissionNotFound,

+					"Permission must exist to add to Role");

+		}

+

+		Result<List<RoleDAO.Data>> rlrd = q.roleDAO.read(trans, role); // Already checked that user can change Role

+		Result<Void> rv;

+

+		if (rlrd.notOKorIsEmpty()) {

+			if (trans.forceRequested()) {

+				Result<NsDAO.Data> ucr = q.mayUser(trans, user, role,

+						Access.write);

+				if (ucr.notOK()) {

+				    return Result

+				    		.err(Status.ERR_Denied,

+				    				"Role [%s.%s] does not exist. User [%s] cannot create.",

+				    				role.ns, role.name, user);

+				}

+

+				role.perms(true).add(pd.encode());

+				Result<RoleDAO.Data> rdd = q.roleDAO.create(trans, role);

+				if (rdd.isOK()) {

+					rv = Result.ok();

+				} else {

+					rv = Result.err(rdd);

+				}

+			} else {

+			    return Result.err(Status.ERR_RoleNotFound,

+			    		"Role [%s.%s] does not exist.", role.ns, role.name);

+			}

+		} else {

+			role = rlrd.value.get(0);

+			if (role.perms(false).contains(pd.encode())) {

+				return Result.err(Status.ERR_ConflictAlreadyExists,

+								"Permission [%s.%s] is already a member of role [%s,%s]",

+								pd.ns, pd.type, role.ns, role.name);

+			}

+			role.perms(true).add(pd.encode()); // added for Caching access purposes; doesn't affect addPerm

+			rv = q.roleDAO.addPerm(trans, role, pd);

+		}

+		if (rv.status == Status.OK) {

+			return q.permDAO.addRole(trans, pd, role);

+			// exploring how to add information message to successful http

+			// request

+		}

+		return rv;

+	}

+
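
The checks above compose in a fixed short-circuit order. A compressed JDK-only restatement of the grant decision, where each boolean stands in for one of the checks performed above (a sketch of the ordering, not the method's contract):

	class GrantSketch {
		// Each parameter abstracts a check in addPermToRole above.
		static boolean mayGrant(boolean sameCompany, boolean mayWriteRole,
				boolean mayWritePerm, boolean hasGrantPerm, boolean granteeInRole) {
			if (!sameCompany && !mayWriteRole) {
				return false; // cross-company grants also need write on the Role
			}
			if (mayWritePerm) {
				return true;  // Perm admins may grant
			}
			// otherwise the special "...|grant" permission is required, and
			// grantees may not grant onto roles they already hold
			return hasGrantPerm && !granteeInRole;
		}
	}
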

+	/**

+	 * Either Owner of Role or Permission may delete from Role

+	 * 

+	 * @param trans

+	 * @param role

+	 * @param pd

+	 * @return

+	 */

+	public Result<Void> delPermFromRole(AuthzTrans trans, RoleDAO.Data role,PermDAO.Data pd, boolean fromApproval) {

+		String user = trans.user();

+		if (!fromApproval) {

+			Result<NsDAO.Data> ucr = q.mayUser(trans, user, role, Access.write);

+			Result<NsDAO.Data> ucp = q.mayUser(trans, user, pd, Access.write);

+

+			// If Can't change either Role or Perm, then deny

+			if (ucr.notOK() && ucp.notOK()) {

+				return Result.err(Status.ERR_Denied,

+						"User [" + trans.user()

+								+ "] does not have permission to delete ["

+								+ pd.encode() + "] from Role ["

+								+ role.fullName() + ']');

+			}

+		}

+

+		Result<List<RoleDAO.Data>> rlr = q.roleDAO.read(trans, role);

+		if (rlr.notOKorIsEmpty()) {

+			// If Bad Data, clean out

+			Result<List<PermDAO.Data>> rlp = q.permDAO.read(trans, pd);

+			if (rlp.isOKhasData()) {

+				for (PermDAO.Data pv : rlp.value) {

+					q.permDAO.delRole(trans, pv, role);

+				}

+			}

+			return Result.err(rlr);

+		}

+		String perm1 = pd.encode();

+		boolean notFound;

+		if (trans.forceRequested()) {

+			notFound = false;

+		} else { // only check if force not set.

+			notFound = true;

+			for (RoleDAO.Data r : rlr.value) {

+				if (r.perms != null) {

+					for (String perm : r.perms) {

+						if (perm1.equals(perm)) {

+							notFound = false;

+							break;

+						}

+					}

+					if(!notFound) {

+						break;

+					}

+				}

+			}

+		}

+		if (notFound) { // Need to check both, in case of corruption

+			return Result.err(Status.ERR_PermissionNotFound,

+					"Permission [%s.%s|%s|%s] not associated with any Role",

+					pd.ns,pd.type,pd.instance,pd.action);

+		}

+

+		// Read Perm for full data

+		Result<List<PermDAO.Data>> rlp = q.permDAO.read(trans, pd);

+		Result<Void> rv = null;

+		if (rlp.isOKhasData()) {

+			for (PermDAO.Data pv : rlp.value) {

+				if ((rv = q.permDAO.delRole(trans, pv, role)).isOK()) {

+					if ((rv = q.roleDAO.delPerm(trans, role, pv)).notOK()) {

+						trans.error().log(

+								"Error removing Perm during delFromPermRole:",

+								trans.getUserPrincipal(), rv.errorString());

+					}

+				} else {

+					trans.error().log(

+							"Error removing Role during delFromPermRole:",

+							trans.getUserPrincipal(), rv.errorString());

+				}

+			}

+		} else {

+			rv = q.roleDAO.delPerm(trans, role, pd);

+			if (rv.notOK()) {

+				trans.error().log("Error removing Role during delFromPermRole",

+						rv.errorString());

+			}

+		}

+		return rv == null ? Result.ok() : rv;

+	}

+

+	public Result<Void> delPermFromRole(AuthzTrans trans, String role,PermDAO.Data pd) {

+		Result<NsSplit> nss = q.deriveNsSplit(trans, role);

+		if (nss.notOK()) {

+			return Result.err(nss);

+		}

+		RoleDAO.Data rd = new RoleDAO.Data();

+		rd.ns = nss.value.ns;

+		rd.name = nss.value.name;

+		return delPermFromRole(trans, rd, pd, false);

+	}

+

+	/**

+	 * Add a User to Role

+	 * 

+	 * 1) Role must exist

+	 * 2) User must be a known Credential (i.e. a mechID is ok if it has a

+	 * Credential) or a known Organizational User

+	 * 

+	 * @param trans

+	 * @param org

+	 * @param urData

+	 * @return

+	 * @throws DAOException

+	 */

+	public Result<Void> addUserRole(AuthzTrans trans,UserRoleDAO.Data urData) {

+		Result<Void> rv;

+		if(Question.ADMIN.equals(urData.rname)) {

+			rv = mayAddAdmin(trans, urData.ns, urData.user);

+		} else if(Question.OWNER.equals(urData.rname)) {

+			rv = mayAddOwner(trans, urData.ns, urData.user);

+		} else {

+			rv = checkValidID(trans, new Date(), urData.user);

+		}

+		if(rv.notOK()) {

+			return rv; 

+		}

+		

+		// Check if record exists

+		if (q.userRoleDAO.read(trans, urData).isOKhasData()) {

+			return Result.err(Status.ERR_ConflictAlreadyExists,

+					"User Role exists");

+		}

+		if (q.roleDAO.read(trans, urData.ns, urData.rname).notOKorIsEmpty()) {

+			return Result.err(Status.ERR_RoleNotFound,

+					"Role [%s.%s] does not exist", urData.ns, urData.rname);

+		}

+

+		urData.expires = trans.org().expiration(null, Expiration.UserInRole, urData.user).getTime();

+		

+		

+		Result<UserRoleDAO.Data> udr = q.userRoleDAO.create(trans, urData);

+		switch (udr.status) {

+		case OK:

+			return Result.ok();

+		default:

+			return Result.err(udr);

+		}

+	}

+
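
A hypothetical caller sketch for the convenience overload above ('fn' and all identifiers are illustrative):

	// Hypothetical usage; 'fn' is an instance of the enclosing class.
	Result<Void> rv = fn.addUserRole(trans, "someid@people.example.com",
			"org.onap.sample", "admin");
	if (rv.notOK()) {
		trans.error().log(rv.errorString());
	}
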

+	public Result<Void> addUserRole(AuthzTrans trans, String user, String ns, String rname) {

+		UserRoleDAO.Data urdd = new UserRoleDAO.Data();

+		urdd.ns = ns;

+		urdd.role(ns, rname);

+		urdd.user = user;

+		return addUserRole(trans,urdd);

+	}

+

+	/**

+	 * Extend User Role.

+	 * 

+	 * extend the Expiration data, according to Organization rules.

+	 * 

+	 * @param trans

+	 * @param org

+	 * @param urData

+	 * @return

+	 */

+	public Result<Void> extendUserRole(AuthzTrans trans, UserRoleDAO.Data urData, boolean checkForExist) {

+		// Check if record still exists

+		if (checkForExist && q.userRoleDAO.read(trans, urData).notOKorIsEmpty()) {

+			return Result.err(Status.ERR_UserRoleNotFound,

+					"User Role does not exist");

+		}

+		if (q.roleDAO.read(trans, urData.ns, urData.rname).notOKorIsEmpty()) {

+			return Result.err(Status.ERR_RoleNotFound,

+					"Role [%s.%s] does not exist", urData.ns,urData.rname);

+		}

+		// Special case for "Admin" roles. Issue brought forward with Prod

+		// problem 9/26

+

+		urData.expires = trans.org().expiration(null, Expiration.UserInRole).getTime(); // get full time starting today

+		return q.userRoleDAO.update(trans, urData);

+	}

+

+	// ////////////////////////////////////////////////////

+	// Special User Role Functions

+	// These exist because User Roles have Expiration dates, which must be

+	// accounted for.

+	// Also, as of July 2015, Namespace Owners and Admins are regular User Roles.

+	// ////////////////////////////////////////////////////

+	public Result<List<String>> getUsersByRole(AuthzTrans trans, String role, boolean includeExpired) {

+		Result<List<UserRoleDAO.Data>> rurdd = q.userRoleDAO.readByRole(trans,role);

+		if (rurdd.notOK()) {

+			return Result.err(rurdd);

+		}

+		Date now = new Date();

+		List<UserRoleDAO.Data> list = rurdd.value;

+		List<String> rv = new ArrayList<String>(list.size()); // presize

+		for (UserRoleDAO.Data urdd : rurdd.value) {

+			if (includeExpired || urdd.expires.after(now)) {

+				rv.add(urdd.user);

+			}

+		}

+		return Result.ok(rv);

+	}

+
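
The expiration filter above is the essential logic; as a standalone JDK sketch (UserRec is an invented stand-in for UserRoleDAO.Data):

	import java.util.ArrayList;
	import java.util.Date;
	import java.util.List;

	class ExpiryFilter {
		static class UserRec { String user; Date expires; }

		static List<String> activeUsers(List<UserRec> in, boolean includeExpired) {
			Date now = new Date();
			List<String> out = new ArrayList<String>(in.size()); // presize, as above
			for (UserRec r : in) {
				if (includeExpired || r.expires.after(now)) {
					out.add(r.user);
				}
			}
			return out;
		}
	}
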

+	public Result<Void> delUserRole(AuthzTrans trans, String user, String ns, String rname) {

+		UserRoleDAO.Data urdd = new UserRoleDAO.Data();

+		urdd.user = user;

+		urdd.role(ns,rname);

+		Result<List<UserRoleDAO.Data>> r = q.userRoleDAO.read(trans, urdd);

+		if (r.status == 404 || r.isEmpty()) {

+			return Result.err(Status.ERR_UserRoleNotFound,

+					"UserRole [%s] [%s.%s]", user, ns, rname);

+		}

+		if (r.notOK()) {

+			return Result.err(r);

+		}

+

+		return q.userRoleDAO.delete(trans, urdd, false);

+	}

+

+	public Result<List<Identity>> createFuture(AuthzTrans trans, FutureDAO.Data data, String id, String user,

+			NsDAO.Data nsd, String op) {

+		// Create Future Object

+		List<Identity> approvers=null;

+		Result<FutureDAO.Data> fr = q.futureDAO.create(trans, data, id);

+		if (fr.isOK()) {

+			// Use Future ID as ticket for Approvals

+			final UUID ticket = fr.value.id;

+			ApprovalDAO.Data ad;

+			try {

+				Organization org = trans.org();

+				approvers = org.getApprovers(trans, user);

+				for (Identity u : approvers) {

+					ad = new ApprovalDAO.Data();

+					// Note ad.id is set by ApprovalDAO Create

+					ad.ticket = ticket;

+					ad.user = user;

+					ad.approver = u.id();

+					ad.status = ApprovalDAO.PENDING;

+					ad.memo = data.memo;

+					ad.type = org.getApproverType();

+					ad.operation = op;

+					// Note ad.updated is created in System

+					Result<ApprovalDAO.Data> ar = q.approvalDAO.create(trans,ad);

+					if (ar.notOK()) {

+						return Result.err(Status.ERR_ActionNotCompleted,

+								"Approval for %s, %s could not be created: %s",

+								ad.user, ad.approver, ar.details);

+					}

+				}

+				if (nsd != null) {

+					Result<List<UserRoleDAO.Data>> rrbr = q.userRoleDAO

+							.readByRole(trans, nsd.name + Question.DOT_OWNER);

+					if (rrbr.isOK()) {

+						for (UserRoleDAO.Data urd : rrbr.value) {

+							ad = new ApprovalDAO.Data();

+							// Note ad.id is set by ApprovalDAO Create

+							ad.ticket = ticket;

+							ad.user = user;

+							ad.approver = urd.user;

+							ad.status = ApprovalDAO.PENDING;

+							ad.memo = data.memo;

+							ad.type = "owner";

+							ad.operation = op;

+							// Note ad.updated is created in System

+							Result<ApprovalDAO.Data> ar = q.approvalDAO.create(trans, ad);

+							if (ar.notOK()) {

+								return Result.err(Status.ERR_ActionNotCompleted,

+												"Approval for %s, %s could not be created: %s",

+												ad.user, ad.approver,

+												ar.details);

+							}

+						}

+					}

+				}

+			} catch (Exception e) {

+				return Result.err(e);

+			}

+		}

+		

+		return Result.ok(approvers);

+	}

+

+	public Result<Void> performFutureOp(AuthzTrans trans, ApprovalDAO.Data cd) {

+		Result<List<FutureDAO.Data>> fd = q.futureDAO.read(trans, cd.ticket);

+		Result<List<ApprovalDAO.Data>> allApprovalsForTicket = q.approvalDAO

+				.readByTicket(trans, cd.ticket);

+		Result<Void> rv = Result.ok();

+		for (FutureDAO.Data curr : fd.value) {

+			if ("approved".equalsIgnoreCase(cd.status)) {

+				if (allApprovalsForTicket.value.size() <= 1) {

+					// should check for any other pending approvals before performing actions

+					try {

+						if (FOP_ROLE.equalsIgnoreCase(curr.target)) {

+							RoleDAO.Data data = new RoleDAO.Data();

+							data.reconstitute(curr.construct);

+							if ("C".equalsIgnoreCase(cd.operation)) {

+								Result<RoleDAO.Data> rd;

+								if ((rd = q.roleDAO.dao().create(trans, data)).notOK()) {

+									rv = Result.err(rd);

+								}

+							} else if ("D".equalsIgnoreCase(cd.operation)) {

+								rv = deleteRole(trans, data, true, true);

+							}

+	

+						} else if (FOP_PERM.equalsIgnoreCase(curr.target)) {

+							PermDAO.Data pdd = new PermDAO.Data();

+							pdd.reconstitute(curr.construct);

+							if ("C".equalsIgnoreCase(cd.operation)) {

+								rv = createPerm(trans, pdd, true);

+							} else if ("D".equalsIgnoreCase(cd.operation)) {

+								rv = deletePerm(trans, pdd, true, true);

+							} else if ("G".equalsIgnoreCase(cd.operation)) {

+								Set<String> roles = pdd.roles(true);

+								Result<RoleDAO.Data> rrdd = null;

+								for (String roleStr : roles) {

+									rrdd = RoleDAO.Data.decode(trans, q, roleStr);

+									if (rrdd.isOKhasData()) {

+										rv = addPermToRole(trans, rrdd.value, pdd, true);

+									} else {

+										trans.error().log(rrdd.errorString());

+									}

+								}

+							} else if ("UG".equalsIgnoreCase(cd.operation)) {

+								Set<String> roles = pdd.roles(true);

+								Result<RoleDAO.Data> rrdd;

+								for (String roleStr : roles) {

+									rrdd = RoleDAO.Data.decode(trans, q, roleStr);

+									if (rrdd.isOKhasData()) {

+										rv = delPermFromRole(trans, rrdd.value, pdd,	true);

+									} else {

+										trans.error().log(rrdd.errorString());

+									}

+								}

+							}

+	

+						} else if (FOP_USER_ROLE.equalsIgnoreCase(curr.target)) {

+							UserRoleDAO.Data data = new UserRoleDAO.Data();

+							data.reconstitute(curr.construct);

+							// if I am the last to approve, create user role

+							if ("C".equalsIgnoreCase(cd.operation)) {

+								rv = addUserRole(trans, data);

+							} else if ("U".equals(cd.operation)) {

+								rv = extendUserRole(trans, data, true);

+							}

+	

+						} else if (FOP_NS.equalsIgnoreCase(curr.target)) {

+							Namespace namespace = new Namespace();

+							namespace.reconstitute(curr.construct);

+	

+							if ("C".equalsIgnoreCase(cd.operation)) {

+								rv = createNS(trans, namespace, true);

+							}

+	

+						} else if (FOP_DELEGATE.equalsIgnoreCase(curr.target)) {

+							DelegateDAO.Data data = new DelegateDAO.Data();

+							data.reconstitute(curr.construct);

+							if ("C".equalsIgnoreCase(cd.operation)) {

+								Result<DelegateDAO.Data> dd;

+								if ((dd = q.delegateDAO.create(trans, data)).notOK()) {

+									rv = Result.err(dd);

+								}

+							} else if ("U".equalsIgnoreCase(cd.operation)) {

+								rv = q.delegateDAO.update(trans, data);

+							}

+						} else if (FOP_CRED.equalsIgnoreCase(curr.target)) {

+							CredDAO.Data data = new CredDAO.Data();

+							data.reconstitute(curr.construct);

+							if ("C".equalsIgnoreCase(cd.operation)) {

+								Result<CredDAO.Data> rd;

+								if ((rd = q.credDAO.dao().create(trans, data)).notOK()) {

+									rv = Result.err(rd);

+								}

+							}

+						}

+					} catch (IOException e) {

+						trans.error().log("IOException: ", e.getMessage(),

+								" \n occurred while performing", cd.memo,

+								" from approval ", cd.id.toString());

+					}

+				}

+			} else if ("denied".equalsIgnoreCase(cd.status)) {

+				for (ApprovalDAO.Data ad : allApprovalsForTicket.value) {

+				    q.approvalDAO.delete(trans, ad, false);

+				}

+				q.futureDAO.delete(trans, curr, false);

+				if (FOP_USER_ROLE.equalsIgnoreCase(curr.target)) {

+					// if I am the last to approve, create user role

+					if ("U".equals(cd.operation)) {

+						UserRoleDAO.Data data = new UserRoleDAO.Data();

+						try {

+							data.reconstitute(curr.construct);

+						} catch (IOException e) {

+							trans.error().log("Cannot reconstitue",curr.memo);

+						}

+						rv = delUserRole(trans, data.user, data.ns, data.rname);

+					}

+				}

+

+			}

+	

+			// if I am the last to approve, delete the future object

+			if (rv.isOK() && allApprovalsForTicket.value.size() <= 1) {

+				q.futureDAO.delete(trans, curr, false);

+			}

+	

+		} // end for each

+		return rv;

+	

+	}

+
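
The single-letter operation codes dispatched above are not documented in this hunk; inferred from the branches (not authoritative):

	// Inferred mapping of Future operation codes, from the branches above:
	//   "C"  -> create (role, perm, user role, ns, delegate, cred)
	//   "D"  -> delete (role, perm)
	//   "G"  -> grant a perm to its listed roles
	//   "UG" -> un-grant a perm from its listed roles
	//   "U"  -> update/extend (user role, delegate)
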

+	public Executor newExecutor(AuthzTrans trans) {

+		return new CassExecutor(trans, this);

+	}

+

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/PermLookup.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/PermLookup.java
new file mode 100644
index 0000000..40f5917
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/PermLookup.java
@@ -0,0 +1,184 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.hl;

+

+import java.util.ArrayList;

+import java.util.Date;

+import java.util.HashMap;

+import java.util.List;

+import java.util.Map;

+import java.util.Set;

+import java.util.TreeSet;

+

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.PermDAO;

+import org.onap.aaf.dao.aaf.cass.RoleDAO;

+import org.onap.aaf.dao.aaf.cass.Status;

+import org.onap.aaf.dao.aaf.cass.UserRoleDAO;

+

+/**

+ * PermLookup is a storage class for the various pieces of a Permission lookup

+ * during a Transaction, used to avoid duplicate processing

+ * 

+ *

+ */

+// Package on purpose

+class PermLookup {

+	private AuthzTrans trans;

+	private String user;

+	private Question q;

+	private Result<List<UserRoleDAO.Data>> userRoles = null;

+	private Result<List<RoleDAO.Data>> roles = null;

+	private Result<Set<String>> permNames = null;

+	private Result<List<PermDAO.Data>> perms = null;

+	

+	private PermLookup() {}

+	

+	static PermLookup get(AuthzTrans trans, Question q, String user) {

+		PermLookup lp=null;

+		Map<String, PermLookup> permMap = trans.get(Question.PERMS, null);

+		if (permMap == null) {

+			trans.put(Question.PERMS, permMap = new HashMap<String, PermLookup>());

+		} else {

+			lp = permMap.get(user);

+		}

+

+		if (lp == null) {

+			lp = new PermLookup();

+			lp.trans = trans;

+			lp.user = user;

+			lp.q = q;

+			permMap.put(user, lp);

+		}

+		return lp;

+	}

+	
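
The get() factory above is a per-transaction memo: one PermLookup per user, stashed in a map on the transaction slot. A JDK-only sketch of the same pattern (Trans and the slot mechanics are simplified to a plain map):

	import java.util.HashMap;
	import java.util.Map;

	class LookupCache<V> {
		private final Map<String, V> perUser = new HashMap<String, V>();

		// computeIfAbsent mirrors the null-check-then-put flow of get() above
		V get(String user, java.util.function.Function<String, V> creator) {
			return perUser.computeIfAbsent(user, creator);
		}
	}
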

+	public Result<List<UserRoleDAO.Data>> getUserRoles() {

+		if(userRoles==null) {

+			userRoles = q.userRoleDAO.readByUser(trans,user);

+			if(userRoles.isOKhasData()) {

+				List<UserRoleDAO.Data> lurdd = new ArrayList<UserRoleDAO.Data>();

+				Date now = new Date();

+				for(UserRoleDAO.Data urdd : userRoles.value) {

+					if(urdd.expires.after(now)) { // Remove Expired

+						lurdd.add(urdd);

+					}

+				}

+				if(lurdd.size()==0) {

+					return userRoles = Result.err(Status.ERR_UserNotFound,

+								"%s not found or not associated with any Roles: ",

+								user);

+				} else {

+					return userRoles = Result.ok(lurdd);

+				}

+			} else {

+				return userRoles;

+			}

+		} else {

+			return userRoles;

+		}

+	}

+

+	public Result<List<RoleDAO.Data>> getRoles() {

+		if(roles==null) {

+			Result<List<UserRoleDAO.Data>> rur = getUserRoles();

+			if(rur.isOK()) {

+				List<RoleDAO.Data> lrdd = new ArrayList<RoleDAO.Data>();

+				for (UserRoleDAO.Data urdata : rur.value) {

+					// Gather all permissions from all Roles

+					if(urdata.ns==null || urdata.rname==null) {

+						trans.error().printf("DB Content Error: nulls in User Role %s %s", urdata.user, urdata.role);

+					} else {

+						Result<List<RoleDAO.Data>> rlrd = q.roleDAO.read(

+								trans, urdata.ns, urdata.rname);

+						if(rlrd.isOK()) {

+							lrdd.addAll(rlrd.value);

+						}

+					}

+				}

+				return roles = Result.ok(lrdd);

+			} else {

+				return roles = Result.err(rur);

+			}

+		} else {

+			return roles;

+		}

+	}

+

+	public Result<Set<String>> getPermNames() {

+		if(permNames==null) {

+			Result<List<RoleDAO.Data>> rlrd = getRoles();

+			if (rlrd.isOK()) {

+				Set<String> pns = new TreeSet<String>();

+				for (RoleDAO.Data rdata : rlrd.value) {

+					pns.addAll(rdata.perms(false));

+				}

+				return permNames = Result.ok(pns);

+			} else {

+				return permNames = Result.err(rlrd);

+			}

+		} else {

+			return permNames;

+		}

+	}

+	

+	public Result<List<PermDAO.Data>> getPerms(boolean lookup) {

+		if(perms==null) {

+			// Note: It should be ok for a Valid user to have no permissions -

+			// 8/12/2013

+			Result<Set<String>> rss = getPermNames();

+			if(rss.isOK()) {

+				List<PermDAO.Data> lpdd = new ArrayList<PermDAO.Data>();

+				for (String perm : rss.value) {

+					if(lookup) {

+						Result<String[]> ap = PermDAO.Data.decodeToArray(trans, q, perm);

+						if(ap.isOK()) {

+							Result<List<PermDAO.Data>> rlpd = q.permDAO.read(perm,trans,ap);

+							if (rlpd.isOKhasData()) {

+								for (PermDAO.Data pData : rlpd.value) {

+									lpdd.add(pData);

+								}

+							}

+						} else {

+							trans.error().log("In getPermsByUser, for", user, perm);

+						}

+					} else {

+						Result<PermDAO.Data> pr = PermDAO.Data.decode(trans, q, perm);

+						if (pr.notOK()) {

+							trans.error().log("In getPermsByUser, for", user, pr.errorString());

+						} else {

+							lpdd.add(pr.value);

+						}

+					}

+

+				}

+				return perms = Result.ok(lpdd);

+			} else {

+				return perms = Result.err(rss);

+			}

+		} else {

+			return perms;

+		}

+	}

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Question.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Question.java
new file mode 100644
index 0000000..c552cc9
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Question.java
@@ -0,0 +1,1087 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.hl;

+

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.security.NoSuchAlgorithmException;

+import java.security.SecureRandom;

+import java.util.ArrayList;

+import java.util.Collections;

+import java.util.Comparator;

+import java.util.Date;

+import java.util.HashSet;

+import java.util.List;

+import java.util.Set;

+import java.util.TreeSet;

+

+import org.onap.aaf.authz.common.Define;

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.env.AuthzTransFilter;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.authz.org.Organization;

+import org.onap.aaf.authz.org.Organization.Identity;

+import org.onap.aaf.dao.AbsCassDAO;

+import org.onap.aaf.dao.CachedDAO;

+import org.onap.aaf.dao.DAOException;

+import org.onap.aaf.dao.aaf.cached.CachedCertDAO;

+import org.onap.aaf.dao.aaf.cached.CachedCredDAO;

+import org.onap.aaf.dao.aaf.cached.CachedNSDAO;

+import org.onap.aaf.dao.aaf.cached.CachedPermDAO;

+import org.onap.aaf.dao.aaf.cached.CachedRoleDAO;

+import org.onap.aaf.dao.aaf.cached.CachedUserRoleDAO;

+import org.onap.aaf.dao.aaf.cass.ApprovalDAO;

+import org.onap.aaf.dao.aaf.cass.CacheInfoDAO;

+import org.onap.aaf.dao.aaf.cass.CertDAO;

+import org.onap.aaf.dao.aaf.cass.CredDAO;

+import org.onap.aaf.dao.aaf.cass.DelegateDAO;

+import org.onap.aaf.dao.aaf.cass.FutureDAO;

+import org.onap.aaf.dao.aaf.cass.HistoryDAO;

+import org.onap.aaf.dao.aaf.cass.NsDAO;

+import org.onap.aaf.dao.aaf.cass.NsSplit;

+import org.onap.aaf.dao.aaf.cass.NsType;

+import org.onap.aaf.dao.aaf.cass.PermDAO;

+import org.onap.aaf.dao.aaf.cass.RoleDAO;

+import org.onap.aaf.dao.aaf.cass.Status;

+import org.onap.aaf.dao.aaf.cass.UserRoleDAO;

+import org.onap.aaf.dao.aaf.cass.NsDAO.Data;

+

+import org.onap.aaf.cadi.Hash;

+import org.onap.aaf.cadi.aaf.PermEval;

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.Env;

+import org.onap.aaf.inno.env.Slot;

+import org.onap.aaf.inno.env.TimeTaken;

+import org.onap.aaf.inno.env.util.Chrono;

+import com.datastax.driver.core.Cluster;

+

+/**

+ * Question HL DAO

+ * 

+ * A Data Access Combination Object which asks Security and other Questions

+ * 

+ *

+ */

+public class Question {

+	// DON'T CHANGE FROM lower Case!!!

+	public static enum Type {

+		ns, role, perm, cred

+	};

+

+	public static final String OWNER="owner";

+	public static final String ADMIN="admin";

+	public static final String DOT_OWNER=".owner";

+	public static final String DOT_ADMIN=".admin";

+	static final String ASTERIX = "*";

+

+	public static enum Access {

+		read, write, create

+	};

+

+	public static final String READ = Access.read.name();

+	public static final String WRITE = Access.write.name();

+	public static final String CREATE = Access.create.name();

+

+	public static final String ROLE = Type.role.name();

+	public static final String PERM = Type.perm.name();

+	public static final String NS = Type.ns.name();

+	public static final String CRED = Type.cred.name();

+	private static final String DELG = "delg";

+	public static final String ATTRIB = "attrib";

+

+

+	public static final int MAX_SCOPE = 10;

+	public static final int APP_SCOPE = 3;

+	public static final int COMPANY_SCOPE = 2;

+	static Slot PERMS;

+

+	private static Set<String> specialLog = null;

+	public static final SecureRandom random = new SecureRandom();

+	private static long traceID = random.nextLong();

+	private static final String SPECIAL_LOG_SLOT = "SPECIAL_LOG_SLOT";

+	private static Slot specialLogSlot = null;

+	private static Slot transIDSlot = null;

+

+

+	public final HistoryDAO historyDAO;

+	public final CachedNSDAO nsDAO;

+	public final CachedRoleDAO roleDAO;

+	public final CachedPermDAO permDAO;

+	public final CachedUserRoleDAO userRoleDAO;

+	public final CachedCredDAO credDAO;

+	public final CachedCertDAO certDAO;

+	public final DelegateDAO delegateDAO;

+	public final FutureDAO futureDAO;

+	public final ApprovalDAO approvalDAO;

+	private final CacheInfoDAO cacheInfoDAO;

+

+	// final ContactDAO contDAO;

+	// private static final String DOMAIN = "@aaf.att.com";

+	// private static final int DOMAIN_LENGTH = 0;

+

+	public Question(AuthzTrans trans, Cluster cluster, String keyspace, boolean startClean) throws APIException, IOException {

+		PERMS = trans.slot("USER_PERMS");

+		trans.init().log("Instantiating DAOs");

+		historyDAO = new HistoryDAO(trans, cluster, keyspace);

+

+		// Deal with Cached Entries

+		cacheInfoDAO = new CacheInfoDAO(trans, historyDAO);

+

+		nsDAO = new CachedNSDAO(new NsDAO(trans, historyDAO, cacheInfoDAO),

+				cacheInfoDAO);

+		permDAO = new CachedPermDAO(

+				new PermDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO);

+		roleDAO = new CachedRoleDAO(

+				new RoleDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO);

+		userRoleDAO = new CachedUserRoleDAO(new UserRoleDAO(trans, historyDAO,

+				cacheInfoDAO), cacheInfoDAO);

+		credDAO = new CachedCredDAO(

+				new CredDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO);

+		certDAO = new CachedCertDAO(

+				new CertDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO);

+

+		futureDAO = new FutureDAO(trans, historyDAO);

+		delegateDAO = new DelegateDAO(trans, historyDAO);

+		approvalDAO = new ApprovalDAO(trans, historyDAO);

+

+		// Only want to aggressively cleanse User related Caches... The others,

+		// just normal refresh

+		if(startClean) {

+			CachedDAO.startCleansing(trans.env(), credDAO, userRoleDAO);

+			CachedDAO.startRefresh(trans.env(), cacheInfoDAO);

+		}

+		// Set a Timer to Check Caches to send messages for Caching changes

+		

+		if(specialLogSlot==null) {

+			specialLogSlot = trans.slot(SPECIAL_LOG_SLOT);

+			transIDSlot = trans.slot(AuthzTransFilter.TRANS_ID_SLOT);

+		}

+		

+		AbsCassDAO.primePSIs(trans);

+	}

+

+

+	public void close(AuthzTrans trans) {

+		historyDAO.close(trans);

+		cacheInfoDAO.close(trans);

+		nsDAO.close(trans);

+		permDAO.close(trans);

+		roleDAO.close(trans);

+		userRoleDAO.close(trans);

+		credDAO.close(trans);

+		certDAO.close(trans);

+		delegateDAO.close(trans);

+		futureDAO.close(trans);

+		approvalDAO.close(trans);

+	}

+

+	public Result<PermDAO.Data> permFrom(AuthzTrans trans, String type,

+			String instance, String action) {

+		Result<NsDAO.Data> rnd = deriveNs(trans, type);

+		if (rnd.isOK()) {

+			return Result.ok(new PermDAO.Data(new NsSplit(rnd.value, type),

+					instance, action));

+		} else {

+			return Result.err(rnd);

+		}

+	}

+

+	/**

+	 * getPermsByUser

+	 * 

+	 * Because this is called frequently, including internally, AND because we

+	 * already look for it in the initial Call, we cache within the Transaction

+	 * 

+	 * @param trans

+	 * @param user

+	 * @return

+	 */

+	public Result<List<PermDAO.Data>> getPermsByUser(AuthzTrans trans, String user, boolean lookup) {

+		return PermLookup.get(trans, this, user).getPerms(lookup);

+	}

+	
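
A hypothetical caller sketch for getPermsByUser, assuming a constructed Question 'q' and an illustrative user id:

	// Repeated calls within one transaction reuse the cached PermLookup
	// rather than re-reading Cassandra.
	Result<List<PermDAO.Data>> rlpd = q.getPermsByUser(trans, "someid@example.com", true);
	if (rlpd.isOKhasData()) {
		for (PermDAO.Data pdd : rlpd.value) {
			trans.debug().log(pdd.fullType(), pdd.instance, pdd.action);
		}
	}
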

+	public Result<List<PermDAO.Data>> getPermsByUserFromRolesFilter(AuthzTrans trans, String user, String forUser) {

+		PermLookup plUser = PermLookup.get(trans, this, user);

+		Result<Set<String>> plPermNames = plUser.getPermNames();

+		if(plPermNames.notOK()) {

+			return Result.err(plPermNames);

+		}

+		

+		Set<String> nss;

+		if(forUser.equals(user)) {

+			nss = null;

+		} else {

+			// Setup a TreeSet to check on Namespaces to 

+			nss = new TreeSet<String>();

+			PermLookup fUser = PermLookup.get(trans, this, forUser);

+			Result<Set<String>> forUpn = fUser.getPermNames();

+			if(forUpn.notOK()) {

+				return Result.err(forUpn);

+			}

+			

+			for(String pn : forUpn.value) {

+				Result<String[]> decoded = PermDAO.Data.decodeToArray(trans, this, pn);

+				if(decoded.isOKhasData()) {

+					nss.add(decoded.value[0]);

+				} else {

+					trans.error().log(pn,", derived from a Role, is invalid:",decoded.errorString());

+				}

+			}

+		}

+

+		List<PermDAO.Data> rlpUser = new ArrayList<PermDAO.Data>();

+		Result<PermDAO.Data> rpdd;

+		PermDAO.Data pdd;

+		for(String pn : plPermNames.value) {

+			rpdd = PermDAO.Data.decode(trans, this, pn);

+			if(rpdd.isOKhasData()) {

+				pdd=rpdd.value;

+				if(nss==null || nss.contains(pdd.ns)) {

+					rlpUser.add(pdd);

+				}

+			} else {

+				trans.error().log(pn,", derived from a Role, is invalid.  Run Data Cleanup:",rpdd.errorString());

+			}

+		}

+		return Result.ok(rlpUser); 

+	}

+

+	public Result<List<PermDAO.Data>> getPermsByType(AuthzTrans trans, String perm) {

+		Result<NsSplit> nss = deriveNsSplit(trans, perm);

+		if (nss.notOK()) {

+			return Result.err(nss);

+		}

+		return permDAO.readByType(trans, nss.value.ns, nss.value.name);

+	}

+

+	public Result<List<PermDAO.Data>> getPermsByName(AuthzTrans trans,

+			String type, String instance, String action) {

+		Result<NsSplit> nss = deriveNsSplit(trans, type);

+		if (nss.notOK()) {

+			return Result.err(nss);

+		}

+		return permDAO.read(trans, nss.value.ns, nss.value.name, instance,action);

+	}

+

+	public Result<List<PermDAO.Data>> getPermsByRole(AuthzTrans trans, String role, boolean lookup) {

+		Result<NsSplit> nss = deriveNsSplit(trans, role);

+		if (nss.notOK()) {

+			return Result.err(nss);

+		}

+

+		Result<List<RoleDAO.Data>> rlrd = roleDAO.read(trans, nss.value.ns,

+				nss.value.name);

+		if (rlrd.notOKorIsEmpty()) {

+			return Result.err(rlrd);

+		}

+		// Using Set to avoid duplicates

+		Set<String> permNames = new HashSet<String>();

+		if (rlrd.isOKhasData()) {

+			for (RoleDAO.Data drr : rlrd.value) {

+				permNames.addAll(drr.perms(false));

+			}

+		}

+

+		// Note: It should be ok for a Valid user to have no permissions -

+		// 8/12/2013

+		List<PermDAO.Data> perms = new ArrayList<PermDAO.Data>();

+		for (String perm : permNames) {

+			Result<PermDAO.Data> pr = PermDAO.Data.decode(trans, this, perm);

+			if (pr.notOK()) {

+				return Result.err(pr);

+			}

+

+			if(lookup) {

+				Result<List<PermDAO.Data>> rlpd = permDAO.read(trans, pr.value);

+				if (rlpd.isOKhasData()) {

+					for (PermDAO.Data pData : rlpd.value) {

+						perms.add(pData);

+					}

+				}

+			} else {

+				perms.add(pr.value);

+			}

+		}

+

+		return Result.ok(perms);

+	}

+

+	public Result<List<RoleDAO.Data>> getRolesByName(AuthzTrans trans,

+			String role) {

+		Result<NsSplit> nss = deriveNsSplit(trans, role);

+		if (nss.notOK()) {

+			return Result.err(nss);

+		}

+		String r = nss.value.name;

+		if (r.endsWith(".*")) { // do children Search

+			return roleDAO.readChildren(trans, nss.value.ns,

+					r.substring(0, r.length() - 2));

+		} else if (ASTERIX.equals(r)) {

+			return roleDAO.readChildren(trans, nss.value.ns, ASTERIX);

+		} else {

+			return roleDAO.read(trans, nss.value.ns, r);

+		}

+	}

+

+	/**

+	 * Derive NS

+	 * 

+	 * Given a Child Namespace, figure out what the best Namespace parent is.

+	 * 

+	 * For instance, if in the NS table, the parent "com.att" exists, but not

+	 * "com.att.child" or "com.att.a.b.c", then passing in either

+	 * "com.att.child" or "com.att.a.b.c" will return "com.att"

+	 * 

+	 * Uses recursive search on Cached DAO data

+	 * 

+	 * @param trans

+	 * @param child

+	 * @return

+	 */

+	public Result<NsDAO.Data> deriveNs(AuthzTrans trans, String child) {

+		Result<List<NsDAO.Data>> r = nsDAO.read(trans, child);

+		

+		if (r.isOKhasData()) {

+			return Result.ok(r.value.get(0));

+		} else {

+			int dot = child == null ? -1 : child.lastIndexOf('.');

+			if (dot < 0) {

+				return Result.err(Status.ERR_NsNotFound,

+						"No Namespace for [%s]", child);

+			} else {

+				return deriveNs(trans, child.substring(0, dot));

+			}

+		}

+	}

+
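
The recursion above walks the dotted name right-to-left until a stored namespace matches. The same walk as a standalone JDK sketch, with a plain Set standing in for the NS table:

	import java.util.Set;

	class NsDerive {
		// Walk the dotted name right-to-left until a stored namespace matches.
		static String deriveNs(Set<String> known, String child) {
			if (child == null) {
				return null;
			}
			if (known.contains(child)) {
				return child;
			}
			int dot = child.lastIndexOf('.');
			return dot < 0 ? null : deriveNs(known, child.substring(0, dot));
		}
	}
	// e.g. with known = {"com.att"}: deriveNs(known, "com.att.a.b.c") -> "com.att"
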

+	public Result<NsDAO.Data> deriveFirstNsForType(AuthzTrans trans, String str, NsType type) {

+		NsDAO.Data nsd;

+

+		System.out.println("value of str before for loop ---------0---++++++++++++++++++" +str);

+		for(int idx = str.indexOf('.');idx>=0;idx=str.indexOf('.',idx+1)) {

+		//	System.out.println("printing value of str-----------------1------------++++++++++++++++++++++" +str);

+			Result<List<Data>> rld = nsDAO.read(trans, str.substring(0,idx));

+			System.out.println("value of idx is -----------------++++++++++++++++++++++++++" +idx);

+			System.out.println("printing value of str.substring-----------------1------------++++++++++++++++++++++" + (str.substring(0,idx)));

+			System.out.println("value of ResultListData ------------------2------------+++++++++++++++++++++++++++" +rld);

+			if(rld.isOKhasData()) {

+				System.out.println("In if loop -----------------3-------------- ++++++++++++++++");

+				System.out.println("value of nsd=rld.value.get(0).type -----------4------++++++++++++++++++++++++++++++++++++" +(nsd=rld.value.get(0)).type);

+				System.out.println("value of rld.value.get(0).name.toString()+++++++++++++++++++++++++++++++ " +rld.value.get(0).name);

+				if(type.type == (nsd=rld.value.get(0)).type) {

+					return Result.ok(nsd);

+				}

+			} else {

+				System.out.println("In else loop ----------------4------------+++++++++++++++++++++++");

+				return Result.err(Status.ERR_NsNotFound,"There is no valid Company Namespace for %s",str.substring(0,idx));

+			}

+		}

+		return Result.err(Status.ERR_NotFound, str + " does not contain type " + type.name());

+	}

+

+	public Result<NsSplit> deriveNsSplit(AuthzTrans trans, String child) {

+		Result<NsDAO.Data> ndd = deriveNs(trans, child);

+		if (ndd.isOK()) {

+			NsSplit nss = new NsSplit(ndd.value, child);

+			if (nss.isOK()) {

+				return Result.ok(nss);

+			} else {

+				return Result.err(Status.ERR_NsNotFound,

+						"Cannot split [%s] into valid namespace elements",

+						child);

+			}

+		}

+		return Result.err(ndd);

+	}

+

+	/**

+	 * Translate an ID into its Namespace

+	 * 

+	 * i.e. myid1234@myapp.att.com results in the namespace com.att.myapp

+	 * 

+	 * @param id

+	 * @return

+	 */

+	public static String domain2ns(String id) {

+		int at = id.indexOf('@');

+		if (at >= 0) {

+			String[] domain = id.substring(at + 1).split("\\.");

+			StringBuilder ns = new StringBuilder(id.length());

+			boolean first = true;

+			for (int i = domain.length - 1; i >= 0; --i) {

+				if (first) {

+					first = false;

+				} else {

+					ns.append('.');

+				}

+				ns.append(domain[i]);

+			}

+			return ns.toString();

+		} else {

+			return "";

+		}

+

+	}

+
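
An equivalent JDK-only restatement of the reversal above, handy for testing (a sketch, not the patch's code):

	import java.util.Arrays;
	import java.util.Collections;
	import java.util.List;

	class Domain2NsSketch {
		// Equivalent reversal: "myid1234@myapp.att.com" -> "com.att.myapp"
		static String domain2ns(String id) {
			int at = id.indexOf('@');
			if (at < 0) {
				return "";
			}
			List<String> parts = Arrays.asList(id.substring(at + 1).split("\\."));
			Collections.reverse(parts);
			return String.join(".", parts);
		}
	}
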

+	/**

+	 * Validate Namespace of ID@Domain

+	 * 

+	 * Namespace is reverse order of Domain.

+	 * 

+	 * i.e. myid1234@myapp.att.com results in the namespace com.att.myapp

+	 * 

+	 * @param trans

+	 * @param id

+	 * @return

+	 */

+	public Result<NsDAO.Data> validNSOfDomain(AuthzTrans trans, String id) {

+		// Take domain, reverse order, and check on NS

+		String ns;

+		if(id.indexOf('@')<0) { // it's already an ns, not an ID

+			ns = id;

+		} else {

+			ns = domain2ns(id);

+		}

+		if (ns.length() > 0) {

+			if(!trans.org().getDomain().equals(ns)) { 

+				Result<List<NsDAO.Data>> rlnsd = nsDAO.read(trans, ns);

+				if (rlnsd.isOKhasData()) {

+					return Result.ok(rlnsd.value.get(0));

+				}

+			}

+		}

+		return Result.err(Status.ERR_NsNotFound,

+				"A Namespace is not available for %s", id);

+	}

+

+	public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,NsDAO.Data ndd, Access access) {

+		// <ns>.access|:role:<role name>|<read|write>

+		String ns = ndd.name;

+		int last;

+		do {

+			if (isGranted(trans, user, ns, "access", ":ns", access.name())) {

+				return Result.ok(ndd);

+			}

+			if ((last = ns.lastIndexOf('.')) >= 0) {

+				ns = ns.substring(0, last);

+			}

+		} while (last >= 0);

+		// <root ns>.ns|:<client ns>:ns|<access>

+		// AAF-724 - Make consistent response for May User", and not take the

+		// last check... too confusing.

+		Result<NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, ":"	+ ndd.name + ":ns", access.name());

+		if (rv.isOK()) {

+			return rv;

+		} else if(rv.status==Result.ERR_Backend) {

+			return Result.err(rv);

+		} else {

+			return Result.err(Status.ERR_Denied, "[%s] may not %s in NS [%s]",

+					user, access.name(), ndd.name);

+		}

+	}
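
+	// Illustrative walk of the loop above (hypothetical namespace): for

+	// ndd.name == "com.att.myapp" and Access.read, isGranted is asked about

+	// "com.att.myapp", then "com.att", then "com", each time with type

+	// "access", instance ":ns" and action "read", before falling back to

+	// mayUserVirtueOfNS for the ":<ns>:ns" grant.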

+

+	public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user, RoleDAO.Data rdd, Access access) {

+		Result<NsDAO.Data> rnsd = deriveNs(trans, rdd.ns);

+		if (rnsd.isOK()) {

+			return mayUser(trans, user, rnsd.value, rdd, access);

+		}

+		return rnsd;

+	}

+

+	public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user, NsDAO.Data ndd, RoleDAO.Data rdd, Access access) {

+		// 1) Is User in the Role?

+		Result<List<UserRoleDAO.Data>> rurd = userRoleDAO.readUserInRole(trans, user, rdd.fullName());

+		if (rurd.isOKhasData()) {

+			return Result.ok(ndd);

+		}

+

+		String roleInst = ":role:" + rdd.name;

+		// <ns>.access|:role:<role name>|<read|write>

+		String ns = rdd.ns;

+		int last;

+		do {

+			if (isGranted(trans, user, ns,"access", roleInst, access.name())) {

+				return Result.ok(ndd);

+			}

+			if ((last = ns.lastIndexOf('.')) >= 0) {

+				ns = ns.substring(0, last);

+			}

+		} while (last >= 0);

+

+		// Check if Access by Global Role perm

+		// <root ns>.ns|:<client ns>:role:name|<access>

+		Result<NsDAO.Data> rnsd = mayUserVirtueOfNS(trans, user, ndd, ":"

+				+ rdd.ns + roleInst, access.name());

+		if (rnsd.isOK()) {

+			return rnsd;

+		} else if(rnsd.status==Result.ERR_Backend) {

+			return Result.err(rnsd);

+		}

+

+		// Check if Access to Whole NS

+		// AAF-724 - Make a consistent response for "May User", and do not take the

+		// last check... too confusing.

+		Result<org.onap.aaf.dao.aaf.cass.NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, 

+				":" + rdd.ns + ":ns", access.name());

+		if (rv.isOK()) {

+			return rv;

+		} else if(rv.status==Result.ERR_Backend) {

+			return Result.err(rv);

+		} else {

+			return Result.err(Status.ERR_Denied, "[%s] may not %s Role [%s]",

+					user, access.name(), rdd.fullName());

+		}

+

+	}

+

+	public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,PermDAO.Data pdd, Access access) {

+		Result<NsDAO.Data> rnsd = deriveNs(trans, pdd.ns);

+		if (rnsd.isOK()) {

+			return mayUser(trans, user, rnsd.value, pdd, access);

+		}

+		return rnsd;

+	}

+

+	public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,NsDAO.Data ndd, PermDAO.Data pdd, Access access) {

+		if (isGranted(trans, user, pdd.ns, pdd.type, pdd.instance, pdd.action)) {

+			return Result.ok(ndd);

+		}

+		String permInst = ":perm:" + pdd.type + ':' + pdd.instance + ':' + pdd.action;

+		// <ns>.access|:role:<role name>|<read|write>

+		String ns = ndd.name;

+		int last;

+		do {

+			if (isGranted(trans, user, ns, "access", permInst, access.name())) {

+				return Result.ok(ndd);

+			}

+			if ((last = ns.lastIndexOf('.')) >= 0) {

+				ns = ns.substring(0, last);

+			}

+		} while (last >= 0);

+

+		// Check if Access by NS perm

+		// <root ns>.ns|:<client ns>:role:name|<access>

+		Result<NsDAO.Data> rnsd = mayUserVirtueOfNS(trans, user, ndd, ":" + pdd.ns + permInst, access.name());

+		if (rnsd.isOK()) {

+			return rnsd;

+		} else if(rnsd.status==Result.ERR_Backend) {

+			return Result.err(rnsd);

+		}

+

+		// Check if Access to Whole NS

+		// AAF-724 - Make a consistent response for "May User", and do not take the

+		// last check... too confusing.

+		Result<NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, ":"	+ pdd.ns + ":ns", access.name());

+		if (rv.isOK()) {

+			return rv;

+		} else {

+			return Result.err(Status.ERR_Denied,

+					"[%s] may not %s Perm [%s|%s|%s]", user, access.name(),

+					pdd.fullType(), pdd.instance, pdd.action);

+		}

+

+	}

+

+	public Result<Void> mayUser(AuthzTrans trans, DelegateDAO.Data dd, Access access) {

+		try {

+			boolean isUser = trans.user().equals(dd.user);

+			boolean isDelegate = dd.delegate != null

+					&& (dd.user.equals(dd.delegate) || trans.user().equals(

+							dd.delegate));

+			Organization org = trans.org();

+			switch (access) {

+			case create:

+				if (org.getIdentity(trans, dd.user) == null) {

+					return Result.err(Status.ERR_UserNotFound,

+							"[%s] is not a user in the company database.",

+							dd.user);

+				}

+				if (!dd.user.equals(dd.delegate) && org.getIdentity(trans, dd.delegate) == null) {

+					return Result.err(Status.ERR_UserNotFound,

+							"[%s] is not a user in the company database.",

+							dd.delegate);

+				}

+				if (!trans.forceRequested() && dd.user != null && dd.user.equals(dd.delegate)) {

+					return Result.err(Status.ERR_BadData,

+							"[%s] cannot be a delegate for self", dd.user);

+				}

+				if (!isUser	&& !isGranted(trans, trans.user(), Define.ROOT_NS,DELG,

+								org.getDomain(), Question.CREATE)) {

+					return Result.err(Status.ERR_Denied,

+							"[%s] may not create a delegate for [%s]",

+							trans.user(), dd.user);

+				}

+				break;

+			case read:

+			case write:

+				if (!isUser	&& !isDelegate && 

+						!isGranted(trans, trans.user(), Define.ROOT_NS,DELG,org.getDomain(), access.name())) {

+					return Result.err(Status.ERR_Denied,

+							"[%s] may not %s delegates for [%s]", trans.user(),

+							access.name(), dd.user);

+				}

+				break;

+			default:

+				return Result.err(Status.ERR_BadData,"Unknown Access type [%s]", access.name());

+			}

+		} catch (Exception e) {

+			return Result.err(e);

+		}

+		return Result.ok();

+	}

+

+	/*

+	 * Check (recursively, if necessary), if able to do something based on NS

+	 */

+	private Result<NsDAO.Data> mayUserVirtueOfNS(AuthzTrans trans, String user,	NsDAO.Data nsd, String ns_and_type, String access) {

+		String ns = nsd.name;

+

+		// If an ADMIN of the Namespace, then allow

+		

+		Result<List<UserRoleDAO.Data>> rurd;

+		if ((rurd = userRoleDAO.readUserInRole(trans, user, nsd.name+ADMIN)).isOKhasData()) {

+			return Result.ok(nsd);

+		} else if(rurd.status==Result.ERR_Backend) {

+			return Result.err(rurd);

+		}

+		

+		// If Specially granted Global Permission

+		if (isGranted(trans, user, Define.ROOT_NS,NS, ns_and_type, access)) {

+			return Result.ok(nsd);

+		}

+

+		// Check recur

+

+		int dot = ns.length();

+		if ((dot = ns.lastIndexOf('.', dot - 1)) >= 0) {

+			Result<NsDAO.Data> rnsd = deriveNs(trans, ns.substring(0, dot));

+			if (rnsd.isOK()) {

+				rnsd = mayUserVirtueOfNS(trans, user, rnsd.value, ns_and_type,access);

+			} else if(rnsd.status==Result.ERR_Backend) {

+				return Result.err(rnsd);

+			}

+			if (rnsd.isOK()) {

+				return Result.ok(nsd);

+			} else if(rnsd.status==Result.ERR_Backend) {

+				return Result.err(rnsd);

+			}

+		}

+		return Result.err(Status.ERR_Denied, "%s may not %s %s", user, access,

+				ns_and_type);

+	}
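
+	// Recursion sketch (hypothetical namespace): a check on "com.att.myapp"

+	// that matches neither an admin UserRole nor a root-NS grant repeats on

+	// "com.att", then "com"; the first level that answers OK wins, otherwise

+	// the result is ERR_Denied.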

+

+	

+	/**

+	 * isGranted

+	 * 

+	 * Important function - Check internal Permission Schemes for Permission to

+	 * do things

+	 * 

+	 * @param trans

+	 * @param type

+	 * @param instance

+	 * @param action

+	 * @return

+	 */

+	public boolean isGranted(AuthzTrans trans, String user, String ns, String type,String instance, String action) {

+		Result<List<PermDAO.Data>> perms = getPermsByUser(trans, user, false);

+		if (perms.isOK()) {

+			for (PermDAO.Data pd : perms.value) {

+				if (ns.equals(pd.ns)) {

+					if (type.equals(pd.type)) {

+						if (PermEval.evalInstance(pd.instance, instance)) {

+							if(PermEval.evalAction(pd.action, action)) { // don't return action here, might miss other action 

+								return true;

+							}

+						}

+					}

+				}

+			}

+		}

+		return false;

+	}
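
+	// Example call (hypothetical values): isGranted(trans, "ju9999@ju.test.com",

+	// "com.att.myapp", "access", ":ns", "read") returns true only when a perm

+	// matches ns and type exactly and PermEval accepts the instance and action

+	// (wildcard handling is PermEval's concern, not this loop's).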

+

+	public Result<Date> doesUserCredMatch(AuthzTrans trans, String user, byte[] cred) throws DAOException {

+		Result<List<CredDAO.Data>> result;

+		TimeTaken tt = trans.start("Read DB Cred", Env.REMOTE);

+		try {

+			result = credDAO.readID(trans, user);

+		} finally {

+			tt.done();

+		}

+

+		Result<Date> rv = null;

+		if(result.isOK()) {

+			if (result.isEmpty()) {

+				rv = Result.err(Status.ERR_UserNotFound, user);

+				if (willSpecialLog(trans,user)) {

+					trans.audit().log("Special DEBUG:", user, " does not exist in DB");

+				}

+			} else {

+				Date now = new Date();

+				ByteBuffer md5=null;

+	

+				// Bug noticed 6/22. Sorting on the result can cause Concurrency Issues.	 

+				List<CredDAO.Data> cddl;

+				if(result.value.size() > 1) {

+					cddl = new ArrayList<CredDAO.Data>(result.value.size());

+					for(CredDAO.Data old : result.value) {

+						if(old.type==CredDAO.BASIC_AUTH || old.type==CredDAO.BASIC_AUTH_SHA256) {

+							cddl.add(old);

+						}

+					}

+					if(cddl.size()>1) {

+						Collections.sort(cddl,new Comparator<CredDAO.Data>() {

+							@Override

+							public int compare(org.onap.aaf.dao.aaf.cass.CredDAO.Data a,

+											   org.onap.aaf.dao.aaf.cass.CredDAO.Data b) {

+								return b.expires.compareTo(a.expires);

+							}

+						});

+					}

+				} else {

+					cddl = result.value;

+				}

+	

+				for (CredDAO.Data cdd : cddl) {

+					if (cdd.expires.after(now)) {

+						try {

+							switch(cdd.type) {

+								case CredDAO.BASIC_AUTH:

+									if(md5==null) {

+										md5=ByteBuffer.wrap(Hash.encryptMD5(cred));

+									}

+									if(md5.compareTo(cdd.cred)==0) {

+										return Result.ok(cdd.expires);

+									} else if (willSpecialLog(trans,user)) {

+										trans.audit().log("Special DEBUG:", user, "Client sent: ", trans.encryptor().encrypt(new String(cred)) ,cdd.expires);

+									}

+									break;

+								case CredDAO.BASIC_AUTH_SHA256:
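
+									// cdd.other is a random int salt stored with the credential;

+									// the (salt, raw cred) layout must mirror userCredSetup() below.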

+									ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + cred.length);

+									bb.putInt(cdd.other);

+									bb.put(cred);

+									byte[] hash = Hash.hashSHA256(bb.array());

+	

+									ByteBuffer sha256 = ByteBuffer.wrap(hash);

+									if(sha256.compareTo(cdd.cred)==0) {

+										return Result.ok(cdd.expires);

+									} else if (willSpecialLog(trans,user)) {

+										trans.audit().log("Special DEBUG:", user, "Client sent: ", trans.encryptor().encrypt(new String(cred)) ,cdd.expires);

+									}

+									break;

+								default:

+									trans.error().log("Unknown Credential Type %s for %s, %s",Integer.toString(cdd.type),cdd.id, Chrono.dateTime(cdd.expires));

+							}

+						} catch (NoSuchAlgorithmException e) {

+							trans.error().log(e);

+						}

+					} else {

+						rv = Result.err(Status.ERR_Security,

+								"Credentials expired " + cdd.expires.toString());

+					}

+				} // end for each

+			}

+		} else {

+			return Result.err(result);

+		}

+		return rv == null ? Result.create((Date) null, Status.ERR_Security,

+				"Wrong credential") : rv;

+	}

+

+

+	public Result<CredDAO.Data> userCredSetup(AuthzTrans trans, CredDAO.Data cred) {

+		if(cred.type==CredDAO.RAW) {

+			TimeTaken tt = trans.start("Hash Cred", Env.SUB);

+			try {

+				cred.type = CredDAO.BASIC_AUTH_SHA256;

+				cred.other = random.nextInt();

+				ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + cred.cred.capacity());

+				bb.putInt(cred.other);

+				bb.put(cred.cred);

+				byte[] hash = Hash.hashSHA256(bb.array());

+				cred.cred = ByteBuffer.wrap(hash);

+				return Result.ok(cred);

+			} catch (NoSuchAlgorithmException e) {

+				return Result.err(Status.ERR_General,e.getLocalizedMessage());

+			} finally {

+				tt.done();

+			}

+			

+		}

+		return Result.err(Status.ERR_Security,"invalid/unreadable credential");

+	}
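
+	// Round trip: the salted SHA-256 produced here is exactly what the

+	// BASIC_AUTH_SHA256 branch of doesUserCredMatch() recomputes, so the two

+	// buffer layouts (int salt, then raw credential) must stay in sync.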

+

+

+	public static final String APPROVED = "APPROVE";

+	public static final String REJECT = "REJECT";

+	public static final String PENDING = "PENDING";

+

+	public Result<Void> canAddUser(AuthzTrans trans, UserRoleDAO.Data data,

+			List<ApprovalDAO.Data> approvals) {

+		// get the approval policy for the organization

+

+		// get the list of approvals with an accept status

+

+		// validate the approvals against the policy

+

+		// for now check if all approvals are received and return

+		// SUCCESS/FAILURE/SKIP

+		boolean bReject = false;

+		boolean bPending = false;

+

+		for (ApprovalDAO.Data approval : approvals) {

+			if (approval.status.equals(REJECT)) {

+				bReject = true;

+			} else if (approval.status.equals(PENDING)) {

+				bPending = true;

+			}

+		}

+		if (bReject) {

+			return Result.err(Status.ERR_Policy,

+					"Approval Polocy not conformed");

+		}

+		if (bPending) {

+			return Result.err(Status.ERR_ActionNotCompleted,

+					"Required Approvals not received");

+		}

+

+		return Result.ok();

+	}

+

+	private static final String NO_CACHE_NAME = "No Cache Data named %s";

+

+	public Result<Void> clearCache(AuthzTrans trans, String cname) {

+		boolean all = "all".equals(cname);

+		Result<Void> rv = null;

+

+		if (all || NsDAO.TABLE.equals(cname)) {

+			int seg[] = series(NsDAO.CACHE_SEG);

+			for(int i: seg) {cacheClear(trans, NsDAO.TABLE,i);}

+			rv = cacheInfoDAO.touch(trans, NsDAO.TABLE, seg);

+		}

+		if (all || PermDAO.TABLE.equals(cname)) {

+			int seg[] = series(NsDAO.CACHE_SEG);

+			for(int i: seg) {cacheClear(trans, PermDAO.TABLE,i);}

+			rv = cacheInfoDAO.touch(trans, PermDAO.TABLE,seg);

+		}

+		if (all || RoleDAO.TABLE.equals(cname)) {

+			int seg[] = series(NsDAO.CACHE_SEG);

+			for(int i: seg) {cacheClear(trans, RoleDAO.TABLE,i);}

+			rv = cacheInfoDAO.touch(trans, RoleDAO.TABLE,seg);

+		}

+		if (all || UserRoleDAO.TABLE.equals(cname)) {

+			int seg[] = series(NsDAO.CACHE_SEG);

+			for(int i: seg) {cacheClear(trans, UserRoleDAO.TABLE,i);}

+			rv = cacheInfoDAO.touch(trans, UserRoleDAO.TABLE,seg);

+		}

+		if (all || CredDAO.TABLE.equals(cname)) {

+			int seg[] = series(NsDAO.CACHE_SEG);

+			for(int i: seg) {cacheClear(trans, CredDAO.TABLE,i);}

+			rv = cacheInfoDAO.touch(trans, CredDAO.TABLE,seg);

+		}

+		if (all || CertDAO.TABLE.equals(cname)) {

+			int seg[] = series(NsDAO.CACHE_SEG);

+			for(int i: seg) {cacheClear(trans, CertDAO.TABLE,i);}

+			rv = cacheInfoDAO.touch(trans, CertDAO.TABLE,seg);

+		}

+

+		if (rv == null) {

+			rv = Result.err(Status.ERR_BadData, NO_CACHE_NAME, cname);

+		}

+		return rv;

+	}

+

+	public Result<Void> cacheClear(AuthzTrans trans, String cname,Integer segment) {

+		Result<Void> rv;

+		if (NsDAO.TABLE.equals(cname)) {

+			rv = nsDAO.invalidate(segment);

+		} else if (PermDAO.TABLE.equals(cname)) {

+			rv = permDAO.invalidate(segment);

+		} else if (RoleDAO.TABLE.equals(cname)) {

+			rv = roleDAO.invalidate(segment);

+		} else if (UserRoleDAO.TABLE.equals(cname)) {

+			rv = userRoleDAO.invalidate(segment);

+		} else if (CredDAO.TABLE.equals(cname)) {

+			rv = credDAO.invalidate(segment);

+		} else if (CertDAO.TABLE.equals(cname)) {

+			rv = certDAO.invalidate(segment);

+		} else {

+			rv = Result.err(Status.ERR_BadData, NO_CACHE_NAME, cname);

+		}

+		return rv;

+	}

+

+	private int[] series(int max) {

+		int[] series = new int[max];

+		for (int i = 0; i < max; ++i)

+			series[i] = i;

+		return series;

+	}

+

+	public boolean isDelegated(AuthzTrans trans, String user, String approver) {

+		Result<List<DelegateDAO.Data>> userDelegatedFor = delegateDAO

+				.readByDelegate(trans, user);

+		if (userDelegatedFor.isOKhasData()) {

+			for (DelegateDAO.Data curr : userDelegatedFor.value) {

+				if (curr.user.equals(approver) && curr.delegate.equals(user)

+						&& curr.expires.after(new Date())) {

+					return true;

+				}

+			}

+		}

+		return false;

+	}

+

+	public static boolean willSpecialLog(AuthzTrans trans, String user) {

+		Boolean b = trans.get(specialLogSlot, null);

+		if(b==null) {

+			if(specialLog==null) {

+				return false;

+			} else {

+				b = specialLog.contains(user);

+				trans.put(specialLogSlot, b);

+			}

+		}

+		return b;

+	}

+	

+	public static void logEncryptTrace(AuthzTrans trans, String data) {

+		long ti;

+		trans.put(transIDSlot, ti=nextTraceID());

+		trans.trace().log("id="+Long.toHexString(ti)+",data=\""+trans.env().encryptor().encrypt(data)+'"');

+	}

+

+	private synchronized static long nextTraceID() {

+		return ++traceID;

+	}

+

+	public static synchronized boolean specialLogOn(AuthzTrans trans, String id) {

+		if (specialLog == null) {

+			specialLog = new HashSet<String>();

+		}

+		boolean rc = specialLog.add(id);

+		if(rc) {

+			trans.trace().log("Trace on for",id);			

+		}

+		return rc;

+	}

+

+	public static synchronized boolean specialLogOff(AuthzTrans trans, String id) {

+		if(specialLog==null) {

+			return false;

+		}

+		boolean rv = specialLog.remove(id);

+		if (specialLog.isEmpty()) {

+			specialLog = null;

+		}

+		if(rv) {

+			trans.trace().log("Trace off for",id);

+		}

+		return rv;

+	}

+

+	/** 

+	 * canMove

+	 * Which Types can be moved

+	 * @param nsType

+	 * @return

+	 */

+	public boolean canMove(NsType nsType) {

+		boolean rv;

+		switch(nsType) {

+			case DOT:

+			case ROOT:

+			case COMPANY:

+			case UNKNOWN:

+				rv = false;

+				break;

+			default:

+				rv = true;

+		}

+		return rv;

+	}

+

+	public Result<String> isOwnerSponsor(AuthzTrans trans, String user, String ns, Identity mechID) {

+		

+		Identity caller;

+		Organization org = trans.org();

+		try {

+			caller = org.getIdentity(trans, user);

+			if(caller==null || !caller.isFound()) {

+				return Result.err(Status.ERR_NotFound,"%s is not a registered %s entity",user,org.getName());

+			}

+		} catch (Exception e) {

+			return Result.err(e);

+		}

+		String sponsor = mechID.responsibleTo();

+		Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+DOT_OWNER);

+		boolean isOwner = false;

+		if(rur.isOKhasData()) {

+			for(UserRoleDAO.Data urdd : rur.value) {

+				if(urdd.expires.after(new Date())) {

+					isOwner = true;

+				}

+			}

+		}

+		if(!isOwner) {

+			return Result.err(Status.ERR_Policy,"%s is not a current owner of %s",user,ns);

+		}

+		

+		if(!caller.id().equals(sponsor)) {

+			return Result.err(Status.ERR_Denied,"%s is not the sponsor of %s",user,mechID.id());

+		}

+		return Result.ok(sponsor);

+	}

+	

+	public boolean isAdmin(AuthzTrans trans, String user, String ns) {

+		Date now = new Date();

+		Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+ADMIN);

+		if(rur.isOKhasData()) {

+			for(UserRoleDAO.Data urdd : rur.value) {

+				if(urdd.expires.after(now)) {

+					return true;

+				}

+			}

+		}

+		return false;

+	}

+	

+	public boolean isOwner(AuthzTrans trans, String user, String ns) {

+		Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+DOT_OWNER);

+		Date now = new Date();

+		if(rur.isOKhasData()) {

+			for(UserRoleDAO.Data urdd : rur.value) {

+				if(urdd.expires.after(now)) {

+					return true;

+				}

+			}

+		}

+		return false;

+	}

+

+	public int countOwner(AuthzTrans trans, String user, String ns) {

+		Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+DOT_OWNER);

+		Date now = new Date();

+		int count = 0;

+		if(rur.isOKhasData()) {

+			for(UserRoleDAO.Data urdd : rur.value) {

+				if(urdd.expires.after(now)) {

+					++count;

+				}

+			}

+		}

+		return count;

+	}

+

+}

diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/session/SessionFilter.java b/authz-cass/src/main/java/org/onap/aaf/dao/session/SessionFilter.java
new file mode 100644
index 0000000..9e60443
--- /dev/null
+++ b/authz-cass/src/main/java/org/onap/aaf/dao/session/SessionFilter.java
@@ -0,0 +1,142 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.session;

+

+import java.io.IOException;

+

+import javax.servlet.Filter;

+import javax.servlet.FilterChain;

+import javax.servlet.FilterConfig;

+import javax.servlet.ServletException;

+import javax.servlet.ServletRequest;

+import javax.servlet.ServletResponse;

+

+import org.onap.aaf.cssa.rserv.TransFilter;

+

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.Env;

+import org.onap.aaf.inno.env.EnvStore;

+import org.onap.aaf.inno.env.Slot;

+import org.onap.aaf.inno.env.TransStore;

+import org.onap.aaf.inno.env.util.Pool;

+import org.onap.aaf.inno.env.util.Pool.Creator;

+import org.onap.aaf.inno.env.util.Pool.Pooled;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.Session;

+

+public class SessionFilter<TRANS extends TransStore> implements Filter {

+	public static final String SESSION_SLOT = "__SESSION__";

+	private static Slot sessionSlot;

+	private static Pool<Session> pool;

+

+	public SessionFilter(EnvStore<?> env, Cluster cluster, String keyspace) {

+		synchronized(env) {

+			if(sessionSlot==null) {

+				sessionSlot = env.slot(SESSION_SLOT);

+			}

+			if(pool==null) {

+				pool = new Pool<Session>(new SessionCreator(env,cluster,keyspace));

+			}

+		}

+	}
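
+	// Typical wiring (hypothetical names): construct once per servlet context,

+	// e.g. new SessionFilter<AuthzTrans>(env, cluster, "authz"), and register it

+	// ahead of any handler that reads the Session from the "__SESSION__" slot.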

+

+	@Override

+	public void init(FilterConfig fc) throws ServletException {

+		// Session does not need any sort of configuration from Filter

+	}

+

+	@Override

+	public void doFilter(ServletRequest req, ServletResponse resp,	FilterChain chain) throws IOException, ServletException {

+		@SuppressWarnings("unchecked")

+		TRANS trans = (TRANS)req.getAttribute(TransFilter.TRANS_TAG);

+		try {

+			Pooled<Session> psess = pool.get();

+			try {

+				trans.put(sessionSlot, psess.content);

+				chain.doFilter(req, resp);

+			} finally {

+				psess.done();

+			}

+		} catch (APIException e) {

+			throw new ServletException(e);

+		}

+	}

+

+	public Pooled<Session> load(TRANS trans) throws APIException {

+		Pooled<Session> psess = pool.get();

+		trans.put(sessionSlot, psess.content);

+		return psess;

+	}

+	

+	

+	/**

+	 * Clear will drain the pool, so that new Sessions will be constructed.

+	 * 

+	 * Suitable for Management calls.	 

+	 */

+	public static void clear() {

+		if(pool!=null) {

+			pool.drain();

+		} 

+	}

+	

+	@Override

+	public void destroy() {

+		pool.drain();

+	}

+

+	private class SessionCreator implements Creator<Session> {

+		private Cluster cluster;

+		private String keyspace;

+		private Env env;

+		

+		public SessionCreator(Env env, Cluster cluster, String keyspace) {

+			this.cluster = cluster;

+			this.keyspace = keyspace;

+			this.env = env;

+		}

+		

+		@Override

+		public Session create() throws APIException {

+			env.info().log("Creating a Cassandra Session");

+			return cluster.connect(keyspace);

+		}

+

+		@Override

+		public void destroy(Session t) {

+			env.info().log("Shutting down a Cassandra Session");

+			t.close();

+		}

+

+		@Override

+		public boolean isValid(Session t) {

+			return true;

+		}

+

+		@Override

+		public void reuse(Session t) {

+			// Nothing is needed to reuse this Session

+		}

+		

+	}

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/authz/cass/hl/JU_Question.java b/authz-cass/src/test/java/org/onap/aaf/authz/cass/hl/JU_Question.java
new file mode 100644
index 0000000..86bc1ab
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/authz/cass/hl/JU_Question.java
@@ -0,0 +1,500 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.authz.cass.hl;

+

+import static junit.framework.Assert.assertEquals;

+import static junit.framework.Assert.assertFalse;

+import static junit.framework.Assert.assertTrue;

+

+import java.security.Principal;

+import java.util.ArrayList;

+import java.util.Date;

+import java.util.List;

+

+import org.junit.AfterClass;

+import org.junit.BeforeClass;

+import org.junit.Test;

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.NsDAO;

+import org.onap.aaf.dao.aaf.cass.PermDAO;

+import org.onap.aaf.dao.aaf.cass.RoleDAO;

+import org.onap.aaf.dao.aaf.cass.UserRoleDAO;

+import org.onap.aaf.dao.aaf.cass.NsDAO.Data;

+import org.onap.aaf.dao.aaf.hl.Question;

+import org.onap.aaf.dao.aaf.hl.Question.Access;

+import org.onap.aaf.dao.aaf.test.AbsJUCass;

+

+import org.onap.aaf.inno.env.Env;

+import org.onap.aaf.inno.env.TimeTaken;

+

+public class JU_Question extends AbsJUCass {

+

+	private static final int EXPIRES_IN = 60000000;

+	private static final String COM_TEST_JU = "com.test.ju_question";

+	private static final String JU9999_JU_TEST_COM = "ju9999@ju.test.com";

+	private static final String JU9998_JU_TEST_COM = "ju9998@ju.test.com";

+	private static final String READ = "read";

+	private static final int NFR_1 = 80;

+	private static final int NFR_2 = 4000;

+	private static final int ROLE_LEVEL1 = 1000;

+	private static final int PERM_LEVEL1 = 1000;

+//	private static final int PERM_LEVEL2 = 20;

+	private static Question q;

+	private static NsDAO.Data ndd;

+

+	@BeforeClass

+	public static void startupBeforeClass() throws Exception {

+		details=false;

+		AuthzTrans trans = env.newTransNoAvg();

+		q = new Question(trans,cluster,AUTHZ, false);

+		ndd = new NsDAO.Data();

+		ndd.name=COM_TEST_JU;

+		ndd.type=3; // app

+		ndd.parent="com.test";

+		ndd.description="Temporary Namespace for JU_Question";

+		q.nsDAO.create(trans, ndd);

+	}

+	

+	@AfterClass

+	public static void endAfterClass() throws Exception {

+		q.nsDAO.delete(trans, ndd,false);

+	}

+//    @Test

+	public void mayUserRead_EmptyPerm() {

+		PermDAO.Data pdd = new PermDAO.Data();

+		Result<NsDAO.Data> result = q.mayUser(trans,JU9999_JU_TEST_COM,pdd,Access.read);

+		assertFalse(result.isOK());

+	}

+

+//    @Test

+	public void mayUserRead_OnePermNotExist() {

+		Result<NsDAO.Data> result = q.mayUser(trans,JU9999_JU_TEST_COM,newPerm(0,0,READ),Access.read);

+		assertFalse(result.isOK());

+		assertEquals("Denied - ["+ JU9999_JU_TEST_COM +"] may not read Perm [" + COM_TEST_JU + ".myPerm0|myInstance0|read]",result.errorString());

+	}

+	

+//    @Test

+	public void mayUserRead_OnePermExistDenied() {

+		PermDAO.Data perm = newPerm(0,0,READ);

+		q.permDAO.create(trans,perm);

+		try {

+			Result<NsDAO.Data> result;

+			TimeTaken tt = trans.start("q.mayUser...", Env.SUB);

+			try {

+				result = q.mayUser(trans,JU9999_JU_TEST_COM,perm,Access.read);

+			} finally {

+				tt.done();

+				assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()<NFR_1);

+			}

+			assertFalse(result.isOK());

+			assertEquals("Denied - ["+ JU9999_JU_TEST_COM +"] may not read Perm ["+COM_TEST_JU + ".myPerm0|myInstance0|read]",result.errorString());

+		} finally {

+			q.permDAO.delete(trans, perm, false);

+		}

+	}

+

+//    @Test

+	public void mayUserRead_OnePermOneRoleExistOK() {

+		PermDAO.Data perm = newPerm(0,0,READ);

+		RoleDAO.Data role = newRole(0,perm);

+		UserRoleDAO.Data ur = newUserRole(role,JU9999_JU_TEST_COM,EXPIRES_IN);

+		try {

+			q.permDAO.create(trans,perm);

+			q.roleDAO.create(trans,role);

+			q.userRoleDAO.create(trans,ur);

+			

+			Result<NsDAO.Data> result;

+			TimeTaken tt = trans.start("q.mayUser...", Env.SUB);

+			try {

+				result = q.mayUser(trans,JU9999_JU_TEST_COM,perm,Access.read);

+			} finally {

+				tt.done();

+				assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()<NFR_1);

+			}

+			assertTrue(result.isOK());

+		} finally {

+			q.permDAO.delete(trans, perm, false);

+			q.roleDAO.delete(trans, role, false);

+			q.userRoleDAO.delete(trans, ur, false);

+		}

+	}

+

+//	@Test

+	public void filter_OnePermOneRoleExistOK() {

+		PermDAO.Data perm = newPerm(0,0,READ);

+		RoleDAO.Data role = newRole(0,perm);

+		UserRoleDAO.Data ur1 = newUserRole(role,JU9998_JU_TEST_COM,EXPIRES_IN);

+		UserRoleDAO.Data ur2 = newUserRole(role,JU9999_JU_TEST_COM,EXPIRES_IN);

+		try {

+			q.permDAO.create(trans,perm);

+			q.roleDAO.create(trans,role);

+			q.userRoleDAO.create(trans,ur1);

+			q.userRoleDAO.create(trans,ur2);

+			

+			Result<List<PermDAO.Data>> pres;

+			TimeTaken tt = trans.start("q.getPerms...", Env.SUB);

+			try {

+				pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9999_JU_TEST_COM);

+			} finally {

+				tt.done();

+				trans.info().log("filter_OnePermOneRleExistOK",tt);

+				assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()<NFR_1);

+			}

+			assertTrue(pres.isOK());

+			

+			try {

+				pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9998_JU_TEST_COM);

+			} finally {

+				tt.done();

+				trans.info().log("filter_OnePermOneRleExistOK No Value",tt);

+				assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()<NFR_1);

+			}

+			assertFalse(pres.isOKhasData());

+

+		} finally {

+			q.permDAO.delete(trans, perm, false);

+			q.roleDAO.delete(trans, role, false);

+			q.userRoleDAO.delete(trans, ur1, false);

+			q.userRoleDAO.delete(trans, ur2, false);

+		}

+	}

+

+//    @Test

+	public void mayUserRead_OnePermMultiRoleExistOK() {

+		PermDAO.Data perm = newPerm(0,0,READ);

+		List<RoleDAO.Data> lrole = new ArrayList<RoleDAO.Data>();

+		List<UserRoleDAO.Data> lur = new ArrayList<UserRoleDAO.Data>();

+		try {

+			q.permDAO.create(trans,perm);

+			for(int i=0;i<ROLE_LEVEL1;++i) {

+				RoleDAO.Data role = newRole(i,perm);

+				lrole.add(role);

+				q.roleDAO.create(trans,role);

+				

+				UserRoleDAO.Data ur = newUserRole(role,JU9999_JU_TEST_COM,60000000);

+				lur.add(ur);

+				q.userRoleDAO.create(trans,ur);

+			}

+			

+			Result<NsDAO.Data> result;

+			TimeTaken tt = trans.start("mayUserRead_OnePermMultiRoleExistOK", Env.SUB);

+			try {

+				result = q.mayUser(trans,JU9999_JU_TEST_COM,perm,Access.read);

+			} finally {

+				tt.done();

+				env.info().log(tt,ROLE_LEVEL1,"iterations");

+				assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);

+			}

+			assertTrue(result.isOK());

+		} finally {

+			q.permDAO.delete(trans, perm, false);

+			for(RoleDAO.Data role : lrole) {

+				q.roleDAO.delete(trans, role, false);

+			}

+			for(UserRoleDAO.Data ur : lur) {

+				q.userRoleDAO.delete(trans, ur, false);

+			}

+		}

+	}

+

+    @Test

+	public void mayUserRead_MultiPermOneRoleExistOK() {

+		RoleDAO.Data role = newRole(0);

+		UserRoleDAO.Data ur = newUserRole(role,JU9999_JU_TEST_COM,EXPIRES_IN);

+		List<PermDAO.Data> lperm = new ArrayList<PermDAO.Data>();

+		try {

+			for(int i=0;i<PERM_LEVEL1;++i) {

+				lperm.add(newPerm(i,i,READ,role));

+			}

+			q.roleDAO.create(trans, role);

+			q.userRoleDAO.create(trans, ur);

+			

+			Result<NsDAO.Data> result;

+			TimeTaken tt = trans.start("mayUserRead_MultiPermOneRoleExistOK", Env.SUB);

+			try {

+				result = q.mayUser(trans,JU9999_JU_TEST_COM,lperm.get(PERM_LEVEL1-1),Access.read);

+			} finally {

+				tt.done();

+				env.info().log(tt,PERM_LEVEL1,"iterations");

+				assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);

+			}

+			assertTrue(result.isOK());

+		} finally {

+			for(PermDAO.Data perm : lperm) {

+				q.permDAO.delete(trans, perm, false);

+			}

+			q.roleDAO.delete(trans, role, false);

+			q.userRoleDAO.delete(trans, ur, false);

+		}

+	}

+

+////	@Test

+//	public void mayUserRead_MultiPermMultiRoleExistOK() {

+//		List<PermDAO.Data> lperm = new ArrayList<PermDAO.Data>();

+//		List<RoleDAO.Data> lrole = new ArrayList<RoleDAO.Data>();

+//		List<UserRoleDAO.Data> lur = new ArrayList<UserRoleDAO.Data>();

+//

+//		try {

+//			RoleDAO.Data role;

+//			UserRoleDAO.Data ur;

+//			for(int i=0;i<ROLE_LEVEL1;++i) {

+//				lrole.add(role=newRole(i));

+//				q.roleDAO.create(trans, role);

+//				lur.add(ur=newUserRole(role, JU9999_JU_TEST_COM, EXPIRES_IN));

+//				q.userRoleDAO.create(trans, ur);

+//				for(int j=0;j<PERM_LEVEL2;++j) {

+//					lperm.add(newPerm(i,j,READ,role));

+//				}

+//			}

+//			

+//			Result<NsDAO.Data> result;

+//			TimeTaken tt = trans.start("mayUserRead_MultiPermMultiRoleExistOK", Env.SUB);

+//			try {

+//				result = q.mayUser(trans,JU9999_JU_TEST_COM,lperm.get(ROLE_LEVEL1*PERM_LEVEL2-1),Access.read);

+//			} finally {

+//				tt.done();

+//				env.info().log(tt,lperm.size(),"perms",", ",lrole.size(),"role");

+//				assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);

+//			}

+//			assertTrue(result.isOK());

+//		} finally {

+//			for(PermDAO.Data perm : lperm) {

+//				q.permDAO.delete(trans, perm, false);

+//			}

+//			for(RoleDAO.Data role : lrole) {

+//				q.roleDAO.delete(trans, role, false);

+//			}

+//			for(UserRoleDAO.Data ur : lur) {

+//				q.userRoleDAO.delete(trans, ur, false);

+//			}

+//		}

+//	}

+

+	@Test

+	public void mayUserRead_MultiPermMultiRoleExist_10x10() {

+		env.info().log("Original Filter Method 10x10");

+		mayUserRead_MultiPermMultiRoleExist(10,10);

+		env.info().log("New Filter Method 10x10");

+		mayUserRead_MultiPermMultiRoleExist_NewOK(10,10);

+	}

+

+//	@Test

+	public void mayUserRead_MultiPermMultiRoleExist_20x10() {

+		env.info().log("mayUserRead_MultiPermMultiRoleExist_20x10");

+		mayUserRead_MultiPermMultiRoleExist_NewOK(20,10);

+	}

+

+//	@Test

+	public void mayUserRead_MultiPermMultiRoleExist_100x10() {

+		env.info().log("mayUserRead_MultiPermMultiRoleExist_100x10");

+		mayUserRead_MultiPermMultiRoleExist_NewOK(100,10);

+	}

+

+//	@Test

+	public void mayUserRead_MultiPermMultiRoleExist_100x20() {

+		env.info().log("mayUserRead_MultiPermMultiRoleExist_100x20");

+		mayUserRead_MultiPermMultiRoleExist_NewOK(100,20);

+	}

+

+//	@Test

+	public void mayUserRead_MultiPermMultiRoleExist_1000x20() {

+		env.info().log("mayUserRead_MultiPermMultiRoleExist_1000x20");

+		mayUserRead_MultiPermMultiRoleExist_NewOK(1000,20);

+	}

+

+	private void mayUserRead_MultiPermMultiRoleExist(int roleLevel, int permLevel) {

+		List<PermDAO.Data> lperm = new ArrayList<PermDAO.Data>();

+		List<RoleDAO.Data> lrole = new ArrayList<RoleDAO.Data>();

+		List<UserRoleDAO.Data> lur = new ArrayList<UserRoleDAO.Data>();

+		load(roleLevel, permLevel, lperm,lrole,lur);

+

+

+		Result<List<PermDAO.Data>> pres;

+		trans.setUser(new Principal() {

+			@Override

+			public String getName() {

+				return JU9999_JU_TEST_COM;

+			}

+		});

+

+		try {

+			TimeTaken group = trans.start("  Original Security Method (1st time)", Env.SUB);

+			try {

+				TimeTaken tt = trans.start("    Get User Perms for "+JU9998_JU_TEST_COM, Env.SUB);

+				try {

+					pres = q.getPermsByUser(trans,JU9998_JU_TEST_COM,true);

+				} finally {

+					tt.done();

+					env.info().log(tt,"  Looked up (full) getPermsByUser for",JU9998_JU_TEST_COM);

+				}

+				assertTrue(pres.isOK());

+				tt = trans.start("    q.mayUser", Env.SUB);

+				List<PermDAO.Data> reduced = new ArrayList<PermDAO.Data>();

+				

+				try {

+					for(PermDAO.Data p : pres.value) {

+						Result<Data> r = q.mayUser(trans,JU9999_JU_TEST_COM,p,Access.read);

+						if(r.isOK()) {

+							reduced.add(p);

+						}

+					}

+				} finally {

+					tt.done();

+					env.info().log(tt," reduced" + pres.value.size(),"perms","to",reduced.size());

+	//				assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);

+				}

+	//			assertFalse(result.isOK());

+			} finally {

+				group.done();

+				env.info().log(group,"  Original Validation Method (1st pass)");

+			}

+			

+

+		} finally {

+			unload(lperm, lrole, lur);

+		}

+	}

+

+	private void mayUserRead_MultiPermMultiRoleExist_NewOK(int roleLevel, int permLevel) {

+		List<PermDAO.Data> lperm = new ArrayList<PermDAO.Data>();

+		List<RoleDAO.Data> lrole = new ArrayList<RoleDAO.Data>();

+		List<UserRoleDAO.Data> lur = new ArrayList<UserRoleDAO.Data>();

+		load(roleLevel, permLevel, lperm,lrole,lur);

+

+		try {

+

+			Result<List<PermDAO.Data>> pres;

+			TimeTaken tt = trans.start("  mayUserRead_MultiPermMultiRoleExist_New New Filter", Env.SUB);

+			try {

+				pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9998_JU_TEST_COM);

+			} finally {

+				tt.done();

+				env.info().log(tt,lperm.size(),"perms",", ",lrole.size(),"role", lur.size(), "UserRoles");

+//				assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);

+			}

+//			assertTrue(pres.isOKhasData());

+

+			tt = trans.start("  mayUserRead_MultiPermMultiRoleExist_New New Filter (2nd time)", Env.SUB);

+			try {

+				pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9998_JU_TEST_COM);

+			} finally {

+				tt.done();

+				env.info().log(tt,lperm.size(),"perms",", ",lrole.size(),"role", lur.size(), "UserRoles");

+				assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis()<NFR_2);

+			}

+//			assertTrue(pres.isOKhasData());

+

+		} finally {

+			unload(lperm, lrole, lur);

+		}

+	}

+

+

+	private void load(int roleLevel, int permLevel,	List<PermDAO.Data> lperm , List<RoleDAO.Data> lrole, List<UserRoleDAO.Data> lur) {

+		RoleDAO.Data role;

+		UserRoleDAO.Data ur;

+		PermDAO.Data perm;

+		

+		int onethirdR=roleLevel/3;

+		int twothirdR=onethirdR*2;

+		int onethirdP=permLevel/3;

+		int twothirdP=onethirdP*2;

+

+		for(int i=0;i<roleLevel;++i) {

+			lrole.add(role=newRole(i));

+			if(i<onethirdR) { // one has

+				lur.add(ur=newUserRole(role, JU9998_JU_TEST_COM, EXPIRES_IN));

+				q.userRoleDAO.create(trans, ur);

+				for(int j=0;j<onethirdP;++j) {

+					lperm.add(perm=newPerm(i,j,READ,role));

+					q.permDAO.create(trans, perm);

+				}

+			} else if(i<twothirdR) { // both have

+				lur.add(ur=newUserRole(role, JU9998_JU_TEST_COM, EXPIRES_IN));

+				q.userRoleDAO.create(trans, ur);

+				lur.add(ur=newUserRole(role, JU9999_JU_TEST_COM, EXPIRES_IN));

+				q.userRoleDAO.create(trans, ur);

+				for(int j=onethirdP;j<twothirdP;++j) {

+					lperm.add(perm=newPerm(i,j,READ,role));

+					q.permDAO.create(trans, perm);

+				}

+			} else { // other has

+				lur.add(ur=newUserRole(role, JU9999_JU_TEST_COM, EXPIRES_IN));

+				q.userRoleDAO.create(trans, ur);

+				for(int j=twothirdP;j<permLevel;++j) {

+					lperm.add(perm=newPerm(i,j,READ,role));

+					q.permDAO.create(trans, perm);

+				}

+			}

+			q.roleDAO.create(trans, role);

+		}

+

+	}

+	

+	private void unload(List<PermDAO.Data> lperm , List<RoleDAO.Data> lrole, List<UserRoleDAO.Data> lur) {

+		for(PermDAO.Data perm : lperm) {

+			q.permDAO.delete(trans, perm, false);

+		}

+		for(RoleDAO.Data role : lrole) {

+			q.roleDAO.delete(trans, role, false);

+		}

+		for(UserRoleDAO.Data ur : lur) {

+			q.userRoleDAO.delete(trans, ur, false);

+		}

+

+	}

+	private PermDAO.Data newPerm(int permNum, int instNum, String action, RoleDAO.Data ... grant) {

+		PermDAO.Data pdd = new PermDAO.Data();

+		pdd.ns=COM_TEST_JU;

+		pdd.type="myPerm"+permNum;

+		pdd.instance="myInstance"+instNum;

+		pdd.action=action;

+		for(RoleDAO.Data r : grant) {

+			pdd.roles(true).add(r.fullName());

+			r.perms(true).add(pdd.encode());

+		}

+		return pdd;

+	}

+

+	private RoleDAO.Data newRole(int roleNum, PermDAO.Data ... grant) {

+		RoleDAO.Data rdd = new RoleDAO.Data();

+		rdd.ns = COM_TEST_JU+roleNum;

+		rdd.name = "myRole"+roleNum;

+		for(PermDAO.Data p : grant) {

+			rdd.perms(true).add(p.encode());

+			p.roles(true).add(rdd.fullName());

+		}

+		return rdd;

+	}

+

+	private UserRoleDAO.Data newUserRole(RoleDAO.Data role,String user, long offset) {

+		UserRoleDAO.Data urd = new UserRoleDAO.Data();

+		urd.user=user;

+		urd.role(role);

+		urd.expires=new Date(System.currentTimeMillis()+offset);

+		return urd;

+	}

+

+

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/JU_Cached.java b/authz-cass/src/test/java/org/onap/aaf/dao/JU_Cached.java
new file mode 100644
index 0000000..aa0785a
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/JU_Cached.java
@@ -0,0 +1,127 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import static org.junit.Assert.*;

+

+import java.util.Date;

+import java.util.List;

+import java.util.Map;

+import java.util.Timer;

+

+import org.junit.Before;

+import org.junit.Test;

+import org.junit.runner.RunWith;

+import org.mockito.Mock;

+import org.onap.aaf.authz.env.AuthzEnv;

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.cache.Cache;

+import org.onap.aaf.cache.Cache.Dated;

+import org.onap.aaf.dao.CIDAO;

+import org.onap.aaf.dao.Cached;

+import org.onap.aaf.dao.Cached.Getter;

+import org.powermock.modules.junit4.PowerMockRunner;

+

+//import org.onap.aaf.dao.Cached.Refresh;

+import org.onap.aaf.inno.env.Trans;

+

+@RunWith(PowerMockRunner.class)

+public class JU_Cached {

+	Cached cached;

+	@Mock

+	CIDAO<Trans> ciDaoMock;

+	@Mock

+	AuthzEnv authzEnvMock;

+	@Mock

+	CIDAO<AuthzTrans> cidaoATMock;

+	

+	String name = "nameString";

+	

+	@Before

+	public void setUp(){

+		cached = new Cached(ciDaoMock, name, 0);
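
+		// A segment size of 0 presumably makes cacheIdx()/invalidate() divide by

+		// zero, which is why the tests below expect ArithmeticException.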

+	}

+	

+	@Test(expected=ArithmeticException.class)

+	public void testCachedIdx(){

+		cached.cacheIdx("1234567890");

+	}

+	

+	@Test(expected=ArithmeticException.class)

+	public void testInvalidate(){

+		cached.invalidate(name);

+	}

+	

+	@SuppressWarnings("static-access")

+	@Test

+	public void testStopTimer(){

+		cached.stopTimer();

+		assertTrue(true);

+	}

+

+	@SuppressWarnings("static-access")

+	@Test

+	public void testStartRefresh(){

+		cached.startRefresh(authzEnvMock, cidaoATMock);

+		assertTrue(true);

+	}

+//	@Mock

+//	Trans transMock;

+//	@Mock

+//	Getter<DAO> getterMock;

+//	

+//	@Test

+//	public void testGet(){

+//		cached.get(transMock, name, getterMock);

+//		fail("not implemented");

+//	}

+//	

+//	@SuppressWarnings("unchecked")

+//	public Result<List<DATA>> get(TRANS trans, String key, Getter<DATA> getter) {

+//		List<DATA> ld = null;

+//		Result<List<DATA>> rld = null;

+//		

+//		int cacheIdx = cacheIdx(key);

+//		Map<String, Dated> map = ((Map<String,Dated>)cache[cacheIdx]);

+//		

+//		// Check for saved element in cache

+//		Dated cached = map.get(key);

+//		// Note: These Segment Timestamps are kept up to date with DB

+//		Date dbStamp = info.get(trans, name,cacheIdx);

+//		

+//		// Check for cache Entry and whether it is still good (a good Cache Entry is same or after DBEntry, so we use "before" syntax)

+//		if(cached!=null && dbStamp.before(cached.timestamp)) {

+//			ld = (List<DATA>)cached.data;

+//			rld = Result.ok(ld);

+//		} else {

+//			rld = getter.get();

+//			if(rld.isOK()) { // only store valid lists

+//				map.put(key, new Dated(rld.value));  // successful item found gets put in cache

+////			} else if(rld.status == Result.ERR_Backend){

+////				map.remove(key);

+//			}

+//		}

+//		return rld;

+//	}

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/JU_CachedDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/JU_CachedDAO.java
new file mode 100644
index 0000000..3bb78d2
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/JU_CachedDAO.java
@@ -0,0 +1,66 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import static org.junit.Assert.*;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.junit.Assert;

+import org.junit.Before;

+import org.junit.Test;

+import org.junit.runner.RunWith;

+import org.mockito.Mock;

+import org.onap.aaf.dao.CIDAO;

+import org.onap.aaf.dao.CachedDAO;

+import org.onap.aaf.dao.DAO;

+import org.powermock.modules.junit4.PowerMockRunner;

+

+import org.onap.aaf.inno.env.Trans;

+

+@RunWith(PowerMockRunner.class)

+public class JU_CachedDAO {

+	CachedDAO cachedDAO;

+	@Mock

+	DAO daoMock;

+	@Mock

+	CIDAO<Trans> ciDAOMock; 

+	int segsize=1;

+	Object[ ] objs = new Object[2];

+	

+	@Before

+	public void setUp(){

+		objs[0] = "helo";

+		objs[1] = "polo";

+		cachedDAO = new CachedDAO(daoMock, ciDAOMock, segsize);

+	}

+		

+	@Test

+	public void testKeyFromObjs(){

+		String result = cachedDAO.keyFromObjs(objs);

+		System.out.println("value of resut " +result);

+		assertNotNull(result);

+	}

+	

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/JU_CassAccess.java b/authz-cass/src/test/java/org/onap/aaf/dao/JU_CassAccess.java
new file mode 100644
index 0000000..41443fb
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/JU_CassAccess.java
@@ -0,0 +1,74 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import static org.junit.Assert.*;

+

+import java.io.IOException;

+import java.util.ArrayList;

+import java.util.List;

+

+import org.junit.Before;

+import org.junit.Test;

+import org.junit.runner.RunWith;

+import org.mockito.Mock;

+import org.onap.aaf.dao.CassAccess;

+import org.powermock.modules.junit4.PowerMockRunner;

+

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.Env;

+//import org.onap.aaf.dao.CassAccess.Resettable;

+import com.datastax.driver.core.Cluster.Builder;

+

+@RunWith(PowerMockRunner.class)

+public class JU_CassAccess {

+	CassAccess cassAccess;

+	

+	public static final String KEYSPACE = "authz";

+	public static final String CASSANDRA_CLUSTERS = "cassandra.clusters";

+	public static final String CASSANDRA_CLUSTERS_PORT = "cassandra.clusters.port";

+	public static final String CASSANDRA_CLUSTERS_USER_NAME = "cassandra.clusters.user";

+	public static final String CASSANDRA_CLUSTERS_PASSWORD = "cassandra.clusters.password";

+	public static final String CASSANDRA_RESET_EXCEPTIONS = "cassandra.reset.exceptions";

+	public static final String LATITUDE = "LATITUDE";

+	public static final String LONGITUDE = "LONGITUDE";

+	//private static final List<Resettable> resetExceptions = new ArrayList<Resettable>();

+	public static final String ERR_ACCESS_MSG = "Accessing Backend";

+	private static Builder cb = null;

+	@Mock

+	Env envMock;

+	String prefix=null;

+	

+	@Before

+	public void setUp(){

+		cassAccess = new CassAccess();

+	}

+

+

+	@Test(expected=APIException.class)

+	public void testCluster() throws APIException, IOException {

+		cassAccess.cluster(envMock, prefix);

+		

+	}

+

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/JU_CassDAOImpl.java b/authz-cass/src/test/java/org/onap/aaf/dao/JU_CassDAOImpl.java
new file mode 100644
index 0000000..34106e2
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/JU_CassDAOImpl.java
@@ -0,0 +1,97 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import static org.junit.Assert.*;

+

+import org.junit.Before;

+import org.junit.Test;

+import org.junit.runner.RunWith;

+import org.mockito.Mock;

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.dao.CassDAOImpl;

+import org.onap.aaf.dao.Loader;

+import org.powermock.api.mockito.PowerMockito;

+import org.powermock.modules.junit4.PowerMockRunner;

+

+import org.onap.aaf.inno.env.Data;

+import org.onap.aaf.inno.env.Trans;

+import org.onap.aaf.inno.env.TransStore;

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.ConsistencyLevel;

+

+@RunWith(PowerMockRunner.class)

+public class JU_CassDAOImpl {

+

+public static final String CASS_READ_CONSISTENCY="cassandra.readConsistency";

+public static final String CASS_WRITE_CONSISTENCY="cassandra.writeConsistency";

+

+CassDAOImpl cassDAOImpl;

+

+

+@Mock

+TransStore transStoreMock;

+@SuppressWarnings("rawtypes")

+Class dcMock;

+@SuppressWarnings("rawtypes")

+Loader loaderMock;

+Cluster clusterMock;

+Class<Data> classDataMock;

+ConsistencyLevel consistencyLevelMock;

+Trans transMock;

+

+@Mock

+AuthzTrans authzTransMock;

+

+

+

+	@SuppressWarnings({ "rawtypes", "unchecked" })

+	@Before

+	public void setUp()

+	{

+		String name = "name";

+		String keySpace = "keySpace";

+		String table = "table";

+		cassDAOImpl = new CassDAOImpl(transStoreMock, name, clusterMock, keySpace, classDataMock, table, consistencyLevelMock, consistencyLevelMock);

+	}

+

+	

+	@Test 

+	public void testReadConsistency() {

+		String table = "users";

+		PowerMockito.when(authzTransMock.getProperty(CASS_READ_CONSISTENCY+'.'+table)).thenReturn("TWO");

+		ConsistencyLevel consistencyLevel = cassDAOImpl.readConsistency(authzTransMock, table);

+		System.out.println("Consistency level" + consistencyLevel.name());

+		assertEquals("TWO", consistencyLevel.name());

+	}

+	

+	@Test 

+	public void testWriteConsistency() {

+		String table = "users";

+		PowerMockito.when(authzTransMock.getProperty(CASS_WRITE_CONSISTENCY+'.'+table)).thenReturn(null);

+		ConsistencyLevel consistencyLevel = cassDAOImpl.writeConsistency(authzTransMock, table);

+		System.out.println("Consistency level" + consistencyLevel.name());

+		assertEquals("ONE", consistencyLevel.name());

+	}

+	

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/JU_DAOException.java b/authz-cass/src/test/java/org/onap/aaf/dao/JU_DAOException.java
new file mode 100644
index 0000000..4c3b11c
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/JU_DAOException.java
@@ -0,0 +1,50 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao;

+

+import static org.junit.Assert.*;

+

+import org.junit.Before;

+import org.junit.Test;

+import org.junit.runner.RunWith;

+import org.onap.aaf.dao.DAOException;

+import org.powermock.api.mockito.PowerMockito;

+import org.powermock.modules.junit4.PowerMockRunner;

+

+@RunWith(PowerMockRunner.class)

+public class JU_DAOException {

+DAOException daoException;

+

+	//DAOException daoException = new DAOException();

+	String message = "message";

+	Throwable cause;	

+	@Before

+	public void setUp(){

+	daoException = new DAOException();	

+	}

+

+	@Test

+	public void test(){

+		assertTrue(true);

+	}

+}
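
The placeholder assert above never exercises DAOException itself. A sketch of what the message/cause fields set up here could verify, assuming DAOException exposes the standard Exception(String, Throwable) constructor (an assumption, not confirmed by this diff):

    @Test
    public void testMessageAndCause() {
        Throwable cause = new RuntimeException("boom");
        // Assumes a (String, Throwable) constructor exists on DAOException.
        DAOException de = new DAOException(message, cause);
        assertEquals(message, de.getMessage());
        assertEquals(cause, de.getCause());
    }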

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/AbsJUCass.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/AbsJUCass.java
new file mode 100644
index 0000000..887f88b
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/AbsJUCass.java
@@ -0,0 +1,200 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import java.io.File;

+import java.io.FileInputStream;

+import java.io.IOException;

+import java.io.InputStream;

+import java.net.URL;

+import java.security.NoSuchAlgorithmException;

+import java.util.Properties;

+

+import org.junit.After;

+import org.junit.AfterClass;

+import org.junit.Before;

+import org.junit.BeforeClass;

+import org.onap.aaf.authz.env.AuthzEnv;

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.dao.CassAccess;

+import org.onap.aaf.dao.CassDAOImpl;

+

+import org.onap.aaf.cadi.Hash;

+import org.onap.aaf.cadi.Symm;

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.Env;

+import org.onap.aaf.inno.env.Trans.Metric;

+import com.datastax.driver.core.Cluster;

+

+import junit.framework.Assert;

+

+/**

+ * Common setup of a live Cassandra connection for the Cassandra JUnit tests.

+ * 

+ *

+ */

+public class AbsJUCass {

+	protected static final String AUTHZ = "authz";

+	protected static Cluster cluster;

+	protected static AuthzEnv env;

+	protected static int iterations = 0;

+	protected static float totals=0.0f;

+	protected static float remote = 0.0f;

+	protected static float json = 0.0f;

+	protected static AuthzTrans trans;

+	protected static boolean details = true;

+	

+	@BeforeClass 

+	public static void startup() throws APIException, IOException {

+		synchronized(AUTHZ) {

+			if(env==null) {

+				final String resource = "cadi.properties";

+	            File f = new File("etc/" + resource); // look in the etc dir first, per the failure message below

+	            InputStream is=null;

+	            Properties props = new Properties();

+	            try {

+	                if(f.exists()) {

+	                    is = new FileInputStream(f);

+	                } else {

+	                    URL rsrc = ClassLoader.getSystemResource(resource);

+	                    if(rsrc!=null) { // avoid an NPE on a missing classpath resource, so Assert.fail below reports it

+	                        is = rsrc.openStream();

+	                    }

+	                }

+	                props.load(is);

+	            } finally {

+	                if(is==null) {

+	                	env= new AuthzEnv();

+	                    Assert.fail(resource + " must exist in etc dir, or in Classpath");

+	                }

+	                is.close();

+	            }

+				env = new AuthzEnv(props);

+			}

+		}

+		cluster = CassAccess.cluster(env,"LOCAL");

+

+		env.info().log("Connecting to Cluster");

+		try {

+			cluster.connect(AUTHZ);

+		} catch(Exception e) {

+			cluster=null;

+			env.error().log(e);

+			Assert.fail("Not able to connect to DB: " + e.getLocalizedMessage());

+		}

+		env.info().log("Connected");

+		

+		// Load special data here

+		

+		// WebPhone

+		env.setProperty("java.naming.provider.url","ldap://ldap.webphone.att.com:389");

+		env.setProperty("com.sun.jndi.ldap.connect.pool","true");

+		

+		iterations = 0;

+		

+	}

+	

+	@AfterClass

+	public static void shutdown() {

+		if(cluster!=null) {

+			cluster.close();

+			cluster = null;

+		}

+	}

+

+	@Before

+	public void newTrans() {

+		trans = env.newTrans();

+		

+		trans.setProperty(CassDAOImpl.USER_NAME, System.getProperty("user.name"));

+	}

+	

+	@After

+	public void auditTrail() {

+		if(totals==0) { // "updateTotals()" was not called... just do one Trans

+			StringBuilder sb = new StringBuilder();

+			Metric metric = trans.auditTrail(4, sb, Env.JSON, Env.REMOTE);

+			if(details) {

+				env.info().log(

+				sb,

+				"Total time:",

+				totals += metric.total, // note: side effect inside the log varargs updates the running total

+				"JSON time: ",

+				metric.buckets[0],

+				"REMOTE time: ",

+				metric.buckets[1]

+				);

+			} else {

+				totals += metric.total;

+			}

+		}

+	}

+	

+	protected void updateTotals() {

+		Metric metric = trans.auditTrail(0, null, Env.JSON, Env.REMOTE);

+		totals+=metric.total;

+		json  +=metric.buckets[0];

+		remote+=metric.buckets[1];

+	}

+

+

+	@AfterClass

+	public static void print() {

+		float transTime;

+		if(iterations==0) {

+			transTime=totals;

+		} else {

+			transTime=totals/iterations;

+		}

+		env.info().log(

+		"Total time:",

+		totals,   

+		"JSON time:",

+		json,

+		"REMOTE time:",

+		remote,

+		"Iterations:",

+		iterations,

+		"Transaction time:",

+		transTime

+		);

+	}

+	

+	/**

+	 * Take a User/Pass and turn into an MD5 Hashed BasicAuth

+	 * 

+	 * @param user

+	 * @param pass

+	 * @return the MD5 hash of the Base64url-encoded "user:pass"

+	 * @throws IOException

+	 * @throws NoSuchAlgorithmException

+	 */

+	public static byte[] userPassToBytes(String user, String pass)

+			throws IOException, NoSuchAlgorithmException {

+		// Take the form of BasicAuth, so as to allow any character in Password

+		// (this is an issue in 1.0)

+		// Also, it makes it quicker to evaluate Basic Auth direct questions

+		String ba = Symm.base64url.encode(user + ':' + pass);

+		// Take MD5 Hash, so that data in DB can't be reversed out.

+		return Hash.encryptMD5(ba.getBytes());

+	}

+

+}
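
A minimal usage sketch for userPassToBytes, shaped the way the Cred tests below store it (assumes the cadi Symm and Hash classes imported above are on the classpath):

    static java.nio.ByteBuffer basicAuthCred()
            throws java.io.IOException, java.security.NoSuchAlgorithmException {
        // Hash a BasicAuth credential the way the Cred JUnits store it.
        byte[] cred = AbsJUCass.userPassToBytes("m55555", "mypass");
        // CredDAO.Data.cred expects a ByteBuffer wrapping the hash.
        return java.nio.ByteBuffer.wrap(cred);
    }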

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ApprovalDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ApprovalDAO.java
new file mode 100644
index 0000000..46720c3
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ApprovalDAO.java
@@ -0,0 +1,147 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+

+import static org.junit.Assert.assertEquals;

+import static org.junit.Assert.assertNotSame;

+import static org.junit.Assert.assertTrue;

+

+import java.util.Date;

+import java.util.List;

+import java.util.UUID;

+

+import org.junit.Test;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.ApprovalDAO;

+import org.onap.aaf.dao.aaf.cass.ApprovalDAO.Data;

+

+public class JU_ApprovalDAO  extends AbsJUCass {

+	@Test

+	public void testCRUD() throws Exception {

+		ApprovalDAO rrDAO = new ApprovalDAO(trans, cluster, AUTHZ);

+		ApprovalDAO.Data data = new ApprovalDAO.Data();

+		

+		data.ticket = UUID.randomUUID(); // normally, read from Future object

+		data.user = "testid@test.com";

+		data.approver = "mySuper@att.com";

+		data.type = "supervisor";

+		data.status = "pending";

+		data.operation = "C";

+		data.updated = new Date();

+		

+		try {

+			// Test create

+			rrDAO.create(trans, data);

+			

+			// Test Read by Ticket

+			Result<List<ApprovalDAO.Data>> rlad;

+			rlad = rrDAO.readByTicket(trans, data.ticket);

+			assertTrue(rlad.isOK());

+			assertEquals(1,rlad.value.size());

+			compare(data,rlad.value.get(0));

+			

+			// Hold onto original ID for deletion, and read tests

+			UUID id = rlad.value.get(0).id;

+			

+			try {

+				// Test Read by User

+				rlad = rrDAO.readByUser(trans, data.user);

+				assertTrue(rlad.isOKhasData());

+				boolean ok = false;

+				for(ApprovalDAO.Data a : rlad.value) {

+					if(a.id.equals(id)) {

+						ok = true;

+						compare(data,a);

+					}

+				}

+				assertTrue(ok);

+	

+				// Test Read by Approver

+				rlad = rrDAO.readByApprover(trans, data.approver);

+				assertTrue(rlad.isOKhasData());

+				ok = false;

+				for(ApprovalDAO.Data a : rlad.value) {

+					if(a.id.equals(id)) {

+						ok = true;

+						compare(data,a);

+					}

+				}

+				assertTrue(ok);

+	

+				// Test Read by ID

+				rlad = rrDAO.read(trans, id);

+				assertTrue(rlad.isOKhasData());

+				ok = false;

+				for(ApprovalDAO.Data a : rlad.value) {

+					if(a.id.equals(id)) {

+						ok = true;

+						compare(data,a);

+					}

+				}

+				assertTrue(ok);

+	

+				// Test Update

+				data.status = "approved";

+				data.id = id;

+				assertTrue(rrDAO.update(trans, data).isOK());

+				

+				rlad = rrDAO.read(trans, id);

+				assertTrue(rlad.isOKhasData());

+				ok = false;

+				for(ApprovalDAO.Data a : rlad.value) {

+					if(a.id.equals(id)) {

+						ok = true;

+						compare(data,a);

+					}

+				}

+				assertTrue(ok);

+

+			} finally {

+				// Delete

+				data.id = id;

+				rrDAO.delete(trans, data, true);

+				rlad = rrDAO.read(trans, id);

+				assertTrue(rlad.isOK());

+				assertTrue(rlad.isEmpty());

+			}

+			

+		} finally {

+			rrDAO.close(trans);

+		}

+	}

+

+	private void compare(Data d1, Data d2) {

+		assertNotSame(d1.id,d2.id);

+		assertEquals(d1.ticket,d2.ticket);

+		assertEquals(d1.user,d2.user);

+		assertEquals(d1.approver,d2.approver);

+		assertEquals(d1.type,d2.type);

+		assertEquals(d1.status,d2.status);

+		assertEquals(d1.operation,d2.operation);

+		assertNotSame(d1.updated,d2.updated);

+	}

+

+	

+	

+}
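
The three read-back loops above repeat the same scan-for-id-then-compare shape. A hypothetical private helper inside the test class that collapses them, for illustration only:

    // Hypothetical helper: returns the approval with the given id, or null;
    // callers would assert non-null and then call compare(data, found).
    private static ApprovalDAO.Data findById(List<ApprovalDAO.Data> list, UUID id) {
        for (ApprovalDAO.Data a : list) {
            if (a.id.equals(id)) {
                return a;
            }
        }
        return null;
    }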

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ArtiDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ArtiDAO.java
new file mode 100644
index 0000000..0c92dc7
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ArtiDAO.java
@@ -0,0 +1,137 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import static org.junit.Assert.assertEquals;

+import static org.junit.Assert.assertTrue;

+

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.security.NoSuchAlgorithmException;

+import java.util.Date;

+import java.util.List;

+

+import org.junit.Test;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.ArtiDAO;

+import org.onap.aaf.dao.aaf.cass.ArtiDAO.Data;

+

+/**

+ * ArtiDAO unit test.

+ * User: tp007s

+ * Date: 7/19/13

+ */

+public class JU_ArtiDAO  extends AbsJUCass {

+	@Test

+	public void test() throws IOException, NoSuchAlgorithmException {

+		ArtiDAO adao = new ArtiDAO(trans,cluster,"authz");

+		try {

+			// Create

+	        ArtiDAO.Data data = new ArtiDAO.Data();

+	        data.mechid="m55555@perturbed.att.com";

+	        data.machine="perturbed1232.att.com";

+	        data.type(false).add("file");

+	        data.type(false).add("jks");

+	        data.sponsor="Fred Flintstone";

+	        data.ca="devl";

+	        data.dir="/opt/app/aft/keys";

+	        data.appName="kumquat";

+	        data.os_user="aft";

+	        data.notify="email:myname@bogus.email.com";

+	        data.expires=new Date();

+	        

+//	        Bytification

+	        ByteBuffer bb = data.bytify();

+	        Data bdata = new ArtiDAO.Data();

+	        bdata.reconstitute(bb);

+	        checkData1(data, bdata);

+	        

+	        

+//	        DB work

+			adao.create(trans,data);

+			try {

+				// Validate Read with key fields in Data

+				Result<List<ArtiDAO.Data>> rlcd = adao.read(trans,data);

+				assertTrue(rlcd.isOKhasData());

+				for(ArtiDAO.Data d : rlcd.value) {

+					checkData1(data,d);

+				}

+	

+				// Validate Read with key fields in Data

+				rlcd = adao.read(trans,data.mechid, data.machine);

+				assertTrue(rlcd.isOKhasData());

+				for(ArtiDAO.Data d : rlcd.value) {

+					checkData1(data,d);

+				}

+	

+				// By Machine

+				rlcd = adao.readByMachine(trans,data.machine);

+				assertTrue(rlcd.isOKhasData());

+				for(ArtiDAO.Data d : rlcd.value) {

+					checkData1(data,d);

+				}

+				

+				// By MechID

+				rlcd = adao.readByMechID(trans,data.mechid);

+				assertTrue(rlcd.isOKhasData());

+				for(ArtiDAO.Data d : rlcd.value) {

+					checkData1(data,d);

+				}

+	

+				// Update

+				data.sponsor = "Wilma Flintstone";

+				adao.update(trans,data);

+				rlcd = adao.read(trans,data);

+				assertTrue(rlcd.isOKhasData());

+				for(ArtiDAO.Data d : rlcd.value) {

+					checkData1(data,d);

+				}			

+

+			} finally {

+				// Always delete data, even if failure.

+				adao.delete(trans,data, true);

+			}

+		} finally {

+			adao.close(trans);

+		}

+

+		

+	}

+

+	private void checkData1(Data data, Data d) {

+		assertEquals(data.mechid,d.mechid);

+		assertEquals(data.machine,d.machine);

+		assertEquals(data.type(false).size(),d.type(false).size());

+		for(String s: data.type(false)) {

+			assertTrue(d.type(false).contains(s));

+		}

+		assertEquals(data.sponsor,d.sponsor);

+		assertEquals(data.ca,d.ca);

+		assertEquals(data.dir,d.dir);

+		assertEquals(data.appName,d.appName);

+		assertEquals(data.os_user,d.os_user);

+		assertEquals(data.notify,d.notify);

+		assertEquals(data.expires,d.expires);

+	}

+

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_Bytification.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_Bytification.java
new file mode 100644
index 0000000..65efef4
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_Bytification.java
@@ -0,0 +1,266 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import static org.junit.Assert.assertEquals;

+import static org.junit.Assert.assertTrue;

+

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.Date;

+

+import org.junit.Test;

+import org.onap.aaf.dao.aaf.cass.CredDAO;

+import org.onap.aaf.dao.aaf.cass.NsDAO;

+import org.onap.aaf.dao.aaf.cass.NsType;

+import org.onap.aaf.dao.aaf.cass.PermDAO;

+import org.onap.aaf.dao.aaf.cass.RoleDAO;

+import org.onap.aaf.dao.aaf.cass.UserRoleDAO;

+

+public class JU_Bytification {

+

+	@Test

+	public void testNS() throws IOException {

+		

+		// Normal

+		NsDAO.Data ns = new NsDAO.Data();

+		ns.name = "com.att.<pass>";

+		ns.type = NsType.APP.type;

+

+		ByteBuffer bb = ns.bytify();

+		

+		NsDAO.Data nsr = new NsDAO.Data();

+		nsr.reconstitute(bb);

+		check(ns,nsr);

+		

+		// Empty admin

+//		ns.admin(true).clear();

+		bb = ns.bytify();

+		nsr = new NsDAO.Data();

+		nsr.reconstitute(bb);

+		check(ns,nsr);

+		

+		// Empty responsible

+//		ns.responsible(true).clear();

+		bb = ns.bytify();

+		nsr = new NsDAO.Data();

+		nsr.reconstitute(bb);

+		check(ns,nsr);

+

+		bb = ns.bytify();

+		nsr = new NsDAO.Data();

+		nsr.reconstitute(bb);

+		check(ns,nsr);

+	}

+	

+	private void check(NsDAO.Data a, NsDAO.Data b) {

+		assertEquals(a.name,b.name);

+		assertEquals(a.type,b.type);

+//		assertEquals(a.admin.size(),b.admin.size());

+		

+//		for(String s: a.admin) {

+//			assertTrue(b.admin.contains(s));

+//		}

+//		

+//		assertEquals(a.responsible.size(),b.responsible.size());

+//		for(String s: a.responsible) {

+//			assertTrue(b.responsible.contains(s));

+//		}

+	}

+

+	@Test

+	public void testRole() throws IOException {

+		RoleDAO.Data rd1 = new RoleDAO.Data();

+		rd1.ns = "com.att.<pass>";

+		rd1.name = "my.role";

+		rd1.perms(true).add("com.att.<pass>.my.Perm|myInstance|myAction");

+		rd1.perms(true).add("com.att.<pass>.my.Perm|myInstance|myAction2");

+

+		// Normal

+		ByteBuffer bb = rd1.bytify();

+		RoleDAO.Data rd2 = new RoleDAO.Data();

+		rd2.reconstitute(bb);

+		check(rd1,rd2);

+		

+		// Overshoot Buffer

+		StringBuilder sb = new StringBuilder(300);

+		sb.append("role|instance|veryLongAction...");

+		for(int i=0;i<280;++i) {

+			sb.append('a');

+		}

+		rd1.perms(true).add(sb.toString());

+		bb = rd1.bytify();

+		rd2 = new RoleDAO.Data();

+		rd2.reconstitute(bb);

+		check(rd1,rd2);

+		

+		// No Perms

+		rd1.perms.clear();

+		

+		bb = rd1.bytify();

+		rd2 = new RoleDAO.Data();

+		rd2.reconstitute(bb);

+		check(rd1,rd2);

+		

+		// 1000 Perms

+		for(int i=0;i<1000;++i) {

+			rd1.perms(true).add("com|inst|action"+ i);

+		}

+

+		bb = rd1.bytify();

+		rd2 = new RoleDAO.Data();

+		rd2.reconstitute(bb);

+		check(rd1,rd2);

+

+	}

+	

+	private void check(RoleDAO.Data a, RoleDAO.Data b) {

+		assertEquals(a.ns,b.ns);

+		assertEquals(a.name,b.name);

+		

+		assertEquals(a.perms.size(),b.perms.size());

+		for(String s: a.perms) {

+			assertTrue(b.perms.contains(s));

+		}

+	}

+

+	@Test

+	public void testPerm() throws IOException {

+		PermDAO.Data pd1 = new PermDAO.Data();

+		pd1.ns = "com.att.<pass>";

+		pd1.type = "my.perm";

+		pd1.instance = "instance";

+		pd1.action = "read";

+		pd1.roles(true).add("com.att.<pass>.my.Role");

+		pd1.roles(true).add("com.att.<pass>.my.Role2");

+

+		// Normal

+		ByteBuffer bb = pd1.bytify();

+		PermDAO.Data rd2 = new PermDAO.Data();

+		rd2.reconstitute(bb);

+		check(pd1,rd2);

+		

+		// No Perms

+		pd1.roles.clear();

+		

+		bb = pd1.bytify();

+		rd2 = new PermDAO.Data();

+		rd2.reconstitute(bb);

+		check(pd1,rd2);

+		

+		// 1000 Perms

+		for(int i=0;i<1000;++i) {

+			pd1.roles(true).add("com.att.<pass>.my.Role"+ i);

+		}

+

+		bb = pd1.bytify();

+		rd2 = new PermDAO.Data();

+		rd2.reconstitute(bb);

+		check(pd1,rd2);

+

+	}

+	

+	private void check(PermDAO.Data a, PermDAO.Data b) {

+		assertEquals(a.ns,b.ns);

+		assertEquals(a.type,b.type);

+		assertEquals(a.instance,b.instance);

+		assertEquals(a.action,b.action);

+		

+		assertEquals(a.roles.size(),b.roles.size());

+		for(String s: a.roles) {

+			assertTrue(b.roles.contains(s));

+		}

+	}

+

+	@Test

+	public void testUserRole() throws IOException {

+		UserRoleDAO.Data urd1 = new UserRoleDAO.Data();

+		urd1.user = "myname@abc.att.com";

+		urd1.role("com.att.<pass>","my.role");

+		urd1.expires = new Date();

+

+		// Normal

+		ByteBuffer bb = urd1.bytify();

+		UserRoleDAO.Data urd2 = new UserRoleDAO.Data();

+		urd2.reconstitute(bb);

+		check(urd1,urd2);

+		

+		// A null

+		urd1.expires = null; 

+		urd1.role = null;

+		

+		bb = urd1.bytify();

+		urd2 = new UserRoleDAO.Data();

+		urd2.reconstitute(bb);

+		check(urd1,urd2);

+	}

+

+	private void check(UserRoleDAO.Data a, UserRoleDAO.Data b) {

+		assertEquals(a.user,b.user);

+		assertEquals(a.role,b.role);

+		assertEquals(a.expires,b.expires);

+	}

+

+	

+	@Test

+	public void testCred() throws IOException {

+		CredDAO.Data cd = new CredDAO.Data();

+		cd.id = "m55555@abc.att.com";

+		cd.ns = "com.att.abc";

+		cd.type = 2;

+		cd.cred = ByteBuffer.wrap(new byte[]{1,34,5,3,25,0,2,5,3,4});

+		cd.expires = new Date();

+

+		// Normal

+		ByteBuffer bb = cd.bytify();

+		CredDAO.Data cd2 = new CredDAO.Data();

+		cd2.reconstitute(bb);

+		check(cd,cd2);

+		

+		// nulls

+		cd.expires = null;

+		cd.cred = null;

+		

+		bb = cd.bytify();

+		cd2 = new CredDAO.Data();

+		cd2.reconstitute(bb);

+		check(cd,cd2);

+

+	}

+

+	private void check(CredDAO.Data a, CredDAO.Data b) {

+		assertEquals(a.id,b.id);

+		assertEquals(a.ns,b.ns);

+		assertEquals(a.type,b.type);

+		if(a.cred==null) {

+			assertEquals(a.cred,b.cred); 

+		} else {

+			int l = a.cred.limit();

+			assertEquals(l,b.cred.limit());

+			for (int i=0;i<l;++i) {

+				assertEquals(a.cred.get(),b.cred.get());

+			}

+		}

+	}

+

+}
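
Every case above follows the same round-trip: bytify() to a ByteBuffer, reconstitute() into a fresh instance, then a field-by-field check. The Data classes here declare those methods individually rather than via a shared interface, so the generic helper below is hypothetical -- it only names the pattern:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.util.function.Supplier;

    final class RoundTrip {
        // Hypothetical common shape for the bytify/reconstitute pair.
        interface Bytifiable {
            ByteBuffer bytify() throws IOException;
            void reconstitute(ByteBuffer bb) throws IOException;
        }

        // Serialize src, deserialize into a fresh instance, return it for comparison.
        static <T extends Bytifiable> T roundTrip(T src, Supplier<T> fresh) throws IOException {
            T copy = fresh.get();
            copy.reconstitute(src.bytify());
            return copy; // caller runs its check(src, copy)
        }
    }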

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CacheInfoDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CacheInfoDAO.java
new file mode 100644
index 0000000..a2e96f2
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CacheInfoDAO.java
@@ -0,0 +1,65 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import java.io.IOException;

+import java.util.Date;

+

+import org.junit.Test;

+import org.onap.aaf.authz.env.AuthzTrans;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.CIDAO;

+import org.onap.aaf.dao.DAOException;

+import org.onap.aaf.dao.aaf.cass.CacheInfoDAO;

+import org.onap.aaf.dao.aaf.cass.RoleDAO;

+import org.onap.aaf.dao.aaf.cass.Status;

+

+import org.onap.aaf.inno.env.APIException;

+import org.onap.aaf.inno.env.util.Chrono;

+

+import junit.framework.Assert;

+

+

+public class JU_CacheInfoDAO extends AbsJUCass {

+

+	@Test

+	public void test() throws DAOException, APIException, IOException {

+		CIDAO<AuthzTrans> id = new CacheInfoDAO(trans, cluster, AUTHZ);

+		Date date  = new Date();

+		

+		id.touch(trans, RoleDAO.TABLE,1);

+		try {

+			Thread.sleep(3000);

+		} catch (InterruptedException e) {

+		}

+		Result<Void> rid = id.check(trans);

+		Assert.assertEquals(rid.status,Status.OK);

+		Date[] dates = CacheInfoDAO.info.get(RoleDAO.TABLE);

+		if(dates.length>1 && dates[1]!=null) { // length>1 required before reading dates[1]

+			System.out.println(Chrono.dateStamp(dates[1]));

+			System.out.println(Chrono.dateStamp(date));

+			Assert.assertTrue(Math.abs(dates[1].getTime() - date.getTime())<20000); // allow up to 20 seconds, given Remote DB

+		}

+	}

+

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CertDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CertDAO.java
new file mode 100644
index 0000000..498f8ce
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CertDAO.java
@@ -0,0 +1,105 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import static org.junit.Assert.assertEquals;

+import static org.junit.Assert.assertTrue;

+

+import java.io.IOException;

+import java.math.BigInteger;

+import java.nio.ByteBuffer;

+import java.security.NoSuchAlgorithmException;

+import java.util.List;

+

+import org.junit.Test;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.CertDAO;

+import org.onap.aaf.dao.aaf.cass.CertDAO.Data;

+

+import org.onap.aaf.inno.env.APIException;

+

+/**

+ * CertDAO unit test.

+ * User: tp007s

+ * Date: 7/19/13

+ */

+public class JU_CertDAO  extends AbsJUCass {

+	@Test

+	public void test() throws IOException, NoSuchAlgorithmException, APIException {

+		CertDAO cdao = new CertDAO(trans,cluster,"authz");

+		try {

+			// Create

+	        CertDAO.Data data = new CertDAO.Data();

+	        data.serial=new BigInteger("11839383");

+	        data.id = "m55555@tguard.att.com";

+	        data.x500="CN=ju_cert.dao.att.com, OU=AAF, O=\"ATT Services, Inc.\", L=Southfield, ST=Michigan, C=US";

+	        data.x509="I'm a cert";

+	        data.ca = "aaf";

+			cdao.create(trans,data);

+

+//	        Bytification

+	        ByteBuffer bb = data.bytify();

+	        Data bdata = new CertDAO.Data();

+	        bdata.reconstitute(bb);

+	        checkData1(data, bdata);

+

+			// Validate Read with key fields in Data

+			Result<List<CertDAO.Data>> rlcd = cdao.read(trans,data);

+			assertTrue(rlcd.isOKhasData());

+			for(CertDAO.Data d : rlcd.value) {

+				checkData1(data,d);

+			}

+

+			// Validate Read with key fields in Data

+			rlcd = cdao.read(trans,data.ca,data.serial);

+			assertTrue(rlcd.isOKhasData());

+			for(CertDAO.Data d : rlcd.value) {

+				checkData1(data,d);

+			}

+

+			// Update

+			data.id = "m66666.tguard.att.com";

+			cdao.update(trans,data);

+			rlcd = cdao.read(trans,data);

+			assertTrue(rlcd.isOKhasData());

+			for(CertDAO.Data d : rlcd.value) {

+				checkData1(data,d);

+			}			

+			

+			cdao.delete(trans,data, true);

+		} finally {

+			cdao.close(trans);

+		}

+

+		

+	}

+

+	private void checkData1(Data data, Data d) {

+		assertEquals(data.ca,d.ca);

+		assertEquals(data.serial,d.serial);

+		assertEquals(data.id,d.id);

+		assertEquals(data.x500,d.x500);

+		assertEquals(data.x509,d.x509);

+	}

+

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CredDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CredDAO.java
new file mode 100644
index 0000000..3cf860a
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CredDAO.java
@@ -0,0 +1,252 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import static org.junit.Assert.assertEquals;

+import static org.junit.Assert.assertTrue;

+

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.security.NoSuchAlgorithmException;

+import java.util.Date;

+import java.util.List;

+

+import org.junit.Test;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.CredDAO;

+import org.onap.aaf.dao.aaf.cass.CredDAO.Data;

+

+import org.onap.aaf.inno.env.APIException;

+

+/**

+ * CredDAO unit test.

+ * User: tp007s

+ * Date: 7/19/13

+ */

+public class JU_CredDAO  extends AbsJUCass {

+	@Test

+	public void test() throws IOException, NoSuchAlgorithmException, APIException {

+		CredDAO udao = new CredDAO(trans,cluster,"authz");

+		try {

+			// Create

+	        CredDAO.Data data = new CredDAO.Data();

+	        data.id = "m55555@aaf.att.com";

+	        data.type = CredDAO.BASIC_AUTH;

+	        data.notes = "temp pass";

+	        data.cred      = ByteBuffer.wrap(userPassToBytes("m55555","mypass"));

+	        data.other = 12;

+	        data.expires = new Date(System.currentTimeMillis() + 60000L*60*24*90); // 90 days; long literal avoids int overflow

+			udao.create(trans,data);

+			

+//	        Bytification

+	        ByteBuffer bb = data.bytify();

+	        Data bdata = new CredDAO.Data();

+	        bdata.reconstitute(bb);

+	        checkData1(data, bdata);

+

+			// Validate Read with key fields in Data

+			Result<List<CredDAO.Data>> rlcd = udao.read(trans,data);

+			assertTrue(rlcd.isOKhasData());

+			for(CredDAO.Data d : rlcd.value) {

+				checkData1(data,d);

+			}

+			

+			// Update

+			data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mynewpass"));

+			udao.update(trans,data);

+			rlcd = udao.read(trans,data);

+			assertTrue(rlcd.isOKhasData());

+			for(CredDAO.Data d : rlcd.value) {

+				checkData1(data,d);

+			}			

+			

+			udao.delete(trans,data, true);

+		} finally {

+			udao.close(trans);

+		}

+

+		

+	}

+

+	private void checkData1(Data data, Data d) {

+		assertEquals(data.id,d.id);

+		assertEquals(data.type,d.type);

+		assertEquals(data.ns,d.ns);

+		assertEquals(data.notes,d.notes);

+		assertEquals(data.cred,d.cred);

+		assertEquals(data.other,d.other);

+		assertEquals(data.expires,d.expires);

+	}

+

+//    private String                          CONST_myName = "MyName";

+//    public static final java.nio.ByteBuffer CONST_MY_CRED = get_CONST_MY_CRED();

+//    public static final int                 CONST_CRED_TYPE = 11;

+//

+//    public static final Date                CONST_UPDATE_DATE = new Date(System.currentTimeMillis()+60000*24);

+//    @Test

+//    public void test() {

+//        UserDAO ud = new UserDAO(trans, cluster,"authz");

+//        try {

+//            UserDAO.Data data = createPrototypeUserData();

+//            ud.create(trans, data);

+//

+//            // Validate Read with key fields in Data

+//            for(UserDAO.Data d : ud.read(trans, data)) {

+//                checkData1(data,d);

+//            }

+//

+//            // Validate readByName

+//            for(UserDAO.Data d : ud.read(trans, CONST_myName)) {

+//                checkData1(data,d);

+//            }

+//

+//            ud.delete(trans, data);

+//            List<UserDAO.Data> d_2 = ud.read(trans, CONST_myName);

+//

+//            // Validate that data was deleted

+//            assertEquals("User should not be found after deleted", 0, d_2.size() );

+//

+//            data = new UserDAO.Data();

+//            data.name = CONST_myName;

+//            data.cred = CONST_MY_CRED;

+//            data.cred_type= CONST_CRED_TYPE;

+//            data.expires = new Date(System.currentTimeMillis()+60000*24);

+//            final Result<UserDAO.Data> user = ud.r_create(trans, data);

+//            assertEquals("ud.createUser should work", Result.Status.OK, user.status);

+//

+//            checkDataIgnoreDateDiff(data, user.value);

+//

+//            // finally leave system in consistent state by deleting user again

+//            ud.delete(trans,data);

+//

+//        } catch (DAOException e) {

+//            e.printStackTrace();

+//            fail("Fail due to Exception");

+//        } finally {

+//            ud.close(trans);

+//        }

+//    }

+//

+//    private UserDAO.Data createPrototypeUserData() {

+//        UserDAO.Data data = new UserDAO.Data();

+//        data.name = CONST_myName;

+//

+//        data.cred_type = CONST_CRED_TYPE;

+//        data.cred      = CONST_MY_CRED;

+//        data.expires = CONST_UPDATE_DATE;

+//        return data;

+//    }

+//

+//    //    @Test

+//    //    public void testReadByUser() throws Exception {

+//    //           // this test was done above in our super test, since it uses the same setup

+//    //    }

+//

+//    @Test

+//    public void testFunctionCreateUser() throws Exception {

+//        String name = "roger_rabbit";

+//        Integer credType = CONST_CRED_TYPE;

+//        java.nio.ByteBuffer cred = CONST_MY_CRED;

+//        final UserDAO ud = new UserDAO(trans, cluster,"authz");

+//        final UserDAO.Data data = createPrototypeUserData();

+//        Result<UserDAO.Data> ret = ud.r_create(trans, data);

+//        Result<List<Data>> byUserNameLookup = ud.r_read(trans, name);

+//        

+//        assertEquals("sanity test w/ different username (different than other test cases) failed", name, byUserNameLookup.value.get(0).name);

+//        assertEquals("delete roger_rabbit failed", true, ud.delete(trans, byUserNameLookup.value.get(0)));

+//    }

+//

+//    @Test

+//    public void testLowLevelCassandraCreateData_Given_UserAlreadyPresent_ShouldPass() throws Exception {

+//        UserDAO ud = new UserDAO(trans, cluster,"authz");

+//

+//        final UserDAO.Data data = createPrototypeUserData();

+//        final UserDAO.Data data1 = ud.create(trans, data);

+//        final UserDAO.Data data2 = ud.create(trans, data);

+//

+//        assertNotNull(data1);

+//        assertNotNull(data2);

+//

+//        assertEquals(CONST_myName, data1.name);

+//        assertEquals(CONST_myName, data2.name);

+//    }

+//

+//    @Test

+//    public void testCreateUser_Given_UserAlreadyPresent_ShouldFail() throws Exception {

+//        UserDAO ud = new UserDAO(trans, cluster,"authz");

+//

+//        final UserDAO.Data data = createPrototypeUserData();

+//

+//        // make sure that some prev test did not leave the user in the DB

+//        ud.delete(trans, data);

+//

+//        // attempt to create same user twice !!!

+//        

+//        final Result<UserDAO.Data> data1 = ud.r_create(trans, data);

+//        final Result<UserDAO.Data> data2 = ud.r_create(trans, data);

+//

+//        assertNotNull(data1);

+//        assertNotNull(data2);

+//

+//        assertEquals(true,   Result.Status.OK == data1.status);

+//        assertEquals(false,  Result.Status.OK == data2.status);

+//    }

+//

+//    private void checkData1(UserDAO.Data data, UserDAO.Data d) {

+//        data.name = CONST_myName;

+//

+//        data.cred_type = CONST_CRED_TYPE;

+//        data.cred      = CONST_MY_CRED;

+//        data.expires   = CONST_UPDATE_DATE;

+//

+//        assertEquals(data.name, d.name);

+//        assertEquals(data.cred_type, d.cred_type);

+//        assertEquals(data.cred, d.cred);

+//        assertEquals(data.expires, d.expires);

+//

+//    }

+//

+//    private void checkDataIgnoreDateDiff(UserDAO.Data data, UserDAO.Data d) {

+//        data.name = CONST_myName;

+//

+//        data.cred_type = CONST_CRED_TYPE;

+//        data.cred      = CONST_MY_CRED;

+//        data.expires   = CONST_UPDATE_DATE;

+//

+//        assertEquals(data.name, d.name);

+//        assertEquals(data.cred_type, d.cred_type);

+//        assertEquals(data.cred, d.cred);

+//         // we allow dates to be different, e.g. high level calls e.g. createUser sets the date itself.

+//        //assertEquals(data.updated, d.updated);

+//

+//    }

+//

+//    /**

+//     * Get a CONST_MY_CRED ByteBuffer, which is the java type for a cass blob.

+//     * @return

+//     */

+//    private static java.nio.ByteBuffer get_CONST_MY_CRED() {

+//     return ByteBuffer.wrap("Hello".getBytes());

+//    }

+//

+}
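
On the 90-day expiry arithmetic above: without the long literal, 60000*60*24*90 is evaluated in int and overflows to a negative number before widening, putting the expiry in the past. A sketch of the safer TimeUnit idiom:

    import java.util.Date;
    import java.util.concurrent.TimeUnit;

    final class Expiry {
        // TimeUnit.toMillis does the arithmetic in long, so no overflow at 90 days.
        static Date inDays(int days) {
            return new Date(System.currentTimeMillis() + TimeUnit.DAYS.toMillis(days));
        }
    }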

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_DelegateDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_DelegateDAO.java
new file mode 100644
index 0000000..d93ec39
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_DelegateDAO.java
@@ -0,0 +1,107 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+

+import static org.junit.Assert.assertEquals;

+import static org.junit.Assert.assertTrue;

+

+import java.nio.ByteBuffer;

+import java.util.Date;

+import java.util.List;

+

+import org.junit.Test;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.DelegateDAO;

+import org.onap.aaf.dao.aaf.cass.DelegateDAO.Data;

+

+

+public class JU_DelegateDAO  extends AbsJUCass {

+	@Test

+	public void testCRUD() throws Exception {

+		DelegateDAO dao = new DelegateDAO(trans, cluster, AUTHZ);

+		DelegateDAO.Data data = new DelegateDAO.Data();

+		data.user = "myname";

+		data.delegate = "yourname";

+		data.expires = new Date();

+		

+//        Bytification

+        ByteBuffer bb = data.bytify();

+        Data bdata = new DelegateDAO.Data();

+        bdata.reconstitute(bb);

+        compare(data, bdata);

+

+		try {

+			// Test create

+			Result<Data> ddcr = dao.create(trans,data);

+			assertTrue(ddcr.isOK());

+			

+			

+			// Read by User

+			Result<List<DelegateDAO.Data>> records = dao.read(trans,data.user);

+			assertTrue(records.isOKhasData());

+			for(DelegateDAO.Data rdata : records.value) 

+				compare(data,rdata);

+

+			// Read by Delegate

+			records = dao.readByDelegate(trans,data.delegate);

+			assertTrue(records.isOKhasData());

+			for(DelegateDAO.Data rdata : records.value) 

+				compare(data,rdata);

+			

+			// Update

+			data.delegate = "hisname";

+			data.expires = new Date();

+			assertTrue(dao.update(trans, data).isOK());

+

+			// Read by User

+			records = dao.read(trans,data.user);

+			assertTrue(records.isOKhasData());

+			for(DelegateDAO.Data rdata : records.value) 

+				compare(data,rdata);

+

+			// Read by Delegate

+			records = dao.readByDelegate(trans,data.delegate);

+			assertTrue(records.isOKhasData());

+			for(DelegateDAO.Data rdata : records.value) 

+				compare(data,rdata);

+

+			// Test delete

+			dao.delete(trans,data, true);

+			records = dao.read(trans,data.user);

+			assertTrue(records.isEmpty());

+			

+			

+		} finally {

+			dao.close(trans);

+		}

+	}

+	

+	private void compare(Data d1, Data d2) {

+		assertEquals(d1.user, d2.user);

+		assertEquals(d1.delegate, d2.delegate);

+		assertEquals(d1.expires,d2.expires);

+	}

+

+

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_FastCalling.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_FastCalling.java
new file mode 100644
index 0000000..9b0fa2e
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_FastCalling.java
@@ -0,0 +1,91 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import static org.junit.Assert.assertEquals;

+import static org.junit.Assert.assertTrue;

+

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.security.NoSuchAlgorithmException;

+import java.util.Date;

+import java.util.List;

+

+import org.junit.Test;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.CredDAO;

+import org.onap.aaf.dao.aaf.cass.CredDAO.Data;

+

+import org.onap.aaf.inno.env.APIException;

+

+public class JU_FastCalling extends AbsJUCass {

+

+	@Test

+	public void test() throws IOException, NoSuchAlgorithmException, APIException {

+		trans.setProperty("cassandra.writeConsistency.cred","ONE");

+		

+		CredDAO udao = new CredDAO(env.newTransNoAvg(),cluster,"authz");

+		System.out.println("Starting calls");

+		for(iterations=0;iterations<8;++iterations) {

+			try {

+				// Create

+		        CredDAO.Data data = new CredDAO.Data();

+		        data.id = "m55555@aaf.att.com";

+		        data.type = CredDAO.BASIC_AUTH;

+		        data.cred      = ByteBuffer.wrap(userPassToBytes("m55555","mypass"));

+		        data.expires = new Date(System.currentTimeMillis() + 60000L*60*24*90); // 90 days; long literal avoids int overflow

+				udao.create(trans,data);

+				

+				// Validate Read with key fields in Data

+				Result<List<CredDAO.Data>> rlcd = udao.read(trans,data);

+				assertTrue(rlcd.isOKhasData());

+				for(CredDAO.Data d : rlcd.value) {

+					checkData1(data,d);

+				}

+				

+				// Update

+				data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mynewpass"));

+				udao.update(trans,data);

+				rlcd = udao.read(trans,data);

+				assertTrue(rlcd.isOKhasData());

+				for(CredDAO.Data d : rlcd.value) {

+					checkData1(data,d);

+				}			

+				

+				udao.delete(trans,data, true);

+			} finally {

+				updateTotals();

+				newTrans();

+			}

+		}

+

+	}

+

+	private void checkData1(Data data, Data d) {

+		assertEquals(data.id,d.id);

+		assertEquals(data.type,d.type);

+		assertEquals(data.cred,d.cred);

+		assertEquals(data.expires,d.expires);

+	}

+

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_HistoryDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_HistoryDAO.java
new file mode 100644
index 0000000..29ce5d4
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_HistoryDAO.java
@@ -0,0 +1,154 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import static org.junit.Assert.assertEquals;

+import static org.junit.Assert.assertNotNull;

+import static org.junit.Assert.assertTrue;

+

+import java.nio.ByteBuffer;

+import java.util.List;

+import java.util.Random;

+

+import org.junit.Test;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.HistoryDAO;

+

+public class JU_HistoryDAO  extends AbsJUCass {

+	

+	@Test

+	public void testCreate() throws Exception {

+		HistoryDAO historyDAO = new HistoryDAO(trans, cluster, AUTHZ);

+		HistoryDAO.Data data = createHistoryData();

+		

+		try {

+			historyDAO.create(trans,data);			

+			Thread.sleep(200);// History Create is Async

+			Result<List<HistoryDAO.Data>> records = historyDAO.readByUser(trans,data.user,data.yr_mon);

+			assertTrue(records.isOKhasData());

+			for(HistoryDAO.Data d : records.value) {

+				assertHistory(data, d);

+			}

+		} finally {

+			historyDAO.close(trans);

+		}

+	}

+	

+	@Test

+	public void testReadByUser() throws Exception {

+		HistoryDAO historyDAO = new HistoryDAO(trans,cluster, AUTHZ);

+		HistoryDAO.Data data = createHistoryData();

+		

+		try {

+			historyDAO.create(trans,data);

+			Thread.sleep(200);// History Create is Async

+			Result<List<HistoryDAO.Data>> records = historyDAO.readByUser(trans, data.user,data.yr_mon);

+			assertTrue(records.isOKhasData());

+			for(HistoryDAO.Data d : records.value) {

+				assertHistory(data, d);

+			}

+		} finally {

+			historyDAO.close(trans);

+		}

+	}

+	

+/*

+	@Test

+	public void readByUserAndMonth() throws Exception {

+		HistoryDAO historyDAO = new HistoryDAO(trans,cluster, AUTHZ);

+		HistoryDAO.Data data = createHistoryData();

+		

+		try {

+			historyDAO.create(trans,data);			

+			Thread.sleep(200);// History Create is Async

+			Result<List<HistoryDAO.Data>> records = historyDAO.readByUserAndMonth(trans,

+					data.user, Integer.valueOf(String.valueOf(data.yr_mon).substring(0, 4)),

+					Integer.valueOf(String.valueOf(data.yr_mon).substring(4, 6)));

+			assertTrue(records.isOKhasData());

+			for(HistoryDAO.Data d : records.value) {

+				assertHistory(data, d);

+			}

+		} finally {

+			historyDAO.close(trans);

+		}

+	}

+*/	

+	//TODO re-add this

+//	@Test

+//	public void readByUserAndDay() throws Exception {

+//		HistoryDAO historyDAO = new HistoryDAO(trans, cluster, AUTHZ);

+//		HistoryDAO.Data data = createHistoryData();

+//		

+//		try {

+//			historyDAO.create(trans, data);		

+//			Thread.sleep(200);// History Create is Async

+//			

+//			String dayTime = String.valueOf(data.day_time);

+//			String day = null;

+//			if (dayTime.length() < 8)

+//				day = dayTime.substring(0, 1);

+//			else 

+//				day = dayTime.substring(0, 2);

+//			

+//			List<HistoryDAO.Data> records = historyDAO.readByUserBetweenDates(trans,

+//							data.user, Integer.valueOf(String.valueOf(data.yr_mon).substring(0, 4)),

+//							Integer.valueOf(String.valueOf(data.yr_mon).substring(4, 6)),

+//							Integer.valueOf(day), 0);

+//			assertEquals(1,records.size());

+//			for(HistoryDAO.Data d : records) {

+//				assertHistory(data, d);

+//			}

+//		} finally {

+//			historyDAO.close(trans);

+//		}

+//	}

+	private HistoryDAO.Data createHistoryData() {

+		HistoryDAO.Data data = HistoryDAO.newInitedData();

+		Random random = new Random();

+		data.user = "test" + random.nextInt();

+		data.action = "add";

+		data.target = "history";

+		data.memo = "adding a row into history table";

+//		data.detail().put("id", "test");

+//		data.detail().put("name", "test");

+		//String temp = "Test Blob Message";

+		data.reconstruct = ByteBuffer.wrap("Temp Blob Message".getBytes());		

+		return data;

+	}

+	

+	private void assertHistory(HistoryDAO.Data ip, HistoryDAO.Data op) {

+		assertEquals(ip.yr_mon, op.yr_mon);		

+//		assertEquals(ip.day_time, op.day_time);		

+		assertEquals(ip.user, op.user);		

+		assertEquals(ip.action, op.action);

+		assertEquals(ip.target, op.target);

+		assertEquals(ip.memo, op.memo);

+		//TODO : have to see if third party assert utility can be used

+//		assertTrue(CollectionUtils.isEqualCollection(ip.detail, op.detail));

+//		for (String key : ip.detail().keySet()) {

+//			assertNotNull(op.detail().get(key));

+//		}

+		assertNotNull(op.reconstruct);

+	}

+	

+}
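
The fixed Thread.sleep(200) above is timing-sensitive against the async history write; a poll-until-visible helper is a common alternative. A sketch, assuming any boolean-valued condition:

    import java.util.function.Supplier;

    final class Await {
        // Poll the condition until it holds or the timeout elapses; true if it held.
        static boolean until(Supplier<Boolean> condition, long timeoutMs, long intervalMs)
                throws InterruptedException {
            long deadline = System.currentTimeMillis() + timeoutMs;
            while (System.currentTimeMillis() < deadline) {
                if (condition.get()) {
                    return true;
                }
                Thread.sleep(intervalMs);
            }
            return condition.get();
        }
    }

For instance, Await.until(() -> historyDAO.readByUser(trans, data.user, data.yr_mon).isOKhasData(), 2000, 100) would replace the sleep-then-read sequence.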

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsDAO.java
new file mode 100644
index 0000000..ad9ed28
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsDAO.java
@@ -0,0 +1,187 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import static org.junit.Assert.assertEquals;

+import static org.junit.Assert.assertFalse;

+import static org.junit.Assert.assertTrue;

+

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.HashMap;

+import java.util.List;

+import java.util.Map;

+import java.util.Map.Entry;

+import java.util.Set;

+

+import org.junit.Test;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.NsDAO;

+import org.onap.aaf.dao.aaf.cass.NsType;

+import org.onap.aaf.dao.aaf.cass.NsDAO.Data;

+

+import org.onap.aaf.inno.env.APIException;

+

+

+public class JU_NsDAO extends AbsJUCass {

+	private static final String CRM = "ju_crm";

+	private static final String SWM = "ju_swm";

+

+	@Test

+	public void test() throws APIException, IOException  {

+		NsDAO nsd = new NsDAO(trans, cluster, AUTHZ);

+		try {

+			final String nsparent = "com.test";

+			final String ns1 = nsparent +".ju_ns";

+			final String ns2 = nsparent + ".ju_ns2";

+			

+			Map<String,String> oAttribs = new HashMap<String,String>();

+			oAttribs.put(SWM, "swm_data");

+			oAttribs.put(CRM, "crm_data");

+			Data data = new NsDAO.Data();

+			data.name = ns1;

+			data.type = NsType.APP.type;
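
+			// attrib(true) returns the attribute map, creating it if needed, so putAll is safe here.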

+			data.attrib(true).putAll(oAttribs);

+			

+

+			Result<List<Data>> rdrr;

+

+			// CREATE

+			Result<Data> rdc = nsd.create(trans, data);

+			assertTrue(rdc.isOK());

+			

+			try {

+		        // Bytification: serialize to a ByteBuffer, reconstitute a fresh Data, and compare.

+		        ByteBuffer bb = data.bytify();

+		        Data bdata = new NsDAO.Data();

+		        bdata.reconstitute(bb);

+		        compare(data, bdata);

+

+				// Test READ by Object

+				rdrr = nsd.read(trans, data);

+				assertTrue(rdrr.isOKhasData());

+				assertEquals(rdrr.value.size(),1);

+				Data d = rdrr.value.get(0);

+				assertEquals(d.name,data.name);

+				assertEquals(d.type,data.type);

+				attribsEqual(d.attrib(false),data.attrib(false));

+				attribsEqual(oAttribs,data.attrib(false));

+				

+				// Test Read by Key

+				rdrr = nsd.read(trans, data.name);

+				assertTrue(rdrr.isOKhasData());

+				assertEquals(rdrr.value.size(),1);

+				d = rdrr.value.get(0);

+				assertEquals(d.name,data.name);

+				assertEquals(d.type,data.type);

+				attribsEqual(d.attrib(false),data.attrib(false));

+				attribsEqual(oAttribs,data.attrib(false));

+				

+				// Read NS by attribute key (SWM)

+				Result<Set<String>> rtypes = nsd.readNsByAttrib(trans, SWM);

+				Set<String> types;

+				if(rtypes.notOK()) {

+					throw new IOException(rtypes.errorString());

+				} else {

+					types = rtypes.value;

+				}

+				assertEquals(1,types.size());

+				assertEquals(true,types.contains(ns1));

+				

+				// Add second NS to test list of data returned

+				Data data2 = new NsDAO.Data();

+				data2.name = ns2;

+				data2.type = NsType.APP.type; // app

+				Result<Data> rdc2 = nsd.create(trans, data2);

+				assertTrue(rdc2.isOK());

+				

+				// Interrupt - test PARENT

+				Result<List<Data>> rdchildren = nsd.getChildren(trans, "com.test");

+				assertTrue(rdchildren.isOKhasData());

+				boolean child1 = false;

+				boolean child2 = false;

+				for(Data dchild : rdchildren.value) {

+					if(ns1.equals(dchild.name))child1=true;

+					if(ns2.equals(dchild.name))child2=true;

+				}

+				assertTrue(child1);

+				assertTrue(child2);

+

+				// FINISH DATA 2 by deleting

+				Result<Void> rddr = nsd.delete(trans, data2, true);

+				assertTrue(rddr.isOK());

+

+				// ADD DESCRIPTION

+				String description = "This is my test Namespace";

+				assertFalse(description.equalsIgnoreCase(data.description));

+				

+				Result<Void> addDesc = nsd.addDescription(trans, data.name, description);

+				assertTrue(addDesc.isOK());

+				rdrr = nsd.read(trans, data);

+				assertTrue(rdrr.isOKhasData());

+				assertEquals(rdrr.value.size(),1);

+				assertEquals(rdrr.value.get(0).description,description);

+				

+				// UPDATE

+				String newDescription = "zz1234 Owns This Namespace Now";

+				oAttribs.put("mso", "mso_data");

+				data.attrib(true).put("mso", "mso_data");

+				data.description = newDescription;

+				Result<Void> update = nsd.update(trans, data);

+				assertTrue(update.isOK());

+				rdrr = nsd.read(trans, data);

+				assertTrue(rdrr.isOKhasData());

+				assertEquals(rdrr.value.size(),1);

+				assertEquals(rdrr.value.get(0).description,newDescription);

+				attribsEqual(oAttribs, rdrr.value.get(0).attrib);

+				

+				

+			} catch (IOException e) {

+				e.printStackTrace();

+			} finally {

+				// DELETE

+				Result<Void> rddr = nsd.delete(trans, data, true);

+				assertTrue(rddr.isOK());

+				rdrr = nsd.read(trans, data);

+				assertTrue(rdrr.isOK() && rdrr.isEmpty());

+				assertEquals(rdrr.value.size(),0);

+			}

+		} finally {

+			nsd.close(trans);

+		}

+	}

+

+	private void compare(NsDAO.Data d, NsDAO.Data data) {

+		assertEquals(d.name,data.name);

+		assertEquals(d.type,data.type);

+		attribsEqual(d.attrib(false),data.attrib(false));

+	}

+	

+	private void attribsEqual(Map<String,String> aa, Map<String,String> ba) {

+		assertEquals(aa.size(),ba.size());

+		for(Entry<String, String> es : aa.entrySet()) {

+			assertEquals(es.getValue(),ba.get(es.getKey()));

+		}

+	}

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsType.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsType.java
new file mode 100644
index 0000000..9215269
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsType.java
@@ -0,0 +1,60 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import static org.junit.Assert.assertEquals;

+

+import org.junit.AfterClass;

+import org.junit.Test;

+import org.onap.aaf.dao.aaf.cass.NsType;

+

+public class JU_NsType {

+

+	@AfterClass

+	public static void tearDownAfterClass() throws Exception {

+	}

+

+	@Test

+	public void test() {

+		NsType nt,nt2;

+		String[] tests = new String[] {"DOT","ROOT","COMPANY","APP","STACKED_APP","STACK"};

+		for(String s : tests) {

+			nt = NsType.valueOf(s);

+			assertEquals(s,nt.name());

+			

+			nt2 = NsType.fromString(s);

+			assertEquals(nt,nt2);

+			

+			int t = nt.type;

+			nt2 = NsType.fromType(t);

+			assertEquals(nt,nt2);

+		}

+		
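
+		// Unmatched type codes and unrecognized names both fall back to UNKNOWN.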

+		nt  = NsType.fromType(Integer.MIN_VALUE);

+		assertEquals(nt,NsType.UNKNOWN);

+		nt = NsType.fromString("Garbage");

+		assertEquals(nt,NsType.UNKNOWN);

+	}

+

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_PermDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_PermDAO.java
new file mode 100644
index 0000000..582ce18
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_PermDAO.java
@@ -0,0 +1,178 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import static org.junit.Assert.assertEquals;

+import static org.junit.Assert.assertTrue;

+

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.List;

+import java.util.Set;

+

+import org.junit.Test;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.PermDAO;

+import org.onap.aaf.dao.aaf.cass.RoleDAO;

+import org.onap.aaf.dao.aaf.cass.PermDAO.Data;

+

+import org.onap.aaf.inno.env.APIException;

+

+/**

+ * Test the PermDAO.

+ * 

+ * Utilizes AbsJUCass to initialize and pre-load Cass.

+ */

+public class JU_PermDAO extends AbsJUCass {

+

+	@Test

+	public void test() throws APIException, IOException {

+		PermDAO pd = new PermDAO(trans,cluster,"authz");

+		try {

+			PermDAO.Data data = new PermDAO.Data();

+			data.ns = "com.test.ju_perm";

+			data.type = "MyType";

+			data.instance = "MyInstance";

+			data.action = "MyAction";
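
+			// roles(true) returns the perm's role set, creating it if needed; grant <ns>.dev up front.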

+			data.roles(true).add(data.ns + ".dev");

+			

+

+

+			// CREATE

+			Result<Data> rpdc = pd.create(trans,data);

+			assertTrue(rpdc.isOK());

+

+			Result<List<PermDAO.Data>> rlpd;

+			try {

+		        // Bytification: serialize to a ByteBuffer, reconstitute a fresh Data, and compare.

+		        ByteBuffer bb = data.bytify();

+		        Data bdata = new PermDAO.Data();

+		        bdata.reconstitute(bb);

+		        compare(data, bdata);

+

+				// Validate Read with key fields in Data

+				if((rlpd = pd.read(trans,data)).isOK()) {

+					for(PermDAO.Data d : rlpd.value) {

+						checkData1(data,d);

+					}

+				}

+				

+				// Validate readByType

+				if((rlpd = pd.readByType(trans,data.ns, data.type)).isOK()) {

+					for(PermDAO.Data d : rlpd.value) {

+						checkData1(data,d);

+					}

+				}

+				

+				// Add Role

+				RoleDAO.Data role = new RoleDAO.Data();

+				role.ns = data.ns;

+				role.name = "test";

+				

+				Result<Void> rvpd = pd.addRole(trans, data, role.fullName());

+				assertTrue(rvpd.isOK());

+				// Validate Read with key fields in Data

+				if((rlpd = pd.read(trans,data)).isOK()) {

+					for(PermDAO.Data d : rlpd.value) {

+						checkData2(data,d);

+					}

+				}

+				

+				// Remove Role

+				rvpd = pd.delRole(trans, data, role.fullName());

+				assertTrue(rvpd.isOK());

+				if((rlpd = pd.read(trans,data)).isOK())

+					for(PermDAO.Data d : rlpd.value) {

+						checkData1(data,d);

+					}

+				

+				// Add Child

+				Data data2 = new Data();

+				data2.ns = data.ns;

+				data2.type = data.type + ".2";

+				data2.instance = data.instance;

+				data2.action = data.action;

+				

+				rpdc = pd.create(trans, data2);

+				assertTrue(rpdc.isOK());

+				try {

+					rlpd = pd.readChildren(trans, data.ns,data.type);

+					assertTrue(rlpd.isOKhasData());

+					assertEquals(rlpd.value.size(),1);

+					assertEquals(rlpd.value.get(0).fullType(),data2.fullType());

+				} finally {

+					// Delete Child

+					pd.delete(trans, data2,true);

+

+				}

+			} catch (IOException e) {

+				e.printStackTrace();

+			} finally {

+				// DELETE

+				Result<Void> rpdd = pd.delete(trans,data,true);

+				assertTrue(rpdd.isOK());

+				rlpd = pd.read(trans, data);

+				assertTrue(rlpd.isOK() && rlpd.isEmpty());

+				assertEquals(rlpd.value.size(),0);

+			}

+		} finally {

+			pd.close(trans);

+		}

+	}

+

+	private void compare(Data a, Data b) {

+		assertEquals(a.ns,b.ns);

+		assertEquals(a.type,b.type);

+		assertEquals(a.instance,b.instance);

+		assertEquals(a.action,b.action);

+		assertEquals(a.roles(false).size(),b.roles(false).size());

+		for(String s: a.roles(false)) {

+			assertTrue(b.roles(false).contains(s));

+		}

+	}

+	private void checkData1(Data data, Data d) {

+		assertEquals(data.ns,d.ns);

+		assertEquals(data.type,d.type);

+		assertEquals(data.instance,d.instance);

+		assertEquals(data.action,d.action);

+		

+		Set<String> ss = d.roles(true);

+		assertEquals(1,ss.size());

+		assertTrue(ss.contains(data.ns+".dev"));

+	}

+	

+	private void checkData2(Data data, Data d) {

+		assertEquals(data.ns,d.ns);

+		assertEquals(data.type,d.type);

+		assertEquals(data.instance,d.instance);

+		assertEquals(data.action,d.action);

+		

+		Set<String> ss = d.roles(true);

+		assertEquals(2,ss.size());

+		assertTrue(ss.contains(data.ns+".dev"));

+		assertTrue(ss.contains(data.ns+".test"));

+	}

+

+

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_RoleDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_RoleDAO.java
new file mode 100644
index 0000000..ba61c61
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_RoleDAO.java
@@ -0,0 +1,141 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import static org.junit.Assert.assertEquals;

+import static org.junit.Assert.assertTrue;

+

+import java.io.IOException;

+import java.nio.ByteBuffer;

+import java.util.List;

+

+import org.junit.Test;

+import org.onap.aaf.authz.layer.Result;

+import org.onap.aaf.dao.aaf.cass.PermDAO;

+import org.onap.aaf.dao.aaf.cass.RoleDAO;

+import org.onap.aaf.dao.aaf.cass.RoleDAO.Data;

+

+import org.onap.aaf.inno.env.APIException;

+

+

+public class JU_RoleDAO extends AbsJUCass {

+

+	@Test

+	public void test()  throws IOException, APIException {

+		RoleDAO rd = new RoleDAO(trans, cluster, AUTHZ);

+		try {

+			Data data = new RoleDAO.Data();

+			data.ns = "com.test.ju_role";

+			data.name = "role1";

+

+	        // Bytification: serialize to a ByteBuffer, reconstitute a fresh Data, and compare.

+	        ByteBuffer bb = data.bytify();

+	        Data bdata = new RoleDAO.Data();

+	        bdata.reconstitute(bb);

+	        compare(data, bdata);

+

+			// CREATE

+			Result<Data> rdc = rd.create(trans, data);

+			assertTrue(rdc.isOK());

+			Result<List<Data>> rdrr;

+			try {

+				// READ

+				rdrr = rd.read(trans, data);

+				assertTrue(rdrr.isOKhasData());

+				assertEquals(rdrr.value.size(),1);

+				Data d = rdrr.value.get(0);

+				assertEquals(d.perms.size(),0);

+				assertEquals(d.name,data.name);

+				assertEquals(d.ns,data.ns);

+
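
+				// Build a perm to exercise addPerm/delPerm on this role.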

+				PermDAO.Data perm = new PermDAO.Data();

+				perm.ns = data.ns;

+				perm.type = "Perm";

+				perm.instance = "perm1";

+				perm.action = "write";

+				

+				// ADD Perm

+				Result<Void> rdar = rd.addPerm(trans, data, perm);

+				assertTrue(rdar.isOK());

+				rdrr = rd.read(trans, data);

+				assertTrue(rdrr.isOKhasData());

+				assertEquals(rdrr.value.size(),1);

+				assertEquals(rdrr.value.get(0).perms.size(),1);

+				assertTrue(rdrr.value.get(0).perms.contains(perm.encode()));

+				

+				// DEL Perm

+				rdar = rd.delPerm(trans, data,perm);

+				assertTrue(rdar.isOK());

+				rdrr = rd.read(trans, data);

+				assertTrue(rdrr.isOKhasData());

+				assertEquals(rdrr.value.size(),1);

+				assertEquals(rdrr.value.get(0).perms.size(),0);

+

+				// Add Child

+				Data data2 = new Data();

+				data2.ns = data.ns;

+				data2.name = data.name + ".2";

+				

+				rdc = rd.create(trans, data2);

+				assertTrue(rdc.isOK());

+				try {

+					rdrr = rd.readChildren(trans, data.ns,data.name);

+					assertTrue(rdrr.isOKhasData());

+					assertEquals(rdrr.value.size(),1);

+					assertEquals(rdrr.value.get(0).name,data.name + ".2");

+					
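
+					// "*" matches every role in the namespace, so both role1 and role1.2 are returned.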

+					rdrr = rd.readChildren(trans, data.ns,"*");

+					assertTrue(rdrr.isOKhasData());

+					assertEquals(rdrr.value.size(),2);

+

+				} finally {

+					// Delete Child

+					rd.delete(trans, data2, true);

+				}

+	

+			} finally {

+				// DELETE

+				Result<Void> rddr = rd.delete(trans, data, true);

+				assertTrue(rddr.isOK());

+				rdrr = rd.read(trans, data);

+				assertTrue(rdrr.isOK() && rdrr.isEmpty());

+				assertEquals(rdrr.value.size(),0);

+			}

+		} finally {

+			rd.close(trans);

+		}

+	}

+

+	private void compare(Data a, Data b) {

+		assertEquals(a.name,b.name);

+		assertEquals(a.description, b.description);

+		assertEquals(a.ns,b.ns);

+		assertEquals(a.perms(false).size(),b.perms(false).size());

+		for(String p : a.perms(false)) {

+			assertTrue(b.perms(false).contains(p));

+		}

+	}

+

+}

diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/NS_ChildUpdate.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/NS_ChildUpdate.java
new file mode 100644
index 0000000..379eb5e
--- /dev/null
+++ b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/NS_ChildUpdate.java
@@ -0,0 +1,79 @@
+/*******************************************************************************

+ * ============LICENSE_START====================================================

+ * * org.onap.aaf

+ * * ===========================================================================

+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+ * * ===========================================================================

+ * * Licensed under the Apache License, Version 2.0 (the "License");

+ * * you may not use this file except in compliance with the License.

+ * * You may obtain a copy of the License at

+ * * 

+ *  *      http://www.apache.org/licenses/LICENSE-2.0

+ * * 

+ *  * Unless required by applicable law or agreed to in writing, software

+ * * distributed under the License is distributed on an "AS IS" BASIS,

+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+ * * See the License for the specific language governing permissions and

+ * * limitations under the License.

+ * * ============LICENSE_END====================================================

+ * *

+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+ * *

+ ******************************************************************************/

+package org.onap.aaf.dao.aaf.test;

+

+import org.onap.aaf.authz.env.AuthzEnv;

+

+import com.datastax.driver.core.Cluster;

+import com.datastax.driver.core.ResultSet;

+import com.datastax.driver.core.Row;

+import com.datastax.driver.core.Session;

+
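
+/**

+ * One-off utility that back-fills the parent column of the ns table, deriving each parent from the namespace name.

+ */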

+public class NS_ChildUpdate {

+

+	public static void main(String[] args) {

+		if(args.length < 3 ) {

+			System.out.println("usage: NS_ChildUpdate machine mechid (encrypted)passwd");

+		} else {

+			try {

+				AuthzEnv env = new AuthzEnv();

+				env.setLog4JNames("log.properties","authz","authz","audit","init","trace");

+				

+				Cluster cluster = Cluster.builder()

+						.addContactPoint(args[0])

+						.withCredentials(args[1],env.decrypt(args[2], false))

+						.build();

+	

+				Session session = cluster.connect("authz");

+				try {

+					ResultSet result = session.execute("SELECT name,parent FROM ns");

+					int count = 0;

+					for(Row r : result.all()) {

+						++count;

+						String name = r.getString(0);

+						String parent = r.getString(1);

+						if(parent==null) {

+							int idx = name.lastIndexOf('.');

+							
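
+							// Strip the final dot-segment to get the parent; top-level names get "." as the root marker.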

+							parent = idx > 0 ? name.substring(0, idx) : ".";

+							System.out.println("UPDATE " + name + " to " + parent);
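
+							// Values are inlined into the CQL string, which assumes names contain no quotes; a prepared statement would be safer.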

+							session.execute("UPDATE ns SET parent='" + parent + "' WHERE name='" + name + "';");

+						}

+					}

+					System.out.println("Processed " + count + " records");

+				} finally {

+					session.close();

+					cluster.close();

+				}

+			} catch (Exception e) {

+				e.printStackTrace();

+			}

+		}

+	}

+

+}

diff --git a/authz-cass/src/test/resources/cadi.properties b/authz-cass/src/test/resources/cadi.properties
new file mode 100644
index 0000000..8f1209a
--- /dev/null
+++ b/authz-cass/src/test/resources/cadi.properties
@@ -0,0 +1,53 @@
+#-------------------------------------------------------------------------------

+# ============LICENSE_START====================================================

+# * org.onap.aaf

+# * ===========================================================================

+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+# * ===========================================================================

+# * Licensed under the Apache License, Version 2.0 (the "License");

+# * you may not use this file except in compliance with the License.

+# * You may obtain a copy of the License at

+# * 

+#  *      http://www.apache.org/licenses/LICENSE-2.0

+# * 

+#  * Unless required by applicable law or agreed to in writing, software

+# * distributed under the License is distributed on an "AS IS" BASIS,

+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+# * See the License for the specific language governing permissions and

+# * limitations under the License.

+# * ============LICENSE_END====================================================

+# *

+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+# *

+#-------------------------------------------------------------------------------

+###############################################################################

+# Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.

+###############################################################################

+##

+## AUTHZ API (authz-service) Properties

+##

+

+cadi_prop_file=com.att.aaf.props;com.att.aaf.common.props

+

+#cadi_trust_all_x509=true

+#cadi_alias=aaf.att

+https.protocols=TLSv1.1,TLSv1.2

+

+cm_url=https://XXX:8150

+

+basic_realm=localized

+basic_warn=false

+localhost_deny=false

+

+cass_group_name=com.att.aaf

+cass_cluster_name=mithrilcsp.sbc.com

+aaf_default_realm=com.att.csp

+

+aaf_url=https://DME2RESOLVE/service=com.att.authz.AuthorizationService/version=2.0/envContext=DEV/routeOffer=BAU_SE

+aaf_id=???

+aaf_password=enc:XXX
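
+# NOTE: XXX and ??? values above are environment-specific placeholders.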

+

+aaf_user_expires=3000

+aaf_clean_interval=4000

+