usergrid-commits mailing list archives

From g...@apache.org
Subject [03/50] [abbrv] usergrid git commit: Removing export and import files along with the other export that uses Jclouds
Date Mon, 02 May 2016 17:54:34 GMT
http://git-wip-us.apache.org/repos/asf/usergrid/blob/cb1356c2/stack/services/src/test/java/org/apache/usergrid/management/importer/ImportConnectionsTest.java
----------------------------------------------------------------------
diff --git a/stack/services/src/test/java/org/apache/usergrid/management/importer/ImportConnectionsTest.java
b/stack/services/src/test/java/org/apache/usergrid/management/importer/ImportConnectionsTest.java
deleted file mode 100644
index 05948ff..0000000
--- a/stack/services/src/test/java/org/apache/usergrid/management/importer/ImportConnectionsTest.java
+++ /dev/null
@@ -1,183 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.usergrid.management.importer;
-
-
-import java.util.UUID;
-
-import org.junit.ClassRule;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.usergrid.NewOrgAppAdminRule;
-import org.apache.usergrid.ServiceITSetup;
-import org.apache.usergrid.ServiceITSetupImpl;
-import org.apache.usergrid.cassandra.ClearShiroSubject;
-import org.apache.usergrid.persistence.EntityManager;
-import org.apache.usergrid.persistence.PagingResultsIterator;
-import org.apache.usergrid.persistence.Results;
-import org.apache.usergrid.persistence.entities.FileImport;
-import org.apache.usergrid.persistence.entities.Import;
-import org.apache.usergrid.persistence.Query;
-
-import static org.junit.Assert.assertEquals;
-
-
-
-public class ImportConnectionsTest {
-
-    private static final Logger logger = LoggerFactory.getLogger(ImportConnectionsTest.class);
-
-    @Rule
-    public ClearShiroSubject clearShiroSubject = new ClearShiroSubject();
-
-    @ClassRule
-    public static final ServiceITSetup setup =
-        new ServiceITSetupImpl( );
-
-    @Rule
-    public NewOrgAppAdminRule newOrgAppAdminRule = new NewOrgAppAdminRule( setup );
-
-
-    @Test
-    @Ignore("Pending merge of export-feature branch. Because getTargetEntities() is broken")
-    public void testCreateAndCountConnectionsViaGet() throws Exception {
-
-        doTestCreateAndCountConnections(new ConnectionCounter() {
-            @Override
-            public int count(Import importEntity) {
-                return getConnectionCountViaGet(importEntity);
-            }
-        });
-    }
-
-
-    @Test
-    public void testCreateAndCountConnectionsViaSearch() throws Exception {
-
-        doTestCreateAndCountConnections(new ConnectionCounter() {
-            @Override
-            public int count(Import importEntity) {
-                return getConnectionCountViaSearch(importEntity);
-            }
-        });
-    }
-
-
-    interface ConnectionCounter {
-        int count( Import importEntity );
-    }
-
-
-    public void doTestCreateAndCountConnections(
-        ConnectionCounter counter) throws Exception {
-
-        final int connectionCount = 15;
-
-        EntityManager emMgmtApp = setup.getEmf().getEntityManager(setup.getEmf().getManagementAppId());
-
-        Import importEntity = new Import();
-        importEntity = emMgmtApp.create( importEntity );
-
-        UUID applicationId = newOrgAppAdminRule.getApplicationInfo().getId();
-
-        for ( int i=0; i<connectionCount; i++ ) {
-            FileImport fileImport = new FileImport("dummyFileName" + i, applicationId);
-            fileImport = emMgmtApp.create(fileImport);
-            emMgmtApp.createConnection(importEntity, "includes", fileImport);
-        }
-
-        int retries = 0;
-        int maxRetries = 20;
-        boolean done = false;
-        int count = 0;
-        while ( !done && retries++ < maxRetries ) {
-
-            count = counter.count( importEntity );
-            if ( count == connectionCount ) {
-                logger.debug("Count good!");
-                done = true;
-            } else {
-                logger.debug("Got {} of {} Waiting...", count, connectionCount );
-                Thread.sleep(1000);
-            }
-        }
-        if ( retries >= maxRetries ) {
-            throw new RuntimeException("Max retries was reached");
-        }
-
-        assertEquals("did not get all connections", connectionCount, count);
-    }
-
-
-    private int getConnectionCountViaGet( final Import importRoot ) {
-
-        try {
-            EntityManager emMgmtApp = setup.getEmf()
-                .getEntityManager(setup.getEmf().getManagementAppId() );
-
-            Results entities = emMgmtApp.getTargetEntities(
-                importRoot, "includes", null, Query.Level.ALL_PROPERTIES);
-
-            PagingResultsIterator itr = new PagingResultsIterator( entities );
-            int count = 0;
-            while ( itr.hasNext() ) {
-                itr.next();
-                count++;
-            }
-            return count;
-        }
-        catch ( Exception e ) {
-            logger.error( "application doesn't exist within the current context" );
-            throw new RuntimeException( e );
-        }
-    }
-
-
-    private int getConnectionCountViaSearch( final Import importRoot ) {
-
-        try {
-            EntityManager emMgmtApp = setup.getEmf()
-                .getEntityManager(setup.getEmf().getManagementAppId() );
-
-            Query query = Query.fromQL("select *");
-            query.setEntityType("file_import");
-            query.setConnectionType("includes");
-            query.setLimit(10000);
-
-            Results entities = emMgmtApp.searchTargetEntities(importRoot, query);
-            return entities.size();
-
-//            PagingResultsIterator itr = new PagingResultsIterator( entities );
-//            int count = 0;
-//            while ( itr.hasNext() ) {
-//                itr.next();
-//                count++;
-//            }
-//            return count;
-        }
-        catch ( Exception e ) {
-            logger.error( "application doesn't exist within the current context" );
-            throw new RuntimeException( e );
-        }
-    }
-}
-

http://git-wip-us.apache.org/repos/asf/usergrid/blob/cb1356c2/stack/services/src/test/java/org/apache/usergrid/management/importer/ImportServiceIT.java
----------------------------------------------------------------------
diff --git a/stack/services/src/test/java/org/apache/usergrid/management/importer/ImportServiceIT.java
b/stack/services/src/test/java/org/apache/usergrid/management/importer/ImportServiceIT.java
deleted file mode 100644
index 993a189..0000000
--- a/stack/services/src/test/java/org/apache/usergrid/management/importer/ImportServiceIT.java
+++ /dev/null
@@ -1,678 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.usergrid.management.importer;
-
-import com.amazonaws.SDKGlobalConfiguration;
-import com.google.common.collect.BiMap;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.util.concurrent.Service;
-import com.google.inject.Module;
-import org.apache.commons.lang.RandomStringUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.usergrid.ServiceITSetup;
-import org.apache.usergrid.ServiceITSetupImpl;
-import org.apache.usergrid.batch.JobExecution;
-import org.apache.usergrid.batch.service.JobSchedulerService;
-import org.apache.usergrid.cassandra.ClearShiroSubject;
-import org.apache.usergrid.management.OrganizationInfo;
-import org.apache.usergrid.management.UserInfo;
-import org.apache.usergrid.management.export.ExportService;
-import org.apache.usergrid.management.export.S3Export;
-import org.apache.usergrid.management.export.S3ExportImpl;
-import org.apache.usergrid.persistence.*;
-import org.apache.usergrid.persistence.entities.Import;
-import org.apache.usergrid.persistence.entities.JobData;
-import org.apache.usergrid.persistence.exceptions.EntityNotFoundException;
-import org.apache.usergrid.persistence.Query.Level;
-import org.apache.usergrid.persistence.index.utils.UUIDUtils;
-import org.apache.usergrid.services.notifications.QueueListener;
-import org.apache.usergrid.setup.ConcurrentProcessSingleton;
-
-import org.jclouds.ContextBuilder;
-import org.jclouds.blobstore.BlobStore;
-import org.jclouds.blobstore.BlobStoreContext;
-import org.jclouds.blobstore.domain.Blob;
-import org.jclouds.http.config.JavaUrlHttpCommandExecutorServiceModule;
-import org.jclouds.logging.log4j.config.Log4JLoggingModule;
-import org.jclouds.netty.config.NettyPayloadModule;
-import org.junit.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import java.util.*;
-
-import java.util.UUID;
-
-import static org.hamcrest.core.Is.is;
-import static org.hamcrest.core.IsNot.not;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-
-public class ImportServiceIT {
-
-    private static final Logger logger = LoggerFactory.getLogger(ImportServiceIT.class);
-
-    // app-level data generated only once
-    private static UserInfo adminUser;
-    private static OrganizationInfo organization;
-    private static UUID applicationId;
-
-    QueueListener listener;
-
-    final String bucketName = System.getProperty( "bucketName" )
-        + RandomStringUtils.randomAlphanumeric(10).toLowerCase();
-
-    @Rule
-    public ClearShiroSubject clearShiroSubject = new ClearShiroSubject();
-
-    @ClassRule
-    public static final ServiceITSetup setup =
-        new ServiceITSetupImpl(  );
-
-
-    @BeforeClass
-    public static void setup() throws Exception {
-        String username = "test"+ UUIDUtils.newTimeUUID();
-
-        // start the scheduler after we're all set up
-        JobSchedulerService jobScheduler = ConcurrentProcessSingleton
-            .getInstance().getSpringResource().getBean( JobSchedulerService.class );
-
-        if ( jobScheduler.state() != Service.State.RUNNING ) {
-            jobScheduler.startAsync();
-            jobScheduler.awaitRunning();
-        }
-
-        //creates sample test application
-        adminUser = setup.getMgmtSvc().createAdminUser( null,
-            username, username, username+"@test.com", username, false, false );
-        organization = setup.getMgmtSvc().createOrganization( username, adminUser, true );
-        applicationId = setup.getMgmtSvc().createApplication( organization.getUuid(), username+"app" ).getId();
-    }
-
-
-    @Before
-    public void before() {
-
-        boolean configured =
-                   !StringUtils.isEmpty(System.getProperty( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR))
-                && !StringUtils.isEmpty(System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR))
-                && !StringUtils.isEmpty(System.getProperty("bucketName"));
-
-        if ( !configured ) {
-            logger.warn("Skipping test because {}, {} and bucketName not " +
-                "specified as system properties, e.g. in your Maven settings.xml file.",
-                new Object[] {
-                    SDKGlobalConfiguration.SECRET_KEY_ENV_VAR,
-                    SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR
-                });
-        }
-
-        Assume.assumeTrue( configured );
-   }
-
-    @After
-    public void after() throws Exception {
-        if(listener != null) {
-            listener.stop();
-            listener = null;
-        }
-    }
-
-    // test case to check if application is imported correctly
-    @Test
-    @Ignore("Pending merge of export-feature branch. Import organization not supported")
-    public void testImportApplication() throws Exception {
-
-        EntityManager em = setup.getEmf().getEntityManager( applicationId );
-
-        // Create five user entities (we already have one admin user)
-        List<Entity> entities = new ArrayList<>();
-        for ( int i = 0; i < 5; i++ ) {
-            Map<String, Object> userProperties =  new LinkedHashMap<>();
-            userProperties.put( "parameter1", "user" + i );
-            userProperties.put( "parameter2", "user" + i + "@test.com" );
-            entities.add( em.create( "custom", userProperties ) );
-        }
-        // Creates connections
-        em.createConnection( new SimpleEntityRef( "custom",  entities.get(0).getUuid() ),
-                  "related", new SimpleEntityRef( "custom",  entities.get(1).getUuid() ) );
-        em.createConnection( new SimpleEntityRef( "custom",  entities.get(1).getUuid() ),
-                  "related", new SimpleEntityRef( "custom",  entities.get(0).getUuid() ) );
-
-        logger.debug("\n\nExport the application\n\n");
-
-        // Export the application which needs to be tested for import
-        ExportService exportService = setup.getExportService();
-        S3Export s3Export = new S3ExportImpl();
-        HashMap<String, Object> payload = payloadBuilder();
-        payload.put( "organizationId",  organization.getUuid());
-        payload.put( "applicationId", applicationId );
-
-        // Schedule the export job
-        UUID exportUUID = exportService.schedule( payload );
-
-        // Create and initialize jobData returned in JobExecution.
-        JobData jobData = jobExportDataCreator(payload, exportUUID, s3Export);
-
-        JobExecution jobExecution = mock( JobExecution.class );
-        when( jobExecution.getJobData() ).thenReturn( jobData );
-
-        // Export the application and wait for the export job to finish
-        exportService.doExport( jobExecution );
-        while ( !exportService.getState( exportUUID ).equals( "FINISHED" ) ) {
-           // wait...
-        }
-
-        logger.debug("Import the application\n\n");
-
-        // import
-        S3Import s3Import = new S3ImportImpl();
-        ImportService importService = setup.getImportService();
-
-        // schedule the import job
-        final Import importEntity = importService.schedule( null,  payload );
-        final UUID importUUID = importEntity.getUuid();
-
-        //create and initialize jobData returned in JobExecution.
-        jobData = jobImportDataCreator( payload,importUUID, s3Import );
-
-        jobExecution = mock( JobExecution.class );
-        when( jobExecution.getJobData() ).thenReturn( jobData );
-
-        // import the application file and wait for it to finish
-        importService.doImport(jobExecution);
-        while ( !importService.getState( importUUID ).equals( Import.State.FINISHED ) ) {
-           // wait...
-        }
-
-        if (logger.isDebugEnabled()) {
-            logger.debug("Verify Import");
-        }
-
-        try {
-            //checks if temp import files are created i.e. downloaded from S3
-            //assertThat(importService.getEphemeralFile().size(), is(not(0)));
-
-            Set<String> collections = em.getApplicationCollections();
-
-            // check if all collections in the application are updated
-            for (String collectionName : collections) {
-
-                if (logger.isDebugEnabled()) {
-                    logger.debug("Checking collection {}", collectionName);
-                }
-
-                Results collection = em.getCollection(applicationId, collectionName, null, Level.ALL_PROPERTIES);
-
-                for (Entity eachEntity : collection.getEntities() ) {
-
-                    if (logger.isDebugEnabled()) {
-                        logger.debug("Checking entity {} {}:{}",
-                            new Object[]{eachEntity.getName(), eachEntity.getType(), eachEntity.getUuid()});
-                    }
-
-                    //check for dictionaries --> checking permissions in the dictionaries
-                    EntityRef er;
-                    Map<Object, Object> dictionaries;
-
-                    //checking for permissions for the roles collection
-                    if (collectionName.equals("roles")) {
-                        if (eachEntity.getName().equalsIgnoreCase("admin")) {
-                            er = eachEntity;
-                            dictionaries = em.getDictionaryAsMap(er, "permissions");
-                            assertThat(dictionaries.size(), is(not(0))); // admin has permission
-                        } else {
-                            er = eachEntity;
-                            dictionaries = em.getDictionaryAsMap(er, "permissions");
-                            assertThat(dictionaries.size(), is(0)); // other roles do not
-                        }
-                    }
-                }
-
-                if (collectionName.equals("customs")) {
-                    // check if connections are created for only the 1st 2 entities in the custom collection
-                    Results r;
-                    List<ConnectionRef> connections;
-                    for (int i = 0; i < 2; i++) {
-                        r = em.getTargetEntities(entities.get(i), "related", null, Level.IDS);
-                        connections = r.getConnections();
-                        assertNotNull(connections);
-                    }
-                }
-            }
-        }
-        finally {
-            //delete bucket
-            deleteBucket();
-        }
-    }
-
-    // test case to check if all application files for an organization are imported correctly
-    @Test
-    @Ignore("Pending merge of export-feature branch. Import organization not supported")
-    public void testImportOrganization() throws Exception {
-
-        // creates 5 entities in usertests collection
-        EntityManager em = setup.getEmf().getEntityManager( applicationId );
-
-        // initialize user object to be posted
-        Map<String, Object> userProperties = null;
-
-        Entity entity[] = new Entity[5];
-        //creates entities
-        for ( int i = 0; i < 5; i++ ) {
-            userProperties = new LinkedHashMap<String, Object>();
-            userProperties.put( "name", "user" + i );
-            userProperties.put( "email", "user" + i + "@test.com" );
-            entity[i] = em.create( "usertests", userProperties );
-            em.getCollections(entity[i]).contains("usertests");
-        }
-
-        //creates test connections between first 2 entities in usertests collection
-        ConnectedEntityRef ref = em.createConnection( entity[0], "related", entity[1]);
-
-        em.createConnection( entity[1], "related", entity[0]);
-
-        //create 2nd test application, add entities to it, create connections and set permissions
-        createAndSetup2ndApplication();
-
-        //export all applications in an organization
-        ExportService exportService = setup.getExportService();
-        S3Export s3Export = new S3ExportImpl();
-        HashMap<String, Object> payload = payloadBuilder();
-
-        payload.put( "organizationId",  organization.getUuid());
-
-        // schedule the export job
-        UUID exportUUID = exportService.schedule( payload );
-
-        //create and initialize jobData returned in JobExecution.
-        JobData jobData = jobExportDataCreator(payload, exportUUID, s3Export);
-
-        JobExecution jobExecution = mock( JobExecution.class );
-        when( jobExecution.getJobData() ).thenReturn( jobData );
-
-        //export organization data and wait for the export job to finish
-        exportService.doExport( jobExecution );
-        while ( !exportService.getState( exportUUID ).equals( "FINISHED" ) ) {
-            ;
-        }
-        //TODO: can check if the temp files got created
-
-        // import
-        S3Import s3Import = new S3ImportImpl();
-        ImportService importService = setup.getImportService();
-
-        //schedule the import job
-        final Import importEntity = importService.schedule(  null, payload );
-        final UUID importUUID = importEntity.getUuid();
-
-        //create and initialize jobData returned in JobExecution.
-        jobData = jobImportDataCreator( payload,importUUID, s3Import );
-
-        jobExecution = mock( JobExecution.class );
-        when( jobExecution.getJobData() ).thenReturn( jobData );
-
-        // import all application files for the organization and wait for the import to finish
-        importService.doImport(jobExecution);
-        while ( !importService.getState( importUUID ).equals( Import.State.FINISHED ) ) {
-            ;
-        }
-
-        try {
-            //checks if temp import files are created i.e. downloaded from S3
-            //assertThat(importService.getEphemeralFile().size(), is(not(0)));
-
-            //get all applications for an organization
-            BiMap<UUID, String> applications =
-                setup.getMgmtSvc().getApplicationsForOrganization(organization.getUuid());
-
-            for (BiMap.Entry<UUID, String> app : applications.entrySet()) {
-
-                //check if all collections-entities are updated - created and modified should be different
-                UUID appID = app.getKey();
-                em = setup.getEmf().getEntityManager(appID);
-                Set<String> collections = em.getApplicationCollections();
-                Iterator<String> itr = collections.iterator();
-                while (itr.hasNext()) {
-                    String collectionName = itr.next();
-                    Results collection = em.getCollection(appID, collectionName, null, Level.ALL_PROPERTIES);
-                    List<Entity> entities = collection.getEntities();
-
-                    if (collectionName.equals("usertests")) {
-
-                        // check if connections are created for only the 1st 2 entities in user collection
-                        Results r;
-                        List<ConnectionRef> connections;
-                        for (int i = 0; i < 2; i++) {
-                            r = em.getTargetEntities(entities.get(i), "related", null, Level.IDS);
-                            connections = r.getConnections();
-                            assertNotNull(connections);
-                        }
-
-                        //check if dictionary is created
-                        EntityRef er;
-                        Map<Object, Object> dictionaries1, dictionaries2;
-                        for (int i = 0; i < 3; i++) {
-                            er = entities.get(i);
-                            dictionaries1 = em.getDictionaryAsMap(er, "connected_types");
-                            dictionaries2 = em.getDictionaryAsMap(er, "connecting_types");
-
-                            if (i == 2) {
-                                //for entity 2, these should be empty
-                                assertThat(dictionaries1.size(), is(0));
-                                assertThat(dictionaries2.size(), is(0));
-                            } else {
-                                assertThat(dictionaries1.size(), is(not(0)));
-                                assertThat(dictionaries2.size(), is(not(0)));
-                            }
-                        }
-                    }
-                }
-            }
-        }
-        finally {
-            //delete bucket
-            deleteBucket();
-        }
-    }
-
-    /**
-     * Test to schedule a job with null config
-     */
-    @Test(expected=NullPointerException.class)
-    public void testScheduleJobWithNullConfig() throws Exception {
-        HashMap<String, Object> payload = null;
-
-        ImportService importService = setup.getImportService();
-        final Import importEntity = importService.schedule( null,  payload );
-
-
-        assertNull(importEntity);
-    }
-
-    /**
-     * Test to get state of a job with null UUID
-     */
-    @Test(expected = NullPointerException.class)
-    public void testGetStateWithNullUUID() throws Exception {
-        UUID uuid= null;
-
-        ImportService importService = setup.getImportService();
-        Import.State state = importService.getState( uuid );
-
-    }
-
-    /**
-     * Test to get state of a job with fake UUID
-     */
-    @Test(expected = EntityNotFoundException.class)
-    public void testGetStateWithFakeUUID() throws Exception {
-        UUID fake = UUID.fromString( "AAAAAAAA-FFFF-FFFF-FFFF-AAAAAAAAAAAA" );
-
-        ImportService importService = setup.getImportService();
-        Import.State state = importService.getState( fake );
-
-    }
-
-
-    /**
-     * Test to get error message of a job with null state
-     */
-    @Test
-    public void testErrorMessageWithNullState() throws Exception {
-        UUID state = null;
-        ImportService importService = setup.getImportService();
-        String error = importService.getErrorMessage(state);
-
-        assertEquals(error,"UUID passed in cannot be null");
-    }
-
-    /**
-     * Test to get error message of a job with fake UUID
-     */
-    @Test
-    public void testErrorMessageWithFakeUUID() throws Exception {
-        UUID state = UUID.fromString( "AAAAAAAA-FFFF-FFFF-FFFF-AAAAAAAAAAAA" );
-        ImportService importService = setup.getImportService();
-        String error = importService.getErrorMessage(state);
-
-        assertEquals(error,"No Such Element found");
-    }
-
-    /**
-     * Test of the doImport method with a null organization ID
-     */
-    @Test
-    @Ignore("Pending merge of export-feature branch. Import organization not supported")
-    public void testDoImportWithNullOrganizationID() throws Exception {
-        // import
-        S3Import s3Import = new S3ImportImpl();
-        ImportService importService = setup.getImportService();
-
-        HashMap<String, Object> payload = payloadBuilder();
-
-        //schedule the import job
-        final Import importEntity = importService.schedule( null,  payload );
-        final UUID importUUID = importEntity.getUuid();
-
-        //create and initialize jobData returned in JobExecution.
-        JobData jobData = jobImportDataCreator(payload, importUUID, s3Import);
-
-        JobExecution jobExecution = mock( JobExecution.class );
-        when( jobExecution.getJobData() ).thenReturn( jobData );
-
-        importService.doImport(jobExecution);
-        assertEquals(importService.getState(importUUID),Import.State.FAILED);
-    }
-
-    /**
-     * Test of the doImport method with a fake organization ID
-     */
-    @Test
-    @Ignore("Pending merge of export-feature branch. Import organization not supported")
-    public void testDoImportWithFakeOrganizationID() throws Exception {
-
-        UUID fakeOrgId = UUID.fromString( "AAAAAAAA-FFFF-FFFF-FFFF-AAAAAAAAAAAA" );
-        // import
-        S3Import s3Import = new S3ImportImpl();
-        ImportService importService = setup.getImportService();
-
-        HashMap<String, Object> payload = payloadBuilder();
-
-        payload.put("organizationId",fakeOrgId);
-        //schedule the import job
-        final Import importEntity = importService.schedule( null,  payload );
-        final UUID importUUID = importEntity.getUuid();
-
-        //create and initialize jobData returned in JobExecution.
-        JobData jobData = jobImportDataCreator(payload, importUUID, s3Import);
-
-        JobExecution jobExecution = mock( JobExecution.class );
-        when( jobExecution.getJobData() ).thenReturn( jobData );
-
-        // import all application files for the organization and wait for the import to finish
-        importService.doImport(jobExecution);
-        assertEquals(Import.State.FAILED, importService.getState(importUUID));
-    }
-
-    /**
-     * Test of the doImport method with a fake application ID
-     */
-    @Test
-    @Ignore("Pending merge of export-feature branch. Import application not supported")
-    public void testDoImportWithFakeApplicationID() throws Exception {
-
-        UUID fakeappId = UUID.fromString( "AAAAAAAA-FFFF-FFFF-FFFF-AAAAAAAAAAAA" );
-        // import
-        S3Import s3Import = new S3ImportImpl();
-        ImportService importService = setup.getImportService();
-
-        HashMap<String, Object> payload = payloadBuilder();
-
-        payload.put("organizationId",organization.getUuid());
-        payload.put("applicationId",fakeappId);
-
-        //schedule the import job
-        final Import importEntity = importService.schedule(  null, payload );
-        final UUID importUUID = importEntity.getUuid();
-
-        //create and initialize jobData returned in JobExecution.
-        JobData jobData = jobImportDataCreator(payload, importUUID, s3Import);
-
-        JobExecution jobExecution = mock( JobExecution.class );
-        when( jobExecution.getJobData() ).thenReturn( jobData );
-
-        //import the application files for the organization and wait for the import to finish
-        importService.doImport(jobExecution);
-        assertEquals(Import.State.FAILED, importService.getState(importUUID));
-    }
-
-    /**
-     * Test of the doImport collection method with a fake application ID
-     */
-    @Test
-    @Ignore("Pending merge of export-feature branch. Import application not supported")
-    public void testDoImportCollectionWithFakeApplicationID() throws Exception {
-
-        UUID fakeappId = UUID.fromString( "AAAAAAAA-FFFF-FFFF-FFFF-AAAAAAAAAAAA" );
-        // import
-        S3Import s3Import = new S3ImportImpl();
-        ImportService importService = setup.getImportService();
-
-        HashMap<String, Object> payload = payloadBuilder();
-
-        payload.put("organizationId",organization.getUuid());
-        payload.put("applicationId",fakeappId);
-        payload.put("collectionName","custom-test");
-
-        //schedule the import job
-        final Import importEntity = importService.schedule( null,  payload );
-        final UUID importUUID = importEntity.getUuid();
-
-        //create and initialize jobData returned in JobExecution.
-        JobData jobData = jobImportDataCreator(payload, importUUID, s3Import);
-
-        JobExecution jobExecution = mock( JobExecution.class );
-        when( jobExecution.getJobData() ).thenReturn( jobData );
-
-        // import all collection files for the organization-application and wait for the import to finish
-        importService.doImport(jobExecution);
-        assertEquals(importService.getState(importUUID),Import.State.FAILED);
-    }
-
-    /*Creates fake payload for testing purposes.*/
-    public HashMap<String, Object> payloadBuilder() {
-
-        HashMap<String, Object> payload = new HashMap<String, Object>();
-        Map<String, Object> properties = new HashMap<String, Object>();
-        Map<String, Object> storage_info = new HashMap<String, Object>();
-        storage_info.put( "bucket_location", bucketName );
-
-        storage_info.put( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR,
-            System.getProperty( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR ) );
-        storage_info.put( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR,
-            System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR ) );
-
-        properties.put( "storage_provider", "s3" );
-        properties.put( "storage_info", storage_info );
-
-        payload.put( "path","test-organization/test-app" );
-        payload.put( "properties", properties );
-        return payload;
-    }
-
-    //creates fake import job
-    public JobData jobImportDataCreator(HashMap<String, Object> payload,UUID importUUID,S3Import s3Import) {
-        JobData jobData = new JobData();
-
-        jobData.setProperty( "jobName", "importJob" );
-        jobData.setProperty( "importInfo", payload );
-        jobData.setProperty( "importId", importUUID );
-        jobData.setProperty( "s3Import", s3Import );
-
-        return jobData;
-    }
-
-    //creates fake export job
-    public JobData jobExportDataCreator(HashMap<String, Object> payload,UUID exportUUID,S3Export s3Export) {
-        JobData jobData = new JobData();
-
-        jobData.setProperty( "jobName", "exportJob" );
-        jobData.setProperty( "exportInfo", payload );
-        jobData.setProperty( "exportId", exportUUID );
-        jobData.setProperty( "s3Export", s3Export);
-
-        return jobData;
-    }
-
-    // delete the s3 bucket which was created for testing
-    public void deleteBucket() {
-
-        String accessId = System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR );
-        String secretKey = System.getProperty( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR );
-
-        Properties overrides = new Properties();
-        overrides.setProperty( "s3" + ".identity", accessId );
-        overrides.setProperty( "s3" + ".credential", secretKey );
-
-        Blob bo = null;
-        BlobStore blobStore = null;
-        final Iterable<? extends Module> MODULES = ImmutableSet
-                .of(new JavaUrlHttpCommandExecutorServiceModule(), new Log4JLoggingModule(),
-                        new NettyPayloadModule());
-
-        BlobStoreContext context =
-                ContextBuilder.newBuilder("s3").credentials( accessId, secretKey ).modules( MODULES )
-                        .overrides( overrides ).buildView( BlobStoreContext.class );
-
-        blobStore = context.getBlobStore();
-        blobStore.deleteContainer( bucketName );
-    }
-
-    //creates 2nd application for testing import from an organization having multiple applications
-    void createAndSetup2ndApplication() throws Exception {
-
-        UUID appId = setup.getMgmtSvc().createApplication( organization.getUuid(), "test-app-2" ).getId();
-        EntityManager emTest = setup.getEmf().getEntityManager(appId);
-
-        Map<String, Object> userProperties = null;
-
-        Entity entityTest[] = new Entity[5];
-
-        //creates entities and set permissions
-        for ( int i = 0; i < 5; i++ ) {
-            userProperties = new LinkedHashMap<String, Object>();
-            userProperties.put( "name", "user" + i );
-            userProperties.put( "email", "user" + i + "@test.com" );
-            entityTest[i] = emTest.create( "testobject", userProperties );
-        }
-
-        //create connection
-        emTest.createConnection( new SimpleEntityRef( "testobject",  entityTest[0].getUuid()),
-            "related",
-            new SimpleEntityRef( "testobject",  entityTest[1].getUuid()));
-
-        emTest.createConnection( new SimpleEntityRef( "testobject",  entityTest[1].getUuid()),
-            "related",
-            new SimpleEntityRef( "testobject",  entityTest[0].getUuid()));
-    }
-}

http://git-wip-us.apache.org/repos/asf/usergrid/blob/cb1356c2/stack/services/src/test/java/org/apache/usergrid/management/importer/MockS3ImportImpl.java
----------------------------------------------------------------------
diff --git a/stack/services/src/test/java/org/apache/usergrid/management/importer/MockS3ImportImpl.java
b/stack/services/src/test/java/org/apache/usergrid/management/importer/MockS3ImportImpl.java
deleted file mode 100644
index 16096b6..0000000
--- a/stack/services/src/test/java/org/apache/usergrid/management/importer/MockS3ImportImpl.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.usergrid.management.importer;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-
-public class MockS3ImportImpl implements S3Import{
-    private static final Logger logger = LoggerFactory.getLogger(MockS3ImportImpl.class);
-
-    @Override
-    public List<String> getBucketFileNames(String bucketName, String endsWith, String accessId, String secretKey) {
-        return new ArrayList<>();
-    }
-
-    @Override
-    public File copyFileFromBucket(String blobFileName, String bucketName, String accessId, String secretKey) throws IOException {
-        return File.createTempFile("test","tmp");
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/usergrid/blob/cb1356c2/stack/services/src/test/java/org/apache/usergrid/management/importer/S3Upload.java
----------------------------------------------------------------------
diff --git a/stack/services/src/test/java/org/apache/usergrid/management/importer/S3Upload.java
b/stack/services/src/test/java/org/apache/usergrid/management/importer/S3Upload.java
deleted file mode 100644
index ca24407..0000000
--- a/stack/services/src/test/java/org/apache/usergrid/management/importer/S3Upload.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.usergrid.management.importer;
-
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStream;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import com.google.common.hash.Hashing;
-import com.google.common.io.Files;
-import org.jclouds.ContextBuilder;
-import org.jclouds.blobstore.BlobStore;
-import org.jclouds.blobstore.BlobStoreContext;
-import org.jclouds.blobstore.domain.Blob;
-import org.jclouds.blobstore.domain.BlobBuilder;
-import org.jclouds.blobstore.options.PutOptions;
-import org.jclouds.http.config.JavaUrlHttpCommandExecutorServiceModule;
-import org.jclouds.logging.log4j.config.Log4JLoggingModule;
-import org.jclouds.netty.config.NettyPayloadModule;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.ImmutableSet;
-import com.google.inject.Module;
-
-
-/**
- * Helper class made to upload resource files to s3 so they can be imported later
- */
-public class S3Upload {
-
-    private static final Logger logger = LoggerFactory.getLogger( S3Upload.class );
-
-    public void copyToS3( String accessKey, String secretKey, String bucketName, List<String> filenames )
-        throws FileNotFoundException {
-
-        Properties overrides = new Properties();
-        overrides.setProperty( "s3" + ".identity", accessKey );
-        overrides.setProperty( "s3" + ".credential", secretKey );
-
-        final Iterable<? extends Module> MODULES = ImmutableSet.of(
-            new JavaUrlHttpCommandExecutorServiceModule(),
-            new Log4JLoggingModule(),
-            new NettyPayloadModule() );
-
-        BlobStoreContext context = ContextBuilder.newBuilder( "s3" )
-            .credentials( accessKey, secretKey )
-            .modules( MODULES )
-            .overrides( overrides )
-            .buildView( BlobStoreContext.class );
-
-        // Create Container (the bucket in s3)
-        try {
-            BlobStore blobStore = context.getBlobStore();
-            if ( blobStore.createContainerInLocation( null, bucketName ) ) {
-                logger.info( "Created bucket " + bucketName );
-            }
-        }
-        catch ( Exception ex ) {
-            throw new RuntimeException( "Could not create bucket",ex );
-        }
-
-        Iterator<String> fileNameIterator = filenames.iterator();
-
-        while (fileNameIterator.hasNext()) {
-
-            String filename = fileNameIterator.next();
-            File uploadFile = new File( filename );
-
-            try {
-                BlobStore blobStore = context.getBlobStore();
-
-                // need this for JClouds 1.7.x:
-//                BlobBuilder.PayloadBlobBuilder blobBuilder =  blobStore.blobBuilder( filename )
-//                    .payload( uploadFile ).calculateMD5().contentType( "application/json" );
-
-                // needed for JClouds 1.8.x:
-                BlobBuilder blobBuilder = blobStore.blobBuilder( filename )
-                    .payload( uploadFile )
-                    .contentMD5(Files.hash( uploadFile, Hashing.md5()))
-                    .contentType( "application/json" );
-
-                Blob blob = blobBuilder.build();
-
-                final String uploadedFile = blobStore.putBlob( bucketName, blob, PutOptions.Builder.multipart() );
-
-                //wait for the file to finish uploading
-                Thread.sleep(4000);
-
-                logger.info( "Uploaded file name={} etag={}", filename, uploadedFile );
-            }
-            catch ( Exception e ) {
-                logger.error( "Error uploading to blob store. File: " + filename, e );
-            }
-        }
-    }
-}

